Commit
Merge pull request IntelRealSense#2158 from doronhi/ros1-metadata
publish metadata
doronhi authored Nov 15, 2021
2 parents d747ab0 + f45d816 commit ebe6e3f
Showing 6 changed files with 134 additions and 30 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -123,8 +123,10 @@ The published topics differ according to the device and parameters.
After running the above command with D435i attached, the following list of topics will be available (This is a partial list. For full one type `rostopic list`):
- /camera/color/camera_info
- /camera/color/image_raw
- /camera/color/metadata
- /camera/depth/camera_info
- /camera/depth/image_rect_raw
- /camera/depth/metadata
- /camera/extrinsics/depth_to_color
- /camera/extrinsics/depth_to_infra1
- /camera/extrinsics/depth_to_infra2
@@ -133,8 +135,10 @@ After running the above command with D435i attached, the following list of topic
- /camera/infra2/camera_info
- /camera/infra2/image_rect_raw
- /camera/gyro/imu_info
- /camera/gyro/metadata
- /camera/gyro/sample
- /camera/accel/imu_info
- /camera/accel/metadata
- /camera/accel/sample
- /diagnostics

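Each of the new `metadata` topics publishes `realsense2_camera/Metadata` messages. A quick sanity check after launching the node is, for example, `rostopic echo /camera/depth/metadata`, which prints the message header together with the raw JSON payload defined later in this commit.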
1 change: 1 addition & 0 deletions realsense2_camera/CMakeLists.txt
@@ -69,6 +69,7 @@ add_message_files(
FILES
IMUInfo.msg
Extrinsics.msg
Metadata.msg
)

add_service_files(
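A hedged side note: a downstream catkin package that wants to consume the new message from its own nodes typically needs `realsense2_camera` added to its `find_package(catkin COMPONENTS ...)` list and to its `package.xml` dependencies, so that the generated `realsense2_camera/Metadata.h` header and the `realsense2_camera.msg` Python module are available at build and run time.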
4 changes: 4 additions & 0 deletions realsense2_camera/include/base_realsense_node.h
@@ -5,6 +5,7 @@

#include "../include/realsense_node_factory.h"
#include <realsense2_camera/DeviceInfo.h>
#include "realsense2_camera/Metadata.h"
#include <ddynamic_reconfigure/ddynamic_reconfigure.h>

#include <diagnostic_updater/diagnostic_updater.h>
@@ -221,10 +222,12 @@ namespace realsense2_camera
std::map<stream_index_pair, cv::Mat>& images,
const std::map<stream_index_pair, ros::Publisher>& info_publishers,
const std::map<stream_index_pair, ImagePublisherWithFrequencyDiagnostics>& image_publishers,
const bool is_publishMetadata,
std::map<stream_index_pair, int>& seq,
std::map<stream_index_pair, sensor_msgs::CameraInfo>& camera_info,
const std::map<rs2_stream, std::string>& encoding,
bool copy_data_from_frame = true);
void publishMetadata(rs2::frame f, const std::string& frame_id);
bool getEnabledProfile(const stream_index_pair& stream_index, rs2::stream_profile& profile);

void publishAlignedDepthToOthers(rs2::frameset frames, const ros::Time& t);
@@ -288,6 +291,7 @@ namespace realsense2_camera
std::shared_ptr<SyncedImuPublisher> _synced_imu_publisher;
std::map<rs2_stream, int> _image_format;
std::map<stream_index_pair, ros::Publisher> _info_publisher;
std::map<stream_index_pair, std::shared_ptr<ros::Publisher>> _metadata_publishers;
std::map<stream_index_pair, cv::Mat> _image;
std::map<rs2_stream, std::string> _encoding;

2 changes: 2 additions & 0 deletions realsense2_camera/msg/Metadata.msg
@@ -0,0 +1,2 @@
std_msgs/Header header
string json_data
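The `json_data` field is a self-describing JSON object. As assembled in `base_realsense_node.cpp` below, it always contains `frame_number`, `clock_domain` and `frame_timestamp`, plus one entry for every `rs2_frame_metadata_value` the frame reports as supported (with `RS2_FRAME_METADATA_FRAME_TIMESTAMP` exposed under the name `hw_timestamp`), so any JSON library can parse it; the script in the next file shows a minimal subscriber.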
34 changes: 34 additions & 0 deletions realsense2_camera/scripts/echo_metadada.py
@@ -0,0 +1,34 @@
#!/usr/bin/env python
import os
import sys
import rospy
from realsense2_camera.msg import Metadata
import json

def metadata_cb(msg):
    aa = json.loads(msg.json_data)
    os.system('clear')
    print('\n'.join(['%10s:%-10s' % (key, str(value)) for key, value in aa.items()]))

def main():
    if len(sys.argv) < 2 or '--help' in sys.argv or '/?' in sys.argv:
        print ('USAGE:')
        print('echo_metadata.py <topic>')
        print('Demo for listening on given metadata topic.')
        print('App subscribes on given topic')
        print('App then prints metadata from messages')
        print('')
        print('Example: echo_metadata.py /camera/depth/metadata')
        print('')
        exit(-1)

    topic = sys.argv[1]

    rospy.init_node('metadata_tester', anonymous=True)

    depth_sub = rospy.Subscriber(topic, Metadata, metadata_cb)

    rospy.spin()

if __name__ == '__main__':
    main()
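With the camera node running, the demo can be invoked directly, e.g. `python echo_metadada.py /camera/depth/metadata` (the file in the repository is named `echo_metadada.py`, although its usage text refers to `echo_metadata.py`); it clears the terminal and reprints the latest metadata key/value pairs each time a message arrives.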
119 changes: 89 additions & 30 deletions realsense2_camera/src/base_realsense_node.cpp
@@ -968,18 +968,20 @@ void BaseRealSenseNode::setupPublishers()
{
if (_enable[stream])
{
std::stringstream image_raw, camera_info;
std::stringstream image_raw, camera_info, topic_metadata;
bool rectified_image = false;
if (stream == DEPTH || stream == CONFIDENCE || stream == INFRA1 || stream == INFRA2)
rectified_image = true;

std::string stream_name(STREAM_NAME(stream));
image_raw << stream_name << "/image_" << ((rectified_image)?"rect_":"") << "raw";
camera_info << stream_name << "/camera_info";
topic_metadata << stream_name << "/metadata";

std::shared_ptr<FrequencyDiagnostics> frequency_diagnostics(new FrequencyDiagnostics(_fps[stream], stream_name, _serial_no));
_image_publishers[stream] = {image_transport.advertise(image_raw.str(), 1), frequency_diagnostics};
_info_publisher[stream] = _node_handle.advertise<sensor_msgs::CameraInfo>(camera_info.str(), 1);
_metadata_publishers[stream] = std::make_shared<ros::Publisher>(_node_handle.advertise<realsense2_camera::Metadata>(topic_metadata.str(), 1));

if (_align_depth && stream == COLOR)
{
@@ -1012,16 +1014,19 @@
if (_enable[GYRO])
{
_imu_publishers[GYRO] = _node_handle.advertise<sensor_msgs::Imu>("gyro/sample", 100);
_metadata_publishers[GYRO] = std::make_shared<ros::Publisher>(_node_handle.advertise<realsense2_camera::Metadata>("gyro/metadata", 1));
}

if (_enable[ACCEL])
{
_imu_publishers[ACCEL] = _node_handle.advertise<sensor_msgs::Imu>("accel/sample", 100);
_metadata_publishers[ACCEL] = std::make_shared<ros::Publisher>(_node_handle.advertise<realsense2_camera::Metadata>("accel/metadata", 1));
}
}
if (_enable[POSE])
{
_imu_publishers[POSE] = _node_handle.advertise<nav_msgs::Odometry>("odom/sample", 100);
_metadata_publishers[POSE] = std::make_shared<ros::Publisher>(_node_handle.advertise<realsense2_camera::Metadata>("odom/metadata", 1));
}
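All of the metadata topics are advertised with a publisher queue size of 1, so if a connection falls behind, older metadata messages are dropped in favor of the most recent one.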


@@ -1483,7 +1488,7 @@ void BaseRealSenseNode::imu_callback_sync(rs2::frame frame, imu_sync_method sync
}
}
m_mutex.unlock();
};
}

void BaseRealSenseNode::imu_callback(rs2::frame frame)
{
@@ -1501,10 +1506,9 @@ void BaseRealSenseNode::imu_callback(rs2::frame frame)
rs2_timestamp_domain_to_string(frame.get_frame_timestamp_domain()));

auto stream_index = (stream == GYRO.first)?GYRO:ACCEL;
ros::Time t(frameSystemTimeSec(frame));
if (0 != _imu_publishers[stream_index].getNumSubscribers())
{
auto imu_msg = sensor_msgs::Imu();
ImuMessage_AddDefaultValues(imu_msg);
imu_msg.header.frame_id = _optical_frame_id[stream_index];
@@ -1528,6 +1532,7 @@ void BaseRealSenseNode::imu_callback(rs2::frame frame)
_imu_publishers[stream_index].publish(imu_msg);
ROS_DEBUG("Publish %s stream", rs2_stream_to_string(frame.get_profile().stream_type()));
}
publishMetadata(frame, _optical_frame_id[stream_index]);
}

void BaseRealSenseNode::pose_callback(rs2::frame frame)
@@ -1620,6 +1625,7 @@ void BaseRealSenseNode::pose_callback(rs2::frame frame)
_imu_publishers[stream_index].publish(odom_msg);
ROS_DEBUG("Publish %s stream", rs2_stream_to_string(frame.get_profile().stream_type()));
}
publishMetadata(frame, _frame_id[POSE]);
}

void BaseRealSenseNode::frame_callback(rs2::frame frame)
@@ -1704,7 +1710,9 @@ void BaseRealSenseNode::frame_callback(rs2::frame frame)
publishFrame(f, t, COLOR,
_depth_aligned_image,
_depth_aligned_info_publisher,
_depth_aligned_image_publishers, _depth_aligned_seq,
_depth_aligned_image_publishers,
false,
_depth_aligned_seq,
_depth_aligned_camera_info,
_depth_aligned_encoding);
continue;
@@ -1714,7 +1722,9 @@ void BaseRealSenseNode::frame_callback(rs2::frame frame)
sip,
_image,
_info_publisher,
_image_publishers, _seq,
_image_publishers,
true,
_seq,
_camera_info,
_encoding);
}
@@ -1725,12 +1735,14 @@ void BaseRealSenseNode::frame_callback(rs2::frame frame)
frame_to_send = _colorizer->process(original_depth_frame);
else
frame_to_send = original_depth_frame;

publishFrame(frame_to_send, t,
DEPTH,
_image,
_info_publisher,
_image_publishers, _seq,
_image_publishers,
true,
_seq,
_camera_info,
_encoding);
}
@@ -1755,7 +1767,9 @@ void BaseRealSenseNode::frame_callback(rs2::frame frame)
sip,
_image,
_info_publisher,
_image_publishers, _seq,
_image_publishers,
true,
_seq,
_camera_info,
_encoding);
}
@@ -1765,7 +1779,7 @@ void BaseRealSenseNode::frame_callback(rs2::frame frame)
ROS_ERROR_STREAM("An error has occurred during frame callback: " << ex.what());
}
_synced_imu_publisher->Resume();
}; // frame_callback
} // frame_callback

void BaseRealSenseNode::multiple_message_callback(rs2::frame frame, imu_sync_method sync_method)
{
@@ -1834,7 +1848,7 @@ void BaseRealSenseNode::setupStreams()
for (const std::pair<stream_index_pair, std::vector<rs2::stream_profile>>& profile : _enabled_profiles)
{
std::string module_name = _sensors[profile.first].get_info(RS2_CAMERA_INFO_NAME);
ROS_INFO_STREAM("insert " << rs2_stream_to_string(profile.second.begin()->stream_type())
ROS_DEBUG_STREAM("insert " << rs2_stream_to_string(profile.second.begin()->stream_type())
<< " to " << module_name);
profiles[module_name].insert(profiles[module_name].begin(),
profile.second.begin(),
@@ -2048,7 +2062,6 @@ void BaseRealSenseNode::SetBaseStream()
void BaseRealSenseNode::publishStaticTransforms()
{
rs2::stream_profile base_profile = getAProfile(_base_stream);

// Publish static transforms
if (_publish_tf)
{
@@ -2165,8 +2178,8 @@ void BaseRealSenseNode::publishPointCloud(rs2::points pc, const ros::Time& t, co
if (use_texture)
{
std::set<rs2_format> available_formats{ rs2_format::RS2_FORMAT_RGB8, rs2_format::RS2_FORMAT_Y8 };
texture_frame_itr = std::find_if(frameset.begin(), frameset.end(), [&texture_source_id, &available_formats] (rs2::frame f)
{return (rs2_stream(f.get_profile().stream_type()) == texture_source_id) &&
(available_formats.find(f.get_profile().format()) != available_formats.end()); });
if (texture_frame_itr == frameset.end())
@@ -2188,7 +2201,7 @@ void BaseRealSenseNode::publishPointCloud(rs2::points pc, const ros::Time& t, co
rs2_intrinsics depth_intrin = pc.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

sensor_msgs::PointCloud2Modifier modifier(_msg_pointcloud);
modifier.setPointCloud2FieldsByString(1, "xyz");
modifier.resize(pc.size());
if (_ordered_pc)
{
@@ -2274,7 +2287,7 @@ void BaseRealSenseNode::publishPointCloud(rs2::points pc, const ros::Time& t, co
*iter_x = vertex->x;
*iter_y = vertex->y;
*iter_z = vertex->z;

++iter_x; ++iter_y; ++iter_z;
++valid_count;
}
@@ -2312,8 +2325,8 @@ rs2::stream_profile BaseRealSenseNode::getAProfile(const stream_index_pair& stre
{
const std::vector<rs2::stream_profile> profiles = _sensors[stream].get_stream_profiles();
return *(std::find_if(profiles.begin(), profiles.end(),
[&stream] (const rs2::stream_profile& profile) {
return ((profile.stream_type() == stream.first) && (profile.stream_index() == stream.second));
}));
}

@@ -2352,6 +2365,7 @@ void BaseRealSenseNode::publishFrame(rs2::frame f, const ros::Time& t,
std::map<stream_index_pair, cv::Mat>& images,
const std::map<stream_index_pair, ros::Publisher>& info_publishers,
const std::map<stream_index_pair, ImagePublisherWithFrequencyDiagnostics>& image_publishers,
const bool is_publishMetadata,
std::map<stream_index_pair, int>& seq,
std::map<stream_index_pair, sensor_msgs::CameraInfo>& camera_info,
const std::map<rs2_stream, std::string>& encoding,
@@ -2411,24 +2425,69 @@ void BaseRealSenseNode::publishFrame(rs2::frame f, const ros::Time& t,
img->header.seq = seq[stream];

image_publisher.first.publish(img);
// ROS_INFO_STREAM("fid: " << cam_info.header.seq << ", time: " << std::setprecision (20) << t.toSec());
ROS_DEBUG("%s stream published", rs2_stream_to_string(f.get_profile().stream_type()));
}
if (is_publishMetadata)
{
auto& cam_info = camera_info.at(stream);
publishMetadata(f, cam_info.header.frame_id);
}
}
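The new `is_publishMetadata` flag lets each call site decide whether `publishFrame()` should also emit metadata: the regular color, depth and infrared paths pass `true`, while the depth-aligned path passes `false`, presumably so the aligned frame does not publish a second copy of the depth frame's metadata on the color stream's topic. The metadata call sits outside the image-subscriber check, and `publishMetadata()` does its own subscriber count check, so the JSON string is only built when someone is actually listening on the metadata topic.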

void BaseRealSenseNode::publishMetadata(rs2::frame f, const std::string& frame_id)
{
    stream_index_pair stream = {f.get_profile().stream_type(), f.get_profile().stream_index()};
    if (_metadata_publishers.find(stream) != _metadata_publishers.end())
    {
        ros::Time t(frameSystemTimeSec(f));
        auto& md_publisher = _metadata_publishers.at(stream);
        if (0 != md_publisher->getNumSubscribers())
        {
            realsense2_camera::Metadata msg;
            msg.header.frame_id = frame_id;
            msg.header.stamp = t;
            std::stringstream json_data;
            const char* separator = ",";
            json_data << "{";
            // Add additional fields:
            json_data << "\"" << "frame_number" << "\":" << f.get_frame_number();
            json_data << separator << "\"" << "clock_domain" << "\":" << "\"" << create_graph_resource_name(rs2_timestamp_domain_to_string(f.get_frame_timestamp_domain())) << "\"";
            json_data << separator << "\"" << "frame_timestamp" << "\":" << std::fixed << f.get_timestamp();

            for (auto i = 0; i < RS2_FRAME_METADATA_COUNT; i++)
            {
                if (f.supports_frame_metadata((rs2_frame_metadata_value)i))
                {
                    rs2_frame_metadata_value mparam = (rs2_frame_metadata_value)i;
                    std::string name = create_graph_resource_name(rs2_frame_metadata_to_string(mparam));
                    if (RS2_FRAME_METADATA_FRAME_TIMESTAMP == i)
                    {
                        name = "hw_timestamp";
                    }
                    rs2_metadata_type val = f.get_frame_metadata(mparam);
                    json_data << separator << "\"" << name << "\":" << val;
                }
            }
            json_data << "}";
            msg.json_data = json_data.str();
            md_publisher->publish(msg);
        }
    }
}

bool BaseRealSenseNode::getEnabledProfile(const stream_index_pair& stream_index, rs2::stream_profile& profile)
{
    // Assuming that all D400 SKUs have depth sensor
    auto profiles = _enabled_profiles[stream_index];
    auto it = std::find_if(profiles.begin(), profiles.end(),
                           [&](const rs2::stream_profile& profile)
                           { return (profile.stream_type() == stream_index.first); });
    if (it == profiles.end())
        return false;

    profile = *it;
    return true;
}
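Because every metadata message carries a stamped `std_msgs/Header`, a downstream node can pair images with their metadata by (approximate) timestamp. Below is a minimal, hypothetical sketch that is not part of this PR: the topic names assume the default `camera` namespace from the README, and the node name, queue size and 10 ms slop are arbitrary choices.

```python
#!/usr/bin/env python
# Hypothetical example (not part of this PR): pair each depth image with its
# metadata message by header stamp. Topic names assume the default "camera"
# namespace shown in the README.
import json

import message_filters
import rospy
from realsense2_camera.msg import Metadata
from sensor_msgs.msg import Image


def pair_cb(image, metadata):
    # json_data is the JSON string built by BaseRealSenseNode::publishMetadata.
    fields = json.loads(metadata.json_data)
    rospy.loginfo('depth image %dx%d -> frame_number %s, clock_domain %s',
                  image.width, image.height,
                  fields.get('frame_number'), fields.get('clock_domain'))


if __name__ == '__main__':
    rospy.init_node('depth_metadata_pairing', anonymous=True)
    image_sub = message_filters.Subscriber('/camera/depth/image_rect_raw', Image)
    md_sub = message_filters.Subscriber('/camera/depth/metadata', Metadata)
    # Image and metadata stamps come from separate calls and may differ by a
    # small amount, so use an approximate-time policy instead of an exact match.
    sync = message_filters.ApproximateTimeSynchronizer([image_sub, md_sub],
                                                       queue_size=10, slop=0.01)
    sync.registerCallback(pair_cb)
    rospy.spin()
```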

void BaseRealSenseNode::startMonitoring()
{
