From 3b92132213911ff173842c50f040219504bf1ef8 Mon Sep 17 00:00:00 2001 From: Matt Alvarado Date: Wed, 15 Jun 2022 09:12:16 -0400 Subject: [PATCH 01/13] Update README with more detailed documentation links (#45) --- README.md | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 2bc4517..b0a90c9 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,19 @@ # multisense_ros -Wrappers, drivers, tools and additional API's for using MultiSense SL, S7, S7S, S21, M, ST21, and BCAM with ROS. +Wrappers, drivers, tools and additional APIs for using MultiSense S27, S30, KS21, SL, S7, S7S, S21, M, ST21, and BCAM with ROS. ### Installation, Documentation and Tutorials -[Installing multisense_ros](http://docs.carnegierobotics.com/SL/install.html#install) + +See the following for documentation on installing and using multisense_ros for specific camera types. + +- [S27](https://docs.carnegierobotics.com/S27/index.html) +- [S30](https://docs.carnegierobotics.com/S30/index.html) +- [KS21](https://docs.carnegierobotics.com/KS21/index.html) +- [S21](https://docs.carnegierobotics.com/S21/index.html) +- [S7/S7S](https://docs.carnegierobotics.com/S7/index.html) +- [SL](https://docs.carnegierobotics.com/SL/index.html) + +Please see the following link for detailed documentation on the [LibMultiSense API](https://docs.carnegierobotics.com/libmultisense/index.html) wrapped by the ROS driver. ### Develop and Contribute From 8ccbb7c896afeb6d871237dbb0c05e4be1fa6477 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Fri, 17 Jun 2022 13:17:57 +1200 Subject: [PATCH 02/13] Add apriltag messages and publisher --- multisense_lib/sensor_api | 2 +- multisense_ros/CMakeLists.txt | 5 +- multisense_ros/cfg/multisense.cfg | 3 + .../include/multisense_ros/camera.h | 4 + multisense_ros/msg/AprilTagCornerPoint.msg | 3 + multisense_ros/msg/AprilTagDetection.msg | 28 ++++ multisense_ros/msg/AprilTagDetectionArray.msg | 16 +++ multisense_ros/src/camera.cpp | 133 +++++++++++++++++- multisense_ros/src/reconfigure.cpp | 2 + 9 files changed, 190 insertions(+), 6 deletions(-) create mode 100644 multisense_ros/msg/AprilTagCornerPoint.msg create mode 100644 multisense_ros/msg/AprilTagDetection.msg create mode 100644 multisense_ros/msg/AprilTagDetectionArray.msg diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index 23b987b..b54bb08 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit 23b987b1a6ecbdef7e82f7877e1c9b87f6604456 +Subproject commit b54bb0811698d5133c8eee90b53934650c56715f diff --git a/multisense_ros/CMakeLists.txt b/multisense_ros/CMakeLists.txt index 37075c3..f156687 100644 --- a/multisense_ros/CMakeLists.txt +++ b/multisense_ros/CMakeLists.txt @@ -50,7 +50,10 @@ add_message_files(DIRECTORY msg RawLidarCal.msg Histogram.msg DeviceStatus.msg - StampedPps.msg) + StampedPps.msg + AprilTagCornerPoint.msg + AprilTagDetection.msg + AprilTagDetectionArray.msg) generate_messages(DEPENDENCIES sensor_msgs) diff --git a/multisense_ros/cfg/multisense.cfg b/multisense_ros/cfg/multisense.cfg index f91fca4..00ddbe5 100755 --- a/multisense_ros/cfg/multisense.cfg +++ b/multisense_ros/cfg/multisense.cfg @@ -230,6 +230,9 @@ for feature_name, feature in SupportedFeatures.__members__.items(): if feature == SupportedFeatures.GROUND_SURFACE: gen.add("ground_surface_profile", bool_t, 0, "GroundSurfaceProfile", False); + + gen.add("apriltag_profile", bool_t, 0, "AprilTagProfile", False); + #endif #endif diff --git 
a/multisense_ros/include/multisense_ros/camera.h b/multisense_ros/include/multisense_ros/camera.h index 9f10c22..fdf8fee 100644 --- a/multisense_ros/include/multisense_ros/camera.h +++ b/multisense_ros/include/multisense_ros/camera.h @@ -77,6 +77,7 @@ class Camera { void colorizeCallback(const crl::multisense::image::Header& header); void groundSurfaceCallback(const crl::multisense::image::Header& header); void groundSurfaceSplineCallback(const crl::multisense::ground_surface::Header& header); + void apriltagCallback(const crl::multisense::apriltag::Header& header); void borderClipChanged(const BorderClip &borderClipType, double borderClipValue); @@ -140,6 +141,7 @@ class Camera { static constexpr char GROUND_SURFACE_IMAGE_TOPIC[] = "image"; static constexpr char GROUND_SURFACE_INFO_TOPIC[] = "camera_info"; static constexpr char GROUND_SURFACE_POINT_SPLINE_TOPIC[] = "spline"; + static constexpr char APRILTAG_DETECTION_TOPIC[] = "apriltag_detections"; // @@ -169,6 +171,7 @@ class Camera { ros::NodeHandle aux_nh_; ros::NodeHandle calibration_nh_; ros::NodeHandle ground_surface_nh_; + ros::NodeHandle apriltag_nh_; // // Image transports @@ -222,6 +225,7 @@ class Camera { ros::Publisher aux_rect_cam_info_pub_; ros::Publisher aux_rgb_rect_cam_info_pub_; ros::Publisher ground_surface_info_pub_; + ros::Publisher apriltag_detection_pub_; ros::Publisher luma_point_cloud_pub_; ros::Publisher color_point_cloud_pub_; diff --git a/multisense_ros/msg/AprilTagCornerPoint.msg b/multisense_ros/msg/AprilTagCornerPoint.msg new file mode 100644 index 0000000..4e44857 --- /dev/null +++ b/multisense_ros/msg/AprilTagCornerPoint.msg @@ -0,0 +1,3 @@ +# Apriltag corner point in pixels +float64 x +float64 y diff --git a/multisense_ros/msg/AprilTagDetection.msg b/multisense_ros/msg/AprilTagDetection.msg new file mode 100644 index 0000000..f318e13 --- /dev/null +++ b/multisense_ros/msg/AprilTagDetection.msg @@ -0,0 +1,28 @@ +# The family of the tag +uint64 family + +# The ID of the tag +uint32 id + +# The hamming distance between the detection and the real code +uint8 hamming + +# The quality/confidence of the binary decoding process +# average difference between intensity of data bit vs decision thresh. +# Higher is better. 
Only useful for small tags +float32 decisionMargin + +# The 3x3 homography matrix describing the projection from an +# "ideal" tag (with corners at (-1,1), (1,1), (1,-1), and (-1, +# -1)) to pixels in the image +std_msgs/Float64MultiArray tagToImageHomography + +# The 2D position of the origin of the tag in the image +float64[2] center + +# The 4 tag corner pixel locations in the order: +# point 0: [-squareLength / 2, squareLength / 2] +# point 1: [ squareLength / 2, squareLength / 2] +# point 2: [ squareLength / 2, -squareLength / 2] +# point 3: [-squareLength / 2, -squareLength / 2] +AprilTagCornerPoint[4] corners diff --git a/multisense_ros/msg/AprilTagDetectionArray.msg b/multisense_ros/msg/AprilTagDetectionArray.msg new file mode 100644 index 0000000..9e37249 --- /dev/null +++ b/multisense_ros/msg/AprilTagDetectionArray.msg @@ -0,0 +1,16 @@ +std_msgs/Header header + +# The frame ID of the image that the apriltags were detected on +int64 frameId + +# The frame timestamp (nanoseconds) of the image that the apriltags were detected on +int64 timestamp + +# Success flag to indicate whether for the apriltag algorithm ran successfully +uint8 success + +# The number of apriltags that were detected +uint32 numDetections + +# The collection of apriltag detections +AprilTagDetection[] detections diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index 22db581..70979cd 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -50,6 +50,10 @@ #include #include #include +#include +#include +#include + using namespace crl::multisense; @@ -114,6 +118,8 @@ void groundSurfaceCB(const image::Header& header, void* userDataP) { reinterpret_cast(userDataP)->groundSurfaceCallback(header); } void groundSurfaceSplineCB(const ground_surface::Header& header, void* userDataP) { reinterpret_cast(userDataP)->groundSurfaceSplineCallback(header); } +void apriltagCB(const apriltag::Header& header, void* userDataP) +{ reinterpret_cast(userDataP)->apriltagCallback(header); } bool isValidReprojectedPoint(const Eigen::Vector3f& pt, double squared_max_range) @@ -265,6 +271,7 @@ constexpr char Camera::COST_CAMERA_INFO_TOPIC[]; constexpr char Camera::GROUND_SURFACE_IMAGE_TOPIC[]; constexpr char Camera::GROUND_SURFACE_INFO_TOPIC[]; constexpr char Camera::GROUND_SURFACE_POINT_SPLINE_TOPIC[]; +constexpr char Camera::APRILTAG_DETECTION_TOPIC[]; Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_(driver), @@ -274,6 +281,7 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : aux_nh_(device_nh_, AUX), calibration_nh_(device_nh_, CALIBRATION), ground_surface_nh_(device_nh_, GROUND_SURFACE), + apriltag_nh_(device_nh_, LEFT), left_mono_transport_(left_nh_), right_mono_transport_(right_nh_), left_rect_transport_(left_nh_), @@ -346,14 +354,18 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : // // Create spline-based ground surface publishers for S27/S30 cameras - const bool can_support_ground_surface = + const bool can_support_on_board_algorithms = system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision; - if (can_support_ground_surface) { + if (can_support_on_board_algorithms) { ground_surface_cam_pub_ = ground_surface_transport_.advertise(GROUND_SURFACE_IMAGE_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_Ground_Surface_Class_Image), std::bind(&Camera::disconnectStream, this, Source_Ground_Surface_Class_Image)); + 
+ apriltag_detection_pub_ = apriltag_nh_.advertise(APRILTAG_DETECTION_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_AprilTag_Detections), + std::bind(&Camera::disconnectStream, this, Source_AprilTag_Detections)); } ground_surface_info_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_INFO_TOPIC, 1, true); @@ -672,7 +684,7 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : static_tf_broadcaster_.sendTransform(stamped_transforms); // - // Update our internal image config and publish intitial camera info + // Update our internal image config and publish initial camera info updateConfig(image_config); @@ -717,9 +729,16 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : // // Add ground surface callbacks for S27/S30 cameras - if (can_support_ground_surface) { + if (can_support_on_board_algorithms) { driver_->addIsolatedCallback(groundSurfaceCB, Source_Ground_Surface_Class_Image, this); driver_->addIsolatedCallback(groundSurfaceSplineCB, this); + driver_->addIsolatedCallback(apriltagCB, this); + + // // TODO(drobinson): Link to reconfigure + // Status status = driver_->startStreams(Source_AprilTag_Detections); + // if (Status_Ok != status) { + // std::cerr << "Failed to start streams: " << Channel::statusString(status) << std::endl; + // } } // @@ -761,6 +780,7 @@ Camera::~Camera() driver_->removeIsolatedCallback(groundSurfaceCB); driver_->removeIsolatedCallback(groundSurfaceSplineCB); + driver_->removeIsolatedCallback(apriltagCB); } void Camera::borderClipChanged(const BorderClip &borderClipType, double borderClipValue) @@ -2080,6 +2100,111 @@ void Camera::groundSurfaceSplineCallback(const ground_surface::Header& header) ground_surface_spline_pub_.publish(ground_surface_utilities::eigenToPointcloud(eigen_pcl, frame_id_origin_)); } +void Camera::apriltagCallback(const apriltag::Header& header) +{ + (void)header; + + std::cout << "----------------------------" << std::endl; + std::cout << "frameId: " << header.frameId << std::endl; + std::cout << "timestamp: " << header.timestamp << std::endl; + std::cout << "success: " << (header.success ? 
"true" : "false") << std::endl; + std::cout << "numDetections: " << header.numDetections << std::endl; + + for (auto &d : header.detections) + { + std::cout << "tag ID: " << d.id << ", family ID: " << d.family << std::endl; + std::cout << "\thamming: " << (int)d.hamming << std::endl; + std::cout << "\tdecisionMargin: " << d.decisionMargin << std::endl; + + std::cout << "\ttagToImageHomography: " << std::endl; + for (unsigned int i = 0; i < 3; i++) + std::cout << "\t\t" << d.tagToImageHomography[i][0] << " " + << d.tagToImageHomography[i][1] << " " + << d.tagToImageHomography[i][2] << std::endl; + + std::cout << "\tcenter: " << std::endl; + for (unsigned int i = 0; i < 2; i++) + std::cout << "\t\t" << d.center[i] << std::endl; + + std::cout << "\tcorners: " << std::endl; + for (unsigned int i = 0; i < 4; i++) + std::cout << "\t\t" << d.corners[i][0] << " " << d.corners[i][1] << std::endl; + } + + // Publish ROS message + AprilTagDetectionArray tag_detection_array; + + // TODO(drobinson): Simplify this + const time_t TICKS_PER_US = 1000ll; + int64_t microseconds = header.timestamp / TICKS_PER_US; + int64_t seconds = (microseconds / 1000000ll); + microseconds -= (seconds * 1000000ll); + const ros::Time ros_time((uint32_t)seconds, 1000 * (uint32_t)microseconds); + + tag_detection_array.header.seq = static_cast(header.frameId); + tag_detection_array.header.stamp = ros_time; + tag_detection_array.header.frame_id = frame_id_rectified_left_; + tag_detection_array.frameId = header.frameId; + tag_detection_array.timestamp = header.timestamp; + tag_detection_array.success = header.success; + tag_detection_array.numDetections = header.numDetections; + + for (auto &d : header.detections) + { + AprilTagDetection tag_detection; + + tag_detection.family = d.family; + tag_detection.id = d.id; + tag_detection.hamming = d.hamming; + tag_detection.decisionMargin = d.decisionMargin; + + // Send tagToImageHomography array + { + std_msgs::MultiArrayDimension height; + height.label = "height"; + height.size = 3; + height.stride = 9; + tag_detection.tagToImageHomography.layout.dim.push_back(height); + + std_msgs::MultiArrayDimension width; + width.label = "width"; + width.size = 3; + width.stride = 3; + tag_detection.tagToImageHomography.layout.dim.push_back(width); + + tag_detection.tagToImageHomography.layout.data_offset = 0; + + for (unsigned int i = 0; i < 3; i++) + { + for (unsigned int j = 0; j < 3; j++) + { + tag_detection.tagToImageHomography.data.push_back(d.tagToImageHomography[i][j]); + } + } + } + + // Send tag centers array + for (unsigned int i = 0; i < 2; i++) + { + tag_detection.center[i] = d.center[i]; + } + + // Send detection corners array + for (unsigned int i = 0; i < 4; i++) + { + AprilTagCornerPoint point; + point.x = d.corners[i][0]; + point.y = d.corners[i][1]; + + tag_detection.corners[i] = point; + } + + tag_detection_array.detections.push_back(tag_detection); + } + + apriltag_detection_pub_.publish(tag_detection_array); +} + void Camera::updateConfig(const image::Config& config) { stereo_calibration_manager_->updateConfig(config); diff --git a/multisense_ros/src/reconfigure.cpp b/multisense_ros/src/reconfigure.cpp index 6841d0e..577e6ef 100644 --- a/multisense_ros/src/reconfigure.cpp +++ b/multisense_ros/src/reconfigure.cpp @@ -743,6 +743,7 @@ template void Reconfigure::configureStereoProfile(crl::multisense::imag profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); profile |= (dyn.show_roi_profile ? 
crl::multisense::Show_ROIs : profile); profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); + profile |= (dyn.apriltag_profile ? crl::multisense::AprilTag : profile); cfg.setCameraProfile(profile); } @@ -755,6 +756,7 @@ template void Reconfigure::configureStereoProfileWithGroundSurface(crl: profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); profile |= (dyn.ground_surface_profile ? crl::multisense::Ground_Surface : profile); + profile |= (dyn.apriltag_profile ? crl::multisense::AprilTag : profile); cfg.setCameraProfile(profile); } From 5b456b44718f5b7d82b21bc629410abef1546d87 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Fri, 17 Jun 2022 13:18:37 +1200 Subject: [PATCH 03/13] Send row-major homography matrix --- multisense_lib/sensor_api | 2 +- multisense_ros/msg/AprilTagDetection.msg | 8 ++--- multisense_ros/src/camera.cpp | 37 +++++++++--------------- 3 files changed, 18 insertions(+), 29 deletions(-) diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index b54bb08..e085002 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit b54bb0811698d5133c8eee90b53934650c56715f +Subproject commit e08500220d2b1a20b8b90266bf0b622469827e84 diff --git a/multisense_ros/msg/AprilTagDetection.msg b/multisense_ros/msg/AprilTagDetection.msg index f318e13..882d26b 100644 --- a/multisense_ros/msg/AprilTagDetection.msg +++ b/multisense_ros/msg/AprilTagDetection.msg @@ -12,10 +12,10 @@ uint8 hamming # Higher is better. Only useful for small tags float32 decisionMargin -# The 3x3 homography matrix describing the projection from an -# "ideal" tag (with corners at (-1,1), (1,1), (1,-1), and (-1, -# -1)) to pixels in the image -std_msgs/Float64MultiArray tagToImageHomography +# The 3x3 homography matrix (in row-major order) describing the projection +# from an "ideal" tag (with corners at (-1,1), (1,1), (1,-1), and (-1,-1)) +# to pixels in the image +float64[9] tagToImageHomography # The 2D position of the origin of the tag in the image float64[2] center diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index 70979cd..e8d10d7 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -2117,10 +2117,15 @@ void Camera::apriltagCallback(const apriltag::Header& header) std::cout << "\tdecisionMargin: " << d.decisionMargin << std::endl; std::cout << "\ttagToImageHomography: " << std::endl; - for (unsigned int i = 0; i < 3; i++) - std::cout << "\t\t" << d.tagToImageHomography[i][0] << " " - << d.tagToImageHomography[i][1] << " " - << d.tagToImageHomography[i][2] << std::endl; + for (unsigned int col = 0; col < 3; col++) + { + std::cout << "\t\t"; + for (unsigned int row = 0; row < 3; row++) + { + std::cout << d.tagToImageHomography[row][col] << " "; + } + std::cout << std::endl; + } std::cout << "\tcenter: " << std::endl; for (unsigned int i = 0; i < 2; i++) @@ -2134,7 +2139,7 @@ void Camera::apriltagCallback(const apriltag::Header& header) // Publish ROS message AprilTagDetectionArray tag_detection_array; - // TODO(drobinson): Simplify this + // TODO(drobinson): Simplify this conversion const time_t TICKS_PER_US = 1000ll; int64_t microseconds = header.timestamp / TICKS_PER_US; int64_t seconds = (microseconds / 1000000ll); @@ -2159,27 +2164,11 @@ void Camera::apriltagCallback(const apriltag::Header& header) tag_detection.decisionMargin = d.decisionMargin; // Send 
tagToImageHomography array + for (unsigned int col = 0; col < 3; col++) { - std_msgs::MultiArrayDimension height; - height.label = "height"; - height.size = 3; - height.stride = 9; - tag_detection.tagToImageHomography.layout.dim.push_back(height); - - std_msgs::MultiArrayDimension width; - width.label = "width"; - width.size = 3; - width.stride = 3; - tag_detection.tagToImageHomography.layout.dim.push_back(width); - - tag_detection.tagToImageHomography.layout.data_offset = 0; - - for (unsigned int i = 0; i < 3; i++) + for (unsigned int row = 0; row < 3; row++) { - for (unsigned int j = 0; j < 3; j++) - { - tag_detection.tagToImageHomography.data.push_back(d.tagToImageHomography[i][j]); - } + tag_detection.tagToImageHomography[row + col * 3] = d.tagToImageHomography[row][col]; } } From 3f75d77a2665e2ac5d4e1b9011ddf7a4a3fcfe26 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Fri, 17 Jun 2022 13:21:50 +1200 Subject: [PATCH 04/13] Remove extraneous prints --- multisense_ros/src/camera.cpp | 49 +++-------------------------------- 1 file changed, 3 insertions(+), 46 deletions(-) diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index e8d10d7..9609c2e 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -733,12 +733,6 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_->addIsolatedCallback(groundSurfaceCB, Source_Ground_Surface_Class_Image, this); driver_->addIsolatedCallback(groundSurfaceSplineCB, this); driver_->addIsolatedCallback(apriltagCB, this); - - // // TODO(drobinson): Link to reconfigure - // Status status = driver_->startStreams(Source_AprilTag_Detections); - // if (Status_Ok != status) { - // std::cerr << "Failed to start streams: " << Channel::statusString(status) << std::endl; - // } } // @@ -2102,49 +2096,15 @@ void Camera::groundSurfaceSplineCallback(const ground_surface::Header& header) void Camera::apriltagCallback(const apriltag::Header& header) { - (void)header; - - std::cout << "----------------------------" << std::endl; - std::cout << "frameId: " << header.frameId << std::endl; - std::cout << "timestamp: " << header.timestamp << std::endl; - std::cout << "success: " << (header.success ? 
"true" : "false") << std::endl; - std::cout << "numDetections: " << header.numDetections << std::endl; - - for (auto &d : header.detections) - { - std::cout << "tag ID: " << d.id << ", family ID: " << d.family << std::endl; - std::cout << "\thamming: " << (int)d.hamming << std::endl; - std::cout << "\tdecisionMargin: " << d.decisionMargin << std::endl; - - std::cout << "\ttagToImageHomography: " << std::endl; - for (unsigned int col = 0; col < 3; col++) - { - std::cout << "\t\t"; - for (unsigned int row = 0; row < 3; row++) - { - std::cout << d.tagToImageHomography[row][col] << " "; - } - std::cout << std::endl; - } - - std::cout << "\tcenter: " << std::endl; - for (unsigned int i = 0; i < 2; i++) - std::cout << "\t\t" << d.center[i] << std::endl; - - std::cout << "\tcorners: " << std::endl; - for (unsigned int i = 0; i < 4; i++) - std::cout << "\t\t" << d.corners[i][0] << " " << d.corners[i][1] << std::endl; - } - - // Publish ROS message + // Publish Apriltags as ROS message AprilTagDetectionArray tag_detection_array; // TODO(drobinson): Simplify this conversion - const time_t TICKS_PER_US = 1000ll; + const time_t TICKS_PER_US = 1000ll; int64_t microseconds = header.timestamp / TICKS_PER_US; int64_t seconds = (microseconds / 1000000ll); microseconds -= (seconds * 1000000ll); - const ros::Time ros_time((uint32_t)seconds, 1000 * (uint32_t)microseconds); + const ros::Time ros_time(static_cast(seconds), 1000 * static_cast(microseconds)); tag_detection_array.header.seq = static_cast(header.frameId); tag_detection_array.header.stamp = ros_time; @@ -2163,7 +2123,6 @@ void Camera::apriltagCallback(const apriltag::Header& header) tag_detection.hamming = d.hamming; tag_detection.decisionMargin = d.decisionMargin; - // Send tagToImageHomography array for (unsigned int col = 0; col < 3; col++) { for (unsigned int row = 0; row < 3; row++) @@ -2172,13 +2131,11 @@ void Camera::apriltagCallback(const apriltag::Header& header) } } - // Send tag centers array for (unsigned int i = 0; i < 2; i++) { tag_detection.center[i] = d.center[i]; } - // Send detection corners array for (unsigned int i = 0; i < 4; i++) { AprilTagCornerPoint point; From 738e667ab2a10e7b41ead5718de645801785c140 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Sat, 18 Jun 2022 08:55:02 +1200 Subject: [PATCH 05/13] Simplfy ros timestamp conversion --- multisense_ros/src/camera.cpp | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index 9609c2e..4901580 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -2096,16 +2096,14 @@ void Camera::groundSurfaceSplineCallback(const ground_surface::Header& header) void Camera::apriltagCallback(const apriltag::Header& header) { + // Convert camera timestamp to ROS timestamp + int64_t seconds = std::floor(header.timestamp / 1e9); + int64_t nanoseconds = header.timestamp - seconds * 1e9; + const ros::Time ros_time(static_cast(seconds), static_cast(nanoseconds)); + // Publish Apriltags as ROS message AprilTagDetectionArray tag_detection_array; - // TODO(drobinson): Simplify this conversion - const time_t TICKS_PER_US = 1000ll; - int64_t microseconds = header.timestamp / TICKS_PER_US; - int64_t seconds = (microseconds / 1000000ll); - microseconds -= (seconds * 1000000ll); - const ros::Time ros_time(static_cast(seconds), 1000 * static_cast(microseconds)); - tag_detection_array.header.seq = static_cast(header.frameId); tag_detection_array.header.stamp = ros_time; 
tag_detection_array.header.frame_id = frame_id_rectified_left_; From 5e9430699c86556ac93b634829f47427b27cd584 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Wed, 22 Jun 2022 04:14:09 +1200 Subject: [PATCH 06/13] Add support for apriltag params --- multisense_lib/sensor_api | 2 +- multisense_ros/cfg/multisense.cfg | 23 ++++- .../include/multisense_ros/reconfigure.h | 11 ++- multisense_ros/src/reconfigure.cpp | 87 ++++++++++++++++++- 4 files changed, 117 insertions(+), 6 deletions(-) diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index e085002..5a3090b 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit e08500220d2b1a20b8b90266bf0b622469827e84 +Subproject commit 5a3090b74757becb97aa87aba96afc6b82f0aec1 diff --git a/multisense_ros/cfg/multisense.cfg b/multisense_ros/cfg/multisense.cfg index 00ddbe5..7e57c20 100755 --- a/multisense_ros/cfg/multisense.cfg +++ b/multisense_ros/cfg/multisense.cfg @@ -25,6 +25,7 @@ class SensorConfig(object): class SupportedFeatures(Enum): NONE = 0 GROUND_SURFACE = 1 + APRILTAG = 2 sensorConfigList = [] sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000", sgm=False, motor=True, imu=False, lighting=True , aux=False, ar0234=False, features=False)) @@ -231,7 +232,8 @@ for feature_name, feature in SupportedFeatures.__members__.items(): if feature == SupportedFeatures.GROUND_SURFACE: gen.add("ground_surface_profile", bool_t, 0, "GroundSurfaceProfile", False); - gen.add("apriltag_profile", bool_t, 0, "AprilTagProfile", False); + if feature == SupportedFeatures.APRILTAG: + gen.add("apriltag_profile", bool_t, 0, "AprilTagProfile", False); #endif #endif @@ -334,6 +336,25 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("ground_surface_spline_draw_resolution", double_t, 0, "Resolution to draw resulting B-Spline model with in RVIZ", 0.1, 0.01, 1.0) #endif + if feature == SupportedFeatures.APRILTAG: + apriltag_family_enum = gen.enum([gen.const("tag25h9", str_t, "tag25h9", "tag25h9"), + gen.const("tag16h5", str_t, "tag16h5", "tag16h5"), + gen.const("tag36h10", str_t, "tag36h10", "tag36h10"), + gen.const("tag36h11", str_t, "tag36h11", "tag36h11"), + gen.const("tagCircle21h7", str_t, "tagCircle21h7", "tagCircle21h7"), + gen.const("tagCircle49h12", str_t, "tagCircle49h12", "tagCircle49h12"), + gen.const("tagCustom48h12", str_t, "tagCustom48h12", "tagCustom48h12"), + gen.const("tagStandard41h12", str_t, "tagStandard41h12", "tagStandard41h12"), + gen.const("tagStandard52h13", str_t, "tagStandard52h13", "tagStandard52h13")], + "Available apriltag families") + gen.add("apriltag_family", str_t, 0, "Apriltag family to detect", "tagStandard52h13", edit_method=apriltag_family_enum) + gen.add("apriltag_quad_detection_blur_sigma", double_t, 0, "Sigma of the Gaussian blur applied to the image before quad_detection, specified in full resolution pixels. Kernel size = 4*sigma, rounded up to the next odd number. (<0.5 results in no blur)", 0.75, 0.0, 5.0) + gen.add("apriltag_quad_detection_decimate", double_t, 0, "Amount to decimate image before detecting quads. Values < 1.0. (0.5 decimation reduces height/width by half)", 1.0, 0.0, 1.0) + gen.add("apriltag_min_border_width", int_t, 0, "Minimum border width that can be considered a valid tag. Used to filter contours based on area and perimeter. 
Units are in undecimated image pixels.", 5, 0, 10) + gen.add("apriltag_refine_quad_edges", bool_t, 0, "Whether or not to refine the edges before attempting to decode", False) + gen.add("apriltag_decode_sharpening", double_t, 0, "How much to sharpen the quad before sampling the pixels. 0 to turn off, large values are stronger sharpening", 0.25, 0.0, 1.0) + #endif + # Generate package name based on camera cfg and supported features if feature == SupportedFeatures.NONE: package_name = cfg.name diff --git a/multisense_ros/include/multisense_ros/reconfigure.h b/multisense_ros/include/multisense_ros/reconfigure.h index 413d314..562eaac 100644 --- a/multisense_ros/include/multisense_ros/reconfigure.h +++ b/multisense_ros/include/multisense_ros/reconfigure.h @@ -53,8 +53,11 @@ #include #include #include +#include #include #include +#include + #include #include @@ -94,6 +97,8 @@ class Reconfigure { void callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level); void callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level); + void callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& dyn, uint32_t level); + void callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& dyn, uint32_t level); // // Internal helper functions @@ -114,8 +119,10 @@ class Reconfigure { template void configurePtp(const T& dyn); template void configureStereoProfile(crl::multisense::image::Config &cfg, const T& dyn); template void configureStereoProfileWithGroundSurface(crl::multisense::image::Config &cfg, const T& dyn); + template void configureStereoProfileWithApriltag(crl::multisense::image::Config &cfg, const T& dyn); template void configureExtrinsics(const T& dyn); template void configureGroundSurfaceParams(const T& dyn); + template void configureApriltagParams(const T& dyn); // // CRL sensor API @@ -156,6 +163,8 @@ class Reconfigure { std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_; std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_ground_surface_; std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_ground_surface_; + std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_apriltag_; + std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_apriltag_; // // Cached values for supported sub-systems (these may be unavailable on @@ -192,7 +201,7 @@ class Reconfigure { std::function extrinsics_callback_; // - // Extrinsics callback to modify pointcloud + // Callback to modify spline drawing parameters std::function spline_draw_parameters_callback_; }; diff --git a/multisense_ros/src/reconfigure.cpp b/multisense_ros/src/reconfigure.cpp index 577e6ef..79bdf79 100644 --- a/multisense_ros/src/reconfigure.cpp +++ b/multisense_ros/src/reconfigure.cpp @@ -87,11 +87,16 @@ Reconfigure::Reconfigure(Channel* driver, return; } + // Check which algorithms may be supported const bool ground_surface_supported = std::any_of(deviceModes.begin(), deviceModes.end(), [](const auto &mode) { return (mode.supportedDataSources & Source_Ground_Surface_Spline_Data) && (mode.supportedDataSources & Source_Ground_Surface_Class_Image); }); + const bool apriltag_supported = + std::any_of(deviceModes.begin(), deviceModes.end(), [](const auto &mode) { + return mode.supportedDataSources & Source_AprilTag_Detections; }); + if (deviceInfo.lightingType != 0 || system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == 
deviceInfo.hardwareRevision) { lighting_supported_ = true; @@ -169,6 +174,12 @@ Reconfigure::Reconfigure(Channel* driver, new dynamic_reconfigure::Server(device_nh_)); server_s27_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_ground_surface, this, std::placeholders::_1, std::placeholders::_2)); + } else if (apriltag_supported) { + server_s27_AR0234_apriltag_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_s27_AR0234_apriltag_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_apriltag, this, + std::placeholders::_1, std::placeholders::_2)); } else { server_s27_AR0234_ = std::shared_ptr< dynamic_reconfigure::Server > ( @@ -184,6 +195,12 @@ Reconfigure::Reconfigure(Channel* driver, new dynamic_reconfigure::Server(device_nh_)); server_ks21_sgm_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_ks21_AR0234_ground_surface, this, std::placeholders::_1, std::placeholders::_2)); + } else if (apriltag_supported) { + server_ks21_sgm_AR0234_apriltag_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_ks21_sgm_AR0234_apriltag_->setCallback(std::bind(&Reconfigure::callback_ks21_AR0234_apriltag, this, + std::placeholders::_1, std::placeholders::_2)); } else { server_ks21_sgm_AR0234_ = std::shared_ptr< dynamic_reconfigure::Server > ( @@ -280,6 +297,12 @@ Reconfigure::Reconfigure(Channel* driver, new dynamic_reconfigure::Server(device_nh_)); server_s27_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_ground_surface, this, std::placeholders::_1, std::placeholders::_2)); + } else if (apriltag_supported) { + server_s27_AR0234_apriltag_ = + std::shared_ptr< dynamic_reconfigure::Server > ( + new dynamic_reconfigure::Server(device_nh_)); + server_s27_AR0234_apriltag_->setCallback(std::bind(&Reconfigure::callback_s27_AR0234_apriltag, this, + std::placeholders::_1, std::placeholders::_2)); } else { server_s27_AR0234_ = std::shared_ptr< dynamic_reconfigure::Server > ( @@ -743,7 +766,6 @@ template void Reconfigure::configureStereoProfile(crl::multisense::imag profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); - profile |= (dyn.apriltag_profile ? crl::multisense::AprilTag : profile); cfg.setCameraProfile(profile); } @@ -756,6 +778,17 @@ template void Reconfigure::configureStereoProfileWithGroundSurface(crl: profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); profile |= (dyn.ground_surface_profile ? crl::multisense::Ground_Surface : profile); + + cfg.setCameraProfile(profile); +} + +template void Reconfigure::configureStereoProfileWithApriltag(crl::multisense::image::Config &cfg, const T& dyn) +{ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; + profile |= (dyn.detail_disparity_profile ? crl::multisense::Detail_Disparity : profile); + profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); + profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); + profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); profile |= (dyn.apriltag_profile ? 
crl::multisense::AprilTag : profile); cfg.setCameraProfile(profile); @@ -791,7 +824,7 @@ template void Reconfigure::configureExtrinsics(const T& dyn) template void Reconfigure::configureGroundSurfaceParams(const T& dyn) { // - // Update calibration on camera via libmultisense + // Update ground surface params on camera via libmultisense crl::multisense::system::GroundSurfaceParams params; params.ground_surface_number_of_levels_x = dyn.ground_surface_spline_resolution_x; @@ -838,6 +871,28 @@ template void Reconfigure::configureGroundSurfaceParams(const T& dyn) ); } +template void Reconfigure::configureApriltagParams(const T& dyn) +{ + // + // Update apriltag params on camera via libmultisense + crl::multisense::system::ApriltagParams params; + + params.family = dyn.apriltag_family; + params.quad_detection_blur_sigma = dyn.apriltag_quad_detection_blur_sigma; + params.quad_detection_decimate = dyn.apriltag_quad_detection_decimate; + params.min_border_width = dyn.apriltag_min_border_width; + params.refine_quad_edges = dyn.apriltag_refine_quad_edges; + params.decode_sharpening = dyn.apriltag_decode_sharpening; + + // Update ground surface params on camera + Status status = driver_->setApriltagParams(params); + if (Status_Ok != status) { + ROS_ERROR("Reconfigure: failed to set apriltag params: %s", + Channel::statusString(status)); + return; + } +} + #define GET_CONFIG() \ image::Config cfg; \ Status status = driver_->getImageConfig(cfg); \ @@ -971,7 +1026,32 @@ template void Reconfigure::configureGroundSurfaceParams(const T& dyn) configureGroundSurfaceParams(dyn); \ } while(0) +#define S27_SGM_APRILTAG() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + configureStereoProfileWithApriltag(cfg, dyn); \ + configureAutoWhiteBalance(cfg, dyn); \ + configureAuxCamera(cfg, dyn); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + configureApriltagParams(dyn); \ + } while(0) +#define KS21_SGM_APRILTAG() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + configureStereoProfileWithApriltag(cfg, dyn); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + configureApriltagParams(dyn); \ + } while(0) // // The dynamic reconfigure callbacks (MultiSense S* & feature variations) @@ -985,9 +1065,10 @@ void Reconfigure::callback_mono_cmv2000 (multisense_ros::mono_cmv2000Config void Reconfigure::callback_mono_cmv4000 (multisense_ros::mono_cmv4000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } void Reconfigure::callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& dyn, uint32_t level) { (void) level; S27_SGM(); } void Reconfigure::callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& dyn, uint32_t level) { (void) level; KS21_SGM(); } - void Reconfigure::callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; S27_SGM_GROUND_SURFACE(); } void Reconfigure::callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; KS21_SGM_GROUND_SURFACE(); } +void Reconfigure::callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; S27_SGM_APRILTAG(); } +void Reconfigure::callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; 
KS21_SGM_APRILTAG(); } // // BCAM (Sony IMX104) From b2835154758cf7988d22bfff287abaa727a54a91 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Wed, 22 Jun 2022 12:05:55 +1200 Subject: [PATCH 07/13] Handle hamming distance param --- multisense_lib/sensor_api | 2 +- multisense_ros/cfg/multisense.cfg | 1 + multisense_ros/msg/AprilTagDetection.msg | 2 +- multisense_ros/src/reconfigure.cpp | 1 + 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index 5a3090b..bd2cd64 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit 5a3090b74757becb97aa87aba96afc6b82f0aec1 +Subproject commit bd2cd648b36d5c3ead9dde22d4fbd8aabc3045ce diff --git a/multisense_ros/cfg/multisense.cfg b/multisense_ros/cfg/multisense.cfg index 7e57c20..b8df00f 100755 --- a/multisense_ros/cfg/multisense.cfg +++ b/multisense_ros/cfg/multisense.cfg @@ -348,6 +348,7 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.const("tagStandard52h13", str_t, "tagStandard52h13", "tagStandard52h13")], "Available apriltag families") gen.add("apriltag_family", str_t, 0, "Apriltag family to detect", "tagStandard52h13", edit_method=apriltag_family_enum) + gen.add("apriltag_max_hamming", int_t, 0, "The maximum hamming correction that will be applied while detecting codes (value of 1 causes the detector to pause for a while)", 0, 0, 1) gen.add("apriltag_quad_detection_blur_sigma", double_t, 0, "Sigma of the Gaussian blur applied to the image before quad_detection, specified in full resolution pixels. Kernel size = 4*sigma, rounded up to the next odd number. (<0.5 results in no blur)", 0.75, 0.0, 5.0) gen.add("apriltag_quad_detection_decimate", double_t, 0, "Amount to decimate image before detecting quads. Values < 1.0. (0.5 decimation reduces height/width by half)", 1.0, 0.0, 1.0) gen.add("apriltag_min_border_width", int_t, 0, "Minimum border width that can be considered a valid tag. Used to filter contours based on area and perimeter. 
Units are in undecimated image pixels.", 5, 0, 10) diff --git a/multisense_ros/msg/AprilTagDetection.msg b/multisense_ros/msg/AprilTagDetection.msg index 882d26b..98bf884 100644 --- a/multisense_ros/msg/AprilTagDetection.msg +++ b/multisense_ros/msg/AprilTagDetection.msg @@ -1,5 +1,5 @@ # The family of the tag -uint64 family +string family # The ID of the tag uint32 id diff --git a/multisense_ros/src/reconfigure.cpp b/multisense_ros/src/reconfigure.cpp index 79bdf79..bababf8 100644 --- a/multisense_ros/src/reconfigure.cpp +++ b/multisense_ros/src/reconfigure.cpp @@ -878,6 +878,7 @@ template void Reconfigure::configureApriltagParams(const T& dyn) crl::multisense::system::ApriltagParams params; params.family = dyn.apriltag_family; + params.max_hamming = dyn.apriltag_max_hamming; params.quad_detection_blur_sigma = dyn.apriltag_quad_detection_blur_sigma; params.quad_detection_decimate = dyn.apriltag_quad_detection_decimate; params.min_border_width = dyn.apriltag_min_border_width; params.refine_quad_edges = dyn.apriltag_refine_quad_edges; params.decode_sharpening = dyn.apriltag_decode_sharpening; From 054fb8ef4ee4559dd47b9f9465d3bef42f0e4744 Mon Sep 17 00:00:00 2001 From: Matt Alvarado Date: Wed, 22 Jun 2022 09:39:23 -0400 Subject: [PATCH 08/13] Properly handle the full translation between left/aux rectified images (#42) * Properly handle the full translation between left/aux rectified images * Fix aux projection logic * Add helper functions to prevent locking --- multisense_ros/cfg/multisense.cfg | 2 +- .../include/multisense_ros/camera_utilities.h | 35 +++++++- multisense_ros/src/camera.cpp | 72 ++++++++++------- multisense_ros/src/camera_utilities.cpp | 79 +++++++++++++++++-- 4 files changed, 148 insertions(+), 40 deletions(-) diff --git a/multisense_ros/cfg/multisense.cfg b/multisense_ros/cfg/multisense.cfg index f91fca4..516d3ad 100755 --- a/multisense_ros/cfg/multisense.cfg +++ b/multisense_ros/cfg/multisense.cfg @@ -136,7 +136,7 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.const("1920x1200_256_disparities", str_t, "1920x1200x256", "1920x1200x256") ], "Available resolution settings") gen.add("resolution", str_t, 0, "sensor resolution", "960x600x256", edit_method=res_enum) - gen.add("fps", double_t, 0, "FPS", 10.0, 1.0, 64.0) + gen.add("fps", double_t, 0, "FPS", 10.0, 1.0, 30.0) roi_width = 1920 roi_height = 1200 auto_exposure_threshold = 0.95 diff --git a/multisense_ros/include/multisense_ros/camera_utilities.h b/multisense_ros/include/multisense_ros/camera_utilities.h index 77e6696..f1ce55e 100644 --- a/multisense_ros/include/multisense_ros/camera_utilities.h +++ b/multisense_ros/include/multisense_ros/camera_utilities.h @@ -166,14 +166,43 @@ class StereoCalibrationManger Eigen::Matrix4d Q() const; /// - /// @brief Translation which transforms points from the right camera frame into the left camera frame + /// @brief Translation which transforms points from the rectified left camera frame into the rectified right camera + /// frame /// double T() const; /// - /// @brief Translation which transforms points from the aux camera frame into the left camera frame + /// @brief Translation which transforms points from the rectified left camera frame into the rectified aux + /// camera frame /// - double aux_T() const; + Eigen::Vector3d aux_T() const; + + /// + /// @brief Reproject disparity values into 3D + /// + Eigen::Vector3f reproject(size_t u, size_t v, double d) const; + + /// + /// @brief Reproject disparity values into 3D + /// + Eigen::Vector3f reproject(size_t u, + size_t v, + double d, + const sensor_msgs::CameraInfo &left_camera_info, + const sensor_msgs::CameraInfo 
&right_camera_info) const; + + /// + /// @brief Project points corresponding to disparity measurements in the left rectified image frame into the + /// aux rectified image plane + /// + Eigen::Vector2f rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point) const; + + /// + /// @brief Project points corresponding to disparity measurements in the left rectified image frame into the + /// aux rectified image plane + /// + Eigen::Vector2f rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point, + const sensor_msgs::CameraInfo &aux_camera_info) const; /// /// @brief Get the current main stereo pair operating resolution. This resolution applies for both the mono diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index 22db581..65527de 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -116,7 +116,7 @@ void groundSurfaceSplineCB(const ground_surface::Header& header, void* userDataP { reinterpret_cast(userDataP)->groundSurfaceSplineCallback(header); } -bool isValidReprojectedPoint(const Eigen::Vector3f& pt, double squared_max_range) +bool isValidReprojectedPoint(const Eigen::Vector3f& pt, float squared_max_range) { return pt[2] > 0.0f && std::isfinite(pt[2]) && pt.squaredNorm() < squared_max_range; } @@ -195,26 +195,38 @@ bool clipPoint(const BorderClip& borderClipType, return true; } -cv::Vec3b u_interpolate_color(double u, double v, const cv::Mat &image) +cv::Vec3b interpolate_color(const Eigen::Vector2f &pixel, const cv::Mat &image) { - const double width = image.cols; + const float width = image.cols; + const float height = image.rows; + + const float &u = pixel(0); + const float &v = pixel(1); // - // Interpolate in just the u dimension + // Implement a basic bilinear interpolation scheme + // https://en.wikipedia.org/wiki/Bilinear_interpolation // - const size_t min_u = static_cast(std::min(std::max(std::floor(u), 0.), width - 1.)); - const size_t max_u = static_cast(std::min(std::max(std::ceil(u), 0.), width - 1.)); + const size_t min_u = static_cast(std::min(std::max(std::floor(u), 0.f), width - 1.f)); + const size_t max_u = static_cast(std::min(std::max(std::floor(u) + 1, 0.f), width - 1.f)); + const size_t min_v = static_cast(std::min(std::max(std::floor(v), 0.f), height - 1.f)); + const size_t max_v = static_cast(std::min(std::max(std::floor(v) + 1, 0.f), height - 1.f)); - const cv::Vec3d element0 = image.at(width * v + min_u); - const cv::Vec3d element1 = image.at(width * v + max_u); + const cv::Vec3d element00 = image.at(width * min_v + min_u); + const cv::Vec3d element01 = image.at(width * min_v + max_u); + const cv::Vec3d element10 = image.at(width * max_v + min_u); + const cv::Vec3d element11 = image.at(width * max_v + max_u); const size_t delta_u = max_u - min_u; + const size_t delta_v = max_v - min_v; const double u_ratio = delta_u == 0 ? 1. : (static_cast(max_u) - u) / static_cast(delta_u); + const double v_ratio = delta_v == 0 ? 1. : (static_cast(max_v) - v) / static_cast(delta_v); - const cv::Vec3b result = (element0 * u_ratio + element1 * (1. - u_ratio)); + const cv::Vec3b f_xy0 = element00 * u_ratio + element01 * (1. - u_ratio); + const cv::Vec3b f_xy1 = element10 * u_ratio + element11 * (1. - u_ratio); - return result; + return (f_xy0 * v_ratio + f_xy1 * (1. 
- v_ratio)); } } // anonymous @@ -655,8 +667,10 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : if (has_aux_extrinsics) { + const Eigen::Vector3d aux_T = stereo_calibration_manager_->aux_T(); + tf2::Transform rectified_aux_T_rectified_left{tf2::Matrix3x3::getIdentity(), - tf2::Vector3{stereo_calibration_manager_->aux_T(), 0., 0.}}; + tf2::Vector3{aux_T(0), aux_T(1), aux_T(2)}}; stamped_transforms[3].header.stamp = ros::Time::now(); stamped_transforms[3].header.frame_id = frame_id_rectified_left_; stamped_transforms[3].child_frame_id = frame_id_rectified_aux_; @@ -1542,8 +1556,6 @@ void Camera::pointCloudCallback(const image::Header& header) color_organized_point_cloud_.row_step = header.width * color_organized_point_cloud_.point_step; } - const Eigen::Matrix4d Q = stereo_calibration_manager_->Q(); - const Eigen::Vector3f invalid_point(std::numeric_limits::quiet_NaN(), std::numeric_limits::quiet_NaN(), std::numeric_limits::quiet_NaN()); @@ -1598,15 +1610,17 @@ void Camera::pointCloudCallback(const image::Header& header) rectified_color = std::move(rect_rgb_image); } + const auto left_camera_info = stereo_calibration_manager_->leftCameraInfo(frame_id_left_, t); + const auto right_camera_info = stereo_calibration_manager_->rightCameraInfo(frame_id_right_, t); + const auto aux_camera_info = stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, t, + rectified_color.cols, rectified_color.rows); + // // Iterate through our disparity image once populating our pointcloud structures if we plan to publish them uint32_t packed_color = 0; - const double squared_max_range = pointcloud_max_range_ * pointcloud_max_range_; - - const double aux_T = has_aux_camera_ ? stereo_calibration_manager_->aux_T() : stereo_calibration_manager_->T(); - const double T = stereo_calibration_manager_->T(); + const float squared_max_range = pointcloud_max_range_ * pointcloud_max_range_; size_t valid_points = 0; for (size_t y = 0 ; y < header.height ; ++y) @@ -1615,17 +1629,17 @@ void Camera::pointCloudCallback(const image::Header& header) { const size_t index = y * header.width + x; - double disparity = 0.0f; + float disparity = 0.0f; switch(header.bitsPerPixel) { case 16: { - disparity = static_cast(reinterpret_cast(header.imageDataP)[index]) / 16.0f; + disparity = static_cast(reinterpret_cast(header.imageDataP)[index]) / 16.0f; break; } case 32: { - disparity = static_cast(reinterpret_cast(header.imageDataP)[index]); + disparity = static_cast(reinterpret_cast(header.imageDataP)[index]); break; } default: @@ -1635,6 +1649,9 @@ void Camera::pointCloudCallback(const image::Header& header) } } + const Eigen::Vector3f point = stereo_calibration_manager_->reproject(x, y, disparity, + left_camera_info, right_camera_info); + // // We have a valid rectified color image meaning we plan to publish color pointcloud topics. Assemble the // color pixel here since it may be needed in our organized pointclouds @@ -1643,10 +1660,11 @@ void Camera::pointCloudCallback(const image::Header& header) { packed_color = 0; - const double color_d = has_aux_camera_ ? (disparity * aux_T) / T : 0.0; + const auto color_pixel = (has_aux_camera_ && disparity != 0.0) ? + interpolate_color(stereo_calibration_manager_->rectifiedAuxProject(point, aux_camera_info), + rectified_color) : + rectified_color.at(y, x); - const auto color_pixel = has_aux_camera_ ? 
u_interpolate_color(std::max(x - color_d, 0.), y, rectified_color) : - rectified_color.at(y, x); packed_color |= color_pixel[2] << 16 | color_pixel[1] << 8 | color_pixel[0]; } @@ -1655,7 +1673,7 @@ void Camera::pointCloudCallback(const image::Header& header) // If our disparity is 0 pixels our corresponding 3D point is infinite. If we plan to publish organized // pointclouds we will need to add a invalid point to our pointcloud(s) - if (disparity == 0.0f || clipPoint(border_clip_type_, border_clip_value_, header.width, header.height, x, y)) + if (disparity == 0.0 || clipPoint(border_clip_type_, border_clip_value_, header.width, header.height, x, y)) { if (pub_organized_pointcloud) { @@ -1670,12 +1688,6 @@ void Camera::pointCloudCallback(const image::Header& header) continue; } - const Eigen::Vector3f point = ((Q * Eigen::Vector4d(static_cast(x), - static_cast(y), - disparity, - 1.0)).hnormalized()).cast(); - - const bool valid = isValidReprojectedPoint(point, squared_max_range); if (pub_pointcloud && valid) diff --git a/multisense_ros/src/camera_utilities.cpp b/multisense_ros/src/camera_utilities.cpp index d78815f..65e80b3 100644 --- a/multisense_ros/src/camera_utilities.cpp +++ b/multisense_ros/src/camera_utilities.cpp @@ -334,20 +334,86 @@ double StereoCalibrationManger::T() const return right_camera_info_.P[3] / right_camera_info_.P[0]; } -double StereoCalibrationManger::aux_T() const +Eigen::Vector3d StereoCalibrationManger::aux_T() const { std::lock_guard lock(mutex_); // // The aux camera projection matrix is of the form: // - // [fx, 0, cx, t * fx] - // [ 0, fy, cy, 0 ] - // [ 0, 0, 1, 0 ] + // [fx, 0, cx, tx * fx] + // [ 0, fy, cy, ty * fy] + // [ 0, 0, 1, tz ] // // divide the t * fx term by fx to get t - return aux_camera_info_.P[3] / aux_camera_info_.P[0]; + return Eigen::Vector3d{aux_camera_info_.P[3] / aux_camera_info_.P[0], + aux_camera_info_.P[7] / aux_camera_info_.P[5], + aux_camera_info_.P[11]}; +} + +Eigen::Vector3f StereoCalibrationManger::reproject(size_t u, size_t v, double d) const +{ + std::lock_guard lock(mutex_); + + return reproject(u, v, d, left_camera_info_, right_camera_info_); +} + +Eigen::Vector3f StereoCalibrationManger::reproject(size_t u, + size_t v, + double d, + const sensor_msgs::CameraInfo &left_camera_info, + const sensor_msgs::CameraInfo &right_camera_info) const +{ + if (d == 0.0) + { + return Eigen::Vector3f{std::numeric_limits::max(), + std::numeric_limits::max(), + std::numeric_limits::max()}; + } + + + const double &fx = left_camera_info.P[0]; + const double &fy = left_camera_info.P[5]; + const double &cx = left_camera_info.P[2]; + const double &cy = left_camera_info.P[6]; + const double &cx_right = right_camera_info.P[2]; + const double tx = right_camera_info.P[3] / right_camera_info.P[0]; + + const double xB = ((fy * tx) * u) + (-fy * cx * tx); + const double yB = ((fx * tx) * v) + (-fx * cy * tx); + const double zB = (fx * fy * tx); + const double invB = 1. 
/ (-fy * d) + (fy * (cx - cx_right)); + + return Eigen::Vector3f{static_cast(xB * invB), static_cast(yB * invB), static_cast(zB * invB)}; +} + +Eigen::Vector2f StereoCalibrationManger::rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point) const +{ + std::lock_guard lock(mutex_); + + return rectifiedAuxProject(left_rectified_point, aux_camera_info_); +} + +Eigen::Vector2f StereoCalibrationManger::rectifiedAuxProject(const Eigen::Vector3f &left_rectified_point, + const sensor_msgs::CameraInfo &aux_camera_info) const +{ + const double &fx = aux_camera_info.P[0]; + const double &fy = aux_camera_info.P[5]; + const double &cx = aux_camera_info.P[2]; + const double &cy = aux_camera_info.P[6]; + const double &fxtx = aux_camera_info.P[3]; + const double &fyty = aux_camera_info.P[7]; + const double &tz = aux_camera_info.P[11]; + + // + // Project the left_rectified_point into the aux image using the auxP matrix + // + const double uB = (fx * left_rectified_point(0) + (cx * left_rectified_point(2)) + fxtx); + const double vB = (fy * left_rectified_point(1) + (cy * left_rectified_point(2)) + fyty); + const double invB = 1.0 / (left_rectified_point(2) + tz); + + return Eigen::Vector2f{static_cast(uB * invB), static_cast(vB * invB)}; } OperatingResolutionT StereoCalibrationManger::operatingStereoResolution() const @@ -374,7 +440,8 @@ OperatingResolutionT StereoCalibrationManger::operatingAuxResolution() const bool StereoCalibrationManger::validAux() const { - return std::isfinite(aux_T()); + const Eigen::Vector3d auxT = aux_T(); + return std::isfinite(auxT(0)) && std::isfinite(auxT(1)) && std::isfinite(auxT(2)) ; } sensor_msgs::CameraInfo StereoCalibrationManger::leftCameraInfo(const std::string& frame_id, From f40dbbe001a3a79d1277bc91279404ef6b5f5c1f Mon Sep 17 00:00:00 2001 From: David Robinson Date: Thu, 23 Jun 2022 03:57:28 +1200 Subject: [PATCH 09/13] Only advertise topics that are available --- multisense_ros/src/camera.cpp | 39 +++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index 4901580..cb1676f 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -352,26 +352,40 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : histogram_pub_ = device_nh_.advertise(HISTOGRAM_TOPIC, 5); // - // Create spline-based ground surface publishers for S27/S30 cameras + // Create publishers for algorithms which are available - const bool can_support_on_board_algorithms = - system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || - system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision; + std::vector device_modes; + status = driver_->getDeviceModes(device_modes); + if (Status_Ok != status) { + ROS_ERROR("Reconfigure: failed to query device modes: %s", Channel::statusString(status)); + return; + } - if (can_support_on_board_algorithms) { + const bool ground_surface_supported = + std::any_of(device_modes.begin(), device_modes.end(), [](const auto &mode) { + return (mode.supportedDataSources & Source_Ground_Surface_Spline_Data) && + (mode.supportedDataSources & Source_Ground_Surface_Class_Image); }); + + if (ground_surface_supported) { ground_surface_cam_pub_ = ground_surface_transport_.advertise(GROUND_SURFACE_IMAGE_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_Ground_Surface_Class_Image), std::bind(&Camera::disconnectStream, this, Source_Ground_Surface_Class_Image)); + 
ground_surface_info_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_INFO_TOPIC, 1, true); + + ground_surface_spline_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_POINT_SPLINE_TOPIC, 5, true); + } + + const bool apriltag_supported = + std::any_of(device_modes.begin(), device_modes.end(), [](const auto &mode) { + return mode.supportedDataSources & Source_AprilTag_Detections; }); + + if (apriltag_supported) { apriltag_detection_pub_ = apriltag_nh_.advertise(APRILTAG_DETECTION_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_AprilTag_Detections), std::bind(&Camera::disconnectStream, this, Source_AprilTag_Detections)); } - ground_surface_info_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_INFO_TOPIC, 1, true); - - ground_surface_spline_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_POINT_SPLINE_TOPIC, 5, true); - // // Create topic publishers (TODO: color topics should not be advertised if the device can't support it) @@ -727,11 +741,14 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_->addIsolatedCallback(histCB, allImageSources, this); // - // Add ground surface callbacks for S27/S30 cameras + // Add algorithm callbacks - if (can_support_on_board_algorithms) { + if (ground_surface_supported) { driver_->addIsolatedCallback(groundSurfaceCB, Source_Ground_Surface_Class_Image, this); driver_->addIsolatedCallback(groundSurfaceSplineCB, this); + } + + if (apriltag_supported) { driver_->addIsolatedCallback(apriltagCB, this); } From 1ee0621702e21053fea5926e81e310459b05dc54 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Sat, 25 Jun 2022 04:42:11 +1200 Subject: [PATCH 10/13] Add image source to tag detections --- multisense_lib/sensor_api | 2 +- multisense_ros/msg/AprilTagDetectionArray.msg | 6 +++--- multisense_ros/src/camera.cpp | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index bd2cd64..a42ba9d 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit bd2cd648b36d5c3ead9dde22d4fbd8aabc3045ce +Subproject commit a42ba9d71e11348c4a704997e3e80eb0a4aa8d60 diff --git a/multisense_ros/msg/AprilTagDetectionArray.msg b/multisense_ros/msg/AprilTagDetectionArray.msg index 9e37249..a6982d7 100644 --- a/multisense_ros/msg/AprilTagDetectionArray.msg +++ b/multisense_ros/msg/AprilTagDetectionArray.msg @@ -1,11 +1,11 @@ std_msgs/Header header -# The frame ID of the image that the apriltags were detected on -int64 frameId - # The frame timestamp (nanoseconds) of the image that the apriltags were detected on int64 timestamp +# The image source that the apriltags were detected on +string imageSource + # Success flag to indicate whether for the apriltag algorithm ran successfully uint8 success diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index cb1676f..32bb5e5 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -2124,7 +2124,7 @@ void Camera::apriltagCallback(const apriltag::Header& header) tag_detection_array.header.seq = static_cast(header.frameId); tag_detection_array.header.stamp = ros_time; tag_detection_array.header.frame_id = frame_id_rectified_left_; - tag_detection_array.frameId = header.frameId; + tag_detection_array.imageSource = header.imageSource; tag_detection_array.timestamp = header.timestamp; tag_detection_array.success = header.success; tag_detection_array.numDetections = header.numDetections; From e77adfe7852f15b2423ca817db08f1a59c822782 Mon Sep 17 00:00:00 2001 From: 
David Robinson Date: Mon, 27 Jun 2022 20:45:28 -0400 Subject: [PATCH 11/13] Remove comment from apriltag params --- multisense_lib/sensor_api | 2 +- multisense_ros/src/reconfigure.cpp | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index a42ba9d..7265e59 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit a42ba9d71e11348c4a704997e3e80eb0a4aa8d60 +Subproject commit 7265e599e11347fbdc94335bc791fcd20acc132d diff --git a/multisense_ros/src/reconfigure.cpp b/multisense_ros/src/reconfigure.cpp index bababf8..72106b3 100644 --- a/multisense_ros/src/reconfigure.cpp +++ b/multisense_ros/src/reconfigure.cpp @@ -885,7 +885,6 @@ template void Reconfigure::configureApriltagParams(const T& dyn) params.refine_quad_edges = dyn.apriltag_refine_quad_edges; params.decode_sharpening = dyn.apriltag_decode_sharpening; - // Update ground surface params on camera Status status = driver_->setApriltagParams(params); if (Status_Ok != status) { ROS_ERROR("Reconfigure: failed to set apriltag params: %s", From f496ba88e71611e1d75c3576b8125c498883e448 Mon Sep 17 00:00:00 2001 From: David Robinson Date: Tue, 28 Jun 2022 19:25:31 -0400 Subject: [PATCH 12/13] Update libmultisense to master --- multisense_lib/sensor_api | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index 7265e59..729b6a3 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit 7265e599e11347fbdc94335bc791fcd20acc132d +Subproject commit 729b6a3efaed324c61a59820d3be0b451ab81ac8 From 3e7557e195baf9b951e9eca9c233e659ef50a1be Mon Sep 17 00:00:00 2001 From: David Robinson Date: Wed, 29 Jun 2022 12:54:39 -0400 Subject: [PATCH 13/13] Merge with remote head changes --- multisense_bringup/CMakeLists.txt | 1 + multisense_bringup/multisense.launch | 5 +- multisense_bringup/remote_head.launch | 158 ++++++ .../meshes/multisense_remote_head_monocam.STL | 86 ++++ .../meshes/multisense_remote_head_stereo.STL | 86 ++++ .../meshes/multisense_remote_head_vpb.STL | 86 ++++ .../importable.urdf.xacro | 18 + .../multisense_remote_head_monocam.urdf.xacro | 68 +++ .../standalone.urdf.xacro | 6 + .../importable.urdf.xacro | 18 + .../multisense_remote_head_stereo.urdf.xacro | 68 +++ .../standalone.urdf.xacro | 6 + .../importable.urdf.xacro | 18 + .../multisense_remote_head_vpb.urdf.xacro | 68 +++ .../standalone.urdf.xacro | 6 + multisense_lib/sensor_api | 2 +- multisense_ros/cfg/multisense.cfg | 118 +++-- .../include/multisense_ros/camera.h | 25 + .../include/multisense_ros/reconfigure.h | 42 +- multisense_ros/src/camera.cpp | 470 +++++++++++------- multisense_ros/src/reconfigure.cpp | 216 ++++++-- multisense_ros/src/ros_driver.cpp | 29 +- 22 files changed, 1302 insertions(+), 298 deletions(-) create mode 100644 multisense_bringup/remote_head.launch create mode 100644 multisense_description/meshes/multisense_remote_head_monocam.STL create mode 100644 multisense_description/meshes/multisense_remote_head_stereo.STL create mode 100644 multisense_description/meshes/multisense_remote_head_vpb.STL create mode 100644 multisense_description/urdf/multisenseremote_head_monocam/importable.urdf.xacro create mode 100644 multisense_description/urdf/multisenseremote_head_monocam/multisense_remote_head_monocam.urdf.xacro create mode 100644 multisense_description/urdf/multisenseremote_head_monocam/standalone.urdf.xacro create mode 100644 
multisense_description/urdf/multisenseremote_head_stereo/importable.urdf.xacro create mode 100644 multisense_description/urdf/multisenseremote_head_stereo/multisense_remote_head_stereo.urdf.xacro create mode 100644 multisense_description/urdf/multisenseremote_head_stereo/standalone.urdf.xacro create mode 100644 multisense_description/urdf/multisenseremote_head_vpb/importable.urdf.xacro create mode 100644 multisense_description/urdf/multisenseremote_head_vpb/multisense_remote_head_vpb.urdf.xacro create mode 100644 multisense_description/urdf/multisenseremote_head_vpb/standalone.urdf.xacro diff --git a/multisense_bringup/CMakeLists.txt b/multisense_bringup/CMakeLists.txt index 152e58c..76ffe5f 100644 --- a/multisense_bringup/CMakeLists.txt +++ b/multisense_bringup/CMakeLists.txt @@ -18,6 +18,7 @@ include_directories(include ${catkin_INCLUDE_DIRS}) install(FILES rviz_config.rviz multisense.launch + remote_head.launch DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} ) diff --git a/multisense_bringup/multisense.launch b/multisense_bringup/multisense.launch index 8f6e607..5f24a9f 100644 --- a/multisense_bringup/multisense.launch +++ b/multisense_bringup/multisense.launch @@ -1,11 +1,13 @@ - + + @@ -26,6 +28,7 @@ + diff --git a/multisense_bringup/remote_head.launch b/multisense_bringup/remote_head.launch new file mode 100644 index 0000000..6513fb0 --- /dev/null +++ b/multisense_bringup/remote_head.launch @@ -0,0 +1,158 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/meshes/multisense_remote_head_monocam.STL b/multisense_description/meshes/multisense_remote_head_monocam.STL new file mode 100644 index 0000000..4030ac9 --- /dev/null +++ b/multisense_description/meshes/multisense_remote_head_monocam.STL @@ -0,0 +1,86 @@ +solid model +facet normal 0.0 0.0 -1.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal 0.0 0.0 -1.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 0.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 20.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +endsolid 
model diff --git a/multisense_description/meshes/multisense_remote_head_stereo.STL b/multisense_description/meshes/multisense_remote_head_stereo.STL new file mode 100644 index 0000000..4030ac9 --- /dev/null +++ b/multisense_description/meshes/multisense_remote_head_stereo.STL @@ -0,0 +1,86 @@ +solid model +facet normal 0.0 0.0 -1.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal 0.0 0.0 -1.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 0.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 20.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +endsolid model diff --git a/multisense_description/meshes/multisense_remote_head_vpb.STL b/multisense_description/meshes/multisense_remote_head_vpb.STL new file mode 100644 index 0000000..4030ac9 --- /dev/null +++ b/multisense_description/meshes/multisense_remote_head_vpb.STL @@ -0,0 +1,86 @@ +solid model +facet normal 0.0 0.0 -1.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal 0.0 0.0 -1.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal -0.0 -1.0 -0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 -20.0 0.0 +endloop +endfacet +facet normal 1.0 0.0 0.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 0.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 20.0 -20.0 20.0 +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -0.0 -0.0 1.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 -20.0 20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 0.0 -20.0 0.0 +vertex 0.0 -20.0 20.0 +endloop +endfacet +facet normal -1.0 0.0 0.0 +outer loop +vertex 0.0 -20.0 0.0 +vertex 0.0 0.0 20.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 0.0 0.0 20.0 +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 0.0 +endloop +endfacet +facet normal -0.0 1.0 0.0 +outer loop +vertex 20.0 0.0 0.0 +vertex 0.0 0.0 
20.0 +vertex 20.0 0.0 20.0 +endloop +endfacet +endsolid model diff --git a/multisense_description/urdf/multisenseremote_head_monocam/importable.urdf.xacro b/multisense_description/urdf/multisenseremote_head_monocam/importable.urdf.xacro new file mode 100644 index 0000000..258988d --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_monocam/importable.urdf.xacro @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_monocam/multisense_remote_head_monocam.urdf.xacro b/multisense_description/urdf/multisenseremote_head_monocam/multisense_remote_head_monocam.urdf.xacro new file mode 100644 index 0000000..c2b0ae5 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_monocam/multisense_remote_head_monocam.urdf.xacro @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_monocam/standalone.urdf.xacro b/multisense_description/urdf/multisenseremote_head_monocam/standalone.urdf.xacro new file mode 100644 index 0000000..43e1e89 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_monocam/standalone.urdf.xacro @@ -0,0 +1,6 @@ + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_stereo/importable.urdf.xacro b/multisense_description/urdf/multisenseremote_head_stereo/importable.urdf.xacro new file mode 100644 index 0000000..146bbae --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_stereo/importable.urdf.xacro @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_stereo/multisense_remote_head_stereo.urdf.xacro b/multisense_description/urdf/multisenseremote_head_stereo/multisense_remote_head_stereo.urdf.xacro new file mode 100644 index 0000000..c7aa2e3 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_stereo/multisense_remote_head_stereo.urdf.xacro @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_stereo/standalone.urdf.xacro b/multisense_description/urdf/multisenseremote_head_stereo/standalone.urdf.xacro new file mode 100644 index 0000000..4545c10 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_stereo/standalone.urdf.xacro @@ -0,0 +1,6 @@ + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_vpb/importable.urdf.xacro b/multisense_description/urdf/multisenseremote_head_vpb/importable.urdf.xacro new file mode 100644 index 0000000..a51aa2a --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_vpb/importable.urdf.xacro @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_vpb/multisense_remote_head_vpb.urdf.xacro b/multisense_description/urdf/multisenseremote_head_vpb/multisense_remote_head_vpb.urdf.xacro new file mode 100644 index 0000000..22469f0 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_vpb/multisense_remote_head_vpb.urdf.xacro @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/multisense_description/urdf/multisenseremote_head_vpb/standalone.urdf.xacro 
b/multisense_description/urdf/multisenseremote_head_vpb/standalone.urdf.xacro new file mode 100644 index 0000000..e91ea81 --- /dev/null +++ b/multisense_description/urdf/multisenseremote_head_vpb/standalone.urdf.xacro @@ -0,0 +1,6 @@ + + + + + + diff --git a/multisense_lib/sensor_api b/multisense_lib/sensor_api index 729b6a3..7265e59 160000 --- a/multisense_lib/sensor_api +++ b/multisense_lib/sensor_api @@ -1 +1 @@ -Subproject commit 729b6a3efaed324c61a59820d3be0b451ab81ac8 +Subproject commit 7265e599e11347fbdc94335bc791fcd20acc132d diff --git a/multisense_ros/cfg/multisense.cfg b/multisense_ros/cfg/multisense.cfg index 52fd0e0..1a74038 100755 --- a/multisense_ros/cfg/multisense.cfg +++ b/multisense_ros/cfg/multisense.cfg @@ -10,7 +10,7 @@ else: from dynamic_reconfigure.parameter_generator_catkin import * class SensorConfig(object): - def __init__(self, name=None, sgm=False, motor=False, imu=False, lighting=False, aux=False, ar0234=False, features=False): + def __init__(self, name=None, sgm=False, motor=False, imu=False, lighting=False, aux=False, ar0234=False, features=False, ptp=False): self.name = name self.sgm = sgm self.motor = motor @@ -19,6 +19,7 @@ class SensorConfig(object): self.aux = aux self.ar0234 = ar0234 self.features = features + self.ptp = ptp #enddef #endclass @@ -28,23 +29,29 @@ class SupportedFeatures(Enum): APRILTAG = 2 sensorConfigList = [] -sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000", sgm=False, motor=True, imu=False, lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000_imu", sgm=False, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000", sgm=False, motor=True, imu=False, lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000_imu", sgm=False, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_sgm_cmv2000_imu", sgm=True, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="sl_sgm_cmv4000_imu", sgm=True, motor=True, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="st21_sgm_vga_imu", sgm=True, motor=False, imu=True , lighting=False, aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="mono_cmv2000", sgm=False, motor=False, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="mono_cmv4000", sgm=False, motor=False, imu=True , lighting=True , aux=False, ar0234=False, features=False)) -sensorConfigList.append(SensorConfig(name="s27_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=False, aux=True, ar0234=True , features=True)) -sensorConfigList.append(SensorConfig(name="ks21_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=True , aux=False, ar0234=True , features=True)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000", sgm=False, motor=True, imu=False, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv2000_imu", sgm=False, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000", sgm=False, motor=True, imu=False, lighting=True, aux=False, ar0234=False, features=False, 
ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_bm_cmv4000_imu", sgm=False, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_sgm_cmv2000_imu", sgm=True, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="sl_sgm_cmv4000_imu", sgm=True, motor=True, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="st21_sgm_vga_imu", sgm=True, motor=False, imu=True, lighting=False, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="mono_cmv2000", sgm=False, motor=False, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="mono_cmv4000", sgm=False, motor=False, imu=True, lighting=True, aux=False, ar0234=False, features=False, ptp=False)) +sensorConfigList.append(SensorConfig(name="s27_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=False, aux=True, ar0234=True, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="ks21_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=True, aux=False, ar0234=True, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="remote_head_vpb", sgm=False, motor=False, imu=False, lighting=False, aux=False, ar0234=False, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="remote_head_sgm_AR0234", sgm=True, motor=False, imu=False, lighting=True, aux=False, ar0234=True, features=True, ptp=True)) +sensorConfigList.append(SensorConfig(name="remote_head_monocam_AR0234",sgm=False, motor=False, imu=False, lighting=True, aux=False, ar0234=True, features=True, ptp=True)) for feature_name, feature in SupportedFeatures.__members__.items(): for cfg in sensorConfigList: # Don't add feature config for unsupported cameras if not cfg.features and feature != SupportedFeatures.NONE: continue + if feature == SupportedFeatures.GROUND_SURFACE and not cfg.sgm: + # ground surface requires stereo + continue gen = ParameterGenerator() @@ -131,13 +138,23 @@ for feature_name, feature in SupportedFeatures.__members__.items(): roi_width = 2048 roi_height = 2048 elif cfg.ar0234: - res_enum = gen.enum([ gen.const("960x600_256_disparities", str_t, "960x600x256", "960x600x256"), - gen.const("1920x1200_64_disparities", str_t, "1920x1200x64", "1920x1200x64"), - gen.const("1920x1200_128_disparities", str_t, "1920x1200x128", "1920x1200x128"), - gen.const("1920x1200_256_disparities", str_t, "1920x1200x256", "1920x1200x256") ], - "Available resolution settings") - gen.add("resolution", str_t, 0, "sensor resolution", "960x600x256", edit_method=res_enum) - gen.add("fps", double_t, 0, "FPS", 10.0, 1.0, 30.0) + if cfg.sgm: + # next gen stereo cameras + res_enum = gen.enum([ gen.const("960x600_256_disparities", str_t, "960x600x256", "960x600x256"), + gen.const("1920x1200_64_disparities", str_t, "1920x1200x64", "1920x1200x64"), + gen.const("1920x1200_128_disparities", str_t, "1920x1200x128", "1920x1200x128"), + gen.const("1920x1200_256_disparities", str_t, "1920x1200x256", "1920x1200x256") ], + "Available resolution settings") + gen.add("resolution", str_t, 0, "sensor resolution", "960x600x256", edit_method=res_enum) + else: + # next gen mono cameras have no dispairty settings + res_enum = gen.enum([ gen.const("960x600", str_t, "960x600x64", "960x600x64"), + gen.const("1920x1200", str_t, "1920x1200x64", 
"1920x1200x64") ], + "Available resolution settings") + gen.add("resolution", str_t, 0, "sensor resolution", "1920x1200x64", edit_method=res_enum) + #endif + + gen.add("fps", double_t, 0, "FPS", 10.0, 1.0, 64.0) roi_width = 1920 roi_height = 1200 auto_exposure_threshold = 0.95 @@ -152,14 +169,23 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("crop_offset", int_t, 0, "Crop Offset", 480, 0, 960) #endif - if cfg.name.find('st21_sgm_vga_imu') == -1: + if cfg.name.find('st21_sgm_vga_imu') == -1 and cfg.name.find('remote_head_vpb') == -1: gen.add("gain", double_t, 0, "SensorGain", 1.0, 1.0, 8.0) gen.add("auto_exposure", bool_t, 0, "AutoExposure", True) - gen.add("auto_exposure_max_time", double_t, 0, "AutoExposureMaxTime", 0.5, 0.0, 0.5); + + if cfg.ar0234: + gen.add("auto_exposure_max_time", double_t, 0, "AutoExposureMaxTime", 0.033, 0.0, 0.033); + else: + gen.add("auto_exposure_max_time", double_t, 0, "AutoExposureMaxTime", 0.5, 0.0, 0.5); + gen.add("auto_exposure_decay", int_t, 0, "AutoExposureDecay", 7, 0, 20) gen.add("auto_exposure_thresh", double_t, 0, "AutoExposureThresh", auto_exposure_threshold, 0.0, 1.0) gen.add("auto_exposure_target_intensity", double_t, 0, "AutoExposureTargetIntensity", 0.5, 0.0, 1.0) - gen.add("exposure_time", double_t, 0, "Exposure", 0.025, 0, 0.5) + + if cfg.ar0234: + gen.add("exposure_time", double_t, 0, "Exposure", 0.025, 0, 0.033) + else: + gen.add("exposure_time", double_t, 0, "Exposure", 0.025, 0, 0.5) if not cfg.ar0234: gen.add("auto_white_balance", bool_t, 0, "AutoWhiteBalance", True) @@ -188,11 +214,11 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("enable_aux_controls", bool_t, 0, "AuxSensorControls", False) gen.add("aux_gain", double_t, 0, "AuxSensorGain", 1.0, 1.0, 8.0) gen.add("aux_auto_exposure", bool_t, 0, "AuxAutoExposure", True) - gen.add("aux_auto_exposure_max_time", double_t, 0, "AuxAutoExposureMaxTime", 0.5, 0.0, 0.5); + gen.add("aux_auto_exposure_max_time", double_t, 0, "AuxAutoExposureMaxTime", 0.033, 0.0, 0.033); gen.add("aux_auto_exposure_decay", int_t, 0, "AuxAutoExposureDecay", 7, 0, 20) gen.add("aux_auto_exposure_thresh", double_t, 0, "AuxAutoExposureThresh", auto_exposure_threshold, 0.0, 1.0) gen.add("aux_auto_exposure_target_intensity", double_t, 0, "AuxAutoExposureTargetIntensity", 0.5, 0.0, 1.0) - gen.add("aux_exposure_time", double_t, 0, "AuxExposure", 0.025, 0, 0.5) + gen.add("aux_exposure_time", double_t, 0, "AuxExposure", 0.025, 0, 0.033) gen.add("aux_roi_auto_exposure", bool_t, 0, "AuxRoiAutoExposure", False) gen.add("aux_roi_auto_exposure_x", int_t, 0, "AuxRoiAutoExposureX", 0, 0, 1920) gen.add("aux_roi_auto_exposure_y", int_t, 0, "AuxRoiAutoExposureY", 0, 0, 1188) @@ -217,17 +243,23 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("network_time_sync", bool_t, 0, "NetworkTimeSynchronization", True); - if cfg.name.find('sgm_AR0234') != -1: + if cfg.ptp: gen.add("ptp_time_sync", bool_t, 0, "PTPTimeSynchronization", False); trigger_source_enum = gen.enum([ gen.const("internal", int_t, 0, ""), - gen.const("ptp", int_t, 3, "")], - "Trigger sources"); + gen.const("ptp", int_t, 3, "")], + "Trigger sources"); gen.add("trigger_source", int_t, 0, "Trigger Source", 0, edit_method=trigger_source_enum) + #endif + + if cfg.ar0234: + if cfg.sgm: + gen.add("detail_disparity_profile", bool_t, 0, "DetailDisparityProfile", False); - gen.add("detail_disparity_profile", bool_t, 0, "DetailDisparityProfile", False); gen.add("high_contrast_profile", bool_t, 0, 
"HighContrastProfile", False); gen.add("show_roi_profile", bool_t, 0, "ShowRoiProfile", False); - gen.add("full_res_aux_profile", bool_t, 0, "FullResAuxProfile", False); + if cfg.aux: + gen.add("full_res_aux_profile", bool_t, 0, "FullResAuxProfile", False); + #endif if feature == SupportedFeatures.GROUND_SURFACE: gen.add("ground_surface_profile", bool_t, 0, "GroundSurfaceProfile", False); @@ -288,21 +320,23 @@ for feature_name, feature in SupportedFeatures.__members__.items(): gen.add("magnetometer_range", int_t, 0, "Magnetometer Range", 0, edit_method=m_range_enum); #endif - clipping_enum = gen.enum([ gen.const("None", int_t, 0, "No Border Clip"), - gen.const("Rectangular", int_t, 1, "Rectangular Border Clip"), - gen.const("Circular", int_t, 2, "Circular Border Clip")], - "Available border clipping options") - - if "4000" in cfg.name: - clipping_max = 400.0 - else: - clipping_max = 200.0 - #endif + if cfg.sgm or cfg.name.find("sl_bm") != -1: + # clipping is a stereo only feature + clipping_enum = gen.enum([ gen.const("None", int_t, 0, "No Border Clip"), + gen.const("Rectangular", int_t, 1, "Rectangular Border Clip"), + gen.const("Circular", int_t, 2, "Circular Border Clip")], + "Available border clipping options") + + if "4000" in cfg.name: + clipping_max = 400.0 + else: + clipping_max = 200.0 + #endif - gen.add("border_clip_type", int_t, 0, "point cloud border clip type", 0, 0, 2, edit_method=clipping_enum) - gen.add("border_clip_value", double_t, 0, "point cloud border clip value", 0.0, 0.0, clipping_max) + gen.add("border_clip_type", int_t, 0, "point cloud border clip type", 0, 0, 2, edit_method=clipping_enum) + gen.add("border_clip_value", double_t, 0, "point cloud border clip value", 0.0, 0.0, clipping_max) - gen.add("max_point_cloud_range", double_t, 0, "max point cloud range", 15.0, 0.0, 100.0) + gen.add("max_point_cloud_range", double_t, 0, "max point cloud range", 15.0, 0.0, 100.0) gen.add("origin_from_camera_position_x_m", double_t, 0, "Origin from camera extrinsics transform x value (m)", 0.0, -100.0, 100.0) gen.add("origin_from_camera_position_y_m", double_t, 0, "Origin from camera extrinsics transform y value (m)", 0.0, -100.0, 100.0) diff --git a/multisense_ros/include/multisense_ros/camera.h b/multisense_ros/include/multisense_ros/camera.h index fdf8fee..79164da 100644 --- a/multisense_ros/include/multisense_ros/camera.h +++ b/multisense_ros/include/multisense_ros/camera.h @@ -344,11 +344,36 @@ class Camera { std::unordered_map>> image_buffers_; + // + // Has a left camera + + bool has_right_camera_ = false; + + // + // Has a left camera + + bool has_left_camera_ = false; + // // Has a 3rd aux color camera bool has_aux_camera_ = false; + // + // Has color camera streams, either color sensors or a color aux + + bool has_color_ = false; + + // + // Can support the ground surface detector + + bool can_support_ground_surface_ = false; + + // + // Can support the apriltag detector + + bool can_support_apriltag_ = false; + // // Diagnostics diagnostic_updater::Updater diagnostic_updater_; diff --git a/multisense_ros/include/multisense_ros/reconfigure.h b/multisense_ros/include/multisense_ros/reconfigure.h index 562eaac..883a147 100644 --- a/multisense_ros/include/multisense_ros/reconfigure.h +++ b/multisense_ros/include/multisense_ros/reconfigure.h @@ -57,6 +57,11 @@ #include #include #include +#include +#include +#include +#include +#include #include #include @@ -92,13 +97,18 @@ class Reconfigure { void callback_st21_vga (multisense_ros::st21_sgm_vga_imuConfig& config, uint32_t 
level); void callback_mono_cmv2000 (multisense_ros::mono_cmv2000Config& config, uint32_t level); void callback_mono_cmv4000 (multisense_ros::mono_cmv4000Config& config, uint32_t level); - void callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& config, uint32_t level); - void callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& config, uint32_t level); - void callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level); - void callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level); - void callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& dyn, uint32_t level); - void callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& dyn, uint32_t level); + void callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& config, uint32_t level); + void callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& config, uint32_t level); + void callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& config, uint32_t level); + void callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& config, uint32_t level); + void callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& config, uint32_t level); + void callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& config, uint32_t level); + void callback_remote_head_vpb (multisense_ros::remote_head_vpbConfig& config, uint32_t level); + void callback_remote_head_sgm_AR0234 (multisense_ros::remote_head_sgm_AR0234Config& config, uint32_t level); + void callback_remote_head_sgm_AR0234_ground_surface(multisense_ros::remote_head_sgm_AR0234_ground_surfaceConfig& config, uint32_t level); + void callback_remote_head_sgm_AR0234_apriltag (multisense_ros::remote_head_sgm_AR0234_apriltagConfig& config, uint32_t level); + void callback_remote_head_monocam_AR0234 (multisense_ros::remote_head_monocam_AR0234Config& config, uint32_t level); // // Internal helper functions @@ -117,9 +127,11 @@ class Reconfigure { template void configureBorderClip(const T& dyn); template void configurePointCloudRange(const T& dyn); template void configurePtp(const T& dyn); - template void configureStereoProfile(crl::multisense::image::Config &cfg, const T& dyn); - template void configureStereoProfileWithGroundSurface(crl::multisense::image::Config &cfg, const T& dyn); - template void configureStereoProfileWithApriltag(crl::multisense::image::Config &cfg, const T& dyn); + template void configureStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); + template void configureGroundSurfaceStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); + template void configureApriltagStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); + template void configureFullResAuxStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); + template void configureDetailDisparityStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn); template void configureExtrinsics(const T& dyn); template void configureGroundSurfaceParams(const T& dyn); template void configureApriltagParams(const T& dyn); @@ -159,13 +171,19 @@ class Reconfigure { std::shared_ptr< dynamic_reconfigure::Server > server_st21_vga_; std::shared_ptr< dynamic_reconfigure::Server > server_mono_cmv2000_; std::shared_ptr< dynamic_reconfigure::Server > server_mono_cmv4000_; - std::shared_ptr< 
dynamic_reconfigure::Server > server_s27_AR0234_; - std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_; + std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_; std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_ground_surface_; - std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_ground_surface_; std::shared_ptr< dynamic_reconfigure::Server > server_s27_AR0234_apriltag_; + std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_; + std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_ground_surface_; std::shared_ptr< dynamic_reconfigure::Server > server_ks21_sgm_AR0234_apriltag_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_vpb_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_sgm_AR0234_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_sgm_AR0234_ground_surface_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_sgm_AR0234_apriltag_; + std::shared_ptr< dynamic_reconfigure::Server > server_remote_head_monocam_AR0234_; + // // Cached values for supported sub-systems (these may be unavailable on // certain hardware variations: S7, S7S, M, etc.) diff --git a/multisense_ros/src/camera.cpp b/multisense_ros/src/camera.cpp index 908e7b0..6aa3728 100644 --- a/multisense_ros/src/camera.cpp +++ b/multisense_ros/src/camera.cpp @@ -352,9 +352,42 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : // // S27/S30 cameras have a 3rd aux color camera and no left color camera + has_left_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_SL == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7S == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7AR == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_MONOCAM == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MONO == device_info_.hardwareRevision; + + has_right_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_SL == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7S == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_S7AR == device_info_.hardwareRevision || + 
system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == device_info_.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == device_info_.hardwareRevision; + has_aux_camera_ = system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == device_info_.hardwareRevision || system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == device_info_.hardwareRevision; + has_color_ = has_aux_camera_ || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision || + system::DeviceInfo::IMAGER_TYPE_CMV2000_COLOR == device_info_.imagerType || + system::DeviceInfo::IMAGER_TYPE_CMV4000_COLOR == device_info_.imagerType || + system::DeviceInfo::IMAGER_TYPE_AR0239_COLOR == device_info_.imagerType; + + // // Topics published for all device types @@ -373,12 +406,12 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : return; } - const bool ground_surface_supported = + const bool can_support_ground_surface_ = std::any_of(device_modes.begin(), device_modes.end(), [](const auto &mode) { return (mode.supportedDataSources & Source_Ground_Surface_Spline_Data) && (mode.supportedDataSources & Source_Ground_Surface_Class_Image); }); - if (ground_surface_supported) { + if (can_support_ground_surface_) { ground_surface_cam_pub_ = ground_surface_transport_.advertise(GROUND_SURFACE_IMAGE_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_Ground_Surface_Class_Image), std::bind(&Camera::disconnectStream, this, Source_Ground_Surface_Class_Image)); @@ -388,21 +421,20 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : ground_surface_spline_pub_ = ground_surface_nh_.advertise(GROUND_SURFACE_POINT_SPLINE_TOPIC, 5, true); } - const bool apriltag_supported = + const bool can_support_apriltag_ = std::any_of(device_modes.begin(), device_modes.end(), [](const auto &mode) { return mode.supportedDataSources & Source_AprilTag_Detections; }); - if (apriltag_supported) { + if (can_support_apriltag_) { apriltag_detection_pub_ = apriltag_nh_.advertise(APRILTAG_DETECTION_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_AprilTag_Detections), std::bind(&Camera::disconnectStream, this, Source_AprilTag_Detections)); } - // - // Create topic publishers (TODO: color topics should not be advertised if the device can't support it) - if (system::DeviceInfo::HARDWARE_REV_BCAM == device_info_.hardwareRevision) { + // bcam has unique topics compared to all other S* variants + left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, std::bind(&Camera::connectStream, this, Source_Luma_Left), std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); @@ -419,156 +451,160 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); + } else { - } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_M == device_info_.hardwareRevision) { + // all other MultiSense-S* variations - // monocular variation + if (has_left_camera_) { - left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); - left_rect_cam_pub_ = left_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left)); - left_rgb_cam_pub_ = 
left_rgb_transport_.advertise(COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); - left_rgb_rect_cam_pub_ = left_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); - left_mono_cam_info_pub_ = left_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - left_rect_cam_info_pub_ = left_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); - left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); + left_mono_cam_info_pub_ = left_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - } else { // all other MultiSense-S* variations + left_rect_cam_pub_ = left_rect_transport_.advertiseCamera(RECT_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left)); + left_rect_cam_info_pub_ = left_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); + } - left_mono_cam_pub_ = left_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left)); - right_mono_cam_pub_ = right_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Right), - std::bind(&Camera::disconnectStream, this, Source_Luma_Right)); - left_rect_cam_pub_ = left_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left)); - right_rect_cam_pub_ = right_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Right), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Right)); - depth_cam_pub_ = depth_transport_.advertise(DEPTH_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); - ni_depth_cam_pub_ = ni_depth_transport_.advertise(OPENNI_DEPTH_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); - - if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 != device_info_.hardwareRevision) { + if (has_right_camera_) { - if (has_aux_camera_) { + right_mono_cam_pub_ = right_mono_transport_.advertise(MONO_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Right), + std::bind(&Camera::disconnectStream, this, Source_Luma_Right)); - aux_mono_cam_pub_ = aux_mono_transport_.advertise(MONO_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Aux)); + right_mono_cam_info_pub_ = right_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - aux_mono_cam_info_pub_ = aux_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); + right_rect_cam_pub_ = right_rect_transport_.advertiseCamera(RECT_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Right), + std::bind(&Camera::disconnectStream, 
this, Source_Luma_Rectified_Right)); - aux_rgb_cam_pub_ = aux_rgb_transport_.advertise(COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Aux | Source_Chroma_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Aux | Source_Chroma_Aux)); + right_rect_cam_info_pub_ = right_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); + } - aux_rgb_cam_info_pub_ = aux_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); + // only publish for stereo cameras + if (has_left_camera_ && has_right_camera_) { - aux_rect_cam_pub_ = aux_rect_transport_.advertiseCamera(RECT_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux)); + // run the stereo topics if left and right cameras both exist - aux_rect_cam_info_pub_ = aux_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); + depth_cam_pub_ = depth_transport_.advertise(DEPTH_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); - aux_rgb_rect_cam_pub_ = aux_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux)); + ni_depth_cam_pub_ = ni_depth_transport_.advertise(OPENNI_DEPTH_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); - aux_rgb_rect_cam_info_pub_ = aux_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); - } - else { + depth_cam_info_pub_ = device_nh_.advertise(DEPTH_CAMERA_INFO_TOPIC, 1, true); + + luma_point_cloud_pub_ = device_nh_.advertise(POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); - left_rgb_cam_pub_ = left_rgb_transport_.advertise(COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + luma_organized_point_cloud_pub_ = device_nh_.advertise(ORGANIZED_POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); - left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); - left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); + raw_cam_data_pub_ = calibration_nh_.advertise(RAW_CAM_DATA_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); - left_rgb_rect_cam_pub_ = left_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), - std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); + left_disparity_pub_ = disparity_left_transport_.advertise(DISPARITY_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); + left_disp_cam_info_pub_ = left_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); + + left_stereo_disparity_pub_ = left_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, 
+ std::bind(&Camera::connectStream, this, Source_Disparity), + std::bind(&Camera::disconnectStream, this, Source_Disparity)); + + if (version_info_.sensorFirmwareVersion >= 0x0300) { + + right_disparity_pub_ = disparity_right_transport_.advertise(DISPARITY_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity_Right), + std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + + right_disp_cam_info_pub_ = right_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); + + right_stereo_disparity_pub_ = right_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity_Right), + std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + + left_disparity_cost_pub_ = disparity_cost_transport_.advertise(COST_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Disparity_Cost), + std::bind(&Camera::disconnectStream, this, Source_Disparity_Cost)); + + left_cost_cam_info_pub_ = left_nh_.advertise(COST_CAMERA_INFO_TOPIC, 1, true); } - const auto point_cloud_color_topics = has_aux_camera_ ? Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux : - Source_Luma_Left | Source_Chroma_Left; + // publish colorized stereo topics + if (has_aux_camera_) { + + const auto point_cloud_color_topics = has_aux_camera_ ? Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux : + Source_Luma_Left | Source_Chroma_Left; - color_point_cloud_pub_ = device_nh_.advertise(COLOR_POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, - Source_Disparity | point_cloud_color_topics), - std::bind(&Camera::disconnectStream, this, - Source_Disparity | point_cloud_color_topics)); - color_organized_point_cloud_pub_ = device_nh_.advertise(COLOR_ORGANIZED_POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, - Source_Disparity | point_cloud_color_topics), - std::bind(&Camera::disconnectStream, this, - Source_Disparity | point_cloud_color_topics)); + color_point_cloud_pub_ = device_nh_.advertise(COLOR_POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, + Source_Disparity | point_cloud_color_topics), + std::bind(&Camera::disconnectStream, this, + Source_Disparity | point_cloud_color_topics)); + + color_organized_point_cloud_pub_ = device_nh_.advertise(COLOR_ORGANIZED_POINTCLOUD_TOPIC, 5, + std::bind(&Camera::connectStream, this, + Source_Disparity | point_cloud_color_topics), + std::bind(&Camera::disconnectStream, this, + Source_Disparity | point_cloud_color_topics)); + + } } - luma_point_cloud_pub_ = device_nh_.advertise(POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); + // handle color topic publishing if color data exists + if (has_aux_camera_) { - luma_organized_point_cloud_pub_ = device_nh_.advertise(ORGANIZED_POINTCLOUD_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); + aux_mono_cam_pub_ = aux_mono_transport_.advertise(MONO_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Aux)); - raw_cam_data_pub_ = calibration_nh_.advertise(RAW_CAM_DATA_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Left | Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Left | Source_Disparity)); + 
aux_mono_cam_info_pub_ = aux_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); + + aux_rgb_cam_pub_ = aux_rgb_transport_.advertise(COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Aux | Source_Chroma_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Aux | Source_Chroma_Aux)); + + aux_rgb_cam_info_pub_ = aux_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); + + aux_rect_cam_pub_ = aux_rect_transport_.advertiseCamera(RECT_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux)); + + aux_rect_cam_info_pub_ = aux_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - left_disparity_pub_ = disparity_left_transport_.advertise(DISPARITY_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); + aux_rgb_rect_cam_pub_ = aux_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux), + std::bind(&Camera::disconnectStream, this, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux)); - left_stereo_disparity_pub_ = left_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity), - std::bind(&Camera::disconnectStream, this, Source_Disparity)); + aux_rgb_rect_cam_info_pub_ = aux_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); - if (version_info_.sensorFirmwareVersion >= 0x0300) { + } else if (has_color_ && !has_aux_camera_) { // !has_aux_camera_ is redundant, but leaving to prevent confusion over edge cases - right_disparity_pub_ = disparity_right_transport_.advertise(DISPARITY_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity_Right), - std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + left_rgb_cam_pub_ = left_rgb_transport_.advertise(COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); - right_stereo_disparity_pub_ = right_nh_.advertise(DISPARITY_IMAGE_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity_Right), - std::bind(&Camera::disconnectStream, this, Source_Disparity_Right)); + left_rgb_rect_cam_pub_ = left_rgb_rect_transport_.advertiseCamera(RECT_COLOR_TOPIC, 5, + std::bind(&Camera::connectStream, this, Source_Luma_Left | Source_Chroma_Left), + std::bind(&Camera::disconnectStream, this, Source_Luma_Left | Source_Chroma_Left)); - left_disparity_cost_pub_ = disparity_cost_transport_.advertise(COST_TOPIC, 5, - std::bind(&Camera::connectStream, this, Source_Disparity_Cost), - std::bind(&Camera::disconnectStream, this, Source_Disparity_Cost)); + left_rgb_cam_info_pub_ = left_nh_.advertise(COLOR_CAMERA_INFO_TOPIC, 1, true); + left_rgb_rect_cam_info_pub_ = left_nh_.advertise(RECT_COLOR_CAMERA_INFO_TOPIC, 1, true); - right_disp_cam_info_pub_ = right_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); - left_cost_cam_info_pub_ = left_nh_.advertise(COST_CAMERA_INFO_TOPIC, 1, true); } - // - // Camera info topic publishers - left_mono_cam_info_pub_ = left_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - right_mono_cam_info_pub_ = right_nh_.advertise(MONO_CAMERA_INFO_TOPIC, 1, true); - left_rect_cam_info_pub_ = left_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - right_rect_cam_info_pub_ = right_nh_.advertise(RECT_CAMERA_INFO_TOPIC, 1, true); - left_disp_cam_info_pub_ = 
left_nh_.advertise(DISPARITY_CAMERA_INFO_TOPIC, 1, true); - depth_cam_info_pub_ = device_nh_.advertise(DEPTH_CAMERA_INFO_TOPIC, 1, true); } // @@ -627,35 +663,56 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : // Publish image calibration image::Calibration image_calibration; - status = driver_->getImageCalibration(image_calibration); - if (Status_Ok != status) { - ROS_ERROR("Camera: failed to query image calibration: %s", - Channel::statusString(status)); - } - else { + if (has_left_camera_ || has_right_camera_ || has_aux_camera_) + { + status = driver_->getImageCalibration(image_calibration); + if (Status_Ok != status) { + ROS_ERROR("Camera: failed to query image calibration: %s", + Channel::statusString(status)); + } + else { + // currently no ROS cameras have only an aux camera + // HARDWARE_REV_MULTISENSE_MONOCAM uses the left cal for its calibration + + multisense_ros::RawCamCal cal; + const float *cP; + + cP = reinterpret_cast(&(image_calibration.left.M[0][0])); + for(uint32_t i=0; i<9; i++) cal.left_M[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.D[0])); + for(uint32_t i=0; i<8; i++) cal.left_D[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.R[0][0])); + for(uint32_t i=0; i<9; i++) cal.left_R[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.P[0][0])); + for(uint32_t i=0; i<12; i++) cal.left_P[i] = cP[i]; + + if (not has_right_camera_) { + + // publish the left calibration twice if there is no right camera + cP = reinterpret_cast(&(image_calibration.left.M[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.D[0])); + for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.R[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.left.P[0][0])); + for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i]; - multisense_ros::RawCamCal cal; - const float *cP; + } else { - cP = reinterpret_cast(&(image_calibration.left.M[0][0])); - for(uint32_t i=0; i<9; i++) cal.left_M[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.left.D[0])); - for(uint32_t i=0; i<8; i++) cal.left_D[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.left.R[0][0])); - for(uint32_t i=0; i<9; i++) cal.left_R[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.left.P[0][0])); - for(uint32_t i=0; i<12; i++) cal.left_P[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.M[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.D[0])); + for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.R[0][0])); + for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i]; + cP = reinterpret_cast(&(image_calibration.right.P[0][0])); + for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.M[0][0])); - for(uint32_t i=0; i<9; i++) cal.right_M[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.D[0])); - for(uint32_t i=0; i<8; i++) cal.right_D[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.R[0][0])); - for(uint32_t i=0; i<9; i++) cal.right_R[i] = cP[i]; - cP = reinterpret_cast(&(image_calibration.right.P[0][0])); - for(uint32_t i=0; i<12; i++) cal.right_P[i] = cP[i]; + } - raw_cam_cal_pub_.publish(cal); + raw_cam_cal_pub_.publish(cal); + } } stereo_calibration_manager_ = std::make_shared(image_config, image_calibration, device_info_); @@ 
-736,17 +793,37 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : driver_->addIsolatedCallback(monoCB, Source_Luma_Left, this); driver_->addIsolatedCallback(jpegCB, Source_Jpeg_Left, this); + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_VPB == device_info_.hardwareRevision) { + // no incoming image data from vpb + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == device_info_.hardwareRevision) { + + driver_->addIsolatedCallback(monoCB, Source_Luma_Left, this); + driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left, this); + } else { - driver_->addIsolatedCallback(colorizeCB, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux | Source_Luma_Aux | - Source_Luma_Left | Source_Chroma_Left | Source_Luma_Rectified_Left, this); - driver_->addIsolatedCallback(monoCB, Source_Luma_Left | Source_Luma_Right | Source_Luma_Aux, this); - driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left | Source_Luma_Rectified_Right | Source_Luma_Rectified_Aux, this); - driver_->addIsolatedCallback(depthCB, Source_Disparity, this); - driver_->addIsolatedCallback(pointCB, Source_Disparity, this); - driver_->addIsolatedCallback(rawCB, Source_Disparity | Source_Luma_Rectified_Left, this); - driver_->addIsolatedCallback(colorCB, Source_Chroma_Left | Source_Chroma_Rectified_Aux | Source_Chroma_Aux, this); - driver_->addIsolatedCallback(dispCB, Source_Disparity | Source_Disparity_Right | Source_Disparity_Cost, this); + if (has_left_camera_ || has_right_camera_ || has_aux_camera_) { + driver_->addIsolatedCallback(monoCB, Source_Luma_Left | Source_Luma_Right | Source_Luma_Aux, this); + driver_->addIsolatedCallback(rectCB, Source_Luma_Rectified_Left | Source_Luma_Rectified_Right | Source_Luma_Rectified_Aux, this); + } + + if (has_color_) { + if (has_aux_camera_) { + driver_->addIsolatedCallback(colorizeCB, Source_Luma_Rectified_Aux | Source_Chroma_Rectified_Aux | Source_Luma_Aux | + Source_Luma_Left | Source_Luma_Rectified_Left, this); + driver_->addIsolatedCallback(colorCB, Source_Chroma_Rectified_Aux | Source_Chroma_Aux, this); + } else { + driver_->addIsolatedCallback(colorizeCB, Source_Luma_Left | Source_Chroma_Left | Source_Luma_Rectified_Left, this); + driver_->addIsolatedCallback(colorCB, Source_Chroma_Left, this); + } + } + + if (has_left_camera_ && has_right_camera_) { + driver_->addIsolatedCallback(depthCB, Source_Disparity, this); + driver_->addIsolatedCallback(pointCB, Source_Disparity, this); + driver_->addIsolatedCallback(rawCB, Source_Disparity | Source_Luma_Rectified_Left, this); + driver_->addIsolatedCallback(dispCB, Source_Disparity | Source_Disparity_Right | Source_Disparity_Cost, this); + } } // @@ -757,12 +834,12 @@ Camera::Camera(Channel* driver, const std::string& tf_prefix) : // // Add algorithm callbacks - if (ground_surface_supported) { + if (can_support_ground_surface_) { driver_->addIsolatedCallback(groundSurfaceCB, Source_Ground_Surface_Class_Image, this); driver_->addIsolatedCallback(groundSurfaceSplineCB, this); } - if (apriltag_supported) { + if (can_support_apriltag_) { driver_->addIsolatedCallback(apriltagCB, this); } @@ -793,19 +870,34 @@ Camera::~Camera() } else { - driver_->removeIsolatedCallback(colorizeCB); - driver_->removeIsolatedCallback(monoCB); - driver_->removeIsolatedCallback(rectCB); - driver_->removeIsolatedCallback(depthCB); - driver_->removeIsolatedCallback(pointCB); - driver_->removeIsolatedCallback(rawCB); - driver_->removeIsolatedCallback(colorCB); - 
driver_->removeIsolatedCallback(dispCB); + if (has_left_camera_ || has_right_camera_ || has_aux_camera_) { + driver_->removeIsolatedCallback(monoCB); + driver_->removeIsolatedCallback(rectCB); + } + + if (has_color_) { + driver_->removeIsolatedCallback(colorizeCB); + driver_->removeIsolatedCallback(colorCB); + } + + if (has_left_camera_ && has_right_camera_) { + driver_->removeIsolatedCallback(depthCB); + driver_->removeIsolatedCallback(pointCB); + driver_->removeIsolatedCallback(rawCB); + driver_->removeIsolatedCallback(dispCB); + } + } + + if (can_support_ground_surface_) + { + driver_->removeIsolatedCallback(groundSurfaceCB); + driver_->removeIsolatedCallback(groundSurfaceSplineCB); } - driver_->removeIsolatedCallback(groundSurfaceCB); - driver_->removeIsolatedCallback(groundSurfaceSplineCB); - driver_->removeIsolatedCallback(apriltagCB); + if (can_support_apriltag_) + { + driver_->removeIsolatedCallback(apriltagCB); + } } void Camera::borderClipChanged(const BorderClip &borderClipType, double borderClipValue) @@ -2244,36 +2336,30 @@ void Camera::publishAllCameraInfo() } else { // all other MultiSense-S* variations - if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_ST21 != device_info_.hardwareRevision) { + if (has_left_camera_) { + left_mono_cam_info_pub_.publish(left_camera_info); + left_rect_cam_info_pub_.publish(left_rectified_camera_info); + } - if (has_aux_camera_) { + if (has_right_camera_) { + right_mono_cam_info_pub_.publish(right_camera_info); + right_rect_cam_info_pub_.publish(right_rectified_camera_info); + } - aux_mono_cam_info_pub_.publish(left_camera_info); - aux_rgb_cam_info_pub_.publish(left_camera_info); - aux_rect_cam_info_pub_.publish(left_rectified_camera_info); - aux_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info); + // only publish for stereo cameras + if (has_left_camera_ && has_right_camera_) { - } else { + left_disp_cam_info_pub_.publish(left_rectified_camera_info); + depth_cam_info_pub_.publish(left_rectified_camera_info); - left_rgb_cam_info_pub_.publish(left_camera_info); - left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info); + if (version_info_.sensorFirmwareVersion >= 0x0300) { + right_disp_cam_info_pub_.publish(right_rectified_camera_info); + left_cost_cam_info_pub_.publish(left_rectified_camera_info); } } - if (version_info_.sensorFirmwareVersion >= 0x0300) { - - right_disp_cam_info_pub_.publish(right_rectified_camera_info); - left_cost_cam_info_pub_.publish(left_rectified_camera_info); - } - - left_mono_cam_info_pub_.publish(left_camera_info); - left_rect_cam_info_pub_.publish(left_rectified_camera_info); - right_mono_cam_info_pub_.publish(right_camera_info); - right_rect_cam_info_pub_.publish(right_rectified_camera_info); - left_disp_cam_info_pub_.publish(left_rectified_camera_info); - depth_cam_info_pub_.publish(left_rectified_camera_info); - + // handle color topic publishing if color data exists if (has_aux_camera_) { // @@ -2288,6 +2374,12 @@ void Camera::publishAllCameraInfo() aux_rgb_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_aux_, stamp, auxResolution)); aux_rgb_rect_cam_info_pub_.publish(stereo_calibration_manager_->auxCameraInfo(frame_id_rectified_aux_, stamp, stereoResolution)); + + } else if (has_color_ && !has_aux_camera_) { // !has_aux_camera_ is redundant, but leaving to prevent confusion over edge cases + + left_rgb_cam_info_pub_.publish(left_camera_info); + left_rgb_rect_cam_info_pub_.publish(left_rectified_camera_info); + } } } diff --git a/multisense_ros/src/reconfigure.cpp 
b/multisense_ros/src/reconfigure.cpp index 72106b3..b56cb9d 100644 --- a/multisense_ros/src/reconfigure.cpp +++ b/multisense_ros/src/reconfigure.cpp @@ -107,7 +107,10 @@ Reconfigure::Reconfigure(Channel* driver, } if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_C6S2_S27 == deviceInfo.hardwareRevision || system::DeviceInfo::HARDWARE_REV_MULTISENSE_S30 == deviceInfo.hardwareRevision || - system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == deviceInfo.hardwareRevision) + system::DeviceInfo::HARDWARE_REV_MULTISENSE_KS21 == deviceInfo.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_VPB == deviceInfo.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == deviceInfo.hardwareRevision || + system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == deviceInfo.hardwareRevision) { ptp_supported_ = true; } @@ -208,6 +211,32 @@ Reconfigure::Reconfigure(Channel* driver, server_ks21_sgm_AR0234_->setCallback(std::bind(&Reconfigure::callback_ks21_AR0234, this, std::placeholders::_1, std::placeholders::_2)); } + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_VPB == deviceInfo.hardwareRevision) { + server_remote_head_vpb_ = + std::shared_ptr< dynamic_reconfigure::Server<multisense_ros::remote_head_vpbConfig> > ( + new dynamic_reconfigure::Server<multisense_ros::remote_head_vpbConfig>(device_nh_)); + server_remote_head_vpb_->setCallback(std::bind(&Reconfigure::callback_remote_head_vpb, this, + std::placeholders::_1, std::placeholders::_2)); + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_STEREO == deviceInfo.hardwareRevision) { + if (ground_surface_supported) { + server_remote_head_sgm_AR0234_ground_surface_ = + std::shared_ptr< dynamic_reconfigure::Server<multisense_ros::remote_head_sgm_AR0234_ground_surfaceConfig> > ( + new dynamic_reconfigure::Server<multisense_ros::remote_head_sgm_AR0234_ground_surfaceConfig>(device_nh_)); + server_remote_head_sgm_AR0234_ground_surface_->setCallback(std::bind(&Reconfigure::callback_remote_head_sgm_AR0234_ground_surface, this, + std::placeholders::_1, std::placeholders::_2)); + } else { + server_remote_head_sgm_AR0234_ = + std::shared_ptr< dynamic_reconfigure::Server<multisense_ros::remote_head_sgm_AR0234Config> > ( + new dynamic_reconfigure::Server<multisense_ros::remote_head_sgm_AR0234Config>(device_nh_)); + server_remote_head_sgm_AR0234_->setCallback(std::bind(&Reconfigure::callback_remote_head_sgm_AR0234, this, + std::placeholders::_1, std::placeholders::_2)); + } + } else if (system::DeviceInfo::HARDWARE_REV_MULTISENSE_REMOTE_HEAD_MONOCAM == deviceInfo.hardwareRevision) { + server_remote_head_monocam_AR0234_ = + std::shared_ptr< dynamic_reconfigure::Server<multisense_ros::remote_head_monocam_AR0234Config> > ( + new dynamic_reconfigure::Server<multisense_ros::remote_head_monocam_AR0234Config>(device_nh_)); + server_remote_head_monocam_AR0234_->setCallback(std::bind(&Reconfigure::callback_remote_head_monocam_AR0234, this, + std::placeholders::_1, std::placeholders::_2)); } else if (versionInfo.sensorFirmwareVersion <= 0x0202) { switch(deviceInfo.imagerType) { @@ -759,39 +788,30 @@ template <class T> void Reconfigure::configurePtp(const T& dyn) } } -template <class T> void Reconfigure::configureStereoProfile(crl::multisense::image::Config &cfg, const T& dyn) +template <class T> void Reconfigure::configureStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) { - crl::multisense::CameraProfile profile = crl::multisense::User_Control; - profile |= (dyn.detail_disparity_profile ? crl::multisense::Detail_Disparity : profile); profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); - profile |= (dyn.full_res_aux_profile ?
crl::multisense::Full_Res_Aux_Cam : profile); - - cfg.setCameraProfile(profile); } -template <class T> void Reconfigure::configureStereoProfileWithGroundSurface(crl::multisense::image::Config &cfg, const T& dyn) +template <class T> void Reconfigure::configureGroundSurfaceStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) { - crl::multisense::CameraProfile profile = crl::multisense::User_Control; - profile |= (dyn.detail_disparity_profile ? crl::multisense::Detail_Disparity : profile); - profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); - profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); - profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); profile |= (dyn.ground_surface_profile ? crl::multisense::Ground_Surface : profile); +} - cfg.setCameraProfile(profile); +template <class T> void Reconfigure::configureApriltagStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) +{ + profile |= (dyn.apriltag_profile ? crl::multisense::AprilTag : profile); } -template <class T> void Reconfigure::configureStereoProfileWithApriltag(crl::multisense::image::Config &cfg, const T& dyn) +template <class T> void Reconfigure::configureFullResAuxStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) { - crl::multisense::CameraProfile profile = crl::multisense::User_Control; - profile |= (dyn.detail_disparity_profile ? crl::multisense::Detail_Disparity : profile); - profile |= (dyn.high_contrast_profile ? crl::multisense::High_Contrast : profile); - profile |= (dyn.show_roi_profile ? crl::multisense::Show_ROIs : profile); profile |= (dyn.full_res_aux_profile ? crl::multisense::Full_Res_Aux_Cam : profile); - profile |= (dyn.apriltag_profile ? crl::multisense::AprilTag : profile); +} - cfg.setCameraProfile(profile); +template <class T> void Reconfigure::configureDetailDisparityStereoProfile(crl::multisense::CameraProfile &profile, const T& dyn) +{ + profile |= (dyn.detail_disparity_profile ?
crl::multisense::Detail_Disparity : profile); } template <class T> void Reconfigure::configureExtrinsics(const T& dyn) @@ -977,7 +997,11 @@ template <class T> void Reconfigure::configureApriltagParams(const T& dyn) #define S27_SGM() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfile(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureFullResAuxStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureAutoWhiteBalance(cfg, dyn); \ configureAuxCamera(cfg, dyn); \ configureCamera(cfg, dyn); \ @@ -987,22 +1011,34 @@ template <class T> void Reconfigure::configureApriltagParams(const T& dyn) configureExtrinsics(dyn); \ } while(0) -#define KS21_SGM() do { \ +#define S27_SGM_GROUND_SURFACE() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfile(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureGroundSurfaceStereoProfile(profile, dyn); \ + configureFullResAuxStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureAutoWhiteBalance(cfg, dyn); \ + configureAuxCamera(cfg, dyn); \ configureCamera(cfg, dyn); \ configureBorderClip(dyn); \ - configureLeds(dyn); \ configurePtp(dyn); \ configurePointCloudRange(dyn); \ configureExtrinsics(dyn); \ + configureGroundSurfaceParams(dyn); \ } while(0) -#define S27_SGM_GROUND_SURFACE() do { \ +#define S27_SGM_APRILTAG() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfileWithGroundSurface(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureApriltagStereoProfile(profile, dyn); \ + configureFullResAuxStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureAutoWhiteBalance(cfg, dyn); \ configureAuxCamera(cfg, dyn); \ configureCamera(cfg, dyn); \ @@ -1010,13 +1046,32 @@ template <class T> void Reconfigure::configureApriltagParams(const T& dyn) configurePtp(dyn); \ configurePointCloudRange(dyn); \ configureExtrinsics(dyn); \ - configureGroundSurfaceParams(dyn); \ + configureApriltagParams(dyn); \ + } while(0) + +#define KS21_SGM() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ } while(0) #define KS21_SGM_GROUND_SURFACE() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfileWithGroundSurface(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureGroundSurfaceStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureCamera(cfg, dyn); \ configureBorderClip(dyn); \ configureLeds(dyn); \ @@ -1026,24 +1081,69 @@ template <class T> void Reconfigure::configureApriltagParams(const T& dyn) configureGroundSurfaceParams(dyn); \ } while(0) -#define S27_SGM_APRILTAG() do { \ +#define KS21_SGM_APRILTAG() do { \
GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfileWithApriltag(cfg, dyn); \ - configureAutoWhiteBalance(cfg, dyn); \ - configureAuxCamera(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureApriltagStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureCamera(cfg, dyn); \ configureBorderClip(dyn); \ + configureLeds(dyn); \ configurePtp(dyn); \ configurePointCloudRange(dyn); \ configureExtrinsics(dyn); \ configureApriltagParams(dyn); \ } while(0) -#define KS21_SGM_APRILTAG() do { \ +#define REMOTE_HEAD_VPB() do { \ + GET_CONFIG(); \ + configurePtp(dyn); \ + configureExtrinsics(dyn); \ + } while(0) + +#define REMOTE_HEAD_SGM_AR0234() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + } while(0) + +#define REMOTE_HEAD_SGM_AR0234_GROUND_SURFACE() do { \ + GET_CONFIG(); \ + configureSgm(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureGroundSurfaceStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureBorderClip(dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configurePointCloudRange(dyn); \ + configureExtrinsics(dyn); \ + configureGroundSurfaceParams(dyn); \ + } while(0) + +#define REMOTE_HEAD_SGM_AR0234_APRILTAG() do { \ GET_CONFIG(); \ configureSgm(cfg, dyn); \ - configureStereoProfileWithApriltag(cfg, dyn); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + configureDetailDisparityStereoProfile(profile, dyn); \ + configureApriltagStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ configureCamera(cfg, dyn); \ configureBorderClip(dyn); \ configureLeds(dyn); \ @@ -1052,23 +1152,41 @@ template void Reconfigure::configureApriltagParams(const T& dyn) configureExtrinsics(dyn); \ configureApriltagParams(dyn); \ } while(0) + +#define REMOTE_HEAD_MONOCAM_AR0234() do { \ + GET_CONFIG(); \ + crl::multisense::CameraProfile profile = crl::multisense::User_Control; \ + configureStereoProfile(profile, dyn); \ + cfg.setCameraProfile(profile); \ + configureCamera(cfg, dyn); \ + configureLeds(dyn); \ + configurePtp(dyn); \ + configureExtrinsics(dyn); \ + } while(0) + // // The dynamic reconfigure callbacks (MultiSense S* & feature variations) -void Reconfigure::callback_sl_bm_cmv2000 (multisense_ros::sl_bm_cmv2000Config& dyn, uint32_t level) { (void) level; SL_BM(); } -void Reconfigure::callback_sl_bm_cmv2000_imu (multisense_ros::sl_bm_cmv2000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } -void Reconfigure::callback_sl_bm_cmv4000 (multisense_ros::sl_bm_cmv4000Config& dyn, uint32_t level) { (void) level; SL_BM(); } -void Reconfigure::callback_sl_bm_cmv4000_imu (multisense_ros::sl_bm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } -void Reconfigure::callback_sl_sgm_cmv2000_imu(multisense_ros::sl_sgm_cmv2000_imuConfig& 
dyn, uint32_t level) { (void) level; SL_SGM_IMU(); } -void Reconfigure::callback_sl_sgm_cmv4000_imu(multisense_ros::sl_sgm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_SGM_IMU_CMV4000(); } -void Reconfigure::callback_mono_cmv2000 (multisense_ros::mono_cmv2000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } -void Reconfigure::callback_mono_cmv4000 (multisense_ros::mono_cmv4000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } -void Reconfigure::callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& dyn, uint32_t level) { (void) level; S27_SGM(); } -void Reconfigure::callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& dyn, uint32_t level) { (void) level; KS21_SGM(); } -void Reconfigure::callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; S27_SGM_GROUND_SURFACE(); } -void Reconfigure::callback_ks21_AR0234_ground_surface (multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; KS21_SGM_GROUND_SURFACE(); } -void Reconfigure::callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; S27_SGM_APRILTAG(); } -void Reconfigure::callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; KS21_SGM_APRILTAG(); } +void Reconfigure::callback_sl_bm_cmv2000 (multisense_ros::sl_bm_cmv2000Config& dyn, uint32_t level) { (void) level; SL_BM(); } +void Reconfigure::callback_sl_bm_cmv2000_imu (multisense_ros::sl_bm_cmv2000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } +void Reconfigure::callback_sl_bm_cmv4000 (multisense_ros::sl_bm_cmv4000Config& dyn, uint32_t level) { (void) level; SL_BM(); } +void Reconfigure::callback_sl_bm_cmv4000_imu (multisense_ros::sl_bm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_BM_IMU(); } +void Reconfigure::callback_sl_sgm_cmv2000_imu (multisense_ros::sl_sgm_cmv2000_imuConfig& dyn, uint32_t level) { (void) level; SL_SGM_IMU(); } +void Reconfigure::callback_sl_sgm_cmv4000_imu (multisense_ros::sl_sgm_cmv4000_imuConfig& dyn, uint32_t level) { (void) level; SL_SGM_IMU_CMV4000(); } +void Reconfigure::callback_mono_cmv2000 (multisense_ros::mono_cmv2000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } +void Reconfigure::callback_mono_cmv4000 (multisense_ros::mono_cmv4000Config& dyn, uint32_t level) { (void) level; MONO_BM_IMU(); } + +void Reconfigure::callback_s27_AR0234 (multisense_ros::s27_sgm_AR0234Config& dyn, uint32_t level) { (void) level; S27_SGM(); } +void Reconfigure::callback_s27_AR0234_ground_surface (multisense_ros::s27_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; S27_SGM_GROUND_SURFACE(); } +void Reconfigure::callback_s27_AR0234_apriltag (multisense_ros::s27_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; S27_SGM_APRILTAG(); } +void Reconfigure::callback_ks21_AR0234 (multisense_ros::ks21_sgm_AR0234Config& dyn, uint32_t level) { (void) level; KS21_SGM(); } +void Reconfigure::callback_ks21_AR0234_ground_surface(multisense_ros::ks21_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; KS21_SGM_GROUND_SURFACE(); } +void Reconfigure::callback_ks21_AR0234_apriltag (multisense_ros::ks21_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; KS21_SGM_APRILTAG(); } +void Reconfigure::callback_remote_head_vpb (multisense_ros::remote_head_vpbConfig& dyn, uint32_t level) { (void) level; REMOTE_HEAD_VPB(); } +void 
Reconfigure::callback_remote_head_sgm_AR0234 (multisense_ros::remote_head_sgm_AR0234Config& dyn, uint32_t level) { (void) level; REMOTE_HEAD_SGM_AR0234(); } +void Reconfigure::callback_remote_head_sgm_AR0234_ground_surface(multisense_ros::remote_head_sgm_AR0234_ground_surfaceConfig& dyn, uint32_t level) { (void) level; REMOTE_HEAD_SGM_AR0234_GROUND_SURFACE(); } +void Reconfigure::callback_remote_head_sgm_AR0234_apriltag (multisense_ros::remote_head_sgm_AR0234_apriltagConfig& dyn, uint32_t level) { (void) level; REMOTE_HEAD_SGM_AR0234_APRILTAG(); } +void Reconfigure::callback_remote_head_monocam_AR0234 (multisense_ros::remote_head_monocam_AR0234Config& dyn, uint32_t level) { (void) level; REMOTE_HEAD_MONOCAM_AR0234(); } // // BCAM (Sony IMX104) diff --git a/multisense_ros/src/ros_driver.cpp b/multisense_ros/src/ros_driver.cpp index 98d46b7..2becb42 100644 --- a/multisense_ros/src/ros_driver.cpp +++ b/multisense_ros/src/ros_driver.cpp @@ -55,20 +55,41 @@ int main(int argc, char** argvPP) std::string sensor_ip; std::string tf_prefix; int sensor_mtu; + int head_id; nh_private_.param<std::string>("sensor_ip", sensor_ip, "10.66.171.21"); nh_private_.param<std::string>("tf_prefix", tf_prefix, "multisense"); nh_private_.param<int>("sensor_mtu", sensor_mtu, 7200); + nh_private_.param<int>("head_id", head_id, 0); Channel *d = NULL; try { - d = Channel::Create(sensor_ip); - if (NULL == d) { - ROS_ERROR("multisense_ros: failed to create communication channel to sensor @ \"%s\"", - sensor_ip.c_str()); + RemoteHeadChannel rh_channel = 0; + switch(head_id) { + case 0: + rh_channel = Remote_Head_0; + break; + case 1: + rh_channel = Remote_Head_1; + break; + case 2: + rh_channel = Remote_Head_2; + break; + case 3: + rh_channel = Remote_Head_3; + break; + default: + rh_channel = Remote_Head_VPB; + break; + } + + d = Channel::Create(sensor_ip, rh_channel); + if (NULL == d) { + ROS_ERROR("multisense_ros: failed to create communication channel to sensor @ \"%s\" with head ID %d", + sensor_ip.c_str(), rh_channel); return -2; }