From 8f49b8d4f2a962fe9b102e11ca2022985723ac0f Mon Sep 17 00:00:00 2001 From: Serafadam Date: Tue, 20 Aug 2024 09:48:34 +0000 Subject: [PATCH 1/3] add compressed image publishers --- depthai_bridge/CMakeLists.txt | 2 + .../include/depthai_bridge/ImageConverter.hpp | 59 +++-- depthai_bridge/package.xml | 1 + depthai_bridge/src/ImageConverter.cpp | 146 +++++++++--- depthai_ros_driver/CMakeLists.txt | 10 +- .../dai_nodes/base_node.hpp | 7 +- .../dai_nodes/nn/detection.hpp | 10 +- .../dai_nodes/nn/spatial_detection.hpp | 9 +- .../dai_nodes/sensors/img_pub.hpp | 120 ++++++++++ .../dai_nodes/sensors/sensor_helpers.hpp | 93 -------- .../include/depthai_ros_driver/utils.hpp | 54 +++++ depthai_ros_driver/package.xml | 1 + depthai_ros_driver/src/camera.cpp | 3 +- depthai_ros_driver/src/camera_node.cpp | 12 + .../src/dai_nodes/base_node.cpp | 8 +- .../src/dai_nodes/sensors/img_pub.cpp | 225 ++++++++++++++++++ .../src/dai_nodes/sensors/mono.cpp | 26 +- .../src/dai_nodes/sensors/rgb.cpp | 19 +- .../src/dai_nodes/sensors/sensor_helpers.cpp | 169 ------------- .../src/dai_nodes/sensors/sync.cpp | 7 +- depthai_ros_driver/src/dai_nodes/stereo.cpp | 53 +++-- .../param_handlers/sensor_param_handler.cpp | 5 + .../param_handlers/stereo_param_handler.cpp | 15 ++ .../src/pipeline/pipeline_generator.cpp | 3 +- depthai_ros_driver/src/utils.cpp | 11 + 25 files changed, 689 insertions(+), 379 deletions(-) create mode 100644 depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp create mode 100644 depthai_ros_driver/src/camera_node.cpp create mode 100644 depthai_ros_driver/src/dai_nodes/sensors/img_pub.cpp diff --git a/depthai_bridge/CMakeLists.txt b/depthai_bridge/CMakeLists.txt index d39393ae..d384e437 100644 --- a/depthai_bridge/CMakeLists.txt +++ b/depthai_bridge/CMakeLists.txt @@ -41,6 +41,7 @@ find_package(tf2_ros REQUIRED) find_package(tf2 REQUIRED) find_package(tf2_geometry_msgs REQUIRED) find_package(composition_interfaces REQUIRED) +find_package(ffmpeg_image_transport_msgs REQUIRED) set(dependencies camera_info_manager @@ -56,6 +57,7 @@ set(dependencies tf2_geometry_msgs tf2 composition_interfaces + ffmpeg_image_transport_msgs ) include_directories( diff --git a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp index 358b2cd7..9e2e3081 100644 --- a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp +++ b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp @@ -10,9 +10,12 @@ #include "depthai-shared/common/CameraBoardSocket.hpp" #include "depthai-shared/common/Point2f.hpp" #include "depthai/device/CalibrationHandler.hpp" +#include "depthai/pipeline/datatype/EncodedFrame.hpp" #include "depthai/pipeline/datatype/ImgFrame.hpp" +#include "ffmpeg_image_transport_msgs/msg/ffmpeg_packet.hpp" #include "rclcpp/time.hpp" #include "sensor_msgs/msg/camera_info.hpp" +#include "sensor_msgs/msg/compressed_image.hpp" #include "sensor_msgs/msg/image.hpp" #include "std_msgs/msg/header.hpp" @@ -22,7 +25,10 @@ namespace ros { namespace StdMsgs = std_msgs::msg; namespace ImageMsgs = sensor_msgs::msg; +namespace FFMPEGMsgs = ffmpeg_image_transport_msgs::msg; using ImagePtr = ImageMsgs::Image::SharedPtr; +using FFMPEGImagePtr = FFMPEGMsgs::FFMPEGPacket::SharedPtr; +using ImpImgPtr = ImageMsgs::CompressedImage::SharedPtr; using TimePoint = std::chrono::time_point; @@ -47,7 +53,7 @@ class ImageConverter { * @param update: bool whether to automatically update the ROS base time on message conversion */ void 
setUpdateRosBaseTimeOnToRosMsg(bool update = true) { - _updateRosBaseTimeOnToRosMsg = update; + updateRosBaseTimeOnToRosMsg = update; } /** @@ -80,10 +86,20 @@ class ImageConverter { */ void setAlphaScaling(double alphaScalingFactor = 0.0); + /** + * @brief Sets the encoding of the image when converting to FFMPEG message. Default is libx264. + * @param encoding: The encoding to be used. + */ + void setFFMPEGEncoding(const std::string& encoding); + void toRosMsg(std::shared_ptr inData, std::deque& outImageMsgs); ImageMsgs::Image toRosMsgRawPtr(std::shared_ptr inData, const sensor_msgs::msg::CameraInfo& info = sensor_msgs::msg::CameraInfo()); ImagePtr toRosMsgPtr(std::shared_ptr inData); + FFMPEGMsgs::FFMPEGPacket toRosFFMPEGPacket(std::shared_ptr inData); + + ImageMsgs::CompressedImage toRosCompressedMsg(std::shared_ptr inData); + void toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outData); /** TODO(sachin): Add support for ros msg to cv mat since we have some @@ -99,36 +115,37 @@ class ImageConverter { Point2f bottomRightPixelId = Point2f()); private: + void planarToInterleaved(const std::vector& srcData, std::vector& destData, int w, int h, int numPlanes, int bpp); + void interleavedToPlanar(const std::vector& srcData, std::vector& destData, int w, int h, int numPlanes, int bpp); static std::unordered_map encodingEnumMap; static std::unordered_map planarEncodingEnumMap; // dai::RawImgFrame::Type _srcType; - bool _daiInterleaved; + bool daiInterleaved; // bool c - const std::string _frameName = ""; - void planarToInterleaved(const std::vector& srcData, std::vector& destData, int w, int h, int numPlanes, int bpp); - void interleavedToPlanar(const std::vector& srcData, std::vector& destData, int w, int h, int numPlanes, int bpp); - std::chrono::time_point _steadyBaseTime; + const std::string frameName = ""; + std::chrono::time_point steadyBaseTime; - rclcpp::Time _rosBaseTime; - bool _getBaseDeviceTimestamp; + rclcpp::Time rosBaseTime; + bool getBaseDeviceTimestamp; // For handling ROS time shifts and debugging - int64_t _totalNsChange{0}; + int64_t totalNsChange{0}; // Whether to update the ROS base time on each message conversion - bool _updateRosBaseTimeOnToRosMsg{false}; - dai::RawImgFrame::Type _srcType; - bool _fromBitstream = false; - bool _convertDispToDepth = false; - bool _addExpOffset = false; - bool _alphaScalingEnabled = false; - dai::CameraExposureOffset _expOffset; - bool _reverseStereoSocketOrder = false; - double _baseline; - double _alphaScalingFactor = 0.0; + bool updateRosBaseTimeOnToRosMsg{false}; + dai::RawImgFrame::Type srcType; + bool fromBitstream = false; + bool dispToDepth = false; + bool addExpOffset = false; + bool alphaScalingEnabled = false; + dai::CameraExposureOffset expOffset; + bool reversedStereoSocketOrder = false; + double baseline; + double alphaScalingFactor = 0.0; + int camHeight = -1; + int camWidth = -1; + std::string ffmpegEncoding = "libx264"; }; } // namespace ros -namespace rosBridge = ros; - } // namespace dai diff --git a/depthai_bridge/package.xml b/depthai_bridge/package.xml index f83bca45..d2015544 100644 --- a/depthai_bridge/package.xml +++ b/depthai_bridge/package.xml @@ -25,6 +25,7 @@ std_msgs stereo_msgs vision_msgs + ffmpeg_image_transport_msgs tf2_ros tf2 tf2_geometry_msgs diff --git a/depthai_bridge/src/ImageConverter.cpp b/depthai_bridge/src/ImageConverter.cpp index 4901a744..d6fe1a91 100644 --- a/depthai_bridge/src/ImageConverter.cpp +++ b/depthai_bridge/src/ImageConverter.cpp @@ -1,6 +1,10 @@ #include 
"depthai_bridge/ImageConverter.hpp" +#include + +#include "depthai-shared/datatype/RawEncodedFrame.hpp" +#include "depthai/pipeline/datatype/EncodedFrame.hpp" #include "depthai_bridge/depthaiUtility.hpp" #include "opencv2/calib3d.hpp" #include "opencv2/imgcodecs.hpp" @@ -26,71 +30,75 @@ std::unordered_map ImageConverter::planarEn {dai::RawImgFrame::Type::YUV420p, "rgb8"}}; ImageConverter::ImageConverter(bool interleaved, bool getBaseDeviceTimestamp) - : _daiInterleaved(interleaved), _steadyBaseTime(std::chrono::steady_clock::now()), _getBaseDeviceTimestamp(getBaseDeviceTimestamp) { - _rosBaseTime = rclcpp::Clock().now(); + : daiInterleaved(interleaved), steadyBaseTime(std::chrono::steady_clock::now()), getBaseDeviceTimestamp(getBaseDeviceTimestamp) { + rosBaseTime = rclcpp::Clock().now(); } ImageConverter::ImageConverter(const std::string frameName, bool interleaved, bool getBaseDeviceTimestamp) - : _frameName(frameName), _daiInterleaved(interleaved), _steadyBaseTime(std::chrono::steady_clock::now()), _getBaseDeviceTimestamp(getBaseDeviceTimestamp) { - _rosBaseTime = rclcpp::Clock().now(); + : frameName(frameName), daiInterleaved(interleaved), steadyBaseTime(std::chrono::steady_clock::now()), getBaseDeviceTimestamp(getBaseDeviceTimestamp) { + rosBaseTime = rclcpp::Clock().now(); } ImageConverter::~ImageConverter() = default; void ImageConverter::updateRosBaseTime() { - updateBaseTime(_steadyBaseTime, _rosBaseTime, _totalNsChange); + updateBaseTime(steadyBaseTime, rosBaseTime, totalNsChange); } void ImageConverter::convertFromBitstream(dai::RawImgFrame::Type srcType) { - _fromBitstream = true; - _srcType = srcType; + fromBitstream = true; + this->srcType = srcType; } void ImageConverter::convertDispToDepth(double baseline) { - _convertDispToDepth = true; - _baseline = baseline; + dispToDepth = true; + this->baseline = baseline; } void ImageConverter::addExposureOffset(dai::CameraExposureOffset& offset) { - _expOffset = offset; - _addExpOffset = true; + expOffset = offset; + addExpOffset = true; } void ImageConverter::reverseStereoSocketOrder() { - _reverseStereoSocketOrder = true; + reversedStereoSocketOrder = true; } void ImageConverter::setAlphaScaling(double alphaScalingFactor) { - _alphaScalingEnabled = true; - _alphaScalingFactor = alphaScalingFactor; + alphaScalingEnabled = true; + this->alphaScalingFactor = alphaScalingFactor; +} + +void ImageConverter::setFFMPEGEncoding(const std::string& encoding) { + ffmpegEncoding = encoding; } ImageMsgs::Image ImageConverter::toRosMsgRawPtr(std::shared_ptr inData, const sensor_msgs::msg::CameraInfo& info) { - if(_updateRosBaseTimeOnToRosMsg) { + if(updateRosBaseTimeOnToRosMsg) { updateRosBaseTime(); } std::chrono::_V2::steady_clock::time_point tstamp; - if(_getBaseDeviceTimestamp) - if(_addExpOffset) - tstamp = inData->getTimestampDevice(_expOffset); + if(getBaseDeviceTimestamp) + if(addExpOffset) + tstamp = inData->getTimestampDevice(expOffset); else tstamp = inData->getTimestampDevice(); - else if(_addExpOffset) - tstamp = inData->getTimestamp(_expOffset); + else if(addExpOffset) + tstamp = inData->getTimestamp(expOffset); else tstamp = inData->getTimestamp(); ImageMsgs::Image outImageMsg; StdMsgs::Header header; - header.frame_id = _frameName; + header.frame_id = frameName; - header.stamp = getFrameTime(_rosBaseTime, _steadyBaseTime, tstamp); + header.stamp = getFrameTime(rosBaseTime, steadyBaseTime, tstamp); - if(_fromBitstream) { + if(fromBitstream) { std::string encoding; int decodeFlags; int channels; cv::Mat output; - switch(_srcType) { 
+ switch(srcType) { case dai::RawImgFrame::Type::BGR888i: { encoding = sensor_msgs::image_encodings::BGR8; decodeFlags = cv::IMREAD_COLOR; @@ -110,7 +118,7 @@ ImageMsgs::Image ImageConverter::toRosMsgRawPtr(std::shared_ptr i break; } default: { - std::cout << _frameName << static_cast(_srcType) << std::endl; + std::cout << frameName << static_cast(srcType) << std::endl; throw(std::runtime_error("Converted type not supported!")); } } @@ -118,8 +126,8 @@ ImageMsgs::Image ImageConverter::toRosMsgRawPtr(std::shared_ptr i output = cv::imdecode(cv::Mat(inData->getData()), decodeFlags); // converting disparity - if(_convertDispToDepth) { - auto factor = std::abs(_baseline * 10) * info.p[0]; + if(dispToDepth) { + auto factor = std::abs(baseline * 10) * info.p[0]; cv::Mat depthOut = cv::Mat(cv::Size(output.cols, output.rows), CV_16UC1); depthOut.forEach([&output, &factor](uint16_t& pixel, const int* position) -> void { auto disp = output.at(position); @@ -209,7 +217,79 @@ ImageMsgs::Image ImageConverter::toRosMsgRawPtr(std::shared_ptr i } return outImageMsg; } +std::chrono::time_point getOffsetTimestamp( + std::chrono::time_point ts, + CameraExposureOffset offset, + std::chrono::microseconds expTime) { + switch(offset) { + case CameraExposureOffset::START: + return ts - expTime; + case CameraExposureOffset::MIDDLE: + return ts - expTime / 2; + case CameraExposureOffset::END: + default: + return ts; + } +} + +ImageMsgs::CompressedImage ImageConverter::toRosCompressedMsg(std::shared_ptr inData) { + if(updateRosBaseTimeOnToRosMsg) { + updateRosBaseTime(); + } + std::chrono::_V2::steady_clock::time_point tstamp; + if(getBaseDeviceTimestamp) + if(addExpOffset) + tstamp = getOffsetTimestamp(inData->getTimestampDevice(), expOffset, inData->getExposureTime()); + else + tstamp = inData->getTimestampDevice(); + else if(addExpOffset) + tstamp = getOffsetTimestamp(inData->getTimestamp(), expOffset, inData->getExposureTime()); + else + tstamp = inData->getTimestamp(); + + ImageMsgs::CompressedImage outImageMsg; + StdMsgs::Header header; + header.frame_id = frameName; + header.stamp = getFrameTime(rosBaseTime, steadyBaseTime, tstamp); + + outImageMsg.header = header; + outImageMsg.format = "jpeg"; + outImageMsg.data = inData->getData(); + return outImageMsg; +} + +FFMPEGMsgs::FFMPEGPacket ImageConverter::toRosFFMPEGPacket(std::shared_ptr inData) { + if(updateRosBaseTimeOnToRosMsg) { + updateRosBaseTime(); + } + std::chrono::_V2::steady_clock::time_point tstamp; + if(getBaseDeviceTimestamp) + if(addExpOffset) + tstamp = getOffsetTimestamp(inData->getTimestampDevice(), expOffset, inData->getExposureTime()); + else + tstamp = inData->getTimestampDevice(); + else if(addExpOffset) + tstamp = getOffsetTimestamp(inData->getTimestamp(), expOffset, inData->getExposureTime()); + else + tstamp = inData->getTimestamp(); + FFMPEGMsgs::FFMPEGPacket outFrameMsg; + StdMsgs::Header header; + header.frame_id = frameName; + header.stamp = getFrameTime(rosBaseTime, steadyBaseTime, tstamp); + outFrameMsg.header = header; + auto ft = inData->getFrameType(); + + outFrameMsg.width = camWidth; + outFrameMsg.height = camHeight; + outFrameMsg.encoding = ffmpegEncoding; + outFrameMsg.pts = header.stamp.sec * 1000000000 + header.stamp.nanosec; // in nanoseconds + outFrameMsg.flags = (int)(ft == RawEncodedFrame::FrameType::I); + outFrameMsg.is_bigendian = false; + outFrameMsg.data = inData->getData(); + + return outFrameMsg; +} void ImageConverter::toRosMsg(std::shared_ptr inData, std::deque& outImageMsgs) { auto outImageMsg = 
toRosMsgRawPtr(inData); outImageMsgs.push_back(outImageMsg); @@ -225,7 +305,7 @@ ImagePtr ImageConverter::toRosMsgPtr(std::shared_ptr inData) { void ImageConverter::toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outData) { std::unordered_map::iterator revEncodingIter; - if(_daiInterleaved) { + if(daiInterleaved) { revEncodingIter = std::find_if(encodingEnumMap.begin(), encodingEnumMap.end(), [&](const std::pair& pair) { return pair.second == inMsg.encoding; }); @@ -352,6 +432,8 @@ ImageMsgs::CameraInfo ImageConverter::calibrationToCameraInfo( cameraData.height = static_cast(height); } + camWidth = cameraData.width; + camHeight = cameraData.height; camIntrinsics = calibHandler.getCameraIntrinsics(cameraId, cameraData.width, cameraData.height, topLeftPixelId, bottomRightPixelId); flatIntrinsics.resize(9); @@ -382,7 +464,7 @@ ImageMsgs::CameraInfo ImageConverter::calibrationToCameraInfo( std::vector> stereoIntrinsics = calibHandler.getCameraIntrinsics( calibHandler.getStereoRightCameraId(), cameraData.width, cameraData.height, topLeftPixelId, bottomRightPixelId); - if(_alphaScalingEnabled) { + if(alphaScalingEnabled) { cv::Mat cameraMatrix = cv::Mat(3, 3, CV_64F); for(int i = 0; i < 3; i++) { for(int j = 0; j < 3; j++) { @@ -391,7 +473,7 @@ ImageMsgs::CameraInfo ImageConverter::calibrationToCameraInfo( } cv::Mat distCoefficients(distCoeffs); - cv::Mat newCameraMatrix = cv::getOptimalNewCameraMatrix(cameraMatrix, distCoefficients, cv::Size(width, height), _alphaScalingFactor); + cv::Mat newCameraMatrix = cv::getOptimalNewCameraMatrix(cameraMatrix, distCoefficients, cv::Size(width, height), alphaScalingFactor); // Copying the contents of newCameraMatrix to stereoIntrinsics for(int i = 0; i < 3; i++) { for(int j = 0; j < 3; j++) { @@ -411,7 +493,7 @@ ImageMsgs::CameraInfo ImageConverter::calibrationToCameraInfo( dai::CameraBoardSocket stereoSocketFirst = calibHandler.getStereoLeftCameraId(); dai::CameraBoardSocket stereoSocketSecond = calibHandler.getStereoRightCameraId(); double factor = 1.0; - if(_reverseStereoSocketOrder) { + if(reversedStereoSocketOrder) { stereoSocketFirst = calibHandler.getStereoRightCameraId(); stereoSocketSecond = calibHandler.getStereoLeftCameraId(); factor = -1.0; @@ -440,4 +522,4 @@ ImageMsgs::CameraInfo ImageConverter::calibrationToCameraInfo( return cameraData; } } // namespace ros -} // namespace dai \ No newline at end of file +} // namespace dai diff --git a/depthai_ros_driver/CMakeLists.txt b/depthai_ros_driver/CMakeLists.txt index 63e28e76..06620777 100644 --- a/depthai_ros_driver/CMakeLists.txt +++ b/depthai_ros_driver/CMakeLists.txt @@ -30,6 +30,7 @@ rclcpp sensor_msgs diagnostic_updater diagnostic_msgs +ffmpeg_image_transport_msgs ) set(SENSOR_DEPS @@ -66,6 +67,7 @@ add_library( src/utils.cpp src/dai_nodes/base_node.cpp src/dai_nodes/sys_logger.cpp + src/dai_nodes/sensors/img_pub.cpp src/dai_nodes/sensors/sensor_helpers.cpp # TODO: Figure out different place for this src/param_handlers/camera_param_handler.cpp src/param_handlers/imu_param_handler.cpp @@ -123,7 +125,11 @@ target_link_libraries( ${NN_LIB_NAME} ${COMMON_LIB_NAME} ) - +add_executable( + camera_node + src/camera_node.cpp) +ament_target_dependencies(camera_node ${CAM_DEPS} ${COMMON_DEPS} ${SENSOR_DEPS} ${NN_DEPS}) +target_link_libraries(camera_node ${PROJECT_NAME} ${SENSOR_LIB_NAME} ${NN_LIB_NAME} ${COMMON_LIB_NAME}) rclcpp_components_register_nodes(${PROJECT_NAME} "${PROJECT_NAME}::Camera") pluginlib_export_plugin_description_file(${PROJECT_NAME} plugins.xml) 
ament_export_include_directories(include) @@ -146,7 +152,7 @@ EXPORT ${PROJECT_NAME}Targets install(EXPORT ${PROJECT_NAME}Targets DESTINATION share/${PROJECT_NAME}/cmake) - +install(TARGETS camera_node DESTINATION lib/${PROJECT_NAME}) ament_python_install_package(${PROJECT_NAME}) # Install Python executables install( diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp index c012cd9c..d6df3e3d 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp @@ -1,10 +1,11 @@ #pragma once -#include #include #include +#include "depthai-shared/common/CameraBoardSocket.hpp" #include "depthai/pipeline/Node.hpp" +#include "depthai_ros_driver/utils.hpp" #include "rclcpp/logger.hpp" namespace dai { @@ -25,6 +26,7 @@ namespace depthai_ros_driver { namespace dai_nodes { namespace sensor_helpers { class ImagePublisher; +struct VideoEncoderConfig; } // namespace sensor_helpers class BaseNode { public: @@ -57,8 +59,7 @@ class BaseNode { const std::string& qName, std::function nodeLink, bool isSynced = false, - bool isLowBandwidth = false, - int quality = 50); + const utils::VideoEncoderConfig& encoderConfig = {}); virtual void closeQueues() = 0; std::shared_ptr setupXout(std::shared_ptr pipeline, const std::string& name); diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp index 6483e774..3986f6c4 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp @@ -4,7 +4,6 @@ #include #include -#include "camera_info_manager/camera_info_manager.hpp" #include "depthai-shared/common/CameraBoardSocket.hpp" #include "depthai/device/DataQueue.hpp" #include "depthai/device/Device.hpp" @@ -15,6 +14,7 @@ #include "depthai_bridge/ImageConverter.hpp" #include "depthai_bridge/ImgDetectionConverter.hpp" #include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/nn_param_handler.hpp" #include "rclcpp/node.hpp" @@ -78,12 +78,12 @@ class Detection : public BaseNode { nnQ->addCallback(std::bind(&Detection::detectionCB, this, std::placeholders::_1, std::placeholders::_2)); if(ph->getParam("i_enable_passthrough")) { - sensor_helpers::ImgConverterConfig convConf; + utils::ImgConverterConfig convConf; convConf.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); convConf.tfPrefix = tfPrefix; convConf.updateROSBaseTimeOnRosMsg = ph->getParam("i_update_ros_base_time_on_ros_msg"); - sensor_helpers::ImgPublisherConfig pubConf; + utils::ImgPublisherConfig pubConf; pubConf.daiNodeName = getName(); pubConf.topicName = "~/" + getName(); pubConf.topicSuffix = "passthrough"; @@ -166,14 +166,12 @@ class Detection : public BaseNode { std::unique_ptr detConverter; std::vector labelNames; rclcpp::Publisher::SharedPtr detPub; - std::shared_ptr imageConverter; std::shared_ptr ptPub; - std::shared_ptr infoManager; std::shared_ptr detectionNode; std::shared_ptr imageManip; std::unique_ptr ph; std::shared_ptr nnQ, ptQ; - std::shared_ptr xoutNN, xoutPT; + std::shared_ptr xoutNN; std::string nnQName, ptQName; }; diff --git 
a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp index f2434c47..f22d1986 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp @@ -16,6 +16,7 @@ #include "depthai_bridge/SpatialDetectionConverter.hpp" #include "depthai_ros_driver/dai_nodes/base_node.hpp" #include "depthai_ros_driver/dai_nodes/nn/nn_helpers.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/nn_param_handler.hpp" #include "rclcpp/node.hpp" @@ -66,12 +67,12 @@ class SpatialDetection : public BaseNode { detPub = getROSNode()->template create_publisher("~/" + getName() + "/spatial_detections", 10, options); if(ph->getParam("i_enable_passthrough")) { - sensor_helpers::ImgConverterConfig convConf; + utils::ImgConverterConfig convConf; convConf.tfPrefix = tfPrefix; convConf.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); convConf.updateROSBaseTimeOnRosMsg = ph->getParam("i_update_ros_base_time_on_ros_msg"); - sensor_helpers::ImgPublisherConfig pubConf; + utils::ImgPublisherConfig pubConf; pubConf.width = width; pubConf.height = height; pubConf.daiNodeName = getName(); @@ -87,12 +88,12 @@ class SpatialDetection : public BaseNode { if(!ph->getOtherNodeParam("stereo", "i_align_depth")) { tfPrefix = getTFPrefix("right"); }; - sensor_helpers::ImgConverterConfig convConf; + utils::ImgConverterConfig convConf; convConf.tfPrefix = tfPrefix; convConf.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); convConf.updateROSBaseTimeOnRosMsg = ph->getParam("i_update_ros_base_time_on_ros_msg"); - sensor_helpers::ImgPublisherConfig pubConf; + utils::ImgPublisherConfig pubConf; pubConf.width = ph->getOtherNodeParam(sensor_helpers::getNodeName(getROSNode(), sensor_helpers::NodeNameEnum::Stereo), "i_width"); pubConf.height = ph->getOtherNodeParam(sensor_helpers::getNodeName(getROSNode(), sensor_helpers::NodeNameEnum::Stereo), "i_height"); pubConf.daiNodeName = getName(); diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp new file mode 100644 index 00000000..009ad332 --- /dev/null +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp @@ -0,0 +1,120 @@ +#pragma once + +#include + +#include "depthai-shared/common/CameraBoardSocket.hpp" +#include "depthai-shared/datatype/RawImgFrame.hpp" +#include "depthai-shared/properties/VideoEncoderProperties.hpp" +#include "depthai/common/CameraExposureOffset.hpp" +#include "depthai/pipeline/Node.hpp" +#include "depthai/pipeline/datatype/ADatatype.hpp" +#include "depthai_ros_driver/utils.hpp" +#include "ffmpeg_image_transport_msgs/msg/ffmpeg_packet.hpp" +#include "image_transport/camera_publisher.hpp" +#include "sensor_msgs/msg/camera_info.hpp" +#include "sensor_msgs/msg/compressed_image.hpp" +#include "sensor_msgs/msg/image.hpp" + +namespace dai { +class Device; +class Pipeline; +class DataOutputQueue; +namespace node { +class VideoEncoder; +class XLinkOut; +} // namespace node +namespace ros { +class ImageConverter; +} +} // namespace dai + +namespace rclcpp { +class Logger; +class Node; +} // namespace rclcpp + +namespace camera_info_manager { +class CameraInfoManager; 
+} +namespace depthai_ros_driver { +namespace dai_nodes { + +namespace sensor_helpers { +/** + * @brief Image struct + * + * This struct is used to store the image, camera info, header and ffmpeg packet. + */ +class Image { + public: + sensor_msgs::msg::Image::UniquePtr image; + sensor_msgs::msg::CameraInfo::UniquePtr info; + ffmpeg_image_transport_msgs::msg::FFMPEGPacket::UniquePtr ffmpegPacket; + sensor_msgs::msg::CompressedImage::UniquePtr compressedImg; +}; +/** + * @brief ImagePublisher class + * + * This class is used to publish images from the device to ROS2. It creates a publisher for the image and camera info topics. + */ +class ImagePublisher { + public: + /** + * @brief Construct a new Image Publisher object + * + * Creates XLinkOut if synced and VideoEncoder if lowBandwidth is enabled. linkFunc is stored and returned when link is called. + */ + ImagePublisher(std::shared_ptr node, + std::shared_ptr pipeline, + const std::string& qName, + std::function linkFunc, + bool synced = false, + bool ipcEnabled = false, + const utils::VideoEncoderConfig& encoderConfig = {}); + + ~ImagePublisher(); + /** + * @brief Setup the image publisher + * + * Creates Publishers, ImageConverter and CameraInfoManager. Creates a Queue and adds a callback if not synced. + */ + void setup(std::shared_ptr device, const utils::ImgConverterConfig& convConf, const utils::ImgPublisherConfig& pubConf); + void createImageConverter(std::shared_ptr device); + void createInfoManager(std::shared_ptr device); + void addQueueCB(const std::shared_ptr& queue); + void closeQueue(); + std::shared_ptr getQueue(); + void link(dai::Node::Input in); + std::string getQueueName(); + void publish(const std::shared_ptr& data); + void publish(std::shared_ptr img); + void publish(std::shared_ptr img, rclcpp::Time timestamp); + std::shared_ptr convertData(const std::shared_ptr& data); + std::shared_ptr createEncoder(std::shared_ptr pipeline, const utils::VideoEncoderConfig& encoderConfig); + + private: + bool detectSubscription(const rclcpp::Publisher::SharedPtr& pub, + const rclcpp::Publisher::SharedPtr& infoPub); + std::shared_ptr node; + utils::VideoEncoderConfig encConfig; + utils::ImgPublisherConfig pubConfig; + utils::ImgConverterConfig convConfig; + std::shared_ptr infoManager; + std::shared_ptr converter; + std::shared_ptr xout; + std::shared_ptr encoder; + std::function linkCB; + rclcpp::Publisher::SharedPtr imgPub; + rclcpp::Publisher::SharedPtr infoPub; + rclcpp::Publisher::SharedPtr ffmpegPub; + rclcpp::Publisher::SharedPtr compressedImgPub; + image_transport::CameraPublisher imgPubIT; + std::shared_ptr dataQ; + int cbID; + std::string qName; + bool ipcEnabled; + bool synced; +}; +} // namespace sensor_helpers +} // namespace dai_nodes +} // namespace depthai_ros_driver diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp index cac0c848..d468e749 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp @@ -1,14 +1,10 @@ #pragma once -#include #include #include -#include "depthai-shared/datatype/RawImgFrame.hpp" #include "depthai-shared/properties/ColorCameraProperties.hpp" #include "depthai-shared/properties/MonoCameraProperties.hpp" -#include "depthai-shared/properties/VideoEncoderProperties.hpp" -#include "depthai/common/CameraExposureOffset.hpp" #include 
"depthai/pipeline/Node.hpp" #include "depthai/pipeline/datatype/ADatatype.hpp" #include "depthai/pipeline/datatype/CameraControl.hpp" @@ -43,89 +39,6 @@ namespace link_types { enum class RGBLinkType { video, isp, preview }; }; namespace sensor_helpers { -struct ImgConverterConfig { - std::string tfPrefix = ""; - bool interleaved = false; - bool getBaseDeviceTimestamp = false; - bool updateROSBaseTimeOnRosMsg = false; - bool lowBandwidth = false; - dai::RawImgFrame::Type encoding = dai::RawImgFrame::Type::BGR888i; - bool addExposureOffset = false; - dai::CameraExposureOffset expOffset = dai::CameraExposureOffset::START; - bool reverseSocketOrder = false; - bool alphaScalingEnabled = false; - double alphaScaling = 1.0; - bool outputDisparity = false; -}; - -struct ImgPublisherConfig { - std::string daiNodeName = ""; - std::string topicName = ""; - bool lazyPub = false; - dai::CameraBoardSocket socket = dai::CameraBoardSocket::AUTO; - dai::CameraBoardSocket leftSocket = dai::CameraBoardSocket::CAM_B; - dai::CameraBoardSocket rightSocket = dai::CameraBoardSocket::CAM_C; - std::string calibrationFile = ""; - std::string topicSuffix = "/image_raw"; - std::string infoMgrSuffix = ""; - bool rectified = false; - int width = 0; - int height = 0; - int maxQSize = 8; - bool qBlocking = false; -}; -class ImagePublisher { - public: - /** - * @brief Construct a new Image Publisher object - * - * Creates XLinkOut if synced and VideoEncoder if lowBandwidth is enabled. linkFunc is stored and returned when link is called. - */ - ImagePublisher(std::shared_ptr node, - std::shared_ptr pipeline, - const std::string& qName, - std::function linkFunc, - bool synced = false, - bool ipcEnabled = false, - bool lowBandwidth = false, - int lowBandwidthQuality = 50); - - ~ImagePublisher(); - /** - * @brief Setup the image publisher - * - * Creates Publishers, ImageConverter and CameraInfoManager. Creates a Queue and adds a callback if not synced. 
- */ - void setup(std::shared_ptr device, const ImgConverterConfig& convConf, const ImgPublisherConfig& pubConf); - void createImageConverter(std::shared_ptr device); - void createInfoManager(std::shared_ptr device); - void addQueueCB(const std::shared_ptr& queue); - void closeQueue(); - std::shared_ptr getQueue(); - void link(dai::Node::Input in); - std::string getQueueName(); - void publish(const std::shared_ptr& data); - void publish(std::pair data); - std::pair convertData(const std::shared_ptr data); - - private: - std::shared_ptr node; - ImgPublisherConfig pubConfig; - ImgConverterConfig convConfig; - std::shared_ptr infoManager; - std::shared_ptr converter; - std::shared_ptr xout; - std::shared_ptr encoder; - std::function linkCB; - rclcpp::Publisher::SharedPtr imgPub; - rclcpp::Publisher::SharedPtr infoPub; - image_transport::CameraPublisher imgPubIT; - std::shared_ptr dataQ; - int cbID; - std::string qName; - bool ipcEnabled; - bool synced; -}; enum class NodeNameEnum { RGB, Left, Right, Stereo, IMU, NN }; struct ImageSensor { std::string name; @@ -157,12 +70,6 @@ sensor_msgs::msg::CameraInfo getCalibInfo(const rclcpp::Logger& logger, dai::CameraBoardSocket socket, int width = 0, int height = 0); -std::shared_ptr setupXout(std::shared_ptr pipeline, const std::string& name); -std::shared_ptr createEncoder(std::shared_ptr pipeline, - int quality, - dai::VideoEncoderProperties::Profile profile = dai::VideoEncoderProperties::Profile::MJPEG); -bool detectSubscription(const rclcpp::Publisher::SharedPtr& pub, - const rclcpp::Publisher::SharedPtr& infoPub); } // namespace sensor_helpers } // namespace dai_nodes } // namespace depthai_ros_driver diff --git a/depthai_ros_driver/include/depthai_ros_driver/utils.hpp b/depthai_ros_driver/include/depthai_ros_driver/utils.hpp index 597e2f0a..59f67fb0 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/utils.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/utils.hpp @@ -5,6 +5,18 @@ #include #include +#include "depthai-shared/common/CameraBoardSocket.hpp" +#include "depthai-shared/datatype/RawImgFrame.hpp" +#include "depthai-shared/properties/VideoEncoderProperties.hpp" +#include "depthai/common/CameraExposureOffset.hpp" + +namespace dai { +class Pipeline; +namespace node { +class XLinkOut; +} // namespace node +} // namespace dai + namespace depthai_ros_driver { namespace utils { template @@ -22,5 +34,47 @@ T getValFromMap(const std::string& name, const std::unordered_map setupXout(std::shared_ptr pipeline, const std::string& name); } // namespace utils } // namespace depthai_ros_driver diff --git a/depthai_ros_driver/package.xml b/depthai_ros_driver/package.xml index f34b3597..20d3d122 100644 --- a/depthai_ros_driver/package.xml +++ b/depthai_ros_driver/package.xml @@ -30,6 +30,7 @@ pluginlib diagnostic_updater diagnostic_msgs + ffmpeg_image_transport_msgs ament_cmake diff --git a/depthai_ros_driver/src/camera.cpp b/depthai_ros_driver/src/camera.cpp index 8e301a53..4d57983a 100644 --- a/depthai_ros_driver/src/camera.cpp +++ b/depthai_ros_driver/src/camera.cpp @@ -2,12 +2,11 @@ #include -#include "ament_index_cpp/get_package_share_directory.hpp" #include "depthai/device/Device.hpp" #include "depthai/pipeline/Pipeline.hpp" #include "depthai_bridge/TFPublisher.hpp" #include "depthai_ros_driver/pipeline/pipeline_generator.hpp" -#include "diagnostic_msgs/msg/diagnostic_status.hpp" +#include "diagnostic_msgs/msg/diagnostic_array.hpp" namespace depthai_ros_driver { diff --git a/depthai_ros_driver/src/camera_node.cpp 
b/depthai_ros_driver/src/camera_node.cpp new file mode 100644 index 00000000..2779ebbd --- /dev/null +++ b/depthai_ros_driver/src/camera_node.cpp @@ -0,0 +1,12 @@ +#include "depthai_ros_driver/camera.hpp" +#include "rclcpp/executors.hpp" +#include "rclcpp/utilities.hpp" + +int main(int argc, char** argv) { + rclcpp::init(argc, argv); + auto cam = std::make_shared(); + + rclcpp::spin(cam); + rclcpp::shutdown(); + return 0; +} diff --git a/depthai_ros_driver/src/dai_nodes/base_node.cpp b/depthai_ros_driver/src/dai_nodes/base_node.cpp index f4ef53f9..b6376bfa 100644 --- a/depthai_ros_driver/src/dai_nodes/base_node.cpp +++ b/depthai_ros_driver/src/dai_nodes/base_node.cpp @@ -5,6 +5,7 @@ #include "depthai/pipeline/Pipeline.hpp" #include "depthai/pipeline/node/VideoEncoder.hpp" #include "depthai/pipeline/node/XLinkOut.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/utils.hpp" #include "rclcpp/node.hpp" @@ -68,16 +69,15 @@ void BaseNode::closeQueues() { }; std::shared_ptr BaseNode::setupXout(std::shared_ptr pipeline, const std::string& name) { - return sensor_helpers::setupXout(pipeline, name); + return utils::setupXout(pipeline, name); }; std::shared_ptr BaseNode::setupOutput(std::shared_ptr pipeline, const std::string& qName, std::function nodeLink, bool isSynced, - bool isLowBandwidth, - int quality) { - return std::make_shared(getROSNode(), pipeline, qName, nodeLink, isSynced, ipcEnabled(), isLowBandwidth, quality); + const utils::VideoEncoderConfig& encoderConfig) { + return std::make_shared(getROSNode(), pipeline, qName, nodeLink, isSynced, ipcEnabled(), encoderConfig); }; void BaseNode::setNames() { diff --git a/depthai_ros_driver/src/dai_nodes/sensors/img_pub.cpp b/depthai_ros_driver/src/dai_nodes/sensors/img_pub.cpp new file mode 100644 index 00000000..70f9fb28 --- /dev/null +++ b/depthai_ros_driver/src/dai_nodes/sensors/img_pub.cpp @@ -0,0 +1,225 @@ +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" + +#include + +#include "camera_info_manager/camera_info_manager.hpp" +#include "depthai/device/Device.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai/pipeline/node/VideoEncoder.hpp" +#include "depthai/pipeline/node/XLinkOut.hpp" +#include "depthai_bridge/ImageConverter.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" +#include "depthai_ros_driver/utils.hpp" +#include "image_transport/image_transport.hpp" + +namespace depthai_ros_driver { +namespace dai_nodes { +namespace sensor_helpers { +ImagePublisher::ImagePublisher(std::shared_ptr node, + std::shared_ptr pipeline, + const std::string& qName, + std::function linkFunc, + bool synced, + bool ipcEnabled, + const utils::VideoEncoderConfig& encoderConfig) + : node(node), encConfig(encoderConfig), qName(qName), ipcEnabled(ipcEnabled), synced(synced) { + if(!synced) { + xout = utils::setupXout(pipeline, qName); + } + + linkCB = linkFunc; + if(encoderConfig.enabled) { + encoder = createEncoder(pipeline, encoderConfig); + linkFunc(encoder->input); + + if(!synced) { + if(encoderConfig.profile == dai::VideoEncoderProperties::Profile::MJPEG) { + encoder->bitstream.link(xout->input); + } else { + encoder->out.link(xout->input); + } + } else { + if(encoderConfig.profile == dai::VideoEncoderProperties::Profile::MJPEG) { + linkCB = [&](dai::Node::Input input) { encoder->bitstream.link(input); }; + } else { + linkCB = [&](dai::Node::Input input) { encoder->out.link(input); }; + } + } + } 
else { + if(!synced) { + linkFunc(xout->input); + } + } +} +void ImagePublisher::setup(std::shared_ptr device, const utils::ImgConverterConfig& convConf, const utils::ImgPublisherConfig& pubConf) { + convConfig = convConf; + pubConfig = pubConf; + createImageConverter(device); + createInfoManager(device); + if(pubConfig.topicName.empty()) { + throw std::runtime_error("Topic name cannot be empty!"); + } + rclcpp::PublisherOptions pubOptions; + pubOptions.qos_overriding_options = rclcpp::QosOverridingOptions::with_default_policies(); + if(pubConfig.publishCompressed) { + if(encConfig.profile == dai::VideoEncoderProperties::Profile::MJPEG) { + compressedImgPub = + node->create_publisher(pubConfig.topicName + pubConfig.compressedTopicSuffix, rclcpp::QoS(10), pubOptions); + } else { + ffmpegPub = node->create_publisher( + pubConfig.topicName + pubConfig.compressedTopicSuffix, rclcpp::QoS(10), pubOptions); + } + infoPub = node->create_publisher(pubConfig.topicName + "/camera_info", rclcpp::QoS(10), pubOptions); + } else if(ipcEnabled) { + imgPub = node->create_publisher(pubConfig.topicName + pubConfig.topicSuffix, rclcpp::QoS(10), pubOptions); + infoPub = node->create_publisher(pubConfig.topicName + "/camera_info", rclcpp::QoS(10), pubOptions); + } else { + imgPubIT = image_transport::create_camera_publisher(node.get(), pubConfig.topicName + pubConfig.topicSuffix); + } + if(!synced) { + dataQ = device->getOutputQueue(getQueueName(), pubConf.maxQSize, pubConf.qBlocking); + addQueueCB(dataQ); + } +} + +void ImagePublisher::createImageConverter(std::shared_ptr device) { + converter = std::make_shared(convConfig.tfPrefix, convConfig.interleaved, convConfig.getBaseDeviceTimestamp); + converter->setUpdateRosBaseTimeOnToRosMsg(convConfig.updateROSBaseTimeOnRosMsg); + if(convConfig.lowBandwidth) { + converter->convertFromBitstream(convConfig.encoding); + } + if(convConfig.addExposureOffset) { + converter->addExposureOffset(convConfig.expOffset); + } + if(convConfig.reverseSocketOrder) { + converter->reverseStereoSocketOrder(); + } + if(convConfig.alphaScalingEnabled) { + converter->setAlphaScaling(convConfig.alphaScaling); + } + if(convConfig.outputDisparity) { + auto calHandler = device->readCalibration(); + double baseline = calHandler.getBaselineDistance(pubConfig.leftSocket, pubConfig.rightSocket, false); + if(convConfig.reverseSocketOrder) { + baseline = calHandler.getBaselineDistance(pubConfig.rightSocket, pubConfig.leftSocket, false); + } + converter->convertDispToDepth(baseline); + } + converter->setFFMPEGEncoding(convConfig.ffmpegEncoder); +} + +std::shared_ptr ImagePublisher::createEncoder(std::shared_ptr pipeline, + const utils::VideoEncoderConfig& encoderConfig) { + auto enc = pipeline->create(); + enc->setQuality(encoderConfig.quality); + enc->setProfile(encoderConfig.profile); + enc->setBitrate(encoderConfig.bitrate); + enc->setKeyframeFrequency(encoderConfig.frameFreq); + return enc; +} +void ImagePublisher::createInfoManager(std::shared_ptr device) { + infoManager = std::make_shared( + node->create_sub_node(std::string(node->get_name()) + "/" + pubConfig.daiNodeName).get(), "/" + pubConfig.daiNodeName + pubConfig.infoMgrSuffix); + if(pubConfig.calibrationFile.empty()) { + auto info = sensor_helpers::getCalibInfo(node->get_logger(), converter, device, pubConfig.socket, pubConfig.width, pubConfig.height); + if(pubConfig.rectified) { + std::fill(info.d.begin(), info.d.end(), 0.0); + std::fill(info.k.begin(), info.k.end(), 0.0); + info.r[0] = info.r[4] = info.r[8] = 1.0; + } + 
infoManager->setCameraInfo(info); + } else { + infoManager->loadCameraInfo(pubConfig.calibrationFile); + } +}; +ImagePublisher::~ImagePublisher() { + closeQueue(); +}; + +void ImagePublisher::closeQueue() { + if(dataQ) dataQ->close(); +} +void ImagePublisher::link(dai::Node::Input in) { + linkCB(in); +} +std::shared_ptr ImagePublisher::getQueue() { + return dataQ; +} +void ImagePublisher::addQueueCB(const std::shared_ptr& queue) { + dataQ = queue; + qName = queue->getName(); + cbID = dataQ->addCallback([this](const std::shared_ptr& data) { publish(data); }); +} + +std::string ImagePublisher::getQueueName() { + return qName; +} +std::shared_ptr ImagePublisher::convertData(const std::shared_ptr& data) { + auto info = infoManager->getCameraInfo(); + auto img = std::make_shared(); + if(pubConfig.publishCompressed) { + if(encConfig.profile == dai::VideoEncoderProperties::Profile::MJPEG) { + auto daiImg = std::dynamic_pointer_cast(data); + auto rawMsg = converter->toRosCompressedMsg(daiImg); + img->compressedImg = std::make_unique(rawMsg); + } else { + auto daiImg = std::dynamic_pointer_cast(data); + auto rawMsg = converter->toRosFFMPEGPacket(daiImg); + img->ffmpegPacket = std::make_unique(rawMsg); + } + } else { + auto daiImg = std::dynamic_pointer_cast(data); + auto rawMsg = converter->toRosMsgRawPtr(daiImg, info); + info.header = rawMsg.header; + sensor_msgs::msg::Image::UniquePtr msg = std::make_unique(rawMsg); + img->image = std::move(msg); + } + sensor_msgs::msg::CameraInfo::UniquePtr infoMsg = std::make_unique(info); + img->info = std::move(infoMsg); + return img; +} +void ImagePublisher::publish(std::shared_ptr img) { + if(pubConfig.publishCompressed) { + if(encConfig.profile == dai::VideoEncoderProperties::Profile::MJPEG) { + compressedImgPub->publish(std::move(img->compressedImg)); + } else { + ffmpegPub->publish(std::move(img->ffmpegPacket)); + } + } else { + if(ipcEnabled && (!pubConfig.lazyPub || detectSubscription(imgPub, infoPub))) { + imgPub->publish(std::move(img->image)); + infoPub->publish(std::move(img->info)); + } else { + if(!pubConfig.lazyPub || imgPubIT.getNumSubscribers() > 0) imgPubIT.publish(*img->image, *img->info); + } + } +} +void ImagePublisher::publish(std::shared_ptr img, rclcpp::Time timestamp) { + img->info->header.stamp = timestamp; + if(pubConfig.publishCompressed) { + if(encConfig.profile == dai::VideoEncoderProperties::Profile::MJPEG) { + img->compressedImg->header.stamp = timestamp; + } else { + img->ffmpegPacket->header.stamp = timestamp; + } + } else { + img->image->header.stamp = timestamp; + } + publish(img); +} + +void ImagePublisher::publish(const std::shared_ptr& data) { + if(rclcpp::ok()) { + auto img = convertData(data); + publish(img); + } +} + +bool ImagePublisher::detectSubscription(const rclcpp::Publisher::SharedPtr& pub, + const rclcpp::Publisher::SharedPtr& infoPub) { + return (pub->get_subscription_count() > 0 || pub->get_intra_process_subscription_count() > 0 || infoPub->get_subscription_count() > 0 + || infoPub->get_intra_process_subscription_count() > 0); +} +} // namespace sensor_helpers +} // namespace dai_nodes +} // namespace depthai_ros_driver diff --git a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp index b221ca66..d901a12a 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp @@ -1,16 +1,13 @@ #include "depthai_ros_driver/dai_nodes/sensors/mono.hpp" -#include "camera_info_manager/camera_info_manager.hpp" 
-#include "depthai/device/DataQueue.hpp" #include "depthai/device/Device.hpp" #include "depthai/pipeline/Pipeline.hpp" #include "depthai/pipeline/node/MonoCamera.hpp" -#include "depthai/pipeline/node/VideoEncoder.hpp" #include "depthai/pipeline/node/XLinkIn.hpp" -#include "depthai/pipeline/node/XLinkOut.hpp" -#include "depthai_bridge/ImageConverter.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/sensor_param_handler.hpp" +#include "depthai_ros_driver/utils.hpp" #include "rclcpp/node.hpp" namespace depthai_ros_driver { @@ -38,13 +35,15 @@ void Mono::setNames() { void Mono::setXinXout(std::shared_ptr pipeline) { if(ph->getParam("i_publish_topic")) { + utils::VideoEncoderConfig encConfig; + encConfig.profile = static_cast(ph->getParam("i_low_bandwidth_profile")); + encConfig.bitrate = ph->getParam("i_low_bandwidth_bitrate"); + encConfig.frameFreq = ph->getParam("i_low_bandwidth_keyframe_frequency"); + encConfig.quality = ph->getParam("i_low_bandwidth_quality"); + encConfig.enabled = ph->getParam("i_low_bandwidth"); + imagePublisher = setupOutput( - pipeline, - monoQName, - [&](auto input) { monoCamNode->out.link(input); }, - ph->getParam("i_synced"), - ph->getParam("i_low_bandwidth"), - ph->getParam("i_low_bandwidth_quality")); + pipeline, monoQName, [&](auto input) { monoCamNode->out.link(input); }, ph->getParam("i_synced"), encConfig); } xinControl = pipeline->create(); xinControl->setStreamName(controlQName); @@ -54,7 +53,7 @@ void Mono::setXinXout(std::shared_ptr pipeline) { void Mono::setupQueues(std::shared_ptr device) { if(ph->getParam("i_publish_topic")) { auto tfPrefix = getOpticalTFPrefix(getSocketName(static_cast(ph->getParam("i_board_socket_id")))); - sensor_helpers::ImgConverterConfig convConf; + utils::ImgConverterConfig convConf; convConf.tfPrefix = tfPrefix; convConf.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); convConf.updateROSBaseTimeOnRosMsg = ph->getParam("i_update_ros_base_time_on_ros_msg"); @@ -64,7 +63,7 @@ void Mono::setupQueues(std::shared_ptr device) { convConf.expOffset = static_cast(ph->getParam("i_exposure_offset")); convConf.reverseSocketOrder = ph->getParam("i_reverse_stereo_socket_order"); - sensor_helpers::ImgPublisherConfig pubConf; + utils::ImgPublisherConfig pubConf; pubConf.daiNodeName = getName(); pubConf.topicName = "~/" + getName(); pubConf.lazyPub = ph->getParam("i_enable_lazy_publisher"); @@ -74,6 +73,7 @@ void Mono::setupQueues(std::shared_ptr device) { pubConf.width = ph->getParam("i_width"); pubConf.height = ph->getParam("i_height"); pubConf.maxQSize = ph->getParam("i_max_q_size"); + pubConf.publishCompressed = ph->getParam("i_publish_compressed"); imagePublisher->setup(device, convConf, pubConf); } diff --git a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp index 3c7d4fad..1b20b761 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp @@ -8,6 +8,7 @@ #include "depthai/pipeline/node/XLinkIn.hpp" #include "depthai/pipeline/node/XLinkOut.hpp" #include "depthai_bridge/ImageConverter.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/sensor_param_handler.hpp" #include "rclcpp/node.hpp" @@ -46,7 +47,14 @@ void RGB::setXinXout(std::shared_ptr pipeline) { 
rgbLinkChoice = [&](auto input) { colorCamNode->video.link(input); }; } if(ph->getParam("i_publish_topic")) { - rgbPub = setupOutput(pipeline, ispQName, rgbLinkChoice, ph->getParam("i_synced"), lowBandwidth, ph->getParam("i_low_bandwidth_quality")); + utils::VideoEncoderConfig encConfig; + encConfig.profile = static_cast(ph->getParam("i_low_bandwidth_profile")); + encConfig.bitrate = ph->getParam("i_low_bandwidth_bitrate"); + encConfig.frameFreq = ph->getParam("i_low_bandwidth_keyframe_frequency"); + encConfig.quality = ph->getParam("i_low_bandwidth_quality"); + encConfig.enabled = lowBandwidth; + + rgbPub = setupOutput(pipeline, ispQName, rgbLinkChoice, ph->getParam("i_synced"), encConfig); } if(ph->getParam("i_enable_preview")) { previewPub = setupOutput(pipeline, previewQName, [&](auto input) { colorCamNode->preview.link(input); }); @@ -59,7 +67,7 @@ void RGB::setXinXout(std::shared_ptr pipeline) { void RGB::setupQueues(std::shared_ptr device) { if(ph->getParam("i_publish_topic")) { auto tfPrefix = getOpticalTFPrefix(getSocketName(static_cast(ph->getParam("i_board_socket_id")))); - sensor_helpers::ImgConverterConfig convConfig; + utils::ImgConverterConfig convConfig; convConfig.tfPrefix = tfPrefix; convConfig.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); convConfig.updateROSBaseTimeOnRosMsg = ph->getParam("i_update_ros_base_time_on_ros_msg"); @@ -69,7 +77,7 @@ void RGB::setupQueues(std::shared_ptr device) { convConfig.expOffset = static_cast(ph->getParam("i_exposure_offset")); convConfig.reverseSocketOrder = ph->getParam("i_reverse_stereo_socket_order"); - sensor_helpers::ImgPublisherConfig pubConfig; + utils::ImgPublisherConfig pubConfig; pubConfig.daiNodeName = getName(); pubConfig.topicName = "~/" + getName(); pubConfig.lazyPub = ph->getParam("i_enable_lazy_publisher"); @@ -79,17 +87,18 @@ void RGB::setupQueues(std::shared_ptr device) { pubConfig.width = ph->getParam("i_width"); pubConfig.height = ph->getParam("i_height"); pubConfig.maxQSize = ph->getParam("i_max_q_size"); + pubConfig.publishCompressed = ph->getParam("i_publish_compressed"); rgbPub->setup(device, convConfig, pubConfig); } if(ph->getParam("i_enable_preview")) { auto tfPrefix = getOpticalTFPrefix(getSocketName(static_cast(ph->getParam("i_board_socket_id")))); - sensor_helpers::ImgConverterConfig convConfig; + utils::ImgConverterConfig convConfig; convConfig.tfPrefix = tfPrefix; convConfig.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); convConfig.updateROSBaseTimeOnRosMsg = ph->getParam("i_update_ros_base_time_on_ros_msg"); - sensor_helpers::ImgPublisherConfig pubConfig; + utils::ImgPublisherConfig pubConfig; pubConfig.daiNodeName = getName(); pubConfig.topicName = "~/" + getName(); pubConfig.lazyPub = ph->getParam("i_enable_lazy_publisher"); diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp index b5db2ad4..c2f51a94 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp @@ -13,155 +13,6 @@ namespace depthai_ros_driver { namespace dai_nodes { namespace sensor_helpers { -ImagePublisher::ImagePublisher(std::shared_ptr node, - std::shared_ptr pipeline, - const std::string& qName, - std::function linkFunc, - bool synced, - bool ipcEnabled, - bool lowBandwidth, - int lowBandwidthQuality) - : node(node), qName(qName), ipcEnabled(ipcEnabled), synced(synced) { - if(!synced) { - xout = setupXout(pipeline, qName); - } 
- - linkCB = linkFunc; - if(lowBandwidth) { - encoder = sensor_helpers::createEncoder(pipeline, lowBandwidthQuality); - linkFunc(encoder->input); - - if(!synced) { - encoder->bitstream.link(xout->input); - } else { - linkCB = [&](dai::Node::Input input) { encoder->bitstream.link(input); }; - } - } else { - if(!synced) { - linkFunc(xout->input); - } - } -} -void ImagePublisher::setup(std::shared_ptr device, const ImgConverterConfig& convConf, const ImgPublisherConfig& pubConf) { - convConfig = convConf; - pubConfig = pubConf; - createImageConverter(device); - createInfoManager(device); - if(pubConfig.topicName.empty()) { - throw std::runtime_error("Topic name cannot be empty!"); - } - if(ipcEnabled) { - rclcpp::PublisherOptions pubOptions; - pubOptions.qos_overriding_options = rclcpp::QosOverridingOptions::with_default_policies(); - imgPub = node->create_publisher(pubConfig.topicName + pubConfig.topicSuffix, rclcpp::QoS(10), pubOptions); - infoPub = node->create_publisher(pubConfig.topicName + "/camera_info", rclcpp::QoS(10), pubOptions); - } else { - imgPubIT = image_transport::create_camera_publisher(node.get(), pubConfig.topicName + pubConfig.topicSuffix); - } - if(!synced) { - dataQ = device->getOutputQueue(getQueueName(), pubConf.maxQSize, pubConf.qBlocking); - addQueueCB(dataQ); - } -} - -void ImagePublisher::createImageConverter(std::shared_ptr device) { - converter = std::make_shared(convConfig.tfPrefix, convConfig.interleaved, convConfig.getBaseDeviceTimestamp); - converter->setUpdateRosBaseTimeOnToRosMsg(convConfig.updateROSBaseTimeOnRosMsg); - if(convConfig.lowBandwidth) { - converter->convertFromBitstream(convConfig.encoding); - } - if(convConfig.addExposureOffset) { - converter->addExposureOffset(convConfig.expOffset); - } - if(convConfig.reverseSocketOrder) { - converter->reverseStereoSocketOrder(); - } - if(convConfig.alphaScalingEnabled) { - converter->setAlphaScaling(convConfig.alphaScaling); - } - if(convConfig.outputDisparity) { - auto calHandler = device->readCalibration(); - double baseline = calHandler.getBaselineDistance(pubConfig.leftSocket, pubConfig.rightSocket, false); - if(convConfig.reverseSocketOrder) { - baseline = calHandler.getBaselineDistance(pubConfig.rightSocket, pubConfig.leftSocket, false); - } - converter->convertDispToDepth(baseline); - } -} - -void ImagePublisher::createInfoManager(std::shared_ptr device) { - infoManager = std::make_shared( - node->create_sub_node(std::string(node->get_name()) + "/" + pubConfig.daiNodeName).get(), "/" + pubConfig.daiNodeName + pubConfig.infoMgrSuffix); - if(pubConfig.calibrationFile.empty()) { - auto info = sensor_helpers::getCalibInfo(node->get_logger(), converter, device, pubConfig.socket, pubConfig.width, pubConfig.height); - if(pubConfig.rectified) { - std::fill(info.d.begin(), info.d.end(), 0.0); - std::fill(info.k.begin(), info.k.end(), 0.0); - info.r[0] = info.r[4] = info.r[8] = 1.0; - } - infoManager->setCameraInfo(info); - } else { - infoManager->loadCameraInfo(pubConfig.calibrationFile); - } -}; -ImagePublisher::~ImagePublisher() { - closeQueue(); -}; - -void ImagePublisher::closeQueue() { - if(dataQ) dataQ->close(); -} -void ImagePublisher::link(dai::Node::Input in) { - linkCB(in); -} -std::shared_ptr ImagePublisher::getQueue() { - return dataQ; -} -void ImagePublisher::addQueueCB(const std::shared_ptr& queue) { - dataQ = queue; - qName = queue->getName(); - cbID = dataQ->addCallback([this](const std::shared_ptr& data) { publish(data); }); -} - -std::string ImagePublisher::getQueueName() { - return qName; 
-} -std::pair ImagePublisher::convertData(const std::shared_ptr data) { - auto img = std::dynamic_pointer_cast(data); - auto info = infoManager->getCameraInfo(); - auto rawMsg = converter->toRosMsgRawPtr(img, info); - info.header = rawMsg.header; - sensor_msgs::msg::CameraInfo::UniquePtr infoMsg = std::make_unique(info); - sensor_msgs::msg::Image::UniquePtr msg = std::make_unique(rawMsg); - return {std::move(msg), std::move(infoMsg)}; -} - -void ImagePublisher::publish(std::pair data) { - if(ipcEnabled && (!pubConfig.lazyPub || detectSubscription(imgPub, infoPub))) { - imgPub->publish(std::move(data.first)); - infoPub->publish(std::move(data.second)); - } else { - if(!pubConfig.lazyPub || imgPubIT.getNumSubscribers() > 0) imgPubIT.publish(*data.first, *data.second); - } -} - -void ImagePublisher::publish(const std::shared_ptr& data) { - if(rclcpp::ok()) { - auto img = std::dynamic_pointer_cast(data); - auto info = infoManager->getCameraInfo(); - auto rawMsg = converter->toRosMsgRawPtr(img, info); - info.header = rawMsg.header; - if(ipcEnabled && (!pubConfig.lazyPub || detectSubscription(imgPub, infoPub))) { - sensor_msgs::msg::CameraInfo::UniquePtr infoMsg = std::make_unique(info); - sensor_msgs::msg::Image::UniquePtr msg = std::make_unique(rawMsg); - imgPub->publish(std::move(msg)); - infoPub->publish(std::move(infoMsg)); - } else { - if(!pubConfig.lazyPub || imgPubIT.getNumSubscribers() > 0) imgPubIT.publish(rawMsg, info); - } - } -} - std::vector availableSensors = {{"IMX378", "1080P", {"12MP", "4K", "1080P"}, true}, {"OV9282", "720P", {"800P", "720P", "400P"}, false}, {"OV9782", "720P", {"800P", "720P", "400P"}, true}, @@ -290,26 +141,6 @@ sensor_msgs::msg::CameraInfo getCalibInfo(const rclcpp::Logger& logger, return info; } -std::shared_ptr setupXout(std::shared_ptr pipeline, const std::string& name) { - auto xout = pipeline->create(); - xout->setStreamName(name); - xout->input.setBlocking(false); - xout->input.setWaitForMessage(false); - xout->input.setQueueSize(0); - return xout; -}; -std::shared_ptr createEncoder(std::shared_ptr pipeline, int quality, dai::VideoEncoderProperties::Profile profile) { - auto enc = pipeline->create(); - enc->setQuality(quality); - enc->setProfile(profile); - return enc; -} - -bool detectSubscription(const rclcpp::Publisher::SharedPtr& pub, - const rclcpp::Publisher::SharedPtr& infoPub) { - return (pub->get_subscription_count() > 0 || pub->get_intra_process_subscription_count() > 0 || infoPub->get_subscription_count() > 0 - || infoPub->get_intra_process_subscription_count() > 0); -} } // namespace sensor_helpers } // namespace dai_nodes } // namespace depthai_ros_driver diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp index e6002aae..efb56d0e 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp @@ -6,6 +6,7 @@ #include "depthai/pipeline/datatype/MessageGroup.hpp" #include "depthai/pipeline/node/Sync.hpp" #include "depthai/pipeline/node/XLinkOut.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/sync_param_handler.hpp" @@ -46,12 +47,10 @@ void Sync::setupQueues(std::shared_ptr device) { if(pub->getQueueName() == msg.first) { auto data = pub->convertData(msg.second); if(firstMsg) { - timestamp = data.first->header.stamp; + timestamp = data->info->header.stamp; firstMsg = false; } - 
data.first->header.stamp = timestamp; - data.second->header.stamp = timestamp; - pub->publish(std::move(data)); + pub->publish(std::move(data), timestamp); } } } diff --git a/depthai_ros_driver/src/dai_nodes/stereo.cpp b/depthai_ros_driver/src/dai_nodes/stereo.cpp index a2e2b8c3..21afca67 100644 --- a/depthai_ros_driver/src/dai_nodes/stereo.cpp +++ b/depthai_ros_driver/src/dai_nodes/stereo.cpp @@ -8,9 +8,11 @@ #include "depthai_ros_driver/dai_nodes/nn/nn_helpers.hpp" #include "depthai_ros_driver/dai_nodes/nn/spatial_nn_wrapper.hpp" #include "depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp" #include "depthai_ros_driver/param_handlers/stereo_param_handler.hpp" +#include "depthai_ros_driver/utils.hpp" #include "rclcpp/node.hpp" namespace depthai_ros_driver { @@ -85,28 +87,37 @@ void Stereo::setXinXout(std::shared_ptr pipeline) { stereoLinkChoice = [&](auto input) { stereoCamNode->depth.link(input); }; } if(ph->getParam("i_publish_topic")) { - stereoPub = setupOutput( - pipeline, stereoQName, stereoLinkChoice, ph->getParam("i_synced"), ph->getParam("i_low_bandwidth"), ph->getParam("i_quality")); + utils::VideoEncoderConfig encConf; + encConf.profile = static_cast(ph->getParam("i_low_bandwidth_profile")); + encConf.bitrate = ph->getParam("i_low_bandwidth_bitrate"); + encConf.frameFreq = ph->getParam("i_low_bandwidth_frame_freq"); + encConf.quality = ph->getParam("i_low_bandwidth_quality"); + encConf.enabled = ph->getParam("i_low_bandwidth"); + + stereoPub = setupOutput(pipeline, stereoQName, stereoLinkChoice, ph->getParam("i_synced"), encConf); } if(ph->getParam("i_publish_left_rect") || ph->getParam("i_publish_synced_rect_pair")) { + utils::VideoEncoderConfig encConf; + encConf.profile = static_cast(ph->getParam("i_left_rect_low_bandwidth_profile")); + encConf.bitrate = ph->getParam("i_left_rect_low_bandwidth_bitrate"); + encConf.frameFreq = ph->getParam("i_left_rect_low_bandwidth_frame_freq"); + encConf.quality = ph->getParam("i_left_rect_low_bandwidth_quality"); + encConf.enabled = ph->getParam("i_left_rect_low_bandwidth"); + leftRectPub = setupOutput( - pipeline, - leftRectQName, - [&](auto input) { stereoCamNode->rectifiedLeft.link(input); }, - ph->getParam("i_left_rect_synced"), - ph->getParam("i_left_rect_low_bandwidth"), - ph->getParam("i_left_rect_low_bandwidth_quality")); + pipeline, leftRectQName, [&](auto input) { stereoCamNode->rectifiedLeft.link(input); }, ph->getParam("i_left_rect_synced"), encConf); } if(ph->getParam("i_publish_right_rect") || ph->getParam("i_publish_synced_rect_pair")) { + utils::VideoEncoderConfig encConf; + encConf.profile = static_cast(ph->getParam("i_right_rect_low_bandwidth_profile")); + encConf.bitrate = ph->getParam("i_right_rect_low_bandwidth_bitrate"); + encConf.frameFreq = ph->getParam("i_right_rect_low_bandwidth_frame_freq"); + encConf.quality = ph->getParam("i_right_rect_low_bandwidth_quality"); + encConf.enabled = ph->getParam("i_right_rect_low_bandwidth"); rightRectPub = setupOutput( - pipeline, - rightRectQName, - [&](auto input) { stereoCamNode->rectifiedRight.link(input); }, - ph->getParam("i_right_rect_synced"), - ph->getParam("i_right_rect_low_bandwidth"), - ph->getParam("i_right_rect_low_bandwidth_quality")); + pipeline, rightRectQName, [&](auto input) { stereoCamNode->rectifiedRight.link(input); }, ph->getParam("i_right_rect_synced"), 
encConf); } if(ph->getParam("i_left_rect_enable_feature_tracker")) { @@ -127,7 +138,7 @@ void Stereo::setupRectQueue(std::shared_ptr device, bool isLeft) { auto sensorName = getSocketName(sensorInfo.socket); auto tfPrefix = getOpticalTFPrefix(sensorName); - sensor_helpers::ImgConverterConfig convConfig; + utils::ImgConverterConfig convConfig; convConfig.tfPrefix = tfPrefix; convConfig.interleaved = false; convConfig.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); @@ -138,7 +149,7 @@ void Stereo::setupRectQueue(std::shared_ptr device, convConfig.expOffset = static_cast(ph->getParam(isLeft ? "i_left_rect_exposure_offset" : "i_right_rect_exposure_offset")); convConfig.reverseSocketOrder = ph->getParam("i_reverse_stereo_socket_order"); - sensor_helpers::ImgPublisherConfig pubConfig; + utils::ImgPublisherConfig pubConfig; pubConfig.daiNodeName = sensorName; pubConfig.rectified = true; pubConfig.width = ph->getOtherNodeParam(sensorName, "i_width"); @@ -148,6 +159,7 @@ void Stereo::setupRectQueue(std::shared_ptr device, pubConfig.maxQSize = ph->getOtherNodeParam(sensorName, "i_max_q_size"); pubConfig.socket = sensorInfo.socket; pubConfig.infoMgrSuffix = "rect"; + pubConfig.publishCompressed = ph->getParam(isLeft ? "i_left_rect_publish_compressed" : "i_right_rect_publish_compressed"); pub->setup(device, convConfig, pubConfig); } @@ -167,7 +179,7 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { } else { tfPrefix = getOpticalTFPrefix(getSocketName(rightSensInfo.socket).c_str()); } - sensor_helpers::ImgConverterConfig convConfig; + utils::ImgConverterConfig convConfig; convConfig.getBaseDeviceTimestamp = ph->getParam("i_get_base_device_timestamp"); convConfig.tfPrefix = tfPrefix; convConfig.updateROSBaseTimeOnRosMsg = ph->getParam("i_update_ros_base_time_on_ros_msg"); @@ -180,7 +192,7 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { convConfig.alphaScaling = ph->getParam("i_alpha_scaling"); convConfig.outputDisparity = ph->getParam("i_output_disparity"); - sensor_helpers::ImgPublisherConfig pubConf; + utils::ImgPublisherConfig pubConf; pubConf.daiNodeName = getName(); pubConf.topicName = "~/" + getName(); pubConf.rectified = !convConfig.alphaScalingEnabled; @@ -192,6 +204,7 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { pubConf.rightSocket = rightSensInfo.socket; pubConf.lazyPub = ph->getParam("i_enable_lazy_publisher"); pubConf.maxQSize = ph->getParam("i_max_q_size"); + pubConf.publishCompressed = ph->getParam("i_publish_compressed"); stereoPub->setup(device, convConfig, pubConf); } @@ -280,10 +293,10 @@ std::vector> Stereo::getPublishe if(ph->getParam("i_synced")) { pubs.push_back(stereoPub); } - if(ph->getParam("i_left_rect_synced")) { + if(ph->getParam("i_left_rect_publish_topic") && ph->getParam("i_left_rect_synced")) { pubs.push_back(leftRectPub); } - if(ph->getParam("i_right_rect_synced")) { + if(ph->getParam("i_right_rect_publish_topic") && ph->getParam("i_right_rect_synced")) { pubs.push_back(rightRectPub); } auto pubsLeft = left->getPublishers(); diff --git a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp index aeac1ba9..0ff4ea05 100644 --- a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp @@ -20,7 +20,11 @@ SensorParamHandler::~SensorParamHandler() = default; void SensorParamHandler::declareCommonParams(dai::CameraBoardSocket socket) { declareAndLogParam("i_max_q_size", 30); 
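// NOTE: the i_low_bandwidth_* parameters declared below feed the new utils::VideoEncoderConfig
// (enabled, profile, bitrate, frameFreq, quality) that the sensor nodes pass to setupOutput().
// A minimal sketch of how such a config could be applied to a dai::node::VideoEncoder follows;
// the helper name and its location are assumptions, only the config fields come from this patch.
//
//   std::shared_ptr<dai::node::VideoEncoder> createEncoder(std::shared_ptr<dai::Pipeline> pipeline,
//                                                          const utils::VideoEncoderConfig& cfg) {
//       auto enc = pipeline->create<dai::node::VideoEncoder>();
//       enc->setProfile(cfg.profile);              // dai::VideoEncoderProperties::Profile, cast from the int parameter
//       enc->setQuality(cfg.quality);              // 0-100, used by the (M)JPEG profiles
//       enc->setKeyframeFrequency(cfg.frameFreq);  // keyframe every N frames
//       if(cfg.bitrate > 0) {
//           enc->setBitrate(cfg.bitrate);          // 0 keeps the default rate control
//       }
//       return enc;
//   }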
declareAndLogParam("i_low_bandwidth", false); + declareAndLogParam("i_low_bandwidth_profile", 4); + declareAndLogParam("i_low_bandiwdth_frame_freq", 30); + declareAndLogParam("i_low_bandwidth_bitrate", 0); declareAndLogParam("i_low_bandwidth_quality", 50); + declareAndLogParam("i_low_bandwidth_ffmpeg_encoder", "libx264"); declareAndLogParam("i_calibration_file", ""); declareAndLogParam("i_simulate_from_topic", false); declareAndLogParam("i_simulated_topic_name", ""); @@ -35,6 +39,7 @@ void SensorParamHandler::declareCommonParams(dai::CameraBoardSocket socket) { declareAndLogParam("i_exposure_offset", 0); declareAndLogParam("i_reverse_stereo_socket_order", false); declareAndLogParam("i_synced", true); + declareAndLogParam("i_publish_compressed", false); } void SensorParamHandler::declareParams(std::shared_ptr monoCam, dai_nodes::sensor_helpers::ImageSensor sensor, bool publish) { diff --git a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp index b36304f5..da56d8b7 100644 --- a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp @@ -56,6 +56,10 @@ void StereoParamHandler::declareParams(std::shared_ptr s declareAndLogParam("i_max_q_size", 30); declareAndLogParam("i_low_bandwidth", false); declareAndLogParam("i_low_bandwidth_quality", 50); + declareAndLogParam("i_low_bandwidth_profile", 4); + declareAndLogParam("i_low_bandiwdth_frame_freq", 30); + declareAndLogParam("i_low_bandwidth_bitrate", 0); + declareAndLogParam("i_low_bandwidth_ffmpeg_encoder", "libx264"); declareAndLogParam("i_output_disparity", false); declareAndLogParam("i_get_base_device_timestamp", false); declareAndLogParam("i_update_ros_base_time_on_ros_msg", false); @@ -64,23 +68,34 @@ void StereoParamHandler::declareParams(std::shared_ptr s declareAndLogParam("i_exposure_offset", 0); declareAndLogParam("i_enable_lazy_publisher", true); declareAndLogParam("i_reverse_stereo_socket_order", false); + declareAndLogParam("i_publish_compressed", false); declareAndLogParam("i_publish_synced_rect_pair", false); declareAndLogParam("i_publish_left_rect", false); declareAndLogParam("i_left_rect_low_bandwidth", false); + declareAndLogParam("i_left_rect_low_bandwidth_profile", 4); + declareAndLogParam("i_left_rect_low_bandwidth_frame_freq", 30); + declareAndLogParam("i_left_rect_low_bandwidth_bitrate", 0); declareAndLogParam("i_left_rect_low_bandwidth_quality", 50); + declareAndLogParam("i_left_rect_low_bandwidth_ffmpeg_encoder", "libx264"); declareAndLogParam("i_left_rect_add_exposure_offset", false); declareAndLogParam("i_left_rect_exposure_offset", 0); declareAndLogParam("i_left_rect_enable_feature_tracker", false); declareAndLogParam("i_left_rect_synced", true); + declareAndLogParam("i_left_rect_publish_compressed", false); declareAndLogParam("i_publish_right_rect", false); declareAndLogParam("i_right_rect_low_bandwidth", false); declareAndLogParam("i_right_rect_low_bandwidth_quality", 50); + declareAndLogParam("i_right_rect_low_bandwidth_profile", 4); + declareAndLogParam("i_right_rect_low_bandwidth_frame_freq", 30); + declareAndLogParam("i_right_rect_low_bandwidth_bitrate", 0); + declareAndLogParam("i_right_rect_low_bandwidth_ffmpeg_encoder", "libx264"); declareAndLogParam("i_right_rect_enable_feature_tracker", false); declareAndLogParam("i_right_rect_add_exposure_offset", false); declareAndLogParam("i_right_rect_exposure_offset", 0); declareAndLogParam("i_right_rect_synced", 
true); + declareAndLogParam("i_right_rect_publish_compressed", false); declareAndLogParam("i_enable_spatial_nn", false); declareAndLogParam("i_spatial_nn_source", "right"); diff --git a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp index dfda1686..f6a0bc21 100644 --- a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp +++ b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp @@ -2,6 +2,7 @@ #include "depthai/device/Device.hpp" #include "depthai/pipeline/Pipeline.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/img_pub.hpp" #include "depthai_ros_driver/dai_nodes/sensors/imu.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sync.hpp" #include "depthai_ros_driver/dai_nodes/sys_logger.hpp" @@ -68,8 +69,8 @@ std::vector> PipelineGenerator::createPipel auto sysLogger = std::make_unique("sys_logger", node, pipeline); daiNodes.push_back(std::move(sysLogger)); } - auto sync = std::make_unique("sync", node, pipeline); if(ph->getParam("i_enable_sync")) { + auto sync = std::make_unique("sync", node, pipeline); for(auto& daiNode : daiNodes) { auto pubs = daiNode->getPublishers(); RCLCPP_DEBUG(node->get_logger(), "Number of synced publishers found for %s: %zu", daiNode->getName().c_str(), pubs.size()); diff --git a/depthai_ros_driver/src/utils.cpp b/depthai_ros_driver/src/utils.cpp index 76a75b78..10a8a6b0 100644 --- a/depthai_ros_driver/src/utils.cpp +++ b/depthai_ros_driver/src/utils.cpp @@ -1,4 +1,7 @@ #include "depthai_ros_driver/utils.hpp" + +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai/pipeline/node/XLinkOut.hpp" namespace depthai_ros_driver { namespace utils { std::string getUpperCaseStr(const std::string& string) { @@ -6,5 +9,13 @@ std::string getUpperCaseStr(const std::string& string) { for(auto& c : upper) c = toupper(c); return upper; } +std::shared_ptr setupXout(std::shared_ptr pipeline, const std::string& name) { + auto xout = pipeline->create(); + xout->setStreamName(name); + xout->input.setBlocking(false); + xout->input.setWaitForMessage(false); + xout->input.setQueueSize(1); + return xout; +}; } // namespace utils } // namespace depthai_ros_driver From d81463e1537f913a40db75a68352f66ea144f5e8 Mon Sep 17 00:00:00 2001 From: Serafadam Date: Wed, 21 Aug 2024 07:52:03 +0000 Subject: [PATCH 2/3] minor cleanup --- depthai_bridge/include/depthai_bridge/ImageConverter.hpp | 2 +- .../include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp | 4 ---- depthai_ros_driver/src/pipeline/pipeline_generator.cpp | 1 + 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp index 9e2e3081..79d058a0 100644 --- a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp +++ b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp @@ -28,7 +28,7 @@ namespace ImageMsgs = sensor_msgs::msg; namespace FFMPEGMsgs = ffmpeg_image_transport_msgs::msg; using ImagePtr = ImageMsgs::Image::SharedPtr; using FFMPEGImagePtr = FFMPEGMsgs::FFMPEGPacket::SharedPtr; -using ImpImgPtr = ImageMsgs::CompressedImage::SharedPtr; +using CompImagePtr = ImageMsgs::CompressedImage::SharedPtr; using TimePoint = std::chrono::time_point; diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp index 009ad332..1fcda95f 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp +++ 
b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/img_pub.hpp @@ -2,10 +2,6 @@ #include -#include "depthai-shared/common/CameraBoardSocket.hpp" -#include "depthai-shared/datatype/RawImgFrame.hpp" -#include "depthai-shared/properties/VideoEncoderProperties.hpp" -#include "depthai/common/CameraExposureOffset.hpp" #include "depthai/pipeline/Node.hpp" #include "depthai/pipeline/datatype/ADatatype.hpp" #include "depthai_ros_driver/utils.hpp" diff --git a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp index f6a0bc21..5dcd28e9 100644 --- a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp +++ b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp @@ -75,6 +75,7 @@ std::vector> PipelineGenerator::createPipel auto pubs = daiNode->getPublishers(); RCLCPP_DEBUG(node->get_logger(), "Number of synced publishers found for %s: %zu", daiNode->getName().c_str(), pubs.size()); if(pubs.empty()) { + continue; } else { std::for_each(pubs.begin(), pubs.end(), [&sync](auto& pub) { sync->addPublisher(pub); From 28b70a384405df6de97dff9ef3e43a13cafbb9db Mon Sep 17 00:00:00 2001 From: Serafadam Date: Wed, 21 Aug 2024 13:26:07 +0000 Subject: [PATCH 3/3] fix publishing bugs in release mode, fix parameter bugs --- .../dai_nodes/sensors/sync.hpp | 2 +- .../param_handlers/base_param_handler.hpp | 6 +++++ .../src/dai_nodes/sensors/mono.cpp | 4 +-- .../src/dai_nodes/sensors/rgb.cpp | 2 +- .../src/dai_nodes/sensors/sync.cpp | 7 +++-- depthai_ros_driver/src/dai_nodes/stereo.cpp | 26 ++++++++++++------- .../src/param_handlers/imu_param_handler.cpp | 8 +++--- .../param_handlers/sensor_param_handler.cpp | 2 +- .../param_handlers/stereo_param_handler.cpp | 7 ++--- .../src/pipeline/pipeline_generator.cpp | 9 ++----- 10 files changed, 43 insertions(+), 30 deletions(-) diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sync.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sync.hpp index 52dadcb0..947c14f1 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sync.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sync.hpp @@ -39,7 +39,7 @@ class Sync : public BaseNode { void setNames() override; void setXinXout(std::shared_ptr pipeline) override; void closeQueues() override; - void addPublisher(std::shared_ptr publisher); + void addPublishers(const std::vector>& pubs); private: std::unique_ptr paramHandler; diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp index 496c9894..6fc5d88c 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp @@ -27,12 +27,18 @@ class BaseParamHandler { template T getParam(const std::string& paramName) { T value; + if(!baseNode->has_parameter(getFullParamName(paramName))) { + RCLCPP_WARN(baseNode->get_logger(), "Parameter %s not found", getFullParamName(paramName).c_str()); + } baseNode->get_parameter(getFullParamName(paramName), value); return value; } template T getOtherNodeParam(const std::string& daiNodeName, const std::string& paramName) { T value; + if(!baseNode->has_parameter(getFullParamName(daiNodeName, paramName))) { + RCLCPP_WARN(baseNode->get_logger(), "Parameter %s not found", getFullParamName(daiNodeName, paramName).c_str()); + } 
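// NOTE: context for the has_parameter() guard added above. rclcpp::Node::get_parameter(name, value)
// does not throw for an undeclared parameter; it returns false and leaves `value` untouched
// (here: uninitialized), so a renamed key such as the old i_publish_left_rect vs. the new
// i_left_rect_publish_topic would silently yield a bogus value instead of failing loudly.
// A minimal usage sketch of the guarded read:
//
//   bool enabled = false;  // stays false if the key was never declared
//   if(!baseNode->get_parameter(getFullParamName("i_left_rect_publish_topic"), enabled)) {
//       RCLCPP_WARN(baseNode->get_logger(), "Parameter %s not found", getFullParamName("i_left_rect_publish_topic").c_str());
//   }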
baseNode->get_parameter(getFullParamName(daiNodeName, paramName), value); return value; } diff --git a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp index d901a12a..9174e854 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp @@ -38,7 +38,7 @@ void Mono::setXinXout(std::shared_ptr pipeline) { utils::VideoEncoderConfig encConfig; encConfig.profile = static_cast(ph->getParam("i_low_bandwidth_profile")); encConfig.bitrate = ph->getParam("i_low_bandwidth_bitrate"); - encConfig.frameFreq = ph->getParam("i_low_bandwidth_keyframe_frequency"); + encConfig.frameFreq = ph->getParam("i_low_bandwidth_frame_freq"); encConfig.quality = ph->getParam("i_low_bandwidth_quality"); encConfig.enabled = ph->getParam("i_low_bandwidth"); @@ -92,7 +92,7 @@ void Mono::link(dai::Node::Input in, int /*linkType*/) { std::vector> Mono::getPublishers() { std::vector> publishers; - if(ph->getParam("i_publish_topic")) { + if(ph->getParam("i_publish_topic") && ph->getParam("i_synced")) { publishers.push_back(imagePublisher); } return publishers; diff --git a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp index 1b20b761..2ce908b3 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp @@ -50,7 +50,7 @@ void RGB::setXinXout(std::shared_ptr pipeline) { utils::VideoEncoderConfig encConfig; encConfig.profile = static_cast(ph->getParam("i_low_bandwidth_profile")); encConfig.bitrate = ph->getParam("i_low_bandwidth_bitrate"); - encConfig.frameFreq = ph->getParam("i_low_bandwidth_keyframe_frequency"); + encConfig.frameFreq = ph->getParam("i_low_bandwidth_frame_freq"); encConfig.quality = ph->getParam("i_low_bandwidth_quality"); encConfig.enabled = lowBandwidth; diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp index efb56d0e..d91030b3 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sync.cpp @@ -71,8 +71,11 @@ void Sync::closeQueues() { outQueue->close(); } -void Sync::addPublisher(std::shared_ptr publisher) { - publishers.push_back(publisher); +void Sync::addPublishers(const std::vector>& pubs) { + for(auto& pub : pubs) { + pub->link(getInputByName(pub->getQueueName())); + } + publishers.insert(publishers.end(), pubs.begin(), pubs.end()); } } // namespace dai_nodes diff --git a/depthai_ros_driver/src/dai_nodes/stereo.cpp b/depthai_ros_driver/src/dai_nodes/stereo.cpp index 21afca67..f44830d3 100644 --- a/depthai_ros_driver/src/dai_nodes/stereo.cpp +++ b/depthai_ros_driver/src/dai_nodes/stereo.cpp @@ -97,7 +97,7 @@ void Stereo::setXinXout(std::shared_ptr pipeline) { stereoPub = setupOutput(pipeline, stereoQName, stereoLinkChoice, ph->getParam("i_synced"), encConf); } - if(ph->getParam("i_publish_left_rect") || ph->getParam("i_publish_synced_rect_pair")) { + if(ph->getParam("i_left_rect_publish_topic") || ph->getParam("i_publish_synced_rect_pair")) { utils::VideoEncoderConfig encConf; encConf.profile = static_cast(ph->getParam("i_left_rect_low_bandwidth_profile")); encConf.bitrate = ph->getParam("i_left_rect_low_bandwidth_bitrate"); @@ -109,7 +109,7 @@ void Stereo::setXinXout(std::shared_ptr pipeline) { pipeline, leftRectQName, [&](auto input) { stereoCamNode->rectifiedLeft.link(input); }, ph->getParam("i_left_rect_synced"), encConf); } - if(ph->getParam("i_publish_right_rect") 
|| ph->getParam("i_publish_synced_rect_pair")) { + if(ph->getParam("i_right_rect_publish_topic") || ph->getParam("i_publish_synced_rect_pair")) { utils::VideoEncoderConfig encConf; encConf.profile = static_cast(ph->getParam("i_right_rect_low_bandwidth_profile")); encConf.bitrate = ph->getParam("i_right_rect_low_bandwidth_bitrate"); @@ -189,7 +189,9 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { convConfig.expOffset = static_cast(ph->getParam("i_exposure_offset")); convConfig.reverseSocketOrder = ph->getParam("i_reverse_stereo_socket_order"); convConfig.alphaScalingEnabled = ph->getParam("i_enable_alpha_scaling"); - convConfig.alphaScaling = ph->getParam("i_alpha_scaling"); + if(convConfig.alphaScalingEnabled) { + convConfig.alphaScaling = ph->getParam("i_alpha_scaling"); + } convConfig.outputDisparity = ph->getParam("i_output_disparity"); utils::ImgPublisherConfig pubConf; @@ -225,10 +227,10 @@ void Stereo::setupQueues(std::shared_ptr device) { if(ph->getParam("i_publish_topic")) { setupStereoQueue(device); } - if(ph->getParam("i_publish_left_rect") || ph->getParam("i_publish_synced_rect_pair")) { + if(ph->getParam("i_left_rect_publish_topic") || ph->getParam("i_publish_synced_rect_pair")) { setupLeftRectQueue(device); } - if(ph->getParam("i_publish_right_rect") || ph->getParam("i_publish_synced_rect_pair")) { + if(ph->getParam("i_right_rect_publish_topic") || ph->getParam("i_publish_synced_rect_pair")) { setupRightRectQueue(device); } if(ph->getParam("i_publish_synced_rect_pair")) { @@ -254,10 +256,10 @@ void Stereo::closeQueues() { if(ph->getParam("i_publish_topic")) { stereoPub->closeQueue(); } - if(ph->getParam("i_publish_left_rect")) { + if(ph->getParam("i_left_rect_publish_topic")) { leftRectPub->closeQueue(); } - if(ph->getParam("i_publish_right_rect")) { + if(ph->getParam("i_right_rect_publish_topic")) { rightRectPub->closeQueue(); } if(ph->getParam("i_publish_synced_rect_pair")) { @@ -290,7 +292,7 @@ void Stereo::link(dai::Node::Input in, int linkType) { std::vector> Stereo::getPublishers() { std::vector> pubs; - if(ph->getParam("i_synced")) { + if(ph->getParam("i_publish_topic") && ph->getParam("i_synced")) { pubs.push_back(stereoPub); } if(ph->getParam("i_left_rect_publish_topic") && ph->getParam("i_left_rect_synced")) { @@ -300,9 +302,13 @@ std::vector> Stereo::getPublishe pubs.push_back(rightRectPub); } auto pubsLeft = left->getPublishers(); - pubs.insert(pubs.end(), pubsLeft.begin(), pubsLeft.end()); + if(!pubsLeft.empty()) { + pubs.insert(pubs.end(), pubsLeft.begin(), pubsLeft.end()); + } auto pubsRight = right->getPublishers(); - pubs.insert(pubs.end(), pubsRight.begin(), pubsRight.end()); + if(!pubsRight.empty()) { + pubs.insert(pubs.end(), pubsRight.begin(), pubsRight.end()); + } return pubs; } diff --git a/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp b/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp index 40fe43ce..68da77b7 100644 --- a/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp @@ -8,9 +8,7 @@ namespace depthai_ros_driver { namespace param_handlers { -ImuParamHandler::ImuParamHandler(std::shared_ptr node, const std::string& name) : BaseParamHandler(node, name) {} -ImuParamHandler::~ImuParamHandler() = default; -void ImuParamHandler::declareParams(std::shared_ptr imu, const std::string& imuType) { +ImuParamHandler::ImuParamHandler(std::shared_ptr node, const std::string& name) : BaseParamHandler(node, name) { imuSyncMethodMap = { {"COPY", 
dai::ros::ImuSyncMethod::COPY}, {"LINEAR_INTERPOLATE_GYRO", dai::ros::ImuSyncMethod::LINEAR_INTERPOLATE_GYRO}, @@ -23,7 +21,11 @@ void ImuParamHandler::declareParams(std::shared_ptr imu, const s {"GEOMAGNETIC_ROTATION_VECTOR", dai::IMUSensor::GEOMAGNETIC_ROTATION_VECTOR}, {"ARVR_STABILIZED_ROTATION_VECTOR", dai::IMUSensor::ARVR_STABILIZED_ROTATION_VECTOR}, {"ARVR_STABILIZED_GAME_ROTATION_VECTOR", dai::IMUSensor::ARVR_STABILIZED_GAME_ROTATION_VECTOR}}; +} +ImuParamHandler::~ImuParamHandler() = default; +void ImuParamHandler::declareParams(std::shared_ptr imu, const std::string& imuType) { declareAndLogParam("i_get_base_device_timestamp", false); + declareAndLogParam("i_max_q_size", 8); auto messageType = declareAndLogParam("i_message_type", "IMU"); declareAndLogParam("i_sync_method", "LINEAR_INTERPOLATE_ACCEL"); declareAndLogParam("i_acc_cov", 0.0); diff --git a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp index 0ff4ea05..23a87395 100644 --- a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp @@ -21,7 +21,7 @@ void SensorParamHandler::declareCommonParams(dai::CameraBoardSocket socket) { declareAndLogParam("i_max_q_size", 30); declareAndLogParam("i_low_bandwidth", false); declareAndLogParam("i_low_bandwidth_profile", 4); - declareAndLogParam("i_low_bandiwdth_frame_freq", 30); + declareAndLogParam("i_low_bandwidth_frame_freq", 30); declareAndLogParam("i_low_bandwidth_bitrate", 0); declareAndLogParam("i_low_bandwidth_quality", 50); declareAndLogParam("i_low_bandwidth_ffmpeg_encoder", "libx264"); diff --git a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp index da56d8b7..6954ea12 100644 --- a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp @@ -57,7 +57,7 @@ void StereoParamHandler::declareParams(std::shared_ptr s declareAndLogParam("i_low_bandwidth", false); declareAndLogParam("i_low_bandwidth_quality", 50); declareAndLogParam("i_low_bandwidth_profile", 4); - declareAndLogParam("i_low_bandiwdth_frame_freq", 30); + declareAndLogParam("i_low_bandwidth_frame_freq", 30); declareAndLogParam("i_low_bandwidth_bitrate", 0); declareAndLogParam("i_low_bandwidth_ffmpeg_encoder", "libx264"); declareAndLogParam("i_output_disparity", false); @@ -69,9 +69,10 @@ void StereoParamHandler::declareParams(std::shared_ptr s declareAndLogParam("i_enable_lazy_publisher", true); declareAndLogParam("i_reverse_stereo_socket_order", false); declareAndLogParam("i_publish_compressed", false); + declareAndLogParam("i_calibration_file", ""); declareAndLogParam("i_publish_synced_rect_pair", false); - declareAndLogParam("i_publish_left_rect", false); + declareAndLogParam("i_left_rect_publish_topic", false); declareAndLogParam("i_left_rect_low_bandwidth", false); declareAndLogParam("i_left_rect_low_bandwidth_profile", 4); declareAndLogParam("i_left_rect_low_bandwidth_frame_freq", 30); @@ -84,7 +85,7 @@ void StereoParamHandler::declareParams(std::shared_ptr s declareAndLogParam("i_left_rect_synced", true); declareAndLogParam("i_left_rect_publish_compressed", false); - declareAndLogParam("i_publish_right_rect", false); + declareAndLogParam("i_right_rect_publish_topic", false); declareAndLogParam("i_right_rect_low_bandwidth", false); declareAndLogParam("i_right_rect_low_bandwidth_quality", 50); 
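// NOTE: the *_publish_compressed parameters in this series map to ImgPublisherConfig::publishCompressed.
// A minimal sketch of what that flag plausibly enables inside ImagePublisher::setup(); the member names
// and topic suffixes are assumptions, while the message types and the CompImagePtr / FFMPEGImagePtr
// aliases are the ones added to depthai_bridge in this patch set.
//
//   if(pubConfig.publishCompressed) {
//       compImgPub = node->create_publisher<sensor_msgs::msg::CompressedImage>(
//           pubConfig.topicName + pubConfig.topicSuffix + "/compressed", rclcpp::QoS(10), pubOptions);
//       ffmpegPub = node->create_publisher<ffmpeg_image_transport_msgs::msg::FFMPEGPacket>(
//           pubConfig.topicName + pubConfig.topicSuffix + "/ffmpeg", rclcpp::QoS(10), pubOptions);
//   }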
declareAndLogParam("i_right_rect_low_bandwidth_profile", 4); diff --git a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp index 5dcd28e9..d3eb6429 100644 --- a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp +++ b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp @@ -74,13 +74,8 @@ std::vector> PipelineGenerator::createPipel for(auto& daiNode : daiNodes) { auto pubs = daiNode->getPublishers(); RCLCPP_DEBUG(node->get_logger(), "Number of synced publishers found for %s: %zu", daiNode->getName().c_str(), pubs.size()); - if(pubs.empty()) { - continue; - } else { - std::for_each(pubs.begin(), pubs.end(), [&sync](auto& pub) { - sync->addPublisher(pub); - pub->link(sync->getInputByName(pub->getQueueName())); - }); + if(!pubs.empty()) { + sync->addPublishers(pubs); } } daiNodes.push_back(std::move(sync));