Polar2d #1

Status: Open. Wants to merge 7 commits into base: indigo-devel.

4 changes: 2 additions & 2 deletions sick_visionary_t_driver/CMakeLists.txt
@@ -6,14 +6,15 @@ find_package(catkin REQUIRED COMPONENTS
image_transport
roscpp
roslaunch
sensor_msgs
)

find_package(Boost REQUIRED COMPONENTS system)
find_package(OpenCV REQUIRED)

catkin_package(
INCLUDE_DIRS include
CATKIN_DEPENDS roscpp
CATKIN_DEPENDS roscpp sensor_msgs
)

### BUILD ###
@@ -26,7 +27,6 @@ add_executable(sick_visionary_t_driver_node src/node.cpp src/driver.cpp)

target_link_libraries(sick_visionary_t_driver_node
${catkin_LIBRARIES}
${OpenCV_LIBS}
)

### TEST ###
11 changes: 11 additions & 0 deletions sick_visionary_t_driver/description/urdf/visionary_t.urdf.xacro
@@ -0,0 +1,11 @@
<?xml version="1.0"?>
<robot xmlns:xacro="http://www.ros.org/wiki/xacro" name="sick_visionary_t">

<xacro:include filename="$(find sick_visionary_t_driver)/description/urdf/visionary_t_macro.urdf.xacro" />
<link name="base_link"/>

<xacro:sick_visionary_t parent="base_link" has_holder="true" holder_tilt="${-25.0 * pi / 180.0}"> <!-- float literals: plain -25/180 floors to -1 under Python 2 xacro -->
<origin xyz="0.0 0 0.0" rpy="0 0 0" />
</xacro:sick_visionary_t>

</robot>
63 changes: 63 additions & 0 deletions sick_visionary_t_driver/description/urdf/visionary_t_macro.urdf.xacro
@@ -0,0 +1,63 @@
<?xml version="1.0"?>
<robot xmlns:xacro="http://www.ros.org/wiki/xacro">

<xacro:macro name="sick_visionary_t" params="parent *origin has_holder holder_tilt">

<xacro:if value="${has_holder}">

<joint name="visionary_t_holder_joint" type="fixed">
<xacro:insert_block name="origin" />
<parent link="${parent}"/>
<child link="visionary_t_holder_link" />
</joint>

<link name="visionary_t_holder_link">
<!-- <xacro:default_inertial/>
<visual>
<origin xyz="0 0 0" rpy="0 0 0" />
<geometry>
<mesh filename="package://iirob_description/environments/meshes/r3_cell/table.dae" />
</geometry>
</visual>
<collision>
<origin xyz="0 0 0.005" rpy="0 0 0" />
<geometry>
<mesh filename="package://iirob_description/environments/meshes/r3_cell/table.stl" scale="0.001 0.001 0.001" />
</geometry>
</collision> -->
</link>

<joint name="visionary_t_holder_camera_joint" type="fixed">
<origin xyz="0 0 0.1" rpy="0 ${holder_tilt} 0" />
<parent link="visionary_t_holder_link"/>
<child link="visionary_t_camera_link" />
</joint>

</xacro:if>

<xacro:unless value="${has_holder}">

<joint name="visionary_t_holder_joint" type="fixed">
<xacro:insert_block name="origin" />
<parent link="${parent}"/>
<child link="visionary_t_camera_link" />
</joint>

</xacro:unless>

<link name="visionary_t_camera_link">
</link>

<joint name="visionary_t_camera_sensor_joint" type="fixed">
<!-- y=34.5 mm -->
<origin xyz="0 0 0.042" rpy="${pi/2} 0 ${pi/2}" />
<parent link="visionary_t_camera_link"/>
<child link="visionary_t_sensor" />
</joint>

<link name="visionary_t_sensor">
</link>

</xacro:macro>

</robot>
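
The macro above only creates fixed joints, so once robot_state_publisher runs (see the launch file change below) TF carries the static chain base_link -> visionary_t_holder_link -> visionary_t_camera_link -> visionary_t_sensor (the holder link is skipped when has_holder is false). A minimal C++ sketch for querying that transform, assuming those frame names and a hypothetical helper node (not part of this PR):

#include <ros/ros.h>
#include <tf/transform_listener.h>

int main(int argc, char **argv) {
	ros::init(argc, argv, "visionary_t_tf_probe");   // hypothetical helper node
	ros::NodeHandle nh;
	tf::TransformListener listener;

	ros::Rate rate(1.0);
	while (nh.ok()) {
		try {
			tf::StampedTransform base_to_sensor;
			listener.waitForTransform("base_link", "visionary_t_sensor",
			                          ros::Time(0), ros::Duration(1.0));
			listener.lookupTransform("base_link", "visionary_t_sensor",
			                         ros::Time(0), base_to_sensor);
			ROS_INFO("visionary_t_sensor at x=%.3f y=%.3f z=%.3f",
			         base_to_sensor.getOrigin().x(),
			         base_to_sensor.getOrigin().y(),
			         base_to_sensor.getOrigin().z());
		} catch (tf::TransformException &ex) {
			ROS_WARN("%s", ex.what());
		}
		rate.sleep();
	}
	return 0;
}
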
15 changes: 13 additions & 2 deletions sick_visionary_t_driver/launch/sick_visionary_t_driver.launch
@@ -1,13 +1,24 @@
<?xml version="1.0"?>

<!-- Entry point for the SICK Visionary-T driver -->
<launch>

<arg name="camera" default="sick_visionary_t_driver" />

<arg name="device_ip" default="141.3.80.137"/>
<arg name="use_robot" default="false"/>

<param name="sensor_description" command="$(find xacro)/xacro.py --inorder '$(find sick_visionary_t_driver)/description/urdf/visionary_t.urdf.xacro'" unless="$(arg use_robot)"/>

<node pkg="sick_visionary_t_driver" type="sick_visionary_t_driver_node" name="$(arg camera)" output="screen" >
<!-- IP address of the Visionary-T device, default: 192.168.1.10 -->
<param name="remote_device_ip" value="192.168.1.10" />

<param name="remote_device_ip" value="$(arg device_ip)" />
<param name="frame_id" value="camera" />
</node>



<node pkg="robot_state_publisher" type="robot_state_publisher" name="sensor_state_publisher" output="screen" unless="$(arg use_robot)">
<remap from="robot_description" to="sensor_description" />
</node>
</launch>
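
The launch file above passes device_ip through as the private remote_device_ip parameter and pins frame_id to "camera". For orientation, a short roscpp sketch of how such private parameters are typically read; this is an assumption for illustration, the real parsing lives in src/node.cpp outside the hunks shown below, and the defaults here are taken from the old launch file:

// Typical private-parameter handling (sketch, not the driver's actual code):
ros::NodeHandle pnh("~");
std::string remote_device_ip, frame_id;
pnh.param<std::string>("remote_device_ip", remote_device_ip, "192.168.1.10");
pnh.param<std::string>("frame_id", frame_id, "camera");
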
1 change: 1 addition & 0 deletions sick_visionary_t_driver/package.xml
@@ -19,5 +19,6 @@
<depend>roscpp</depend>
<depend>roslaunch</depend>
<depend>sensor_msgs</depend>
<exec_depend>robot_state_publisher</exec_depend>

</package>
163 changes: 112 additions & 51 deletions sick_visionary_t_driver/src/node.cpp
@@ -43,18 +43,21 @@
#include <image_transport/image_transport.h>
#include <sensor_msgs/image_encodings.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/LaserScan.h>
#include <cv_bridge/cv_bridge.h>
#include <boost/thread.hpp>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/distortion_models.h>
#include <std_msgs/ByteMultiArray.h>


/// Flag whether invalid points are to be replaced by NaN
const bool SUPPRESS_INVALID_POINTS = true;

/// Distance code for data outside the TOF range
const uint16_t NARE_DISTANCE_VALUE = 0xffffU;
image_transport::Publisher g_pub_depth, g_pub_confidence, g_pub_intensity;
ros::Publisher g_pub_camera_info, g_pub_points, g_pub_ios;
ros::Publisher g_pub_camera_info, g_pub_points, g_pub_ios, g_pub_laserscan;
Driver_3DCS::Control *g_control = NULL;
std::string g_frame_id;
/// If true: prevents skipping of frames and publishes everything; otherwise only the newest data is published to the ROS world
Expand Down Expand Up @@ -89,32 +92,33 @@ void on_frame(const boost::shared_ptr<Driver_3DCS::Data> &data) {

void publish_frame(const Driver_3DCS::Data &data) {
bool published_anything = false;

sensor_msgs::ImagePtr msg;
// sensor_msgs::CameraInfoPtr ciPtr;
std_msgs::Header header;
header.stamp = ros::Time::now();
header.frame_id = g_frame_id;

if(g_pub_camera_info.getNumSubscribers()>0) {
if(g_pub_camera_info.getNumSubscribers()>0) { // note: at one point this was also conditioned on laserscan subscribers
published_anything = true;

// std::cout<<"camera info ok"<<std::endl;
sensor_msgs::CameraInfo ci;
ci.header = header;

// std::cout<<"mid"<<std::endl;

ci.height = data.getCameraParameters().height;
ci.width = data.getCameraParameters().width;
// std::cout<<"camera info clear"<<std::endl;
ci.D.clear();
ci.D.resize(5,0);
ci.D[0] = data.getCameraParameters().k1;
ci.D[1] = data.getCameraParameters().k2;
// std::cout<<"camera info for"<<std::endl;
for(int i=0; i<9; i++) ci.K[i]=0;
ci.K[0] = data.getCameraParameters().fx;
ci.K[4] = data.getCameraParameters().fy;
ci.K[2] = data.getCameraParameters().cx;
ci.K[5] = data.getCameraParameters().cy;
// std::cout<<"camera info for 2 "<<std::endl;
for(int i=0; i<12; i++)
ci.P[i] = 0;//data.getCameraParameters().cam2worldMatrix[i];
//TODO:....
@@ -123,11 +127,19 @@ void publish_frame(const Driver_3DCS::Data &data) {
ci.P[10] = 1;
ci.P[2] = data.getCameraParameters().cx;
ci.P[6] = data.getCameraParameters().cy;
// std::cout<<"camera infor end"<<std::endl;
g_pub_camera_info.publish(ci);
/* if (!ciPtr)
{
ciPtr = boost::make_shared<sensor_msgs::CameraInfo>();
}

sensor_msgs::CameraInfoPtr infoPtr(new sensor_msgs::CameraInfo(ci));
ciPtr = infoPtr;
ciPtr->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;*/
}
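// Layout reminder (standard sensor_msgs/CameraInfo convention, not specific to
// this PR): K is the row-major 3x3 intrinsic matrix and P the 3x4 projection
// matrix, so the assignments above place the intrinsics as
//
//     K = [ fx   0  cx ]          P = [ fx   0  cx  0 ]
//         [  0  fy  cy ]              [  0  fy  cy  0 ]
//         [  0   0   1 ]              [  0   0   1  0 ]
//
// Two things a follow-up could tighten: the zeroing loop leaves K[8] at 0
// (it should be 1), and ci.distortion_model is never set on the published
// message (the commented-out block suggests PLUMB_BOB was intended).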

if(g_pub_depth.getNumSubscribers()>0) {
if(g_pub_depth.getNumSubscribers()>0){
published_anything = true;

msg = cv_bridge::CvImage(std_msgs::Header(), sensor_msgs::image_encodings::TYPE_16UC1, data.get_distance()).toImageMsg();
@@ -148,50 +160,96 @@ void publish_frame(const Driver_3DCS::Data &data) {
msg->header = header;
g_pub_intensity.publish(msg);
}
if(g_pub_points.getNumSubscribers()>0) {
if(g_pub_laserscan.getNumSubscribers()>0) {
published_anything = true;

typedef sensor_msgs::PointCloud2 PointCloud;

// Allocate new point cloud message
PointCloud::Ptr cloud_msg (new PointCloud);
cloud_msg->header = header; // Use depth image time stamp
cloud_msg->height = data.get_distance().rows;
cloud_msg->width = data.get_distance().cols;
cloud_msg->is_dense = false;
cloud_msg->is_bigendian = false;
// msg = cv_bridge::CvImage(std_msgs::Header(), sensor_msgs::image_encodings::TYPE_16UC1, data.get_distance()).toImageMsg();
// msg->header = header;
// std::cout<<"here"<<std::endl;
cv::Mat mat = data.get_distance();
std::cout<<"mat:"<<mat<<"total "<<mat.total()<<std::endl;
//msg->header = header;
int height = mat.rows;  // 144 for the Visionary-T
int width = mat.cols;   // 176
int min;
sensor_msgs::LaserScan scan_msg;
scan_msg.ranges.resize(width);
scan_msg.angle_min = -45.0 * (M_PI / 180.0);
scan_msg.angle_max = 45.0 * (M_PI / 180.0);
scan_msg.angle_increment = (90.0 / width) * (M_PI / 180.0); // plain 90/width would be integer division (0)
scan_msg.range_min = 10.0;    // [mm], raw sensor units
scan_msg.range_max = 10000.0; // [mm]
scan_msg.header = header;
// std::cout<<msg->data.size()<<std::endl;
for (int i =0;i<width;i++)
{
min = 100000;
for(int j=0;j<height;j++)
{

cloud_msg->fields.resize (5);
cloud_msg->fields[0].name = "x"; cloud_msg->fields[1].name = "y"; cloud_msg->fields[2].name = "z";
cloud_msg->fields[3].name = "confidence"; cloud_msg->fields[4].name = "intensity";
int offset = 0;
// All offsets are *4, as all field data types are float32
for (size_t d = 0; d < cloud_msg->fields.size (); ++d, offset += 4)
{
cloud_msg->fields[d].offset = offset;
cloud_msg->fields[d].datatype = (d<3) ? int(sensor_msgs::PointField::FLOAT32) : int(sensor_msgs::PointField::UINT16);
cloud_msg->fields[d].count = 1;
}
cloud_msg->point_step = offset;
cloud_msg->row_step = cloud_msg->point_step * cloud_msg->width;
cloud_msg->data.resize (cloud_msg->height * cloud_msg->width * cloud_msg->point_step);
// std::cout<<"min: "<<min<<"data: "<<mat.at<unsigned short>(j,i)<<"h:"<<j<<"w:"<<i<<std::endl;
if(mat.at<unsigned short>(j,i)>0){
if(mat.at<unsigned short>(j,i)<min){
// std::cout<<"jere"<<std::endl;
min = mat.at<unsigned short>(j,i);
}
}

}

// std::cout<<"here blok "<<i<<" "<<scan_msg.ranges.size()<<std::endl;

scan_msg.ranges[i]=(float)min;

}
// std::cout<<scan_msg.ranges[0]<<std::endl;
g_pub_laserscan.publish(scan_msg);

}
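// Not part of this PR: a hedged sketch of an alternative column-minimum
// projection. It converts the sensor's millimetre readings to the metres that
// sensor_msgs/LaserScan expects, derives the angles from the 90 degree
// horizontal FOV assumed above (not a datasheet value), and skips invalid
// pixels (0 / NARE_DISTANCE_VALUE) the same way the point cloud code below
// does. Assumes M_PI is available via <cmath>.
const cv::Mat depth = data.get_distance();                 // CV_16UC1, [mm]
const double fov_h = 90.0 * M_PI / 180.0;                  // assumed horizontal FOV [rad]
sensor_msgs::LaserScan scan_sketch;
scan_sketch.header = header;
scan_sketch.angle_min = -fov_h / 2.0;
scan_sketch.angle_max = fov_h / 2.0;
scan_sketch.angle_increment = fov_h / (depth.cols - 1);    // last ray lands on angle_max
scan_sketch.range_min = 0.01;                              // [m]
scan_sketch.range_max = 10.0;                              // [m]
scan_sketch.ranges.assign(depth.cols, std::numeric_limits<float>::infinity());
for (int col = 0; col < depth.cols; ++col) {
	uint16_t closest = NARE_DISTANCE_VALUE;
	for (int row = 0; row < depth.rows; ++row) {
		const uint16_t d = depth.at<uint16_t>(row, col);
		if (d != 0 && d != NARE_DISTANCE_VALUE && d < closest)
			closest = d;                               // nearest valid return in this column
	}
	if (closest != NARE_DISTANCE_VALUE)
		scan_sketch.ranges[col] = closest / 1000.0f;   // mm -> m
}
g_pub_laserscan.publish(scan_sketch);
// End of sketch.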
if(g_pub_points.getNumSubscribers()>0) {
published_anything = true;

typedef sensor_msgs::PointCloud2 PointCloud;

// Allocate new point cloud message
PointCloud::Ptr cloud_msg (new PointCloud);
cloud_msg->header = header; // Use depth image time stamp
cloud_msg->height = data.get_distance().rows;
cloud_msg->width = data.get_distance().cols;
cloud_msg->is_dense = false;
cloud_msg->is_bigendian = false;

const float f2rc = data.getCameraParameters().f2rc / 1000.f; // since f2rc is given in [mm], but the pcl message wants [m]
cloud_msg->fields.resize (5);

cloud_msg->fields[3].name = "confidence"; cloud_msg->fields[4].name = "intensity";
int offset = 0;
// All offsets are *4, as all field data types are float32
for (size_t d = 0; d < cloud_msg->fields.size (); ++d, offset += 4)
{
cloud_msg->fields[d].offset = offset;
cloud_msg->fields[d].datatype = (d<3) ? int(sensor_msgs::PointField::FLOAT32) : int(sensor_msgs::PointField::UINT16);
cloud_msg->fields[d].count = 1;
}
cloud_msg->point_step = offset;
cloud_msg->row_step = cloud_msg->point_step * cloud_msg->width;
cloud_msg->data.resize (cloud_msg->height * cloud_msg->width * cloud_msg->point_step);

const float f2rc = data.getCameraParameters().f2rc / 1000.f; // since f2rc is given in [mm], but the pcl message wants [m]
uint16_t *pDepth, *pConf, *pInt;
int cp=0;
for(int i = 0; i < data.get_distance().rows; ++i)
{
pDepth = (uint16_t*)(data.get_distance() .data + (data.get_distance() .step*i) );
pConf = (uint16_t*)(data.get_confidence().data + (data.get_confidence().step*i) );
pInt = (uint16_t*)(data.get_intensity() .data + (data.get_intensity() .step*i) );

uint16_t *pDepth, *pConf, *pInt;
int cp=0;
for(int i = 0; i < data.get_distance().rows; ++i)
for (int j = 0; j < data.get_distance().cols; ++j, ++cp)
{
pDepth = (uint16_t*)(data.get_distance() .data + (data.get_distance() .step*i) );
pConf = (uint16_t*)(data.get_confidence().data + (data.get_confidence().step*i) );
pInt = (uint16_t*)(data.get_intensity() .data + (data.get_intensity() .step*i) );

for (int j = 0; j < data.get_distance().cols; ++j, ++cp)
{
if(pDepth[j]==0 || pDepth[j]==NARE_DISTANCE_VALUE) {
const float bad_point = std::numeric_limits<float>::quiet_NaN();
memcpy (&cloud_msg->data[cp * cloud_msg->point_step + cloud_msg->fields[0].offset], &bad_point, sizeof (float));
memcpy (&cloud_msg->data[cp * cloud_msg->point_step + cloud_msg->fields[1].offset], &bad_point, sizeof (float));
if(pDepth[j]==0 || pDepth[j]==NARE_DISTANCE_VALUE) {
const float bad_point = std::numeric_limits<float>::quiet_NaN();
memcpy (&cloud_msg->data[cp * cloud_msg->point_step + cloud_msg->fields[0].offset], &bad_point, sizeof (float));
memcpy (&cloud_msg->data[cp * cloud_msg->point_step + cloud_msg->fields[1].offset], &bad_point, sizeof (float));
memcpy (&cloud_msg->data[cp * cloud_msg->point_step + cloud_msg->fields[2].offset], &bad_point, sizeof (float));
}
else {
Expand Down Expand Up @@ -227,7 +285,8 @@ void publish_frame(const Driver_3DCS::Data &data) {
}

if(!published_anything) {
if(g_control) g_control->stopStream();
// std::cout<<"here"<<std::endl;
if(g_control) g_control->stopStream();
}
}

Expand Down Expand Up @@ -288,7 +347,8 @@ int main(int argc, char **argv) {
g_pub_intensity = it.advertise("intensity", 1, (image_transport::SubscriberStatusCallback)on_new_subscriber_it, image_transport::SubscriberStatusCallback());
g_pub_camera_info = nh.advertise<sensor_msgs::CameraInfo>("camera_info", 1, (ros::SubscriberStatusCallback)on_new_subscriber_ros, ros::SubscriberStatusCallback());
g_pub_ios = nh.advertise<std_msgs::ByteMultiArray>("ios", 1, (ros::SubscriberStatusCallback)on_new_subscriber_ros, ros::SubscriberStatusCallback());

g_pub_laserscan = nh.advertise<sensor_msgs::LaserScan>("laserscan", 1, (ros::SubscriberStatusCallback)on_new_subscriber_ros, ros::SubscriberStatusCallback());

//wait til end of exec.
ros::spin();

@@ -302,6 +362,7 @@ int main(int argc, char **argv) {
g_pub_camera_info.shutdown();
g_pub_points.shutdown();
g_pub_ios.shutdown();
g_pub_laserscan.shutdown();

return 0;
}
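
For a quick smoke test of the new output, a minimal subscriber could look like the sketch below. The topic name "laserscan" comes from the advertise call above; depending on whether the driver's NodeHandle is private it resolves to /laserscan or /<node name>/laserscan, so check with rostopic list and remap as needed. This helper node is an illustration, not part of this PR.

#include <ros/ros.h>
#include <sensor_msgs/LaserScan.h>
#include <limits>

void onScan(const sensor_msgs::LaserScan::ConstPtr &scan) {
	// Report the closest return of every incoming scan.
	float closest = std::numeric_limits<float>::infinity();
	for (size_t i = 0; i < scan->ranges.size(); ++i)
		if (scan->ranges[i] > 0.0f && scan->ranges[i] < closest)
			closest = scan->ranges[i];
	ROS_INFO("scan with %zu rays, closest return: %f", scan->ranges.size(), closest);
}

int main(int argc, char **argv) {
	ros::init(argc, argv, "laserscan_echo");                    // hypothetical helper node
	ros::NodeHandle nh;
	ros::Subscriber sub = nh.subscribe("laserscan", 1, onScan); // remap to the driver's topic if needed
	ros::spin();
	return 0;
}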