Commit 1d881ee6 authored by Fernando Herrero

Algorithms fix: take into account transform between base and camera, transform from pixels to 3d points. Added marker publishing
parent 19366371
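
The core of the change is a back-projection of each detected blob centre from (principal-point-centred) pixel coordinates to a 3-D point at the known landmark height, which is then moved into the base frame with the static base-to-camera transform. A minimal sketch of that step follows; the helper name and signature are assumed for illustration, and it keeps the same yaw-only rotation assumption as the patched code:

#include <Eigen/Eigen>
#include <opencv2/core/core.hpp>

// Back-project a principal-point-centred pixel to the landmark height and
// express the result in the base frame (sketch only, not part of the commit).
cv::Point3f pixel_to_base(const cv::Point2f &px_centered, double fx, double fy,
                          double z_landmark_in_camera,
                          const Eigen::Matrix3d &R_base_camera,
                          const Eigen::Vector3d &t_base_camera)
{
  Eigen::Vector3d p_cam(px_centered.x / fx * z_landmark_in_camera,  // x at the landmark height
                        px_centered.y / fy * z_landmark_in_camera,  // y at the landmark height
                        z_landmark_in_camera);                      // fixed depth (landmark height in camera frame)
  Eigen::Vector3d p_base = R_base_camera * p_cam + t_base_camera;   // camera frame -> base frame
  return cv::Point3f(p_base(0), p_base(1), p_base(2));
}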
......@@ -6,7 +6,7 @@ find_package(catkin REQUIRED)
# ********************************************************************
# Add catkin additional components here
# ********************************************************************
find_package(catkin REQUIRED COMPONENTS iri_base_algorithm nav_msgs iri_blob_detector sensor_msgs geometry_msgs dynamic_reconfigure message_generation tf)
find_package(catkin REQUIRED COMPONENTS iri_base_algorithm visualization_msgs nav_msgs iri_blob_detector sensor_msgs geometry_msgs dynamic_reconfigure message_generation tf)
## System dependencies are found with CMake's conventions
# find_package(Boost REQUIRED COMPONENTS system)
......@@ -62,7 +62,7 @@ catkin_package(
# ********************************************************************
# Add ROS and IRI ROS run time dependencies
# ********************************************************************
CATKIN_DEPENDS iri_base_algorithm nav_msgs iri_blob_detector sensor_msgs geometry_msgs dynamic_reconfigure message_runtime tf
CATKIN_DEPENDS iri_base_algorithm visualization_msgs nav_msgs iri_blob_detector sensor_msgs geometry_msgs dynamic_reconfigure message_runtime tf
# ********************************************************************
# Add system and labrobotica run time dependencies here
# ********************************************************************
......@@ -99,6 +99,8 @@ target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBRARIES})
# Add message headers dependencies
# ********************************************************************
# add_dependencies(${PROJECT_NAME} <msg_package_name>_generate_messages_cpp)
add_dependencies(${PROJECT_NAME} geometry_msgs_generate_messages_cpp)
add_dependencies(${PROJECT_NAME} visualization_msgs_generate_messages_cpp)
add_dependencies(${PROJECT_NAME} nav_msgs_generate_messages_cpp)
add_dependencies(${PROJECT_NAME} iri_blob_detector_generate_messages_cpp)
# ********************************************************************
......
......@@ -39,8 +39,12 @@ gen = ParameterGenerator()
# Name Type Reconfiguration level Description Default Min Max
#gen.add("velocity_scale_factor", double_t, 0, "Maximum velocity scale factor", 0.5, 0.0, 1.0)
gen.add("world_frame" , str_t, 0, "Wolrd reference frame", "map")
gen.add("world_frame" , str_t, 0, "World reference frame", "map")
gen.add("odom_gps_frame" , str_t, 0, "GPS odometry reference frame", "gps_odom")
gen.add("base_frame" , str_t, 0, "Base frame id", "base_link")
gen.add("camera_frame" , str_t, 0, "Camera frame id", "top_camera_optical")
gen.add("undistort_points" , bool_t, 0, "Enable point undistorsion", True)
gen.add("publish_markers" , bool_t, 0, "Enable marker publishing", True)
gen.add("color1_id" , str_t, 0, "Color 1 identifier", "color1")
gen.add("color1_x" , double_t, 0, "Color 1 X position", 0.5, -100.0,100.0)
gen.add("color1_y" , double_t, 0, "Color 1 Y position", 0.5, -100.0,100.0)
......@@ -58,4 +62,7 @@ gen.add("color4_x" , double_t, 0, "
gen.add("color4_y" , double_t, 0, "Color 4 Y position", 0.5, -100.0,100.0)
gen.add("color4_z" , double_t, 0, "Color 4 Z position", 0.5, -100.0,100.0)
exit(gen.generate(PACKAGE, "VisualGpsAlgorithm", "VisualGps"))
world_frame: map
odom_gps_frame: gps_odom
base_frame: base_link
camera_frame: top_camera_optical
\ No newline at end of file
......@@ -29,10 +29,14 @@
#include <iri_blob_detector/blob.h>
#include <geometry_msgs/Pose.h>
#include <tf/transform_listener.h>
//include visual_gps_alg main library
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <Eigen/Eigen>
/**
* \brief IRI ROS Specific Driver Class
*
......@@ -52,8 +56,19 @@ class VisualGpsAlgorithm
// private attributes and methods
cv::Mat K,D,R,P;
geometry_msgs::Pose pose;
tf::StampedTransform transform_base_to_camera;
Eigen::Vector3d t_base_2_camera;
Eigen::Matrix3d R_base_camera;
Eigen::Vector2d t_world_2_base;
double yaw_world_2_base;
std::vector<std::string> points_ids;
std::vector<cv::Point3f> map_points;
std::vector<cv::Point3f> img_points;
void update_points(std::vector<iri_blob_detector::blob> &blobs, std::vector<cv::Point3f> &points_3d,std::vector<cv::Point2f> &points_img);
void update_points(std::vector<iri_blob_detector::blob> &blobs, std::vector<cv::Point3f> &points_3d,std::vector<cv::Point2f> &pixels_img, std::vector<cv::Point3f> &points_img_3d, std::vector<std::string> &ids);
void OptimalRigidTransformation(std::vector<cv::Point3f> pw, std::vector<cv::Point3f> pc, geometry_msgs::Pose &pose);
public:
/**
* \brief define config type
......@@ -128,6 +143,8 @@ class VisualGpsAlgorithm
void set_camera_params(cv::Mat &K,cv::Mat &D,cv::Mat &R,cv::Mat &P,unsigned int width,unsigned int height);
bool compute_pos(std::vector<iri_blob_detector::blob> &blobs1,std::vector<iri_blob_detector::blob> &blobs2,std::vector<iri_blob_detector::blob> &blobs3,std::vector<iri_blob_detector::blob> &blobs4);
void get_pose(geometry_msgs::Pose &pose);
void get_points(std::vector<cv::Point3f> &map_points, std::vector<cv::Point3f> &img_points, std::vector<std::string> &ids);
void set_transform_base_to_camera(tf::StampedTransform t);
/**
* \brief Destructor
......
......@@ -29,7 +29,10 @@
#include "visual_gps_alg.h"
#include <tf/transform_broadcaster.h>
// [publisher subscriber headers]
#include <geometry_msgs/PoseStamped.h>
#include <visualization_msgs/MarkerArray.h>
#include <nav_msgs/Odometry.h>
#include <iri_blob_detector/blob_array.h>
#include <sensor_msgs/CameraInfo.h>
......@@ -47,6 +50,12 @@ class VisualGpsAlgNode : public algorithm_base::IriBaseAlgorithm<VisualGpsAlgori
{
private:
// [publisher attributes]
ros::Publisher pose_publisher_;
geometry_msgs::PoseStamped pose_PoseStamped_msg_;
ros::Publisher markers_publisher_;
visualization_msgs::MarkerArray markers_MarkerArray_msg_;
ros::Publisher odom_publisher_;
nav_msgs::Odometry odom_Odometry_msg_;
......@@ -103,6 +112,11 @@ class VisualGpsAlgNode : public algorithm_base::IriBaseAlgorithm<VisualGpsAlgori
// [action client attributes]
tf::TransformBroadcaster odom_broadcaster;
tf::TransformListener tf_listener;
bool loaded_static_tf;
bool load_static_tf();
void publish_markers(std::vector<cv::Point3f> map_points, std::vector<cv::Point3f> img_points, std::vector<std::string> ids);
/**
* \brief config variable
......
......@@ -58,5 +58,6 @@
<remap from="~/camera/camera_info" to="/usb_cam/camera_info"/>
<remap from="~/set_camera_params" to="/usb_cam/set_parameters"/>
<rosparam file="$(arg positions_path)/$(arg positions_file)" command="load" />
<rosparam file="$(arg positions_path)/params.yaml" command="load" />
</node>
</launch>
......@@ -58,5 +58,6 @@
<remap from="~/camera/camera_info" to="/usb_cam/camera_info"/>
<remap from="~/set_camera_params" to="/usb_cam/set_parameters"/>
<rosparam file="$(arg positions_path)/$(arg positions_file)" command="load" />
<rosparam file="$(arg positions_path)/params.yaml" command="load" />
</node>
</launch>
......@@ -51,6 +51,8 @@
<!-- <doc_depend>doxygen</doc_depend> -->
<buildtool_depend>catkin</buildtool_depend>
<build_depend>iri_base_algorithm</build_depend>
<run_depend>visualization_msgs</run_depend>
<build_depend>visualization_msgs</build_depend>
<build_depend>nav_msgs</build_depend>
<build_depend>iri_blob_detector</build_depend>
<build_depend>sensor_msgs</build_depend>
......
......@@ -23,12 +23,12 @@ void VisualGpsAlgorithm::config_update(Config& config, uint32_t level)
}
// VisualGpsAlgorithm Public API
void VisualGpsAlgorithm::update_points(std::vector<iri_blob_detector::blob> &blobs, std::vector<cv::Point3f> &points_3d,std::vector<cv::Point2f> &points_img)
void VisualGpsAlgorithm::update_points(std::vector<iri_blob_detector::blob> &blobs, std::vector<cv::Point3f> &points_3d,std::vector<cv::Point2f> &pixels_img, std::vector<cv::Point3f> &points_img_3d, std::vector<std::string> &ids)
{
unsigned int max_index=0;
double max_size=0;
if(blobs.size()!=0)// some blobs detected for color1
if(blobs.size()!=0)// some blobs detected
{
if(blobs[0].id.find(this->config_.color1_id)!=std::string::npos)// blob 1 matches with color1
points_3d.push_back(cv::Point3f(this->config_.color1_x,this->config_.color1_y,this->config_.color1_z));
......@@ -40,12 +40,35 @@ void VisualGpsAlgorithm::update_points(std::vector<iri_blob_detector::blob> &blo
points_3d.push_back(cv::Point3f(this->config_.color4_x,this->config_.color4_y,this->config_.color4_z));
// get the biggest blob
for(unsigned int i=0;i<blobs.size();i++)
{
if(blobs[i].size>max_size)
{
max_size=blobs[i].size;
max_index=i;
}
points_img.push_back(cv::Point2f(blobs[max_index].center_x-this->K.at<double>(0,2),blobs[max_index].center_y-this->K.at<double>(1,2)));
}
if(this->config_.undistort_points)
{
// rectify points
std::vector<cv::Point2f> pixels_img_undist;
cv::fisheye::undistortPoints(pixels_img,pixels_img_undist,this->K,this->D,this->R,this->P);
pixels_img = pixels_img_undist;
}
pixels_img.push_back(cv::Point2f(blobs[max_index].center_x-this->K.at<double>(0,2),blobs[max_index].center_y-this->K.at<double>(1,2)));
Eigen::Vector3d p_projected;
static double z_i = points_3d[0].z - this->t_base_2_camera(2); // map height
p_projected(0) = pixels_img.back().x / this->K.at<double>(0,0) * z_i; // x projected to the map height
p_projected(1) = pixels_img.back().y / this->K.at<double>(1,1) * z_i; // y projected to the map height
p_projected(2) = z_i; // z is the map height
// move to baselink frame
Eigen::Vector3d p_base_link = this->R_base_camera * p_projected + this->t_base_2_camera;
points_img_3d.push_back(cv::Point3f( p_base_link(0),p_base_link(1), p_base_link(2)));
ids.push_back(blobs[0].id);
}
}
......@@ -60,9 +83,12 @@ void VisualGpsAlgorithm::set_camera_params(cv::Mat &K,cv::Mat &D,cv::Mat &R,cv::
bool VisualGpsAlgorithm::compute_pos(std::vector<iri_blob_detector::blob> &blobs1,std::vector<iri_blob_detector::blob> &blobs2,std::vector<iri_blob_detector::blob> &blobs3,std::vector<iri_blob_detector::blob> &blobs4)
{
bool ok=false;
std::vector<cv::Point3f> points_3d;
std::vector<cv::Point2f> points_img;
std::vector<cv::Point2f> points_img_undist;
std::vector<std::string> ids;
std::vector<cv::Point3f> map_points;
std::vector<cv::Point2f> pixels_img;
std::vector<cv::Point3f> img_points;
std::vector<cv::Point3f> img_points_undist;
Eigen::Vector2d point_a,point_b,ab_vector,center_world,tmp_vector;
std::vector<Eigen::Vector2d> circle_center;
std::vector<double> circle_radius;
......@@ -72,113 +98,74 @@ bool VisualGpsAlgorithm::compute_pos(std::vector<iri_blob_detector::blob> &blobs
double num=0.0,den=0.0;
// find point correspondence
this->update_points(blobs1,points_3d,points_img);
this->update_points(blobs2,points_3d,points_img);
this->update_points(blobs4,points_3d,points_img);
this->update_points(blobs3,points_3d,points_img);
// rectify points
cv::fisheye::undistortPoints(points_img,points_img_undist,this->K,this->D,this->R,this->P);
//points_img = points_img_undist;
if(points_3d.size()>3 && points_img.size()>3)
{
ROS_DEBUG("VisualGpsAlgorithm: using 4 detections");
for(unsigned int i=0;i<points_3d.size();i++)
{
for(unsigned int j=i+1;j<points_3d.size();j++)
{
point_a << points_3d[i].x , points_3d[i].y;
point_b << points_3d[j].x , points_3d[j].y;
ab_vector=point_b-point_a;
angle_a=atan2(-points_img[i].y,points_img[i].x);
if(angle_a<0.0)
angle_a+=2*3.14159;
angle_b=atan2(-points_img[j].y,points_img[j].x);
if(angle_b<0.0)
angle_b+=2*3.14159;
angle=angle_a-angle_b;
if(fabs(cos(angle))<0.9)
{
angle=3.14159/2.0-angle;
radius=ab_vector.norm()/(2.0*fabs(cos(angle)));
circle_radius.push_back(radius);
tmp_vector << -ab_vector(1) , ab_vector(0);
center_world=point_a+(ab_vector+tan(angle)*tmp_vector)/2.0;
circle_center.push_back(center_world);
}
}
}
Eigen::MatrixXd A(circle_center.size()-1,2);
Eigen::VectorXd B(circle_center.size()-1);
Eigen::Vector2d sol;
for(unsigned int i=0;i<circle_center.size()-1;i++)
{
A(i,0)=circle_center[i+1](0)-circle_center[0](0);
A(i,1)=circle_center[i+1](1)-circle_center[0](1);
B(i)=pow(circle_center[i+1](0),2)-pow(circle_center[0](0),2)+(pow(circle_center[i+1](1),2)-pow(circle_center[0](1),2)+pow(circle_radius[0],2)-pow(circle_radius[i+1],2))/2.0;
}
sol=(A.transpose()*A).inverse()*A.transpose()*B;
this->pose.position.x=sol(0);
this->pose.position.y=sol(1);
this->pose.position.z=0.0;
this->update_points(blobs1,map_points,pixels_img, img_points, ids);
this->update_points(blobs2,map_points,pixels_img, img_points, ids);
this->update_points(blobs4,map_points,pixels_img, img_points, ids);
this->update_points(blobs3,map_points,pixels_img, img_points, ids);
// estimate orientation
for(unsigned int i=0;i<points_3d.size();i++)
{
camera_x_avg+=points_img[i].x;
camera_y_avg+=points_img[i].y;
world_x_avg+=points_3d[i].x;
world_y_avg+=points_3d[i].y;
}
camera_x_avg/=points_3d.size();
camera_y_avg/=points_3d.size();
world_x_avg/=points_3d.size();
world_y_avg/=points_3d.size();
for(unsigned int i=0;i<points_3d.size();i++)
{
num+=(points_img[i].x-camera_x_avg)*(points_3d[i].y-world_y_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].x-world_x_avg);
den+=(points_img[i].x-camera_x_avg)*(points_3d[i].x-world_x_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].y-world_y_avg);
}
yaw=atan2(num,den);
this->pose.orientation=tf::createQuaternionMsgFromYaw(yaw);
ROS_INFO("VisualGpsAlgorithm:compute_pos: pose x,y,yaw=%f,%f,%f",this->pose.position.x, this->pose.position.y, yaw );
ok = true;
}
else if(points_3d.size()>2 && points_img.size()>2)
if(map_points.size()>1 && img_points.size()>1)
{
ROS_DEBUG("VisualGpsAlgorithm: using 3 detections");
for(unsigned int i=0;i<points_3d.size();i++)
{
//ROS_INFO("VisualGpsAlgorithm:compute_pos: ________cx,cy= %f, %f ________ wx,wy= %f, %f", points_img[i].x, points_img[i].y, points_3d[i].x, points_3d[i].y );
camera_x_avg+=points_img[i].x;
camera_y_avg+=points_img[i].y;
world_x_avg+=points_3d[i].x;
world_y_avg+=points_3d[i].y;
//ROS_INFO("VisualGpsAlgorithm:compute_pos: ---");
}
camera_x_avg/=points_3d.size();
camera_y_avg/=points_3d.size();
world_x_avg/=points_3d.size();
world_y_avg/=points_3d.size();
for(unsigned int i=0;i<points_3d.size();i++)
{
num+=(points_img[i].x-camera_x_avg)*(points_3d[i].y-world_y_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].x-world_x_avg);
den+=(points_img[i].x-camera_x_avg)*(points_3d[i].x-world_x_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].y-world_y_avg);
}
yaw=atan2(num,den);
this->pose.orientation=tf::createQuaternionMsgFromYaw(yaw);
this->pose.position.x=world_x_avg-cos(yaw)*camera_x_avg+sin(yaw)*camera_y_avg;
this->pose.position.y=world_y_avg-sin(yaw)*camera_x_avg-cos(yaw)*camera_y_avg;
this->pose.position.z=0.0;
ROS_INFO("VisualGpsAlgorithm:compute_pos: pose x,y,yaw=%f,%f,%f",this->pose.position.x, this->pose.position.y, yaw );
this->OptimalRigidTransformation(map_points, img_points, this->pose);
ok = true;
}
else
{
ROS_WARN("VisualGpsAlgorithm:compute_pos: not enough blobs found: %lu", img_points.size());
ok = false;
}
this->map_points = map_points;
this->img_points = img_points;
this->points_ids = ids;
return ok;
}
void VisualGpsAlgorithm::OptimalRigidTransformation(std::vector<cv::Point3f> pw, std::vector<cv::Point3f> pc, geometry_msgs::Pose &pose)
{
double camera_x_avg=0.0,camera_y_avg=0.0,world_x_avg=0.0,world_y_avg=0.0;
double num=0.0,den=0.0;
for(unsigned int i=0;i<pw.size();i++)
{
camera_x_avg+=pc[i].x;
camera_y_avg+=pc[i].y;
world_x_avg+=pw[i].x;
world_y_avg+=pw[i].y;
}
camera_x_avg/=pw.size();
camera_y_avg/=pw.size();
world_x_avg/=pw.size();
world_y_avg/=pw.size();
for(unsigned int i=0;i<pw.size();i++)
{
num+=(pc[i].x-camera_x_avg)*(pw[i].y-world_y_avg)-(pc[i].y-camera_y_avg)*(pw[i].x-world_x_avg);
den+=(pc[i].x-camera_x_avg)*(pw[i].x-world_x_avg)+(pc[i].y-camera_y_avg)*(pw[i].y-world_y_avg);
}
this->yaw_world_2_base=atan2(num,den);
pose.orientation=tf::createQuaternionMsgFromYaw(this->yaw_world_2_base);
pose.position.x=world_x_avg-cos(this->yaw_world_2_base)*camera_x_avg+sin(this->yaw_world_2_base)*camera_y_avg;
pose.position.y=world_y_avg-sin(this->yaw_world_2_base)*camera_x_avg-cos(this->yaw_world_2_base)*camera_y_avg;
pose.position.z=0.0;
}
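
For readability, the closed-form 2-D alignment that OptimalRigidTransformation computes above (pw: map points, pc: camera-derived points in the base frame; barred symbols are per-set means) can be written as:

\[
\hat\theta = \operatorname{atan2}\!\left(\sum_i (x_{c,i}-\bar x_c)(y_{w,i}-\bar y_w)-(y_{c,i}-\bar y_c)(x_{w,i}-\bar x_w),\;
\sum_i (x_{c,i}-\bar x_c)(x_{w,i}-\bar x_w)+(y_{c,i}-\bar y_c)(y_{w,i}-\bar y_w)\right)
\]
\[
\begin{pmatrix} t_x \\ t_y \end{pmatrix} = \begin{pmatrix} \bar x_w \\ \bar y_w \end{pmatrix} - R(\hat\theta)\begin{pmatrix} \bar x_c \\ \bar y_c \end{pmatrix}, \qquad t_z = 0 .
\]

This is the standard least-squares (Procrustes) rotation-and-translation fit restricted to the plane; it restates what the code does and adds no new behaviour.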
void VisualGpsAlgorithm::get_pose(geometry_msgs::Pose &pose)
{
pose=this->pose;
}
void VisualGpsAlgorithm::get_points(std::vector<cv::Point3f> &map_points, std::vector<cv::Point3f> &img_points, std::vector<std::string> &ids)
{
map_points = this->map_points;
img_points = this->img_points;
ids = this->points_ids;
}
void VisualGpsAlgorithm::set_transform_base_to_camera(tf::StampedTransform t)
{
this->transform_base_to_camera = t;
this->t_base_2_camera << t.getOrigin().x(), t.getOrigin().y(), t.getOrigin().z();
double yaw_base_2_camera = tf::getYaw(t.getRotation());
this->R_base_camera = Eigen::Matrix3d::Identity();
this->R_base_camera.topLeftCorner<2,2>() = Eigen::Rotation2D<double>(yaw_base_2_camera).matrix();
}
\ No newline at end of file
......@@ -2,12 +2,15 @@
#include <dynamic_reconfigure/Reconfigure.h>
VisualGpsAlgNode::VisualGpsAlgNode(void) :
algorithm_base::IriBaseAlgorithm<VisualGpsAlgorithm>()
algorithm_base::IriBaseAlgorithm<VisualGpsAlgorithm>(),
tf_listener(ros::Duration(10.f))
{
//init class attributes if necessary
this->loop_rate_ = 10;//in [Hz]
// [init publishers]
this->pose_publisher_ = this->private_node_handle_.advertise<geometry_msgs::PoseStamped>("pose", 1);
this->markers_publisher_ = this->private_node_handle_.advertise<visualization_msgs::MarkerArray>("markers", 1);
this->odom_publisher_ = this->public_node_handle_.advertise<nav_msgs::Odometry>("odom", 1);
// [init subscribers]
......@@ -37,6 +40,8 @@ VisualGpsAlgNode::VisualGpsAlgNode(void) :
// [init action servers]
// [init action clients]
this->loaded_static_tf=false;
}
VisualGpsAlgNode::~VisualGpsAlgNode(void)
......@@ -55,12 +60,27 @@ void VisualGpsAlgNode::mainNodeThread(void)
tf::Quaternion tf_quat;
geometry_msgs::TransformStamped odom_trans;
// [fill msg structures]
// Initialize the topic message structure
//this->pose_PoseStamped_msg_.data = my_var;
// Initialize the topic message structure
//this->markers_MarkerArray_msg_.data = my_var;
// [fill srv structure and make request to the server]
// [fill action structure and make request to the action server]
// [publish messages]
// Uncomment the following line to publish the topic message
//this->pose_publisher_.publish(this->pose_PoseStamped_msg_);
// Uncomment the following line to publish the topic message
//this->markers_publisher_.publish(this->markers_MarkerArray_msg_);
if(!this->loaded_static_tf)
this->loaded_static_tf = this->load_static_tf();
if(this->cam_info_received)
{
......@@ -81,6 +101,20 @@ void VisualGpsAlgNode::mainNodeThread(void)
odom_trans.transform.translation.z = this->odom_Odometry_msg_.pose.pose.position.z;
odom_trans.transform.rotation=this->odom_Odometry_msg_.pose.pose.orientation;
this->odom_broadcaster.sendTransform(odom_trans);
this->pose_PoseStamped_msg_.pose = this->odom_Odometry_msg_.pose.pose;
this->pose_PoseStamped_msg_.header = this->odom_Odometry_msg_.header;
this->pose_PoseStamped_msg_.header.frame_id = this->config_.world_frame;
this->pose_publisher_.publish(this->pose_PoseStamped_msg_);
if(this->config_.publish_markers)
{
std::vector<cv::Point3f> map_points;
std::vector<cv::Point3f> img_points;
std::vector<std::string> points_ids;
this->alg_.get_points(map_points, img_points, points_ids);
this->publish_markers(map_points, img_points, points_ids);
}
}
this->alg_.unlock();
}
......@@ -107,6 +141,8 @@ void VisualGpsAlgNode::camera_info_callback(const sensor_msgs::CameraInfo::Const
}
}
void VisualGpsAlgNode::camera_info_mutex_enter(void)
{
pthread_mutex_lock(&this->camera_info_mutex_);
......@@ -264,3 +300,153 @@ int main(int argc,char *argv[])
{
return algorithm_base::main<VisualGpsAlgNode>(argc, argv, "visual_gps_alg_node");
}
bool VisualGpsAlgNode::load_static_tf()
{
bool ok=false;
std::string target_frame = this->config_.camera_frame;
std::string source_frame = this->config_.base_frame;
ros::Time target_time = ros::Time::now();
try
{
while ( !this->tf_listener.waitForTransform(source_frame, target_frame, target_time, ros::Duration(1.)) )
{
ROS_WARN("VisualGpsAlgNode: load_static_tf: tf not received!");
ros::Duration(0.1).sleep();
}
tf::StampedTransform transform_base_2_camera;
this->tf_listener.lookupTransform(source_frame, target_frame, target_time, transform_base_2_camera);
ROS_INFO("VisualGpsAlgNode:load_static_tf: read camera-base transform: x,y,z=%f,%f,%f, yaw=%f", transform_base_2_camera.getOrigin().x(), transform_base_2_camera.getOrigin().y(), transform_base_2_camera.getOrigin().z(), tf::getYaw(transform_base_2_camera.getRotation()));
this->alg_.set_transform_base_to_camera(transform_base_2_camera);
ok=true;
}
catch (tf::TransformException &ex)
{
ROS_ERROR("VisualGpsAlgNode:load_static_tf: %s",ex.what());
}
return ok;
}
void VisualGpsAlgNode::publish_markers(std::vector<cv::Point3f> map_points, std::vector<cv::Point3f> img_points, std::vector<std::string> ids)
{
this->markers_MarkerArray_msg_.markers.clear();
for(uint i=0; i<map_points.size(); i++)
{
visualization_msgs::Marker marker;
marker.header.frame_id = this->config_.base_frame;
marker.ns = "map";
marker.id = i;
marker.header.stamp = ros::Time::now();
marker.type = visualization_msgs::Marker::CUBE;
marker.action = visualization_msgs::Marker::ADD;
marker.scale.x = 0.6;
marker.scale.y = 0.6;
marker.scale.z = 0.1;
marker.pose.position.x = img_points[i].x;
marker.pose.position.y = img_points[i].y;
marker.pose.position.z = img_points[i].z;
marker.pose.orientation.x = 0.0;
marker.pose.orientation.y = 0.0;
marker.pose.orientation.z = 0.0;
marker.pose.orientation.w = 1.0;
if (ids[i].find("green")!=std::string::npos)
{
marker.color.r = 0.0;
marker.color.g = 1.0;
marker.color.b = 0.0;
}
else if (ids[i].find("red")!=std::string::npos)
{
marker.color.r = 1.0;
marker.color.g = 0.0;
marker.color.b = 0.0;
}
else if (ids[i].find("yellow")!=std::string::npos)
{
marker.color.r = 1.0;
marker.color.g = 1.0;
marker.color.b = 0.0;
}
else if (ids[i].find("blue")!=std::string::npos)
{
marker.color.r = 0.0;
marker.color.g = 0.0;
marker.color.b = 1.0;
}
else
{
ROS_WARN("VisualGpsAlgNode::publish_markers: unrecognized point id: %s", ids[i].c_str());
marker.color.r = 1.0;
marker.color.g = 1.0;
marker.color.b = 1.0;
}
marker.color.a = 1.0;
marker.lifetime = ros::Duration(1.0f);
this->markers_MarkerArray_msg_.markers.push_back(marker);
}
for(uint i=0; i<img_points.size(); i++)
{
visualization_msgs::Marker marker;
marker.header.frame_id = this->config_.base_frame;
marker.ns = "img";
marker.id = i+img_points.size();
marker.header.stamp = ros::Time::now();
marker.type = visualization_msgs::Marker::SPHERE;
marker.action = visualization_msgs::Marker::ADD;
marker.scale.x = 0.5;
marker.scale.y = 0.5;
marker.scale.z = 0.5;
marker.pose.position.x = img_points[i].x;
marker.pose.position.y = img_points[i].y;
marker.pose.position.z = img_points[i].z;
marker.pose.orientation.x = 0.0;
marker.pose.orientation.y = 0.0;
marker.pose.orientation.z = 0.0;
marker.pose.orientation.w = 1.0;
if (ids[i].find("green")!=std::string::npos)
{
marker.color.r = 0.0;
marker.color.g = 1.0;
marker.color.b = 0.0;
}
else if (ids[i].find("red")!=std::string::npos)
{
marker.color.r = 1.0;
marker.color.g = 0.0;
marker.color.b = 0.0;
}
else if (ids[i].find("yellow")!=std::string::npos)
{
marker.color.r = 1.0;
marker.color.g = 1.0;
marker.color.b = 0.0;
}
else if (ids[i].find("blue")!=std::string::npos)
{
marker.color.r = 0.0;
marker.color.g = 0.0;
marker.color.b = 1.0;
}
else
{
ROS_WARN("VisualGpsAlgNode::publish_markers: unrecognized point id: %s", ids[i].c_str());
marker.color.r = 1.0;
marker.color.g = 1.0;
marker.color.b = 1.0;
}
marker.color.a = 1.0;
marker.lifetime = ros::Duration(1.0f);
this->markers_MarkerArray_msg_.markers.push_back(marker);
}
this->markers_publisher_.publish(this->markers_MarkerArray_msg_);
}