diff --git a/include/visual_gps_alg.h b/include/visual_gps_alg.h
index f17d86aaacc89dd939783ba39c1abebb29155694..569155e8191c5e8679535709188ed211cce472a5 100644
--- a/include/visual_gps_alg.h
+++ b/include/visual_gps_alg.h
@@ -50,7 +50,7 @@ class VisualGpsAlgorithm
     pthread_mutex_t access_;
 
     // private attributes and methods
-    cv::Mat K,D,R,P;//,tvec,rvec;
+    cv::Mat K,D,R,P;
     geometry_msgs::Pose pose;
     void update_points(std::vector<iri_blob_detector::blob> &blobs, std::vector<cv::Point3f> &points_3d,std::vector<cv::Point2f> &points_img);
 
@@ -126,7 +126,7 @@ class VisualGpsAlgorithm
     // here define all visual_gps_alg interface methods to retrieve and set
     // the driver parameters
     void set_camera_params(cv::Mat &K,cv::Mat &D,cv::Mat &R,cv::Mat &P,unsigned int width,unsigned int height);
-    void compute_pos(std::vector<iri_blob_detector::blob> &blobs1,std::vector<iri_blob_detector::blob> &blobs2,std::vector<iri_blob_detector::blob> &blobs3,std::vector<iri_blob_detector::blob> &blobs4);
+    bool compute_pos(std::vector<iri_blob_detector::blob> &blobs1,std::vector<iri_blob_detector::blob> &blobs2,std::vector<iri_blob_detector::blob> &blobs3,std::vector<iri_blob_detector::blob> &blobs4);
     void get_pose(geometry_msgs::Pose &pose);
 
    /**
diff --git a/src/visual_gps_alg.cpp b/src/visual_gps_alg.cpp
index c607d24185fea13f17ceebf8d8304f06fe6a884c..91e8e4ecc78e32a27b9acbdeb17264547ce99773 100644
--- a/src/visual_gps_alg.cpp
+++ b/src/visual_gps_alg.cpp
@@ -5,15 +5,6 @@ VisualGpsAlgorithm::VisualGpsAlgorithm(void)
 {
   pthread_mutex_init(&this->access_,NULL);
-
-// this->rvec.create(1,3, CV_32F);
-// this->rvec.at<double>(0)=0.0;
-// this->rvec.at<double>(1)=0.0;
-// this->rvec.at<double>(2)=0.0;
-// this->tvec.create(1,3, CV_32F);
-// this->tvec.at<double>(0)=0.0;
-// this->tvec.at<double>(1)=0.0;
-// this->tvec.at<double>(2)=0.0;
 }
 
 VisualGpsAlgorithm::~VisualGpsAlgorithm(void)
@@ -66,49 +57,7 @@ void VisualGpsAlgorithm::set_camera_params(cv::Mat &K,cv::Mat &D,cv::Mat &R,cv::
   this->P=P;
 }
 
-/*void VisualGpsAlgorithm::compute_pos(std::vector<iri_blob_detector::blob> &blobs1,std::vector<iri_blob_detector::blob> &blobs2,std::vector<iri_blob_detector::blob> &blobs3,std::vector<iri_blob_detector::blob> &blobs4)
-{
-  std::vector<cv::Point3f> points_3d;
-  std::vector<cv::Point2f> points_img;
-  double w,x,y,z;
-  cv::Mat R;
-
-  this->update_points(blobs1,points_3d,points_img);
-  this->update_points(blobs2,points_3d,points_img);
-  this->update_points(blobs4,points_3d,points_img);
-  this->update_points(blobs3,points_3d,points_img);
-  if(points_3d.size()>3 && points_img.size()>3)
-  {
-    for(unsigned int i=0;i<points_3d.size();i++)
-      std::cout << points_3d[i].x << "," << points_3d[i].y << "," << points_3d[i].z << "," << "->" << points_img[i].x << "," << points_img[i].y << std::endl;
-    //if(cv::solvePnP(cv::Mat(points_3d),cv::Mat(points_img),this->cam_matrix,cv::Mat(),this->rvec,this->tvec,true,CV_ITERATIVE))
-    //if(cv::solvePnP(cv::Mat(points_3d),cv::Mat(points_img),this->cam_matrix,cv::Mat(),this->rvec,this->tvec,true,CV_P3P))
-    if(cv::solvePnP(cv::Mat(points_3d),cv::Mat(points_img),this->cam_matrix,cv::Mat(),this->rvec,this->tvec,true,CV_EPNP))
-    {
-      cv::Rodrigues(rvec,R);
-      // Matrix to quaternion
-      w = sqrt( std::max( 0.0, 1 + R.at<double>(0, 0) + R.at<double>(1, 1) + R.at<double>(2, 2) ) ) / 2;
-      x = sqrt( std::max( 0.0, 1 + R.at<double>(0, 0) - R.at<double>(1, 1) - R.at<double>(2, 2) ) ) / 2;
-      y = sqrt( std::max( 0.0, 1 - R.at<double>(0, 0) + R.at<double>(1, 1) - R.at<double>(2, 2) ) ) / 2;
-      z = sqrt( std::max( 0.0, 1 - R.at<double>(0, 0) - R.at<double>(1, 1) + R.at<double>(2, 2) ) ) / 2;
-      x = copysign( x, R.at<double>(2, 1) - R.at<double>(1, 2) );
-      y = copysign( y, R.at<double>(0, 2) - R.at<double>(2, 0) );
-      z = copysign( z, R.at<double>(1, 0) - R.at<double>(0, 1) );
-
-      this->pose.position.x = tvec.at<double>(0);
-      this->pose.position.y = tvec.at<double>(1);
-      this->pose.position.z = tvec.at<double>(2);
-      this->pose.orientation.x = x;
-      this->pose.orientation.y = y;
-      this->pose.orientation.z = z;
-      this->pose.orientation.w = w;
-      std::cout << this->pose.position.x << "," << this->pose.position.y << "," << this->pose.position.z << std::endl;
-      std::cout << this->pose.orientation.x << "," << this->pose.orientation.y << "," << this->pose.orientation.z << "," << this->pose.orientation.w << std::endl;
-    }
-  }
-}*/
-
-void VisualGpsAlgorithm::compute_pos(std::vector<iri_blob_detector::blob> &blobs1,std::vector<iri_blob_detector::blob> &blobs2,std::vector<iri_blob_detector::blob> &blobs3,std::vector<iri_blob_detector::blob> &blobs4)
+bool VisualGpsAlgorithm::compute_pos(std::vector<iri_blob_detector::blob> &blobs1,std::vector<iri_blob_detector::blob> &blobs2,std::vector<iri_blob_detector::blob> &blobs3,std::vector<iri_blob_detector::blob> &blobs4)
 {
   std::vector<cv::Point3f> points_3d;
   std::vector<cv::Point2f> points_img;
@@ -117,11 +66,16 @@ void VisualGpsAlgorithm::compute_pos(std::vector<iri_blob_detector::blob> &blobs
   std::vector<Eigen::Vector2d> circle_center;
   std::vector<double> circle_radius;
   double angle,angle_a,angle_b,radius;
+  // estimate orientation
+  double camera_x_avg=0.0,camera_y_avg=0.0,world_x_avg=0.0,world_y_avg=0.0,yaw;
+  double num=0.0,den=0.0;
 
+  // find point correspondence
   this->update_points(blobs1,points_3d,points_img);
   this->update_points(blobs2,points_3d,points_img);
   this->update_points(blobs4,points_3d,points_img);
   this->update_points(blobs3,points_3d,points_img);
+  // rectify points
   cv::fisheye::undistortPoints(points_img,points_img_undist,this->K,this->D,this->R,this->P);
   if(points_3d.size()>3 && points_img.size()>3)
   {
@@ -163,29 +117,54 @@ void VisualGpsAlgorithm::compute_pos(std::vector<iri_blob_detector::blob> &blobs
     this->pose.position.x=sol(0);
     this->pose.position.y=sol(1);
     this->pose.position.z=0.0;
+    // estimate orientation
+    for(unsigned int i=0;i<points_3d.size();i++)
+    {
+      camera_x_avg+=points_img[i].x;
+      camera_y_avg+=points_img[i].y;
+      world_x_avg+=points_3d[i].x;
+      world_y_avg+=points_3d[i].y;
+    }
+    camera_x_avg/=points_3d.size();
+    camera_y_avg/=points_3d.size();
+    world_x_avg/=points_3d.size();
+    world_y_avg/=points_3d.size();
+    for(unsigned int i=0;i<points_3d.size();i++)
+    {
+      num+=(points_img[i].x-camera_x_avg)*(points_3d[i].y-world_y_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].x-world_x_avg);
+      den+=(points_img[i].x-camera_x_avg)*(points_3d[i].x-world_x_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].y-world_y_avg);
+    }
+    yaw=atan2(num,den);
+    this->pose.orientation=tf::createQuaternionMsgFromYaw(yaw);
+    return true;
   }
-  // estimate orientation
-  double camera_x_avg=0.0,camera_y_avg=0.0,world_x_avg=0.0,world_y_avg=0.0,yaw;
-  double num=0.0,den=0.0;
-
-  for(unsigned int i=0;i<points_3d.size();i++)
-  {
-    camera_x_avg+=points_img[i].x;
-    camera_y_avg+=points_img[i].y;
-    world_x_avg+=points_3d[i].x;
-    world_y_avg+=points_3d[i].y;
-  }
-  camera_x_avg/=points_3d.size();
-  camera_y_avg/=points_3d.size();
-  world_x_avg/=points_3d.size();
-  world_y_avg/=points_3d.size();
-  for(unsigned int i=0;i<points_3d.size();i++)
+  else if(points_3d.size()>2 && points_img.size()>2)
   {
-    num+=(points_img[i].x-camera_x_avg)*(points_3d[i].y-world_y_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].x-world_x_avg);
-    den+=(points_img[i].x-camera_x_avg)*(points_3d[i].x-world_x_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].y-world_y_avg);
+    for(unsigned int i=0;i<points_3d.size();i++)
+    {
+      camera_x_avg+=points_img[i].x;
+      camera_y_avg+=points_img[i].y;
+      world_x_avg+=points_3d[i].x;
+      world_y_avg+=points_3d[i].y;
+    }
+    camera_x_avg/=points_3d.size();
+    camera_y_avg/=points_3d.size();
+    world_x_avg/=points_3d.size();
+    world_y_avg/=points_3d.size();
+    for(unsigned int i=0;i<points_3d.size();i++)
+    {
+      num+=(points_img[i].x-camera_x_avg)*(points_3d[i].y-world_y_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].x-world_x_avg);
+      den+=(points_img[i].x-camera_x_avg)*(points_3d[i].x-world_x_avg)-(points_img[i].y-camera_y_avg)*(points_3d[i].y-world_y_avg);
+    }
+    yaw=atan2(num,den);
+    this->pose.orientation=tf::createQuaternionMsgFromYaw(yaw);
+    this->pose.position.x=world_x_avg-cos(yaw)*camera_x_avg+sin(yaw)*camera_y_avg;
+    this->pose.position.y=world_y_avg-sin(yaw)*camera_x_avg-cos(yaw)*camera_y_avg;
+    this->pose.position.z=0.0;
+    return true;
   }
-  yaw=atan2(num,den);
-  this->pose.orientation=tf::createQuaternionMsgFromYaw(yaw);
+  else
+    return false;
 }
 
 void VisualGpsAlgorithm::get_pose(geometry_msgs::Pose &pose)
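Both branches of the new compute_pos() estimate the heading the same way: the matched image and world points are centred on their means, and the yaw is the atan2 of the accumulated cross and dot terms of the centred pairs; the three-point fallback then also recovers the translation from the two centroids. The snippet below is a minimal standalone sketch of that 2-D alignment idea, assuming both point sets share the same axis conventions; it is an illustration, not the class code, and the patch's own expressions are written for its camera image frame, so they are not identical term for term.

```cpp
#include <cmath>
#include <cstddef>
#include <vector>

struct Pt { double x, y; };
struct Pose2D { double x, y, yaw; };

// Illustrative 2-D rigid alignment (hypothetical helper, not part of the patch):
// given matched camera-frame and world-frame points (at least 3, same frame
// handedness assumed), estimate yaw from the accumulated cross/dot products of
// the centred pairs, then the translation from the centroids, so that
// world = R(yaw) * camera + t.
Pose2D align_2d(const std::vector<Pt> &camera, const std::vector<Pt> &world)
{
  double cx = 0.0, cy = 0.0, wx = 0.0, wy = 0.0;
  for (std::size_t i = 0; i < camera.size(); i++)
  {
    cx += camera[i].x; cy += camera[i].y;
    wx += world[i].x;  wy += world[i].y;
  }
  cx /= camera.size(); cy /= camera.size();
  wx /= camera.size(); wy /= camera.size();

  double num = 0.0, den = 0.0;            // accumulated cross and dot terms
  for (std::size_t i = 0; i < camera.size(); i++)
  {
    const double ax = camera[i].x - cx, ay = camera[i].y - cy;
    const double bx = world[i].x - wx,  by = world[i].y - wy;
    num += ax * by - ay * bx;             // cross term
    den += ax * bx + ay * by;             // dot term
  }

  Pose2D p;
  p.yaw = std::atan2(num, den);
  // translation: world centroid minus rotated camera centroid
  p.x = wx - (std::cos(p.yaw) * cx - std::sin(p.yaw) * cy);
  p.y = wy - (std::sin(p.yaw) * cx + std::cos(p.yaw) * cy);
  return p;
}
```

With only three beacons the circle-intersection solve used in the first branch is not attempted, so the fallback relies entirely on this centroid alignment for both yaw and position.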
diff --git a/src/visual_gps_alg_node.cpp b/src/visual_gps_alg_node.cpp
index 1c4cfdda91265edb077c5d8e94b718836479474c..eb0bd2e3fc6da184d684cc57c63d1ed2786d6ec5 100644
--- a/src/visual_gps_alg_node.cpp
+++ b/src/visual_gps_alg_node.cpp
@@ -65,21 +65,23 @@ void VisualGpsAlgNode::mainNodeThread(void)
   if(this->cam_info_received)
   {
     this->alg_.lock();
-    this->alg_.compute_pos(this->color1_blobs,this->color2_blobs,this->color3_blobs,this->color4_blobs);
-    this->odom_Odometry_msg_.header.stamp=ros::Time::now();
-    this->odom_Odometry_msg_.header.frame_id=this->config_.world_frame;
-    this->odom_Odometry_msg_.child_frame_id=this->config_.odom_gps_frame;
-    this->alg_.get_pose(this->odom_Odometry_msg_.pose.pose);
-    this->odom_publisher_.publish(this->odom_Odometry_msg_);
-    // broadcast TF
-    odom_trans.header.frame_id = this->config_.world_frame;
-    odom_trans.child_frame_id = this->config_.odom_gps_frame;
-    odom_trans.header.stamp = ros::Time::now();
-    odom_trans.transform.translation.x = this->odom_Odometry_msg_.pose.pose.position.x;
-    odom_trans.transform.translation.y = this->odom_Odometry_msg_.pose.pose.position.y;
-    odom_trans.transform.translation.z = this->odom_Odometry_msg_.pose.pose.position.z;
-    odom_trans.transform.rotation=this->odom_Odometry_msg_.pose.pose.orientation;
-    this->odom_broadcaster.sendTransform(odom_trans);
+    if(this->alg_.compute_pos(this->color1_blobs,this->color2_blobs,this->color3_blobs,this->color4_blobs))
+    {
+      this->odom_Odometry_msg_.header.stamp=ros::Time::now();
+      this->odom_Odometry_msg_.header.frame_id=this->config_.world_frame;
+      this->odom_Odometry_msg_.child_frame_id=this->config_.odom_gps_frame;
+      this->alg_.get_pose(this->odom_Odometry_msg_.pose.pose);
+      this->odom_publisher_.publish(this->odom_Odometry_msg_);
+      // broadcast TF
+      odom_trans.header.frame_id = this->config_.world_frame;
+      odom_trans.child_frame_id = this->config_.odom_gps_frame;
+      odom_trans.header.stamp = ros::Time::now();
+      odom_trans.transform.translation.x = this->odom_Odometry_msg_.pose.pose.position.x;
+      odom_trans.transform.translation.y = this->odom_Odometry_msg_.pose.pose.position.y;
+      odom_trans.transform.translation.z = this->odom_Odometry_msg_.pose.pose.position.z;
+      odom_trans.transform.rotation=this->odom_Odometry_msg_.pose.pose.orientation;
+      this->odom_broadcaster.sendTransform(odom_trans);
+    }
     this->alg_.unlock();
   }
 }
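Because compute_pos() now reports whether a fix was obtained, any caller of the algorithm class (not just this node) should gate get_pose() on that result. The sketch below is a hypothetical caller; the function name publish_if_fix and the include path used for the blob type are assumptions, and the commented body is left to the caller.

```cpp
#include <vector>
#include <geometry_msgs/Pose.h>
#include <iri_blob_detector/blob.h>   // assumed header for iri_blob_detector::blob
#include "visual_gps_alg.h"

// Hypothetical caller (not part of the patch): compute_pos() now returns a
// bool, so the pose is read and used only when a position fix was found.
void publish_if_fix(VisualGpsAlgorithm &alg,
                    std::vector<iri_blob_detector::blob> &blobs1,
                    std::vector<iri_blob_detector::blob> &blobs2,
                    std::vector<iri_blob_detector::blob> &blobs3,
                    std::vector<iri_blob_detector::blob> &blobs4)
{
  geometry_msgs::Pose pose;

  alg.lock();
  if (alg.compute_pos(blobs1, blobs2, blobs3, blobs4))
  {
    alg.get_pose(pose);   // only meaningful after a successful compute_pos()
    // ... fill and publish an odometry message / broadcast TF from 'pose' ...
  }
  alg.unlock();
}
```

This mirrors the guarded block in mainNodeThread() above: when fewer than three beacons are matched, nothing is published and no transform is broadcast for that cycle.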