Commit a7585f53 authored by Joan Solà Ortega

Remove old commented code and organize a bit

parent b11945b1
2 merge requests: !3 After cmake and const refactor, !1 Resolve "Publisher for visual odometry"
Pipeline #9863 failed
@@ -158,11 +158,11 @@ void PublisherVisionDebug::publishDerived()
         return;

     // Get capture image
     auto cap_img = std::dynamic_pointer_cast<CaptureImage>(processor_vision_->getLast());
     auto cap_img_origin = std::dynamic_pointer_cast<CaptureImage>(processor_vision_->getOrigin());
     assert(cap_img != nullptr && "Received Capture is not of type CaptureImage!");
     assert(cap_img_origin != nullptr && "Received origin Capture is not of type CaptureImage!");

     auto track_matrix = processor_vision_->getTrackMatrix();  // copy track matrix
@@ -171,13 +171,11 @@ void PublisherVisionDebug::publishDerived()
     {
         // Extract cv image
         cv::Mat cv_img_debug;
-        cv::cvtColor(cap_img->getImage(), cv_img_debug, cv::COLOR_GRAY2BGR);
         cv::Mat cv_img_debug_preprocess;
         cv::cvtColor(cap_img->getImage(), cv_img_debug_preprocess, cv::COLOR_GRAY2BGR);
-        // cv::cvtColor(cap_img_origin->getImage(), cv_img_debug, cv::Color_GRAY2BGR);
+        cv::cvtColor(cap_img->getImage(), cv_img_debug, cv::COLOR_GRAY2BGR);

         // Draw all tracks
         showTracks(cv_img_debug, track_matrix, cap_img);
@@ -196,17 +194,12 @@ void PublisherVisionDebug::publishDerived()
         // Convert to image msg
         cv_bridge::CvImagePtr cv_msg = boost::make_shared<cv_bridge::CvImage>();
         cv_msg->header.stamp.sec = cap_img->getTimeStamp().getSeconds();
         cv_msg->header.stamp.nsec = cap_img->getTimeStamp().getNanoSeconds();
         cv_msg->encoding = sensor_msgs::image_encodings::BGR8;
         cv_msg->image = cv_img_debug;
-        cv_bridge::CvImagePtr cv_msg_preprocess = boost::make_shared<cv_bridge::CvImage>();
-        cv_msg_preprocess->header.stamp.sec = cap_img->getTimeStamp().getSeconds();
-        cv_msg_preprocess->header.stamp.nsec = cap_img->getTimeStamp().getNanoSeconds();
-        cv_msg_preprocess->encoding = sensor_msgs::image_encodings::BGR8;
-        cv_msg_preprocess->image = cv_img_debug_preprocess;

         // convert to proper ROS type and publish
         auto cv_msg_ros = cv_msg->toImageMsg();
@@ -214,8 +207,13 @@ void PublisherVisionDebug::publishDerived()
         if (tracks_preprocess_.show_ && tracks_preprocess_.show_on_diff_topic_)
         {
-            auto cv_msg_ros_preprocess = cv_msg_preprocess->toImageMsg();
-            publisher_image_preprocess_.publish(cv_msg_ros_preprocess);
+            cv_bridge::CvImagePtr cv_msg_preprocess = boost::make_shared<cv_bridge::CvImage>();
+            cv_msg_preprocess->header.stamp.sec = cap_img->getTimeStamp().getSeconds();
+            cv_msg_preprocess->header.stamp.nsec = cap_img->getTimeStamp().getNanoSeconds();
+            cv_msg_preprocess->encoding = sensor_msgs::image_encodings::BGR8;
+            cv_msg_preprocess->image = cv_img_debug_preprocess;
+            auto cv_msg_ros_preprocess = cv_msg_preprocess->toImageMsg();
+            publisher_image_preprocess_.publish(cv_msg_ros_preprocess);
         }

         // store to avoid republishing the same capture
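For reference, both publish branches now follow the same pattern: wrap a cv::Mat debug image in a cv_bridge::CvImage, stamp it from the capture, and publish it as a sensor_msgs::Image. The snippet below is a minimal standalone sketch of that pattern under standard ROS 1 cv_bridge; the helper name publishDebugImage and its parameters are hypothetical and not part of this commit.

#include <boost/make_shared.hpp>
#include <cv_bridge/cv_bridge.h>
#include <opencv2/core.hpp>
#include <ros/ros.h>
#include <sensor_msgs/image_encodings.h>

// Hypothetical helper (not in the commit): wrap a BGR debug image in a
// cv_bridge::CvImage, stamp it, and publish it as a sensor_msgs::Image.
void publishDebugImage(ros::Publisher& _publisher, const cv::Mat& _cv_img, const ros::Time& _stamp)
{
    cv_bridge::CvImagePtr cv_msg = boost::make_shared<cv_bridge::CvImage>();
    cv_msg->header.stamp = _stamp;                              // capture time stamp
    cv_msg->encoding     = sensor_msgs::image_encodings::BGR8;  // tracks are drawn in color
    cv_msg->image        = _cv_img;
    _publisher.publish(cv_msg->toImageMsg());                   // convert to ROS msg and publish
}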
@@ -274,80 +272,6 @@ std::pair<int, int> PublisherVisionDebug::minMaxFeatureInAliveTrack(const TrackM
     return std::pair<int, int>(min, max);
 }

-// std::vector<int> PublisherVisionDebug::colorTrackAndFeatures(int _nb_feature_in_track,
-//                                                              int _max_feature_in_tracks,
-//                                                              int _min_feature_in_tracks,
-//                                                              Color _color_of_features)
-// {
-//     // std::cout << "\n\ncolorTrackAndFeatures" << '\n';
-//     //_color_of_features will be useful with other types of sensor -> change color of tracks and features
-//     std::vector<int> color = {0, 0, 0};
-//     // alpha corresponds to the relative luminosity between tracks
-//     // 0 -> min luminosity to represent a short track
-//     // 1 -> max luminosity to represent a long track
-//     double alpha;
-//     // std::cout << "_max_feature_in_tracks: " << _max_feature_in_tracks << '\n';
-//     if (_max_feature_in_tracks != _min_feature_in_tracks)
-//     {
-//         alpha = (double)(_nb_feature_in_track - _min_feature_in_tracks)
-//                 / (double)(_max_feature_in_tracks - _min_feature_in_tracks);  // to interval [0,1]
-//     }
-//     else
-//     {
-//         alpha = 0;  // put "full luminosity as a default value"
-//         // alpha = 0.5; // put "half luminosity as a default value"
-//     }
-//     double lum = (tracks_.min_luminosity_ + (tracks_.max_luminosity_ - tracks_.min_luminosity_) * (1 - alpha));
-//     if (lum > 255)
-//     {
-//         color[0] = 255;
-//         color[1] = round(lum) - 255;
-//         color[2] = round(lum) - 255;
-//     }
-//     else
-//     {
-//         color[0] = round(lum);
-//         color[1] = 0;
-//         color[2] = 0;
-//     }
-//     switch (_color_of_features)
-//     {
-//         case BLUE:
-//             color[2] = color[0];
-//             color[0] = color[1];
-//             break;
-//         case GREEN:
-//             color[1] = color[0];
-//             color[0] = color[2];
-//             break;
-//         case YELLOW:
-//             color[1] = color[0];
-//             break;
-//         case MAGENTA:
-//             color[2] = color[0];
-//             break;
-//         case CYAN:
-//             color[2] = color[0];
-//             color[0] = color[1];
-//             color[1] = color[2];
-//             break;
-//         case GREY:
-//             color[2] = color[0];
-//             color[1] = color[0];
-//             break;
-//         case RED:
-//             break;
-//         default:
-//             break;
-//     }
-//     return color;
-// }

 cv::Scalar PublisherVisionDebug::colorTrackAndFeatures(int _nb_feature_in_track,
                                                        int _max_feature_in_tracks,
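The commented-out block removed above still documents the track-to-color mapping: a track's feature count is normalized to alpha in [0, 1] against the shortest and longest alive tracks, and a luminosity is interpolated between tracks_.min_luminosity_ and tracks_.max_luminosity_ with a (1 - alpha) factor, so alpha = 0 maps to the maximum and alpha = 1 to the minimum. Below is a minimal standalone sketch of that interpolation with the member parameters passed in explicitly; the helper is hypothetical and simply clamps to 255 instead of redistributing the excess into the other channels as the old code did.

#include <algorithm>

// Hypothetical standalone version of the luminosity interpolation from the
// removed commented code: normalize the track length, then interpolate.
double trackLuminosity(int _nb_feature_in_track,
                       int _min_feature_in_tracks,
                       int _max_feature_in_tracks,
                       double _min_luminosity,
                       double _max_luminosity)
{
    // alpha in [0,1]: 0 for the shortest alive track, 1 for the longest
    double alpha = 0.0;
    if (_max_feature_in_tracks != _min_feature_in_tracks)
        alpha = double(_nb_feature_in_track - _min_feature_in_tracks) /
                double(_max_feature_in_tracks - _min_feature_in_tracks);

    // same formula as the removed code: alpha = 0 -> max luminosity, alpha = 1 -> min
    double lum = _min_luminosity + (_max_luminosity - _min_luminosity) * (1.0 - alpha);

    // clamp to the 8-bit range instead of spilling the excess (simplification)
    return std::min(lum, 255.0);
}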
@@ -421,45 +345,6 @@ cv::Scalar PublisherVisionDebug::colorTrackAndFeatures(int _nb_feature_in_track,
 }

-// std::vector<int> PublisherVisionDebug::primaryColor(Color _color)
-// {
-//     std::vector<int> color = {0, 0, 0};
-//     switch (_color)
-//     {
-//         case BLUE:
-//             color[2] = 255;
-//             break;
-//         case GREEN:
-//             color[1] = 255;
-//             break;
-//         case RED:
-//             color[0] = 255;
-//             break;
-//         case YELLOW:
-//             color[0] = 255;
-//             color[1] = 255;
-//             break;
-//         case MAGENTA:
-//             color[0] = 255;
-//             color[2] = 255;
-//             break;
-//         case CYAN:
-//             color[1] = 255;
-//             color[2] = 255;
-//             break;
-//         case GREY:
-//             color[0] = 128;
-//             color[1] = 128;
-//             color[2] = 128;
-//             break;
-//         default:
-//             break;
-//     }
-//     return color;
-// }

 cv::Scalar PublisherVisionDebug::primaryColor(Color _color)
 {
     int R = 0;
...