diff --git a/src/examples/feature_detect_descript_match.cpp b/src/examples/feature_detect_descript_match.cpp
index 02658d161a087a116102bbd49415f74762af10f0..407643d57364e291cccbaf862fb7f3c1c7c7daa9 100644
--- a/src/examples/feature_detect_descript_match.cpp
+++ b/src/examples/feature_detect_descript_match.cpp
@@ -54,7 +54,7 @@ int main(int argc, char *argv[])
     int desc_type;
     descriptor.get(desc_type);
 
-    std::cout << std::endl << "Which DESCRIPTOR do you want to test?[default: " << descriptor_list[desc_type] << "]";
+    std::cout << std::endl << "Which DESCRIPTOR do you want to test?[default: " << descriptor_list[desc_type] << "]" << std::endl;
 
 //    desc_type = readFromUser(desc_type);
 
@@ -78,7 +78,7 @@ int main(int argc, char *argv[])
     int match_type;
     matcher.get(match_type);
 
-    std::cout << std::endl << "Which MATCHER do you want to test?[default: " << matchers_list[match_type] << "]";
+    std::cout << std::endl << "Which MATCHER do you want to test?[default: " << matchers_list[match_type] << "]" << std::endl;
 
 //    match_type = readFromUser(match_type);
 
@@ -92,9 +92,9 @@ int main(int argc, char *argv[])
 
     // Get default value
     int match_search_type;
-    matcher.get(match_search_type);
+    matcher.getSearchType(match_search_type);
 
-    std::cout << std::endl << "Which MATCHER do you want to test?[default: " << matchers_search_list[match_search_type] << "]";
+    std::cout << std::endl << "Which MATCHER SEARCH do you want to test?[default: " << matchers_search_list[match_search_type] << "]" << std::endl;
 
 //    match_search_type = readFromUser(match_search_type);
 
@@ -104,9 +104,6 @@ int main(int argc, char *argv[])
 
     // *****************************
 
-    // matcher objects
-    KeyPointVector keypoints_old;
-
     // Open camera
     cv::VideoCapture cam;
     CCamUtils cam_fc;
@@ -116,6 +113,13 @@ int main(int argc, char *argv[])
     cv::startWindowThread();
     cv::namedWindow("Original image", cv::WINDOW_NORMAL);
     cv::namedWindow("Detections", cv::WINDOW_NORMAL);
+    cv::namedWindow("Matches", cv::WINDOW_NORMAL);
+
+    cv::Mat frame_old;
+    KeyPointVector keypoints_old;
+    KeyPointVector good_keypoints;
+    cv::Mat descriptors_old;
+    std::vector<cv::DMatch> good_matches;
 
     // The following line is used to remove the OpenCV "init done" from the terminal
     std::cout << "\e[A" << " " << std::endl;
@@ -123,13 +127,14 @@ int main(int argc, char *argv[])
     for (int nframe = 0; nframe < 1000; ++nframe)
     {
         // Get frame
-        cv::Mat frame;
+        cv::Mat frame;
         cam_fc.getFrame(cam, frame);
+        cv::Mat frame_matches = frame.clone();
 
         // Show ORIGINAL frame
         cv::imshow("Original image", frame);
 
-        // Detector
+        // Detector
         KeyPointVector keypoints;
         keypoints = detector.detectKeyPoints(frame);
 
@@ -140,28 +145,54 @@ int main(int argc, char *argv[])
         // Matcher
         if (nframe > 1)
         {
-            std::vector<cv::DMatch> matches;
-            matcher.match(keypoints,keypoints_old,matches);
+            // TODO: Implement this object creation depending on user preferences
+            // if (matchers_search_list[match_search_type].compare("Match") == 0)
+            //     std::vector<cv::DMatch> matches;
+            // else
+            std::vector< std::vector<cv::DMatch> > matches;
+            matcher.match(descriptors_old, descriptors, matches);
+
+            // Filter matches
+            good_matches.clear();
+            good_keypoints.clear();
+            matcher.filterMatches(keypoints_old, keypoints, matches, frame.cols, frame.rows, good_matches, good_keypoints);
         }
 
-        // update old keypoints
+        // Update objects
         keypoints_old.clear();
+        keypoints_old.reserve(keypoints.size());
         for (unsigned int ii = 0; ii < keypoints.size(); ++ii)
             keypoints_old.push_back(keypoints[ii]);
+        // Keep a deep copy of the current descriptors for matching against the next frame
+        descriptors_old = descriptors.clone();
+        frame_old = frame;
 
         // Show frame with features
         detector.drawKeyFeatures(frame, keypoints);
 
-        std::cout << "\e[A" <<
-        "Detection time: " << detector.getTime() << " " <<
-        "Description time: " << descriptor.getTime() << " " <<
-        "TOTAL time: " << detector.getTime() + descriptor.getTime() << std::endl;
+        // Draw matches
+        if (nframe > 1 && !keypoints_old.empty() && !keypoints.empty())
+        {
+            matcher.drawKeyFeatures(frame_matches, good_keypoints);
+//            cv::drawMatches( frame_old, keypoints_old, frame, keypoints,
+//                             good_matches, frame_matches, cv::Scalar::all(-1), cv::Scalar::all(-1),
+//                             std::vector<char>(), cv::DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
+            for (unsigned int ii = 0; ii < good_keypoints.size(); ++ii)
+                cv::line(frame_matches, good_keypoints[ii].pt, good_keypoints[ii].pt, cv::Scalar(0,255,0), 3);
+            cv::imshow("Matches", frame_matches);
+        }
 
         // Show NEW frame
         cv::imshow("Detections", frame);
 
         cv::waitKey(1);
         // if (cv::waitKey(30) >= 0) break;
-    }
+
+        std::cout << "\e[A" <<
+        "Detection time: " << detector.getTime() << " " <<
+        "Description time: " << descriptor.getTime() << " " <<
+        "Match time: " << matcher.getTime() << " " <<
+        "TOTAL time: " << detector.getTime() + descriptor.getTime() + matcher.getTime() << std::endl;
+    }
 
     cv::destroyAllWindows();
 }
diff --git a/src/feature_matcher/feature_matcher.cpp b/src/feature_matcher/feature_matcher.cpp
index a0a7e997f0818cecb36891d230d5fafc64f92949..de1b97a53bd2c30f3e811a6af26ec13564c0020e 100644
--- a/src/feature_matcher/feature_matcher.cpp
+++ b/src/feature_matcher/feature_matcher.cpp
@@ -32,9 +32,9 @@ void CFeature_Matcher::setAllTypes(void)
 
     // Define all match output variants
     std::vector<std::string> out_types;
-    out_types += "Match", "knnMartch", "radiusMatch";
+    out_types += "Match", "knnMatch", "radiusMatch";
     match_search_types_.set(out_types);
-    match_search_type_ = "Match"; // Default value
+    match_search_type_ = "knnMatch"; // Default value
 }
 
 std::vector<std::string> CFeature_Matcher::listSearchTypes(void)
@@ -42,6 +42,16 @@ std::vector<std::string> CFeature_Matcher::listSearchTypes(void)
 {
     return match_search_types_.list();
 }
 
+void CFeature_Matcher::getSearchType(int& _type)
+{
+    _type = match_search_types_(match_search_type_);
+}
+
+void CFeature_Matcher::getSearchType(std::string& _type)
+{
+    _type = match_search_type_;
+}
+
 bool CFeature_Matcher::setSearchType(const std::string& _type, const CMatcher_Params& _params)
 {
     // TODO: Set parameters for each matcher type
@@ -110,7 +120,7 @@ bool CFeature_Matcher::init(const std::string& _type, const CMatcher_Params& _pa
 }
 
 // TODO: Match features and process matches (e.g. https://github.com/kipr/opencv/blob/master/samples/cpp/detector_descriptor_matcher_evaluation.cpp)
-void CFeature_Matcher::match(const KeyPointVector& _kpts1, const KeyPointVector& _kpts2, std::vector<cv::DMatch>& matches)
+void CFeature_Matcher::match(const cv::Mat& _desc1, const cv::Mat& _desc2, std::vector<cv::DMatch>& matches)
 {
     std::cout << __LINE__ << std::endl;
     if (!is_init_)
@@ -120,9 +130,9 @@ void CFeature_Matcher::match(const KeyPointVector&
     {
         clock_t tStart = clock();
         // TODO: use parameters related with the search type (match_search_params_)
-        std::cout << __LINE__ << " " << _kpts1.size() << " " << _kpts2.size() << std::endl;
-        if (!_kpts1.empty() && !_kpts2.empty())
-            feature_matcher_->match( _kpts1, _kpts2, matches);
+        std::cout << __LINE__ << " " << _desc1.size() << " " << _desc2.size() << std::endl;
+        if (!_desc1.empty() && !_desc2.empty())
+            feature_matcher_->match( _desc1, _desc2, matches);
         comp_time_ = (double)(clock() - tStart) / CLOCKS_PER_SEC;
     }
     else
@@ -130,7 +140,7 @@ void CFeature_Matcher::match(const KeyPointVector&
 }
 
 // TODO: Match features and process matches (e.g. https://github.com/kipr/opencv/blob/master/samples/cpp/detector_descriptor_matcher_evaluation.cpp)
-void CFeature_Matcher::match(const KeyPointVector& _kpts1, const KeyPointVector& _kpts2, std::vector< std::vector<cv::DMatch> >& matches)
+void CFeature_Matcher::match(const cv::Mat& _desc1, const cv::Mat& _desc2, std::vector< std::vector<cv::DMatch> >& matches)
 {
     if (!is_init_)
         std::cerr << "[CFeature_Matcher::match]: Matcher non initialized." << std::endl;
@@ -139,18 +149,47 @@ void CFeature_Matcher::match(const KeyPointVector&
     {
         clock_t tStart = clock();
         // TODO: use parameters related with the search type (match_search_params_)
-        if (!_kpts1.empty() && !_kpts2.empty())
-            feature_matcher_->knnMatch(_kpts1, _kpts2, matches, 2);
+        if (!_desc1.empty() && !_desc2.empty())
+            feature_matcher_->knnMatch(_desc1, _desc2, matches, 2);
         comp_time_ = (double)(clock() - tStart) / CLOCKS_PER_SEC;
     }
     else if (match_search_type_.compare(match_search_types_(2))==0) // radius match
     {
         clock_t tStart = clock();
         // TODO: use parameters related with the search type (match_search_params_)
-        if (!_kpts1.empty() && !_kpts2.empty())
-            feature_matcher_->radiusMatch(_kpts1, _kpts2, matches, 2);
+        if (!_desc1.empty() && !_desc2.empty())
+            feature_matcher_->radiusMatch(_desc1, _desc2, matches, 2);
         comp_time_ = (double)(clock() - tStart) / CLOCKS_PER_SEC;
     }
     else
         std::cerr << "[CFeature_Matcher::match]: The selected matcher output is different than your object. " << std::endl;
 }
+
+void CFeature_Matcher::filterMatches(const KeyPointVector& _kpts1, const KeyPointVector& _kpts2, const std::vector< std::vector<cv::DMatch> >& _dirty, const int& _img_width, const int& _img_height, std::vector<cv::DMatch>& _filtered_matches, KeyPointVector& _filtered_kpts)
+{
+    // Accept only displacements below 25% of the maximum possible distance (the image diagonal)
+    double thresholdDist = 0.25 * std::sqrt(double(_img_height*_img_height + _img_width*_img_width));
+
+    // Keep at most one (the first acceptable) candidate per query keypoint
+    _filtered_matches.reserve(_dirty.size());
+    for (size_t ii = 0; ii < _dirty.size(); ++ii)
+    {
+        for (unsigned int jj = 0; jj < _dirty[ii].size(); jj++)
+        {
+            cv::Point2f from = _kpts1[_dirty[ii][jj].queryIdx].pt;
+            cv::Point2f to   = _kpts2[_dirty[ii][jj].trainIdx].pt;
+
+            // Calculate the displacement of this candidate match
+            double dist = std::sqrt((from.x - to.x) * (from.x - to.x) + (from.y - to.y) * (from.y - to.y));
+
+            // Save the candidate if its displacement is below the threshold and it stays on roughly the same row
+            if (dist < thresholdDist && std::abs(from.y - to.y) < 5)
+            {
+                _filtered_matches.push_back(_dirty[ii][jj]);
+                cv::KeyPoint kpt = cv::KeyPoint(to, 1);
+                _filtered_kpts.push_back(kpt);
+                break;
+            }
+        }
+    }
+}
diff --git a/src/feature_matcher/feature_matcher.h b/src/feature_matcher/feature_matcher.h
index 9cc8ebfd18eb0a9f78a5c402ae72631619d4fd0d..3e3e484ddc5b7712cf79e6b3f0f4fa4530859967 100644
--- a/src/feature_matcher/feature_matcher.h
+++ b/src/feature_matcher/feature_matcher.h
@@ -37,15 +37,27 @@ class CFeature_Matcher: public CVu_Base<CMatcher_Params>
      */
     bool setSearchType(const std::string& _type, const CMatcher_Params& _params);
 
+    /**
+     * \brief Get matcher search method
+     */
+    void getSearchType(int& _type);
+    void getSearchType(std::string& _type);
     /**
      * \brief Find Matches
      */
-    void match(const KeyPointVector& _kpts1, const KeyPointVector& _kpts2, std::vector<cv::DMatch>& matches);
+    void match(const cv::Mat& _desc1, const cv::Mat& _desc2, std::vector<cv::DMatch>& matches);
 
     /**
      * \brief Find K best matches or matches in a radius (depending on selected match_search_type_)
      */
-    void match(const KeyPointVector& _kpts1, const KeyPointVector& _kpts2, std::vector< std::vector<cv::DMatch> >& matches);
+    void match(const cv::Mat& _desc1, const cv::Mat& _desc2, std::vector< std::vector<cv::DMatch> >& matches);
+
+    /**
+     * \brief Check whether each candidate match lies inside a defined area of the image
+     *
+     * From https://stackoverflow.com/questions/17967950/improve-matching-of-feature-points-with-opencv
+     */
+    void filterMatches(const KeyPointVector& _kpts1, const KeyPointVector& _kpts2, const std::vector< std::vector<cv::DMatch> >& _dirty, const int& _img_width, const int& _img_height, std::vector<cv::DMatch>& _filtered_matches, KeyPointVector& _filtered_kpts);
 
   private:
diff --git a/src/vision_utils.cpp b/src/vision_utils.cpp
index 922bbd6aa97ef6bbf7b82b039d96b14cc324622d..1776b3afacf6519ebf4ead7f93c4f31384bb6460 100644
--- a/src/vision_utils.cpp
+++ b/src/vision_utils.cpp
@@ -5,3 +5,30 @@ CVision_Utils::CVision_Utils() {
 
 CVision_Utils::~CVision_Utils() {
 }
+
+bool LessPoints(const cv::Point2f& lhs, const cv::Point2f& rhs)
+{
+    return (lhs.x < rhs.x) || ((lhs.x == rhs.x) && (lhs.y < rhs.y));
+}
+
+std::vector<cv::Point2f> vecIntersec(std::vector<cv::Point2f> v1, std::vector<cv::Point2f> v2)
+{
+    std::vector<cv::Point2f> v3;
+    // Sort vectors
+    std::sort(v1.begin(), v1.end(), LessPoints);
+    std::sort(v2.begin(), v2.end(), LessPoints);
+    // Intersect
+    std::set_intersection(v1.begin(), v1.end(), v2.begin(), v2.end(), std::back_inserter(v3), LessPoints);
+    return v3;
+}
+
+std::vector<cv::Point2f> vecUnion(std::vector<cv::Point2f> v1, std::vector<cv::Point2f> v2)
+{
+    std::vector<cv::Point2f> v3;
+    // Sort vectors
+    std::sort(v1.begin(), v1.end(), LessPoints);
+    std::sort(v2.begin(), v2.end(), LessPoints);
+    // Union
+    std::set_union(v1.begin(), v1.end(), v2.begin(), v2.end(), std::back_inserter(v3), LessPoints);
+    return v3;
+}
diff --git a/src/vision_utils.h b/src/vision_utils.h
index 24d4b21570c1b2a9ef5b40ee2fc89ee133bf88be..a384bbbbaf163be055aed2afe542687aa878091b 100644
--- a/src/vision_utils.h
+++ b/src/vision_utils.h
@@ -30,5 +30,9 @@ T readFromUser(const T& def_num)
     return read;
 }
 
+bool LessPoints(const cv::Point2f& lhs, const cv::Point2f& rhs);
+std::vector<cv::Point2f> vecIntersec(std::vector<cv::Point2f> v1, std::vector<cv::Point2f> v2);
+std::vector<cv::Point2f> vecUnion(std::vector<cv::Point2f> v1, std::vector<cv::Point2f> v2);
+
 #endif
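
Usage sketch (not part of the patch): a minimal illustration of how the reworked descriptor-based matching API above is expected to be driven. It assumes a CFeature_Matcher that has already been initialized with init() as in the example, that KeyPointVector is std::vector<cv::KeyPoint>, and a hypothetical include path; adjust names and includes to the actual tree.

    #include <iostream>
    #include <vector>
    #include <opencv2/core/core.hpp>
    #include "feature_matcher/feature_matcher.h" // assumed include path

    // Match two descriptor sets with the cv::Mat-based API and prune the result
    // with filterMatches(). 'matcher' is assumed to be already init()'ed.
    void matchAndFilter(CFeature_Matcher& matcher,
                        const cv::Mat& desc_old, const cv::Mat& desc_new,
                        const KeyPointVector& kpts_old, const KeyPointVector& kpts_new,
                        int img_width, int img_height)
    {
        // With the new default search type "knnMatch", the vector-of-vectors overload applies;
        // queryIdx indexes the first (old) set, trainIdx the second (new) one.
        std::vector< std::vector<cv::DMatch> > matches;
        matcher.match(desc_old, desc_new, matches);

        // Drop candidates that move more than 25% of the image diagonal
        // or that do not stay on roughly the same image row.
        std::vector<cv::DMatch> good_matches;
        KeyPointVector good_kpts; // kept keypoint positions in the new frame
        matcher.filterMatches(kpts_old, kpts_new, matches, img_width, img_height,
                              good_matches, good_kpts);

        std::cout << good_matches.size() << " matches kept, match time: "
                  << matcher.getTime() << " s" << std::endl;
    }

Side note on vision_utils: vecIntersec() and vecUnion() rely on std::sort, std::set_intersection and std::set_union, so vision_utils.cpp needs <algorithm> (and vision_utils.h a cv::Point2f declaration, e.g. opencv2/core/core.hpp) unless those are already pulled in transitively.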