diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 01673715166b2ef9670f03bb6ab1ece336d21924..35b51df5c2a92c0a230bf07f1c111421c290a551 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -202,6 +202,7 @@ INCLUDE (${PROJECT_SOURCE_DIR}/cmake_modules/FindYamlCpp.cmake)
 IF(YAMLCPP_FOUND)
     MESSAGE(STATUS "Yaml-cpp related sources will be built.")
     ADD_DEFINITIONS( -DUSING_YAML )
+    SET(USING_YAML TRUE)
-ELSEIF(YAMLCPP_FOUND)
+ELSE(YAMLCPP_FOUND)
     MESSAGE("[WARN] yaml-cpp Library NOT FOUND!")
 ENDIF(YAMLCPP_FOUND)
diff --git a/src/detectors/akaze/detector_akaze.h b/src/detectors/akaze/detector_akaze.h
index 0151fd0e71c0314c5bb8477f2bfb6e5bff2af3e4..b6efdf6f8a5af70fe841062098ebe9f88650ca6e 100644
--- a/src/detectors/akaze/detector_akaze.h
+++ b/src/detectors/akaze/detector_akaze.h
@@ -19,7 +19,7 @@ struct DetectorParamsAKAZE: public ParamsBase
 	int descriptor_size = 0;							// Size of the descriptor in bits. 0 -> Full size
 	int descriptor_channels = 3;						// Number of channels in the descriptor (1, 2, 3)
 	float threshold = 0.001f;							// Detector response threshold to accept point
-	int nOctaves = 2;									// Maximum octave evolution of the image
+	int nOctaves = 4;									// Maximum octave evolution of the image
 	int nOctaveLayers = 4;								// Default number of sublevels per scale level
 	int diffusivity = cv::KAZE::DIFF_PM_G2; 			// Diffusivity type. DIFF_PM_G1=0, DIFF_PM_G2=1, DIFF_WEICKERT=2, DIFF_CHARBONNIER=3
 	~DetectorParamsAKAZE(){}
diff --git a/src/detectors/fast/detector_fast.h b/src/detectors/fast/detector_fast.h
index c21dac4a8ea07c5f74170f53778c19e4f0da225c..ae80acc55a8d483a8cb566711b83d4295d0f31e5 100644
--- a/src/detectors/fast/detector_fast.h
+++ b/src/detectors/fast/detector_fast.h
@@ -17,7 +17,8 @@ struct DetectorParamsFAST: public ParamsBase
 {
  	int  	threshold = 10;										// threshold on difference between intensity of the central pixel and pixels of a circle around this pixel.
 	bool  	nonmaxSuppression = true;							// if true, non-maximum suppression is applied to detected corners (keypoints).
-	int  	neighbortype = cv::FastFeatureDetector::TYPE_9_16;	// one of the three neighborhoods as defined in the paper: FastFeatureDetector::TYPE_9_16, FastFeatureDetector::TYPE_7_12, FastFeatureDetector::TYPE_5_8
+	int  	neighbortype = cv::FastFeatureDetector::TYPE_5_8;	// one of the three neighborhoods as defined in the paper: FastFeatureDetector::TYPE_9_16, FastFeatureDetector::TYPE_7_12, FastFeatureDetector::TYPE_5_8
+
 	~DetectorParamsFAST(){}
 };
 
diff --git a/src/detectors/orb/detector_orb.h b/src/detectors/orb/detector_orb.h
index 2889ea540525e1a81195773f8795dc31d340ceb9..6315703b48a121c041b8df15aae7823fcd22b6c3 100644
--- a/src/detectors/orb/detector_orb.h
+++ b/src/detectors/orb/detector_orb.h
@@ -22,7 +22,7 @@ struct DetectorParamsORB: public ParamsBase
 	int firstLevel = 0;						// It should be 0 in the current implementation.
 	int WTA_K = 2;							// The number of points that produce each element of the oriented BRIEF descriptor. The default value 2 means the BRIEF where we take a random point pair and compare their brightnesses, so we get 0/1 response. Other possible values are 3 and 4. For example, 3 means that we take 3 random points (of course, those point coordinates are random, but they are generated from the pre-defined seed, so each element of BRIEF descriptor is computed deterministically from the pixel rectangle), find point of maximum brightness and output index of the winner (0, 1 or 2). Such output will occupy 2 bits, and therefore it will need a special variant of Hamming distance, denoted as NORM_HAMMING2 (2 bits per bin). When WTA_K=4, we take 4 random points to compute each bin (that will also occupy 2 bits with possible values 0, 1, 2 or 3).
 	int scoreType = cv::ORB::HARRIS_SCORE;	// The default HARRIS_SCORE means that Harris algorithm is used to rank features (the score is written to KeyPoint::score and is used to retain best nfeatures features); FAST_SCORE is alternative value of the parameter that produces slightly less stable keypoints, but it is a little faster to compute.
-	int patchSize = 31;						// size of the patch used by the oriented BRIEF descriptor. Of course, on smaller pyramid layers the perceived image area covered by a feature will be larger.
+	int patchSize = 16;						// size of the patch used by the oriented BRIEF descriptor. Of course, on smaller pyramid layers the perceived image area covered by a feature will be larger.
 	~DetectorParamsORB(){}
 };
 
diff --git a/src/detectors/sbd/detector_sbd.h b/src/detectors/sbd/detector_sbd.h
index 75fbd40114b402b033a433d11b289183192887a8..76ff8f34d3341707bceaa034225164c589a83c4d 100644
--- a/src/detectors/sbd/detector_sbd.h
+++ b/src/detectors/sbd/detector_sbd.h
@@ -16,6 +16,8 @@ VU_PTR_TYPEDEFS(DetectorParamsSBD);
 struct DetectorParamsSBD: public ParamsBase
 {
 	cv::SimpleBlobDetector::Params cv_params = cv::SimpleBlobDetector::Params();
+
+	DetectorParamsSBD(){cv_params.filterByColor = false;}
 	~DetectorParamsSBD(){}
 };
 
diff --git a/src/examples/yaml/FAST.yaml b/src/examples/yaml/FAST.yaml
index 42f8f70ed783cc1fca0b9932fb0cc7fe12e190b1..ffbe200839c0e636ce0281da5bb449685a8b0f5a 100644
--- a/src/examples/yaml/FAST.yaml
+++ b/src/examples/yaml/FAST.yaml
@@ -4,4 +4,4 @@ detector:
   type: "FAST"         
   threshold: 10
   nonmaxSuppression: true
-  neighbor type: 0 #enum { TYPE_9_16=0, TYPE_7_12=1, TYPE_5_8=2 };
+  neighbor type: 0 #enum { TYPE_5_8=0, TYPE_7_12=1, TYPE_9_16=2 };
diff --git a/src/test/CMakeLists.txt b/src/test/CMakeLists.txt
index 2c9594acd5434063d160cda5ff6936839acdc624..48df6b257b7c7fd6e501c415f25aabd116a7f4df 100644
--- a/src/test/CMakeLists.txt
+++ b/src/test/CMakeLists.txt
@@ -64,6 +64,10 @@ target_link_libraries(gtest_example ${PROJECT_NAME})      #
   vu_add_gtest(gtest_detectors gtest_detectors.cpp)
   target_link_libraries(gtest_detectors ${PROJECT_NAME})    
 
+# Matchers
+  vu_add_gtest(gtest_matchers gtest_matchers.cpp)
+  target_link_libraries(gtest_matchers ${PROJECT_NAME})   
+
 # Sensor usb_cam
   vu_add_gtest(gtest_sensor_usb_cam gtest_sensor_usb_cam.cpp)
   target_link_libraries(gtest_sensor_usb_cam ${PROJECT_NAME})    
diff --git a/src/test/data/FAST.yaml b/src/test/data/FAST.yaml
index 42f8f70ed783cc1fca0b9932fb0cc7fe12e190b1..ffbe200839c0e636ce0281da5bb449685a8b0f5a 100644
--- a/src/test/data/FAST.yaml
+++ b/src/test/data/FAST.yaml
@@ -4,4 +4,4 @@ detector:
   type: "FAST"         
   threshold: 10
   nonmaxSuppression: true
-  neighbor type: 0 #enum { TYPE_9_16=0, TYPE_7_12=1, TYPE_5_8=2 };
+  neighbor type: 0 #enum { TYPE_5_8=0, TYPE_7_12=1, TYPE_9_16=2 };
diff --git a/src/test/gtest_descriptors.cpp b/src/test/gtest_descriptors.cpp
index feb9df2705fccd1a1ae672ddbef0139e1ced15ba..7d04457b206385b7c2ce4702d73cfd0f719368db 100644
--- a/src/test/gtest_descriptors.cpp
+++ b/src/test/gtest_descriptors.cpp
@@ -27,15 +27,22 @@ TEST(Descriptors, AKAZE)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/AKAZE.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+
+    vision_utils::DetectorParamsAKAZEPtr params = std::make_shared<vision_utils::DetectorParamsAKAZE>();
+    std::string det_name = "AKAZE";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
     vision_utils::DetectorAKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAKAZE>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 108);
 
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsAKAZEPtr des_params = std::make_shared<vision_utils::DescriptorParamsAKAZE>();
+    std::string des_name = "AKAZE";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/AKAZE.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorAKAZEPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorAKAZE>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -46,16 +53,21 @@ TEST(Descriptors, BRIEF)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
-    vision_utils::DetectorAKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAKAZE>(det_b_ptr);
+    vision_utils::DetectorORBPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 17);
 
-    yaml_file_params = vu_root + "/src/test/data/BRIEF.yaml";
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsBRIEFPtr des_params = std::make_shared<vision_utils::DescriptorParamsBRIEF>();
+    std::string des_name = "BRIEF";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/BRIEF.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorBRIEFPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorBRIEF>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -66,15 +78,21 @@ TEST(Descriptors, BRISK)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/BRISK.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
-    det_ptr = std::static_pointer_cast<vision_utils::DetectorBRISK>(det_ptr);
+    vision_utils::DetectorParamsBRISKPtr params = std::make_shared<vision_utils::DetectorParamsBRISK>();
+    std::string det_name = "BRISK";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+    vision_utils::DetectorBRISKPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorBRISK>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 193);
 
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsBRISKPtr des_params = std::make_shared<vision_utils::DescriptorParamsBRISK>();
+    std::string des_name = "BRISK";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/BRISK.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorBRISKPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorBRISK>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -85,16 +103,21 @@ TEST(Descriptors, DAISY)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
-    vision_utils::DetectorAKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAKAZE>(det_b_ptr);
+    vision_utils::DetectorORBPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 17);
 
-    yaml_file_params = vu_root + "/src/test/data/DAISY.yaml";
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsDAISYPtr des_params = std::make_shared<vision_utils::DescriptorParamsDAISY>();
+    std::string des_name = "DAISY";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/DAISY.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorDAISYPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorDAISY>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -105,16 +128,21 @@ TEST(Descriptors, FREAK)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
-    vision_utils::DetectorAKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAKAZE>(det_b_ptr);
+    vision_utils::DetectorORBPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 17);
 
-    yaml_file_params = vu_root + "/src/test/data/FREAK.yaml";
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsFREAKPtr des_params = std::make_shared<vision_utils::DescriptorParamsFREAK>();
+    std::string des_name = "FREAK";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/FREAK.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorFREAKPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorFREAK>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -125,15 +153,21 @@ TEST(Descriptors, KAZE)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/KAZE.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
-    det_ptr = std::static_pointer_cast<vision_utils::DetectorKAZE>(det_ptr);
+    vision_utils::DetectorParamsKAZEPtr params = std::make_shared<vision_utils::DetectorParamsKAZE>();
+    std::string det_name = "KAZE";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+    vision_utils::DetectorKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorKAZE>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 224);
 
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsKAZEPtr des_params = std::make_shared<vision_utils::DescriptorParamsKAZE>();
+    std::string des_name = "KAZE";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/KAZE.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorKAZEPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorKAZE>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -144,16 +178,21 @@ TEST(Descriptors, LATCH)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
-    vision_utils::DetectorAKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAKAZE>(det_b_ptr);
+    vision_utils::DetectorORBPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 17);
 
-    yaml_file_params = vu_root + "/src/test/data/LATCH.yaml";
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsLATCHPtr des_params = std::make_shared<vision_utils::DescriptorParamsLATCH>();
+    std::string des_name = "LATCH";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/LATCH.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorLATCHPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorLATCH>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -164,16 +203,21 @@ TEST(Descriptors, LUCID)
 {
     image = cv::imread(filename3);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename3 << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
-    vision_utils::DetectorAKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAKAZE>(det_b_ptr);
+    vision_utils::DetectorORBPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 20);
 
-    yaml_file_params = vu_root + "/src/test/data/LUCID.yaml";
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsLUCIDPtr des_params = std::make_shared<vision_utils::DescriptorParamsLUCID>();
+    std::string des_name = "LUCID";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/LUCID.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorLUCIDPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorLUCID>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -184,15 +228,21 @@ TEST(Descriptors, ORB)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
-    det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_ptr);
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+    vision_utils::DetectorORBPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 17);
 
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsORBPtr des_params = std::make_shared<vision_utils::DescriptorParamsORB>();
+    std::string des_name = "ORB";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorORBPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorORB>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -203,15 +253,21 @@ TEST(Descriptors, SIFT)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/SIFT.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
-    det_ptr = std::static_pointer_cast<vision_utils::DetectorSIFT>(det_ptr);
+    vision_utils::DetectorParamsSIFTPtr params = std::make_shared<vision_utils::DetectorParamsSIFT>();
+    std::string det_name = "SIFT";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+    vision_utils::DetectorSIFTPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorSIFT>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 62);
 
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsSIFTPtr des_params = std::make_shared<vision_utils::DescriptorParamsSIFT>();
+    std::string des_name = "SIFT";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/SIFT.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorSIFTPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorSIFT>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
@@ -222,15 +278,21 @@ TEST(Descriptors, SURF)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
-    std::string yaml_file_params = vu_root + "/src/test/data/SURF.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
-    det_ptr = std::static_pointer_cast<vision_utils::DetectorSURF>(det_ptr);
+    vision_utils::DetectorParamsSURFPtr params = std::make_shared<vision_utils::DetectorParamsSURF>();
+    std::string det_name = "SURF";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+    vision_utils::DetectorSURFPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorSURF>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 121);
 
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    vision_utils::DescriptorParamsSURFPtr des_params = std::make_shared<vision_utils::DescriptorParamsSURF>();
+    std::string des_name = "SURF";
+    vision_utils::DescriptorBasePtr des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", des_params);
+#ifdef USING_YAML
+    std::string yaml_file_params = vu_root + "/src/test/data/SURF.yaml";
+    des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    des_b_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+#endif
     vision_utils::DescriptorSURFPtr des_ptr = std::static_pointer_cast<vision_utils::DescriptorSURF>(des_b_ptr);
     ASSERT_TRUE(des_ptr!=NULL);
     cv::Mat descriptors = des_ptr->getDescriptor( image, keypoints);
diff --git a/src/test/gtest_detectors.cpp b/src/test/gtest_detectors.cpp
index 0299a4916ad98eb758c352567e26ca3a5fd68602..9e53a7d896ba4116e90dd56dc4737b3c546de69a 100644
--- a/src/test/gtest_detectors.cpp
+++ b/src/test/gtest_detectors.cpp
@@ -109,9 +109,16 @@ TEST(Detectors, AGAST)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsAGASTPtr params = std::make_shared<vision_utils::DetectorParamsAGAST>();
+    std::string det_name = "AGAST";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/AGAST.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorAGASTPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAGAST>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 37);
@@ -146,11 +153,11 @@ TEST(Detectors, AGASTRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/AGAST.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsAGASTPtr params = std::make_shared<vision_utils::DetectorParamsAGAST>();
+    std::string det_name = "AGAST";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -165,9 +172,16 @@ TEST(Detectors, AKAZE)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsAKAZEPtr params = std::make_shared<vision_utils::DetectorParamsAKAZE>();
+    std::string det_name = "AKAZE";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/AKAZE.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorAKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorAKAZE>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 108);
@@ -201,11 +215,11 @@ TEST(Detectors, AKAZERoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/AKAZE.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsAKAZEPtr params = std::make_shared<vision_utils::DetectorParamsAKAZE>();
+    std::string det_name = "AKAZE";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -220,9 +234,16 @@ TEST(Detectors, BRISK)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsBRISKPtr params = std::make_shared<vision_utils::DetectorParamsBRISK>();
+    std::string det_name = "BRISK";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/BRISK.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorBRISKPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorBRISK>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 193);
@@ -255,11 +276,11 @@ TEST(Detectors, BRISKRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/BRISK.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsBRISKPtr params = std::make_shared<vision_utils::DetectorParamsBRISK>();
+    std::string det_name = "BRISK";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -274,9 +295,16 @@ TEST(Detectors, FAST)
 {
     image = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename2 << std::endl;
+
+    vision_utils::DetectorParamsFASTPtr params = std::make_shared<vision_utils::DetectorParamsFAST>();
+    std::string det_name = "FAST";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/FAST.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorFASTPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorFAST>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 156);
@@ -309,11 +337,11 @@ TEST(Detectors, FASTRoiBounds)
     image = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename2 << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/FAST.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsFASTPtr params = std::make_shared<vision_utils::DetectorParamsFAST>();
+    std::string det_name = "FAST";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -328,9 +356,16 @@ TEST(Detectors, GFTT)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsGFTTPtr params = std::make_shared<vision_utils::DetectorParamsGFTT>();
+    std::string det_name = "GFTT";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/GFTT.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorGFTTPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorGFTT>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 168);
@@ -363,11 +398,11 @@ TEST(Detectors, GFTTRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/GFTT.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsGFTTPtr params = std::make_shared<vision_utils::DetectorParamsGFTT>();
+    std::string det_name = "GFTT";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -382,9 +417,16 @@ TEST(Detectors, HARRIS)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsHARRISPtr params = std::make_shared<vision_utils::DetectorParamsHARRIS>();
+    std::string det_name = "HARRIS";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/HARRIS.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorHARRISPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorHARRIS>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 33);
@@ -419,11 +461,11 @@ TEST(Detectors, HARRISRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/HARRIS.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsHARRISPtr params = std::make_shared<vision_utils::DetectorParamsHARRIS>();
+    std::string det_name = "HARRIS";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -438,9 +480,16 @@ TEST(Detectors, KAZE)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsKAZEPtr params = std::make_shared<vision_utils::DetectorParamsKAZE>();
+    std::string det_name = "KAZE";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/KAZE.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorKAZEPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorKAZE>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 224);
@@ -475,11 +524,11 @@ TEST(Detectors, KAZERoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/KAZE.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsKAZEPtr params = std::make_shared<vision_utils::DetectorParamsKAZE>();
+    std::string det_name = "KAZE";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -494,9 +543,16 @@ TEST(Detectors, MSER)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsMSERPtr params = std::make_shared<vision_utils::DetectorParamsMSER>();
+    std::string det_name = "MSER";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/MSER.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorMSERPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorMSER>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 6);
@@ -531,11 +587,11 @@ TEST(Detectors, MSERRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/MSER.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsMSERPtr params = std::make_shared<vision_utils::DetectorParamsMSER>();
+    std::string det_name = "MSER";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -550,9 +606,16 @@ TEST(Detectors, ORB)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorORBPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 17);
@@ -580,11 +643,11 @@ TEST(Detectors, ORBRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/ORB.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsORBPtr params = std::make_shared<vision_utils::DetectorParamsORB>();
+    std::string det_name = "ORB";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -599,9 +662,16 @@ TEST(Detectors, SBD)
 {
     image = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename2 << std::endl;
+
+    vision_utils::DetectorParamsSBDPtr params = std::make_shared<vision_utils::DetectorParamsSBD>();
+    std::string det_name = "SBD";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/SBD.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorSBDPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorSBD>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 1);
@@ -636,11 +706,11 @@ TEST(Detectors, SBDRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/SBD.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsSBDPtr params = std::make_shared<vision_utils::DetectorParamsSBD>();
+    std::string det_name = "SBD";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -655,9 +725,16 @@ TEST(Detectors, SIFT)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsSIFTPtr params = std::make_shared<vision_utils::DetectorParamsSIFT>();
+    std::string det_name = "SIFT";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/SIFT.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorSIFTPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorSIFT>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 62);
@@ -692,11 +769,11 @@ TEST(Detectors, SIFTRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/SIFT.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsSIFTPtr params = std::make_shared<vision_utils::DetectorParamsSIFT>();
+    std::string det_name = "SIFT";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
@@ -711,9 +788,16 @@ TEST(Detectors, SURF)
 {
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
+
+    vision_utils::DetectorParamsSURFPtr params = std::make_shared<vision_utils::DetectorParamsSURF>();
+    std::string det_name = "SURF";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
+#ifdef USING_YAML
     std::string yaml_file_params = vu_root + "/src/test/data/SURF.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+#endif
+
     vision_utils::DetectorSURFPtr det_ptr = std::static_pointer_cast<vision_utils::DetectorSURF>(det_b_ptr);
     std::vector<cv::KeyPoint> keypoints = det_ptr->detect(image);
     ASSERT_EQ(keypoints.size(), 121);
@@ -748,11 +832,11 @@ TEST(Detectors, SURFRoiBounds)
     image = cv::imread(filename, CV_LOAD_IMAGE_GRAYSCALE);
     ASSERT_TRUE(image.data)<< "failed to load image " << filename << std::endl;
 
-    std::string yaml_file_params = vu_root + "/src/test/data/SURF.yaml";
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    vision_utils::DetectorParamsSURFPtr params = std::make_shared<vision_utils::DetectorParamsSURF>();
+    std::string det_name = "SURF";
+    vision_utils::DetectorBasePtr det_b_ptr = vision_utils::setupDetector(det_name, det_name + " detector", params);
 
-    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_ptr, det_name, points_to_check);
+    std::map<int, cv::Point2f> points_found = TestDetectorROI(image, det_b_ptr, det_name, points_to_check);
     ASSERT_TRUE(!points_found.empty());
 
     // check that at least all keypoints in the list except #10 have been detected
diff --git a/src/test/gtest_matcher_base.cpp b/src/test/gtest_matcher_base.cpp
index 6d97c745a70223cd93aba22d7940b0d31d3331aa..72329fd366a7925d8199f992ed6591d1d1424c70 100644
--- a/src/test/gtest_matcher_base.cpp
+++ b/src/test/gtest_matcher_base.cpp
@@ -48,78 +48,6 @@ TEST(MatcherBase, getParams)
     ASSERT_EQ(params->match_type,1);
 }
 
-TEST(MatcherBase, match)
-{
-    cv::Mat image1 = cv::imread(filename1, CV_LOAD_IMAGE_GRAYSCALE);
-    ASSERT_TRUE(image1.data)<< "Failed to load image " << filename1 << std::endl;
-    cv::Mat image2 = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
-    ASSERT_TRUE(image1.data)<< "Failed to load image " << filename2 << std::endl;
-
-    // Define detector
-    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
-    ASSERT_TRUE(det_name.compare("ORB")==0);
-    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
-    det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_ptr);
-    ASSERT_TRUE(det_ptr!=NULL);
-
-    // Define descriptor
-    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
-    ASSERT_TRUE(des_name.compare("ORB")==0);
-    vision_utils::DescriptorBasePtr des_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
-    des_ptr = std::static_pointer_cast<vision_utils::DescriptorORB>(des_ptr);
-    ASSERT_TRUE(des_ptr!=NULL);
-
-    // Image 1
-    KeyPointVector kpts1 = det_ptr->detect(image1);
-    cv::Mat desc1 = des_ptr->getDescriptor(image1,kpts1);
-    ASSERT_TRUE(kpts1.size() == desc1.rows);
-
-    // Image 2
-    KeyPointVector kpts2 = det_ptr->detect(image2);
-    cv::Mat desc2 = des_ptr->getDescriptor(image2,kpts2);
-    ASSERT_TRUE(kpts2.size() == desc2.rows);
-
-    // Define matcher
-    vision_utils::MatcherParamsBasePtr params_ptr = std::make_shared<vision_utils::MatcherParamsBase>();
-	vision_utils::MatcherBasePtr mat_ptr;
-
-	// MATCH type
-	params_ptr->match_type = vision_utils::MATCH;
-	mat_ptr = vision_utils::setupMatcher(matcher_name, matcher_name, params_ptr);
-    params_ptr = mat_ptr->getParams();
-    ASSERT_EQ(params_ptr->match_type,1);
-
-	// match
-	DMatchVector matches;
-    mat_ptr->match(desc1,desc2,matches);
-    ASSERT_EQ(matches.size(),80);
-
-    // filter
-    DMatchVector best_matches;
-    KeyPointVector kpts_matched_img2;
-    KeyPointVector kpts_matched_img1;
-    mat_ptr->filterByDistance(10, 0.25, kpts1, kpts2, matches, image1.rows, image1.cols, best_matches, kpts_matched_img2, kpts_matched_img1);
-    ASSERT_EQ(best_matches.size(),37);
-
-    // KNNMATCH type
-	params_ptr->match_type = vision_utils::KNNMATCH;
-	mat_ptr = vision_utils::setupMatcher(matcher_name, matcher_name, params_ptr);
-    ASSERT_EQ(mat_ptr->getParams()->match_type,2);
-
-	// match
-    std::vector< DMatchVector > matches_vec;
-    mat_ptr->match(desc1,desc2,matches_vec);
-    ASSERT_EQ(matches.size(),80);
-
-    // filter
-    mat_ptr->filterByDistance(10, 0.25, kpts1, kpts2, matches_vec, image1.rows, image1.cols, best_matches, kpts_matched_img2, kpts_matched_img1);
-    ASSERT_EQ(best_matches.size(),85);
-
-
-    // RADIUSMATCH type currently not implemented:
-    // OpenCV Error: The function/feature is not implemented (LSH index does not support radiusSearch operation) in radiusSearch
-}
-
 int main(int argc, char **argv)
 {
   testing::InitGoogleTest(&argc, argv);
diff --git a/src/test/gtest_matchers.cpp b/src/test/gtest_matchers.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..6b48bb0abca9b0f16e7841b7a4e4f69b44f8183e
--- /dev/null
+++ b/src/test/gtest_matchers.cpp
@@ -0,0 +1,188 @@
+#include "utils_gtest.h"
+#include "../vision_utils.h"
+#include "../detectors.h"
+#include "../descriptors.h"
+#include "../matchers.h"
+
+std::string matcher_name = "BRUTEFORCE";
+std::string vu_root = _VU_ROOT_DIR;
+std::string yaml_file_params = vu_root + "/src/test/data/base_classes.yaml";
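+// base_classes.yaml is expected to configure ORB as both detector and descriptor; each matcher test asserts this before matching.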
+std::string filename1 = vu_root + "/src/test/data/img_1.png";
+std::string filename2 = vu_root + "/src/test/data/img_2.png";
+
+TEST(Matchers, LoadImagesFromFile)
+{
+    cv::Mat image1 = cv::imread(filename1, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image1.data)<< "Failed to load image " << filename1 << std::endl;
+    cv::Mat image2 = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image2.data)<< "Failed to load image " << filename2 << std::endl;
+}
+
+TEST(Matchers, MATCH)
+{
+    cv::Mat image1 = cv::imread(filename1, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image1.data)<< "Failed to load image " << filename1 << std::endl;
+    cv::Mat image2 = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image2.data)<< "Failed to load image " << filename2 << std::endl;
+
+    // Define detector
+    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    ASSERT_TRUE(det_name.compare("ORB")==0);
+    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_ptr);
+    ASSERT_TRUE(det_ptr!=NULL);
+
+    // Define descriptor
+    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    ASSERT_TRUE(des_name.compare("ORB")==0);
+    vision_utils::DescriptorBasePtr des_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    des_ptr = std::static_pointer_cast<vision_utils::DescriptorORB>(des_ptr);
+    ASSERT_TRUE(des_ptr!=NULL);
+
+    // Image 1
+    KeyPointVector kpts1 = det_ptr->detect(image1);
+    cv::Mat desc1 = des_ptr->getDescriptor(image1,kpts1);
+    ASSERT_TRUE(kpts1.size() == desc1.rows);
+
+    // Image 2
+    KeyPointVector kpts2 = det_ptr->detect(image2);
+    cv::Mat desc2 = des_ptr->getDescriptor(image2,kpts2);
+    ASSERT_TRUE(kpts2.size() == desc2.rows);
+
+    // Define matcher
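+    // vision_utils match types map to integer codes: MATCH == 1, KNNMATCH == 2, RADIUSMATCH == 3 (asserted via getParams() in these tests).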
+    vision_utils::MatcherParamsBasePtr params_ptr = std::make_shared<vision_utils::MatcherParamsBase>();
+    vision_utils::MatcherBasePtr mat_ptr;
+
+    // MATCH type
+    params_ptr->match_type = vision_utils::MATCH;
+    mat_ptr = vision_utils::setupMatcher(matcher_name, matcher_name, params_ptr);
+    params_ptr = mat_ptr->getParams();
+    ASSERT_EQ(params_ptr->match_type,1);
+
+    // match
+    DMatchVector matches;
+    mat_ptr->match(desc1,desc2,matches);
+    ASSERT_EQ(matches.size(),80);
+
+    // filter
+    DMatchVector best_matches;
+    KeyPointVector kpts_matched_img2;
+    KeyPointVector kpts_matched_img1;
+    mat_ptr->filterByDistance(10, 0.25, kpts1, kpts2, matches, image1.rows, image1.cols, best_matches, kpts_matched_img2, kpts_matched_img1);
+    ASSERT_EQ(best_matches.size(),37);
+}
+
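+// KNNMATCH returns a vector of candidate matches per query descriptor (hence std::vector<DMatchVector>), unlike MATCH, which keeps a single best match per query.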
+TEST(Matchers, KNNMATCH)
+{
+    cv::Mat image1 = cv::imread(filename1, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image1.data)<< "Failed to load image " << filename1 << std::endl;
+    cv::Mat image2 = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image2.data)<< "Failed to load image " << filename2 << std::endl;
+
+    // Define detector
+    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    ASSERT_TRUE(det_name.compare("ORB")==0);
+    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_ptr);
+    ASSERT_TRUE(det_ptr!=NULL);
+
+    // Define descriptor
+    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    ASSERT_TRUE(des_name.compare("ORB")==0);
+    vision_utils::DescriptorBasePtr des_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    des_ptr = std::static_pointer_cast<vision_utils::DescriptorORB>(des_ptr);
+    ASSERT_TRUE(des_ptr!=NULL);
+
+    // Image 1
+    KeyPointVector kpts1 = det_ptr->detect(image1);
+    cv::Mat desc1 = des_ptr->getDescriptor(image1,kpts1);
+    ASSERT_TRUE(kpts1.size() == desc1.rows);
+
+    // Image 2
+    KeyPointVector kpts2 = det_ptr->detect(image2);
+    cv::Mat desc2 = des_ptr->getDescriptor(image2,kpts2);
+    ASSERT_TRUE(kpts2.size() == desc2.rows);
+
+    // Define matcher
+    vision_utils::MatcherParamsBasePtr params_ptr = std::make_shared<vision_utils::MatcherParamsBase>();
+    vision_utils::MatcherBasePtr mat_ptr;
+
+    // KNNMATCH type
+    params_ptr->match_type = vision_utils::KNNMATCH;
+    mat_ptr = vision_utils::setupMatcher(matcher_name, matcher_name, params_ptr);
+    ASSERT_EQ(mat_ptr->getParams()->match_type,2);
+
+    // match
+    std::vector< DMatchVector > matches_vec;
+    mat_ptr->match(desc1,desc2,matches_vec);
+    ASSERT_EQ(matches_vec.size(),80);
+
+    // filter
+    DMatchVector best_matches;
+    KeyPointVector kpts_matched_img2;
+    KeyPointVector kpts_matched_img1;
+    mat_ptr->filterByDistance(10, 0.25, kpts1, kpts2, matches_vec, image1.rows, image1.cols, best_matches, kpts_matched_img2, kpts_matched_img1);
+    ASSERT_EQ(best_matches.size(),48);
+}
+
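+// RADIUSMATCH would return, for each query descriptor, the candidate matches within a fixed distance radius; the actual match call is kept commented out inside the test (see the note there).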
+TEST(Matchers, RADIUSMATCH)
+{
+    cv::Mat image1 = cv::imread(filename1, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image1.data)<< "Failed to load image " << filename1 << std::endl;
+    cv::Mat image2 = cv::imread(filename2, CV_LOAD_IMAGE_GRAYSCALE);
+    ASSERT_TRUE(image2.data)<< "Failed to load image " << filename2 << std::endl;
+
+    // Define detector
+    std::string det_name = vision_utils::readYamlType(yaml_file_params, "detector");
+    ASSERT_TRUE(det_name.compare("ORB")==0);
+    vision_utils::DetectorBasePtr det_ptr = vision_utils::setupDetector(det_name, det_name + " detector", yaml_file_params);
+    det_ptr = std::static_pointer_cast<vision_utils::DetectorORB>(det_ptr);
+    ASSERT_TRUE(det_ptr!=NULL);
+
+    // Define descriptor
+    std::string des_name = vision_utils::readYamlType(yaml_file_params, "descriptor");
+    ASSERT_TRUE(des_name.compare("ORB")==0);
+    vision_utils::DescriptorBasePtr des_ptr = vision_utils::setupDescriptor(des_name, des_name + " descriptor", yaml_file_params);
+    des_ptr = std::static_pointer_cast<vision_utils::DescriptorORB>(des_ptr);
+    ASSERT_TRUE(des_ptr!=NULL);
+
+    // Image 1
+    KeyPointVector kpts1 = det_ptr->detect(image1);
+    cv::Mat desc1 = des_ptr->getDescriptor(image1,kpts1);
+    ASSERT_TRUE(kpts1.size() == desc1.rows);
+
+    // Image 2
+    KeyPointVector kpts2 = det_ptr->detect(image2);
+    cv::Mat desc2 = des_ptr->getDescriptor(image2,kpts2);
+    ASSERT_TRUE(kpts2.size() == desc2.rows);
+
+    // Define matcher
+    vision_utils::MatcherParamsBasePtr params_ptr = std::make_shared<vision_utils::MatcherParamsBase>();
+    vision_utils::MatcherBasePtr mat_ptr;
+
+    // RADIUSMATCH type
+    params_ptr->match_type = vision_utils::RADIUSMATCH;
+    mat_ptr = vision_utils::setupMatcher(matcher_name, matcher_name, params_ptr);
+    ASSERT_EQ(mat_ptr->getParams()->match_type,3);
+
+    // Matching with RADIUSMATCH is not exercised here: OpenCV raises
+    // "The function/feature is not implemented (LSH index does not support radiusSearch operation) in radiusSearch".
+    // The intended call sequence is kept for reference:
+    //
+    //    std::vector< DMatchVector > matches_vec;
+    //    mat_ptr->match(desc1,desc2,matches_vec);
+    //    ASSERT_EQ(matches_vec.size(),80);
+    //
+    //    // filter
+    //    DMatchVector best_matches;
+    //    KeyPointVector kpts_matched_img2;
+    //    KeyPointVector kpts_matched_img1;
+    //    mat_ptr->filterByDistance(10, 0.25, kpts1, kpts2, matches_vec, image1.rows, image1.cols, best_matches, kpts_matched_img2, kpts_matched_img1);
+    //    ASSERT_EQ(best_matches.size(),48);
+}
+
+int main(int argc, char **argv)
+{
+  testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}