From 2b226128b836614abe90fa3b8dc54550ed53cddc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Iv=C3=A1n=20del=20Pino?= <idelpino@iri.upc.edu>
Date: Thu, 14 Jul 2022 17:37:26 +0200
Subject: [PATCH] work in progress: configurable score threshold and score-based labelling

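Rework the per-point classification score and make the ground/obstacle
decision in the full-pointcloud labelling step configurable:

* add a score_threshold parameter to FilteringConfiguration;
* add a two-argument mahalanobisDistance() overload (ground reference vs.
  point, without the std-dev / below-prediction outputs);
* skip points already marked as OUTLIER when labelling local ground points;
* in fastLabelPointcloudUsingGroundModel(), score every point of a cell
  against the reference that labelled it, instead of inheriting the class of
  the lowest point in the cell, and store that score in the class field in
  place of the previous hard-coded 0.99;
* drop the old commented-out copy of fastGenerateElevationCloud().

For reference, class id and score are packed into the same float field,
DATA_C_1_ID_CLASS (integer part = class, fractional part = score). A minimal
sketch of how a consumer could unpack them (the point variable p is
illustrative only):

    int   point_class = (int)std::floor(p.data_c[DATA_C_1_ID_CLASS]);
    float score       = p.data_c[DATA_C_1_ID_CLASS] - (float)point_class;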
---
 include/kf_based_terrain_analysis.h |   3 +
 include/structs_definitions.h       |   1 +
 src/kf_based_terrain_analysis.cpp   | 505 ++++++++++------------------
 3 files changed, 187 insertions(+), 322 deletions(-)

diff --git a/include/kf_based_terrain_analysis.h b/include/kf_based_terrain_analysis.h
index 4903f87..fce5ce8 100644
--- a/include/kf_based_terrain_analysis.h
+++ b/include/kf_based_terrain_analysis.h
@@ -33,6 +33,9 @@ private:
                             const pcl::PointXYZRGBNormal point_in_sensor_frame, float &predicted_std_dev,
                             bool &observation_z_is_below_prediction);
 
+  float mahalanobisDistance(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
+                            const pcl::PointXYZRGBNormal point_in_sensor_frame);
+
   float euclideanDistance(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
                           const pcl::PointXYZRGBNormal point_in_sensor_frame);
 
diff --git a/include/structs_definitions.h b/include/structs_definitions.h
index 6c24cb0..33ab2e6 100644
--- a/include/structs_definitions.h
+++ b/include/structs_definitions.h
@@ -147,6 +147,7 @@ struct FilteringConfiguration
 
   int number_of_references_used_for_labelling;
   float max_pred_std_dev_for_labelling;
+  float score_threshold;
 
   bool classify_not_labeled_points_as_obstacles;
   bool discard_not_connected_references_for_labelling;
diff --git a/src/kf_based_terrain_analysis.cpp b/src/kf_based_terrain_analysis.cpp
index 0ff31b1..dc48dc0 100644
--- a/src/kf_based_terrain_analysis.cpp
+++ b/src/kf_based_terrain_analysis.cpp
@@ -168,6 +168,44 @@ float CKf_Based_Terrain_Analysis::mahalanobisDistance(const pcl::PointXYZRGBNorm
   return (mahalanobis_distance_fast);
 }
 
+float CKf_Based_Terrain_Analysis::mahalanobisDistance(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
+                                                      const pcl::PointXYZRGBNormal point_in_sensor_frame)
+{
+  //std::cout << "<------- Executing CKf_Based_Terrain_Analysis::mahalanobisDistance... --------->" << std::endl;
+
+  float prediction_fast = reference_in_sensor_frame.z
+      + (point_in_sensor_frame.x - reference_in_sensor_frame.x)
+          * reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH]
+      + (point_in_sensor_frame.y - reference_in_sensor_frame.y)
+          * reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL];
+
+  float prediction_std_fast = sqrt(
+      reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]
+          + (point_in_sensor_frame.x - reference_in_sensor_frame.x)
+              * (point_in_sensor_frame.x - reference_in_sensor_frame.x)
+              * reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE]
+          + (point_in_sensor_frame.y - reference_in_sensor_frame.y)
+              * (point_in_sensor_frame.y - reference_in_sensor_frame.y)
+              * reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE]);
+
+  float mahalanobis_distance_fast = fabs(point_in_sensor_frame.z - prediction_fast) / prediction_std_fast;
+
+  if (std::isnan(mahalanobis_distance_fast))
+  {
+    std::cout << "ref (x,y,z)   = " << reference_in_sensor_frame.x << ", " << reference_in_sensor_frame.y << ", "
+        << reference_in_sensor_frame.z << std::endl;
+    std::cout << "point (x,y,z) = " << point_in_sensor_frame.x << ", " << point_in_sensor_frame.y << ", "
+        << point_in_sensor_frame.z << std::endl;
+    std::cout << "ref (z, sz, a, sa, b, sb) = " << reference_in_sensor_frame.z << ", "
+        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE] << ", "
+        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH] << ", "
+        << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE] << ", "
+        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL] << ", "
+        << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE] << std::endl;
+  }
+  return (mahalanobis_distance_fast);
+}
+
 float CKf_Based_Terrain_Analysis::mahalanobisDistance(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
                                                       const pcl::PointXYZRGBNormal point_in_sensor_frame,
                                                       float &predicted_std_dev, bool &observation_z_is_below_prediction)
@@ -329,117 +367,121 @@ void CKf_Based_Terrain_Analysis::labelLocalGroundPoints(
   {
     pcl::PointXYZRGBNormal point_in_sensor_frame = *i;
 
-    float prediction_std_dev = -1.0;
+    if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != OUTLIER)
+    {
+
+      float prediction_std_dev = -1.0;
 //    std::cout << "Computing mahalanobis distance..." << std::endl;
-    float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame, point_in_sensor_frame,
-                                                     prediction_std_dev);
+      float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame, point_in_sensor_frame,
+                                                       prediction_std_dev);
 
 //    std::cout << "Mahalanobis distance computed!" << std::endl;
-    if (std::isnan(mahalanobis_distance))
-    {
-      std::cout << "prior pred sigma = " << prediction_std_dev << "    uncertainty = "
-          << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
-          << mahalanobis_distance << std::endl;
+      if (std::isnan(mahalanobis_distance))
+      {
+        std::cout << "prior pred sigma = " << prediction_std_dev << "    uncertainty = "
+            << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
+            << mahalanobis_distance << std::endl;
 
-      std::getchar();
-    }
+        std::getchar();
+      }
 
-    if (mahalanobis_distance < filtering_configuration.mahalanobis_threshold)
-    {
-      //std::cout << "Ground point found! x = " << point_in_sensor_frame.x << "    y = " << point_in_sensor_frame.y
-      //    << "    z = " << point_in_sensor_frame.z << std::endl << "mahalanobis_distance = " << mahalanobis_distance
-      //    << "    mahalanobis_threshold = " << filtering_configuration.mahalanobis_threshold << std::endl;
+      if (mahalanobis_distance < filtering_configuration.mahalanobis_threshold)
+      {
+        //std::cout << "Ground point found! x = " << point_in_sensor_frame.x << "    y = " << point_in_sensor_frame.y
+        //    << "    z = " << point_in_sensor_frame.z << std::endl << "mahalanobis_distance = " << mahalanobis_distance
+        //    << "    mahalanobis_threshold = " << filtering_configuration.mahalanobis_threshold << std::endl;
 
 //      std::cout << "Inserting observation for plane estimation!" << std::endl;
-      observations_for_tangent_plane_estimation->push_back(point_in_sensor_frame); // independently of exploration, any ground point is
-                                                                                   // used for plane estimation, to no discard useful
-                                                                                   // information!
+        observations_for_tangent_plane_estimation->push_back(point_in_sensor_frame); // independently of exploration, any ground point is
+                                                                                     // used for plane estimation, so as not to discard
+                                                                                     // useful information!
 
-      if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) == CLASS_UNKNOWN) // we use unknown points to drive the exploration
-      {
+        if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) == CLASS_UNKNOWN) // we use unknown points to drive the exploration
+        {
 //        std::cout << "Inserting observation to drive exploration! --> original index = "
 //            << point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX] << std::endl;
 
 //        std::cout << "elevation_cloud_ptr->points.size() = " << elevation_cloud_ptr->points.size() << std::endl;
 
-        new_references_creator.addGroundObservationInSensorFrame(
-            reference_in_sensor_frame,
-            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]]);
-      }
+          new_references_creator.addGroundObservationInSensorFrame(
+              reference_in_sensor_frame,
+              elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]]);
+        }
 
-      // now we compute the score
-      float score = 0.99 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold);
-      if (score < 0.0)
-        score = 0.0;
+        // now we compute the score
+        float score = 0.99 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold);
+        if (score < 0.0)
+          score = 0.0;
 
-      float index_of_labelling_ref =
-          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL];
+        float index_of_labelling_ref =
+            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL];
 
 //      std::cout << "Updating elevation cloud point to represent ground data" << std::endl;
-      if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != CLASS_GROUND
-          || std::floor((int)index_of_labelling_ref) == INDEX_UNKNOWN
-          || index_of_labelling_ref - std::floor((int)index_of_labelling_ref) < score) // if the point was seen as obstacle from another POV
-                                                                                       // or was unexplored, with set it to ground class
-      {
-        // and store the reference for the later full pointcloud labelling step (only if the score is better than the previous label
-        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] =
-            reference_index + score;
-
-        if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != CLASS_GROUND)
+        if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != CLASS_GROUND
+            || std::floor((int)index_of_labelling_ref) == INDEX_UNKNOWN
+            || index_of_labelling_ref - std::floor((int)index_of_labelling_ref) < score) // if the point was seen as obstacle from another POV
+                                                                                         // or was unexplored, we set it to ground class
         {
-//          std::cout << "Storing z point as z ground" << std::endl;
-          // as now the point is classified as ground, we store its own z value as z ground
-          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_n[DATA_N_1_Z_GROUND] =
-              elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].z;
-
-          // set the class as ground
-          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_1_ID_CLASS] =
-              (float)CLASS_GROUND + score;
+          // and store the reference for the later full pointcloud labelling step (only if the score is better than the previous label)
+          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] =
+              reference_index + score;
 
-          // colour the point properly
-          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].r = R_CLASS_GROUND;
-          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].g = G_CLASS_GROUND;
-          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].b = B_CLASS_GROUND;
+          if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != CLASS_GROUND)
+          {
+//          std::cout << "Storing z point as z ground" << std::endl;
+            // as now the point is classified as ground, we store its own z value as z ground
+            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_n[DATA_N_1_Z_GROUND] =
+                elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].z;
+
+            // set the class as ground
+            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_1_ID_CLASS] =
+                (float)CLASS_GROUND + score;
+
+            // colour the point properly
+            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].r = R_CLASS_GROUND;
+            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].g = G_CLASS_GROUND;
+            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].b = B_CLASS_GROUND;
+          }
         }
       }
-    }
-    else
-    {
+      else
+      {
 //      std::cout << "Obstacle point found! x = " << point_in_sensor_frame.x << "    y = " << point_in_sensor_frame.y
 //          << "    z = " << point_in_sensor_frame.z << std::endl << "mahalanobis_distance = " << mahalanobis_distance
 //          << "    mahalanobis_threshold = " << filtering_configuration.mahalanobis_threshold << std::endl;
 
-      if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != CLASS_GROUND) // we don't want to convert ground points to obstacles
-                                                                                            // because of the POV (one thing can be seem as obstacle or ground depending on the POV)
-      {
-        // Now we compute the score
-        float score = (mahalanobis_distance / filtering_configuration.mahalanobis_threshold) - 1.0;
-        if (score > 0.99)
-          score = 0.99;
-
-        // Now we check the score of the previous detection
-        float index_of_labelling_ref =
-            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL];
-
-        if (std::floor((int)index_of_labelling_ref) == INDEX_UNKNOWN
-            || index_of_labelling_ref - std::floor((int)index_of_labelling_ref) < score)
-          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] =
-              reference_index + score;
-
-        // if it is an obstacle point we use the ground reference z prediction as z ground
-        float z_ground = predictZ(ground_reference_cloud_ptr->points[reference_index], point_in_sensor_frame.x,
-                                  point_in_sensor_frame.y);
-        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_n[DATA_N_1_Z_GROUND] =
-            z_ground;
+        if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != CLASS_GROUND) // we don't want to convert ground points to obstacles
+                                                                                              // because of the POV (one thing can be seen as obstacle or ground depending on the POV)
+        {
+          // Now we compute the score
+          float score = (mahalanobis_distance / filtering_configuration.mahalanobis_threshold) - 1.0;
+          if (score > 0.99)
+            score = 0.99;
+
+          // Now we check the score of the previous detection
+          float index_of_labelling_ref =
+              elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL];
+
+          if (std::floor((int)index_of_labelling_ref) == INDEX_UNKNOWN
+              || index_of_labelling_ref - std::floor((int)index_of_labelling_ref) < score)
+            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] =
+                reference_index + score;
+
+          // if it is an obstacle point we use the ground reference z prediction as z ground
+          float z_ground = predictZ(ground_reference_cloud_ptr->points[reference_index], point_in_sensor_frame.x,
+                                    point_in_sensor_frame.y);
+          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_n[DATA_N_1_Z_GROUND] =
+              z_ground;
 
-        // set the class to OBSTACLE
-        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_1_ID_CLASS] =
-            (float)CLASS_OBSTACLE + score; //0.99;
+          // set the class to OBSTACLE
+          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_1_ID_CLASS] =
+              (float)CLASS_OBSTACLE + score; //0.99;
 
-        // and colour the point
-        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].r = R_CLASS_OBSTACLE;
-        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].g = G_CLASS_OBSTACLE;
-        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].b = B_CLASS_OBSTACLE;
+          // and colour the point
+          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].r = R_CLASS_OBSTACLE;
+          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].g = G_CLASS_OBSTACLE;
+          elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].b = B_CLASS_OBSTACLE;
+        }
       }
     }
   }
@@ -826,114 +868,12 @@ void CKf_Based_Terrain_Analysis::fastGenerateElevationCloud(
     //std::cout << "Storing point into elevation cloud!" << std::endl;
     elevation_cloud->push_back(elevation_cloud_point);
   }
+
   //std::cout << "elevation_cloud generated!!" << std::endl;
 //  //std::cout << "total_number_of_points_stored = " << total_number_of_points_stored << std::endl;
 //  //std::cout << "original number of points = " << pcl_cloud_ptr->points.size() << std::endl;
 }
 
-/*
- void CKf_Based_Terrain_Analysis::fastGenerateElevationCloud(
- const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
- pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr,
- pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud, std::vector<std::vector<int>> &correspondence_indexes)
- {
- //if (filtering_configuration.measure_performance)
- //CFunctionMonitor performance_monitor("generateElevationCloud", performance_supervisor_ptr_);
-
- int number_of_rows_and_columns = std::ceil(2.0 * MAX_RANGE / filtering_configuration.elevation_grid_resolution);
- //std::cout << "number_of_rows_and_columns = " << number_of_rows_and_columns << std::endl;
-
- kf_based_terrain_analysis_lib::ElevationCloudCell elevation_map[number_of_rows_and_columns][number_of_rows_and_columns];
- //std::cout << "elevation_map created! " << std::endl;
-
- int i = 0;
- //std::cout << "Starting to fill the elevation_map! " << std::endl;
- for (pcl::PointCloud<pcl::PointXYZRGBNormal>::const_iterator it = pcl_cloud_ptr->begin(); it != pcl_cloud_ptr->end();
- ++it)
- {
- int row = std::round((number_of_rows_and_columns / 2.0) + (it->x / filtering_configuration.elevation_grid_resolution));
- int col = std::round((number_of_rows_and_columns / 2.0) + (it->y / filtering_configuration.elevation_grid_resolution));
-
- if(elevation_map[row][col].indexes_of_points_in_cell.empty())
- {
- //std::cout << "Pointing to an empty cell, storing current point as lowest, and keeping its index! " << std::endl;
- //std::cout << "Point number " << i << "    row = " << row << "    col = " << col << std::endl;
- elevation_map[row][col].lowest_point_in_cell = *it;
- elevation_map[row][col].index_of_lowest_point = i;
- }
- else
- {
- //std::cout << "Pointing to a NON-empty cell! checking z!" << std::endl;
- if(it->z < elevation_map[row][col].lowest_point_in_cell.z)
- {
- //std::cout << "Current point is the lowest, storing it as it is!" << std::endl;
- elevation_map[row][col].lowest_point_in_cell = *it;
- elevation_map[row][col].index_of_lowest_point = i;
- }
- }
- //std::cout << "Storing z coordinate and index in the indexes vector" << std::endl;
- elevation_map[row][col].z_coordinates.push_back(it->z);
- elevation_map[row][col].indexes_of_points_in_cell.push_back(i);
- i++;
- }
-
- //std::cout << "Now generating the elevation cloud from the elevation map!" << std::endl;
- //DEBUG
- //int total_number_of_points_stored = 0;
- int index_respect_elevation_cloud = 0;
- for(int row = 0; row < number_of_rows_and_columns; ++row)
- {
- for(int col = 0; col < number_of_rows_and_columns; ++col)
- {
- if(!elevation_map[row][col].indexes_of_points_in_cell.empty())
- {
- pcl::PointXYZRGBNormal elevation_cloud_point = elevation_map[row][col].lowest_point_in_cell;
- if (elevation_cloud_point.data_c[DATA_C_1_ID_CLASS] == OUTLIER)
- {
- elevation_cloud_point.r = R_CLASS_OUTLIER;
- elevation_cloud_point.g = G_CLASS_OUTLIER;
- elevation_cloud_point.b = B_CLASS_OUTLIER;
- }
- else
- {
- elevation_cloud_point.r = R_CLASS_UNKNOWN;
- elevation_cloud_point.g = G_CLASS_UNKNOWN;
- elevation_cloud_point.b = B_CLASS_UNKNOWN;
- }
- float sum = std::accumulate(elevation_map[row][col].z_coordinates.begin(), elevation_map[row][col].z_coordinates.end(), 0.0);
- float z_mean = sum / (float)elevation_map[row][col].z_coordinates.size();
- float var = 0;
- for (int n = 0; n < elevation_map[row][col].z_coordinates.size(); ++n)
- var += ((elevation_map[row][col].z_coordinates[n] - z_mean) * (elevation_map[row][col].z_coordinates[n] - z_mean));
-
- var /= (float)elevation_map[row][col].z_coordinates.size();
-
- // Filling remaining fields
- elevation_cloud_point.data_n[DATA_N_0_INTENSITY] = INTENSITY_UNKNOWN;
- elevation_cloud_point.data_n[DATA_N_1_Z_GROUND] = Z_GROUND_UNKNOWN;
- elevation_cloud_point.data_n[DATA_N_2_Z_MEAN] = z_mean;
- elevation_cloud_point.data_n[DATA_N_3_Z_VARIANCE] = var;
-
- elevation_cloud_point.data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] = INDEX_UNKNOWN;
- elevation_cloud_point.data_c[DATA_C_3_ORIGINAL_INDEX] = index_respect_elevation_cloud++; //storing the index for later in labelling step
-
- //std::cout << "var z = " << it->data_n[DATA_N_3_Z_VARIANCE] << std::endl;
- //std::cout << "z_coordinates.size() = " << z_coordinates.size() << std::endl;
-
- //DEBUG!
- //total_number_of_points_stored += elevation_map[row][col].indexes_of_points_in_cell.size();
-
- correspondence_indexes.push_back(elevation_map[row][col].indexes_of_points_in_cell);
- //std::cout << "Storing point into elevation cloud!" << std::endl;
- elevation_cloud->push_back(elevation_cloud_point);
- }
- }
- }
- //std::cout << "elevation_cloud generated!!" << std::endl;
- //  //std::cout << "total_number_of_points_stored = " << total_number_of_points_stored << std::endl;
- //  //std::cout << "original number of points = " << pcl_cloud_ptr->points.size() << std::endl;
- }*/
-
 void CKf_Based_Terrain_Analysis::generateRootVertex(
     const kf_based_terrain_analysis_lib::SensorConfiguration lidar_configuration,
     const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
@@ -1084,162 +1024,88 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
   {
     pcl::PointXYZRGBNormal point_in_sensor_frame = *i;
 
-    bool elevation_cloud_point_is_ground = false;
-    if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) == CLASS_GROUND)
-      elevation_cloud_point_is_ground = true;
-
-    bool elevation_cloud_point_is_not_predictable_using_the_model = false;
     int reference_index = (int)std::floor(
         point_in_sensor_frame.data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL]);
-    if (reference_index == INDEX_UNKNOWN)
-      elevation_cloud_point_is_not_predictable_using_the_model = true;
 
-    if (!elevation_cloud_point_is_not_predictable_using_the_model)
+    if (reference_index != INDEX_UNKNOWN)
     {
       pcl::PointXYZRGBNormal reference_in_sensor_frame = ground_reference_cloud_ptr->points[reference_index];
-      if (elevation_cloud_point_is_ground) //&& sqrt(reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]) < 0.06) //TODO!!!
-      {
-        // If the lowest point is ground we need to check one by one the rest of points in the cell
-        // In case we find not-overhanging obstacles in its cell, we will classify the lowest point in cell also as obstacle
-        bool flag_not_overhanging_obstacle_found = false;
-
-        // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
-        std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
-            + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];
-
-        // and iterate over this vector
-        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
-            point_iterator != index_iterator->end(); ++point_iterator)
-        {
-          bool observation_z_is_below_prediction = false;
-          float prediction_std_dev = -1.0;
-          float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame,
-                                                           pcl_cloud_ptr->points[*point_iterator], prediction_std_dev,
-                                                           observation_z_is_below_prediction);
-          if (std::isnan(mahalanobis_distance))
-          {
-            std::cout << "posterior pred sigma = " << prediction_std_dev << "    uncertainty = "
-                << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
-                << mahalanobis_distance << std::endl;
-
-            std::getchar();
-          }
-
-          if (mahalanobis_distance < filtering_configuration.mahalanobis_threshold)
-          {
-            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = pcl_cloud_ptr->points[*point_iterator].z;
-            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
-                point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
-            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
-                point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
-
-            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
-            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;  // We use instead the r g b channels directly
-            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
-            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
 
-            float score = 0.99 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold);
-            if (score < 0.0)
-              score = 0.0;
-            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
-            // data_c[2] is reserved for Car probability, as we do not estimate it in this algorithm we leave this value untouched
-            // so if there are others segmentators working out there, we do not erase their job!
-
-            // data_c[3] is reserved to store the original index, so we do not want to change it!
-          }
-          else
-          {
-            //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
-
-            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = point_in_sensor_frame.z;
-            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
-                point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
-            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
-                point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
+      // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
+      std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
+          + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];
 
-            //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
-            if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
-                < filtering_configuration.robot_height)
-            {
-              // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
-              pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
-              pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
-              pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;
+      // and iterate over this vector
+      for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
+          point_iterator != index_iterator->end(); ++point_iterator)
+      {
+        float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame,
+                                                         pcl_cloud_ptr->points[*point_iterator]);
+//        if (std::isnan(mahalanobis_distance))
+//        {
+//          std::cout << "posterior pred sigma = " << prediction_std_dev << "    uncertainty = "
+//              << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
+//              << mahalanobis_distance << std::endl;
+//
+//          std::getchar();
+//        }
+
+        pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] = point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
+        pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
+            point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
+
+        // TODO: could we use the reference z variance, e.g. sqrt(reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]) < 0.06, to penalize uneven terrain?
+        float score = 0.995 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold); // we use 0.995 instead of 1.0 so the score never reaches 1.0 and rolls over into the next class id
+        if (score < 0.0)
+          score = 0.0;
 
-              pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;
+        //float score = computeLikelihood(reference_in_sensor_frame, pcl_cloud_ptr->points[*point_iterator]) / computeLikelihood(reference_in_sensor_frame, reference_in_sensor_frame);
 
-              flag_not_overhanging_obstacle_found = true;
-            }
-            else
-            {
-              //std::cout << "Overhanging obstacle detected!" << std::endl;
-              // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
-              pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
-              pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
-              pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
+        if (score > filtering_configuration.score_threshold)
+        {
+          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = pcl_cloud_ptr->points[*point_iterator].z;
 
-              pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
-                  + 0.99;
-              // data_c[2] is reserved for Car probability, as we do not estimate it in this algorithm we leave this value untouched
-              // so if there are others segmentators working out there, we do not erase their job!
+          pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;
+          pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
+          pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
 
-              // data_c[3] is reserved to store the original index, so we do not want to change it!
-            }
-          }
+          pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
         }
-      }
-      else // If the lowest point in cell is already an obstacle, the remaining points are also obstacles
-      {
-        //std::cout << "Labeling all points in cell as obstacle!" << std::endl;
-        // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
-        std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
-            + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];
-
-        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
-            point_iterator != index_iterator->end(); ++point_iterator)
+        else
         {
-          // Intensity is already in the pointcloud so we do not copy data_n[0] (in fact this value is not present in elevation cloud)
-          float z_ground = i->z;
-          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = z_ground;
-          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
-              point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
-          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
-              point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
+          //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
+          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = point_in_sensor_frame.z;
 
           //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
-          if ((pcl_cloud_ptr->points[*point_iterator].z - z_ground) < filtering_configuration.robot_height)
+          if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
+              < filtering_configuration.robot_height)
           {
-            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
-            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
+            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;
 
-            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;
+            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + score;
           }
           else
           {
             //std::cout << "Overhanging obstacle detected!" << std::endl;
-            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
-            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
+            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
 
-            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE + 0.99;
-            // data_c[2] is reserved for Car probability, as we do not estimate it in this algorithm we leave this value untouched
-            // so if there are others segmentators working out there, we do not erase their job!
-
-            // data_c[3] is reserved to store the original index, so we do not want to change it!
+            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
+                + score;
           }
         }
       }
     }
     else // if we don't have enough information to try to predict the ground level at this point
     {
-      std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
-          + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];
-
       if (filtering_configuration.classify_not_labeled_points_as_obstacles)
       {
+        std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
+            + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];
+
         for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
             point_iterator != index_iterator->end(); ++point_iterator)
         {
@@ -1249,7 +1115,6 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
           pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
               point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
 
-          //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
           if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
               < filtering_configuration.ground_threshold_in_not_analyzed_areas)
           {
@@ -1257,32 +1122,28 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
             pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
             pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
 
-            float score = 0.01; // we don't have too much confidence in these points, because they are not analyzed
+            float score = 0.5; // we don't have much confidence in these points because they are not analyzed
             pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
           }
           else if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
               < filtering_configuration.robot_height)
           {
-            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
-            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
+            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;
 
-            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;
+            float score = 0.0;
+            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + score;
           }
           else
           {
-            //std::cout << "Overhanging obstacle detected!" << std::endl;
-            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
-            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
+            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
             pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
 
-            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE + 0.99;
-            // data_c[2] is reserved for Car probability, as we do not estimate it in this algorithm we leave this value untouched
-            // so if there are others segmentators working out there, we do not erase their job!
-
-            // data_c[3] is reserved to store the original index, so we do not want to change it!
+            float score = 0.0;
+            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
+                + score;
           }
         }
       }
-- 
GitLab