#include "kf_based_terrain_analysis.h"

CKf_Based_Terrain_Analysis::CKf_Based_Terrain_Analysis()
{
  performance_supervisor_ptr_ = boost::make_shared<CTimeSupervisor>();
}

CKf_Based_Terrain_Analysis::~CKf_Based_Terrain_Analysis()
{

}

void CKf_Based_Terrain_Analysis::showPerformanceStatistics(void)
{
  performance_supervisor_ptr_->printStatistics();
}

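// Extracts the points of the input cloud (elevation cells or ground references) lying inside a
// square ROI centred on the given reference, using two chained PassThrough filters (first x, then y).
// The ROI half-width appears to depend on the reference type: ROI_delta_x_and_y for already
// propagated references (GRND_REF_DATA_N_3_ORIGINAL_INDEX > 0.5) and ROI_shadow_area otherwise
// (e.g. the root vertex).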
void CKf_Based_Terrain_Analysis::extractPointsInROI(
    const kf_based_terrain_analysis_lib::SensorConfiguration lidar_configuration,
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    const pcl::PointXYZRGBNormal reference, const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud_ptr,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr points_in_ROI)
{
//  if (filtering_configuration.measure_performance)
//    CFunctionMonitor performance_monitor("extractPointsInROI", performance_supervisor_ptr_);

//std::cout << "Extracting points in ROI: reference.x = " << reference.x << "    reference.y = " << reference.y
//    << "    reference.z = " << reference.z << std::endl;

  float x_min, x_max, y_min, y_max, x_search_range, y_search_range;

  if (reference.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] > 0.5)
  {
    x_search_range = filtering_configuration.ROI_delta_x_and_y;
    y_search_range = filtering_configuration.ROI_delta_x_and_y;
  }
  else
  {
    x_search_range = filtering_configuration.ROI_shadow_area;
    y_search_range = filtering_configuration.ROI_shadow_area;
  }

  x_min = reference.x - x_search_range;
  y_min = reference.y - y_search_range;
  x_max = reference.x + x_search_range;
  y_max = reference.y + y_search_range;

  //std::cout << "Using shadow area reference point! " << std::endl;
  //std::cout << "search_range = " << filtering_configuration.ROI_delta_x_and_y << std::endl;

  // Extracting lidar points
  pcl::PassThrough < pcl::PointXYZRGBNormal > pass;
  pass.setInputCloud(elevation_cloud_ptr);
  pass.setFilterFieldName("x");
  pass.setFilterLimits(x_min, x_max);
  pass.filter(*points_in_ROI);
  pass.setInputCloud(points_in_ROI);
  pass.setFilterFieldName("y");
  pass.setFilterLimits(y_min, y_max);
  pass.filter(*points_in_ROI);

  //std::cout << "X limits = " << x_min << " ; " << x_max << std::endl;
  //std::cout << "Y limits = " << y_min << " ; " << y_max << std::endl;
  //std::cout << "Z ground ref = " << reference.z << std::endl;

  //std::cout << "Total num of elevation pointcloud points in ROI = " << points_in_ROI->points.size() << std::endl;
}

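// Note: despite its name, this function returns the signed innovation (observed z minus the z
// predicted by the reference tangent plane), not a Euclidean distance:
//   innovation = z_obs - (z_ref + delta_x * pitch_slope + delta_y * roll_slope)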
float CKf_Based_Terrain_Analysis::euclideanDistance(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
                                                    const pcl::PointXYZRGBNormal point_in_sensor_frame)
{
  // Expectation
  float roll = reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL];
  float pitch = reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH];

  float delta_x = point_in_sensor_frame.x - reference_in_sensor_frame.x;
  float delta_y = point_in_sensor_frame.y - reference_in_sensor_frame.y;

  float prediction = reference_in_sensor_frame.z + delta_x * pitch + delta_y * roll;

  // Innovation
  float observation = point_in_sensor_frame.z;
  float innovation = observation - prediction;

  return (innovation);
}

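// Gaussian likelihood of the observed z under the plane prediction N(z_pred, var_z_pred)
// obtained from predictZWithVariance:
//   likelihood = (1 / (sigma * sqrt(2*pi))) * exp(-(z_obs - z_pred)^2 / (2 * var_z_pred))
// The constant 2.506628275 below is sqrt(2*pi).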
float CKf_Based_Terrain_Analysis::computeLikelihood(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
                                                    const pcl::PointXYZRGBNormal point_in_sensor_frame)
{
  float z_pred = 0.0;
  float var_z_pred = 0.0;

  float delta_x = point_in_sensor_frame.x - reference_in_sensor_frame.x;
  float delta_y = point_in_sensor_frame.y - reference_in_sensor_frame.y;

  predictZWithVariance(reference_in_sensor_frame, delta_x, delta_y, z_pred, var_z_pred);

  float sigma = sqrt(var_z_pred);
  float regularization_term = 1.0 / (sigma * 2.506628275);

  float likelihood = regularization_term
      * (exp(-1.0 * ((point_in_sensor_frame.z - z_pred) * (point_in_sensor_frame.z - z_pred)) / (2 * var_z_pred)));
  //DEBUG!float likelihood = ((point_in_sensor_frame.z - z_pred) * (point_in_sensor_frame.z - z_pred)) / (2 * var_z_pred);

  if (false)  //point_in_sensor_frame.z < -1.75)
  {
    std::cout << "delta_x = " << delta_x << std::endl;
    std::cout << "delta_y = " << delta_y << std::endl;

    std::cout << "z_pred = " << z_pred << std::endl;
    std::cout << "var_z_pred = " << var_z_pred << std::endl;

    std::cout << "sigma = " << sigma << std::endl;
    std::cout << "regularization_term = " << regularization_term << std::endl;

    std::cout << "z observed = " << point_in_sensor_frame.z << std::endl;
    std::cout << "likelihood = " << likelihood << std::endl;

    std::cout << "reference_in_sensor_frame.x = " << reference_in_sensor_frame.x << "    reference_in_sensor_frame.y = "
        << reference_in_sensor_frame.y << "    reference_in_sensor_frame.z = " << reference_in_sensor_frame.z
        << std::endl;

    getchar();
  }
  return (likelihood);
}

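// One-dimensional Mahalanobis distance between the observed z and the tangent-plane prediction
// of the reference:
//   d = |z_obs - z_pred| / sigma_pred, with
//   z_pred     = z_ref + dx * pitch + dy * roll
//   sigma_pred = sqrt(var_z + dx^2 * var_pitch + dy^2 * var_roll)
// The predicted standard deviation is returned through predicted_std_dev; the overload below also
// reports whether the observed z lies below the prediction.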
float CKf_Based_Terrain_Analysis::mahalanobisDistance(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
                                                      const pcl::PointXYZRGBNormal point_in_sensor_frame,
                                                      float &predicted_std_dev)
{
  //std::cout << "<------- Executing CKf_Based_Terrain_Analysis::mahalanobisDistance... --------->" << std::endl;

  float prediction_fast = reference_in_sensor_frame.z
      + (point_in_sensor_frame.x - reference_in_sensor_frame.x)
          * reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH]
      + (point_in_sensor_frame.y - reference_in_sensor_frame.y)
          * reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL];
  float prediction_std_fast = sqrt(
      reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]
          + (point_in_sensor_frame.x - reference_in_sensor_frame.x)
              * (point_in_sensor_frame.x - reference_in_sensor_frame.x)
              * reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE]
          + (point_in_sensor_frame.y - reference_in_sensor_frame.y)
              * (point_in_sensor_frame.y - reference_in_sensor_frame.y)
              * reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE]);

  float mahalanobis_distance_fast = fabs(point_in_sensor_frame.z - prediction_fast) / prediction_std_fast;

  predicted_std_dev = prediction_std_fast;

  //assert(!std::isnan(mahalanobis_distance_fast));
  if (std::isnan(mahalanobis_distance_fast))
  {
    std::cout << "ref (x,y,z)   = " << reference_in_sensor_frame.x << ", " << reference_in_sensor_frame.y << ", "
        << reference_in_sensor_frame.z << std::endl;
    std::cout << "point (x,y,z) = " << point_in_sensor_frame.x << ", " << point_in_sensor_frame.y << ", "
        << point_in_sensor_frame.z << std::endl;
    std::cout << "ref (z, sz, a, sa, b, sb) = " << reference_in_sensor_frame.z << ", "
        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE] << ", "
        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH] << ", "
        << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE] << ", "
        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL] << ", "
        << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE] << std::endl;
  }
  return (mahalanobis_distance_fast);
}

float CKf_Based_Terrain_Analysis::mahalanobisDistance(const pcl::PointXYZRGBNormal reference_in_sensor_frame,
                                                      const pcl::PointXYZRGBNormal point_in_sensor_frame,
                                                      float &predicted_std_dev, bool &observation_z_is_below_prediction)
{
  //std::cout << "<------- Executing CKf_Based_Terrain_Analysis::mahalanobisDistance... --------->" << std::endl;

  float prediction_fast = reference_in_sensor_frame.z
      + (point_in_sensor_frame.x - reference_in_sensor_frame.x)
          * reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH]
      + (point_in_sensor_frame.y - reference_in_sensor_frame.y)
          * reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL];

  float prediction_std_fast = sqrt(
      reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]
          + (point_in_sensor_frame.x - reference_in_sensor_frame.x)
              * (point_in_sensor_frame.x - reference_in_sensor_frame.x)
              * reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE]
          + (point_in_sensor_frame.y - reference_in_sensor_frame.y)
              * (point_in_sensor_frame.y - reference_in_sensor_frame.y)
              * reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE]);

  float mahalanobis_distance_fast = fabs(point_in_sensor_frame.z - prediction_fast) / prediction_std_fast;

  predicted_std_dev = prediction_std_fast;

  if (point_in_sensor_frame.z < prediction_fast)
    observation_z_is_below_prediction = true;
  else
    observation_z_is_below_prediction = false;

  //assert(!std::isnan(mahalanobis_distance_fast));
  if (std::isnan(mahalanobis_distance_fast))
  {
    std::cout << "ref (x,y,z)   = " << reference_in_sensor_frame.x << ", " << reference_in_sensor_frame.y << ", "
        << reference_in_sensor_frame.z << std::endl;
    std::cout << "point (x,y,z) = " << point_in_sensor_frame.x << ", " << point_in_sensor_frame.y << ", "
        << point_in_sensor_frame.z << std::endl;
    std::cout << "ref (z, sz, a, sa, b, sb) = " << reference_in_sensor_frame.z << ", "
        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE] << ", "
        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH] << ", "
        << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE] << ", "
        << reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL] << ", "
        << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE] << std::endl;
  }
  return (mahalanobis_distance_fast);
}

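// Evaluates the local tangent-plane model of a ground reference at an offset (delta_x, delta_y)
// from the reference: z = z_ref + delta_x * pitch_slope + delta_y * roll_slope.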
float CKf_Based_Terrain_Analysis::predictZ(const pcl::PointXYZRGBNormal reference, const float delta_x,
                                           const float delta_y)
{
  float roll_slope = reference.data_c[GRND_REF_DATA_C_1_ROLL];
  float pitch_slope = reference.data_c[GRND_REF_DATA_C_2_PITCH];

  float predicted_z = reference.z + delta_x * pitch_slope + delta_y * roll_slope;
  return (predicted_z);
}

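// Same plane evaluation as predictZ, but also propagating the reference uncertainty to the
// prediction with a first-order approximation:
//   var(z_pred) = J * Sigma * J^T, with J = [1, delta_x, delta_y] and
//   Sigma = diag(var_z, var_pitch, var_roll)
// (each variance paired with the offset that multiplies the corresponding slope).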
void CKf_Based_Terrain_Analysis::predictZWithVariance(const pcl::PointXYZRGBNormal reference, const float delta_x,
                                                      const float delta_y, float &z_predicted, float &var_z_predicted)
{
  float z_local_frame = reference.z;
  float roll_slope = reference.data_c[GRND_REF_DATA_C_1_ROLL];
  float pitch_slope = reference.data_c[GRND_REF_DATA_C_2_PITCH];

  z_predicted = z_local_frame + delta_x * pitch_slope + delta_y * roll_slope;

  Eigen::Vector3f partial_derivatives;
  partial_derivatives(0) = 1;
  partial_derivatives(1) = delta_x;
  partial_derivatives(2) = delta_y;

  Eigen::Matrix3f cov = Eigen::Matrix3f::Zero();
  cov(0, 0) = reference.data_c[GRND_REF_DATA_C_3_Z_VARIANCE];
  cov(1, 1) = reference.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE]; // paired with delta_x, as in mahalanobisDistance
  cov(2, 2) = reference.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE];  // paired with delta_y

  var_z_predicted = partial_derivatives.transpose() * cov * partial_derivatives;
}

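// Refines the local tangent plane of a reference with a sequential scalar Kalman filter.
// State x = [z, pitch, roll] with prior covariance P = diag(var_z, var_pitch, var_roll); each
// observed point provides one scalar measurement of z with model H = [1, dx, dy] and measurement
// variance r = measurement_std_dev^2:
//   K = P * H^T / (H * P * H^T + r)
//   x = x + K * (z_obs - H * x)
//   P = P - K * H * P
// The posterior mean and the diagonal of P are written back into reference_in_sensor_frame.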
void CKf_Based_Terrain_Analysis::estimateTangentPlane(
    const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr observations_for_tangent_plane_estimation,
    const double measurement_std_dev, pcl::PointXYZRGBNormal &reference_in_sensor_frame)
{
  Eigen::Vector3d x;
  x << (double)reference_in_sensor_frame.z, (double)reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH], (double)reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL];

  Eigen::Matrix3d P;
  P << (double)reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE], 0.0, 0.0, 0.0, (double)reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE], 0.0, 0.0, 0.0, (double)reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE];

  //std::cout << "x prior = " << std::endl << x << std::endl;
  //std::cout << "P prior = " << std::endl << P << std::endl;

  Eigen::RowVector3d H;
  Eigen::Vector3d K;

  double r = measurement_std_dev * measurement_std_dev;

  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator i = observations_for_tangent_plane_estimation->begin();
      i != observations_for_tangent_plane_estimation->end(); ++i)
  {
    H << (double)1.0, (i->x - (double)reference_in_sensor_frame.x), (i->y - (double)reference_in_sensor_frame.y);

    //std::cout << "H = " << std::endl << H << std::endl;

    K = P * H.transpose() / (H * P * H.transpose() + r);
    //std::cout << "K = " << std::endl << K << std::endl;

    x = x + K * (i->z - (H * x));
    P = P - K * H * P;
    //std::cout << "x posterior = " << std::endl << x << std::endl;
    //std::cout << "P posterior = " << std::endl << P << std::endl;
  }

  reference_in_sensor_frame.z = (float)x(0);
  reference_in_sensor_frame.data_c[GRND_REF_DATA_C_2_PITCH] = (float)x(1);
  reference_in_sensor_frame.data_c[GRND_REF_DATA_C_1_ROLL] = (float)x(2);

  reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE] = (float)P(0, 0);
  reference_in_sensor_frame.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE] = (float)P(1, 1);
  reference_in_sensor_frame.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE] = (float)P(2, 2);

  //std::cout << "x posterior = " << std::endl << x << std::endl;
  //std::cout << "P posterior = " << std::endl << P << std::endl;
  //getchar();
}

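// Processes a single ground reference: each elevation point in its ROI is gated against the
// reference plane with the Mahalanobis distance. Inliers are labelled as ground (and, when still
// unexplored, passed to the CObservationDownsampler to spawn new candidate references); outliers
// are labelled as obstacles, storing the plane prediction as their ground height. The inliers then
// feed estimateTangentPlane to obtain the posterior plane, new references are generated, and edges
// towards other references inside the ROI are added to the graph (or recorded as discarded edges
// when the reference-to-reference Mahalanobis test fails).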
void CKf_Based_Terrain_Analysis::labelLocalGroundPoints(
    const kf_based_terrain_analysis_lib::SensorConfiguration lidar_configuration,
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    const std::vector<std::vector<int> > &correspondence_indexes, const int reference_index,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr points_in_ROI,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr references_in_ROI,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud_ptr,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_ptr, std::vector<std::vector<int>> &edges,
    std::vector<std::vector<kf_based_terrain_analysis_lib::Edge>> &discarded_edges)
{
  //std::cout << "Starting labelLocalGroundPoints!!" << std::endl;
//  if (filtering_configuration.measure_performance)
//    CFunctionMonitor performance_monitor("labelLocalGroundPoints", performance_supervisor_ptr_);

  CObservationDownsampler new_references_creator(filtering_configuration.ground_reference_search_resolution,
                                                 filtering_configuration.propagation_additive_noise,
                                                 filtering_configuration.propagation_z_additive_noise);

  //std::cout << "Reference_index = " << reference_index << std::endl;
  //std::cout << "Using reference with ID = "
  //    << ground_reference_cloud_ptr->points[reference_index].data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] << std::endl;
  pcl::PointXYZRGBNormal reference_in_sensor_frame = ground_reference_cloud_ptr->points[reference_index];

  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr observations_for_tangent_plane_estimation(
      new pcl::PointCloud<pcl::PointXYZRGBNormal>);

// points in ROI come from elevation pointcloud, which is a reduction of the original
// pointcloud (we have stored the indexes to be able to label the original pointcloud)
  //std::cout << "Extracting observations for local tangent plane estimation..." << std::endl;
  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator i = points_in_ROI->begin(); i != points_in_ROI->end(); ++i)
  {
    pcl::PointXYZRGBNormal point_in_sensor_frame = *i;

    float prediction_std_dev = -1.0;
    //std::cout << "Computing mahalanobis distance..." << std::endl;
    float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame, point_in_sensor_frame,
                                                     prediction_std_dev);

    //std::cout << "Mahalanobis distance computed!" << std::endl;
    if (std::isnan(mahalanobis_distance))
    {
      std::cout << "prior pred sigma = " << prediction_std_dev << "    uncertainty = "
          << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
          << mahalanobis_distance << std::endl;

      std::getchar();
    }

    if (mahalanobis_distance < filtering_configuration.mahalanobis_threshold)
    {
      //std::cout << "Ground point found! x = " << point_in_sensor_frame.x << "    y = " << point_in_sensor_frame.y
      //    << "    z = " << point_in_sensor_frame.z << std::endl << "mahalanobis_distance = " << mahalanobis_distance
      //    << "    mahalanobis_threshold = " << filtering_configuration.mahalanobis_threshold << std::endl;

      //std::cout << "Inserting observation for plane estimation!" << std::endl;
      observations_for_tangent_plane_estimation->push_back(point_in_sensor_frame); // independently of exploration, any ground point is
                                                                                   // used for plane estimation, so as not to discard
                                                                                   // useful information!

      if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) == CLASS_UNKNOWN) // we use unknown points to drive the exploration
      {
        //std::cout << "Inserting observation to drive exploration! --> original index = "
        //    << point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX] << std::endl;

        //std::cout << "elevation_cloud_ptr->points.size() = " << elevation_cloud_ptr->points.size() << std::endl;

        new_references_creator.addGroundObservationInSensorFrame(
            reference_in_sensor_frame,
            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]]);
      }

      //std::cout << "Updating elevation cloud point to represent ground data" << std::endl;
      if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) != CLASS_GROUND) // if the point was seen as an obstacle from another POV
                                                                                            // or was unexplored, we set it to the ground class
      {
        //std::cout << "Storing z point as z ground" << std::endl;
        // as now the point is classified as ground, we store its own z value as z ground
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_n[DATA_N_1_Z_GROUND] =
            elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].z;

        // set the class as ground
        float score = 0.99 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold);
        if (score < 0.0)
          score = 0.0;
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_1_ID_CLASS] =
            (float)CLASS_GROUND + score;

        // colour the point properly
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].r = R_CLASS_GROUND;
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].g = G_CLASS_GROUND;
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].b = B_CLASS_GROUND;

        // and store the reference for the later full pointcloud labelling step
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] =
            reference_index;
      }
    }
    else
    {
      //std::cout << "Obstacle point found! x = " << point_in_sensor_frame.x << "    y = " << point_in_sensor_frame.y
      //    << "    z = " << point_in_sensor_frame.z << std::endl << "mahalanobis_distance = " << mahalanobis_distance
      //    << "    mahalanobis_threshold = " << filtering_configuration.mahalanobis_threshold << std::endl;

      if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) == CLASS_UNKNOWN) // we only store the data if it has not been done
                                                                                             // previously
      {
        // if it is an obstacle point we use the ground reference z prediction as z ground
        // predictZ expects offsets from the reference, not absolute coordinates
        float z_ground = predictZ(ground_reference_cloud_ptr->points[reference_index],
                                  point_in_sensor_frame.x - ground_reference_cloud_ptr->points[reference_index].x,
                                  point_in_sensor_frame.y - ground_reference_cloud_ptr->points[reference_index].y);
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_n[DATA_N_1_Z_GROUND] =
            z_ground;

        // set the class to OBSTACLE
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_1_ID_CLASS] =
            (float)CLASS_OBSTACLE + 0.99;

        // and colour the point
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].r = R_CLASS_OBSTACLE;
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].g = G_CLASS_OBSTACLE;
        elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].b = B_CLASS_OBSTACLE;
      }
      // and store the reference for the later full pointcloud labelling step
      elevation_cloud_ptr->points[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] =
          reference_index;
    }
  }
  //std::cout << "Observations for local tangent plane estimation extracted!" << std::endl;

  //std::cout << "Reference Prior = " << std::endl;
  //std::cout << "x = " << ground_reference_cloud_ptr->points[reference_index].x << "    y = "
  //    << ground_reference_cloud_ptr->points[reference_index].y << std::endl << "    z     = ( "
  //    << ground_reference_cloud_ptr->points[reference_index].z << " , "
  //    << sqrt(ground_reference_cloud_ptr->points[reference_index].data_c[GRND_REF_DATA_C_3_Z_VARIANCE]) << " )"
  //    << "    roll  = ( "
  //    << atan(ground_reference_cloud_ptr->points[reference_index].data_c[GRND_REF_DATA_C_1_ROLL]) * 180.0 / M_PI
  //    << " , "
  //    << sqrt(ground_reference_cloud_ptr->points[reference_index].data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE]) * 180.0
  //        / M_PI << " )" << "    pitch = ( "
  //    << atan(ground_reference_cloud_ptr->points[reference_index].data_c[GRND_REF_DATA_C_2_PITCH]) * 180.0 / M_PI
  //    << " , "
  //    << sqrt(ground_reference_cloud_ptr->points[reference_index].data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE]) * 180.0
  //        / M_PI << " )" << std::endl;

  //std::cout << "Estimating tangent plane..." << std::endl;
  estimateTangentPlane(observations_for_tangent_plane_estimation, (double)filtering_configuration.z_observation_std_dev,
                       reference_in_sensor_frame);
  //std::cout << "Posterior distribution obtained!" << std::endl;

  ground_reference_cloud_ptr->points[reference_index] = reference_in_sensor_frame;

//  std::cout << "Reference Posterior = " << std::endl;
//  std::cout << "x = " << ground_reference_cloud_ptr->points[reference_index].x << "    y = "
//      << ground_reference_cloud_ptr->points[reference_index].y << std::endl << "    z     = ( "
//      << ground_reference_cloud_ptr->points[reference_index].z << " , "
//      << sqrt(ground_reference_cloud_ptr->points[reference_index].data_c[GRND_REF_DATA_C_3_Z_VARIANCE]) << " )"
//      << "    roll  = ( "
//      << atan(ground_reference_cloud_ptr->points[reference_index].data_c[GRND_REF_DATA_C_1_ROLL]) * 180.0 / M_PI
//      << " , "
//      << sqrt(ground_reference_cloud_ptr->points[reference_index].data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE]) * 180.0
//          / M_PI << " )" << "    pitch = ( "
//      << atan(ground_reference_cloud_ptr->points[reference_index].data_c[GRND_REF_DATA_C_2_PITCH]) * 180.0 / M_PI
//      << " , "
//      << sqrt(ground_reference_cloud_ptr->points[reference_index].data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE]) * 180.0
//          / M_PI << " )" << std::endl;

  //std::cout << "Creating new references..." << std::endl;
  new_references_creator.generateNewGroundReferences(reference_index, reference_in_sensor_frame,
                                                     ground_reference_cloud_ptr, edges);
  //std::cout << "New references created!" << std::endl;

  //std::cout << "Search for references in the ROI..." << std::endl;
  extractPointsInROI(lidar_configuration, filtering_configuration, ground_reference_cloud_ptr->points[reference_index],
                     ground_reference_cloud_ptr, references_in_ROI);
  //std::cout << "Search concluded! Number of references in ROI = " << references_in_ROI->size() << std::endl;

  //std::cout << "Making edges to existing references in the ROI... " << std::endl;
  int num_of_discarded_edges = 0;
  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator i = references_in_ROI->begin(); i != references_in_ROI->end();
      ++i)
  {
    pcl::PointXYZRGBNormal reference_in_ROI = *i;

    // just a check to not repeat already known edges
    if (reference_index != reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]
        && (std::find(edges[reference_index].begin(), edges[reference_index].end(),
                      reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]) == edges[reference_index].end())
        && (std::find(edges[reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]].begin(),
                      edges[reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]].end(), reference_index)
            == edges[reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]].end()))
    {
//      std::cout << "reference_index = " << reference_index << std::endl;
//      std::cout << "reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] = "
//          << reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] << std::endl;
//      std::cout << "edges[reference_index].size() = " << edges[reference_index].size() << std::endl;
//      //int n = 0;
//      //for (std::vector<int>::iterator i = edges[reference_index].begin(); i != edges[reference_index].end(); i++)
//      //{
//      //  std::cout << "edges[" << n++ << "] = " << *i << std::endl;
//      //}
//      std::cout << "Analyzing vertex in ROI with id = " << reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]
//          << std::endl;

      float prediction_std_dev = -1.0;
      float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame, reference_in_ROI, prediction_std_dev);

      // Just a check!
      if (std::isnan(mahalanobis_distance))
      {
        std::cout << "posterior pred sigma = " << prediction_std_dev << "    uncertainty = "
            << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
            << mahalanobis_distance << std::endl;

        std::getchar();
      }
      //std::cout << "Checking connections..." << std::endl;
      bool connect = true;

      int edge_class = CLASS_EDGE_ACCEPTED;
      if (mahalanobis_distance > filtering_configuration.mahalanobis_threshold)
      {
        //std::cout << "Discarding connection! Mahalanobis distance = " << mahalanobis_distance
        //    << "    Mahalanobis threshold = " << filtering_configuration.mahalanobis_threshold << std::endl;
        connect = false;
        edge_class = CLASS_EDGE_DISCARDED_TOO_MUCH_MAHALANOBIS_DISTANCE;
      }

      if (connect)
      {
//        std::cout << "Adding edge between vertex " << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]
//            << " and vertex " << reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] << std::endl;
        edges[reference_in_sensor_frame.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]].push_back(
            reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]);

        if (reference_in_sensor_frame.data_n[GRND_REF_DATA_N_2_VERTEX_CLASS] == VERTEX_CONNECTED_TO_ROOT)
          ground_reference_cloud_ptr->points[reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]].data_n[GRND_REF_DATA_N_2_VERTEX_CLASS] =
              VERTEX_CONNECTED_TO_ROOT;

      }
      else
      {
        kf_based_terrain_analysis_lib::Edge discarded_edge;
        discarded_edge.vertex_index = reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX];
        discarded_edge.edge_class = edge_class;
        discarded_edges[reference_in_sensor_frame.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]].push_back(discarded_edge);
        num_of_discarded_edges++;
      }
    }
    else
    {
//      std::cout << "Skip adding edge between vertex "
//          << reference_in_sensor_frame.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] << " and vertex "
//          << reference_in_ROI.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] << std::endl;
    }
  }
  //std::cout << "Total edges evaluated = " << references_in_ROI->points.size() << "    discarded = "
  //    << num_of_discarded_edges << std::endl;
//getchar();
//  new_references_creator.generateNewGroundReferences(reference_index, reference_in_sensor_frame,
//                                                     ground_reference_cloud_ptr, edges);
}

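// Thin wrapper: extracts the elevation-cloud points inside the ROI of the given reference and
// delegates the actual labelling and graph expansion to labelLocalGroundPoints.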
void CKf_Based_Terrain_Analysis::labelGroundPoints(
    const kf_based_terrain_analysis_lib::SensorConfiguration lidar_configuration,
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    const std::vector<std::vector<int> > &correspondence_indexes, const int reference_index,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud_ptr,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_ptr, std::vector<std::vector<int>> &edges,
    std::vector<std::vector<kf_based_terrain_analysis_lib::Edge>> &discarded_edges)
{
  //if (filtering_configuration.measure_performance)
  //CFunctionMonitor performance_monitor("labelGroundPoints", performance_supervisor_ptr_);

  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr points_in_ROI(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr references_in_ROI(new pcl::PointCloud<pcl::PointXYZRGBNormal>);

  //std::cout << "Extracting points in ROI..." << std::endl;
  extractPointsInROI(lidar_configuration, filtering_configuration, ground_reference_cloud_ptr->points[reference_index],
                     elevation_cloud_ptr, points_in_ROI);

//  std::cout << "Reference index = " << reference_index << std::endl;
//  std::cout << "Points in ROI extracted!! Number of points in ROI = " << points_in_ROI->points.size() << std::endl;
//  std::cout << "References in ROI extracted!! Number of references in ROI = " << references_in_ROI->points.size()
//      << std::endl;

  //std::cout << "Points in ROI extracted!" << std::endl;

//  std::cout << "Number of edges in reference prior to labeling = " << edges[reference_index].size() << std::endl;
  //std::cout << "Labeling local ground points..." << std::endl;
  labelLocalGroundPoints(lidar_configuration, filtering_configuration, correspondence_indexes, reference_index,
                         points_in_ROI, references_in_ROI, ground_reference_cloud_ptr, elevation_cloud_ptr, cloud_ptr,
                         edges, discarded_edges);
  //std::cout << "Local ground points labeled!" << std::endl;
//  std::cout << "Number of edges after labeling = " << edges[reference_index].size() << std::endl;

//  getchar();
}

// An attempt to optimize this function; it turned out to be slower :(
//void CKf_Based_Terrain_Analysis::generateElevationCloud(
//    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
//    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr,
//    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud, std::vector<std::vector<int>> &correspondence_indexes)
//{
//  //if (filtering_configuration.measure_performance)
//  CFunctionMonitor performance_monitor("generateElevationCloud", performance_supervisor_ptr_);
//
//  int i = 0;
//  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator it = pcl_cloud_ptr->begin(); it != pcl_cloud_ptr->end();
//      ++it)
//    it->data_c[DATA_C_3_ORIGINAL_INDEX] = i++;
//
//  // We want to find the x and y values, but not interested in z
//  //std::cout << "Elevation grid resolution = " << filtering_configuration.elevation_grid_resolution << std::endl;
//  pcl::VoxelGrid < pcl::PointXYZRGBNormal > voxel_grid;
//  voxel_grid.setInputCloud(pcl_cloud_ptr);
//  voxel_grid.setLeafSize(filtering_configuration.elevation_grid_resolution,
//                         filtering_configuration.elevation_grid_resolution, OUT_OF_RANGE);
//  voxel_grid.filter(*elevation_cloud);
//  //std::cout << "Number of points after voxelization = " << elevation_cloud->points.size() << std::endl;
//
//  pcl::PassThrough < pcl::PointXYZRGBNormal > pass;
//  pass.setInputCloud(pcl_cloud_ptr);
//
//  i = 0;
//  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator it = elevation_cloud->begin(); it != elevation_cloud->end();
//      ++it)
//  {
//    float x_min, x_max, y_min, y_max, x_search_range, y_search_range;
//
//    x_search_range = filtering_configuration.elevation_grid_resolution;
//    y_search_range = filtering_configuration.elevation_grid_resolution;
//
//    x_min = it->x - x_search_range;
//    y_min = it->y - y_search_range;
//    x_max = it->x + x_search_range;
//    y_max = it->y + y_search_range;
//
//    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr points_in_cell(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
//
//    // Extracting lidar points
//    pass.setFilterFieldName("x");
//    pass.setFilterLimits(x_min, x_max);
//    pass.filter(*points_in_cell);
//    pass.setInputCloud(points_in_cell);
//    pass.setFilterFieldName("y");
//    pass.setFilterLimits(y_min, y_max);
//    pass.filter(*points_in_cell);
//
//    std::vector<int> pointIdxRadiusSearch;
//    for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator cell_point_iterator = points_in_cell->begin();
//        cell_point_iterator != points_in_cell->end(); ++cell_point_iterator)
//    {
//      //std::cout << "id = " << cell_point_iterator->data_c[DATA_C_3_ORIGINAL_INDEX] << std::endl;
//      pointIdxRadiusSearch.push_back(cell_point_iterator->data_c[DATA_C_3_ORIGINAL_INDEX]);
//    }
//    //std::cout << "Total number of points in cell = " << pointIdxRadiusSearch.size() << std::endl;
//
//    std::vector<float> z_coordinates;
//    std::vector<float> x_coordinates;
//    std::vector<float> y_coordinates;
//
//    bool outlier_detected = false;
//    for (std::vector<int>::const_iterator iterator = pointIdxRadiusSearch.begin();
//        iterator != pointIdxRadiusSearch.end(); ++iterator)
//    {
//      //std::cout << "storing coordinates of point in cell with id = " << *iterator << std::endl;
//      z_coordinates.push_back(pcl_cloud_ptr->points[*iterator].z);
//      x_coordinates.push_back(pcl_cloud_ptr->points[*iterator].x);
//      y_coordinates.push_back(pcl_cloud_ptr->points[*iterator].y);
//
//      if (pcl_cloud_ptr->points[*iterator].data_c[DATA_C_1_ID_CLASS] == OUTLIER)
//        outlier_detected = true;
//    }
//
//    if (outlier_detected)
//    {
//      //std::cout << "Outlier detected!" << std::endl;
//      it->data_c[DATA_C_1_ID_CLASS] = OUTLIER;
//      it->r = R_CLASS_OUTLIER;
//      it->g = G_CLASS_OUTLIER;
//      it->b = B_CLASS_OUTLIER;
//    }
//    else
//    {
//      //std::cout << "Not outlier, setting points to unknown class" << std::endl;
//      it->data_c[DATA_C_1_ID_CLASS] = CLASS_UNKNOWN;
//      it->r = R_CLASS_UNKNOWN;
//      it->g = G_CLASS_UNKNOWN;
//      it->b = B_CLASS_UNKNOWN;
//    }
//
//    //std::cout << "Computing cell statistics!" << std::endl;
//    float sum = std::accumulate(z_coordinates.begin(), z_coordinates.end(), 0.0);
//    float z_mean = sum / (float)z_coordinates.size();
//    float var = 0;
//    for (int n = 0; n < z_coordinates.size(); ++n)
//      var += ((z_coordinates[n] - z_mean) * (z_coordinates[n] - z_mean));
//
//    var /= (float)z_coordinates.size();
//
//    //std::cout << "Retrieving the position of the lowest point in cell..." << std::endl;
//    std::vector<float>::iterator lowest_point_in_cell = std::min_element(z_coordinates.begin(), z_coordinates.end());
//    int position_in_vector = std::distance(z_coordinates.begin(), lowest_point_in_cell);
//
//    it->z = z_coordinates[position_in_vector];
//    it->x = x_coordinates[position_in_vector];
//    it->y = y_coordinates[position_in_vector];
//
//    //std::cout << "Filling remaining fields" << std::endl;
//    it->data_n[DATA_N_0_INTENSITY] = INTENSITY_UNKNOWN;
//    it->data_n[DATA_N_1_Z_GROUND] = Z_GROUND_UNKNOWN;
//    it->data_n[DATA_N_2_Z_MEAN] = z_mean;
//    it->data_n[DATA_N_3_Z_VARIANCE] = var;
//
//    it->data_c[DATA_C_2_CAR_PROB] = PROB_UNKNOWN;
//
//    //std::cout << "storing the index for later in labelling step" << std::endl;
//    it->data_c[DATA_C_3_ORIGINAL_INDEX] = i++;
//
//    //std::cout << "var z = " << it->data_n[DATA_N_3_Z_VARIANCE] << std::endl;
//    //std::cout << "z_coordinates.size() = " << z_coordinates.size() << std::endl;
//
//    //std::cout << "storing the vector of indexes" << std::endl;
//    correspondence_indexes.push_back(pointIdxRadiusSearch);
//  }
//}

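// Builds the 2D elevation cloud: the input cloud is voxelized in x-y only (the z leaf size is set
// to OUT_OF_RANGE), and for every resulting cell a 2D kd-tree radius search (radius = half the
// cell diagonal) gathers the original points that belong to it. Each elevation point takes the
// lowest point of its cell as (x, y, z), stores the per-cell z mean and variance, and keeps the
// indices of the original points in correspondence_indexes for the later full-cloud labelling step.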
void CKf_Based_Terrain_Analysis::generateElevationCloud(
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud, std::vector<std::vector<int>> &correspondence_indexes)
{
  //if (filtering_configuration.measure_performance)
  //CFunctionMonitor performance_monitor("generateElevationCloud", performance_supervisor_ptr_);

  pcl::PointCloud<pcl::PointXY>::Ptr pcl_pointcloud_2D_ptr(new pcl::PointCloud<pcl::PointXY>);
  pcl::PointXY point;
  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::const_iterator it = pcl_cloud_ptr->begin(); it != pcl_cloud_ptr->end();
      ++it)
  {
    point.x = it->x;
    point.y = it->y;
    pcl_pointcloud_2D_ptr->points.push_back(point);
  }

  //std::cout << "Number of points in pcl_pointcloud_2D_ptr = " << pcl_pointcloud_2D_ptr->points.size() << std::endl;

  pcl::KdTreeFLANN < pcl::PointXY > kdtree_2D;
  kdtree_2D.setInputCloud(pcl_pointcloud_2D_ptr);

  // We want to find the x and y values, but not interested in z
  //std::cout << "Elevation grid resolution = " << filtering_configuration.elevation_grid_resolution << std::endl;
  pcl::VoxelGrid < pcl::PointXYZRGBNormal > voxel_grid;
  voxel_grid.setInputCloud(pcl_cloud_ptr);
  voxel_grid.setLeafSize(filtering_configuration.elevation_grid_resolution,
                         filtering_configuration.elevation_grid_resolution, OUT_OF_RANGE);
  voxel_grid.filter(*elevation_cloud);
  //std::cout << "Number of points after voxelization = " << elevation_cloud->points.size() << std::endl;

  const float DISTANCE_FROM_CELL_CENTER_TO_CELL_CORNER = 0.7071067812
      * filtering_configuration.elevation_grid_resolution; //sqrt(2)/2.0

  int i = 0;
  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator it = elevation_cloud->begin(); it != elevation_cloud->end();
      ++it)
  {
    point.x = it->x;
    point.y = it->y;

    std::vector<int> pointIdxRadiusSearch;
    std::vector<float> pointRadiusSearchSquaredDistance;

    kdtree_2D.radiusSearch(point, DISTANCE_FROM_CELL_CENTER_TO_CELL_CORNER, pointIdxRadiusSearch,
                           pointRadiusSearchSquaredDistance);

    std::vector<float> z_coordinates;
    std::vector<float> x_coordinates;
    std::vector<float> y_coordinates;

    bool outlier_detected = false;
    for (std::vector<int>::const_iterator iterator = pointIdxRadiusSearch.begin();
        iterator != pointIdxRadiusSearch.end(); ++iterator)
    {
      z_coordinates.push_back(pcl_cloud_ptr->points[*iterator].z);
      x_coordinates.push_back(pcl_cloud_ptr->points[*iterator].x);
      y_coordinates.push_back(pcl_cloud_ptr->points[*iterator].y);

      if (pcl_cloud_ptr->points[*iterator].data_c[DATA_C_1_ID_CLASS] == OUTLIER)
        outlier_detected = true;
    }

    if (outlier_detected)
    {
      it->data_c[DATA_C_1_ID_CLASS] = OUTLIER;
      it->r = R_CLASS_OUTLIER;
      it->g = G_CLASS_OUTLIER;
      it->b = B_CLASS_OUTLIER;
    }
    else
    {
      it->data_c[DATA_C_1_ID_CLASS] = CLASS_UNKNOWN;
      it->r = R_CLASS_UNKNOWN;
      it->g = G_CLASS_UNKNOWN;
      it->b = B_CLASS_UNKNOWN;
    }

    float sum = std::accumulate(z_coordinates.begin(), z_coordinates.end(), 0.0);
    float z_mean = sum / (float)z_coordinates.size();
    float var = 0;
    for (int n = 0; n < z_coordinates.size(); ++n)
      var += ((z_coordinates[n] - z_mean) * (z_coordinates[n] - z_mean));

    var /= (float)z_coordinates.size();

    std::vector<float>::iterator lowest_point_in_cell = std::min_element(z_coordinates.begin(), z_coordinates.end());
    int position_in_vector = std::distance(z_coordinates.begin(), lowest_point_in_cell);

    it->z = z_coordinates[position_in_vector];
    it->x = x_coordinates[position_in_vector];
    it->y = y_coordinates[position_in_vector];

    // Filling remaining fields
    it->data_n[DATA_N_0_INTENSITY] = INTENSITY_UNKNOWN;
    it->data_n[DATA_N_1_Z_GROUND] = Z_GROUND_UNKNOWN;
    it->data_n[DATA_N_2_Z_MEAN] = z_mean;
    it->data_n[DATA_N_3_Z_VARIANCE] = var;

    it->data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] = INDEX_UNKNOWN;
    it->data_c[DATA_C_3_ORIGINAL_INDEX] = i++;    //storing the index for later in labelling step

    //std::cout << "var z = " << it->data_n[DATA_N_3_Z_VARIANCE] << std::endl;
    //std::cout << "z_coordinates.size() = " << z_coordinates.size() << std::endl;

    correspondence_indexes.push_back(pointIdxRadiusSearch);
  }
}

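// Creates the root vertex of the ground-reference graph: the ground directly below the sensor at
// (0, 0, -sensor_height), approximately known from the extrinsic calibration, with zero roll and
// pitch slopes and the initial standard deviations of the filtering configuration as its prior.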
void CKf_Based_Terrain_Analysis::generateRootVertex(
    const kf_based_terrain_analysis_lib::SensorConfiguration lidar_configuration,
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr)
{
//std::cout << "Generating root vertex" << std::endl;
// The first reference point (root vertex) is the ground at (0,0), approximately known from the sensor extrinsic calibration
  pcl::PointXYZRGBNormal root_vertex;
  root_vertex.x = 0.0;
  root_vertex.y = 0.0;
  root_vertex.z = -1.0 * lidar_configuration.sensor_height;

  root_vertex.r = 0;
  root_vertex.g = 0;
  root_vertex.b = 0;

  root_vertex.data_n[GRND_REF_DATA_N_2_VERTEX_CLASS] = VERTEX_CONNECTED_TO_ROOT;

  int index = 0;
  root_vertex.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX] = (float)index;

  // we set the prior
  root_vertex.data_c[GRND_REF_DATA_C_1_ROLL] = 0.0;
  root_vertex.data_c[GRND_REF_DATA_C_2_PITCH] = 0.0;
  root_vertex.data_c[GRND_REF_DATA_C_3_Z_VARIANCE] = filtering_configuration.z_initial_std_dev
      * filtering_configuration.z_initial_std_dev;
  root_vertex.data_n[GRND_REF_DATA_N_0_ROLL_VARIANCE] = filtering_configuration.roll_initial_std_dev
      * filtering_configuration.roll_initial_std_dev;
  root_vertex.data_n[GRND_REF_DATA_N_1_PITCH_VARIANCE] = filtering_configuration.pitch_initial_std_dev
      * filtering_configuration.pitch_initial_std_dev;

  root_vertex.data_n[GRND_REF_DATA_N_2_VERTEX_CLASS] = VERTEX_CONNECTED_TO_ROOT;
  ground_reference_cloud_ptr->points.push_back(root_vertex);
}

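// Presumably a visualization helper: samples a fixed 40 m x 40 m grid (0.2 m step) around the
// sensor and, at each sample, fuses the z predictions of the K nearest ground references using
// inverse-variance weights. The colour encodes the fused prediction standard deviation
// (blue = fully confident, green = at or above max_pred_std_dev_for_labelling).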
void CKf_Based_Terrain_Analysis::createDenseGroundCloud(
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr,
    const kf_based_terrain_analysis_lib::SensorConfiguration lidar_configuration,
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_dense_cloud_ptr)
{
  //if (filtering_configuration.measure_performance)
  CFunctionMonitor performance_monitor("createDenseGroundCloud", performance_supervisor_ptr_);

  //std::cout << "setting input cloud" << std::endl;
  pcl::KdTreeFLANN < pcl::PointXYZRGBNormal > kdtree;
  kdtree.setInputCloud(ground_reference_cloud_ptr);
  int K = filtering_configuration.number_of_references_used_for_labelling;
  std::vector<int> pointIdxKNNSearch(K);
  std::vector<float> pointKNNSquaredDistance(K);

  //std::cout << "Starting dense cloud creation" << std::endl;
  float min_std_dev = 1000.0;
  float max_std_dev = -1.0;

  for (float i = -20.0; i < 20.0; i += 0.2)
  {
    for (float j = -20.0; j < 20.0; j += 0.2)
    {
      pcl::PointXYZRGBNormal point;
      point.x = i;
      point.y = j;

      kdtree.nearestKSearch(point, K, pointIdxKNNSearch, pointKNNSquaredDistance);
      const int NUM_OF_NEIGHBOURS = pointIdxKNNSearch.size();

      if (NUM_OF_NEIGHBOURS > 0)
      {
        //std::cout << "Computing Z value as a weighted average" << std::endl;
        std::vector<float> z_vector(NUM_OF_NEIGHBOURS);
        std::vector<float> z_var_vector(NUM_OF_NEIGHBOURS);
        for (int i = 0; i < NUM_OF_NEIGHBOURS; i++)
        {
          pcl::PointXYZRGBNormal reference = ground_reference_cloud_ptr->points[pointIdxKNNSearch[i]];

          float delta_x = point.x - reference.x;
          float delta_y = point.y - reference.y;
          predictZWithVariance(reference, delta_x, delta_y, z_vector[i], z_var_vector[i]);
        }

        float norm_factor = 0.0;
        std::vector<float> weights(NUM_OF_NEIGHBOURS);
        for (int i = 0; i < NUM_OF_NEIGHBOURS; i++)
        {
          if (z_var_vector[i] > 0.0)
          {
            weights[i] = 1.0 / z_var_vector[i];
            norm_factor += weights[i];
          }
          else
          {
            std::cout << "ERROR in CKf_Based_Terrain_Analysis::createDenseGroundCloud! --> invalid variance = "
                << z_var_vector[i] << std::endl;
            assert(false);
          }
        }

        for (int i = 0; i < NUM_OF_NEIGHBOURS; i++)
        {
          point.z += (weights[i] * z_vector[i]) / norm_factor;
        }

        // Compute the colour from the predicted std_dev: blue --> std_dev of zero; green --> std_dev >= MAX_STD_DEV
        float MAX_STD_DEV = filtering_configuration.max_pred_std_dev_for_labelling;
        float std_dev = sqrt(1.0 / norm_factor);

        if (std_dev < min_std_dev)
          min_std_dev = std_dev;

        if (std_dev > max_std_dev)
          max_std_dev = std_dev;

        if (std_dev > MAX_STD_DEV)
          std_dev = MAX_STD_DEV;

        float red = 0.0;
        float green = 255.0 * std_dev / MAX_STD_DEV;
        float blue = 255.0 - green; // blue when fully confident, green at MAX_STD_DEV

        point.r = (int)red;
        point.g = (int)green;
        point.b = (int)blue;

        //std::cout << "Storing the point!" << std::endl;
        pcl_dense_cloud_ptr->push_back(point);
      }
    }
  }

  //std::cout << "Dense reconstruction min_std_dev = " << min_std_dev << "  max_std_dev = " << max_std_dev << std::endl;

  //std::cout << "Finished dense cloud creation!!" << std::endl;
}

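// Propagates the labels computed on the elevation cloud to the full-resolution cloud. For each
// elevation cell, the reference that labelled it is looked up; if the lowest point of the cell is
// ground, every original point in the cell is re-gated against that reference (Mahalanobis
// threshold) and labelled ground, obstacle or, when it lies more than robot_height above the local
// ground estimate, overhanging obstacle. If the lowest point is an obstacle, all points in the cell
// are labelled as (possibly overhanging) obstacles. Cells with no associated reference are handled
// according to classify_not_labeled_points_as_obstacles, using simple height thresholds when that
// flag is set.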
void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud_ptr,
    const std::vector<std::vector<int> > &correspondence_indexes,
    const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr,
    const std::vector<std::vector<int>> &edges, pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr)
{
  //if (filtering_configuration.measure_performance)
  //CFunctionMonitor performance_monitor("labelPointcloudUsingGroundModel", performance_supervisor_ptr_);
  //std::cout << "Labeling points in the original pointcloud using the posterior and the ROI indexes vectors..."
  //    << std::endl;
  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator i = elevation_cloud_ptr->begin();
      i != elevation_cloud_ptr->end(); ++i)
  {
    pcl::PointXYZRGBNormal point_in_sensor_frame = *i;

    bool elevation_cloud_point_is_ground = false;
    if ((int)std::floor(point_in_sensor_frame.data_c[DATA_C_1_ID_CLASS]) == CLASS_GROUND)
      elevation_cloud_point_is_ground = true;

    bool elevation_cloud_point_is_not_predictable_using_the_model = false;
    int reference_index = (int)std::floor(
        point_in_sensor_frame.data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL]);
    if (reference_index == INDEX_UNKNOWN)
      elevation_cloud_point_is_not_predictable_using_the_model = true;

    if (!elevation_cloud_point_is_not_predictable_using_the_model)
    {
      pcl::PointXYZRGBNormal reference_in_sensor_frame = ground_reference_cloud_ptr->points[reference_index];
      if (elevation_cloud_point_is_ground) //&& sqrt(reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]) < 0.06) //TODO!!!
      {
        // If the lowest point is ground, we still need to check the rest of the points in the cell one by one:
        // if we find non-overhanging obstacles in the cell, we will also classify the lowest point in the cell as an obstacle
        bool flag_not_overhanging_obstacle_found = false;

        // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
        std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
            + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];

        // and iterate over this vector
        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
            point_iterator != index_iterator->end(); ++point_iterator)
        {
          bool observation_z_is_below_prediction = false;
          float prediction_std_dev = -1.0;
          float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame,
                                                           pcl_cloud_ptr->points[*point_iterator], prediction_std_dev,
                                                           observation_z_is_below_prediction);
          if (std::isnan(mahalanobis_distance))
          {
            std::cout << "posterior pred sigma = " << prediction_std_dev << "    uncertainty = "
                << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
                << mahalanobis_distance << std::endl;

            std::getchar();
          }

          if (mahalanobis_distance < filtering_configuration.mahalanobis_threshold)
          {
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = pcl_cloud_ptr->points[*point_iterator].z;
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
                point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
                point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;  // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;

            float score = 0.99 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold);
            if (score < 0.0)
              score = 0.0;
            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
            // data_c[2] is reserved for Car probability; as we do not estimate it in this algorithm, we leave this value untouched,
            // so if other segmenters are working out there, we do not erase their work!

            // data_c[3] is reserved to store the original index, so we do not want to change it!
          }
          else
          {
            //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;

            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = point_in_sensor_frame.z;
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
                point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
                point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

            //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
            if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
                < filtering_configuration.robot_height)
            {
              // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
              pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
              pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
              pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;

              pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;

              flag_not_overhanging_obstacle_found = true;
            }
            else
            {
              //std::cout << "Overhanging obstacle detected!" << std::endl;
              // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
              pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
              pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
              pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;

              pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
                  + 0.99;
              // data_c[2] is reserved for Car probability; as we do not estimate it in this algorithm, we leave this value untouched,
              // so if other segmenters are working out there, we do not erase their work!

              // data_c[3] is reserved to store the original index, so we do not want to change it!
            }
          }
        }
      }
      else // If the lowest point in cell is already an obstacle, the remaining points are also obstacles
      {
        //std::cout << "Labeling all points in cell as obstacle!" << std::endl;
        // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
        std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
            + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];

        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
            point_iterator != index_iterator->end(); ++point_iterator)
        {
          // Intensity is already in the pointcloud so we do not copy data_n[0] (in fact this value is not present in elevation cloud)
          float z_ground = i->z;
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = z_ground;
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
              point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
              point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

          //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
          if ((pcl_cloud_ptr->points[*point_iterator].z - z_ground) < filtering_configuration.robot_height)
          {
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;
          }
          else
          {
            //std::cout << "Overhanging obstacle detected!" << std::endl;
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE + 0.99;
            // data_c[2] is reserved for the Car probability; we do not estimate it in this algorithm, so we
            // leave it untouched and any other segmenter running alongside keeps its output intact.

            // data_c[3] is reserved to store the original index, so we do not want to change it!
          }
        }
      }
    }
    else // if we don't have enough information to try to predict the ground level at this point
    {
      std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
          + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];

      if (filtering_configuration.classify_not_labeled_points_as_obstacles)
      {
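        // No ground prediction is available here, so we fall back to simple height thresholds relative to the
        // lowest point in the cell: ground below ground_threshold_in_not_analyzed_areas, obstacle below
        // robot_height, and overhanging obstacle above it.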
        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
            point_iterator != index_iterator->end(); ++point_iterator)
        {
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = point_in_sensor_frame.z;
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
              point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
              point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

          //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
          if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
              < filtering_configuration.ground_threshold_in_not_analyzed_areas)
          {
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;  // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;

            float score = 0.01; // low confidence, since these points could not be analysed with the ground model
            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
          }
          else if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
              < filtering_configuration.robot_height)
          {
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;
          }
          else
          {
            //std::cout << "Overhanging obstacle detected!" << std::endl;
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE + 0.99;
            // data_c[2] is reserved for the Car probability; we do not estimate it in this algorithm, so we
            // leave it untouched and any other segmenter running alongside keeps its output intact.

            // data_c[3] is reserved to store the original index, so we do not want to change it!
          }
        }
      }
    }
  }
  //std::cout << "Labeling concluded!" << std::endl;
}
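
// Labels the full-resolution pointcloud using the ground model: for every elevation-cloud cell, the K nearest
// ground references are queried, gated by Mahalanobis distance, and the best one (lowest prediction standard
// deviation, preferring references that predict ground) is used to classify the cell's points as ground,
// obstacle or overhanging obstacle through the correspondence_indexes mapping.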

void CKf_Based_Terrain_Analysis::labelPointcloudUsingGroundModel(
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud_ptr,
    const std::vector<std::vector<int> > &correspondence_indexes,
    const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr,
    const std::vector<std::vector<int>> &edges, pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr)
{
  //if (filtering_configuration.measure_performance)
  //CFunctionMonitor performance_monitor("labelPointcloudUsingGroundModel", performance_supervisor_ptr_);
  //std::cout << "Labeling points in the original pointcloud using the posterior and the ROI indexes vectors..."
  //    << std::endl;
  // The ground reference cloud does not change during labelling, so the kd-tree used for the
  // K nearest neighbor searches is built only once, outside the loop
  pcl::KdTreeFLANN < pcl::PointXYZRGBNormal > kdtree;
  kdtree.setInputCloud(ground_reference_cloud_ptr);
  int K = filtering_configuration.number_of_references_used_for_labelling;

  for (pcl::PointCloud<pcl::PointXYZRGBNormal>::iterator i = elevation_cloud_ptr->begin();
      i != elevation_cloud_ptr->end(); ++i)
  {
    pcl::PointXYZRGBNormal point_in_sensor_frame = *i;

    // 0) K nearest neighbor search
    std::vector<int> pointIdxKNNSearch(K);
    std::vector<float> pointKNNSquaredDistance(K);

    int num_of_references_found = kdtree.nearestKSearch(point_in_sensor_frame, K, pointIdxKNNSearch,
                                                        pointKNNSquaredDistance);
    int best_reference_index = -1;
    int best_reference_index_predicting_ground = -1;
    int best_reference_index_predicting_obstacle = -1;

    float min_ground_pred_std_dev = 10000.0;
    float min_obstacle_pred_std_dev = 10000.0;
    float best_pred_std_dev = 10000.0;

    for (int reference_iterator = 0; reference_iterator < num_of_references_found; ++reference_iterator)
    {
      pcl::PointXYZRGBNormal reference_in_sensor_frame =
          ground_reference_cloud_ptr->points[pointIdxKNNSearch[reference_iterator]];

      if (!filtering_configuration.discard_not_connected_references_for_labelling
          || (reference_in_sensor_frame.data_n[GRND_REF_DATA_N_2_VERTEX_CLASS] == VERTEX_CONNECTED_TO_ROOT
              && edges[reference_in_sensor_frame.data_n[GRND_REF_DATA_N_3_ORIGINAL_INDEX]].size() > 0))
      {
        bool observation_z_is_below_prediction = false;
        float prediction_std_dev = -1.0;
        float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame, point_in_sensor_frame,
                                                         prediction_std_dev, observation_z_is_below_prediction);

        // if we have enough confidence to label something
        if (prediction_std_dev < filtering_configuration.max_pred_std_dev_for_labelling)
        {
          // if we think the point could be ground
          if (mahalanobis_distance < filtering_configuration.mahalanobis_threshold)
          {
            if (prediction_std_dev < min_ground_pred_std_dev)
            {
              min_ground_pred_std_dev = prediction_std_dev;
              best_reference_index_predicting_ground = reference_iterator;
            }
          }
          else
          {
            if (prediction_std_dev < min_obstacle_pred_std_dev)
            {
              min_obstacle_pred_std_dev = prediction_std_dev;
              best_reference_index_predicting_obstacle = reference_iterator;
            }
          }
        }
      }
    }
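    // Pick the best reference among the K candidates: prefer the ground-predicting reference with the lowest
    // prediction standard deviation; if none predicts ground, fall back to the best obstacle-predicting one.
    // If no reference passed the confidence gate, the point cannot be predicted with the model.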

    bool elevation_cloud_point_is_ground = false;
    bool elevation_cloud_point_is_not_predictable_using_the_model = false;
    if (best_reference_index_predicting_ground != -1)
    {
      best_reference_index = best_reference_index_predicting_ground;
      best_pred_std_dev = min_ground_pred_std_dev;
      elevation_cloud_point_is_ground = true;
    }
    else if (best_reference_index_predicting_obstacle != -1)
    {
      best_reference_index = best_reference_index_predicting_obstacle;
      best_pred_std_dev = min_obstacle_pred_std_dev;
      elevation_cloud_point_is_ground = false;
    }
    else
    {
      elevation_cloud_point_is_not_predictable_using_the_model = true;
    }

    if (!elevation_cloud_point_is_not_predictable_using_the_model)
    {
      pcl::PointXYZRGBNormal reference_in_sensor_frame =
          ground_reference_cloud_ptr->points[pointIdxKNNSearch[best_reference_index]];
      if (elevation_cloud_point_is_ground) //&& sqrt(reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]) < 0.06) //TODO!!!
      {
        //std::cout << "std_dev_z = " << sqrt(reference_in_sensor_frame.data_c[GRND_REF_DATA_C_3_Z_VARIANCE]) << std::endl;
        // We classify the elevation cloud point as ground
        i->data_n[DATA_N_1_Z_GROUND] = i->z;

        // set the class as ground
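        // (the integer part of data_c[DATA_C_1_ID_CLASS] encodes the class ID and the fractional part
        // carries the labelling confidence)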
        float score = 0.99 - (best_pred_std_dev / filtering_configuration.max_pred_std_dev_for_labelling);
        if (score < 0.0)
          score = 0.0;
        i->data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;

        // colour the point properly
        i->r = R_CLASS_GROUND;
        i->g = G_CLASS_GROUND;
        i->b = B_CLASS_GROUND;

        // If the lowest point is ground we still need to check the rest of the points in the cell one by one;
        // if we find a non-overhanging obstacle in the cell, the lowest point will also be re-classified as obstacle
        bool flag_not_overhanging_obstacle_found = false;

        // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
        std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
            + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];

        // and iterate over this vector
        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
            point_iterator != index_iterator->end(); ++point_iterator)
        {
          bool observation_z_is_below_prediction = false;
          float prediction_std_dev = -1.0;
          float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame,
                                                           pcl_cloud_ptr->points[*point_iterator], prediction_std_dev,
                                                           observation_z_is_below_prediction);
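          // Debug trap: a NaN Mahalanobis distance means the prediction is degenerate, so we dump the values
          // and pause execution (std::getchar()) to make the problem visible during development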
          if (std::isnan(mahalanobis_distance))
          {
            std::cout << "posterior pred sigma = " << prediction_std_dev << "    uncertainty = "
                << prediction_std_dev * filtering_configuration.mahalanobis_threshold << "    mahalanobis distance = "
                << mahalanobis_distance << std::endl;

            std::getchar();
          }
          if (mahalanobis_distance < filtering_configuration.mahalanobis_threshold)
          {
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = pcl_cloud_ptr->points[*point_iterator].z;
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
                point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
                point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;  // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;

            float score = 0.99 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold);
            if (score < 0.0)
              score = 0.0;
            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
            // data_c[2] is reserved for the Car probability; we do not estimate it in this algorithm, so we
            // leave it untouched and any other segmenter running alongside keeps its output intact.

            // data_c[3] is reserved to store the original index, so we do not want to change it!
          }
          else
          {
            //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;

            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = point_in_sensor_frame.z;
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
                point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
            pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
                point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

            //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
            if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
                < filtering_configuration.robot_height)
            {
              // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
              pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
              pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
              pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;

              pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;

              flag_not_overhanging_obstacle_found = true;
            }
            else
            {
              //std::cout << "Overhanging obstacle detected!" << std::endl;
              // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
              pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
              pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
              pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;

              pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
                  + 0.99;
              // data_c[2] is reserved for the Car probability; we do not estimate it in this algorithm, so we
              // leave it untouched and any other segmenter running alongside keeps its output intact.

              // data_c[3] is reserved to store the original index, so we do not want to change it!
            }
          }
        }
        if (flag_not_overhanging_obstacle_found)
        {
          i->r = R_CLASS_OBSTACLE;
          i->g = G_CLASS_OBSTACLE;
          i->b = B_CLASS_OBSTACLE;
        }
      }
      else // If the lowest point in cell is already an obstacle, the remaining points are also obstacles
      {
        // if it is an obstacle point we use the ground reference z prediction as z ground
        float z_ground = predictZ(reference_in_sensor_frame, point_in_sensor_frame.x, point_in_sensor_frame.y);

        // If the point from the elevation cloud is an obstacle below the prediction
        // we assume that the ground is at the level of the elevation cloud point
        if (point_in_sensor_frame.z < z_ground)
          z_ground = point_in_sensor_frame.z;

        i->data_n[DATA_N_1_Z_GROUND] = z_ground;

        // set the class to OBSTACLE
        i->data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;

        // and colour the point
        i->r = R_CLASS_OBSTACLE;
        i->g = G_CLASS_OBSTACLE;
        i->b = B_CLASS_OBSTACLE;

        //std::cout << "Labeling all points in cell as obstacle!" << std::endl;
        // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
        std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
            + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];

        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
            point_iterator != index_iterator->end(); ++point_iterator)
        {
          // Intensity is already in the pointcloud, so we do not copy data_n[0] (this value is not present in the elevation cloud)
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = z_ground;
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
              point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
              point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

          //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
          if ((pcl_cloud_ptr->points[*point_iterator].z - z_ground) < filtering_configuration.robot_height)
          {
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;
          }
          else
          {
            //std::cout << "Overhanging obstacle detected!" << std::endl;
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE + 0.99;
            // data_c[2] is reserved for the Car probability; we do not estimate it in this algorithm, so we
            // leave it untouched and any other segmenter running alongside keeps its output intact.

            // data_c[3] is reserved to store the original index, so we do not want to change it!
          }
        }
      }
    }
    else // if we don't have enough information to try to predict the ground level at this point
    {
      std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin()
          + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];

      if (filtering_configuration.classify_not_labeled_points_as_obstacles)
      {
        for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
            point_iterator != index_iterator->end(); ++point_iterator)
        {
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_Z_GROUND] = point_in_sensor_frame.z;
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
              point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
          pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
              point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];

          //std::cout << "obstacle z distance from ground = " << pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z << std::endl;
          if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
              < filtering_configuration.ground_threshold_in_not_analyzed_areas)
          {
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;  // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;

            float score = 0.01; // low confidence, since these points could not be analysed with the ground model
            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
          }
          else if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
              < filtering_configuration.robot_height)
          {
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.99;
          }
          else
          {
            //std::cout << "Overhanging obstacle detected!" << std::endl;
            // data_c[0] is reserved to cast the RGB values into a float (PCL convention) so we do not use it
            pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE; // We use instead the r g b channels directly
            pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
            pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;

            pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE + 0.99;
            // data_c[2] is reserved for the Car probability; we do not estimate it in this algorithm, so we
            // leave it untouched and any other segmenter running alongside keeps its output intact.

            // data_c[3] is reserved to store the original index, so we do not want to change it!
          }
        }
      }
    }
  }
  //std::cout << "Labeling concluded!" << std::endl;
}
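
// Makes the reference-graph adjacency symmetric: whenever vertex A lists B among its edges, B is also made
// to list A (the edges container is modified in place), so that the connectivity checks performed during
// labelling can rely on either end of an edge.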

void CKf_Based_Terrain_Analysis::ensureThatConnectionsAreReflectedInBothEnds(
    const pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr, std::vector<std::vector<int>> &edges)
{
  for (int vertex_iterator = 0; vertex_iterator < ground_reference_cloud_ptr->points.size(); ++vertex_iterator)
  {
    for (int connected_vertices_iterator = 0; connected_vertices_iterator < edges[vertex_iterator].size();
        ++connected_vertices_iterator)
    {
      int connected_vertex = edges[vertex_iterator][connected_vertices_iterator];
      if (std::find(edges[connected_vertex].begin(), edges[connected_vertex].end(), vertex_iterator)
          == edges[connected_vertex].end())
        edges[connected_vertex].push_back(vertex_iterator);
    }
  }
}
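
// Top-level entry point of the terrain analysis: builds the elevation cloud, grows the ground-reference graph
// from the root vertex at the sensor origin, labels the full pointcloud with one of the two labelling routines
// and optionally produces a dense ground reconstruction for visualization.
//
// Minimal usage sketch of the call (a sketch only: lidar_cfg, filter_cfg and input_cloud are placeholder names,
// not part of this library, and the containers are assumed to be prepared as the caller normally does):
//
//   CKf_Based_Terrain_Analysis analyzer;
//   pcl::PointCloud<pcl::PointXYZRGBNormal> input_cloud, references, dense;
//   std::vector<std::vector<int>> edges;
//   std::vector<std::vector<kf_based_terrain_analysis_lib::Edge>> discarded_edges;
//   analyzer.groundSegmentation(lidar_cfg, filter_cfg, input_cloud, references, edges, discarded_edges, dense);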

void CKf_Based_Terrain_Analysis::groundSegmentation(
    const kf_based_terrain_analysis_lib::SensorConfiguration lidar_configuration,
    const kf_based_terrain_analysis_lib::FilteringConfiguration filtering_configuration,
    pcl::PointCloud<pcl::PointXYZRGBNormal> &pcl_cloud,
    pcl::PointCloud<pcl::PointXYZRGBNormal> &ground_references_pcl_cloud, std::vector<std::vector<int>> &edges,
    std::vector<std::vector<kf_based_terrain_analysis_lib::Edge>> &discarded_edges,
    pcl::PointCloud<pcl::PointXYZRGBNormal> &ground_dense_reconstruction_pcl_cloud)
{
//  if (filtering_configuration.measure_performance)
//    CFunctionMonitor performance_monitor("groundSegmentation", performance_supervisor_ptr_);

  //std::cout << "Starting ground segmentation!!" << std::endl;
  //std::cout << "Initializing pointcloud: Number of points in pcl_cloud = " << pcl_cloud.points.size() << std::endl;

  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
  *pcl_cloud_ptr = pcl_cloud;

// We create a pointcloud that will contain only trusted, subsampled ground points; these points will be used
// as references to fit local planes and label the nearby points
  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr ground_reference_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGBNormal>);

  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr elevation_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
  std::vector < std::vector<int> > correspondence_indexes;
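  // The elevation cloud keeps, per XY grid cell, the lowest point of the cell; correspondence_indexes stores,
  // for each elevation point, the indices of all full-resolution points falling in the same cell, so that
  // labels can later be propagated back to the complete pointcloud.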

  generateElevationCloud(filtering_configuration, pcl_cloud_ptr, elevation_cloud_ptr, correspondence_indexes);
  //std::cout << "Elevation cloud generated! Npoints = " << elevation_cloud_ptr->points.size() << std::endl;

  generateRootVertex(lidar_configuration, filtering_configuration, ground_reference_cloud_ptr);
  //std::cout << "Root vertex generated! Num of points = " << ground_reference_cloud_ptr->points.size() << std::endl
  //    << "Number of edges = " << edges[0].size() << std::endl;

//getchar();

// We run the algorithm: starting from the reference at the sensor origin, it keeps looking for new trustworthy
// local ground references (which grows ground_reference_cloud_ptr->points.size()) while evaluating local planes
// around the references already found. The loop below ends once every reference has been evaluated and no new
// ones appear.

  int reference_index = 0;
  while (reference_index < ground_reference_cloud_ptr->points.size())
  {
//    std::cout << "Evaluating reference num " << reference_index << ":" << std::endl << "x = "
//        << ground_reference_cloud_ptr->points[reference_index].x << "    y = "
//        << ground_reference_cloud_ptr->points[reference_index].y << "    z = "
//        << ground_reference_cloud_ptr->points[reference_index].z << std::endl;
////
//    std::cout << "r = " << (int)ground_reference_cloud_ptr->points[reference_index].r << "    g = "
//        << (int)ground_reference_cloud_ptr->points[reference_index].g << "    b = "
//        << (int)ground_reference_cloud_ptr->points[reference_index].b << std::endl;

    labelGroundPoints(lidar_configuration, filtering_configuration, correspondence_indexes, reference_index++,
                      ground_reference_cloud_ptr, elevation_cloud_ptr, pcl_cloud_ptr, edges, discarded_edges);

//    std::cout << "reference_index = " << reference_index << std::endl;
//    std::cout << "number of references = " << ground_reference_cloud_ptr->points.size() << std::endl;
  }
  //std::cout << "Total number of references = " << ground_reference_cloud_ptr->points.size() << std::endl;
//getchar();

  // We can discard ground references that are not connected to any other; to test this, we need
  // to be sure that connections are reflected at both ends
  ensureThatConnectionsAreReflectedInBothEnds(ground_reference_cloud_ptr, edges);
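
  // With number_of_references_used_for_labelling > 0 each elevation point is labelled against its K nearest
  // ground references (labelPointcloudUsingGroundModel); otherwise the faster routine
  // fastLabelPointcloudUsingGroundModel is used.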

  if (filtering_configuration.number_of_references_used_for_labelling > 0)
  {
    labelPointcloudUsingGroundModel(filtering_configuration, elevation_cloud_ptr, correspondence_indexes,
                                    ground_reference_cloud_ptr, edges, pcl_cloud_ptr);
  }
  else
  {
    //std::cout << "Using fast labelling function!" << std::endl;
    fastLabelPointcloudUsingGroundModel(filtering_configuration, elevation_cloud_ptr, correspondence_indexes,
                                        ground_reference_cloud_ptr, edges, pcl_cloud_ptr);
  }
  // The dense reconstruction serves as a debugging and visualization tool: it generates a regular XY grid with
  // Z values predicted using the closest reference point

  if (filtering_configuration.show_dense_reconstruction)
  {
    //std::cout << "Ready to create dense reconstruction" << std::endl;
    pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_dense_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
    createDenseGroundCloud(filtering_configuration, ground_reference_cloud_ptr, lidar_configuration,
                           pcl_dense_cloud_ptr);
    ground_dense_reconstruction_pcl_cloud.clear();
    ground_dense_reconstruction_pcl_cloud = *pcl_dense_cloud_ptr;
  }
  else
  {
    ground_dense_reconstruction_pcl_cloud.clear();
    ground_dense_reconstruction_pcl_cloud = *elevation_cloud_ptr;
  }

  ground_references_pcl_cloud.clear();
  ground_references_pcl_cloud = *ground_reference_cloud_ptr;

  pcl_cloud.clear();
  pcl_cloud = *pcl_cloud_ptr;

  if (filtering_configuration.measure_performance)
    performance_supervisor_ptr_->printStatistics();
}