Commit dad3eae5 authored by Iván del Pino

debugged!

parent e179f81f
@@ -22,7 +22,7 @@ const int DATA_N_3_Z_VARIANCE = 3;
const int DATA_C_0_RGB_CAST_INTO_FLOAT = 0;
const int DATA_C_1_ID_CLASS = 1;
const int DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL = 2;
const int DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL = 2; // only used in elevation cloud
const int DATA_C_3_ORIGINAL_INDEX = 3;
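For context, these DATA_C_* indices address the four-float data_c array of pcl::PointXYZRGBNormal. In the PCL point definition that array overlays the packed RGB value, the curvature field and two padding floats, and only the first two correspond to fields registered for serialization, which would explain the c2/c3 corruption over ROS topics mentioned further down in this diff. A minimal sketch of the convention, under that assumption:

#include <pcl/point_types.h>

pcl::PointXYZRGBNormal p;
p.data_c[DATA_C_0_RGB_CAST_INTO_FLOAT] = 0.0f;                       // aliases p.rgb (colour packed into a float)
p.data_c[DATA_C_1_ID_CLASS] = 0.0f;                                  // aliases p.curvature, reused here for class id + score
p.data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL] = -1.0f;  // padding slot, only reliable in-process
p.data_c[DATA_C_3_ORIGINAL_INDEX] = -1.0f;                           // padding slot, only reliable in-process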
@@ -42,12 +42,12 @@ const int INDEX_UNKNOWN = -1;
const float VERTEX_CONNECTED_TO_ROOT = 0.0;
const float VERTEX_NOT_CONNECTED_TO_ROOT = 100.0;
const float UNKNOWN_MEAN = -1000000.0;
const float UNKNOWN_MEAN = -1000.0;
const float UNKOWN_VARIANCE = -1.0;
const float UNKNOWN_INTENSITY = -1.0;
const float UNKNOWN_PROB = -1.0;
const float UNKNOWN_Z_GROUND = -1000000.0;
const float UNKNOWN_Z_GROUND = -1000.0;
const int KITTI_NUMBER_OF_PRECISION_RECALL_INTERVALES = 40;
@@ -755,7 +755,7 @@ void CKf_Based_Terrain_Analysis::fastGenerateElevationCloud(
for (int i = 0; i < elevation_cell_vector.size(); ++i)
{
pcl::PointXYZRGBNormal elevation_cloud_point = elevation_cell_vector[i].lowest_point_in_cell;
if (elevation_cloud_point.data_c[DATA_C_1_ID_CLASS] == OUTLIER)
if ((int)std::floor(elevation_cloud_point.data_c[DATA_C_1_ID_CLASS]) == OUTLIER)
{
elevation_cloud_point.r = R_CLASS_OUTLIER;
elevation_cloud_point.g = G_CLASS_OUTLIER;
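The change above replaces the raw float comparison with a floor-and-cast: data_c[DATA_C_1_ID_CLASS] carries the class id plus a fractional score, so comparing the float directly against an integer label only matches when the score happens to be exactly zero. An illustrative decode of the packed value (not code from the source):

#include <cmath> // std::floor

float packed = elevation_cloud_point.data_c[DATA_C_1_ID_CLASS];
int class_id = (int)std::floor(packed);    // integer part: class label
float score = packed - (float)class_id;    // fractional part: confidence score
bool is_outlier = (class_id == OUTLIER);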
@@ -767,7 +767,7 @@ void CKf_Based_Terrain_Analysis::fastGenerateElevationCloud(
elevation_cloud_point.g = G_CLASS_UNKNOWN;
elevation_cloud_point.b = B_CLASS_UNKNOWN;
}
// TODO: check this!
float sum = std::accumulate(elevation_cell_vector[i].z_coordinates.begin(),
elevation_cell_vector[i].z_coordinates.end(), 0.0);
float z_mean = sum / (float)elevation_cell_vector[i].z_coordinates.size();
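For reference, the mean computed above can be extended to the per-cell variance stored in DATA_N_3_Z_VARIANCE; only the mean is visible in this hunk, so the variance step below is an assumption about what the surrounding code does (it also assumes z_coordinates is a std::vector<float>):

#include <numeric> // std::accumulate
#include <vector>

const std::vector<float> &zs = elevation_cell_vector[i].z_coordinates;
float sum = std::accumulate(zs.begin(), zs.end(), 0.0f);
float z_mean = sum / (float)zs.size();

float sq_sum = 0.0f;
for (float z : zs)
  sq_sum += (z - z_mean) * (z - z_mean);
float z_var = sq_sum / (float)zs.size(); // population variance; divide by size() - 1 for the sample variance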
@@ -986,11 +986,14 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
{
pcl::PointXYZRGBNormal point_in_sensor_frame = *i;
// We pick the reference that made the best prediction for this point in the elevation cloud
int reference_index = (int)std::floor(
point_in_sensor_frame.data_c[DATA_C_2_INDEX_OF_GROUND_REF_THAT_MADE_THE_LABEL]);
if (reference_index != INDEX_UNKNOWN)
{
// If this point in the elevation cloud has been labeled by a reference, we extract the reference to
// check the Mahalanobis distance and make the actual labeling of the original point cloud
pcl::PointXYZRGBNormal reference_in_sensor_frame = ground_reference_cloud_ptr->points[reference_index];
// 1) We point to the vector of indices related to the elevation_cloud point under evaluation
@@ -1001,35 +1004,52 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
point_iterator != index_iterator->end(); ++point_iterator)
{
float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame,
pcl_cloud_ptr->points[*point_iterator]);
// if (std::isnan(mahalanobis_distance))
// {
// std::cout << "posterior pred sigma = " << prediction_std_dev << " uncertainty = "
// << prediction_std_dev * filtering_configuration.mahalanobis_threshold << " mahalanobis distance = "
// << mahalanobis_distance << std::endl;
//
// std::getchar();
// }
// We now copy the data that is common to every point in the vector (because these are statistics extracted
// from the points in the vector)
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_2_MEAN_INTENSITY] =
point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
// in data_n[0] we have the point intensity, so nothing to change
// in data_n[1] we will store the intensity variance in the elevation cloud cell
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_INTENSITY_VARIANCE] =
point_in_sensor_frame.data_n[DATA_N_1_INTENSITY_VARIANCE];
// in data_n[2] we store the mean z value in the elevation cell
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] = point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
// in data_n[3] we store the z variance in the elevation cell
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
// TODO: we copy the mean intensity value into the channel reserved for the color; we do this because
// we have detected a problem with pcl conversions: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we will investigate this problem in the
// future (hopefully)
// pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_2_MEAN_INTENSITY] =
// point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_0_RGB_CAST_INTO_FLOAT] =
point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
// std::cout << "Intensity in elevation cloud: mean = " << point_in_sensor_frame.data_n[DATA_N_0_INTENSITY]
// << " var = "
// << point_in_sensor_frame.data_n[DATA_N_1_INTENSITY_VARIANCE]
// << std::endl;
//
// std::cout << "Intensity in pcl cloud: point = "
// << pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_0_INTENSITY] << " mean = "
// << pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_2_MEAN_INTENSITY] << " var = "
// << pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_INTENSITY_VARIANCE] << std::endl;
// std::cout << "Intensity in pcl cloud: point = " << pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_0_INTENSITY]
// << " mean = " << pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_0_RGB_CAST_INTO_FLOAT]
// << " var = " << pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_INTENSITY_VARIANCE] << std::endl;
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] = point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
// once we have all the common values stored, it only remains to compute the class and score, which will be
// packed into the data_c[1] field
float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame,
pcl_cloud_ptr->points[*point_iterator]);
// if (std::isnan(mahalanobis_distance))
// {
// std::cout << "posterior pred sigma = " << prediction_std_dev << " uncertainty = "
// << prediction_std_dev * filtering_configuration.mahalanobis_threshold << " mahalanobis distance = "
// << mahalanobis_distance << std::endl;
//
// std::getchar();
// }
float score = 0.995 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold); // we use 0.995 instead of 1.0 to avoid overflowing the class field
if (score < 0.0)
@@ -1039,9 +1059,12 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
if (score > filtering_configuration.score_threshold)
{
pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;
pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
// TODO: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we are temporarily using the c0 channel,
// which is the one storing RGB values packed as a float, so colour is disabled at the moment
// pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND;
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
}
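The 0.995 cap on the score matters because the class id and the score share a single float: with a cap of 1.0, a near-zero Mahalanobis distance would push the packed value into the next integer label, and the std::floor decode used elsewhere would recover the wrong class. A small illustration of that invariant (values chosen just for the example):

float score_ok = 0.995f;   // maximum score the code can actually produce
float score_bad = 1.0f;    // hypothetical cap of 1.0

float packed_ok = (float)CLASS_GROUND + score_ok;    // std::floor() still recovers CLASS_GROUND
float packed_bad = (float)CLASS_GROUND + score_bad;  // std::floor() would recover CLASS_GROUND + 1: wrong class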
@@ -1050,11 +1073,17 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
< filtering_configuration.robot_height)
{
pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + score;
// TODO: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we are temporarily using the c0 channel,
// which is the one storing RGB values packed as a float, so colour is disabled at the moment
// pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;
// We don't use the score with obstacle points: if they are considered obstacles, we believe that their
// probability of being ground is zero; the class label is used to distinguish between general obstacles
// and overhanging obstacles
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + 0.0;
// if (flag_reduce_mahalanobis_thres_if_an_obstacle_is_found)
// flag_obstacle_found = true;
@@ -1062,12 +1091,16 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
else
{
//std::cout << "Overhanging obstacle detected!" << std::endl;
pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
// TODO: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we are temporarily using the c0 channel,
// which is the one storing RGB values packed as a float, so colour is disabled at the moment
// pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
+ score;
+ 0.0;
}
}
}
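Taken together, the last two hunks implement a three-way decision per labelled point: points whose score clears the threshold become ground; points that rise less than the robot height above the elevation cell's lowest point become regular obstacles; anything higher becomes an overhanging obstacle. Obstacles keep a zero fractional score because their probability of being ground is taken to be zero. A condensed, illustrative view of that decision (variable names follow the surrounding code; the layout is a sketch, not the source):

float dz = pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z;

if (score > filtering_configuration.score_threshold)
  pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
else if (dz < filtering_configuration.robot_height)
  pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE;              // + 0.0
else
  pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE;  // + 0.0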
@@ -1082,41 +1115,64 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
point_iterator != index_iterator->end(); ++point_iterator)
{
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_2_MEAN_INTENSITY] =
point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
// We now copy the data that is common to every point in the vector (because these are statistics extracted
// from the points in the vector)
// in data_n[0] we have the point intensity, so nothing to change
// in data_n[1] we will store the intensity variance in the elevation cloud cell
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_INTENSITY_VARIANCE] =
point_in_sensor_frame.data_n[DATA_N_1_INTENSITY_VARIANCE];
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
// in data_n[2] we store the mean z value in the elevation cell
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] = point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
// in data_n[3] we store the z variance in the elevation cell
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
// TODO: we copy the mean intensity value into the channel reserved for the color; we do this because
// we have detected a problem with pcl conversions: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we will investigate this problem in the
// future (hopefully)
// pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_2_MEAN_INTENSITY] =
// point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_0_RGB_CAST_INTO_FLOAT] =
point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
< filtering_configuration.ground_threshold_in_not_analyzed_areas)
{
pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND; // We use instead the r g b channels directly
pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
float score = 0.5; // we don't have much confidence in these points, because they are not analyzed
// TODO: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we are temporarily using the c0 channel,
// which is the one storing RGB values packed as a float, so colour is disabled at the moment
// pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_GROUND; // We use instead the r g b channels directly
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
float score = 0.0; // we don't have much confidence in these points, because they are not analyzed
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
}
else if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
< filtering_configuration.robot_height)
{
pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;
// TODO: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we are temporarily using the c0 channel,
// which is the one storing RGB values packed as a float, so colour is disabled at the moment
// pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OBSTACLE;
float score = 0.0;
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OBSTACLE + score;
}
else
{
pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
// TODO: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid using them; we are temporarily using the c0 channel,
// which is the one storing RGB values packed as a float, so colour is disabled at the moment
// pcl_cloud_ptr->points[*point_iterator].r = R_CLASS_OVERHANGING_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
float score = 0.0;
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
@@ -1159,7 +1215,7 @@ void CKf_Based_Terrain_Analysis::groundSegmentation(
//std::cout << "Starting ground segmentation!!" << std::endl;
//std::cout << "Initializing pointcloud: Number of points in pcl_cloud = " << pcl_cloud.points.size() << std::endl;
// We copy the input in a pointer fashion (maybe it could be optimized)
// We copy the input in a pointer fashion (TODO: maybe it could be optimized)
pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
*pcl_cloud_ptr = pcl_cloud;
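Regarding the optimization TODO on this copy: the assignment duplicates every point. If the caller no longer needed pcl_cloud afterwards (an assumption this diff does not establish, and it would require the parameter to be non-const), the copy could be avoided by swapping the clouds instead; this is only a sketch of that alternative:

pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr pcl_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
// *pcl_cloud_ptr = pcl_cloud;   // current approach: deep copy of all points
pcl_cloud_ptr->swap(pcl_cloud);  // alternative sketch: constant-time swap, but it empties the caller's cloud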
@@ -1240,6 +1296,7 @@ void CKf_Based_Terrain_Analysis::groundSegmentation(
ground_dense_reconstruction_pcl_cloud = *elevation_cloud_ptr;
}
// (TODO: maybe it could be optimized)
ground_references_pcl_cloud.clear();
ground_references_pcl_cloud = *ground_reference_cloud_ptr;