Commit 2a481016 authored by Iván del Pino

working in neural network integration

parent d7d10b9e
@@ -104,7 +104,7 @@ float CKf_Based_Terrain_Analysis::computeLikelihood(const pcl::PointXYZRGBNormal
* (exp(-1.0 * ((point_in_sensor_frame.z - z_pred) * (point_in_sensor_frame.z - z_pred)) / (2 * var_z_pred)));
//DEBUG!float likelihood = ((point_in_sensor_frame.z - z_pred) * (point_in_sensor_frame.z - z_pred)) / (2 * var_z_pred);
if (false) //point_in_sensor_frame.z < -1.75)
{
std::cout << "delta_x = " << delta_x << std::endl;
std::cout << "delta_y = " << delta_y << std::endl;
@@ -996,7 +996,18 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
// check the Mahalanobis distance and make the actual labeling of the original point cloud
pcl::PointXYZRGBNormal reference_in_sensor_frame = ground_reference_cloud_ptr->points[reference_index];
- // 1) We point to the vector of indices related to the elevation_cloud point under evaluation
+ // 1) We create a vectors to store the features to be used with the Shallow Neural Network for
+ // ground classification
+ std::vector<int> features_indexes_in_pcl_cloud_ptr;
+ std::vector<double> squared_distances;
+ std::vector<double> incidence_angles;
+ std::vector<double> intensities;
+ std::vector<double> point_to_reference_squared_distances;
+ std::vector<double> prediction_errors;
+ std::vector<double> scores;
+ // 2) We point to the vector of indices related to the elevation_cloud point under evaluation
std::vector<std::vector<int>>::const_iterator index_iterator = correspondence_indexes.begin() + point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX];
@@ -1004,60 +1015,14 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
for (std::vector<int>::const_iterator point_iterator = index_iterator->begin();
point_iterator != index_iterator->end(); ++point_iterator)
{
- // We now copy the data that is common to every point in the vector (because are statistics extracted
- // from the points in the vector)
- // in data_n[0] we have the point intensity, so nothing to change
- // in data_n[1] we will store the intensity variance in the elevation cloud cell
- pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_INTENSITY_VARIANCE] =
- point_in_sensor_frame.data_n[DATA_N_1_INTENSITY_VARIANCE];
- // in data_n[2] we store the mean z value in the elevation cell
- pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] = point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
- // in data_n[3] we store the z variance in the elevation cell
- pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
- point_in_sensor_frame.data_n[DATA_N_3_Z_VARIANCE];
- // TODO: we copy the mean intensity value in the channel reserved for the color, we do this because
- // we have detected a problem with pcl conversions: when passing the point cloud through a ROS topic,
- // channels c2 and c3 get corrupted, so we avoid to use them, we will investigate this problem in the
- // future (hopefully)
- // pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_2_MEAN_INTENSITY] =
- // point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
- pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_0_RGB_CAST_INTO_FLOAT] =
- point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
- // std::cout << "Intensity in elevation cloud: mean = " << point_in_sensor_frame.data_n[DATA_N_0_INTENSITY]
- // << " var = "
- // << point_in_sensor_frame.data_n[DATA_N_1_INTENSITY_VARIANCE]
- // << std::endl;
- //
- // std::cout << "Intensity in pcl cloud: point = " << pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_0_INTENSITY]
- // << " mean = " << pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_0_RGB_CAST_INTO_FLOAT]
- // << " var = " << pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_1_INTENSITY_VARIANCE] << std::endl;
- // once we have all the common values stored, only rest to compute the class and score, that will be
- // packaged into the data_c[1] field
float mahalanobis_distance = mahalanobisDistance(reference_in_sensor_frame,
pcl_cloud_ptr->points[*point_iterator]);
- // if (std::isnan(mahalanobis_distance))
- // {
- // std::cout << "posterior pred sigma = " << prediction_std_dev << " uncertainty = "
- // << prediction_std_dev * filtering_configuration.mahalanobis_threshold << " mahalanobis distance = "
- // << mahalanobis_distance << std::endl;
- //
- // std::getchar();
- // }
float score = 0.995 - (mahalanobis_distance / filtering_configuration.mahalanobis_threshold); // we use 0.995 instead of 1.0 to avoid overflowing the class field
if (score < 0.0)
score = 0.0;
- //float score = computeLikelihood(reference_in_sensor_frame, pcl_cloud_ptr->points[*point_iterator]) / computeLikelihood(reference_in_sensor_frame, reference_in_sensor_frame);
- if (score > filtering_configuration.score_threshold)
+ if (score > filtering_configuration.score_threshold) // We only use the Neural Network to segment ground points into several classes
{
// TODO: when passing the point cloud through a ROS topic,
// channels c2 and c3 get corrupted, so we avoid to use them, we are temporarily the c0 channel
@@ -1066,6 +1031,27 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_GROUND;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_GROUND;
+ pcl::PointXYZRGBNormal point_to_evaluate = pcl_cloud_ptr->points[*point_iterator];
+ features_indexes_in_pcl_cloud_ptr.push_back(*point_iterator); // Index to modify the classification in a later step
+ squared_distances.push_back((double)(point_to_evaluate.x * point_to_evaluate.x + point_to_evaluate.y * point_to_evaluate.y + point_to_evaluate.z * point_to_evaluate.z));
+ incidence_angles.push_back(0.0); // TODO: Incidence angle w.r.t the plane defined by the ground reference
+ intensities.push_back(point_to_evaluate.data_n[DATA_N_0_INTENSITY]); // Point intensity
+ double delta_x = (point_to_evaluate.x - reference_in_sensor_frame.x);
+ double delta_y = (point_to_evaluate.y - reference_in_sensor_frame.y);
+ double delta_z = (point_to_evaluate.z - reference_in_sensor_frame.z);
+ point_to_reference_squared_distances.push_back(delta_x * delta_x + delta_y * delta_y + delta_z * delta_z);
+ prediction_errors.push_back((double)euclideanDistance(reference_in_sensor_frame, point_to_evaluate)); // Prediction Error
+ scores.push_back((double)score);
pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_GROUND + score;
}
else
@@ -1099,11 +1085,42 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
// pcl_cloud_ptr->points[*point_iterator].g = G_CLASS_OVERHANGING_OBSTACLE;
// pcl_cloud_ptr->points[*point_iterator].b = B_CLASS_OVERHANGING_OBSTACLE;
- pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE
- + 0.0;
+ pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_1_ID_CLASS] = (float)CLASS_OVERHANGING_OBSTACLE + 0.0;
}
}
}
+ double ratio_ground_points = features_container.size()
+ / correspondence_indexes[point_in_sensor_frame.data_c[DATA_C_3_ORIGINAL_INDEX]].size();
+ double mean_intensity_ground_points_in_cell = 0.0;
+ double std_intensity_ground_points_in_cell = 0.0;
+ double mean_prediction_error = 0.0;
+ double std_prediction_error = 0.0;
+ double mean_squared_error_of_predictions = 0.0;
+ double mean_of_scores = 0.0;
+ double std_of_scores = 0.0;
+ float sum = std::accumulate(elevation_cell_vector[i].z_coordinates.begin(),
+ elevation_cell_vector[i].z_coordinates.end(), 0.0);
+ float z_mean = sum / (float)elevation_cell_vector[i].z_coordinates.size();
+ float var = 0;
+ for (int n = 0; n < elevation_cell_vector[i].z_coordinates.size(); ++n)
+ var += ((elevation_cell_vector[i].z_coordinates[n] - z_mean)
+ * (elevation_cell_vector[i].z_coordinates[n] - z_mean));
+ var /= (float)elevation_cell_vector[i].z_coordinates.size();
+ Eigen::VectorXd features(14, 1);
+ features(06, 0) = ratio_ground_points;
+ features(07, 0) = mean_intensity_ground_points_in_cell;
+ features(08, 0) = std_intensity_ground_points_in_cell;
+ features(09, 0) = mean_prediction_error;
+ features(10, 0) = std_prediction_error;
+ features(11, 0) = mean_squared_error_of_predictions;
+ features(12, 0) = mean_of_scores;
+ features(13, 0) = std_of_scores;
}
else // if we don't have enough information to try to predict the ground level at this point
{
@@ -1124,7 +1141,8 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
point_in_sensor_frame.data_n[DATA_N_1_INTENSITY_VARIANCE];
// in data_n[2] we store the mean z value in the elevation cell
- pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] = point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
+ pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_2_Z_MEAN] =
+ point_in_sensor_frame.data_n[DATA_N_2_Z_MEAN];
// in data_n[3] we store the z variance in the elevation cell
pcl_cloud_ptr->points[*point_iterator].data_n[DATA_N_3_Z_VARIANCE] =
@@ -1136,8 +1154,8 @@ void CKf_Based_Terrain_Analysis::fastLabelPointcloudUsingGroundModel(
// future (hopefully)
// pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_2_MEAN_INTENSITY] =
// point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
- pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_0_RGB_CAST_INTO_FLOAT] =
- point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
+ // pcl_cloud_ptr->points[*point_iterator].data_c[DATA_C_0_RGB_CAST_INTO_FLOAT] =
+ // point_in_sensor_frame.data_n[DATA_N_0_INTENSITY];
if ((pcl_cloud_ptr->points[*point_iterator].z - point_in_sensor_frame.z)
< filtering_configuration.ground_threshold_in_not_analyzed_areas)
@@ -1239,7 +1257,7 @@ void CKf_Based_Terrain_Analysis::groundSegmentation(
fastGenerateElevationCloud(filtering_configuration, pcl_cloud_ptr, elevation_cloud_ptr, correspondence_indexes);
//std::cout << "Elevation cloud generated! Npoints = " << elevation_cloud_ptr->points.size() << std::endl;
- // This function juit create the first vertex in the graph, using the prior information in the lidar and filtering configuration
+ // This function just creates the first vertex in the graph, using the prior information in the lidar and filtering configuration
// structures.
generateRootVertex(lidar_configuration, filtering_configuration, ground_reference_cloud_ptr);
//std::cout << "Root vertex generated! Num of points = " << ground_reference_cloud_ptr->points.size() << std::endl;
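One of the accumulated features, the incidence angle, is still pushed as 0.0 with a TODO in this commit. A possible way to compute it is sketched below, assuming the ground reference point carries the local plane normal in its normal_x/normal_y/normal_z fields; the helper name and that assumption are not part of the committed code.

// Sketch only: one plausible way to fill the incidence_angles feature, assuming the
// ground reference carries the local plane normal in its normal_[xyz] fields.
#include <cmath>
#include <pcl/point_types.h>

double incidenceAngleWithGroundPlane(const pcl::PointXYZRGBNormal &reference_in_sensor_frame,
                                     const pcl::PointXYZRGBNormal &point_to_evaluate)
{
  // Ray direction from the sensor origin to the evaluated point.
  double ray_x = point_to_evaluate.x;
  double ray_y = point_to_evaluate.y;
  double ray_z = point_to_evaluate.z;
  double ray_norm = std::sqrt(ray_x * ray_x + ray_y * ray_y + ray_z * ray_z);

  // Local ground plane normal taken from the reference point.
  double n_x = reference_in_sensor_frame.normal_x;
  double n_y = reference_in_sensor_frame.normal_y;
  double n_z = reference_in_sensor_frame.normal_z;
  double n_norm = std::sqrt(n_x * n_x + n_y * n_y + n_z * n_z);

  if (ray_norm < 1e-9 || n_norm < 1e-9)
    return 0.0;

  double cos_angle = std::fabs(ray_x * n_x + ray_y * n_y + ray_z * n_z) / (ray_norm * n_norm);
  if (cos_angle > 1.0)
    cos_angle = 1.0;

  // Angle between the beam and the plane normal; 0 means the beam hits the plane head-on.
  return std::acos(cos_angle);
}

With such a helper, the placeholder line could become incidence_angles.push_back(incidenceAngleWithGroundPlane(reference_in_sensor_frame, point_to_evaluate)).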
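The commit message points at neural network integration, and the 14-entry feature vector suggests that each elevation cell will eventually be classified by a small fully connected model. As a rough illustration of what a forward pass of such a "Shallow Neural Network" over that vector might look like with Eigen, here is a sketch; the struct name, layer sizes, weights and tanh activation are assumptions, not code from this repository.

// Sketch only: a one-hidden-layer ("shallow") network forward pass over the 14-entry
// feature vector, using Eigen. Layer sizes, weights and activations are assumptions.
#include <Eigen/Dense>

struct ShallowNetwork
{
  Eigen::MatrixXd w1;  // hidden_size x 14
  Eigen::VectorXd b1;  // hidden_size
  Eigen::MatrixXd w2;  // num_classes x hidden_size
  Eigen::VectorXd b2;  // num_classes

  // Returns raw class scores; a softmax over them would give per-class probabilities.
  Eigen::VectorXd forward(const Eigen::VectorXd &features) const
  {
    // tanh hidden layer, as is common in small shallow-network classifiers.
    Eigen::VectorXd hidden = ((w1 * features + b1).array().tanh()).matrix();
    return w2 * hidden + b2;
  }
};

// Example usage with arbitrary sizes (10 hidden units, 3 ground classes):
// ShallowNetwork net{Eigen::MatrixXd::Zero(10, 14), Eigen::VectorXd::Zero(10),
//                    Eigen::MatrixXd::Zero(3, 10), Eigen::VectorXd::Zero(3)};
// Eigen::VectorXd class_scores = net.forward(features);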