Unverified commit 25a42a48, authored by Tae Eun Choe, committed via GitHub

 
Perception: 1. Fixed geometry; 2. Added manual calibration using arrow keys (#7166)

3. Fixed missing uv_points of lane lines in the swap for a special case

4. Fixed test warnings

5. Merged multi-line AINFOs into one AINFO

6. Fixed TODO format
Parent 9cc50f60
......@@ -173,8 +173,10 @@ void OnlineCalibrationService::Update(CameraFrame *frame) {
}
}
auto iter = name_camera_status_map_.find(sensor_name_);
AINFO << "camera_ground_height: " << iter->second.camera_ground_height;
AINFO << "pitch_angle: " << iter->second.pitch_angle;
AINFO << "camera_ground_height: "
<< iter->second.camera_ground_height << " meter";
AINFO << "pitch_angle: "
<< iter->second.pitch_angle * 180.0 / M_PI << " degree";
// CHECK(BuildIndex());
}
......
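The hunk above attaches units to both calibration values and converts the logged pitch from radians to degrees. For reference, a minimal sketch of that conversion, assuming only `<cmath>` for `M_PI`:

```cpp
#include <cmath>  // M_PI

// Convert a pitch angle from radians (the internal representation) to degrees
// for human-readable logging; 1 rad = 180 / pi degrees.
inline double RadianToDegree(double radian) {
  return radian * 180.0 / M_PI;
}
// Example: RadianToDegree(0.0175) is roughly 1.0 degree.
```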
......@@ -42,7 +42,8 @@ std::vector<base::LaneLinePositionType> spatialLUT(
base::LaneLinePositionType::ADJACENT_RIGHT,
base::LaneLinePositionType::THIRD_RIGHT,
base::LaneLinePositionType::FOURTH_RIGHT,
base::LaneLinePositionType::OTHER, base::LaneLinePositionType::CURB_LEFT,
base::LaneLinePositionType::OTHER,
base::LaneLinePositionType::CURB_LEFT,
base::LaneLinePositionType::CURB_RIGHT});
std::map<base::LaneLinePositionType, int> spatialLUTind = {
......@@ -110,6 +111,9 @@ bool DarkSCNNLanePostprocessor::Init(
roi_height_ = lane_postprocessor_param_.roi_height();
roi_start_ = lane_postprocessor_param_.roi_start();
roi_width_ = lane_postprocessor_param_.roi_width();
lane_type_num_ = static_cast<int>(spatialLUTind.size());
AINFO << "lane_type_num_: " << lane_type_num_;
return true;
}
......@@ -129,18 +133,22 @@ bool DarkSCNNLanePostprocessor::Process2D(
// }
// 1. Sample points on lane_map and project them onto world coordinate
// TODO(techoe): Should be fixed
int y = static_cast<int>(lane_map.rows * 0.9 - 1);
// TODO(techoe): Should be fixed
int step_y = (y - 40) * (y - 40) / 6400 + 1;
xy_points.clear();
xy_points.resize(13);
xy_points.resize(lane_type_num_);
uv_points.clear();
uv_points.resize(13);
uv_points.resize(lane_type_num_);
while (y > 0) {
for (int x = 1; x < lane_map.cols - 1; x++) {
int value = static_cast<int>(round(lane_map.at<float>(y, x)));
// lane on left
if ((value > 0 && value < 5) || value == 11) {
// right edge (inner) of the lane
if (value != static_cast<int>(round(lane_map.at<float>(y, x + 1)))) {
......@@ -152,23 +160,27 @@ bool DarkSCNNLanePostprocessor::Process2D(
xy_p = trans_mat_ * img_point;
Eigen::Matrix<float, 2, 1> xy_point;
Eigen::Matrix<float, 2, 1> uv_point;
if (std::abs(xy_p(2)) < 1e-6) continue;
if (std::fabs(xy_p(2)) < 1e-6) continue;
xy_point << xy_p(0) / xy_p(2), xy_p(1) / xy_p(2);
if (xy_point(0) < 0.0 || xy_point(0) > 300.0 ||
// Filter out lane line points
if (xy_point(0) < 0.0 || // This condition is only for front camera
xy_point(0) > max_longitudinal_distance_ ||
std::abs(xy_point(1)) > 30.0) {
continue;
}
uv_point << static_cast<float>(x * roi_width_ / lane_map.cols),
static_cast<float>(y * roi_height_ / lane_map.rows + roi_start_);
if (xy_points[value].size() < 5 || xy_point(0) < 50 ||
std::abs(xy_point(1) - xy_points[value].back()(1)) < 1) {
if (xy_points[value].size() < minNumPoints_ ||
xy_point(0) < 50.0f ||
std::fabs(xy_point(1) - xy_points[value].back()(1)) < 1.0f) {
xy_points[value].push_back(xy_point);
uv_points[value].push_back(uv_point);
}
}
} else if (value >= 5) {
} else if (value >= 5 && value < lane_type_num_) {
// left edge (inner) of the lane
if (value != round(lane_map.at<float>(y, x - 1))) {
if (value != static_cast<int>(round(lane_map.at<float>(y, x - 1)))) {
Eigen::Matrix<float, 3, 1> img_point(
static_cast<float>(x * roi_width_ / lane_map.cols),
static_cast<float>(y * roi_height_ / lane_map.rows + roi_start_),
......@@ -177,18 +189,24 @@ bool DarkSCNNLanePostprocessor::Process2D(
xy_p = trans_mat_ * img_point;
Eigen::Matrix<float, 2, 1> xy_point;
Eigen::Matrix<float, 2, 1> uv_point;
if (std::fabs(xy_p(2)) < 1e-6) continue;
xy_point << xy_p(0) / xy_p(2), xy_p(1) / xy_p(2);
// Filter out lane line points
if (xy_point(0) < 0.0 || xy_point(0) > 300.0 ||
std::abs(xy_point(1)) > 25.0) {
continue;
}
uv_point << static_cast<float>(x * roi_width_ / lane_map.cols),
static_cast<float>(y * roi_height_ / lane_map.rows + roi_start_);
if (xy_points[value].size() < 5 || xy_point(0) < 50 ||
std::abs(xy_point(1) - xy_points[value].back()(1)) < 1) {
if (xy_points[value].size() < minNumPoints_ ||
xy_point(0) < 50.0f ||
std::fabs(xy_point(1) - xy_points[value].back()(1)) < 1.0f) {
xy_points[value].push_back(xy_point);
uv_points[value].push_back(uv_point);
}
} else if (value >= lane_type_num_) {
AWARN << "Lane line value shouldn't be equal or more than "
<< lane_type_num_;
}
}
}
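In the sampling loop above, each lane-map pixel is lifted into car/ground coordinates by multiplying with the 3x3 homography `trans_mat_` and dividing by the homogeneous scale; the `std::fabs(xy_p(2)) < 1e-6` check skips points whose scale is nearly zero. A self-contained sketch of that projection (the function name is illustrative, not the Apollo API):

```cpp
#include <cmath>
#include <Eigen/Core>

// Project an image point (u, v) to ground-plane coordinates (x, y) using a
// 3x3 homography H; returns false when the homogeneous scale is degenerate.
bool ProjectImageToGround(const Eigen::Matrix3f& H, float u, float v,
                          Eigen::Vector2f* xy) {
  const Eigen::Vector3f img_point(u, v, 1.0f);
  const Eigen::Vector3f xy_p = H * img_point;
  if (std::fabs(xy_p(2)) < 1e-6f) return false;  // avoid division by ~0
  *xy << xy_p(0) / xy_p(2), xy_p(1) / xy_p(2);
  return true;
}
```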
......@@ -204,18 +222,18 @@ bool DarkSCNNLanePostprocessor::Process2D(
// 2. Remove outliers and Do a ransac fitting
std::vector<Eigen::Matrix<float, 4, 1>> coeffs;
std::vector<Eigen::Matrix<float, 4, 1>> img_coeffs;
coeffs.resize(13);
img_coeffs.resize(13);
for (int i = 1; i < 13; ++i) {
coeffs.resize(lane_type_num_);
img_coeffs.resize(lane_type_num_);
for (int i = 1; i < lane_type_num_; ++i) {
if (xy_points[i].size() < minNumPoints_) continue;
Eigen::Matrix<float, 4, 1> coeff;
// solve linear system to estimate polynomial coefficients
// if (RansacFitting(&xy_points[i], &coeff,
// 200, static_cast<int> (minNumPoints_), 0.1f)) {
if (PolyFit(xy_points[i], max_poly_order, &coeff, true)) {
coeffs[i] = coeff;
if (RansacFitting(&xy_points[i], &coeff,
200, static_cast<int> (minNumPoints_), 0.1f)) {
// if (PolyFit(xy_points[i], max_poly_order, &coeff, true)) {
coeffs[i] = coeff;
} else {
xy_points[i].clear();
xy_points[i].clear();
}
}
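This hunk switches the fit from `PolyFit` to `RansacFitting(&xy_points[i], &coeff, 200, static_cast<int>(minNumPoints_), 0.1f)` so that stray lane-map points no longer skew the cubic coefficients. A rough sketch of a RANSAC-style cubic fit of this shape, purely as an illustration of the technique (it is not Apollo's `RansacFitting`; `FitCubicLeastSquares` is a hypothetical helper):

```cpp
#include <cmath>
#include <random>
#include <vector>
#include <Eigen/Dense>

// Least-squares cubic fit y = c0 + c1*x + c2*x^2 + c3*x^3 over the given points.
static Eigen::Vector4f FitCubicLeastSquares(
    const std::vector<Eigen::Vector2f>& pts) {
  Eigen::MatrixXf A(pts.size(), 4);
  Eigen::VectorXf b(pts.size());
  for (size_t i = 0; i < pts.size(); ++i) {
    const float x = pts[i](0);
    A.row(i) << 1.0f, x, x * x, x * x * x;
    b(i) = pts[i](1);
  }
  return A.colPivHouseholderQr().solve(b);
}

// RANSAC-style robust fit: repeatedly fit on a random subset and keep the
// model that explains the most inliers.
bool RansacCubicFit(const std::vector<Eigen::Vector2f>& pts, int max_iters,
                    int sample_size, float inlier_thresh,
                    Eigen::Vector4f* coeff) {
  if (static_cast<int>(pts.size()) < sample_size) return false;
  std::mt19937 rng(12345);
  std::uniform_int_distribution<size_t> pick(0, pts.size() - 1);
  size_t best_inliers = 0;
  for (int it = 0; it < max_iters; ++it) {
    std::vector<Eigen::Vector2f> sample;
    for (int k = 0; k < sample_size; ++k) sample.push_back(pts[pick(rng)]);
    const Eigen::Vector4f c = FitCubicLeastSquares(sample);
    size_t inliers = 0;
    for (const auto& p : pts) {
      const float x = p(0);
      const float y_fit = c(0) + c(1) * x + c(2) * x * x + c(3) * x * x * x;
      if (std::fabs(p(1) - y_fit) < inlier_thresh) ++inliers;
    }
    if (inliers > best_inliers) {
      best_inliers = inliers;
      *coeff = c;
    }
  }
  return best_inliers >= static_cast<size_t>(sample_size);
}
```

A final refit over all inliers is often added after the loop; the sketch keeps only the best sampled model for brevity.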
......@@ -225,8 +243,8 @@ bool DarkSCNNLanePostprocessor::Process2D(
time_2 += microseconds_2 - microseconds_1;
// 3. Write values into lane_objects
std::vector<float> c0s(13, 0);
for (int i = 1; i < 13; ++i) {
std::vector<float> c0s(lane_type_num_, 0);
for (int i = 1; i < lane_type_num_; ++i) {
if (xy_points[i].size() < minNumPoints_) continue;
c0s[i] = GetPolyValue(
static_cast<float>(coeffs[i](3)), static_cast<float>(coeffs[i](2)),
......@@ -237,11 +255,13 @@ bool DarkSCNNLanePostprocessor::Process2D(
if (xy_points[4].size() < minNumPoints_ &&
xy_points[5].size() >= minNumPoints_) {
std::swap(xy_points[4], xy_points[5]);
std::swap(uv_points[4], uv_points[5]);
std::swap(coeffs[4], coeffs[5]);
std::swap(c0s[4], c0s[5]);
} else if (xy_points[6].size() < minNumPoints_ &&
xy_points[5].size() >= minNumPoints_) {
std::swap(xy_points[6], xy_points[5]);
std::swap(uv_points[6], uv_points[5]);
std::swap(coeffs[6], coeffs[5]);
std::swap(c0s[6], c0s[5]);
}
......@@ -258,11 +278,13 @@ bool DarkSCNNLanePostprocessor::Process2D(
}
if (use_boundary) {
std::swap(xy_points[4], xy_points[11]);
std::swap(uv_points[4], uv_points[11]);
std::swap(coeffs[4], coeffs[11]);
std::swap(c0s[4], c0s[11]);
}
}
if (xy_points[6].size() < minNumPoints_ &&
xy_points[12].size() >= minNumPoints_) {
// use right lane boundary as the leftmost missing right lane,
......@@ -275,12 +297,13 @@ bool DarkSCNNLanePostprocessor::Process2D(
}
if (use_boundary) {
std::swap(xy_points[6], xy_points[12]);
std::swap(uv_points[6], uv_points[12]);
std::swap(coeffs[6], coeffs[12]);
std::swap(c0s[6], c0s[12]);
}
}
for (int i = 1; i < 13; ++i) {
for (int i = 1; i < lane_type_num_; ++i) {
base::LaneLine cur_object;
if (xy_points[i].size() < minNumPoints_) continue;
......@@ -299,7 +322,6 @@ bool DarkSCNNLanePostprocessor::Process2D(
else if (c0s[i] < c0s[9] && i == 11)
continue;
}
// [4] write values
cur_object.curve_car_coord.x_start =
static_cast<float>(xy_points[i].front()(0));
......@@ -312,7 +334,6 @@ bool DarkSCNNLanePostprocessor::Process2D(
// if (cur_object.curve_car_coord.x_end -
// cur_object.curve_car_coord.x_start < 5) continue;
// cur_object.order = 2;
for (size_t j = 0; j < xy_points[i].size(); ++j) {
base::Point2DF p_j;
p_j.x = static_cast<float>(xy_points[i][j](0));
......
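The swaps added in the hunks above are the fix for item 3 of the commit message: `uv_points` had been left out, so after a swap the image-space points no longer lined up with the car-space points, coefficients, and intercepts of the same lane slot. One way to keep such parallel containers in sync is to route every slot exchange through a single helper; a hedged sketch (the helper and its signature are illustrative, not code in this patch):

```cpp
#include <utility>
#include <vector>
#include <Eigen/Core>

// Swap every per-lane container for two lane indices at once so that car-space
// points, image-space points, fitted coefficients, and c0 intercepts stay aligned.
template <typename PointT>
void SwapLaneSlots(int a, int b,
                   std::vector<std::vector<PointT>>* xy_points,
                   std::vector<std::vector<PointT>>* uv_points,
                   std::vector<Eigen::Matrix<float, 4, 1>>* coeffs,
                   std::vector<float>* c0s) {
  std::swap((*xy_points)[a], (*xy_points)[b]);
  std::swap((*uv_points)[a], (*uv_points)[b]);
  std::swap((*coeffs)[a], (*coeffs)[b]);
  std::swap((*c0s)[a], (*c0s)[b]);
}
```

A call such as `SwapLaneSlots(4, 11, &xy_points, &uv_points, &coeffs, &c0s);` would then replace each four-line swap block and make this kind of omission harder to repeat.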
......@@ -87,6 +87,12 @@ class DarkSCNNLanePostprocessor : public BaseLanePostprocessor {
int64_t time_3 = 0;
int time_num = 0;
float max_longitudinal_distance_ = 300.0f;
float min_longitudinal_distance_ = 0.0f;
// number of lane types (13)
int lane_type_num_;
lane::DarkSCNNLanePostprocessorParam lane_postprocessor_param_;
private:
......
......@@ -163,6 +163,8 @@ TEST(darkSCNNLanePostprocessor, camera_lane_postprocessor_point_test) {
// initialize visualizer and set homography for lane_postprocessor
Visualizer visualize_;
double pitch_adj = 0;
double yaw_adj = 0;
double roll_adj = 0;
std::string visual_camera = "onsemi_obstacle";
std::map<std::string, Eigen::Matrix4d> extrinsic_map;
std::map<std::string, Eigen::Matrix3f> intrinsic_map;
......@@ -185,7 +187,8 @@ TEST(darkSCNNLanePostprocessor, camera_lane_postprocessor_point_test) {
extrinsic_map["onsemi_obstacle"] = ex_camera2lidar;
EXPECT_TRUE(visualize_.Init_all_info_single_camera(
visual_camera, intrinsic_map, extrinsic_map, ex_lidar2imu, pitch_adj,
visual_camera, intrinsic_map, extrinsic_map, ex_lidar2imu,
pitch_adj, yaw_adj, roll_adj,
calibration_service_init_options.image_height,
calibration_service_init_options.image_width));
homography_im2car_ = visualize_.homography_im2car();
......
......@@ -35,11 +35,20 @@ class Visualizer {
TransformServer *tf_server);
bool Init_all_info_single_camera(
const std::string &camera_name,
std::map<std::string, Eigen::Matrix3f> intrinsic_map,
std::map<std::string, Eigen::Matrix4d> extrinsic_map,
Eigen::Matrix4d ex_lidar2imu, double pitch_adj, int image_height,
int image_width);
void SetDirectory(const std::string &path);
const std::map<std::string, Eigen::Matrix3f> &intrinsic_map,
const std::map<std::string, Eigen::Matrix4d> &extrinsic_map,
const Eigen::Matrix4d &ex_lidar2imu,
const double &pitch_adj,
const double &yaw_adj,
const double &roll_adj,
const int image_height,
const int image_width);
bool adjust_angles(
const std::string &camera_name,
const double &pitch_adj,
const double &yaw_adj,
const double &roll_adj);
bool SetDirectory(const std::string &path);
void ShowResult(const cv::Mat &img, const CameraFrame &frame);
void Draw2Dand3D(const cv::Mat &img, const CameraFrame &frame);
void ShowResult_all_info_single_camera(const cv::Mat &img,
......@@ -50,17 +59,24 @@ class Visualizer {
Eigen::Matrix4d extrinsic);
cv::Point world_point_to_bigimg(const Eigen::Vector2d &p);
Eigen::Vector2d image2ground(cv::Point p_img);
cv::Point ground2image(Eigen::Vector2d p_ground);
std::string type_to_string(const apollo::perception::base::ObjectType type);
std::string sub_type_to_string(
const apollo::perception::base::ObjectSubType type);
Eigen::Matrix3d homography_im2car() { return homography_im2car_; }
Eigen::Matrix3d homography_im2car() { return homography_image2ground_; }
void Set_ROI(int input_offset_y, int crop_height, int crop_width) {
roi_start_ = input_offset_y;
roi_height_ = crop_height;
roi_width_ = crop_width;
}
bool key_handler(const std::string &camera_name, const int key);
bool reset_key();
bool write_out_img_ = false;
bool cv_imshow_img_ = true;
// homography between image and ground plane
Eigen::Matrix3d homography_image2ground_ = Eigen::Matrix3d::Identity(3, 3);
Eigen::Matrix3d homography_ground2image_ = Eigen::Matrix3d::Identity(3, 3);
private:
std::map<std::string, cv::Mat> camera_image_;
......@@ -75,8 +91,20 @@ class Visualizer {
int small_h_ = 0;
int small_w_ = 0;
int world_h_ = 0;
int m2pixel_ = 6;
double fov_cut_ratio_ = 0.6;
int m2pixel_ = 15; // 6;
double fov_cut_ratio_ = 0.55;
double degree_to_radian_factor = M_PI / 180.0;
double pitch_adj_degree_ = 0;
double yaw_adj_degree_ = 0;
double roll_adj_degree_ = 0;
double max_pitch_degree_ = 5.0;
double min_pitch_degree_ = -5.0;
double max_yaw_degree_ = 5.0;
double min_yaw_degree_ = -5.0;
double max_roll_degree_ = 5.0;
double min_roll_degree_ = -5.0;
cv::Point p_fov_1_;
cv::Point p_fov_2_;
cv::Point p_fov_3_;
......@@ -85,14 +113,51 @@ class Visualizer {
int roi_start_ = 312;
int roi_width_ = 1920;
Eigen::Vector2d vp1_;
Eigen::Vector2d vp2_;
void draw_range_circle();
// maps for storing params
std::map<std::string, Eigen::Matrix3f> intrinsic_map_;
std::map<std::string, Eigen::Matrix4d> extrinsic_map_;
Eigen::Matrix4d ex_lidar2imu_;
Eigen::Matrix4d ex_camera2lidar_;
Eigen::Matrix4d ex_camera2imu_;
Eigen::Matrix4d ex_imu2camera_;
Eigen::Matrix4d ex_car2camera_;
Eigen::Matrix4d ex_camera2car_;
Eigen::Matrix4d ex_imu2car_;
Eigen::Matrix4d adjusted_camera2car_ = Eigen::Matrix4d::Identity();
// homography between image and ground plane
Eigen::Matrix3d homography_im2car_;
Eigen::Matrix4d projection_matrix_;
Eigen::Matrix3d K_;
// Visualization related variables
bool use_class_color_ = true;
bool capture_screen_ = false;
bool capture_video_ = false;
bool show_camera_box2d_ = true;
bool show_camera_box3d_ = true;
bool show_camera_bdv_ = true;
bool show_radar_pc_ = true;
bool show_fusion_ = false;
bool show_associate_color_ = false;
bool show_type_id_label_ = true;
bool show_verbose_ = false;
bool show_lane_ = true;
bool show_trajectory_ = true;
bool show_vp_grid_ = true; // show vanishing point and ground plane grid
bool draw_lane_objects_ = true;
bool show_box_ = true;
bool show_velocity_ = false;
bool show_polygon_ = true;
bool show_text_ = false;
bool show_help_text_ = false;
std::string help_str_;
};
} // namespace camera
......
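The new `adjust_angles`/`key_handler` entries above back item 2 of the commit message: arrow keys nudge `pitch_adj_degree_`, `yaw_adj_degree_`, and `roll_adj_degree_` within the declared min/max bounds, after which the camera-to-car extrinsics and the image-to-ground homography are rebuilt. A minimal sketch of that control flow; the key codes (OpenCV `waitKey` values on Linux) and the step size are assumptions, not Apollo's actual bindings:

```cpp
#include <algorithm>

// Hypothetical arrow-key handler: each press nudges one calibration angle by
// a fixed step (in degrees) and keeps it within the configured bounds.
struct ManualCalibration {
  double pitch_deg = 0.0, yaw_deg = 0.0, roll_deg = 0.0;
  double min_deg = -5.0, max_deg = 5.0;
  double step_deg = 0.05;

  static double Clamp(double v, double lo, double hi) {
    return std::max(lo, std::min(hi, v));
  }

  // Returns true if the key changed an angle, i.e. the camera-to-car
  // extrinsics and the image-to-ground homography should be recomputed.
  bool OnKey(int key) {
    switch (key) {
      case 82: pitch_deg = Clamp(pitch_deg + step_deg, min_deg, max_deg); break;  // up arrow
      case 84: pitch_deg = Clamp(pitch_deg - step_deg, min_deg, max_deg); break;  // down arrow
      case 81: yaw_deg = Clamp(yaw_deg + step_deg, min_deg, max_deg); break;      // left arrow
      case 83: yaw_deg = Clamp(yaw_deg - step_deg, min_deg, max_deg); break;      // right arrow
      default: return false;
    }
    return true;
  }
};
```

On a `true` return, the visualizer would then call something like `adjust_angles(camera_name, pitch_deg, yaw_deg, roll_deg)` to recompute `ex_car2camera_` and `homography_image2ground_`.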
......@@ -143,6 +143,7 @@ bool GetProjectMatrix(
const std::map<std::string, Eigen::Matrix4d> &extrinsic_map,
const std::map<std::string, Eigen::Matrix3f> &intrinsic_map,
Eigen::Matrix3d *project_matrix, double *pitch_diff = nullptr) {
// TODO(techoe): This condition should be removed.
if (camera_names.size() != 2) {
AINFO << "camera number must be 2!";
return false;
......@@ -207,7 +208,10 @@ bool FusionCameraDetectionComponent::Init() {
// computed from camera height and pitch.
// Apply online calibration to adjust pitch/height automatically
// Temporary code is used here for test
double pitch_adj = -0.1;
double pitch_adj_degree = 0.0;
double yaw_adj_degree = 0.0;
double roll_adj_degree = 0.0;
// load in lidar to imu extrinsic
Eigen::Matrix4d ex_lidar2imu;
LoadExtrinsics(FLAGS_obs_sensor_intrinsic_path + "/" +
......@@ -216,8 +220,10 @@ bool FusionCameraDetectionComponent::Init() {
AINFO << "velodyne128_novatel_extrinsics: " << ex_lidar2imu;
CHECK(visualize_.Init_all_info_single_camera(
visual_camera_, intrinsic_map_, extrinsic_map_, ex_lidar2imu, pitch_adj,
visual_camera_, intrinsic_map_, extrinsic_map_, ex_lidar2imu,
pitch_adj_degree, yaw_adj_degree, roll_adj_degree,
image_height_, image_width_));
homography_im2car_ = visualize_.homography_im2car();
camera_obstacle_pipeline_->SetIm2CarHomography(homography_im2car_);
......
......@@ -213,7 +213,9 @@ bool LaneDetectionComponent::Init() {
// computed from camera height and pitch.
// Apply online calibration to adjust pitch/height automatically
// Temporary code is used here for testing
double pitch_adj = -0.1;
double pitch_adj_degree = 0.0;
double yaw_adj_degree = 0.0;
double roll_adj_degree = 0.0;
// load in lidar to imu extrinsic
Eigen::Matrix4d ex_lidar2imu;
LoadExtrinsics(FLAGS_obs_sensor_intrinsic_path + "/" +
......@@ -222,10 +224,11 @@ bool LaneDetectionComponent::Init() {
AINFO << "velodyne128_novatel_extrinsics: " << ex_lidar2imu;
CHECK(visualize_.Init_all_info_single_camera(
visual_camera_, intrinsic_map_, extrinsic_map_, ex_lidar2imu, pitch_adj,
image_height_, image_width_));
homography_im2car_ = visualize_.homography_im2car();
camera_lane_pipeline_->SetIm2CarHomography(homography_im2car_);
visual_camera_, intrinsic_map_, extrinsic_map_, ex_lidar2imu,
pitch_adj_degree, yaw_adj_degree, roll_adj_degree,
image_height_, image_width_));
homography_image2ground_ = visualize_.homography_im2car();
camera_lane_pipeline_->SetIm2CarHomography(homography_image2ground_);
if (enable_visualization_) {
if (write_visual_img_) {
......@@ -233,7 +236,6 @@ bool LaneDetectionComponent::Init() {
visualize_.SetDirectory(visual_debug_folder_);
}
}
AINFO << "Init processes all succeed";
return true;
}
......
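Both components now pass the adjustment angles in degrees and read back `homography_im2car()` (stored internally as `homography_image2ground_`) before handing it to the pipeline. For context, an image-to-ground homography for the ground plane z = 0 is conventionally built from the intrinsics K and the car-to-camera rotation and translation as the inverse of K [r1 r2 t]; a sketch of that standard construction, not necessarily the exact computation inside `Init_all_info_single_camera`:

```cpp
#include <Eigen/Dense>

// Plane-induced homography for the ground plane z = 0 in car coordinates:
// a ground point (x, y) projects to the image as K * [r1 r2 t] * (x, y, 1)^T,
// where R = [r1 r2 r3] and t are the car-to-camera rotation and translation.
Eigen::Matrix3d GroundToImageHomography(const Eigen::Matrix3d& K,
                                        const Eigen::Matrix3d& R_car2cam,
                                        const Eigen::Vector3d& t_car2cam) {
  Eigen::Matrix3d H;
  H.col(0) = R_car2cam.col(0);
  H.col(1) = R_car2cam.col(1);
  H.col(2) = t_car2cam;
  return K * H;
}

// The image-to-ground homography (homography_im2car_) is then its inverse.
Eigen::Matrix3d ImageToGroundHomography(const Eigen::Matrix3d& K,
                                        const Eigen::Matrix3d& R_car2cam,
                                        const Eigen::Vector3d& t_car2cam) {
  return GroundToImageHomography(K, R_car2cam, t_car2cam).inverse();
}
```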
......@@ -118,7 +118,7 @@ class LaneDetectionComponent : public apollo::cyber::Component<> {
// maps for storing params
std::map<std::string, Eigen::Matrix4d> extrinsic_map_;
std::map<std::string, Eigen::Matrix3f> intrinsic_map_;
Eigen::Matrix3d homography_im2car_;
Eigen::Matrix3d homography_image2ground_;
// camera lane pipeline
camera::CameraPerceptionInitOptions camera_perception_init_options_;
......