Apollo 10.0
Apollo Open Autonomous Driving Platform
darkSCNN_lane_postprocessor.cc
Browse the documentation for this file.
1/******************************************************************************
2 * Copyright 2019 The Apollo Authors. All Rights Reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 *****************************************************************************/
17
18#include <algorithm>
19#include <map>
20#include <utility>
21
22#include "cyber/common/file.h"
27
28namespace apollo {
29namespace perception {
30namespace camera {
31
// Lane image valid size: exclusive upper bound used to sanity-check the
// configured lane map width/height read from the config in Init().
// constexpr (C++11) instead of const: guaranteed compile-time constant.
static constexpr int kMaxValidSize = 3000;
34
35std::vector<base::LaneLinePositionType> spatialLUT(
48
63
// @brief: evaluating y value of given x for a third-order polynomial function
// Computes y = a*x^3 + b*x^2 + c*x + d by accumulating successive powers of
// x and adding one term at a time (constant term first).
template <typename T = float>
T GetPolyValue(T a, T b, T c, T d, T x) {
  T power = x;          // current power of x: x^1, then x^2, then x^3
  T result = d;         // start from the constant term
  result += c * power;  // + c * x
  power *= x;
  result += b * power;  // + b * x^2
  power *= x;
  result += a * power;  // + a * x^3
  return result;
}
76
    const LanePostprocessorInitOptions& options) {
  // Read postprocessor parameter
  std::string config_file =
      GetConfigFile(options.config_path, options.config_file);
  if (!cyber::common::GetProtoFromFile(config_file,
                                       &lane_postprocessor_param_)) {
    AERROR << "Failed to read config detect_param: " << config_file;
    return false;
  }
  AINFO << "lane_postprocessor param: "
        << lane_postprocessor_param_.DebugString();

  // Offset of the network input crop within the source image.
  input_offset_x_ = lane_postprocessor_param_.input_offset_x();
  input_offset_y_ = lane_postprocessor_param_.input_offset_y();
  // Dimensions of the lane segmentation map produced by the detector.
  lane_map_width_ = lane_postprocessor_param_.resize_width();
  lane_map_height_ = lane_postprocessor_param_.resize_height();
  // Sanity-check configured map size (debug-build checks only).
  DCHECK_LT(lane_map_width_, kMaxValidSize);
  DCHECK_GT(lane_map_width_, 0);
  DCHECK_LT(lane_map_height_, kMaxValidSize);
  DCHECK_GT(lane_map_height_, 0);

  // Region of interest within the source image that the lane map covers;
  // used in Process2D() to map lane-map pixels back to image coordinates.
  roi_height_ = lane_postprocessor_param_.roi_height();
  roi_start_ = lane_postprocessor_param_.roi_start();
  roi_width_ = lane_postprocessor_param_.roi_width();

  // Number of distinct lane position labels (size of the spatial LUT);
  // Process2D() allocates one point list per label.
  lane_type_num_ = static_cast<int>(spatialLUTind.size());
  AINFO << "lane_type_num_: " << lane_type_num_;
  return true;
}
107
    const LanePostprocessorOptions& options, CameraFrame* frame) {
  ADEBUG << "Begin to Process2D.";
  frame->lane_objects.clear();
  auto start = std::chrono::high_resolution_clock::now();

  // Copy the detector's per-pixel lane label map (float-encoded labels)
  // into a cv::Mat for (row, col) indexing below.
  cv::Mat lane_map(lane_map_height_, lane_map_width_, CV_32FC1);
  memcpy(lane_map.data, frame->lane_detected_blob->cpu_data(),
         lane_map_width_ * lane_map_height_ * sizeof(float));

  // if (options.use_lane_history &&
  //     (!use_history_ || time_stamp_ > options.timestamp)) {
  //   InitLaneHistory();
  // }

  // 1. Sample points on lane_map and project them onto world coordinate

  // Start sampling near the bottom row (closest to the ego vehicle).
  // TODO(techoe): Should be fixed
  int y = static_cast<int>(lane_map.rows * 0.9 - 1);
  // Row stride grows quadratically toward the top of the map, where each
  // pixel spans more ground distance.
  // TODO(techoe): Should be fixed
  int step_y = (y - 40) * (y - 40) / 6400 + 1;

  // One point list per lane label; the label value indexes the list.
  xy_points.clear();
  xy_points.resize(lane_type_num_);
  uv_points.clear();
  uv_points.resize(lane_type_num_);

  while (y > 0) {
    for (int x = 1; x < lane_map.cols - 1; ++x) {
      int value = static_cast<int>(round(lane_map.at<float>(y, x)));
      // lane on left

      // Labels 1..4 are lane lines left of ego, 11 is the left curb (see
      // spatialLUT); for these, sample the right (inner) boundary pixel.
      if ((value > 0 && value < 5) || value == 11) {
        // right edge (inner) of the lane
        if (value != static_cast<int>(round(lane_map.at<float>(y, x + 1)))) {
          // Map the lane-map pixel back into ROI image coordinates
          // (homogeneous).
          Eigen::Matrix<float, 3, 1> img_point(
              static_cast<float>(x * roi_width_ / lane_map.cols),
              static_cast<float>(y * roi_height_ / lane_map.rows + roi_start_),
              1.0);
          // Project onto the ground (car) frame via the homography
          // trans_mat_.
          Eigen::Matrix<float, 3, 1> xy_p;
          xy_p = trans_mat_ * img_point;
          Eigen::Matrix<float, 2, 1> xy_point;
          Eigen::Matrix<float, 2, 1> uv_point;
          // Skip points whose homogeneous scale is ~0 (division blows up).
          if (std::fabs(xy_p(2)) < 1e-6) continue;
          xy_point << xy_p(0) / xy_p(2), xy_p(1) / xy_p(2);

          // Filter out lane line points
          if (xy_point(0) < 0.0 ||  // This condition is only for front camera
              xy_point(0) > max_longitudinal_distance_ ||
              std::abs(xy_point(1)) > 30.0) {
            continue;
          }
          uv_point << static_cast<float>(x * roi_width_ / lane_map.cols),
              static_cast<float>(y * roi_height_ / lane_map.rows + roi_start_);
          // Accept the point while the line is still short, or when it is
          // near (x < 50 m), or laterally close to the last accepted point.
          // back() is safe: the size() test short-circuits when empty.
          if (xy_points[value].size() < minNumPoints_ || xy_point(0) < 50.0f ||
              std::fabs(xy_point(1) - xy_points[value].back()(1)) < 1.0f) {
            xy_points[value].push_back(xy_point);
            uv_points[value].push_back(uv_point);
          }
        }
      } else if (value >= 5 && value < lane_type_num_) {
        // Left edge (inner) of the lane
        if (value != static_cast<int>(round(lane_map.at<float>(y, x - 1)))) {
          Eigen::Matrix<float, 3, 1> img_point(
              static_cast<float>(x * roi_width_ / lane_map.cols),
              static_cast<float>(y * roi_height_ / lane_map.rows + roi_start_),
              1.0);
          Eigen::Matrix<float, 3, 1> xy_p;
          xy_p = trans_mat_ * img_point;
          Eigen::Matrix<float, 2, 1> xy_point;
          Eigen::Matrix<float, 2, 1> uv_point;
          if (std::fabs(xy_p(2)) < 1e-6) continue;
          xy_point << xy_p(0) / xy_p(2), xy_p(1) / xy_p(2);
          // Filter out lane line points
          if (xy_point(0) < 0.0 ||  // This condition is only for front camera
              xy_point(0) > max_longitudinal_distance_ ||
              std::abs(xy_point(1)) > 30.0) {
            continue;
          }
          uv_point << static_cast<float>(x * roi_width_ / lane_map.cols),
              static_cast<float>(y * roi_height_ / lane_map.rows + roi_start_);
          if (xy_points[value].size() < minNumPoints_ || xy_point(0) < 50.0f ||
              std::fabs(xy_point(1) - xy_points[value].back()(1)) < 1.0f) {
            xy_points[value].push_back(xy_point);
            uv_points[value].push_back(uv_point);
          }
        } else if (value >= lane_type_num_) {
          // NOTE(review): unreachable — the enclosing branch already
          // guarantees value < lane_type_num_, so this warning never fires.
          AWARN << "Lane line value shouldn't be equal or more than: "
                << lane_type_num_;
        }
      }
    }
    step_y = (y - 45) * (y - 45) / 6400 + 1;
    y -= step_y;
  }

  auto elapsed_1 = std::chrono::high_resolution_clock::now() - start;
  int64_t microseconds_1 =
      std::chrono::duration_cast<std::chrono::microseconds>(elapsed_1).count();
  time_1 += microseconds_1;

  // 2. Remove outliers and Do a ransac fitting
  // Fit a cubic per label with RANSAC; on success keep only inlier points,
  // on failure drop the line entirely.
  EigenVector<Eigen::Matrix<float, 2, 1>> selected_xy_points;
  coeffs.resize(lane_type_num_);
  img_coeffs.resize(lane_type_num_);
  for (int i = 1; i < lane_type_num_; ++i) {
    coeffs[i] << 0, 0, 0, 0;
    if (xy_points[i].size() < minNumPoints_) continue;
    Eigen::Matrix<float, 4, 1> coeff;
    // Solve linear system to estimate polynomial coefficients
    if (RansacFitting<float>(xy_points[i], &selected_xy_points, &coeff, 200,
                             static_cast<int>(minNumPoints_), 0.1f)) {
      coeffs[i] = coeff;

      xy_points[i].clear();
      xy_points[i] = selected_xy_points;
    } else {
      xy_points[i].clear();
    }
  }

  auto elapsed_2 = std::chrono::high_resolution_clock::now() - start;
  int64_t microseconds_2 =
      std::chrono::duration_cast<std::chrono::microseconds>(elapsed_2).count();
  time_2 += microseconds_2 - microseconds_1;

  // 3. Write values into lane_objects
  // c0s[i] is line i's lateral offset evaluated at x = 3; note GetPolyValue
  // takes (a, b, c, d) = (cubic, ..., constant), i.e. coeffs in reverse
  // storage order (coeffs(k) multiplies x^k).
  std::vector<float> c0s(lane_type_num_, 0);
  for (int i = 1; i < lane_type_num_; ++i) {
    if (xy_points[i].size() < minNumPoints_) continue;
    c0s[i] = GetPolyValue(
        static_cast<float>(coeffs[i](3)), static_cast<float>(coeffs[i](2)),
        static_cast<float>(coeffs[i](1)), static_cast<float>(coeffs[i](0)),
        static_cast<float>(3.0));
  }
  // [1] Determine lane spatial tag in special cases
  // Labels (spatialLUT): 4 = EGO_LEFT, 5 = EGO_CENTER, 6 = EGO_RIGHT,
  // 11 = CURB_LEFT, 12 = CURB_RIGHT.  If an ego boundary is missing but a
  // center line was detected, reinterpret the center line as that boundary.
  if (xy_points[4].size() < minNumPoints_ &&
      xy_points[5].size() >= minNumPoints_) {
    std::swap(xy_points[4], xy_points[5]);
    std::swap(uv_points[4], uv_points[5]);
    std::swap(coeffs[4], coeffs[5]);
    std::swap(c0s[4], c0s[5]);
  } else if (xy_points[6].size() < minNumPoints_ &&
             xy_points[5].size() >= minNumPoints_) {
    std::swap(xy_points[6], xy_points[5]);
    std::swap(uv_points[6], uv_points[5]);
    std::swap(coeffs[6], coeffs[5]);
    std::swap(c0s[6], c0s[5]);
  }

  // If EGO_LEFT is missing and no other left-side lane exists, promote the
  // left curb to EGO_LEFT.
  if (xy_points[4].size() < minNumPoints_ &&
      xy_points[11].size() >= minNumPoints_) {
    // Use left lane boundary as the right most missing left lane,
    bool use_boundary = true;
    for (int k = 3; k >= 1; --k) {
      if (xy_points[k].size() >= minNumPoints_) {
        use_boundary = false;
        break;
      }
    }
    if (use_boundary) {
      std::swap(xy_points[4], xy_points[11]);
      std::swap(uv_points[4], uv_points[11]);
      std::swap(coeffs[4], coeffs[11]);
      std::swap(c0s[4], c0s[11]);
    }
  }

  // Mirror of the above for EGO_RIGHT and the right curb.
  if (xy_points[6].size() < minNumPoints_ &&
      xy_points[12].size() >= minNumPoints_) {
    // Use right lane boundary as the left most missing right lane,
    bool use_boundary = true;
    for (int k = 7; k <= 9; ++k) {
      if (xy_points[k].size() >= minNumPoints_) {
        use_boundary = false;
        break;
      }
    }
    if (use_boundary) {
      std::swap(xy_points[6], xy_points[12]);
      std::swap(uv_points[6], uv_points[12]);
      std::swap(coeffs[6], coeffs[12]);
      std::swap(c0s[6], c0s[12]);
    }
  }

  for (int i = 1; i < lane_type_num_; ++i) {
    base::LaneLine cur_object;
    if (xy_points[i].size() < minNumPoints_) {
      continue;
    }

    // [2] Set spatial label
    cur_object.pos_type = spatialLUT[i];

    // [3] Determine which lines are valid according to the y value at x = 3
    // Left-side lines must be ordered outward by lateral offset; mirror
    // check for right-side lines.
    if ((i < 5 && c0s[i] < c0s[i + 1]) ||
        (i > 5 && i < 10 && c0s[i] > c0s[i - 1])) {
      continue;
    }
    if (i == 11 || i == 12) {
      // Curbs must lie outside the span of all regular lane lines.
      // NOTE(review): this sorts c0s[0..9] in place, so the i == 12
      // iteration observes values reordered by the i == 11 iteration —
      // verify this mutation is intentional.
      std::sort(c0s.begin(), c0s.begin() + 10);
      if ((c0s[i] > c0s[0] && i == 12) || (c0s[i] < c0s[9] && i == 11)) {
        continue;
      }
    }
    // [4] Write values
    cur_object.curve_car_coord.x_start =
        static_cast<float>(xy_points[i].front()(0));
    cur_object.curve_car_coord.x_end =
        static_cast<float>(xy_points[i].back()(0));
    // Cubic curve storage: a is the x^3 coefficient, d the constant term.
    cur_object.curve_car_coord.a = static_cast<float>(coeffs[i](3));
    cur_object.curve_car_coord.b = static_cast<float>(coeffs[i](2));
    cur_object.curve_car_coord.c = static_cast<float>(coeffs[i](1));
    cur_object.curve_car_coord.d = static_cast<float>(coeffs[i](0));
    // if (cur_object.curve_car_coord.x_end -
    //     cur_object.curve_car_coord.x_start < 5) continue;
    // cur_object.order = 2;
    cur_object.curve_car_coord_point_set.clear();
    for (size_t j = 0; j < xy_points[i].size(); ++j) {
      base::Point2DF p_j;
      p_j.x = static_cast<float>(xy_points[i][j](0));
      p_j.y = static_cast<float>(xy_points[i][j](1));
      cur_object.curve_car_coord_point_set.push_back(p_j);
    }

    cur_object.curve_image_point_set.clear();
    for (size_t j = 0; j < uv_points[i].size(); ++j) {
      base::Point2DF p_j;
      p_j.x = static_cast<float>(uv_points[i][j](0));
      p_j.y = static_cast<float>(uv_points[i][j](1));
      cur_object.curve_image_point_set.push_back(p_j);
    }

    // cur_object.confidence.push_back(1);
    cur_object.confidence = 1.0f;
    frame->lane_objects.push_back(cur_object);
  }

  // Special case riding on a lane:
  // 0: no center lane, 1: center lane as left, 2: center lane as right
  int has_center_ = 0;
  // NOTE(review): iterates by value, copying each LaneLine (including its
  // point sets) per iteration; const auto& would avoid the copies.
  for (auto lane_ : frame->lane_objects) {
    if (lane_.pos_type == base::LaneLinePositionType::EGO_CENTER) {
      // The sign of the constant term tells which side of the vehicle the
      // straddled center line falls on.
      if (lane_.curve_car_coord.d >= 0) {
        has_center_ = 1;
      } else if (lane_.curve_car_coord.d < 0) {
        has_center_ = 2;
      }
      break;
    }
  }
  // Change labels for all lanes from one side
  if (has_center_ == 1) {
    for (auto& lane_ : frame->lane_objects) {
      int spatial_id = spatialLUTind[lane_.pos_type];
      if (spatial_id >= 1 && spatial_id <= 5) {
        lane_.pos_type = spatialLUT[spatial_id - 1];
      }
    }
  } else if (has_center_ == 2) {
    for (auto& lane_ : frame->lane_objects) {
      int spatial_id = spatialLUTind[lane_.pos_type];
      if (spatial_id >= 5 && spatial_id <= 9) {
        lane_.pos_type = spatialLUT[spatial_id + 1];
      }
    }
  }

  auto elapsed = std::chrono::high_resolution_clock::now() - start;
  int64_t microseconds =
      std::chrono::duration_cast<std::chrono::microseconds>(elapsed).count();
  // AINFO << "Time for writing: " << microseconds - microseconds_2 << " us";
  time_3 += microseconds - microseconds_2;
  ++time_num;

  ADEBUG << "frame->lane_objects.size(): " << frame->lane_objects.size();

  ADEBUG << "Avg sampling time: " << time_1 / time_num
         << " Avg fitting time: " << time_2 / time_num
         << " Avg writing time: " << time_3 / time_num;
  ADEBUG << "darkSCNN lane_postprocess done!";
  return true;
}
394
// Produce laneline output in camera coordinates (optional)
    const LanePostprocessorOptions& options, CameraFrame* frame) {
  // Lift the 2D image lane points into 3D camera coordinates, then fit a
  // polynomial per lane line in that frame.
  ConvertImagePoint2Camera(frame);
  PolyFitCameraLaneline(frame);
  return true;
}
402
// Back-projects each lane line's 2D image points onto the ground plane,
// appending the resulting 3D points to curve_camera_point_set.
void DarkSCNNLanePostprocessor::ConvertImagePoint2Camera(CameraFrame* frame) {
  // Per-frame extrinsics from the calibration service.
  float pitch_angle = frame->calibration_service->QueryPitchAngle();
  // NOTE(review): the right-hand side of this initializer is elided in this
  // view; presumably it queries the calibration service for the camera's
  // height above ground — confirm against the full file.
  float camera_ground_height =
  const Eigen::Matrix3f& intrinsic_params = frame->camera_k_matrix;
  // NOTE(review): inverse() returns a temporary; binding a const& extends
  // its lifetime here, but a by-value Matrix3f would be clearer.
  const Eigen::Matrix3f& intrinsic_params_inverse = intrinsic_params.inverse();
  std::vector<base::LaneLine>& lane_objects = frame->lane_objects;
  int laneline_num = static_cast<int>(lane_objects.size());
  for (int line_index = 0; line_index < laneline_num; ++line_index) {
    std::vector<base::Point2DF>& image_point_set =
        lane_objects[line_index].curve_image_point_set;
    std::vector<base::Point3DF>& camera_point_set =
        lane_objects[line_index].curve_camera_point_set;
    // Convert every image point of this line to a 3D camera-frame point.
    for (int i = 0; i < static_cast<int>(image_point_set.size()); i++) {
      base::Point3DF camera_point;
      Eigen::Vector3d camera_point3d;
      const base::Point2DF& image_point = image_point_set[i];
      ImagePoint2Camera(image_point, pitch_angle, camera_ground_height,
                        intrinsic_params_inverse, &camera_point3d);
      // Narrow from double to the float-based output point type.
      camera_point.x = static_cast<float>(camera_point3d(0));
      camera_point.y = static_cast<float>(camera_point3d(1));
      camera_point.z = static_cast<float>(camera_point3d(2));
      camera_point_set.push_back(camera_point);
    }
  }
}
429
430// @brief: Fit camera lane line using polynomial
431void DarkSCNNLanePostprocessor::PolyFitCameraLaneline(CameraFrame* frame) {
432 std::vector<base::LaneLine>& lane_objects = frame->lane_objects;
433 int laneline_num = static_cast<int>(lane_objects.size());
434 for (int line_index = 0; line_index < laneline_num; ++line_index) {
435 const std::vector<base::Point3DF>& camera_point_set =
436 lane_objects[line_index].curve_camera_point_set;
437 // z: longitudinal direction
438 // x: latitudinal direction
439 float x_start = camera_point_set[0].z;
440 float x_end = 0.0f;
441 Eigen::Matrix<float, max_poly_order + 1, 1> camera_coeff;
442 EigenVector<Eigen::Matrix<float, 2, 1>> camera_pos_vec;
443 for (int i = 0; i < static_cast<int>(camera_point_set.size()); ++i) {
444 x_end = std::max(camera_point_set[i].z, x_end);
445 x_start = std::min(camera_point_set[i].z, x_start);
446 Eigen::Matrix<float, 2, 1> camera_pos;
447 camera_pos << camera_point_set[i].z, camera_point_set[i].x;
448 camera_pos_vec.push_back(camera_pos);
449 }
450
451 bool is_x_axis = true;
452 bool fit_flag =
453 PolyFit(camera_pos_vec, max_poly_order, &camera_coeff, is_x_axis);
454 if (!fit_flag) {
455 continue;
456 }
457 lane_objects[line_index].curve_camera_coord.a = camera_coeff(3, 0);
458 lane_objects[line_index].curve_camera_coord.b = camera_coeff(2, 0);
459 lane_objects[line_index].curve_camera_coord.c = camera_coeff(1, 0);
460 lane_objects[line_index].curve_camera_coord.d = camera_coeff(0, 0);
461 lane_objects[line_index].curve_camera_coord.x_start = x_start;
462 lane_objects[line_index].curve_camera_coord.x_end = x_end;
463 lane_objects[line_index].use_type = base::LaneLineUseType::REAL;
464 }
465}
466
  // Unique name identifying this postprocessor implementation.
  return "DarkSCNNLanePostprocessor";
}
470
472} // namespace camera
473} // namespace perception
474} // namespace apollo
#define REGISTER_LANE_POSTPROCESSOR(name)
bool Init(const LanePostprocessorInitOptions &options=LanePostprocessorInitOptions()) override
bool Process2D(const LanePostprocessorOptions &options, CameraFrame *frame) override
bool Process3D(const LanePostprocessorOptions &options, CameraFrame *frame) override
#define ADEBUG
Definition log.h:41
#define AERROR
Definition log.h:44
#define AINFO
Definition log.h:42
#define AWARN
Definition log.h:43
bool GetProtoFromFile(const std::string &file_name, google::protobuf::Message *message)
Parses the content of the file specified by the file_name as a representation of protobufs,...
Definition file.cc:132
Point3D< float > Point3DF
Definition point.h:98
@ ADJACENT_RIGHT
lane marking on the right side next to ego lane
@ ADJACENT_LEFT
lane marking on the left side next to ego lane
@ EGO_LEFT
left lane marking of the ego lane
@ EGO_CENTER
center lane marking of the ego lane, changing lane
@ EGO_RIGHT
right lane marking of the ego lane
std::map< base::LaneLinePositionType, int > spatialLUTind
bool PolyFit(const EigenVector< Eigen::Matrix< Dtype, 2, 1 > > &pos_vec, const int &order, Eigen::Matrix< Dtype, max_poly_order+1, 1 > *coeff, const bool &is_x_axis=true)
T GetPolyValue(T a, T b, T c, T d, T x)
bool ImagePoint2Camera(const base::Point2DF &img_point, float pitch_angle, float camera_ground_height, const Eigen::Matrix3f &intrinsic_params_inverse, Eigen::Vector3d *camera_point)
std::vector< base::LaneLinePositionType > spatialLUT({base::LaneLinePositionType::UNKNOWN, base::LaneLinePositionType::FOURTH_LEFT, base::LaneLinePositionType::THIRD_LEFT, base::LaneLinePositionType::ADJACENT_LEFT, base::LaneLinePositionType::EGO_LEFT, base::LaneLinePositionType::EGO_CENTER, base::LaneLinePositionType::EGO_RIGHT, base::LaneLinePositionType::ADJACENT_RIGHT, base::LaneLinePositionType::THIRD_RIGHT, base::LaneLinePositionType::FOURTH_RIGHT, base::LaneLinePositionType::OTHER, base::LaneLinePositionType::CURB_LEFT, base::LaneLinePositionType::CURB_RIGHT})
std::string GetConfigFile(const std::string &config_path, const std::string &config_file)
Definition util.cc:80
class register implement
Definition arena_queue.h:37
std::vector< Point2DF > curve_car_coord_point_set
Definition lane_struct.h:83
std::vector< Point2DF > curve_image_point_set
Definition lane_struct.h:79
LaneLineCubicCurve curve_car_coord
Definition lane_struct.h:73
LaneLinePositionType pos_type
Definition lane_struct.h:71
BaseCalibrationService * calibration_service
std::vector< base::LaneLine > lane_objects
std::shared_ptr< base::Blob< float > > lane_detected_blob