Apollo 10.0
自动驾驶开放平台
visualizer.cc
浏览该文件的文档.
/******************************************************************************
 * Copyright 2018 The Apollo Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *****************************************************************************/
17
18#include <algorithm>
19#include <fstream>
20#include <iostream>
21#include <limits>
22
23#include "absl/strings/str_cat.h"
24#include "cyber/common/file.h"
25#include "cyber/common/log.h"
29
30namespace apollo {
31namespace perception {
32namespace camera {
33
34std::vector<cv::Scalar> colorlistobj = {magenta_color, // last digit 0
35 purple_color, // last digit 1
36 teal_color, // last digit 2
37 violet_color, // last digit 3
38 pink_color, // last digit 4
39 beige_color, // last digit 5
40 ivory_color, // last digit 6
41 olive_color, // last digit 7
42 maroon_color, // last digit 8
43 lime_color}; // last digit 9
44
59
60Eigen::Matrix3d Camera2CarHomograph(
61 const Eigen::Matrix3d &intrinsic,
62 const Eigen::Matrix4d &extrinsic_camera2lidar,
63 const Eigen::Matrix4d &extrinsic_lidar2imu,
64 double pitch_adj) {
65 AINFO << "intrinsic parameter of camera: " << intrinsic;
66 AINFO << "extrinsic parameter of camera to lidar: " << extrinsic_camera2lidar;
67 AINFO << "extrinsic parameter of lidar to imu: " << extrinsic_lidar2imu;
68 // rotate 90 degree around z axis to make x point forward
69 Eigen::Matrix4d Rz;
70 Rz << 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1;
71 Eigen::Matrix4d extrinsic_camera2car;
72 extrinsic_camera2car = extrinsic_camera2lidar * extrinsic_lidar2imu * Rz;
73 // adjust pitch in camera coords
74 Eigen::Matrix4d Rx;
75 Rx << 1, 0, 0, 0, 0, cos(pitch_adj), -sin(pitch_adj), 0, 0, sin(pitch_adj),
76 cos(pitch_adj), 0, 0, 0, 0, 1;
77 extrinsic_camera2car = extrinsic_camera2car * Rx;
78 AINFO << "extrinsic parameter from camera to car: " << extrinsic_camera2car;
79
80 // compute the homography matrix, such that H [u, v, 1]' ~ [X_l, Y_l, 1]
81 Eigen::Matrix3d K = intrinsic;
82 Eigen::Matrix3d R = extrinsic_camera2car.block(0, 0, 3, 3);
83 Eigen::Vector3d T = extrinsic_camera2car.block(0, 3, 3, 1);
84 Eigen::Matrix3d H;
85
86 H.block(0, 0, 3, 2) = (K * R.transpose()).block(0, 0, 3, 2);
87 H.block(0, 2, 3, 1) = -K * R.transpose() * T;
88 return H.inverse();
89}
90
91bool Visualizer::Init(const std::vector<std::string> &camera_names,
92 TransformServer *tf_server) {
93 tf_server_ = tf_server;
94 if (tf_server_ == nullptr) {
95 AERROR << "tf_server is unavailable";
96 return false;
97 }
98 last_timestamp_ = 0;
99 small_h_ = static_cast<int>(image_height_ * scale_ratio_);
100 small_w_ = static_cast<int>(image_width_ * scale_ratio_);
101 world_h_ = 2 * small_h_;
102
103 for (size_t i = 0; i < camera_names.size(); ++i) {
104 camera_image_[camera_names[i]] =
105 cv::Mat(small_h_, small_w_, CV_8UC3, black_color);
106 }
107 world_image_ = cv::Mat(world_h_, wide_pixel_, CV_8UC3, black_color);
108 color_cipv_ = white_color;
109 virtual_lane_color_ = white_color;
110 return true;
111}
112
114 const std::vector<std::string> &camera_names,
115 const std::string &visual_camera,
116 const EigenMap<std::string, Eigen::Matrix3f> &intrinsic_map,
117 const EigenMap<std::string, Eigen::Matrix4d> &extrinsic_map,
118 const Eigen::Matrix4d &ex_lidar2imu, const double pitch_adj_degree,
119 const double yaw_adj_degree, const double roll_adj_degree,
120 const int image_height, const int image_width) {
121 image_height_ = image_height;
122 image_width_ = image_width;
123 intrinsic_map_ = intrinsic_map;
124 extrinsic_map_ = extrinsic_map;
125 ex_lidar2imu_ = ex_lidar2imu;
126 camera_names_ = camera_names;
127
128 last_timestamp_ = 0;
129 small_h_ = static_cast<int>(image_height_ * scale_ratio_);
130 small_w_ = static_cast<int>(image_width_ * scale_ratio_);
131 world_h_ = 2 * small_h_;
132
133 world_image_ = cv::Mat(world_h_, wide_pixel_, CV_8UC3, black_color);
134 color_cipv_ = white_color;
135 virtual_lane_color_ = green_color;
136
138
139 AINFO << "world_h_: " << world_h_;
140 AINFO << "wide_pixel_: " << wide_pixel_;
141 AINFO << "small_h_: " << small_h_;
142 AINFO << "small_w_: " << small_w_;
143
144 visual_camera_ = visual_camera;
145 // Set camera specific parameters
146 for (auto camera_name : camera_names) {
147 camera_image_[camera_name] =
148 cv::Mat(small_h_, small_w_, CV_8UC3, black_color);
149 camera_image_[camera_name] =
150 cv::Mat(small_h_, small_w_, CV_8UC3, black_color);
151
152 // 1. transform camera->lidar
153 ex_camera2lidar_[camera_name] = extrinsic_map_.at(camera_name);
154 AINFO << "ex_camera2lidar_ = " << extrinsic_map_.at(camera_name);
155
156 AINFO << "ex_lidar2imu_ =" << ex_lidar2imu_;
157
158 // 2. transform camera->lidar->imu
159 ex_camera2imu_ = ex_lidar2imu_ * ex_camera2lidar_[camera_name];
160 AINFO << "ex_camera2imu_ =" << ex_camera2imu_;
161
162 // intrinsic camera parameter
163 K_[camera_name] = intrinsic_map_.at(camera_name).cast<double>();
164 AINFO << "intrinsic K_ =" << K_[camera_name];
165 // homography_ground2image_.setIdentity();
166 // homography_image2ground_.setIdentity();
167
168 // rotate 90 degree around z axis to make x point forward
169 // double imu_height = 0; // imu height should be considred later
170 ex_imu2car_ << 0, 1, 0, 0, // cos(90), sin(90), 0,
171 -1, 0, 0, 0, // -sin(90), cos(90), 0,
172 0, 0, 1, 0, // 0, 0, 1
173 0, 0, 0, 1;
174
175 // 3. transform camera->lidar->imu->car
176 ex_camera2car_ = ex_imu2car_ * ex_camera2imu_;
177
178 AINFO << "ex_camera2car_ =" << ex_camera2car_;
179
180 // Adjust angle
181 adjust_angles(camera_name, pitch_adj_degree, yaw_adj_degree,
182 roll_adj_degree);
183
184 AINFO << "homography_image2ground_ ="
185 << homography_image2ground_[camera_name];
186 AINFO << "homography_ground2image_ ="
187 << homography_ground2image_[camera_name];
188
189 // compute FOV points
190 p_fov_1_.x = 0;
191 p_fov_1_.y = static_cast<int>(image_height_ * fov_cut_ratio_);
192
193 p_fov_2_.x = image_width_ - 1;
194 p_fov_2_.y = static_cast<int>(image_height_ * fov_cut_ratio_);
195
196 p_fov_3_.x = 0;
197 p_fov_3_.y = image_height_ - 1;
198
199 p_fov_4_.x = image_width_ - 1;
200 p_fov_4_.y = image_height_ - 1;
201
202 AINFO << "p_fov_1_ =" << p_fov_1_;
203 AINFO << "p_fov_2_ =" << p_fov_2_;
204 AINFO << "p_fov_3_ =" << p_fov_3_;
205 AINFO << "p_fov_4_ =" << p_fov_4_;
206
207 vp1_[camera_name](0) = 1024.0;
208 if (K_[camera_name](0, 0) >= 1.0) {
209 vp1_[camera_name](1) =
210 (image_width_ >> 1) * vp1_[camera_name](0) / K_[camera_name](0, 0);
211 } else {
212 AWARN << "Focal length (" << K_[camera_name](0, 0)
213 << " in pixel) is incorrect. "
214 << " Please check camera intrinsic parameters.";
215 vp1_[camera_name](1) = vp1_[camera_name](0) * 0.25;
216 }
217
218 vp2_[camera_name](0) = vp1_[camera_name](0);
219 vp2_[camera_name](1) = -vp1_[camera_name](1);
220
221 AINFO << "vanishing point 1:" << vp1_[camera_name];
222 AINFO << "vanishing point 2:" << vp2_[camera_name];
223
224 pitch_adj_degree_[camera_name] = pitch_adj_degree;
225 yaw_adj_degree_[camera_name] = yaw_adj_degree;
226 roll_adj_degree_[camera_name] = roll_adj_degree;
227 }
228
229 reset_key();
230
231 all_camera_recieved_ = 0x0;
232
233 return true;
234}
235
236bool Visualizer::adjust_angles(const std::string &camera_name,
237 const double pitch_adj_degree,
238 const double yaw_adj_degree,
239 const double roll_adj_degree) {
240 // Convert degree angles to radian angles
241 double pitch_adj_radian = pitch_adj_degree * degree_to_radian_factor_;
242 double yaw_adj_radian = yaw_adj_degree * degree_to_radian_factor_;
243 double roll_adj_radian = roll_adj_degree * degree_to_radian_factor_;
244
245 // We use "right handed ZYX" coordinate system for euler angles
246 // adjust pitch yaw roll in camera coords
247 // Remember that camera coordinate
248 // (Z)----> X
249 // |
250 // |
251 // V
252 // Y
253 Eigen::Matrix4d Rx; // pitch
254 Rx << 1, 0, 0, 0, 0, cos(pitch_adj_radian), -sin(pitch_adj_radian), 0, 0,
255 sin(pitch_adj_radian), cos(pitch_adj_radian), 0, 0, 0, 0, 1;
256 Eigen::Matrix4d Ry; // yaw
257 Ry << cos(yaw_adj_radian), 0, sin(yaw_adj_radian), 0, 0, 1, 0, 0,
258 -sin(yaw_adj_radian), 0, cos(yaw_adj_radian), 0, 0, 0, 0, 1;
259 Eigen::Matrix4d Rz; // roll
260 Rz << cos(roll_adj_radian), -sin(roll_adj_radian), 0, 0, sin(roll_adj_radian),
261 cos(roll_adj_radian), 0, 0, 0, 0, 1, 0, 0, 0, 0, 1;
262
263 adjusted_camera2car_ = ex_camera2car_ * Rz * Ry * Rx;
264 AWARN << "adjusted_camera2car_: " << adjusted_camera2car_;
265
266 // Get homography from projection matrix
267 // ====
268 // Version 1. Direct
269
270 // compute the homography matrix, such that H [u, v, 1]' ~ [X_l, Y_l, 1]
271 Eigen::Matrix3d R = adjusted_camera2car_.block(0, 0, 3, 3);
272 Eigen::Vector3d T = adjusted_camera2car_.block(0, 3, 3, 1);
273 Eigen::Matrix3d H;
274 Eigen::Matrix3d H_inv;
275
276 H.block(0, 0, 3, 2) = (K_[camera_name] * R.transpose()).block(0, 0, 3, 2);
277 H.block(0, 2, 3, 1) = -K_[camera_name] * R.transpose() * T;
278 H_inv = H.inverse();
279 homography_ground2image_[camera_name] = H;
280 homography_image2ground_[camera_name] = H_inv;
281
282 // Version 2. Conceptual
283 // ex_car2camera_ = adjusted_camera2car_.inverse();
284
285 // // compute the homography matrix, such that H [u, v, 1]' ~ [X_l, Y_l, 1]
286 // Eigen::Matrix3d R = ex_car2camera_.block(0, 0, 3, 3);
287 // Eigen::Vector3d T = ex_car2camera_.block(0, 3, 3, 1);
288
289 // projection_matrix_.setIdentity();
290 // projection_matrix_.block(0, 0, 3, 3) = K_ * R;
291 // projection_matrix_.block(0, 3, 3, 1) = K_ * T;
292
293 // homography_ground2image_.block(0, 0, 3, 2)
294 // = projection_matrix_.block(0, 0, 3, 2);
295 // homography_ground2image_.block(0, 2, 3, 1)
296 // = projection_matrix_.block(0, 3, 3, 1);
297
298 // AINFO << "homography_ground2image_: ";
299 // AINFO << homography_ground2image_;
300
301 // homography_image2ground_ = homography_ground2image_.inverse();
302
303 return true;
304}
305
306bool Visualizer::SetDirectory(const std::string &path) {
308 return false;
309 }
310 const std::string command = "rm " + path + "/*.jpg";
311 int ret = system(command.c_str());
312 path_ = path;
313 return ret == 0;
314}
315
317 switch (type) {
319 return "UNKN";
321 return "U_MO";
323 return "UNMO";
325 return "PED";
327 return "CYC";
329 return "VEH";
330 default:
331 break;
332 }
333 return "WRNG";
334}
335
337 switch (type) {
339 return "UNKN";
341 return "U_MO";
343 return "UNMO";
345 return "CAR";
347 return "VAN";
349 return "TRUC";
351 return "BUS";
353 return "CYC";
355 return "MCYC";
357 return "TCYC";
359 return "PED";
361 return "CONE";
362 default:
363 break;
364 }
365 return "WRNG";
366}
367
369 use_class_color_ = true;
370 capture_screen_ = false;
371 capture_video_ = false;
372 show_camera_box2d_ = true;
373 show_camera_box3d_ = true;
374 show_camera_bdv_ = true;
375 show_virtual_egolane_ = true;
376 show_radar_pc_ = true;
377 show_fusion_ = false;
378 show_associate_color_ = false;
379 show_type_id_label_ = true;
380 show_verbose_ = false;
381 show_lane_count_ = 1;
382 show_trajectory_ = true;
383 show_vp_grid_ = true; // show vanishing point and ground plane grid
384 draw_lane_objects_ = true;
385 show_box_ = true;
386 show_velocity_ = false;
387 show_polygon_ = true;
388 show_text_ = false;
389 show_help_text_ = false;
390 manual_calibration_mode_ = false;
391 show_homography_object_ = false;
392 return true;
393}
394
395double Visualizer::regularize_angle(const double radian_angle) {
396 if (radian_angle <= -M_PI) {
397 return radian_angle + M_PI * 2.0;
398 } else if (radian_angle > M_PI) {
399 return radian_angle - M_PI * 2.0;
400 }
401 return radian_angle;
402}
403
404// ZYX Euler angles to quaternion
405bool Visualizer::euler_to_quaternion(Eigen::Vector4d *quaternion,
406 const double pitch_radian,
407 const double yaw_radian,
408 const double roll_radian) {
409 // // Option 1. ZYX Euler to quortonian
410 // double cy = cos(yaw_radian * 0.5);
411 // double sy = sin(yaw_radian * 0.5);
412 // double cp = cos(pitch_radian * 0.5);
413 // double sp = sin(pitch_radian * 0.5);
414 // double cr = cos(roll_radian * 0.5);
415 // double sr = sin(roll_radian * 0.5);
416
417 // quaternion(0) = sy * cp * cr - cy * sp * sr; // Q.x
418 // quaternion(1) = cy * sp * cr + sy * cp * sr; // Q.y
419 // quaternion(2) = cy * cp * sr - sy * sp * cr; // Q.z
420 // quaternion(3) = cy * cp * cr + sy * sp * sr; // Q.w
421
422 // AINFO << "fast quaternion(x, y, z, w): ("
423 // << quaternion(0) << ", "
424 // << quaternion(1) << ", "
425 // << quaternion(2) << ", "
426 // << quaternion(3) << ")";
427
428 // Option 2. Rotation matrix to quaternion
429 Eigen::Matrix3d Rx; // pitch
430 Rx << 1, 0, 0, 0, cos(pitch_radian), -sin(pitch_radian), 0, sin(pitch_radian),
431 cos(pitch_radian);
432 Eigen::Matrix3d Ry; // roll
433 Ry << cos(roll_radian), 0, sin(roll_radian), 0, 1, 0, -sin(roll_radian), 0,
434 cos(roll_radian);
435 Eigen::Matrix3d Rz; // yaw
436 Rz << cos(yaw_radian), -sin(yaw_radian), 0, sin(yaw_radian), cos(yaw_radian),
437 0, 0, 0, 1;
438 Eigen::Matrix3d R;
439 R = Rz * Ry * Rx;
440 AINFO << "Rotation matrix R: " << R;
441 double qw = 0.5 * sqrt(1.0 + R(0, 0) + R(1, 1) + R(2, 2));
442 if (fabs(qw) > 1.0e-6) {
443 (*quaternion)(0) = 0.25 * (R(2, 1) - R(1, 2)) / qw; // Q.x
444 (*quaternion)(1) = 0.25 * (R(0, 2) - R(2, 0)) / qw; // Q.y
445 (*quaternion)(2) = 0.25 * (R(1, 0) - R(0, 1)) / qw; // Q.z
446 (*quaternion)(3) = qw; // Q.w
447 AINFO << "quaternion(x, y, z, w): (" << (*quaternion)(0) << ", "
448 << (*quaternion)(1) << ", " << (*quaternion)(2) << ", "
449 << (*quaternion)(3) << ")";
450 } else {
451 double qx = 0.5 * sqrt(1.0 + R(0, 0) - R(1, 1) - R(2, 2));
452 if (fabs(qx) < 1.0e-6) {
453 AWARN << "quaternion is degenerate qw: " << qw << "qx: " << qx;
454 return false;
455 }
456 (*quaternion)(0) = qx; // Q.x
457 (*quaternion)(1) = 0.25 * (R(0, 1) + R(1, 0)) / qx; // Q.y
458 (*quaternion)(2) = 0.25 * (R(0, 2) + R(2, 0)) / qx; // Q.z
459 (*quaternion)(3) = 0.25 * (R(2, 1) - R(1, 2)) / qx; // Q.w
460 AINFO << "second quaternion(x, y, z, w): (" << (*quaternion)(0) << ", "
461 << (*quaternion)(1) << ", " << (*quaternion)(2) << ", "
462 << (*quaternion)(3) << ")";
463 }
464 return true;
465}
466
467bool Visualizer::copy_backup_file(const std::string &filename) {
468 static int index = 0;
469 // int last_index = 0;
470 // std::string files = filename + "*";
471 // for (const auto &file : std::filesysfs::directory_iterator(files)) {
472 // AINFO << file.path() << std::endl;
473 // // Extract index
474 // last_index = get_index(file.path());
475 // }
476 // index = last_index;
477
478 ++index;
479 std::string yaml_bak_file = absl::StrCat(filename, "__", index);
480 AINFO << "yaml_backup_file: " << yaml_bak_file;
481
482 if (!cyber::common::Copy(filename, yaml_bak_file)) {
483 AERROR << "Cannot backup the file: " << filename;
484 } else {
485 AINFO << "Backup file: " << filename << " saved successfully.";
486 }
487
488 return true;
489}
490
491bool Visualizer::save_extrinsic_in_yaml(const std::string &camera_name,
492 const Eigen::Matrix4d &extrinsic,
493 const Eigen::Vector4d &quaternion,
494 const double pitch_radian,
495 const double yaw_radian,
496 const double roll_radian) {
497 // Todo(zero): Need change to GetCommonConfigFile
498 std::string yaml_file =
499 FLAGS_obs_sensor_intrinsic_path + "/" + camera_name + "_extrinsics.yaml";
500
501 copy_backup_file(yaml_file);
502
503 AINFO << "extrinsic: " << extrinsic;
504
505 // Save data
506 // Option 1. Save using streaming
507 std::ofstream y_file(yaml_file);
508
509 y_file << "header:\n";
510 y_file << " seq: 0\n";
511 y_file << " stamp:\n";
512 y_file << " secs: 0\n";
513 y_file << " nsecs: 0\n";
514 y_file << " frame_id: velodyne128\n";
515 y_file << "child_frame_id: %s\n", camera_name.c_str();
516 y_file << "transform:\n";
517 y_file << " translation:\n";
518 y_file << " x: " << extrinsic(0, 3) << "\n";
519 y_file << " y: " << extrinsic(1, 3) << "\n";
520 y_file << " z: " << extrinsic(2, 3) << "\n";
521 y_file << " rotation:\n";
522 y_file << " x: " << quaternion(0) << "\n";
523 y_file << " y: " << quaternion(1) << "\n";
524 y_file << " z: " << quaternion(2) << "\n";
525 y_file << " w: " << quaternion(3) << "\n";
526 y_file << " euler_angles_degree:\n";
527 y_file << " pitch: " << pitch_radian * radian_to_degree_factor_ << "\n";
528 y_file << " yaw: " << yaw_radian * radian_to_degree_factor_ << "\n";
529 y_file << " roll: " << roll_radian * radian_to_degree_factor_ << "\n";
530 // Option 2. Use YAML write function.
531 // Alert! Couldn't find a library to save YAML node.
532 // YAML::Node node = YAML::LoadFile(yaml_file);
533
534 // try{
535 // if (node.IsNull()) {
536 // AINFO << "Load " << yaml_file << " failed! please check!";
537 // return false;
538 // }
539 // // Replace rotation only
540 // node["transform"]["rotation"]["x"].as<double>() = quaternion(0);
541 // node["transform"]["rotation"]["y"].as<double>() = quaternion(1);
542 // node["transform"]["rotation"]["z"].as<double>() = quaternion(2);
543 // node["transform"]["rotation"]["w"].as<double>() = quaternion(3);
544 //
545 // node.SaveFile(yaml_file);
546 // if (node.IsNull()) {
547 // AINFO << "Save " << yaml_file << " failed! please check!";
548 // return false;
549 // }
550 // } catch (YAML::InvalidNode &in) {
551 // AERROR << "load/save camera extrisic file " << yaml_file
552 // << " with error, YAML::InvalidNode exception";
553 // return false;
554 // } catch (YAML::TypedBadConversion<double> &bc) {
555 // AERROR << "load camera extrisic file " << yaml_file
556 // << " with error, YAML::TypedBadConversion exception";
557 // return false;
558 // } catch (YAML::Exception &e) {
559 // AERROR << "load camera extrisic file " << yaml_file
560 // << " with error, YAML exception:" << e.what();
561 // return false;
562 // }
563
564 return true;
565}
566
568 const std::string &camera_name, const double pitch_adj_degree,
569 const double yaw_adj_degree, const double roll_adj_degree) {
570 // Convert degree angles to radian angles
571 double pitch_adj_radian = pitch_adj_degree * degree_to_radian_factor_;
572 double yaw_adj_radian = yaw_adj_degree * degree_to_radian_factor_;
573 double roll_adj_radian = roll_adj_degree * degree_to_radian_factor_;
574
575 // Get current angle from extrinsics
576 // ex_camera2lidar_ = extrinsic_map_.at(camera_name);
577 Eigen::Matrix3d R = ex_camera2lidar_[camera_name].block(0, 0, 3, 3);
578
579 double old_pitch_radian = regularize_angle(atan2(R(2, 1), R(2, 2)));
580 double old_roll_radian = regularize_angle(
581 atan2(-R(2, 0), sqrt(R(2, 1) * R(2, 1) + R(2, 2) * R(2, 2))));
582 double old_yaw_radian = regularize_angle(atan2(R(1, 0), R(0, 0)));
583 AINFO << "Old pitch: " << old_pitch_radian * radian_to_degree_factor_;
584 AINFO << "Old yaw: " << old_yaw_radian * radian_to_degree_factor_;
585 AINFO << "Old roll: " << old_roll_radian * radian_to_degree_factor_;
586 AINFO << "Adjusted pitch: " << pitch_adj_degree;
587 AINFO << "Adjusted yaw: " << yaw_adj_degree;
588 AINFO << "Adjusted roll: " << roll_adj_degree;
589
590 // Convert value here since the coordinate system is different
591
592 // Apply changed angles to each angle
593 double new_pitch_radian =
594 regularize_angle(old_pitch_radian + pitch_adj_radian);
595 double new_yaw_radian = regularize_angle(old_yaw_radian - yaw_adj_radian);
596 double new_roll_radian = regularize_angle(old_roll_radian + roll_adj_radian);
597
598 AINFO << "New pitch: " << new_pitch_radian * radian_to_degree_factor_;
599 AINFO << "New yaw: " << new_yaw_radian * radian_to_degree_factor_;
600 AINFO << "New roll: " << new_roll_radian * radian_to_degree_factor_;
601
602 Eigen::Vector4d quaternion;
603 euler_to_quaternion(&quaternion, new_pitch_radian, new_yaw_radian,
604 new_roll_radian);
605 AINFO << "Quaternion X: " << quaternion(0) << ", Y: " << quaternion(1)
606 << ", Z: " << quaternion(2) << ", W: " << quaternion(3);
607 // Save the file
608 // Yaw and Roll are swapped.
609 save_extrinsic_in_yaml(camera_name, ex_camera2lidar_[camera_name], quaternion,
610 new_pitch_radian, new_yaw_radian, new_roll_radian);
611
612 return true;
613}
614
615bool Visualizer::key_handler(const std::string &camera_name, const int key) {
616 AINFO << "Pressed Key: " << key;
617 if (key <= 0) {
618 return false;
619 }
620 switch (key) {
621 case KEY_0:
622 show_associate_color_ = !show_associate_color_;
623 break;
624 case KEY_2:
625 show_camera_box2d_ = !show_camera_box2d_;
626 break;
627 case KEY_3:
628 show_camera_box3d_ = !show_camera_box3d_;
629 break;
630 case KEY_UPPER_A:
631 case KEY_LOWER_A:
632 capture_video_ = !capture_video_;
633 break;
634 case KEY_UPPER_B:
635 case KEY_LOWER_B:
636 show_box_ = (show_box_ + 1) % 2;
637 break;
638 case KEY_UPPER_C:
639 case KEY_LOWER_C:
640 use_class_color_ = !use_class_color_;
641 break;
642 case KEY_UPPER_D:
643 case KEY_LOWER_D:
644 show_radar_pc_ = !show_radar_pc_;
645 break;
646 case KEY_UPPER_E:
647 case KEY_LOWER_E:
648 draw_lane_objects_ = !draw_lane_objects_;
649 break;
650 case KEY_UPPER_F:
651 case KEY_LOWER_F:
652 show_fusion_ = !show_fusion_;
653 break;
654 case KEY_UPPER_G:
655 case KEY_LOWER_G:
656 show_vp_grid_ = !show_vp_grid_;
657 break;
658 case KEY_UPPER_H:
659 case KEY_LOWER_H:
660 show_help_text_ = !show_help_text_;
661 break;
662 case KEY_UPPER_I:
663 case KEY_LOWER_I:
664 show_type_id_label_ = !show_type_id_label_;
665 break;
666 case KEY_UPPER_L:
667 case KEY_LOWER_L:
668 show_verbose_ = !show_verbose_;
669 break;
670 case KEY_UPPER_O:
671 case KEY_LOWER_O:
672 show_camera_bdv_ = !show_camera_bdv_;
673 break;
674 case KEY_UPPER_Q:
675 case KEY_LOWER_Q:
676 show_lane_count_ = (show_lane_count_ + 1) % 3;
677 break;
678 case KEY_UPPER_R:
679 case KEY_LOWER_R:
680 reset_key();
681 break;
682 case KEY_UPPER_S:
683 case KEY_LOWER_S:
684 capture_screen_ = true;
685 break;
686 case KEY_UPPER_T:
687 case KEY_LOWER_T:
688 show_trajectory_ = !show_trajectory_;
689 break;
690 case KEY_UPPER_V:
691 case KEY_LOWER_V:
692 show_velocity_ = (show_velocity_ + 1) % 2;
693 break;
695 case KEY_UP:
696 if (manual_calibration_mode_ &&
697 pitch_adj_degree_[camera_name] + 0.05 <= max_pitch_degree_) {
698 pitch_adj_degree_[camera_name] -= 0.05;
699 } else {
700 visual_camera_ = camera_names_[0];
701 }
702 AINFO << "Current pitch: " << pitch_adj_degree_[camera_name];
703 break;
705 case KEY_DOWN:
706 if (manual_calibration_mode_ &&
707 pitch_adj_degree_[camera_name] - 0.05 >= min_pitch_degree_) {
708 pitch_adj_degree_[camera_name] += 0.05;
709 } else {
710 visual_camera_ = camera_names_[1];
711 }
712 AINFO << "Current pitch: " << pitch_adj_degree_[camera_name];
713 break;
715 case KEY_RIGHT:
716 if (manual_calibration_mode_ &&
717 yaw_adj_degree_[camera_name] + 0.05 <= max_yaw_degree_) {
718 yaw_adj_degree_[camera_name] -= 0.05;
719 }
720 AINFO << "Current yaw: " << yaw_adj_degree_[camera_name];
721 break;
723 case KEY_LEFT:
724 if (manual_calibration_mode_ &&
725 yaw_adj_degree_[camera_name] - 0.05 >= min_yaw_degree_) {
726 yaw_adj_degree_[camera_name] += 0.05;
727 }
728 AINFO << "Current yaw: " << yaw_adj_degree_[camera_name];
729 break;
731 case KEY_SHIFT_RIGHT:
732 if (manual_calibration_mode_ &&
733 roll_adj_degree_[camera_name] + 0.05 <= max_roll_degree_) {
734 roll_adj_degree_[camera_name] -= 0.05;
735 }
736 AINFO << "Current roll: " << roll_adj_degree_[camera_name];
737 break;
739 case KEY_SHIFT_LEFT:
740 if (manual_calibration_mode_ &&
741 roll_adj_degree_[camera_name] - 0.05 >= min_roll_degree_) {
742 roll_adj_degree_[camera_name] += 0.05;
743 }
744 AINFO << "Current roll: " << roll_adj_degree_[camera_name];
745 break;
747 case KEY_CTRL_S:
748 if (manual_calibration_mode_) {
750 visual_camera_, pitch_adj_degree_[camera_name],
751 yaw_adj_degree_[camera_name], roll_adj_degree_[camera_name]);
752 AINFO << "Saved calibration parameters(pyr): ("
753 << pitch_adj_degree_[camera_name] << ", "
754 << yaw_adj_degree_[camera_name] << ", "
755 << roll_adj_degree_[camera_name] << ")";
756 }
757 break;
759 case KEY_ALT_C:
760 manual_calibration_mode_ = !manual_calibration_mode_;
761
762 default:
763 break;
764 }
765
766 help_str_ = "H: show help";
767 if (show_help_text_) {
768 absl::StrAppend(&help_str_, " (ON)\nR: reset matrxi\nB: show box");
769 if (show_box_) {
770 absl::StrAppend(&help_str_, "(ON)");
771 }
772 absl::StrAppend(&help_str_, "\nV: show velocity");
773 if (show_velocity_) {
774 absl::StrAppend(&help_str_, " (ON)");
775 }
776 absl::StrAppend(&help_str_, "\nC: use class color");
777 if (use_class_color_) {
778 absl::StrAppend(&help_str_, " (ON)");
779 }
780 absl::StrAppend(&help_str_, "\nS: capture screen", "\nA: capture video",
781 "\nI: show type id label");
782 if (show_type_id_label_) {
783 absl::StrAppend(&help_str_, " (ON)");
784 }
785 absl::StrAppend(&help_str_, "\nQ: show lane");
786 if (show_lane_count_ > 0) {
787 absl::StrAppend(&help_str_, " (ON)");
788 }
789 absl::StrAppend(&help_str_, "\nE: draw lane objects");
790 if (draw_lane_objects_) {
791 absl::StrAppend(&help_str_, " (ON)");
792 }
793 absl::StrAppend(&help_str_, "\nF: show fusion");
794 if (show_fusion_) {
795 absl::StrAppend(&help_str_, " (ON)");
796 }
797 absl::StrAppend(&help_str_, "\nD: show radar pc");
798 if (show_radar_pc_) {
799 absl::StrAppend(&help_str_, " (ON)");
800 }
801 absl::StrAppend(&help_str_, "\nT: show trajectory");
802 if (show_trajectory_) {
803 absl::StrAppend(&help_str_, " (ON)");
804 }
805 absl::StrAppend(&help_str_, "\nO: show camera bdv");
806 if (show_camera_bdv_) {
807 absl::StrAppend(&help_str_, " (ON)");
808 }
809 absl::StrAppend(&help_str_, "\n2: show camera box2d");
810 if (show_camera_box2d_) {
811 absl::StrAppend(&help_str_, " (ON)");
812 }
813 absl::StrAppend(&help_str_, "\n3: show camera box3d");
814 if (show_camera_box3d_) {
815 absl::StrAppend(&help_str_, " (ON)");
816 }
817 absl::StrAppend(&help_str_, "\n0: show associate color");
818 if (show_associate_color_) {
819 absl::StrAppend(&help_str_, " (ON)");
820 }
821 absl::StrAppend(&help_str_,
822 "\nG: show vanishing point and ground plane grid");
823 if (show_vp_grid_) {
824 absl::StrAppend(&help_str_, " (ON)");
825 }
826 absl::StrAppend(&help_str_, "\nT: show verbose");
827 if (show_verbose_) {
828 absl::StrAppend(&help_str_, " (ON)");
829 }
830 }
831 switch (key) {
838 case KEY_UP:
839 case KEY_LEFT:
840 case KEY_RIGHT:
841 case KEY_DOWN:
842 case KEY_SHIFT_LEFT:
843 case KEY_SHIFT_RIGHT:
844 if (manual_calibration_mode_) {
845 adjust_angles(camera_name, pitch_adj_degree_[camera_name],
846 yaw_adj_degree_[camera_name],
847 roll_adj_degree_[camera_name]);
848 if (show_help_text_) {
849 absl::StrAppend(&help_str_,
850 "\nAdjusted Pitch: ", pitch_adj_degree_[camera_name]);
851 absl::StrAppend(&help_str_,
852 "\nAdjusted Yaw: ", yaw_adj_degree_[camera_name]);
853 absl::StrAppend(&help_str_,
854 "\nAdjusted Roll: ", roll_adj_degree_[camera_name]);
855 }
856 }
857 }
858 return true;
859}
860
861// Draw trajectory of each object
863 const base::MotionBufferPtr motion_buffer) {
864 if (object->drop_num == 0 || motion_buffer == nullptr ||
865 motion_buffer->size() == 0) {
866 return false;
867 }
868 std::size_t count = std::min(object->drop_num, motion_buffer->size());
869
870 Eigen::Vector4f start_point;
871 start_point << static_cast<float>(object->drops[0](0)),
872 static_cast<float>(object->drops[0](1)), 0, 1;
873 start_point = (*motion_buffer)[0].motion * start_point;
874 cv::circle(world_image_, world_point_to_bigimg(start_point), 3, gray_color);
875
876 for (size_t i = 1; i < count; i++) {
877 Eigen::Vector4f end_point;
878 end_point << static_cast<float>(object->drops[i](0)),
879 static_cast<float>(object->drops[i](1)), 0, 1;
880 cv::circle(world_image_, world_point_to_bigimg(end_point), 3, gray_color);
881 cv::line(world_image_, world_point_to_bigimg(start_point),
883 trajectory_line_thickness_);
884 start_point = end_point;
885 }
886 return true;
887}
888
889void Visualizer::Draw2Dand3D(const cv::Mat &img, const CameraFrame &frame) {
890 cv::Mat image = img.clone();
891 Eigen::Affine3d pose;
892 if (!tf_server_->QueryPos(frame.timestamp, &pose)) {
893 pose.setIdentity();
894 }
895 Eigen::Affine3d lidar2novatel;
896 if (!tf_server_->QueryTransform("velodyne128", "novatel", &lidar2novatel)) {
897 AWARN << "Failed to query transform from lidar to ground.";
898 return;
899 }
900 Eigen::Affine3d lidar2world = pose * lidar2novatel;
901 Eigen::Affine3d world2lidar = lidar2world.inverse();
902 for (const auto &object : frame.tracked_objects) {
903 base::RectF rect(object->camera_supplement.box);
904 cv::Rect r(static_cast<int>(rect.x), static_cast<int>(rect.y),
905 static_cast<int>(rect.width), static_cast<int>(rect.height));
906 cv::Scalar color = colorlistobj[object->track_id % colorlistobj.size()];
907
908 cv::rectangle(image, r, color, 2);
909 cv::putText(image, std::to_string(object->track_id),
910 cv::Point(static_cast<int>(rect.x), static_cast<int>(rect.y)),
911 cv::FONT_HERSHEY_DUPLEX, 1, red_color, 2);
912 Eigen::Vector3d theta;
913 theta << cos(object->theta), sin(object->theta), 0;
914 theta = world2lidar.linear() * theta;
915 float yaw = static_cast<float>(atan2(theta(1), theta(0)));
916 Eigen::Matrix2d rotate;
917 rotate << cos(yaw), -sin(yaw), sin(yaw), cos(yaw);
918
919 Eigen::Vector3d pos;
920 pos << object->center(0), object->center(1), object->center(2);
921 pos = world2lidar * pos;
922 Eigen::Vector2d pos_2d;
923 pos_2d << pos(0), pos(1);
924 Eigen::Vector3d v;
925 v << object->velocity(0), object->velocity(1), object->velocity(2);
926 v = world2lidar.linear() * v;
927 Eigen::Vector2d v_2d;
928 v_2d << v(0) + pos_2d(0), v(1) + pos_2d(1);
929 Eigen::Vector2d p1;
930 p1 << object->size(0) * 0.5, object->size(1) * 0.5;
931 p1 = rotate * p1 + pos_2d;
932 Eigen::Vector2d p2;
933 p2 << -object->size(0) * 0.5, object->size(1) * 0.5;
934 p2 = rotate * p2 + pos_2d;
935 Eigen::Vector2d p3;
936 p3 << -object->size(0) * 0.5, -object->size(1) * 0.5;
937 p3 = rotate * p3 + pos_2d;
938 Eigen::Vector2d p4;
939 p4 << object->size(0) * 0.5, -object->size(1) * 0.5;
940 p4 = rotate * p4 + pos_2d;
941
942 if (object->b_cipv) {
943 cv::line(world_image_, world_point_to_bigimg(p1),
944 world_point_to_bigimg(p2), color_cipv_, cipv_line_thickness_);
945 cv::line(world_image_, world_point_to_bigimg(p2),
946 world_point_to_bigimg(p3), color_cipv_, cipv_line_thickness_);
947 cv::line(world_image_, world_point_to_bigimg(p3),
948 world_point_to_bigimg(p4), color_cipv_, cipv_line_thickness_);
949 cv::line(world_image_, world_point_to_bigimg(p4),
950 world_point_to_bigimg(p1), color_cipv_, cipv_line_thickness_);
951 // cv::line(world_image_, world_point_to_bigimg(pos_2d),
952 // world_point_to_bigimg(v_2d), color_cipv_,
953 // cipv_line_thickness_);
954 }
955 cv::line(world_image_, world_point_to_bigimg(p1), world_point_to_bigimg(p2),
956 color_cipv_, line_thickness_);
957 cv::line(world_image_, world_point_to_bigimg(p2), world_point_to_bigimg(p3),
958 color_cipv_, line_thickness_);
959 cv::line(world_image_, world_point_to_bigimg(p3), world_point_to_bigimg(p4),
960 color_cipv_, line_thickness_);
961 cv::line(world_image_, world_point_to_bigimg(p4), world_point_to_bigimg(p1),
962 color_cipv_, line_thickness_);
963 // cv::line(world_image_, world_point_to_bigimg(pos_2d),
964 // world_point_to_bigimg(v_2d), color_cipv_, line_thickness_);
965 }
966 last_timestamp_ = frame.timestamp;
967 cv::resize(image, camera_image_[frame.data_provider->sensor_name()],
968 cv::Size(small_w_, small_h_));
969}
970
// Renders 2-D/3-D detection results for one camera frame. Roughly every
// 50 ms (timestamp gap > 0.02 s) it also composes the big visualization
// panel (two stacked small camera views on the left, top-down world view
// on the right), optionally writes it to disk and/or shows it on screen,
// then resets the world view for the next cycle. Finally it annotates the
// full-resolution image and delegates per-object drawing to Draw2Dand3D().
971 void Visualizer::ShowResult(const cv::Mat &img, const CameraFrame &frame) {
972 cv::Mat image = img.clone();
973 std::string camera_name = frame.data_provider->sensor_name();
974
// Throttle panel refresh: only rebuild when enough time has elapsed.
975 if (frame.timestamp - last_timestamp_ > 0.02) {
976 draw_selected_image_boundary(small_w_, small_h_,
977 &(camera_image_[visual_camera_]));
978 cv::Mat bigimg(world_h_, small_w_ + wide_pixel_, CV_8UC3);
// NOTE(review): the same camera image is copied into both the top and the
// bottom panel slots here, whereas ShowResult_all_info_single_camera uses
// camera_names_[0] and camera_names_[1] — confirm this is intentional.
979 camera_image_[camera_name].copyTo(
980 bigimg(cv::Rect(0, 0, small_w_, small_h_)));
981 camera_image_[camera_name].copyTo(
982 bigimg(cv::Rect(0, small_h_, small_w_, small_h_)));
983 world_image_.copyTo(bigimg(cv::Rect(small_w_, 0, wide_pixel_, world_h_)));
// Optionally dump the composed panel as <path_>/<sequence>.jpg.
984 if (write_out_img_) {
985 char path[1000];
986 static int k = 0;
987 snprintf(path, sizeof(path), "%s/%06d.jpg", path_.c_str(), k++);
988 AINFO << "A snapshot of visualizer saved at " << path;
989 cv::imwrite(path, bigimg);
990 }
991
992 if (cv_imshow_img_) {
993 cv::namedWindow("Apollo Visualizer", CV_WINDOW_NORMAL);
994 cv::setWindowProperty("Apollo Visualizer", CV_WND_PROP_FULLSCREEN,
995 CV_WINDOW_FULLSCREEN);
996 cv::imshow("Apollo Visualizer", bigimg);
// Poll the keyboard for ~30 ms; key_handler applies view/calibration keys.
997 int key = cvWaitKey(30);
998 key_handler(camera_name, key);
999 }
// Clear the top-down world view for the next accumulation cycle.
// NOTE(review): original source line 1001 is missing from this extraction
// (a stripped call, possibly draw_range_circle()) — confirm upstream.
1000 world_image_ = cv::Mat(world_h_, wide_pixel_, CV_8UC3, black_color);
1002 }
1003
// Annotate the full-resolution image with camera name and frame id, then
// draw per-object overlays.
1004 cv::putText(image, camera_name, cv::Point(10, 50), cv::FONT_HERSHEY_DUPLEX,
1005 1.3, red_color, 3);
1006 cv::putText(image, absl::StrCat("frame #: ", frame.frame_id),
1007 cv::Point(10, 100), cv::FONT_HERSHEY_DUPLEX, 1.3, red_color, 3);
1008 Draw2Dand3D(image, frame);
1009 }
1010
// Draws the full per-camera debug overlay: optional vanishing-point grid,
// lane lines (raw inlier points or sampled fitted cubic curves), every
// tracked object as a 2-D box / projected 3-D box on the image plus a
// footprint rectangle on the top-down world view, optional trajectories,
// and optional virtual ego lanes derived from ego motion.
// NOTE(review): this doxygen extraction dropped several original source
// lines — the function header (line 1011), the declarations of the 3-D box
// corner buffers `p` and `proj` (lines 1171-1172), and the opening of the
// MakeVirtualEgoLaneFromYawRate call (line 1313). Confirm against the
// original visualizer.cc before editing.
1012 const std::string &camera_name, const cv::Mat &img,
1013 const CameraFrame &frame, const Eigen::Matrix3d &intrinsic,
1014 const Eigen::Matrix4d &extrinsic, const Eigen::Affine3d &world2camera,
1015 const base::MotionBufferPtr motion_buffer) {
1016 cv::Mat image_2D = img.clone(); // All clone should be replaced with global
1017
1018 // plot FOV
1019
1020 // cv::line(img2, p_fov_1_, p_fov_2_, white_color, 2);
1021 // cv::line(img2, p_fov_1_, p_fov_3_, white_color, 2);
1022 // cv::line(img2, p_fov_2_, p_fov_4_, white_color, 2);
1023 // cv::line(world_image_, world_point_to_bigimg(image2ground(p_fov_1_)),
1024 // world_point_to_bigimg(image2ground(p_fov_2_)), white_color, 2);
1025 // cv::line(world_image_, world_point_to_bigimg(image2ground(p_fov_1_)),
1026 // world_point_to_bigimg(image2ground(p_fov_3_)), white_color, 2);
1027 // cv::line(world_image_, world_point_to_bigimg(image2ground(p_fov_2_)),
1028 // world_point_to_bigimg(image2ground(p_fov_4_)), white_color, 2);
1029
1030 // cv::line(img2, p_fov_2_, p_fov_4_, white_color, 2);
1031 // cv::line(world_image_, world_point_to_bigimg(image2ground(p_fov_1_)),
1032 // world_point_to_bigimg(image2ground(p_fov_2_)), white_color, 2);
1033 // cv::line(world_image_, world_point_to_bigimg(image2ground(p_fov_1_)),
1034
// Optionally draw a line between the two per-camera vanishing-grid ground
// points projected back into the image.
1035 if (show_vp_grid_) {
1036 cv::line(image_2D, ground2image(camera_name, vp1_[camera_name]),
1037 ground2image(camera_name, vp2_[camera_name]), white_color, 2);
1038 }
1039 // plot laneline on image and ground plane
// show_lane_count_ == 1 plots the raw inlier image points; == 2 samples
// the fitted cubic curve (car coordinates) at up to lane_step_num_ steps.
1040 if (show_lane_count_ > 0) { // Do now show lane line
1041 for (const auto &object : frame.lane_objects) {
1042 cv::Scalar lane_color = colormapline[object.pos_type];
1043 if (show_lane_count_ == 1) { // show inlier points
1044 cv::Point p_prev;
1045 p_prev.x = static_cast<int>(object.curve_image_point_set[0].x);
1046 p_prev.y = static_cast<int>(object.curve_image_point_set[0].y);
1047 Eigen::Vector2d p_prev_ground = image2ground(camera_name, p_prev);
1048 for (unsigned i = 1; i < object.curve_image_point_set.size(); i++) {
1049 cv::Point p_cur;
1050 p_cur.x = static_cast<int>(object.curve_image_point_set[i].x);
1051 p_cur.y = static_cast<int>(object.curve_image_point_set[i].y);
1052 Eigen::Vector2d p_cur_ground = image2ground(camera_name, p_cur);
1053
// Only draw the image-plane segment when both endpoints lie inside the
// image; the world-view segment is drawn unconditionally.
1054 if (p_cur.x >= 0 && p_cur.y >= 0 && p_prev.x >= 0 && p_prev.y >= 0 &&
1055 p_cur.x < image_width_ && p_cur.y < image_height_ &&
1056 p_prev.x < image_width_ && p_prev.y < image_height_) {
1057 cv::line(image_2D, p_prev, p_cur, lane_color, line_thickness_);
1058 }
1059 cv::line(world_image_, world_point_to_bigimg(p_prev_ground),
1060 world_point_to_bigimg(p_cur_ground), lane_color, 2);
1061 p_prev = p_cur;
1062 p_prev_ground = p_cur_ground;
1063 }
1064 } else if (show_lane_count_ == 2) { // Show fitted curve
1065 base::LaneLineCubicCurve curve_coord = object.curve_car_coord;
1066 Eigen::Vector2d p_prev_ground;
// Sample spacing along x, at least 3 m per step.
1067 float step =
1068 std::max(std::abs(curve_coord.x_end - curve_coord.x_start) /
1069 static_cast<float>(lane_step_num_),
1070 3.0f);
1071 float x = curve_coord.x_start;
1072 p_prev_ground(0) = x;
// Evaluate the cubic a*x^3 + b*x^2 + c*x + d for the lateral offset.
1073 p_prev_ground(1) = curve_coord.a * x * x * x + curve_coord.b * x * x +
1074 curve_coord.c * x + curve_coord.d;
1075 cv::Point p_prev = ground2image(camera_name, p_prev_ground);
1076 x += step;
1077 for (unsigned int i = 0; x < curve_coord.x_end && i < lane_step_num_;
1078 x += step, i++) {
1079 Eigen::Vector2d p_cur_ground;
1080 p_cur_ground(0) = x;
1081 p_cur_ground(1) = curve_coord.a * x * x * x + curve_coord.b * x * x +
1082 curve_coord.c * x + curve_coord.d;
1083 cv::Point p_cur = ground2image(camera_name, p_cur_ground);
1084 if (p_cur.x >= 0 && p_cur.y >= 0 && p_prev.x >= 0 && p_prev.y >= 0 &&
1085 p_cur.x < image_width_ && p_cur.y < image_height_ &&
1086 p_prev.x < image_width_ && p_prev.y < image_height_) {
1087 cv::line(image_2D, p_prev, p_cur, lane_color, line_thickness_);
1088 }
1089 cv::line(world_image_, world_point_to_bigimg(p_prev_ground),
1090 world_point_to_bigimg(p_cur_ground), lane_color, 2);
1091 p_prev = p_cur;
1092 p_prev_ground = p_cur_ground;
1093 }
1094 }
1095 }
1096 }
1097
1098 // Draw objects
1099 for (const auto &object : frame.tracked_objects) {
1100 // plot 2D box on image_2D
1101 base::RectF rect(object->camera_supplement.box);
1102 cv::Rect r(static_cast<int>(rect.x), static_cast<int>(rect.y),
1103 static_cast<int>(rect.width), static_cast<int>(rect.height));
// Color is keyed by the last digit of the track id (see colorlistobj).
1104 cv::Scalar color = colorlistobj[object->track_id % colorlistobj.size()];
1105
1106 cv::putText(
1107 image_2D,
1108 // type_to_string(object->type) + "->" +
1109 sub_type_to_string(object->sub_type),
1110 cv::Point(static_cast<int>(rect.x), static_cast<int>(rect.y) + 30),
1111 cv::FONT_HERSHEY_DUPLEX, 1, red_color, 1);
1112
1113 // compute 8 vetices in camera coodinates
1114 Eigen::Vector3d pos;
1115
1116 ADEBUG << "object->track_id: " << object->track_id;
1117 // Draw 3D position using homography or direct distance from CNN
1118 double theta_ray;
1119 double theta;
1120 cv::Point c_2D;
1121 Eigen::Vector2d c_2D_l;
// Two positioning modes: (a) homography — back-project the 2-D box's
// footprint midpoint to the ground plane; (b) direct — transform the
// tracked world center into camera coordinates.
1122 if (show_homography_object_) {
1123 pos << object->camera_supplement.local_center(0),
1124 object->camera_supplement.local_center(1),
1125 object->camera_supplement.local_center(2);
1126 ADEBUG << "Camera pos: (" << pos(0) << ", " << pos(1) << ", " << pos(2)
1127 << ")";
1128 theta_ray = atan2(pos(0), pos(2));
1129 theta = object->camera_supplement.alpha + theta_ray;
1130 // compute obstacle center in lidar ground
1131 c_2D.x = static_cast<int>(rect.x + rect.width / 2);
1132 c_2D.y = static_cast<int>(rect.y + rect.height);
1133 ADEBUG << "Image Footprint c_2D: (" << c_2D.x << ", " << c_2D.y << ")";
1134 c_2D_l = image2ground(camera_name, c_2D);
1135 ADEBUG << "Image Footprint position: (" << c_2D_l(0) << ", " << c_2D_l(1)
1136 << ")";
1137 } else {
1138 pos = world2camera * object->center;
1139 theta_ray = atan2(pos(0), pos(2));
1140 theta = object->camera_supplement.alpha + theta_ray;
1141 ADEBUG << "Direct pos: (" << pos(0) << ", " << pos(1) << ", " << pos(2)
1142 << ")";
1143
1144 // compute obstacle center in lidar ground
1145 c_2D_l(0) = pos(2);
1146 c_2D_l(1) = -pos(0);
1147 ADEBUG << "Direct center position: (" << c_2D_l(0) << ", " << c_2D_l(1)
1148 << ")";
1149 }
1150 ADEBUG << "theta_ray: " << theta_ray * 180 / M_PI << " degree";
1151 ADEBUG << "object->camera_supplement.alpha: "
1152 << object->camera_supplement.alpha * 180 / M_PI << " degree";
1153 ADEBUG << "theta: " << theta * 180 / M_PI << " = "
1154 << object->camera_supplement.alpha * 180 / M_PI << " + "
1155 << theta_ray * 180 / M_PI;
1156
// Euclidean ground distance to the obstacle footprint, labeled above the box.
1157 float distance =
1158 static_cast<float>(sqrt(c_2D_l(0) * c_2D_l(0) + c_2D_l(1) * c_2D_l(1)));
1159 char dist_string[100];
1160 snprintf(dist_string, sizeof(dist_string), "%.1fm", distance);
1161 // Show distance
1162 cv::putText(
1163 image_2D, dist_string,
1164 cv::Point(static_cast<int>(rect.x), static_cast<int>(rect.y - 10)),
1165 cv::FONT_HERSHEY_DUPLEX, 1, lime_color, 2);
1166
// Project the 8 corners of the oriented 3-D box (rotation about the camera
// y axis by theta) through the intrinsic matrix.
1167 if (show_camera_box3d_) {
1168 Eigen::Matrix3d rotate_ry;
1169 rotate_ry << cos(theta), 0, sin(theta), 0, 1, 0, -sin(theta), 0,
1170 cos(theta);
// NOTE(review): the declarations of `p` and `proj` (extraction dropped
// lines 1171-1172, EigenVector<Eigen::Vector3d> buffers of size 8) are
// missing from this view.
1173 std::vector<cv::Point> p_proj(8);
1174 p[0] << object->size(0) * 0.5, object->size(2) * 0.5,
1175 object->size(1) * 0.5;
1176 p[1] << -object->size(0) * 0.5, object->size(2) * 0.5,
1177 object->size(1) * 0.5;
1178 p[2] << -object->size(0) * 0.5, object->size(2) * 0.5,
1179 -object->size(1) * 0.5;
1180 p[3] << object->size(0) * 0.5, object->size(2) * 0.5,
1181 -object->size(1) * 0.5;
1182 p[4] << object->size(0) * 0.5, -object->size(2) * 0.5,
1183 object->size(1) * 0.5;
1184 p[5] << -object->size(0) * 0.5, -object->size(2) * 0.5,
1185 object->size(1) * 0.5;
1186 p[6] << -object->size(0) * 0.5, -object->size(2) * 0.5,
1187 -object->size(1) * 0.5;
1188 p[7] << object->size(0) * 0.5, -object->size(2) * 0.5,
1189 -object->size(1) * 0.5;
1190
1191 for (uint i = 0; i < p.size(); i++) {
1192 proj[i] = intrinsic * (rotate_ry * p[i] + pos);
// NOTE(review): the divide-by-zero guard tests p[i](2) (local corner z)
// but the division uses proj[i](2) (projected depth) — likely should test
// proj[i](2); confirm against upstream before changing.
1193 if (fabs(p[i](2)) > std::numeric_limits<double>::min()) {
1194 p_proj[i].x = static_cast<int>(proj[i](0) / proj[i](2));
1195 p_proj[i].y = static_cast<int>(proj[i](1) / proj[i](2));
1196 }
1197 }
// CIPV (closest in-path vehicle) gets an extra thick outline underneath
// the normal-colored wireframe.
1198 if (object->b_cipv) {
1199 cv::line(image_2D, p_proj[0], p_proj[1], color_cipv_,
1200 cipv_line_thickness_);
1201 cv::line(image_2D, p_proj[1], p_proj[2], color_cipv_,
1202 cipv_line_thickness_);
1203 cv::line(image_2D, p_proj[2], p_proj[3], color_cipv_,
1204 cipv_line_thickness_);
1205 cv::line(image_2D, p_proj[3], p_proj[0], color_cipv_,
1206 cipv_line_thickness_);
1207 cv::line(image_2D, p_proj[4], p_proj[5], color_cipv_,
1208 cipv_line_thickness_);
1209 cv::line(image_2D, p_proj[5], p_proj[6], color_cipv_,
1210 cipv_line_thickness_);
1211 cv::line(image_2D, p_proj[6], p_proj[7], color_cipv_,
1212 cipv_line_thickness_);
1213 cv::line(image_2D, p_proj[7], p_proj[4], color_cipv_,
1214 cipv_line_thickness_);
1215 cv::line(image_2D, p_proj[0], p_proj[4], color_cipv_,
1216 cipv_line_thickness_);
1217 cv::line(image_2D, p_proj[1], p_proj[5], color_cipv_,
1218 cipv_line_thickness_);
1219 cv::line(image_2D, p_proj[2], p_proj[6], color_cipv_,
1220 cipv_line_thickness_);
1221 cv::line(image_2D, p_proj[3], p_proj[7], color_cipv_,
1222 cipv_line_thickness_);
1223 }
1224
1225 cv::line(image_2D, p_proj[0], p_proj[1], color, line_thickness_);
1226 cv::line(image_2D, p_proj[1], p_proj[2], color, line_thickness_);
1227 cv::line(image_2D, p_proj[2], p_proj[3], color, line_thickness_);
1228 cv::line(image_2D, p_proj[3], p_proj[0], color, line_thickness_);
1229 cv::line(image_2D, p_proj[4], p_proj[5], color, line_thickness_);
1230 cv::line(image_2D, p_proj[5], p_proj[6], color, line_thickness_);
1231 cv::line(image_2D, p_proj[6], p_proj[7], color, line_thickness_);
1232 cv::line(image_2D, p_proj[7], p_proj[4], color, line_thickness_);
1233 cv::line(image_2D, p_proj[0], p_proj[4], color, line_thickness_);
1234 cv::line(image_2D, p_proj[1], p_proj[5], color, line_thickness_);
1235 cv::line(image_2D, p_proj[2], p_proj[6], color, line_thickness_);
1236 cv::line(image_2D, p_proj[3], p_proj[7], color, line_thickness_);
1237 } else {
// 3-D box disabled: fall back to the 2-D rectangle.
1238 if (object->b_cipv) {
1239 cv::rectangle(image_2D, r, color_cipv_, cipv_line_thickness_);
1240 }
1241 cv::rectangle(image_2D, r, color, 2);
1242 }
1243
1244 // plot obstacles on ground plane in lidar coordinates
// Footprint rectangle: rotate the half-size corners by (theta - pi/2) and
// translate to the obstacle's ground-plane center.
1245 Eigen::Matrix2d rotate_rz;
1246 theta = theta - M_PI_2;
1247 rotate_rz << cos(theta), sin(theta), -sin(theta), cos(theta);
1248 Eigen::Vector2d p1;
1249 p1 << object->size(0) * 0.5, object->size(1) * 0.5;
1250 p1 = rotate_rz * p1 + c_2D_l;
1251 Eigen::Vector2d p2;
1252 p2 << -object->size(0) * 0.5, object->size(1) * 0.5;
1253 p2 = rotate_rz * p2 + c_2D_l;
1254 Eigen::Vector2d p3;
1255 p3 << -object->size(0) * 0.5, -object->size(1) * 0.5;
1256 p3 = rotate_rz * p3 + c_2D_l;
1257 Eigen::Vector2d p4;
1258 p4 << object->size(0) * 0.5, -object->size(1) * 0.5;
1259 p4 = rotate_rz * p4 + c_2D_l;
1260
1261 Eigen::Vector2d pos_2d;
1262 pos_2d << c_2D_l(0), c_2D_l(1);
1263 Eigen::Vector3d v;
1264 v << object->velocity(0), object->velocity(1), object->velocity(2);
1265 v = world2camera.linear() * v;
1266 Eigen::Vector2d v_2d;
1267 v_2d << v(0) + pos_2d(0), v(1) + pos_2d(1);
1268
1269 AINFO << "v.norm: " << v.norm();
// Only draw trajectories for objects moving faster than speed_limit_ and
// when ego-motion history is available.
1270 if (show_trajectory_ && motion_buffer != nullptr &&
1271 motion_buffer->size() > 0 && v.norm() > speed_limit_) {
1272 DrawTrajectories(object, motion_buffer);
1273 }
1274 if (object->b_cipv) {
1275 cv::line(world_image_, world_point_to_bigimg(p1),
1276 world_point_to_bigimg(p2), color_cipv_, cipv_line_thickness_);
1277 cv::line(world_image_, world_point_to_bigimg(p2),
1278 world_point_to_bigimg(p3), color_cipv_, cipv_line_thickness_);
1279 cv::line(world_image_, world_point_to_bigimg(p3),
1280 world_point_to_bigimg(p4), color_cipv_, cipv_line_thickness_);
1281 cv::line(world_image_, world_point_to_bigimg(p4),
1282 world_point_to_bigimg(p1), color_cipv_, cipv_line_thickness_);
1283 // cv::line(world_image_, world_point_to_bigimg(pos_2d),
1284 // world_point_to_bigimg(v_2d), color_cipv_,
1285 // cipv_line_thickness_);
1286 }
1287 cv::line(world_image_, world_point_to_bigimg(p1), world_point_to_bigimg(p2),
1288 color, line_thickness_);
1289 cv::line(world_image_, world_point_to_bigimg(p2), world_point_to_bigimg(p3),
1290 color, line_thickness_);
1291 cv::line(world_image_, world_point_to_bigimg(p3), world_point_to_bigimg(p4),
1292 color, line_thickness_);
1293 cv::line(world_image_, world_point_to_bigimg(p4), world_point_to_bigimg(p1),
1294 color, line_thickness_);
1295 // cv::line(world_image_, world_point_to_bigimg(pos_2d),
1296 // world_point_to_bigimg(v_2d), color, line_thickness_);
1297 }
1298
1299 // Draw virtual ego lanes
// Synthesizes left/right ego lane lines from ego yaw rate and velocity
// (falling back to 5 m/s straight ahead when no motion data is available).
1300 if (show_virtual_egolane_) {
1301 EgoLane virtual_egolane_ground;
1302 virtual_egolane_ground.left_line.line_point.clear();
1303 virtual_egolane_ground.right_line.line_point.clear();
1304 CipvOptions cipv_options;
1305 if (motion_buffer == nullptr || motion_buffer->size() == 0) {
1306 AWARN << "motion_buffer_ is empty";
1307 cipv_options.velocity = 5.0f;
1308 cipv_options.yaw_rate = 0.0f;
1309 } else {
1310 cipv_options.velocity = motion_buffer->back().velocity;
1311 cipv_options.yaw_rate = motion_buffer->back().yaw_rate;
1312 }
// NOTE(review): the opening of this call (line 1313, the
// MakeVirtualEgoLaneFromYawRate(...) callee) was dropped by the extraction.
1314 cipv_options.yaw_rate, cipv_options.velocity,
1315 kMaxVehicleWidthInMeter * 0.5, &virtual_egolane_ground.left_line,
1316 &virtual_egolane_ground.right_line);
1317 // Left ego lane
1318 Eigen::Vector2d p_prev_ground;
1319 p_prev_ground(0) = virtual_egolane_ground.left_line.line_point[0](0);
1320 p_prev_ground(1) = virtual_egolane_ground.left_line.line_point[0](1);
1321 cv::Point p_prev = ground2image(camera_name, p_prev_ground);
1322 for (unsigned i = 1; i < virtual_egolane_ground.left_line.line_point.size();
1323 i++) {
1324 Eigen::Vector2d p_cur_ground;
1325 p_cur_ground(0) = virtual_egolane_ground.left_line.line_point[i](0);
1326 p_cur_ground(1) = virtual_egolane_ground.left_line.line_point[i](1);
1327 cv::Point p_cur = ground2image(camera_name, p_cur_ground);
1328 if (p_cur.x >= 0 && p_cur.y >= 0 && p_prev.x >= 0 && p_prev.y >= 0 &&
1329 p_cur.x < image_width_ && p_cur.y < image_height_ &&
1330 p_prev.x < image_width_ && p_prev.y < image_height_) {
1331 cv::line(image_2D, p_prev, p_cur, virtual_lane_color_, line_thickness_);
1332 }
1333 cv::line(world_image_, world_point_to_bigimg(p_prev_ground),
1334 world_point_to_bigimg(p_cur_ground), virtual_lane_color_, 2);
1335 p_prev = p_cur;
1336 p_prev_ground = p_cur_ground;
1337 }
1338
1339 // Right ego lane
1340 p_prev_ground(0) = virtual_egolane_ground.right_line.line_point[0](0);
1341 p_prev_ground(1) = virtual_egolane_ground.right_line.line_point[0](1);
1342 p_prev = ground2image(camera_name, p_prev_ground);
1343 for (unsigned i = 1;
1344 i < virtual_egolane_ground.right_line.line_point.size(); i++) {
1345 Eigen::Vector2d p_cur_ground;
1346 p_cur_ground(0) = virtual_egolane_ground.right_line.line_point[i](0);
1347 p_cur_ground(1) = virtual_egolane_ground.right_line.line_point[i](1);
1348 cv::Point p_cur = ground2image(camera_name, p_cur_ground);
1349
1350 if (p_cur.x >= 0 && p_cur.y >= 0 && p_prev.x >= 0 && p_prev.y >= 0 &&
1351 p_cur.x < image_width_ && p_cur.y < image_height_ &&
1352 p_prev.x < image_width_ && p_prev.y < image_height_) {
1353 cv::line(image_2D, p_prev, p_cur, virtual_lane_color_, line_thickness_);
1354 }
1355 cv::line(world_image_, world_point_to_bigimg(p_prev_ground),
1356 world_point_to_bigimg(p_cur_ground), virtual_lane_color_, 2);
1357 p_prev = p_cur;
1358 p_prev_ground = p_cur_ground;
1359 }
1360 }
1361
1362 last_timestamp_ = frame.timestamp;
// NOTE(review): the full-size assignment below is immediately overwritten
// by the cv::resize into the same map slot — the assignment on line 1363
// looks redundant; confirm before removing.
1363 camera_image_[frame.data_provider->sensor_name()] = image_2D;
1364 cv::resize(image_2D, camera_image_[frame.data_provider->sensor_name()],
1365 cv::Size(small_w_, small_h_));
1366}
1367
// Per-frame entry point for the multi-camera visualizer: throttles to
// ~50 ms, annotates the image with camera/frame/motion metadata, draws the
// full overlay for this camera, and once both cameras have reported,
// composes and optionally writes the big visualization panel.
// NOTE(review): this extraction dropped the function header (line 1368),
// the opening of the Draw2Dand3D_all_info_single_camera call (line 1440),
// and line 1457 inside the panel branch — confirm against visualizer.cc.
1369 const cv::Mat &img, const CameraFrame &frame,
1370 const base::MotionBufferPtr motion_buffer,
1371 const Eigen::Affine3d &world2camera) {
// Skip frames that arrive less than 20 ms after the previous one.
1372 if (frame.timestamp - last_timestamp_ < 0.02) return;
1373
// Start a fresh top-down world view for this cycle.
1374 world_image_ = cv::Mat(world_h_, wide_pixel_, CV_8UC3, black_color);
// NOTE(review): unconditional dump of the front_6mm frame to ./test.png —
// looks like leftover debug output; confirm whether it should stay.
1375 if (frame.data_provider->sensor_name() == "front_6mm") {
1376 cv::imwrite("./test.png", img);
1377 }
1378 // draw results on visulization panel
1379 int line_pos = 0;
1380 cv::Mat image = img.clone();
1381 std::string camera_name = frame.data_provider->sensor_name();
// Text lines are stacked 50 px apart from the top-left corner.
1382 if (manual_calibration_mode_) {
1383 line_pos += 50;
1384 cv::putText(image,
1385 "Manual Calibration: Pitch(up/down) Yaw(left/right) "
1386 "Roll(SH+left/right)",
1387 cv::Point(10, line_pos), cv::FONT_HERSHEY_DUPLEX, 1.3,
1388 red_color, 3);
1389 }
1390 line_pos += 50;
1391 cv::putText(image, camera_name, cv::Point(10, line_pos),
1392 cv::FONT_HERSHEY_DUPLEX, 1.3, red_color, 3);
1393 line_pos += 50;
1394 cv::putText(image, absl::StrCat("frame id: ", frame.frame_id),
1395 cv::Point(10, line_pos), cv::FONT_HERSHEY_DUPLEX, 1.3, red_color,
1396 3);
1397 line_pos += 50;
// Latest ego-motion sample, when available.
1398 if (motion_buffer != nullptr) {
1399 cv::putText(
1400 image, absl::StrCat("yaw rate: ", motion_buffer->back().yaw_rate),
1401 cv::Point(10, line_pos), cv::FONT_HERSHEY_DUPLEX, 1.3, red_color, 3);
1402 line_pos += 50;
1403 cv::putText(
1404 image, absl::StrCat("pitch rate: ", motion_buffer->back().pitch_rate),
1405 cv::Point(10, line_pos), cv::FONT_HERSHEY_DUPLEX, 1.3, red_color, 3);
1406 line_pos += 50;
1407 cv::putText(
1408 image, absl::StrCat("roll rate: ", motion_buffer->back().roll_rate),
1409 cv::Point(10, line_pos), cv::FONT_HERSHEY_DUPLEX, 1.3, red_color, 3);
1410 line_pos += 50;
1411 cv::putText(
1412 image, absl::StrCat("velocity: ", motion_buffer->back().velocity),
1413 cv::Point(10, line_pos), cv::FONT_HERSHEY_DUPLEX, 1.3, red_color, 3);
1414 }
1415
1416 // plot predicted vanishing point
1417 if (frame.pred_vpt.size() > 0) {
1418 // Option 1. Show both x and y
1419 // cv::circle(image,
1420 // cv::Point(static_cast<int>(frame.pred_vpt[0]),
1421 // static_cast<int>(frame.pred_vpt[1])), 5, dark_green_color, 3);
1422 // Option 2. Show height/2 (x) and y
1423 cv::circle(image,
1424 cv::Point(static_cast<int>(image_width_ >> 1),
1425 static_cast<int>(frame.pred_vpt[1])),
1426 5, dark_green_color, 3);
1427 }
1428
// List the track id of each CIPV-flagged object.
1429 for (const auto &object : frame.tracked_objects) {
1430 if (object->b_cipv) {
1431 line_pos += 50;
1432 cv::putText(image, absl::StrCat("CIPV: ", object->track_id),
1433 cv::Point(10, line_pos), cv::FONT_HERSHEY_DUPLEX, 1.3,
1434 red_color, 3);
1435 }
1436 }
1437
// Draw the full overlay only when calibration data exists for this camera.
// NOTE(review): line 1440 (the Draw2Dand3D_all_info_single_camera( callee)
// was dropped by the extraction.
1438 if (intrinsic_map_.find(camera_name) != intrinsic_map_.end() &&
1439 extrinsic_map_.find(camera_name) != extrinsic_map_.end()) {
1441 camera_name, image, frame,
1442 intrinsic_map_.at(camera_name).cast<double>(),
1443 extrinsic_map_.at(camera_name), world2camera, motion_buffer);
1444 } else {
1445 AERROR << "Failed to find necessuary intrinsic or extrinsic params.";
1446 }
1447
1448 // copy visual results into visualization panel
// all_camera_recieved_ (sic, existing member) is a bitmask: bit 0 for
// camera_names_[0], bit 1 for camera_names_[1]; the panel is composed only
// after both cameras have delivered a frame.
1449 if (cv_imshow_img_) {
1450 if (camera_name == camera_names_[0]) {
1451 all_camera_recieved_ |= 0x1;
1452 } else if (camera_name == camera_names_[1]) {
1453 all_camera_recieved_ |= 0x2;
1454 }
1455 if (all_camera_recieved_ == 0x3) {
// NOTE(review): line 1457 is missing from this extraction.
1456 if (camera_name == visual_camera_) {
1458 draw_selected_image_boundary(small_w_, small_h_,
1459 &(camera_image_[visual_camera_]));
1460 cv::Mat bigimg(world_h_, small_w_ + wide_pixel_, CV_8UC3);
1461 camera_image_[camera_names_[0]].copyTo(
1462 bigimg(cv::Rect(0, 0, small_w_, small_h_)));
1463 camera_image_[camera_names_[1]].copyTo(
1464 bigimg(cv::Rect(0, small_h_, small_w_, small_h_)));
1465 world_image_.copyTo(
1466 bigimg(cv::Rect(small_w_, 0, wide_pixel_, world_h_)));
1467 // cv::namedWindow("Apollo Visualizer", CV_WINDOW_NORMAL);
1468 // cv::setWindowProperty("Apollo Visualizer", CV_WND_PROP_FULLSCREEN,
1469 // CV_WINDOW_FULLSCREEN);
1470 // cv::imshow("Apollo Visualizer", bigimg);
1471 // int key = cvWaitKey(30);
1472 // key_handler(camera_name, key);
1473
1474 // output visualization panel
1475 if (write_out_img_) {
1476 char path[1000];
1477 static int k = 0;
1478 snprintf(path, sizeof(path), "%s/%06d.jpg", path_.c_str(), k++);
1479 AINFO << "snapshot is saved at " << path;
1480 cv::imwrite(path, bigimg);
1481 }
// Reset the bitmask so the next panel waits for both cameras again.
1482 all_camera_recieved_ = 0x0;
1483 } // if (camera_name == visual_camera)
1484 } // if (all_camera_recieved_ == 0x3)
1485 } // if (cv_imshow_img_)
1486}
1487
// Draws concentric range circles (every 20 m up to 300 m) centered on the
// ego position at the bottom-center of the top-down world image, with
// distance labels every 50 m.
// NOTE(review): the extraction dropped the function header (line 1488) and
// the color/thickness argument lines of the two cv::circle calls (lines
// 1490 and 1493) — confirm against visualizer.cc.
1489 cv::circle(world_image_, cv::Point(wide_pixel_ / 2, world_h_), 1 * m2pixel_,
1491 for (int i = 20; i < 300; i += 20) {
1492 cv::circle(world_image_, cv::Point(wide_pixel_ / 2, world_h_), i * m2pixel_,
1494 }
// Label every 50 m along the vertical (forward) axis.
1495 for (int i = 50; i < 300; i += 50) {
1496 cv::putText(world_image_, std::to_string(i),
1497 cv::Point(wide_pixel_ / 2, world_h_ - i * m2pixel_),
1498 cv::FONT_HERSHEY_DUPLEX, 1, red_color, 2);
1499 }
1500}
1501
1502void Visualizer::draw_selected_image_boundary(const int width, int const height,
1503 cv::Mat *image) {
1504 cv::Rect image_boundary(0, 0, width, height);
1505 cv::rectangle(*image, image_boundary, light_green_color, 4);
1506}
1507
1508cv::Point Visualizer::world_point_to_bigimg(const Eigen::Vector2d &p) {
1509 cv::Point point;
1510 point.x = static_cast<int>(-p(1) * m2pixel_ + wide_pixel_ * 0.5);
1511 point.y = static_cast<int>(world_h_ - p(0) * m2pixel_);
1512 return point;
1513}
1514cv::Point Visualizer::world_point_to_bigimg(const Eigen::Vector4f &p) {
1515 cv::Point point;
1516 point.x = (wide_pixel_ >> 1) -
1517 static_cast<int>(p(1) * static_cast<float>(m2pixel_));
1518 point.y = world_h_ - static_cast<int>(p(0) * static_cast<float>(m2pixel_));
1519 return point;
1520}
1521
1522Eigen::Vector2d Visualizer::image2ground(const std::string &camera_name,
1523 cv::Point p_img) {
1524 Eigen::Vector3d p_homo;
1525
1526 p_homo << p_img.x, p_img.y, 1;
1527 Eigen::Vector3d p_ground;
1528 p_ground = homography_image2ground_[camera_name] * p_homo;
1529 if (fabs(p_ground(2)) > std::numeric_limits<double>::min()) {
1530 p_ground(0) = p_ground(0) / p_ground(2);
1531 p_ground(1) = p_ground(1) / p_ground(2);
1532 } else {
1533 AWARN << "p_ground(2) too small :" << p_ground(2);
1534 }
1535 return p_ground.block(0, 0, 2, 1);
1536}
1537cv::Point Visualizer::ground2image(const std::string &camera_name,
1538 const Eigen::Vector2d &p_ground) {
1539 Eigen::Vector3d p_homo;
1540
1541 p_homo << p_ground(0), p_ground(1), 1;
1542 Eigen::Vector3d p_img;
1543 p_img = homography_ground2image_[camera_name] * p_homo;
1544 if (fabs(p_img(2)) > std::numeric_limits<double>::min()) {
1545 p_img(0) = p_img(0) / p_img(2);
1546 p_img(1) = p_img(1) / p_img(2);
1547 }
1548 return cv::Point(static_cast<int>(p_img(0)), static_cast<int>(p_img(1)));
1549}
1550
1551} // namespace camera
1552} // namespace perception
1553} // namespace apollo
static bool MakeVirtualEgoLaneFromYawRate(const float yaw_rate, const float velocity, const float offset_distance, LaneLineSimple *left_lane_line, LaneLineSimple *right_lane_line)
bool QueryTransform(const std::string &child_frame_id, const std::string &frame_id, Eigen::Affine3d *transform)
bool QueryPos(double timestamp, Eigen::Affine3d *pose)
EigenMap< std::string, Eigen::Matrix3d > homography_ground2image_
Definition visualizer.h:113
apollo::common::EigenMap< T, EigenType > EigenMap
Definition visualizer.h:43
std::string type_to_string(const apollo::perception::base::ObjectType type)
bool save_manual_calibration_parameter(const std::string &camera_name, const double pitch_adj_degree, const double yaw_adj_degree, const double roll_adj_degree)
bool SetDirectory(const std::string &path)
void draw_selected_image_boundary(const int width, int const height, cv::Mat *image)
cv::Point world_point_to_bigimg(const Eigen::Vector2d &p)
bool Init_all_info_single_camera(const std::vector< std::string > &camera_names, const std::string &visual_camera, const EigenMap< std::string, Eigen::Matrix3f > &intrinsic_map, const EigenMap< std::string, Eigen::Matrix4d > &extrinsic_map, const Eigen::Matrix4d &ex_lidar2imu, const double pitch_adj, const double yaw_adj, const double roll_adj, const int image_height, const int image_width)
void Draw2Dand3D(const cv::Mat &img, const CameraFrame &frame)
void ShowResult_all_info_single_camera(const cv::Mat &img, const CameraFrame &frame, const base::MotionBufferPtr motion_buffer, const Eigen::Affine3d &world2camera)
double regularize_angle(const double angle)
bool copy_backup_file(const std::string &filename)
bool euler_to_quaternion(Eigen::Vector4d *quaternion, const double pitch_radian, const double yaw_radian, const double roll_radian)
bool DrawTrajectories(const base::ObjectPtr &object, const base::MotionBufferPtr motion_buffer)
bool adjust_angles(const std::string &camera_name, const double pitch_adj, const double yaw_adj, const double roll_adj)
std::string sub_type_to_string(const apollo::perception::base::ObjectSubType type)
void ShowResult(const cv::Mat &img, const CameraFrame &frame)
void Draw2Dand3D_all_info_single_camera(const std::string &camera_name, const cv::Mat &img, const CameraFrame &frame, const Eigen::Matrix3d &intrinsic, const Eigen::Matrix4d &extrinsic, const Eigen::Affine3d &world2camera, const base::MotionBufferPtr motion_buffer)
bool save_extrinsic_in_yaml(const std::string &camera_name, const Eigen::Matrix4d &extrinsic, const Eigen::Vector4d &quaternion, const double pitch_radian, const double yaw_radian, const double roll_radian)
bool Init(const std::vector< std::string > &camera_names, TransformServer *tf_server)
Definition visualizer.cc:91
EigenMap< std::string, Eigen::Matrix3d > homography_image2ground_
Definition visualizer.h:112
cv::Point ground2image(const std::string &camera_name, const Eigen::Vector2d &p_ground)
Eigen::Vector2d image2ground(const std::string &camera_name, cv::Point p_img)
bool key_handler(const std::string &camera_name, const int key)
#define ADEBUG
Definition log.h:41
#define AERROR
Definition log.h:44
#define AINFO
Definition log.h:42
#define AWARN
Definition log.h:43
std::vector< EigenType, Eigen::aligned_allocator< EigenType > > EigenVector
Definition eigen_defs.h:33
bool Copy(const std::string &from, const std::string &to)
Copy a file or directory.
Definition file.cc:295
bool EnsureDirectory(const std::string &directory_path)
Check if a specified directory specified by directory_path exists.
Definition file.cc:299
std::shared_ptr< MotionBuffer > MotionBufferPtr
@ ADJACENT_RIGHT
lane marking on the right side next to ego lane
@ ADJACENT_LEFT
lane marking on the left side next to ego lane
@ EGO_LEFT
left lane marking of the ego lane
@ EGO_CENTER
center lane marking of the ego lane, changing lane
@ EGO_RIGHT
right lane marking of the ego lane
Rect< float > RectF
Definition box.h:160
std::shared_ptr< Object > ObjectPtr
Definition object.h:127
std::vector< cv::Scalar > colorlistobj
Definition visualizer.cc:34
std::map< base::LaneLinePositionType, cv::Scalar > colormapline
Definition visualizer.cc:45
Eigen::Matrix3d Camera2CarHomograph(const Eigen::Matrix3d &intrinsic, const Eigen::Matrix4d &extrinsic_camera2lidar, const Eigen::Matrix4d &extrinsic_lidar2imu, double pitch_adj)
Definition visualizer.cc:60
cv::Scalar magenta_color
Definition colormap.h:30
cv::Scalar lime_color
Definition colormap.h:51
cv::Scalar teal_color
Definition colormap.h:32
cv::Scalar coral_color
Definition colormap.h:58
cv::Scalar blue_color
Definition colormap.h:44
cv::Scalar ivory_color
Definition colormap.h:36
constexpr float kMaxVehicleWidthInMeter
Definition lane_object.h:41
cv::Scalar salmon_color
Definition colormap.h:59
cv::Scalar red_color
Definition colormap.h:57
cv::Scalar maroon_color
Definition colormap.h:62
@ KEY_SHIFT_RIGHT_NUM_LOCK_ON
Definition keycode.h:109
cv::Scalar violet_color
Definition colormap.h:33
cv::Scalar purple_color
Definition colormap.h:31
cv::Scalar dark_green_color
Definition colormap.h:50
cv::Scalar gray_color
Definition colormap.h:66
cv::Scalar yellow_color
Definition colormap.h:61
cv::Scalar black_color
Definition colormap.h:28
cv::Scalar green_color
Definition colormap.h:54
cv::Scalar deep_sky_blue_color
Definition colormap.h:42
cv::Scalar dark_blue_color
Definition colormap.h:46
cv::Scalar pink_color
Definition colormap.h:34
cv::Scalar olive_color
Definition colormap.h:53
cv::Scalar orange_color
Definition colormap.h:60
cv::Scalar beige_color
Definition colormap.h:35
cv::Scalar white_color
Definition colormap.h:29
cv::Scalar sky_blue_color
Definition colormap.h:41
cv::Scalar dodger_blue_color
Definition colormap.h:43
cv::Scalar cyan_color
Definition colormap.h:40
cv::Scalar light_green_color
Definition colormap.h:52
class register implement
Definition arena_queue.h:37
std::vector< base::LaneLine > lane_objects
std::vector< base::ObjectPtr > tracked_objects
std::vector< std::shared_ptr< DataProvider > > data_provider