Apollo 11.0
Autonomous Driving Open Platform
tracked_object.cc
Browse this file's documentation.
1/******************************************************************************
2 * Copyright 2018 The Apollo Authors. All Rights Reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 *****************************************************************************/
17
18#include "cyber/common/log.h"
25
26namespace apollo {
27namespace perception {
28namespace lidar {
29
31
33 const Eigen::Affine3d& pose) {
34 AttachObject(obj_ptr, pose);
35}
36
38 const Eigen::Affine3d& pose,
39 const Eigen::Vector3d& global_to_local_offset,
40 const base::SensorInfo& sensor,
41 double timestamp) {
42 if (obj_ptr) {
43 // all state of input obj_ptr will not change except cloud world
44 object_ptr = obj_ptr;
45
47 global_local_offset = global_to_local_offset;
48 // object info to tracked object
49 center = pose * object_ptr->center;
50 const PointFCloud& cloud = (object_ptr->lidar_supplement).cloud;
51 barycenter = (algorithm::CalculateCentroid(cloud)).cast<double>();
52 barycenter = pose * barycenter;
54
55 // object-detection center
56 float detection_center_x = obj_ptr->lidar_supplement.detections[0];
57 float detection_center_y = obj_ptr->lidar_supplement.detections[1];
58 float detection_center_z = obj_ptr->lidar_supplement.detections[2];
59 float detection_size_x = obj_ptr->lidar_supplement.detections[3];
60 float detection_size_y = obj_ptr->lidar_supplement.detections[4];
61 float detection_size_z = obj_ptr->lidar_supplement.detections[5];
62 float detection_theta = obj_ptr->lidar_supplement.detections[6];
63 Eigen::Vector3d model_center_lidar = Eigen::Vector3d(detection_center_x,
64 detection_center_y, detection_center_z);
65 detection_center = pose * model_center_lidar;
66
67 // object-detection center/size/direction -> corners
68 Eigen::Matrix<float, 8, 1> corners;
70 detection_center_x, detection_center_y, detection_center_z,
71 detection_size_x, detection_size_y, detection_size_z,
72 detection_theta, &corners);
73 for (size_t i = 0; i < 4; i++) {
74 Eigen::Vector3d detection_cor = Eigen::Vector3d(
75 static_cast<double>(corners[2 * i]),
76 static_cast<double>(corners[2 * i + 1]), detection_center_z);
77 detection_corners[i] = pose * detection_cor;
78 }
79
80 Eigen::Matrix3d rotation = pose.rotation();
81 direction = rotation * object_ptr->direction.cast<double>();
83 size = object_ptr->size.cast<double>();
84 type = object_ptr->type;
85 type_probs = object_ptr->type_probs;
86 is_background = object_ptr->lidar_supplement.is_background;
87
88 base::PointDCloud& cloud_world = (object_ptr->lidar_supplement).cloud_world;
89 cloud_world.clear();
90 cloud_world.resize(cloud.size());
91 for (size_t i = 0; i < cloud.size(); ++i) {
92 Eigen::Vector3d pt(cloud[i].x, cloud[i].y, cloud[i].z);
93 Eigen::Vector3d pt_world = pose * pt;
94 cloud_world[i].x = pt_world(0);
95 cloud_world[i].y = pt_world(1);
96 cloud_world[i].z = pt_world(2);
97 cloud_world[i].intensity = cloud[i].intensity;
98 }
99 // memcpy(&(cloud_world.points_height(0)), &(cloud.points_height(0)),
100 // sizeof(float) * cloud.size());
101 for (size_t i = 0; i < cloud.size(); ++i) {
102 cloud_world.SetPointHeight(i, cloud.points_height(i));
103 }
104 // other belief information keep as Reset()
105 selected_measured_velocity = Eigen::Vector3d::Zero();
106 selected_measured_acceleration = Eigen::Vector3d::Zero();
108 belief_velocity = Eigen::Vector3d::Zero();
109 belief_acceleration = Eigen::Vector3d::Zero();
110
111 direction_state = Eigen::Vector3d::Zero();
112 direction_state_covariance = Eigen::Matrix3d::Zero();
113 output_angular = Eigen::Vector3d::Zero();
115 direction_converged = false;
116
117 output_velocity = Eigen::Vector3d::Zero();
118 output_velocity_uncertainty = Eigen::Matrix3d::Zero();
122
123 sensor_info = sensor;
124 this->timestamp = timestamp;
125 }
126}
127
129 const base::PointFCloud& cloud = (object_ptr->lidar_supplement).cloud;
130 base::PointDCloud& cloud_world = (object_ptr->lidar_supplement).cloud_world;
131 cloud_world.clear();
132 cloud_world.resize(cloud.size());
133 for (size_t i = 0; i < cloud.size(); ++i) {
134 Eigen::Vector3d pt(cloud[i].x, cloud[i].y, cloud[i].z);
135 Eigen::Vector3d pt_world = sensor_to_local_pose * pt;
136 cloud_world[i].x = pt_world(0);
137 cloud_world[i].y = pt_world(1);
138 cloud_world[i].z = pt_world(2);
139 cloud_world[i].intensity = cloud[i].intensity;
140 }
141 memcpy(&cloud_world.points_height(0), &cloud.points_height(0),
142 sizeof(float) * cloud.size());
143}
144
146 object_ptr.reset();
147 sensor_to_local_pose = Eigen::Affine3d::Identity();
148
149 // measurement reset
150 for (int i = 0; i < 4; ++i) {
151 corners[i] = Eigen::Vector3d::Zero();
152 detection_corners[i] = Eigen::Vector3d::Zero();
153 measured_corners_velocity[i] = Eigen::Vector3d::Zero();
154 measured_history_corners_velocity[i] = Eigen::Vector3d::Zero();
155 measured_detection_history_corners_velocity[i] = Eigen::Vector3d::Zero();
156 }
157 center = Eigen::Vector3d::Zero();
158 barycenter = Eigen::Vector3d::Zero();
159 anchor_point = Eigen::Vector3d::Zero();
160 detection_center = Eigen::Vector3d::Zero();
161 measured_barycenter_velocity = Eigen::Vector3d::Zero();
162 measured_center_velocity = Eigen::Vector3d::Zero();
163 measured_nearest_corner_velocity = Eigen::Vector3d::Zero();
164 measured_history_center_velocity = Eigen::Vector3d::Zero();
165 measured_detection_center_velocity = Eigen::Vector3d::Zero();
166 measured_detection_history_center_velocity = Eigen::Vector3d::Zero();
168 direction = Eigen::Vector3d::Zero();
169 lane_direction = Eigen::Vector3d::Zero();
170 size = Eigen::Vector3d::Zero();
172 type_probs.assign(static_cast<int>(base::ObjectType::MAX_OBJECT_TYPE), 0.0f);
173 is_background = false;
174 shape_features.clear();
175 shape_features_full.clear();
177 association_score = 0.0;
178 is_fake = false;
179 track_id = -1;
180 tracking_time = 0.0;
181
182 // filter reset
184 valid = false;
185 converged = true;
187 update_quality = 0.0;
189 selected_measured_acceleration = Eigen::Vector3d::Zero();
193 belief_velocity_gain = Eigen::Vector3d::Zero();
194 velocity_covariance = Eigen::Matrix3d::Zero();
195 belief_velocity_online_covariance = Eigen::Matrix3d::Zero();
196
197 state = Eigen::Vector4d::Zero();
198 measurement_covariance = Eigen::Matrix4d::Zero();
199 state_covariance = Eigen::Matrix4d::Zero();
200
201 motion_score = Eigen::Vector3d(1, 1, 1);
202
203 direction_state = Eigen::Vector3d::Zero();
204 direction_state_covariance = Eigen::Matrix3d::Zero();
205 output_angular = Eigen::Vector3d::Zero();
207 direction_converged = false;
208
209 // output reset
215
216 // sensor info reset
218 timestamp = 0.0;
219}
220
221void TrackedObject::Reset(base::ObjectPtr obj_ptr, const Eigen::Affine3d& pose,
222 const Eigen::Vector3d& global_to_local_offset,
223 const base::SensorInfo& sensor, double timestamp) {
224 Reset();
225 AttachObject(obj_ptr, pose, global_to_local_offset, sensor, timestamp);
226}
227
228void TrackedObject::CopyFrom(TrackedObjectPtr rhs, bool is_deep) {
229 *this = *rhs;
230 if (is_deep) {
231 object_ptr = base::ObjectPool::Instance().Get();
232 *object_ptr = *(rhs->object_ptr);
233 } else {
234 object_ptr = rhs->object_ptr;
235 }
236}
237
239 if (obj->type == base::ObjectType::VEHICLE) {
240 return 0.99f;
241 }
242 return 0.0f;
243}
244
246 *obj = *object_ptr;
247 // obj id keep default
248 // obj polygon calculate outside, because
249 /* 1. if ConvexHull2D as member variable:
250 constructor time consume a little large
251 2. if ConvexHull2D as static or global variable:
252 need mutex time consume in multi threads
253 what a pity!
254 */
255 obj->direction = output_direction.cast<float>();
256 // obj theta_variance not calculate in tracker, keep default
257 obj->center = output_center;
258 // obj center_uncertainty not calculate in tracker, keep default
259 obj->size = output_size.cast<float>();
260 // obj size_varuance not calculate in tracker, keep default
261 obj->anchor_point = belief_anchor_point;
262 obj->type = type;
263 // obj type_probs calculate in tracker
264 obj->type_probs = type_probs;
265 // obj confidence not calculate in tracker, keep default
266 obj->track_id = track_id;
267 obj->velocity = output_velocity.cast<float>();
268 obj->velocity_uncertainty = output_velocity_uncertainty.cast<float>();
269 obj->velocity_converged = converged;
270 obj->tracking_time = tracking_time;
271 if (obj->velocity.norm() > GetVelThreshold(obj)) {
272 obj->theta = std::atan2(obj->velocity[1], obj->velocity[0]);
273 } else {
274 obj->theta = std::atan2(obj->direction[1], obj->direction[0]);
275 }
276 // obj latest_tracked_time not calculate in tracker, keep default
277 // obj car_light not calculate in tracker, keep default
278 // obj lidar_supplement cloud_world has passed in *obj = *object_ptr
279 // obj lidar_supplement other elements not calculate in tracker, keep default
280 // obj radar_supplement not calculate in tracker, keep default
281 // obj camera_supplement not calculate in tracker, keep default
282}
283
284std::string TrackedObject::ToString() const {
285 // std::string txt;
286 // return txt;
287 std::ostringstream oos;
288 oos << "obj id: " << object_ptr->id << ", track_id: " << object_ptr->track_id
289 << ", bary_center: (" << barycenter[0] << "," << barycenter[1] << ","
290 << barycenter[2] << ")"
291 << ", lane_direction: (" << lane_direction[0] << "," << lane_direction[1]
292 << "," << lane_direction[2] << ")";
293 return oos.str();
294}
295
297 // Compute object's shape feature
298 // 1. check whether shape feature is ready
299 // 2. compute object's shape feature
301 FeatureDescriptor fd(&(object_ptr->lidar_supplement.cloud));
302 fd.ComputeHistogram(static_cast<int>(histogram_bin_size),
303 shape_features_full.data());
304
305 size_t feature_len = histogram_bin_size * 3;
306 shape_features.clear();
307 shape_features.resize(feature_len);
308 for (size_t i = 0; i < feature_len; ++i) {
310 }
311}
312} // namespace lidar
313} // namespace perception
314} // namespace apollo
const std::vector< float > & points_height() const
void resize(const size_t size) override
void SetPointHeight(size_t i, size_t j, float height)
void ComputeHistogram(int bin_size, float *feature)
Eigen::Matrix< T, 3, 1 > CalculateCentroid(const base::AttributePointCloud< base::Point< T > > &cloud)
Definition common.h:134
void CalculateCornersFromCenter(Type center_x, Type center_y, Type center_z, Type size_x, Type size_y, Type size_z, Type theta, Eigen::Matrix< Type, 8, 1 > *corners)
Definition common.h:366
std::shared_ptr< Object > ObjectPtr
Definition object.h:127
apollo::perception::base::PointCloud< PointF > PointFCloud
std::shared_ptr< TrackedObject > TrackedObjectPtr
Class registration implementation.
Definition arena_queue.h:37
void AttachObject(base::ObjectPtr obj_ptr, const Eigen::Affine3d &pose, const Eigen::Vector3d &global_to_local_offset=Eigen::Vector3d::Zero(), const base::SensorInfo &sensor=base::SensorInfo(), double timestamp=0.0)
Attach object
std::string ToString() const
Transform tracked data state to string
Eigen::Vector3d measured_detection_history_corners_velocity[4]
float GetVelThreshold(base::ObjectPtr obj) const
Get vehicle threshold
void ToObject(base::ObjectPtr obj_ptr) const
Convert tracked object to base object
void ComputeShapeFeatures()
Compute shape features
void TransformObjectCloudToWorld()
Transform object cloud to world
void CopyFrom(std::shared_ptr< TrackedObject > rhs, bool is_deep)
Copy from tracked object