Apollo 10.0
Autonomous Driving Open Platform
omt_obstacle_tracker.cc
Go to the documentation of this file.
/******************************************************************************
 * Copyright 2023 The Apollo Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *****************************************************************************/

#include <algorithm>
#include <functional>

#include "cyber/common/file.h"

namespace apollo {
namespace perception {
namespace camera {

bool OMTObstacleTracker::Init(const ObstacleTrackerInitOptions &options) {
  std::string config_file =
      GetConfigFile(options.config_path, options.config_file);
  if (!cyber::common::GetProtoFromFile(config_file, &omt_param_)) {
    AERROR << "Read config failed: " << config_file;
    return false;
  }
  track_id_ = 0;
  frame_num_ = 0;
  // frame list
  frame_buffer_.set_capacity(omt_param_.img_capability());
  // similar map
  gpu_id_ = options.gpu_id;
  similar_map_.Init(omt_param_.img_capability(), gpu_id_);
  similar_.reset(new GPUSimilar);
  // reference
  width_ = options.image_width;
  height_ = options.image_height;
  reference_.Init(omt_param_.reference(), width_, height_);
  // type change cost file
  std::string type_change_cost_file =
      GetConfigFile(options.config_path, omt_param_.type_change_cost_file());
  std::ifstream fin(type_change_cost_file);
  ACHECK(fin.is_open());
  kTypeAssociatedCost_.clear();
  int n_type = static_cast<int>(base::ObjectSubType::MAX_OBJECT_TYPE);
  for (int i = 0; i < n_type; ++i) {
    kTypeAssociatedCost_.emplace_back(std::vector<float>(n_type, 0));
    for (int j = 0; j < n_type; ++j) {
      fin >> kTypeAssociatedCost_[i][j];
    }
  }
  targets_.clear();
  used_.clear();

  // reset object template
  object_template_manager_ = ObjectTemplateManager::Instance();

  // reset feature_extractor_
  PluginParam plugin_param = omt_param_.plugin_param();
  feature_extractor_.reset(
      BaseFeatureExtractorRegisterer::GetInstanceByName(plugin_param.name()));
  if (nullptr == feature_extractor_) {
    AERROR << "Failed to get tracking instance.";
    return false;
  }

  // init feature_extractor_
  FeatureExtractorInitOptions feature_init_options;

  feature_init_options.input_height = omt_param_.feature_input_height();
  feature_init_options.input_width = omt_param_.feature_input_width();
  feature_init_options.gpu_id = gpu_id_;
  feature_init_options.config_path = plugin_param.config_path();
  feature_init_options.config_file = plugin_param.config_file();

  if (!feature_extractor_->Init(feature_init_options)) {
    AERROR << "Failed to init feature_extractor.";
    return false;
  }

  // init object template
  ACHECK(ObjectTemplateManager::Instance()->Init());

  return true;
}
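
// Note: the type-change cost file read in Init() above is a plain-text
// N x N matrix of floats (N = base::ObjectSubType::MAX_OBJECT_TYPE), consumed
// row by row with operator>>. Entry (i, j) is the cost of a target currently
// typed as sub-type i being associated with a detection of sub-type j, and it
// is subtracted from the hypothesis score in GenerateHypothesis(). A purely
// illustrative 3-type example (hypothetical values, not the shipped config):
//   0.0 0.6 0.9
//   0.6 0.0 0.7
//   0.9 0.7 0.0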

bool OMTObstacleTracker::Process(std::shared_ptr<CameraTrackingFrame> frame) {
  if (nullptr == frame) {
    return false;
  }
  if (!FeatureExtract(frame.get())) {
    AERROR << "FeatureExtract failed!";
    return false;
  }

  if (!Predict(frame.get())) {
    AERROR << "Predict failed!";
    return false;
  }

  if (!Associate2D(frame)) {
    AERROR << "Associate2D failed!";
    return false;
  }

  if (!Associate3D(frame)) {
    AERROR << "Associate3D failed!";
    return false;
  }
  return true;
}
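
// Note: Process() is the per-frame entry point and runs the stages in a fixed
// order: appearance features are extracted first, each existing target is then
// propagated with its Kalman filters (Predict), detections are matched to
// targets in the image plane (Associate2D), and finally the 3D states and
// velocities are updated (Associate3D). Any stage returning false aborts the
// frame.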

bool OMTObstacleTracker::FeatureExtract(CameraTrackingFrame *frame) {
  FeatureExtractorOptions feature_options;
  feature_options.normalized = true;
  bool res = feature_extractor_->Extract(feature_options, frame);
  return res;
}

// todo(huqilin): need to optimize
// bool OMTObstacleTracker::CalSimilarity(CameraTrackingFrame *frame) {
//   frame_list_.Add(frame);
//   for (int t = 0; t < frame_list_.Size(); t++) {
//     int frame1 = frame_list_[t]->frame_id;
//     int frame2 = frame_list_[-1]->frame_id;
//     similar_->Calc(frame_list_[frame1], frame_list_[frame2],
//                    similar_map_.get(frame1, frame2).get());
//   }
//   for (auto &target : targets_) {
//     target.RemoveOld(frame_list_.OldestFrameId());
//     ++target.lost_age;
//   }
//   return true;
// }

bool OMTObstacleTracker::Predict(CameraTrackingFrame *frame) {
  for (auto &target : targets_) {
    // use kalman filter to predict the image_center,
    // world_center, world_center_const
    target.Predict(frame);
    auto obj = target.latest_object;
    frame->proposed_objects.push_back(obj->object);
  }
  return true;
}

// @description combine targets using iou after association
bool OMTObstacleTracker::CombineDuplicateTargets() {
  std::vector<Hypothesis> score_list;
  Hypothesis hypo;
  for (size_t i = 0; i < targets_.size(); ++i) {
    if (targets_[i].Size() == 0) {
      continue;
    }
    for (size_t j = i + 1; j < targets_.size(); ++j) {
      if (targets_[j].Size() == 0) {
        continue;
      }
      int count = 0;
      float score = 0.0f;
      int index1 = 0;
      int index2 = 0;
      while (index1 < targets_[i].Size() && index2 < targets_[j].Size()) {
        auto p1 = targets_[i][index1];
        auto p2 = targets_[j][index2];
        if (std::abs(p1->timestamp - p2->timestamp) <
            omt_param_.same_ts_eps()) {
          if (p1->indicator.sensor_name != p2->indicator.sensor_name) {
            auto box1 = p1->projected_box;
            auto box2 = p2->projected_box;
            score += algorithm::CalculateIOUBBox(box1, box2);
            base::RectF rect1(box1);
            base::RectF rect2(box2);
            score -= std::abs((rect1.width - rect2.width) *
                              (rect1.height - rect2.height) /
                              (rect1.width * rect1.height));
            count += 1;
          }
          ++index1;
          ++index2;
        } else {
          if (p1->timestamp > p2->timestamp) {
            ++index2;
          } else {
            ++index1;
          }
        }
      }
      ADEBUG << "Overlap: (" << targets_[i].id << "," << targets_[j].id
             << ") score " << score << " count " << count;
      hypo.target = static_cast<int>(i);
      hypo.object = static_cast<int>(j);
      hypo.score = (count > 0) ? score / static_cast<float>(count) : 0;
      if (hypo.score < omt_param_.target_combine_iou_threshold()) {
        continue;
      }
      score_list.push_back(hypo);
    }
  }
  sort(score_list.begin(), score_list.end(), std::greater<Hypothesis>());
  std::vector<bool> used_target(targets_.size(), false);
  for (auto &pair : score_list) {
    if (used_target[pair.target] || used_target[pair.object]) {
      continue;
    }
    int index1 = pair.target;
    int index2 = pair.object;
    if (targets_[pair.target].id > targets_[pair.object].id) {
      index1 = pair.object;
      index2 = pair.target;
    }
    Target &target_save = targets_[index1];
    Target &target_del = targets_[index2];
    for (int i = 0; i < target_del.Size(); i++) {
      // no need to change track_id of all objects in target_del
      target_save.Add(target_del[i]);
    }
    std::sort(
        target_save.tracked_objects.begin(), target_save.tracked_objects.end(),
        [](const TrackObjectPtr object1, const TrackObjectPtr object2) -> bool {
          return object1->indicator.frame_id < object2->indicator.frame_id;
        });
    target_save.latest_object = target_save.get_object(-1);
    base::ObjectPtr object = target_del.latest_object->object;
    target_del.Clear();
    AINFO << "Target " << target_del.id << " is merged into Target "
          << target_save.id << " with iou " << pair.score;
    used_target[pair.object] = true;
    used_target[pair.target] = true;
  }
  return true;
}
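
// Note on the merge score above: for every pair of live targets, observations
// with (nearly) the same timestamp but from different cameras are compared;
// each such pair contributes its projected-box IoU minus a penalty based on
// the width/height difference relative to the first box's area. The per-pair
// scores are averaged over the matched observations, and two targets are
// merged only if the average exceeds target_combine_iou_threshold; the
// surviving target is the one with the smaller (older) track id.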

void OMTObstacleTracker::GenerateHypothesis(const TrackObjectPtrs &objects) {
  std::vector<Hypothesis> score_list;
  Hypothesis hypo;
  for (size_t i = 0; i < targets_.size(); ++i) {
    for (size_t j = 0; j < objects.size(); ++j) {
      hypo.target = static_cast<int>(i);
      hypo.object = static_cast<int>(j);
      // calculate the feature similarity between Target and object
      float sa = ScoreAppearance(targets_[i], objects[j]);
      // calculate the motion similarity between Target and object
      float sm = ScoreMotion(targets_[i], objects[j]);
      // calculate the shape similarity between Target and object
      float ss = ScoreShape(targets_[i], objects[j]);
      // calculate the overlap similarity (iou) between Target and object
      float so = ScoreOverlap(targets_[i], objects[j]);

      hypo.score = (omt_param_.weight_same_camera().appearance() * sa +
                    omt_param_.weight_same_camera().motion() * sm);

      int change_from_type = static_cast<int>(targets_[i].type);
      int change_to_type = static_cast<int>(objects[j]->object->sub_type);
      hypo.score += -kTypeAssociatedCost_[change_from_type][change_to_type];

      AINFO << "Detection " << objects[j]->indicator.frame_id << " object "
            << objects[j]->object->id << " "
            << "(" << j << ") sa:" << sa << " sm: " << sm << " ss: " << ss
            << " so: " << so << " score: " << hypo.score;

      // 95.44% area is range [mu - sigma*2, mu + sigma*2]
      // don't match if motion is beyond the range
      if (sm < 0.045 || hypo.score < omt_param_.target_thresh()) {
        continue;
      }
      score_list.push_back(hypo);
    }
  }

  sort(score_list.begin(), score_list.end(), std::greater<Hypothesis>());
  std::vector<bool> used_target(targets_.size(), false);
  for (auto &pair : score_list) {
    if (used_target[pair.target] || used_[pair.object]) {
      continue;
    }
    Target &target = targets_[pair.target];
    auto det_obj = objects[pair.object];
    target.Add(det_obj);
    used_[pair.object] = true;
    used_target[pair.target] = true;
    AINFO << "Target " << target.id << " match " << det_obj->indicator.frame_id
          << " (" << pair.object << ")"
          << " at " << pair.score << " size: " << target.Size();
  }
}
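
// Note: GenerateHypothesis() above performs a greedy one-to-one assignment
// rather than a globally optimal (e.g. Hungarian) matching: all
// (target, detection) pairs that pass the motion gate and the score threshold
// are sorted by score and consumed from best to worst, marking each target
// and detection as used the first time it appears.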

float OMTObstacleTracker::ScoreMotion(const Target &target,
                                      TrackObjectPtr track_obj) {
  Eigen::Vector4d x = target.image_center.get_state();
  float target_centerx = static_cast<float>(x[0]);
  float target_centery = static_cast<float>(x[1]);
  base::Point2DF center = track_obj->projected_box.Center();
  base::RectF rect(track_obj->projected_box);
  float s = gaussian(center.x, target_centerx, rect.width) *
            gaussian(center.y, target_centery, rect.height);
  return s;
}
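
// Note: the motion score above is the product of two 1-D gaussian kernels
// evaluated at the detection's projected-box center, with the Kalman-predicted
// image center as the mean and the box width/height acting as the standard
// deviations. The sm < 0.045 gate in GenerateHypothesis() therefore rejects
// detections whose center lands far (roughly beyond the two-sigma range noted
// in the comment there) from where the target was predicted to be.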

float OMTObstacleTracker::ScoreShape(const Target &target,
                                     TrackObjectPtr track_obj) {
  Eigen::Vector2d shape = target.image_wh.get_state();
  base::RectF rect(track_obj->projected_box);
  float s = static_cast<float>((shape[1] - rect.height) *
                               (shape[0] - rect.width) / (shape[1] * shape[0]));
  return -std::abs(s);
}

float OMTObstacleTracker::ScoreAppearance(const Target &target,
                                          TrackObjectPtr track_obj) {
  float energy = 0.0f;
  int count = 0;
  // auto sensor_name = track_obj->indicator.sensor_name;
  for (int i = target.Size() - 1; i >= 0; --i) {
    // if (target[i]->indicator.sensor_name != sensor_name) {
    //   continue;
    // }
    PatchIndicator p1 = target[i]->indicator;
    PatchIndicator p2 = track_obj->indicator;

    energy += similar_map_.sim(p1, p2);
    count += 1;
  }

  return energy / (0.1f + static_cast<float>(count) * 0.9f);
}
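
// Note: ScoreAppearance() above sums the precomputed patch similarities
// between the candidate detection and every stored observation of the target,
// then divides by 0.1 + 0.9 * count rather than by count. This keeps the
// score defined for an empty history (count == 0 gives 0 / 0.1 = 0) and, for
// count >= 2, makes the divisor slightly smaller than count, so the result
// sits marginally above a plain average for longer histories.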

// [new]
float OMTObstacleTracker::ScoreOverlap(const Target &target,
                                       TrackObjectPtr track_obj) {
  Eigen::Vector4d center = target.image_center.get_state();
  Eigen::VectorXd wh = target.image_wh.get_state();
  base::BBox2DF box_target;
  box_target.xmin = static_cast<float>(center[0] - wh[0] * 0.5);
  box_target.xmax = static_cast<float>(center[0] + wh[0] * 0.5);
  box_target.ymin = static_cast<float>(center[1] - wh[1] * 0.5);
  box_target.ymax = static_cast<float>(center[1] + wh[1] * 0.5);

  auto box_obj = track_obj->projected_box;

  float iou = algorithm::CalculateIOUBBox(box_target, box_obj);
  return iou;
}

void ProjectBox(const base::BBox2DF &box_origin,
                const Eigen::Matrix3d &transform,
                base::BBox2DF *box_projected) {
  Eigen::Vector3d point;
  // top left
  point << box_origin.xmin, box_origin.ymin, 1;
  point = transform * point;
  box_projected->xmin = static_cast<float>(point[0] / point[2]);
  box_projected->ymin = static_cast<float>(point[1] / point[2]);
  // bottom right
  point << box_origin.xmax, box_origin.ymax, 1;
  point = transform * point;
  box_projected->xmax = static_cast<float>(point[0] / point[2]);
  box_projected->ymax = static_cast<float>(point[1] / point[2]);
}
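
// Note: ProjectBox() maps only the top-left and bottom-right corners through
// the 3x3 projective transform in homogeneous coordinates,
// [u, v, w]^T = T * [x, y, 1]^T, followed by the perspective division
// (u / w, v / w). As a quick, purely illustrative sanity check: projecting a
// box with corners (10, 20) and (50, 80) through Eigen::Matrix3d::Identity()
// returns the same corners, since w stays 1.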

int OMTObstacleTracker::CreateNewTarget(const TrackObjectPtrs &objects) {
  const TemplateMap &kMinTemplateHWL =
      object_template_manager_->MinTemplateHWL();
  std::vector<base::RectF> target_rects;
  for (auto &&target : targets_) {
    if (!target.isTracked() || target.isLost()) {
      continue;
    }
    base::RectF target_rect(target[-1]->object->camera_supplement.box);
    target_rects.push_back(target_rect);
  }
  int created_count = 0;
  for (size_t i = 0; i < objects.size(); ++i) {
    if (!used_[i]) {
      bool is_covered = false;
      const auto &sub_type = objects[i]->object->sub_type;
      base::RectF rect(objects[i]->object->camera_supplement.box);
      auto &min_tmplt = kMinTemplateHWL.at(sub_type);
      // check if the object is out of valid region
      if (OutOfValidRegion(rect, width_, height_, omt_param_.border())) {
        AINFO << "Out of valid region";
        AINFO << "Rect x: " << rect.x << " Rect y: " << rect.y
              << " Rect height: " << rect.height
              << " Rect width: " << rect.width << " GT height_: " << height_
              << " GT width_: " << width_;
        continue;
      }
      for (auto &&target_rect : target_rects) {
        // check if rect is covered by target_rect
        if (IsCovered(rect, target_rect, 0.4f) ||
            IsCoveredHorizon(rect, target_rect, 0.5f)) {
          is_covered = true;
          break;
        }
      }
      if (is_covered) {
        continue;
      }
      if (min_tmplt.empty()  // unknown type
          || rect.height > min_tmplt[0] * omt_param_.min_init_height_ratio()) {
        Target target(omt_param_.target_param());
        target.Add(objects[i]);
        targets_.push_back(target);
        AINFO << "Target " << target.id << " is created by "
              << objects[i]->indicator.frame_id << " ("
              << objects[i]->indicator.patch_id << ")";
        created_count += 1;
      }
    }
  }
  return created_count;
}
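
// Note: a detection spawns a new target only if it passes every gate above:
// it was not matched in this frame (used_[i] is false), its box lies inside
// the valid image region (OutOfValidRegion with the configured border), it is
// not largely covered by an already-tracked target (IsCovered /
// IsCoveredHorizon with the 0.4 / 0.5 thresholds), and its pixel height
// exceeds min_init_height_ratio times the minimum template height for its
// sub-type (sub-types with an empty template, i.e. unknown, are always
// allowed).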

bool OMTObstacleTracker::Associate2D(
    std::shared_ptr<CameraTrackingFrame> frame) {
  // pre-compute appearance similarities between frames
  frame_buffer_.push_back(frame);
  // calculate the similarity between the current frame and the previous frames
  std::shared_ptr<CameraTrackingFrame> cur_frame_ptr = frame_buffer_.back();
  for (auto frame_ptr : frame_buffer_) {
    int pre_frame_id = frame_ptr->frame_id;
    int cur_frame_id = cur_frame_ptr->frame_id;
    similar_->Calc(frame_ptr.get(), cur_frame_ptr.get(),
                   similar_map_.get(pre_frame_id, cur_frame_id).get());
  }

  // remove oldest frame in Target
  for (auto &target : targets_) {
    target.RemoveOld(frame_buffer_.front()->frame_id);
    ++target.lost_age;
  }
  // clear empty Targets in targets_
  ClearTargets();
  // correct detection 3d size
  reference_.CorrectSize(frame.get());

  TrackObjectPtrs track_objects;
  for (size_t i = 0; i < frame->detected_objects.size(); ++i) {
    TrackObjectPtr track_ptr(new TrackObject);
    track_ptr->object = frame->detected_objects[i];
    track_ptr->object->id = static_cast<int>(i);
    track_ptr->timestamp = frame->timestamp;
    track_ptr->indicator = PatchIndicator(frame->frame_id, static_cast<int>(i),
                                          frame->data_provider->sensor_name());
    track_ptr->object->camera_supplement.sensor_name =
        frame->data_provider->sensor_name();
    ProjectBox(frame->detected_objects[i]->camera_supplement.box,
               frame->project_matrix, &(track_ptr->projected_box));
    track_objects.push_back(track_ptr);
  }
  // wxt todo: solve reference and calibration_service
  used_.clear();
  used_.resize(frame->detected_objects.size(), false);
  // generate hypothesis between existing Targets
  // and track_objects in the new frame
  GenerateHypothesis(track_objects);
  // create new Targets for track_objects that are not
  // associated with any existing Targets
  int new_count = CreateNewTarget(track_objects);
  AINFO << "Create " << new_count << " new target";

  for (auto &target : targets_) {
    // remove old Target
    if (target.lost_age > omt_param_.reserve_age()) {
      AINFO << "Target " << target.id << " is lost";
      target.Clear();
    } else {
      // update the sub_type and size of the Target
      target.UpdateType(frame.get());
      // update the 2D center, width and height of the Target
      target.Update2D(frame.get());
    }
  }

  // combine targets using iou after association
  CombineDuplicateTargets();

  // return filter result to original box
  Eigen::Matrix3d inverse_project = frame->project_matrix.inverse();
  for (auto &target : targets_) {
    if (!target.isLost()) {
      ProjectBox(target[-1]->projected_box, inverse_project,
                 &(target[-1]->object->camera_supplement.box));
      RefineBox(target[-1]->object->camera_supplement.box, width_, height_,
                &(target[-1]->object->camera_supplement.box));
    }
  }
  return true;
}

void OMTObstacleTracker::ClearTargets() {
  int left = 0;
  int end = static_cast<int>(targets_.size() - 1);
  while (left <= end) {
    if ((targets_[left].Size() == 0)) {
      while ((left < end) && (targets_[end].Size() == 0)) {
        --end;
      }
      if (left >= end) {
        break;
      }
      targets_[left] = targets_[end];
      --end;
    }
    ++left;
  }
  targets_.erase(targets_.begin() + left, targets_.end());
}
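
// Note: ClearTargets() above compacts targets_ with two indices: each empty
// slot found from the left is overwritten by a non-empty target taken from
// the right end, and the trailing range is erased afterwards. This is linear
// but does not preserve the relative order of the surviving targets. A sketch
// of an order-preserving alternative (not what this file does) would be the
// erase-remove idiom:
//   targets_.erase(
//       std::remove_if(targets_.begin(), targets_.end(),
//                      [](Target &t) { return t.Size() == 0; }),
//       targets_.end());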

bool OMTObstacleTracker::Associate3D(
    std::shared_ptr<CameraTrackingFrame> frame) {
  reference_.UpdateReference(frame.get(), targets_);
  frame->tracked_objects.clear();
  TrackObjectPtrs track_objects;
  // mismatch may lead to an abnormal movement
  // if an abnormal movement is found, remove old target and create new one
  for (auto &target : targets_) {
    if (target.isLost() || target.Size() == 1) {
      continue;
    }
    auto obj = target[-1]->object;
    if (obj->type != base::ObjectType::UNKNOWN_UNMOVABLE) {
      Eigen::VectorXd x = target.world_center.get_state();
      double move = sqr(x[0] - obj->center[0]) + sqr(x[1] - obj->center[1]);
      float obj_2_car_x = obj->camera_supplement.local_center[0];
      float obj_2_car_y = obj->camera_supplement.local_center[2];
      float dis = obj_2_car_x * obj_2_car_x + obj_2_car_y * obj_2_car_y;
      if (move > sqr(omt_param_.abnormal_movement()) * dis) {
        AINFO << "Target " << target.id << " is removed for abnormal movement";
        track_objects.push_back(target.latest_object);
        target.Clear();
      }
    }
  }
  ClearTargets();
  used_.clear();
  used_.resize(track_objects.size(), false);
  int new_count = CreateNewTarget(track_objects);
  AINFO << "Create " << new_count << " new target";
  for (int j = 0; j < new_count; ++j) {
    targets_[targets_.size() - j - 1].Update2D(frame.get());
    targets_[targets_.size() - j - 1].UpdateType(frame.get());
  }
  for (Target &target : targets_) {
    target.Update3D(frame.get());
    if (!target.isLost()) {
      frame->tracked_objects.push_back(target[-1]->object);
      ADEBUG << "Target " << target.id
             << " velocity: " << target.world_center.get_state().transpose()
             << " % " << target.world_center.variance_.diagonal().transpose()
             << " % " << target[-1]->object->velocity.transpose();
    }
  }
  return true;
}
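
// Note on the abnormal-movement gate above: with the squared displacement
// move = (x_pred - x_obs)^2 + (y_pred - y_obs)^2 and dis the squared distance
// of the object from the ego car (local_center x and z), a target is reset
// when
//   move > abnormal_movement^2 * dis,
// i.e. when the jump between the filtered world center and the newest
// observation exceeds abnormal_movement times the object's range. The allowed
// jump therefore scales with distance, tolerating the larger depth noise of
// far-away detections.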

REGISTER_OBSTACLE_TRACKER(OMTObstacleTracker);

}  // namespace camera
}  // namespace perception
}  // namespace apollo