Commit 95e32ae6 authored by Liangliang Zhang, committed by Dong Li


Perception: Don't put an alias in your public API just to save typing in the implementation. (https://google.github.io/styleguide/cppguide.html)
Parent b952464f
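The change is mechanical throughout: the public typedef PbfSensorObjectPtr is dropped from pbf_sensor_object.h and every signature now spells out std::shared_ptr<PbfSensorObject>. A minimal before/after sketch of the pattern (illustrative only, not part of the diff):

    // Before: a typedef in the public header exists only to save typing.
    typedef std::shared_ptr<PbfSensorObject> PbfSensorObjectPtr;
    virtual void UpdateWithObject(const PbfSensorObjectPtr new_object,
                                  const double time_diff) = 0;

    // After: the public API names the pointer type explicitly; call sites are
    // unaffected, since they already pass std::shared_ptr<PbfSensorObject> values.
    virtual void UpdateWithObject(
        const std::shared_ptr<PbfSensorObject> new_object,
        const double time_diff) = 0;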
......@@ -85,7 +85,7 @@ PbfSensorFramePtr AsyncFusion::ConstructFrame(const SensorObjects &frame) {
pbf_frame->objects.resize(frame.objects.size());
for (size_t i = 0; i < frame.objects.size(); i++) {
PbfSensorObjectPtr obj(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> obj(new PbfSensorObject());
obj->timestamp = frame.timestamp;
obj->sensor_type = frame.sensor_type;
obj->object->clone(*(frame.objects[i]));
......@@ -129,9 +129,9 @@ void AsyncFusion::FuseFrame(const PbfSensorFramePtr &frame) {
<< "object_number: " << frame->objects.size() << ","
<< "timestamp: " << std::fixed << std::setprecision(12)
<< frame->timestamp;
std::vector<PbfSensorObjectPtr> &objects = frame->objects;
std::vector<PbfSensorObjectPtr> background_objects;
std::vector<PbfSensorObjectPtr> foreground_objects;
std::vector<std::shared_ptr<PbfSensorObject>> &objects = frame->objects;
std::vector<std::shared_ptr<PbfSensorObject>> background_objects;
std::vector<std::shared_ptr<PbfSensorObject>> foreground_objects;
DecomposeFrameObjects(objects, &foreground_objects, &background_objects);
AINFO << "There are " << foreground_objects.size() << " foreground objects "
<< "\n " << background_objects.size() << " background objects";
......@@ -143,7 +143,7 @@ void AsyncFusion::FuseFrame(const PbfSensorFramePtr &frame) {
}
void AsyncFusion::CreateNewTracks(
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<int> &unassigned_ids) {
for (size_t i = 0; i < unassigned_ids.size(); ++i) {
int id = unassigned_ids[i];
......@@ -153,7 +153,7 @@ void AsyncFusion::CreateNewTracks(
}
void AsyncFusion::UpdateAssignedTracks(
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<std::pair<int, int>> &assignments,
const std::vector<double> &track_object_dist,
std::vector<PbfTrackPtr> const *tracks) {
......@@ -190,7 +190,7 @@ void AsyncFusion::CollectFusedObjects(double timestamp,
int fg_obj_num = 0;
std::vector<PbfTrackPtr> &tracks = track_manager_->GetTracks();
for (size_t i = 0; i < tracks.size(); i++) {
PbfSensorObjectPtr fused_object = tracks[i]->GetFusedObject();
std::shared_ptr<PbfSensorObject> fused_object = tracks[i]->GetFusedObject();
ObjectPtr obj(new Object());
obj->clone(*(fused_object->object));
obj->track_id = tracks[i]->GetTrackId();
......@@ -205,9 +205,9 @@ void AsyncFusion::CollectFusedObjects(double timestamp,
}
void AsyncFusion::DecomposeFrameObjects(
const std::vector<PbfSensorObjectPtr> &frame_objects,
std::vector<PbfSensorObjectPtr> *foreground_objects,
std::vector<PbfSensorObjectPtr> *background_objects) {
const std::vector<std::shared_ptr<PbfSensorObject>> &frame_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *background_objects) {
foreground_objects->clear();
background_objects->clear();
for (size_t i = 0; i < frame_objects.size(); i++) {
......@@ -222,7 +222,7 @@ void AsyncFusion::DecomposeFrameObjects(
void AsyncFusion::FuseForegroundObjects(
const Eigen::Vector3d &ref_point, const SensorType &sensor_type,
const std::string &sensor_id, const double timestamp,
std::vector<PbfSensorObjectPtr> *foreground_objects) {
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects) {
std::vector<int> unassigned_tracks;
std::vector<int> unassigned_objects;
std::vector<std::pair<int, int>> assignments;
......
......@@ -53,12 +53,13 @@ class AsyncFusion : public BaseFusion {
void FuseFrame(const PbfSensorFramePtr &frame);
/**@brief create new tracks for objects not assigned to current tracks*/
void CreateNewTracks(const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<int> &unassigned_ids);
void CreateNewTracks(
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<int> &unassigned_ids);
/**@brief update current tracks with matched objects*/
void UpdateAssignedTracks(
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<std::pair<int, int>> &assignments,
const std::vector<double> &track_objects_dist,
std::vector<PbfTrackPtr> const *tracks);
......@@ -75,14 +76,14 @@ class AsyncFusion : public BaseFusion {
std::vector<ObjectPtr> *fused_objects);
void DecomposeFrameObjects(
const std::vector<PbfSensorObjectPtr> &frame_objects,
std::vector<PbfSensorObjectPtr> *foreground_objects,
std::vector<PbfSensorObjectPtr> *background_objects);
const std::vector<std::shared_ptr<PbfSensorObject>> &frame_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *background_objects);
void FuseForegroundObjects(
const Eigen::Vector3d &ref_point, const SensorType &sensor_type,
const std::string &sensor_id, const double timestamp,
std::vector<PbfSensorObjectPtr> *foreground_objects);
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects);
PbfSensorFramePtr ConstructFrame(const SensorObjects &obj);
......
......@@ -17,6 +17,7 @@
#ifndef MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PBF_BASE_MOTION_FUSION_H_ // NOLINT
#define MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PBF_BASE_MOTION_FUSION_H_ // NOLINT
#include <memory>
#include <string>
#include <utility>
#include <vector>
......@@ -42,7 +43,8 @@ class PbfBaseMotionFusion {
// @brief initialize the state of the filter
// @params[IN] new_object: initial object for filtering
// @return nothing
virtual void Initialize(const PbfSensorObjectPtr new_object) = 0;
virtual void Initialize(
const std::shared_ptr<PbfSensorObject> new_object) = 0;
// @brief predict the state of filter
// @params[OUT] anchor_point: predicted anchor point for filtering
......@@ -56,8 +58,9 @@ class PbfBaseMotionFusion {
// @params[IN] new_object: new object for current update
// @params[IN] time_diff: time interval from last update;
// @return nothing
virtual void UpdateWithObject(const PbfSensorObjectPtr new_object,
const double time_diff) = 0;
virtual void UpdateWithObject(
const std::shared_ptr<PbfSensorObject> new_object,
const double time_diff) = 0;
// @brief update without measurements
// @params[IN] time_diff: time interval from last update
......
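Caller code for this interface is unchanged apart from the spelled-out pointer type. A hypothetical usage snippet, where fusion stands for any concrete PbfBaseMotionFusion such as PbfKalmanMotionFusion or PbfIMFFusion:

    std::shared_ptr<PbfSensorObject> measurement(new PbfSensorObject());
    fusion->Initialize(measurement);             // seed the filter with the first object
    fusion->UpdateWithObject(measurement, 0.1);  // fuse a new measurement, time_diff = 0.1 s
    fusion->UpdateWithoutObject(0.1);            // predict-only step when nothing is matched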
......@@ -17,7 +17,6 @@
#include "modules/perception/obstacle/fusion/probabilistic_fusion/pbf_base_track_object_matcher.h"
#include <numeric>
#include <string>
#include <unordered_map>
namespace apollo {
......@@ -35,7 +34,7 @@ double PbfBaseTrackObjectMatcher::GetMaxMatchDistance() {
void PbfBaseTrackObjectMatcher::IdAssign(
const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
std::vector<int> *unassigned_sensor_objects) {
......@@ -61,7 +60,7 @@ void PbfBaseTrackObjectMatcher::IdAssign(
std::unordered_map<int, int> track_id_2_sensor_id;
for (size_t i = 0; i < num_track; ++i) {
PbfSensorObjectPtr obj =
std::shared_ptr<PbfSensorObject> obj =
fusion_tracks[i]->GetSensorObject(sensor_type, sensor_id);
if (obj == nullptr) {
continue;
......
......@@ -17,6 +17,7 @@
#ifndef MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PBF_BASE_TRACK_OBJECT_MATCHER_H_ // NOLINT
#define MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PBF_BASE_TRACK_OBJECT_MATCHER_H_ // NOLINT
#include <memory>
#include <string>
#include <utility>
#include <vector>
......@@ -49,24 +50,26 @@ class PbfBaseTrackObjectMatcher {
// @params[OUT] measurement2track_dist: minimum match distance to tracks for
// each measurement
// @return nothing
virtual bool Match(const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const TrackObjectMatcherOptions &options,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
std::vector<int> *unassigned_sensor_tracks,
std::vector<double> *track2measurements_dist,
std::vector<double> *measurement2track_dist) = 0;
virtual bool Match(
const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const TrackObjectMatcherOptions &options,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
std::vector<int> *unassigned_sensor_tracks,
std::vector<double> *track2measurements_dist,
std::vector<double> *measurement2track_dist) = 0;
virtual bool Init() = 0;
virtual std::string name() const = 0;
void IdAssign(const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
std::vector<int> *unassigned_sensor_objects);
void IdAssign(
const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
std::vector<int> *unassigned_sensor_objects);
static void SetMaxMatchDistance(const double dist);
......
......@@ -26,7 +26,7 @@ namespace perception {
bool PbfHmTrackObjectMatcher::Match(
const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const TrackObjectMatcherOptions &options,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
......@@ -121,7 +121,7 @@ std::string PbfHmTrackObjectMatcher::name() const {
void PbfHmTrackObjectMatcher::ComputeAssociationMat(
const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<int> &unassigned_fusion_tracks,
const std::vector<int> &unassigned_sensor_objects,
const Eigen::Vector3d &ref_point,
......@@ -139,7 +139,8 @@ void PbfHmTrackObjectMatcher::ComputeAssociationMat(
const PbfTrackPtr &fusion_track = fusion_tracks[fusion_idx];
for (size_t j = 0; j < unassigned_sensor_objects.size(); ++j) {
int sensor_idx = unassigned_sensor_objects[j];
const PbfSensorObjectPtr &sensor_object = sensor_objects[sensor_idx];
const std::shared_ptr<PbfSensorObject> &sensor_object =
sensor_objects[sensor_idx];
double distance =
pbf_distance.Compute(fusion_track, sensor_object, options);
ADEBUG << "sensor distance:" << distance;
......
......@@ -17,6 +17,7 @@
#ifndef MODULES_PERCEPTION_OBSTACLE_FUSION_PBF_PBF_HM_TRACK_OBJECT_MATCHER_H_
#define MODULES_PERCEPTION_OBSTACLE_FUSION_PBF_PBF_HM_TRACK_OBJECT_MATCHER_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
......@@ -36,14 +37,15 @@ class PbfHmTrackObjectMatcher : public PbfBaseTrackObjectMatcher {
PbfHmTrackObjectMatcher() = default;
virtual ~PbfHmTrackObjectMatcher() = default;
bool Match(const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const TrackObjectMatcherOptions &options,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
std::vector<int> *unassigned_sensor_tracks,
std::vector<double> *track2measurements_dist,
std::vector<double> *measurement2track_dist) override;
bool Match(
const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const TrackObjectMatcherOptions &options,
std::vector<std::pair<int, int>> *assignments,
std::vector<int> *unassigned_fusion_tracks,
std::vector<int> *unassigned_sensor_tracks,
std::vector<double> *track2measurements_dist,
std::vector<double> *measurement2track_dist) override;
bool Init() override;
......@@ -52,7 +54,7 @@ class PbfHmTrackObjectMatcher : public PbfBaseTrackObjectMatcher {
protected:
void ComputeAssociationMat(
const std::vector<PbfTrackPtr> &fusion_tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<int> &unassigned_fusion_tracks,
const std::vector<int> &unassigned_sensor_objects,
const Eigen::Vector3d &ref_point,
......
......@@ -15,6 +15,7 @@
*****************************************************************************/
#include "modules/perception/obstacle/fusion/probabilistic_fusion/pbf_imf_fusion.h" // NOLINT
#include "modules/common/log.h"
#include "modules/perception/obstacle/base/types.h"
......@@ -30,11 +31,12 @@ PbfIMFFusion::~PbfIMFFusion() {}
void PbfIMFFusion::Initialize(const Eigen::Vector3d& anchor_point,
const Eigen::Vector3d& velocity) {
_belief_anchor_point = anchor_point;
_belief_velocity = velocity;
belief_anchor_point_ = anchor_point;
belief_velocity_ = velocity;
}
void PbfIMFFusion::Initialize(const PbfSensorObjectPtr new_object) {
void PbfIMFFusion::Initialize(
const std::shared_ptr<PbfSensorObject> new_object) {
if (new_object == nullptr) {
AERROR << "Initialize PbfInformationMotionFusion with null sensor object";
return;
......@@ -44,25 +46,25 @@ void PbfIMFFusion::Initialize(const PbfSensorObjectPtr new_object) {
return;
}
_belief_anchor_point = new_object->object->anchor_point;
_belief_velocity = new_object->object->velocity;
belief_anchor_point_ = new_object->object->anchor_point;
belief_velocity_ = new_object->object->velocity;
// initialize states to the states of the detected obstacle
_posteriori_state(0) = _belief_anchor_point(0);
_posteriori_state(1) = _belief_anchor_point(1);
_posteriori_state(2) = _belief_velocity(0);
_posteriori_state(3) = _belief_velocity(1);
_priori_state = _posteriori_state;
_omega_matrix = new_object->object->state_uncertainty;
_omega_matrix = _omega_matrix.inverse().eval();
_xi = _omega_matrix * _posteriori_state;
_a_matrix.setIdentity();
_a_matrix(0, 2) = 0.05;
_a_matrix(1, 3) = 0.05;
_q_matrix.setIdentity();
ADEBUG << "PBFIMF pbf imf filter initial state is " << _posteriori_state(0)
<< " " << _posteriori_state(1) << " " << _posteriori_state(2) << " "
<< _posteriori_state(3)
posteriori_state_(0) = belief_anchor_point_(0);
posteriori_state_(1) = belief_anchor_point_(1);
posteriori_state_(2) = belief_velocity_(0);
posteriori_state_(3) = belief_velocity_(1);
priori_state_ = posteriori_state_;
omega_matrix_ = new_object->object->state_uncertainty;
omega_matrix_ = omega_matrix_.inverse().eval();
xi_ = omega_matrix_ * posteriori_state_;
a_matrix_.setIdentity();
a_matrix_(0, 2) = 0.05;
a_matrix_(1, 3) = 0.05;
q_matrix_.setIdentity();
ADEBUG << "PBFIMF pbf imf filter initial state is " << posteriori_state_(0)
<< " " << posteriori_state_(1) << " " << posteriori_state_(2) << " "
<< posteriori_state_(3)
<< " for object info: " << new_object->object->ToString();
std::cerr << "PBFIMF pbf imf initial uncertainty set "
<< new_object->object->state_uncertainty << std::endl;
......@@ -72,12 +74,12 @@ void PbfIMFFusion::Initialize(const PbfSensorObjectPtr new_object) {
void PbfIMFFusion::Predict(Eigen::Vector3d* anchor_point,
Eigen::Vector3d* velocity, const double time_diff) {
*anchor_point = _belief_anchor_point + _belief_velocity * time_diff;
*velocity = _belief_velocity;
*anchor_point = belief_anchor_point_ + belief_velocity_ * time_diff;
*velocity = belief_velocity_;
}
void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
const double time_diff) {
void PbfIMFFusion::UpdateWithObject(
const std::shared_ptr<PbfSensorObject> new_object, const double time_diff) {
if (new_object == nullptr) {
AERROR << "update PbfInformationMotionFusion with null sensor object";
return;
......@@ -91,9 +93,9 @@ void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
RemoveOutdatedSensorObjects(new_object->timestamp);
// print some debug information for debugging
ADEBUG << "\nPBFIMF: previous state: " << _belief_anchor_point(0) << " "
<< _belief_anchor_point(1) << " " << _belief_velocity(0) << " "
<< _belief_velocity(1);
ADEBUG << "\nPBFIMF: previous state: " << belief_anchor_point_(0) << " "
<< belief_anchor_point_(1) << " " << belief_velocity_(0) << " "
<< belief_velocity_(1);
ADEBUG << "PBFIMF: previous timestamp: " << std::fixed
<< std::setprecision(15) << last_fuse_timestamp;
ADEBUG << "PBFIMF: new object information: "
......@@ -103,28 +105,28 @@ void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
ADEBUG << "PBFIMF: time diff is " << time_diff;
// compute priori
_a_matrix.setIdentity();
_a_matrix(0, 2) = time_diff;
_a_matrix(1, 3) = time_diff;
_q_matrix.setIdentity();
_q_matrix = _q_matrix / 10;
_q_matrix = _q_matrix * time_diff;
_priori_state = _a_matrix * _posteriori_state;
_omega_matrix =
(_a_matrix * _omega_matrix.inverse() * _a_matrix.transpose() + _q_matrix);
_omega_matrix = _omega_matrix.inverse().eval();
ADEBUG << "PBFIMF:predicted state " << _priori_state(0) << " "
<< _priori_state(1) << " " << _priori_state(2) << " "
<< _priori_state(3);
_xi = _omega_matrix * _priori_state;
a_matrix_.setIdentity();
a_matrix_(0, 2) = time_diff;
a_matrix_(1, 3) = time_diff;
q_matrix_.setIdentity();
q_matrix_ = q_matrix_ / 10;
q_matrix_ = q_matrix_ * time_diff;
priori_state_ = a_matrix_ * posteriori_state_;
omega_matrix_ =
(a_matrix_ * omega_matrix_.inverse() * a_matrix_.transpose() + q_matrix_);
omega_matrix_ = omega_matrix_.inverse().eval();
ADEBUG << "PBFIMF:predicted state " << priori_state_(0) << " "
<< priori_state_(1) << " " << priori_state_(2) << " "
<< priori_state_(3);
xi_ = omega_matrix_ * priori_state_;
// sensor level processor noise matrix and trans matrix
const Eigen::Matrix4d* sensor_processor_noise;
const Eigen::Matrix4d* sensor_transition_matrix;
if (new_object->sensor_type == SensorType::CAMERA) {
_belief_anchor_point = new_object->object->center;
_belief_velocity = new_object->object->velocity;
belief_anchor_point_ = new_object->object->center;
belief_velocity_ = new_object->object->velocity;
if (!CameraFrameSupplement::state_vars.initialized_) {
AERROR << "process noise and trans matrix not initialized for camera";
return;
......@@ -133,8 +135,8 @@ void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
sensor_transition_matrix =
&(CameraFrameSupplement::state_vars.trans_matrix);
} else if (new_object->sensor_type == SensorType::RADAR) {
_belief_anchor_point = new_object->object->center;
_belief_velocity = new_object->object->velocity;
belief_anchor_point_ = new_object->object->center;
belief_velocity_ = new_object->object->velocity;
// for radar, we don't set externally yet, just use default value
/*if (!RadarFrameSupplement::state_vars.initialized_) {
......@@ -144,8 +146,8 @@ void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
sensor_processor_noise = &(RadarFrameSupplement::state_vars.process_noise);
sensor_transition_matrix = &(RadarFrameSupplement::state_vars.trans_matrix);
} else if (new_object->sensor_type == SensorType::VELODYNE_64) {
_belief_anchor_point = new_object->object->center;
_belief_velocity = new_object->object->velocity;
belief_anchor_point_ = new_object->object->center;
belief_velocity_ = new_object->object->velocity;
if (!LidarFrameSupplement::state_vars.initialized_) {
AERROR << "process noise and trans matrix not initialized for camera";
return;
......@@ -157,7 +159,7 @@ void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
return;
}
const PbfSensorObjectPtr sensor_object =
const std::shared_ptr<PbfSensorObject> sensor_object =
GetSensorLatestCache(new_object->sensor_type);
if (sensor_object != nullptr) {
......@@ -191,16 +193,16 @@ void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
ADEBUG << "PBFIMF: state sensor prev " << state_sensor_prev(0) << " "
<< state_sensor(1);
_omega_matrix =
_omega_matrix + (cov_sensor_inverse - cov_sensor_prev_inverse);
omega_matrix_ =
omega_matrix_ + (cov_sensor_inverse - cov_sensor_prev_inverse);
std::cerr << "PBFIMF: information prediction" << _xi << std::endl;
std::cerr << "PBFIMF: information prediction" << xi_ << std::endl;
std::cerr << "PBFIMF: information delta "
<< (cov_sensor_inverse * state_sensor -
cov_sensor_prev_inverse * state_sensor_prev)
<< std::endl;
_xi = _xi + (cov_sensor_inverse * state_sensor -
xi_ = xi_ + (cov_sensor_inverse * state_sensor -
cov_sensor_prev_inverse * state_sensor_prev);
} else {
// this case is weird, might lead to unexpected situation
......@@ -216,18 +218,18 @@ void PbfIMFFusion::UpdateWithObject(const PbfSensorObjectPtr new_object,
}
AWARN
<< "Sensor data deprecation in Fusion, should not see this many times";
_omega_matrix = 0.5 * _omega_matrix + 0.5 * cov_sensor.inverse();
_xi = 0.5 * _xi + 0.5 * cov_sensor.inverse() * state_sensor;
omega_matrix_ = 0.5 * omega_matrix_ + 0.5 * cov_sensor.inverse();
xi_ = 0.5 * xi_ + 0.5 * cov_sensor.inverse() * state_sensor;
}
_posteriori_state = _omega_matrix.inverse() * _xi;
_belief_anchor_point(0) = _posteriori_state(0);
_belief_anchor_point(1) = _posteriori_state(1);
_belief_velocity(0) = _posteriori_state(2);
_belief_velocity(1) = _posteriori_state(3);
ADEBUG << "PBFIMF: new state is " << _belief_anchor_point(0) << " "
<< _belief_anchor_point(1) << " " << _belief_velocity(0) << " "
<< _belief_velocity(1) << "\n";
posteriori_state_ = omega_matrix_.inverse() * xi_;
belief_anchor_point_(0) = posteriori_state_(0);
belief_anchor_point_(1) = posteriori_state_(1);
belief_velocity_(0) = posteriori_state_(2);
belief_velocity_(1) = posteriori_state_(3);
ADEBUG << "PBFIMF: new state is " << belief_anchor_point_(0) << " "
<< belief_anchor_point_(1) << " " << belief_velocity_(0) << " "
<< belief_velocity_(1) << "\n";
CacheSensorObjects(new_object);
}
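For reference, the renamed members implement a standard information-filter predict/update, which is what the code above performs. A sketch of the math, with R_k and z_k standing for the per-sensor covariance and state (cov_sensor and state_sensor in the code); this annotation is not part of the diff:

    \bar{x} = A\hat{x}, \qquad
    \bar{\Omega} = (A\,\Omega^{-1}A^{\top} + Q)^{-1}, \qquad
    \bar{\xi} = \bar{\Omega}\,\bar{x}
    \Omega = \bar{\Omega} + (R_k^{-1} - R_{k-1}^{-1}), \qquad
    \xi = \bar{\xi} + (R_k^{-1}z_k - R_{k-1}^{-1}z_{k-1}), \qquad
    \hat{x} = \Omega^{-1}\xi

Here omega_matrix_ is the information matrix Ω, xi_ the information vector ξ, a_matrix_ the transition A, q_matrix_ the process noise Q, and priori_state_/posteriori_state_ the predicted and fused state.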
......@@ -280,13 +282,13 @@ bool PbfIMFFusion::ObtainSensorPrediction(ObjectPtr object,
}
void PbfIMFFusion::UpdateWithoutObject(const double time_diff) {
_belief_anchor_point = _belief_anchor_point + _belief_velocity * time_diff;
belief_anchor_point_ = belief_anchor_point_ + belief_velocity_ * time_diff;
}
void PbfIMFFusion::GetState(Eigen::Vector3d* anchor_point,
Eigen::Vector3d* velocity) {
*anchor_point = _belief_anchor_point;
*velocity = _belief_velocity;
*anchor_point = belief_anchor_point_;
*velocity = belief_velocity_;
}
void PbfIMFFusion::GetUncertainty(Eigen::Matrix3d* position_uncertainty,
......@@ -294,26 +296,27 @@ void PbfIMFFusion::GetUncertainty(Eigen::Matrix3d* position_uncertainty,
*position_uncertainty << 0.01, 0.0, 0.0, 0.0, 0.01, 0.0, 0.0, 0.0, 0.01;
*velocity_uncertainty << 0.01, 0.0, 0.0, 0.0, 0.01, 0.0, 0.0, 0.0, 0.01;
(*position_uncertainty).topLeftCorner(2, 2) =
_omega_matrix.inverse().topLeftCorner(2, 2);
omega_matrix_.inverse().topLeftCorner(2, 2);
(*velocity_uncertainty).topLeftCorner(2, 2) =
_omega_matrix.inverse().block<2, 2>(2, 2);
omega_matrix_.inverse().block<2, 2>(2, 2);
}
void PbfIMFFusion::CacheSensorObjects(const PbfSensorObjectPtr new_object) {
void PbfIMFFusion::CacheSensorObjects(
const std::shared_ptr<PbfSensorObject> new_object) {
const SensorType& type = new_object->sensor_type;
auto it = _cached_sensor_objects.find(type);
if (it != _cached_sensor_objects.end()) {
auto it = cached_sensor_objects_.find(type);
if (it != cached_sensor_objects_.end()) {
it->second.push(new_object);
} else {
std::queue<PbfSensorObjectPtr> objects;
std::queue<std::shared_ptr<PbfSensorObject>> objects;
objects.push(new_object);
_cached_sensor_objects[type] = objects;
cached_sensor_objects_[type] = objects;
}
}
void PbfIMFFusion::RemoveOutdatedSensorObjects(const double timestamp) {
auto it = _cached_sensor_objects.begin();
for (; it != _cached_sensor_objects.end(); ++it) {
auto it = cached_sensor_objects_.begin();
for (; it != cached_sensor_objects_.end(); ++it) {
double time_invisible = 0.0;
if (it->first == SensorType::VELODYNE_64) {
......@@ -338,9 +341,10 @@ void PbfIMFFusion::RemoveOutdatedSensorObjects(const double timestamp) {
}
}
PbfSensorObjectPtr PbfIMFFusion::GetSensorLatestCache(const SensorType type) {
auto it = _cached_sensor_objects.find(type);
if (it != _cached_sensor_objects.end()) {
std::shared_ptr<PbfSensorObject> PbfIMFFusion::GetSensorLatestCache(
const SensorType type) {
auto it = cached_sensor_objects_.find(type);
if (it != cached_sensor_objects_.end()) {
const auto& objects = it->second;
if (!objects.empty()) {
return objects.back();
......@@ -351,8 +355,8 @@ PbfSensorObjectPtr PbfIMFFusion::GetSensorLatestCache(const SensorType type) {
void PbfIMFFusion::SetState(const Eigen::Vector3d& anchor_point,
const Eigen::Vector3d& velocity) {
_belief_anchor_point = anchor_point;
_belief_velocity = velocity;
belief_anchor_point_ = anchor_point;
belief_velocity_ = velocity;
}
} // namespace perception
......
......@@ -18,10 +18,12 @@
#define MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_IMF_FUSION_H_ // NOLINT
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <utility>
#include <vector>
#include "modules/common/macro.h"
#include "modules/perception/obstacle/fusion/probabilistic_fusion/pbf_base_motion_fusion.h"
#include "modules/perception/obstacle/fusion/probabilistic_fusion/pbf_sensor_object.h"
......@@ -45,7 +47,7 @@ class PbfIMFFusion : public PbfBaseMotionFusion {
// @brief initialize state of the filter
// @params[IN] new_object: initial object for filtering
// @return nothing
virtual void Initialize(const PbfSensorObjectPtr new_object);
virtual void Initialize(const std::shared_ptr<PbfSensorObject> new_object);
// @brief predict the state of filter
// @params[OUT] anchor_point: predicted anchor point for filtering
......@@ -59,8 +61,9 @@ class PbfIMFFusion : public PbfBaseMotionFusion {
// @params[IN] new_object: new object for current update
// @params[IN] time_diff: time interval from last update;
// @return nothing
virtual void UpdateWithObject(const PbfSensorObjectPtr new_object,
const double time_diff);
virtual void UpdateWithObject(
const std::shared_ptr<PbfSensorObject> new_object,
const double time_diff);
// @brief update without measurements
// @params[IN] time_diff: time interval from last update
......@@ -81,7 +84,7 @@ class PbfIMFFusion : public PbfBaseMotionFusion {
// @brief cache sensor objects in history
// @param[IN] new object: new object for current update
// @return nothing
void CacheSensorObjects(const PbfSensorObjectPtr new_object);
void CacheSensorObjects(const std::shared_ptr<PbfSensorObject> new_object);
// @brief remove outdated sensor objects from cache
// @param[IN] timestamp: current sensor timestamp
......@@ -91,7 +94,7 @@ class PbfIMFFusion : public PbfBaseMotionFusion {
// @brief get latest sensor cache
// @param[IN] type: requested sensor type
// @return latest sensor object
PbfSensorObjectPtr GetSensorLatestCache(const SensorType type);
std::shared_ptr<PbfSensorObject> GetSensorLatestCache(const SensorType type);
void GetUncertainty(Eigen::Matrix3d* position_uncertainty,
Eigen::Matrix3d* velocity_uncertainty);
......@@ -102,23 +105,24 @@ class PbfIMFFusion : public PbfBaseMotionFusion {
Eigen::Vector4d* state_pre,
Eigen::Matrix4d* cov_pre);
// global
Eigen::Vector3d _belief_anchor_point;
Eigen::Vector3d _belief_velocity;
Eigen::Vector3d _belief_acceleration;
Eigen::Matrix<double, 4, 1> _priori_state;
Eigen::Matrix<double, 4, 1> _posteriori_state;
std::map<SensorType, std::queue<PbfSensorObjectPtr>> _cached_sensor_objects;
Eigen::Vector3d belief_anchor_point_;
Eigen::Vector3d belief_velocity_;
Eigen::Vector3d belief_acceleration_;
Eigen::Matrix<double, 4, 1> priori_state_;
Eigen::Matrix<double, 4, 1> posteriori_state_;
std::map<SensorType, std::queue<std::shared_ptr<PbfSensorObject>>>
cached_sensor_objects_;
// the omega matrix
Eigen::Matrix<double, 4, 4> _omega_matrix;
Eigen::Matrix<double, 4, 4> omega_matrix_;
// the state vector is information matrix: cov.inverse() * state
Eigen::Matrix<double, 4, 1> _xi;
Eigen::Matrix<double, 4, 1> xi_;
// the state-transition matrix
Eigen::Matrix<double, 4, 4> _a_matrix;
Eigen::Matrix<double, 4, 4> a_matrix_;
// the observation mode
Eigen::Matrix<double, 4, 4> _c_matrix;
Eigen::Matrix<double, 4, 4> c_matrix_;
// the covariance of the process noise
Eigen::Matrix<double, 4, 4> _q_matrix;
Eigen::Matrix<double, 4, 4> q_matrix_;
};
} // namespace perception
......
......@@ -37,7 +37,8 @@ void PbfKalmanMotionFusion::Initialize(const Eigen::Vector3d &anchor_point,
belief_acceleration_ = Eigen::Vector3d(0, 0, 0);
}
void PbfKalmanMotionFusion::Initialize(const PbfSensorObjectPtr new_object) {
void PbfKalmanMotionFusion::Initialize(
const std::shared_ptr<PbfSensorObject> new_object) {
ACHECK(new_object != nullptr && new_object->object != nullptr)
<< "Initialize PbfKalmanMotionFusion with null sensor object";
......@@ -97,7 +98,7 @@ void PbfKalmanMotionFusion::Predict(Eigen::Vector3d *anchor_point,
}
void PbfKalmanMotionFusion::UpdateWithObject(
const PbfSensorObjectPtr new_object, const double time_diff) {
const std::shared_ptr<PbfSensorObject> new_object, const double time_diff) {
ACHECK(new_object != nullptr && new_object->object != nullptr)
<< "update PbfKalmanMotionFusion with null sensor object";
......
......@@ -18,6 +18,7 @@
#define MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PBF_KALMAN_MOTION_FUSION_H_ // NOLINT
#include <deque>
#include <memory>
#include <utility>
#include <vector>
......@@ -44,7 +45,7 @@ class PbfKalmanMotionFusion : public PbfBaseMotionFusion {
// @brief initialize state of the filter
// @params[IN] new_object: initial object for filtering
// @return nothing
void Initialize(const PbfSensorObjectPtr new_object);
void Initialize(const std::shared_ptr<PbfSensorObject> new_object);
// @brief predict the state of filter
// @params[OUT] anchor_point: predicted anchor point for filtering
......@@ -58,7 +59,7 @@ class PbfKalmanMotionFusion : public PbfBaseMotionFusion {
// @params[IN] new_object: new object for current update
// @params[IN] time_diff: time interval from last update;
// @return nothing
void UpdateWithObject(const PbfSensorObjectPtr new_object,
void UpdateWithObject(const std::shared_ptr<PbfSensorObject> new_object,
const double time_diff);
// @brief update without measurements
......@@ -123,4 +124,6 @@ class PbfKalmanMotionFusion : public PbfBaseMotionFusion {
} // namespace perception
} // namespace apollo
#endif // MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PBF_KALMAN_MOTION_FUSION_H_ // NOLINT
// clang-format off
#endif // MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PBF_KALMAN_MOTION_FUSION_H_ // NOLINT
// clang-format on
......@@ -65,7 +65,7 @@ TEST_F(PbfMotionFusionTest, test_initialize_with_lidar_object) {
lidar_object->anchor_point = lidar_position;
lidar_object->velocity = lidar_velocity;
lidar_object->state_uncertainty.setIdentity();
PbfSensorObjectPtr pbf_lidar_object(new PbfSensorObject(
std::shared_ptr<PbfSensorObject> pbf_lidar_object(new PbfSensorObject(
lidar_object, SensorType::VELODYNE_64, lidar_timestamp));
for (auto motion_fusion_alg : motion_fusion_algs_) {
motion_fusion_alg->Initialize(pbf_lidar_object);
......@@ -85,7 +85,7 @@ TEST_F(PbfMotionFusionTest, test_initialize_with_radar_object) {
radar_object->center = radar_position;
radar_object->anchor_point = radar_position;
radar_object->velocity = radar_velocity;
PbfSensorObjectPtr pbf_radar_object(
std::shared_ptr<PbfSensorObject> pbf_radar_object(
new PbfSensorObject(radar_object, SensorType::RADAR, radar_timestamp));
for (auto motion_fusion_alg : motion_fusion_algs_) {
motion_fusion_alg->Initialize(pbf_radar_object);
......@@ -105,7 +105,7 @@ TEST_F(PbfMotionFusionTest, test_update_with_measurement_kalman) {
radar_object->center = radar_position;
radar_object->anchor_point = radar_position;
radar_object->velocity = radar_velocity;
PbfSensorObjectPtr pbf_radar_object(
std::shared_ptr<PbfSensorObject> pbf_radar_object(
new PbfSensorObject(radar_object, SensorType::RADAR, radar_timestamp));
ObjectPtr lidar_object(new Object());
......@@ -115,7 +115,7 @@ TEST_F(PbfMotionFusionTest, test_update_with_measurement_kalman) {
lidar_object->center = lidar_position;
lidar_object->anchor_point = lidar_position;
lidar_object->velocity = lidar_velocity;
PbfSensorObjectPtr pbf_lidar_object(new PbfSensorObject(
std::shared_ptr<PbfSensorObject> pbf_lidar_object(new PbfSensorObject(
lidar_object, SensorType::VELODYNE_64, lidar_timestamp));
for (auto motion_fusion_alg : motion_fusion_algs_) {
if (motion_fusion_alg->name() != "PbfKalmanMotionFusion") {
......@@ -145,7 +145,7 @@ TEST_F(PbfMotionFusionTest, test_update_with_measurement_imf) {
radar_object->center = radar_position;
radar_object->anchor_point = radar_position;
radar_object->velocity = radar_velocity;
PbfSensorObjectPtr pbf_radar_object(
std::shared_ptr<PbfSensorObject> pbf_radar_object(
new PbfSensorObject(radar_object, SensorType::RADAR, radar_timestamp));
ObjectPtr lidar_object(new Object());
......@@ -155,7 +155,7 @@ TEST_F(PbfMotionFusionTest, test_update_with_measurement_imf) {
lidar_object->center = lidar_position;
lidar_object->anchor_point = lidar_position;
lidar_object->velocity = lidar_velocity;
PbfSensorObjectPtr pbf_lidar_object(new PbfSensorObject(
std::shared_ptr<PbfSensorObject> pbf_lidar_object(new PbfSensorObject(
lidar_object, SensorType::VELODYNE_64, lidar_timestamp));
for (auto motion_fusion_alg : motion_fusion_algs_) {
......@@ -189,7 +189,7 @@ TEST_F(PbfMotionFusionTest, test_update_with_measurement_imf_seq) {
radar_object->center = radar_position;
radar_object->anchor_point = radar_position;
radar_object->velocity = radar_velocity;
PbfSensorObjectPtr pbf_radar_object(
std::shared_ptr<PbfSensorObject> pbf_radar_object(
new PbfSensorObject(radar_object, SensorType::RADAR, radar_timestamp));
pbf_radar_object->timestamp = radar_timestamp;
std::default_random_engine generator;
......@@ -218,7 +218,7 @@ TEST_F(PbfMotionFusionTest, test_update_with_measurement_imf_seq) {
AINFO << "radar object center is " << radar_object->center(0);
radar_object->anchor_point = radar_object->center;
radar_object->velocity = radar_velocity;
PbfSensorObjectPtr pbf_radar_object(new PbfSensorObject(
std::shared_ptr<PbfSensorObject> pbf_radar_object(new PbfSensorObject(
radar_object, SensorType::RADAR, mutable_radar_timestamp));
pbf_radar_object->timestamp = mutable_radar_timestamp;
motion_fusion_alg->setCurrentFuseTS(mutable_radar_timestamp + 0.2);
......@@ -244,7 +244,7 @@ TEST_F(PbfMotionFusionTest, test_update_without_measurement) {
lidar_object->center = lidar_position;
lidar_object->anchor_point = lidar_position;
lidar_object->velocity = lidar_velocity;
PbfSensorObjectPtr pbf_lidar_object(new PbfSensorObject(
std::shared_ptr<PbfSensorObject> pbf_lidar_object(new PbfSensorObject(
lidar_object, SensorType::VELODYNE_64, lidar_timestamp));
double time_diff = 0.1;
......
......@@ -16,8 +16,7 @@
#include "modules/perception/obstacle/fusion/probabilistic_fusion/pbf_sensor.h"
#include <string>
#include <vector>
#include <memory>
namespace apollo {
namespace perception {
......@@ -65,7 +64,7 @@ void PbfSensor::AddFrame(const SensorObjects &frame) {
pbf_frame->objects.resize(frame.objects.size());
for (size_t i = 0; i < frame.objects.size(); ++i) {
PbfSensorObjectPtr obj(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> obj(new PbfSensorObject());
obj->timestamp = frame.timestamp;
obj->sensor_type = frame.sensor_type;
obj->object->clone(*(frame.objects[i]));
......
......@@ -40,8 +40,6 @@ struct PbfSensorObject {
double invisible_period;
};
typedef std::shared_ptr<PbfSensorObject> PbfSensorObjectPtr;
struct PbfSensorFrame {
PbfSensorFrame() { sensor2world_pose = Eigen::Matrix4d::Identity(); }
SensorType sensor_type = SensorType::UNKNOWN_SENSOR_TYPE;
......@@ -50,7 +48,7 @@ struct PbfSensorFrame {
double timestamp = 0.0;
int seq_num = 0;
Eigen::Matrix4d sensor2world_pose;
std::vector<PbfSensorObjectPtr> objects;
std::vector<std::shared_ptr<PbfSensorObject>> objects;
};
typedef std::shared_ptr<PbfSensorFrame> PbfSensorFramePtr;
......
......@@ -50,7 +50,7 @@ using apollo::common::time::TimeUtil;
PbfTrack::MotionFusionMethod PbfTrack::s_motion_fusion_method_ =
PbfTrack::MotionFusionMethod::PBF_KALMAN;
PbfTrack::PbfTrack(PbfSensorObjectPtr obj) {
PbfTrack::PbfTrack(std::shared_ptr<PbfSensorObject> obj) {
idx_ = GetNextTrackId();
SensorType sensor_type = obj->sensor_type;
std::string sensor_id = obj->sensor_id;
......@@ -102,7 +102,7 @@ void PbfTrack::SetMotionFusionMethod(const std::string &motion_fusion_method) {
PbfTrack::~PbfTrack() {}
void PbfTrack::UpdateWithSensorObject(PbfSensorObjectPtr obj,
void PbfTrack::UpdateWithSensorObject(std::shared_ptr<PbfSensorObject> obj,
double match_dist) {
const SensorType &sensor_type = obj->sensor_type;
const std::string sensor_id = obj->sensor_id;
......@@ -170,12 +170,15 @@ void PbfTrack::UpdateWithoutSensorObject(const SensorType &sensor_type,
int PbfTrack::GetTrackId() const { return idx_; }
PbfSensorObjectPtr PbfTrack::GetFusedObject() { return fused_object_; }
std::shared_ptr<PbfSensorObject> PbfTrack::GetFusedObject() {
return fused_object_;
}
double PbfTrack::GetFusedTimestamp() const { return fused_timestamp_; }
PbfSensorObjectPtr PbfTrack::GetLidarObject(const std::string &sensor_id) {
PbfSensorObjectPtr obj = nullptr;
std::shared_ptr<PbfSensorObject> PbfTrack::GetLidarObject(
const std::string &sensor_id) {
std::shared_ptr<PbfSensorObject> obj = nullptr;
auto it = lidar_objects_.find(sensor_id);
if (it != lidar_objects_.end()) {
obj = it->second;
......@@ -183,8 +186,9 @@ PbfSensorObjectPtr PbfTrack::GetLidarObject(const std::string &sensor_id) {
return obj;
}
PbfSensorObjectPtr PbfTrack::GetRadarObject(const std::string &sensor_id) {
PbfSensorObjectPtr obj = nullptr;
std::shared_ptr<PbfSensorObject> PbfTrack::GetRadarObject(
const std::string &sensor_id) {
std::shared_ptr<PbfSensorObject> obj = nullptr;
auto it = radar_objects_.find(sensor_id);
if (it != radar_objects_.end()) {
obj = it->second;
......@@ -192,8 +196,9 @@ PbfSensorObjectPtr PbfTrack::GetRadarObject(const std::string &sensor_id) {
return obj;
}
PbfSensorObjectPtr PbfTrack::GetCameraObject(const std::string &sensor_id) {
PbfSensorObjectPtr obj = nullptr;
std::shared_ptr<PbfSensorObject> PbfTrack::GetCameraObject(
const std::string &sensor_id) {
std::shared_ptr<PbfSensorObject> obj = nullptr;
auto it = camera_objects_.find(sensor_id);
if (it != camera_objects_.end()) {
obj = it->second;
......@@ -201,7 +206,7 @@ PbfSensorObjectPtr PbfTrack::GetCameraObject(const std::string &sensor_id) {
return obj;
}
void PbfTrack::PerformMotionFusionAsync(PbfSensorObjectPtr obj) {
void PbfTrack::PerformMotionFusionAsync(std::shared_ptr<PbfSensorObject> obj) {
if (motion_fusion_ == nullptr) {
AERROR << "Skip motion fusion becuase motion_fusion_ is nullptr.";
return;
......@@ -247,7 +252,7 @@ void PbfTrack::PerformMotionFusionAsync(PbfSensorObjectPtr obj) {
}
}
void PbfTrack::PerformMotionFusion(PbfSensorObjectPtr obj) {
void PbfTrack::PerformMotionFusion(std::shared_ptr<PbfSensorObject> obj) {
if (motion_fusion_ == nullptr) {
AERROR << "Skip motion fusion becuase motion_fusion_ is nullptr.";
return;
......@@ -255,8 +260,8 @@ void PbfTrack::PerformMotionFusion(PbfSensorObjectPtr obj) {
const SensorType &sensor_type = obj->sensor_type;
double time_diff = obj->timestamp - fused_object_->timestamp;
PbfSensorObjectPtr lidar_object = GetLatestLidarObject();
PbfSensorObjectPtr radar_object = GetLatestRadarObject();
std::shared_ptr<PbfSensorObject> lidar_object = GetLatestLidarObject();
std::shared_ptr<PbfSensorObject> radar_object = GetLatestRadarObject();
if (FLAGS_use_navigation_mode) {
if (is_camera(sensor_type) || is_radar(sensor_type)) {
......@@ -333,7 +338,7 @@ void PbfTrack::PerformMotionFusion(PbfSensorObjectPtr obj) {
}
}
void PbfTrack::PerformMotionCompensation(PbfSensorObjectPtr obj,
void PbfTrack::PerformMotionCompensation(std::shared_ptr<PbfSensorObject> obj,
double timestamp) {
double time_diff = timestamp - obj->timestamp;
if (time_diff < 0) {
......@@ -393,7 +398,7 @@ bool PbfTrack::AbleToPublish() {
return true;
}
PbfSensorObjectPtr radar_object = GetLatestRadarObject();
std::shared_ptr<PbfSensorObject> radar_object = GetLatestRadarObject();
if (s_publish_if_has_radar_ && !invisible_in_radar_ &&
radar_object != nullptr) {
if (radar_object->sensor_type == SensorType::RADAR) {
......@@ -414,7 +419,7 @@ bool PbfTrack::AbleToPublish() {
}
void PbfTrack::UpdateMeasurementsLifeWithMeasurement(
std::map<std::string, PbfSensorObjectPtr> *objects,
std::map<std::string, std::shared_ptr<PbfSensorObject>> *objects,
const std::string &sensor_id, double timestamp, double max_invisible_time) {
for (auto it = objects->begin(); it != objects->end();) {
if (sensor_id != it->first) {
......@@ -432,7 +437,7 @@ void PbfTrack::UpdateMeasurementsLifeWithMeasurement(
}
void PbfTrack::UpdateMeasurementsLifeWithoutMeasurement(
std::map<std::string, PbfSensorObjectPtr> *objects,
std::map<std::string, std::shared_ptr<PbfSensorObject>> *objects,
const std::string &sensor_id, double timestamp, double max_invisible_time,
bool *invisible_state) {
*invisible_state = true;
......@@ -453,8 +458,8 @@ void PbfTrack::UpdateMeasurementsLifeWithoutMeasurement(
}
}
PbfSensorObjectPtr PbfTrack::GetLatestLidarObject() {
PbfSensorObjectPtr lidar_object;
std::shared_ptr<PbfSensorObject> PbfTrack::GetLatestLidarObject() {
std::shared_ptr<PbfSensorObject> lidar_object;
for (auto it = lidar_objects_.begin(); it != lidar_objects_.end(); ++it) {
if (lidar_object == nullptr) {
lidar_object = it->second;
......@@ -465,8 +470,8 @@ PbfSensorObjectPtr PbfTrack::GetLatestLidarObject() {
return lidar_object;
}
PbfSensorObjectPtr PbfTrack::GetLatestRadarObject() {
PbfSensorObjectPtr radar_object;
std::shared_ptr<PbfSensorObject> PbfTrack::GetLatestRadarObject() {
std::shared_ptr<PbfSensorObject> radar_object;
for (auto it = radar_objects_.begin(); it != radar_objects_.end(); ++it) {
if (radar_object == nullptr) {
radar_object = it->second;
......@@ -477,8 +482,8 @@ PbfSensorObjectPtr PbfTrack::GetLatestRadarObject() {
return radar_object;
}
PbfSensorObjectPtr PbfTrack::GetLatestCameraObject() {
PbfSensorObjectPtr camera_object;
std::shared_ptr<PbfSensorObject> PbfTrack::GetLatestCameraObject() {
std::shared_ptr<PbfSensorObject> camera_object;
for (auto it = camera_objects_.begin(); it != camera_objects_.end(); ++it) {
if (camera_object == nullptr) {
camera_object = it->second;
......@@ -489,8 +494,8 @@ PbfSensorObjectPtr PbfTrack::GetLatestCameraObject() {
return camera_object;
}
PbfSensorObjectPtr PbfTrack::GetSensorObject(const SensorType &sensor_type,
const std::string &sensor_id) {
std::shared_ptr<PbfSensorObject> PbfTrack::GetSensorObject(
const SensorType &sensor_type, const std::string &sensor_id) {
if (is_lidar(sensor_type)) {
return GetLidarObject(sensor_id);
} else if (is_radar(sensor_type)) {
......
......@@ -30,36 +30,38 @@ namespace perception {
class PbfTrack {
public:
explicit PbfTrack(PbfSensorObjectPtr obj);
explicit PbfTrack(std::shared_ptr<PbfSensorObject> obj);
~PbfTrack();
/**@brief Update track with sensor object */
void UpdateWithSensorObject(PbfSensorObjectPtr obj, double match_dist);
void UpdateWithSensorObject(std::shared_ptr<PbfSensorObject> obj,
double match_dist);
void UpdateWithoutSensorObject(const SensorType &sensor_type,
const std::string &sensor_id,
double min_match_dist, double timestamp);
PbfSensorObjectPtr GetFusedObject();
std::shared_ptr<PbfSensorObject> GetFusedObject();
double GetFusedTimestamp() const;
PbfSensorObjectPtr GetLidarObject(const std::string &sensor_id);
std::shared_ptr<PbfSensorObject> GetLidarObject(const std::string &sensor_id);
PbfSensorObjectPtr GetRadarObject(const std::string &sensor_id);
std::shared_ptr<PbfSensorObject> GetRadarObject(const std::string &sensor_id);
PbfSensorObjectPtr GetCameraObject(const std::string &sensor_id);
std::shared_ptr<PbfSensorObject> GetCameraObject(
const std::string &sensor_id);
PbfSensorObjectPtr GetSensorObject(const SensorType &sensor_type,
const std::string &sensor_id);
std::shared_ptr<PbfSensorObject> GetSensorObject(
const SensorType &sensor_type, const std::string &sensor_id);
/**@brief get latest lidar measurement for multi lidar sensors*/
PbfSensorObjectPtr GetLatestLidarObject();
std::shared_ptr<PbfSensorObject> GetLatestLidarObject();
/**@brief get latest lidar measurement for multi radar sensors*/
PbfSensorObjectPtr GetLatestRadarObject();
std::shared_ptr<PbfSensorObject> GetLatestRadarObject();
/**@brief get latest camera measurement for multi camera sensors*/
PbfSensorObjectPtr GetLatestCameraObject();
std::shared_ptr<PbfSensorObject> GetLatestCameraObject();
int GetTrackId() const;
......@@ -117,24 +119,25 @@ class PbfTrack {
protected:
/**@brief use obj's velocity to update obj's location to input timestamp*/
void PerformMotionCompensation(PbfSensorObjectPtr obj, double timestamp);
void PerformMotionCompensation(std::shared_ptr<PbfSensorObject> obj,
double timestamp);
void PerformMotionFusion(PbfSensorObjectPtr obj);
void PerformMotionFusion(std::shared_ptr<PbfSensorObject> obj);
void PerformMotionFusionAsync(PbfSensorObjectPtr obj);
void PerformMotionFusionAsync(std::shared_ptr<PbfSensorObject> obj);
void UpdateMeasurementsLifeWithMeasurement(
std::map<std::string, PbfSensorObjectPtr> *objects,
std::map<std::string, std::shared_ptr<PbfSensorObject>> *objects,
const std::string &sensor_id, double timestamp,
double max_invisible_time);
void UpdateMeasurementsLifeWithoutMeasurement(
std::map<std::string, PbfSensorObjectPtr> *objects,
std::map<std::string, std::shared_ptr<PbfSensorObject>> *objects,
const std::string &sensor_id, double timestamp, double max_invisible_time,
bool *invisible_state);
protected:
PbfSensorObjectPtr fused_object_;
std::shared_ptr<PbfSensorObject> fused_object_;
/**@brief time stamp of the track*/
double fused_timestamp_;
......@@ -153,9 +156,9 @@ class PbfTrack {
std::shared_ptr<PbfBaseMotionFusion> motion_fusion_;
/**@brief one object instance per sensor, might be more later*/
std::map<std::string, PbfSensorObjectPtr> lidar_objects_;
std::map<std::string, PbfSensorObjectPtr> radar_objects_;
std::map<std::string, PbfSensorObjectPtr> camera_objects_;
std::map<std::string, std::shared_ptr<PbfSensorObject>> lidar_objects_;
std::map<std::string, std::shared_ptr<PbfSensorObject>> radar_objects_;
std::map<std::string, std::shared_ptr<PbfSensorObject>> camera_objects_;
bool is_dead_;
......
......@@ -38,11 +38,12 @@ namespace apollo {
namespace perception {
float PbfTrackObjectDistance::Compute(
const PbfTrackPtr &fused_track, const PbfSensorObjectPtr &sensor_object,
const PbfTrackPtr &fused_track,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const TrackObjectDistanceOptions &options) {
const SensorType &sensor_type = sensor_object->sensor_type;
ADEBUG << "sensor type: " << static_cast<int>(sensor_type);
PbfSensorObjectPtr fused_object = fused_track->GetFusedObject();
std::shared_ptr<PbfSensorObject> fused_object = fused_track->GetFusedObject();
if (fused_object == nullptr) {
ADEBUG << "fused object is nullptr";
return std::numeric_limits<float>::max();
......@@ -55,8 +56,10 @@ float PbfTrackObjectDistance::Compute(
}
float distance = std::numeric_limits<float>::max();
const PbfSensorObjectPtr &lidar_object = fused_track->GetLatestLidarObject();
const PbfSensorObjectPtr &radar_object = fused_track->GetLatestRadarObject();
const std::shared_ptr<PbfSensorObject> &lidar_object =
fused_track->GetLatestLidarObject();
const std::shared_ptr<PbfSensorObject> &radar_object =
fused_track->GetLatestRadarObject();
if (FLAGS_use_navigation_mode) {
if (FLAGS_use_distance_angle_fusion) {
......@@ -94,9 +97,9 @@ float PbfTrackObjectDistance::Compute(
}
float PbfTrackObjectDistance::ComputeVelodyne64Velodyne64(
const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object, const Eigen::Vector3d &ref_pos,
int range) {
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, int range) {
float distance =
ComputeDistance3D(fused_object, sensor_object, ref_pos, range);
ADEBUG << "compute_velodyne64_velodyne64 distance: " << distance;
......@@ -104,9 +107,9 @@ float PbfTrackObjectDistance::ComputeVelodyne64Velodyne64(
}
float PbfTrackObjectDistance::ComputeVelodyne64Radar(
const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object, const Eigen::Vector3d &ref_pos,
int range) {
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, int range) {
float distance =
ComputeDistance3D(fused_object, sensor_object, ref_pos, range);
ADEBUG << "compute_velodyne64_radar distance " << distance;
......@@ -114,9 +117,9 @@ float PbfTrackObjectDistance::ComputeVelodyne64Radar(
}
float PbfTrackObjectDistance::ComputeRadarRadar(
const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object, const Eigen::Vector3d &ref_pos,
int range) {
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, int range) {
float distance =
ComputeDistance3D(fused_object, sensor_object, ref_pos, range);
ADEBUG << "compute_radar_radar distance " << distance;
......@@ -135,8 +138,8 @@ float PbfTrackObjectDistance::GetAngle(const ObjectPtr &obj) {
}
float PbfTrackObjectDistance::ComputeDistanceAngleMatchProb(
const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object) {
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object) {
static float weight_x = 0.8f;
static float weight_y = 0.2f;
static float speed_diff = 5.0f;
......@@ -205,9 +208,9 @@ float PbfTrackObjectDistance::ComputeDistanceAngleMatchProb(
}
float PbfTrackObjectDistance::ComputeDistance3D(
const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object, const Eigen::Vector3d &ref_pos,
const int range) {
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, const int range) {
const ObjectPtr &obj = fused_object->object;
if (obj == nullptr) {
AERROR << "Object is nullptr.";
......
......@@ -17,6 +17,8 @@
#ifndef MODULES_PERCEPTION_OBSTACLE_FUSION_PBF_PBF_TRACK_OBJECT_DISTANCE_H_
#define MODULES_PERCEPTION_OBSTACLE_FUSION_PBF_PBF_TRACK_OBJECT_DISTANCE_H_
#include <memory>
#include "modules/common/macro.h"
#include "modules/perception/obstacle/base/types.h"
#include "modules/perception/obstacle/fusion/probabilistic_fusion/pbf_sensor_object.h"
......@@ -35,23 +37,24 @@ class PbfTrackObjectDistance {
virtual ~PbfTrackObjectDistance() = default;
float Compute(const PbfTrackPtr &fused_track,
const PbfSensorObjectPtr &sensor_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const TrackObjectDistanceOptions &options);
protected:
float ComputeVelodyne64Velodyne64(const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object,
const Eigen::Vector3d &ref_pos,
int range = 3);
float ComputeVelodyne64Radar(const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object,
const Eigen::Vector3d &ref_pos, int range = 3);
float ComputeRadarRadar(const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object,
float ComputeVelodyne64Velodyne64(
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, int range = 3);
float ComputeVelodyne64Radar(
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, int range = 3);
float ComputeRadarRadar(const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, int range = 3);
float ComputeDistance3D(const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object,
float ComputeDistance3D(const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object,
const Eigen::Vector3d &ref_pos, const int range);
float ComputeEuclideanDistance(const Eigen::Vector3d &des,
const Eigen::Vector3d &src);
......@@ -60,8 +63,9 @@ class PbfTrackObjectDistance {
bool ComputePolygonCenter(const PolygonDType &polygon,
const Eigen::Vector3d &ref_pos, int range,
Eigen::Vector3d *center);
float ComputeDistanceAngleMatchProb(const PbfSensorObjectPtr &fused_object,
const PbfSensorObjectPtr &sensor_object);
float ComputeDistanceAngleMatchProb(
const std::shared_ptr<PbfSensorObject> &fused_object,
const std::shared_ptr<PbfSensorObject> &sensor_object);
float GetAngle(const ObjectPtr &obj);
......
......@@ -30,12 +30,12 @@ namespace apollo {
namespace perception {
TEST(PbfTrackTest, test_pbf_track_constructor) {
PbfSensorObjectPtr object1(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object1(new PbfSensorObject());
object1->sensor_type = SensorType::VELODYNE_64;
object1->sensor_id = "velodyne_64";
object1->timestamp = 0.1;
object1->object->track_id = 1;
PbfSensorObjectPtr object2(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object2(new PbfSensorObject());
object2->sensor_type = SensorType::RADAR;
object2->sensor_id = "radar";
object2->timestamp = 0.2;
......@@ -52,13 +52,13 @@ TEST(PbfTrackTest, test_pbf_track_constructor) {
}
TEST(PbfTrackTest, test_pbf_get_object) {
PbfSensorObjectPtr object1(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object1(new PbfSensorObject());
object1->sensor_type = SensorType::VELODYNE_64;
object1->sensor_id = "velodyne_64";
object1->timestamp = 0.1;
object1->object->track_id = 1;
PbfTrack track(object1);
PbfSensorObjectPtr object2(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object2(new PbfSensorObject());
object2->sensor_type = SensorType::RADAR;
object2->sensor_id = "radar";
object2->timestamp = 0.09;
......@@ -67,32 +67,32 @@ TEST(PbfTrackTest, test_pbf_get_object) {
CHECK_EQ(nullptr != track.GetLidarObject("velodyne_64"), true);
CHECK_EQ(nullptr != track.GetRadarObject("radar"), true);
PbfSensorObjectPtr object4(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object4(new PbfSensorObject());
object4->sensor_type = SensorType::VELODYNE_64;
object4->sensor_id = "velodyne_64_1";
object4->timestamp = 0.2;
object4->object->track_id = 1;
track.lidar_objects_[object4->sensor_id] = object4;
PbfSensorObjectPtr object5(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object5(new PbfSensorObject());
object5->sensor_type = SensorType::RADAR;
object5->sensor_id = "radar_1";
object5->timestamp = 0.095;
object5->object->track_id = 1;
track.radar_objects_[object5->sensor_id] = object5;
PbfSensorObjectPtr obj = track.GetLatestLidarObject();
std::shared_ptr<PbfSensorObject> obj = track.GetLatestLidarObject();
CHECK_EQ(obj->timestamp - 0.2 < 0.001, true);
obj = track.GetLatestRadarObject();
CHECK_EQ(obj->timestamp - 0.095 < 0.0001, true);
}
TEST(PbfTrackTest, test_pbf_update_measurements_life) {
PbfSensorObjectPtr object1(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object1(new PbfSensorObject());
object1->sensor_type = SensorType::VELODYNE_64;
object1->sensor_id = "velodyne_64";
object1->timestamp = 0.1;
object1->object->track_id = 1;
object1->invisible_period = 0.0;
PbfTrack track(object1);
PbfSensorObjectPtr object2(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object2(new PbfSensorObject());
object2->sensor_type = SensorType::VELODYNE_64;
object2->sensor_id = "velodyne_64_1";
object2->timestamp = 0.1;
......@@ -104,7 +104,7 @@ TEST(PbfTrackTest, test_pbf_update_measurements_life) {
track.UpdateMeasurementsLifeWithMeasurement(&(track.lidar_objects_),
"velodyne_64_1", 0.25, 0.2);
CHECK_EQ(track.lidar_objects_.size(), 1);
PbfSensorObjectPtr object3(new PbfSensorObject());
std::shared_ptr<PbfSensorObject> object3(new PbfSensorObject());
object3->sensor_type = SensorType::VELODYNE_64;
object3->sensor_id = "velodyne_64_2";
object3->timestamp = 0.2;
......
......@@ -185,9 +185,9 @@ void ProbabilisticFusion::FuseFrame(const PbfSensorFramePtr &frame) {
<< "object_number: " << frame->objects.size() << ","
<< "timestamp: " << std::fixed << std::setprecision(12)
<< frame->timestamp;
std::vector<PbfSensorObjectPtr> &objects = frame->objects;
std::vector<PbfSensorObjectPtr> background_objects;
std::vector<PbfSensorObjectPtr> foreground_objects;
std::vector<std::shared_ptr<PbfSensorObject>> &objects = frame->objects;
std::vector<std::shared_ptr<PbfSensorObject>> background_objects;
std::vector<std::shared_ptr<PbfSensorObject>> foreground_objects;
DecomposeFrameObjects(objects, &foreground_objects, &background_objects);
Eigen::Vector3d ref_point = frame->sensor2world_pose.topRightCorner(3, 1);
......@@ -197,7 +197,7 @@ void ProbabilisticFusion::FuseFrame(const PbfSensorFramePtr &frame) {
}
void ProbabilisticFusion::CreateNewTracks(
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<int> &unassigned_ids) {
for (size_t i = 0; i < unassigned_ids.size(); i++) {
int id = unassigned_ids[i];
......@@ -208,7 +208,7 @@ void ProbabilisticFusion::CreateNewTracks(
void ProbabilisticFusion::UpdateAssignedTracks(
std::vector<PbfTrackPtr> *tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<std::pair<int, int>> &assignments,
const std::vector<double> &track_object_dist) {
for (size_t i = 0; i < assignments.size(); i++) {
......@@ -242,7 +242,8 @@ void ProbabilisticFusion::CollectFusedObjects(
std::vector<PbfTrackPtr> &tracks = track_manager_->GetTracks();
for (size_t i = 0; i < tracks.size(); i++) {
if (tracks[i]->AbleToPublish()) {
PbfSensorObjectPtr fused_object = tracks[i]->GetFusedObject();
std::shared_ptr<PbfSensorObject> fused_object =
tracks[i]->GetFusedObject();
ObjectPtr obj(new Object());
obj->clone(*(fused_object->object));
obj->track_id = tracks[i]->GetTrackId();
......@@ -259,9 +260,9 @@ void ProbabilisticFusion::CollectFusedObjects(
}
void ProbabilisticFusion::DecomposeFrameObjects(
const std::vector<PbfSensorObjectPtr> &frame_objects,
std::vector<PbfSensorObjectPtr> *foreground_objects,
std::vector<PbfSensorObjectPtr> *background_objects) {
const std::vector<std::shared_ptr<PbfSensorObject>> &frame_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *background_objects) {
foreground_objects->clear();
background_objects->clear();
for (size_t i = 0; i < frame_objects.size(); i++) {
......@@ -274,7 +275,7 @@ void ProbabilisticFusion::DecomposeFrameObjects(
}
void ProbabilisticFusion::FuseForegroundObjects(
std::vector<PbfSensorObjectPtr> *foreground_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects,
Eigen::Vector3d ref_point, const SensorType &sensor_type,
const std::string &sensor_id, double timestamp) {
std::vector<int> unassigned_tracks;
......
......@@ -17,6 +17,7 @@
#ifndef MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PROBABILISTIC_FUSION_H_ // NOLINT
#define MODULES_PERCEPTION_OBSTACLE_FUSION_PROBABILISTIC_FUSION_PROBABILISTIC_FUSION_H_ // NOLINT
#include <memory>
#include <mutex>
#include <string>
#include <utility>
......@@ -55,13 +56,14 @@ class ProbabilisticFusion : public BaseFusion {
void FuseFrame(const PbfSensorFramePtr &frame);
/**@brief create new tracks for objects not assigned to current tracks*/
void CreateNewTracks(const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<int> &unassigned_ids);
void CreateNewTracks(
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<int> &unassigned_ids);
/**@brief update current tracks with matched objects*/
void UpdateAssignedTracks(
std::vector<PbfTrackPtr> *tracks,
const std::vector<PbfSensorObjectPtr> &sensor_objects,
const std::vector<std::shared_ptr<PbfSensorObject>> &sensor_objects,
const std::vector<std::pair<int, int>> &assignments,
const std::vector<double> &track_objects_dist);
......@@ -76,12 +78,12 @@ class ProbabilisticFusion : public BaseFusion {
std::vector<ObjectPtr> *fused_objects);
void DecomposeFrameObjects(
const std::vector<PbfSensorObjectPtr> &frame_objects,
std::vector<PbfSensorObjectPtr> *foreground_objects,
std::vector<PbfSensorObjectPtr> *background_objects);
const std::vector<std::shared_ptr<PbfSensorObject>> &frame_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *background_objects);
void FuseForegroundObjects(
std::vector<PbfSensorObjectPtr> *foreground_objects,
std::vector<std::shared_ptr<PbfSensorObject>> *foreground_objects,
Eigen::Vector3d ref_point, const SensorType &sensor_type,
const std::string &sensor_id, double timestamp);
......