diff --git a/modules/common/time/time_util.h b/modules/common/time/time_util.h index e198736eb6a2f99ead12d3db83a619b9a0cb22e9..4618cdd4f3725558167ae61703c976e1560b5224 100644 --- a/modules/common/time/time_util.h +++ b/modules/common/time/time_util.h @@ -51,7 +51,8 @@ class TimeUtil { static double GetCurrentTime() { struct timeval tv; gettimeofday(&tv, nullptr); - const double timestamp = tv.tv_sec * 1000000 + tv.tv_usec; + const double timestamp = + static_cast<double>(tv.tv_sec * 1000000 + tv.tv_usec); return timestamp / 1000000; } diff --git a/modules/perception/camera/lib/obstacle/tracker/omt/omt_obstacle_tracker.cc b/modules/perception/camera/lib/obstacle/tracker/omt/omt_obstacle_tracker.cc index 9759d1182326d2189566edbd16d6dc0fc78b7e79..0528b05eb10ecc013b09be4ed8ecb70949c057a0 100644 --- a/modules/perception/camera/lib/obstacle/tracker/omt/omt_obstacle_tracker.cc +++ b/modules/perception/camera/lib/obstacle/tracker/omt/omt_obstacle_tracker.cc @@ -133,9 +133,9 @@ bool OMTObstacleTracker::CombineDuplicateTargets() { << "," << targets_[j].id << ") score " << score << " count " << count; - hypo.target = i; - hypo.object = j; - hypo.score = (count > 0) ? score / (count) : 0; + hypo.target = static_cast<int>(i); + hypo.object = static_cast<int>(j); + hypo.score = (count > 0) ? 
score / static_cast<float>(count) : 0; if (hypo.score < omt_param_.target_combine_iou_threshold()) { continue; } @@ -185,8 +185,8 @@ void OMTObstacleTracker::GenerateHypothesis(const TrackObjectPtrs &objects) { for (size_t i = 0; i < targets_.size(); ++i) { ADEBUG << "Target " << targets_[i].id; for (size_t j = 0; j < objects.size(); ++j) { - hypo.target = i; - hypo.object = j; + hypo.target = static_cast<int>(i); + hypo.object = static_cast<int>(j); float sa = ScoreAppearance(targets_[i], objects[j]); float sm = ScoreMotion(targets_[i], objects[j]); float ss = ScoreShape(targets_[i], objects[j]); @@ -279,7 +279,7 @@ float OMTObstacleTracker::ScoreAppearance(const Target &target, count += 1; } - return energy / (0.1f + count * 0.9f); + return energy / (0.1f + static_cast<float>(count) * 0.9f); } // [new] @@ -347,8 +347,8 @@ int OMTObstacleTracker::CreateNewTarget(const TrackObjectPtrs &objects) { continue; } for (auto &&target_rect : target_rects) { - if (IsCovered(rect, target_rect, 0.4) || - IsCoveredHorizon(rect, target_rect, 0.5)) { + if (IsCovered(rect, target_rect, 0.4f) || + IsCoveredHorizon(rect, target_rect, 0.5f)) { is_covered = true; break; } @@ -391,9 +391,9 @@ bool OMTObstacleTracker::Associate2D(const ObstacleTrackerOptions &options, // TODO(gaohan): use pool TrackObjectPtr track_ptr(new TrackObject); track_ptr->object = frame->detected_objects[i]; - track_ptr->object->id = i; + track_ptr->object->id = static_cast<int>(i); track_ptr->timestamp = frame->timestamp; - track_ptr->indicator = PatchIndicator(frame->frame_id, i, + track_ptr->indicator = PatchIndicator(frame->frame_id, static_cast<int>(i), frame->data_provider->sensor_name()); track_ptr->object->camera_supplement.sensor_name = frame->data_provider->sensor_name(); diff --git a/modules/perception/fusion/base/sensor.cc b/modules/perception/fusion/base/sensor.cc index ec2f1cc95c27b6f4ec2fb9ce37ff89531aed93ca..db695ca4a7d707154e5723280884c249f57c4e2a 100644 --- a/modules/perception/fusion/base/sensor.cc +++ 
b/modules/perception/fusion/base/sensor.cc @@ -55,7 +55,7 @@ bool Sensor::GetPose(double timestamp, Eigen::Affine3d* pose) const { for (int i = static_cast<int>(frames_.size()) - 1; i >= 0; --i) { double time_diff = timestamp - frames_[i]->GetTimestamp(); - if (fabs(time_diff) < 1.0e-3) { + if (fabs(time_diff) < 1.0e-3) { // > ? return frames_[i]->GetPose(pose); } } diff --git a/modules/perception/fusion/common/kalman_filter.cc b/modules/perception/fusion/common/kalman_filter.cc index d78e08e175f0be03c9d175c7a58ce61959a20b1a..d03e44ca02a392df0f9ee0b6ce0d10f215807b64 100644 --- a/modules/perception/fusion/common/kalman_filter.cc +++ b/modules/perception/fusion/common/kalman_filter.cc @@ -28,7 +28,7 @@ bool KalmanFilter::Init(const Eigen::VectorXd &initial_belief_states, AERROR << "the cols and rows of uncertainty martix should be equal"; return false; } - states_num_ = initial_uncertainty.rows(); + states_num_ = static_cast<int>(initial_uncertainty.rows()); if (states_num_ <= 0) { AERROR << "state_num should be greater than zero"; diff --git a/modules/perception/fusion/common/kalman_filter.h b/modules/perception/fusion/common/kalman_filter.h index 70d5ff3701df0c3b477234d10c2fd31498192d10..2f9f69ce6964614b1fe667e732fa5b6ec9c237c2 100644 --- a/modules/perception/fusion/common/kalman_filter.h +++ b/modules/perception/fusion/common/kalman_filter.h @@ -55,9 +55,9 @@ class KalmanFilter : public BaseFilter { bool DeCorrelation(int x, int y, int x_len, int y_len); void CorrectionBreakdown(); bool SetGainBreakdownThresh(const std::vector<bool> &break_down, - const float threshold = 2.0); + const float threshold = 2.0f); bool SetValueBreakdownThresh(const std::vector<bool> &break_down, - const float threshold = 0.05); + const float threshold = 0.05f); private: // @brief kalman gain @@ -66,8 +66,8 @@ class KalmanFilter : public BaseFilter { Eigen::VectorXd value_break_down_; Eigen::MatrixXd kalman_gain_; - float value_break_down_threshold_ = 999.0; - float gain_break_down_threshold_ = 0.0; + float 
value_break_down_threshold_ = 999.0f; + float gain_break_down_threshold_ = 0.0f; }; } // namespace fusion diff --git a/modules/perception/fusion/lib/data_fusion/motion_fusion/kalman_motion_fusion/kalman_motion_fusion.cc b/modules/perception/fusion/lib/data_fusion/motion_fusion/kalman_motion_fusion/kalman_motion_fusion.cc index aeb7ce2feb42a9c7af1793598b795d35a83a2e18..9bbab1e0e5909760a503fa7f99083a0e6b090bb8 100644 --- a/modules/perception/fusion/lib/data_fusion/motion_fusion/kalman_motion_fusion/kalman_motion_fusion.cc +++ b/modules/perception/fusion/lib/data_fusion/motion_fusion/kalman_motion_fusion/kalman_motion_fusion.cc @@ -372,8 +372,8 @@ void KalmanMotionFusion::RewardRMatrix(const base::SensorType& sensor_type, Eigen::MatrixXd* r_matrix) { common::SensorManager* sensor_manager = lib::Singleton<common::SensorManager>::get_instance(); - const float converged_scale = 0.01; - const float unconverged_scale = 1000.0; + const float converged_scale = 0.01f; + const float unconverged_scale = 1000.0f; if (sensor_manager->IsLidar(sensor_type)) { if (converged) { r_matrix->setIdentity(); diff --git a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.cc b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.cc index b3987a0cd7fdd7cfd8fd432efb91100ea9c53854..2063a09e8cb6a086cbcff6b5de943352fbf65d2c 100644 --- a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.cc +++ b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.cc @@ -148,6 +148,9 @@ void MlfMotionFilter::KalmanFilterUpdateWithPartialObservation( double time_diff = new_object->object_ptr->latest_tracked_time - latest_object->object_ptr->latest_tracked_time; + if (time_diff < EPSION_TIME) { // Very small time than assign + time_diff = DEFAULT_FPS; + } Eigen::Matrix4d transition = Eigen::Matrix4d::Identity(); transition(0, 2) = transition(1, 3) = time_diff; diff --git a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.h 
b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.h index 1065af60e7397a3a5e22a7da6491beacaa1f5562..28ada0bf1f65af475b086e6fb7d6e9e968386597 100644 --- a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.h +++ b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_filter.h @@ -135,6 +135,8 @@ class MlfMotionFilter : public MlfBaseFilter { void BeliefToOutput(TrackedObjectPtr object); protected: + const double EPSION_TIME = 1e-3; + const double DEFAULT_FPS = 0.1; // motion measurement std::shared_ptr<MlfMotionMeasurement> motion_measurer_; // motion refiner diff --git a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.cc b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.cc index 124ffc8c9f0a8a1a47eb17d6b856105c2610897a..02e70e9f9d84dc7162a2f7121cd08af21bcaf818 100644 --- a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.cc +++ b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.cc @@ -41,7 +41,9 @@ void MlfMotionMeasurement::ComputeMotionMeasurment( double latest_time = latest_object->object_ptr->latest_tracked_time; double current_time = new_object->object_ptr->latest_tracked_time; double time_diff = current_time - latest_time; - + if (fabs(time_diff) < EPSILON_TIME) { + time_diff = DEFAULT_FPS; + } MeasureAnchorPointVelocity(new_object, latest_object, time_diff); MeasureBboxCenterVelocity(new_object, latest_object, time_diff); MeasureBboxCornerVelocity(new_object, latest_object, time_diff); diff --git a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.h b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.h index 163d7c27b6d9c98d57bdd0aa07cfaa8def4e43a5..8ab115ce3e05dd704b6c0072dc50fac5db6643bb 100644 --- a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.h +++ 
b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_measurement.h @@ -51,6 +51,8 @@ class MlfMotionMeasurement { TrackedObjectPtr new_object); private: + const double EPSILON_TIME = 1e-3; // or numeric_limits<double>::epsilon() + const double DEFAULT_FPS = 0.1; DISALLOW_COPY_AND_ASSIGN(MlfMotionMeasurement); }; // class MlfMotionMeasurement diff --git a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.cc b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.cc index 976bf6e37ae98622b49c5b59adeeff53b2bb46a5..2389ebbfac6e6c29095c3c4e88f9a2e7dde09831 100644 --- a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.cc +++ b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.cc @@ -56,7 +56,12 @@ bool MlfMotionRefiner::Refine(const MlfTrackDataConstPtr& track_data, double time_diff = new_object->object_ptr->latest_tracked_time - latest_object->object_ptr->latest_tracked_time; Eigen::Vector3d filter_velocity_gain = new_object->belief_velocity_gain; - double filter_acceleration_gain = filter_velocity_gain.norm() / time_diff; + double filter_acceleration_gain = 0.0; + if (fabs(time_diff) < EPSION_TIME) { + filter_acceleration_gain = 0.0; + } else { + filter_acceleration_gain = filter_velocity_gain.norm() / time_diff; + } bool need_keep_motion = filter_acceleration_gain > claping_acceleration_threshold_; // use tighter threshold for pedestrian diff --git a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.h b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.h index 1d4f05277dca45d909096d3ef0fc888e65c70fb0..419ca4222f77757439c03a2fda9f74c9e4ad6c56 100644 --- a/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.h +++ b/modules/perception/lidar/lib/tracker/multi_lidar_fusion/mlf_motion_refiner.h @@ -70,6 +70,7 @@ class MlfMotionRefiner { protected: double claping_acceleration_threshold_ = 10; double 
claping_speed_threshold_ = 1.0; + const double EPSION_TIME = 1e-3; }; // class MlfMotionRefiner } // namespace lidar diff --git a/modules/perception/onboard/component/fusion_camera_detection_component.cc b/modules/perception/onboard/component/fusion_camera_detection_component.cc index bf3f06218c2f1e464bdbf25dac9532159105b73c..a64c77a9ea7796e4feb1b53d5f6c0723c74600fc 100644 --- a/modules/perception/onboard/component/fusion_camera_detection_component.cc +++ b/modules/perception/onboard/component/fusion_camera_detection_component.cc @@ -331,9 +331,10 @@ int FusionCameraDetectionComponent::InitConfig() { fusion_camera_detection_param.output_final_obstacles(); prefused_channel_name_ = fusion_camera_detection_param.prefused_channel_name(); - default_camera_pitch_ = fusion_camera_detection_param.default_camera_pitch(); + default_camera_pitch_ = + static_cast<float>(fusion_camera_detection_param.default_camera_pitch()); default_camera_height_ = - fusion_camera_detection_param.default_camera_height(); + static_cast<float>(fusion_camera_detection_param.default_camera_height()); output_camera_debug_msg_ = fusion_camera_detection_param.output_camera_debug_msg(); camera_debug_channel_name_ = @@ -398,8 +399,8 @@ int FusionCameraDetectionComponent::InitSensorInfo() { // assume all camera have same image size base::BaseCameraModelPtr camera_model_ptr = sensor_manager->GetUndistortCameraModel(camera_names_[0]); - image_width_ = camera_model_ptr->get_width(); - image_height_ = camera_model_ptr->get_height(); + image_width_ = static_cast<int>(camera_model_ptr->get_width()); + image_height_ = static_cast<int>(camera_model_ptr->get_height()); std::string format_str = R"( camera_names: %s %s @@ -704,7 +705,7 @@ int FusionCameraDetectionComponent::ConvertObjectToPb( pb_msg->set_height(object_ptr->size(2)); // convert 3d bbox to polygon - int polygon_point_size = object_ptr->polygon.size(); + int polygon_point_size = static_cast<int>(object_ptr->polygon.size()); for (int i = 0; i < polygon_point_size; ++i) { auto &pt 
= object_ptr->polygon.at(i); apollo::common::Point3D *p = pb_msg->add_polygon_point();