Commit 1790c116 authored by xiaohuitu, committed by Dong Li

perception: clean visualizer tool code and log (#2061)

* perception: clean visualizer tool code and log

* perception: clean opengl_visualizer code and log
Parent a9a8c447
......@@ -99,36 +99,6 @@ void FrameContent::OffsetObject(ObjectPtr object,
object->center[2] += offset[2];
}
void FrameContent::SetTrackedObjectsLidar(
const std::vector<ObjectPtr> &objects) {
tracked_objects_lidar_.resize(objects.size());
for (size_t i = 0; i < objects.size(); ++i) {
tracked_objects_lidar_[i].reset(new Object);
tracked_objects_lidar_[i]->clone(*objects[i]);
OffsetObject(tracked_objects_lidar_[i], global_offset_);
}
}
void FrameContent::SetTrackedObjectsRadar(
const std::vector<ObjectPtr> &objects) {
tracked_objects_radar_.resize(objects.size());
for (size_t i = 0; i < objects.size(); ++i) {
tracked_objects_radar_[i].reset(new Object);
tracked_objects_radar_[i]->clone(*objects[i]);
OffsetObject(tracked_objects_radar_[i], global_offset_);
}
}
void FrameContent::SetTrackedObjectsFused(
const std::vector<ObjectPtr> &objects) {
tracked_objects_fused_.resize(objects.size());
for (size_t i = 0; i < objects.size(); ++i) {
tracked_objects_fused_[i].reset(new Object);
tracked_objects_fused_[i]->clone(*objects[i]);
OffsetObject(tracked_objects_fused_[i], global_offset_);
}
}
void FrameContent::SetTrackedObjects(
const std::vector<ObjectPtr> &objects) {
tracked_objects_.resize(objects.size());
......
......@@ -45,9 +45,6 @@ class FrameContent {
bool HasCloud();
void SetTrackedObjectsLidar(const std::vector<ObjectPtr> &objects);
void SetTrackedObjectsRadar(const std::vector<ObjectPtr> &objects);
void SetTrackedObjectsFused(const std::vector<ObjectPtr> &objects);
void SetTrackedObjects(const std::vector<ObjectPtr> &objects);
std::vector<ObjectPtr> GetTrackedObjects();
......@@ -68,9 +65,6 @@ class FrameContent {
Eigen::Vector3d global_offset_;
bool global_offset_initialized_;
std::vector<ObjectPtr> tracked_objects_lidar_; // after tracking
std::vector<ObjectPtr> tracked_objects_radar_; // after tracking
std::vector<ObjectPtr> tracked_objects_fused_; // after tracking
std::vector<ObjectPtr> tracked_objects_; // after tracking
};
......
......@@ -27,12 +27,12 @@ bool OpenglVisualizer::Init() {
opengl_vs_ = boost::shared_ptr<GLFWViewer>(new GLFWViewer());
if (opengl_vs_ == nullptr) {
AINFO << "Failed to create opengl viewer";
AERROR << "Failed to create opengl viewer";
return false;
}
if (opengl_vs_->Initialize() == false) {
AINFO << "Failed to initialize opengl viewer";
AERROR << "Failed to initialize opengl viewer";
return false;
}
......@@ -61,7 +61,6 @@ void OpenglVisualizer::Render(const FrameContent &content) {
Eigen::Vector3d(forward_world_.x, forward_world_.y, forward_world_.z));
opengl_vs_->SetFrameContent(content);
opengl_vs_->SpinOnce();
AINFO << "OpenglVisualizer spin_once";
}
void OpenglVisualizer::SetSize(int w, int h) {
......
......@@ -13,8 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "modules/perception/obstacle/onboard/obstacle_perception.h"
#include <string>
#include "modules/common/log.h"
#include "modules/common/time/time.h"
#include "modules/perception/common/perception_gflags.h"
......@@ -24,20 +26,31 @@
#include "modules/perception/obstacle/fusion/dummy/dummy_algorithms.h"
#include "modules/perception/obstacle/fusion/probabilistic_fusion/probabilistic_fusion.h"
DEFINE_bool(show_lidar_obstacles, false, "whether show lidar obstacles or not");
DEFINE_bool(show_radar_obstacles, false, "whether show radar obstacles or not");
DEFINE_bool(show_fused_obstacles, false, "whether show fused obstacles or not");
DEFINE_string(obstacle_show_type, "fused",
"show obstacle from lidar/radar/fused");
namespace apollo {
namespace perception {
ObstaclePerception::ObstaclePerception() : initialized_(false) {}
ObstacleShowType GetObstacleShowType(const std::string& show_type_string) {
if (show_type_string == "lidar") {
return SHOW_LIDAR;
} else if (show_type_string == "radar") {
return SHOW_RADAR;
} else if (show_type_string == "fused") {
return SHOW_FUSED;
} else {
return MAX_SHOW_TYPE;
}
}
ObstaclePerception::ObstaclePerception() {}
ObstaclePerception::~ObstaclePerception() {}
bool ObstaclePerception::Init() {
RegistAllAlgorithm();
// initialize lidar detector
/// initialize lidar detector
lidar_perception_.reset(new LidarProcess);
if (lidar_perception_ == nullptr) {
AERROR << "Failed to get LidarProcess instance";
......@@ -47,7 +60,7 @@ bool ObstaclePerception::Init() {
AERROR << "Failed to initialize lidar perception";
return false;
}
// initialize radar detector
/// initialize radar detector
radar_detector_.reset(BaseRadarDetectorRegisterer::GetInstanceByName(
FLAGS_onboard_radar_detector));
if (radar_detector_ == nullptr) {
......@@ -59,7 +72,7 @@ bool ObstaclePerception::Init() {
AERROR << "Failed to initialize RadarDetector : "
<< FLAGS_onboard_radar_detector;
}
// initialize fusion
/// initialize fusion
fusion_.reset(BaseFusionRegisterer::GetInstanceByName(FLAGS_onboard_fusion));
if (fusion_ == nullptr) {
AERROR << "Failed to get fusion instance: " << FLAGS_onboard_fusion;
......@@ -69,7 +82,7 @@ bool ObstaclePerception::Init() {
AERROR << "Failed to init fusion:" << FLAGS_onboard_fusion;
return false;
}
// initialize visualizer
/// initialize visualizer
if (FLAGS_enable_visualization) {
frame_visualizer_.reset(new OpenglVisualizer());
if (frame_visualizer_ == nullptr) {
......@@ -80,16 +93,16 @@ bool ObstaclePerception::Init() {
AERROR << "Failed to init visualizer.";
return false;
}
initialized_ = true;
obstacle_show_type_ = GetObstacleShowType(FLAGS_obstacle_show_type);
if (obstacle_show_type_ == MAX_SHOW_TYPE) {
AERROR << "Failed to get ObstacleShowType";
return false;
}
}
return true;
}
void ObstaclePerception::SetGlobalOffset(const Eigen::Vector3d& global_offset) {
global_offset_ = global_offset;
}
void ObstaclePerception::RegistAllAlgorithm() {
RegisterFactoryDummyRadarDetector();
RegisterFactoryDummyFusion();
......@@ -107,7 +120,7 @@ bool ObstaclePerception::Process(SensorRawFrame* frame,
std::shared_ptr<SensorObjects> sensor_objects(new SensorObjects());
if (frame->sensor_type_ == VELODYNE_64) {
// lidar obstacle detection
/// lidar obstacle detection
VelodyneRawFrame* velodyne_frame = dynamic_cast<VelodyneRawFrame*>(frame);
std::shared_ptr<Eigen::Matrix4d> velodyne_pose(new Eigen::Matrix4d);
*(velodyne_pose.get()) = velodyne_frame->pose_;
......@@ -118,17 +131,18 @@ bool ObstaclePerception::Process(SensorRawFrame* frame,
return false;
}
sensor_objects->objects = lidar_perception_->GetObjects();
AINFO << "lidar objects size: " << sensor_objects->objects.size();
PERF_BLOCK_END("lidar_perception");
// set frame content
/// set frame content
if (FLAGS_enable_visualization) {
frame_content_.SetLidarPose(velodyne_frame->pose_);
frame_content_.SetLidarCloud(velodyne_frame->cloud_);
if (FLAGS_show_lidar_obstacles) {
if (obstacle_show_type_ == SHOW_LIDAR) {
frame_content_.SetTrackedObjects(sensor_objects->objects);
}
}
} else if (frame->sensor_type_ == RADAR) {
// radar obstacle detection
/// radar obstacle detection
RadarRawFrame* radar_frame = dynamic_cast<RadarRawFrame*>(frame);
RadarDetectorOptions options;
options.radar2world_pose = &(radar_frame->pose_);
......@@ -144,8 +158,8 @@ bool ObstaclePerception::Process(SensorRawFrame* frame,
sensor_objects->objects = objects;
AINFO << "radar objects size: " << objects.size();
PERF_BLOCK_END("radar_detection");
// set frame content
if (FLAGS_enable_visualization && FLAGS_show_radar_obstacles) {
/// set frame content
if (FLAGS_enable_visualization && obstacle_show_type_ == SHOW_RADAR) {
frame_content_.SetTrackedObjects(sensor_objects->objects);
}
} else {
......@@ -157,7 +171,7 @@ bool ObstaclePerception::Process(SensorRawFrame* frame,
sensor_objects->timestamp = frame->timestamp_;
sensor_objects->sensor2world_pose = frame->pose_;
// fusion
/// fusion
std::vector<SensorObjects> multi_sensor_objs;
multi_sensor_objs.push_back(*sensor_objects);
std::vector<ObjectPtr> fused_objects;
......@@ -165,11 +179,12 @@ bool ObstaclePerception::Process(SensorRawFrame* frame,
AERROR << "Failed to fusion";
return false;
}
*out_objects = fused_objects;
AINFO << "fused objects size: " << fused_objects.size();
PERF_BLOCK_END("sensor_fusion");
// // set frame content
/// set frame content
if (FLAGS_enable_visualization) {
if (FLAGS_show_fused_obstacles) {
if (obstacle_show_type_ == SHOW_FUSED) {
frame_content_.SetTrackedObjects(fused_objects);
if (frame->sensor_type_ != VELODYNE_64) {
return true;
......@@ -179,7 +194,6 @@ bool ObstaclePerception::Process(SensorRawFrame* frame,
frame_visualizer_->Render(frame_content_);
}
*out_objects = fused_objects;
return true;
}
......
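For reference, a minimal standalone sketch (plain C++, not part of this commit) of the pattern the obstacle_perception.cc change introduces: the three show_*_obstacles booleans are replaced by one string flag mapped to a closed enum, with MAX_SHOW_TYPE acting as a sentinel so Init() can fail fast on an unrecognized value. Flag plumbing is omitted; the test strings stand in for FLAGS_obstacle_show_type.

#include <iostream>
#include <string>

enum ObstacleShowType {
  SHOW_LIDAR = 0,
  SHOW_RADAR = 1,
  SHOW_FUSED = 2,
  MAX_SHOW_TYPE  // sentinel returned for any unrecognized string
};

ObstacleShowType GetObstacleShowType(const std::string& show_type_string) {
  if (show_type_string == "lidar") return SHOW_LIDAR;
  if (show_type_string == "radar") return SHOW_RADAR;
  if (show_type_string == "fused") return SHOW_FUSED;
  return MAX_SHOW_TYPE;
}

int main() {
  // "fused" mirrors the new --obstacle_show_type default; "fusd" is a
  // deliberate typo to show the sentinel catching it at init time.
  for (const std::string s : {"fused", "radar", "fusd"}) {
    ObstacleShowType type = GetObstacleShowType(s);
    if (type == MAX_SHOW_TYPE) {
      std::cerr << "Failed to get ObstacleShowType for '" << s << "'" << std::endl;
    } else {
      std::cout << s << " -> enum value " << type << std::endl;
    }
  }
  return 0;
}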
......@@ -32,26 +32,33 @@
namespace apollo {
namespace perception {
enum ObstacleShowType {
SHOW_LIDAR = 0,
SHOW_RADAR = 1,
SHOW_FUSED = 2,
MAX_SHOW_TYPE
};
class ObstaclePerception {
public:
ObstaclePerception();
~ObstaclePerception();
bool Init();
bool Process(SensorRawFrame* frame, std::vector<ObjectPtr>* out_objects);
void SetGlobalOffset(const Eigen::Vector3d& global_offset);
private:
void RegistAllAlgorithm();
/// obstacle detector
std::unique_ptr<LidarProcess> lidar_perception_;
std::unique_ptr<BaseRadarDetector> radar_detector_;
std::unique_ptr<BaseFusion> fusion_;
/// visualization
std::unique_ptr<OpenglVisualizer> frame_visualizer_ = nullptr;
ObstacleShowType obstacle_show_type_;
FrameContent frame_content_;
bool initialized_;
Eigen::Vector3d global_offset_;
DISALLOW_COPY_AND_ASSIGN(ObstaclePerception);
}; // class ObstaclePerception
......
......@@ -82,7 +82,7 @@
# ros TF2 quary frame id. tf2_buffer.lookupTransform.
# type: string
# default: world
--lidar_tf2_frame_id=world
--lidar_tf2_frame_id=novatel
# ros TF2 quary child frame id. tf2_buffer.lookupTransform.
# type: string
......
......@@ -2,34 +2,38 @@
--work_root=modules/perception
--config_manager_path=conf/config_manager.config
--enable_global_offset=false
####################################################################
# Flags from obstacle/lidar/test/offline_lidar_perception_test.cpp
# pcd path
# Flags from modules/perception/tool/offline_visualizer_tool/
# /offline_sequential_obstacle_perception_test.cc
# lidar data path
# type: string
# default: /apollo/data/pcd/
# default: ./modules/perception/tool/export_sensor_data/lidar/
--lidar_path=./modules/perception/tool/export_sensor_data/lidar/
# radar data path
# type: string
# default: ./modules/perception/tool/export_sensor_data/radar/
--radar_path=./modules/perception/tool/export_sensor_data/radar/
# pose path
# the radar detector for onboard
# type: string
# default: /apollo/data/pose/
--pose_path=/apollo/data/pose/
# candidate: DummyRadarDetector, ModestRadarDetector
--onboard_radar_detector=ModestRadarDetector
# output path
# the fusion method for onboard
# type: string
# default: /apollo/data/output/
--save_obstacles=false
--output_path=/apollo/data/output/
# candidate: DummyFusion, ProbabilisticFusion
--onboard_fusion=ProbabilisticFusion
# enable visualization
# type: bool
# default: true
# default: false
--enable_visualization=true
--show_lidar_obstacles=false
--show_radar_obstacles=false
--show_fused_obstacles=true
# enable showing obstacles from lidar/radar/fused
# type: string
# default: fused
--obstacle_show_type=fused
####################################################################
# Flags from obstacle/onboard/hdmap_input.cc
......@@ -77,30 +81,3 @@
# type: string
# candidate: DummyTypeFuser, SequenceTypeFuser
--onboard_type_fuser=DummyTypeFuser
# the radar detector for onboard
# type: string
# candidate: DummyRadarDetector, ModestRadarDetector
--onboard_radar_detector=ModestRadarDetector
# the perception module's output topic name.
# type: string
# default: perception_obstacle
--obstacle_module_name=perception_obstacle
# Query Ros TF timeout in ms. ros::Duration time.
# type: int
# default: 10
--tf2_buff_in_ms=10
# ros TF2 quary frame id. tf2_buffer.lookupTransform.
# type: string
# default: world
--lidar_tf2_frame_id=world
# ros TF2 quary child frame id. tf2_buffer.lookupTransform.
# type: string
# default: velodyne64
--lidar_tf2_child_frame_id=velodyne64
--v=4
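The .flag file above is consumed through gflags: each --name=value line sets the matching DEFINE_* flag once the binary parses its flags with the built-in --flagfile flag pointing at the file. A small illustrative sketch follows; the binary name is hypothetical, and the two flags are re-declared locally only so the demo is self-contained (in the real module they live in perception_gflags and obstacle_perception.cc).

#include <iostream>
#include <gflags/gflags.h>

// Re-declared locally for a self-contained demo; defaults/descriptions are illustrative.
DEFINE_bool(enable_visualization, false, "whether to start the OpenGL visualizer");
DEFINE_string(obstacle_show_type, "fused", "show obstacle from lidar/radar/fused");

int main(int argc, char** argv) {
  // Run as, e.g.:
  //   ./show_type_demo --flagfile=conf/offline_sequential_obstacle_perception_test.flag
  // gflags reads the "--name=value" lines from the file during parsing.
  google::ParseCommandLineFlags(&argc, &argv, true);

  std::cout << std::boolalpha
            << "enable_visualization = " << FLAGS_enable_visualization << "\n"
            << "obstacle_show_type   = " << FLAGS_obstacle_show_type << std::endl;
  return 0;
}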
......@@ -33,9 +33,6 @@ DEFINE_string(lidar_path, "./modules/perception/tool/export_sensor_data/lidar/",
"lidar path");
DEFINE_string(radar_path, "./modules/perception/tool/export_sensor_data/radar/",
"radar path");
DEFINE_bool(enable_global_offset, true,
"enable global offset for benchmark evaluation");
DEFINE_string(main_sensor, "velodyne_64", "main publish sensor");
namespace apollo {
namespace perception {
......@@ -83,13 +80,13 @@ bool LoadOdometry(const std::string& filepath, Eigen::Vector3f* velocity) {
}
double timestamp = 0.0;
int frame_id;
fin >> frame_id >> timestamp >> (*velocity)(0) >> (*velocity)(1) >>
(*velocity)(2);
fin >> frame_id >> timestamp
>> (*velocity)(0) >> (*velocity)(1) >> (*velocity)(2);
bool state = true;
// if (!fin.good()) {
// state = false;
// AERROR << "Failed to read odometry: " << filepath;
// }
if (!fin.good()) {
state = false;
AERROR << "Failed to read odometry: " << filepath;
}
fin.close();
return state;
}
......@@ -100,7 +97,7 @@ class SequentialPerceptionTest {
std::shared_ptr<SensorRawFrame>*)>
SensorFrameReconstructor;
SequentialPerceptionTest() : init_offset_(false) {}
SequentialPerceptionTest() {}
~SequentialPerceptionTest() {}
......@@ -109,12 +106,6 @@ class SequentialPerceptionTest {
AERROR << "Failed to init ObstaclePerception";
return false;
}
if (!FLAGS_enable_global_offset) {
init_offset_ = true;
global_offset_ = Eigen::Vector3d(0, 0, 0);
} else {
init_offset_ = false;
}
sensor_frame_reconstructor_["pcd"] = std::bind(
&SequentialPerceptionTest::ReconstructPointcloudSensorRawFrame, this,
std::placeholders::_1, std::placeholders::_2);
......@@ -124,7 +115,6 @@ class SequentialPerceptionTest {
sensors_files_sources_ = {
{"pcd", SensorFilesSource(FLAGS_lidar_path, "pcd")},
{"radar", SensorFilesSource(FLAGS_radar_path, "radar")}};
return true;
}
......@@ -143,41 +133,28 @@ class SequentialPerceptionTest {
const std::string& file_path, std::shared_ptr<SensorRawFrame>* frame) {
std::string type = file_path.substr(file_path.find_last_of(".") + 1);
if (type != "pcd") {
AERROR
<< "reconstruct_pointcloud_sensor_raw_frame can only handle pcd file";
AERROR << "Pass file: " << file_path;
AERROR << "ReconstructPointcloudSensorRawFrame can only handle pcd file. "
<< "Pass file: " << file_path;
return false;
}
// static Eigen::Matrix4d pose_original;
AINFO << "Process pcd";
// read pose and timestamp
/// read pose and timestamp
Eigen::Matrix4d pose;
int frame_id = 0;
double timestamp = 0.0;
std::string pose_filename =
FLAGS_lidar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".pose";
std::string pose_filename = FLAGS_lidar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".pose";
if (!ReadPoseFile(pose_filename, &pose, &frame_id, &timestamp)) {
AERROR << "Pose file not exits: " << pose_filename;
AERROR << "Failed to read pose file: " << pose_filename;
return false;
}
std::cout << "read pose file: " << std::endl;
std::cout << pose << std::endl;
if (!init_offset_) {
global_offset_ = pose.col(3).head(3);
obstacle_perception_.SetGlobalOffset(global_offset_);
init_offset_ = true;
}
pose.col(3).head(3) -= global_offset_;
// pose_original = pose;
// read pcd
/// read pcd
pcl::PointCloud<pcl_util::PointXYZIT>::Ptr cloud_raw(
new pcl::PointCloud<pcl_util::PointXYZIT>);
std::string pcd_filename =
FLAGS_lidar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".pcd";
AINFO << "pcd file_name: " << pcd_filename;
std::string pcd_filename = FLAGS_lidar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".pcd";
pcl::io::loadPCDFile<pcl_util::PointXYZIT>(pcd_filename, *cloud_raw);
pcl_util::PointCloudPtr cloud(new pcl_util::PointCloud);
for (size_t i = 0; i < cloud_raw->points.size(); ++i) {
......@@ -189,7 +166,7 @@ class SequentialPerceptionTest {
cloud->points.push_back(p);
}
// pose = pose * _velodyne2novatel_ex;
/// construct frame
frame->reset(new VelodyneRawFrame);
VelodyneRawFrame* velodyne_frame =
dynamic_cast<VelodyneRawFrame*>(frame->get());
......@@ -197,7 +174,6 @@ class SequentialPerceptionTest {
velodyne_frame->pose_ = pose;
velodyne_frame->sensor_type_ = VELODYNE_64;
velodyne_frame->cloud_ = cloud;
AINFO << "velo frame sensor_type_ = " << (*frame)->sensor_type_;
return true;
}
......@@ -205,50 +181,43 @@ class SequentialPerceptionTest {
std::shared_ptr<SensorRawFrame>* frame) {
std::string type = file_path.substr(file_path.find_last_of(".") + 1);
if (type != "radar") {
AERROR << "reconstruct_radar_sensor_raw_frame can only handle "
<< "radar file";
AERROR << "Pass file: " << file_path;
AERROR << "ReconstructRadarSensorRawFrame can only handle radar file. "
<< "Pass file: " << file_path;
return false;
}
// read pose
/// read pose and timestamp
Eigen::Matrix4d pose;
int frame_id = 0;
double timestamp = 0.0;
std::string pose_filename =
FLAGS_radar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".pose";
std::string pose_filename = FLAGS_radar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".pose";
if (!ReadPoseFile(pose_filename, &pose, &frame_id, &timestamp)) {
AERROR << "Pose file not exits: " << pose_filename;
AERROR << "Failed to read pose file: " << pose_filename;
return false;
}
// read radar obstacle
/// read radar obstacle
ContiRadar radar_obs_proto;
std::string radar_filename =
FLAGS_radar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".radar";
std::string radar_filename = FLAGS_radar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".radar";
if (!LoadRadarProto(radar_filename, &radar_obs_proto)) {
AERROR << "Failed to load " << radar_filename;
AERROR << "Failed to load radar proto " << radar_filename;
return false;
}
AINFO << "radar_obs_proto size: " << radar_obs_proto.contiobs().size();
// read host vehicle velocity
AINFO << "LoadRadarProto obstacle size: "
<< radar_obs_proto.contiobs().size();
/// read host vehicle velocity
Eigen::Vector3f velocity;
std::string odometry_filename =
FLAGS_radar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".velocity";
std::string odometry_filename = FLAGS_radar_path + "/" +
file_path.substr(0, file_path.find_last_of('.')) + ".velocity";
if (!LoadOdometry(odometry_filename, &velocity)) {
AERROR << "Failed to load " << odometry_filename;
AERROR << "Failed to load odometry " << odometry_filename;
return false;
}
if (!init_offset_) {
global_offset_ = pose.col(3).head(3);
obstacle_perception_.SetGlobalOffset(global_offset_);
init_offset_ = true;
}
pose.col(3).head(3) -= global_offset_;
/// construct frame
frame->reset(new RadarRawFrame);
RadarRawFrame* radar_frame = dynamic_cast<RadarRawFrame*>(frame->get());
radar_frame->timestamp_ = timestamp;
......@@ -256,13 +225,12 @@ class SequentialPerceptionTest {
radar_frame->sensor_type_ = RADAR;
radar_frame->raw_obstacles_ = radar_obs_proto;
radar_frame->car_linear_speed_ = velocity;
AINFO << "radar frame sensor_type_ = " << (*frame)->sensor_type_;
return true;
}
void GetSequentialSensorsDataFiles(std::vector<SensorFile>* sensors_files) {
PERF_BLOCK_START();
std::map<std::string, std::vector<SensorFile>> sensors_files_lists;
std::map<std::string, std::vector<SensorFile> > sensors_files_lists;
size_t total_sensor_files_num = 0;
for (const auto& sensor_files_source : sensors_files_sources_) {
std::string sensor_key = sensor_files_source.first;
......@@ -272,39 +240,29 @@ class SequentialPerceptionTest {
AWARN << "No sensor files source: " << sensor_key;
continue;
}
/// get files with specified extension
std::string ext = std::string(".") + source.extension;
std::vector<std::string> files_list;
GetFileNamesInFolderById(source.folder_path, ext, &files_list);
// get time stamp
/// get sensor files
std::vector<SensorFile>& sensor_files_list =
sensors_files_lists[sensor_key];
sensor_files_list.reserve(files_list.size());
for (const auto& file_name : files_list) {
std::string pose_file =
source.folder_path + "/" +
file_name.substr(0, file_name.find_last_of('.')) + ".pose";
FILE* fin = fopen(pose_file.c_str(), "rt");
if (fin == nullptr) {
AWARN << "pose file: " << pose_file
<< " not found. sensor key: " << sensor_key;
continue;
}
int seq = 0;
/// get timestamp
std::string timestamp_str = file_name.substr(
0, file_name.find_last_of('.'));
double timestamp = 0.0;
if (fscanf(fin, "%d %lf", &seq, &timestamp) != 2) {
AERROR << "parse pose file: " << pose_file
<< " failed. sensor key: " << sensor_key;
fclose(fin);
continue;
}
std::stringstream ss(timestamp_str);
ss >> timestamp;
sensor_files_list.push_back(
SensorFile(sensor_key, file_name, timestamp));
fclose(fin);
}
total_sensor_files_num += sensor_files_list.size();
AINFO << "sensor_key: " << sensor_key
<< " files num: " << sensor_files_list.size();
}
/// sort all sensor files from different sensor type by timestamp
sensors_files->clear();
sensors_files->reserve(total_sensor_files_num);
for (const auto& sensor_files_list : sensors_files_lists) {
......@@ -316,24 +274,24 @@ class SequentialPerceptionTest {
return lhs.timestamp < rhs.timestamp;
};
std::sort(sensors_files->begin(), sensors_files->end(), compare);
PERF_BLOCK_END("get_sequential_sensors_data_files");
PERF_BLOCK_END("GetSequentialSensorsDataFiles");
}
void Run(SensorRawFrame* frame) {
if (frame == nullptr) {
AERROR << "SensorRawFrame is nullptr";
return;
}
std::vector<ObjectPtr> fused_objs;
obstacle_perception_.Process(frame, &fused_objs);
}
const Eigen::Vector3d& GetGlobalOffset() { return global_offset_; }
private:
// reconstruct sensor raw frame function
std::map<std::string, SensorFrameReconstructor> sensor_frame_reconstructor_;
std::map<std::string, SensorFilesSource> sensors_files_sources_;
ObstaclePerception obstacle_perception_;
Eigen::Vector3d global_offset_;
bool init_offset_;
};
} // namespace perception
......@@ -345,8 +303,14 @@ int main(int argc, char** argv) {
"offline_sequential_obstacle_perception_test.flag";
google::ParseCommandLineFlags(&argc, &argv, true);
/// initiaize SequentialPerceptionTest
apollo::perception::SequentialPerceptionTest test;
test.Init();
if (!test.Init()) {
AERROR << "Failed to init SequentialPerceptionTest.";
return -1;
}
/// read sensor data files
std::vector<apollo::perception::SensorFile> sensors_files;
test.GetSequentialSensorsDataFiles(&sensors_files);
AINFO << "=============================" << std::endl;
......@@ -354,11 +318,18 @@ int main(int argc, char** argv) {
AINFO << sensors_files[i];
}
AINFO << "Total sensors files num: " << sensors_files.size();
/// process each sensor file
for (size_t i = 0; i < sensors_files.size(); ++i) {
AINFO << "Process frame " << sensors_files[i];
std::shared_ptr<apollo::perception::SensorRawFrame> raw_frame(
new apollo::perception::SensorRawFrame);
test.ReconstructSensorRawFrame(sensors_files[i].file_path, &raw_frame);
if (!test.ReconstructSensorRawFrame(sensors_files[i].file_path,
&raw_frame)) {
AERROR << "Failed to ReconstructSensorRawFrame for file "
<< sensors_files[i].file_path;
continue;
}
test.Run(raw_frame.get());
}
return 0;
......
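A self-contained sketch (hypothetical file names, simplified types) of the revised sequencing logic in GetSequentialSensorsDataFiles above: the timestamp is now parsed from the file-name stem with a stringstream instead of being re-read from the companion .pose file, and files from all sensors are merged into one timestamp-ordered stream before processing.

#include <algorithm>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct SensorFile {
  std::string sensor_key;  // e.g. "pcd" or "radar"
  std::string file_name;   // stem is assumed to be the capture time in seconds
  double timestamp;
};

double TimestampFromFileName(const std::string& file_name) {
  std::string stem = file_name.substr(0, file_name.find_last_of('.'));
  double timestamp = 0.0;
  std::stringstream ss(stem);
  ss >> timestamp;
  return timestamp;
}

int main() {
  std::vector<SensorFile> files = {
      {"pcd", "1504855124.100000.pcd", 0.0},
      {"radar", "1504855124.050000.radar", 0.0},
      {"pcd", "1504855124.200000.pcd", 0.0},
  };
  for (auto& f : files) {
    f.timestamp = TimestampFromFileName(f.file_name);
  }
  // Same comparator as in the test: strictly ordered by timestamp.
  std::sort(files.begin(), files.end(),
            [](const SensorFile& lhs, const SensorFile& rhs) {
              return lhs.timestamp < rhs.timestamp;
            });
  std::cout << std::fixed << std::setprecision(6);
  for (const auto& f : files) {
    std::cout << f.sensor_key << " " << f.file_name << " @ " << f.timestamp
              << std::endl;
  }
  return 0;
}

The trade-off mirrored here: the pose file no longer has to be opened a second time just for its timestamp, but the naming convention (epoch-seconds stem shared by the .pcd/.radar/.pose files) becomes a hard assumption of the offline tool.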