Commit 3356336e authored by gineshidalgo99

Added pre-processing worker to Wrapper (#608)

Parent 084d1691
......@@ -315,6 +315,7 @@ OpenPose Library - Release Notes
32. Function getFilesOnDirectory() can extract all basic image file types at once, without requiring the user to manually enumerate them.
33. Added the flags `--face_detector` and `--hand_detector`, which let the user select the face/hand rectangle detector used for the subsequent face/hand keypoint detection. It includes OpenCV (for face) and also allows the user to provide their own input. The flag `--hand_tracking` has been removed and integrated into this flag.
34. Maximum queue size per OpenPose thread is configurable through the Wrapper class.
35. Added pre-processing capabilities to Wrapper (WorkerType::PreProcessing), which will be run right after the image has been read.
2. Functions or parameters renamed:
1. By default, python example `tutorial_developer/python_2_pose_from_heatmaps.py` was using 2 scales starting at -1x736; it now uses 1 scale at -1x368.
2. WrapperStructPose default parameters changed to match those of the OpenPose demo binary.
......
// ------------------------- OpenPose C++ API Tutorial - Example 13 - Custom Pre-processing -------------------------
// Synchronous mode: ideal for production integration. It provides the fastest results with respect to runtime
// performance.
// In this function, the user can implement their own pre-processing, i.e., their function will be called after the
// image has been read by OpenPose but before OpenPose processes the frames.
// Command-line user interface
#include <openpose/flags.hpp>
// OpenPose dependencies
#include <openpose/headers.hpp>
// This worker will just invert the image
class WUserPreProcessing : public op::Worker<std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>>
{
public:
    WUserPreProcessing()
    {
        // User's constructor here
    }
    void initializationOnThread() {}
    void work(std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>& datumsPtr)
    {
        // User's pre-processing here (after OpenPose has read the input image & before OpenPose processes it)
        // datumPtr->cvInputData: input frame
        try
        {
            if (datumsPtr != nullptr && !datumsPtr->empty())
                for (auto& datumPtr : *datumsPtr)
                    cv::bitwise_not(datumPtr->cvInputData, datumPtr->cvInputData);
        }
        catch (const std::exception& e)
        {
            this->stop();
            op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
};
void configureWrapper(op::Wrapper& opWrapper)
{
    try
    {
        // Configuring OpenPose
        // logging_level
        op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.",
                  __LINE__, __FUNCTION__, __FILE__);
        op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
        op::Profiler::setDefaultX(FLAGS_profile_speed);
        // Applying user defined configuration - GFlags to program variables
        // producerType
        op::ProducerType producerType;
        std::string producerString;
        std::tie(producerType, producerString) = op::flagsToProducer(
            FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
        // cameraSize
        const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
        // outputSize
        const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
        // netInputSize
        const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368");
        // faceNetInputSize
        const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)");
        // handNetInputSize
        const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
        // poseModel
        const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
        // JSON saving
        if (!FLAGS_write_keypoint.empty())
            op::log("Flag `write_keypoint` is deprecated and will eventually be removed."
                    " Please, use `write_json` instead.", op::Priority::Max);
        // keypointScaleMode
        const auto keypointScaleMode = op::flagsToScaleMode(FLAGS_keypoint_scale);
        // heatmaps to add
        const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg,
                                                      FLAGS_heatmaps_add_PAFs);
        const auto heatMapScaleMode = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale);
        // >1 camera view?
        const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera);
        // Face and hand detectors
        const auto faceDetector = op::flagsToDetector(FLAGS_face_detector);
        const auto handDetector = op::flagsToDetector(FLAGS_hand_detector);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;
        // Initializing the user custom classes
        // Processing
        auto wUserPreProcessing = std::make_shared<WUserPreProcessing>();
        // Add custom processing
        const auto workerProcessingOnNewThread = true;
        opWrapper.setWorker(op::WorkerType::PreProcessing, wUserPreProcessing, workerProcessingOnNewThread);
        // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
        const op::WrapperStructPose wrapperStructPose{
            !FLAGS_body_disable, netInputSize, outputSize, keypointScaleMode, FLAGS_num_gpu, FLAGS_num_gpu_start,
            FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView),
            poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap,
            FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScaleMode, FLAGS_part_candidates,
            (float)FLAGS_render_threshold, FLAGS_number_people_max, FLAGS_maximize_positives, FLAGS_fps_max,
            FLAGS_prototxt_path, FLAGS_caffemodel_path, enableGoogleLogging};
        opWrapper.configure(wrapperStructPose);
        // Face configuration (use op::WrapperStructFace{} to disable it)
        const op::WrapperStructFace wrapperStructFace{
            FLAGS_face, faceDetector, faceNetInputSize,
            op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose),
            (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold};
        opWrapper.configure(wrapperStructFace);
        // Hand configuration (use op::WrapperStructHand{} to disable it)
        const op::WrapperStructHand wrapperStructHand{
            FLAGS_hand, handDetector, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range,
            op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose,
            (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold};
        opWrapper.configure(wrapperStructHand);
        // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
        const op::WrapperStructExtra wrapperStructExtra{
            FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads};
        opWrapper.configure(wrapperStructExtra);
        // Producer (use default to disable any input)
        const op::WrapperStructInput wrapperStructInput{
            producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
            FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
            cameraSize, FLAGS_camera_parameter_path, FLAGS_frame_undistort, FLAGS_3d_views};
        opWrapper.configure(wrapperStructInput);
        // Output (comment or use default argument to disable any output)
        const op::WrapperStructOutput wrapperStructOutput{
            FLAGS_cli_verbose, FLAGS_write_keypoint, op::stringToDataFormat(FLAGS_write_keypoint_format),
            FLAGS_write_json, FLAGS_write_coco_json, FLAGS_write_coco_foot_json, FLAGS_write_coco_json_variant,
            FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, FLAGS_write_video_fps,
            FLAGS_write_video_with_audio, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_3d,
            FLAGS_write_video_adam, FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port};
        opWrapper.configure(wrapperStructOutput);
        // GUI (comment or use default argument to disable any visual output)
        const op::WrapperStructGui wrapperStructGui{
            op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen};
        opWrapper.configure(wrapperStructGui);
        // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
        if (FLAGS_disable_multi_thread)
            opWrapper.disableMultiThreading();
    }
    catch (const std::exception& e)
    {
        op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    }
}
int tutorialApiCpp()
{
    try
    {
        op::log("Starting OpenPose demo...", op::Priority::High);
        const auto opTimer = op::getTimerInit();
        // OpenPose wrapper
        op::log("Configuring OpenPose...", op::Priority::High);
        op::Wrapper opWrapper;
        configureWrapper(opWrapper);
        // Start, run, and stop processing - exec() blocks this thread until OpenPose wrapper has finished
        op::log("Starting thread(s)...", op::Priority::High);
        opWrapper.exec();
        // Measuring total time
        op::printTime(opTimer, "OpenPose demo successfully finished. Total time: ", " seconds.", op::Priority::High);
        // Return
        return 0;
    }
    catch (const std::exception& e)
    {
        return -1;
    }
}
int main(int argc, char *argv[])
{
    // Parsing command line flags
    gflags::ParseCommandLineFlags(&argc, &argv, true);
    // Running tutorialApiCpp
    return tutorialApiCpp();
}
......@@ -11,7 +11,7 @@ set(EXAMPLE_FILES
10_asynchronous_custom_output.cpp
11_asynchronous_custom_input_output_and_datum.cpp
12_synchronous_custom_input.cpp
# 13_synchronous_custom_preprocessing.cpp
13_synchronous_custom_preprocessing.cpp
14_synchronous_custom_postprocessing.cpp
15_synchronous_custom_output.cpp
16_synchronous_custom_all_and_datum.cpp)
......
......@@ -97,6 +97,7 @@ namespace boost
#endif
#ifdef OPEN_CV_IS_4_OR_HIGHER
#define CV_BGR2GRAY cv::COLOR_BGR2GRAY
#define CV_BGR2RGB cv::COLOR_BGR2RGB
#define CV_CALIB_CB_ADAPTIVE_THRESH cv::CALIB_CB_ADAPTIVE_THRESH
#define CV_CALIB_CB_NORMALIZE_IMAGE cv::CALIB_CB_NORMALIZE_IMAGE
#define CV_CALIB_CB_FILTER_QUADS cv::CALIB_CB_FILTER_QUADS
......@@ -111,6 +112,7 @@ namespace boost
#define CV_INTER_CUBIC cv::INTER_CUBIC
#define CV_INTER_LINEAR cv::INTER_LINEAR
#define CV_L2 cv::NORM_L2
#define CV_RGB2BGR cv::COLOR_RGB2BGR
#define CV_TERMCRIT_EPS cv::TermCriteria::Type::EPS
#define CV_TERMCRIT_ITER cv::TermCriteria::Type::MAX_ITER
#define CV_WARP_INVERSE_MAP cv::WARP_INVERSE_MAP
......
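The two new shims above (CV_BGR2RGB and CV_RGB2BGR) follow the existing pattern: when OPEN_CV_IS_4_OR_HIGHER is defined, the removed C-style OpenCV constants are mapped to their cv::COLOR_* equivalents so the same call sites compile against both OpenCV 3 and OpenCV 4. A minimal sketch of such a call site (bgrToRgb is a hypothetical helper, not part of this commit):

#include <opencv2/opencv.hpp>
// Assumes this compatibility header (or OpenCV's legacy C-API headers on 3.x) has been included,
// so CV_BGR2RGB resolves to cv::COLOR_BGR2RGB when building against OpenCV 4+.
cv::Mat bgrToRgb(const cv::Mat& bgr)
{
    cv::Mat rgb;
    cv::cvtColor(bgr, rgb, CV_BGR2RGB);
    return rgb;
}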
......@@ -15,7 +15,7 @@ namespace op
    enum class WorkerType : unsigned char
    {
        Input = 0,
        // PreProcessing,
        PreProcessing,
        PostProcessing,
        Output,
        Size,
......
......@@ -60,7 +60,7 @@ namespace op
        /**
         * Add a user-defined extra Worker for a desired task (input, output, ...).
         * @param workerType WorkerType to configure (e.g., Input, PostProcessing, Output).
         * @param workerType WorkerType to configure (e.g., Input, PreProcessing, PostProcessing, Output).
         * @param worker TWorker to be added.
         * @param workerOnNewThread Whether to add this TWorker on a new thread (if it is computationally demanding) or
         * simply reuse existing threads (for light functions). Set to true if the performance time is unknown.
......
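The documentation change above reflects that setWorker() now also accepts WorkerType::PreProcessing. A minimal sketch of the intended call sequence, mirroring the tutorial example added in this commit (WUserPreProcessing is the class defined there):

// Attach a user pre-processing worker, then run the synchronous pipeline.
op::Wrapper opWrapper;
const auto wUserPreProcessing = std::make_shared<WUserPreProcessing>();
// Note: per the log added in wrapperAuxiliary.hpp below, pre-processing workers run on the
// same thread as the frame producer even if workerOnNewThread is set to true.
opWrapper.setWorker(op::WorkerType::PreProcessing, wUserPreProcessing, /*workerOnNewThread*/ false);
opWrapper.configure(op::WrapperStructPose{}); // default pose configuration
opWrapper.exec();                             // blocks until all frames have been processed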
......@@ -109,9 +109,11 @@ namespace op
            // User custom workers
            const auto& userInputWs = userWs[int(WorkerType::Input)];
            const auto& userPreProcessingWs = userWs[int(WorkerType::PreProcessing)];
            const auto& userPostProcessingWs = userWs[int(WorkerType::PostProcessing)];
            const auto& userOutputWs = userWs[int(WorkerType::Output)];
            const auto userInputWsOnNewThread = userWsOnNewThread[int(WorkerType::Input)];
            const auto userPreProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PreProcessing)];
            const auto userPostProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PostProcessing)];
            const auto userOutputWsOnNewThread = userWsOnNewThread[int(WorkerType::Output)];
......@@ -900,7 +902,18 @@ namespace op
            // After producer
            // ID generator (before any multi-threading or any function that requires the ID)
            const auto wIdGenerator = std::make_shared<WIdGenerator<TDatumsSP>>();
            std::vector<TWorker> workersAux{wIdGenerator};
            // If custom user Worker and uses its own thread
            std::vector<TWorker> workersAux;
            if (!userPreProcessingWs.empty())
            {
                // If custom user Worker in its own thread
                if (userPreProcessingWsOnNewThread)
                    log("You chose to add your pre-processing function in a new thread. However, OpenPose will"
                        " add it in the same thread as the input frame producer.",
                        Priority::High, __LINE__, __FUNCTION__, __FILE__);
                workersAux = mergeVectors(workersAux, {userPreProcessingWs});
            }
            workersAux = mergeVectors(workersAux, {wIdGenerator});
            // Scale & cv::Mat to OP format
            if (scaleAndSizeExtractorW != nullptr)
                workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW});
......@@ -927,7 +940,7 @@ namespace op
                workersAux = mergeVectors({datumProducerW}, workersAux);
            // Otherwise
            else if (threadManagerMode != ThreadManagerMode::Asynchronous
                     && threadManagerMode != ThreadManagerMode::AsynchronousIn)
                error("No input selected.", __LINE__, __FUNCTION__, __FILE__);
            // Thread 0 or 1, queues 0 -> 1
            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
......
......@@ -6,9 +6,7 @@ mkdir build
cd build
echo "RUN_EXAMPLES = ${RUN_EXAMPLES}."
if [[ $RUN_EXAMPLES == true ]] ; then
ARGS="-DDOWNLOAD_FACE_MODEL=OFF -DDOWNLOAD_HAND_MODEL=OFF"
else
if [[ $RUN_EXAMPLES == false ]] ; then
ARGS="-DDOWNLOAD_BODY_25_MODEL=OFF -DDOWNLOAD_FACE_MODEL=OFF -DDOWNLOAD_HAND_MODEL=OFF"
# ARGS="-DBUILD_CAFFE=ON -DDOWNLOAD_BODY_25_MODEL=OFF -DDOWNLOAD_BODY_COCO_MODEL=OFF -DDOWNLOAD_FACE_MODEL=OFF -DDOWNLOAD_HAND_MODEL=OFF -DDOWNLOAD_BODY_MPI_MODEL=OFF"
fi
......
......@@ -66,9 +66,9 @@ if [[ $RUN_EXAMPLES == true ]] ; then
./build/examples/tutorial_api_cpp/12_synchronous_custom_input.bin --image_dir examples/media/ --net_resolution -1x32 --write_json output/ --write_images output/ --display 0
echo " "
# echo "Tutorial API C++: Example 13..."
# ./build/examples/tutorial_api_cpp/13_synchronous_custom_preprocessing.bin --image_dir examples/media/ --net_resolution -1x32 --write_json output/ --write_images output/ --display 0
# echo " "
echo "Tutorial API C++: Example 13..."
./build/examples/tutorial_api_cpp/13_synchronous_custom_preprocessing.bin --image_dir examples/media/ --net_resolution -1x32 --write_json output/ --write_images output/ --display 0
echo " "
echo "Tutorial API C++: Example 14..."
./build/examples/tutorial_api_cpp/14_synchronous_custom_postprocessing.bin --image_dir examples/media/ --net_resolution -1x32 --write_json output/ --write_images output/ --display 0
......