Commit e1daa2dd authored by gineshidalgo99

Producer inside Wrapper

Parent 39325238
......@@ -280,6 +280,7 @@ OpenPose Library - Release Notes
2. Array::getPybindPtr() to get an editable const pointer.
3. Array::pData as binding of spData.
4. Array::Array that takes as input a pointer, so it does not re-allocate memory.
12. Producer defined inside Wrapper rather than being defined on each example.
2. Functions or parameters renamed:
1. By default, python example `tutorial_developer/python_2_pose_from_heatmaps.py` was using 2 scales starting at -1x736, changed to 1 scale at -1x368.
2. WrapperStructPose default parameters changed to match those of the OpenPose demo binary.
......
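Item 12 in the release notes above is the core of this commit: user code no longer builds a `Producer` and hands a `std::shared_ptr<Producer>` to the wrapper; it only describes the input, and the wrapper constructs the producer internally via `op::createProducer()`. A minimal, hedged sketch of the new call pattern (example video path; all other `WrapperStructInput` parameters keep the defaults declared later in this diff):

    // Describe the input instead of constructing a Producer (sketch, not taken from this commit).
    const op::WrapperStructInput wrapperStructInput{
        op::ProducerType::Video, "examples/media/video.avi"}; // example/hypothetical path
    opWrapper.configure(wrapperStructInput); // opWrapper: the op::Wrapper object used in the examples below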
......@@ -31,6 +31,8 @@ int openPoseDemo()
// op::Profiler::setDefaultX(100);
// Applying user defined configuration - GFlags to program variables
// cameraSize
const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// netInputSize
......@@ -40,10 +42,10 @@ int openPoseDemo()
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// producerType
const auto producerSharedPtr = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_camera_resolution,
FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
op::ProducerType producerType;
std::string producerString;
std::tie(producerType, producerString) = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
// poseModel
const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
// JSON saving
......@@ -89,8 +91,10 @@ int openPoseDemo()
opWrapper.configure(wrapperStructExtra);
// Producer (use default to disable any input)
const op::WrapperStructInput wrapperStructInput{
producerSharedPtr, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last, FLAGS_process_real_time,
FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat};
producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
cameraSize, FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views};
opWrapper.configure(wrapperStructInput);
// Consumer (comment or use default argument to disable any output)
const op::WrapperStructOutput wrapperStructOutput{
......
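A side note on the `std::tie` unpacking used above: it matches the C++11 style of the rest of the examples, since `op::flagsToProducer` now returns a `std::pair<op::ProducerType, std::string>`. If a user builds with C++17, the same call could use structured bindings instead (a sketch, not part of this commit):

    // C++17 alternative to the std::tie pattern shown above.
    const auto [producerType, producerString] = op::flagsToProducer(
        FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);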
......@@ -43,7 +43,7 @@ int handFromJsonTest()
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// producerType
const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, "", "", 0);
const auto producerSharedPtr = op::createProducer(op::ProducerType::ImageDirectory, FLAGS_image_dir);
// Enabling Google Logging
const bool enableGoogleLogging = true;
......
......@@ -44,6 +44,8 @@ int tutorialAddModule1()
op::Profiler::setDefaultX(FLAGS_profile_speed);
// Applying user defined configuration - GFlags to program variables
// cameraSize
const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// netInputSize
......@@ -53,10 +55,10 @@ int tutorialAddModule1()
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// producerType
const auto producerSharedPtr = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_camera_resolution,
FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
op::ProducerType producerType;
std::string producerString;
std::tie(producerType, producerString) = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
// poseModel
const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
// JSON saving
......@@ -102,8 +104,10 @@ int tutorialAddModule1()
opWrapperT.configure(wrapperStructExtra);
// Producer (use default to disable any input)
const op::WrapperStructInput wrapperStructInput{
producerSharedPtr, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last, FLAGS_process_real_time,
FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat};
producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
cameraSize, FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views};
opWrapperT.configure(wrapperStructInput);
// Consumer (comment or use default argument to disable any output)
const op::WrapperStructOutput wrapperStructOutput{
......
......@@ -126,6 +126,8 @@ int tutorialApiCpp5()
op::Profiler::setDefaultX(FLAGS_profile_speed);
// Applying user defined configuration - GFlags to program variables
// cameraSize
const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// netInputSize
......@@ -135,10 +137,10 @@ int tutorialApiCpp5()
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// producerType
const auto producerSharedPtr = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_camera_resolution,
FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
op::ProducerType producerType;
std::string producerString;
std::tie(producerType, producerString) = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
// poseModel
const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
// JSON saving
......@@ -184,8 +186,10 @@ int tutorialApiCpp5()
opWrapperT.configure(wrapperStructExtra);
// Producer (use default to disable any input)
const op::WrapperStructInput wrapperStructInput{
producerSharedPtr, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last, FLAGS_process_real_time,
FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat};
producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
cameraSize, FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views};
opWrapperT.configure(wrapperStructInput);
// Consumer (comment or use default argument to disable any output)
const auto displayMode = op::DisplayMode::NoDisplay;
......
......@@ -86,6 +86,8 @@ int tutorialApiCpp6()
// op::Profiler::setDefaultX(100);
// Applying user defined configuration - GFlags to program variables
// cameraSize
const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// netInputSize
......@@ -95,10 +97,10 @@ int tutorialApiCpp6()
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// producerType
const auto producerSharedPtr = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_camera_resolution,
FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
op::ProducerType producerType;
std::string producerString;
std::tie(producerType, producerString) = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
// poseModel
const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
// JSON saving
......@@ -152,8 +154,10 @@ int tutorialApiCpp6()
opWrapperT.configure(wrapperStructExtra);
// Producer (use default to disable any input)
const op::WrapperStructInput wrapperStructInput{
producerSharedPtr, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last, FLAGS_process_real_time,
FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat};
producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
cameraSize, FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views};
opWrapperT.configure(wrapperStructInput);
// Consumer (comment or use default argument to disable any output)
const op::WrapperStructOutput wrapperStructOutput{
......
......@@ -133,6 +133,8 @@ int tutorialApiCpp8()
// op::Profiler::setDefaultX(100);
// Applying user defined configuration - GFlags to program variables
// cameraSize
const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// netInputSize
......@@ -142,10 +144,10 @@ int tutorialApiCpp8()
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// producerType
const auto producerSharedPtr = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_camera_resolution,
FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
op::ProducerType producerType;
std::string producerString;
std::tie(producerType, producerString) = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
// poseModel
const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
// JSON saving
......@@ -199,8 +201,10 @@ int tutorialApiCpp8()
opWrapperT.configure(wrapperStructExtra);
// Producer (use default to disable any input)
const op::WrapperStructInput wrapperStructInput{
producerSharedPtr, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last, FLAGS_process_real_time,
FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat};
producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
cameraSize, FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views};
opWrapperT.configure(wrapperStructInput);
// Consumer (comment or use default argument to disable any output)
const auto displayMode = op::DisplayMode::NoDisplay;
......
......@@ -76,23 +76,29 @@ int tutorialDeveloperThread1()
__LINE__, __FUNCTION__, __FILE__);
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
// Step 2 - Read GFlags (user defined configuration)
// cameraSize
const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// producerType
const auto producerSharedPtr = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_camera_resolution,
FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
op::ProducerType producerType;
std::string producerString;
std::tie(producerType, producerString) = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
const auto displayProducerFpsMode = (FLAGS_process_real_time
? op::ProducerFpsMode::OriginalFps : op::ProducerFpsMode::RetrievalFps);
auto producerSharedPtr = createProducer(
producerType, producerString, cameraSize, FLAGS_camera_fps, FLAGS_camera_parameter_folder,
!FLAGS_frame_keep_distortion, (unsigned int) FLAGS_3d_views);
producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__);
// Step 3 - Setting producer
auto videoSeekSharedPtr = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
videoSeekSharedPtr->first = false;
videoSeekSharedPtr->second = 0;
const op::Point<int> producerSize{(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
const op::Point<int> producerSize{
(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
// Step 4 - Setting thread workers && manager
typedef std::vector<op::Datum> TypedefDatumsNoPtr;
typedef std::shared_ptr<TypedefDatumsNoPtr> TypedefDatums;
......
......@@ -110,23 +110,29 @@ int tutorialDeveloperThread2()
__LINE__, __FUNCTION__, __FILE__);
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
// Step 2 - Read GFlags (user defined configuration)
// cameraSize
const auto cameraSize = op::flagsToPoint(FLAGS_camera_resolution, "-1x-1");
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// producerType
const auto producerSharedPtr = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_camera_resolution,
FLAGS_camera_fps, FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
(unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
op::ProducerType producerType;
std::string producerString;
std::tie(producerType, producerString) = op::flagsToProducer(
FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, FLAGS_flir_camera, FLAGS_flir_camera_index);
const auto displayProducerFpsMode = (FLAGS_process_real_time
? op::ProducerFpsMode::OriginalFps : op::ProducerFpsMode::RetrievalFps);
auto producerSharedPtr = createProducer(
producerType, producerString, cameraSize, FLAGS_camera_fps, FLAGS_camera_parameter_folder,
!FLAGS_frame_keep_distortion, (unsigned int) FLAGS_3d_views);
producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__);
// Step 3 - Setting producer
auto videoSeekSharedPtr = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
videoSeekSharedPtr->first = false;
videoSeekSharedPtr->second = 0;
const op::Point<int> producerSize{(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
const op::Point<int> producerSize{
(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
// Step 4 - Setting thread workers && manager
typedef std::vector<op::Datum> TypedefDatumsNoPtr;
typedef std::shared_ptr<TypedefDatumsNoPtr> TypedefDatums;
......
......@@ -10,10 +10,12 @@ namespace op
Yaml,
Yml,
};
enum class CocoJsonFormat : bool
enum class CocoJsonFormat : unsigned char
{
Body,
Foot,
Car,
Size,
};
}
......
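The underlying type of `CocoJsonFormat` changes from `bool` to `unsigned char` because the enum now holds more than two values (`Body`, `Foot`, `Car`). The trailing `Size` entry reads like the usual count sentinel; assuming that convention, it can be used to enumerate the real values (hedged sketch, loop body hypothetical):

    // Assumes the header declaring op::CocoJsonFormat is included.
    for (auto i = 0u ; i < (unsigned int)op::CocoJsonFormat::Size ; i++)
    {
        const auto format = (op::CocoJsonFormat)i;
        // ... dispatch on `format` (Body, Foot, Car) here ...
    }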
......@@ -5,8 +5,10 @@ namespace op
{
enum class ProducerFpsMode : bool
{
OriginalFps, /**< The frames will be extracted at the original source fps (frames might be skipped or repeated). */
RetrievalFps, /**< The frames will be extracted when the software retrieves them (frames will not be skipped or repeated). */
/** The frames will be extracted at the original source fps (frames might be skipped or repeated). */
OriginalFps,
/** The frames will be extracted when the software retrieves them (frames will not be skipped or repeated). */
RetrievalFps,
};
enum class ProducerProperty : unsigned char
......@@ -25,12 +27,20 @@ namespace op
*/
enum class ProducerType : unsigned char
{
FlirCamera, /**< Stereo FLIR (Point-Grey) camera reader. Based on Spinnaker SDK. */
ImageDirectory, /**< An image directory reader. It is able to read images on a folder with a interface similar to the OpenCV cv::VideoCapture. */
IPCamera, /**< An IP camera frames extractor, extending the functionality of cv::VideoCapture. */
Video, /**< A video frames extractor, extending the functionality of cv::VideoCapture. */
Webcam, /**< A webcam frames extractor, extending the functionality of cv::VideoCapture. */
None, /**< No type defined. Default state when no specific Producer has been picked yet. */
/** Stereo FLIR (Point-Grey) camera reader. Based on Spinnaker SDK. */
FlirCamera,
/** An image directory reader. It is able to read images in a folder with an interface similar to the OpenCV
* cv::VideoCapture.
*/
ImageDirectory,
/** An IP camera frames extractor, extending the functionality of cv::VideoCapture. */
IPCamera,
/** A video frames extractor, extending the functionality of cv::VideoCapture. */
Video,
/** A webcam frames extractor, extending the functionality of cv::VideoCapture. */
Webcam,
/** No type defined. Default state when no specific Producer has been picked yet. */
None,
};
}
......
......@@ -178,6 +178,15 @@ namespace op
DELETE_COPY(Producer);
};
/**
* This function returns the desired producer given the input parameters.
*/
OP_API std::shared_ptr<Producer> createProducer(
const ProducerType producerType = ProducerType::None, const std::string& producerString = "",
const Point<int>& cameraResolution = Point<int>{-1,-1}, const double webcamFps = 30.,
const std::string& cameraParameterPath = "models/cameraParameters/", const bool undistortImage = true,
const unsigned int imageDirectoryStereo = -1);
}
#endif // OPENPOSE_PRODUCER_PRODUCER_HPP
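Based only on the `createProducer()` signature declared above, code that previously instantiated a reader by hand (as `handFromJsonTest` did) can now request one from this factory. A hedged sketch with an example path, relying on the declared defaults for the remaining parameters:

    #include <openpose/producer/producer.hpp>

    // Video reader via the new factory; resolution, fps, camera parameter path,
    // undistortion and stereo views keep their declared defaults.
    const auto producerPtr = op::createProducer(
        op::ProducerType::Video, "examples/media/video.avi"); // example/hypothetical path

Calling it with the default `ProducerType::None` falls through to the "Undefined Producer selected." error in the implementation shown further below.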
......@@ -5,7 +5,7 @@
#include <openpose/core/enumClasses.hpp>
#include <openpose/gui/enumClasses.hpp>
#include <openpose/pose/enumClasses.hpp>
#include <openpose/producer/producer.hpp>
#include <openpose/producer/enumClasses.hpp>
namespace op
{
......@@ -20,15 +20,9 @@ namespace op
const std::string& ipCameraPath, const int webcamIndex,
const bool flirCamera);
OP_API std::shared_ptr<Producer> flagsToProducer(const std::string& imageDirectory, const std::string& videoPath,
const std::string& ipCameraPath, const int webcamIndex,
const bool flirCamera = false,
const std::string& cameraResolution = "-1x-1",
const double webcamFps = 30.,
const std::string& cameraParameterPath = "models/cameraParameters/",
const bool undistortImage = true,
const unsigned int imageDirectoryStereo = 1,
const int flirCameraIndex = -1);
OP_API std::pair<ProducerType, std::string> flagsToProducer(
const std::string& imageDirectory, const std::string& videoPath, const std::string& ipCameraPath = "",
const int webcamIndex = -1, const bool flirCamera = false, const int flirCameraIndex = -1);
OP_API std::vector<HeatMapType> flagsToHeatMaps(const bool heatMapsAddParts = false,
const bool heatMapsAddBkg = false,
......
......@@ -22,17 +22,15 @@ namespace op
* @param wrapperStructOutput
* @param renderOutput
* @param userOutputWsEmpty
* @param producerSharedPtr
* @param threadManagerMode
*/
OP_API void wrapperConfigureSanityChecks(WrapperStructPose& wrapperStructPose,
const WrapperStructFace& wrapperStructFace,
const WrapperStructHand& wrapperStructHand,
const WrapperStructExtra& wrapperStructExtra,
const WrapperStructInput& wrapperStructInput,
const WrapperStructOutput& wrapperStructOutput,
const bool renderOutput,
const bool userOutputWsEmpty,
const ThreadManagerMode threadManagerMode);
OP_API void wrapperConfigureSanityChecks(
WrapperStructPose& wrapperStructPose, const WrapperStructFace& wrapperStructFace,
const WrapperStructHand& wrapperStructHand, const WrapperStructExtra& wrapperStructExtra,
const WrapperStructInput& wrapperStructInput, const WrapperStructOutput& wrapperStructOutput,
const bool renderOutput, const bool userOutputWsEmpty, const std::shared_ptr<Producer>& producerSharedPtr,
const ThreadManagerMode threadManagerMode);
/**
* Thread ID increase (private internal function).
......@@ -96,6 +94,13 @@ namespace op
{
log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
// Create producer
auto producerSharedPtr = createProducer(
wrapperStructInput.producerType, wrapperStructInput.producerString,
wrapperStructInput.cameraResolution, wrapperStructInput.webcamFps,
wrapperStructInput.cameraParameterPath, wrapperStructInput.undistortImage,
wrapperStructInput.imageDirectoryStereo);
// Editable arguments
auto wrapperStructPose = wrapperStructPoseTemp;
auto multiThreadEnabled = multiThreadEnabledTemp;
......@@ -139,9 +144,9 @@ namespace op
// Check no wrong/contradictory flags enabled
const auto userOutputWsEmpty = userOutputWs.empty();
wrapperConfigureSanityChecks(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra,
wrapperStructInput, wrapperStructOutput, renderOutput, userOutputWsEmpty,
threadManagerMode);
wrapperConfigureSanityChecks(
wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, wrapperStructInput,
wrapperStructOutput, renderOutput, userOutputWsEmpty, producerSharedPtr, threadManagerMode);
// Get number threads
auto numberThreads = wrapperStructPose.gpuNumber;
......@@ -194,20 +199,19 @@ namespace op
// Common parameters
auto finalOutputSize = wrapperStructPose.outputSize;
Point<int> producerSize{-1,-1};
const auto oPProducer = (wrapperStructInput.producerSharedPtr != nullptr);
const auto oPProducer = (producerSharedPtr != nullptr);
if (oPProducer)
{
// 1. Set producer properties
const auto displayProducerFpsMode = (wrapperStructInput.realTimeProcessing
? ProducerFpsMode::OriginalFps : ProducerFpsMode::RetrievalFps);
wrapperStructInput.producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
wrapperStructInput.producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip);
wrapperStructInput.producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate);
wrapperStructInput.producerSharedPtr->set(ProducerProperty::AutoRepeat,
wrapperStructInput.framesRepeat);
producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip);
producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate);
producerSharedPtr->set(ProducerProperty::AutoRepeat, wrapperStructInput.framesRepeat);
// 2. Set finalOutputSize
producerSize = Point<int>{(int)wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
(int)wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
producerSize = Point<int>{(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
// Set finalOutputSize to input size if desired
if (finalOutputSize.x == -1 || finalOutputSize.y == -1)
finalOutputSize = producerSize;
......@@ -217,7 +221,7 @@ namespace op
if (oPProducer)
{
const auto datumProducer = std::make_shared<DatumProducer<TDatums>>(
wrapperStructInput.producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameStep,
producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameStep,
wrapperStructInput.frameLast, spVideoSeek
);
datumProducerW = std::make_shared<WDatumProducer<TDatumsSP, TDatums>>(datumProducer);
......@@ -609,8 +613,11 @@ namespace op
{
// If humanFormat: bigger size (& maybe slower to process), but easier for user to read it
const auto humanFormat = true;
const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(wrapperStructOutput.writeCocoJson,
humanFormat, CocoJsonFormat::Body);
const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(
wrapperStructOutput.writeCocoJson, humanFormat,
(wrapperStructPose.poseModel != PoseModel::CAR_22
&& wrapperStructPose.poseModel != PoseModel::CAR_12
? CocoJsonFormat::Body : CocoJsonFormat::Car));
outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
}
// Write people foot pose data on disk (COCO validation json format for foot data)
......@@ -630,8 +637,8 @@ namespace op
outputWs.emplace_back(std::make_shared<WImageSaver<TDatumsSP>>(imageSaver));
}
// Write frames as *.avi video on hard disk
const auto producerFps = (wrapperStructInput.producerSharedPtr == nullptr ?
0. : wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FPS));
const auto producerFps = (producerSharedPtr == nullptr ?
0. : producerSharedPtr->get(CV_CAP_PROP_FPS));
const auto originalVideoFps = (wrapperStructOutput.writeVideoFps > 0 ?
wrapperStructOutput.writeVideoFps
: producerFps);
......
......@@ -14,10 +14,17 @@ namespace op
struct OP_API WrapperStructInput
{
/**
* Producer which will generate the frames.
* Set to nullptr to disable the whole input, i.e., if the user is going to use his own frames generator.
* Desired type of producer (FlirCamera, ImageDirectory, IPCamera, Video, Webcam, None, etc.).
* Default: ProducerType::None.
*/
std::shared_ptr<Producer> producerSharedPtr;
ProducerType producerType;
/**
* Path of the producer (image directory path for ImageDirectory, video path for Video,
* camera index for Webcam and FlirCamera, URL for IPCamera, etc.).
* Default: "".
*/
std::string producerString;
/**
* First image to process.
......@@ -59,16 +66,45 @@ namespace op
*/
bool framesRepeat;
/**
* Camera resolution (only for Webcam and FlirCamera).
*/
Point<int> cameraResolution;
/**
* Frame rate of the camera (only for some producers).
*/
double webcamFps;
/**
* Directory path for the camera parameters (intrinsic and extrinsic parameters).
*/
std::string cameraParameterPath;
/**
* Whether to undistort the image given the camera parameters.
*/
bool undistortImage;
/**
* Number of camera views recorded (only for prerecorded sources, such as video and image directory).
*/
unsigned int imageDirectoryStereo;
/**
* Constructor of the struct.
* It has the recommended and default values we recommend for each element of the struct.
* Since all the elements of the struct are public, they can also be manually filled.
*/
WrapperStructInput(const std::shared_ptr<Producer> producerSharedPtr = nullptr,
const unsigned long long frameFirst = 0, const unsigned long long frameStep = 1,
WrapperStructInput(const ProducerType producerType = ProducerType::None,
const std::string& producerString = "", const unsigned long long frameFirst = 0,
const unsigned long long frameStep = 1,
const unsigned long long frameLast = std::numeric_limits<unsigned long long>::max(),
const bool realTimeProcessing = false, const bool frameFlip = false,
const int frameRotate = 0, const bool framesRepeat = false);
const int frameRotate = 0, const bool framesRepeat = false,
const Point<int>& cameraResolution = Point<int>{-1,-1}, const double webcamFps = 30.,
const std::string& cameraParameterPath = "models/cameraParameters/",
const bool undistortImage = true, const unsigned int imageDirectoryStereo = 1);
};
}
......
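Putting the new fields together, a webcam setup no longer requires the caller to construct a `WebcamReader`; a hedged sketch using the constructor declared above (index, resolution and fps are example values; the remaining parameters keep their defaults):

    #include <limits>

    const op::WrapperStructInput wrapperStructInput{
        op::ProducerType::Webcam, "0",                       // producerType, producerString (webcam index as string)
        0ull, 1ull,                                          // frameFirst, frameStep
        std::numeric_limits<unsigned long long>::max(),      // frameLast
        true,                                                // realTimeProcessing
        false, 0, false,                                     // frameFlip, frameRotate, framesRepeat
        op::Point<int>{1280,720},                            // cameraResolution
        30.};                                                // webcamFps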
......@@ -45,7 +45,7 @@ namespace op
error("Dimension mismatch between poseKeypoints and poseScores.", __LINE__, __FUNCTION__, __FILE__);
const auto numberPeople = poseKeypoints.getSize(0);
const auto numberBodyParts = poseKeypoints.getSize(1);
const auto imageId = (numberBodyParts != 12 ? getLastNumber(imageName) : 1900000 + getLastNumber(imageName));
const auto imageId = getLastNumber(imageName);
for (auto person = 0 ; person < numberPeople ; person++)
{
// Comma at any moment but first element
......@@ -84,14 +84,23 @@ namespace op
indexesInCocoOrder = std::vector<int>{0, 16,15,18,17, 5,2,6,3,7, 4,12,9,13,10, 14,11};
else if (numberBodyParts == 23)
indexesInCocoOrder = std::vector<int>{18,21,19,22,20, 4,1,5,2,6, 3,13,8,14, 9, 15,10};
// Car
else if (numberBodyParts == 12)
indexesInCocoOrder = std::vector<int>{0,1,2,3, 4,5,6,7, 8, 8,9,10,11, 11};
}
// Foot
else if (mCocoJsonFormat == CocoJsonFormat::Foot)
{
if (numberBodyParts == 25)
indexesInCocoOrder = std::vector<int>{19,20,21, 22,23,24};
}
// Car
else if (mCocoJsonFormat == CocoJsonFormat::Car)
{
if (numberBodyParts == 12)
indexesInCocoOrder = std::vector<int>{0,1,2,3, 4,5,6,7, 8, 8,9,10,11, 11};
else if (numberBodyParts == 22)
for (auto i = 0 ; i < 22 ; i++)
indexesInCocoOrder.emplace_back(i);
}
// Sanity check
if (indexesInCocoOrder.empty())
error("Invalid number of body parts (" + std::to_string(numberBodyParts) + ").",
__LINE__, __FUNCTION__, __FILE__);
......
#include <openpose/utilities/check.hpp>
#include <openpose/utilities/fastMath.hpp>
#include <openpose/producer/headers.hpp>
#include <openpose/producer/producer.hpp>
namespace op
......@@ -349,4 +350,70 @@ namespace op
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
}
}
std::shared_ptr<Producer> createProducer(const ProducerType producerType, const std::string& producerString,
const Point<int>& cameraResolution, const double webcamFps,
const std::string& cameraParameterPath, const bool undistortImage,
const unsigned int imageDirectoryStereo)
{
try
{
log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
// Directory of images
if (producerType == ProducerType::ImageDirectory)
return std::make_shared<ImageDirectoryReader>(
producerString, imageDirectoryStereo, cameraParameterPath);
// Video
else if (producerType == ProducerType::Video)
return std::make_shared<VideoReader>(
producerString, imageDirectoryStereo, cameraParameterPath);
// IP camera
else if (producerType == ProducerType::IPCamera)
return std::make_shared<IpCameraReader>(producerString);
// Flir camera
else if (producerType == ProducerType::FlirCamera)
return std::make_shared<FlirReader>(
cameraParameterPath, cameraResolution, undistortImage, std::stoi(producerString));
// Webcam
else if (producerType == ProducerType::Webcam)
{
const auto webcamIndex = std::stoi(producerString);
auto cameraResolutionFinal = cameraResolution;
if (cameraResolutionFinal.x < 0 || cameraResolutionFinal.y < 0)
cameraResolutionFinal = Point<int>{1280,720};
if (webcamIndex >= 0)
{
const auto throwExceptionIfNoOpened = true;
return std::make_shared<WebcamReader>(
webcamIndex, cameraResolutionFinal, webcamFps, throwExceptionIfNoOpened);
}
else
{
const auto throwExceptionIfNoOpened = false;
std::shared_ptr<WebcamReader> webcamReader;
for (auto index = 0 ; index < 10 ; index++)
{
webcamReader = std::make_shared<WebcamReader>(
index, cameraResolutionFinal, webcamFps, throwExceptionIfNoOpened);
if (webcamReader->isOpened())
{
log("Auto-detecting camera index... Detected and opened camera " + std::to_string(index)
+ ".", Priority::High);
return webcamReader;
}
}
error("No camera found.", __LINE__, __FUNCTION__, __FILE__);
}
}
// else
error("Undefined Producer selected.", __LINE__, __FUNCTION__, __FILE__);
return std::shared_ptr<Producer>{};
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return std::shared_ptr<Producer>{};
}
}
}
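One behavioral detail of the implementation above: for `ProducerType::Webcam` and `ProducerType::FlirCamera` the producer string is parsed with `std::stoi`, so callers must always pass the device index as a string; the default empty string would make `std::stoi` throw. For webcams, a negative index triggers the auto-detection loop over indexes 0-9. A hedged usage sketch:

    // Negative index: probe cameras 0-9 and return the first one that opens.
    const auto autoDetected = op::createProducer(op::ProducerType::Webcam, "-1");
    // Non-negative index: open that camera directly (throws if it cannot be opened).
    const auto firstCamera  = op::createProducer(op::ProducerType::Webcam, "0");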
#include <cstdio> // sscanf
#include <openpose/producer/flirReader.hpp>
#include <openpose/producer/imageDirectoryReader.hpp>
#include <openpose/producer/ipCameraReader.hpp>
#include <openpose/producer/videoReader.hpp>
#include <openpose/producer/webcamReader.hpp>
#include <openpose/utilities/check.hpp>
#include <openpose/utilities/flagsToOpenPose.hpp>
......@@ -150,12 +145,9 @@ namespace op
}
}
std::shared_ptr<Producer> flagsToProducer(const std::string& imageDirectory, const std::string& videoPath,
const std::string& ipCameraPath, const int webcamIndex,
const bool flirCamera, const std::string& cameraResolution,
const double webcamFps, const std::string& cameraParameterPath,
const bool undistortImage, const unsigned int imageDirectoryStereo,
const int flirCameraIndex)
std::pair<ProducerType, std::string> flagsToProducer(
const std::string& imageDirectory, const std::string& videoPath, const std::string& ipCameraPath,
const int webcamIndex, const bool flirCamera, const int flirCameraIndex)
{
try
{
......@@ -163,59 +155,25 @@ namespace op
const auto type = flagsToProducerType(imageDirectory, videoPath, ipCameraPath, webcamIndex, flirCamera);
if (type == ProducerType::ImageDirectory)
return std::make_shared<ImageDirectoryReader>(imageDirectory, imageDirectoryStereo,
cameraParameterPath);
return std::make_pair(ProducerType::ImageDirectory, imageDirectory);
else if (type == ProducerType::Video)
return std::make_shared<VideoReader>(videoPath, imageDirectoryStereo, cameraParameterPath);
return std::make_pair(ProducerType::Video, videoPath);
else if (type == ProducerType::IPCamera)
return std::make_shared<IpCameraReader>(ipCameraPath);
return std::make_pair(ProducerType::IPCamera, ipCameraPath);
// Flir camera
if (type == ProducerType::FlirCamera)
{
// cameraFrameSize
const auto cameraFrameSize = flagsToPoint(cameraResolution, "-1x-1");
return std::make_shared<FlirReader>(cameraParameterPath, cameraFrameSize, undistortImage,
flirCameraIndex);
}
else if (type == ProducerType::FlirCamera)
return std::make_pair(ProducerType::FlirCamera, std::to_string(flirCameraIndex));
// Webcam
if (type == ProducerType::Webcam)
{
// cameraFrameSize
auto cameraFrameSize = flagsToPoint(cameraResolution, "1280x720");
if (cameraFrameSize.x < 0 || cameraFrameSize.y < 0)
cameraFrameSize = Point<int>{1280,720};
if (webcamIndex >= 0)
{
const auto throwExceptionIfNoOpened = true;
return std::make_shared<WebcamReader>(webcamIndex, cameraFrameSize, webcamFps,
throwExceptionIfNoOpened);
}
else
{
const auto throwExceptionIfNoOpened = false;
std::shared_ptr<WebcamReader> webcamReader;
for (auto index = 0 ; index < 10 ; index++)
{
webcamReader = std::make_shared<WebcamReader>(index, cameraFrameSize, webcamFps,
throwExceptionIfNoOpened);
if (webcamReader->isOpened())
{
log("Auto-detecting camera index... Detected and opened camera " + std::to_string(index)
+ ".", Priority::High);
return webcamReader;
}
}
error("No camera found.", __LINE__, __FUNCTION__, __FILE__);
}
}
else if (type == ProducerType::Webcam)
return std::make_pair(ProducerType::Webcam, std::to_string(webcamIndex));
// else
error("Undefined Producer selected.", __LINE__, __FUNCTION__, __FILE__);
return std::shared_ptr<Producer>{};
return std::make_pair(ProducerType::None, "");
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return std::shared_ptr<Producer>{};
return std::make_pair(ProducerType::None, "");
}
}
......
......@@ -12,6 +12,7 @@ namespace op
const WrapperStructOutput& wrapperStructOutput,
const bool renderOutput,
const bool userOutputWsEmpty,
const std::shared_ptr<Producer>& producerSharedPtr,
const ThreadManagerMode threadManagerMode)
{
try
......@@ -104,9 +105,9 @@ namespace op
log(message, Priority::High);
}
}
if (!wrapperStructOutput.writeVideo.empty() && wrapperStructInput.producerSharedPtr == nullptr)
if (!wrapperStructOutput.writeVideo.empty() && producerSharedPtr == nullptr)
error("Writting video is only available if the OpenPose producer is used (i.e."
" wrapperStructInput.producerSharedPtr cannot be a nullptr).",
" producerSharedPtr cannot be a nullptr).",
__LINE__, __FUNCTION__, __FILE__);
if (!wrapperStructPose.enable)
{
......@@ -136,7 +137,7 @@ namespace op
+ std::to_string(wrapperStructPose.outputSize.y) + ").",
__LINE__, __FUNCTION__, __FILE__);
if (wrapperStructOutput.writeVideoFps <= 0
&& wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FPS) > 0)
&& producerSharedPtr->get(CV_CAP_PROP_FPS) > 0)
error("Set `--camera_fps` for this producer, as its frame rate is unknown.",
__LINE__, __FUNCTION__, __FILE__);
#ifdef USE_CPU_ONLY
......@@ -147,8 +148,8 @@ namespace op
// Net input resolution cannot be reshaped for Caffe OpenCL and MKL versions, only for CUDA version
#if defined USE_MKL || defined USE_OPENCL
// If image_dir and netInputSize == -1 --> error
if ((wrapperStructInput.producerSharedPtr == nullptr
|| wrapperStructInput.producerSharedPtr->getType() == ProducerType::ImageDirectory)
if ((producerSharedPtr == nullptr
|| producerSharedPtr->getType() == ProducerType::ImageDirectory)
// If netInputSize is -1
&& (wrapperStructPose.netInputSize.x == -1 || wrapperStructPose.netInputSize.y == -1))
{
......
......@@ -2,18 +2,27 @@
namespace op
{
WrapperStructInput::WrapperStructInput(const std::shared_ptr<Producer> producerSharedPtr_,
WrapperStructInput::WrapperStructInput(const ProducerType producerType_, const std::string& producerString_,
const unsigned long long frameFirst_, const unsigned long long frameStep_,
const unsigned long long frameLast_, const bool realTimeProcessing_,
const bool frameFlip_, const int frameRotate_, const bool framesRepeat_) :
producerSharedPtr{producerSharedPtr_},
const bool frameFlip_, const int frameRotate_, const bool framesRepeat_,
const Point<int>& cameraResolution_, const double webcamFps_,
const std::string& cameraParameterPath_, const bool undistortImage_,
const unsigned int imageDirectoryStereo_) :
producerType{producerType_},
producerString{producerString_},
frameFirst{frameFirst_},
frameStep{frameStep_},
frameLast{frameLast_},
realTimeProcessing{realTimeProcessing_},
frameFlip{frameFlip_},
frameRotate{frameRotate_},
framesRepeat{framesRepeat_}
framesRepeat{framesRepeat_},
cameraResolution{cameraResolution_},
webcamFps{webcamFps_},
cameraParameterPath{cameraParameterPath_},
undistortImage{undistortImage_},
imageDirectoryStereo{imageDirectoryStereo_}
{
}
}