Commit 1d15592d authored by gineshidalgo99

Fixed missing include and old GCC error

Parent ad3aad62
......@@ -45,7 +45,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -102,6 +102,13 @@ DEFINE_string(face_net_resolution, "368x368", "Multiples of 16. Analog
DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g."
" `model_folder`.");
DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16. Analogous to `net_resolution` but applied to the hand keypoint detector.");
DEFINE_int32(hand_detection_mode, -1, "Set to 0 to perform 1-time keypoint detection (fastest), 1 for iterative detection"
" (recommended for images and fast videos, slow method), 2 for tracking (recommended for"
" webcam if the frame rate is >10 FPS per GPU used and for video, in practice as fast as"
" 1-time detection), 3 for both iterative and tracking (recommended for webcam if the"
" resulting frame rate is still >10 FPS and for video, ideally best result but slower), or"
" -1 (default) for automatic selection (fast method for webcam, tracking for video and"
" iterative for images).");
// OpenPose Rendering
DEFINE_int32(part_to_show, 0, "Part to show from the start.");
DEFINE_bool(disable_blending, false, "If blending is enabled, it will merge the results with the original frame. If disabled, it"
......@@ -238,6 +245,33 @@ std::vector<op::HeatMapType> gflagToHeatMaps(const bool heatMapsAddParts, const
return heatMapTypes;
}
op::DetectionMode gflagToDetectionMode(const int handDetectionModeFlag, const std::shared_ptr<op::Producer>& producer = nullptr)
{
if (handDetectionModeFlag == -1)
{
if (producer == nullptr)
op::error("Since there is no default producer, `hand_detection_mode` must be set.", __LINE__, __FUNCTION__, __FILE__);
const auto producerType = producer->getType();
if (producerType == op::ProducerType::Webcam)
return op::DetectionMode::Fast;
else if (producerType == op::ProducerType::ImageDirectory)
return op::DetectionMode::Iterative;
else if (producerType == op::ProducerType::Video)
return op::DetectionMode::Tracking;
}
else if (handDetectionModeFlag == 0)
return op::DetectionMode::Fast;
else if (handDetectionModeFlag == 1)
return op::DetectionMode::Iterative;
else if (handDetectionModeFlag == 2)
return op::DetectionMode::Tracking;
else if (handDetectionModeFlag == 3)
return op::DetectionMode::IterativeAndTracking;
// else
op::error("Undefined DetectionMode selected.", __LINE__, __FUNCTION__, __FILE__);
return op::DetectionMode::Fast;
}
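// Usage sketch (mirrors the call site further below in this demo; passing the producer
// enables the automatic selection triggered by -1):
// const auto detectionMode = gflagToDetectionMode(FLAGS_hand_detection_mode, producerSharedPtr);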
op::RenderMode gflagToRenderMode(const int renderFlag, const int renderPoseFlag = -2)
{
if (renderFlag == -1 && renderPoseFlag != -2)
......@@ -304,7 +338,7 @@ int opRealTimePoseDemo()
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
// op::ConfigureLog::setPriorityThreshold(op::Priority::None); // To print all logging messages
op::log("Starting pose estimation demo.", op::Priority::Max);
op::log("Starting pose estimation demo.", op::Priority::High);
const auto timerBegin = std::chrono::high_resolution_clock::now();
// Applying user defined configuration
......@@ -331,8 +365,9 @@ int opRealTimePoseDemo()
const op::WrapperStructFace wrapperStructFace{FLAGS_face, faceNetInputSize, gflagToRenderMode(FLAGS_render_face, FLAGS_render_pose),
(float)FLAGS_alpha_face, (float)FLAGS_alpha_heatmap_face};
// Hand configuration (use op::WrapperStructHand{} to disable it)
const op::WrapperStructHand wrapperStructHand{FLAGS_hand, handNetInputSize, gflagToRenderMode(FLAGS_render_hand, FLAGS_render_pose),
(float)FLAGS_alpha_hand, (float)FLAGS_alpha_heatmap_hand};
const op::WrapperStructHand wrapperStructHand{FLAGS_hand, handNetInputSize, gflagToDetectionMode(FLAGS_hand_detection_mode, producerSharedPtr),
gflagToRenderMode(FLAGS_render_hand, FLAGS_render_pose), (float)FLAGS_alpha_hand,
(float)FLAGS_alpha_heatmap_hand};
// Producer (use default to disable any input)
const op::WrapperStructInput wrapperStructInput{producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, FLAGS_process_real_time,
FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat};
......@@ -348,7 +383,7 @@ int opRealTimePoseDemo()
// Start processing
// Two different ways of running the program in a multithreaded environment
op::log("Starting thread(s)", op::Priority::Max);
op::log("Starting thread(s)", op::Priority::High);
// Option a) Recommended - Also using the main thread (this thread) for processing (it saves 1 thread)
// Start, run & stop threads
opWrapper.exec(); // It blocks this thread until all threads have finished
......@@ -370,14 +405,14 @@ int opRealTimePoseDemo()
// while (opWrapper.isRunning())
// std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs});
// // Stop and join threads
// op::log("Stopping thread(s)", op::Priority::Max);
// op::log("Stopping thread(s)", op::Priority::High);
// opWrapper.stop();
// Measuring total time
const auto now = std::chrono::high_resolution_clock::now();
const auto totalTimeSec = (double)std::chrono::duration_cast<std::chrono::nanoseconds>(now-timerBegin).count() * 1e-9;
const auto message = "Real-time pose estimation demo successfully finished. Total time: " + std::to_string(totalTimeSec) + " seconds.";
op::log(message, op::Priority::Max);
op::log(message, op::Priority::High);
return 0;
}
......
......@@ -22,7 +22,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -91,7 +91,7 @@ std::tuple<op::Point<int>, op::Point<int>, op::Point<int>, op::PoseModel> gflags
int openPoseTutorialPose1()
{
op::log("OpenPose Library Tutorial - Example 1.", op::Priority::Max);
op::log("OpenPose Library Tutorial - Example 1.", op::Priority::High);
// ------------------------- INITIALIZATION -------------------------
// Step 1 - Set logging level
// - 0 will output all the logging messages
......@@ -141,7 +141,7 @@ int openPoseTutorialPose1()
// Step 1 - Show results
frameDisplayer.displayFrame(outputImage, 0); // Alternative: cv::imshow(outputImage) + cv::waitKey(0)
// Step 2 - Logging information message
op::log("Example 1 successfully finished.", op::Priority::Max);
op::log("Example 1 successfully finished.", op::Priority::High);
// Return successful message
return 0;
}
......
......@@ -22,7 +22,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -94,7 +94,7 @@ std::tuple<op::Point<int>, op::Point<int>, op::Point<int>, op::PoseModel> gflags
int openPoseTutorialPose2()
{
op::log("OpenPose Library Tutorial - Example 2.", op::Priority::Max);
op::log("OpenPose Library Tutorial - Example 2.", op::Priority::High);
// ------------------------- INITIALIZATION -------------------------
// Step 1 - Set logging level
// - 0 will output all the logging messages
......@@ -148,7 +148,7 @@ int openPoseTutorialPose2()
// Step 1 - Show results
frameDisplayer.displayFrame(outputImage, 0); // Alternative: cv::imshow(outputImage) + cv::waitKey(0)
// Step 2 - Logging information message
op::log("Example 2 successfully finished.", op::Priority::Max);
op::log("Example 2 successfully finished.", op::Priority::High);
// Return successful message
return 0;
}
......
......@@ -21,7 +21,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -107,7 +107,7 @@ std::tuple<op::Point<int>, op::Point<int>, std::shared_ptr<op::Producer>> gflags
int openPoseTutorialThread1()
{
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::Max);
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::High);
// ------------------------- INITIALIZATION -------------------------
// Step 1 - Set logging level
// - 0 will output all the logging messages
......@@ -173,7 +173,7 @@ int openPoseTutorialThread1()
// threadManager.add(threadId, {wDatumProducer, wGui}, queueIn, queueOut); // Thread 0, queues 0 -> 1
// ------------------------- STARTING AND STOPPING THREADING -------------------------
op::log("Starting thread(s)", op::Priority::Max);
op::log("Starting thread(s)", op::Priority::High);
// Two different ways of running the program in a multithreaded environment
// Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
threadManager.exec(); // It blocks this thread until all threads have finished
......@@ -187,12 +187,12 @@ int openPoseTutorialThread1()
// while (threadManager.isRunning())
// std::this_thread::sleep_for(std::chrono::milliseconds{33});
// // Stop and join threads
// op::log("Stopping thread(s)", op::Priority::Max);
// op::log("Stopping thread(s)", op::Priority::High);
// threadManager.stop();
// ------------------------- CLOSING -------------------------
// Logging information message
op::log("Example 3 successfully finished.", op::Priority::Max);
op::log("Example 3 successfully finished.", op::Priority::High);
// Return successful message
return 0;
}
......
......@@ -22,7 +22,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -141,7 +141,7 @@ std::tuple<op::Point<int>, op::Point<int>, std::shared_ptr<op::Producer>> gflags
int openPoseTutorialThread2()
{
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::Max);
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::High);
// ------------------------- INITIALIZATION -------------------------
// Step 1 - Set logging level
// - 0 will output all the logging messages
......@@ -215,7 +215,7 @@ int openPoseTutorialThread2()
// threadManager.add(threadId, wGui, queueIn++, queueOut++); // Thread 0, queues 2 -> 3
// ------------------------- STARTING AND STOPPING THREADING -------------------------
op::log("Starting thread(s)", op::Priority::Max);
op::log("Starting thread(s)", op::Priority::High);
// Two different ways of running the program in a multithreaded environment
// Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
threadManager.exec(); // It blocks this thread until all threads have finished
......@@ -229,12 +229,12 @@ int openPoseTutorialThread2()
// while (threadManager.isRunning())
// std::this_thread::sleep_for(std::chrono::milliseconds{33});
// // Stop and join threads
// op::log("Stopping thread(s)", op::Priority::Max);
// op::log("Stopping thread(s)", op::Priority::High);
// threadManager.stop();
// ------------------------- CLOSING -------------------------
// Logging information message
op::log("Example 3 successfully finished.", op::Priority::Max);
op::log("Example 3 successfully finished.", op::Priority::High);
// Return successful message
return 0;
}
......
......@@ -27,7 +27,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -61,7 +61,7 @@ public:
// Close program when empty frame
if (mImageFiles.size() <= mCounter)
{
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::Max);
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::High);
// This function stops this worker, which will eventually stop the whole thread system once all the frames have been processed
this->stop();
return nullptr;
......@@ -79,7 +79,7 @@ public:
// If empty frame -> return nullptr
if (datum.cvInputData.empty())
{
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::Max);
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::High);
this->stop();
datumsPtr = nullptr;
}
......@@ -162,7 +162,7 @@ public:
int openPoseTutorialThread3()
{
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::Max);
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::High);
// ------------------------- INITIALIZATION -------------------------
// Step 1 - Set logging level
// - 0 will output all the logging messages
......@@ -197,7 +197,7 @@ int openPoseTutorialThread3()
threadManager.add(threadId++, wUserOutput, queueIn++, queueOut++); // Thread 2, queues 2 -> 3
// ------------------------- STARTING AND STOPPING THREADING -------------------------
op::log("Starting thread(s)", op::Priority::Max);
op::log("Starting thread(s)", op::Priority::High);
// Two different ways of running the program in a multithreaded environment
// Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
threadManager.exec(); // It blocks this thread until all threads have finished
......@@ -211,12 +211,12 @@ int openPoseTutorialThread3()
// while (threadManager.isRunning())
// std::this_thread::sleep_for(std::chrono::milliseconds{33});
// // Stop and join threads
// op::log("Stopping thread(s)", op::Priority::Max);
// op::log("Stopping thread(s)", op::Priority::High);
// threadManager.stop();
// ------------------------- CLOSING -------------------------
// Logging information message
op::log("Example 3 successfully finished.", op::Priority::Max);
op::log("Example 3 successfully finished.", op::Priority::High);
// Return successful message
return 0;
}
......
......@@ -27,7 +27,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -74,7 +74,7 @@ public:
// Close program when empty frame
if (mImageFiles.size() <= mCounter)
{
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::Max);
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::High);
// This function stops this worker, which will eventually stop the whole thread system once all the frames have been processed
this->stop();
return nullptr;
......@@ -92,7 +92,7 @@ public:
// If empty frame -> return nullptr
if (datum.cvInputData.empty())
{
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::Max);
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::High);
this->stop();
datumsPtr = nullptr;
}
......@@ -175,7 +175,7 @@ public:
int openPoseTutorialThread4()
{
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::Max);
op::log("OpenPose Library Tutorial - Example 3.", op::Priority::High);
// ------------------------- INITIALIZATION -------------------------
// Step 1 - Set logging level
// - 0 will output all the logging messages
......@@ -210,7 +210,7 @@ int openPoseTutorialThread4()
threadManager.add(threadId++, wUserOutput, queueIn++, queueOut++); // Thread 2, queues 2 -> 3
// ------------------------- STARTING AND STOPPING THREADING -------------------------
op::log("Starting thread(s)", op::Priority::Max);
op::log("Starting thread(s)", op::Priority::High);
// Two different ways of running the program in a multithreaded environment
// Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
threadManager.exec(); // It blocks this thread until all threads have finished
......@@ -224,12 +224,12 @@ int openPoseTutorialThread4()
// while (threadManager.isRunning())
// std::this_thread::sleep_for(std::chrono::milliseconds{33});
// // Stop and join threads
// op::log("Stopping thread(s)", op::Priority::Max);
// op::log("Stopping thread(s)", op::Priority::High);
// threadManager.stop();
// ------------------------- CLOSING -------------------------
// Logging information message
op::log("Example 3 successfully finished.", op::Priority::Max);
op::log("Example 3 successfully finished.", op::Priority::High);
// Return successful message
return 0;
}
......
......@@ -43,7 +43,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -89,6 +89,13 @@ DEFINE_string(face_net_resolution, "368x368", "Multiples of 16. Analog
DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g."
" `model_folder`.");
DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16. Analogous to `net_resolution` but applied to the hand keypoint detector.");
DEFINE_int32(hand_detection_mode, 0, "Set to 0 to perform 1-time keypoint detection (fastest), 1 for iterative detection"
" (recommended for images and fast videos, slow method), 2 for tracking (recommended for"
" webcam if the frame rate is >10 FPS per GPU used and for video, in practice as fast as"
" 1-time detection), 3 for both iterative and tracking (recommended for webcam if the"
" resulting frame rate is still >10 FPS and for video, ideally best result but slower), or"
" -1 (default) for automatic selection (fast method for webcam, tracking for video and"
" iterative for images).");
// OpenPose Rendering
DEFINE_int32(part_to_show, 0, "Part to show from the start.");
DEFINE_bool(disable_blending, false, "If blending is enabled, it will merge the results with the original frame. If disabled, it"
......@@ -164,7 +171,7 @@ public:
// Close program when empty frame
if (mClosed || mImageFiles.size() <= mCounter)
{
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::Max);
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::High);
// This function stops this worker, which will eventually stop the whole thread system once all the frames have been processed
mClosed = true;
return nullptr;
......@@ -182,7 +189,7 @@ public:
// If empty frame -> return nullptr
if (datum.cvInputData.empty())
{
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::Max);
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::High);
mClosed = true;
datumsPtr = nullptr;
}
......@@ -217,7 +224,7 @@ public:
cv::waitKey(1); // It displays the image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image)
}
else
op::log("Nullptr or empty datumsPtr found.", op::Priority::Max, __LINE__, __FUNCTION__, __FILE__);
op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__);
}
};
......@@ -271,6 +278,34 @@ std::vector<op::HeatMapType> gflagToHeatMaps(const bool heatMapsAddParts, const
return heatMapTypes;
}
op::DetectionMode gflagToDetectionMode(const int handDetectionModeFlag, const std::shared_ptr<op::Producer>& producer = nullptr)
{
if (handDetectionModeFlag == -1)
{
if (producer == nullptr)
op::error("Since there is no default producer, `hand_detection_mode` must be set.", __LINE__, __FUNCTION__, __FILE__);
const auto producerType = producer->getType();
if (producerType == op::ProducerType::Webcam)
return op::DetectionMode::Fast;
else if (producerType == op::ProducerType::ImageDirectory)
return op::DetectionMode::Iterative;
else if (producerType == op::ProducerType::Video)
return op::DetectionMode::Tracking;
}
else if (handDetectionModeFlag == 0)
return op::DetectionMode::Fast;
else if (handDetectionModeFlag == 1)
return op::DetectionMode::Iterative;
else if (handDetectionModeFlag == 2)
return op::DetectionMode::Tracking;
else if (handDetectionModeFlag == 3)
return op::DetectionMode::IterativeAndTracking;
// else
op::error("Undefined DetectionMode selected.", __LINE__, __FUNCTION__, __FILE__);
return op::DetectionMode::Fast;
}
op::RenderMode gflagToRenderMode(const int renderFlag, const int renderPoseFlag = -2)
{
if (renderFlag == -1 && renderPoseFlag != -2)
......@@ -333,7 +368,7 @@ int openPoseTutorialWrapper1()
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
// op::ConfigureLog::setPriorityThreshold(op::Priority::None); // To print all logging messages
op::log("Starting pose estimation demo.", op::Priority::Max);
op::log("Starting pose estimation demo.", op::Priority::High);
const auto timerBegin = std::chrono::high_resolution_clock::now();
// Applying user defined configuration
......@@ -359,8 +394,9 @@ int openPoseTutorialWrapper1()
const op::WrapperStructFace wrapperStructFace{FLAGS_face, faceNetInputSize, gflagToRenderMode(FLAGS_render_face, FLAGS_render_pose),
(float)FLAGS_alpha_face, (float)FLAGS_alpha_heatmap_face};
// Hand configuration (use op::WrapperStructHand{} to disable it)
const op::WrapperStructHand wrapperStructHand{FLAGS_hand, handNetInputSize, gflagToRenderMode(FLAGS_render_hand, FLAGS_render_pose),
(float)FLAGS_alpha_hand, (float)FLAGS_alpha_heatmap_hand};
const op::WrapperStructHand wrapperStructHand{FLAGS_hand, handNetInputSize, gflagToDetectionMode(FLAGS_hand_detection_mode),
gflagToRenderMode(FLAGS_render_hand, FLAGS_render_pose), (float)FLAGS_alpha_hand,
(float)FLAGS_alpha_heatmap_hand};
// Consumer (comment or use default argument to disable any output)
const bool displayGui = false;
const bool guiVerbose = false;
......@@ -374,7 +410,7 @@ int openPoseTutorialWrapper1()
// Set to single-thread running (e.g. for debugging purposes)
// opWrapper.disableMultiThreading();
op::log("Starting thread(s)", op::Priority::Max);
op::log("Starting thread(s)", op::Priority::High);
opWrapper.start();
// User processing
......@@ -392,18 +428,18 @@ int openPoseTutorialWrapper1()
if (successfullyEmplaced && opWrapper.waitAndPop(datumProcessed))
userOutputClass.display(datumProcessed);
else
op::log("Processed datum could not be emplaced.", op::Priority::Max, __LINE__, __FUNCTION__, __FILE__);
op::log("Processed datum could not be emplaced.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__);
}
}
op::log("Stopping thread(s)", op::Priority::Max);
op::log("Stopping thread(s)", op::Priority::High);
opWrapper.stop();
// Measuring total time
const auto now = std::chrono::high_resolution_clock::now();
const auto totalTimeSec = (double)std::chrono::duration_cast<std::chrono::nanoseconds>(now-timerBegin).count() * 1e-9;
const auto message = "Real-time pose estimation demo successfully finished. Total time: " + std::to_string(totalTimeSec) + " seconds.";
op::log(message, op::Priority::Max);
op::log(message, op::Priority::High);
return 0;
}
......
......@@ -43,7 +43,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......@@ -89,6 +89,13 @@ DEFINE_string(face_net_resolution, "368x368", "Multiples of 16. Analog
DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g."
" `model_folder`.");
DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16. Analogous to `net_resolution` but applied to the hand keypoint detector.");
DEFINE_int32(hand_detection_mode, 0, "Set to 0 to perform 1-time keypoint detection (fastest), 1 for iterative detection"
" (recommended for images and fast videos, slow method), 2 for tracking (recommended for"
" webcam if the frame rate is >10 FPS per GPU used and for video, in practice as fast as"
" 1-time detection), 3 for both iterative and tracking (recommended for webcam if the"
" resulting frame rate is still >10 FPS and for video, ideally best result but slower), or"
" -1 (default) for automatic selection (fast method for webcam, tracking for video and"
" iterative for images).");
// OpenPose Rendering
DEFINE_int32(part_to_show, 0, "Part to show from the start.");
DEFINE_bool(disable_blending, false, "If blending is enabled, it will merge the results with the original frame. If disabled, it"
......@@ -167,7 +174,7 @@ public:
// Close program when empty frame
if (mImageFiles.size() <= mCounter)
{
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::Max);
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::High);
// This function stops this worker, which will eventually stop the whole thread system once all the frames have been processed
this->stop();
return nullptr;
......@@ -185,7 +192,7 @@ public:
// If empty frame -> return nullptr
if (datum.cvInputData.empty())
{
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::Max);
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", op::Priority::High);
this->stop();
datumsPtr = nullptr;
}
......@@ -316,6 +323,34 @@ std::vector<op::HeatMapType> gflagToHeatMaps(const bool heatMapsAddParts, const
return heatMapTypes;
}
op::DetectionMode gflagToDetectionMode(const int handDetectionModeFlag, const std::shared_ptr<op::Producer>& producer = nullptr)
{
if (handDetectionModeFlag == -1)
{
if (producer == nullptr)
op::error("Since there is no default producer, `hand_detection_mode` must be set.", __LINE__, __FUNCTION__, __FILE__);
const auto producerType = producer->getType();
if (producerType == op::ProducerType::Webcam)
return op::DetectionMode::Fast;
else if (producerType == op::ProducerType::ImageDirectory)
return op::DetectionMode::Iterative;
else if (producerType == op::ProducerType::Video)
return op::DetectionMode::Tracking;
}
else if (handDetectionModeFlag == 0)
return op::DetectionMode::Fast;
else if (handDetectionModeFlag == 1)
return op::DetectionMode::Iterative;
else if (handDetectionModeFlag == 2)
return op::DetectionMode::Tracking;
else if (handDetectionModeFlag == 3)
return op::DetectionMode::IterativeAndTracking;
// else
op::error("Undefined DetectionMode selected.", __LINE__, __FUNCTION__, __FILE__);
return op::DetectionMode::Fast;
}
op::RenderMode gflagToRenderMode(const int renderFlag, const int renderPoseFlag = -2)
{
if (renderFlag == -1 && renderPoseFlag != -2)
......@@ -378,7 +413,7 @@ int openPoseTutorialWrapper2()
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
// op::ConfigureLog::setPriorityThreshold(op::Priority::None); // To print all logging messages
op::log("Starting pose estimation demo.", op::Priority::Max);
op::log("Starting pose estimation demo.", op::Priority::High);
const auto timerBegin = std::chrono::high_resolution_clock::now();
// Applying user defined configuration
......@@ -420,8 +455,9 @@ int openPoseTutorialWrapper2()
const op::WrapperStructFace wrapperStructFace{FLAGS_face, faceNetInputSize, gflagToRenderMode(FLAGS_render_face, FLAGS_render_pose),
(float)FLAGS_alpha_face, (float)FLAGS_alpha_heatmap_face};
// Hand configuration (use op::WrapperStructHand{} to disable it)
const op::WrapperStructHand wrapperStructHand{FLAGS_hand, handNetInputSize, gflagToRenderMode(FLAGS_render_hand, FLAGS_render_pose),
(float)FLAGS_alpha_hand, (float)FLAGS_alpha_heatmap_hand};
const op::WrapperStructHand wrapperStructHand{FLAGS_hand, handNetInputSize, gflagToDetectionMode(FLAGS_hand_detection_mode),
gflagToRenderMode(FLAGS_render_hand, FLAGS_render_pose), (float)FLAGS_alpha_hand,
(float)FLAGS_alpha_heatmap_hand};
// Consumer (comment or use default argument to disable any output)
const bool displayGui = false;
const bool guiVerbose = false;
......@@ -435,7 +471,7 @@ int openPoseTutorialWrapper2()
// Set to single-thread running (e.g. for debugging purposes)
// opWrapper.disableMultiThreading();
op::log("Starting thread(s)", op::Priority::Max);
op::log("Starting thread(s)", op::Priority::High);
// Two different ways of running the program in a multithreaded environment
// // Option a) Recommended - Also using the main thread (this thread) for processing (it saves 1 thread)
// // Start, run & stop threads
......@@ -451,19 +487,19 @@ int openPoseTutorialWrapper2()
// // 1: wait ~10 sec so the memory has been fully loaded on the GPU
// // 2: profile the GPU memory
// std::this_thread::sleep_for(std::chrono::milliseconds{1000});
// op::log("Random task here...", op::Priority::Max);
// op::log("Random task here...", op::Priority::High);
// // Keep program alive while running threads
// while (opWrapper.isRunning())
// std::this_thread::sleep_for(std::chrono::milliseconds{33});
// // Stop and join threads
// op::log("Stopping thread(s)", op::Priority::Max);
// op::log("Stopping thread(s)", op::Priority::High);
// opWrapper.stop();
// Measuring total time
const auto now = std::chrono::high_resolution_clock::now();
const auto totalTimeSec = (double)std::chrono::duration_cast<std::chrono::nanoseconds>(now-timerBegin).count() * 1e-9;
const auto message = "Real-time pose estimation demo successfully finished. Total time: " + std::to_string(totalTimeSec) + " seconds.";
op::log(message, op::Priority::Max);
op::log(message, op::Priority::High);
return 0;
}
......
......@@ -40,7 +40,7 @@ namespace op
* @param size Integer with the number of T elements to be allocated. E.g. size = 5 is internally similar to: new T[5].
* @param value Initial value for each component of the Array.
*/
explicit Array(const int size, const T value);
Array(const int size, const T value);
/**
* Array constructor.
......@@ -48,7 +48,7 @@ namespace op
* @param sizes Vector with the size of each dimension. E.g. sizes = {3, 5, 2} is internally similar to: new T[3*5*2].
* @param value Initial value for each component of the Array.
*/
explicit Array(const std::vector<int>& sizes, const T value);
Array(const std::vector<int>& sizes, const T value);
/**
* Copy constructor.
......
......@@ -94,11 +94,12 @@ namespace op
std::vector<std::array<Rectangle<float>, 2>> handRectangles;
/**
* Experimental (NOT IMPLEMENTED YET)
* Hands code is in development phase. Not included in this version.
* Size: #people's hands to render x 2 (right and left hand) x #hand parts (21) x 3 ((x,y) coordinates + score)
* Hand keypoints (x,y,score) locations for each person in the image.
* It has been resized to the same resolution as `poseKeypoints`.
* handKeypoints[0] corresponds to left hands, and handKeypoints[1] to right ones.
* Size: #people x #hand parts (21) x 3 ((x,y) coordinates + score)
*/
Array<float> handKeypoints;
std::array<Array<float>, 2> handKeypoints;
// -------------------------------------------------- Other parameters -------------------------------------------------- //
float scaleInputToOutput; /**< Scale ratio between the input Datum::cvInputData and the output Datum::cvOutputData. */
......
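With the per-hand layout above, downstream code indexes a pair of arrays instead of a single one. A minimal access sketch, assuming the usual op::Array accessors (empty(), getSize(int), flat operator[]) and an illustrative `datum` variable:

// handKeypoints[0] holds left hands, handKeypoints[1] right hands (see Datum above)
const auto& leftHands = datum.handKeypoints[0];
if (!leftHands.empty())
{
    const auto numberPeople = leftHands.getSize(0);  // #people
    const auto numberParts = leftHands.getSize(1);   // 21 hand parts
    // Score of (person 0, part 0) lives at flat index (0*numberParts + 0)*3 + 2
    const auto score = leftHands[(0*numberParts + 0)*3 + 2];
    op::log("People: " + std::to_string(numberPeople) + ", part-0 score: " + std::to_string(score), op::Priority::High);
}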
......@@ -20,6 +20,14 @@ namespace op
PAFs,
};
enum class DetectionMode : unsigned char
{
Fast,
Iterative,
Tracking,
IterativeAndTracking,
};
enum class RenderMode : unsigned char
{
None,
......
#ifndef OPENPOSE_CORE_POINT_HPP
#define OPENPOSE_CORE_POINT_HPP
#include <string>
namespace op
{
template<typename T>
......@@ -48,6 +50,13 @@ namespace op
return x * y;
}
/**
* It returns a string with the whole Point<T> data. Useful for debugging.
* The format is: `[x, y]`
* @return A string with the Point<T> values in the above format.
*/
std::string toString() const;
......@@ -141,7 +150,6 @@ namespace op
Point<T>& operator/=(const T value);
Point<T> operator/(const T value) const;
};
}
......
#ifndef OPENPOSE_CORE_RECTANGLE_HPP
#define OPENPOSE_CORE_RECTANGLE_HPP
#include <string>
#include "point.hpp"
namespace op
......@@ -61,6 +62,13 @@ namespace op
return width * height;
}
/**
* It returns a string with the whole Rectangle<T> data. Useful for debugging.
* The format is: `[x, y, width, height]`
* @return A string with the Rectangle<T> values in the above format.
*/
std::string toString() const;
// -------------------------------------------------- Basic Operators -------------------------------------------------- //
Rectangle<T>& operator*=(const T value);
......
......@@ -57,7 +57,7 @@ namespace op
// Rescale pose data
for (auto& tDatum : *tDatums)
{
std::vector<Array<float>> arraysToScale{tDatum.poseKeypoints, tDatum.handKeypoints, tDatum.faceKeypoints};
std::vector<Array<float>> arraysToScale{tDatum.poseKeypoints, tDatum.handKeypoints[0], tDatum.handKeypoints[1], tDatum.faceKeypoints};
spKeypointScaler->scale(arraysToScale, (float)tDatum.scaleInputToOutput, (float)tDatum.scaleNetToOutput, Point<int>{tDatum.cvInputData.cols, tDatum.cvInputData.rows});
}
// Profiling speed
......
......@@ -26,9 +26,10 @@ namespace op
// They might be modified on running time
const auto FACE_DEFAULT_NMS_THRESHOLD = 0.1f;
// Rendering default parameters
// Rendering parameters
const auto FACE_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT;
const auto FACE_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP;
const auto FACE_RENDER_THRESHOLD = 0.4f;
}
#endif // OPENPOSE_FACE_FACE_PARAMETERS_HPP
......@@ -22,7 +22,7 @@ namespace op
cv::Mat loadData(const std::string& cvMatName, const std::string& fileNameNoExtension, const DataFormat format);
// Json - Saving as *.json is not available in OpenCV versions < 3.0; this function is a quick fix
void savePoseJson(const Array<float>& pose, const std::string& fileName, const bool humanReadable);
void saveKeypointsJson(const Array<float>& pose, const std::string& fileName, const bool humanReadable, const std::string& keypointName);
// Save/load image
void saveImage(const cv::Mat& cvMat, const std::string& fullFilePath, const std::vector<int>& openCvCompressionParams = {CV_IMWRITE_JPEG_QUALITY, 100, CV_IMWRITE_PNG_COMPRESSION, 9});
......
......@@ -13,7 +13,7 @@ namespace op
public:
KeypointJsonSaver(const std::string& directoryPath);
void saveKeypoints(const std::vector<Array<float>>& keypointVector, const std::string& fileName, const std::string& keypointName) const;
void save(const std::vector<Array<float>>& keypointVector, const std::string& fileName, const std::string& keypointName) const;
};
}
......
......@@ -65,7 +65,7 @@ namespace op
for (auto i = 0; i < tDatumsNoPtr.size(); i++)
keypointVector[i] = tDatumsNoPtr[i].faceKeypoints;
const auto fileName = (!tDatumsNoPtr[0].name.empty() ? tDatumsNoPtr[0].name : std::to_string(tDatumsNoPtr[0].id));
spKeypointJsonSaver->saveKeypoints(keypointVector, fileName, "face");
spKeypointJsonSaver->save(keypointVector, fileName, "face_keypoints");
// Profiling speed
Profiler::timerEnd(profilerKey);
Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__, Profiler::DEFAULT_X);
......
......@@ -61,11 +61,16 @@ namespace op
// T* to T
auto& tDatumsNoPtr = *tDatums;
// Record people hand data in json format
const auto fileName = (!tDatumsNoPtr[0].name.empty() ? tDatumsNoPtr[0].name : std::to_string(tDatumsNoPtr[0].id));
std::vector<Array<float>> keypointVector(tDatumsNoPtr.size());
// Left hand
for (auto i = 0; i < tDatumsNoPtr.size(); i++)
keypointVector[i] = tDatumsNoPtr[i].handKeypoints;
const auto fileName = (!tDatumsNoPtr[0].name.empty() ? tDatumsNoPtr[0].name : std::to_string(tDatumsNoPtr[0].id));
spKeypointJsonSaver->saveKeypoints(keypointVector, fileName, "hand");
keypointVector[i] = tDatumsNoPtr[i].handKeypoints[0];
spKeypointJsonSaver->save(keypointVector, fileName, "hand_left_keypoints");
// Right hand
for (auto i = 0; i < tDatumsNoPtr.size(); i++)
keypointVector[i] = tDatumsNoPtr[i].handKeypoints[1];
spKeypointJsonSaver->save(keypointVector, fileName, "hand_right_keypoints");
// Profiling speed
Profiler::timerEnd(profilerKey);
Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__, Profiler::DEFAULT_X);
......
......@@ -62,11 +62,16 @@ namespace op
// T* to T
auto& tDatumsNoPtr = *tDatums;
// Record people hand keypoint data
const auto fileName = (!tDatumsNoPtr[0].name.empty() ? tDatumsNoPtr[0].name : std::to_string(tDatumsNoPtr[0].id));
std::vector<Array<float>> keypointVector(tDatumsNoPtr.size());
// Left hand
for (auto i = 0; i < tDatumsNoPtr.size(); i++)
keypointVector[i] = tDatumsNoPtr[i].handKeypoints;
const auto fileName = (!tDatumsNoPtr[0].name.empty() ? tDatumsNoPtr[0].name : std::to_string(tDatumsNoPtr[0].id));
spKeypointSaver->saveKeypoints(keypointVector, fileName, "hand");
keypointVector[i] = tDatumsNoPtr[i].handKeypoints[0];
spKeypointSaver->saveKeypoints(keypointVector, fileName, "hand_left");
// Right hand
for (auto i = 0; i < tDatumsNoPtr.size(); i++)
keypointVector[i] = tDatumsNoPtr[i].handKeypoints[1];
spKeypointSaver->saveKeypoints(keypointVector, fileName, "hand_right");
// Profiling speed
Profiler::timerEnd(profilerKey);
Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__, Profiler::DEFAULT_X);
......
......@@ -65,7 +65,7 @@ namespace op
for (auto i = 0; i < tDatumsNoPtr.size(); i++)
keypointVector[i] = tDatumsNoPtr[i].poseKeypoints;
const auto fileName = (!tDatumsNoPtr[0].name.empty() ? tDatumsNoPtr[0].name : std::to_string(tDatumsNoPtr[0].id));
spKeypointJsonSaver->saveKeypoints(keypointVector, fileName, "pose");
spKeypointJsonSaver->save(keypointVector, fileName, "pose_keypoints");
// Profiling speed
Profiler::timerEnd(profilerKey);
Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__, Profiler::DEFAULT_X);
......
......@@ -2,6 +2,7 @@
#define OPENPOSE_HAND_HAND_DETECTOR_HPP
#include <array>
#include <mutex>
#include <vector>
#include <openpose/core/array.hpp>
#include <openpose/core/point.hpp>
......@@ -12,6 +13,8 @@
namespace op
{
// Note: This class is thread-safe, so several GPUs can run hand extraction concurrently and call `updateTracker`;
// `updateTracker` keeps the latest known tracking state
class HandDetector
{
public:
......@@ -21,7 +24,7 @@ namespace op
std::vector<std::array<Rectangle<float>, 2>> trackHands(const Array<float>& poseKeypoints, const float scaleInputToOutput);
void updateTracker(const Array<float>& poseKeypoints, const Array<float>& handKeypoints);
void updateTracker(const Array<float>& poseKeypoints, const std::array<Array<float>, 2>& handKeypoints);
private:
enum class PosePart : unsigned int
......@@ -37,10 +40,13 @@ namespace op
const std::array<unsigned int, (int)PosePart::Size> mPoseIndexes;
std::vector<std::array<Point<float>, (int)PosePart::Size>> mPoseTrack;
std::vector<Rectangle<float>> mHandTrack;
std::vector<Rectangle<float>> mHandLeftPrevious;
std::vector<Rectangle<float>> mHandRightPrevious;
unsigned long long mCurrentId;
std::mutex mMutex;
std::array<unsigned int, (int)PosePart::Size> getPoseKeypoints(const PoseModel poseModel,
const std::array<std::string, (int)PosePart::Size>& poseStrings);
const std::array<std::string, (int)PosePart::Size>& poseStrings) const;
DELETE_COPY(HandDetector);
};
......
......@@ -20,13 +20,15 @@ namespace op
class HandExtractor
{
public:
explicit HandExtractor(const Point<int>& netInputSize, const Point<int>& netOutputSize, const std::string& modelFolder, const int gpuId);
explicit HandExtractor(const Point<int>& netInputSize, const Point<int>& netOutputSize, const std::string& modelFolder, const int gpuId,
const bool iterativeDetection = false);
void initializationOnThread();
void forwardPass(const std::vector<std::array<Rectangle<float>, 2>> handRectangles, const cv::Mat& cvInputData, const float scaleInputToOutput);
void forwardPass(const std::vector<std::array<Rectangle<float>, 2>> handRectangles, const cv::Mat& cvInputData,
const float scaleInputToOutput);
Array<float> getHandKeypoints() const;
std::array<Array<float>, 2> getHandKeypoints() const;
double get(const HandProperty property) const;
......@@ -35,13 +37,14 @@ namespace op
void increase(const HandProperty property, const double value);
private:
const bool mIterativeDetection;
const Point<int> mNetOutputSize;
std::array<std::atomic<double>, (int)HandProperty::Size> mProperties;
std::shared_ptr<Net> spNet;
std::shared_ptr<ResizeAndMergeCaffe<float>> spResizeAndMergeCaffe;
std::shared_ptr<NmsCaffe<float>> spNmsCaffe;
Array<float> mHandImageCrop;
Array<float> mHandKeypoints;
std::array<Array<float>, 2> mHandKeypoints;
// Init with thread
boost::shared_ptr<caffe::Blob<float>> spCaffeNetOutputBlob;
std::shared_ptr<caffe::Blob<float>> spHeatMapsBlob;
......@@ -50,6 +53,9 @@ namespace op
void checkThread() const;
void detectHandKeypoints(Array<float>& handCurrent, const float scaleInputToOutput, const int person, const cv::Mat& affineMatrix,
const unsigned int handPeaksOffset);
DELETE_COPY(HandExtractor);
};
}
......
......@@ -6,7 +6,7 @@
namespace op
{
const auto HAND_MAX_HANDS = 2*POSE_MAX_PEOPLE;
const auto HAND_MAX_HANDS = POSE_MAX_PEOPLE;
const auto HAND_NUMBER_PARTS = 21u;
#define HAND_PAIRS_RENDER_GPU {0,1, 1,2, 2,3, 3,4, 0,5, 5,6, 6,7, 7,8, 0,9, 9,10, 10,11, 11,12, 0,13, 13,14, 14,15, 15,16, 0,17, 17,18, 18,19, 19,20}
......@@ -47,9 +47,10 @@ namespace op
// They might be modified on running time
const auto HAND_DEFAULT_NMS_THRESHOLD = 0.1f;
// Rendering default parameters
// Rendering parameters
const auto HAND_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT;
const auto HAND_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP;
const auto HAND_RENDER_THRESHOLD = 0.05f;
}
#endif // OPENPOSE_HAND_HAND_PARAMETERS_HPP
#ifndef OPENPOSE_HAND_HAND_RENDERER_HPP
#define OPENPOSE_HAND_HAND_RENDERER_HPP
#include <array>
#include <openpose/core/array.hpp>
#include <openpose/core/enumClasses.hpp>
#include <openpose/core/point.hpp>
......@@ -20,16 +21,16 @@ namespace op
void initializationOnThread();
void renderHand(Array<float>& outputData, const Array<float>& handKeypoints);
void renderHand(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints);
private:
const Point<int> mFrameSize;
const RenderMode mRenderMode;
float* pGpuHand; // GPU aux memory
void renderHandCpu(Array<float>& outputData, const Array<float>& handKeypoints);
void renderHandCpu(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints) const;
void renderHandGpu(Array<float>& outputData, const Array<float>& handKeypoints);
void renderHandGpu(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints);
DELETE_COPY(HandRenderer);
};
......
......@@ -9,6 +9,8 @@
#include "handRenderer.hpp"
#include "renderHand.hpp"
#include "wHandDetector.hpp"
#include "wHandDetectorTracking.hpp"
#include "wHandDetectorUpdate.hpp"
#include "wHandExtractor.hpp"
#include "wHandRenderer.hpp"
......
#ifndef OPENPOSE_HAND_GPU_HAND_RENDER_HPP
#define OPENPOSE_HAND_GPU_HAND_RENDER_HPP
#include <array>
#include <openpose/core/array.hpp>
#include <openpose/core/point.hpp>
#include "handParameters.hpp"
namespace op
{
void renderHandKeypointsCpu(Array<float>& frameArray, const Array<float>& handKeypoints);
void renderHandKeypointsCpu(Array<float>& frameArray, const std::array<Array<float>, 2>& handKeypoints);
void renderHandKeypointsGpu(float* framePtr, const Point<int>& frameSize, const float* const handsPtr, const int numberHands,
const float alphaColorToAdd = HAND_DEFAULT_ALPHA_KEYPOINT);
......
#ifndef OPENPOSE_HAND_W_HAND_DETECTOR_TRACKING_HPP
#define OPENPOSE_HAND_W_HAND_DETECTOR_TRACKING_HPP
#include <memory> // std::shared_ptr
#include <openpose/thread/worker.hpp>
#include "handRenderer.hpp"
namespace op
{
template<typename TDatums>
class WHandDetectorTracking : public Worker<TDatums>
{
public:
explicit WHandDetectorTracking(const std::shared_ptr<HandDetector>& handDetector);
void initializationOnThread();
void work(TDatums& tDatums);
private:
std::shared_ptr<HandDetector> spHandDetector;
DELETE_COPY(WHandDetectorTracking);
};
}
// Implementation
#include <openpose/utilities/errorAndLog.hpp>
#include <openpose/utilities/macros.hpp>
#include <openpose/utilities/pointerContainer.hpp>
#include <openpose/utilities/profiler.hpp>
namespace op
{
template<typename TDatums>
WHandDetectorTracking<TDatums>::WHandDetectorTracking(const std::shared_ptr<HandDetector>& handDetector) :
spHandDetector{handDetector}
{
}
template<typename TDatums>
void WHandDetectorTracking<TDatums>::initializationOnThread()
{
}
template<typename TDatums>
void WHandDetectorTracking<TDatums>::work(TDatums& tDatums)
{
try
{
if (checkNoNullNorEmpty(tDatums))
{
// Debugging log
dLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
// Profiling speed
const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
// Detect people hand
for (auto& tDatum : *tDatums)
tDatum.handRectangles = spHandDetector->trackHands(tDatum.poseKeypoints, tDatum.scaleInputToOutput);
// Profiling speed
Profiler::timerEnd(profilerKey);
Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__, Profiler::DEFAULT_X);
// Debugging log
dLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
}
}
catch (const std::exception& e)
{
this->stop();
tDatums = nullptr;
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
}
}
COMPILE_TEMPLATE_DATUM(WHandDetectorTracking);
}
#endif // OPENPOSE_HAND_W_HAND_DETECTOR_TRACKING_HPP
#ifndef OPENPOSE_HAND_W_HAND_DETECTOR_UPDATE_HPP
#define OPENPOSE_HAND_W_HAND_DETECTOR_UPDATE_HPP
#include <memory> // std::shared_ptr
#include <openpose/thread/worker.hpp>
#include "handRenderer.hpp"
namespace op
{
template<typename TDatums>
class WHandDetectorUpdate : public Worker<TDatums>
{
public:
explicit WHandDetectorUpdate(const std::shared_ptr<HandDetector>& handDetector);
void initializationOnThread();
void work(TDatums& tDatums);
private:
std::shared_ptr<HandDetector> spHandDetector;
DELETE_COPY(WHandDetectorUpdate);
};
}
// Implementation
#include <openpose/utilities/errorAndLog.hpp>
#include <openpose/utilities/macros.hpp>
#include <openpose/utilities/pointerContainer.hpp>
#include <openpose/utilities/profiler.hpp>
namespace op
{
template<typename TDatums>
WHandDetectorUpdate<TDatums>::WHandDetectorUpdate(const std::shared_ptr<HandDetector>& handDetector) :
spHandDetector{handDetector}
{
}
template<typename TDatums>
void WHandDetectorUpdate<TDatums>::initializationOnThread()
{
}
template<typename TDatums>
void WHandDetectorUpdate<TDatums>::work(TDatums& tDatums)
{
try
{
if (checkNoNullNorEmpty(tDatums))
{
// Debugging log
dLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
// Profiling speed
const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
// Detect people hand
for (auto& tDatum : *tDatums)
spHandDetector->updateTracker(tDatum.poseKeypoints, tDatum.handKeypoints);
// Profiling speed
Profiler::timerEnd(profilerKey);
Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__, Profiler::DEFAULT_X);
// Debugging log
dLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
}
}
catch (const std::exception& e)
{
this->stop();
tDatums = nullptr;
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
}
}
COMPILE_TEMPLATE_DATUM(WHandDetectorUpdate);
}
#endif // OPENPOSE_HAND_W_HAND_DETECTOR_UPDATE_HPP
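Taken together with the Wrapper changes further below, each GPU's hand pipeline in tracking mode becomes detector-tracking -> extractor -> detector-update, all sharing a single HandDetector. A condensed sketch of that wiring (spWPoses, TDatumsPtr and the shared_ptr names are those used in the Wrapper code later in this diff):

// Tracking mode: predict rectangles from the previous frame, extract keypoints,
// then feed the new keypoints back into the shared tracker.
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandDetectorTracking<TDatumsPtr>>(handDetector));
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandExtractor<TDatumsPtr>>(handExtractor));
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandDetectorUpdate<TDatumsPtr>>(handDetector));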
......@@ -125,9 +125,10 @@ namespace op
const std::array<unsigned int, (int)PoseModel::Size> POSE_DEFAULT_CONNECT_MIN_SUBSET_CNT{ 3, 3, 3};
const std::array<float, (int)PoseModel::Size> POSE_DEFAULT_CONNECT_MIN_SUBSET_SCORE{ 0.4f, 0.4f, 0.4f};
// Rendering default parameters
// Rendering parameters
const auto POSE_DEFAULT_ALPHA_KEYPOINT = 0.6f;
const auto POSE_DEFAULT_ALPHA_HEAT_MAP = 0.7f;
const auto POSE_RENDER_THRESHOLD = 0.01f;
// Auxiliary functions
unsigned int poseBodyPartMapStringToKey(const PoseModel poseModel, const std::string& string);
......
......@@ -23,7 +23,7 @@ namespace op
* @param webcamResolution const Point<int> parameter which specifies the desired camera resolution.
* @param fps Double parameter which specifies the desired camera frame rate.
*/
explicit WebcamReader(const int webcamIndex = 0, const Point<int> webcamResolution = Point<int>{}, const double fps = 30.);
explicit WebcamReader(const int webcamIndex = 0, const Point<int>& webcamResolution = Point<int>{}, const double fps = 30.);
~WebcamReader();
......
......@@ -16,11 +16,12 @@ namespace op
void scaleKeypoints(Array<float>& keypoints, const float scaleX, const float scaleY, const float offsetX, const float offsetY);
void renderKeypointsCpu(Array<float>& frameArray, const Array<float>& keypoints, const std::vector<unsigned int>& pairs,
const std::vector<float> colors, const float thicknessCircleRatio, const float thicknessLineRatioWRTCircle);
const std::vector<float> colors, const float thicknessCircleRatio, const float thicknessLineRatioWRTCircle,
const float threshold);
Rectangle<unsigned int> getKeypointsRectangle(const float* keypointPtr, const int numberBodyParts, const float threshold);
Rectangle<float> getKeypointsRectangle(const float* keypointPtr, const int numberKeypoints, const float threshold);
int getKeypointsArea(const float* keypointPtr, const int numberBodyParts, const float threshold);
int getKeypointsArea(const float* keypointPtr, const int numberKeypoints, const float threshold);
int getBiggestPerson(const Array<float>& keypoints, const float threshold);
}
......
......@@ -513,7 +513,7 @@ namespace op
// Reset initial GPU to 0 (we want them all)
gpuNumberStart = 0;
// Logging message
log("Auto-detecting GPUs... Detected " + std::to_string(gpuNumber) + " GPU(s), using them all.", Priority::Max);
log("Auto-detecting GPUs... Detected " + std::to_string(gpuNumber) + " GPU(s), using them all.", Priority::High);
}
// Proper format
......@@ -625,10 +625,10 @@ namespace op
// Face extractor(s)
if (wrapperStructFace.enable)
{
const auto faceDetector = std::make_shared<FaceDetector>(wrapperStructPose.poseModel);
for (auto gpuId = 0; gpuId < spWPoses.size(); gpuId++)
{
// Face detector
const auto faceDetector = std::make_shared<FaceDetector>(wrapperStructPose.poseModel);
spWPoses.at(gpuId).emplace_back(std::make_shared<WFaceDetector<TDatumsPtr>>(faceDetector));
// Face keypoint extractor
const auto netOutputSize = wrapperStructFace.netInputSize;
......@@ -642,17 +642,29 @@ namespace op
// Hand extractor(s)
if (wrapperStructHand.enable)
{
const auto handDetector = std::make_shared<HandDetector>(wrapperStructPose.poseModel);
for (auto gpuId = 0; gpuId < spWPoses.size(); gpuId++)
{
// Hand detector
const auto handDetector = std::make_shared<HandDetector>(wrapperStructPose.poseModel);
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandDetector<TDatumsPtr>>(handDetector));
// If tracking
if (wrapperStructHand.detectionMode == DetectionMode::Tracking
|| wrapperStructHand.detectionMode == DetectionMode::IterativeAndTracking)
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandDetectorTracking<TDatumsPtr>>(handDetector));
// If detection
else
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandDetector<TDatumsPtr>>(handDetector));
// Hand keypoint extractor
const auto netOutputSize = wrapperStructHand.netInputSize;
const auto handExtractor = std::make_shared<HandExtractor>(
wrapperStructHand.netInputSize, netOutputSize, wrapperStructPose.modelFolder, gpuId + gpuNumberStart
wrapperStructHand.netInputSize, netOutputSize, wrapperStructPose.modelFolder, gpuId + gpuNumberStart,
(wrapperStructHand.detectionMode == DetectionMode::Iterative
|| wrapperStructHand.detectionMode == DetectionMode::IterativeAndTracking)
);
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandExtractor<TDatumsPtr>>(handExtractor));
// If tracking
if (wrapperStructHand.detectionMode == DetectionMode::Tracking
|| wrapperStructHand.detectionMode == DetectionMode::IterativeAndTracking)
spWPoses.at(gpuId).emplace_back(std::make_shared<WHandDetectorUpdate<TDatumsPtr>>(handDetector));
}
}
......@@ -697,14 +709,16 @@ namespace op
error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
}
// Hands renderer(s)
// Hand renderer(s)
if (renderHand)
{
// CPU rendering
if (wrapperStructHand.renderMode == RenderMode::Cpu)
{
// Construct hand renderer
const auto handRenderer = std::make_shared<HandRenderer>(finalOutputSize);
const auto handRenderer = std::make_shared<HandRenderer>(finalOutputSize, wrapperStructHand.alphaKeypoint,
wrapperStructHand.alphaHeatMap,
wrapperStructHand.renderMode);
// Add worker
cpuRenderers.emplace_back(std::make_shared<WHandRenderer<TDatumsPtr>>(handRenderer));
}
......@@ -714,8 +728,9 @@ namespace op
for (auto i = 0; i < spWPoses.size(); i++)
{
// Construct hands renderer
const auto handRenderer = std::make_shared<HandRenderer>(finalOutputSize, wrapperStructFace.alphaKeypoint,
wrapperStructFace.alphaHeatMap);
const auto handRenderer = std::make_shared<HandRenderer>(finalOutputSize, wrapperStructHand.alphaKeypoint,
wrapperStructHand.alphaHeatMap,
wrapperStructHand.renderMode);
// Performance boost -> share spGpuMemoryPtr for all renderers
if (!poseRenderers.empty())
{
......
......@@ -26,6 +26,12 @@ namespace op
*/
Point<int> netInputSize;
/**
* Whether to perform 1-time keypoint detection (fastest), iterative detection (recommended for images and fast videos, slowest method), or
* tracking (recommended for video and webcam, in practice as fast as 1-time detection).
*/
DetectionMode detectionMode;
/**
* Whether to render the output (pose locations, body, background or PAF heat maps) with CPU or GPU.
* Select `None` for no rendering, `Cpu` or `Gpu` for CPU and GPU rendering respectively.
......@@ -50,6 +56,7 @@ namespace op
* Since all the elements of the struct are public, they can also be manually filled.
*/
WrapperStructHand(const bool enable = false, const Point<int>& netInputSize = Point<int>{368, 368},
const DetectionMode detectionMode = DetectionMode::Fast,
const RenderMode renderMode = RenderMode::None,
const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT,
const float alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP);
......
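Given the extended constructor above, enabling hand estimation with tracking would look roughly like this (a usage sketch assuming the trailing alpha parameters keep their defaults; not taken from the commit):

// enable, netInputSize, detectionMode, renderMode (alphas keep their defaults)
const op::WrapperStructHand wrapperStructHand{true, op::Point<int>{368, 368},
                                              op::DetectionMode::Tracking,
                                              op::RenderMode::Gpu};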
......@@ -160,7 +160,8 @@ namespace op
datum.faceRectangles = faceRectangles;
datum.faceKeypoints = faceKeypoints.clone();
datum.handRectangles = handRectangles;
datum.handKeypoints = handKeypoints.clone();
datum.handKeypoints[0] = handKeypoints[0].clone();
datum.handKeypoints[1] = handKeypoints[1].clone();
// Other parameters
datum.scaleInputToOutput = scaleInputToOutput;
datum.scaleNetToOutput = scaleNetToOutput;
......
......@@ -73,6 +73,20 @@ namespace op
}
}
template<typename T>
std::string Point<T>::toString() const
{
try
{
return '[' + std::to_string(x) + ", " + std::to_string(y) + ']';
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return "";
}
}
template<typename T>
Point<T>& Point<T>::operator+=(const Point<T>& point)
{
......
......@@ -111,6 +111,20 @@ namespace op
}
}
template<typename T>
std::string Rectangle<T>::toString() const
{
try
{
return '[' + std::to_string(x) + ", " + std::to_string(y) + ", " + std::to_string(width) + ", " + std::to_string(height) + ']';
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return "";
}
}
template<typename T>
Rectangle<T>& Rectangle<T>::operator*=(const T value)
{
......
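Assuming the templates are instantiated for the usual types, the two new toString() helpers can be exercised like this (a sketch; requires <iostream> and the openpose Point/Rectangle headers, and the float format follows std::to_string's fixed six decimals):

const op::Point<int> point{12, 34};
const op::Rectangle<float> rectangle{1.f, 2.f, 30.f, 40.f};
std::cout << point.toString() << '\n';     // [12, 34]
std::cout << rectangle.toString() << '\n'; // [1.000000, 2.000000, 30.000000, 40.000000]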
......@@ -93,7 +93,7 @@ namespace op
const auto& faceRectangle = faceRectangles.at(person);
// Only consider faces with a minimum pixel area
const auto minFaceSize = fastMin(faceRectangle.width, faceRectangle.height);
// // Debugging
// // Debugging -> red rectangle
// log(std::to_string(cvInputData.cols) + " " + std::to_string(cvInputData.rows));
// cv::rectangle(cvInputDataCopy,
// cv::Point{(int)faceRectangle.x, (int)faceRectangle.y},
......@@ -102,7 +102,7 @@ namespace op
// Get parts
if (minFaceSize > 40)
{
// // Debugging
// // Debugging -> green rectangle overwriting red one
// log(std::to_string(cvInputData.cols) + " " + std::to_string(cvInputData.rows));
// cv::rectangle(cvInputDataCopy,
// cv::Point{(int)faceRectangle.x, (int)faceRectangle.y},
......
......@@ -20,7 +20,7 @@ namespace op
const auto& pairs = FACE_PAIRS_RENDER;
// Render keypoints
renderKeypointsCpu(frameArray, faceKeypoints, pairs, COLORS, thicknessCircleRatio, thicknessLineRatioWRTCircle);
renderKeypointsCpu(frameArray, faceKeypoints, pairs, COLORS, thicknessCircleRatio, thicknessLineRatioWRTCircle, FACE_RENDER_THRESHOLD);
}
}
catch (const std::exception& e)
......
......@@ -45,9 +45,8 @@ namespace op
{
if (numberFaces > 0)
{
const auto threshold = 0.4f;
const auto numBlocks = getNumberCudaBlocks(frameSize, THREADS_PER_BLOCK);
renderFaceParts<<<THREADS_PER_BLOCK, numBlocks>>>(framePtr, frameSize.x, frameSize.y, facePtr, numberFaces, threshold,
renderFaceParts<<<THREADS_PER_BLOCK, numBlocks>>>(framePtr, frameSize.x, frameSize.y, facePtr, numberFaces, FACE_RENDER_THRESHOLD,
alphaColorToAdd);
cudaCheck(__LINE__, __FUNCTION__, __FILE__);
}
......
......@@ -134,7 +134,7 @@ namespace op
}
}
void savePoseJson(const Array<float>& pose, const std::string& fileName, const bool humanReadable)
void saveKeypointsJson(const Array<float>& pose, const std::string& fileName, const bool humanReadable, const std::string& keypointName)
{
try
{
......@@ -159,7 +159,7 @@ namespace op
for (auto person = 0 ; person < numberPeople ; person++)
{
jsonOfstream.objectOpen();
jsonOfstream.key("body_parts");
jsonOfstream.key(keypointName);
jsonOfstream.arrayOpen();
// Body parts
for (auto bodyPart = 0 ; bodyPart < numberBodyParts ; bodyPart++)
......
......@@ -9,7 +9,7 @@ namespace op
{
}
void KeypointJsonSaver::saveKeypoints(const std::vector<Array<float>>& keypointVector, const std::string& fileName, const std::string& keypointName) const
void KeypointJsonSaver::save(const std::vector<Array<float>>& keypointVector, const std::string& fileName, const std::string& keypointName) const
{
try
{
......@@ -23,7 +23,7 @@ namespace op
for (auto i = 0; i < keypointVector.size(); i++)
{
const auto finalFileName = fileNameNoExtension + (i != 0 ? "_" + std::to_string(i) : "") + ".json";
savePoseJson(keypointVector[i], finalFileName, humanReadable);
saveKeypointsJson(keypointVector[i], finalFileName, humanReadable, keypointName);
}
}
}
......
......@@ -3,6 +3,8 @@
namespace op
{
DEFINE_TEMPLATE_DATUM(WHandDetector);
DEFINE_TEMPLATE_DATUM(WHandDetectorTracking);
DEFINE_TEMPLATE_DATUM(WHandDetectorUpdate);
DEFINE_TEMPLATE_DATUM(WHandExtractor);
DEFINE_TEMPLATE_DATUM(WHandRenderer);
}
......@@ -7,65 +7,127 @@
namespace op
{
inline std::array<Rectangle<float>, 2> getHandFromPoseIndexes(const Array<float>& poseKeypoints, const unsigned int personIndex, const unsigned int lWrist,
const unsigned int lElbow, const unsigned int lShoulder, const unsigned int rWrist,
const unsigned int rElbow, const unsigned int rShoulder, const float threshold)
inline Rectangle<float> getHandFromPoseIndexes(const Array<float>& poseKeypoints, const unsigned int person, const unsigned int wrist,
const unsigned int elbow, const unsigned int shoulder, const float threshold)
{
try
{
std::array<Rectangle<float>, 2> handRectangle;
const auto* posePtr = &poseKeypoints.at(personIndex*poseKeypoints.getSize(1)*poseKeypoints.getSize(2));
const auto lWristScoreAbove = (posePtr[lWrist*3+2] > threshold);
const auto lElbowScoreAbove = (posePtr[lElbow*3+2] > threshold);
const auto lShoulderScoreAbove = (posePtr[lShoulder*3+2] > threshold);
const auto rWristScoreAbove = (posePtr[rWrist*3+2] > threshold);
const auto rElbowScoreAbove = (posePtr[rElbow*3+2] > threshold);
const auto rShoulderScoreAbove = (posePtr[rShoulder*3+2] > threshold);
// const auto neckScoreAbove = (posePtr[neck*3+2] > threshold);
// const auto headNoseScoreAbove = (posePtr[headNose*3+2] > threshold);
const auto ratio = 0.33f;
auto& handLeftRectangle = handRectangle.at(0);
auto& handRightRectangle = handRectangle.at(1);
// Left hand
if (lWristScoreAbove && lElbowScoreAbove && lShoulderScoreAbove)
Rectangle<float> handRectangle;
// Parameters
const auto* posePtr = &poseKeypoints.at(person*poseKeypoints.getSize(1)*poseKeypoints.getSize(2));
const auto wristScoreAbove = (posePtr[wrist*3+2] > threshold);
const auto elbowScoreAbove = (posePtr[elbow*3+2] > threshold);
const auto shoulderScoreAbove = (posePtr[shoulder*3+2] > threshold);
const auto ratioWristElbow = 0.33f;
// Hand
if (wristScoreAbove && elbowScoreAbove && shoulderScoreAbove)
{
handLeftRectangle.x = posePtr[lWrist*3] + ratio * (posePtr[lWrist*3] - posePtr[lElbow*3]);
handLeftRectangle.y = posePtr[lWrist*3+1] + ratio * (posePtr[lWrist*3+1] - posePtr[lElbow*3+1]);
const auto distanceWristElbow = getDistance(posePtr, lWrist, lElbow);
const auto distanceElbowShoulder = getDistance(posePtr, lElbow, lShoulder);
// const auto distanceWristShoulder = getDistance(posePtr, lWrist, lShoulder);
// if (distanceWristElbow / distanceElbowShoulder > 0.85)
handLeftRectangle.width = 1.5f * fastMax(distanceWristElbow, 0.9f * distanceElbowShoulder);
// else
// handLeftRectangle.width = 1.5f * 0.9f * distanceElbowShoulder * fastMin(distanceElbowShoulder / distanceWristElbow, 3.f);
// somehow --> if distanceWristShoulder ~ distanceElbowShoulder --> do zoom in
// pos_hand = pos_wrist + ratio * (pos_wrist - pos_elbow) = (1 + ratio) * pos_wrist - ratio * pos_elbow
handRectangle.x = posePtr[wrist*3] + ratioWristElbow * (posePtr[wrist*3] - posePtr[elbow*3]);
handRectangle.y = posePtr[wrist*3+1] + ratioWristElbow * (posePtr[wrist*3+1] - posePtr[elbow*3+1]);
const auto distanceWristElbow = getDistance(posePtr, wrist, elbow);
const auto distanceElbowShoulder = getDistance(posePtr, elbow, shoulder);
handRectangle.width = 1.5f * fastMax(distanceWristElbow, 0.9f * distanceElbowShoulder);
}
// Right hand
if (rWristScoreAbove && rElbowScoreAbove && rShoulderScoreAbove)
// height = width
handRectangle.height = handRectangle.width;
// x-y refers to the center --> offset to topLeft point
handRectangle.x -= handRectangle.width / 2.f;
handRectangle.y -= handRectangle.height / 2.f;
// Return result
return handRectangle;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return Rectangle<float>{};
}
}
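// Worked example of the extrapolation above (numbers are illustrative only):
// with wrist = (100, 100), elbow = (70, 100) and ratioWristElbow = 0.33, the
// center becomes x = 100 + 0.33 * (100 - 70) = 109.9, i.e. the box is pushed
// past the wrist, away from the elbow, by a third of the forearm vector; the
// side length is then 1.5 * max(|wrist-elbow|, 0.9 * |elbow-shoulder|).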
inline std::array<Rectangle<float>, 2> getHandFromPoseIndexes(const Array<float>& poseKeypoints, const unsigned int person,
const unsigned int lWrist, const unsigned int lElbow, const unsigned int lShoulder,
const unsigned int rWrist, const unsigned int rElbow, const unsigned int rShoulder,
const float threshold)
{
try
{
return {getHandFromPoseIndexes(poseKeypoints, person, lWrist, lElbow, lShoulder, threshold),
getHandFromPoseIndexes(poseKeypoints, person, rWrist, rElbow, rShoulder, threshold)};
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return std::array<Rectangle<float>, 2>(); // Parentheses instead of braces to avoid error in GCC 4.8
}
}
float getAreaRatio(const Rectangle<float>& rectangleA, const Rectangle<float>& rectangleB)
{
try
{
// https://stackoverflow.com/a/22613463
const auto sA = rectangleA.area();
const auto sB = rectangleB.area();
const auto bottomRightA = rectangleA.bottomRight();
const auto bottomRightB = rectangleB.bottomRight();
const auto sI = fastMax(0.f, 1.f + fastMin(bottomRightA.x, bottomRightB.x) - fastMax(rectangleA.x, rectangleB.x))
* fastMax(0.f, 1.f + fastMin(bottomRightA.y, bottomRightB.y) - fastMax(rectangleA.y, rectangleB.y));
// // Option a - areaRatio = 1.f only if both Rectangle has same size and location
// const auto sU = sA + sB - sI;
// return sI / (float)sU;
// Option b - areaRatio = 1.f if at least one Rectangle is contained in the other
const auto sU = fastMin(sA, sB);
return fastMin(1.f, sI / (float)sU);
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return 0.f;
}
}
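// Illustrative numbers for option b above (assuming bottomRight() returns
// {x + width, y + height}): for A = {0, 0, 10, 10} and B = {5, 5, 10, 10},
// sI = (1 + 10 - 5) * (1 + 10 - 5) = 36 and sU = min(100, 100) = 100, so the
// ratio is 0.36; a rectangle fully contained in the other yields 1.f, which is
// the "same hand" signal the tracker relies on.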
void trackHand(Rectangle<float>& currentRectangle, const std::vector<Rectangle<float>>& previousHands)
{
try
{
if (currentRectangle.area() > 0 && previousHands.size() > 0)
{
handRightRectangle.x = posePtr[rWrist*3] + ratio * (posePtr[rWrist*3] - posePtr[rElbow*3]);
handRightRectangle.y = posePtr[rWrist*3+1] + ratio * (posePtr[rWrist*3+1] - posePtr[rElbow*3+1]);
handRightRectangle.width = 1.5f * fastMax(getDistance(posePtr, rWrist, rElbow), 0.9f * getDistance(posePtr, rElbow, rShoulder));
// Find closest previous rectangle
auto maxIndex = -1;
auto maxValue = 0.f;
for (auto previous = 0 ; previous < previousHands.size() ; previous++)
{
const auto areaRatio = getAreaRatio(currentRectangle, previousHands[previous]);
if (maxValue < areaRatio)
{
maxValue = areaRatio;
maxIndex = previous;
}
}
// Update current rectangle with closest previous rectangle
if (maxIndex > -1)
{
const auto& prevRectangle = previousHands[maxIndex];
const auto ratio = 2.f;
const auto newWidth = fastMax((currentRectangle.width * ratio + prevRectangle.width) * 0.5f,
(currentRectangle.height * ratio + prevRectangle.height) * 0.5f);
currentRectangle.x = 0.5f * (currentRectangle.x + prevRectangle.x + 0.5f * (currentRectangle.width + prevRectangle.width) - newWidth);
currentRectangle.y = 0.5f * (currentRectangle.y + prevRectangle.y + 0.5f * (currentRectangle.height + prevRectangle.height) - newWidth);
currentRectangle.width = newWidth;
currentRectangle.height = newWidth;
}
}
handLeftRectangle.height = handLeftRectangle.width;
handLeftRectangle.x -= handLeftRectangle.width / 2.f;
handLeftRectangle.y -= handLeftRectangle.height / 2.f;
handRightRectangle.height = handRightRectangle.width;
handRightRectangle.x -= handRightRectangle.width / 2.f;
handRightRectangle.y -= handRightRectangle.height / 2.f;
return handRectangle;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return std::array<Rectangle<float>, 2>{};
}
}
HandDetector::HandDetector(const PoseModel poseModel) :
mPoseIndexes{getPoseKeypoints(poseModel, {"LWrist", "LElbow", "LShoulder", "RWrist", "RElbow", "RShoulder"})}
// Parentheses instead of braces to avoid error in GCC 4.8
mPoseIndexes(getPoseKeypoints(poseModel, {"LWrist", "LElbow", "LShoulder", "RWrist", "RElbow", "RShoulder"})),
mCurrentId{0}
{
}
......@@ -75,7 +137,7 @@ namespace op
{
const auto numberPeople = poseKeypoints.getSize(0);
std::vector<std::array<Rectangle<float>, 2>> handRectangles(numberPeople);
const auto threshold = 0.25f;
const auto threshold = 0.03f;
// If no poseKeypoints detected -> no way to detect hand location
// Otherwise, get hand position(s)
if (!poseKeypoints.empty())
......@@ -104,7 +166,19 @@ namespace op
{
try
{
std::lock_guard<std::mutex> lock{mMutex};
// Baseline detectHands
auto handRectangles = detectHands(poseKeypoints, scaleInputToOutput);
// If previous hands saved
// for (auto current = 0 ; current < handRectangles.size() ; current++)
for (auto& handRectangle : handRectangles)
{
// trackHand(handRectangles[current][0], mHandLeftPrevious);
// trackHand(handRectangles[current][1], mHandRightPrevious);
trackHand(handRectangle[0], mHandLeftPrevious);
trackHand(handRectangle[1], mHandRightPrevious);
}
// Return result
return handRectangles;
}
catch (const std::exception& e)
......@@ -114,29 +188,37 @@ namespace op
}
}
void HandDetector::updateTracker(const Array<float>& poseKeypoints, const Array<float>& handKeypoints)
void HandDetector::updateTracker(const Array<float>& poseKeypoints, const std::array<Array<float>, 2>& handKeypoints)
{
try
{
std::lock_guard<std::mutex> lock{mMutex};
// Security checks
if (poseKeypoints.getSize(0) != handKeypoints.getSize(0))
if (poseKeypoints.getSize(0) != handKeypoints[0].getSize(0) || poseKeypoints.getSize(0) != handKeypoints[1].getSize(0))
error("Number people on poseKeypoints different than in handKeypoints.", __LINE__, __FUNCTION__, __FILE__);
// Parameters
const auto numberPeople = poseKeypoints.getSize(0);
const auto numberParts = poseKeypoints.getSize(1);
// const auto poseNumberParts = poseKeypoints.getSize(1);
const auto handNumberParts = handKeypoints[0].getSize(1);
const auto numberChannels = poseKeypoints.getSize(2);
const auto thresholdRectangle = 0.25f;
// Update pose keypoints and hand rectangles
mPoseTrack.resize(numberPeople);
mHandTrack.resize(numberPeople);
for (auto personIndex = 0 ; personIndex < mPoseTrack.size() ; personIndex++)
mHandLeftPrevious.clear();
mHandRightPrevious.clear();
for (auto person = 0 ; person < mPoseTrack.size() ; person++)
{
// Update pose keypoints
const auto* posePtr = &poseKeypoints.at(personIndex * numberParts * numberChannels);
for (auto j = 0 ; j < mPoseIndexes.size() ; j++)
mPoseTrack[personIndex][j] = Point<float>{posePtr[numberChannels*mPoseIndexes[j]], posePtr[numberChannels*mPoseIndexes[j+1]]};
// Update hand rectangles
// for (auto j = 0 ; j < mPoseIndexes.size() ; j++)
// mHandTrack[personIndex] = XXXXXXXXXXXXXXXXXx;
const auto offset = person * handNumberParts * numberChannels;
// Left hand
const auto* handLeftPtr = handKeypoints[0].getConstPtr() + offset;
const auto handLeftRectangle = getKeypointsRectangle(handLeftPtr, handNumberParts, thresholdRectangle);
if (handLeftRectangle.area() > 0)
mHandLeftPrevious.emplace_back(handLeftRectangle);
// Right hand
const auto* handRightPtr = handKeypoints[1].getConstPtr() + offset;
const auto handRightRectangle = getKeypointsRectangle(handRightPtr, handNumberParts, thresholdRectangle);
if (handRightRectangle.area() > 0)
mHandRightPrevious.emplace_back(handRightRectangle);
}
}
catch (const std::exception& e)
......@@ -145,16 +227,13 @@ namespace op
}
}
std::array<unsigned int, (int)HandDetector::PosePart::Size> HandDetector::getPoseKeypoints(const PoseModel poseModel,
const std::array<std::string,
(int)HandDetector::PosePart::Size>& poseStrings
)
std::array<unsigned int, (int)HandDetector::PosePart::Size> HandDetector::getPoseKeypoints(
const PoseModel poseModel, const std::array<std::string, (int)HandDetector::PosePart::Size>& poseStrings
) const
{
std::array<unsigned int, (int)PosePart::Size> poseKeypoints;
for (auto i = 0 ; i < poseKeypoints.size() ; i++)
{
poseKeypoints.at(i) = poseBodyPartMapStringToKey(poseModel, poseStrings.at(i));
}
return poseKeypoints;
}
}
#include <limits> // std::numeric_limits
#include <opencv2/opencv.hpp> // CV_WARP_INVERSE_MAP, CV_INTER_LINEAR
#include <openpose/core/netCaffe.hpp>
#include <openpose/hand/handParameters.hpp>
......@@ -5,14 +6,18 @@
#include <openpose/utilities/cuda.hpp>
#include <openpose/utilities/errorAndLog.hpp>
#include <openpose/utilities/fastMath.hpp>
#include <openpose/utilities/keypoint.hpp>
#include <openpose/utilities/openCv.hpp>
#include <openpose/hand/handExtractor.hpp>
namespace op
{
HandExtractor::HandExtractor(const Point<int>& netInputSize, const Point<int>& netOutputSize, const std::string& modelFolder, const int gpuId) :
HandExtractor::HandExtractor(const Point<int>& netInputSize, const Point<int>& netOutputSize, const std::string& modelFolder, const int gpuId,
const bool iterativeDetection) :
mIterativeDetection{iterativeDetection},
mNetOutputSize{netOutputSize},
spNet{std::make_shared<NetCaffe>(std::array<int,4>{1, 3, mNetOutputSize.y, mNetOutputSize.x}, modelFolder + HAND_PROTOTXT, modelFolder + HAND_TRAINED_MODEL, gpuId)},
spNet{std::make_shared<NetCaffe>(std::array<int,4>{1, 3, mNetOutputSize.y, mNetOutputSize.x}, modelFolder + HAND_PROTOTXT,
modelFolder + HAND_TRAINED_MODEL, gpuId)},
spResizeAndMergeCaffe{std::make_shared<ResizeAndMergeCaffe<float>>()},
spNmsCaffe{std::make_shared<NmsCaffe<float>>()},
mHandImageCrop{mNetOutputSize.area()*3}
......@@ -67,7 +72,8 @@ error("Hands extraction is not implemented yet. COMING SOON!", __LINE__, __FUNCT
}
}
void HandExtractor::forwardPass(const std::vector<std::array<Rectangle<float>, 2>> handRectangles, const cv::Mat& cvInputData, const float scaleInputToOutput)
void HandExtractor::forwardPass(const std::vector<std::array<Rectangle<float>, 2>> handRectangles, const cv::Mat& cvInputData,
const float scaleInputToOutput)
{
try
{
......@@ -82,7 +88,7 @@ error("Hands extraction is not implemented yet. COMING SOON!", __LINE__, __FUNCT
}
}
Array<float> HandExtractor::getHandKeypoints() const
std::array<Array<float>, 2> HandExtractor::getHandKeypoints() const
{
try
{
......@@ -92,7 +98,7 @@ error("Hands extraction is not implemented yet. COMING SOON!", __LINE__, __FUNCT
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return Array<float>{};
return std::array<Array<float>, 2>{};
}
}
......@@ -145,4 +151,22 @@ error("Hands extraction is not implemented yet. COMING SOON!", __LINE__, __FUNCT
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
}
}
void HandExtractor::detectHandKeypoints(Array<float>& handCurrent, const float scaleInputToOutput, const int person,
const cv::Mat& affineMatrix, const unsigned int handPeaksOffset)
{
try
{
error("Hands extraction is not implemented yet. COMING SOON!", __LINE__, __FUNCTION__, __FILE__);
UNUSED(handCurrent);
UNUSED(scaleInputToOutput);
UNUSED(person);
UNUSED(affineMatrix);
UNUSED(handPeaksOffset);
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
}
}
}
......@@ -40,7 +40,7 @@ namespace op
Renderer::initializationOnThread();
// GPU memory allocation for rendering
#ifndef CPU_ONLY
cudaMalloc((void**)(&pGpuHand), 2*HAND_NUMBER_PARTS * 3 * sizeof(float));
cudaMalloc((void**)(&pGpuHand), 2 * HAND_MAX_HANDS * HAND_NUMBER_PARTS * 3 * sizeof(float));
#endif
log("Finished initialization on thread.", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
}
......@@ -50,13 +50,15 @@ namespace op
}
}
void HandRenderer::renderHand(Array<float>& outputData, const Array<float>& handKeypoints)
void HandRenderer::renderHand(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints)
{
try
{
// Security checks
if (outputData.empty())
error("Empty Array<float> outputData.", __LINE__, __FUNCTION__, __FILE__);
if (handKeypoints[0].getSize(0) != handKeypoints[1].getSize(0))
error("Wrong hand format: handKeypoints.getSize(0) != handKeypoints.getSize(1).", __LINE__, __FUNCTION__, __FILE__);
// CPU rendering
if (mRenderMode == RenderMode::Cpu)
......@@ -72,7 +74,7 @@ namespace op
}
}
void HandRenderer::renderHandCpu(Array<float>& outputData, const Array<float>& handKeypoints)
void HandRenderer::renderHandCpu(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints) const
{
try
{
......@@ -84,21 +86,24 @@ namespace op
}
}
void HandRenderer::renderHandGpu(Array<float>& outputData, const Array<float>& handKeypoints)
void HandRenderer::renderHandGpu(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints)
{
try
{
// GPU rendering
#ifndef CPU_ONLY
const auto elementRendered = spElementToRender->load(); // I prefer std::round(T&) over intRound(T) for std::atomic
const auto numberPeople = handKeypoints.getSize(0);
const auto numberPeople = handKeypoints[0].getSize(0);
// GPU rendering
if (numberPeople > 0 && elementRendered == 0)
{
cpuToGpuMemoryIfNotCopiedYet(outputData.getPtr());
// Draw handKeypoints
cudaMemcpy(pGpuHand, handKeypoints.getConstPtr(), 2*HAND_NUMBER_PARTS*3 * sizeof(float), cudaMemcpyHostToDevice);
renderHandKeypointsGpu(*spGpuMemoryPtr, mFrameSize, pGpuHand, handKeypoints.getSize(0));
const auto handArea = handKeypoints[0].getSize(1)*handKeypoints[0].getSize(2);
const auto handVolume = numberPeople * handArea;
cudaMemcpy(pGpuHand, handKeypoints[0].getConstPtr(), handVolume * sizeof(float), cudaMemcpyHostToDevice);
cudaMemcpy(pGpuHand + handVolume, handKeypoints[1].getConstPtr(), handVolume * sizeof(float), cudaMemcpyHostToDevice);
renderHandKeypointsGpu(*spGpuMemoryPtr, mFrameSize, pGpuHand, 2 * numberPeople);
// CUDA check
cudaCheck(__LINE__, __FUNCTION__, __FILE__);
}
......
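The two cudaMemcpy calls above pack the left- and right-hand keypoints back to back in a single device buffer, which is why the kernel is launched with 2 * numberPeople hands. A host-side sketch of the layout arithmetic (plain C++ with hypothetical sizes; the OpenPose hand model has 21 keypoints):

#include <iostream>

int main()
{
    // people x parts x {x, y, score}
    const int numberPeople = 2;
    const int handNumberParts = 21;
    const int handArea = handNumberParts * 3;       // floats per hand
    const int handVolume = numberPeople * handArea; // floats per hand side
    // Left hands occupy [0, handVolume), right hands [handVolume, 2 * handVolume).
    std::cout << "right-hand offset: " << handVolume
              << ", total floats: " << 2 * handVolume << '\n';
}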
......@@ -8,20 +8,19 @@ namespace op
{
const std::vector<float> COLORS{HAND_COLORS_RENDER};
void renderHandKeypointsCpu(Array<float>& frameArray, const Array<float>& handKeypoints)
void renderHandKeypointsCpu(Array<float>& frameArray, const std::array<Array<float>, 2>& handKeypoints)
{
try
{
// Parameters
const auto thicknessCircleRatio = 1.f/50.f;
const auto thicknessLineRatioWRTCircle = 0.75f;
const auto& pairs = HAND_PAIRS_RENDER;
// Render keypoints
if (!frameArray.empty())
{
// Parameters
const auto thicknessCircleRatio = 1.f/50.f;
const auto thicknessLineRatioWRTCircle = 0.75f;
const auto& pairs = HAND_PAIRS_RENDER;
// Render keypoints
renderKeypointsCpu(frameArray, handKeypoints, pairs, COLORS, thicknessCircleRatio, thicknessLineRatioWRTCircle);
}
renderKeypointsCpu(frameArray, handKeypoints[0], pairs, COLORS, thicknessCircleRatio, thicknessLineRatioWRTCircle, HAND_RENDER_THRESHOLD);
if (!frameArray.empty())
renderKeypointsCpu(frameArray, handKeypoints[1], pairs, COLORS, thicknessCircleRatio, thicknessLineRatioWRTCircle, HAND_RENDER_THRESHOLD);
}
catch (const std::exception& e)
{
......
......@@ -45,12 +45,11 @@ namespace op
{
if (numberHands > 0)
{
const auto threshold = 0.05f;
dim3 threadsPerBlock;
dim3 numBlocks;
std::tie(threadsPerBlock, numBlocks) = getNumberCudaThreadsAndBlocks(frameSize);
renderHandsParts<<<threadsPerBlock, numBlocks>>>(framePtr, frameSize.x, frameSize.y, handsPtr,
numberHands, threshold, alphaColorToAdd);
numberHands, HAND_RENDER_THRESHOLD, alphaColorToAdd);
cudaCheck(__LINE__, __FUNCTION__, __FILE__);
}
}
......
......@@ -29,7 +29,7 @@ namespace op
const auto& colors = (poseModel == PoseModel::COCO_18 ? COCO_COLORS : MPI_COLORS);
// Render keypoints
renderKeypointsCpu(frameArray, poseKeypoints, pairs, colors, thicknessCircleRatio, thicknessLineRatioWRTCircle);
renderKeypointsCpu(frameArray, poseKeypoints, pairs, colors, thicknessCircleRatio, thicknessLineRatioWRTCircle, POSE_RENDER_THRESHOLD);
}
}
catch (const std::exception& e)
......
......@@ -263,27 +263,6 @@ namespace op
error("Alpha must be in the range [0, 1].", __LINE__, __FUNCTION__, __FILE__);
}
inline float getThresholdForPose(const PoseModel type)
{
try
{
if (type == PoseModel::COCO_18)
return 0.01f;
else if (type == PoseModel::MPI_15 || type == PoseModel::MPI_15_4)
return 0.f;
else
{
error("Unvalid Model", __LINE__, __FUNCTION__, __FILE__);
return 0.f;
}
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return 0.f;
}
}
inline void renderPosePAFGpuAux(float* framePtr, const PoseModel poseModel, const Point<int>& frameSize,
const float* const heatMapPtr, const Point<int>& heatMapSize, const float scaleToKeepRatio,
const int part, const int partsToRender, const float alphaBlending)
......@@ -323,14 +302,13 @@ namespace op
dim3 threadsPerBlock;
dim3 numBlocks;
std::tie(threadsPerBlock, numBlocks) = getNumberCudaThreadsAndBlocks(frameSize);
const auto threshold = getThresholdForPose(poseModel);
if (poseModel == PoseModel::COCO_18)
renderPoseCoco<<<threadsPerBlock, numBlocks>>>(framePtr, frameSize.x, frameSize.y, posePtr, numberPeople,
threshold, googlyEyes, blendOriginalFrame, alphaBlending);
POSE_RENDER_THRESHOLD, googlyEyes, blendOriginalFrame, alphaBlending);
else if (poseModel == PoseModel::MPI_15 || poseModel == PoseModel::MPI_15_4)
renderPoseMpi29Parts<<<threadsPerBlock, numBlocks>>>(framePtr, frameSize.x, frameSize.y, posePtr,
numberPeople, threshold, blendOriginalFrame, alphaBlending);
numberPeople, POSE_RENDER_THRESHOLD, blendOriginalFrame, alphaBlending);
else
error("Unvalid Model.", __LINE__, __FUNCTION__, __FILE__);
cudaCheck(__LINE__, __FUNCTION__, __FILE__);
......
......@@ -6,7 +6,7 @@
namespace op
{
WebcamReader::WebcamReader(const int webcamIndex, const Point<int> webcamResolution, const double fps) :
WebcamReader::WebcamReader(const int webcamIndex, const Point<int>& webcamResolution, const double fps) :
VideoCaptureReader{webcamIndex},
mFps{fps},
mFrameNameCounter{-1}
......
#include <limits> // std::numeric_limits
#include <opencv2/imgproc/imgproc.hpp> // cv::line, cv::circle
#include <openpose/utilities/errorAndLog.hpp>
#include <openpose/utilities/fastMath.hpp>
#include <openpose/utilities/keypoint.hpp>
......@@ -96,7 +98,8 @@ namespace op
}
void renderKeypointsCpu(Array<float>& frameArray, const Array<float>& keypoints, const std::vector<unsigned int>& pairs,
const std::vector<float> colors, const float thicknessCircleRatio, const float thicknessLineRatioWRTCircle)
const std::vector<float> colors, const float thicknessCircleRatio, const float thicknessLineRatioWRTCircle,
const float threshold)
{
try
{
......@@ -120,15 +123,15 @@ namespace op
// Parameters
const auto lineType = 8;
const auto shift = 0;
const auto threshold = 0.1;
const auto thresholdRectangle = 0.1;
const auto numberColors = colors.size();
const auto numberBodyParts = keypoints.getSize(1);
const auto areaKeypoints = numberBodyParts * keypoints.getSize(2);
const auto numberKeypoints = keypoints.getSize(1);
const auto areaKeypoints = numberKeypoints * keypoints.getSize(2);
// Keypoints
for (auto person = 0 ; person < keypoints.getSize(0) ; person++)
{
const auto personRectangle = getKeypointsRectangle(&keypoints[person*areaKeypoints], keypoints.getSize(1), threshold);
const auto personRectangle = getKeypointsRectangle(&keypoints[person*areaKeypoints], keypoints.getSize(1), thresholdRectangle);
const auto ratioAreas = fastMin(1.f, fastMax(personRectangle.width/(float)width, personRectangle.height/(float)height));
// Size-dependent variables
const auto thicknessCircle = fastMax(intRound(std::sqrt(area)*thicknessCircleRatio * ratioAreas), 2);
......@@ -179,18 +182,18 @@ namespace op
}
}
Rectangle<unsigned int> getKeypointsRectangle(const float* keypointPtr, const int numberBodyParts, const float threshold)
Rectangle<float> getKeypointsRectangle(const float* keypointPtr, const int numberKeypoints, const float threshold)
{
try
{
if (numberBodyParts < 1)
if (numberKeypoints < 1)
error("Number body parts must be > 0", __LINE__, __FUNCTION__, __FILE__);
unsigned int minX = -1;
unsigned int maxX = 0u;
unsigned int minY = -1;
unsigned int maxY = 0u;
for (auto part = 0 ; part < numberBodyParts ; part++)
float minX = std::numeric_limits<float>::max();
float maxX = 0.f;
float minY = minX;
float maxY = maxX;
for (auto part = 0 ; part < numberKeypoints ; part++)
{
const auto score = keypointPtr[3*part + 2];
if (score > threshold)
......@@ -209,20 +212,23 @@ namespace op
minY = y;
}
}
return Rectangle<unsigned int>{minX, minY, maxX-minX, maxY-minY};
if (maxX >= minX && maxY >= minY)
return Rectangle<float>{minX, minY, maxX-minX, maxY-minY};
else
return Rectangle<float>{};
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return Rectangle<unsigned int>{};
return Rectangle<float>{};
}
}
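// A quick sketch of the new float-based bounding box (hypothetical values;
// each keypoint is an {x, y, score} triple):
// const float keypoints[] = {10.f, 20.f, 0.9f, 40.f, 60.f, 0.8f, 0.f, 0.f, 0.01f};
// const auto rectangle = getKeypointsRectangle(keypoints, 3, 0.1f);
// // -> {x = 10, y = 20, width = 30, height = 40}: the low-score triple is ignored.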
int getKeypointsArea(const float* keypointPtr, const int numberBodyParts, const float threshold)
int getKeypointsArea(const float* keypointPtr, const int numberKeypoints, const float threshold)
{
try
{
return getKeypointsRectangle(keypointPtr, numberBodyParts, threshold).area();
return getKeypointsRectangle(keypointPtr, numberKeypoints, threshold).area();
}
catch (const std::exception& e)
{
......@@ -238,13 +244,13 @@ namespace op
if (!keypoints.empty())
{
const auto numberPeople = keypoints.getSize(0);
const auto numberBodyParts = keypoints.getSize(1);
const auto area = numberBodyParts * keypoints.getSize(2);
const auto numberKeypoints = keypoints.getSize(1);
const auto area = numberKeypoints * keypoints.getSize(2);
auto biggestPoseIndex = -1;
auto biggestArea = -1;
for (auto person = 0 ; person < numberPeople ; person++)
{
const auto newPersonArea = getKeypointsArea(&keypoints[person*area], numberBodyParts, threshold);
const auto newPersonArea = getKeypointsArea(&keypoints[person*area], numberKeypoints, threshold);
if (newPersonArea > biggestArea)
{
biggestArea = newPersonArea;
......
......@@ -2,10 +2,11 @@
namespace op
{
WrapperStructHand::WrapperStructHand(const bool enable_, const Point<int>& netInputSize_, const RenderMode renderMode_,
const float alphaKeypoint_, const float alphaHeatMap_) :
WrapperStructHand::WrapperStructHand(const bool enable_, const Point<int>& netInputSize_, const DetectionMode detectionMode_,
const RenderMode renderMode_, const float alphaKeypoint_, const float alphaHeatMap_) :
enable{enable_},
netInputSize{netInputSize_},
detectionMode{detectionMode_},
renderMode{renderMode_},
alphaKeypoint{alphaKeypoint_},
alphaHeatMap{alphaHeatMap_}
......