From 7b960082e25872a2cfabc2988c29ea42714c59f4 Mon Sep 17 00:00:00 2001
From: Frankie Robertson
Date: Thu, 29 Oct 2020 16:48:49 +0200
Subject: [PATCH] Improve Python API including enabled AsynchronousOut mode (#1593)

* Move producer configuration into configure(...) method of Python API
* Enable output from OpenPose in Python API by making VectorDatum an opaque STL type
* Return empty arrays as None in Python API
* Add type_caster for Array for datum.poseIds in Python API
* Add ThreadManagerMode enum to Python interface
* Update Python tutorial in line with updated interface and add demo of AsynchronousOut mode
* Remove static_cast of ThreadManagerMode which is no longer needed
* Only set up producer in Python API when SynchronousIn is set
* Fix up memory handling in emplaceAndPop
* Pass AsynchronousOut in async out example
---
 .../tutorial_api_python/01_body_from_image.py |   2 +-
 .../02_whole_body_from_image.py               |   2 +-
 .../04_keypoints_from_images.py               |   2 +-
 .../05_keypoints_from_images_multi_gpu.py     |   9 +-
 .../tutorial_api_python/06_face_from_image.py |   2 +-
 .../tutorial_api_python/07_hand_from_image.py |   2 +-
 .../08_heatmaps_from_image.py                 |   2 +-
 .../09_keypoints_from_heatmaps.py             |   2 +-
 .../12_asynchronous_custom_output.py          |  89 +++++++++++++
 .../tutorial_api_python/openpose_python.py    |   2 +-
 python/openpose/openpose_python.cpp           | 122 ++++++++++++++----
 11 files changed, 200 insertions(+), 36 deletions(-)
 create mode 100644 examples/tutorial_api_python/12_asynchronous_custom_output.py
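The changes listed above alter the calling convention used throughout the tutorials: datums now travel in an op.VectorDatum (the opaque binding of std::vector<std::shared_ptr<op::Datum>>) instead of a plain Python list, and emplaceAndPop/waitAndEmplace/waitAndPop report success with a bool. A minimal sketch of the updated synchronous flow follows, for illustration only; the import form, model folder and image path are assumptions to adapt to your own build.

# Sketch only: updated synchronous API after this patch. The import form,
# model folder and image path are assumptions for your own build/layout.
import cv2
from openpose import pyopenpose as op

params = dict()
params["model_folder"] = "../../../models/"      # assumed model location

opWrapper = op.WrapperPython()                   # defaults to ThreadManagerMode.Asynchronous
opWrapper.configure(params)
opWrapper.start()

datum = op.Datum()
datum.cvInputData = cv2.imread("image.png")      # hypothetical input image

# Datums are wrapped in an op.VectorDatum; emplaceAndPop now returns a bool.
datums = op.VectorDatum([datum])
if opWrapper.emplaceAndPop(datums):
    print("Body keypoints:\n" + str(datums[0].poseKeypoints))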
diff --git a/examples/tutorial_api_python/01_body_from_image.py b/examples/tutorial_api_python/01_body_from_image.py
index b7a32067..a6d65699 100644
--- a/examples/tutorial_api_python/01_body_from_image.py
+++ b/examples/tutorial_api_python/01_body_from_image.py
@@ -60,7 +60,7 @@ try:
     datum = op.Datum()
     imageToProcess = cv2.imread(args[0].image_path)
     datum.cvInputData = imageToProcess
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))

     # Display Image
     print("Body keypoints: \n" + str(datum.poseKeypoints))
diff --git a/examples/tutorial_api_python/02_whole_body_from_image.py b/examples/tutorial_api_python/02_whole_body_from_image.py
index 77766713..a09e654f 100644
--- a/examples/tutorial_api_python/02_whole_body_from_image.py
+++ b/examples/tutorial_api_python/02_whole_body_from_image.py
@@ -62,7 +62,7 @@ try:
     datum = op.Datum()
     imageToProcess = cv2.imread(args[0].image_path)
     datum.cvInputData = imageToProcess
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))

     # Display Image
     print("Body keypoints: \n" + str(datum.poseKeypoints))
diff --git a/examples/tutorial_api_python/04_keypoints_from_images.py b/examples/tutorial_api_python/04_keypoints_from_images.py
index f9f9dfc5..3a7e6cf5 100644
--- a/examples/tutorial_api_python/04_keypoints_from_images.py
+++ b/examples/tutorial_api_python/04_keypoints_from_images.py
@@ -67,7 +67,7 @@ try:
         datum = op.Datum()
         imageToProcess = cv2.imread(imagePath)
         datum.cvInputData = imageToProcess
-        opWrapper.emplaceAndPop([datum])
+        opWrapper.emplaceAndPop(op.VectorDatum([datum]))

         print("Body keypoints: \n" + str(datum.poseKeypoints))

diff --git a/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py b/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py
index a99995eb..6a2ffb25 100644
--- a/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py
+++ b/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py
@@ -71,7 +71,6 @@ try:
     for imageBaseId in range(0, len(imagePaths), numberGPUs):
         # Create datums
-        datums = []
         images = []

         # Read and push images into OpenPose wrapper
@@ -84,8 +83,7 @@ try:
                 datum = op.Datum()
                 images.append(cv2.imread(imagePath))
                 datum.cvInputData = images[-1]
-                datums.append(datum)
-                opWrapper.waitAndEmplace([datums[-1]])
+                opWrapper.waitAndEmplace(op.VectorDatum([datum]))

         # Retrieve processed results from OpenPose wrapper
         for gpuId in range(0, numberGPUs):
@@ -93,8 +91,9 @@ try:
             imageId = imageBaseId+gpuId
             if imageId < len(imagePaths):
-                datum = datums[gpuId]
-                opWrapper.waitAndPop([datum])
+                datums = op.VectorDatum()
+                opWrapper.waitAndPop(datums)
+                datum = datums[0]

                 print("Body keypoints: \n" + str(datum.poseKeypoints))
diff --git a/examples/tutorial_api_python/06_face_from_image.py b/examples/tutorial_api_python/06_face_from_image.py
index 40387e49..e96654a3 100644
--- a/examples/tutorial_api_python/06_face_from_image.py
+++ b/examples/tutorial_api_python/06_face_from_image.py
@@ -74,7 +74,7 @@ try:
     datum.faceRectangles = faceRectangles

     # Process and display image
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))
     print("Face keypoints: \n" + str(datum.faceKeypoints))
     cv2.imshow("OpenPose 1.6.0 - Tutorial Python API", datum.cvOutputData)
     cv2.waitKey(0)
diff --git a/examples/tutorial_api_python/07_hand_from_image.py b/examples/tutorial_api_python/07_hand_from_image.py
index 5227bf5c..0ecfa06b 100644
--- a/examples/tutorial_api_python/07_hand_from_image.py
+++ b/examples/tutorial_api_python/07_hand_from_image.py
@@ -86,7 +86,7 @@ try:
     datum.handRectangles = handRectangles

     # Process and display image
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))
     print("Left hand keypoints: \n" + str(datum.handKeypoints[0]))
     print("Right hand keypoints: \n" + str(datum.handKeypoints[1]))
     cv2.imshow("OpenPose 1.6.0 - Tutorial Python API", datum.cvOutputData)
diff --git a/examples/tutorial_api_python/08_heatmaps_from_image.py b/examples/tutorial_api_python/08_heatmaps_from_image.py
index 9f0ced11..6f79be99 100644
--- a/examples/tutorial_api_python/08_heatmaps_from_image.py
+++ b/examples/tutorial_api_python/08_heatmaps_from_image.py
@@ -64,7 +64,7 @@ try:
     datum = op.Datum()
     imageToProcess = cv2.imread(args[0].image_path)
     datum.cvInputData = imageToProcess
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))

     # Process outputs
     outputImageF = (datum.inputNetData[0].copy())[0,:,:,:] + 0.5
diff --git a/examples/tutorial_api_python/09_keypoints_from_heatmaps.py b/examples/tutorial_api_python/09_keypoints_from_heatmaps.py
index 35fdbd54..4cefc0e3 100644
--- a/examples/tutorial_api_python/09_keypoints_from_heatmaps.py
+++ b/examples/tutorial_api_python/09_keypoints_from_heatmaps.py
@@ -77,7 +77,7 @@ try:
     datum = op.Datum()
     datum.cvInputData = imageToProcess
     datum.poseNetOutput = poseHeatMaps
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))

     # Display Image
     print("Body keypoints: \n" + str(datum.poseKeypoints))
Python API", datum.cvOutputData) + key = cv2.waitKey(1) + return (key == 27) + + +def printKeypoints(datums): + datum = datums[0] + print("Body keypoints: \n" + str(datum.poseKeypoints)) + print("Face keypoints: \n" + str(datum.faceKeypoints)) + print("Left hand keypoints: \n" + str(datum.handKeypoints[0])) + print("Right hand keypoints: \n" + str(datum.handKeypoints[1])) + + +try: + # Import Openpose (Windows/Ubuntu/OSX) + dir_path = os.path.dirname(os.path.realpath(__file__)) + try: + # Windows Import + if platform == "win32": + # Change these variables to point to the correct folder (Release/x64 etc.) + sys.path.append(dir_path + '/../../python/openpose/Release'); + os.environ['PATH'] = os.environ['PATH'] + ';' + dir_path + '/../../x64/Release;' + dir_path + '/../../bin;' + import pyopenpose as op + else: + # Change these variables to point to the correct folder (Release/x64 etc.) + sys.path.append('../../python'); + # If you run `make install` (default path is `/usr/local/python` for Ubuntu), you can also access the OpenPose/python module from there. This will install OpenPose and the python library at your desired installation path. Ensure that this is in your python path in order to use it. + # sys.path.append('/usr/local/python') + from openpose import pyopenpose as op + except ImportError as e: + print('Error: OpenPose library could not be found. Did you enable `BUILD_PYTHON` in CMake and have this Python script in the right folder?') + raise e + + # Flags + parser = argparse.ArgumentParser() + parser.add_argument("--no-display", action="store_true", help="Disable display.") + args = parser.parse_known_args() + + # Custom Params (refer to include/openpose/flags.hpp for more parameters) + params = dict() + params["model_folder"] = "../../../models/" + + # Add others in path? 
diff --git a/examples/tutorial_api_python/openpose_python.py b/examples/tutorial_api_python/openpose_python.py
index 8c6630c2..6303ef50 100644
--- a/examples/tutorial_api_python/openpose_python.py
+++ b/examples/tutorial_api_python/openpose_python.py
@@ -52,7 +52,7 @@ try:
     # oppython = op.OpenposePython()

     # Starting OpenPose
-    opWrapper = op.WrapperPython(3)
+    opWrapper = op.WrapperPython(op.ThreadManagerMode.Synchronous)
     opWrapper.configure(params)
     opWrapper.execute()
 except Exception as e:
diff --git a/python/openpose/openpose_python.cpp b/python/openpose/openpose_python.cpp
index 0da4cf9b..ddb03a10 100644
--- a/python/openpose/openpose_python.cpp
+++ b/python/openpose/openpose_python.cpp
@@ -7,10 +7,13 @@
 #include
 #include
+#include <pybind11/stl_bind.h>
 #include
 #include
 #include

+PYBIND11_MAKE_OPAQUE(std::vector<std::shared_ptr<op::Datum>>);
+
 #ifdef _WIN32
     #define OP_EXPORT __declspec(dllexport)
 #else
@@ -78,13 +81,20 @@ namespace op
     class WrapperPython{
     public:
         std::unique_ptr<Wrapper> opWrapper;
+        bool synchronousIn;

-        WrapperPython(int mode = 0)
+        WrapperPython(ThreadManagerMode mode = ThreadManagerMode::Asynchronous)
         {
             opLog("Starting OpenPose Python Wrapper...", Priority::High);

             // Construct opWrapper
-            opWrapper = std::unique_ptr<Wrapper>(new Wrapper(static_cast<ThreadManagerMode>(mode)));
+            opWrapper = std::unique_ptr<Wrapper>(new Wrapper(mode));
+
+            // Synchronous in
+            synchronousIn = (
+                mode == ThreadManagerMode::AsynchronousOut ||
+                mode == ThreadManagerMode::Synchronous
+            );
         }

         void configure(py::dict params = py::dict())
@@ -168,6 +178,22 @@ namespace op
                 op::String(FLAGS_write_video_adam), op::String(FLAGS_write_bvh), op::String(FLAGS_udp_host),
                 op::String(FLAGS_udp_port)};
             opWrapper->configure(wrapperStructOutput);
+            if (synchronousIn) {
+                // SynchronousIn => We need a producer
+
+                // Producer (use default to disable any input)
+                const auto cameraSize = flagsToPoint(op::String(FLAGS_camera_resolution), "-1x-1");
+                ProducerType producerType;
+                op::String producerString;
+                std::tie(producerType, producerString) = flagsToProducer(
+                    op::String(FLAGS_image_dir), op::String(FLAGS_video), op::String(FLAGS_ip_camera), FLAGS_camera,
+                    FLAGS_flir_camera, FLAGS_flir_camera_index);
+                const WrapperStructInput wrapperStructInput{
+                    producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
+                    FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
+                    cameraSize, op::String(FLAGS_camera_parameter_path), FLAGS_frame_undistort, FLAGS_3d_views};
+                opWrapper->configure(wrapperStructInput);
+            }
             // No GUI. Equivalent to: opWrapper.configure(WrapperStructGui{});
             // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
             if (FLAGS_disable_multi_thread)
@@ -207,18 +233,6 @@
         {
             try
             {
-                const auto cameraSize = flagsToPoint(op::String(FLAGS_camera_resolution), "-1x-1");
-                ProducerType producerType;
-                op::String producerString;
-                std::tie(producerType, producerString) = flagsToProducer(
-                    op::String(FLAGS_image_dir), op::String(FLAGS_video), op::String(FLAGS_ip_camera), FLAGS_camera,
-                    FLAGS_flir_camera, FLAGS_flir_camera_index);
-                // Producer (use default to disable any input)
-                const WrapperStructInput wrapperStructInput{
-                    producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
-                    FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
-                    cameraSize, op::String(FLAGS_camera_parameter_path), FLAGS_frame_undistort, FLAGS_3d_views};
-                opWrapper->configure(wrapperStructInput);
                 // GUI (comment or use default argument to disable any visual output)
                 const WrapperStructGui wrapperStructGui{
                     flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen};
                 opWrapper->configure(wrapperStructGui);
@@ -231,29 +245,38 @@
             }
         }

-        void emplaceAndPop(std::vector<std::shared_ptr<Datum>>& l)
+        bool emplaceAndPop(std::vector<std::shared_ptr<Datum>>& l)
         {
             try
             {
-                auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<Datum>>>(l);
-                opWrapper->emplaceAndPop(datumsPtr);
+                std::shared_ptr<std::vector<std::shared_ptr<Datum>>> datumsPtr(
+                    &l,
+                    [](std::vector<std::shared_ptr<Datum>>*){}
+                );
+                auto got = opWrapper->emplaceAndPop(datumsPtr);
+                if (got && datumsPtr.get() != &l) {
+                    l.swap(*datumsPtr);
+                }
+                return got;
             }
             catch (const std::exception& e)
             {
                 error(e.what(), __LINE__, __FUNCTION__, __FILE__);
+                return false;
             }
         }

-        void waitAndEmplace(std::vector<std::shared_ptr<Datum>>& l)
+        bool waitAndEmplace(std::vector<std::shared_ptr<Datum>>& l)
        {
             try
             {
-                auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<Datum>>>(l);
-                opWrapper->waitAndEmplace(datumsPtr);
+                std::shared_ptr<std::vector<std::shared_ptr<Datum>>> datumsPtr(&l);
+                return opWrapper->waitAndEmplace(datumsPtr);
             }
             catch (const std::exception& e)
             {
                 error(e.what(), __LINE__, __FUNCTION__, __FILE__);
+                return false;
             }
         }

@@ -261,8 +284,12 @@
         {
             try
             {
-                auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<Datum>>>(l);
-                return opWrapper->waitAndPop(datumsPtr);
+                std::shared_ptr<std::vector<std::shared_ptr<Datum>>> datumsPtr;
+                auto got = opWrapper->waitAndPop(datumsPtr);
+                if (got) {
+                    l.swap(*datumsPtr);
+                }
+                return got;
             }
             catch (const std::exception& e)
             {
@@ -311,7 +338,7 @@
         // OpenposePython
         py::class_<WrapperPython>(m, "WrapperPython")
             .def(py::init<>())
-            .def(py::init<int>())
+            .def(py::init<ThreadManagerMode>())
             .def("configure", &WrapperPython::configure)
             .def("start", &WrapperPython::start)
             .def("stop", &WrapperPython::stop)
@@ -321,6 +348,14 @@
             .def("waitAndPop", &WrapperPython::waitAndPop)
             ;

+        // ThreadManagerMode
+        py::enum_<ThreadManagerMode>(m, "ThreadManagerMode")
+            .value("Asynchronous", ThreadManagerMode::Asynchronous)
+            .value("AsynchronousIn", ThreadManagerMode::AsynchronousIn)
+            .value("AsynchronousOut", ThreadManagerMode::AsynchronousOut)
+            .value("Synchronous", ThreadManagerMode::Synchronous)
+            ;
+
         // Datum Object
         py::class_<Datum, std::shared_ptr<Datum>>(m, "Datum")
             .def(py::init<>())
@@ -360,6 +395,8 @@
             .def_readwrite("elementRendered", &Datum::elementRendered)
             ;

+        py::bind_vector<std::vector<std::shared_ptr<Datum>>>(m, "VectorDatum");
+
         // Rectangle
         py::class_<Rectangle<float>>(m, "Rectangle")
             .def("__repr__", [](Rectangle<float> &a) { return a.toString(); })
@@ -431,6 +468,9 @@ template <> struct type_caster<op::Array<float>> {
         static handle cast(const op::Array<float> &m, return_value_policy, handle defval)
         {
             UNUSED(defval);
+            if (m.getSize().size() == 0) {
+                return none();
+            }
             std::string format = format_descriptor<float>::format();
             return array(buffer_info(
                 m.getPseudoConstPtr(),/* Pointer to buffer */
@@ -445,6 +485,42 @@
     };
 }} // namespace pybind11::detail

+// Numpy - op::Array interop
+namespace pybind11 { namespace detail {
+
+template <> struct type_caster<op::Array<long long>> {
+    public:
+
+        PYBIND11_TYPE_CASTER(op::Array<long long>, _("numpy.ndarray"));
+
+        // Cast numpy to op::Array
+        bool load(handle src, bool imp)
+        {
+            op::error("op::Array is read only now", __LINE__, __FUNCTION__, __FILE__);
+            return false;
+        }
+
+        // Cast op::Array to numpy
+        static handle cast(const op::Array<long long> &m, return_value_policy, handle defval)
+        {
+            UNUSED(defval);
+            if (m.getSize().size() == 0) {
+                return none();
+            }
+            std::string format = format_descriptor<long long>::format();
+            return array(buffer_info(
+                m.getPseudoConstPtr(),/* Pointer to buffer */
+                sizeof(long long),    /* Size of one scalar */
+                format,               /* Python struct-style format descriptor */
+                m.getSize().size(),   /* Number of dimensions */
+                m.getSize(),          /* Buffer dimensions */
+                m.getStride()         /* Strides (in bytes) for each index */
+            )).release();
+        }
+
+    };
+}} // namespace pybind11::detail
+
 // Numpy - op::Matrix interop
 namespace pybind11 { namespace detail {
-- 
GitLab
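One consequence of the type_caster changes above: op::Array members such as datum.poseKeypoints and datum.poseIds now come back to Python as None when they are empty, rather than as zero-size ndarrays, so downstream code should guard before indexing. A short sketch, for illustration only, assuming a datum returned by emplaceAndPop or waitAndPop as in the tutorials:

# Sketch only: guard against the None returned for empty op::Array results.
def summarizeDatum(datum):
    keypoints = datum.poseKeypoints          # numpy array of shape (people, parts, 3), or None
    if keypoints is None:
        print("No people detected")
    else:
        print("Detected %d people" % keypoints.shape[0])
    # datum.poseIds (op::Array<long long> on the C++ side) follows the same rule.
    if datum.poseIds is not None:
        print("Person ids: " + str(datum.poseIds))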