diff --git a/examples/tutorial_api_python/01_body_from_image.py b/examples/tutorial_api_python/01_body_from_image.py
index b7a320672ea469cce0486ff6e7bbc79256a35434..a6d65699b6e779bd57bb770ed0cf44e979cebc19 100644
--- a/examples/tutorial_api_python/01_body_from_image.py
+++ b/examples/tutorial_api_python/01_body_from_image.py
@@ -60,7 +60,7 @@ try:
     datum = op.Datum()
     imageToProcess = cv2.imread(args[0].image_path)
     datum.cvInputData = imageToProcess
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))

     # Display Image
     print("Body keypoints: \n" + str(datum.poseKeypoints))
diff --git a/examples/tutorial_api_python/02_whole_body_from_image.py b/examples/tutorial_api_python/02_whole_body_from_image.py
index 777667136d83870d5583424e75f5470617e78576..a09e654f1121d158f0a7216ff30a5a74a1579142 100644
--- a/examples/tutorial_api_python/02_whole_body_from_image.py
+++ b/examples/tutorial_api_python/02_whole_body_from_image.py
@@ -62,7 +62,7 @@ try:
     datum = op.Datum()
     imageToProcess = cv2.imread(args[0].image_path)
     datum.cvInputData = imageToProcess
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))

     # Display Image
     print("Body keypoints: \n" + str(datum.poseKeypoints))
diff --git a/examples/tutorial_api_python/04_keypoints_from_images.py b/examples/tutorial_api_python/04_keypoints_from_images.py
index f9f9dfc591fea1ee9d3ecce15601299446c9374e..3a7e6cf564762a5dfcc7ffe39eec26a76a77e2f1 100644
--- a/examples/tutorial_api_python/04_keypoints_from_images.py
+++ b/examples/tutorial_api_python/04_keypoints_from_images.py
@@ -67,7 +67,7 @@ try:
         datum = op.Datum()
         imageToProcess = cv2.imread(imagePath)
         datum.cvInputData = imageToProcess
-        opWrapper.emplaceAndPop([datum])
+        opWrapper.emplaceAndPop(op.VectorDatum([datum]))

         print("Body keypoints: \n" + str(datum.poseKeypoints))

diff --git a/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py b/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py
index a99995eb326fbf7b9e2e3f189648a3e2cdc864f8..6a2ffb2555a5ce19a1e211a61de19bb8169a3aa9 100644
--- a/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py
+++ b/examples/tutorial_api_python/05_keypoints_from_images_multi_gpu.py
@@ -71,7 +71,6 @@ try:
     for imageBaseId in range(0, len(imagePaths), numberGPUs):

         # Create datums
-        datums = []
         images = []

         # Read and push images into OpenPose wrapper
@@ -84,8 +83,7 @@ try:
                 datum = op.Datum()
                 images.append(cv2.imread(imagePath))
                 datum.cvInputData = images[-1]
-                datums.append(datum)
-                opWrapper.waitAndEmplace([datums[-1]])
+                opWrapper.waitAndEmplace(op.VectorDatum([datum]))

         # Retrieve processed results from OpenPose wrapper
         for gpuId in range(0, numberGPUs):
@@ -93,8 +91,9 @@ try:

             imageId = imageBaseId+gpuId
             if imageId < len(imagePaths):
-                datum = datums[gpuId]
-                opWrapper.waitAndPop([datum])
+                datums = op.VectorDatum()
+                opWrapper.waitAndPop(datums)
+                datum = datums[0]

                 print("Body keypoints: \n" + str(datum.poseKeypoints))

diff --git a/examples/tutorial_api_python/06_face_from_image.py b/examples/tutorial_api_python/06_face_from_image.py
index 40387e49473bb7634dadd095f4aa1379fda270a8..e96654a3d0c8f833925edf6f3b97401d09854619 100644
--- a/examples/tutorial_api_python/06_face_from_image.py
+++ b/examples/tutorial_api_python/06_face_from_image.py
@@ -74,7 +74,7 @@ try:
     datum.faceRectangles = faceRectangles

     # Process and display image
-    opWrapper.emplaceAndPop([datum])
+    opWrapper.emplaceAndPop(op.VectorDatum([datum]))
     print("Face keypoints: \n" + str(datum.faceKeypoints))
     cv2.imshow("OpenPose 1.6.0 - Tutorial Python API", datum.cvOutputData)
     cv2.waitKey(0)
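
All of the tutorial changes above follow the same pattern: the plain Python list handed to `emplaceAndPop` becomes the new opaque `op.VectorDatum` container, and the call now reports success. For reference, a minimal sketch of the updated single-image pattern, assuming a wrapper that was configured and started as in these scripts (the image path is only illustrative):

# Sketch of the updated call pattern; "image.jpg" is a placeholder path.
datum = op.Datum()
datum.cvInputData = cv2.imread("image.jpg")
datums = op.VectorDatum([datum])
if opWrapper.emplaceAndPop(datums):
    # Results come back in the same VectorDatum; keypoints can be None when
    # nothing was detected (see the op::Array caster change further below).
    print("Body keypoints: \n" + str(datums[0].poseKeypoints))
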
cv2.imshow("OpenPose 1.6.0 - Tutorial Python API", datum.cvOutputData) cv2.waitKey(0) diff --git a/examples/tutorial_api_python/07_hand_from_image.py b/examples/tutorial_api_python/07_hand_from_image.py index 5227bf5c3025df4bedda4ce1f6b1a4e2c419ceb9..0ecfa06b164ecdf07f90519407478039ffd8cdd2 100644 --- a/examples/tutorial_api_python/07_hand_from_image.py +++ b/examples/tutorial_api_python/07_hand_from_image.py @@ -86,7 +86,7 @@ try: datum.handRectangles = handRectangles # Process and display image - opWrapper.emplaceAndPop([datum]) + opWrapper.emplaceAndPop(op.VectorDatum([datum])) print("Left hand keypoints: \n" + str(datum.handKeypoints[0])) print("Right hand keypoints: \n" + str(datum.handKeypoints[1])) cv2.imshow("OpenPose 1.6.0 - Tutorial Python API", datum.cvOutputData) diff --git a/examples/tutorial_api_python/08_heatmaps_from_image.py b/examples/tutorial_api_python/08_heatmaps_from_image.py index 9f0ced118686a1cea94b24323d591c713cf19541..6f79be994ea2385a94083d490e848bcd4d150dbb 100644 --- a/examples/tutorial_api_python/08_heatmaps_from_image.py +++ b/examples/tutorial_api_python/08_heatmaps_from_image.py @@ -64,7 +64,7 @@ try: datum = op.Datum() imageToProcess = cv2.imread(args[0].image_path) datum.cvInputData = imageToProcess - opWrapper.emplaceAndPop([datum]) + opWrapper.emplaceAndPop(op.VectorDatum([datum])) # Process outputs outputImageF = (datum.inputNetData[0].copy())[0,:,:,:] + 0.5 diff --git a/examples/tutorial_api_python/09_keypoints_from_heatmaps.py b/examples/tutorial_api_python/09_keypoints_from_heatmaps.py index 35fdbd54ba2b51c77c6b8c81f6b3555c55ee8885..4cefc0e3fb110c00df60685b714663bbf2ad831f 100644 --- a/examples/tutorial_api_python/09_keypoints_from_heatmaps.py +++ b/examples/tutorial_api_python/09_keypoints_from_heatmaps.py @@ -77,7 +77,7 @@ try: datum = op.Datum() datum.cvInputData = imageToProcess datum.poseNetOutput = poseHeatMaps - opWrapper.emplaceAndPop([datum]) + opWrapper.emplaceAndPop(op.VectorDatum([datum])) # Display Image print("Body keypoints: \n" + str(datum.poseKeypoints)) diff --git a/examples/tutorial_api_python/12_asynchronous_custom_output.py b/examples/tutorial_api_python/12_asynchronous_custom_output.py new file mode 100644 index 0000000000000000000000000000000000000000..c54e52ebc3f0c06096da01aa47a05a7ce60e561a --- /dev/null +++ b/examples/tutorial_api_python/12_asynchronous_custom_output.py @@ -0,0 +1,89 @@ +# From Python +# It requires OpenCV installed for Python +import sys +import cv2 +import os +from sys import platform +import argparse + + +def display(datums): + datum = datums[0] + cv2.imshow("OpenPose 1.6.0 - Tutorial Python API", datum.cvOutputData) + key = cv2.waitKey(1) + return (key == 27) + + +def printKeypoints(datums): + datum = datums[0] + print("Body keypoints: \n" + str(datum.poseKeypoints)) + print("Face keypoints: \n" + str(datum.faceKeypoints)) + print("Left hand keypoints: \n" + str(datum.handKeypoints[0])) + print("Right hand keypoints: \n" + str(datum.handKeypoints[1])) + + +try: + # Import Openpose (Windows/Ubuntu/OSX) + dir_path = os.path.dirname(os.path.realpath(__file__)) + try: + # Windows Import + if platform == "win32": + # Change these variables to point to the correct folder (Release/x64 etc.) + sys.path.append(dir_path + '/../../python/openpose/Release'); + os.environ['PATH'] = os.environ['PATH'] + ';' + dir_path + '/../../x64/Release;' + dir_path + '/../../bin;' + import pyopenpose as op + else: + # Change these variables to point to the correct folder (Release/x64 etc.) 
+            sys.path.append('../../python');
+            # If you run `make install` (default path is `/usr/local/python` for Ubuntu), you can also access the OpenPose/python module from there. This will install OpenPose and the python library at your desired installation path. Ensure that this is in your python path in order to use it.
+            # sys.path.append('/usr/local/python')
+            from openpose import pyopenpose as op
+    except ImportError as e:
+        print('Error: OpenPose library could not be found. Did you enable `BUILD_PYTHON` in CMake and have this Python script in the right folder?')
+        raise e
+
+    # Flags
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--no-display", action="store_true", help="Disable display.")
+    args = parser.parse_known_args()
+
+    # Custom Params (refer to include/openpose/flags.hpp for more parameters)
+    params = dict()
+    params["model_folder"] = "../../../models/"
+
+    # Add others in path?
+    for i in range(0, len(args[1])):
+        curr_item = args[1][i]
+        if i != len(args[1])-1: next_item = args[1][i+1]
+        else: next_item = "1"
+        if "--" in curr_item and "--" in next_item:
+            key = curr_item.replace('-','')
+            if key not in params: params[key] = "1"
+        elif "--" in curr_item and "--" not in next_item:
+            key = curr_item.replace('-','')
+            if key not in params: params[key] = next_item
+
+    # Construct it from system arguments
+    # op.init_argv(args[1])
+    # oppython = op.OpenposePython()
+
+    # Starting OpenPose
+    opWrapper = op.WrapperPython(op.ThreadManagerMode.AsynchronousOut)
+    opWrapper.configure(params)
+    opWrapper.start()
+
+    # Main loop
+    userWantsToExit = False
+    while not userWantsToExit:
+        # Pop frame
+        datumProcessed = op.VectorDatum()
+        if opWrapper.waitAndPop(datumProcessed):
+            if not args[0].no_display:
+                # Display image
+                userWantsToExit = display(datumProcessed)
+            printKeypoints(datumProcessed)
+        else:
+            break
+except Exception as e:
+    print(e)
+    sys.exit(-1)
diff --git a/examples/tutorial_api_python/openpose_python.py b/examples/tutorial_api_python/openpose_python.py
index 8c6630c28383f90753d4002b8374bee14e75a9d9..6303ef5032e4293054dfd264958a0f5a35b07a83 100644
--- a/examples/tutorial_api_python/openpose_python.py
+++ b/examples/tutorial_api_python/openpose_python.py
@@ -52,7 +52,7 @@ try:
     # oppython = op.OpenposePython()

     # Starting OpenPose
-    opWrapper = op.WrapperPython(3)
+    opWrapper = op.WrapperPython(op.ThreadManagerMode.Synchronous)
    opWrapper.configure(params)
    opWrapper.execute()
 except Exception as e:
diff --git a/python/openpose/openpose_python.cpp b/python/openpose/openpose_python.cpp
index 0da4cf9b9d4fb06e1d2ba52fcb1b45104d18d6eb..ddb03a10856236c1fd0484fa563ed9044d899dd0 100644
--- a/python/openpose/openpose_python.cpp
+++ b/python/openpose/openpose_python.cpp
@@ -7,10 +7,13 @@
 #include
 #include
+#include <pybind11/stl_bind.h>
 #include
 #include
 #include

+PYBIND11_MAKE_OPAQUE(std::vector<std::shared_ptr<op::Datum>>);
+
 #ifdef _WIN32
     #define OP_EXPORT __declspec(dllexport)
 #else
@@ -78,13 +81,20 @@ namespace op
     class WrapperPython{
     public:
         std::unique_ptr<Wrapper> opWrapper;
+        bool synchronousIn;

-        WrapperPython(int mode = 0)
+        WrapperPython(ThreadManagerMode mode = ThreadManagerMode::Asynchronous)
         {
             opLog("Starting OpenPose Python Wrapper...", Priority::High);

             // Construct opWrapper
-            opWrapper = std::unique_ptr<Wrapper>(new Wrapper(static_cast<ThreadManagerMode>(mode)));
+            opWrapper = std::unique_ptr<Wrapper>(new Wrapper(mode));
+
+            // Synchronous in
+            synchronousIn = (
+                mode == ThreadManagerMode::AsynchronousOut ||
+                mode == ThreadManagerMode::Synchronous
+            );
         }

         void configure(py::dict params = py::dict())
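
With the constructor hunk above, the Python-visible `ThreadManagerMode` enum replaces the old magic integer, and modes whose input side runs inside OpenPose (`Synchronous`, `AsynchronousOut`) are remembered so that `configure()` can also build the producer, as the next hunk shows. A rough sketch of how this is meant to be driven from Python, assuming producer flags such as `video` are forwarded through the params dict the same way `model_folder` is (the video path is illustrative):

# Sketch only; the flag forwarding and the media path are assumptions.
params = dict()
params["model_folder"] = "../../../models/"
params["video"] = "examples/media/video.avi"

opWrapper = op.WrapperPython(op.ThreadManagerMode.AsynchronousOut)
opWrapper.configure(params)  # in this mode, an input producer is configured internally
opWrapper.start()
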
@@ -168,6 +178,22 @@ namespace op
                 op::String(FLAGS_write_video_adam), op::String(FLAGS_write_bvh), op::String(FLAGS_udp_host),
                 op::String(FLAGS_udp_port)};
             opWrapper->configure(wrapperStructOutput);
+            if (synchronousIn) {
+                // SynchronousIn => We need a producer
+
+                // Producer (use default to disable any input)
+                const auto cameraSize = flagsToPoint(op::String(FLAGS_camera_resolution), "-1x-1");
+                ProducerType producerType;
+                op::String producerString;
+                std::tie(producerType, producerString) = flagsToProducer(
+                    op::String(FLAGS_image_dir), op::String(FLAGS_video), op::String(FLAGS_ip_camera), FLAGS_camera,
+                    FLAGS_flir_camera, FLAGS_flir_camera_index);
+                const WrapperStructInput wrapperStructInput{
+                    producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
+                    FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
+                    cameraSize, op::String(FLAGS_camera_parameter_path), FLAGS_frame_undistort, FLAGS_3d_views};
+                opWrapper->configure(wrapperStructInput);
+            }
             // No GUI. Equivalent to: opWrapper.configure(WrapperStructGui{});
             // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
             if (FLAGS_disable_multi_thread)
@@ -207,18 +233,6 @@
         {
             try
             {
-                const auto cameraSize = flagsToPoint(op::String(FLAGS_camera_resolution), "-1x-1");
-                ProducerType producerType;
-                op::String producerString;
-                std::tie(producerType, producerString) = flagsToProducer(
-                    op::String(FLAGS_image_dir), op::String(FLAGS_video), op::String(FLAGS_ip_camera), FLAGS_camera,
-                    FLAGS_flir_camera, FLAGS_flir_camera_index);
-                // Producer (use default to disable any input)
-                const WrapperStructInput wrapperStructInput{
-                    producerType, producerString, FLAGS_frame_first, FLAGS_frame_step, FLAGS_frame_last,
-                    FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, FLAGS_frames_repeat,
-                    cameraSize, op::String(FLAGS_camera_parameter_path), FLAGS_frame_undistort, FLAGS_3d_views};
-                opWrapper->configure(wrapperStructInput);
                 // GUI (comment or use default argument to disable any visual output)
                 const WrapperStructGui wrapperStructGui{
                     flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen};
                 opWrapper->configure(wrapperStructGui);
@@ -231,29 +245,38 @@
             }
         }

-        void emplaceAndPop(std::vector<std::shared_ptr<Datum>>& l)
+        bool emplaceAndPop(std::vector<std::shared_ptr<Datum>>& l)
         {
             try
             {
-                auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<Datum>>>(l);
-                opWrapper->emplaceAndPop(datumsPtr);
+                std::shared_ptr<std::vector<std::shared_ptr<Datum>>> datumsPtr(
+                    &l,
+                    [](std::vector<std::shared_ptr<Datum>>*){}
+                );
+                auto got = opWrapper->emplaceAndPop(datumsPtr);
+                if (got && datumsPtr.get() != &l) {
+                    l.swap(*datumsPtr);
+                }
+                return got;
             }
             catch (const std::exception& e)
             {
                 error(e.what(), __LINE__, __FUNCTION__, __FILE__);
+                return false;
             }
         }

-        void waitAndEmplace(std::vector<std::shared_ptr<Datum>>& l)
+        bool waitAndEmplace(std::vector<std::shared_ptr<Datum>>& l)
         {
             try
             {
                 auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<Datum>>>(l);
-                opWrapper->waitAndEmplace(datumsPtr);
+                return opWrapper->waitAndEmplace(datumsPtr);
             }
             catch (const std::exception& e)
             {
                 error(e.what(), __LINE__, __FUNCTION__, __FILE__);
+                return false;
             }
         }

@@ -261,8 +284,12 @@
         {
             try
             {
-                auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<Datum>>>(l);
-                return opWrapper->waitAndPop(datumsPtr);
+                std::shared_ptr<std::vector<std::shared_ptr<Datum>>> datumsPtr;
+                auto got = opWrapper->waitAndPop(datumsPtr);
+                if (got) {
+                    l.swap(*datumsPtr);
+                }
+                return got;
             }
             catch (const std::exception& e)
             {
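
The three wrappers above now return a bool and hand results back through the caller's `VectorDatum`, which is what the updated multi-GPU tutorial (05) relies on. A condensed sketch of that push/pop pattern, assuming an already started wrapper and an `imagePaths` list as in that script (per-GPU batching omitted):

# Push the frames, then pop the processed results.
for imagePath in imagePaths:
    datum = op.Datum()
    datum.cvInputData = cv2.imread(imagePath)
    opWrapper.waitAndEmplace(op.VectorDatum([datum]))

for _ in imagePaths:
    datums = op.VectorDatum()
    if opWrapper.waitAndPop(datums):  # False once the wrapper has stopped
        print("Body keypoints: \n" + str(datums[0].poseKeypoints))
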
&WrapperPython::start) .def("stop", &WrapperPython::stop) @@ -321,6 +348,14 @@ namespace op .def("waitAndPop", &WrapperPython::waitAndPop) ; + // ThreadManagerMode + py::enum_(m, "ThreadManagerMode") + .value("Asynchronous", ThreadManagerMode::Asynchronous) + .value("AsynchronousIn", ThreadManagerMode::AsynchronousIn) + .value("AsynchronousOut", ThreadManagerMode::AsynchronousOut) + .value("Synchronous", ThreadManagerMode::Synchronous) + ; + // Datum Object py::class_>(m, "Datum") .def(py::init<>()) @@ -360,6 +395,8 @@ namespace op .def_readwrite("elementRendered", &Datum::elementRendered) ; + py::bind_vector>>(m, "VectorDatum"); + // Rectangle py::class_>(m, "Rectangle") .def("__repr__", [](Rectangle &a) { return a.toString(); }) @@ -431,6 +468,9 @@ template <> struct type_caster> { static handle cast(const op::Array &m, return_value_policy, handle defval) { UNUSED(defval); + if (m.getSize().size() == 0) { + return none(); + } std::string format = format_descriptor::format(); return array(buffer_info( m.getPseudoConstPtr(),/* Pointer to buffer */ @@ -445,6 +485,42 @@ template <> struct type_caster> { }; }} // namespace pybind11::detail +// Numpy - op::Array interop +namespace pybind11 { namespace detail { + +template <> struct type_caster> { + public: + + PYBIND11_TYPE_CASTER(op::Array, _("numpy.ndarray")); + + // Cast numpy to op::Array + bool load(handle src, bool imp) + { + op::error("op::Array is read only now", __LINE__, __FUNCTION__, __FILE__); + return false; + } + + // Cast op::Array to numpy + static handle cast(const op::Array &m, return_value_policy, handle defval) + { + UNUSED(defval); + if (m.getSize().size() == 0) { + return none(); + } + std::string format = format_descriptor::format(); + return array(buffer_info( + m.getPseudoConstPtr(),/* Pointer to buffer */ + sizeof(long long), /* Size of one scalar */ + format, /* Python struct-style format descriptor */ + m.getSize().size(), /* Number of dimensions */ + m.getSize(), /* Buffer dimensions */ + m.getStride() /* Strides (in bytes) for each index */ + )).release(); + } + + }; +}} // namespace pybind11::detail + // Numpy - op::Matrix interop namespace pybind11 { namespace detail {