diff --git a/CMakeLists.txt b/CMakeLists.txt
index fcbde4ebf083ca08f08bed67eac5467e2d2142b2..eeae1df652f047368da11f252b13acca5302f952 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -240,7 +240,7 @@ option(BUILD_EXAMPLES "Build OpenPose examples." ON)
 option(BUILD_DOCS "Build OpenPose documentation." OFF)
 option(BUILD_PYTHON "Build OpenPose python." OFF)
 if (WIN32)
-  option(BUILD_DLL "Copy all required DLL files into the same folder." ON)
+  option(BUILD_BIN_FOLDER "Copy all required 3rd-party DLL files into {build_directory}/bin. Disable to save some disk space." ON)
 endif ()
 
 # Build as shared library
@@ -521,15 +521,15 @@ if (WIN32)
   # Build DLL Must be on if Build Python is on
   if (BUILD_PYTHON)
-    if (NOT BUILD_DLL)
-      message(FATAL_ERROR "BUILD_DLL must be turned on to as well to build python library")
+    if (NOT BUILD_BIN_FOLDER)
+      message(FATAL_ERROR "BUILD_BIN_FOLDER must be turned on as well to build the python library")
     endif ()
   endif ()
 
   # Auto copy DLLs
-  if (BUILD_DLL)
-    file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
-    # Auto copy DLLs
+  if (BUILD_BIN_FOLDER)
+    # Locate DLLs
+    # Caffe DLLs
     if (${GPU_MODE} MATCHES "CUDA")
       file(GLOB CAFFE_DLL "${CMAKE_SOURCE_DIR}/3rdparty/windows/caffe/bin/*.dll")
     elseif (${GPU_MODE} MATCHES "OPENCL")
       file(GLOB CAFFE_DLL "${CMAKE_SOURCE_DIR}/3rdparty/windows/caffe_opencl/bin/*.dll")
@@ -537,12 +537,17 @@ if (WIN32)
     elseif (${GPU_MODE} MATCHES "CPU_ONLY")
       file(GLOB CAFFE_DLL "${CMAKE_SOURCE_DIR}/3rdparty/windows/caffe_cpu/bin/*.dll")
     endif ()
-    file(GLOB OPENCV_DLL "${CMAKE_SOURCE_DIR}/3rdparty/windows/opencv/x64/vc14/bin/*.dll")
+    # Caffe 3rd-party DLLs
     file(GLOB OPENCV3PTY_DLL "${CMAKE_SOURCE_DIR}/3rdparty/windows/caffe3rdparty/lib/*.dll")
-    file(COPY ${CAFFE_DLL} DESTINATION ${CMAKE_BINARY_DIR}/lib)
-    file(COPY ${OPENCV_DLL} DESTINATION ${CMAKE_BINARY_DIR}/lib)
-    file(COPY ${OPENCV3PTY_DLL} DESTINATION ${CMAKE_BINARY_DIR}/lib)
-  endif ()
+    # OpenCV DLLs
+    file(GLOB OPENCV_DLL "${CMAKE_SOURCE_DIR}/3rdparty/windows/opencv/x64/vc14/bin/*.dll")
+    # Copy DLLs into same folder
+    set(BIN_FOLDER ${CMAKE_BINARY_DIR}/bin)
+    file(MAKE_DIRECTORY ${BIN_FOLDER})
+    file(COPY ${CAFFE_DLL} DESTINATION ${BIN_FOLDER})
+    file(COPY ${OPENCV_DLL} DESTINATION ${BIN_FOLDER})
+    file(COPY ${OPENCV3PTY_DLL} DESTINATION ${BIN_FOLDER})
+  endif (BUILD_BIN_FOLDER)
 endif (WIN32)
diff --git a/doc/installation.md b/doc/installation.md
index 3909f9f6236d6da957cb9ffbc2708b95efe82206..88d1774b33b3b8e3c6e87e1a2abfae14e978db14 100644
--- a/doc/installation.md
+++ b/doc/installation.md
@@ -113,11 +113,9 @@ The instructions in this section describe the steps to build OpenPose using CMak
         - Ubuntu: Run `sudo ubuntu/install_cudnn.sh` or alternatively download and install it from their website.
         - Windows (and Ubuntu if manual installation): In order to manually install it, just unzip it and copy (merge) the contents on the CUDA folder, usually `/usr/local/cuda/` in Ubuntu and `C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v8.0` in Windows.
 3. AMD GPU version prerequisites:
-    1. [**AMD - Windows**](https://support.amd.com/en-us/download):
-        - Download official AMD drivers for Windows
-    2. [**AMD - OpenCL**](https://rocm.github.io/ROCmInstall.html):
-        - Download 3rd party ROCM driver for Ubuntu
-    3. AMD Drivers have not been tested on OSX. Please email us if you wish to test it. This has only been tested on Vega series cards
+    1. Download official AMD drivers for Windows from [**AMD - Windows**](https://support.amd.com/en-us/download).
+    2. Download 3rd party ROCM driver for Ubuntu from [**AMD - OpenCL**](https://rocm.github.io/ROCmInstall.html).
+    3. AMD Drivers have not been tested on OSX. Please email us if you wish to test it. This has only been tested on Vega series cards.
 4. Ubuntu - Other prerequisites:
     - Caffe prerequisites: By default, OpenPose uses Caffe under the hood. If you have not used Caffe previously, install its dependencies by running `sudo bash ./ubuntu/install_cmake.sh`.
     - OpenCV must be already installed on your machine. It can be installed with `apt-get install libopencv-dev`. You can also use your own compiled OpenCV version.
@@ -184,6 +182,8 @@ make -j`nproc`
 
 #### Windows
 In order to build the project, open the Visual Studio solution (Windows), called `build/OpenPose.sln`. Then, set the configuration from `Debug` to `Release` and press the green triangle icon (alternatively press F5).
+**VERY IMPORTANT NOTE**: In order to use OpenPose outside Visual Studio, and assuming you have not unchecked the `BUILD_BIN_FOLDER` flag in CMake, copy all DLLs from `{build_directory}/bin` into the folder where the generated `openpose.dll` and `*.exe` demos are, e.g., `{build_directory}/x64/Release` for the 64-bit release version.
+
 
 
 ### OpenPose from other Projects (Ubuntu and Mac)
diff --git a/doc/modules/calibration_module.md b/doc/modules/calibration_module.md
index 43a33bf8d05a9f888be223a82c7fdac52ffb8c72..062a7f484fbbefeb5b1fad6e0ad2f018aa77ff20 100644
--- a/doc/modules/calibration_module.md
+++ b/doc/modules/calibration_module.md
@@ -79,11 +79,12 @@ Note: In order to maximize calibration quality, **do not reuse the same video se
 
 ### Step 2 - Extrinsic Parameter Calibration
 
-1. After intrinsics calibration, save undirtoted images for all the camera views:
+1. **VERY IMPORTANT NOTE**: If you want to re-run the extrinsic parameter calibration over the same intrinsic XML files (e.g., if you move the camera location, but you know the intrinsics are the same), you must manually reset the camera matrix of each XML file to `1 0 0 0 0 1 0 0 0 0 1 0`.
+2. After intrinsics calibration, save undistorted images for all the camera views:
 ```sh
 ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --write_images ~/Desktop/extrinsics
 ```
-2. Run the extrinsic calibration tool between each pair of close cameras. In this example:
+3. Run the extrinsic calibration tool between each pair of close cameras. In this example:
     - We assume camera 0 to the right, 1 in the middle-right, 2 in the middle-left, and 3 in the left.
     - We assume camera 1 as the coordinate origin.
 ```sh
@@ -94,7 +95,7 @@ Note: In order to maximize calibration quality, **do not reuse the same video se
 # Note: Wait until calibration of camera index 2 with respect to 1 is completed, as information from camera 2 XML calibration file will be used:
 ./build/examples/calibration/calibration.bin --mode 2 --grid_square_size_mm 127.0 --grid_number_inner_corners 9x6 --omit_distortion --calibration_image_dir ~/Desktop/extrinsics/ --cam0 2 --cam1 3 --combine_cam0_extrinsics
 ```
-3. Hint to verify extrinsic calibration is successful:
+4. Hint to verify extrinsic calibration is successful:
     1. Translation vector - Global distance:
         1. Manually open each one of the generated XML files from the folder indicated by the flag `--camera_parameter_folder` (or the default one indicated by the `--help` flag if the former was not used).
         2. The field `CameraMatrix` is a 3 x 4 matrix (you can see that the subfield `rows` in that file is 3 and `cols` is 4).
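The camera-matrix reset described in the calibration note above can also be scripted. The following is a minimal, hypothetical Python sketch (not part of OpenPose): it assumes the intrinsic files live in the default `models/cameraParameters/flir/` folder (use whatever you passed to `--camera_parameter_folder`) and that every top-level node in those XML files is an OpenCV `opencv-matrix`; verify both assumptions against your own files before running it.

```python
# Hypothetical helper: reset CameraMatrix to [I | 0] in every intrinsic XML file
# before re-running extrinsic calibration. Assumes OpenCV FileStorage XML files in
# which all top-level nodes are matrices (check your own files first).
import glob

import cv2
import numpy as np

xml_folder = "models/cameraParameters/flir/"  # assumed default; adjust to your --camera_parameter_folder

for xml_path in sorted(glob.glob(xml_folder + "*.xml")):
    # Read every top-level matrix node so the rest of the file is preserved.
    reader = cv2.FileStorage(xml_path, cv2.FILE_STORAGE_READ)
    nodes = {key: reader.getNode(key).mat() for key in reader.root().keys()}
    reader.release()
    # Reset the extrinsics to "1 0 0 0  0 1 0 0  0 0 1 0", i.e., [R | t] = [I | 0].
    nodes["CameraMatrix"] = np.hstack([np.eye(3), np.zeros((3, 1))])
    # Write all nodes back under their original names.
    writer = cv2.FileStorage(xml_path, cv2.FILE_STORAGE_WRITE)
    for key, mat in nodes.items():
        writer.write(key, mat)
    writer.release()
    print("Reset CameraMatrix in", xml_path)
```

The same edit can of course be done by hand in each XML file, as the note itself suggests.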
diff --git a/doc/modules/python_module.md b/doc/modules/python_module.md
index 1dc9b00dd511f9f392e753725367d58df30d979c..94eddf90aa81d62103bdc3f14cc33cc4e9a091f0 100644
--- a/doc/modules/python_module.md
+++ b/doc/modules/python_module.md
@@ -16,7 +16,7 @@ At present the Python API only supports body pose. Hands and Face will be added
 
 ## Installation
 Check [doc/installation.md#python-module](./installation.md#python-module) for installation steps.
-To simply test the OpenPose API in your project without installation, ensure that the line `sys.path.append('{OpenPose_path}/python')` is set in your *.py files, where `{OpenPose_path}` points to your build folder of OpenPose. Take a look at `build/examples/tutorial_pose/1_extract_pose.py` for an example.
+To simply test the OpenPose API in your project without installation, ensure that the line `sys.path.append('{OpenPose_path}/python')` is set in your `*.py` files, where `{OpenPose_path}` points to your build folder of OpenPose. Take a look at `build/examples/tutorial_pose/1_extract_pose.py` for an example.
 
 On an Ubuntu or OSX based system, you may use it globally. Running `sudo make install` will install OpenPose by default into `/usr/local/python`. You can set this into your python path and start using it at any location.
 
@@ -47,4 +47,4 @@ python 1_extract_pose.py
 
 
 ## Code Sample
-See `examples/tutorial_python/1_extract_pose.py`.
+See [examples/tutorial_python/1_extract_pose.py](../../../master/examples/tutorial_python/1_extract_pose.py).
diff --git a/doc/quick_start.md b/doc/quick_start.md
index fdfb1ac5dd76ca058a67c03f218a0dd13b843b0c..6764d4fe84a1b9fb454a8e074dda65415121b8a2 100644
--- a/doc/quick_start.md
+++ b/doc/quick_start.md
@@ -29,7 +29,7 @@ bin\OpenPoseDemo.exe --video examples\media\video.avi
 bin\OpenPoseDemo.exe --video examples\media\video.avi --face --hand
 ```
 ```
-:: Windows - Library
+:: Windows - Library - Assuming you copied the DLLs following doc/installation.md#windows
 build\x64\Release\OpenPoseDemo.exe --video examples\media\video.avi
 :: With face and hands
 build\x64\Release\OpenPoseDemo.exe --video examples\media\video.avi --face --hand
@@ -51,7 +51,7 @@ bin\OpenPoseDemo.exe
 bin\OpenPoseDemo.exe --face --hand
 ```
 ```
-:: Windows - Library
+:: Windows - Library - Assuming you copied the DLLs following doc/installation.md#windows
 build\x64\Release\OpenPoseDemo.exe
 :: With face and hands
 build\x64\Release\OpenPoseDemo.exe --face --hand
@@ -73,7 +73,7 @@ bin\OpenPoseDemo.exe --image_dir examples\media\
 bin\OpenPoseDemo.exe --image_dir examples\media\ --face --hand
 ```
 ```
-:: Windows - Library
+:: Windows - Library - Assuming you copied the DLLs following doc/installation.md#windows
 build\x64\Release\OpenPoseDemo.exe --image_dir examples\media\
 :: With face and hands
 build\x64\Release\OpenPoseDemo.exe --image_dir examples\media\ --face --hand
@@ -96,9 +96,9 @@ bin\OpenPoseDemo.exe --net_resolution "1312x736" --scale_number 4 --scale_gap 0.
 bin\OpenPoseDemo.exe --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25 --hand --hand_scale_number 6 --hand_scale_range 0.4 --face
 ```
 ```
-:: Windows - Library: Body
+:: Windows - Library - Assuming you copied the DLLs following doc/installation.md#windows: Body
 build\x64\Release\OpenPoseDemo.exe --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25
-:: Windows - Library: Body + Hand + Face
+:: Windows - Library - Assuming you copied the DLLs following doc/installation.md#windows: Body + Hand + Face
 build\x64\Release\OpenPoseDemo.exe --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25 --hand --hand_scale_number 6 --hand_scale_range 0.4 --face
 ```
 
@@ -119,7 +119,7 @@ bin\OpenPoseDemo.exe --flir_camera --3d --number_people_max 1
 bin\OpenPoseDemo.exe --flir_camera --3d --number_people_max 1 --face --hand
 ```
 ```
-:: Windows - Library
+:: Windows - Library - Assuming you copied the DLLs following doc/installation.md#windows
 build\x64\Release\OpenPoseDemo.exe --flir_camera --3d --number_people_max 1
 :: With face and hands
 build\x64\Release\OpenPoseDemo.exe --flir_camera --3d --number_people_max 1 --face --hand
diff --git a/examples/tutorial_python/1_extract_pose.py b/examples/tutorial_python/1_extract_pose.py
index dfc48fead660e5c04676c836b49b5499b9d69167..d4bb4a5e6d5d7d7734c7b51c6ba2006cc1d71528 100644
--- a/examples/tutorial_python/1_extract_pose.py
+++ b/examples/tutorial_python/1_extract_pose.py
@@ -35,7 +35,7 @@ openpose = OpenPose(params)
 
 while 1:
     # Read new image
-    img = cv2.imread("image.png")
+    img = cv2.imread("../../../examples/media/COCO_val2014_000000000192.jpg")
     # Output keypoints and the image with the human skeleton blended on it
     keypoints, output_image = openpose.forward(img, True)
     # Print the human pose keypoints, i.e., a [#people x #keypoints x 3]-dimensional numpy object with the keypoints of all the people on that image
diff --git a/src/openpose/calibration/cameraParameterEstimation.cpp b/src/openpose/calibration/cameraParameterEstimation.cpp
index ac8c7e959609327b5faa93eb2ceb50995a8c28f1..0f4bcfddfc429ee84cb4205fc16da2df698c8f88 100644
--- a/src/openpose/calibration/cameraParameterEstimation.cpp
+++ b/src/openpose/calibration/cameraParameterEstimation.cpp
@@ -870,13 +870,16 @@ namespace op
         try
         {
             // Point --> cv::Size
+            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
             const cv::Size gridInnerCornersCvSize{gridInnerCorners.x, gridInnerCorners.y};
 
             // Read images in folder
+            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
             std::vector<std::vector<cv::Point2f>> points2DVectors;
             const auto imageAndPaths = getImageAndPaths(imagesFolder);
 
             // Get 2D grid corners of each image
+            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
             std::vector<cv::Mat> imagesWithCorners;
             const auto imageSize = imageAndPaths.at(0).first.size();
             for (auto i = 0u ; i < imageAndPaths.size() ; i++)
@@ -917,6 +920,7 @@ namespace op
             }
 
             // Run calibration
+            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
             // objects3DVector is the same one for each image
             const std::vector<std::vector<cv::Point3f>> objects3DVectors(points2DVectors.size(),
                                                                          getObjects3DVector(gridInnerCornersCvSize,
@@ -924,11 +928,13 @@ namespace op
             const auto intrinsics = calcIntrinsicParameters(imageSize, points2DVectors, objects3DVectors, flags);
 
             // Save intrinsics/results
+            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
             CameraParameterReader cameraParameterReader{serialNumber, intrinsics.cameraMatrix,
                                                         intrinsics.distortionCoefficients};
             cameraParameterReader.writeParameters(outputParameterFolder);
 
             // Save images with corners
+            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
             if (saveImagesWithCorners)
             {
                 const auto folderWhereSavingImages = imagesFolder + "images_with_corners/";
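As a usage note for the Python changes above: the snippet below is a minimal, hypothetical sketch of calling the wrapper from your own script, based only on the calls visible in `examples/tutorial_python/1_extract_pose.py` (`OpenPose(params)` and `openpose.forward(img, True)`). The `{OpenPose_path}` placeholder, the exact import line, and the contents of `params` are assumptions; copy them from `1_extract_pose.py` for a known-good configuration.

```python
# Hypothetical standalone usage of the OpenPose Python wrapper; mirror
# examples/tutorial_python/1_extract_pose.py for the exact import and params.
import sys

import cv2

# Make the wrapper importable (see doc/modules/python_module.md); replace
# {OpenPose_path} with your OpenPose build folder.
sys.path.append('{OpenPose_path}/python')
from openpose import OpenPose  # assumed import; the tutorial itself uses "from openpose import *"

params = dict()  # fill in from 1_extract_pose.py (model folder, net resolution, etc.)
openpose = OpenPose(params)

# Same sample image the updated tutorial now reads.
img = cv2.imread("examples/media/COCO_val2014_000000000192.jpg")
# forward() returns a [#people x #keypoints x 3] numpy array and, when the second
# argument is True, the input image with the skeleton blended on it.
keypoints, output_image = openpose.forward(img, True)
print(keypoints)
cv2.imwrite("result.png", output_image)
```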