From fd566ce91857f71389ff88a56c656ba8766369cf Mon Sep 17 00:00:00 2001 From: gineshidalgo99 Date: Wed, 26 Sep 2018 18:56:19 -0400 Subject: [PATCH] C++ API & examples improved --- CMakeLists.txt | 24 +- README.md | 6 +- doc/installation.md | 16 +- doc/library_add_new_module.md | 4 +- doc/library_extend_functionality.md | 4 +- doc/modules/3d_reconstruction_module.md | 2 +- doc/modules/python_module.md | 10 +- doc/release_notes.md | 14 +- examples/CMakeLists.txt | 7 +- examples/calibration/CMakeLists.txt | 2 +- examples/calibration/calibration.cpp | 20 +- examples/openpose/CMakeLists.txt | 2 +- examples/openpose/openpose.cpp | 231 +-- examples/tests/README.md | 4 + examples/tests/clTest.cpp | 15 +- examples/tests/handFromJsonTest.cpp | 15 +- examples/tests/resizeTest.cpp | 12 +- .../1_custom_post_processing.cpp | 229 +-- .../tutorial_api_cpp/1_body_from_image.cpp | 103 ++ .../2_whole_body_from_image.cpp | 85 ++ .../3_keypoints_from_image_configurable.cpp | 150 ++ ...nchronous_loop_custom_input_and_output.cpp | 314 +++++ .../5_asynchronous_loop_custom_output.cpp | 251 ++++ .../6_synchronous_custom_postprocessing.cpp | 222 +++ .../7_synchronous_custom_input.cpp | 266 ++++ .../8_synchronous_custom_output.cpp | 274 ++++ .../9_synchronous_custom_all.cpp | 362 +++++ .../CMakeLists.txt | 71 +- examples/tutorial_api_cpp/README.md | 2 + .../1_extract_pose.py | 0 .../CMakeLists.txt | 1 - examples/tutorial_api_python/README.md | 2 + .../CMakeLists.txt | 65 +- examples/tutorial_developer/README.md | 4 + .../pose_1_extract_from_image.cpp} | 2 +- ..._2_extract_pose_or_heatmat_from_image.cpp} | 2 +- .../python_1_pose_from_heatmaps.py} | 2 +- .../thread_1_openpose_read_and_display.cpp} | 4 +- .../thread_2_user_processing_function.cpp} | 4 +- ...ad_3_user_input_processing_and_output.cpp} | 2 +- ...ser_input_processing_output_and_datum.cpp} | 2 +- examples/tutorial_pose/CMakeLists.txt | 28 - .../2_user_synchronous_input.cpp | 477 ------- .../3_user_synchronous_output.cpp | 492 ------- 
.../4_user_synchronous_all.cpp | 541 ------- .../tutorial_wrapper/5_user_asynchronous.cpp | 493 ------- .../6_user_asynchronous_output.cpp | 461 ------ examples/user_code/README.md | 2 +- include/openpose/core/datum.hpp | 2 +- include/openpose/core/macros.hpp | 2 + include/openpose/face/faceExtractorNet.hpp | 1 - .../openpose/flags.hpp | 238 +--- include/openpose/hand/handExtractorNet.hpp | 1 - .../{pose => net}/bodyPartConnectorBase.hpp | 0 .../{pose => net}/bodyPartConnectorCaffe.hpp | 0 include/openpose/net/headers.hpp | 2 + include/openpose/pose/headers.hpp | 2 - include/openpose/pose/poseExtractorNet.hpp | 1 - include/openpose/producer/producer.hpp | 1 - include/openpose/producer/webcamReader.hpp | 1 - include/openpose/thread/thread.hpp | 1 - include/openpose/thread/wQueueAssembler.hpp | 2 - include/openpose/thread/wQueueOrderer.hpp | 2 - include/openpose/wrapper/enumClasses.hpp | 16 + include/openpose/wrapper/headers.hpp | 1 + include/openpose/wrapper/wrapper.hpp | 1246 +++-------------- include/openpose/wrapper/wrapperAuxiliary.hpp | 939 +++++++++++++ .../openpose/wrapper/wrapperStructFace.hpp | 2 +- .../openpose/wrapper/wrapperStructHand.hpp | 2 +- .../openpose/wrapper/wrapperStructPose.hpp | 4 +- python/openpose/_openpose.cpp | 556 ++++---- src/openpose/3d/poseTriangulation.cpp | 1 - .../calibration/cameraParameterEstimation.cpp | 1 - src/openpose/gui/gui.cpp | 2 - src/openpose/gui/guiInfoAdder.cpp | 1 - src/openpose/net/CMakeLists.txt | 3 + .../{pose => net}/bodyPartConnectorBase.cpp | 2 +- .../{pose => net}/bodyPartConnectorBase.cu | 8 +- .../{pose => net}/bodyPartConnectorCaffe.cpp | 4 +- src/openpose/pose/CMakeLists.txt | 3 - src/openpose/pose/poseExtractorCaffe.cpp | 2 +- src/openpose/producer/producer.cpp | 1 - src/openpose/producer/spinnakerWrapper.cpp | 3 - src/openpose/producer/webcamReader.cpp | 2 +- src/openpose/tracking/personIdExtractor.cpp | 1 - src/openpose/tracking/personTracker.cpp | 1 - src/openpose/utilities/profiler.cpp | 2 - 
src/openpose/wrapper/wrapperAuxiliary.cpp | 13 + 88 files changed, 3670 insertions(+), 4703 deletions(-) create mode 100644 examples/tests/README.md create mode 100644 examples/tutorial_api_cpp/1_body_from_image.cpp create mode 100644 examples/tutorial_api_cpp/2_whole_body_from_image.cpp create mode 100644 examples/tutorial_api_cpp/3_keypoints_from_image_configurable.cpp create mode 100644 examples/tutorial_api_cpp/4_asynchronous_loop_custom_input_and_output.cpp create mode 100644 examples/tutorial_api_cpp/5_asynchronous_loop_custom_output.cpp create mode 100644 examples/tutorial_api_cpp/6_synchronous_custom_postprocessing.cpp create mode 100644 examples/tutorial_api_cpp/7_synchronous_custom_input.cpp create mode 100644 examples/tutorial_api_cpp/8_synchronous_custom_output.cpp create mode 100644 examples/tutorial_api_cpp/9_synchronous_custom_all.cpp rename examples/{tutorial_wrapper => tutorial_api_cpp}/CMakeLists.txt (74%) create mode 100644 examples/tutorial_api_cpp/README.md rename examples/{tutorial_python => tutorial_api_python}/1_extract_pose.py (100%) rename examples/{tutorial_python => tutorial_api_python}/CMakeLists.txt (52%) create mode 100644 examples/tutorial_api_python/README.md rename examples/{tutorial_thread => tutorial_developer}/CMakeLists.txt (70%) create mode 100644 examples/tutorial_developer/README.md rename examples/{tutorial_pose/1_extract_from_image.cpp => tutorial_developer/pose_1_extract_from_image.cpp} (99%) rename examples/{tutorial_pose/2_extract_pose_or_heatmat_from_image.cpp => tutorial_developer/pose_2_extract_pose_or_heatmat_from_image.cpp} (99%) rename examples/{tutorial_python/2_pose_from_heatmaps.py => tutorial_developer/python_1_pose_from_heatmaps.py} (99%) rename examples/{tutorial_thread/1_openpose_read_and_display.cpp => tutorial_developer/thread_1_openpose_read_and_display.cpp} (98%) rename examples/{tutorial_thread/2_user_processing_function.cpp => tutorial_developer/thread_2_user_processing_function.cpp} (99%) rename 
examples/{tutorial_thread/3_user_input_processing_and_output.cpp => tutorial_developer/thread_3_user_input_processing_and_output.cpp} (99%) rename examples/{tutorial_thread/4_user_input_processing_output_and_datum.cpp => tutorial_developer/thread_4_user_input_processing_output_and_datum.cpp} (99%) delete mode 100644 examples/tutorial_pose/CMakeLists.txt delete mode 100644 examples/tutorial_wrapper/2_user_synchronous_input.cpp delete mode 100644 examples/tutorial_wrapper/3_user_synchronous_output.cpp delete mode 100644 examples/tutorial_wrapper/4_user_synchronous_all.cpp delete mode 100644 examples/tutorial_wrapper/5_user_asynchronous.cpp delete mode 100644 examples/tutorial_wrapper/6_user_asynchronous_output.cpp rename examples/tutorial_wrapper/1_user_synchronous_postprocessing.cpp => include/openpose/flags.hpp (68%) rename include/openpose/{pose => net}/bodyPartConnectorBase.hpp (100%) rename include/openpose/{pose => net}/bodyPartConnectorCaffe.hpp (100%) create mode 100644 include/openpose/wrapper/enumClasses.hpp rename src/openpose/{pose => net}/bodyPartConnectorBase.cpp (99%) rename src/openpose/{pose => net}/bodyPartConnectorBase.cu (98%) rename src/openpose/{pose => net}/bodyPartConnectorCaffe.cpp (99%) diff --git a/CMakeLists.txt b/CMakeLists.txt index f46ca8de..6c38a65b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -26,10 +26,28 @@ if (NOT WIN32 AND NOT UNIX AND NOT APPLE) endif (NOT WIN32 AND NOT UNIX AND NOT APPLE) -### BUILD_TYPE +### CMAKE_BUILD_TYPE # Default: Release -set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Choose the type of build." FORCE) -set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" "MinSizeRel" "RelWithDebInfo") +# Bug fixed: By default, it uses something different to Release, that provokes OpenPose to be about 15% slower than +# it should be. +# Is CMAKE_BUILD_TYPE "Debug" or "MinSizeRel" or "RelWithDebInfo"? 
+set(CMAKE_BUILD_TYPE_KNOWN FALSE) +if (${CMAKE_BUILD_TYPE} MATCHES "Debug") + set(CMAKE_BUILD_TYPE_KNOWN TRUE) +endif (${CMAKE_BUILD_TYPE} MATCHES "Debug") +if (${CMAKE_BUILD_TYPE} MATCHES "MinSizeRel") + set(CMAKE_BUILD_TYPE_KNOWN TRUE) +endif (${CMAKE_BUILD_TYPE} MATCHES "MinSizeRel") +if (${CMAKE_BUILD_TYPE} MATCHES "RelWithDebInfo") + set(CMAKE_BUILD_TYPE_KNOWN TRUE) +endif (${CMAKE_BUILD_TYPE} MATCHES "RelWithDebInfo") +# Assign proper CMAKE_BUILD_TYPE +if (${CMAKE_BUILD_TYPE_KNOWN}) + set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Choose the type of build.") +else (${CMAKE_BUILD_TYPE_KNOWN}) + set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Choose the type of build." FORCE) +endif (${CMAKE_BUILD_TYPE_KNOWN}) +set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Release" "Debug" "MinSizeRel" "RelWithDebInfo") ### FLAGS diff --git a/README.md b/README.md index 5fbf7792..163274b8 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ |-------------| |[![Build Status](https://travis-ci.org/CMU-Perceptual-Computing-Lab/openpose.svg?branch=master)](https://travis-ci.org/CMU-Perceptual-Computing-Lab/openpose)| -[OpenPose](https://github.com/CMU-Perceptual-Computing-Lab/openpose) represents the **first real-time multi-person system to jointly detect human body, hand, and facial keypoints (in total 135 keypoints) on single images**. +[OpenPose](https://github.com/CMU-Perceptual-Computing-Lab/openpose) represents the **first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints (in total 135 keypoints) on single images**.

@@ -113,9 +113,9 @@ bin\OpenPoseDemo.exe --video examples\media\video.avi - **Calibration toolbox**: To easily calibrate your cameras for 3-D OpenPose or any other stereo vision task. See [doc/modules/calibration_module.md](doc/modules/calibration_module.md). -- **OpenPose Wrapper**: If you want to read a specific input, and/or add your custom post-processing function, and/or implement your own display/saving, check the `Wrapper` tutorial on [examples/tutorial_wrapper/](examples/tutorial_wrapper/). You can create your custom code on [examples/user_code/](examples/user_code/) and quickly compile it with CMake when compiling the whole OpenPose project. Quickly **add your custom code**: See [examples/user_code/README.md](examples/user_code/README.md) for further details. +- **OpenPose C++ API**: If you want to read a specific input, and/or add your custom post-processing function, and/or implement your own display/saving, check the C++ API tutorial on [examples/tutorial_api_cpp/](examples/tutorial_api_cpp/) and [doc/library_introduction.md](doc/library_introduction.md). You can create your custom code on [examples/user_code/](examples/user_code/) and quickly compile it with CMake when compiling the whole OpenPose project. Quickly **add your custom code**: See [examples/user_code/README.md](examples/user_code/README.md) for further details. -- **OpenPose C++ API**: See [doc/library_introduction.md](doc/library_introduction.md). +- **OpenPose Python API**: Analogously to the C++ API, find the tutorial for the Python API on [examples/tutorial_api_python/](examples/tutorial_api_python/). - **Adding an extra module**: Check [doc/library_add_new_module.md](./doc/library_add_new_module.md). diff --git a/doc/installation.md b/doc/installation.md index 9d5dff25..bd4a954d 100644 --- a/doc/installation.md +++ b/doc/installation.md @@ -112,14 +112,20 @@ Any problem installing OpenPose? 
Check [doc/faq.md](./faq.md) and/or post a GitH - Windows: download and install the latest CMake win64-x64 msi installer from the [CMake website](https://cmake.org/download/), called `cmake-X.X.X-win64-x64.msi`. - Mac: `brew cask install cmake`. 3. Windows - **Microsoft Visual Studio (VS) 2015 Enterprise Update 3**: - - If **Visual Studio 2017 Community** is desired, we do not officially support it, but it might be compiled by firstly [enabling CUDA 8.0 in VS2017](https://stackoverflow.com/questions/43745099/using-cuda-with-visual-studio-2017?answertab=active#tab-top) or use **VS2017 with CUDA 9** by checking the `.vcxproj` file and changing the necessary paths from CUDA 8 to 9. - - VS 2015 Enterprise Update 1 will give some compiler errors and VS 2015 Community has not been tested. + - **IMPORTANT**: Enable all C++-related flags when selecting the components to install. + - Different VS versions: + - If **Visual Studio 2017 Community** is desired, we do not officially support it, but it might be compiled by firstly [enabling CUDA 8.0 in VS2017](https://stackoverflow.com/questions/43745099/using-cuda-with-visual-studio-2017?answertab=active#tab-top) or use **VS2017 with CUDA 9** by checking the `.vcxproj` file and changing the necessary paths from CUDA 8 to 9. + - VS 2015 Enterprise Update 1 will give some compiler errors. + - VS 2015 Community has not been tested. 4. Nvidia GPU version prerequisites: 1. [**CUDA 8**](https://developer.nvidia.com/cuda-80-ga2-download-archive): - Ubuntu: Run `sudo ubuntu/install_cuda.sh` or alternatively download and install it from their website. - - Windows: Install CUDA 8.0 after Visual Studio 2015 is installed to assure that the CUDA installation will generate all necessary files for VS. If CUDA was already installed, re-install - **IMPORTANT 1/2**: Nvidia V, any Nvidia with Volta architecture, and newer Nvidia model GPUs require at least CUDA 9. 
- - **IMPORTANT 2/2**: As of a recent Windows update, you might want to download the Nvidia [drivers](http://www.nvidia.com/Download/index.aspx) first, and then install CUDA without the Graphics Driver flag or else your system might hang. - 2. [**cuDNN 5.1**](https://developer.nvidia.com/cudnn): + - Windows: Install CUDA 8.0 after Visual Studio 2015 is installed to assure that the CUDA installation will generate all necessary files for VS. If CUDA was already installed, re-install it. + - **Important installation tips**: + - New Nvidia model GPUs (e.g., Nvidia V, GTX 2080, any Nvidia with Volta or Turing architecture, etc.) require at least CUDA 9. + - (Windows issue, reported Sep 2018): If your computer hangs when installing CUDA drivers, try installing first the [Nvidia drivers](http://www.nvidia.com/Download/index.aspx), and then installing CUDA without the Graphics Driver flag. + - (Windows): If CMake returns and error message similar to `CUDA_TOOLKIT_ROOT_DIR not found or specified` or any other CUDA component missing, then: 1) Re-install Visual Studio 2015; 2) Reboot your PC; 3) Re-install CUDA. + 2. [**cuDNN 5.1**](https://developer.nvidia.com/rdp/cudnn-archive): - Ubuntu: Run `sudo ubuntu/install_cudnn.sh` or alternatively download and install it from their website. - Windows (and Ubuntu if manual installation): In order to manually install it, just unzip it and copy (merge) the contents on the CUDA folder, usually `/usr/local/cuda/` in Ubuntu and `C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v8.0` in Windows. 5. AMD GPU version prerequisites: diff --git a/doc/library_add_new_module.md b/doc/library_add_new_module.md index a02228e3..fadce0da 100644 --- a/doc/library_add_new_module.md +++ b/doc/library_add_new_module.md @@ -8,7 +8,7 @@ In order to add a new module, these are the recommended steps in order to develo 2. Implement all the functionality in one `Worker` (i.e. inherit from `Worker` and implement all the functionality on that class). 1. 
The first letter of the class name should be `W` (e.g. `WHairExtractor`). 2. To initially simplify development: - 1. Initialize the Worker class with the specific std::shared_ptr> instead of directly using a template class (following the `examples/tutorial_wrapper` synchronous examples). + 1. Initialize the Worker class with the specific std::shared_ptr> instead of directly using a template class (following the `examples/tutorial_api_cpp` synchronous examples). 2. Use the whole op::Datum as unique argument of your auxiliary functions. 3. Use the OpenPose Wrapper in ThreadManagerMode::SingleThread mode (e.g. it allows you to directly use cv::imshow). 4. If you are using your own custom Caffe -> initially change the Caffe for your version. It should directly work. @@ -20,6 +20,8 @@ In order to add a new module, these are the recommended steps in order to develo 5. If the Workers need extra data from `Datum`, simply add into `Datum` the new variables required (without removing/modifying any previous variables!). 6. Read also the release steps before starting this developping phase. + + ## Release Steps In order to release the new module: diff --git a/doc/library_extend_functionality.md b/doc/library_extend_functionality.md index deec7c53..a11da00b 100644 --- a/doc/library_extend_functionality.md +++ b/doc/library_extend_functionality.md @@ -7,7 +7,7 @@ If you intend to extend the functionality of our library: 2. Check the basic library overview doc on [library_overview.md](library_overview.md). -3. Read, understand and play with the basic real time pose demo source code [examples/openpose/openpose.cpp](../examples/openpose/openpose.cpp) and [examples/tutorial_wrapper](../examples/tutorial_wrapper). It includes all the functionality of our library, and it has been properly commented. +3. 
Read, understand and play with the basic real time pose demo source code [examples/openpose/openpose.cpp](../examples/openpose/openpose.cpp) and [examples/tutorial_api_cpp](../examples/tutorial_api_cpp). It includes all the functionality of our library, and it has been properly commented. 4. Read, understand and play with the other tutorials in [examples/](../examples/). It includes more specific examples. @@ -15,6 +15,6 @@ If you intend to extend the functionality of our library: 6. Take a look to the stucuture of the already existing modules. -7. The C++ headers files add documentation in [Doxygen](http://www.doxygen.org/) format. Create this documentation by compiling the [include](../include/) folder with Doxygen. This documentation will be completed during the next few weeks/months. +7. The C++ headers files add documentation in [Doxygen](http://www.doxygen.org/) format. Create this documentation by compiling the [include](../include/) folder with Doxygen. This documentation is slowly but continuously improved. 8. You can also take a look to the source code or ask us on GitHub. diff --git a/doc/modules/3d_reconstruction_module.md b/doc/modules/3d_reconstruction_module.md index d0949344..98418ead 100644 --- a/doc/modules/3d_reconstruction_module.md +++ b/doc/modules/3d_reconstruction_module.md @@ -109,7 +109,7 @@ It should be similar to the following image. You can copy and modify the OpenPose 3-D demo to use any camera brand by: 1. You can optionally turn off the `WITH_FLIR_CAMERA` while compiling CMake. -2. Copy any of the `examples/tutorial_wrapper/*.cpp` examples (we recommend `2_user_synchronous.cpp`). +2. Copy `examples/tutorial_api_cpp/7_synchronous_custom_input.cpp` (or 9_synchronous_custom_all.cpp). 3. Modify `WUserInput` and add your custom code there. Your code should fill `Datum::name`, `Datum::cameraMatrix`, `Datum::cvInputData`, and `Datum::cvOutputData` (fill cvOutputData = cvInputData). 4. 
Remove `WUserPostProcessing` and `WUserOutput` (unless you want to have your custom post-processing and/or output). diff --git a/doc/modules/python_module.md b/doc/modules/python_module.md index 27b8892c..496ff9a8 100644 --- a/doc/modules/python_module.md +++ b/doc/modules/python_module.md @@ -35,21 +35,21 @@ pip install opencv-python ## Testing -Two examples can be found in `build/examples/tutorial_python` in your build folder. Navigate directly to this path to run examples. +Two examples can be found in `build/examples/tutorial_api_python` in your build folder. Navigate directly to this path to run examples. - `1_extract_pose` demonstrates a simple use of the API. -- `2_pose_from_heatmaps` demonstrates constructing pose from heatmaps from the caffe network. (Requires Python Caffe to be installed seperately) +- `2_pose_from_heatmaps` demonstrates constructing pose from heatmaps from the caffe network (Requires Python Caffe to be installed seperately, only tested on Ubuntu). ``` # From command line -cd build/examples/tutorial_python +cd build/examples/tutorial_api_python python 1_extract_pose.py ``` ## Exporting Python OpenPose -Note: This step is only required if you are moving the `*.py` files outside their original location, or writting new `*.py` scripts outside `build/examples/tutorial_python`. +Note: This step is only required if you are moving the `*.py` files outside their original location, or writting new `*.py` scripts outside `build/examples/tutorial_api_python`. - Option a, installing OpenPose: On an Ubuntu or OSX based system, you could install OpenPose by running `sudo make install`, you could then set the OpenPose path in your python scripts to the OpenPose installation path (default: `/usr/local/python`) and start using OpenPose at any location. Take a look at `build/examples/tutorial_pose/1_extract_pose.py` for an example. 
-- Option b, not Installing OpenPose: To move the OpenPose Python API demos to a different folder, ensure that the line `sys.path.append('{OpenPose_path}/python')` is properly set in your `*.py` files, where `{OpenPose_path}` points to your build folder of OpenPose. Take a look at `build/examples/tutorial_pose/1_extract_pose.py` for an example. +- Option b, not installing OpenPose: To move the OpenPose Python API demos to a different folder, ensure that the line `sys.path.append('{OpenPose_path}/python')` is properly set in your `*.py` files, where `{OpenPose_path}` points to your build folder of OpenPose. Take a look at `build/examples/tutorial_pose/1_extract_pose.py` for an example. diff --git a/doc/release_notes.md b/doc/release_notes.md index 790bd300..b9153ef0 100644 --- a/doc/release_notes.md +++ b/doc/release_notes.md @@ -57,7 +57,7 @@ OpenPose Library - Release Notes 9. WCocoJsonSaver finished and removed its 3599-image limit. 10. Added `--camera_fps` so generated video will use that frame rate. 11. Reduced the number of printed information messages. Default logging priority threshold increased to Priority::Max. - 12. Google flags to OpenPose configuration parameters reader moved from each demo to utilities/flagsToOpenPose. + 12. GFlags to OpenPose configuration parameters reader moved from each demo to utilities/flagsToOpenPose. 13. Nms classes do not use `numberParts` for `Reshape`, they deduce the value. 14. Improved documentation. 2. Functions or parameters renamed: @@ -259,15 +259,23 @@ OpenPose Library - Release Notes -## Current version - future OpenPose 1.4.1 +## Current version - future OpenPose 1.5.0 1. Main improvements: 1. Added initial single-person tracker for further speed up or visual smoothing (`--tracking` flag). 2. Greedy body part connector implemented in CUDA: +~30% speed up in Nvidia (CUDA) version with default flags and +~10% in maximum accuracy configuration. In addition, it provides a small 0.5% boost in accuracy (default flags). 3. 
OpenPose can be built as Unity plugin: Added flag `BUILD_UNITY_SUPPORT` and special Unity code. 4. If camera is unplugged, OpenPose GUI and command line will display a warning and try to reconnect it. + 5. Wrapper classes simplified and renamed. + 6. API and examples improved: + 1. New header file `flags.hpp` that includes all OpenPose flags, removing the need to copy them repeatedly on each OpenPose example file. + 2. `tutorial_wrapper` renamed as `tutorial_api_cpp` as well as new examples were added. + 2. `tutorial_python` renamed as `tutorial_api_python` as well as new examples were added. + 3. `tutorial_pose` and `tutorial_thread` renamed as `tutorial_developer`, not meant to be used by users, but rather for OpenPose developers. 2. Functions or parameters renamed: - 1. By default, python example `2_pose_from_heatmaps.py` was using 2 scales starting at -1x736, changed to 1 scale at -1x368. + 1. By default, python example `tutorial_developer/python_2_pose_from_heatmaps.py` was using 2 scales starting at -1x736, changed to 1 scale at -1x368. + 2. WrapperStructPose default parameters changed to match those of the OpenPose demo binary. 3. Main bugs fixed: + 1. CMake-GUI was forcing to Release mode, allowed Debug modes too. 
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index b2df7751..167ec367 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,10 +1,9 @@ add_subdirectory(calibration) add_subdirectory(openpose) add_subdirectory(tutorial_add_module) -add_subdirectory(tutorial_pose) -add_subdirectory(tutorial_python) -add_subdirectory(tutorial_thread) -add_subdirectory(tutorial_wrapper) +add_subdirectory(tutorial_api_python) +add_subdirectory(tutorial_api_cpp) +add_subdirectory(tutorial_developer) add_subdirectory(user_code) if (UNIX OR APPLE) add_subdirectory(tests) diff --git a/examples/calibration/CMakeLists.txt b/examples/calibration/CMakeLists.txt index 963f4bce..bb19ea5d 100644 --- a/examples/calibration/CMakeLists.txt +++ b/examples/calibration/CMakeLists.txt @@ -1,4 +1,4 @@ -set(EXAMPLE_FILES +set(EXAMPLE_FILES calibration.cpp) foreach(EXAMPLE_FILE ${EXAMPLE_FILES}) diff --git a/examples/calibration/calibration.cpp b/examples/calibration/calibration.cpp index 80d8c79b..4c0b935a 100644 --- a/examples/calibration/calibration.cpp +++ b/examples/calibration/calibration.cpp @@ -3,26 +3,12 @@ // Implemented on top of OpenCV. // It computes and saves the intrinsics parameters of the input images. -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_POSE +#include // OpenPose dependencies #include -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. 
for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); // Calibration DEFINE_int32(mode, 1, "Select 1 for intrinsic camera parameter calibration, 2 for extrinsic calibration."); DEFINE_string(calibration_image_dir, "images/intrinsics/", "Directory where the images for camera parameter calibration are placed."); diff --git a/examples/openpose/CMakeLists.txt b/examples/openpose/CMakeLists.txt index 5fa7691d..8f1d87c4 100644 --- a/examples/openpose/CMakeLists.txt +++ b/examples/openpose/CMakeLists.txt @@ -1,4 +1,4 @@ -set(EXAMPLE_FILES +set(EXAMPLE_FILES openpose.cpp) foreach(EXAMPLE_FILE ${EXAMPLE_FILES}) diff --git a/examples/openpose/openpose.cpp b/examples/openpose/openpose.cpp index 3170c0a2..1661c2f5 100755 --- a/examples/openpose/openpose.cpp +++ b/examples/openpose/openpose.cpp @@ -1,7 +1,7 @@ // ------------------------- OpenPose Library Tutorial - Real Time Pose Estimation ------------------------- -// If the user wants to learn to use the OpenPose library, we highly recommend to start with the `examples/tutorial_*/` -// folders. -// This example summarizes all the funcitonality of the OpenPose library: +// If the user wants to learn to use the OpenPose library, we highly recommend to start with the +// examples in `examples/tutorial_api_cpp/`. +// This example summarizes all the functionality of the OpenPose library: // 1. Read folder of images / video / webcam (`producer` module) // 2. Extract and render body keypoint / heatmap / PAF of that image (`pose` module) // 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module) @@ -16,228 +16,11 @@ // 2. `utilities` module: for the error & logging functions, i.e. 
op::error & op::log respectively // This file should only be used for the user to take specific examples. -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif +// Command-line user intraface +#include // OpenPose dependencies #include -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); -DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" - " for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with" - " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" - " error."); -DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" - " runtime statistics at this frame number."); -// Producer -DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" - " number (by default), to auto-detect and open the first available camera."); -DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). 
`-1x-1` will use the" - " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" - " `--flir_camera`"); -DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). Set this value to the minimum" - " value between the OpenPose displayed speed and the webcam real frame rate."); -DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" - " example video."); -DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" - " images. Read all standard formats (jpg, png, bmp, etc.)."); -DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera."); -DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" - " camera index to run, where 0 corresponds to the detected flir camera with the lowest" - " serial number, and `n` to the `n`-th lowest serial number camera."); -DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP."); -DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."); -DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to" - " 10, it will process 11 frames (0-10)."); -DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g. for real time webcam demonstrations)."); -DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270."); -DEFINE_bool(frames_repeat, false, "Repeat frames when finished."); -DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g. for video). If the processing time is" - " too long, it will skip frames. 
If it is too fast, it will slow it down."); -DEFINE_string(camera_parameter_folder, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located."); -DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the" - " `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.," - " it will leave it as it is."); -// OpenPose -DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."); -DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" - " input image resolution."); -DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" - " machine."); -DEFINE_int32(num_gpu_start, 0, "GPU device start number."); -DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)" - " coordinates that will be saved with the `write_json` & `write_keypoint` flags." - " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" - " size (set with `net_resolution`); `2` to scale it to the final output size (set with" - " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" - " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" - " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" - " related with `scale_number` and `scale_gap`."); -DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" - " top scores. The score is based in person area over the image, body part score, as well as" - " joint score (between each pair of connected body parts). 
Useful if you know the exact" - " number of people in the scene, so it can remove false positives (if all the people have" - " been detected. However, it might also include false negatives by removing very small or" - " highly occluded people. -1 will keep them all."); -// OpenPose Body Pose -DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face" - " keypoint detection."); -DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), " - "`MPI_4_layers` (15 keypoints, even faster but less accurate)."); -DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" - " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" - " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" - " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" - " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," - " e.g. full HD (1980x1080) and HD (1280x720) resolutions."); -DEFINE_int32(scale_number, 1, "Number of scales to average."); -DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." - " If you want to change the initial scale, you actually want to multiply the" - " `net_resolution` by your desired initial scale."); -// OpenPose Body Pose Heatmaps and Part Candidates -DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" - " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." - " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" - " memory order: body parts + bkg + PAFs. 
It will follow the order on" - " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" - " considerably decrease. Not required for OpenPose, enable it only if you intend to" - " explicitly use this information later."); -DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" - " background."); -DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."); -DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" - " rounded [0,255]; and 3 for no scaling."); -DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" - " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" - " the detected body parts, before being assembled into people. Note that the number of" - " candidates is equal or higher than the number of final body parts (i.e. after being" - " assembled into people). The empty body parts are filled with 0s. Program speed will" - " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" - " use this information."); -// OpenPose Face -DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Note that this will considerable slow down the performance and increse" - " the required GPU memory. In addition, the greater number of people on the image, the" - " slower OpenPose will be."); -DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" - " detector. 320x320 usually works fine while giving a substantial speed up when multiple" - " faces on the image."); -// OpenPose Hand -DEFINE_bool(hand, false, "Enables hand keypoint detection. 
It will share some parameters from the body pose, e.g." - " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" - " the required GPU memory and its speed depends on the number of people."); -DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" - " detector."); -DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" - " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."); -DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" - " between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if" - " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."); -DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate" - " is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it" - " simply looks for hands in positions at which hands were located in previous frames, but" - " it does not guarantee the same person ID among frames."); -// OpenPose 3-D Reconstruction -DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." - " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" - " results. Note that it will only display 1 person. If multiple people is present, it will" - " fail."); -DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" - " require all the cameras to see the keypoint in order to reconstruct it."); -DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. 
OpenPose will read as many images per" - " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" - " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the" - " parameter folder as this number indicates."); -// Extra algorithms -DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."); -DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" - " value indicates the number of frames where tracking is run between each OpenPose keypoint" - " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" - " detector and tracking for potentially higher accurary than only OpenPose."); -DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" - " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" - " the number of threads will increase the speed but also the global system latency."); -// OpenPose Rendering -DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body" - " part heat map, 19 for the background heat map, 20 for all the body part heat maps" - " together, 21 for all the PAFs, 22-40 for each body part pair PAF."); -DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" - " background, instead of being rendered into the original image. Related: `part_to_show`," - " `alpha_pose`, and `alpha_pose`."); -// OpenPose Rendering Pose -DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" - " rendered. 
Generally, a high threshold (> 0.5) will only render very clear body parts;" - " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also" - " more false positives (i.e. wrong detections)."); -DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" - " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if" - " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" - " both `outputData` and `cvOutputData` with the original image and desired body part to be" - " shown (i.e. keypoints, heat maps or PAFs)."); -DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" - " hide it. Only valid for GPU rendering."); -DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" - " heatmap, 0 will only show the frame. Only valid for GPU rendering."); -// OpenPose Rendering Face -DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."); -DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."); -DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."); -// OpenPose Rendering Hand -DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."); -DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. 
Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); -DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); -// Display -DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle)."); -DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g. number of current frame and people). It" - " does not affect the pose rendering."); -DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" - " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" - " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."); -// Result Saving -DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); -DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV" - " function cv::imwrite for all compatible extensions."); -DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" - " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" - " `camera_fps` controls FPS."); -DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose" - " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."); -DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format."); -DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format."); -DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. 
At least 1 `add_heatmaps_X` flag" - " must be enabled."); -DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." - " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" - " floating values."); -DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" - " with `write_keypoint_format`."); -DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," - " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."); -// Result Saving - Extra Algorithms -DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`" - " controls FPS."); -DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."); -// UDP communication -DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."); -DEFINE_string(udp_port, "8051", "Experimental, not available yet. 
Port number for UDP communication."); - int openPoseDemo() { try @@ -256,7 +39,7 @@ int openPoseDemo() // // Print out speed values faster // op::Profiler::setDefaultX(100); - // Applying user defined configuration - Google flags to program variables + // Applying user defined configuration - GFlags to program variables // outputSize const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); // netInputSize @@ -325,7 +108,7 @@ int openPoseDemo() // Configure wrapper opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, wrapperStructInput, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) if (FLAGS_disable_multi_thread) opWrapper.disableMultiThreading(); diff --git a/examples/tests/README.md b/examples/tests/README.md new file mode 100644 index 00000000..1dca5a95 --- /dev/null +++ b/examples/tests/README.md @@ -0,0 +1,4 @@ +# Low Level API Examples +**Disclaimer**: This folder is meant for internal OpenPose developers. The Examples might highly change, and we will not answer questions about them nor provide official support for them. + +**If the OpenPose library does not compile for an error happening due to a file from this folder, notify us**. diff --git a/examples/tests/clTest.cpp b/examples/tests/clTest.cpp index 8c102c65..91f2368d 100644 --- a/examples/tests/clTest.cpp +++ b/examples/tests/clTest.cpp @@ -1,19 +1,14 @@ // ------------------------- OpenPose Resize Layer Testing ------------------------- +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_POSE +#include +// OpenPose dependencies #include +// OpenCL dependencies #ifdef USE_OPENCL #include #include -#include // `std::chrono::` functions and classes, e.g. 
std::chrono::milliseconds -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ -namespace gflags = google; -#endif -#ifdef USE_CAFFE -#include -#endif DEFINE_string(image_path, "examples/media/COCO_val2014_000000000192.jpg", "Process the desired image."); diff --git a/examples/tests/handFromJsonTest.cpp b/examples/tests/handFromJsonTest.cpp index 98e8c8ad..c6f9e135 100644 --- a/examples/tests/handFromJsonTest.cpp +++ b/examples/tests/handFromJsonTest.cpp @@ -1,19 +1,14 @@ // ------------------------- OpenPose Library Tutorial - Hand Keypoint Detection from JSON Ground-Truth Data ------------------------- // Example to test hands accuracy given ground-truth bounding boxes. -#include // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_POSE +#include +// OpenPose dependencies #include #include "wrapperHandFromJsonTest.hpp" // For info about the flags, check `examples/openpose/openpose.bin`. 
-// Debugging/Other -DEFINE_int32(logging_level, 3, ""); // Producer DEFINE_string(image_dir, "", ""); DEFINE_string(hand_ground_truth, "", ""); @@ -44,7 +39,7 @@ int handFromJsonTest() __LINE__, __FUNCTION__, __FILE__); op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - // Applying user defined configuration - Google flags to program variables + // Applying user defined configuration - GFlags to program variables // handNetInputSize const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); // producerType diff --git a/examples/tests/resizeTest.cpp b/examples/tests/resizeTest.cpp index 568485d5..a19c826d 100644 --- a/examples/tests/resizeTest.cpp +++ b/examples/tests/resizeTest.cpp @@ -1,14 +1,12 @@ // ------------------------- OpenPose Resize Layer Testing ------------------------- +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_POSE +#include +// OpenPose dependencies #include + #ifdef USE_CUDA - #include // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds - // GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string - #include - // Allow Google Flags in Ubuntu 14 - #ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; - #endif #ifdef USE_CAFFE #include #endif diff --git a/examples/tutorial_add_module/1_custom_post_processing.cpp b/examples/tutorial_add_module/1_custom_post_processing.cpp index 79ae80f3..c1c2c19c 100644 --- a/examples/tutorial_add_module/1_custom_post_processing.cpp +++ b/examples/tutorial_add_module/1_custom_post_processing.cpp @@ -20,233 +20,16 @@ // 4. If extra classes and files are required, add those extra files inside the OpenPose include and src folders, // under a new folder (i.e. `include/newMethod/` and `src/newMethod/`), including `namespace op` on those files. 
-// This example is a sub-case of `tutorial_wrapper/2_user_synchronous.cpp`, where only custom post-processing is +// This example is a sub-case of `tutorial_api_cpp/6_synchronous_custom_postprocessing.cpp`, where only custom post-processing is // considered. -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif +// Command-line user intraface +#include // OpenPose dependencies #include #include "userDatum.hpp" #include "wUserPostProcessing.hpp" -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); -DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" - " for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with" - " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" - " error."); -DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" - " runtime statistics at this frame number."); -// Producer -DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. 
Select a negative" - " number (by default), to auto-detect and open the first available camera."); -DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" - " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" - " `--flir_camera`"); -DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). Set this value to the minimum" - " value between the OpenPose displayed speed and the webcam real frame rate."); -DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" - " example video."); -DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" - " images. Read all standard formats (jpg, png, bmp, etc.)."); -DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera."); -DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" - " camera index to run, where 0 corresponds to the detected flir camera with the lowest" - " serial number, and `n` to the `n`-th lowest serial number camera."); -DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP."); -DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."); -DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to" - " 10, it will process 11 frames (0-10)."); -DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g. 
for real time webcam demonstrations)."); -DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270."); -DEFINE_bool(frames_repeat, false, "Repeat frames when finished."); -DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g. for video). If the processing time is" - " too long, it will skip frames. If it is too fast, it will slow it down."); -DEFINE_string(camera_parameter_folder, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located."); -DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the" - " `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.," - " it will leave it as it is."); -// OpenPose -DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."); -DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" - " input image resolution."); -DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" - " machine."); -DEFINE_int32(num_gpu_start, 0, "GPU device start number."); -DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)" - " coordinates that will be saved with the `write_json` & `write_keypoint` flags." - " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" - " size (set with `net_resolution`); `2` to scale it to the final output size (set with" - " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" - " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" - " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. 
Non" - " related with `scale_number` and `scale_gap`."); -DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" - " top scores. The score is based in person area over the image, body part score, as well as" - " joint score (between each pair of connected body parts). Useful if you know the exact" - " number of people in the scene, so it can remove false positives (if all the people have" - " been detected. However, it might also include false negatives by removing very small or" - " highly occluded people. -1 will keep them all."); -// OpenPose Body Pose -DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face" - " keypoint detection."); -DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), " - "`MPI_4_layers` (15 keypoints, even faster but less accurate)."); -DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" - " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" - " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" - " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" - " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," - " e.g. full HD (1980x1080) and HD (1280x720) resolutions."); -DEFINE_int32(scale_number, 1, "Number of scales to average."); -DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." 
- " If you want to change the initial scale, you actually want to multiply the" - " `net_resolution` by your desired initial scale."); -// OpenPose Body Pose Heatmaps and Part Candidates -DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" - " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." - " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" - " memory order: body parts + bkg + PAFs. It will follow the order on" - " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" - " considerably decrease. Not required for OpenPose, enable it only if you intend to" - " explicitly use this information later."); -DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" - " background."); -DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."); -DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" - " rounded [0,255]; and 3 for no scaling."); -DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" - " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" - " the detected body parts, before being assembled into people. Note that the number of" - " candidates is equal or higher than the number of final body parts (i.e. after being" - " assembled into people). The empty body parts are filled with 0s. Program speed will" - " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" - " use this information."); -// OpenPose Face -DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. 
Note that this will considerable slow down the performance and increse" - " the required GPU memory. In addition, the greater number of people on the image, the" - " slower OpenPose will be."); -DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" - " detector. 320x320 usually works fine while giving a substantial speed up when multiple" - " faces on the image."); -// OpenPose Hand -DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" - " the required GPU memory and its speed depends on the number of people."); -DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" - " detector."); -DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" - " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."); -DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" - " between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if" - " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."); -DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate" - " is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it" - " simply looks for hands in positions at which hands were located in previous frames, but" - " it does not guarantee the same person ID among frames."); -// OpenPose 3-D Reconstruction -DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." 
- " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" - " results. Note that it will only display 1 person. If multiple people is present, it will" - " fail."); -DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" - " require all the cameras to see the keypoint in order to reconstruct it."); -DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per" - " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" - " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the" - " parameter folder as this number indicates."); -// Extra algorithms -DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."); -DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" - " value indicates the number of frames where tracking is run between each OpenPose keypoint" - " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" - " detector and tracking for potentially higher accurary than only OpenPose."); -DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" - " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" - " the number of threads will increase the speed but also the global system latency."); -// OpenPose Rendering -DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 
0 for all the body parts, 1-18 for each body" - " part heat map, 19 for the background heat map, 20 for all the body part heat maps" - " together, 21 for all the PAFs, 22-40 for each body part pair PAF."); -DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" - " background, instead of being rendered into the original image. Related: `part_to_show`," - " `alpha_pose`, and `alpha_pose`."); -// OpenPose Rendering Pose -DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" - " rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;" - " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also" - " more false positives (i.e. wrong detections)."); -DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" - " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if" - " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" - " both `outputData` and `cvOutputData` with the original image and desired body part to be" - " shown (i.e. keypoints, heat maps or PAFs)."); -DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" - " hide it. Only valid for GPU rendering."); -DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" - " heatmap, 0 will only show the frame. Only valid for GPU rendering."); -// OpenPose Rendering Face -DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."); -DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. 
Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."); -DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."); -// OpenPose Rendering Hand -DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."); -DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); -DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); -// Display -DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle)."); -DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g. number of current frame and people). It" - " does not affect the pose rendering."); -DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" - " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" - " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."); -// Result Saving -DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); -DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV" - " function cv::imwrite for all compatible extensions."); -DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" - " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" - " `camera_fps` controls FPS."); -DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. 
It includes body, hand, and face pose" - " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."); -DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format."); -DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format."); -DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" - " must be enabled."); -DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." - " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" - " floating values."); -DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" - " with `write_keypoint_format`."); -DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," - " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."); -// Result Saving - Extra Algorithms -DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`" - " controls FPS."); -DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."); -// UDP communication -DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."); -DEFINE_string(udp_port, "8051", "Experimental, not available yet. 
Port number for UDP communication."); - int openPoseTutorialWrapper4() { try @@ -260,7 +43,7 @@ int openPoseTutorialWrapper4() op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); op::Profiler::setDefaultX(FLAGS_profile_speed); - // Applying user defined configuration - Google flags to program variables + // Applying user defined configuration - GFlags to program variables // outputSize const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); // netInputSize @@ -334,12 +117,12 @@ int openPoseTutorialWrapper4() ); // Add custom processing const auto workerProcessingOnNewThread = false; - opWrapper.setWorkerPostProcessing(wUserPostProcessing, workerProcessingOnNewThread); + opWrapper.setWorker(op::WorkerType::PostProcessing, wUserPostProcessing, workerProcessingOnNewThread); // Configure wrapper opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, wrapperStructInput, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) if (FLAGS_disable_multi_thread) opWrapper.disableMultiThreading(); diff --git a/examples/tutorial_api_cpp/1_body_from_image.cpp b/examples/tutorial_api_cpp/1_body_from_image.cpp new file mode 100644 index 00000000..98e038a3 --- /dev/null +++ b/examples/tutorial_api_cpp/1_body_from_image.cpp @@ -0,0 +1,103 @@ +// ------------------------- OpenPose API Tutorial - Example 1 - Body from image ------------------------- +// It reads an image, process it, and displays it with the pose keypoints. + +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_POSE +#include +// OpenPose dependencies +#include + +// Custom OpenPose flags +// Producer +DEFINE_string(image_path, "examples/media/COCO_val2014_000000000192.jpg", + "Process an image. 
Read all standard formats (jpg, png, bmp, etc.)."); + +// This worker will just read and return all the jpg files in a directory +void display(const std::shared_ptr>& datumsPtr) +{ + // User's displaying/saving/other processing here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + // Display image + cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); + cv::waitKey(0); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High); +} + +void printKeypoints(const std::shared_ptr>& datumsPtr) +{ + // Example: How to use the pose keypoints + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + // Alternative 1 + op::log("Body keypoints: " + datumsPtr->at(0).poseKeypoints.toString()); + + // // Alternative 2 + // op::log(datumsPtr->at(0).poseKeypoints); + + // // Alternative 3 + // std::cout << datumsPtr->at(0).poseKeypoints << std::endl; + + // // Alternative 4 - Accesing each element of the keypoints + // op::log("\nKeypoints:"); + // const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; + // op::log("Person pose keypoints:"); + // for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++) + // { + // op::log("Person " + std::to_string(person) + " (x, y, score):"); + // for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) + // { + // std::string valueToPrint; + // for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) + // valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; + // op::log(valueToPrint); + // } + // } + // op::log(" "); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High); +} + +int bodyFromImage() +{ + op::log("Starting OpenPose demo...", op::Priority::High); + + // Configuring OpenPose + op::log("Configuring OpenPose...", op::Priority::High); + op::Wrapper> 
opWrapper{op::ThreadManagerMode::Asynchronous}; + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + // Starting OpenPose + op::log("Starting thread(s)...", op::Priority::High); + opWrapper.start(); + + // Process and display image + const auto imageToProcess = cv::imread(FLAGS_image_path); + auto datumProcessed = opWrapper.emplaceAndPop(imageToProcess); + if (datumProcessed != nullptr) + { + printKeypoints(datumProcessed); + display(datumProcessed); + } + else + op::log("Image could not be processed.", op::Priority::High); + + // Return successful message + op::log("Stopping OpenPose...", op::Priority::High); + return 0; +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running bodyFromImage + return bodyFromImage(); +} diff --git a/examples/tutorial_api_cpp/2_whole_body_from_image.cpp b/examples/tutorial_api_cpp/2_whole_body_from_image.cpp new file mode 100644 index 00000000..92e11100 --- /dev/null +++ b/examples/tutorial_api_cpp/2_whole_body_from_image.cpp @@ -0,0 +1,85 @@ +// ------------------------- OpenPose API Tutorial - Example 2 - Whole body from image ------------------------- +// It reads an image, process it, and displays it with the pose, hand, and face keypoints. + +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_POSE +#include +// OpenPose dependencies +#include + +// Custom OpenPose flags +// Producer +DEFINE_string(image_path, "examples/media/COCO_val2014_000000000241.jpg", + "Process an image. 
Read all standard formats (jpg, png, bmp, etc.)."); + +// This worker will just read and return all the jpg files in a directory +void display(const std::shared_ptr>& datumsPtr) +{ + // User's displaying/saving/other processing here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + // Display image + cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); + cv::waitKey(0); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High); +} + +void printKeypoints(const std::shared_ptr>& datumsPtr) +{ + // Example: How to use the pose keypoints + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + op::log("Body keypoints: " + datumsPtr->at(0).poseKeypoints.toString()); + op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); + op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); + op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High); +} + +int wholeBodyFromImage() +{ + op::log("Starting OpenPose demo...", op::Priority::High); + + // Configuring OpenPose + op::log("Configuring OpenPose...", op::Priority::High); + op::Wrapper> opWrapper{op::ThreadManagerMode::Asynchronous}; + // Add hand and face + opWrapper.configure(op::WrapperStructFace{true}); + opWrapper.configure(op::WrapperStructHand{true}); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + // Starting OpenPose + op::log("Starting thread(s)...", op::Priority::High); + opWrapper.start(); + + // Process and display image + const auto imageToProcess = cv::imread(FLAGS_image_path); + auto datumProcessed = opWrapper.emplaceAndPop(imageToProcess); + if (datumProcessed != nullptr) + { + 
printKeypoints(datumProcessed); + display(datumProcessed); + } + else + op::log("Image could not be processed.", op::Priority::High); + + // Return successful message + op::log("Stopping OpenPose...", op::Priority::High); + return 0; +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running wholeBodyFromImage + return wholeBodyFromImage(); +} diff --git a/examples/tutorial_api_cpp/3_keypoints_from_image_configurable.cpp b/examples/tutorial_api_cpp/3_keypoints_from_image_configurable.cpp new file mode 100644 index 00000000..57054a84 --- /dev/null +++ b/examples/tutorial_api_cpp/3_keypoints_from_image_configurable.cpp @@ -0,0 +1,150 @@ +// ------------------------- OpenPose API Tutorial - Example 3 - Body from image configurable ------------------------- +// It reads an image, process it, and displays it with the pose (and optionally hand and face) keypoints. In addition, +// it includes all the OpenPose configuration flags (enable/disable hand, face, output saving, etc.). + +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_PRODUCER +#define OPENPOSE_FLAGS_DISABLE_DISPLAY +#include +// OpenPose dependencies +#include + +// Custom OpenPose flags +// Producer +DEFINE_string(image_path, "examples/media/COCO_val2014_000000000294.jpg", + "Process an image. 
Read all standard formats (jpg, png, bmp, etc.)."); + +// This worker will just read and return all the jpg files in a directory +void display(const std::shared_ptr>& datumsPtr) +{ + // User's displaying/saving/other processing here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + // Display image + cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); + cv::waitKey(0); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High); +} + +void printKeypoints(const std::shared_ptr>& datumsPtr) +{ + // Example: How to use the pose keypoints + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + op::log("Body keypoints: " + datumsPtr->at(0).poseKeypoints.toString()); + op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); + op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); + op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High); +} + +int wholeBodyFromImage() +{ + op::log("Starting OpenPose demo...", op::Priority::High); + + // logging_level + op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", + __LINE__, __FUNCTION__, __FILE__); + op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::Profiler::setDefaultX(FLAGS_profile_speed); + + // Applying user defined configuration - GFlags to program variables + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // faceNetInputSize + const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); + // handNetInputSize + const auto handNetInputSize = 
op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); + // poseModel + const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // JSON saving + if (!FLAGS_write_keypoint.empty()) + op::log("Flag `write_keypoint` is deprecated and will eventually be removed." + " Please, use `write_json` instead.", op::Priority::Max); + // keypointScale + const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); + // heatmaps to add + const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, + FLAGS_heatmaps_add_PAFs); + const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); + // >1 camera view? + const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1); + // Enabling Google Logging + const bool enableGoogleLogging = true; + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // Configuring OpenPose + op::log("Configuring OpenPose...", op::Priority::High); + op::Wrapper> opWrapper{op::ThreadManagerMode::Asynchronous}; + // Pose configuration (use WrapperStructPose{} for default and recommended configuration) + const op::WrapperStructPose wrapperStructPose{ + !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, + FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), + poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, + FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, + (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; + // Face configuration (use op::WrapperStructFace{} to disable it) + const op::WrapperStructFace wrapperStructFace{ + FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), + (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; + // Hand 
configuration (use op::WrapperStructHand{} to disable it) + const op::WrapperStructHand wrapperStructHand{ + FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, + op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, + (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; + // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) + const op::WrapperStructExtra wrapperStructExtra{ + FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; + // Consumer (comment or use default argument to disable any output) + const auto displayMode = op::DisplayMode::NoDisplay; + const bool guiVerbose = false; + const bool fullScreen = false; + const op::WrapperStructOutput wrapperStructOutput{ + displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, + op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, + FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, + FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, + FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; + // Configure wrapper + opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + op::WrapperStructInput{}, wrapperStructOutput); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + // Starting OpenPose + op::log("Starting thread(s)...", op::Priority::High); + opWrapper.start(); + + // Process and display image + const auto imageToProcess = cv::imread(FLAGS_image_path); + auto datumProcessed = opWrapper.emplaceAndPop(imageToProcess); + if (datumProcessed != nullptr) + { + printKeypoints(datumProcessed); + display(datumProcessed); + } + else + op::log("Image could not be 
processed.", op::Priority::High); + + // Return successful message + op::log("Stopping OpenPose...", op::Priority::High); + return 0; +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running wholeBodyFromImage + return wholeBodyFromImage(); +} diff --git a/examples/tutorial_api_cpp/4_asynchronous_loop_custom_input_and_output.cpp b/examples/tutorial_api_cpp/4_asynchronous_loop_custom_input_and_output.cpp new file mode 100644 index 00000000..765bfb12 --- /dev/null +++ b/examples/tutorial_api_cpp/4_asynchronous_loop_custom_input_and_output.cpp @@ -0,0 +1,314 @@ +// ------------------------- OpenPose Library Tutorial - Wrapper - Example 1 - Asynchronous ------------------------- +// Asynchronous mode: ideal for fast prototyping when performance is not an issue. The user emplaces/pushes and pops frames from the OpenPose wrapper +// when he desires to. + +// This example shows the user how to use the OpenPose wrapper class: + // 1. User reads images + // 2. Extract and render keypoint / heatmap / PAF of that image + // 3. Save the results on disk + // 4. User displays the rendered pose + // Everything in a multi-thread scenario +// In addition to the previous OpenPose modules, we also need to use: + // 1. `core` module: + // For the Array class that the `pose` module needs + // For the Datum struct that the `thread` module sends between the queues + // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively +// This file should only be used for the user to take specific examples. + +// Command-line user intraface +#define OPENPOSE_FLAGS_DISABLE_PRODUCER +#define OPENPOSE_FLAGS_DISABLE_DISPLAY +#include +// OpenPose dependencies +#include + +// Custom OpenPose flags +// Producer +DEFINE_string(image_dir, "examples/media/", + "Process a directory of images. 
Read all standard formats (jpg, png, bmp, etc.)."); + +// If the user needs his own variables, he can inherit the op::Datum struct and add them +// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define Wrapper instead of +// Wrapper +struct UserDatum : public op::Datum +{ + bool boolThatUserNeedsForSomeReason; + + UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : + boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} + {} +}; + +// The W-classes can be implemented either as a template or as simple classes given +// that the user usually knows which kind of data he will move between the queues, +// in this case we assume a std::shared_ptr of a std::vector of UserDatum + +// This worker will just read and return all the jpg files in a directory +class UserInputClass +{ +public: + UserInputClass(const std::string& directoryPath) : + mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")}, + // If we want "jpg" + "png" images + // mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector{"jpg", "png"})}, + mCounter{0}, + mClosed{false} + { + if (mImageFiles.empty()) + op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__); + } + + std::shared_ptr> createDatum() + { + // Close program when empty frame + if (mClosed || mImageFiles.size() <= mCounter) + { + op::log("Last frame read and added to queue. 
Closing program after it is processed.", op::Priority::High); + // This funtion stops this worker, which will eventually stop the whole thread system once all the frames + // have been processed + mClosed = true; + return nullptr; + } + else // if (!mClosed) + { + // Create new datum + auto datumsPtr = std::make_shared>(); + datumsPtr->emplace_back(); + auto& datum = datumsPtr->at(0); + + // Fill datum + datum.cvInputData = cv::imread(mImageFiles.at(mCounter++)); + + // If empty frame -> return nullptr + if (datum.cvInputData.empty()) + { + op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", + op::Priority::High); + mClosed = true; + datumsPtr = nullptr; + } + + return datumsPtr; + } + } + + bool isFinished() const + { + return mClosed; + } + +private: + const std::vector mImageFiles; + unsigned long long mCounter; + bool mClosed; +}; + +// This worker will just read and return all the jpg files in a directory +class UserOutputClass +{ +public: + bool display(const std::shared_ptr>& datumsPtr) + { + // User's displaying/saving/other processing here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + char key = ' '; + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); + // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) + key = (char)cv::waitKey(1); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); + return (key == 27); + } + void printKeypoints(const std::shared_ptr>& datumsPtr) + { + // Example: How to use the pose keypoints + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + op::log("\nKeypoints:"); + // Accesing each element of the keypoints + const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; + op::log("Person pose keypoints:"); + for (auto person = 0 ; person < 
poseKeypoints.getSize(0) ; person++) + { + op::log("Person " + std::to_string(person) + " (x, y, score):"); + for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) + { + std::string valueToPrint; + for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) + valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; + op::log(valueToPrint); + } + } + op::log(" "); + // Alternative: just getting std::string equivalent + op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); + op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); + op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); + // Heatmaps + const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; + if (!poseHeatMaps.empty()) + { + op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " + + std::to_string(poseHeatMaps.getSize(1)) + ", " + + std::to_string(poseHeatMaps.getSize(2)) + "]"); + const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; + op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " + + std::to_string(faceHeatMaps.getSize(1)) + ", " + + std::to_string(faceHeatMaps.getSize(2)) + ", " + + std::to_string(faceHeatMaps.getSize(3)) + "]"); + const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; + op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " + + std::to_string(handHeatMaps[0].getSize(1)) + ", " + + std::to_string(handHeatMaps[0].getSize(2)) + ", " + + std::to_string(handHeatMaps[0].getSize(3)) + "]"); + op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " + + std::to_string(handHeatMaps[1].getSize(1)) + ", " + + std::to_string(handHeatMaps[1].getSize(2)) + ", " + + std::to_string(handHeatMaps[1].getSize(3)) + "]"); + } + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); + 
} +}; + +int openPoseTutorialWrapper3() +{ + try + { + op::log("Starting OpenPose demo...", op::Priority::High); + const auto timerBegin = std::chrono::high_resolution_clock::now(); + + // logging_level + op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", + __LINE__, __FUNCTION__, __FILE__); + op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::Profiler::setDefaultX(FLAGS_profile_speed); + + // Applying user defined configuration - GFlags to program variables + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // faceNetInputSize + const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); + // handNetInputSize + const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); + // poseModel + const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // JSON saving + if (!FLAGS_write_keypoint.empty()) + op::log("Flag `write_keypoint` is deprecated and will eventually be removed." + " Please, use `write_json` instead.", op::Priority::Max); + // keypointScale + const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); + // heatmaps to add + const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, + FLAGS_heatmaps_add_PAFs); + const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); + // >1 camera view? 
+ const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1); + // Enabling Google Logging + const bool enableGoogleLogging = true; + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // Configure OpenPose + op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + op::Wrapper> opWrapper{op::ThreadManagerMode::Asynchronous}; + // Pose configuration (use WrapperStructPose{} for default and recommended configuration) + const op::WrapperStructPose wrapperStructPose{ + !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, + FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), + poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, + FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, + (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; + // Face configuration (use op::WrapperStructFace{} to disable it) + const op::WrapperStructFace wrapperStructFace{ + FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), + (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; + // Hand configuration (use op::WrapperStructHand{} to disable it) + const op::WrapperStructHand wrapperStructHand{ + FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, + op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, + (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; + // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) + const op::WrapperStructExtra wrapperStructExtra{ + FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; + // Consumer (comment or use 
default argument to disable any output) + const auto displayMode = op::DisplayMode::NoDisplay; + const bool guiVerbose = false; + const bool fullScreen = false; + const op::WrapperStructOutput wrapperStructOutput{ + displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, + op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, + FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, + FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, + FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; + // Configure wrapper + opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + op::WrapperStructInput{}, wrapperStructOutput); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + + op::log("Starting thread(s)...", op::Priority::High); + opWrapper.start(); + + // User processing + UserInputClass userInputClass(FLAGS_image_dir); + UserOutputClass userOutputClass; + bool userWantsToExit = false; + while (!userWantsToExit && !userInputClass.isFinished()) + { + // Push frame + auto datumToProcess = userInputClass.createDatum(); + if (datumToProcess != nullptr) + { + auto successfullyEmplaced = opWrapper.waitAndEmplace(datumToProcess); + // Pop frame + std::shared_ptr> datumProcessed; + if (successfullyEmplaced && opWrapper.waitAndPop(datumProcessed)) + { + userWantsToExit = userOutputClass.display(datumProcessed); + userOutputClass.printKeypoints(datumProcessed); + } + else + op::log("Processed datum could not be emplaced.", op::Priority::High, + __LINE__, __FUNCTION__, __FILE__); + } + } + + op::log("Stopping thread(s)", op::Priority::High); + opWrapper.stop(); + + // Measuring total time + const auto now = std::chrono::high_resolution_clock::now(); + const auto totalTimeSec = 
(double)std::chrono::duration_cast(now-timerBegin).count() + * 1e-9; + const auto message = "OpenPose demo successfully finished. Total time: " + + std::to_string(totalTimeSec) + " seconds."; + op::log(message, op::Priority::High); + + // Return successful message + return 0; + } + catch (const std::exception& e) + { + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return -1; + } +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running openPoseTutorialWrapper3 + return openPoseTutorialWrapper3(); +} diff --git a/examples/tutorial_api_cpp/5_asynchronous_loop_custom_output.cpp b/examples/tutorial_api_cpp/5_asynchronous_loop_custom_output.cpp new file mode 100644 index 00000000..4e146373 --- /dev/null +++ b/examples/tutorial_api_cpp/5_asynchronous_loop_custom_output.cpp @@ -0,0 +1,251 @@ +// ------------------------- OpenPose Library Tutorial - Wrapper - Example 3 - Asynchronous Output ------------------------- +// Asynchronous output mode: ideal for fast prototyping when performance is not an issue and user wants to use the output OpenPose format. The user +// simply gets the processed frames from the OpenPose wrapper when he desires to. + +// This example shows the user how to use the OpenPose wrapper class: + // 1. Read folder of images / video / webcam + // 2. Extract and render keypoint / heatmap / PAF of that image + // 3. Save the results on disk + // 4. User displays the rendered pose + // Everything in a multi-thread scenario +// In addition to the previous OpenPose modules, we also need to use: + // 1. `core` module: + // For the Array class that the `pose` module needs + // For the Datum struct that the `thread` module sends between the queues + // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively +// This file should only be used for the user to take specific examples. 
+ +// Command-line user interface +#define OPENPOSE_FLAGS_DISABLE_DISPLAY +#include +// OpenPose dependencies +#include + +// If the user needs his own variables, he can inherit the op::Datum struct and add them +// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define +// Wrapper instead of Wrapper +struct UserDatum : public op::Datum +{ + bool boolThatUserNeedsForSomeReason; + + UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : + boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} + {} +}; + +// The W-classes can be implemented either as a template or as simple classes given +// that the user usually knows which kind of data he will move between the queues, +// in this case we assume a std::shared_ptr of a std::vector of UserDatum + +// This worker will just read and return all the jpg files in a directory +class UserOutputClass +{ +public: + bool display(const std::shared_ptr>& datumsPtr) + { + // User's displaying/saving/other processing here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + char key = ' '; + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); + // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) + key = (char)cv::waitKey(1); + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); + return (key == 27); + } + void printKeypoints(const std::shared_ptr>& datumsPtr) + { + // Example: How to use the pose keypoints + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + op::log("\nKeypoints:"); + // Accessing each element of the keypoints + const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; + op::log("Person pose keypoints:"); + for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++) + { + op::log("Person " + 
std::to_string(person) + " (x, y, score):"); + for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) + { + std::string valueToPrint; + for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) + { + valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; + } + op::log(valueToPrint); + } + } + op::log(" "); + // Alternative: just getting std::string equivalent + op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); + op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); + op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); + // Heatmaps + const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; + if (!poseHeatMaps.empty()) + { + op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " + + std::to_string(poseHeatMaps.getSize(1)) + ", " + + std::to_string(poseHeatMaps.getSize(2)) + "]"); + const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; + op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " + + std::to_string(faceHeatMaps.getSize(1)) + ", " + + std::to_string(faceHeatMaps.getSize(2)) + ", " + + std::to_string(faceHeatMaps.getSize(3)) + "]"); + const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; + op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " + + std::to_string(handHeatMaps[0].getSize(1)) + ", " + + std::to_string(handHeatMaps[0].getSize(2)) + ", " + + std::to_string(handHeatMaps[0].getSize(3)) + "]"); + op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " + + std::to_string(handHeatMaps[1].getSize(1)) + ", " + + std::to_string(handHeatMaps[1].getSize(2)) + ", " + + std::to_string(handHeatMaps[1].getSize(3)) + "]"); + } + } + else + op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); + } +}; + +int openPoseTutorialWrapper1() +{ + try + { + 
op::log("Starting OpenPose demo...", op::Priority::High); + const auto timerBegin = std::chrono::high_resolution_clock::now(); + + // logging_level + op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", + __LINE__, __FUNCTION__, __FILE__); + op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::Profiler::setDefaultX(FLAGS_profile_speed); + + // Applying user defined configuration - GFlags to program variables + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // faceNetInputSize + const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); + // handNetInputSize + const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); + // producerType + const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, + FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, + FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, + (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); + // poseModel + const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // JSON saving + if (!FLAGS_write_keypoint.empty()) + op::log("Flag `write_keypoint` is deprecated and will eventually be removed." + " Please, use `write_json` instead.", op::Priority::Max); + // keypointScale + const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); + // heatmaps to add + const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, + FLAGS_heatmaps_add_PAFs); + const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); + // >1 camera view? 
+ const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); + // Enabling Google Logging + const bool enableGoogleLogging = true; + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // Configure OpenPose + op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + op::Wrapper> opWrapper{op::ThreadManagerMode::AsynchronousOut}; + // Pose configuration (use WrapperStructPose{} for default and recommended configuration) + const op::WrapperStructPose wrapperStructPose{ + !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, + FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), + poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, + FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, + (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; + // Face configuration (use op::WrapperStructFace{} to disable it) + const op::WrapperStructFace wrapperStructFace{ + FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), + (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; + // Hand configuration (use op::WrapperStructHand{} to disable it) + const op::WrapperStructHand wrapperStructHand{ + FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, + op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, + (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; + // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) + const op::WrapperStructExtra wrapperStructExtra{ + FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; + // 
Producer (use default to disable any input) + const op::WrapperStructInput wrapperStructInput{ + producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, FLAGS_process_real_time, FLAGS_frame_flip, + FLAGS_frame_rotate, FLAGS_frames_repeat}; + // Consumer (comment or use default argument to disable any output) + const auto displayMode = op::DisplayMode::NoDisplay; + const bool guiVerbose = false; + const bool fullScreen = false; + const op::WrapperStructOutput wrapperStructOutput{ + displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, + op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, + FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, + FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, + FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; + // Configure wrapper + opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + wrapperStructInput, wrapperStructOutput); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + + op::log("Starting thread(s)...", op::Priority::High); + opWrapper.start(); + + // User processing + UserOutputClass userOutputClass; + bool userWantsToExit = false; + while (!userWantsToExit) + { + // Pop frame + std::shared_ptr> datumProcessed; + if (opWrapper.waitAndPop(datumProcessed)) + { + userWantsToExit = userOutputClass.display(datumProcessed);; + userOutputClass.printKeypoints(datumProcessed); + } + else + op::log("Processed datum could not be emplaced.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); + } + + op::log("Stopping thread(s)", op::Priority::High); + opWrapper.stop(); + + // Measuring total time + const auto now = std::chrono::high_resolution_clock::now(); + const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() 
+ * 1e-9; + const auto message = "OpenPose demo successfully finished. Total time: " + + std::to_string(totalTimeSec) + " seconds."; + op::log(message, op::Priority::High); + + // Return successful message + return 0; + } + catch (const std::exception& e) + { + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return -1; + } +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running openPoseTutorialWrapper1 + return openPoseTutorialWrapper1(); +} diff --git a/examples/tutorial_api_cpp/6_synchronous_custom_postprocessing.cpp b/examples/tutorial_api_cpp/6_synchronous_custom_postprocessing.cpp new file mode 100644 index 00000000..539bd3fc --- /dev/null +++ b/examples/tutorial_api_cpp/6_synchronous_custom_postprocessing.cpp @@ -0,0 +1,222 @@ +// ------------------------- OpenPose Library Tutorial - Real Time Pose Estimation ------------------------- +// If the user wants to learn to use the OpenPose library, we highly recommend to start with the +// examples in `examples/tutorial_api_cpp/`. +// This example summarizes all the functionality of the OpenPose library: + // 1. Read folder of images / video / webcam (`producer` module) + // 2. Extract and render body keypoint / heatmap / PAF of that image (`pose` module) + // 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module) + // 4. Save the results on disk (`filestream` module) + // 5. Display the rendered pose (`gui` module) + // Everything in a multi-thread scenario (`thread` module) + // Points 2 to 5 are included in the `wrapper` module +// In addition to the previous OpenPose modules, we also need to use: + // 1. `core` module: + // For the Array class that the `pose` module needs + // For the Datum struct that the `thread` module sends between the queues + // 2. `utilities` module: for the error & logging functions, i.e. 
op::error & op::log respectively +// This file should only be used for the user to take specific examples. + +// Command-line user interface +#include +// OpenPose dependencies +#include + +// If the user needs his own variables, he can inherit the op::Datum struct and add them +// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define +// Wrapper instead of Wrapper +struct UserDatum : public op::Datum +{ + bool boolThatUserNeedsForSomeReason; + + UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : + boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} + {} +}; + +// The W-classes can be implemented either as a template or as simple classes given +// that the user usually knows which kind of data he will move between the queues, +// in this case we assume a std::shared_ptr of a std::vector of UserDatum + +// This worker will just invert the image +class WUserPostProcessing : public op::Worker>> +{ +public: + WUserPostProcessing() + { + // User's constructor here + } + + void initializationOnThread() {} + + void work(std::shared_ptr>& datumsPtr) + { + // User's post-processing (after OpenPose processing & before OpenPose outputs) here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + try + { + if (datumsPtr != nullptr && !datumsPtr->empty()) + for (auto& datum : *datumsPtr) + cv::bitwise_not(datum.cvOutputData, datum.cvOutputData); + } + catch (const std::exception& e) + { + this->stop(); + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + } + } +}; + +int openPoseDemo() +{ + try + { + op::log("Starting OpenPose demo...", op::Priority::High); + const auto timerBegin = std::chrono::high_resolution_clock::now(); + + // logging_level + op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", + __LINE__, __FUNCTION__, __FILE__); + 
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::Profiler::setDefaultX(FLAGS_profile_speed); + // // For debugging + // // Print all logging messages + // op::ConfigureLog::setPriorityThreshold(op::Priority::None); + // // Print out speed values faster + // op::Profiler::setDefaultX(100); + + // Applying user defined configuration - GFlags to program variables + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // faceNetInputSize + const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); + // handNetInputSize + const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); + // producerType + const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, + FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, + FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, + (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); + // poseModel + const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // JSON saving + if (!FLAGS_write_keypoint.empty()) + op::log("Flag `write_keypoint` is deprecated and will eventually be removed." + " Please, use `write_json` instead.", op::Priority::Max); + // keypointScale + const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); + // heatmaps to add + const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, + FLAGS_heatmaps_add_PAFs); + const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); + // >1 camera view? 
+ const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); + // Enabling Google Logging + const bool enableGoogleLogging = true; + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // OpenPose wrapper + op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + // op::Wrapper> opWrapper; + op::Wrapper> opWrapper; + + // Initializing the user custom classes + // Processing + auto wUserPostProcessing = std::make_shared(); + // Add custom processing + const auto workerProcessingOnNewThread = true; + opWrapper.setWorker(op::WorkerType::PostProcessing, wUserPostProcessing, workerProcessingOnNewThread); + + // Pose configuration (use WrapperStructPose{} for default and recommended configuration) + const op::WrapperStructPose wrapperStructPose{ + !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, + FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), + poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, + FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, + (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; + // Face configuration (use op::WrapperStructFace{} to disable it) + const op::WrapperStructFace wrapperStructFace{ + FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), + (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; + // Hand configuration (use op::WrapperStructHand{} to disable it) + const op::WrapperStructHand wrapperStructHand{ + FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, + op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, + 
(float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; + // Producer (use default to disable any input) + const op::WrapperStructInput wrapperStructInput{ + producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, FLAGS_process_real_time, FLAGS_frame_flip, + FLAGS_frame_rotate, FLAGS_frames_repeat}; + // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) + const op::WrapperStructExtra wrapperStructExtra{ + FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; + // Consumer (comment or use default argument to disable any output) + const op::WrapperStructOutput wrapperStructOutput{ + op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen, + FLAGS_write_keypoint, op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, + FLAGS_write_coco_json, FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, + FLAGS_write_video, FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, + FLAGS_write_video_adam, FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; + // Configure wrapper + opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + wrapperStructInput, wrapperStructOutput); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + + // Start processing + // Two different ways of running the program on multithread environment + op::log("Starting thread(s)...", op::Priority::High); + // Start, run & stop threads - it blocks this thread until all others have finished + opWrapper.exec(); + + // // Option b) Keeping this thread free in case you want to do something else meanwhile, e.g. profiling the GPU + // memory + // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. 
Qt needs the main + // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: + // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` + // // Start threads + // opWrapper.start(); + // // Profile used GPU memory + // // 1: wait ~10sec so the memory has been totally loaded on GPU + // // 2: profile the GPU memory + // const auto sleepTimeMs = 10; + // for (auto i = 0 ; i < 10000/sleepTimeMs && opWrapper.isRunning() ; i++) + // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); + // op::Profiler::profileGpuMemory(__LINE__, __FUNCTION__, __FILE__); + // // Keep program alive while running threads + // while (opWrapper.isRunning()) + // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); + // // Stop and join threads + // op::log("Stopping thread(s)", op::Priority::High); + // opWrapper.stop(); + + // Measuring total time + const auto now = std::chrono::high_resolution_clock::now(); + const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() + * 1e-9; + const auto message = "OpenPose demo successfully finished. 
Total time: " + + std::to_string(totalTimeSec) + " seconds."; + op::log(message, op::Priority::High); + + // Return successful message + return 0; + } + catch (const std::exception& e) + { + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return -1; + } +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running openPoseDemo + return openPoseDemo(); +} diff --git a/examples/tutorial_api_cpp/7_synchronous_custom_input.cpp b/examples/tutorial_api_cpp/7_synchronous_custom_input.cpp new file mode 100644 index 00000000..e35d729b --- /dev/null +++ b/examples/tutorial_api_cpp/7_synchronous_custom_input.cpp @@ -0,0 +1,266 @@ +// ------------------------- OpenPose Library Tutorial - Real Time Pose Estimation ------------------------- +// If the user wants to learn to use the OpenPose library, we highly recommend to start with the +// examples in `examples/tutorial_api_cpp/`. +// This example summarizes all the functionality of the OpenPose library: + // 1. Read folder of images / video / webcam (`producer` module) + // 2. Extract and render body keypoint / heatmap / PAF of that image (`pose` module) + // 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module) + // 4. Save the results on disk (`filestream` module) + // 5. Display the rendered pose (`gui` module) + // Everything in a multi-thread scenario (`thread` module) + // Points 2 to 5 are included in the `wrapper` module +// In addition to the previous OpenPose modules, we also need to use: + // 1. `core` module: + // For the Array class that the `pose` module needs + // For the Datum struct that the `thread` module sends between the queues + // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively +// This file should only be used for the user to take specific examples. 
+ +// Command-line user interface +#define OPENPOSE_FLAGS_DISABLE_PRODUCER +#include +// OpenPose dependencies +#include + +// Custom OpenPose flags +// Producer +DEFINE_string(image_dir, "examples/media/", + "Process a directory of images. Read all standard formats (jpg, png, bmp, etc.)."); + +// If the user needs his own variables, he can inherit the op::Datum struct and add them +// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define +// Wrapper instead of Wrapper +struct UserDatum : public op::Datum +{ + bool boolThatUserNeedsForSomeReason; + + UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : + boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} + {} +}; + +// The W-classes can be implemented either as a template or as simple classes given +// that the user usually knows which kind of data he will move between the queues, +// in this case we assume a std::shared_ptr of a std::vector of UserDatum + +// This worker will just read and return all the jpg files in a directory +class WUserInput : public op::WorkerProducer>> +{ +public: + WUserInput(const std::string& directoryPath) : + mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")}, + // If we want "jpg" + "png" images + // mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector{"jpg", "png"})}, + mCounter{0} + { + if (mImageFiles.empty()) + op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__); + } + + void initializationOnThread() {} + + std::shared_ptr> workProducer() + { + try + { + // Close program when empty frame + if (mImageFiles.size() <= mCounter) + { + op::log("Last frame read and added to queue. 
Closing program after it is processed.", + op::Priority::High); + // This function stops this worker, which will eventually stop the whole thread system once all the + // frames have been processed + this->stop(); + return nullptr; + } + else + { + // Create new datum + auto datumsPtr = std::make_shared>(); + datumsPtr->emplace_back(); + auto& datum = datumsPtr->at(0); + + // Fill datum + datum.cvInputData = cv::imread(mImageFiles.at(mCounter++)); + + // If empty frame -> return nullptr + if (datum.cvInputData.empty()) + { + op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", + op::Priority::High); + this->stop(); + datumsPtr = nullptr; + } + + return datumsPtr; + } + } + catch (const std::exception& e) + { + this->stop(); + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return nullptr; + } + } + +private: + const std::vector mImageFiles; + unsigned long long mCounter; +}; + +int openPoseDemo() +{ + try + { + op::log("Starting OpenPose demo...", op::Priority::High); + const auto timerBegin = std::chrono::high_resolution_clock::now(); + + // logging_level + op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", + __LINE__, __FUNCTION__, __FILE__); + op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::Profiler::setDefaultX(FLAGS_profile_speed); + // // For debugging + // // Print all logging messages + // op::ConfigureLog::setPriorityThreshold(op::Priority::None); + // // Print out speed values faster + // op::Profiler::setDefaultX(100); + + // Applying user defined configuration - GFlags to program variables + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // faceNetInputSize + const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); + // handNetInputSize + 
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); + // // producerType + // const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, + // FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, + // FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, + // (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); + // poseModel + const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // JSON saving + if (!FLAGS_write_keypoint.empty()) + op::log("Flag `write_keypoint` is deprecated and will eventually be removed." + " Please, use `write_json` instead.", op::Priority::Max); + // keypointScale + const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); + // heatmaps to add + const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, + FLAGS_heatmaps_add_PAFs); + const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); + // >1 camera view? + // const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); + const auto multipleView = false; + // Enabling Google Logging + const bool enableGoogleLogging = true; + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // OpenPose wrapper + op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + // op::Wrapper> opWrapper; + op::Wrapper> opWrapper; + + // Initializing the user custom classes + // Frames producer (e.g. video, webcam, ...) 
+ auto wUserInput = std::make_shared(FLAGS_image_dir); + // Add custom processing + const auto workerInputOnNewThread = true; + opWrapper.setWorker(op::WorkerType::Input, wUserInput, workerInputOnNewThread); + + // Pose configuration (use WrapperStructPose{} for default and recommended configuration) + const op::WrapperStructPose wrapperStructPose{ + !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, + FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), + poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, + FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, + (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; + // Face configuration (use op::WrapperStructFace{} to disable it) + const op::WrapperStructFace wrapperStructFace{ + FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), + (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; + // Hand configuration (use op::WrapperStructHand{} to disable it) + const op::WrapperStructHand wrapperStructHand{ + FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, + op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, + (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; + // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) + const op::WrapperStructExtra wrapperStructExtra{ + FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; + // Producer (use default to disable any input) + // const op::WrapperStructInput wrapperStructInput{producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, + // FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, + 
// FLAGS_frames_repeat}; + const op::WrapperStructInput wrapperStructInput; + // Consumer (comment or use default argument to disable any output) + const op::WrapperStructOutput wrapperStructOutput{ + op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen, + FLAGS_write_keypoint, op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, + FLAGS_write_coco_json, FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, + FLAGS_write_video, FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, + FLAGS_write_video_adam, FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; + // Configure wrapper + + opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + wrapperStructInput, wrapperStructOutput); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + + // Start processing + // Two different ways of running the program on multithread environment + op::log("Starting thread(s)...", op::Priority::High); + // Start, run & stop threads - it blocks this thread until all others have finished + opWrapper.exec(); + + // // Option b) Keeping this thread free in case you want to do something else meanwhile, e.g. profiling the GPU + // memory + // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. 
Qt needs the main + // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: + // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` + // // Start threads + // opWrapper.start(); + // // Profile used GPU memory + // // 1: wait ~10sec so the memory has been totally loaded on GPU + // // 2: profile the GPU memory + // const auto sleepTimeMs = 10; + // for (auto i = 0 ; i < 10000/sleepTimeMs && opWrapper.isRunning() ; i++) + // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); + // op::Profiler::profileGpuMemory(__LINE__, __FUNCTION__, __FILE__); + // // Keep program alive while running threads + // while (opWrapper.isRunning()) + // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); + // // Stop and join threads + // op::log("Stopping thread(s)", op::Priority::High); + // opWrapper.stop(); + + // Measuring total time + const auto now = std::chrono::high_resolution_clock::now(); + const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() + * 1e-9; + const auto message = "OpenPose demo successfully finished. 
Total time: " + + std::to_string(totalTimeSec) + " seconds."; + op::log(message, op::Priority::High); + + // Return successful message + return 0; + } + catch (const std::exception& e) + { + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return -1; + } +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running openPoseDemo + return openPoseDemo(); +} diff --git a/examples/tutorial_api_cpp/8_synchronous_custom_output.cpp b/examples/tutorial_api_cpp/8_synchronous_custom_output.cpp new file mode 100644 index 00000000..ade60a01 --- /dev/null +++ b/examples/tutorial_api_cpp/8_synchronous_custom_output.cpp @@ -0,0 +1,274 @@ +// ------------------------- OpenPose Library Tutorial - Real Time Pose Estimation ------------------------- +// If the user wants to learn to use the OpenPose library, we highly recommend to start with the +// examples in `examples/tutorial_api_cpp/`. +// This example summarizes all the functionality of the OpenPose library: + // 1. Read folder of images / video / webcam (`producer` module) + // 2. Extract and render body keypoint / heatmap / PAF of that image (`pose` module) + // 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module) + // 4. Save the results on disk (`filestream` module) + // 5. Display the rendered pose (`gui` module) + // Everything in a multi-thread scenario (`thread` module) + // Points 2 to 5 are included in the `wrapper` module +// In addition to the previous OpenPose modules, we also need to use: + // 1. `core` module: + // For the Array class that the `pose` module needs + // For the Datum struct that the `thread` module sends between the queues + // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively +// This file should only be used for the user to take specific examples. 
+ +// Command-line user interface +#include +// OpenPose dependencies +#include + +// If the user needs his own variables, he can inherit the op::Datum struct and add them +// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define +// Wrapper instead of Wrapper +struct UserDatum : public op::Datum +{ + bool boolThatUserNeedsForSomeReason; + + UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : + boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} + {} +}; + +// The W-classes can be implemented either as a template or as simple classes given +// that the user usually knows which kind of data he will move between the queues, +// in this case we assume a std::shared_ptr of a std::vector of UserDatum + +// This worker will just read and return all the jpg files in a directory +class WUserOutput : public op::WorkerConsumer>> +{ +public: + void initializationOnThread() {} + + void workConsumer(const std::shared_ptr>& datumsPtr) + { + try + { + // User's displaying/saving/other processing here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + // Show in command line the resulting pose keypoints for body, face and hands + op::log("\nKeypoints:"); + // Accessing each element of the keypoints + const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; + op::log("Person pose keypoints:"); + for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++) + { + op::log("Person " + std::to_string(person) + " (x, y, score):"); + for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) + { + std::string valueToPrint; + for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) + { + valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; + } + op::log(valueToPrint); + } + } + op::log(" "); + // Alternative: just getting 
std::string equivalent + op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); + op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); + op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); + // Heatmaps + const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; + if (!poseHeatMaps.empty()) + { + op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " + + std::to_string(poseHeatMaps.getSize(1)) + ", " + + std::to_string(poseHeatMaps.getSize(2)) + "]"); + const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; + op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " + + std::to_string(faceHeatMaps.getSize(1)) + ", " + + std::to_string(faceHeatMaps.getSize(2)) + ", " + + std::to_string(faceHeatMaps.getSize(3)) + "]"); + const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; + op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " + + std::to_string(handHeatMaps[0].getSize(1)) + ", " + + std::to_string(handHeatMaps[0].getSize(2)) + ", " + + std::to_string(handHeatMaps[0].getSize(3)) + "]"); + op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " + + std::to_string(handHeatMaps[1].getSize(1)) + ", " + + std::to_string(handHeatMaps[1].getSize(2)) + ", " + + std::to_string(handHeatMaps[1].getSize(3)) + "]"); + } + + // Display rendered output image + cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); + // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) + const char key = (char)cv::waitKey(1); + if (key == 27) + this->stop(); + } + } + catch (const std::exception& e) + { + this->stop(); + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + } + } +}; + +int openPoseDemo() +{ + try + { + op::log("Starting OpenPose demo...", op::Priority::High); + const auto timerBegin = std::chrono::high_resolution_clock::now(); + + 
// logging_level + op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", + __LINE__, __FUNCTION__, __FILE__); + op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::Profiler::setDefaultX(FLAGS_profile_speed); + // // For debugging + // // Print all logging messages + // op::ConfigureLog::setPriorityThreshold(op::Priority::None); + // // Print out speed values faster + // op::Profiler::setDefaultX(100); + + // Applying user defined configuration - GFlags to program variables + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // faceNetInputSize + const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); + // handNetInputSize + const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); + // producerType + const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, + FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, + FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, + (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); + // poseModel + const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // JSON saving + if (!FLAGS_write_keypoint.empty()) + op::log("Flag `write_keypoint` is deprecated and will eventually be removed." + " Please, use `write_json` instead.", op::Priority::Max); + // keypointScale + const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); + // heatmaps to add + const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, + FLAGS_heatmaps_add_PAFs); + const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); + // >1 camera view? 
+ const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); + // Enabling Google Logging + const bool enableGoogleLogging = true; + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // OpenPose wrapper + op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + // op::Wrapper> opWrapper; + op::Wrapper> opWrapper; + + // Initializing the user custom classes + // GUI (Display) + auto wUserOutput = std::make_shared(); + // Add custom processing + const auto workerOutputOnNewThread = true; + opWrapper.setWorker(op::WorkerType::Output, wUserOutput, workerOutputOnNewThread); + + // Pose configuration (use WrapperStructPose{} for default and recommended configuration) + const op::WrapperStructPose wrapperStructPose{ + !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, + FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), + poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, + FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, + (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; + // Face configuration (use op::WrapperStructFace{} to disable it) + const op::WrapperStructFace wrapperStructFace{ + FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), + (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; + // Hand configuration (use op::WrapperStructHand{} to disable it) + const op::WrapperStructHand wrapperStructHand{ + FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, + op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, + (float)FLAGS_hand_alpha_heatmap, 
(float)FLAGS_hand_render_threshold}; + // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) + const op::WrapperStructExtra wrapperStructExtra{ + FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; + // Producer (use default to disable any input) + const op::WrapperStructInput wrapperStructInput{ + producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, FLAGS_process_real_time, FLAGS_frame_flip, + FLAGS_frame_rotate, FLAGS_frames_repeat}; + // Consumer (comment or use default argument to disable any output) + // const op::WrapperStructOutput wrapperStructOutput{op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), + // !FLAGS_no_gui_verbose, FLAGS_fullscreen, FLAGS_write_keypoint, + const auto displayMode = op::DisplayMode::NoDisplay; + const bool guiVerbose = false; + const bool fullScreen = false; + const op::WrapperStructOutput wrapperStructOutput{ + displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, + op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, + FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, + FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, + FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; + // Configure wrapper + opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + wrapperStructInput, wrapperStructOutput); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + + // Start processing + // Two different ways of running the program on multithread environment + op::log("Starting thread(s)...", op::Priority::High); + // Start, run & stop threads - it blocks this thread until all others have finished + opWrapper.exec(); + + // // Option b) Keeping this thread free in case you want to do something else 
meanwhile, e.g. profiling the GPU + // memory + // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main + // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: + // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` + // // Start threads + // opWrapper.start(); + // // Profile used GPU memory + // // 1: wait ~10sec so the memory has been totally loaded on GPU + // // 2: profile the GPU memory + // const auto sleepTimeMs = 10; + // for (auto i = 0 ; i < 10000/sleepTimeMs && opWrapper.isRunning() ; i++) + // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); + // op::Profiler::profileGpuMemory(__LINE__, __FUNCTION__, __FILE__); + // // Keep program alive while running threads + // while (opWrapper.isRunning()) + // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); + // // Stop and join threads + // op::log("Stopping thread(s)", op::Priority::High); + // opWrapper.stop(); + + // Measuring total time + const auto now = std::chrono::high_resolution_clock::now(); + const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() + * 1e-9; + const auto message = "OpenPose demo successfully finished. 
Total time: " + + std::to_string(totalTimeSec) + " seconds."; + op::log(message, op::Priority::High); + + // Return successful message + return 0; + } + catch (const std::exception& e) + { + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return -1; + } +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running openPoseDemo + return openPoseDemo(); +} diff --git a/examples/tutorial_api_cpp/9_synchronous_custom_all.cpp b/examples/tutorial_api_cpp/9_synchronous_custom_all.cpp new file mode 100644 index 00000000..682176cf --- /dev/null +++ b/examples/tutorial_api_cpp/9_synchronous_custom_all.cpp @@ -0,0 +1,362 @@ +// ------------------------- OpenPose Library Tutorial - Wrapper - Example 2 - Synchronous ------------------------- +// Synchronous mode: ideal for performance. The user can add his own frames producer / post-processor / consumer to the OpenPose wrapper or use the +// default ones. + +// This example shows the user how to use the OpenPose wrapper class: + // 1. User reads images + // 2. Extract and render keypoint / heatmap / PAF of that image + // 3. Save the results on disk + // 4. User displays the rendered pose + // Everything in a multi-thread scenario +// In addition to the previous OpenPose modules, we also need to use: + // 1. `core` module: + // For the Array class that the `pose` module needs + // For the Datum struct that the `thread` module sends between the queues + // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively +// This file should only be used for the user to take specific examples. + +// Command-line user interface +#define OPENPOSE_FLAGS_DISABLE_PRODUCER +#define OPENPOSE_FLAGS_DISABLE_DISPLAY +#include +// OpenPose dependencies +#include + +// Custom OpenPose flags +// Producer +DEFINE_string(image_dir, "examples/media/", + "Process a directory of images. 
Read all standard formats (jpg, png, bmp, etc.)."); + +// If the user needs his own variables, he can inherit the op::Datum struct and add them +// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define +// Wrapper instead of Wrapper +struct UserDatum : public op::Datum +{ + bool boolThatUserNeedsForSomeReason; + + UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : + boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} + {} +}; + +// The W-classes can be implemented either as a template or as simple classes given +// that the user usually knows which kind of data he will move between the queues, +// in this case we assume a std::shared_ptr of a std::vector of UserDatum + +// This worker will just read and return all the jpg files in a directory +class WUserInput : public op::WorkerProducer>> +{ +public: + WUserInput(const std::string& directoryPath) : + mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")}, + // If we want "jpg" + "png" images + // mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector{"jpg", "png"})}, + mCounter{0} + { + if (mImageFiles.empty()) + op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__); + } + + void initializationOnThread() {} + + std::shared_ptr> workProducer() + { + try + { + // Close program when empty frame + if (mImageFiles.size() <= mCounter) + { + op::log("Last frame read and added to queue. 
Closing program after it is processed.", + op::Priority::High); + // This function stops this worker, which will eventually stop the whole thread system once all the + // frames have been processed + this->stop(); + return nullptr; + } + else + { + // Create new datum + auto datumsPtr = std::make_shared>(); + datumsPtr->emplace_back(); + auto& datum = datumsPtr->at(0); + + // Fill datum + datum.cvInputData = cv::imread(mImageFiles.at(mCounter++)); + + // If empty frame -> return nullptr + if (datum.cvInputData.empty()) + { + op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", + op::Priority::High); + this->stop(); + datumsPtr = nullptr; + } + + return datumsPtr; + } + } + catch (const std::exception& e) + { + this->stop(); + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return nullptr; + } + } + +private: + const std::vector mImageFiles; + unsigned long long mCounter; +}; + +// This worker will just invert the image +class WUserPostProcessing : public op::Worker>> +{ +public: + WUserPostProcessing() + { + // User's constructor here + } + + void initializationOnThread() {} + + void work(std::shared_ptr>& datumsPtr) + { + // User's post-processing (after OpenPose processing & before OpenPose outputs) here + // datum.cvOutputData: rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + try + { + if (datumsPtr != nullptr && !datumsPtr->empty()) + for (auto& datum : *datumsPtr) + cv::bitwise_not(datum.cvOutputData, datum.cvOutputData); + } + catch (const std::exception& e) + { + this->stop(); + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + } + } +}; + +// This worker will just read and return all the jpg files in a directory +class WUserOutput : public op::WorkerConsumer>> +{ +public: + void initializationOnThread() {} + + void workConsumer(const std::shared_ptr>& datumsPtr) + { + try + { + // User's displaying/saving/other processing here + // datum.cvOutputData: 
rendered frame with pose or heatmaps + // datum.poseKeypoints: Array with the estimated pose + if (datumsPtr != nullptr && !datumsPtr->empty()) + { + // Show in command line the resulting pose keypoints for body, face and hands + op::log("\nKeypoints:"); + // Accessing each element of the keypoints + const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; + op::log("Person pose keypoints:"); + for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++) + { + op::log("Person " + std::to_string(person) + " (x, y, score):"); + for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) + { + std::string valueToPrint; + for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) + { + valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; + } + op::log(valueToPrint); + } + } + op::log(" "); + // Alternative: just getting std::string equivalent + op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); + op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); + op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); + // Heatmaps + const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; + if (!poseHeatMaps.empty()) + { + op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " + + std::to_string(poseHeatMaps.getSize(1)) + ", " + + std::to_string(poseHeatMaps.getSize(2)) + "]"); + const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; + op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " + + std::to_string(faceHeatMaps.getSize(1)) + ", " + + std::to_string(faceHeatMaps.getSize(2)) + ", " + + std::to_string(faceHeatMaps.getSize(3)) + "]"); + const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; + op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " + + std::to_string(handHeatMaps[0].getSize(1)) + ", " + + 
std::to_string(handHeatMaps[0].getSize(2)) + ", " + + std::to_string(handHeatMaps[0].getSize(3)) + "]"); + op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " + + std::to_string(handHeatMaps[1].getSize(1)) + ", " + + std::to_string(handHeatMaps[1].getSize(2)) + ", " + + std::to_string(handHeatMaps[1].getSize(3)) + "]"); + } + + // Display rendered output image + cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); + // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) + const char key = (char)cv::waitKey(1); + if (key == 27) + this->stop(); + } + } + catch (const std::exception& e) + { + this->stop(); + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + } + } +}; + +int openPoseTutorialWrapper2() +{ + try + { + op::log("Starting OpenPose demo...", op::Priority::High); + const auto timerBegin = std::chrono::high_resolution_clock::now(); + + // logging_level + op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", + __LINE__, __FUNCTION__, __FILE__); + op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::Profiler::setDefaultX(FLAGS_profile_speed); + + // Applying user defined configuration - GFlags to program variables + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // faceNetInputSize + const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); + // handNetInputSize + const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); + // poseModel + const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // JSON saving + if (!FLAGS_write_keypoint.empty()) + op::log("Flag `write_keypoint` is deprecated and will eventually be removed." 
+ " Please, use `write_json` instead.", op::Priority::Max); + // keypointScale + const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); + // heatmaps to add + const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, + FLAGS_heatmaps_add_PAFs); + const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); + // >1 camera view? + const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1); + // Enabling Google Logging + const bool enableGoogleLogging = true; + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // Initializing the user custom classes + // Frames producer (e.g. video, webcam, ...) + auto wUserInput = std::make_shared(FLAGS_image_dir); + // Processing + auto wUserPostProcessing = std::make_shared(); + // GUI (Display) + auto wUserOutput = std::make_shared(); + + op::Wrapper> opWrapper; + // Add custom input + const auto workerInputOnNewThread = false; + opWrapper.setWorker(op::WorkerType::Input, wUserInput, workerInputOnNewThread); + // Add custom processing + const auto workerProcessingOnNewThread = false; + opWrapper.setWorker(op::WorkerType::PostProcessing, wUserPostProcessing, workerProcessingOnNewThread); + // Add custom output + const auto workerOutputOnNewThread = true; + opWrapper.setWorker(op::WorkerType::Output, wUserOutput, workerOutputOnNewThread); + // Configure OpenPose + op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + const op::WrapperStructPose wrapperStructPose{ + !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, + FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), + poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, + FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, + (float)FLAGS_render_threshold, 
FLAGS_number_people_max, enableGoogleLogging}; + // Face configuration (use op::WrapperStructFace{} to disable it) + const op::WrapperStructFace wrapperStructFace{ + FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), + (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; + // Hand configuration (use op::WrapperStructHand{} to disable it) + const op::WrapperStructHand wrapperStructHand{ + FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, + op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, + (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; + // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) + const op::WrapperStructExtra wrapperStructExtra{ + FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; + // Consumer (comment or use default argument to disable any output) + const auto displayMode = op::DisplayMode::NoDisplay; + const bool guiVerbose = false; + const bool fullScreen = false; + const op::WrapperStructOutput wrapperStructOutput{ + displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, + op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, + FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, + FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, + FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; + // Configure wrapper + opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + op::WrapperStructInput{}, wrapperStructOutput); + // Set to single-thread (for sequential processing and/or debugging and/or reducing latency) + if (FLAGS_disable_multi_thread) + opWrapper.disableMultiThreading(); + + 
op::log("Starting thread(s)...", op::Priority::High); + // Two different ways of running the program on multithread environment + // Start, run & stop threads - it blocks this thread until all others have finished + opWrapper.exec(); + + // Option b) Keeping this thread free in case you want to do something else meanwhile, e.g. profiling the GPU memory + // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main + // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: + // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` + // // Start threads + // opWrapper.start(); + // // Profile used GPU memory + // // 1: wait ~10sec so the memory has been totally loaded on GPU + // // 2: profile the GPU memory + // std::this_thread::sleep_for(std::chrono::milliseconds{1000}); + // op::log("Random task here...", op::Priority::High); + // // Keep program alive while running threads + // while (opWrapper.isRunning()) + // std::this_thread::sleep_for(std::chrono::milliseconds{33}); + // // Stop and join threads + // op::log("Stopping thread(s)", op::Priority::High); + // opWrapper.stop(); + + // Measuring total time + const auto now = std::chrono::high_resolution_clock::now(); + const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() + * 1e-9; + const auto message = "OpenPose demo successfully finished. 
Total time: " + + std::to_string(totalTimeSec) + " seconds."; + op::log(message, op::Priority::High); + + // Return successful message + return 0; + } + catch (const std::exception& e) + { + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return -1; + } +} + +int main(int argc, char *argv[]) +{ + // Parsing command line flags + gflags::ParseCommandLineFlags(&argc, &argv, true); + + // Running openPoseTutorialWrapper2 + return openPoseTutorialWrapper2(); +} diff --git a/examples/tutorial_wrapper/CMakeLists.txt b/examples/tutorial_api_cpp/CMakeLists.txt similarity index 74% rename from examples/tutorial_wrapper/CMakeLists.txt rename to examples/tutorial_api_cpp/CMakeLists.txt index db1095b1..8d9dc191 100644 --- a/examples/tutorial_wrapper/CMakeLists.txt +++ b/examples/tutorial_api_cpp/CMakeLists.txt @@ -1,34 +1,37 @@ -set(EXAMPLE_FILES - 1_user_synchronous_postprocessing.cpp - 2_user_synchronous_input.cpp - 3_user_synchronous_output.cpp - 4_user_synchronous_all.cpp - 5_user_asynchronous.cpp - 6_user_asynchronous_output.cpp) - -include(${CMAKE_SOURCE_DIR}/cmake/Utils.cmake) - -foreach(EXAMPLE_FILE ${EXAMPLE_FILES}) - - get_filename_component(SOURCE_NAME ${EXAMPLE_FILE} NAME_WE) - - if (UNIX OR APPLE) - set(EXE_NAME "${SOURCE_NAME}.bin") - elseif (WIN32) - set(EXE_NAME "${SOURCE_NAME}") - endif () - - message(STATUS "Adding Example ${EXE_NAME}") - add_executable(${EXE_NAME} ${EXAMPLE_FILE}) - target_link_libraries(${EXE_NAME} openpose ${examples_3rdparty_libraries}) - - if (WIN32) - set_property(TARGET ${EXE_NAME} PROPERTY FOLDER "Examples/Tutorial/Wrapper") - configure_file(${CMAKE_SOURCE_DIR}/cmake/OpenPose${VCXPROJ_FILE_GPU_MODE}.vcxproj.user - ${CMAKE_CURRENT_BINARY_DIR}/${EXE_NAME}.vcxproj.user @ONLY) - # Properties->General->Output Directory - set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_RELEASE ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) - set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_DEBUG 
${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) - endif (WIN32) - -endforeach() +set(EXAMPLE_FILES + 1_body_from_image.cpp + 2_whole_body_from_image.cpp + 3_keypoints_from_image_configurable.cpp + 4_asynchronous_loop_custom_input_and_output.cpp + 5_asynchronous_loop_custom_output.cpp + 6_synchronous_custom_postprocessing.cpp + 7_synchronous_custom_input.cpp + 8_synchronous_custom_output.cpp + 9_synchronous_custom_all.cpp) + +include(${CMAKE_SOURCE_DIR}/cmake/Utils.cmake) + +foreach(EXAMPLE_FILE ${EXAMPLE_FILES}) + + get_filename_component(SOURCE_NAME ${EXAMPLE_FILE} NAME_WE) + + if (UNIX OR APPLE) + set(EXE_NAME "${SOURCE_NAME}.bin") + elseif (WIN32) + set(EXE_NAME "${SOURCE_NAME}") + endif () + + message(STATUS "Adding Example ${EXE_NAME}") + add_executable(${EXE_NAME} ${EXAMPLE_FILE}) + target_link_libraries(${EXE_NAME} openpose ${examples_3rdparty_libraries}) + + if (WIN32) + set_property(TARGET ${EXE_NAME} PROPERTY FOLDER "Examples/Tutorial/C++ API") + configure_file(${CMAKE_SOURCE_DIR}/cmake/OpenPose${VCXPROJ_FILE_GPU_MODE}.vcxproj.user + ${CMAKE_CURRENT_BINARY_DIR}/${EXE_NAME}.vcxproj.user @ONLY) + # Properties->General->Output Directory + set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_RELEASE ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) + set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_DEBUG ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) + endif (WIN32) + +endforeach() diff --git a/examples/tutorial_api_cpp/README.md b/examples/tutorial_api_cpp/README.md new file mode 100644 index 00000000..88a12669 --- /dev/null +++ b/examples/tutorial_api_cpp/README.md @@ -0,0 +1,2 @@ +# C++ API Examples +This folder provides examples to the basic OpenPose C++ API. The analogous Python API is exposed in [examples/tutorial_api_python/](../tutorial_api_python/). 
diff --git a/examples/tutorial_python/1_extract_pose.py b/examples/tutorial_api_python/1_extract_pose.py similarity index 100% rename from examples/tutorial_python/1_extract_pose.py rename to examples/tutorial_api_python/1_extract_pose.py diff --git a/examples/tutorial_python/CMakeLists.txt b/examples/tutorial_api_python/CMakeLists.txt similarity index 52% rename from examples/tutorial_python/CMakeLists.txt rename to examples/tutorial_api_python/CMakeLists.txt index 834e2bb1..437814b1 100644 --- a/examples/tutorial_python/CMakeLists.txt +++ b/examples/tutorial_api_python/CMakeLists.txt @@ -1,3 +1,2 @@ ### Add Python Test configure_file(1_extract_pose.py 1_extract_pose.py) -configure_file(2_pose_from_heatmaps.py 2_pose_from_heatmaps.py) diff --git a/examples/tutorial_api_python/README.md b/examples/tutorial_api_python/README.md new file mode 100644 index 00000000..339caee1 --- /dev/null +++ b/examples/tutorial_api_python/README.md @@ -0,0 +1,2 @@ +# Python API Examples +This folder provides examples to the basic OpenPose Python API. The analogous C++ API is exposed in [examples/tutorial_api_cpp/](../tutorial_api_cpp/). 
diff --git a/examples/tutorial_thread/CMakeLists.txt b/examples/tutorial_developer/CMakeLists.txt similarity index 70% rename from examples/tutorial_thread/CMakeLists.txt rename to examples/tutorial_developer/CMakeLists.txt index f905383f..34410920 100644 --- a/examples/tutorial_thread/CMakeLists.txt +++ b/examples/tutorial_developer/CMakeLists.txt @@ -1,30 +1,35 @@ -set(EXAMPLE_FILES - 1_openpose_read_and_display.cpp - 2_user_processing_function.cpp - 3_user_input_processing_and_output.cpp - 4_user_input_processing_output_and_datum.cpp) - -foreach(EXAMPLE_FILE ${EXAMPLE_FILES}) - - get_filename_component(SOURCE_NAME ${EXAMPLE_FILE} NAME_WE) - - if (UNIX OR APPLE) - set(EXE_NAME "${SOURCE_NAME}.bin") - elseif (WIN32) - set(EXE_NAME "${SOURCE_NAME}") - endif () - - message(STATUS "Adding Example ${EXE_NAME}") - add_executable(${EXE_NAME} ${EXAMPLE_FILE}) - target_link_libraries(${EXE_NAME} openpose ${examples_3rdparty_libraries}) - - if (WIN32) - set_property(TARGET ${EXE_NAME} PROPERTY FOLDER "Examples/Tutorial/Thread") - configure_file(${CMAKE_SOURCE_DIR}/cmake/OpenPose${VCXPROJ_FILE_GPU_MODE}.vcxproj.user - ${CMAKE_CURRENT_BINARY_DIR}/${EXE_NAME}.vcxproj.user @ONLY) - # Properties->General->Output Directory - set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_RELEASE ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) - set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_DEBUG ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) - endif (WIN32) - -endforeach() +set(EXAMPLE_FILES + pose_1_extract_from_image.cpp + pose_2_extract_pose_or_heatmat_from_image.cpp + thread_1_openpose_read_and_display.cpp + thread_2_user_processing_function.cpp + thread_3_user_input_processing_and_output.cpp + thread_4_user_input_processing_output_and_datum.cpp) + +foreach(EXAMPLE_FILE ${EXAMPLE_FILES}) + + get_filename_component(SOURCE_NAME ${EXAMPLE_FILE} NAME_WE) + + if (UNIX OR APPLE) + set(EXE_NAME "${SOURCE_NAME}.bin") + elseif (WIN32) + set(EXE_NAME 
"${SOURCE_NAME}") + endif () + + message(STATUS "Adding Example ${EXE_NAME}") + add_executable(${EXE_NAME} ${EXAMPLE_FILE}) + target_link_libraries(${EXE_NAME} openpose ${examples_3rdparty_libraries}) + + if (WIN32) + set_property(TARGET ${EXE_NAME} PROPERTY FOLDER "Examples/Tutorial/Developer Examples") + configure_file(${CMAKE_SOURCE_DIR}/cmake/OpenPose${VCXPROJ_FILE_GPU_MODE}.vcxproj.user + ${CMAKE_CURRENT_BINARY_DIR}/${EXE_NAME}.vcxproj.user @ONLY) + # Properties->General->Output Directory + set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_RELEASE ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) + set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_DEBUG ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) + endif (WIN32) + +endforeach() + +### Add Python files +configure_file(python_1_pose_from_heatmaps.py python_1_pose_from_heatmaps.py) diff --git a/examples/tutorial_developer/README.md b/examples/tutorial_developer/README.md new file mode 100644 index 00000000..4d6dab25 --- /dev/null +++ b/examples/tutorial_developer/README.md @@ -0,0 +1,4 @@ +# Developer Examples +**Disclaimer**: This folder is meant for internal OpenPose developers. These examples might change significantly, and we will not answer questions about them nor provide official support for them. + +**If the OpenPose library fails to compile due to an error caused by a file in this folder, please notify us**. 
diff --git a/examples/tutorial_pose/1_extract_from_image.cpp b/examples/tutorial_developer/pose_1_extract_from_image.cpp similarity index 99% rename from examples/tutorial_pose/1_extract_from_image.cpp rename to examples/tutorial_developer/pose_1_extract_from_image.cpp index f49364e5..1a93f030 100644 --- a/examples/tutorial_pose/1_extract_from_image.cpp +++ b/examples/tutorial_developer/pose_1_extract_from_image.cpp @@ -74,7 +74,7 @@ int openPoseTutorialPose1() __LINE__, __FUNCTION__, __FILE__); op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // Step 2 - Read Google flags (user defined configuration) + // Step 2 - Read GFlags (user defined configuration) // outputSize const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); // netInputSize diff --git a/examples/tutorial_pose/2_extract_pose_or_heatmat_from_image.cpp b/examples/tutorial_developer/pose_2_extract_pose_or_heatmat_from_image.cpp similarity index 99% rename from examples/tutorial_pose/2_extract_pose_or_heatmat_from_image.cpp rename to examples/tutorial_developer/pose_2_extract_pose_or_heatmat_from_image.cpp index 7b750e44..0f87b746 100644 --- a/examples/tutorial_pose/2_extract_pose_or_heatmat_from_image.cpp +++ b/examples/tutorial_developer/pose_2_extract_pose_or_heatmat_from_image.cpp @@ -79,7 +79,7 @@ int openPoseTutorialPose2() __LINE__, __FUNCTION__, __FILE__); op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // Step 2 - Read Google flags (user defined configuration) + // Step 2 - Read GFlags (user defined configuration) // outputSize const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); // netInputSize diff --git a/examples/tutorial_python/2_pose_from_heatmaps.py b/examples/tutorial_developer/python_1_pose_from_heatmaps.py similarity index 99% rename from 
examples/tutorial_python/2_pose_from_heatmaps.py rename to examples/tutorial_developer/python_1_pose_from_heatmaps.py index 066792a1..c822938a 100644 --- a/examples/tutorial_python/2_pose_from_heatmaps.py +++ b/examples/tutorial_developer/python_1_pose_from_heatmaps.py @@ -6,7 +6,7 @@ except ImportError: print("This sample can only be run if Python Caffe if available on your system") print("Currently OpenPose does not compile Python Caffe. This may be supported in the future") sys.exit(-1) - + import os os.environ["GLOG_minloglevel"] = "1" import caffe diff --git a/examples/tutorial_thread/1_openpose_read_and_display.cpp b/examples/tutorial_developer/thread_1_openpose_read_and_display.cpp similarity index 98% rename from examples/tutorial_thread/1_openpose_read_and_display.cpp rename to examples/tutorial_developer/thread_1_openpose_read_and_display.cpp index e204ec7b..8a6fc486 100644 --- a/examples/tutorial_thread/1_openpose_read_and_display.cpp +++ b/examples/tutorial_developer/thread_1_openpose_read_and_display.cpp @@ -2,7 +2,7 @@ // This third example shows the user how to: // 1. Read folder of images / video / webcam (`producer` module) // 2. Display the rendered pose (`gui` module) - // Everything in a multi-thread scenario (`thread` module) + // Everything in a multi-thread scenario (`thread` module) // In addition to the previous OpenPose modules, we also need to use: // 1. `core` module: for the Datum struct that the `thread` module sends between the queues // 2. `utilities` module: for the error & logging functions, i.e. 
op::error & op::log respectively @@ -75,7 +75,7 @@ int openPoseTutorialThread1() op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", __LINE__, __FUNCTION__, __FILE__); op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - // Step 2 - Read Google flags (user defined configuration) + // Step 2 - Read GFlags (user defined configuration) // outputSize const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); // producerType diff --git a/examples/tutorial_thread/2_user_processing_function.cpp b/examples/tutorial_developer/thread_2_user_processing_function.cpp similarity index 99% rename from examples/tutorial_thread/2_user_processing_function.cpp rename to examples/tutorial_developer/thread_2_user_processing_function.cpp index 4f3841e4..2d7a05e1 100644 --- a/examples/tutorial_thread/2_user_processing_function.cpp +++ b/examples/tutorial_developer/thread_2_user_processing_function.cpp @@ -3,7 +3,7 @@ // 1. Read folder of images / video / webcam (`producer` module) // 2. Use the processing implemented by the user // 3. Display the rendered pose (`gui` module) - // Everything in a multi-thread scenario (`thread` module) + // Everything in a multi-thread scenario (`thread` module) // In addition to the previous OpenPose modules, we also need to use: // 1. `core` module: for the Datum struct that the `thread` module sends between the queues // 2. `utilities` module: for the error & logging functions, i.e. 
op::error & op::log respectively @@ -109,7 +109,7 @@ int openPoseTutorialThread2() op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", __LINE__, __FUNCTION__, __FILE__); op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - // Step 2 - Read Google flags (user defined configuration) + // Step 2 - Read GFlags (user defined configuration) // outputSize const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); // producerType diff --git a/examples/tutorial_thread/3_user_input_processing_and_output.cpp b/examples/tutorial_developer/thread_3_user_input_processing_and_output.cpp similarity index 99% rename from examples/tutorial_thread/3_user_input_processing_and_output.cpp rename to examples/tutorial_developer/thread_3_user_input_processing_and_output.cpp index 966c0387..d5768987 100644 --- a/examples/tutorial_thread/3_user_input_processing_and_output.cpp +++ b/examples/tutorial_developer/thread_3_user_input_processing_and_output.cpp @@ -3,7 +3,7 @@ // 1. Read folder of images / video / webcam (`producer` module) // 2. Use the processing implemented by the user // 3. Display the rendered pose (`gui` module) - // Everything in a multi-thread scenario (`thread` module) + // Everything in a multi-thread scenario (`thread` module) // In addition to the previous OpenPose modules, we also need to use: // 1. `core` module: for the Datum struct that the `thread` module sends between the queues // 2. `utilities` module: for the error & logging functions, i.e. 
op::error & op::log respectively diff --git a/examples/tutorial_thread/4_user_input_processing_output_and_datum.cpp b/examples/tutorial_developer/thread_4_user_input_processing_output_and_datum.cpp similarity index 99% rename from examples/tutorial_thread/4_user_input_processing_output_and_datum.cpp rename to examples/tutorial_developer/thread_4_user_input_processing_output_and_datum.cpp index 40328220..ec51cf29 100644 --- a/examples/tutorial_thread/4_user_input_processing_output_and_datum.cpp +++ b/examples/tutorial_developer/thread_4_user_input_processing_output_and_datum.cpp @@ -3,7 +3,7 @@ // 1. Read folder of images / video / webcam (`producer` module) // 2. Use the processing implemented by the user // 3. Display the rendered pose (`gui` module) - // Everything in a multi-thread scenario (`thread` module) + // Everything in a multi-thread scenario (`thread` module) // In addition to the previous OpenPose modules, we also need to use: // 1. `core` module: for the Datum struct that the `thread` module sends between the queues // 2. `utilities` module: for the error & logging functions, i.e. 
op::error & op::log respectively diff --git a/examples/tutorial_pose/CMakeLists.txt b/examples/tutorial_pose/CMakeLists.txt deleted file mode 100644 index b78e83b3..00000000 --- a/examples/tutorial_pose/CMakeLists.txt +++ /dev/null @@ -1,28 +0,0 @@ -set(EXAMPLE_FILES - 1_extract_from_image.cpp - 2_extract_pose_or_heatmat_from_image.cpp) - -foreach(EXAMPLE_FILE ${EXAMPLE_FILES}) - - get_filename_component(SOURCE_NAME ${EXAMPLE_FILE} NAME_WE) - - if (UNIX OR APPLE) - set(EXE_NAME "${SOURCE_NAME}.bin") - elseif (WIN32) - set(EXE_NAME "${SOURCE_NAME}") - endif () - - message(STATUS "Adding Example ${EXE_NAME}") - add_executable(${EXE_NAME} ${EXAMPLE_FILE}) - target_link_libraries(${EXE_NAME} openpose ${examples_3rdparty_libraries}) - - if (WIN32) - set_property(TARGET ${EXE_NAME} PROPERTY FOLDER "Examples/Tutorial/Pose") - configure_file(${CMAKE_SOURCE_DIR}/cmake/OpenPose${VCXPROJ_FILE_GPU_MODE}.vcxproj.user - ${CMAKE_CURRENT_BINARY_DIR}/${EXE_NAME}.vcxproj.user @ONLY) - # Properties->General->Output Directory - set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_RELEASE ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) - set_property(TARGET ${EXE_NAME} PROPERTY RUNTIME_OUTPUT_DIRECTORY_DEBUG ${PROJECT_BINARY_DIR}/$(Platform)/$(Configuration)) - endif (WIN32) - -endforeach() diff --git a/examples/tutorial_wrapper/2_user_synchronous_input.cpp b/examples/tutorial_wrapper/2_user_synchronous_input.cpp deleted file mode 100644 index d58760cf..00000000 --- a/examples/tutorial_wrapper/2_user_synchronous_input.cpp +++ /dev/null @@ -1,477 +0,0 @@ -// ------------------------- OpenPose Library Tutorial - Real Time Pose Estimation ------------------------- -// If the user wants to learn to use the OpenPose library, we highly recommend to start with the `examples/tutorial_*/` -// folders. -// This example summarizes all the funcitonality of the OpenPose library: - // 1. Read folder of images / video / webcam (`producer` module) - // 2. 
Extract and render body keypoint / heatmap / PAF of that image (`pose` module) - // 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module) - // 4. Save the results on disk (`filestream` module) - // 5. Display the rendered pose (`gui` module) - // Everything in a multi-thread scenario (`thread` module) - // Points 2 to 5 are included in the `wrapper` module -// In addition to the previous OpenPose modules, we also need to use: - // 1. `core` module: - // For the Array class that the `pose` module needs - // For the Datum struct that the `thread` module sends between the queues - // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively -// This file should only be used for the user to take specific examples. - -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif -// OpenPose dependencies -#include - -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); -DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" - " for 1) Cases where it is needed a low latency (e.g. 
webcam in real-time scenarios with" - " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" - " error."); -DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" - " runtime statistics at this frame number."); -// Producer -DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" - " number (by default), to auto-detect and open the first available camera."); -DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" - " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" - " `--flir_camera`"); -DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). Set this value to the minimum" - " value between the OpenPose displayed speed and the webcam real frame rate."); -DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" - " example video."); -DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" - " images. Read all standard formats (jpg, png, bmp, etc.)."); -DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera."); -DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" - " camera index to run, where 0 corresponds to the detected flir camera with the lowest" - " serial number, and `n` to the `n`-th lowest serial number camera."); -DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP."); -DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."); -DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. 
Indexes are 0-based, e.g. if set to" - " 10, it will process 11 frames (0-10)."); -DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g. for real time webcam demonstrations)."); -DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270."); -DEFINE_bool(frames_repeat, false, "Repeat frames when finished."); -DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g. for video). If the processing time is" - " too long, it will skip frames. If it is too fast, it will slow it down."); -DEFINE_string(camera_parameter_folder, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located."); -DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the" - " `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.," - " it will leave it as it is."); -// OpenPose -DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."); -DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" - " input image resolution."); -DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" - " machine."); -DEFINE_int32(num_gpu_start, 0, "GPU device start number."); -DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)" - " coordinates that will be saved with the `write_json` & `write_keypoint` flags." 
- " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" - " size (set with `net_resolution`); `2` to scale it to the final output size (set with" - " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" - " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" - " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" - " related with `scale_number` and `scale_gap`."); -DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" - " top scores. The score is based in person area over the image, body part score, as well as" - " joint score (between each pair of connected body parts). Useful if you know the exact" - " number of people in the scene, so it can remove false positives (if all the people have" - " been detected. However, it might also include false negatives by removing very small or" - " highly occluded people. -1 will keep them all."); -// OpenPose Body Pose -DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face" - " keypoint detection."); -DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), " - "`MPI_4_layers` (15 keypoints, even faster but less accurate)."); -DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" - " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" - " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" - " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" - " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," - " e.g. 
full HD (1980x1080) and HD (1280x720) resolutions."); -DEFINE_int32(scale_number, 1, "Number of scales to average."); -DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." - " If you want to change the initial scale, you actually want to multiply the" - " `net_resolution` by your desired initial scale."); -// OpenPose Body Pose Heatmaps and Part Candidates -DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" - " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." - " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" - " memory order: body parts + bkg + PAFs. It will follow the order on" - " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" - " considerably decrease. Not required for OpenPose, enable it only if you intend to" - " explicitly use this information later."); -DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" - " background."); -DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."); -DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" - " rounded [0,255]; and 3 for no scaling."); -DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" - " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" - " the detected body parts, before being assembled into people. Note that the number of" - " candidates is equal or higher than the number of final body parts (i.e. after being" - " assembled into people). The empty body parts are filled with 0s. Program speed will" - " slightly decrease. 
Not required for OpenPose, enable it only if you intend to explicitly" - " use this information."); -// OpenPose Face -DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Note that this will considerable slow down the performance and increse" - " the required GPU memory. In addition, the greater number of people on the image, the" - " slower OpenPose will be."); -DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" - " detector. 320x320 usually works fine while giving a substantial speed up when multiple" - " faces on the image."); -// OpenPose Hand -DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" - " the required GPU memory and its speed depends on the number of people."); -DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" - " detector."); -DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" - " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."); -DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" - " between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if" - " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."); -DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate" - " is high enough, i.e. >7 FPS per GPU) and video. 
This is not person ID tracking, it" - " simply looks for hands in positions at which hands were located in previous frames, but" - " it does not guarantee the same person ID among frames."); -// OpenPose 3-D Reconstruction -DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." - " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" - " results. Note that it will only display 1 person. If multiple people is present, it will" - " fail."); -DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" - " require all the cameras to see the keypoint in order to reconstruct it."); -DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per" - " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" - " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the" - " parameter folder as this number indicates."); -// Extra algorithms -DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."); -DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" - " value indicates the number of frames where tracking is run between each OpenPose keypoint" - " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" - " detector and tracking for potentially higher accurary than only OpenPose."); -DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" - " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. 
Increasing" - " the number of threads will increase the speed but also the global system latency."); -// OpenPose Rendering -DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body" - " part heat map, 19 for the background heat map, 20 for all the body part heat maps" - " together, 21 for all the PAFs, 22-40 for each body part pair PAF."); -DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" - " background, instead of being rendered into the original image. Related: `part_to_show`," - " `alpha_pose`, and `alpha_pose`."); -// OpenPose Rendering Pose -DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" - " rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;" - " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also" - " more false positives (i.e. wrong detections)."); -DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" - " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if" - " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" - " both `outputData` and `cvOutputData` with the original image and desired body part to be" - " shown (i.e. keypoints, heat maps or PAFs)."); -DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" - " hide it. Only valid for GPU rendering."); -DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" - " heatmap, 0 will only show the frame. 
Only valid for GPU rendering."); -// OpenPose Rendering Face -DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."); -DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."); -DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."); -// OpenPose Rendering Hand -DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."); -DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); -DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); -// Display -DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle)."); -DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g. number of current frame and people). It" - " does not affect the pose rendering."); -DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" - " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" - " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."); -// Result Saving -DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); -DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. 
Check the OpenCV" - " function cv::imwrite for all compatible extensions."); -DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" - " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" - " `camera_fps` controls FPS."); -DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose" - " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."); -DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format."); -DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format."); -DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" - " must be enabled."); -DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." - " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" - " floating values."); -DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" - " with `write_keypoint_format`."); -DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," - " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."); -// Result Saving - Extra Algorithms -DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`" - " controls FPS."); -DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."); -// UDP communication -DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. 
E.g., `192.168.0.1`."); -DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication."); - - -// If the user needs his own variables, he can inherit the op::Datum struct and add them -// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define -// Wrapper instead of Wrapper -struct UserDatum : public op::Datum -{ - bool boolThatUserNeedsForSomeReason; - - UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : - boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} - {} -}; - -// The W-classes can be implemented either as a template or as simple classes given -// that the user usually knows which kind of data he will move between the queues, -// in this case we assume a std::shared_ptr of a std::vector of UserDatum - -// This worker will just read and return all the jpg files in a directory -class WUserInput : public op::WorkerProducer>> -{ -public: - WUserInput(const std::string& directoryPath) : - mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")}, - // If we want "jpg" + "png" images - // mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector{"jpg", "png"})}, - mCounter{0} - { - if (mImageFiles.empty()) - op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__); - } - - void initializationOnThread() {} - - std::shared_ptr> workProducer() - { - try - { - // Close program when empty frame - if (mImageFiles.size() <= mCounter) - { - op::log("Last frame read and added to queue. 
Closing program after it is processed.", - op::Priority::High); - // This funtion stops this worker, which will eventually stop the whole thread system once all the - // frames have been processed - this->stop(); - return nullptr; - } - else - { - // Create new datum - auto datumsPtr = std::make_shared>(); - datumsPtr->emplace_back(); - auto& datum = datumsPtr->at(0); - - // Fill datum - datum.cvInputData = cv::imread(mImageFiles.at(mCounter++)); - - // If empty frame -> return nullptr - if (datum.cvInputData.empty()) - { - op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", - op::Priority::High); - this->stop(); - datumsPtr = nullptr; - } - - return datumsPtr; - } - } - catch (const std::exception& e) - { - this->stop(); - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return nullptr; - } - } - -private: - const std::vector mImageFiles; - unsigned long long mCounter; -}; - -int openPoseDemo() -{ - try - { - op::log("Starting OpenPose demo...", op::Priority::High); - const auto timerBegin = std::chrono::high_resolution_clock::now(); - - // logging_level - op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", - __LINE__, __FUNCTION__, __FILE__); - op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - op::Profiler::setDefaultX(FLAGS_profile_speed); - // // For debugging - // // Print all logging messages - // op::ConfigureLog::setPriorityThreshold(op::Priority::None); - // // Print out speed values faster - // op::Profiler::setDefaultX(100); - - // Applying user defined configuration - Google flags to program variables - // outputSize - const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); - // netInputSize - const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); - // faceNetInputSize - const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); - // handNetInputSize 
- const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); - // producerType - const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, - FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, - FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, - (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); - // poseModel - const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); - // JSON saving - if (!FLAGS_write_keypoint.empty()) - op::log("Flag `write_keypoint` is deprecated and will eventually be removed." - " Please, use `write_json` instead.", op::Priority::Max); - // keypointScale - const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); - // heatmaps to add - const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, - FLAGS_heatmaps_add_PAFs); - const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); - // >1 camera view? - const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); - // Enabling Google Logging - const bool enableGoogleLogging = true; - // Logging - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // OpenPose wrapper - op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // op::Wrapper> opWrapper; - op::Wrapper> opWrapper; - - // Initializing the user custom classes - // Frames producer (e.g. video, webcam, ...) 
- auto wUserInput = std::make_shared(FLAGS_image_dir); - // Add custom processing - const auto workerInputOnNewThread = true; - opWrapper.setWorkerInput(wUserInput, workerInputOnNewThread); - - // Pose configuration (use WrapperStructPose{} for default and recommended configuration) - const op::WrapperStructPose wrapperStructPose{ - !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, - FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), - poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, - FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, - (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; - // Face configuration (use op::WrapperStructFace{} to disable it) - const op::WrapperStructFace wrapperStructFace{ - FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), - (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; - // Hand configuration (use op::WrapperStructHand{} to disable it) - const op::WrapperStructHand wrapperStructHand{ - FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, - op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, - (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; - // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) - const op::WrapperStructExtra wrapperStructExtra{ - FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; - // Producer (use default to disable any input) - // const op::WrapperStructInput wrapperStructInput{producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, - // FLAGS_process_real_time, FLAGS_frame_flip, FLAGS_frame_rotate, - // 
FLAGS_frames_repeat}; - const op::WrapperStructInput wrapperStructInput; - // Consumer (comment or use default argument to disable any output) - const op::WrapperStructOutput wrapperStructOutput{ - op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen, - FLAGS_write_keypoint, op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, - FLAGS_write_coco_json, FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, - FLAGS_write_video, FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, - FLAGS_write_video_adam, FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; - // Configure wrapper - - opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, - wrapperStructInput, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) - if (FLAGS_disable_multi_thread) - opWrapper.disableMultiThreading(); - - // Start processing - // Two different ways of running the program on multithread environment - op::log("Starting thread(s)...", op::Priority::High); - // Start, run & stop threads - it blocks this thread until all others have finished - opWrapper.exec(); - - // // Option b) Keeping this thread free in case you want to do something else meanwhile, e.g. profiling the GPU - // memory - // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. 
Qt needs the main - // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: - // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` - // // Start threads - // opWrapper.start(); - // // Profile used GPU memory - // // 1: wait ~10sec so the memory has been totally loaded on GPU - // // 2: profile the GPU memory - // const auto sleepTimeMs = 10; - // for (auto i = 0 ; i < 10000/sleepTimeMs && opWrapper.isRunning() ; i++) - // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); - // op::Profiler::profileGpuMemory(__LINE__, __FUNCTION__, __FILE__); - // // Keep program alive while running threads - // while (opWrapper.isRunning()) - // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); - // // Stop and join threads - // op::log("Stopping thread(s)", op::Priority::High); - // opWrapper.stop(); - - // Measuring total time - const auto now = std::chrono::high_resolution_clock::now(); - const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() - * 1e-9; - const auto message = "OpenPose demo successfully finished. 
Total time: " - + std::to_string(totalTimeSec) + " seconds."; - op::log(message, op::Priority::High); - - // Return successful message - return 0; - } - catch (const std::exception& e) - { - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return -1; - } -} - -int main(int argc, char *argv[]) -{ - // Parsing command line flags - gflags::ParseCommandLineFlags(&argc, &argv, true); - - // Running openPoseDemo - return openPoseDemo(); -} diff --git a/examples/tutorial_wrapper/3_user_synchronous_output.cpp b/examples/tutorial_wrapper/3_user_synchronous_output.cpp deleted file mode 100644 index f334a206..00000000 --- a/examples/tutorial_wrapper/3_user_synchronous_output.cpp +++ /dev/null @@ -1,492 +0,0 @@ -// ------------------------- OpenPose Library Tutorial - Real Time Pose Estimation ------------------------- -// If the user wants to learn to use the OpenPose library, we highly recommend to start with the `examples/tutorial_*/` -// folders. -// This example summarizes all the funcitonality of the OpenPose library: - // 1. Read folder of images / video / webcam (`producer` module) - // 2. Extract and render body keypoint / heatmap / PAF of that image (`pose` module) - // 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module) - // 4. Save the results on disk (`filestream` module) - // 5. Display the rendered pose (`gui` module) - // Everything in a multi-thread scenario (`thread` module) - // Points 2 to 5 are included in the `wrapper` module -// In addition to the previous OpenPose modules, we also need to use: - // 1. `core` module: - // For the Array class that the `pose` module needs - // For the Datum struct that the `thread` module sends between the queues - // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively -// This file should only be used for the user to take specific examples. - -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. 
std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif -// OpenPose dependencies -#include - -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); -DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" - " for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with" - " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" - " error."); -DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" - " runtime statistics at this frame number."); -// Producer -DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" - " number (by default), to auto-detect and open the first available camera."); -DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" - " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" - " `--flir_camera`"); -DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). 
Set this value to the minimum" - " value between the OpenPose displayed speed and the webcam real frame rate."); -DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" - " example video."); -DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" - " images. Read all standard formats (jpg, png, bmp, etc.)."); -DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera."); -DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" - " camera index to run, where 0 corresponds to the detected flir camera with the lowest" - " serial number, and `n` to the `n`-th lowest serial number camera."); -DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP."); -DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."); -DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to" - " 10, it will process 11 frames (0-10)."); -DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g. for real time webcam demonstrations)."); -DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270."); -DEFINE_bool(frames_repeat, false, "Repeat frames when finished."); -DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g. for video). If the processing time is" - " too long, it will skip frames. 
If it is too fast, it will slow it down."); -DEFINE_string(camera_parameter_folder, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located."); -DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the" - " `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.," - " it will leave it as it is."); -// OpenPose -DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."); -DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" - " input image resolution."); -DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" - " machine."); -DEFINE_int32(num_gpu_start, 0, "GPU device start number."); -DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)" - " coordinates that will be saved with the `write_json` & `write_keypoint` flags." - " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" - " size (set with `net_resolution`); `2` to scale it to the final output size (set with" - " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" - " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" - " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" - " related with `scale_number` and `scale_gap`."); -DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" - " top scores. The score is based in person area over the image, body part score, as well as" - " joint score (between each pair of connected body parts). 
Useful if you know the exact" - " number of people in the scene, so it can remove false positives (if all the people have" - " been detected. However, it might also include false negatives by removing very small or" - " highly occluded people. -1 will keep them all."); -// OpenPose Body Pose -DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face" - " keypoint detection."); -DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), " - "`MPI_4_layers` (15 keypoints, even faster but less accurate)."); -DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" - " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" - " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" - " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" - " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," - " e.g. full HD (1980x1080) and HD (1280x720) resolutions."); -DEFINE_int32(scale_number, 1, "Number of scales to average."); -DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." - " If you want to change the initial scale, you actually want to multiply the" - " `net_resolution` by your desired initial scale."); -// OpenPose Body Pose Heatmaps and Part Candidates -DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" - " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." - " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" - " memory order: body parts + bkg + PAFs. 
It will follow the order on" - " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" - " considerably decrease. Not required for OpenPose, enable it only if you intend to" - " explicitly use this information later."); -DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" - " background."); -DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."); -DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" - " rounded [0,255]; and 3 for no scaling."); -DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" - " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" - " the detected body parts, before being assembled into people. Note that the number of" - " candidates is equal or higher than the number of final body parts (i.e. after being" - " assembled into people). The empty body parts are filled with 0s. Program speed will" - " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" - " use this information."); -// OpenPose Face -DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Note that this will considerable slow down the performance and increse" - " the required GPU memory. In addition, the greater number of people on the image, the" - " slower OpenPose will be."); -DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" - " detector. 320x320 usually works fine while giving a substantial speed up when multiple" - " faces on the image."); -// OpenPose Hand -DEFINE_bool(hand, false, "Enables hand keypoint detection. 
It will share some parameters from the body pose, e.g." - " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" - " the required GPU memory and its speed depends on the number of people."); -DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" - " detector."); -DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" - " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."); -DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" - " between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if" - " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."); -DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate" - " is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it" - " simply looks for hands in positions at which hands were located in previous frames, but" - " it does not guarantee the same person ID among frames."); -// OpenPose 3-D Reconstruction -DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." - " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" - " results. Note that it will only display 1 person. If multiple people is present, it will" - " fail."); -DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" - " require all the cameras to see the keypoint in order to reconstruct it."); -DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. 
OpenPose will read as many images per" - " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" - " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the" - " parameter folder as this number indicates."); -// Extra algorithms -DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."); -DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" - " value indicates the number of frames where tracking is run between each OpenPose keypoint" - " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" - " detector and tracking for potentially higher accurary than only OpenPose."); -DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" - " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" - " the number of threads will increase the speed but also the global system latency."); -// OpenPose Rendering -DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body" - " part heat map, 19 for the background heat map, 20 for all the body part heat maps" - " together, 21 for all the PAFs, 22-40 for each body part pair PAF."); -DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" - " background, instead of being rendered into the original image. Related: `part_to_show`," - " `alpha_pose`, and `alpha_pose`."); -// OpenPose Rendering Pose -DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" - " rendered. 
Generally, a high threshold (> 0.5) will only render very clear body parts;" - " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also" - " more false positives (i.e. wrong detections)."); -DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" - " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if" - " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" - " both `outputData` and `cvOutputData` with the original image and desired body part to be" - " shown (i.e. keypoints, heat maps or PAFs)."); -DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" - " hide it. Only valid for GPU rendering."); -DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" - " heatmap, 0 will only show the frame. Only valid for GPU rendering."); -// OpenPose Rendering Face -DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."); -DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."); -DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."); -// OpenPose Rendering Hand -DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."); -DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. 
Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); -DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); -// Display -DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle)."); -DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g. number of current frame and people). It" - " does not affect the pose rendering."); -DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" - " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" - " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."); -// Result Saving -DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); -DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV" - " function cv::imwrite for all compatible extensions."); -DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" - " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" - " `camera_fps` controls FPS."); -DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose" - " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."); -DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format."); -DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format."); -DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. 
At least 1 `add_heatmaps_X` flag" - " must be enabled."); -DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." - " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" - " floating values."); -DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" - " with `write_keypoint_format`."); -DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," - " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."); -// Result Saving - Extra Algorithms -DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`" - " controls FPS."); -DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."); -// UDP communication -DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."); -DEFINE_string(udp_port, "8051", "Experimental, not available yet. 
Port number for UDP communication."); - - -// If the user needs his own variables, he can inherit the op::Datum struct and add them -// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define -// Wrapper instead of Wrapper -struct UserDatum : public op::Datum -{ - bool boolThatUserNeedsForSomeReason; - - UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : - boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} - {} -}; - -// The W-classes can be implemented either as a template or as simple classes given -// that the user usually knows which kind of data he will move between the queues, -// in this case we assume a std::shared_ptr of a std::vector of UserDatum - -// This worker will just read and return all the jpg files in a directory -class WUserOutput : public op::WorkerConsumer>> -{ -public: - void initializationOnThread() {} - - void workConsumer(const std::shared_ptr>& datumsPtr) - { - try - { - // User's displaying/saving/other processing here - // datum.cvOutputData: rendered frame with pose or heatmaps - // datum.poseKeypoints: Array with the estimated pose - if (datumsPtr != nullptr && !datumsPtr->empty()) - { - // Show in command line the resulting pose keypoints for body, face and hands - op::log("\nKeypoints:"); - // Accesing each element of the keypoints - const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; - op::log("Person pose keypoints:"); - for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++) - { - op::log("Person " + std::to_string(person) + " (x, y, score):"); - for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) - { - std::string valueToPrint; - for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) - { - valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; - } - op::log(valueToPrint); - } - } - op::log(" "); - // Alternative: just getting std::string equivalent - op::log("Face 
keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); - op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); - op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); - // Heatmaps - const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; - if (!poseHeatMaps.empty()) - { - op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " - + std::to_string(poseHeatMaps.getSize(1)) + ", " - + std::to_string(poseHeatMaps.getSize(2)) + "]"); - const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; - op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " - + std::to_string(faceHeatMaps.getSize(1)) + ", " - + std::to_string(faceHeatMaps.getSize(2)) + ", " - + std::to_string(faceHeatMaps.getSize(3)) + "]"); - const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; - op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " - + std::to_string(handHeatMaps[0].getSize(1)) + ", " - + std::to_string(handHeatMaps[0].getSize(2)) + ", " - + std::to_string(handHeatMaps[0].getSize(3)) + "]"); - op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " - + std::to_string(handHeatMaps[1].getSize(1)) + ", " - + std::to_string(handHeatMaps[1].getSize(2)) + ", " - + std::to_string(handHeatMaps[1].getSize(3)) + "]"); - } - - // Display rendered output image - cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); - // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) - const char key = (char)cv::waitKey(1); - if (key == 27) - this->stop(); - } - } - catch (const std::exception& e) - { - this->stop(); - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - } - } -}; - -int openPoseDemo() -{ - try - { - op::log("Starting OpenPose demo...", op::Priority::High); - const auto timerBegin = std::chrono::high_resolution_clock::now(); - - // logging_level - op::check(0 <= 
FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", - __LINE__, __FUNCTION__, __FILE__); - op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - op::Profiler::setDefaultX(FLAGS_profile_speed); - // // For debugging - // // Print all logging messages - // op::ConfigureLog::setPriorityThreshold(op::Priority::None); - // // Print out speed values faster - // op::Profiler::setDefaultX(100); - - // Applying user defined configuration - Google flags to program variables - // outputSize - const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); - // netInputSize - const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); - // faceNetInputSize - const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); - // handNetInputSize - const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); - // producerType - const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, - FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, - FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, - (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); - // poseModel - const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); - // JSON saving - if (!FLAGS_write_keypoint.empty()) - op::log("Flag `write_keypoint` is deprecated and will eventually be removed." - " Please, use `write_json` instead.", op::Priority::Max); - // keypointScale - const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); - // heatmaps to add - const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, - FLAGS_heatmaps_add_PAFs); - const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); - // >1 camera view? 
- const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); - // Enabling Google Logging - const bool enableGoogleLogging = true; - // Logging - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // OpenPose wrapper - op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // op::Wrapper> opWrapper; - op::Wrapper> opWrapper; - - // Initializing the user custom classes - // GUI (Display) - auto wUserOutput = std::make_shared(); - // Add custom processing - const auto workerOutputOnNewThread = true; - opWrapper.setWorkerOutput(wUserOutput, workerOutputOnNewThread); - - // Pose configuration (use WrapperStructPose{} for default and recommended configuration) - const op::WrapperStructPose wrapperStructPose{ - !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, - FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), - poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, - FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, - (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; - // Face configuration (use op::WrapperStructFace{} to disable it) - const op::WrapperStructFace wrapperStructFace{ - FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), - (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; - // Hand configuration (use op::WrapperStructHand{} to disable it) - const op::WrapperStructHand wrapperStructHand{ - FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, - op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, - (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; - // 
Extra functionality configuration (use op::WrapperStructExtra{} to disable it) - const op::WrapperStructExtra wrapperStructExtra{ - FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; - // Producer (use default to disable any input) - const op::WrapperStructInput wrapperStructInput{ - producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, FLAGS_process_real_time, FLAGS_frame_flip, - FLAGS_frame_rotate, FLAGS_frames_repeat}; - // Consumer (comment or use default argument to disable any output) - // const op::WrapperStructOutput wrapperStructOutput{op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), - // !FLAGS_no_gui_verbose, FLAGS_fullscreen, FLAGS_write_keypoint, - const auto displayMode = op::DisplayMode::NoDisplay; - const bool guiVerbose = false; - const bool fullScreen = false; - const op::WrapperStructOutput wrapperStructOutput{ - displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, - op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, - FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, - FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, - FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; - // Configure wrapper - opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, - wrapperStructInput, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) - if (FLAGS_disable_multi_thread) - opWrapper.disableMultiThreading(); - - // Start processing - // Two different ways of running the program on multithread environment - op::log("Starting thread(s)...", op::Priority::High); - // Start, run & stop threads - it blocks this thread until all others have finished - opWrapper.exec(); - - // // Option b) Keeping this thread free in case you want to do something else meanwhile, e.g. 
profiling the GPU - // memory - // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main - // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: - // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` - // // Start threads - // opWrapper.start(); - // // Profile used GPU memory - // // 1: wait ~10sec so the memory has been totally loaded on GPU - // // 2: profile the GPU memory - // const auto sleepTimeMs = 10; - // for (auto i = 0 ; i < 10000/sleepTimeMs && opWrapper.isRunning() ; i++) - // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); - // op::Profiler::profileGpuMemory(__LINE__, __FUNCTION__, __FILE__); - // // Keep program alive while running threads - // while (opWrapper.isRunning()) - // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); - // // Stop and join threads - // op::log("Stopping thread(s)", op::Priority::High); - // opWrapper.stop(); - - // Measuring total time - const auto now = std::chrono::high_resolution_clock::now(); - const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() - * 1e-9; - const auto message = "OpenPose demo successfully finished. 
Total time: " - + std::to_string(totalTimeSec) + " seconds."; - op::log(message, op::Priority::High); - - // Return successful message - return 0; - } - catch (const std::exception& e) - { - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return -1; - } -} - -int main(int argc, char *argv[]) -{ - // Parsing command line flags - gflags::ParseCommandLineFlags(&argc, &argv, true); - - // Running openPoseDemo - return openPoseDemo(); -} diff --git a/examples/tutorial_wrapper/4_user_synchronous_all.cpp b/examples/tutorial_wrapper/4_user_synchronous_all.cpp deleted file mode 100644 index e1547194..00000000 --- a/examples/tutorial_wrapper/4_user_synchronous_all.cpp +++ /dev/null @@ -1,541 +0,0 @@ -// ------------------------- OpenPose Library Tutorial - Wrapper - Example 2 - Synchronous ------------------------- -// Synchronous mode: ideal for performance. The user can add his own frames producer / post-processor / consumer to the OpenPose wrapper or use the -// default ones. - -// This example shows the user how to use the OpenPose wrapper class: - // 1. User reads images - // 2. Extract and render keypoint / heatmap / PAF of that image - // 3. Save the results on disk - // 4. User displays the rendered pose - // Everything in a multi-thread scenario -// In addition to the previous OpenPose modules, we also need to use: - // 1. `core` module: - // For the Array class that the `pose` module needs - // For the Datum struct that the `thread` module sends between the queues - // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively -// This file should only be used for the user to take specific examples. - -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. 
std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif -// OpenPose dependencies -#include - -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); -DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" - " for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with" - " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" - " error."); -DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" - " runtime statistics at this frame number."); -// Producer -DEFINE_string(image_dir, "examples/media/", "Process a directory of images. Read all standard formats (jpg, png, bmp, etc.)."); -DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). Set this value to the minimum" - " value between the OpenPose displayed speed and the webcam real frame rate."); -// OpenPose -DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."); -DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). 
Use \"-1x-1\" to force the program to use the" - " input image resolution."); -DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" - " machine."); -DEFINE_int32(num_gpu_start, 0, "GPU device start number."); -DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)" - " coordinates that will be saved with the `write_json` & `write_keypoint` flags." - " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" - " size (set with `net_resolution`); `2` to scale it to the final output size (set with" - " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" - " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" - " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" - " related with `scale_number` and `scale_gap`."); -DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" - " top scores. The score is based in person area over the image, body part score, as well as" - " joint score (between each pair of connected body parts). Useful if you know the exact" - " number of people in the scene, so it can remove false positives (if all the people have" - " been detected. However, it might also include false negatives by removing very small or" - " highly occluded people. -1 will keep them all."); -// OpenPose Body Pose -DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face" - " keypoint detection."); -DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), " - "`MPI_4_layers` (15 keypoints, even faster but less accurate)."); -DEFINE_string(net_resolution, "-1x368", "Multiples of 16. 
If it is increased, the accuracy potentially increases. If it is" - " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" - " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" - " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" - " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," - " e.g. full HD (1980x1080) and HD (1280x720) resolutions."); -DEFINE_int32(scale_number, 1, "Number of scales to average."); -DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." - " If you want to change the initial scale, you actually want to multiply the" - " `net_resolution` by your desired initial scale."); -// OpenPose Body Pose Heatmaps and Part Candidates -DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" - " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." - " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" - " memory order: body parts + bkg + PAFs. It will follow the order on" - " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" - " considerably decrease. Not required for OpenPose, enable it only if you intend to" - " explicitly use this information later."); -DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" - " background."); -DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."); -DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" - " rounded [0,255]; and 3 for no scaling."); -DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. 
If true, it will fill the" - " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" - " the detected body parts, before being assembled into people. Note that the number of" - " candidates is equal or higher than the number of final body parts (i.e. after being" - " assembled into people). The empty body parts are filled with 0s. Program speed will" - " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" - " use this information."); -// OpenPose Face -DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Note that this will considerable slow down the performance and increse" - " the required GPU memory. In addition, the greater number of people on the image, the" - " slower OpenPose will be."); -DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" - " detector. 320x320 usually works fine while giving a substantial speed up when multiple" - " faces on the image."); -// OpenPose Hand -DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" - " the required GPU memory and its speed depends on the number of people."); -DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" - " detector."); -DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" - " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."); -DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" - " between smallest and biggest scale. The scales will be centered in ratio 1. 
E.g. if" - " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."); -DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate" - " is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it" - " simply looks for hands in positions at which hands were located in previous frames, but" - " it does not guarantee the same person ID among frames."); -// OpenPose 3-D Reconstruction -DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." - " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" - " results. Note that it will only display 1 person. If multiple people is present, it will" - " fail."); -DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" - " require all the cameras to see the keypoint in order to reconstruct it."); -DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per" - " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" - " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the" - " parameter folder as this number indicates."); -// Extra algorithms -DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."); -DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" - " value indicates the number of frames where tracking is run between each OpenPose keypoint" - " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" - " detector and tracking for potentially higher accurary than only OpenPose."); -DEFINE_int32(ik_threads, 0, "Experimental, not available yet. 
Whether to enable inverse kinematics (IK) from 3-D" - " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" - " the number of threads will increase the speed but also the global system latency."); -// OpenPose Rendering -DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body" - " part heat map, 19 for the background heat map, 20 for all the body part heat maps" - " together, 21 for all the PAFs, 22-40 for each body part pair PAF."); -DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" - " background, instead of being rendered into the original image. Related: `part_to_show`," - " `alpha_pose`, and `alpha_pose`."); -// OpenPose Rendering Pose -DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" - " rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;" - " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also" - " more false positives (i.e. wrong detections)."); -DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" - " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if" - " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" - " both `outputData` and `cvOutputData` with the original image and desired body part to be" - " shown (i.e. keypoints, heat maps or PAFs)."); -DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" - " hide it. Only valid for GPU rendering."); -DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" - " heatmap, 0 will only show the frame. 
Only valid for GPU rendering."); -// OpenPose Rendering Face -DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."); -DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."); -DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."); -// OpenPose Rendering Hand -DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."); -DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); -DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); -// Result Saving -DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); -DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV" - " function cv::imwrite for all compatible extensions."); -DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" - " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" - " `camera_fps` controls FPS."); -DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. 
It includes body, hand, and face pose" - " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."); -DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format."); -DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format."); -DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" - " must be enabled."); -DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." - " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" - " floating values."); -DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" - " with `write_keypoint_format`."); -DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," - " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."); -// Result Saving - Extra Algorithms -DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`" - " controls FPS."); -DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."); -// UDP communication -DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."); -DEFINE_string(udp_port, "8051", "Experimental, not available yet. 
Port number for UDP communication."); - - -// If the user needs his own variables, he can inherit the op::Datum struct and add them -// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define -// Wrapper instead of Wrapper -struct UserDatum : public op::Datum -{ - bool boolThatUserNeedsForSomeReason; - - UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : - boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} - {} -}; - -// The W-classes can be implemented either as a template or as simple classes given -// that the user usually knows which kind of data he will move between the queues, -// in this case we assume a std::shared_ptr of a std::vector of UserDatum - -// This worker will just read and return all the jpg files in a directory -class WUserInput : public op::WorkerProducer>> -{ -public: - WUserInput(const std::string& directoryPath) : - mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")}, - // If we want "jpg" + "png" images - // mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector{"jpg", "png"})}, - mCounter{0} - { - if (mImageFiles.empty()) - op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__); - } - - void initializationOnThread() {} - - std::shared_ptr> workProducer() - { - try - { - // Close program when empty frame - if (mImageFiles.size() <= mCounter) - { - op::log("Last frame read and added to queue. 
Closing program after it is processed.", - op::Priority::High); - // This funtion stops this worker, which will eventually stop the whole thread system once all the - // frames have been processed - this->stop(); - return nullptr; - } - else - { - // Create new datum - auto datumsPtr = std::make_shared>(); - datumsPtr->emplace_back(); - auto& datum = datumsPtr->at(0); - - // Fill datum - datum.cvInputData = cv::imread(mImageFiles.at(mCounter++)); - - // If empty frame -> return nullptr - if (datum.cvInputData.empty()) - { - op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", - op::Priority::High); - this->stop(); - datumsPtr = nullptr; - } - - return datumsPtr; - } - } - catch (const std::exception& e) - { - this->stop(); - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return nullptr; - } - } - -private: - const std::vector mImageFiles; - unsigned long long mCounter; -}; - -// This worker will just invert the image -class WUserPostProcessing : public op::Worker>> -{ -public: - WUserPostProcessing() - { - // User's constructor here - } - - void initializationOnThread() {} - - void work(std::shared_ptr>& datumsPtr) - { - // User's post-processing (after OpenPose processing & before OpenPose outputs) here - // datum.cvOutputData: rendered frame with pose or heatmaps - // datum.poseKeypoints: Array with the estimated pose - try - { - if (datumsPtr != nullptr && !datumsPtr->empty()) - for (auto& datum : *datumsPtr) - cv::bitwise_not(datum.cvOutputData, datum.cvOutputData); - } - catch (const std::exception& e) - { - this->stop(); - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - } - } -}; - -// This worker will just read and return all the jpg files in a directory -class WUserOutput : public op::WorkerConsumer>> -{ -public: - void initializationOnThread() {} - - void workConsumer(const std::shared_ptr>& datumsPtr) - { - try - { - // User's displaying/saving/other processing here - // datum.cvOutputData: 
rendered frame with pose or heatmaps - // datum.poseKeypoints: Array with the estimated pose - if (datumsPtr != nullptr && !datumsPtr->empty()) - { - // Show in command line the resulting pose keypoints for body, face and hands - op::log("\nKeypoints:"); - // Accesing each element of the keypoints - const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; - op::log("Person pose keypoints:"); - for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++) - { - op::log("Person " + std::to_string(person) + " (x, y, score):"); - for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) - { - std::string valueToPrint; - for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) - { - valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; - } - op::log(valueToPrint); - } - } - op::log(" "); - // Alternative: just getting std::string equivalent - op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); - op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); - op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); - // Heatmaps - const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; - if (!poseHeatMaps.empty()) - { - op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " - + std::to_string(poseHeatMaps.getSize(1)) + ", " - + std::to_string(poseHeatMaps.getSize(2)) + "]"); - const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; - op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " - + std::to_string(faceHeatMaps.getSize(1)) + ", " - + std::to_string(faceHeatMaps.getSize(2)) + ", " - + std::to_string(faceHeatMaps.getSize(3)) + "]"); - const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; - op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " - + std::to_string(handHeatMaps[0].getSize(1)) + ", " - + 
std::to_string(handHeatMaps[0].getSize(2)) + ", " - + std::to_string(handHeatMaps[0].getSize(3)) + "]"); - op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " - + std::to_string(handHeatMaps[1].getSize(1)) + ", " - + std::to_string(handHeatMaps[1].getSize(2)) + ", " - + std::to_string(handHeatMaps[1].getSize(3)) + "]"); - } - - // Display rendered output image - cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); - // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) - const char key = (char)cv::waitKey(1); - if (key == 27) - this->stop(); - } - } - catch (const std::exception& e) - { - this->stop(); - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - } - } -}; - -int openPoseTutorialWrapper2() -{ - try - { - op::log("Starting OpenPose demo...", op::Priority::High); - const auto timerBegin = std::chrono::high_resolution_clock::now(); - - // logging_level - op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", - __LINE__, __FUNCTION__, __FILE__); - op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - op::Profiler::setDefaultX(FLAGS_profile_speed); - - // Applying user defined configuration - Google flags to program variables - // outputSize - const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); - // netInputSize - const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); - // faceNetInputSize - const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); - // handNetInputSize - const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); - // poseModel - const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); - // JSON saving - if (!FLAGS_write_keypoint.empty()) - op::log("Flag `write_keypoint` is deprecated and will eventually be removed." 
- " Please, use `write_json` instead.", op::Priority::Max); - // keypointScale - const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); - // heatmaps to add - const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, - FLAGS_heatmaps_add_PAFs); - const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); - // >1 camera view? - const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1); - // Enabling Google Logging - const bool enableGoogleLogging = true; - // Logging - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // Initializing the user custom classes - // Frames producer (e.g. video, webcam, ...) - auto wUserInput = std::make_shared(FLAGS_image_dir); - // Processing - auto wUserPostProcessing = std::make_shared(); - // GUI (Display) - auto wUserOutput = std::make_shared(); - - op::Wrapper> opWrapper; - // Add custom input - const auto workerInputOnNewThread = false; - opWrapper.setWorkerInput(wUserInput, workerInputOnNewThread); - // Add custom processing - const auto workerProcessingOnNewThread = false; - opWrapper.setWorkerPostProcessing(wUserPostProcessing, workerProcessingOnNewThread); - // Add custom output - const auto workerOutputOnNewThread = true; - opWrapper.setWorkerOutput(wUserOutput, workerOutputOnNewThread); - // Configure OpenPose - op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - const op::WrapperStructPose wrapperStructPose{ - !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, - FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), - poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, - FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, - (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; - // Face 
configuration (use op::WrapperStructFace{} to disable it) - const op::WrapperStructFace wrapperStructFace{ - FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), - (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; - // Hand configuration (use op::WrapperStructHand{} to disable it) - const op::WrapperStructHand wrapperStructHand{ - FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, - op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, - (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; - // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) - const op::WrapperStructExtra wrapperStructExtra{ - FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; - // Consumer (comment or use default argument to disable any output) - const auto displayMode = op::DisplayMode::NoDisplay; - const bool guiVerbose = false; - const bool fullScreen = false; - const op::WrapperStructOutput wrapperStructOutput{ - displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, - op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, - FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, - FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, - FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; - // Configure wrapper - opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, - op::WrapperStructInput{}, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) - if (FLAGS_disable_multi_thread) - opWrapper.disableMultiThreading(); - - op::log("Starting thread(s)...", op::Priority::High); - // Two different ways of running the 
program on multithread environment - // Start, run & stop threads - it blocks this thread until all others have finished - opWrapper.exec(); - - // Option b) Keeping this thread free in case you want to do something else meanwhile, e.g. profiling the GPU memory - // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main - // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: - // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` - // // Start threads - // opWrapper.start(); - // // Profile used GPU memory - // // 1: wait ~10sec so the memory has been totally loaded on GPU - // // 2: profile the GPU memory - // std::this_thread::sleep_for(std::chrono::milliseconds{1000}); - // op::log("Random task here...", op::Priority::High); - // // Keep program alive while running threads - // while (opWrapper.isRunning()) - // std::this_thread::sleep_for(std::chrono::milliseconds{33}); - // // Stop and join threads - // op::log("Stopping thread(s)", op::Priority::High); - // opWrapper.stop(); - - // Measuring total time - const auto now = std::chrono::high_resolution_clock::now(); - const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() - * 1e-9; - const auto message = "OpenPose demo successfully finished. 
Total time: " - + std::to_string(totalTimeSec) + " seconds."; - op::log(message, op::Priority::High); - - // Return successful message - return 0; - } - catch (const std::exception& e) - { - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return -1; - } -} - -int main(int argc, char *argv[]) -{ - // Parsing command line flags - gflags::ParseCommandLineFlags(&argc, &argv, true); - - // Running openPoseTutorialWrapper2 - return openPoseTutorialWrapper2(); -} diff --git a/examples/tutorial_wrapper/5_user_asynchronous.cpp b/examples/tutorial_wrapper/5_user_asynchronous.cpp deleted file mode 100644 index 30010bca..00000000 --- a/examples/tutorial_wrapper/5_user_asynchronous.cpp +++ /dev/null @@ -1,493 +0,0 @@ -// ------------------------- OpenPose Library Tutorial - Wrapper - Example 1 - Asynchronous ------------------------- -// Asynchronous mode: ideal for fast prototyping when performance is not an issue. The user emplaces/pushes and pops frames from the OpenPose wrapper -// when he desires to. - -// This example shows the user how to use the OpenPose wrapper class: - // 1. User reads images - // 2. Extract and render keypoint / heatmap / PAF of that image - // 3. Save the results on disk - // 4. User displays the rendered pose - // Everything in a multi-thread scenario -// In addition to the previous OpenPose modules, we also need to use: - // 1. `core` module: - // For the Array class that the `pose` module needs - // For the Datum struct that the `thread` module sends between the queues - // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively -// This file should only be used for the user to take specific examples. - -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. 
std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif -// OpenPose dependencies -#include - -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); -DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" - " for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with" - " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" - " error."); -DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" - " runtime statistics at this frame number."); -// Producer -DEFINE_string(image_dir, "examples/media/", "Process a directory of images. Read all standard formats (jpg, png, bmp, etc.)."); -DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). Set this value to the minimum" - " value between the OpenPose displayed speed and the webcam real frame rate."); -// OpenPose -DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."); -DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). 
Use \"-1x-1\" to force the program to use the" - " input image resolution."); -DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" - " machine."); -DEFINE_int32(num_gpu_start, 0, "GPU device start number."); -DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)" - " coordinates that will be saved with the `write_json` & `write_keypoint` flags." - " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" - " size (set with `net_resolution`); `2` to scale it to the final output size (set with" - " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" - " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" - " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" - " related with `scale_number` and `scale_gap`."); -DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" - " top scores. The score is based in person area over the image, body part score, as well as" - " joint score (between each pair of connected body parts). Useful if you know the exact" - " number of people in the scene, so it can remove false positives (if all the people have" - " been detected. However, it might also include false negatives by removing very small or" - " highly occluded people. -1 will keep them all."); -// OpenPose Body Pose -DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face" - " keypoint detection."); -DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), " - "`MPI_4_layers` (15 keypoints, even faster but less accurate)."); -DEFINE_string(net_resolution, "-1x368", "Multiples of 16. 
If it is increased, the accuracy potentially increases. If it is" - " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" - " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" - " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" - " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," - " e.g. full HD (1980x1080) and HD (1280x720) resolutions."); -DEFINE_int32(scale_number, 1, "Number of scales to average."); -DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." - " If you want to change the initial scale, you actually want to multiply the" - " `net_resolution` by your desired initial scale."); -// OpenPose Body Pose Heatmaps and Part Candidates -DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" - " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." - " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" - " memory order: body parts + bkg + PAFs. It will follow the order on" - " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" - " considerably decrease. Not required for OpenPose, enable it only if you intend to" - " explicitly use this information later."); -DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" - " background."); -DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."); -DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" - " rounded [0,255]; and 3 for no scaling."); -DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. 
If true, it will fill the" - " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" - " the detected body parts, before being assembled into people. Note that the number of" - " candidates is equal or higher than the number of final body parts (i.e. after being" - " assembled into people). The empty body parts are filled with 0s. Program speed will" - " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" - " use this information."); -// OpenPose Face -DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Note that this will considerable slow down the performance and increse" - " the required GPU memory. In addition, the greater number of people on the image, the" - " slower OpenPose will be."); -DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" - " detector. 320x320 usually works fine while giving a substantial speed up when multiple" - " faces on the image."); -// OpenPose Hand -DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" - " the required GPU memory and its speed depends on the number of people."); -DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" - " detector."); -DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" - " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."); -DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" - " between smallest and biggest scale. The scales will be centered in ratio 1. 
E.g. if" - " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."); -DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate" - " is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it" - " simply looks for hands in positions at which hands were located in previous frames, but" - " it does not guarantee the same person ID among frames."); -// OpenPose 3-D Reconstruction -DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." - " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" - " results. Note that it will only display 1 person. If multiple people is present, it will" - " fail."); -DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" - " require all the cameras to see the keypoint in order to reconstruct it."); -DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per" - " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" - " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the" - " parameter folder as this number indicates."); -// Extra algorithms -DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."); -DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" - " value indicates the number of frames where tracking is run between each OpenPose keypoint" - " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" - " detector and tracking for potentially higher accurary than only OpenPose."); -DEFINE_int32(ik_threads, 0, "Experimental, not available yet. 
Whether to enable inverse kinematics (IK) from 3-D" - " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" - " the number of threads will increase the speed but also the global system latency."); -// OpenPose Rendering -DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body" - " part heat map, 19 for the background heat map, 20 for all the body part heat maps" - " together, 21 for all the PAFs, 22-40 for each body part pair PAF."); -DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" - " background, instead of being rendered into the original image. Related: `part_to_show`," - " `alpha_pose`, and `alpha_pose`."); -// OpenPose Rendering Pose -DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" - " rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;" - " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also" - " more false positives (i.e. wrong detections)."); -DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" - " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if" - " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" - " both `outputData` and `cvOutputData` with the original image and desired body part to be" - " shown (i.e. keypoints, heat maps or PAFs)."); -DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" - " hide it. Only valid for GPU rendering."); -DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" - " heatmap, 0 will only show the frame. 
Only valid for GPU rendering."); -// OpenPose Rendering Face -DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."); -DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."); -DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."); -// OpenPose Rendering Hand -DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."); -DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); -DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); -// Result Saving -DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); -DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV" - " function cv::imwrite for all compatible extensions."); -DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" - " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" - " `camera_fps` controls FPS."); -DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. 
It includes body, hand, and face pose" - " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."); -DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format."); -DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format."); -DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" - " must be enabled."); -DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." - " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" - " floating values."); -DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" - " with `write_keypoint_format`."); -DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," - " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."); -// Result Saving - Extra Algorithms -DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`" - " controls FPS."); -DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."); -// UDP communication -DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."); -DEFINE_string(udp_port, "8051", "Experimental, not available yet. 
Port number for UDP communication."); - - -// If the user needs his own variables, he can inherit the op::Datum struct and add them -// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define Wrapper instead of -// Wrapper -struct UserDatum : public op::Datum -{ - bool boolThatUserNeedsForSomeReason; - - UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : - boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} - {} -}; - -// The W-classes can be implemented either as a template or as simple classes given -// that the user usually knows which kind of data he will move between the queues, -// in this case we assume a std::shared_ptr of a std::vector of UserDatum - -// This worker will just read and return all the jpg files in a directory -class UserInputClass -{ -public: - UserInputClass(const std::string& directoryPath) : - mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")}, - // If we want "jpg" + "png" images - // mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector{"jpg", "png"})}, - mCounter{0}, - mClosed{false} - { - if (mImageFiles.empty()) - op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__); - } - - std::shared_ptr> createDatum() - { - // Close program when empty frame - if (mClosed || mImageFiles.size() <= mCounter) - { - op::log("Last frame read and added to queue. 
Closing program after it is processed.", op::Priority::High); - // This funtion stops this worker, which will eventually stop the whole thread system once all the frames - // have been processed - mClosed = true; - return nullptr; - } - else // if (!mClosed) - { - // Create new datum - auto datumsPtr = std::make_shared>(); - datumsPtr->emplace_back(); - auto& datum = datumsPtr->at(0); - - // Fill datum - datum.cvInputData = cv::imread(mImageFiles.at(mCounter++)); - - // If empty frame -> return nullptr - if (datum.cvInputData.empty()) - { - op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.", - op::Priority::High); - mClosed = true; - datumsPtr = nullptr; - } - - return datumsPtr; - } - } - - bool isFinished() const - { - return mClosed; - } - -private: - const std::vector mImageFiles; - unsigned long long mCounter; - bool mClosed; -}; - -// This worker will just read and return all the jpg files in a directory -class UserOutputClass -{ -public: - bool display(const std::shared_ptr>& datumsPtr) - { - // User's displaying/saving/other processing here - // datum.cvOutputData: rendered frame with pose or heatmaps - // datum.poseKeypoints: Array with the estimated pose - char key = ' '; - if (datumsPtr != nullptr && !datumsPtr->empty()) - { - cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); - // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) - key = (char)cv::waitKey(1); - } - else - op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); - return (key == 27); - } - void printKeypoints(const std::shared_ptr>& datumsPtr) - { - // Example: How to use the pose keypoints - if (datumsPtr != nullptr && !datumsPtr->empty()) - { - op::log("\nKeypoints:"); - // Accesing each element of the keypoints - const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; - op::log("Person pose keypoints:"); - for (auto person = 0 ; person < 
poseKeypoints.getSize(0) ; person++) - { - op::log("Person " + std::to_string(person) + " (x, y, score):"); - for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) - { - std::string valueToPrint; - for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) - valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; - op::log(valueToPrint); - } - } - op::log(" "); - // Alternative: just getting std::string equivalent - op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); - op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); - op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); - // Heatmaps - const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; - if (!poseHeatMaps.empty()) - { - op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " - + std::to_string(poseHeatMaps.getSize(1)) + ", " - + std::to_string(poseHeatMaps.getSize(2)) + "]"); - const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; - op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " - + std::to_string(faceHeatMaps.getSize(1)) + ", " - + std::to_string(faceHeatMaps.getSize(2)) + ", " - + std::to_string(faceHeatMaps.getSize(3)) + "]"); - const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; - op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " - + std::to_string(handHeatMaps[0].getSize(1)) + ", " - + std::to_string(handHeatMaps[0].getSize(2)) + ", " - + std::to_string(handHeatMaps[0].getSize(3)) + "]"); - op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " - + std::to_string(handHeatMaps[1].getSize(1)) + ", " - + std::to_string(handHeatMaps[1].getSize(2)) + ", " - + std::to_string(handHeatMaps[1].getSize(3)) + "]"); - } - } - else - op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); - 
} -}; - -int openPoseTutorialWrapper3() -{ - try - { - op::log("Starting OpenPose demo...", op::Priority::High); - const auto timerBegin = std::chrono::high_resolution_clock::now(); - - // logging_level - op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", - __LINE__, __FUNCTION__, __FILE__); - op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - op::Profiler::setDefaultX(FLAGS_profile_speed); - - // Applying user defined configuration - Google flags to program variables - // outputSize - const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); - // netInputSize - const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); - // faceNetInputSize - const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); - // handNetInputSize - const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); - // poseModel - const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); - // JSON saving - if (!FLAGS_write_keypoint.empty()) - op::log("Flag `write_keypoint` is deprecated and will eventually be removed." - " Please, use `write_json` instead.", op::Priority::Max); - // keypointScale - const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); - // heatmaps to add - const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, - FLAGS_heatmaps_add_PAFs); - const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); - // >1 camera view? 
- const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1); - // Enabling Google Logging - const bool enableGoogleLogging = true; - // Logging - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // Configure OpenPose - op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - op::Wrapper> opWrapper{op::ThreadManagerMode::Asynchronous}; - // Pose configuration (use WrapperStructPose{} for default and recommended configuration) - const op::WrapperStructPose wrapperStructPose{ - !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, - FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), - poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, - FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, - (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; - // Face configuration (use op::WrapperStructFace{} to disable it) - const op::WrapperStructFace wrapperStructFace{ - FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), - (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; - // Hand configuration (use op::WrapperStructHand{} to disable it) - const op::WrapperStructHand wrapperStructHand{ - FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, - op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, - (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; - // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) - const op::WrapperStructExtra wrapperStructExtra{ - FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; - // Consumer (comment or use 
default argument to disable any output) - const auto displayMode = op::DisplayMode::NoDisplay; - const bool guiVerbose = false; - const bool fullScreen = false; - const op::WrapperStructOutput wrapperStructOutput{ - displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, - op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, - FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, - FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, - FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; - // Configure wrapper - opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, - op::WrapperStructInput{}, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) - if (FLAGS_disable_multi_thread) - opWrapper.disableMultiThreading(); - - op::log("Starting thread(s)...", op::Priority::High); - opWrapper.start(); - - // User processing - UserInputClass userInputClass(FLAGS_image_dir); - UserOutputClass userOutputClass; - bool userWantsToExit = false; - while (!userWantsToExit && !userInputClass.isFinished()) - { - // Push frame - auto datumToProcess = userInputClass.createDatum(); - if (datumToProcess != nullptr) - { - auto successfullyEmplaced = opWrapper.waitAndEmplace(datumToProcess); - // Pop frame - std::shared_ptr> datumProcessed; - if (successfullyEmplaced && opWrapper.waitAndPop(datumProcessed)) - { - userWantsToExit = userOutputClass.display(datumProcessed); - userOutputClass.printKeypoints(datumProcessed); - } - else - op::log("Processed datum could not be emplaced.", op::Priority::High, - __LINE__, __FUNCTION__, __FILE__); - } - } - - op::log("Stopping thread(s)", op::Priority::High); - opWrapper.stop(); - - // Measuring total time - const auto now = std::chrono::high_resolution_clock::now(); - const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() - 
* 1e-9; - const auto message = "OpenPose demo successfully finished. Total time: " - + std::to_string(totalTimeSec) + " seconds."; - op::log(message, op::Priority::High); - - // Return successful message - return 0; - } - catch (const std::exception& e) - { - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return -1; - } -} - -int main(int argc, char *argv[]) -{ - // Parsing command line flags - gflags::ParseCommandLineFlags(&argc, &argv, true); - - // Running openPoseTutorialWrapper3 - return openPoseTutorialWrapper3(); -} diff --git a/examples/tutorial_wrapper/6_user_asynchronous_output.cpp b/examples/tutorial_wrapper/6_user_asynchronous_output.cpp deleted file mode 100644 index c7386192..00000000 --- a/examples/tutorial_wrapper/6_user_asynchronous_output.cpp +++ /dev/null @@ -1,461 +0,0 @@ -// ------------------------- OpenPose Library Tutorial - Wrapper - Example 3 - Asynchronous Output ------------------------- -// Asynchronous output mode: ideal for fast prototyping when performance is not an issue and user wants to use the output OpenPose format. The user -// simply gets the processed frames from the OpenPose wrapper when he desires to. - -// This example shows the user how to use the OpenPose wrapper class: - // 1. Read folder of images / video / webcam - // 2. Extract and render keypoint / heatmap / PAF of that image - // 3. Save the results on disk - // 4. User displays the rendered pose - // Everything in a multi-thread scenario -// In addition to the previous OpenPose modules, we also need to use: - // 1. `core` module: - // For the Array class that the `pose` module needs - // For the Datum struct that the `thread` module sends between the queues - // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively -// This file should only be used for the user to take specific examples. - -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. 
std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies -// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string -#include -// Allow Google Flags in Ubuntu 14 -#ifndef GFLAGS_GFLAGS_H_ - namespace gflags = google; -#endif -// OpenPose dependencies -#include - -// See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help` -// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose -// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`. -// Debugging/Other -DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while" - " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for" - " low priority messages and 4 for important ones."); -DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" - " for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with" - " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" - " error."); -DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" - " runtime statistics at this frame number."); -// Producer -DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" - " number (by default), to auto-detect and open the first available camera."); -DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" - " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" - " `--flir_camera`"); -DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). 
Set this value to the minimum" - " value between the OpenPose displayed speed and the webcam real frame rate."); -DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" - " example video."); -DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" - " images. Read all standard formats (jpg, png, bmp, etc.)."); -DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera."); -DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" - " camera index to run, where 0 corresponds to the detected flir camera with the lowest" - " serial number, and `n` to the `n`-th lowest serial number camera."); -DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP."); -DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."); -DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to" - " 10, it will process 11 frames (0-10)."); -DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g. for real time webcam demonstrations)."); -DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270."); -DEFINE_bool(frames_repeat, false, "Repeat frames when finished."); -DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g. for video). If the processing time is" - " too long, it will skip frames. 
If it is too fast, it will slow it down."); -DEFINE_string(camera_parameter_folder, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located."); -DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the" - " `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.," - " it will leave it as it is."); -// OpenPose -DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."); -DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" - " input image resolution."); -DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" - " machine."); -DEFINE_int32(num_gpu_start, 0, "GPU device start number."); -DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)" - " coordinates that will be saved with the `write_json` & `write_keypoint` flags." - " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" - " size (set with `net_resolution`); `2` to scale it to the final output size (set with" - " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" - " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" - " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" - " related with `scale_number` and `scale_gap`."); -DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" - " top scores. The score is based in person area over the image, body part score, as well as" - " joint score (between each pair of connected body parts). 
Useful if you know the exact" - " number of people in the scene, so it can remove false positives (if all the people have" - " been detected. However, it might also include false negatives by removing very small or" - " highly occluded people. -1 will keep them all."); -// OpenPose Body Pose -DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face" - " keypoint detection."); -DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), " - "`MPI_4_layers` (15 keypoints, even faster but less accurate)."); -DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" - " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" - " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" - " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" - " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," - " e.g. full HD (1980x1080) and HD (1280x720) resolutions."); -DEFINE_int32(scale_number, 1, "Number of scales to average."); -DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." - " If you want to change the initial scale, you actually want to multiply the" - " `net_resolution` by your desired initial scale."); -// OpenPose Body Pose Heatmaps and Part Candidates -DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" - " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." - " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" - " memory order: body parts + bkg + PAFs. 
It will follow the order on" - " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" - " considerably decrease. Not required for OpenPose, enable it only if you intend to" - " explicitly use this information later."); -DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" - " background."); -DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."); -DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" - " rounded [0,255]; and 3 for no scaling."); -DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" - " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" - " the detected body parts, before being assembled into people. Note that the number of" - " candidates is equal or higher than the number of final body parts (i.e. after being" - " assembled into people). The empty body parts are filled with 0s. Program speed will" - " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" - " use this information."); -// OpenPose Face -DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." - " `model_folder`. Note that this will considerable slow down the performance and increse" - " the required GPU memory. In addition, the greater number of people on the image, the" - " slower OpenPose will be."); -DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" - " detector. 320x320 usually works fine while giving a substantial speed up when multiple" - " faces on the image."); -// OpenPose Hand -DEFINE_bool(hand, false, "Enables hand keypoint detection. 
It will share some parameters from the body pose, e.g." - " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" - " the required GPU memory and its speed depends on the number of people."); -DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" - " detector."); -DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" - " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."); -DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" - " between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if" - " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."); -DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate" - " is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it" - " simply looks for hands in positions at which hands were located in previous frames, but" - " it does not guarantee the same person ID among frames."); -// OpenPose 3-D Reconstruction -DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." - " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" - " results. Note that it will only display 1 person. If multiple people is present, it will" - " fail."); -DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" - " require all the cameras to see the keypoint in order to reconstruct it."); -DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. 
OpenPose will read as many images per" - " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" - " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the" - " parameter folder as this number indicates."); -// Extra algorithms -DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."); -DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" - " value indicates the number of frames where tracking is run between each OpenPose keypoint" - " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" - " detector and tracking for potentially higher accurary than only OpenPose."); -DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" - " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" - " the number of threads will increase the speed but also the global system latency."); -// OpenPose Rendering -DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body" - " part heat map, 19 for the background heat map, 20 for all the body part heat maps" - " together, 21 for all the PAFs, 22-40 for each body part pair PAF."); -DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" - " background, instead of being rendered into the original image. Related: `part_to_show`," - " `alpha_pose`, and `alpha_pose`."); -// OpenPose Rendering Pose -DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" - " rendered. 
Generally, a high threshold (> 0.5) will only render very clear body parts;" - " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also" - " more false positives (i.e. wrong detections)."); -DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" - " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if" - " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" - " both `outputData` and `cvOutputData` with the original image and desired body part to be" - " shown (i.e. keypoints, heat maps or PAFs)."); -DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" - " hide it. Only valid for GPU rendering."); -DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" - " heatmap, 0 will only show the frame. Only valid for GPU rendering."); -// OpenPose Rendering Face -DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."); -DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."); -DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."); -// OpenPose Rendering Hand -DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."); -DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. 
Extra option: -1 to use the same" - " configuration that `render_pose` is using."); -DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); -DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); -// Result Saving -DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); -DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV" - " function cv::imwrite for all compatible extensions."); -DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" - " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" - " `camera_fps` controls FPS."); -DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose" - " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."); -DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format."); -DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format."); -DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" - " must be enabled."); -DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." - " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" - " floating values."); -DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" - " with `write_keypoint_format`."); -DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," - " yaml & yml. 
Json not available for OpenCV < 3.0, use `write_json` instead."); -// Result Saving - Extra Algorithms -DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`" - " controls FPS."); -DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."); -// UDP communication -DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."); -DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication."); - - -// If the user needs his own variables, he can inherit the op::Datum struct and add them -// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define -// Wrapper instead of Wrapper -struct UserDatum : public op::Datum -{ - bool boolThatUserNeedsForSomeReason; - - UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : - boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} - {} -}; - -// The W-classes can be implemented either as a template or as simple classes given -// that the user usually knows which kind of data he will move between the queues, -// in this case we assume a std::shared_ptr of a std::vector of UserDatum - -// This worker will just read and return all the jpg files in a directory -class UserOutputClass -{ -public: - bool display(const std::shared_ptr>& datumsPtr) - { - // User's displaying/saving/other processing here - // datum.cvOutputData: rendered frame with pose or heatmaps - // datum.poseKeypoints: Array with the estimated pose - char key = ' '; - if (datumsPtr != nullptr && !datumsPtr->empty()) - { - cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData); - // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image) - key = (char)cv::waitKey(1); - } - else - op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, 
__FILE__); - return (key == 27); - } - void printKeypoints(const std::shared_ptr>& datumsPtr) - { - // Example: How to use the pose keypoints - if (datumsPtr != nullptr && !datumsPtr->empty()) - { - op::log("\nKeypoints:"); - // Accesing each element of the keypoints - const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints; - op::log("Person pose keypoints:"); - for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++) - { - op::log("Person " + std::to_string(person) + " (x, y, score):"); - for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++) - { - std::string valueToPrint; - for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++) - { - valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " "; - } - op::log(valueToPrint); - } - } - op::log(" "); - // Alternative: just getting std::string equivalent - op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString()); - op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString()); - op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString()); - // Heatmaps - const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps; - if (!poseHeatMaps.empty()) - { - op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", " - + std::to_string(poseHeatMaps.getSize(1)) + ", " - + std::to_string(poseHeatMaps.getSize(2)) + "]"); - const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps; - op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", " - + std::to_string(faceHeatMaps.getSize(1)) + ", " - + std::to_string(faceHeatMaps.getSize(2)) + ", " - + std::to_string(faceHeatMaps.getSize(3)) + "]"); - const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps; - op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", " - + std::to_string(handHeatMaps[0].getSize(1)) + ", " - + std::to_string(handHeatMaps[0].getSize(2)) + ", " - + 
std::to_string(handHeatMaps[0].getSize(3)) + "]"); - op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", " - + std::to_string(handHeatMaps[1].getSize(1)) + ", " - + std::to_string(handHeatMaps[1].getSize(2)) + ", " - + std::to_string(handHeatMaps[1].getSize(3)) + "]"); - } - } - else - op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); - } -}; - -int openPoseTutorialWrapper1() -{ - try - { - op::log("Starting OpenPose demo...", op::Priority::High); - const auto timerBegin = std::chrono::high_resolution_clock::now(); - - // logging_level - op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", - __LINE__, __FUNCTION__, __FILE__); - op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - op::Profiler::setDefaultX(FLAGS_profile_speed); - - // Applying user defined configuration - Google flags to program variables - // outputSize - const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); - // netInputSize - const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); - // faceNetInputSize - const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); - // handNetInputSize - const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); - // producerType - const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, - FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, - FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, - (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); - // poseModel - const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); - // JSON saving - if (!FLAGS_write_keypoint.empty()) - op::log("Flag `write_keypoint` is deprecated and will eventually be removed." 
- " Please, use `write_json` instead.", op::Priority::Max); - // keypointScale - const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); - // heatmaps to add - const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, - FLAGS_heatmaps_add_PAFs); - const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); - // >1 camera view? - const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); - // Enabling Google Logging - const bool enableGoogleLogging = true; - // Logging - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // Configure OpenPose - op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - op::Wrapper> opWrapper{op::ThreadManagerMode::AsynchronousOut}; - // Pose configuration (use WrapperStructPose{} for default and recommended configuration) - const op::WrapperStructPose wrapperStructPose{ - !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, - FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), - poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, - FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, - (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; - // Face configuration (use op::WrapperStructFace{} to disable it) - const op::WrapperStructFace wrapperStructFace{ - FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), - (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; - // Hand configuration (use op::WrapperStructHand{} to disable it) - const op::WrapperStructHand wrapperStructHand{ - FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, - 
op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, - (float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold}; - // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) - const op::WrapperStructExtra wrapperStructExtra{ - FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; - // Producer (use default to disable any input) - const op::WrapperStructInput wrapperStructInput{ - producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, FLAGS_process_real_time, FLAGS_frame_flip, - FLAGS_frame_rotate, FLAGS_frames_repeat}; - // Consumer (comment or use default argument to disable any output) - const auto displayMode = op::DisplayMode::NoDisplay; - const bool guiVerbose = false; - const bool fullScreen = false; - const op::WrapperStructOutput wrapperStructOutput{ - displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint, - op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json, - FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video, - FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam, - FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; - // Configure wrapper - opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, - wrapperStructInput, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) - if (FLAGS_disable_multi_thread) - opWrapper.disableMultiThreading(); - - op::log("Starting thread(s)...", op::Priority::High); - opWrapper.start(); - - // User processing - UserOutputClass userOutputClass; - bool userWantsToExit = false; - while (!userWantsToExit) - { - // Pop frame - std::shared_ptr> datumProcessed; - if (opWrapper.waitAndPop(datumProcessed)) - { - userWantsToExit = userOutputClass.display(datumProcessed);; - 
userOutputClass.printKeypoints(datumProcessed); - } - else - op::log("Processed datum could not be emplaced.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__); - } - - op::log("Stopping thread(s)", op::Priority::High); - opWrapper.stop(); - - // Measuring total time - const auto now = std::chrono::high_resolution_clock::now(); - const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() - * 1e-9; - const auto message = "OpenPose demo successfully finished. Total time: " - + std::to_string(totalTimeSec) + " seconds."; - op::log(message, op::Priority::High); - - // Return successful message - return 0; - } - catch (const std::exception& e) - { - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return -1; - } -} - -int main(int argc, char *argv[]) -{ - // Parsing command line flags - gflags::ParseCommandLineFlags(&argc, &argv, true); - - // Running openPoseTutorialWrapper1 - return openPoseTutorialWrapper1(); -} diff --git a/examples/user_code/README.md b/examples/user_code/README.md index 7bff0b6d..d907d184 100644 --- a/examples/user_code/README.md +++ b/examples/user_code/README.md @@ -10,7 +10,7 @@ You can quickly add your custom code into this folder so that quick prototypes c ## How-to 1. Install/compile OpenPose as usual. -2. Add your custom *.cpp / *.hpp files here,. Hint: You might want to start by copying the [OpenPoseDemo](../openpose/openpose.cpp) example or any of the [examples/tutorial_wrapper/](../tutorial_wrapper/) examples. Then, you can simply modify their content. +2. Add your custom *.cpp / *.hpp files here,. Hint: You might want to start by copying the [OpenPoseDemo](../openpose/openpose.cpp) example or any of the [examples/tutorial_api_cpp/](../tutorial_api_cpp/) examples. Then, you can simply modify their content. 3. Add the name of your custom *.cpp / *.hpp files at the top of the [examples/user_code/CMakeLists.txt](./CMakeLists.txt) file. 4. Re-compile OpenPose. 
``` diff --git a/include/openpose/core/datum.hpp b/include/openpose/core/datum.hpp index 1cb168ef..d1bc3622 100644 --- a/include/openpose/core/datum.hpp +++ b/include/openpose/core/datum.hpp @@ -106,7 +106,7 @@ namespace op * Order heatmaps: body parts + background (as appears in POSE_BODY_PART_MAPPING) + (x,y) channel of each PAF * (sorted as appears in POSE_BODY_PART_PAIRS). See `pose/poseParameters.hpp`. * The user can choose the heatmaps normalization: ranges [0, 1], [-1, 1] or [0, 255]. Check the - * `heatmaps_scale` flag in the examples/tutorial_wrapper/ for more details. + * `heatmaps_scale` flag in {OpenPose_path}doc/demo_overview.md for more details. * Size: #heatmaps x output_net_height x output_net_width */ Array poseHeatMaps; diff --git a/include/openpose/core/macros.hpp b/include/openpose/core/macros.hpp index 7b8f849d..2755fb2e 100644 --- a/include/openpose/core/macros.hpp +++ b/include/openpose/core/macros.hpp @@ -1,9 +1,11 @@ #ifndef OPENPOSE_CORE_MACROS_HPP #define OPENPOSE_CORE_MACROS_HPP +#include // std::chrono:: functionaligy, e.g., std::chrono::milliseconds #include // std::shared_ptr #include #include +#include // std::this_thread #include // OpenPose name and version diff --git a/include/openpose/face/faceExtractorNet.hpp b/include/openpose/face/faceExtractorNet.hpp index b09cbcc7..a717fdba 100644 --- a/include/openpose/face/faceExtractorNet.hpp +++ b/include/openpose/face/faceExtractorNet.hpp @@ -2,7 +2,6 @@ #define OPENPOSE_FACE_FACE_EXTRACTOR_HPP #include -#include #include // cv::Mat #include #include diff --git a/examples/tutorial_wrapper/1_user_synchronous_postprocessing.cpp b/include/openpose/flags.hpp similarity index 68% rename from examples/tutorial_wrapper/1_user_synchronous_postprocessing.cpp rename to include/openpose/flags.hpp index f2f3acba..d4592e86 100644 --- a/examples/tutorial_wrapper/1_user_synchronous_postprocessing.cpp +++ b/include/openpose/flags.hpp @@ -1,33 +1,16 @@ -// ------------------------- OpenPose Library 
Tutorial - Real Time Pose Estimation ------------------------- -// If the user wants to learn to use the OpenPose library, we highly recommend to start with the `examples/tutorial_*/` -// folders. -// This example summarizes all the funcitonality of the OpenPose library: - // 1. Read folder of images / video / webcam (`producer` module) - // 2. Extract and render body keypoint / heatmap / PAF of that image (`pose` module) - // 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module) - // 4. Save the results on disk (`filestream` module) - // 5. Display the rendered pose (`gui` module) - // Everything in a multi-thread scenario (`thread` module) - // Points 2 to 5 are included in the `wrapper` module -// In addition to the previous OpenPose modules, we also need to use: - // 1. `core` module: - // For the Array class that the `pose` module needs - // For the Datum struct that the `thread` module sends between the queues - // 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively -// This file should only be used for the user to take specific examples. +#ifndef OPENPOSE_FLAGS_HPP +#define OPENPOSE_FLAGS_HPP + +// Note: This class is not included within the basic OpenPose `headers.hpp` and must be explicitly included. In +// addition, Google Flags library must also be linked to the resulting binary or library. OpenPose library does +// not force to use Google Flags, but the OpenPose examples do so. -// C++ std library dependencies -#include // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds -#include // std::this_thread -// Other 3rdparty dependencies // GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string #include // Allow Google Flags in Ubuntu 14 #ifndef GFLAGS_GFLAGS_H_ namespace gflags = google; #endif -// OpenPose dependencies -#include // See all the available parameter options withe the `--help` flag. E.g. 
`build/examples/openpose/openpose.bin --help` // Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose @@ -42,14 +25,18 @@ DEFINE_bool(disable_multi_thread, false, "It would slightly reduc " error."); DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" " runtime statistics at this frame number."); +#ifndef OPENPOSE_FLAGS_DISABLE_POSE +#ifndef OPENPOSE_FLAGS_DISABLE_PRODUCER // Producer DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" " number (by default), to auto-detect and open the first available camera."); DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" " `--flir_camera`"); +#endif // OPENPOSE_FLAGS_DISABLE_PRODUCER DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). Set this value to the minimum" " value between the OpenPose displayed speed and the webcam real frame rate."); +#ifndef OPENPOSE_FLAGS_DISABLE_PRODUCER DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" " example video."); DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" @@ -71,6 +58,7 @@ DEFINE_string(camera_parameter_folder, "models/cameraParameters/flir/", "String DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the" " `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.," " it will leave it as it is."); +#endif // OPENPOSE_FLAGS_DISABLE_PRODUCER // OpenPose DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) 
are located."); DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" @@ -203,6 +191,7 @@ DEFINE_int32(hand_render, -1, "Analogous to `render_po " configuration that `render_pose` is using."); DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."); DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."); +#ifndef OPENPOSE_FLAGS_DISABLE_DISPLAY // Display DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle)."); DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g. number of current frame and people). It" @@ -210,6 +199,7 @@ DEFINE_bool(no_gui_verbose, false, "Do not write text on ou DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."); +#endif // OPENPOSE_FLAGS_DISABLE_DISPLAY // Result Saving DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."); DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV" @@ -237,204 +227,6 @@ DEFINE_string(write_bvh, "", "Experimental, not avail // UDP communication DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."); DEFINE_string(udp_port, "8051", "Experimental, not available yet. 
Port number for UDP communication."); +#endif // OPENPOSE_FLAGS_DISABLE_POSE - -// If the user needs his own variables, he can inherit the op::Datum struct and add them -// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define -// Wrapper instead of Wrapper -struct UserDatum : public op::Datum -{ - bool boolThatUserNeedsForSomeReason; - - UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) : - boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_} - {} -}; - -// The W-classes can be implemented either as a template or as simple classes given -// that the user usually knows which kind of data he will move between the queues, -// in this case we assume a std::shared_ptr of a std::vector of UserDatum - -// This worker will just invert the image -class WUserPostProcessing : public op::Worker>> -{ -public: - WUserPostProcessing() - { - // User's constructor here - } - - void initializationOnThread() {} - - void work(std::shared_ptr>& datumsPtr) - { - // User's post-processing (after OpenPose processing & before OpenPose outputs) here - // datum.cvOutputData: rendered frame with pose or heatmaps - // datum.poseKeypoints: Array with the estimated pose - try - { - if (datumsPtr != nullptr && !datumsPtr->empty()) - for (auto& datum : *datumsPtr) - cv::bitwise_not(datum.cvOutputData, datum.cvOutputData); - } - catch (const std::exception& e) - { - this->stop(); - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - } - } -}; - -int openPoseDemo() -{ - try - { - op::log("Starting OpenPose demo...", op::Priority::High); - const auto timerBegin = std::chrono::high_resolution_clock::now(); - - // logging_level - op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", - __LINE__, __FUNCTION__, __FILE__); - op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - op::Profiler::setDefaultX(FLAGS_profile_speed); - // // For debugging - // // Print all 
logging messages - // op::ConfigureLog::setPriorityThreshold(op::Priority::None); - // // Print out speed values faster - // op::Profiler::setDefaultX(100); - - // Applying user defined configuration - Google flags to program variables - // outputSize - const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); - // netInputSize - const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); - // faceNetInputSize - const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)"); - // handNetInputSize - const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)"); - // producerType - const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera, - FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps, - FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion, - (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index); - // poseModel - const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose); - // JSON saving - if (!FLAGS_write_keypoint.empty()) - op::log("Flag `write_keypoint` is deprecated and will eventually be removed." - " Please, use `write_json` instead.", op::Priority::Max); - // keypointScale - const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale); - // heatmaps to add - const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg, - FLAGS_heatmaps_add_PAFs); - const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale); - // >1 camera view? 
- const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera); - // Enabling Google Logging - const bool enableGoogleLogging = true; - // Logging - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // OpenPose wrapper - op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // op::Wrapper> opWrapper; - op::Wrapper> opWrapper; - - // Initializing the user custom classes - // Processing - auto wUserPostProcessing = std::make_shared(); - // Add custom processing - const auto workerProcessingOnNewThread = true; - opWrapper.setWorkerPostProcessing(wUserPostProcessing, workerProcessingOnNewThread); - - // Pose configuration (use WrapperStructPose{} for default and recommended configuration) - const op::WrapperStructPose wrapperStructPose{ - !FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start, - FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView), - poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap, - FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates, - (float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging}; - // Face configuration (use op::WrapperStructFace{} to disable it) - const op::WrapperStructFace wrapperStructFace{ - FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose), - (float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold}; - // Hand configuration (use op::WrapperStructHand{} to disable it) - const op::WrapperStructHand wrapperStructHand{ - FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking, - op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose, - (float)FLAGS_hand_alpha_heatmap, 
(float)FLAGS_hand_render_threshold}; - // Producer (use default to disable any input) - const op::WrapperStructInput wrapperStructInput{ - producerSharedPtr, FLAGS_frame_first, FLAGS_frame_last, FLAGS_process_real_time, FLAGS_frame_flip, - FLAGS_frame_rotate, FLAGS_frames_repeat}; - // Extra functionality configuration (use op::WrapperStructExtra{} to disable it) - const op::WrapperStructExtra wrapperStructExtra{ - FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads}; - // Consumer (comment or use default argument to disable any output) - const op::WrapperStructOutput wrapperStructOutput{ - op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen, - FLAGS_write_keypoint, op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, - FLAGS_write_coco_json, FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, - FLAGS_write_video, FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, - FLAGS_write_video_adam, FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port}; - // Configure wrapper - opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, - wrapperStructInput, wrapperStructOutput); - // Set to single-thread running (to debug and/or reduce latency) - if (FLAGS_disable_multi_thread) - opWrapper.disableMultiThreading(); - - // Start processing - // Two different ways of running the program on multithread environment - op::log("Starting thread(s)...", op::Priority::High); - // Start, run & stop threads - it blocks this thread until all others have finished - opWrapper.exec(); - - // // Option b) Keeping this thread free in case you want to do something else meanwhile, e.g. profiling the GPU - // memory - // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. 
Qt needs the main - // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: - // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections` - // // Start threads - // opWrapper.start(); - // // Profile used GPU memory - // // 1: wait ~10sec so the memory has been totally loaded on GPU - // // 2: profile the GPU memory - // const auto sleepTimeMs = 10; - // for (auto i = 0 ; i < 10000/sleepTimeMs && opWrapper.isRunning() ; i++) - // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); - // op::Profiler::profileGpuMemory(__LINE__, __FUNCTION__, __FILE__); - // // Keep program alive while running threads - // while (opWrapper.isRunning()) - // std::this_thread::sleep_for(std::chrono::milliseconds{sleepTimeMs}); - // // Stop and join threads - // op::log("Stopping thread(s)", op::Priority::High); - // opWrapper.stop(); - - // Measuring total time - const auto now = std::chrono::high_resolution_clock::now(); - const auto totalTimeSec = (double)std::chrono::duration_cast(now-timerBegin).count() - * 1e-9; - const auto message = "OpenPose demo successfully finished. 
Total time: " - + std::to_string(totalTimeSec) + " seconds."; - op::log(message, op::Priority::High); - - // Return successful message - return 0; - } - catch (const std::exception& e) - { - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return -1; - } -} - -int main(int argc, char *argv[]) -{ - // Parsing command line flags - gflags::ParseCommandLineFlags(&argc, &argv, true); - - // Running openPoseDemo - return openPoseDemo(); -} +#endif // OPENPOSE_FLAGS_HPP diff --git a/include/openpose/hand/handExtractorNet.hpp b/include/openpose/hand/handExtractorNet.hpp index e5d17ff8..10ef59c9 100644 --- a/include/openpose/hand/handExtractorNet.hpp +++ b/include/openpose/hand/handExtractorNet.hpp @@ -2,7 +2,6 @@ #define OPENPOSE_HAND_HAND_EXTRACTOR_HPP #include -#include #include // cv::Mat #include #include diff --git a/include/openpose/pose/bodyPartConnectorBase.hpp b/include/openpose/net/bodyPartConnectorBase.hpp similarity index 100% rename from include/openpose/pose/bodyPartConnectorBase.hpp rename to include/openpose/net/bodyPartConnectorBase.hpp diff --git a/include/openpose/pose/bodyPartConnectorCaffe.hpp b/include/openpose/net/bodyPartConnectorCaffe.hpp similarity index 100% rename from include/openpose/pose/bodyPartConnectorCaffe.hpp rename to include/openpose/net/bodyPartConnectorCaffe.hpp diff --git a/include/openpose/net/headers.hpp b/include/openpose/net/headers.hpp index 1d8cc412..93fceae0 100644 --- a/include/openpose/net/headers.hpp +++ b/include/openpose/net/headers.hpp @@ -2,6 +2,8 @@ #define OPENPOSE_NET_HEADERS_HPP // net module +#include +#include #include #include #include diff --git a/include/openpose/pose/headers.hpp b/include/openpose/pose/headers.hpp index a6a8bd78..9d788ec6 100644 --- a/include/openpose/pose/headers.hpp +++ b/include/openpose/pose/headers.hpp @@ -2,8 +2,6 @@ #define OPENPOSE_POSE_HEADERS_HPP // pose module -#include -#include #include #include #include diff --git a/include/openpose/pose/poseExtractorNet.hpp 
b/include/openpose/pose/poseExtractorNet.hpp index 6766d5a3..8758b87f 100644 --- a/include/openpose/pose/poseExtractorNet.hpp +++ b/include/openpose/pose/poseExtractorNet.hpp @@ -2,7 +2,6 @@ #define OPENPOSE_POSE_POSE_EXTRACTOR_NET_HPP #include -#include #include #include #include diff --git a/include/openpose/producer/producer.hpp b/include/openpose/producer/producer.hpp index cd99af9b..12470dbd 100644 --- a/include/openpose/producer/producer.hpp +++ b/include/openpose/producer/producer.hpp @@ -1,7 +1,6 @@ #ifndef OPENPOSE_PRODUCER_PRODUCER_HPP #define OPENPOSE_PRODUCER_PRODUCER_HPP -#include #include // cv::Mat #include // capProperties of OpenCV #include diff --git a/include/openpose/producer/webcamReader.hpp b/include/openpose/producer/webcamReader.hpp index 51348488..730d0f73 100644 --- a/include/openpose/producer/webcamReader.hpp +++ b/include/openpose/producer/webcamReader.hpp @@ -3,7 +3,6 @@ #include #include -#include #include #include diff --git a/include/openpose/thread/thread.hpp b/include/openpose/thread/thread.hpp index 4b73acd8..c2183a17 100644 --- a/include/openpose/thread/thread.hpp +++ b/include/openpose/thread/thread.hpp @@ -2,7 +2,6 @@ #define OPENPOSE_THREAD_THREAD_HPP #include -#include #include #include #include diff --git a/include/openpose/thread/wQueueAssembler.hpp b/include/openpose/thread/wQueueAssembler.hpp index 2813d27d..854c9bea 100644 --- a/include/openpose/thread/wQueueAssembler.hpp +++ b/include/openpose/thread/wQueueAssembler.hpp @@ -35,8 +35,6 @@ namespace op // Implementation -#include -#include namespace op { template diff --git a/include/openpose/thread/wQueueOrderer.hpp b/include/openpose/thread/wQueueOrderer.hpp index cab87fe9..8b075495 100644 --- a/include/openpose/thread/wQueueOrderer.hpp +++ b/include/openpose/thread/wQueueOrderer.hpp @@ -36,8 +36,6 @@ namespace op // Implementation -#include -#include namespace op { template diff --git a/include/openpose/wrapper/enumClasses.hpp b/include/openpose/wrapper/enumClasses.hpp 
new file mode 100644 index 00000000..b8d739c0 --- /dev/null +++ b/include/openpose/wrapper/enumClasses.hpp @@ -0,0 +1,16 @@ +#ifndef OPENPOSE_WRAPPER_ENUM_CLASSES_HPP +#define OPENPOSE_WRAPPER_ENUM_CLASSES_HPP + +namespace op +{ + enum class WorkerType : unsigned char + { + Input = 0, + // PreProcessing, + PostProcessing, + Output, + Size, + }; +} + +#endif // OPENPOSE_WRAPPER_ENUM_CLASSES_HPP diff --git a/include/openpose/wrapper/headers.hpp b/include/openpose/wrapper/headers.hpp index 3c4b6e45..15a99e39 100644 --- a/include/openpose/wrapper/headers.hpp +++ b/include/openpose/wrapper/headers.hpp @@ -2,6 +2,7 @@ #define OPENPOSE_WRAPPER_HEADERS_HPP // wrapper module +#include #include #include #include diff --git a/include/openpose/wrapper/wrapper.hpp b/include/openpose/wrapper/wrapper.hpp index c49030ba..6122d642 100644 --- a/include/openpose/wrapper/wrapper.hpp +++ b/include/openpose/wrapper/wrapper.hpp @@ -3,6 +3,7 @@ #include #include +#include #include #include #include @@ -29,8 +30,7 @@ namespace op */ template, - typename TWorker = std::shared_ptr>, - typename TQueue = Queue> + typename TWorker = std::shared_ptr>> class Wrapper { public: @@ -52,62 +52,21 @@ namespace op /** * Disable multi-threading. * Useful for debugging and logging, all the Workers will run in the same thread. - * Note that workerOnNewThread (argument for setWorkerInput, setWorkerPostProcessing and setWorkerOutput) will - * not make any effect. + * Note that workerOnNewThread (argument for setWorker function) will not make any effect. */ void disableMultiThreading(); /** - * Add an user-defined extra Worker as frames generator. + * Add an user-defined extra Worker for a desired task (input, output, ...). + * @param workerType WorkerType to configure (e.g., Input, PostProcessing, Output). * @param worker TWorker to be added. * @param workerOnNewThread Whether to add this TWorker on a new thread (if it is computationally demanding) or * simply reuse existing threads (for light functions). 
Set to true if the performance time is unknown. */ - void setWorkerInput(const TWorker& worker, const bool workerOnNewThread = true); + void setWorker(const WorkerType workerType, const TWorker& worker, const bool workerOnNewThread = true); - /** - * Add an user-defined extra Worker as frames post-processor. - * @param worker TWorker to be added. - * @param workerOnNewThread Whether to add this TWorker on a new thread (if it is computationally demanding) or - * simply reuse existing threads (for light functions). Set to true if the performance time is unknown. - */ - void setWorkerPostProcessing(const TWorker& worker, const bool workerOnNewThread = true); - - /** - * Add an user-defined extra Worker as frames consumer (custom display and/or saving). - * @param worker TWorker to be added. - * @param workerOnNewThread Whether to add this TWorker on a new thread (if it is computationally demanding) or - * simply reuse existing threads (for light functions). Set to true if the performance time is unknown. - */ - void setWorkerOutput(const TWorker& worker, const bool workerOnNewThread = true); - - // If output is not required, just use this function until the renderOutput argument. Keep the default values - // for the other parameters in order not to display/save any output. 
- void configure(const WrapperStructPose& wrapperStructPose, - // Producer: set producerSharedPtr=nullptr or use default WrapperStructInput{} to disable input - const WrapperStructInput& wrapperStructInput, - // Consumer (keep default values to disable any output) - const WrapperStructOutput& wrapperStructOutput = WrapperStructOutput{}); - - // Similar to the previos configure, but it includes hand extraction and rendering - void configure(const WrapperStructPose& wrapperStructPose, - // Hand (use the default WrapperStructHand{} to disable any hand detector) - const WrapperStructHand& wrapperStructHand, - // Producer: set producerSharedPtr=nullptr or use default WrapperStructInput{} to disable input - const WrapperStructInput& wrapperStructInput, - // Consumer (keep default values to disable any output) - const WrapperStructOutput& wrapperStructOutput = WrapperStructOutput{}); - - // Similar to the previos configure, but it includes hand extraction and rendering - void configure(const WrapperStructPose& wrapperStructPose, - // Face (use the default WrapperStructFace{} to disable any face detector) - const WrapperStructFace& wrapperStructFace, - // Producer: set producerSharedPtr=nullptr or use default WrapperStructInput{} to disable input - const WrapperStructInput& wrapperStructInput, - // Consumer (keep default values to disable any output) - const WrapperStructOutput& wrapperStructOutput = WrapperStructOutput{}); - - // Similar to the previos configure, but it includes hand extraction and rendering + // Configure class. 
Provide WrapperStruct structs to configure the wrapper, or call without arguments for + default values void configure(const WrapperStructPose& wrapperStructPose = WrapperStructPose{}, // Face (use the default WrapperStructFace{} to disable any face detector) const WrapperStructFace& wrapperStructFace = WrapperStructFace{}, @@ -120,6 +79,36 @@ namespace op // Consumer (keep default values to disable any output) const WrapperStructOutput& wrapperStructOutput = WrapperStructOutput{}); + // /** + // * Analogous to configure() but applied to only pose (WrapperStructPose) + // */ + // void configure(const WrapperStructPose& wrapperStructPose); + + /** + * Analogous to configure() but applied to only face (WrapperStructFace) + */ + void configure(const WrapperStructFace& wrapperStructFace); + + /** + * Analogous to configure() but applied to only hand (WrapperStructHand) + */ + void configure(const WrapperStructHand& wrapperStructHand); + + /** + * Analogous to configure() but applied to only extra functionality (WrapperStructExtra) + */ + void configure(const WrapperStructExtra& wrapperStructExtra); + + /** + * Analogous to configure() but applied to only input (WrapperStructInput) + */ + void configure(const WrapperStructInput& wrapperStructInput); + + /** + * Analogous to configure() but applied to only output (WrapperStructOutput) + */ + void configure(const WrapperStructOutput& wrapperStructOutput); + /** * Function to start multi-threading. * Similar to start(), but exec() blocks the thread that calls the function (it saves 1 thread).
Use exec() @@ -205,55 +194,30 @@ namespace op */ bool waitAndPop(TDatumsSP& tDatums); - private: - const ThreadManagerMode mThreadManagerMode; - const std::shared_ptr, std::atomic>> spVideoSeek; - bool mConfigured; - ThreadManager mThreadManager; - bool mUserInputWsOnNewThread; - bool mUserPostProcessingWsOnNewThread; - bool mUserOutputWsOnNewThread; - unsigned long long mThreadId; - bool mMultiThreadEnabled; - // Workers - std::vector mUserInputWs; - TWorker wDatumProducer; - TWorker spWScaleAndSizeExtractor; - TWorker spWCvMatToOpInput; - TWorker spWCvMatToOpOutput; - std::vector> spWPoseExtractors; - std::vector> spWPoseTriangulations; - std::vector> spWJointAngleEstimations; - std::vector mPostProcessingWs; - std::vector mUserPostProcessingWs; - std::vector mOutputWs; - TWorker spWGui; - std::vector mUserOutputWs; - /** - * Frees TWorker variables (private internal function). - * For most cases, this class is non-necessary, since std::shared_ptr are automatically cleaned on destruction - * of each class. - * However, it might be useful if the same Wrapper is gonna be started twice (not recommended on most cases). + * Runs both waitAndEmplace and waitAndPop */ - void reset(); + bool emplaceAndPop(TDatumsSP& tDatums); /** - * Set ThreadManager from TWorkers (private internal function). - * After any configure() has been called, the TWorkers are initialized. This function resets the ThreadManager - * and adds them. - * Common code for start() and exec(). + * Runs both waitAndEmplace and waitAndPop */ - void configureThreadManager(); + TDatumsSP emplaceAndPop(const cv::Mat& cvMat); - /** - * Thread ID increase (private internal function). - * If multi-threading mode, it increases the thread ID. - * If single-threading mode (for debugging), it does not modify it. - * Note that mThreadId must be re-initialized to 0 before starting a new Wrapper configuration. - * @return unsigned int with the next thread id value. 
- */ - unsigned long long threadIdPP(); + private: + const ThreadManagerMode mThreadManagerMode; + ThreadManager mThreadManager; + bool mMultiThreadEnabled; + // Configuration + WrapperStructPose mWrapperStructPose; + WrapperStructFace mWrapperStructFace; + WrapperStructHand mWrapperStructHand; + WrapperStructExtra mWrapperStructExtra; + WrapperStructInput mWrapperStructInput; + WrapperStructOutput mWrapperStructOutput; + // User configurable workers + std::array mUserWsOnNewThread; + std::array, int(WorkerType::Size)> mUserWs; DELETE_COPY(Wrapper); }; @@ -264,48 +228,28 @@ namespace op // Implementation -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include #include namespace op { - template - Wrapper::Wrapper(const ThreadManagerMode threadManagerMode) : + template + Wrapper::Wrapper(const ThreadManagerMode threadManagerMode) : mThreadManagerMode{threadManagerMode}, - spVideoSeek{std::make_shared, std::atomic>>()}, - mConfigured{false}, mThreadManager{threadManagerMode}, mMultiThreadEnabled{true} { - try - { - // It cannot be directly included in the constructor (compiler error for copying std::atomic) - spVideoSeek->first = false; - spVideoSeek->second = 0; - } - catch (const std::exception& e) - { - error(e.what(), __LINE__, __FUNCTION__, __FILE__); - } } - template - Wrapper::~Wrapper() + template + Wrapper::~Wrapper() { try { stop(); - reset(); + // Reset mThreadManager + mThreadManager.reset(); + // Reset user workers + for (auto& userW : mUserWs) + userW.clear(); } catch (const std::exception& e) { @@ -313,8 +257,8 @@ namespace op } } - template - void Wrapper::disableMultiThreading() + template + void Wrapper::disableMultiThreading() { try { @@ -326,17 +270,19 @@ namespace op } } - template - void Wrapper::setWorkerInput(const TWorker& worker, - const bool workerOnNewThread) + template + void Wrapper::setWorker( + const WorkerType workerType, const TWorker& worker, const bool 
workerOnNewThread) { try { - mUserInputWs.clear(); + // Security check if (worker == nullptr) error("Your worker is a nullptr.", __LINE__, __FILE__, __FUNCTION__); - mUserInputWs.emplace_back(worker); - mUserInputWsOnNewThread = {workerOnNewThread}; + // Add worker + mUserWs[int(workerType)].clear(); + mUserWs[int(workerType)].emplace_back(worker); + mUserWsOnNewThread[int(workerType)] = workerOnNewThread; } catch (const std::exception& e) { @@ -344,17 +290,22 @@ namespace op } } - template - void Wrapper::setWorkerPostProcessing(const TWorker& worker, - const bool workerOnNewThread) + template + void Wrapper::configure(const WrapperStructPose& wrapperStructPose, + const WrapperStructFace& wrapperStructFace, + const WrapperStructHand& wrapperStructHand, + const WrapperStructExtra& wrapperStructExtra, + const WrapperStructInput& wrapperStructInput, + const WrapperStructOutput& wrapperStructOutput) { try { - mUserPostProcessingWs.clear(); - if (worker == nullptr) - error("Your worker is a nullptr.", __LINE__, __FILE__, __FUNCTION__); - mUserPostProcessingWs.emplace_back(worker); - mUserPostProcessingWsOnNewThread = {workerOnNewThread}; + mWrapperStructPose = wrapperStructPose; + mWrapperStructFace = wrapperStructFace; + mWrapperStructHand = wrapperStructHand; + mWrapperStructExtra = wrapperStructExtra; + mWrapperStructInput = wrapperStructInput; + mWrapperStructOutput = wrapperStructOutput; } catch (const std::exception& e) { @@ -362,17 +313,25 @@ namespace op } } - template - void Wrapper::setWorkerOutput(const TWorker& worker, - const bool workerOnNewThread) + // template + // void Wrapper::configure(const WrapperStructPose& wrapperStructPose) + // { + // try + // { + // mWrapperStructPose = wrapperStructPose; + // } + // catch (const std::exception& e) + // { + // error(e.what(), __LINE__, __FUNCTION__, __FILE__); + // } + // } + + template + void Wrapper::configure(const WrapperStructFace& wrapperStructFace) { try { - mUserOutputWs.clear(); - if (worker == 
nullptr) - error("Your worker is a nullptr.", __LINE__, __FILE__, __FUNCTION__); - mUserOutputWs.emplace_back(worker); - mUserOutputWsOnNewThread = {workerOnNewThread}; + mWrapperStructFace = wrapperStructFace; } catch (const std::exception& e) { @@ -380,15 +339,12 @@ namespace op } } - template - void Wrapper::configure(const WrapperStructPose& wrapperStructPose, - const WrapperStructInput& wrapperStructInput, - const WrapperStructOutput& wrapperStructOutput) + template + void Wrapper::configure(const WrapperStructHand& wrapperStructHand) { try { - configure(wrapperStructPose, WrapperStructFace{}, WrapperStructHand{}, - WrapperStructExtra{}, wrapperStructInput, wrapperStructOutput); + mWrapperStructHand = wrapperStructHand; } catch (const std::exception& e) { @@ -396,16 +352,12 @@ namespace op } } - template - void Wrapper::configure(const WrapperStructPose& wrapperStructPose, - const WrapperStructFace& wrapperStructFace, - const WrapperStructInput& wrapperStructInput, - const WrapperStructOutput& wrapperStructOutput) + template + void Wrapper::configure(const WrapperStructExtra& wrapperStructExtra) { try { - configure(wrapperStructPose, wrapperStructFace, WrapperStructHand{}, - WrapperStructExtra{}, wrapperStructInput, wrapperStructOutput); + mWrapperStructExtra = wrapperStructExtra; } catch (const std::exception& e) { @@ -413,16 +365,12 @@ namespace op } } - template - void Wrapper::configure(const WrapperStructPose& wrapperStructPose, - const WrapperStructHand& wrapperStructHand, - const WrapperStructInput& wrapperStructInput, - const WrapperStructOutput& wrapperStructOutput) + template + void Wrapper::configure(const WrapperStructInput& wrapperStructInput) { try { - configure(wrapperStructPose, WrapperStructFace{}, wrapperStructHand, - WrapperStructExtra{}, wrapperStructInput, wrapperStructOutput); + mWrapperStructInput = wrapperStructInput; } catch (const std::exception& e) { @@ -430,627 +378,12 @@ namespace op } } - template - void Wrapper::configure(const 
WrapperStructPose& wrapperStructPoseTemp, - const WrapperStructFace& wrapperStructFace, - const WrapperStructHand& wrapperStructHand, - const WrapperStructExtra& wrapperStructExtra, - const WrapperStructInput& wrapperStructInput, - const WrapperStructOutput& wrapperStructOutput) + template + void Wrapper::configure(const WrapperStructOutput& wrapperStructOutput) { try { - auto wrapperStructPose = wrapperStructPoseTemp; - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // Required parameters - const auto renderOutput = wrapperStructPose.renderMode != RenderMode::None - || wrapperStructFace.renderMode != RenderMode::None - || wrapperStructHand.renderMode != RenderMode::None; - const auto renderOutputGpu = wrapperStructPose.renderMode == RenderMode::Gpu - || wrapperStructFace.renderMode == RenderMode::Gpu - || wrapperStructHand.renderMode == RenderMode::Gpu; - const auto renderFace = wrapperStructFace.enable && wrapperStructFace.renderMode != RenderMode::None; - const auto renderHand = wrapperStructHand.enable && wrapperStructHand.renderMode != RenderMode::None; - const auto renderHandGpu = wrapperStructHand.enable && wrapperStructHand.renderMode == RenderMode::Gpu; - - // Check no wrong/contradictory flags enabled - const auto userOutputWsEmpty = mUserOutputWs.empty(); - wrapperConfigureSecurityChecks(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, - wrapperStructInput, wrapperStructOutput, renderOutput, userOutputWsEmpty, - mThreadManagerMode); - - // Get number threads - auto numberThreads = wrapperStructPose.gpuNumber; - auto gpuNumberStart = wrapperStructPose.gpuNumberStart; - // CPU --> 1 thread or no pose extraction - if (getGpuMode() == GpuMode::NoGpu) - { - numberThreads = (wrapperStructPose.gpuNumber == 0 ? 0 : 1); - gpuNumberStart = 0; - // Disabling multi-thread makes the code 400 ms faster (2.3 sec vs. 
2.7 in i7-6850K) - // and fixes the bug that the screen was not properly displayed and only refreshed sometimes - // Note: The screen bug could be also fixed by using waitKey(30) rather than waitKey(1) - disableMultiThreading(); - } - // GPU --> user picks (<= #GPUs) - else - { - // Get total number GPUs - const auto totalGpuNumber = getGpuNumber(); - // If number GPU < 0 --> set it to all the available GPUs - if (numberThreads < 0) - { - if (totalGpuNumber <= gpuNumberStart) - error("Number of initial GPU (`--number_gpu_start`) must be lower than the total number of" - " used GPUs (`--number_gpu`)", __LINE__, __FUNCTION__, __FILE__); - numberThreads = totalGpuNumber - gpuNumberStart; - // Reset initial GPU to 0 (we want them all) - // Logging message - log("Auto-detecting all available GPUs... Detected " + std::to_string(totalGpuNumber) - + " GPU(s), using " + std::to_string(numberThreads) + " of them starting at GPU " - + std::to_string(gpuNumberStart) + ".", Priority::High); - } - // Security check - if (gpuNumberStart + numberThreads > totalGpuNumber) - error("Initial GPU selected (`--number_gpu_start`) + number GPUs to use (`--number_gpu`) must" - " be lower or equal than the total number of GPUs in your machine (" - + std::to_string(gpuNumberStart) + " + " - + std::to_string(numberThreads) + " vs. 
" - + std::to_string(totalGpuNumber) + ").", - __LINE__, __FUNCTION__, __FILE__); - } - - // Proper format - const auto writeImagesCleaned = formatAsDirectory(wrapperStructOutput.writeImages); - const auto writeKeypointCleaned = formatAsDirectory(wrapperStructOutput.writeKeypoint); - const auto writeJsonCleaned = formatAsDirectory(wrapperStructOutput.writeJson); - const auto writeHeatMapsCleaned = formatAsDirectory(wrapperStructOutput.writeHeatMaps); - const auto modelFolder = formatAsDirectory(wrapperStructPose.modelFolder); - - // Common parameters - auto finalOutputSize = wrapperStructPose.outputSize; - Point producerSize{-1,-1}; - const auto oPProducer = (wrapperStructInput.producerSharedPtr != nullptr); - if (oPProducer) - { - // 1. Set producer properties - const auto displayProducerFpsMode = (wrapperStructInput.realTimeProcessing - ? ProducerFpsMode::OriginalFps : ProducerFpsMode::RetrievalFps); - wrapperStructInput.producerSharedPtr->setProducerFpsMode(displayProducerFpsMode); - wrapperStructInput.producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip); - wrapperStructInput.producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate); - wrapperStructInput.producerSharedPtr->set(ProducerProperty::AutoRepeat, - wrapperStructInput.framesRepeat); - // 2. 
Set finalOutputSize - producerSize = Point{(int)wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH), - (int)wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)}; - // Set finalOutputSize to input size if desired - if (finalOutputSize.x == -1 || finalOutputSize.y == -1) - finalOutputSize = producerSize; - } - - // Producer - if (oPProducer) - { - const auto datumProducer = std::make_shared>( - wrapperStructInput.producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameLast, - spVideoSeek - ); - wDatumProducer = std::make_shared>(datumProducer); - } - else - wDatumProducer = nullptr; - - std::vector> poseExtractorNets; - std::vector> faceExtractorNets; - std::vector> handExtractorNets; - std::vector> poseGpuRenderers; - std::shared_ptr poseCpuRenderer; - if (numberThreads > 0) - { - // Get input scales and sizes - const auto scaleAndSizeExtractor = std::make_shared( - wrapperStructPose.netInputSize, finalOutputSize, wrapperStructPose.scalesNumber, - wrapperStructPose.scaleGap - ); - spWScaleAndSizeExtractor = std::make_shared>(scaleAndSizeExtractor); - - // Input cvMat to OpenPose input & output format - const auto cvMatToOpInput = std::make_shared(wrapperStructPose.poseModel); - spWCvMatToOpInput = std::make_shared>(cvMatToOpInput); - if (renderOutput) - { - const auto cvMatToOpOutput = std::make_shared(); - spWCvMatToOpOutput = std::make_shared>(cvMatToOpOutput); - } - - // Pose estimators & renderers - std::vector cpuRenderers; - spWPoseExtractors.clear(); - spWPoseExtractors.resize(numberThreads); - if (wrapperStructPose.enable) - { - // Pose estimators - for (auto gpuId = 0; gpuId < numberThreads; gpuId++) - poseExtractorNets.emplace_back(std::make_shared( - wrapperStructPose.poseModel, modelFolder, gpuId + gpuNumberStart, - wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale, - wrapperStructPose.addPartCandidates, wrapperStructPose.enableGoogleLogging - )); - - // Pose renderers - if (renderOutputGpu || 
wrapperStructPose.renderMode == RenderMode::Cpu) - { - // If wrapperStructPose.renderMode != RenderMode::Gpu but renderOutput, then we create an - // alpha = 0 pose renderer in order to keep the removing background option - const auto alphaKeypoint = (wrapperStructPose.renderMode != RenderMode::None - ? wrapperStructPose.alphaKeypoint : 0.f); - const auto alphaHeatMap = (wrapperStructPose.renderMode != RenderMode::None - ? wrapperStructPose.alphaHeatMap : 0.f); - // GPU rendering - if (renderOutputGpu) - { - for (const auto& poseExtractorNet : poseExtractorNets) - { - poseGpuRenderers.emplace_back(std::make_shared( - wrapperStructPose.poseModel, poseExtractorNet, wrapperStructPose.renderThreshold, - wrapperStructPose.blendOriginalFrame, alphaKeypoint, - alphaHeatMap, wrapperStructPose.defaultPartToRender - )); - } - } - // CPU rendering - if (wrapperStructPose.renderMode == RenderMode::Cpu) - { - poseCpuRenderer = std::make_shared( - wrapperStructPose.poseModel, wrapperStructPose.renderThreshold, - wrapperStructPose.blendOriginalFrame, alphaKeypoint, alphaHeatMap, - wrapperStructPose.defaultPartToRender); - cpuRenderers.emplace_back(std::make_shared>(poseCpuRenderer)); - } - } - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - - // Pose extractor(s) - spWPoseExtractors.resize(poseExtractorNets.size()); - const auto personIdExtractor = (wrapperStructExtra.identification - ? std::make_shared() : nullptr); - // Keep top N people - // Added right after PoseExtractorNet to avoid: - // 1) Rendering people that are later deleted (wrong visualization). - // 2) Processing faces and hands on people that will be deleted (speed up). - // 3) Running tracking before deleting the people. - // Add KeepTopNPeople for each PoseExtractorNet - const auto keepTopNPeople = (wrapperStructPose.numberPeopleMax > 0 ? 
- std::make_shared(wrapperStructPose.numberPeopleMax) - : nullptr); - // Person tracker - auto personTrackers = std::make_shared>>(); - if (wrapperStructExtra.tracking > -1) - personTrackers->emplace_back( - std::make_shared(wrapperStructExtra.tracking == 0)); - for (auto i = 0u; i < spWPoseExtractors.size(); i++) - { - // OpenPose keypoint detector + keepTopNPeople - // + ID extractor (experimental) + tracking (experimental) - const auto poseExtractor = std::make_shared( - poseExtractorNets.at(i), keepTopNPeople, personIdExtractor, personTrackers, - wrapperStructPose.numberPeopleMax, wrapperStructExtra.tracking); - spWPoseExtractors.at(i) = {std::make_shared>(poseExtractor)}; - // // Just OpenPose keypoint detector - // spWPoseExtractors.at(i) = {std::make_shared>( - // poseExtractorNets.at(i))}; - } - - // // (Before tracking / id extractor) - // // Added right after PoseExtractorNet to avoid: - // // 1) Rendering people that are later deleted (wrong visualization). - // // 2) Processing faces and hands on people that will be deleted (speed up). - // if (wrapperStructPose.numberPeopleMax > 0) - // { - // // Add KeepTopNPeople for each PoseExtractorNet - // const auto keepTopNPeople = std::make_shared( - // wrapperStructPose.numberPeopleMax); - // for (auto& wPose : spWPoseExtractors) - // wPose.emplace_back(std::make_shared>(keepTopNPeople)); - // } - } - - - // Face extractor(s) - if (wrapperStructFace.enable) - { - // Face detector - // OpenPose face detector - if (wrapperStructPose.enable) - { - const auto faceDetector = std::make_shared(wrapperStructPose.poseModel); - for (auto& wPose : spWPoseExtractors) - wPose.emplace_back(std::make_shared>(faceDetector)); - } - // OpenCV face detector - else - { - log("Body keypoint detection is disabled. 
Hence, using OpenCV face detector (much less" - " accurate but faster).", Priority::High); - for (auto& wPose : spWPoseExtractors) - { - // 1 FaceDetectorOpenCV per thread, OpenCV face detector is not thread-safe - const auto faceDetectorOpenCV = std::make_shared(modelFolder); - wPose.emplace_back( - std::make_shared>(faceDetectorOpenCV) - ); - } - } - // Face keypoint extractor - for (auto gpu = 0u; gpu < spWPoseExtractors.size(); gpu++) - { - // Face keypoint extractor - const auto netOutputSize = wrapperStructFace.netInputSize; - const auto faceExtractorNet = std::make_shared( - wrapperStructFace.netInputSize, netOutputSize, modelFolder, - gpu + gpuNumberStart, wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale, - wrapperStructPose.enableGoogleLogging - ); - faceExtractorNets.emplace_back(faceExtractorNet); - spWPoseExtractors.at(gpu).emplace_back( - std::make_shared>(faceExtractorNet)); - } - } - - // Hand extractor(s) - if (wrapperStructHand.enable) - { - const auto handDetector = std::make_shared(wrapperStructPose.poseModel); - for (auto gpu = 0u; gpu < spWPoseExtractors.size(); gpu++) - { - // Hand detector - // If tracking - if (wrapperStructHand.tracking) - spWPoseExtractors.at(gpu).emplace_back( - std::make_shared>(handDetector) - ); - // If detection - else - spWPoseExtractors.at(gpu).emplace_back( - std::make_shared>(handDetector)); - // Hand keypoint extractor - const auto netOutputSize = wrapperStructHand.netInputSize; - const auto handExtractorNet = std::make_shared( - wrapperStructHand.netInputSize, netOutputSize, modelFolder, - gpu + gpuNumberStart, wrapperStructHand.scalesNumber, wrapperStructHand.scaleRange, - wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale, - wrapperStructPose.enableGoogleLogging - ); - handExtractorNets.emplace_back(handExtractorNet); - spWPoseExtractors.at(gpu).emplace_back( - std::make_shared>(handExtractorNet) - ); - // If tracking - if (wrapperStructHand.tracking) - 
spWPoseExtractors.at(gpu).emplace_back( - std::make_shared>(handDetector) - ); - } - } - - // Pose renderer(s) - if (!poseGpuRenderers.empty()) - for (auto i = 0u; i < spWPoseExtractors.size(); i++) - spWPoseExtractors.at(i).emplace_back(std::make_shared>( - poseGpuRenderers.at(i) - )); - - // Face renderer(s) - if (renderFace) - { - // CPU rendering - if (wrapperStructFace.renderMode == RenderMode::Cpu) - { - // Construct face renderer - const auto faceRenderer = std::make_shared(wrapperStructFace.renderThreshold, - wrapperStructFace.alphaKeypoint, - wrapperStructFace.alphaHeatMap); - // Add worker - cpuRenderers.emplace_back(std::make_shared>(faceRenderer)); - } - // GPU rendering - else if (wrapperStructFace.renderMode == RenderMode::Gpu) - { - for (auto i = 0u; i < spWPoseExtractors.size(); i++) - { - // Construct face renderer - const auto faceRenderer = std::make_shared( - wrapperStructFace.renderThreshold, wrapperStructFace.alphaKeypoint, - wrapperStructFace.alphaHeatMap - ); - // Performance boost -> share spGpuMemory for all renderers - if (!poseGpuRenderers.empty()) - { - const bool isLastRenderer = !renderHandGpu; - const auto renderer = std::static_pointer_cast( - poseGpuRenderers.at(i) - ); - faceRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(), - isLastRenderer); - } - // Add worker - spWPoseExtractors.at(i).emplace_back( - std::make_shared>(faceRenderer)); - } - } - else - error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__); - } - - // Hand renderer(s) - if (renderHand) - { - // CPU rendering - if (wrapperStructHand.renderMode == RenderMode::Cpu) - { - // Construct hand renderer - const auto handRenderer = std::make_shared(wrapperStructHand.renderThreshold, - wrapperStructHand.alphaKeypoint, - wrapperStructHand.alphaHeatMap); - // Add worker - cpuRenderers.emplace_back(std::make_shared>(handRenderer)); - } - // GPU rendering - else if (wrapperStructHand.renderMode == RenderMode::Gpu) - { - for (auto i = 0u; i < 
spWPoseExtractors.size(); i++) - { - // Construct hands renderer - const auto handRenderer = std::make_shared( - wrapperStructHand.renderThreshold, wrapperStructHand.alphaKeypoint, - wrapperStructHand.alphaHeatMap - ); - // Performance boost -> share spGpuMemory for all renderers - if (!poseGpuRenderers.empty()) - { - const bool isLastRenderer = true; - const auto renderer = std::static_pointer_cast( - poseGpuRenderers.at(i) - ); - handRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(), - isLastRenderer); - } - // Add worker - spWPoseExtractors.at(i).emplace_back( - std::make_shared>(handRenderer)); - } - } - else - error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__); - } - - // 3-D reconstruction - spWPoseTriangulations.clear(); - if (wrapperStructExtra.reconstruct3d) - { - // For all (body/face/hands): PoseTriangulations ~30 msec, 8 GPUS ~30 msec for keypoint estimation - spWPoseTriangulations.resize(fastMax(1, int(spWPoseExtractors.size() / 4))); - for (auto i = 0u ; i < spWPoseTriangulations.size() ; i++) - { - const auto poseTriangulation = std::make_shared( - wrapperStructExtra.minViews3d); - spWPoseTriangulations.at(i) = {std::make_shared>( - poseTriangulation)}; - } - } - // Itermediate workers (e.g. OpenPose format to cv::Mat, json & frames recorder, ...) 
- mPostProcessingWs.clear(); - // // Person ID identification (when no multi-thread and no dependency on tracking) - // if (wrapperStructExtra.identification) - // { - // const auto personIdExtractor = std::make_shared(); - // mPostProcessingWs.emplace_back( - // std::make_shared>(personIdExtractor) - // ); - // } - // Frames processor (OpenPose format -> cv::Mat format) - if (renderOutput) - { - mPostProcessingWs = mergeVectors(mPostProcessingWs, cpuRenderers); - const auto opOutputToCvMat = std::make_shared(); - mPostProcessingWs.emplace_back(std::make_shared>(opOutputToCvMat)); - } - // Re-scale pose if desired - // If desired scale is not the current input - if (wrapperStructPose.keypointScale != ScaleMode::InputResolution - // and desired scale is not output when size(input) = size(output) - && !(wrapperStructPose.keypointScale == ScaleMode::OutputResolution && - (finalOutputSize == producerSize || finalOutputSize.x <= 0 || finalOutputSize.y <= 0)) - // and desired scale is not net output when size(input) = size(net output) - && !(wrapperStructPose.keypointScale == ScaleMode::NetOutputResolution - && producerSize == wrapperStructPose.netInputSize)) - { - // Then we must rescale the keypoints - auto keypointScaler = std::make_shared(wrapperStructPose.keypointScale); - mPostProcessingWs.emplace_back(std::make_shared>(keypointScaler)); - } - } - - // IK/Adam - const auto displayAdam = wrapperStructOutput.displayMode == DisplayMode::DisplayAdam - || (wrapperStructOutput.displayMode == DisplayMode::DisplayAll - && wrapperStructExtra.ikThreads > 0); - spWJointAngleEstimations.clear(); -#ifdef USE_3D_ADAM_MODEL - if (wrapperStructExtra.ikThreads > 0) - { - spWJointAngleEstimations.resize(wrapperStructExtra.ikThreads); - // Pose extractor(s) - for (auto i = 0u; i < spWJointAngleEstimations.size(); i++) - { - const auto jointAngleEstimation = std::make_shared(displayAdam); - spWJointAngleEstimations.at(i) = {std::make_shared>( - jointAngleEstimation)}; - } - } -#endif 
- - // Output workers - mOutputWs.clear(); - // Send information (e.g., to Unity) though UDP client-server communication -#ifdef USE_3D_ADAM_MODEL - if (!wrapperStructOutput.udpHost.empty() && !wrapperStructOutput.udpPort.empty()) - { - const auto udpSender = std::make_shared(wrapperStructOutput.udpHost, - wrapperStructOutput.udpPort); - mOutputWs.emplace_back(std::make_shared>(udpSender)); - } -#endif - // Write people pose data on disk (json for OpenCV >= 3, xml, yml...) - if (!writeKeypointCleaned.empty()) - { - const auto keypointSaver = std::make_shared(writeKeypointCleaned, - wrapperStructOutput.writeKeypointFormat); - mOutputWs.emplace_back(std::make_shared>(keypointSaver)); - if (wrapperStructFace.enable) - mOutputWs.emplace_back(std::make_shared>(keypointSaver)); - if (wrapperStructHand.enable) - mOutputWs.emplace_back(std::make_shared>(keypointSaver)); - } - // Write OpenPose output data on disk in json format (body/hand/face keypoints, body part locations if - // enabled, etc.) 
- if (!writeJsonCleaned.empty()) - { - const auto peopleJsonSaver = std::make_shared(writeJsonCleaned); - mOutputWs.emplace_back(std::make_shared>(peopleJsonSaver)); - } - // Write people pose data on disk (COCO validation json format) - if (!wrapperStructOutput.writeCocoJson.empty()) - { - // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it - const auto humanFormat = true; - const auto cocoJsonSaver = std::make_shared(wrapperStructOutput.writeCocoJson, - humanFormat, CocoJsonFormat::Body); - mOutputWs.emplace_back(std::make_shared>(cocoJsonSaver)); - } - // Write people foot pose data on disk (COCO validation json format for foot data) - if (!wrapperStructOutput.writeCocoFootJson.empty()) - { - // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it - const auto humanFormat = true; - const auto cocoJsonSaver = std::make_shared(wrapperStructOutput.writeCocoFootJson, - humanFormat, CocoJsonFormat::Foot); - mOutputWs.emplace_back(std::make_shared>(cocoJsonSaver)); - } - // Write frames as desired image format on hard disk - if (!writeImagesCleaned.empty()) - { - const auto imageSaver = std::make_shared(writeImagesCleaned, - wrapperStructOutput.writeImagesFormat); - mOutputWs.emplace_back(std::make_shared>(imageSaver)); - } - // Write frames as *.avi video on hard disk - const auto producerFps = (wrapperStructInput.producerSharedPtr == nullptr ? - 0. : wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FPS)); - const auto originalVideoFps = (wrapperStructOutput.writeVideoFps > 0 ? - wrapperStructOutput.writeVideoFps - : producerFps); - if (!wrapperStructOutput.writeVideo.empty()) - { - if (!oPProducer) - error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer" - " is one of the default ones (e.g. 
video, webcam, ...).", - __LINE__, __FUNCTION__, __FILE__); - if (finalOutputSize.x <= 0 || finalOutputSize.y <= 0) - error("Video can only be recorded if outputSize is fixed (e.g. video, webcam, IP camera)," - "but not for a image directory.", __LINE__, __FUNCTION__, __FILE__); - const auto videoSaver = std::make_shared( - wrapperStructOutput.writeVideo, CV_FOURCC('M','J','P','G'), originalVideoFps, finalOutputSize - ); - mOutputWs.emplace_back(std::make_shared>(videoSaver)); - } - // Write joint angles as *.bvh file on hard disk -#ifdef USE_3D_ADAM_MODEL - if (!wrapperStructOutput.writeBvh.empty()) - { - const auto bvhSaver = std::make_shared( - wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps - ); - mOutputWs.emplace_back(std::make_shared>(bvhSaver)); - } -#endif - // Write heat maps as desired image format on hard disk - if (!writeHeatMapsCleaned.empty()) - { - const auto heatMapSaver = std::make_shared(writeHeatMapsCleaned, - wrapperStructOutput.writeHeatMapsFormat); - mOutputWs.emplace_back(std::make_shared>(heatMapSaver)); - } - // Add frame information for GUI - const bool guiEnabled = (wrapperStructOutput.displayMode != DisplayMode::NoDisplay); - // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting - // recorded frames will look exactly as the final displayed image by the GUI - if (wrapperStructOutput.guiVerbose && (guiEnabled || !mUserOutputWs.empty() - || mThreadManagerMode == ThreadManagerMode::Asynchronous - || mThreadManagerMode == ThreadManagerMode::AsynchronousOut)) - { - const auto guiInfoAdder = std::make_shared(numberThreads, guiEnabled); - mOutputWs.emplace_back(std::make_shared>(guiInfoAdder)); - } - // Minimal graphical user interface (GUI) - spWGui = nullptr; - if (guiEnabled) - { - // PoseRenderers to Renderers - std::vector> renderers; - if (wrapperStructPose.renderMode == RenderMode::Cpu) - 
renderers.emplace_back(std::static_pointer_cast(poseCpuRenderer)); - else - for (const auto& poseGpuRenderer : poseGpuRenderers) - renderers.emplace_back(std::static_pointer_cast(poseGpuRenderer)); - // Display - // Adam (+3-D/2-D) display - if (displayAdam) - { -#ifdef USE_3D_ADAM_MODEL - // Gui - const auto gui = std::make_shared( - finalOutputSize, wrapperStructOutput.fullScreen, mThreadManager.getIsRunningSharedPtr(), - spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers, - wrapperStructOutput.displayMode, JointAngleEstimation::getTotalModel(), - wrapperStructOutput.writeVideoAdam - ); - // WGui - spWGui = {std::make_shared>(gui)}; -#endif - } - // 3-D (+2-D) display - else if (wrapperStructOutput.displayMode == DisplayMode::Display3D - || wrapperStructOutput.displayMode == DisplayMode::DisplayAll) - { - // Gui - const auto gui = std::make_shared( - finalOutputSize, wrapperStructOutput.fullScreen, mThreadManager.getIsRunningSharedPtr(), - spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers, - wrapperStructPose.poseModel, wrapperStructOutput.displayMode - ); - // WGui - spWGui = {std::make_shared>(gui)}; - } - // 2-D display - else if (wrapperStructOutput.displayMode == DisplayMode::Display2D) - { - // Gui - const auto gui = std::make_shared( - finalOutputSize, wrapperStructOutput.fullScreen, mThreadManager.getIsRunningSharedPtr(), - spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers - ); - // WGui - spWGui = {std::make_shared>(gui)}; - } - else - error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__); - } - // Set wrapper as configured - mConfigured = true; - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + mWrapperStructOutput = wrapperStructOutput; } catch (const std::exception& e) { @@ -1058,12 +391,15 @@ namespace op } } - template - void Wrapper::exec() + template + void Wrapper::exec() { try { - configureThreadManager(); + configureThreadManager( + 
mThreadManager, mMultiThreadEnabled, mThreadManagerMode, mWrapperStructPose, mWrapperStructFace, + mWrapperStructHand, mWrapperStructExtra, mWrapperStructInput, mWrapperStructOutput, + mUserWs, mUserWsOnNewThread); log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); mThreadManager.exec(); } @@ -1073,12 +409,15 @@ namespace op } } - template - void Wrapper::start() + template + void Wrapper::start() { try { - configureThreadManager(); + configureThreadManager( + mThreadManager, mMultiThreadEnabled, mThreadManagerMode, mWrapperStructPose, mWrapperStructFace, + mWrapperStructHand, mWrapperStructExtra, mWrapperStructInput, mWrapperStructOutput, + mUserWs, mUserWsOnNewThread); log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); mThreadManager.start(); } @@ -1088,8 +427,8 @@ namespace op } } - template - void Wrapper::stop() + template + void Wrapper::stop() { try { @@ -1101,8 +440,8 @@ namespace op } } - template - bool Wrapper::isRunning() const + template + bool Wrapper::isRunning() const { try { @@ -1115,12 +454,12 @@ namespace op } } - template - bool Wrapper::tryEmplace(TDatumsSP& tDatums) + template + bool Wrapper::tryEmplace(TDatumsSP& tDatums) { try { - if (!mUserInputWs.empty()) + if (!mUserWs[int(WorkerType::Input)].empty()) error("Emplace cannot be called if an input worker was already selected.", __LINE__, __FUNCTION__, __FILE__); return mThreadManager.tryEmplace(tDatums); @@ -1132,12 +471,12 @@ namespace op } } - template - bool Wrapper::waitAndEmplace(TDatumsSP& tDatums) + template + bool Wrapper::waitAndEmplace(TDatumsSP& tDatums) { try { - if (!mUserInputWs.empty()) + if (!mUserWs[int(WorkerType::Input)].empty()) error("Emplace cannot be called if an input worker was already selected.", __LINE__, __FUNCTION__, __FILE__); return mThreadManager.waitAndEmplace(tDatums); @@ -1149,12 +488,12 @@ namespace op } } - template - bool Wrapper::tryPush(const TDatumsSP& tDatums) + template + bool Wrapper::tryPush(const TDatumsSP& tDatums) { try { - if 
(!mUserInputWs.empty()) + if (!mUserWs[int(WorkerType::Input)].empty()) error("Push cannot be called if an input worker was already selected.", __LINE__, __FUNCTION__, __FILE__); return mThreadManager.tryPush(tDatums); @@ -1166,12 +505,12 @@ namespace op } } - template - bool Wrapper::waitAndPush(const TDatumsSP& tDatums) + template + bool Wrapper::waitAndPush(const TDatumsSP& tDatums) { try { - if (!mUserInputWs.empty()) + if (!mUserWs[int(WorkerType::Input)].empty()) error("Push cannot be called if an input worker was already selected.", __LINE__, __FUNCTION__, __FILE__); return mThreadManager.waitAndPush(tDatums); @@ -1183,12 +522,12 @@ namespace op } } - template - bool Wrapper::tryPop(TDatumsSP& tDatums) + template + bool Wrapper::tryPop(TDatumsSP& tDatums) { try { - if (!mUserOutputWs.empty()) + if (!mUserWs[int(WorkerType::Output)].empty()) error("Pop cannot be called if an output worker was already selected.", __LINE__, __FUNCTION__, __FILE__); return mThreadManager.tryPop(tDatums); @@ -1200,12 +539,12 @@ namespace op } } - template - bool Wrapper::waitAndPop(TDatumsSP& tDatums) + template + bool Wrapper::waitAndPop(TDatumsSP& tDatums) { try { - if (!mUserOutputWs.empty()) + if (!mUserWs[int(WorkerType::Output)].empty()) error("Pop cannot be called if an output worker was already selected.", __LINE__, __FUNCTION__, __FILE__); return mThreadManager.waitAndPop(tDatums); @@ -1217,288 +556,43 @@ namespace op } } - template - void Wrapper::reset() + template + bool Wrapper::emplaceAndPop(TDatumsSP& tDatums) { try { - mConfigured = false; - mThreadManager.reset(); - mThreadId = 0ull; - // Reset - mUserInputWs.clear(); - wDatumProducer = nullptr; - spWScaleAndSizeExtractor = nullptr; - spWCvMatToOpInput = nullptr; - spWCvMatToOpOutput = nullptr; - spWPoseExtractors.clear(); - spWPoseTriangulations.clear(); - spWJointAngleEstimations.clear(); - mPostProcessingWs.clear(); - mUserPostProcessingWs.clear(); - mOutputWs.clear(); - spWGui = nullptr; - 
mUserOutputWs.clear(); - } - catch (const std::exception& e) - { - error(e.what(), __LINE__, __FUNCTION__, __FILE__); - } - } - - template - void Wrapper::configureThreadManager() - { - try - { - // The less number of queues -> the less threads opened, and potentially the less lag - - // Security checks - if (!mConfigured) - error("Configure the Wrapper class before calling `start()`.", __LINE__, __FUNCTION__, __FILE__); - if ((wDatumProducer == nullptr) == (mUserInputWs.empty()) - && mThreadManagerMode != ThreadManagerMode::Asynchronous - && mThreadManagerMode != ThreadManagerMode::AsynchronousIn) - { - const auto message = "You need to have 1 and only 1 producer selected. You can introduce your own" - " producer by using setWorkerInput() or use the OpenPose default producer by" - " configuring it in the configure function) or use the" - " ThreadManagerMode::Asynchronous(In) mode."; - error(message, __LINE__, __FUNCTION__, __FILE__); - } - if (mOutputWs.empty() && mUserOutputWs.empty() && spWGui == nullptr - && mThreadManagerMode != ThreadManagerMode::Asynchronous - && mThreadManagerMode != ThreadManagerMode::AsynchronousOut) - { - error("No output selected.", __LINE__, __FUNCTION__, __FILE__); - } - - // Thread Manager - // Clean previous thread manager (avoid configure to crash the program if used more than once) - mThreadManager.reset(); - mThreadId = 0ull; - auto queueIn = 0ull; - auto queueOut = 1ull; - // After producer - // ID generator (before any multi-threading or any function that requires the ID) - const auto wIdGenerator = std::make_shared>(); - std::vector workersAux{wIdGenerator}; - // Scale & cv::Mat to OP format - if (spWScaleAndSizeExtractor != nullptr) - workersAux = mergeVectors(workersAux, {spWScaleAndSizeExtractor}); - if (spWCvMatToOpInput != nullptr) - workersAux = mergeVectors(workersAux, {spWCvMatToOpInput}); - // cv::Mat to output format - if (spWCvMatToOpOutput != nullptr) - workersAux = mergeVectors(workersAux, {spWCvMatToOpOutput}); - 
- // Producer - // If custom user Worker and uses its own thread - if (!mUserInputWs.empty() && mUserInputWsOnNewThread) - { - // Thread 0, queues 0 -> 1 - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, mUserInputWs, queueIn++, queueOut++); - threadIdPP(); - } - // If custom user Worker in same thread - else if (!mUserInputWs.empty()) - workersAux = mergeVectors(mUserInputWs, workersAux); - // If OpenPose producer (same thread) - else if (wDatumProducer != nullptr) - workersAux = mergeVectors({wDatumProducer}, workersAux); - // Otherwise - else if (mThreadManagerMode != ThreadManagerMode::Asynchronous - && mThreadManagerMode != ThreadManagerMode::AsynchronousIn) - error("No input selected.", __LINE__, __FUNCTION__, __FILE__); - // Thread 0 or 1, queues 0 -> 1 - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, workersAux, queueIn++, queueOut++); - // Increase thread - threadIdPP(); - - // Pose estimation & rendering - // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs - if (!spWPoseExtractors.empty()) - { - if (mMultiThreadEnabled) - { - for (auto& wPose : spWPoseExtractors) - { - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, wPose, queueIn, queueOut); - threadIdPP(); - } - queueIn++; - queueOut++; - // Sort frames - Required own thread - if (spWPoseExtractors.size() > 1u) - { - const auto wQueueOrderer = std::make_shared>(); - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, wQueueOrderer, queueIn++, queueOut++); - threadIdPP(); - } - } - else - { - if (spWPoseExtractors.size() > 1) - log("Multi-threading disabled, only 1 thread running. All GPUs have been disabled but the" - " first one, which is defined by gpuNumberStart (e.g. 
in the OpenPose demo, it is set" - " with the `--num_gpu_start` flag).", Priority::High); - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, spWPoseExtractors.at(0), queueIn++, queueOut++); - } - } - // Assemble all frames from same time instant (3-D module) - const auto wQueueAssembler = std::make_shared>(); - // 3-D reconstruction - if (!spWPoseTriangulations.empty()) - { - // Assemble frames - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, wQueueAssembler, queueIn++, queueOut++); - threadIdPP(); - // 3-D reconstruction - if (mMultiThreadEnabled) - { - for (auto& wPoseTriangulations : spWPoseTriangulations) - { - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, wPoseTriangulations, queueIn, queueOut); - threadIdPP(); - } - queueIn++; - queueOut++; - // Sort frames - if (spWPoseTriangulations.size() > 1u) - { - const auto wQueueOrderer = std::make_shared>(); - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, wQueueOrderer, queueIn++, queueOut++); - threadIdPP(); - } - } - else - { - if (spWPoseTriangulations.size() > 1) - log("Multi-threading disabled, only 1 thread running for 3-D triangulation.", - Priority::High); - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, spWPoseTriangulations.at(0), queueIn++, queueOut++); - } - } - else - mPostProcessingWs = mergeVectors({wQueueAssembler}, mPostProcessingWs); - // Adam/IK step - if (!spWJointAngleEstimations.empty()) - { - if (mMultiThreadEnabled) - { - for (auto& wJointAngleEstimator : spWJointAngleEstimations) - { - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, wJointAngleEstimator, queueIn, queueOut); - threadIdPP(); - } - queueIn++; - queueOut++; - // Sort frames - if (spWJointAngleEstimations.size() > 1) - { - const auto wQueueOrderer = std::make_shared>(); - 
log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, wQueueOrderer, queueIn++, queueOut++); - threadIdPP(); - } - } - else - { - if (spWJointAngleEstimations.size() > 1) - log("Multi-threading disabled, only 1 thread running for joint angle estimation.", - Priority::High); - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, spWJointAngleEstimations.at(0), queueIn++, queueOut++); - } - } - // Post processing workers - if (!mPostProcessingWs.empty()) - { - // Combining mPostProcessingWs and mOutputWs - mOutputWs = mergeVectors(mPostProcessingWs, mOutputWs); - // // If I wanna split them - // log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // mThreadManager.add(mThreadId, mPostProcessingWs, queueIn++, queueOut++); - // threadIdPP(); - } - // If custom user Worker and uses its own thread - if (!mUserPostProcessingWs.empty()) - { - // If custom user Worker in its own thread - if (mUserPostProcessingWsOnNewThread) - { - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, mUserPostProcessingWs, queueIn++, queueOut++); - threadIdPP(); - } - // If custom user Worker in same thread - // Merge with mOutputWs - else - mOutputWs = mergeVectors(mOutputWs, mUserPostProcessingWs); - } - // Output workers - if (!mOutputWs.empty()) - { - // Thread 4 or 5, queues 4 -> 5 - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, mOutputWs, queueIn++, queueOut++); - threadIdPP(); - } - // User output worker - // Thread Y, queues Q -> Q+1 - if (!mUserOutputWs.empty()) - { - if (mUserOutputWsOnNewThread) - { - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, mUserOutputWs, queueIn++, queueOut++); - threadIdPP(); - } - else - { - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId-1, mUserOutputWs, queueIn++, queueOut++); - } - } - // OpenPose GUI - if 
(spWGui != nullptr) - { - // Thread Y+1, queues Q+1 -> Q+2 - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); - mThreadManager.add(mThreadId, spWGui, queueIn++, queueOut++); - threadIdPP(); - } - log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + // Run waitAndEmplace + waitAndPop + if (waitAndEmplace(tDatums)) + return waitAndPop(tDatums); + return false; } catch (const std::exception& e) { error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return false; } } - template - unsigned long long Wrapper::threadIdPP() + template + TDatumsSP Wrapper::emplaceAndPop(const cv::Mat& cvMat) { try { - if (mMultiThreadEnabled) - mThreadId++; - return mThreadId; + // Create new datum + auto datumsPtr = std::make_shared(); + datumsPtr->emplace_back(); + auto& datum = datumsPtr->at(0); + // Fill datum + datum.cvInputData = cvMat; + // Emplace and pop + emplaceAndPop(datumsPtr); + // Return result + return datumsPtr; } catch (const std::exception& e) { error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return 0ull; + return false; } } diff --git a/include/openpose/wrapper/wrapperAuxiliary.hpp b/include/openpose/wrapper/wrapperAuxiliary.hpp index d8d1c100..193ae5df 100644 --- a/include/openpose/wrapper/wrapperAuxiliary.hpp +++ b/include/openpose/wrapper/wrapperAuxiliary.hpp @@ -1,6 +1,8 @@ #ifndef OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP #define OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP +#include +#include #include #include #include @@ -31,6 +33,943 @@ namespace op const bool renderOutput, const bool userOutputWsEmpty, const ThreadManagerMode threadManagerMode); + + /** + * Thread ID increase (private internal function). + * If multi-threading mode, it increases the thread ID. + * If single-threading mode (for debugging), it does not modify it. + * Note that mThreadId must be re-initialized to 0 before starting a new Wrapper configuration. + * @param threadId unsigned long long element with the current thread id value. 
I will be edited to the next + * `desired thread id number. + */ + OP_API void threadIdPP(unsigned long long& threadId, const bool multiThreadEnabled); + + /** + * Set ThreadManager from TWorkers (private internal function). + * After any configure() has been called, the TWorkers are initialized. This function resets the ThreadManager + * and adds them. + * Common code for start() and exec(). + */ + template, + typename TWorker = std::shared_ptr>> + OP_API void configureThreadManager( + ThreadManager& threadManager, const bool multiThreadEnabled, + const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPose, + const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand, + const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput, + const WrapperStructOutput& wrapperStructOutput, + const std::array, int(WorkerType::Size)>& userWs, + const std::array& userWsOnNewThread); +} + + + + + +// Implementation +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +namespace op +{ + template + void configureThreadManager( + ThreadManager& threadManager, const bool multiThreadEnabledTemp, + const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPoseTemp, + const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand, + const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput, + const WrapperStructOutput& wrapperStructOutput, + const std::array, int(WorkerType::Size)>& userWs, + const std::array& userWsOnNewThread) + { + try + { + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // Editable arguments + auto wrapperStructPose = wrapperStructPoseTemp; + auto multiThreadEnabled = multiThreadEnabledTemp; + + // Workers + TWorker datumProducerW; + TWorker scaleAndSizeExtractorW; + TWorker cvMatToOpInputW; + TWorker cvMatToOpOutputW; + 
std::vector> poseExtractorsWs; + std::vector> poseTriangulationsWs; + std::vector> jointAngleEstimationsWs; + std::vector postProcessingWs; + std::vector outputWs; + TWorker guiW; + + // User custom workers + const auto& userInputWs = userWs[int(WorkerType::Input)]; + const auto& userPostProcessingWs = userWs[int(WorkerType::PostProcessing)]; + const auto& userOutputWs = userWs[int(WorkerType::Output)]; + const auto userInputWsOnNewThread = userWsOnNewThread[int(WorkerType::Input)]; + const auto userPostProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PostProcessing)]; + const auto userOutputWsOnNewThread = userWsOnNewThread[int(WorkerType::Output)]; + + // Video seek + const auto spVideoSeek = std::make_shared, std::atomic>>(); + // It cannot be directly included in the constructor (compiler error for copying std::atomic) + spVideoSeek->first = false; + spVideoSeek->second = 0; + + // Required parameters + const auto renderOutput = wrapperStructPose.renderMode != RenderMode::None + || wrapperStructFace.renderMode != RenderMode::None + || wrapperStructHand.renderMode != RenderMode::None; + const auto renderOutputGpu = wrapperStructPose.renderMode == RenderMode::Gpu + || wrapperStructFace.renderMode == RenderMode::Gpu + || wrapperStructHand.renderMode == RenderMode::Gpu; + const auto renderFace = wrapperStructFace.enable && wrapperStructFace.renderMode != RenderMode::None; + const auto renderHand = wrapperStructHand.enable && wrapperStructHand.renderMode != RenderMode::None; + const auto renderHandGpu = wrapperStructHand.enable && wrapperStructHand.renderMode == RenderMode::Gpu; + + // Check no wrong/contradictory flags enabled + const auto userOutputWsEmpty = userOutputWs.empty(); + wrapperConfigureSecurityChecks(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, + wrapperStructInput, wrapperStructOutput, renderOutput, userOutputWsEmpty, + threadManagerMode); + + // Get number threads + auto numberThreads = 
wrapperStructPose.gpuNumber; + auto gpuNumberStart = wrapperStructPose.gpuNumberStart; + // CPU --> 1 thread or no pose extraction + if (getGpuMode() == GpuMode::NoGpu) + { + numberThreads = (wrapperStructPose.gpuNumber == 0 ? 0 : 1); + gpuNumberStart = 0; + // Disabling multi-thread makes the code 400 ms faster (2.3 sec vs. 2.7 in i7-6850K) + // and fixes the bug that the screen was not properly displayed and only refreshed sometimes + // Note: The screen bug could be also fixed by using waitKey(30) rather than waitKey(1) + multiThreadEnabled = false; + } + // GPU --> user picks (<= #GPUs) + else + { + // Get total number GPUs + const auto totalGpuNumber = getGpuNumber(); + // If number GPU < 0 --> set it to all the available GPUs + if (numberThreads < 0) + { + if (totalGpuNumber <= gpuNumberStart) + error("Number of initial GPU (`--number_gpu_start`) must be lower than the total number of" + " used GPUs (`--number_gpu`)", __LINE__, __FUNCTION__, __FILE__); + numberThreads = totalGpuNumber - gpuNumberStart; + // Reset initial GPU to 0 (we want them all) + // Logging message + log("Auto-detecting all available GPUs... Detected " + std::to_string(totalGpuNumber) + + " GPU(s), using " + std::to_string(numberThreads) + " of them starting at GPU " + + std::to_string(gpuNumberStart) + ".", Priority::High); + } + // Security check + if (gpuNumberStart + numberThreads > totalGpuNumber) + error("Initial GPU selected (`--number_gpu_start`) + number GPUs to use (`--number_gpu`) must" + " be lower or equal than the total number of GPUs in your machine (" + + std::to_string(gpuNumberStart) + " + " + + std::to_string(numberThreads) + " vs. 
" + + std::to_string(totalGpuNumber) + ").", + __LINE__, __FUNCTION__, __FILE__); + } + + // Proper format + const auto writeImagesCleaned = formatAsDirectory(wrapperStructOutput.writeImages); + const auto writeKeypointCleaned = formatAsDirectory(wrapperStructOutput.writeKeypoint); + const auto writeJsonCleaned = formatAsDirectory(wrapperStructOutput.writeJson); + const auto writeHeatMapsCleaned = formatAsDirectory(wrapperStructOutput.writeHeatMaps); + const auto modelFolder = formatAsDirectory(wrapperStructPose.modelFolder); + + // Common parameters + auto finalOutputSize = wrapperStructPose.outputSize; + Point producerSize{-1,-1}; + const auto oPProducer = (wrapperStructInput.producerSharedPtr != nullptr); + if (oPProducer) + { + // 1. Set producer properties + const auto displayProducerFpsMode = (wrapperStructInput.realTimeProcessing + ? ProducerFpsMode::OriginalFps : ProducerFpsMode::RetrievalFps); + wrapperStructInput.producerSharedPtr->setProducerFpsMode(displayProducerFpsMode); + wrapperStructInput.producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip); + wrapperStructInput.producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate); + wrapperStructInput.producerSharedPtr->set(ProducerProperty::AutoRepeat, + wrapperStructInput.framesRepeat); + // 2. 
Set finalOutputSize + producerSize = Point{(int)wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH), + (int)wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)}; + // Set finalOutputSize to input size if desired + if (finalOutputSize.x == -1 || finalOutputSize.y == -1) + finalOutputSize = producerSize; + } + + // Producer + if (oPProducer) + { + const auto datumProducer = std::make_shared>( + wrapperStructInput.producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameLast, + spVideoSeek + ); + datumProducerW = std::make_shared>(datumProducer); + } + else + datumProducerW = nullptr; + + std::vector> poseExtractorNets; + std::vector> faceExtractorNets; + std::vector> handExtractorNets; + std::vector> poseGpuRenderers; + std::shared_ptr poseCpuRenderer; + if (numberThreads > 0) + { + // Get input scales and sizes + const auto scaleAndSizeExtractor = std::make_shared( + wrapperStructPose.netInputSize, finalOutputSize, wrapperStructPose.scalesNumber, + wrapperStructPose.scaleGap + ); + scaleAndSizeExtractorW = std::make_shared>(scaleAndSizeExtractor); + + // Input cvMat to OpenPose input & output format + const auto cvMatToOpInput = std::make_shared(wrapperStructPose.poseModel); + cvMatToOpInputW = std::make_shared>(cvMatToOpInput); + if (renderOutput) + { + const auto cvMatToOpOutput = std::make_shared(); + cvMatToOpOutputW = std::make_shared>(cvMatToOpOutput); + } + + // Pose estimators & renderers + std::vector cpuRenderers; + poseExtractorsWs.clear(); + poseExtractorsWs.resize(numberThreads); + if (wrapperStructPose.enable) + { + // Pose estimators + for (auto gpuId = 0; gpuId < numberThreads; gpuId++) + poseExtractorNets.emplace_back(std::make_shared( + wrapperStructPose.poseModel, modelFolder, gpuId + gpuNumberStart, + wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale, + wrapperStructPose.addPartCandidates, wrapperStructPose.enableGoogleLogging + )); + + // Pose renderers + if (renderOutputGpu || 
wrapperStructPose.renderMode == RenderMode::Cpu) + { + // If wrapperStructPose.renderMode != RenderMode::Gpu but renderOutput, then we create an + // alpha = 0 pose renderer in order to keep the removing background option + const auto alphaKeypoint = (wrapperStructPose.renderMode != RenderMode::None + ? wrapperStructPose.alphaKeypoint : 0.f); + const auto alphaHeatMap = (wrapperStructPose.renderMode != RenderMode::None + ? wrapperStructPose.alphaHeatMap : 0.f); + // GPU rendering + if (renderOutputGpu) + { + for (const auto& poseExtractorNet : poseExtractorNets) + { + poseGpuRenderers.emplace_back(std::make_shared( + wrapperStructPose.poseModel, poseExtractorNet, wrapperStructPose.renderThreshold, + wrapperStructPose.blendOriginalFrame, alphaKeypoint, + alphaHeatMap, wrapperStructPose.defaultPartToRender + )); + } + } + // CPU rendering + if (wrapperStructPose.renderMode == RenderMode::Cpu) + { + poseCpuRenderer = std::make_shared( + wrapperStructPose.poseModel, wrapperStructPose.renderThreshold, + wrapperStructPose.blendOriginalFrame, alphaKeypoint, alphaHeatMap, + wrapperStructPose.defaultPartToRender); + cpuRenderers.emplace_back(std::make_shared>(poseCpuRenderer)); + } + } + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + // Pose extractor(s) + poseExtractorsWs.resize(poseExtractorNets.size()); + const auto personIdExtractor = (wrapperStructExtra.identification + ? std::make_shared() : nullptr); + // Keep top N people + // Added right after PoseExtractorNet to avoid: + // 1) Rendering people that are later deleted (wrong visualization). + // 2) Processing faces and hands on people that will be deleted (speed up). + // 3) Running tracking before deleting the people. + // Add KeepTopNPeople for each PoseExtractorNet + const auto keepTopNPeople = (wrapperStructPose.numberPeopleMax > 0 ? 
+ std::make_shared(wrapperStructPose.numberPeopleMax) + : nullptr); + // Person tracker + auto personTrackers = std::make_shared>>(); + if (wrapperStructExtra.tracking > -1) + personTrackers->emplace_back( + std::make_shared(wrapperStructExtra.tracking == 0)); + for (auto i = 0u; i < poseExtractorsWs.size(); i++) + { + // OpenPose keypoint detector + keepTopNPeople + // + ID extractor (experimental) + tracking (experimental) + const auto poseExtractor = std::make_shared( + poseExtractorNets.at(i), keepTopNPeople, personIdExtractor, personTrackers, + wrapperStructPose.numberPeopleMax, wrapperStructExtra.tracking); + poseExtractorsWs.at(i) = {std::make_shared>(poseExtractor)}; + // // Just OpenPose keypoint detector + // poseExtractorsWs.at(i) = {std::make_shared>( + // poseExtractorNets.at(i))}; + } + + // // (Before tracking / id extractor) + // // Added right after PoseExtractorNet to avoid: + // // 1) Rendering people that are later deleted (wrong visualization). + // // 2) Processing faces and hands on people that will be deleted (speed up). + // if (wrapperStructPose.numberPeopleMax > 0) + // { + // // Add KeepTopNPeople for each PoseExtractorNet + // const auto keepTopNPeople = std::make_shared( + // wrapperStructPose.numberPeopleMax); + // for (auto& wPose : poseExtractorsWs) + // wPose.emplace_back(std::make_shared>(keepTopNPeople)); + // } + } + + + // Face extractor(s) + if (wrapperStructFace.enable) + { + // Face detector + // OpenPose face detector + if (wrapperStructPose.enable) + { + const auto faceDetector = std::make_shared(wrapperStructPose.poseModel); + for (auto& wPose : poseExtractorsWs) + wPose.emplace_back(std::make_shared>(faceDetector)); + } + // OpenCV face detector + else + { + log("Body keypoint detection is disabled. 
Hence, using OpenCV face detector (much less" + " accurate but faster).", Priority::High); + for (auto& wPose : poseExtractorsWs) + { + // 1 FaceDetectorOpenCV per thread, OpenCV face detector is not thread-safe + const auto faceDetectorOpenCV = std::make_shared(modelFolder); + wPose.emplace_back( + std::make_shared>(faceDetectorOpenCV) + ); + } + } + // Face keypoint extractor + for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++) + { + // Face keypoint extractor + const auto netOutputSize = wrapperStructFace.netInputSize; + const auto faceExtractorNet = std::make_shared( + wrapperStructFace.netInputSize, netOutputSize, modelFolder, + gpu + gpuNumberStart, wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale, + wrapperStructPose.enableGoogleLogging + ); + faceExtractorNets.emplace_back(faceExtractorNet); + poseExtractorsWs.at(gpu).emplace_back( + std::make_shared>(faceExtractorNet)); + } + } + + // Hand extractor(s) + if (wrapperStructHand.enable) + { + const auto handDetector = std::make_shared(wrapperStructPose.poseModel); + for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++) + { + // Hand detector + // If tracking + if (wrapperStructHand.tracking) + poseExtractorsWs.at(gpu).emplace_back( + std::make_shared>(handDetector) + ); + // If detection + else + poseExtractorsWs.at(gpu).emplace_back( + std::make_shared>(handDetector)); + // Hand keypoint extractor + const auto netOutputSize = wrapperStructHand.netInputSize; + const auto handExtractorNet = std::make_shared( + wrapperStructHand.netInputSize, netOutputSize, modelFolder, + gpu + gpuNumberStart, wrapperStructHand.scalesNumber, wrapperStructHand.scaleRange, + wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale, + wrapperStructPose.enableGoogleLogging + ); + handExtractorNets.emplace_back(handExtractorNet); + poseExtractorsWs.at(gpu).emplace_back( + std::make_shared>(handExtractorNet) + ); + // If tracking + if (wrapperStructHand.tracking) + poseExtractorsWs.at(gpu).emplace_back( 
+ std::make_shared>(handDetector) + ); + } + } + + // Pose renderer(s) + if (!poseGpuRenderers.empty()) + for (auto i = 0u; i < poseExtractorsWs.size(); i++) + poseExtractorsWs.at(i).emplace_back(std::make_shared>( + poseGpuRenderers.at(i) + )); + + // Face renderer(s) + if (renderFace) + { + // CPU rendering + if (wrapperStructFace.renderMode == RenderMode::Cpu) + { + // Construct face renderer + const auto faceRenderer = std::make_shared(wrapperStructFace.renderThreshold, + wrapperStructFace.alphaKeypoint, + wrapperStructFace.alphaHeatMap); + // Add worker + cpuRenderers.emplace_back(std::make_shared>(faceRenderer)); + } + // GPU rendering + else if (wrapperStructFace.renderMode == RenderMode::Gpu) + { + for (auto i = 0u; i < poseExtractorsWs.size(); i++) + { + // Construct face renderer + const auto faceRenderer = std::make_shared( + wrapperStructFace.renderThreshold, wrapperStructFace.alphaKeypoint, + wrapperStructFace.alphaHeatMap + ); + // Performance boost -> share spGpuMemory for all renderers + if (!poseGpuRenderers.empty()) + { + const bool isLastRenderer = !renderHandGpu; + const auto renderer = std::static_pointer_cast( + poseGpuRenderers.at(i) + ); + faceRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(), + isLastRenderer); + } + // Add worker + poseExtractorsWs.at(i).emplace_back( + std::make_shared>(faceRenderer)); + } + } + else + error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__); + } + + // Hand renderer(s) + if (renderHand) + { + // CPU rendering + if (wrapperStructHand.renderMode == RenderMode::Cpu) + { + // Construct hand renderer + const auto handRenderer = std::make_shared(wrapperStructHand.renderThreshold, + wrapperStructHand.alphaKeypoint, + wrapperStructHand.alphaHeatMap); + // Add worker + cpuRenderers.emplace_back(std::make_shared>(handRenderer)); + } + // GPU rendering + else if (wrapperStructHand.renderMode == RenderMode::Gpu) + { + for (auto i = 0u; i < poseExtractorsWs.size(); i++) + { + // Construct 
hands renderer + const auto handRenderer = std::make_shared( + wrapperStructHand.renderThreshold, wrapperStructHand.alphaKeypoint, + wrapperStructHand.alphaHeatMap + ); + // Performance boost -> share spGpuMemory for all renderers + if (!poseGpuRenderers.empty()) + { + const bool isLastRenderer = true; + const auto renderer = std::static_pointer_cast( + poseGpuRenderers.at(i) + ); + handRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(), + isLastRenderer); + } + // Add worker + poseExtractorsWs.at(i).emplace_back( + std::make_shared>(handRenderer)); + } + } + else + error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__); + } + + // 3-D reconstruction + poseTriangulationsWs.clear(); + if (wrapperStructExtra.reconstruct3d) + { + // For all (body/face/hands): PoseTriangulations ~30 msec, 8 GPUS ~30 msec for keypoint estimation + poseTriangulationsWs.resize(fastMax(1, int(poseExtractorsWs.size() / 4))); + for (auto i = 0u ; i < poseTriangulationsWs.size() ; i++) + { + const auto poseTriangulation = std::make_shared( + wrapperStructExtra.minViews3d); + poseTriangulationsWs.at(i) = {std::make_shared>( + poseTriangulation)}; + } + } + // Itermediate workers (e.g. OpenPose format to cv::Mat, json & frames recorder, ...) 
+ postProcessingWs.clear(); + // // Person ID identification (when no multi-thread and no dependency on tracking) + // if (wrapperStructExtra.identification) + // { + // const auto personIdExtractor = std::make_shared(); + // postProcessingWs.emplace_back( + // std::make_shared>(personIdExtractor) + // ); + // } + // Frames processor (OpenPose format -> cv::Mat format) + if (renderOutput) + { + postProcessingWs = mergeVectors(postProcessingWs, cpuRenderers); + const auto opOutputToCvMat = std::make_shared(); + postProcessingWs.emplace_back(std::make_shared>(opOutputToCvMat)); + } + // Re-scale pose if desired + // If desired scale is not the current input + if (wrapperStructPose.keypointScale != ScaleMode::InputResolution + // and desired scale is not output when size(input) = size(output) + && !(wrapperStructPose.keypointScale == ScaleMode::OutputResolution && + (finalOutputSize == producerSize || finalOutputSize.x <= 0 || finalOutputSize.y <= 0)) + // and desired scale is not net output when size(input) = size(net output) + && !(wrapperStructPose.keypointScale == ScaleMode::NetOutputResolution + && producerSize == wrapperStructPose.netInputSize)) + { + // Then we must rescale the keypoints + auto keypointScaler = std::make_shared(wrapperStructPose.keypointScale); + postProcessingWs.emplace_back(std::make_shared>(keypointScaler)); + } + } + + // IK/Adam + const auto displayAdam = wrapperStructOutput.displayMode == DisplayMode::DisplayAdam + || (wrapperStructOutput.displayMode == DisplayMode::DisplayAll + && wrapperStructExtra.ikThreads > 0); + jointAngleEstimationsWs.clear(); +#ifdef USE_3D_ADAM_MODEL + if (wrapperStructExtra.ikThreads > 0) + { + jointAngleEstimationsWs.resize(wrapperStructExtra.ikThreads); + // Pose extractor(s) + for (auto i = 0u; i < jointAngleEstimationsWs.size(); i++) + { + const auto jointAngleEstimation = std::make_shared(displayAdam); + jointAngleEstimationsWs.at(i) = {std::make_shared>( + jointAngleEstimation)}; + } + } +#endif + + // 
Output workers + outputWs.clear(); + // Send information (e.g., to Unity) though UDP client-server communication +#ifdef USE_3D_ADAM_MODEL + if (!wrapperStructOutput.udpHost.empty() && !wrapperStructOutput.udpPort.empty()) + { + const auto udpSender = std::make_shared(wrapperStructOutput.udpHost, + wrapperStructOutput.udpPort); + outputWs.emplace_back(std::make_shared>(udpSender)); + } +#endif + // Write people pose data on disk (json for OpenCV >= 3, xml, yml...) + if (!writeKeypointCleaned.empty()) + { + const auto keypointSaver = std::make_shared(writeKeypointCleaned, + wrapperStructOutput.writeKeypointFormat); + outputWs.emplace_back(std::make_shared>(keypointSaver)); + if (wrapperStructFace.enable) + outputWs.emplace_back(std::make_shared>(keypointSaver)); + if (wrapperStructHand.enable) + outputWs.emplace_back(std::make_shared>(keypointSaver)); + } + // Write OpenPose output data on disk in json format (body/hand/face keypoints, body part locations if + // enabled, etc.) + if (!writeJsonCleaned.empty()) + { + const auto peopleJsonSaver = std::make_shared(writeJsonCleaned); + outputWs.emplace_back(std::make_shared>(peopleJsonSaver)); + } + // Write people pose data on disk (COCO validation json format) + if (!wrapperStructOutput.writeCocoJson.empty()) + { + // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it + const auto humanFormat = true; + const auto cocoJsonSaver = std::make_shared(wrapperStructOutput.writeCocoJson, + humanFormat, CocoJsonFormat::Body); + outputWs.emplace_back(std::make_shared>(cocoJsonSaver)); + } + // Write people foot pose data on disk (COCO validation json format for foot data) + if (!wrapperStructOutput.writeCocoFootJson.empty()) + { + // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it + const auto humanFormat = true; + const auto cocoJsonSaver = std::make_shared(wrapperStructOutput.writeCocoFootJson, + humanFormat, CocoJsonFormat::Foot); + 
outputWs.emplace_back(std::make_shared>(cocoJsonSaver)); + } + // Write frames as desired image format on hard disk + if (!writeImagesCleaned.empty()) + { + const auto imageSaver = std::make_shared(writeImagesCleaned, + wrapperStructOutput.writeImagesFormat); + outputWs.emplace_back(std::make_shared>(imageSaver)); + } + // Write frames as *.avi video on hard disk + const auto producerFps = (wrapperStructInput.producerSharedPtr == nullptr ? + 0. : wrapperStructInput.producerSharedPtr->get(CV_CAP_PROP_FPS)); + const auto originalVideoFps = (wrapperStructOutput.writeVideoFps > 0 ? + wrapperStructOutput.writeVideoFps + : producerFps); + if (!wrapperStructOutput.writeVideo.empty()) + { + if (!oPProducer) + error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer" + " is one of the default ones (e.g. video, webcam, ...).", + __LINE__, __FUNCTION__, __FILE__); + if (finalOutputSize.x <= 0 || finalOutputSize.y <= 0) + error("Video can only be recorded if outputSize is fixed (e.g. 
video, webcam, IP camera)," + "but not for a image directory.", __LINE__, __FUNCTION__, __FILE__); + const auto videoSaver = std::make_shared( + wrapperStructOutput.writeVideo, CV_FOURCC('M','J','P','G'), originalVideoFps, finalOutputSize + ); + outputWs.emplace_back(std::make_shared>(videoSaver)); + } + // Write joint angles as *.bvh file on hard disk +#ifdef USE_3D_ADAM_MODEL + if (!wrapperStructOutput.writeBvh.empty()) + { + const auto bvhSaver = std::make_shared( + wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps + ); + outputWs.emplace_back(std::make_shared>(bvhSaver)); + } +#endif + // Write heat maps as desired image format on hard disk + if (!writeHeatMapsCleaned.empty()) + { + const auto heatMapSaver = std::make_shared(writeHeatMapsCleaned, + wrapperStructOutput.writeHeatMapsFormat); + outputWs.emplace_back(std::make_shared>(heatMapSaver)); + } + // Add frame information for GUI + const bool guiEnabled = (wrapperStructOutput.displayMode != DisplayMode::NoDisplay); + // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting + // recorded frames will look exactly as the final displayed image by the GUI + if (wrapperStructOutput.guiVerbose && (guiEnabled || !userOutputWs.empty() + || threadManagerMode == ThreadManagerMode::Asynchronous + || threadManagerMode == ThreadManagerMode::AsynchronousOut)) + { + const auto guiInfoAdder = std::make_shared(numberThreads, guiEnabled); + outputWs.emplace_back(std::make_shared>(guiInfoAdder)); + } + // Minimal graphical user interface (GUI) + guiW = nullptr; + if (guiEnabled) + { + // PoseRenderers to Renderers + std::vector> renderers; + if (wrapperStructPose.renderMode == RenderMode::Cpu) + renderers.emplace_back(std::static_pointer_cast(poseCpuRenderer)); + else + for (const auto& poseGpuRenderer : poseGpuRenderers) + renderers.emplace_back(std::static_pointer_cast(poseGpuRenderer)); + // Display + // Adam (+3-D/2-D) display + if 
(displayAdam) + { +#ifdef USE_3D_ADAM_MODEL + // Gui + const auto gui = std::make_shared( + finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(), + spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers, + wrapperStructOutput.displayMode, JointAngleEstimation::getTotalModel(), + wrapperStructOutput.writeVideoAdam + ); + // WGui + guiW = {std::make_shared>(gui)}; +#endif + } + // 3-D (+2-D) display + else if (wrapperStructOutput.displayMode == DisplayMode::Display3D + || wrapperStructOutput.displayMode == DisplayMode::DisplayAll) + { + // Gui + const auto gui = std::make_shared( + finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(), + spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers, + wrapperStructPose.poseModel, wrapperStructOutput.displayMode + ); + // WGui + guiW = {std::make_shared>(gui)}; + } + // 2-D display + else if (wrapperStructOutput.displayMode == DisplayMode::Display2D) + { + // Gui + const auto gui = std::make_shared( + finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(), + spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers + ); + // WGui + guiW = {std::make_shared>(gui)}; + } + else + error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__); + } + // Set wrapper as configured + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + + + + + + // The less number of queues -> the less threads opened, and potentially the less lag + + // Security checks + if ((datumProducerW == nullptr) == (userInputWs.empty()) + && threadManagerMode != ThreadManagerMode::Asynchronous + && threadManagerMode != ThreadManagerMode::AsynchronousIn) + { + const auto message = "You need to have 1 and only 1 producer selected. You can introduce your own" + " producer by using setWorker(WorkerType::Input, ...) 
or use the OpenPose" + " default producer by configuring it in the configure function) or use the" + " ThreadManagerMode::Asynchronous(In) mode."; + error(message, __LINE__, __FUNCTION__, __FILE__); + } + if (outputWs.empty() && userOutputWs.empty() && guiW == nullptr + && threadManagerMode != ThreadManagerMode::Asynchronous + && threadManagerMode != ThreadManagerMode::AsynchronousOut) + { + error("No output selected.", __LINE__, __FUNCTION__, __FILE__); + } + + // Thread Manager + // Clean previous thread manager (avoid configure to crash the program if used more than once) + threadManager.reset(); + unsigned long long threadId = 0ull; + auto queueIn = 0ull; + auto queueOut = 1ull; + // After producer + // ID generator (before any multi-threading or any function that requires the ID) + const auto wIdGenerator = std::make_shared>(); + std::vector workersAux{wIdGenerator}; + // Scale & cv::Mat to OP format + if (scaleAndSizeExtractorW != nullptr) + workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW}); + if (cvMatToOpInputW != nullptr) + workersAux = mergeVectors(workersAux, {cvMatToOpInputW}); + // cv::Mat to output format + if (cvMatToOpOutputW != nullptr) + workersAux = mergeVectors(workersAux, {cvMatToOpOutputW}); + + // Producer + // If custom user Worker and uses its own thread + if (!userInputWs.empty() && userInputWsOnNewThread) + { + // Thread 0, queues 0 -> 1 + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, userInputWs, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + } + // If custom user Worker in same thread + else if (!userInputWs.empty()) + workersAux = mergeVectors(userInputWs, workersAux); + // If OpenPose producer (same thread) + else if (datumProducerW != nullptr) + workersAux = mergeVectors({datumProducerW}, workersAux); + // Otherwise + else if (threadManagerMode != ThreadManagerMode::Asynchronous + && threadManagerMode != ThreadManagerMode::AsynchronousIn) + error("No input 
selected.", __LINE__, __FUNCTION__, __FILE__); + // Thread 0 or 1, queues 0 -> 1 + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, workersAux, queueIn++, queueOut++); + // Increase thread + threadIdPP(threadId, multiThreadEnabled); + + // Pose estimation & rendering + // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs + if (!poseExtractorsWs.empty()) + { + if (multiThreadEnabled) + { + for (auto& wPose : poseExtractorsWs) + { + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, wPose, queueIn, queueOut); + threadIdPP(threadId, multiThreadEnabled); + } + queueIn++; + queueOut++; + // Sort frames - Required own thread + if (poseExtractorsWs.size() > 1u) + { + const auto wQueueOrderer = std::make_shared>(); + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + } + } + else + { + if (poseExtractorsWs.size() > 1) + log("Multi-threading disabled, only 1 thread running. All GPUs have been disabled but the" + " first one, which is defined by gpuNumberStart (e.g. 
in the OpenPose demo, it is set" + " with the `--num_gpu_start` flag).", Priority::High); + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, poseExtractorsWs.at(0), queueIn++, queueOut++); + } + } + // Assemble all frames from same time instant (3-D module) + const auto wQueueAssembler = std::make_shared>(); + // 3-D reconstruction + if (!poseTriangulationsWs.empty()) + { + // Assemble frames + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, wQueueAssembler, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + // 3-D reconstruction + if (multiThreadEnabled) + { + for (auto& wPoseTriangulations : poseTriangulationsWs) + { + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, wPoseTriangulations, queueIn, queueOut); + threadIdPP(threadId, multiThreadEnabled); + } + queueIn++; + queueOut++; + // Sort frames + if (poseTriangulationsWs.size() > 1u) + { + const auto wQueueOrderer = std::make_shared>(); + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + } + } + else + { + if (poseTriangulationsWs.size() > 1) + log("Multi-threading disabled, only 1 thread running for 3-D triangulation.", + Priority::High); + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, poseTriangulationsWs.at(0), queueIn++, queueOut++); + } + } + else + postProcessingWs = mergeVectors({wQueueAssembler}, postProcessingWs); + // Adam/IK step + if (!jointAngleEstimationsWs.empty()) + { + if (multiThreadEnabled) + { + for (auto& wJointAngleEstimator : jointAngleEstimationsWs) + { + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, wJointAngleEstimator, queueIn, queueOut); + threadIdPP(threadId, multiThreadEnabled); + } + queueIn++; + queueOut++; + // Sort frames + if 
(jointAngleEstimationsWs.size() > 1) + { + const auto wQueueOrderer = std::make_shared>(); + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + } + } + else + { + if (jointAngleEstimationsWs.size() > 1) + log("Multi-threading disabled, only 1 thread running for joint angle estimation.", + Priority::High); + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, jointAngleEstimationsWs.at(0), queueIn++, queueOut++); + } + } + // Post processing workers + if (!postProcessingWs.empty()) + { + // Combining postProcessingWs and outputWs + outputWs = mergeVectors(postProcessingWs, outputWs); + // // If I wanna split them + // log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + // threadManager.add(threadId, postProcessingWs, queueIn++, queueOut++); + // threadIdPP(threadId, multiThreadEnabled); + } + // If custom user Worker and uses its own thread + if (!userPostProcessingWs.empty()) + { + // If custom user Worker in its own thread + if (userPostProcessingWsOnNewThread) + { + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, userPostProcessingWs, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + } + // If custom user Worker in same thread + // Merge with outputWs + else + outputWs = mergeVectors(outputWs, userPostProcessingWs); + } + // Output workers + if (!outputWs.empty()) + { + // Thread 4 or 5, queues 4 -> 5 + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, outputWs, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + } + // User output worker + // Thread Y, queues Q -> Q+1 + if (!userOutputWs.empty()) + { + if (userOutputWsOnNewThread) + { + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, userOutputWs, queueIn++, queueOut++); + threadIdPP(threadId, 
multiThreadEnabled); + } + else + { + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId-1, userOutputWs, queueIn++, queueOut++); + } + } + // OpenPose GUI + if (guiW != nullptr) + { + // Thread Y+1, queues Q+1 -> Q+2 + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + threadManager.add(threadId, guiW, queueIn++, queueOut++); + threadIdPP(threadId, multiThreadEnabled); + } + log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__); + } + catch (const std::exception& e) + { + error(e.what(), __LINE__, __FUNCTION__, __FILE__); + } + } } #endif // OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP diff --git a/include/openpose/wrapper/wrapperStructFace.hpp b/include/openpose/wrapper/wrapperStructFace.hpp index 63858c9b..7a4ec666 100644 --- a/include/openpose/wrapper/wrapperStructFace.hpp +++ b/include/openpose/wrapper/wrapperStructFace.hpp @@ -57,7 +57,7 @@ namespace op * Since all the elements of the struct are public, they can also be manually filled. */ WrapperStructFace(const bool enable = false, const Point& netInputSize = Point{368, 368}, - const RenderMode renderMode = RenderMode::None, + const RenderMode renderMode = RenderMode::Gpu, const float alphaKeypoint = FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap = FACE_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold = 0.4f); diff --git a/include/openpose/wrapper/wrapperStructHand.hpp b/include/openpose/wrapper/wrapperStructHand.hpp index 1a80c3fb..38e46551 100644 --- a/include/openpose/wrapper/wrapperStructHand.hpp +++ b/include/openpose/wrapper/wrapperStructHand.hpp @@ -78,7 +78,7 @@ namespace op */ WrapperStructHand(const bool enable = false, const Point& netInputSize = Point{368, 368}, const int scalesNumber = 1, const float scaleRange = 0.4f, - const bool tracking = false, const RenderMode renderMode = RenderMode::None, + const bool tracking = false, const RenderMode renderMode = RenderMode::Gpu, const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT, const float 
alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold = 0.2f); diff --git a/include/openpose/wrapper/wrapperStructPose.hpp b/include/openpose/wrapper/wrapperStructPose.hpp index 53d3a6a3..2a5433f8 100644 --- a/include/openpose/wrapper/wrapperStructPose.hpp +++ b/include/openpose/wrapper/wrapperStructPose.hpp @@ -173,10 +173,10 @@ namespace op * Since all the elements of the struct are public, they can also be manually filled. */ WrapperStructPose(const bool enable = true, const Point& netInputSize = Point{656, 368}, - const Point& outputSize = Point{1280, 720}, + const Point& outputSize = Point{-1, -1}, const ScaleMode keypointScale = ScaleMode::InputResolution, const int gpuNumber = -1, const int gpuNumberStart = 0, const int scalesNumber = 1, - const float scaleGap = 0.15f, const RenderMode renderMode = RenderMode::None, + const float scaleGap = 0.15f, const RenderMode renderMode = RenderMode::Gpu, const PoseModel poseModel = PoseModel::BODY_25, const bool blendOriginalFrame = true, const float alphaKeypoint = POSE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap = POSE_DEFAULT_ALPHA_HEAT_MAP, diff --git a/python/openpose/_openpose.cpp b/python/openpose/_openpose.cpp index 9e9a433a..50e5be2b 100644 --- a/python/openpose/_openpose.cpp +++ b/python/openpose/_openpose.cpp @@ -42,306 +42,312 @@ // Todo, have GPU Number, handle, OpenCL/CPU Cases OP_API class OpenPose { public: - std::unique_ptr poseExtractorCaffe; - std::unique_ptr poseRenderer; - std::unique_ptr frameDisplayer; - std::unique_ptr scaleAndSizeExtractor; - - std::unique_ptr> resizeAndMergeCaffe; - std::unique_ptr> nmsCaffe; - std::unique_ptr> bodyPartConnectorCaffe; - std::shared_ptr> heatMapsBlob; - std::shared_ptr> peaksBlob; - op::Array mPoseKeypoints; - op::Array mPoseScores; - op::PoseModel poseModel; - int mGpuID; - - OpenPose(int FLAGS_logging_level = default_logging_level, - std::string FLAGS_output_resolution = default_output_resolution, - std::string FLAGS_net_resolution = 
default_net_resolution, - std::string FLAGS_model_pose = default_model_pose, - float FLAGS_alpha_pose = default_alpha_pose, - float FLAGS_scale_gap = default_scale_gap, - int FLAGS_scale_number = default_scale_number, - float FLAGS_render_threshold = default_render_threshold, - int FLAGS_num_gpu_start = default_num_gpu_start, - int FLAGS_disable_blending = default_disable_blending, - std::string FLAGS_model_folder = default_model_folder - ) { - mGpuID = FLAGS_num_gpu_start; + std::unique_ptr poseExtractorCaffe; + std::unique_ptr poseRenderer; + std::unique_ptr frameDisplayer; + std::unique_ptr scaleAndSizeExtractor; + + std::unique_ptr> resizeAndMergeCaffe; + std::unique_ptr> nmsCaffe; + std::unique_ptr> bodyPartConnectorCaffe; + std::shared_ptr> heatMapsBlob; + std::shared_ptr> peaksBlob; + op::Array mPoseKeypoints; + op::Array mPoseScores; + op::PoseModel poseModel; + int mGpuID; + + OpenPose(int FLAGS_logging_level = default_logging_level, + std::string FLAGS_output_resolution = default_output_resolution, + std::string FLAGS_net_resolution = default_net_resolution, + std::string FLAGS_model_pose = default_model_pose, + float FLAGS_alpha_pose = default_alpha_pose, + float FLAGS_scale_gap = default_scale_gap, + int FLAGS_scale_number = default_scale_number, + float FLAGS_render_threshold = default_render_threshold, + int FLAGS_num_gpu_start = default_num_gpu_start, + int FLAGS_disable_blending = default_disable_blending, + std::string FLAGS_model_folder = default_model_folder + ) { + mGpuID = FLAGS_num_gpu_start; #ifdef USE_CUDA - caffe::Caffe::set_mode(caffe::Caffe::GPU); - caffe::Caffe::SetDevice(mGpuID); + caffe::Caffe::set_mode(caffe::Caffe::GPU); + caffe::Caffe::SetDevice(mGpuID); #elif USE_OPENCL - caffe::Caffe::set_mode(caffe::Caffe::GPU); - std::vector devices; - const int maxNumberGpu = op::OpenCL::getTotalGPU(); - for (auto i = 0; i < maxNumberGpu; i++) - devices.emplace_back(i); - caffe::Caffe::SetDevices(devices); - caffe::Caffe::SelectDevice(mGpuID, 
true); - op::OpenCL::getInstance(mGpuID, CL_DEVICE_TYPE_GPU, true); + caffe::Caffe::set_mode(caffe::Caffe::GPU); + std::vector devices; + const int maxNumberGpu = op::OpenCL::getTotalGPU(); + for (auto i = 0; i < maxNumberGpu; i++) + devices.emplace_back(i); + caffe::Caffe::SetDevices(devices); + caffe::Caffe::SelectDevice(mGpuID, true); + op::OpenCL::getInstance(mGpuID, CL_DEVICE_TYPE_GPU, true); #else - caffe::Caffe::set_mode(caffe::Caffe::CPU); + caffe::Caffe::set_mode(caffe::Caffe::CPU); #endif - op::log("OpenPose Library Python Wrapper", op::Priority::High); - // ------------------------- INITIALIZATION ------------------------- - // Step 1 - Set logging level - // - 0 will output all the logging messages - // - 255 will output nothing - op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // Step 2 - Read Google flags (user defined configuration) - // outputSize - const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); - // netInputSize - const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); - // poseModel - poseModel = op::flagsToPoseModel(FLAGS_model_pose); - // Check no contradictory flags enabled - if (FLAGS_alpha_pose < 0. || FLAGS_alpha_pose > 1.) - op::error("Alpha value for blending must be in the range [0,1].", __LINE__, __FUNCTION__, __FILE__); - if (FLAGS_scale_gap <= 0. 
&& FLAGS_scale_number > 1) - op::error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", - __LINE__, __FUNCTION__, __FILE__); - // Logging - op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); - // Step 3 - Initialize all required classes - scaleAndSizeExtractor = std::unique_ptr(new op::ScaleAndSizeExtractor(netInputSize, outputSize, FLAGS_scale_number, FLAGS_scale_gap)); - - poseExtractorCaffe = std::unique_ptr(new op::PoseExtractorCaffe{ poseModel, FLAGS_model_folder, FLAGS_num_gpu_start }); - - poseRenderer = std::unique_ptr(new op::PoseCpuRenderer{ poseModel, (float)FLAGS_render_threshold, !FLAGS_disable_blending, - (float)FLAGS_alpha_pose }); - frameDisplayer = std::unique_ptr(new op::FrameDisplayer{ "OpenPose Tutorial - Example 1", outputSize }); - - // Custom - resizeAndMergeCaffe = std::unique_ptr>(new op::ResizeAndMergeCaffe{}); - nmsCaffe = std::unique_ptr>(new op::NmsCaffe{}); - bodyPartConnectorCaffe = std::unique_ptr>(new op::BodyPartConnectorCaffe{}); - heatMapsBlob = { std::make_shared>(1,1,1,1) }; - peaksBlob = { std::make_shared>(1,1,1,1) }; - bodyPartConnectorCaffe->setPoseModel(poseModel); - - // Step 4 - Initialize resources on desired thread (in this case single thread, i.e. 
we init resources here) - poseExtractorCaffe->initializationOnThread(); - poseRenderer->initializationOnThread(); - } - - std::vector*> caffeNetSharedToPtr( - std::vector>>& caffeNetOutputBlob) - { - try - { - // Prepare spCaffeNetOutputBlobss - std::vector*> caffeNetOutputBlobs(caffeNetOutputBlob.size()); - for (auto i = 0u; i < caffeNetOutputBlobs.size(); i++) - caffeNetOutputBlobs[i] = caffeNetOutputBlob[i].get(); - return caffeNetOutputBlobs; - } - catch (const std::exception& e) - { - op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); - return{}; - } - } - - void forward(const cv::Mat& inputImage, op::Array& poseKeypoints, cv::Mat& displayImage, bool display = false) { - op::OpOutputToCvMat opOutputToCvMat; - op::CvMatToOpInput cvMatToOpInput; - op::CvMatToOpOutput cvMatToOpOutput; - if (inputImage.empty()) - op::error("Could not open or find the image: ", __LINE__, __FUNCTION__, __FILE__); - const op::Point imageSize{ inputImage.cols, inputImage.rows }; - // Step 2 - Get desired scale sizes - std::vector scaleInputToNetInputs; - std::vector> netInputSizes; - double scaleInputToOutput; - op::Point outputResolution; - std::tie(scaleInputToNetInputs, netInputSizes, scaleInputToOutput, outputResolution) - = scaleAndSizeExtractor->extract(imageSize); - // Step 3 - Format input image to OpenPose input and output formats - const auto netInputArray = cvMatToOpInput.createArray(inputImage, scaleInputToNetInputs, netInputSizes); - - // Step 4 - Estimate poseKeypoints - poseExtractorCaffe->forwardPass(netInputArray, imageSize, scaleInputToNetInputs); - poseKeypoints = poseExtractorCaffe->getPoseKeypoints(); - - if (display) { - auto outputArray = cvMatToOpOutput.createArray(inputImage, scaleInputToOutput, outputResolution); - // Step 5 - Render poseKeypoints - poseRenderer->renderPose(outputArray, poseKeypoints, scaleInputToOutput); - // Step 6 - OpenPose output format to cv::Mat - displayImage = opOutputToCvMat.formatToCvMat(outputArray); - } - } - - void 
poseFromHeatmap(const cv::Mat& inputImage, std::vector>>& caffeNetOutputBlob, op::Array& poseKeypoints, cv::Mat& displayImage, std::vector>& imageSizes) { - // Get Scale - const op::Point inputDataSize{ inputImage.cols, inputImage.rows }; - - // Convert to Ptr - //std::vector>> a; - //caffeNetOutputBlob.emplace_back(caffeHmPtr); - const auto caffeNetOutputBlobs = caffeNetSharedToPtr(caffeNetOutputBlob); - - // To be called once only - resizeAndMergeCaffe->Reshape(caffeNetOutputBlobs, { heatMapsBlob.get() }, - op::getPoseNetDecreaseFactor(poseModel), 1.f / 1.f, true, - 0); - nmsCaffe->Reshape({ heatMapsBlob.get() }, { peaksBlob.get() }, op::getPoseMaxPeaks(poseModel), - op::getPoseNumberBodyParts(poseModel), 0); - bodyPartConnectorCaffe->Reshape({ heatMapsBlob.get(), peaksBlob.get() }); - - // Normal - op::OpOutputToCvMat opOutputToCvMat; - op::CvMatToOpInput cvMatToOpInput; - op::CvMatToOpOutput cvMatToOpOutput; - if (inputImage.empty()) - op::error("Could not open or find the image: ", __LINE__, __FUNCTION__, __FILE__); - const op::Point imageSize{ inputImage.cols, inputImage.rows }; - // Step 2 - Get desired scale sizes - std::vector scaleInputToNetInputs; - std::vector> netInputSizes; - double scaleInputToOutput; - op::Point outputResolution; - - std::tie(scaleInputToNetInputs, netInputSizes, scaleInputToOutput, outputResolution) - = scaleAndSizeExtractor->extract(imageSize); - - const auto netInputArray = cvMatToOpInput.createArray(inputImage, scaleInputToNetInputs, netInputSizes); - - // Run the modes - const std::vector floatScaleRatios(scaleInputToNetInputs.begin(), scaleInputToNetInputs.end()); - resizeAndMergeCaffe->setScaleRatios(floatScaleRatios); - std::vector*> heatMapsBlobs{ heatMapsBlob.get() }; - std::vector*> peaksBlobs{ peaksBlob.get() }; + op::log("OpenPose Library Python Wrapper", op::Priority::High); + // ------------------------- INITIALIZATION ------------------------- + // Step 1 - Set logging level + // - 0 will output all the logging 
messages + // - 255 will output nothing + op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level); + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + // Step 2 - Read GFlags (user defined configuration) + // outputSize + const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1"); + // netInputSize + const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368"); + // poseModel + poseModel = op::flagsToPoseModel(FLAGS_model_pose); + // Check no contradictory flags enabled + if (FLAGS_alpha_pose < 0. || FLAGS_alpha_pose > 1.) + op::error("Alpha value for blending must be in the range [0,1].", __LINE__, __FUNCTION__, __FILE__); + if (FLAGS_scale_gap <= 0. && FLAGS_scale_number > 1) + op::error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", + __LINE__, __FUNCTION__, __FILE__); + // Logging + op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); + // Step 3 - Initialize all required classes + scaleAndSizeExtractor = std::unique_ptr(new op::ScaleAndSizeExtractor(netInputSize, outputSize, FLAGS_scale_number, FLAGS_scale_gap)); + + poseExtractorCaffe = std::unique_ptr(new op::PoseExtractorCaffe{ poseModel, FLAGS_model_folder, FLAGS_num_gpu_start }); + + poseRenderer = std::unique_ptr(new op::PoseCpuRenderer{ poseModel, (float)FLAGS_render_threshold, !FLAGS_disable_blending, + (float)FLAGS_alpha_pose }); + frameDisplayer = std::unique_ptr(new op::FrameDisplayer{ "OpenPose Tutorial - Example 1", outputSize }); + + // Custom + resizeAndMergeCaffe = std::unique_ptr>(new op::ResizeAndMergeCaffe{}); + nmsCaffe = std::unique_ptr>(new op::NmsCaffe{}); + bodyPartConnectorCaffe = std::unique_ptr>(new op::BodyPartConnectorCaffe{}); + heatMapsBlob = { std::make_shared>(1,1,1,1) }; + peaksBlob = { std::make_shared>(1,1,1,1) }; + bodyPartConnectorCaffe->setPoseModel(poseModel); + + // Step 4 - Initialize resources on desired thread (in this case single thread, i.e. 
we init resources here) + poseExtractorCaffe->initializationOnThread(); + poseRenderer->initializationOnThread(); + } + + std::vector*> caffeNetSharedToPtr( + std::vector>>& caffeNetOutputBlob) + { + try + { + // Prepare spCaffeNetOutputBlobss + std::vector*> caffeNetOutputBlobs(caffeNetOutputBlob.size()); + for (auto i = 0u; i < caffeNetOutputBlobs.size(); i++) + caffeNetOutputBlobs[i] = caffeNetOutputBlob[i].get(); + return caffeNetOutputBlobs; + } + catch (const std::exception& e) + { + op::error(e.what(), __LINE__, __FUNCTION__, __FILE__); + return{}; + } + } + + void forward(const cv::Mat& inputImage, op::Array& poseKeypoints, cv::Mat& displayImage, bool display = false) { + op::OpOutputToCvMat opOutputToCvMat; + op::CvMatToOpInput cvMatToOpInput; + op::CvMatToOpOutput cvMatToOpOutput; + if (inputImage.empty()) + op::error("Could not open or find the image: ", __LINE__, __FUNCTION__, __FILE__); + const op::Point imageSize{ inputImage.cols, inputImage.rows }; + // Step 2 - Get desired scale sizes + std::vector scaleInputToNetInputs; + std::vector> netInputSizes; + double scaleInputToOutput; + op::Point outputResolution; + std::tie(scaleInputToNetInputs, netInputSizes, scaleInputToOutput, outputResolution) + = scaleAndSizeExtractor->extract(imageSize); + // Step 3 - Format input image to OpenPose input and output formats + const auto netInputArray = cvMatToOpInput.createArray(inputImage, scaleInputToNetInputs, netInputSizes); + + // Step 4 - Estimate poseKeypoints + poseExtractorCaffe->forwardPass(netInputArray, imageSize, scaleInputToNetInputs); + poseKeypoints = poseExtractorCaffe->getPoseKeypoints(); + + if (display) { + auto outputArray = cvMatToOpOutput.createArray(inputImage, scaleInputToOutput, outputResolution); + // Step 5 - Render poseKeypoints + poseRenderer->renderPose(outputArray, poseKeypoints, scaleInputToOutput); + // Step 6 - OpenPose output format to cv::Mat + displayImage = opOutputToCvMat.formatToCvMat(outputArray); + } + } + + void 
poseFromHeatmap(const cv::Mat& inputImage, std::vector>>& caffeNetOutputBlob, op::Array& poseKeypoints, cv::Mat& displayImage, std::vector>& imageSizes) { + // Get Scale + const op::Point inputDataSize{ inputImage.cols, inputImage.rows }; + + // Convert to Ptr + //std::vector>> a; + //caffeNetOutputBlob.emplace_back(caffeHmPtr); + const auto caffeNetOutputBlobs = caffeNetSharedToPtr(caffeNetOutputBlob); + + // To be called once only + resizeAndMergeCaffe->Reshape(caffeNetOutputBlobs, { heatMapsBlob.get() }, + op::getPoseNetDecreaseFactor(poseModel), 1.f / 1.f, true, + 0); + nmsCaffe->Reshape({ heatMapsBlob.get() }, { peaksBlob.get() }, op::getPoseMaxPeaks(poseModel), + op::getPoseNumberBodyParts(poseModel), 0); + bodyPartConnectorCaffe->Reshape({ heatMapsBlob.get(), peaksBlob.get() }); + + // Normal + op::OpOutputToCvMat opOutputToCvMat; + op::CvMatToOpInput cvMatToOpInput; + op::CvMatToOpOutput cvMatToOpOutput; + if (inputImage.empty()) + op::error("Could not open or find the image: ", __LINE__, __FUNCTION__, __FILE__); + const op::Point imageSize{ inputImage.cols, inputImage.rows }; + // Step 2 - Get desired scale sizes + std::vector scaleInputToNetInputs; + std::vector> netInputSizes; + double scaleInputToOutput; + op::Point outputResolution; + + std::tie(scaleInputToNetInputs, netInputSizes, scaleInputToOutput, outputResolution) + = scaleAndSizeExtractor->extract(imageSize); + + const auto netInputArray = cvMatToOpInput.createArray(inputImage, scaleInputToNetInputs, netInputSizes); + + // Run the modes + const std::vector floatScaleRatios(scaleInputToNetInputs.begin(), scaleInputToNetInputs.end()); + resizeAndMergeCaffe->setScaleRatios(floatScaleRatios); + std::vector*> heatMapsBlobs{ heatMapsBlob.get() }; + std::vector*> peaksBlobs{ peaksBlob.get() }; #ifdef USE_CUDA - resizeAndMergeCaffe->Forward_gpu(caffeNetOutputBlobs, heatMapsBlobs); // ~5ms + resizeAndMergeCaffe->Forward_gpu(caffeNetOutputBlobs, heatMapsBlobs); // ~5ms #elif USE_OPENCL - 
resizeAndMergeCaffe->Forward_ocl(caffeNetOutputBlobs, heatMapsBlobs); // ~5ms + resizeAndMergeCaffe->Forward_ocl(caffeNetOutputBlobs, heatMapsBlobs); // ~5ms #else - resizeAndMergeCaffe->Forward_cpu(caffeNetOutputBlobs, heatMapsBlobs); // ~5ms + resizeAndMergeCaffe->Forward_cpu(caffeNetOutputBlobs, heatMapsBlobs); // ~5ms #endif - nmsCaffe->setThreshold((float)poseExtractorCaffe->get(op::PoseProperty::NMSThreshold)); + nmsCaffe->setThreshold((float)poseExtractorCaffe->get(op::PoseProperty::NMSThreshold)); #ifdef USE_CUDA - nmsCaffe->Forward_gpu(heatMapsBlobs, peaksBlobs);// ~2ms + nmsCaffe->Forward_gpu(heatMapsBlobs, peaksBlobs);// ~2ms #elif USE_OPENCL - nmsCaffe->Forward_ocl(heatMapsBlobs, peaksBlobs);// ~2ms + nmsCaffe->Forward_ocl(heatMapsBlobs, peaksBlobs);// ~2ms #else - nmsCaffe->Forward_cpu(heatMapsBlobs, peaksBlobs);// ~2ms + nmsCaffe->Forward_cpu(heatMapsBlobs, peaksBlobs);// ~2ms #endif - op::cudaCheck(__LINE__, __FUNCTION__, __FILE__); - - float mScaleNetToOutput = 1. / scaleInputToNetInputs[0]; - bodyPartConnectorCaffe->setScaleNetToOutput(mScaleNetToOutput); - bodyPartConnectorCaffe->setInterMinAboveThreshold( - (float)poseExtractorCaffe->get(op::PoseProperty::ConnectInterMinAboveThreshold) - ); - bodyPartConnectorCaffe->setInterThreshold((float)poseExtractorCaffe->get(op::PoseProperty::ConnectInterThreshold)); - bodyPartConnectorCaffe->setMinSubsetCnt((int)poseExtractorCaffe->get(op::PoseProperty::ConnectMinSubsetCnt)); - bodyPartConnectorCaffe->setMinSubsetScore((float)poseExtractorCaffe->get(op::PoseProperty::ConnectMinSubsetScore)); - - bodyPartConnectorCaffe->Forward_cpu({ heatMapsBlob.get(), - peaksBlob.get() }, - mPoseKeypoints, mPoseScores); - poseKeypoints = mPoseKeypoints; - - auto outputArray = cvMatToOpOutput.createArray(inputImage, scaleInputToOutput, outputResolution); - // Step 5 - Render poseKeypoints - poseRenderer->renderPose(outputArray, mPoseKeypoints, scaleInputToOutput); - // Step 6 - OpenPose output format to cv::Mat - 
displayImage = opOutputToCvMat.formatToCvMat(outputArray); - } + op::cudaCheck(__LINE__, __FUNCTION__, __FILE__); + + float mScaleNetToOutput = 1. / scaleInputToNetInputs[0]; + bodyPartConnectorCaffe->setScaleNetToOutput(mScaleNetToOutput); + bodyPartConnectorCaffe->setInterMinAboveThreshold( + (float)poseExtractorCaffe->get(op::PoseProperty::ConnectInterMinAboveThreshold) + ); + bodyPartConnectorCaffe->setInterThreshold((float)poseExtractorCaffe->get(op::PoseProperty::ConnectInterThreshold)); + bodyPartConnectorCaffe->setMinSubsetCnt((int)poseExtractorCaffe->get(op::PoseProperty::ConnectMinSubsetCnt)); + bodyPartConnectorCaffe->setMinSubsetScore((float)poseExtractorCaffe->get(op::PoseProperty::ConnectMinSubsetScore)); + +#ifdef USE_CUDA + bodyPartConnectorCaffe->Forward_gpu({ heatMapsBlob.get(), + peaksBlob.get() }, + mPoseKeypoints, mPoseScores); +#else + bodyPartConnectorCaffe->Forward_cpu({ heatMapsBlob.get(), + peaksBlob.get() }, + mPoseKeypoints, mPoseScores); +#endif + poseKeypoints = mPoseKeypoints; + + auto outputArray = cvMatToOpOutput.createArray(inputImage, scaleInputToOutput, outputResolution); + // Step 5 - Render poseKeypoints + poseRenderer->renderPose(outputArray, mPoseKeypoints, scaleInputToOutput); + // Step 6 - OpenPose output format to cv::Mat + displayImage = opOutputToCvMat.formatToCvMat(outputArray); + } }; #ifdef __cplusplus extern "C" { #endif - typedef void* c_OP; - op::Array output; + typedef void* c_OP; + op::Array output; OP_EXPORT c_OP newOP(int logging_level, - char* output_resolution, - char* net_resolution, - char* model_pose, - float alpha_pose, - float scale_gap, - int scale_number, - float render_threshold, - int num_gpu_start, - bool disable_blending, - char* model_folder - ) { - return new OpenPose(logging_level, output_resolution, net_resolution, model_pose, alpha_pose, - scale_gap, scale_number, render_threshold, num_gpu_start, disable_blending, model_folder); - } + char* output_resolution, + char* net_resolution, + char* 
model_pose, + float alpha_pose, + float scale_gap, + int scale_number, + float render_threshold, + int num_gpu_start, + bool disable_blending, + char* model_folder + ) { + return new OpenPose(logging_level, output_resolution, net_resolution, model_pose, alpha_pose, + scale_gap, scale_number, render_threshold, num_gpu_start, disable_blending, model_folder); + } OP_EXPORT void delOP(c_OP op) { - delete (OpenPose *)op; - } + delete (OpenPose *)op; + } OP_EXPORT void forward(c_OP op, unsigned char* img, size_t rows, size_t cols, int* size, unsigned char* displayImg, bool display) { - OpenPose* openPose = (OpenPose*)op; - cv::Mat image(rows, cols, CV_8UC3, img); - cv::Mat displayImage(rows, cols, CV_8UC3, displayImg); - openPose->forward(image, output, displayImage, display); - if (output.getSize().size()) { - size[0] = output.getSize()[0]; - size[1] = output.getSize()[1]; - size[2] = output.getSize()[2]; - } - else { - size[0] = 0; size[1] = 0; size[2] = 0; - } - if (display) memcpy(displayImg, displayImage.ptr(), sizeof(unsigned char)*rows*cols * 3); - } + OpenPose* openPose = (OpenPose*)op; + cv::Mat image(rows, cols, CV_8UC3, img); + cv::Mat displayImage(rows, cols, CV_8UC3, displayImg); + openPose->forward(image, output, displayImage, display); + if (output.getSize().size()) { + size[0] = output.getSize()[0]; + size[1] = output.getSize()[1]; + size[2] = output.getSize()[2]; + } + else { + size[0] = 0; size[1] = 0; size[2] = 0; + } + if (display) memcpy(displayImg, displayImage.ptr(), sizeof(unsigned char)*rows*cols * 3); + } OP_EXPORT void getOutputs(c_OP op, float* array) { - if (output.getSize().size()) - memcpy(array, output.getPtr(), output.getSize()[0] * output.getSize()[1] * output.getSize()[2] * sizeof(float)); - } + if (output.getSize().size()) + memcpy(array, output.getPtr(), output.getSize()[0] * output.getSize()[1] * output.getSize()[2] * sizeof(float)); + } OP_EXPORT void poseFromHeatmap(c_OP op, unsigned char* img, size_t rows, size_t cols, unsigned 
char* displayImg, float* hm, int* size, float* ratios) { - OpenPose* openPose = (OpenPose*)op; - cv::Mat image(rows, cols, CV_8UC3, img); - cv::Mat displayImage(rows, cols, CV_8UC3, displayImg); - - std::vector>> caffeNetOutputBlob; - - for (int i = 0; i> caffeHmPtr(new caffe::Blob()); - caffeHmPtr->Reshape(1, size[1], size[2] * ((float)ratios[i] / (float)ratios[0]), size[3] * ((float)ratios[i] / (float)ratios[0])); - float* startIndex = &hm[i*size[1] * size[2] * size[3]]; - for (int d = 0; dshape()[1]; d++) { - for (int r = 0; rshape()[2]; r++) { - for (int c = 0; cshape()[3]; c++) { - int toI = d*caffeHmPtr->shape()[2] * caffeHmPtr->shape()[3] + r*caffeHmPtr->shape()[3] + c; - int fromI = d*size[2] * size[3] + r*size[3] + c; - caffeHmPtr->mutable_cpu_data()[toI] = startIndex[fromI]; - } - } - } - caffeNetOutputBlob.emplace_back(caffeHmPtr); - } - - std::vector> imageSizes; - for (int i = 0; i point(cols*ratios[i], rows*ratios[i]); - imageSizes.emplace_back(point); - } - - openPose->poseFromHeatmap(image, caffeNetOutputBlob, output, displayImage, imageSizes); - memcpy(displayImg, displayImage.ptr(), sizeof(unsigned char)*rows*cols * 3); - // Copy back kp size - if (output.getSize().size()) { - size[0] = output.getSize()[0]; - size[1] = output.getSize()[1]; - size[2] = output.getSize()[2]; - } - else { - size[0] = 0; size[1] = 0; size[2] = 0; - } - } + OpenPose* openPose = (OpenPose*)op; + cv::Mat image(rows, cols, CV_8UC3, img); + cv::Mat displayImage(rows, cols, CV_8UC3, displayImg); + + std::vector>> caffeNetOutputBlob; + + for (int i = 0; i> caffeHmPtr(new caffe::Blob()); + caffeHmPtr->Reshape(1, size[1], size[2] * ((float)ratios[i] / (float)ratios[0]), size[3] * ((float)ratios[i] / (float)ratios[0])); + float* startIndex = &hm[i*size[1] * size[2] * size[3]]; + for (int d = 0; dshape()[1]; d++) { + for (int r = 0; rshape()[2]; r++) { + for (int c = 0; cshape()[3]; c++) { + int toI = d*caffeHmPtr->shape()[2] * caffeHmPtr->shape()[3] + r*caffeHmPtr->shape()[3] + 
c; + int fromI = d*size[2] * size[3] + r*size[3] + c; + caffeHmPtr->mutable_cpu_data()[toI] = startIndex[fromI]; + } + } + } + caffeNetOutputBlob.emplace_back(caffeHmPtr); + } + + std::vector> imageSizes; + for (int i = 0; i point(cols*ratios[i], rows*ratios[i]); + imageSizes.emplace_back(point); + } + + openPose->poseFromHeatmap(image, caffeNetOutputBlob, output, displayImage, imageSizes); + memcpy(displayImg, displayImage.ptr(), sizeof(unsigned char)*rows*cols * 3); + // Copy back kp size + if (output.getSize().size()) { + size[0] = output.getSize()[0]; + size[1] = output.getSize()[1]; + size[2] = output.getSize()[2]; + } + else { + size[0] = 0; size[1] = 0; size[2] = 0; + } + } #ifdef __cplusplus } diff --git a/src/openpose/3d/poseTriangulation.cpp b/src/openpose/3d/poseTriangulation.cpp index 597515c1..fefda474 100644 --- a/src/openpose/3d/poseTriangulation.cpp +++ b/src/openpose/3d/poseTriangulation.cpp @@ -1,4 +1,3 @@ -// #include #include // std::accumulate #ifdef USE_CERES #include diff --git a/src/openpose/calibration/cameraParameterEstimation.cpp b/src/openpose/calibration/cameraParameterEstimation.cpp index 0f4bcfdd..194de6ee 100644 --- a/src/openpose/calibration/cameraParameterEstimation.cpp +++ b/src/openpose/calibration/cameraParameterEstimation.cpp @@ -1,6 +1,5 @@ #include #include // std::accumulate -#include #include #ifdef USE_EIGEN #include diff --git a/src/openpose/gui/gui.cpp b/src/openpose/gui/gui.cpp index ca515670..2c4bd581 100644 --- a/src/openpose/gui/gui.cpp +++ b/src/openpose/gui/gui.cpp @@ -1,5 +1,3 @@ -#include -#include #include // cv::waitKey #include #include diff --git a/src/openpose/gui/guiInfoAdder.cpp b/src/openpose/gui/guiInfoAdder.cpp index b088a39e..99771dc8 100644 --- a/src/openpose/gui/guiInfoAdder.cpp +++ b/src/openpose/gui/guiInfoAdder.cpp @@ -1,4 +1,3 @@ -#include #include // std::snprintf #include // std::numeric_limits #include diff --git a/src/openpose/net/CMakeLists.txt b/src/openpose/net/CMakeLists.txt index 
6686d03b..bddb58e0 100644 --- a/src/openpose/net/CMakeLists.txt +++ b/src/openpose/net/CMakeLists.txt @@ -1,5 +1,8 @@ set(CMAKE_CXX_SOURCE_FILE_EXTENSIONS C;M;c++;cc;cpp;cxx;mm;CPP;cl) set(SOURCES_OP_NET + bodyPartConnectorBase.cpp + bodyPartConnectorBase.cu + bodyPartConnectorCaffe.cpp maximumBase.cpp maximumBase.cu maximumCaffe.cpp diff --git a/src/openpose/pose/bodyPartConnectorBase.cpp b/src/openpose/net/bodyPartConnectorBase.cpp similarity index 99% rename from src/openpose/pose/bodyPartConnectorBase.cpp rename to src/openpose/net/bodyPartConnectorBase.cpp index 85e531b0..0aa38b83 100644 --- a/src/openpose/pose/bodyPartConnectorBase.cpp +++ b/src/openpose/net/bodyPartConnectorBase.cpp @@ -1,7 +1,7 @@ #include #include #include -#include +#include namespace op { diff --git a/src/openpose/pose/bodyPartConnectorBase.cu b/src/openpose/net/bodyPartConnectorBase.cu similarity index 98% rename from src/openpose/pose/bodyPartConnectorBase.cu rename to src/openpose/net/bodyPartConnectorBase.cu index cc5b4384..a56db50c 100644 --- a/src/openpose/pose/bodyPartConnectorBase.cu +++ b/src/openpose/net/bodyPartConnectorBase.cu @@ -1,7 +1,7 @@ #include #include #include -#include +#include namespace op { @@ -31,7 +31,7 @@ namespace op const auto vectorAToBNormX = vectorAToBX/vectorNorm; const auto vectorAToBNormY = vectorAToBY/vectorNorm; - auto sum = 0.; + auto sum = T(0.); auto count = 0; const auto vectorAToBXInLine = vectorAToBX/numberPointsInLine; const auto vectorAToBYInLine = vectorAToBY/numberPointsInLine; @@ -49,7 +49,7 @@ namespace op } // Return PAF score - if (count/(float)numberPointsInLine > interMinAboveThreshold) + if (count/T(numberPointsInLine) > interMinAboveThreshold) return sum/count; else { @@ -141,7 +141,7 @@ namespace op maxPeaks, numberBodyPartPairs, heatMapSize.x, heatMapSize.y, interThreshold, interMinAboveThreshold); // pairScoresCpu <-- pairScoresGpu - cudaMemcpy(pairScoresCpu.getPtr(), pairScoresGpuPtr, totalComputations * sizeof(float), + 
cudaMemcpy(pairScoresCpu.getPtr(), pairScoresGpuPtr, totalComputations * sizeof(T), cudaMemcpyDeviceToHost); // New code diff --git a/src/openpose/pose/bodyPartConnectorCaffe.cpp b/src/openpose/net/bodyPartConnectorCaffe.cpp similarity index 99% rename from src/openpose/pose/bodyPartConnectorCaffe.cpp rename to src/openpose/net/bodyPartConnectorCaffe.cpp index e0368e61..6ca3e209 100644 --- a/src/openpose/pose/bodyPartConnectorCaffe.cpp +++ b/src/openpose/net/bodyPartConnectorCaffe.cpp @@ -4,9 +4,9 @@ #ifdef USE_CUDA #include #endif -#include +#include #include -#include +#include namespace op { diff --git a/src/openpose/pose/CMakeLists.txt b/src/openpose/pose/CMakeLists.txt index a75a89ff..7b4d0fdc 100644 --- a/src/openpose/pose/CMakeLists.txt +++ b/src/openpose/pose/CMakeLists.txt @@ -1,7 +1,4 @@ set(SOURCES_OP_POSE - bodyPartConnectorBase.cpp - bodyPartConnectorBase.cu - bodyPartConnectorCaffe.cpp defineTemplates.cpp poseCpuRenderer.cpp poseExtractor.cpp diff --git a/src/openpose/pose/poseExtractorCaffe.cpp b/src/openpose/pose/poseExtractorCaffe.cpp index a271d506..c7bc09fa 100644 --- a/src/openpose/pose/poseExtractorCaffe.cpp +++ b/src/openpose/pose/poseExtractorCaffe.cpp @@ -2,10 +2,10 @@ #include #endif #include +#include #include #include #include -#include #include #include #include diff --git a/src/openpose/producer/producer.cpp b/src/openpose/producer/producer.cpp index b50f4f60..4d761e58 100644 --- a/src/openpose/producer/producer.cpp +++ b/src/openpose/producer/producer.cpp @@ -1,4 +1,3 @@ -#include #include #include #include diff --git a/src/openpose/producer/spinnakerWrapper.cpp b/src/openpose/producer/spinnakerWrapper.cpp index 696149c9..25971df1 100644 --- a/src/openpose/producer/spinnakerWrapper.cpp +++ b/src/openpose/producer/spinnakerWrapper.cpp @@ -1,6 +1,3 @@ -#ifdef USE_FLIR_CAMERA - #include -#endif #include // cv::undistort, cv::initUndistortRectifyMap #ifdef USE_FLIR_CAMERA #include diff --git a/src/openpose/producer/webcamReader.cpp 
b/src/openpose/producer/webcamReader.cpp index 4e443ff0..e6112e28 100644 --- a/src/openpose/producer/webcamReader.cpp +++ b/src/openpose/producer/webcamReader.cpp @@ -214,7 +214,7 @@ namespace op } } - const auto DISCONNETED_THRESHOLD = 15; + const auto DISCONNETED_THRESHOLD = 100; void WebcamReader::bufferingThread() { try diff --git a/src/openpose/tracking/personIdExtractor.cpp b/src/openpose/tracking/personIdExtractor.cpp index 0918ee4a..15566fd5 100644 --- a/src/openpose/tracking/personIdExtractor.cpp +++ b/src/openpose/tracking/personIdExtractor.cpp @@ -1,4 +1,3 @@ -#include #include #include #include diff --git a/src/openpose/tracking/personTracker.cpp b/src/openpose/tracking/personTracker.cpp index 7521c5fc..f238dd62 100644 --- a/src/openpose/tracking/personTracker.cpp +++ b/src/openpose/tracking/personTracker.cpp @@ -1,5 +1,4 @@ #include -#include #include // cv::resize #include #include diff --git a/src/openpose/utilities/profiler.cpp b/src/openpose/utilities/profiler.cpp index 8faf2f61..1082f574 100644 --- a/src/openpose/utilities/profiler.cpp +++ b/src/openpose/utilities/profiler.cpp @@ -1,7 +1,5 @@ -#include #include #include -#include #include #include diff --git a/src/openpose/wrapper/wrapperAuxiliary.cpp b/src/openpose/wrapper/wrapperAuxiliary.cpp index 3db99bf6..09e6f2bb 100644 --- a/src/openpose/wrapper/wrapperAuxiliary.cpp +++ b/src/openpose/wrapper/wrapperAuxiliary.cpp @@ -180,4 +180,17 @@ namespace op error(e.what(), __LINE__, __FUNCTION__, __FILE__); } } + + void threadIdPP(unsigned long long& threadId, const bool multiThreadEnabled) + { + try + { + if (multiThreadEnabled) + threadId++; + } + catch (const std::exception& e) + { + error(e.what(), __LINE__, __FUNCTION__, __FILE__); + } + } } -- GitLab