From 0b965cf0cbab86b68bfd83238ea3bdc15af2c36a Mon Sep 17 00:00:00 2001
From: Travis CI
Date: Wed, 31 Oct 2018 16:24:39 +0000
Subject: [PATCH] Deploy code docs to GitHub Pages

Travis build: 407
Commit: a8c23f03a7d138cd3cea71ef4f75593b4c5f26dd
---
 doxygen.log | 5 +-
 html/classop_1_1_array.html | 2 +-
 html/classop_1_1_coco_json_saver-members.html | 2 +-
 html/classop_1_1_coco_json_saver.html | 14 +-
 html/classop_1_1_coco_json_saver.js | 2 +-
 html/classop_1_1_face_extractor_caffe.html | 2 +-
 html/classop_1_1_face_extractor_net.html | 2 +-
 html/classop_1_1_flir_reader.html | 2 +-
 html/classop_1_1_frame_displayer.html | 4 +-
 html/classop_1_1_hand_extractor_caffe.html | 2 +-
 html/classop_1_1_hand_extractor_net.html | 2 +-
 html/classop_1_1_image_directory_reader.html | 2 +-
 html/classop_1_1_ip_camera_reader.html | 2 +-
 html/classop_1_1_producer.html | 4 +-
 html/classop_1_1_video_capture_reader.html | 4 +-
 html/classop_1_1_video_reader.html | 4 +-
 html/classop_1_1_webcam_reader.html | 2 +-
 html/coco_json_saver_8hpp_source.html | 36 +-
 html/fast_math_8hpp_source.html | 2 +-
 .../filestream_2enum_classes_8hpp_source.html | 23 +-
 html/flags_8hpp.html | 186 +++--
 html/flags_8hpp.js | 25 +-
 html/flags_8hpp_source.html | 100 +--
 html/functions_c.html | 2 +-
 html/functions_func_c.html | 2 +-
 html/functions_func_w.html | 2 +-
 html/functions_vars.html | 3 +
 html/functions_w.html | 11 +-
 html/globals.html | 6 +-
 html/globals_func.html | 4 +-
 html/index.html | 4 +-
 html/navtree.js | 8 +-
 html/navtreeindex10.js | 2 +
 html/navtreeindex2.js | 2 +-
 html/navtreeindex6.js | 72 +-
 html/navtreeindex7.js | 14 +-
 html/navtreeindex8.js | 4 +-
 html/navtreeindex9.js | 28 +-
 html/search/all_16.js | 25 +-
 html/search/all_2.js | 2 +-
 html/search/all_3.js | 10 +-
 html/search/functions_16.js | 2 +-
 html/search/functions_2.js | 2 +-
 html/search/functions_3.js | 10 +-
 html/search/variables_15.js | 1 +
 html/structop_1_1_datum.html | 12 +-
 html/structop_1_1_wrapper_struct_hand.html | 2 +-
 html/structop_1_1_wrapper_struct_input.html | 2 +-
 ...top_1_1_wrapper_struct_output-members.html | 23 +-
 html/structop_1_1_wrapper_struct_output.html | 29 +-
 html/structop_1_1_wrapper_struct_output.js | 3 +-
 html/worker_8hpp_source.html | 93 +--
 html/wrapper_auxiliary_8hpp_source.html | 742 +++++++++---------
 html/wrapper_struct_output_8hpp_source.html | 87 +-
 54 files changed, 889 insertions(+), 749 deletions(-)

diff --git a/doxygen.log b/doxygen.log
index e852e947..8b6c0daf 100644
--- a/doxygen.log
+++ b/doxygen.log
@@ -1064,9 +1064,10 @@ Generating docs for compound op::FaceExtractorCaffe...
 Genera/home/travis/build/CMU-Perceptual-Computing-Lab/openpose/include/openpose/face/faceExtractorNet.hpp:18: warning: The following parameters of op::FaceExtractorNet::FaceExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScale=ScaleMode::ZeroToOne) are not documented:
  parameter 'heatMapTypes'
  parameter 'heatMapScale'
-/home/travis/build/CMU-Perceptual-Computing-Lab/openpose/include/openpose/filestream/cocoJsonSaver.hpp:18: warning: The following parameters of op::CocoJsonSaver::CocoJsonSaver(const std::string &filePathToSave, const bool humanReadable=true, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body) are not documented:
+/home/travis/build/CMU-Perceptual-Computing-Lab/openpose/include/openpose/filestream/cocoJsonSaver.hpp:18: warning: The following parameters of op::CocoJsonSaver::CocoJsonSaver(const std::string &filePathToSave, const bool humanReadable=true, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body, const int mCocoJsonVariant=0) are not documented:
  parameter 'humanReadable'
  parameter 'cocoJsonFormat'
+ parameter 'mCocoJsonVariant'
 /home/travis/build/CMU-Perceptual-Computing-Lab/openpose/include/openpose/hand/handExtractorCaffe.hpp:18: warning: The following parameters of op::HandExtractorCaffe::HandExtractorCaffe(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const unsigned short numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScale=ScaleMode::ZeroToOne, const bool enableGoogleLogging=true) are not documented:
  parameter 'heatMapTypes'
  parameter 'heatMapScale'
@@ -1225,5 +1226,5 @@ Generating file index...
 Generating file member index...
 Generating example index...
 finalizing index lists...
-lookup cache used 4980/65536 hits=38456 misses=5343
+lookup cache used 4983/65536 hits=38475 misses=5346
 finished...
diff --git a/html/classop_1_1_array.html b/html/classop_1_1_array.html
index de070b21..dc6459ad 100644
--- a/html/classop_1_1_array.html
+++ b/html/classop_1_1_array.html
@@ -1333,7 +1333,7 @@ template<typename T>
-

It returns a string with the whole array data. Useful for debugging. The format is: values separated by a space, and a enter for each dimension. E.g.: For the Array{2, 2, 3}, it will print: Array<T>::toString(): x1 x2 x3 x4 x5 x6

+

It returns a string with the whole array data. Useful for debugging. The format is: values separated by a space, and a newline for each dimension. E.g., for the Array{2, 2, 3}, it will print: Array<T>::toString(): x1 x2 x3 x4 x5 x6

x7 x8 x9 x10 x11 x12

Returns
A string with the array values in the above format.
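For illustration, a minimal sketch of the format described above (the header path, the size-plus-fill constructor, and the exact output layout in the comment are assumptions based on this page, not a verified listing):

#include <iostream>
#include <vector>
#include <openpose/core/array.hpp> // op::Array<T> (header path assumed)

int main()
{
    // A 2x2x3 array as in the example above, filled with a placeholder value.
    const op::Array<float> array(std::vector<int>{2, 2, 3}, 1.f);
    // Per the description above, the returned string lists the values separated by
    // spaces, with a line break per dimension, roughly:
    //   1 1 1 1 1 1
    //   1 1 1 1 1 1
    std::cout << array.toString() << std::endl;
    return 0;
}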
diff --git a/html/classop_1_1_coco_json_saver-members.html b/html/classop_1_1_coco_json_saver-members.html index f36592fa..04b8d56c 100644 --- a/html/classop_1_1_coco_json_saver-members.html +++ b/html/classop_1_1_coco_json_saver-members.html @@ -112,7 +112,7 @@ $(document).ready(function(){initNavTree('classop_1_1_coco_json_saver.html','');

This is the complete list of members for op::CocoJsonSaver, including all inherited members.

- +
CocoJsonSaver(const std::string &filePathToSave, const bool humanReadable=true, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body)op::CocoJsonSaverexplicit
CocoJsonSaver(const std::string &filePathToSave, const bool humanReadable=true, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body, const int mCocoJsonVariant=0)op::CocoJsonSaverexplicit
record(const Array< float > &poseKeypoints, const Array< float > &poseScores, const std::string &imageName)op::CocoJsonSaver
~CocoJsonSaver()op::CocoJsonSavervirtual
diff --git a/html/classop_1_1_coco_json_saver.html b/html/classop_1_1_coco_json_saver.html index df9d7563..0e2e3986 100644 --- a/html/classop_1_1_coco_json_saver.html +++ b/html/classop_1_1_coco_json_saver.html @@ -117,8 +117,8 @@ $(document).ready(function(){initNavTree('classop_1_1_coco_json_saver.html',''); - - + + @@ -127,7 +127,7 @@ Public Member Functions

Detailed Description

The CocoJsonSaver class creates a COCO validation json file with details about the processed images. It inherits from Recorder.

Constructor & Destructor Documentation

- +

Public Member Functions

 CocoJsonSaver (const std::string &filePathToSave, const bool humanReadable=true, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body)
 
 CocoJsonSaver (const std::string &filePathToSave, const bool humanReadable=true, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body, const int mCocoJsonVariant=0)
 
virtual ~CocoJsonSaver ()
 
void record (const Array< float > &poseKeypoints, const Array< float > &poseScores, const std::string &imageName)
@@ -150,7 +150,13 @@ Public Member Functions - + + + + + + + diff --git a/html/classop_1_1_coco_json_saver.js b/html/classop_1_1_coco_json_saver.js index 852bbcd7..3d51e342 100644 --- a/html/classop_1_1_coco_json_saver.js +++ b/html/classop_1_1_coco_json_saver.js @@ -1,6 +1,6 @@ var classop_1_1_coco_json_saver = [ - [ "CocoJsonSaver", "classop_1_1_coco_json_saver.html#a0ce96fcd7dfaa2791f514a8363d018ff", null ], + [ "CocoJsonSaver", "classop_1_1_coco_json_saver.html#a380d4638d09cfe9c26551cc2efb7c3a8", null ], [ "~CocoJsonSaver", "classop_1_1_coco_json_saver.html#a8bbfab84a7816cb0f189f243246f744b", null ], [ "record", "classop_1_1_coco_json_saver.html#a3a7fd417aa5d85044fb0703379af1a23", null ] ]; \ No newline at end of file diff --git a/html/classop_1_1_face_extractor_caffe.html b/html/classop_1_1_face_extractor_caffe.html index 690bdc14..6f3388e9 100644 --- a/html/classop_1_1_face_extractor_caffe.html +++ b/html/classop_1_1_face_extractor_caffe.html @@ -286,7 +286,7 @@ Additional Inherited Members

This function extracts the face keypoints for each detected face in the image.

Parameters
const CocoJsonFormat cocoJsonFormat = CocoJsonFormat::Body cocoJsonFormat = CocoJsonFormat::Body,
const int mCocoJsonVariant = 0 
- +
faceRectangles: location of the faces in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that face (or 0,0,0,0 if some face is missing, e.g. if a specific person has only half of the body inside the image).
faceRectangles: location of the faces in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that face (or 0,0,0,0 if some face is missing, e.g., if a specific person has only half of the body inside the image).
cvInputData: Original image in cv::Mat format and BGR format.
diff --git a/html/classop_1_1_face_extractor_net.html b/html/classop_1_1_face_extractor_net.html index f37d5472..7137aafd 100644 --- a/html/classop_1_1_face_extractor_net.html +++ b/html/classop_1_1_face_extractor_net.html @@ -276,7 +276,7 @@ Protected Attributes

This function extracts the face keypoints for each detected face in the image.

Parameters
- +
faceRectangles: location of the faces in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that face (or 0,0,0,0 if some face is missing, e.g. if a specific person has only half of the body inside the image).
faceRectangles: location of the faces in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that face (or 0,0,0,0 if some face is missing, e.g., if a specific person has only half of the body inside the image).
cvInputData: Original image in cv::Mat format and BGR format.
diff --git a/html/classop_1_1_flir_reader.html b/html/classop_1_1_flir_reader.html index ab183171..49558650 100644 --- a/html/classop_1_1_flir_reader.html +++ b/html/classop_1_1_flir_reader.html @@ -378,7 +378,7 @@ Additional Inherited Members
-

This function returns a unique frame name (e.g. the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.

Implements op::Producer.

diff --git a/html/classop_1_1_frame_displayer.html b/html/classop_1_1_frame_displayer.html index f9e6f7e5..f92d35c2 100644 --- a/html/classop_1_1_frame_displayer.html +++ b/html/classop_1_1_frame_displayer.html @@ -289,7 +289,7 @@ Public Member Functions
-

This function set the new FrameDisplayer::FullScreenMode (e.g. full screen).

+

This function sets the new FrameDisplayer::FullScreenMode (e.g., full screen).

Parameters
@@ -311,7 +311,7 @@ Public Member Functions
fullScreenMode: New FrameDisplayer::FullScreenMode state.
-

This function switch between full screen and windowed modes (e.g. when double-click on video players or Ctrt+Enter are presed).

+

This function switches between full-screen and windowed modes (e.g., when double-clicking on video players or pressing Ctrl+Enter).

diff --git a/html/classop_1_1_hand_extractor_caffe.html b/html/classop_1_1_hand_extractor_caffe.html index 1a872d76..1be79e34 100644 --- a/html/classop_1_1_hand_extractor_caffe.html +++ b/html/classop_1_1_hand_extractor_caffe.html @@ -306,7 +306,7 @@ short, float > 
Parameters
- +
handRectangles: location of the hands in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally the std::vector, a std::array of 2 elements: index 0 and 1 for left and right hand respectively. Inside each array element, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that hand (or 0,0,0,0 if some hand is missing, e.g. if a specific person has only half of the body inside the image).
handRectangles: location of the hands in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally the std::vector, a std::array of 2 elements: index 0 and 1 for left and right hand respectively. Inside each array element, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that hand (or 0,0,0,0 if some hand is missing, e.g., if a specific person has only half of the body inside the image).
cvInputData: Original image in cv::Mat format and BGR format.
diff --git a/html/classop_1_1_hand_extractor_net.html b/html/classop_1_1_hand_extractor_net.html index 67d22209..d8c2ddca 100644 --- a/html/classop_1_1_hand_extractor_net.html +++ b/html/classop_1_1_hand_extractor_net.html @@ -293,7 +293,7 @@ short, float > 
Parameters
- +
handRectangles: location of the hands in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally the std::vector, a std::array of 2 elements: index 0 and 1 for left and right hand respectively. Inside each array element, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that hand (or 0,0,0,0 if some hand is missing, e.g. if a specific person has only half of the body inside the image).
handRectangles: location of the hands in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Internally the std::vector, a std::array of 2 elements: index 0 and 1 for left and right hand respectively. Inside each array element, a op::Rectangle<float> (similar to cv::Rect for floating values) with the position of that hand (or 0,0,0,0 if some hand is missing, e.g., if a specific person has only half of the body inside the image).
cvInputData: Original image in cv::Mat format and BGR format.
diff --git a/html/classop_1_1_image_directory_reader.html b/html/classop_1_1_image_directory_reader.html index a7e3fd1f..4b20ef8a 100644 --- a/html/classop_1_1_image_directory_reader.html +++ b/html/classop_1_1_image_directory_reader.html @@ -380,7 +380,7 @@ Additional Inherited Members
-

This function returns a unique frame name (e.g. the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.

Implements op::Producer.

diff --git a/html/classop_1_1_ip_camera_reader.html b/html/classop_1_1_ip_camera_reader.html index 55585b92..5bbce3d2 100644 --- a/html/classop_1_1_ip_camera_reader.html +++ b/html/classop_1_1_ip_camera_reader.html @@ -372,7 +372,7 @@ Additional Inherited Members
-

This function returns a unique frame name (e.g. the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.

Implements op::VideoCaptureReader.

diff --git a/html/classop_1_1_producer.html b/html/classop_1_1_producer.html index 6f3dc844..85b1e53d 100644 --- a/html/classop_1_1_producer.html +++ b/html/classop_1_1_producer.html @@ -169,7 +169,7 @@ Protected Member Functions  

Detailed Description

-

Producer is an abstract class to extract frames from a source (image directory, video file, webcam stream, etc.). It has the basic and common functions (e.g. getFrame, release & isOpened).

+

Producer is an abstract class to extract frames from a source (image directory, video file, webcam stream, etc.). It has the basic and common functions (e.g., getFrame, release & isOpened).

Constructor & Destructor Documentation

@@ -481,7 +481,7 @@ Protected Member Functions
-

This function returns a unique frame name (e.g. the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.

Implemented in op::ImageDirectoryReader, op::VideoCaptureReader, op::VideoReader, op::WebcamReader, op::FlirReader, and op::IpCameraReader.
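As a rough illustration of the Producer interface described above: getFrame, isOpened, release and getNextFrameName are the members this page mentions; the op::VideoReader constructor arguments, header paths, return types, and the video path are assumptions, not documented API.

#include <iostream>
#include <string>
#include <opencv2/core/core.hpp>             // cv::Mat
#include <openpose/producer/videoReader.hpp> // op::VideoReader (header path assumed)

int main()
{
    op::VideoReader videoReader("examples/media/video.avi"); // constructor signature assumed
    while (videoReader.isOpened())
    {
        // Unique name of the next frame, e.g. the frame number for a video source (see above).
        const std::string frameName = videoReader.getNextFrameName();
        const cv::Mat frame = videoReader.getFrame(); // cv::Mat return type assumed
        if (frame.empty())
            break;
        std::cout << frameName << std::endl;
    }
    videoReader.release();
    return 0;
}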

diff --git a/html/classop_1_1_video_capture_reader.html b/html/classop_1_1_video_capture_reader.html index 63ea2079..fd374863 100644 --- a/html/classop_1_1_video_capture_reader.html +++ b/html/classop_1_1_video_capture_reader.html @@ -181,7 +181,7 @@ Protected Member Functions  

Detailed Description

-

VideoCaptureReader is an abstract class to extract frames from a cv::VideoCapture source (video file, webcam stream, etc.). It has the basic and common functions of the cv::VideoCapture class (e.g. get, set, etc.).

+

VideoCaptureReader is an abstract class to extract frames from a cv::VideoCapture source (video file, webcam stream, etc.). It has the basic and common functions of the cv::VideoCapture class (e.g., get, set, etc.).

Constructor & Destructor Documentation

@@ -347,7 +347,7 @@ Protected Member Functions
-

This function returns a unique frame name (e.g. the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.

Implements op::Producer.

diff --git a/html/classop_1_1_video_reader.html b/html/classop_1_1_video_reader.html index a5c2049f..8ddffb42 100644 --- a/html/classop_1_1_video_reader.html +++ b/html/classop_1_1_video_reader.html @@ -180,7 +180,7 @@ Additional Inherited Members  

Detailed Description

-

VideoReader is a wrapper of the cv::VideoCapture class for video. It allows controlling a video (e.g. extracting frames, setting resolution & fps, etc).

+

VideoReader is a wrapper of the cv::VideoCapture class for video. It allows controlling a video (e.g., extracting frames, setting resolution & fps, etc).

Constructor & Destructor Documentation

@@ -390,7 +390,7 @@ Additional Inherited Members
-

This function returns a unique frame name (e.g. the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.

Implements op::VideoCaptureReader.

diff --git a/html/classop_1_1_webcam_reader.html b/html/classop_1_1_webcam_reader.html index 0eaf71ee..0fa5f03a 100644 --- a/html/classop_1_1_webcam_reader.html +++ b/html/classop_1_1_webcam_reader.html @@ -396,7 +396,7 @@ Additional Inherited Members
-

This function returns a unique frame name (e.g. the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.

Implements op::VideoCaptureReader.

diff --git a/html/coco_json_saver_8hpp_source.html b/html/coco_json_saver_8hpp_source.html index 1f5fd42f..1d7d6e6a 100644 --- a/html/coco_json_saver_8hpp_source.html +++ b/html/coco_json_saver_8hpp_source.html @@ -120,22 +120,24 @@ $(document).ready(function(){initNavTree('coco_json_saver_8hpp_source.html','');
15  {
16  public:
23  explicit CocoJsonSaver(const std::string& filePathToSave, const bool humanReadable = true,
-
24  const CocoJsonFormat cocoJsonFormat = CocoJsonFormat::Body);
-
25 
-
26  virtual ~CocoJsonSaver();
-
27 
-
28  void record(const Array<float>& poseKeypoints, const Array<float>& poseScores, const std::string& imageName);
-
29 
-
30  private:
-
31  const CocoJsonFormat mCocoJsonFormat;
-
32  JsonOfstream mJsonOfstream;
-
33  bool mFirstElementAdded;
-
34 
- -
36  };
-
37 }
-
38 
-
39 #endif // OPENPOSE_FILESTREAM_POSE_JSON_COCO_SAVER_HPP
+
24  const CocoJsonFormat cocoJsonFormat = CocoJsonFormat::Body,
+
25  const int mCocoJsonVariant = 0);
+
26 
+
27  virtual ~CocoJsonSaver();
+
28 
+
29  void record(const Array<float>& poseKeypoints, const Array<float>& poseScores, const std::string& imageName);
+
30 
+
31  private:
+
32  const CocoJsonFormat mCocoJsonFormat;
+
33  const int mCocoJsonVariant;
+
34  JsonOfstream mJsonOfstream;
+
35  bool mFirstElementAdded;
+
36 
+ +
38  };
+
39 }
+
40 
+
41 #endif // OPENPOSE_FILESTREAM_POSE_JSON_COCO_SAVER_HPP
#define DELETE_COPY(className)
Definition: macros.hpp:33
@@ -144,7 +146,7 @@ $(document).ready(function(){initNavTree('coco_json_saver_8hpp_source.html',''); -
CocoJsonFormat
Definition: enumClasses.hpp:13
+
CocoJsonFormat
Definition: enumClasses.hpp:14
#define OP_API
Definition: macros.hpp:18
std::string string
Definition: cl2.hpp:574
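The header above shows the constructor gaining a cocoJsonVariant argument and the new const int mCocoJsonVariant member. A minimal usage sketch of the updated interface follows; the op::Array header path, the dummy keypoint data, and the output file and image names are illustrative assumptions.

#include <string>
#include <vector>
#include <openpose/core/array.hpp>               // op::Array (header path assumed)
#include <openpose/filestream/cocoJsonSaver.hpp> // op::CocoJsonSaver (path from the log above)

int main()
{
    // 4th argument is the new COCO JSON variant added by this patch; 0 keeps the default behaviour.
    op::CocoJsonSaver cocoJsonSaver("coco_results.json", /*humanReadable*/ true,
                                    op::CocoJsonFormat::Body, /*cocoJsonVariant*/ 0);

    // Dummy 1-person x 25-keypoint x (x, y, score) pose, all zeros, plus its score.
    const op::Array<float> poseKeypoints(std::vector<int>{1, 25, 3}, 0.f);
    const op::Array<float> poseScores(1, 0.f);
    cocoJsonSaver.record(poseKeypoints, poseScores, "000000000001.jpg");
    return 0;
}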
diff --git a/html/fast_math_8hpp_source.html b/html/fast_math_8hpp_source.html index aa71b6d4..06f7caf1 100644 --- a/html/fast_math_8hpp_source.html +++ b/html/fast_math_8hpp_source.html @@ -114,7 +114,7 @@ $(document).ready(function(){initNavTree('fast_math_8hpp_source.html','');});
5 {
6  // Use op::round/max/min for basic types (int, char, long, float, double, etc). Never with classes!
7  // `std::` alternatives uses 'const T&' instead of 'const T' as argument.
-
8  // E.g. std::round is really slow (~300 ms vs ~10 ms when I individually apply it to each element of a whole
+
8  // E.g., std::round is really slow (~300 ms vs ~10 ms when I individually apply it to each element of a whole
9  // image array
10 
11  // Round functions
diff --git a/html/filestream_2enum_classes_8hpp_source.html b/html/filestream_2enum_classes_8hpp_source.html index 0cbd56df..0618bd62 100644 --- a/html/filestream_2enum_classes_8hpp_source.html +++ b/html/filestream_2enum_classes_8hpp_source.html @@ -119,16 +119,17 @@ $(document).ready(function(){initNavTree('filestream_2enum_classes_8hpp_source.h
10  Yaml,
11  Yml,
12  };
-
13  enum class CocoJsonFormat : unsigned char
-
14  {
-
15  Body,
-
16  Foot,
-
17  Car,
-
18  Size,
-
19  };
-
20 }
-
21 
-
22 #endif // OPENPOSE_FILESTREAM_ENUM_CLASSES_HPP
+
13 
+
14  enum class CocoJsonFormat : unsigned char
+
15  {
+
16  Body,
+
17  Foot,
+
18  Car,
+
19  Size,
+
20  };
+
21 }
+
22 
+
23 #endif // OPENPOSE_FILESTREAM_ENUM_CLASSES_HPP
@@ -137,7 +138,7 @@ $(document).ready(function(){initNavTree('filestream_2enum_classes_8hpp_source.h -
CocoJsonFormat
Definition: enumClasses.hpp:13
+
CocoJsonFormat
Definition: enumClasses.hpp:14
diff --git a/html/flags_8hpp.html b/html/flags_8hpp.html index 2cab1aaa..569b328b 100644 --- a/html/flags_8hpp.html +++ b/html/flags_8hpp.html @@ -117,8 +117,8 @@ $(document).ready(function(){initNavTree('flags_8hpp.html','');}); Functions  DEFINE_int32 (logging_level, 3,"The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"" low priority messages and 4 for important ones.")   - DEFINE_bool (disable_multi_thread, false,"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error.") -  + DEFINE_bool (disable_multi_thread, false,"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error.") +   DEFINE_int32 (profile_speed, 1000,"If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"" runtime statistics at this frame number.")    DEFINE_int32 (camera,-1,"The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative"" number (by default), to auto-detect and open the first available camera.") @@ -141,16 +141,16 @@ Functions    DEFINE_uint64 (frame_step, 1,"Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames"" 0, 5, 10, etc..")   - DEFINE_uint64 (frame_last,-1,"Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to"" 10, it will process 11 frames (0-10).") -  - DEFINE_bool (frame_flip, false,"Flip/mirror each frame (e.g. for real time webcam demonstrations).") -  + DEFINE_uint64 (frame_last,-1,"Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to"" 10, it will process 11 frames (0-10).") +  + DEFINE_bool (frame_flip, false,"Flip/mirror each frame (e.g., for real time webcam demonstrations).") +   DEFINE_int32 (frame_rotate, 0,"Rotate each frame, 4 possible values: 0, 90, 180, 270.")    DEFINE_bool (frames_repeat, false,"Repeat frames when finished.")   - DEFINE_bool (process_real_time, false,"Enable to keep the original source frame rate (e.g. for video). If the processing time is"" too long, it will skip frames. If it is too fast, it will slow it down.") -  + DEFINE_bool (process_real_time, false,"Enable to keep the original source frame rate (e.g., for video). If the processing time is"" too long, it will skip frames. If it is too fast, it will slow it down.") +   DEFINE_string (camera_parameter_folder,"models/cameraParameters/flir/","String with the folder where the camera parameters are located.")    DEFINE_bool (frame_keep_distortion, false,"If false (default), it will undistortionate the image based on the"" `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.,"" it will leave it as it is.") @@ -169,10 +169,10 @@ Functions    DEFINE_bool (body_disable, false,"Disable body keypoint detection. Option only possible for faster (but less accurate) face"" keypoint detection.")   - DEFINE_string (model_pose,"BODY_25","Model to be used. E.g. 
`COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), ""`MPI_4_layers` (15 keypoints, even faster but less accurate).") -  - DEFINE_string (net_resolution,"-1x368","Multiples of 16. If it is increased, the accuracy potentially increases. If it is"" decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"" closest aspect ratio possible to the images or videos to be processed. Using `-1` in"" any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"" input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"" e.g. full HD (1980x1080) and HD (1280x720) resolutions.") -  + DEFINE_string (model_pose,"BODY_25","Model to be used. E.g., `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), ""`MPI_4_layers` (15 keypoints, even faster but less accurate).") +  + DEFINE_string (net_resolution,"-1x368","Multiples of 16. If it is increased, the accuracy potentially increases. If it is"" decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"" closest aspect ratio possible to the images or videos to be processed. Using `-1` in"" any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"" input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"" e.g., full HD (1980x1080) and HD (1280x720) resolutions.") +   DEFINE_int32 (scale_number, 1,"Number of scales to average.")    DEFINE_double (scale_gap, 0.3,"Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."" If you want to change the initial scale, you actually want to multiply the"" `net_resolution` by your desired initial scale.") @@ -197,8 +197,8 @@ Functions    DEFINE_int32 (hand_scale_number, 1,"Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"" were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4.")   - DEFINE_double (hand_scale_range, 0.4,"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if"" scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.") -  + DEFINE_double (hand_scale_range, 0.4,"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if"" scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.") +   DEFINE_bool (hand_tracking, false,"Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"" is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it"" simply looks for hands in positions at which hands were located in previous frames, but"" it does not guarantee the same person ID among frames.")    DEFINE_bool (3d, false,"Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."" 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"" results. Note that it will only display 1 person. If multiple people is present, it will"" fail.") @@ -219,8 +219,8 @@ Functions    DEFINE_double (render_threshold, 0.05,"Only estimated keypoints whose score confidences are higher than this threshold will be"" rendered. 
Generally, a high threshold (> 0.5) will only render very clear body parts;"" while small thresholds (~0.1) will also output guessed and occluded keypoints, but also"" more false positives (i.e. wrong detections).")   - DEFINE_int32 (render_pose,-1,"Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"" (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if"" CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render"" both `outputData` and `cvOutputData` with the original image and desired body part to be"" shown (i.e. keypoints, heat maps or PAFs).") -  + DEFINE_int32 (render_pose,-1,"Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"" (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if"" CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render"" both `outputData` and `cvOutputData` with the original image and desired body part to be"" shown (i.e. keypoints, heat maps or PAFs).") +   DEFINE_double (alpha_pose, 0.6,"Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will"" hide it. Only valid for GPU rendering.")    DEFINE_double (alpha_heatmap, 0.7,"Blending factor (range 0-1) between heatmap and original frame. 1 will only show the"" heatmap, 0 will only show the frame. Only valid for GPU rendering.") @@ -243,14 +243,14 @@ Functions    DEFINE_bool (fullscreen, false,"Run in full-screen mode (press f during runtime to toggle).")   - DEFINE_bool (no_gui_verbose, false,"Do not write text on output images on GUI (e.g. number of current frame and people). It"" does not affect the pose rendering.") -  + DEFINE_bool (no_gui_verbose, false,"Do not write text on output images on GUI (e.g., number of current frame and people). It"" does not affect the pose rendering.") +   DEFINE_int32 (display,-1,"Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server"" and/or to slightly speed up the processing if visual output is not required); 2 for 2-D"" display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display.")    DEFINE_string (write_images,"","Directory to write rendered frames in `write_images_format` image format.")   - DEFINE_string (write_images_format,"png","File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV"" function cv::imwrite for all compatible extensions.") -  + DEFINE_string (write_images_format,"png","File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV"" function cv::imwrite for all compatible extensions.") +   DEFINE_string (write_video,"","Full file path to write rendered frames in motion JPEG video format. It might fail if the"" final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"" `camera_fps` controls FPS.")    DEFINE_string (write_json,"","Directory to write OpenPose output in JSON format. It includes body, hand, and face pose"" keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled).") @@ -259,6 +259,8 @@ Functions    DEFINE_string (write_coco_foot_json,"","Full file path to write people foot pose data with JSON COCO validation format.")   + DEFINE_int32 (write_coco_json_variant, 0,"Currently, this option is experimental and only makes effect on car JSON generation. 
It"" selects the COCO variant for cocoJsonSaver.") +   DEFINE_string (write_heatmaps,"","Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"" must be enabled.")    DEFINE_string (write_heatmaps_format,"png","File extension and format for `write_heatmaps`, analogous to `write_images_format`."" For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"" floating values.") @@ -267,17 +269,17 @@ Functions    DEFINE_string (write_keypoint_format,"yml","(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"" yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead.")   - DEFINE_string (write_video_adam,"","Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`"" controls FPS.") -  - DEFINE_string (write_bvh,"","Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`.") -  + DEFINE_string (write_video_adam,"","Experimental, not available yet. E.g., `~/Desktop/adamResult.avi`. Flag `camera_fps`"" controls FPS.") +  + DEFINE_string (write_bvh,"","Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`.") +   DEFINE_string (udp_host,"","Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`.")    DEFINE_string (udp_port,"8051","Experimental, not available yet. Port number for UDP communication.")  

Function Documentation

- +
@@ -341,7 +343,7 @@ Functions - +
@@ -360,7 +362,7 @@ Functions - + @@ -405,7 +407,7 @@ Functions - +
"Flip/mirror each frame (e.g. for real time webcam demonstrations)." "Flip/mirror each frame (e.g., for real time webcam demonstrations)."   
@@ -424,7 +426,7 @@ Functions - + @@ -1021,7 +1023,7 @@ Functions - +
"Enable to keep the original source frame rate (e.g. for video). If the processing time is"" too "Enable to keep the original source frame rate (e.g., for video). If the processing time is"" too  long,
@@ -1040,7 +1042,7 @@ Functions - + @@ -1129,7 +1131,7 @@ Functions - +
"Do not write text on output images on GUI (e.g. number of current frame and people). It"" does not affect the pose rendering." "Do not write text on output images on GUI (e.g., number of current frame and people). It"" does not affect the pose rendering."   
@@ -1148,7 +1150,13 @@ Functions - + + + + + + + @@ -2215,7 +2223,7 @@ Functions - +
"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if"" "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E. g.,
if""  scaleRange = 0.4 and scalesNumber = 2,
@@ -2246,7 +2254,7 @@ Functions - + @@ -2371,6 +2379,44 @@ Functions
and 2 for GPU rendering""(slower but greater functionality, e.g.`alpha_X`flags).If- and 2 for GPU rendering""(slower but greater functionality, e.g.,`alpha_X`flags).If-  1,
+
+
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (write_coco_json_variant ,
,
Currently,
this option is experimental and only makes effect on car JSON generation.It""selects the COCO variant for cocoJsonSaver."  
)
+
+
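For reference, the new flag would be combined with the existing COCO JSON writers on the command line, e.g. (paths and the variant value are illustrative only): `build/examples/openpose/openpose.bin --image_dir examples/media/ --write_coco_json output.json --write_coco_json_variant 1`.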
@@ -2603,7 +2649,7 @@ Functions
- +
@@ -2622,7 +2668,13 @@ Functions - + + + + + + + @@ -2647,7 +2699,7 @@ Functions - +
"Model to be used. E.g. `COCO`  "Model to be used. E. g.,
`COCO`  18 keypoints,
@@ -2690,13 +2742,25 @@ Functions - + + + + + + + - + + + + + + + @@ -2805,7 +2869,7 @@ Functions - +
OP will choose the optimal aspect ratio depending on the user's""input value.E.g.the default`-1x368`is equivalent to`656x368`in 16:9 OP will choose the optimal aspect ratio depending on the user's""input value.E. g.,
the default`-1x368`is equivalent to`656x368`in 16:9  resolutions,
""e.g.full HD(1980x1080) and HD(1280x720) resolutions." ""e. g.,
full HD(1980x1080) and HD(1280x720) resolutions."   
@@ -2830,8 +2894,14 @@ Functions - - + + + + + + + + @@ -3141,7 +3211,7 @@ Functions - +
e.g. png, e. g.,
png ,
@@ -3166,7 +3236,13 @@ Functions - + + + + + + + @@ -3179,7 +3255,7 @@ Functions - +
not available yet.E.g.:`~/Desktop/adamResult.avi`.Flag`camera_fps`""controls FPS." not available yet.E. g.,
`~/Desktop/adamResult.avi`.Flag`camera_fps`""controls FPS."   
@@ -3204,7 +3280,13 @@ Functions - + + + + + + + @@ -3393,7 +3475,7 @@ Functions - +
not available yet.E.g.:`~/Desktop/mocapResult.bvh`." not available yet.E. g.,
`~/Desktop/mocapResult.bvh`."   
@@ -3418,7 +3500,13 @@ Functions - + + + + + + + diff --git a/html/flags_8hpp.js b/html/flags_8hpp.js index 6e2ab307..e7a9fa42 100644 --- a/html/flags_8hpp.js +++ b/html/flags_8hpp.js @@ -1,10 +1,10 @@ var flags_8hpp = [ - [ "DEFINE_bool", "flags_8hpp.html#a0e9cb0627d386e4d69e5eae0593d70ad", null ], + [ "DEFINE_bool", "flags_8hpp.html#a035ba7d578f36103d096569f1984ab61", null ], [ "DEFINE_bool", "flags_8hpp.html#a7fc3e871114ac35447499ffda035d51e", null ], - [ "DEFINE_bool", "flags_8hpp.html#a64c46584d79ef0b947ecd0a98b282839", null ], + [ "DEFINE_bool", "flags_8hpp.html#a9d45877ef21b9042f1b307d23d404e40", null ], [ "DEFINE_bool", "flags_8hpp.html#a6a501f229de1ba19801b77fd0565527b", null ], - [ "DEFINE_bool", "flags_8hpp.html#a5c66f20b5a9a1bc44d3c685fd37ff7f9", null ], + [ "DEFINE_bool", "flags_8hpp.html#a5e350d38962a702d8e0c658fbbb5cc64", null ], [ "DEFINE_bool", "flags_8hpp.html#ad739178d4295e31edd2f1e468bd2e600", null ], [ "DEFINE_bool", "flags_8hpp.html#a5e8200fc5fd8bc1e80b94b029ea8e18d", null ], [ "DEFINE_bool", "flags_8hpp.html#a669c6f4820af0275f40808b18147b98c", null ], @@ -18,10 +18,10 @@ var flags_8hpp = [ "DEFINE_bool", "flags_8hpp.html#ac210b56a3c46ff06143a62b0bb725cc6", null ], [ "DEFINE_bool", "flags_8hpp.html#a1a9ffc302eac69c8ba024da3edf01a5f", null ], [ "DEFINE_bool", "flags_8hpp.html#a231bec7b2113f31cebb760c849ca2841", null ], - [ "DEFINE_bool", "flags_8hpp.html#a14f23a4c6dd6c9290e3ee44bd79f96bf", null ], + [ "DEFINE_bool", "flags_8hpp.html#a0ee4f86d572ef52ec26dcbb37efb5e65", null ], [ "DEFINE_double", "flags_8hpp.html#a29f9621cd9361deec9512f8d8b53e6ba", null ], [ "DEFINE_double", "flags_8hpp.html#adf35e9261afec1673a42de6197f07f42", null ], - [ "DEFINE_double", "flags_8hpp.html#a94efdbeb7b9adb554d34399279312b8f", null ], + [ "DEFINE_double", "flags_8hpp.html#abf5c8a05ae8f86c8e4d8eedbceb84516", null ], [ "DEFINE_double", "flags_8hpp.html#afd21fc201e4b269bf0b07b8fce886173", null ], [ "DEFINE_double", "flags_8hpp.html#ad971ae1068651177e7a78ae68c022792", null ], [ "DEFINE_double", "flags_8hpp.html#a62bfc56794bb8ceaedd0eb7bc32a0f22", null ], @@ -48,10 +48,11 @@ var flags_8hpp = [ "DEFINE_int32", "flags_8hpp.html#afdfef14901f7b5e324d6983845f5ab50", null ], [ "DEFINE_int32", "flags_8hpp.html#a072bd893f4003b48bc7c99735eeeed39", null ], [ "DEFINE_int32", "flags_8hpp.html#a4cead735de5b43cfcae5c1139df3be1a", null ], - [ "DEFINE_int32", "flags_8hpp.html#ab2af299b6380dcd6dc06a95cceb056d4", null ], + [ "DEFINE_int32", "flags_8hpp.html#a65152cde78f8e7da1d33f557ec392312", null ], [ "DEFINE_int32", "flags_8hpp.html#ad0269da28dc2033e23b8ea84b7e793a2", null ], [ "DEFINE_int32", "flags_8hpp.html#ada5b7fef5063818fd668359e9e0b0504", null ], [ "DEFINE_int32", "flags_8hpp.html#a9a34e10f75069cf3283e535a77006775", null ], + [ "DEFINE_int32", "flags_8hpp.html#a39756aa6d6911435a326e18541db970a", null ], [ "DEFINE_string", "flags_8hpp.html#a9e7b1394aea185360a1fe19cf4e20a89", null ], [ "DEFINE_string", "flags_8hpp.html#aab02d4078b5f7999a582d9c5f4248676", null ], [ "DEFINE_string", "flags_8hpp.html#a7773e867133822f5601899975dc06adb", null ], @@ -59,12 +60,12 @@ var flags_8hpp = [ "DEFINE_string", "flags_8hpp.html#aff61246512375ff5941dc4110e127ca3", null ], [ "DEFINE_string", "flags_8hpp.html#a9ab689ebe20a261b20587af79123e79a", null ], [ "DEFINE_string", "flags_8hpp.html#a85be61a31eaa438a7e9c7d2baf51da47", null ], - [ "DEFINE_string", "flags_8hpp.html#acfd124be44003a59f2591c7584fd3c75", null ], - [ "DEFINE_string", "flags_8hpp.html#a830bcfa6645bf39a18f59d3b72f75edf", null ], + [ "DEFINE_string", 
"flags_8hpp.html#a055e1e0a1618e96f156f32278b41d3e4", null ], + [ "DEFINE_string", "flags_8hpp.html#a66d6d5c07371179c3702dbd1da9d9bd3", null ], [ "DEFINE_string", "flags_8hpp.html#af5ee5f61f0d36a03bb8647408f5e236b", null ], [ "DEFINE_string", "flags_8hpp.html#afaf97bbf6a49576782d25147bc865bed", null ], [ "DEFINE_string", "flags_8hpp.html#a7a3597e9216885470199ca1578eb7f69", null ], - [ "DEFINE_string", "flags_8hpp.html#ab077893503ebb5fba8cb300bd5f93d62", null ], + [ "DEFINE_string", "flags_8hpp.html#a6957bf419a0ca3478948a62a3ce5ecf0", null ], [ "DEFINE_string", "flags_8hpp.html#a3051d21eb51cc39eed5f781d8eaed960", null ], [ "DEFINE_string", "flags_8hpp.html#a9aa48b2ab293842bc42b96df9e97c9b8", null ], [ "DEFINE_string", "flags_8hpp.html#a26d1c7340fc87d4593dda754d54145a2", null ], @@ -73,11 +74,11 @@ var flags_8hpp = [ "DEFINE_string", "flags_8hpp.html#a242473077549869f06534e8a9ea1ddd6", null ], [ "DEFINE_string", "flags_8hpp.html#ad8c5173beb83c0f9996362b3a3fba820", null ], [ "DEFINE_string", "flags_8hpp.html#a5aaba99bdb163516d0297d2e09dd0c7d", null ], - [ "DEFINE_string", "flags_8hpp.html#a6c5341914694863528d3d93b23b45f01", null ], - [ "DEFINE_string", "flags_8hpp.html#ad0c09dd7ede747d69d36dc86c7ffa11c", null ], + [ "DEFINE_string", "flags_8hpp.html#ad7a48143249d608e2ad3a8413eb19c34", null ], + [ "DEFINE_string", "flags_8hpp.html#a72c09e09c94b67f88b53e1a94e7bfe0f", null ], [ "DEFINE_string", "flags_8hpp.html#a63936bd2bc53a453ba74066289ab9d29", null ], [ "DEFINE_string", "flags_8hpp.html#aa7b1c758c6d6e35c3227994a9ced0236", null ], [ "DEFINE_uint64", "flags_8hpp.html#a1433eaf1c5eb42e406e76bc6f8e517c3", null ], [ "DEFINE_uint64", "flags_8hpp.html#a22572531e5a4896c510639ac57cf522c", null ], - [ "DEFINE_uint64", "flags_8hpp.html#ac8fef8bb0234286e74b75214a750d674", null ] + [ "DEFINE_uint64", "flags_8hpp.html#a158227abc2af90abaec523a784b40fa8", null ] ]; \ No newline at end of file diff --git a/html/flags_8hpp_source.html b/html/flags_8hpp_source.html index 912844df..37b60598 100644 --- a/html/flags_8hpp_source.html +++ b/html/flags_8hpp_source.html @@ -121,15 +121,15 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
12  namespace gflags = google;
13 #endif
14 
-
15 // See all the available parameter options withe the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help`
+
15 // See all the available parameter options with the `--help` flag. E.g., `build/examples/openpose/openpose.bin --help`
16 // Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
-
17 // executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
+
17 // executable. E.g., for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
18 // Debugging/Other
19 DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
20  " 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
21  " low priority messages and 4 for important ones.");
-
22 DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"
-
23  " for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with"
+
22 DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"
+
23  " for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with"
24  " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"
25  " error.");
26 DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"
@@ -150,7 +150,7 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
41  " example video.");
42 DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20"
43  " images. Read all standard formats (jpg, png, bmp, etc.).");
-
44 DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera.");
+
44 DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera.");
45 DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir"
46  " camera index to run, where 0 corresponds to the detected flir camera with the lowest"
47  " serial number, and `n` to the `n`-th lowest serial number camera.");
@@ -158,15 +158,15 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
49 DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0.");
50 DEFINE_uint64(frame_step, 1, "Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames"
51  " 0, 5, 10, etc..");
-
52 DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to"
+
52 DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to"
53  " 10, it will process 11 frames (0-10).");
-
54 DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g. for real time webcam demonstrations).");
+
54 DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g., for real time webcam demonstrations).");
55 DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270.");
-
56 DEFINE_bool(frames_repeat, false, "Repeat frames when finished.");
-
57 DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g. for video). If the processing time is"
+
56 DEFINE_bool(frames_repeat, false, "Repeat frames when finished.");
+
57 DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g., for video). If the processing time is"
58  " too long, it will skip frames. If it is too fast, it will slow it down.");
59 DEFINE_string(camera_parameter_folder, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located.");
-
60 DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the"
+
60 DEFINE_bool(frame_keep_distortion, false, "If false (default), it will undistortionate the image based on the"
61  " `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.,"
62  " it will leave it as it is.");
63 #endif // OPENPOSE_FLAGS_DISABLE_PRODUCER
@@ -192,34 +192,34 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
83  " been detected. However, it might also include false negatives by removing very small or"
84  " highly occluded people. -1 will keep them all.");
85 // OpenPose Body Pose
-
86 DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face"
+
86 DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face"
87  " keypoint detection.");
-
88 DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), "
+
88 DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g., `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), "
89  "`MPI_4_layers` (15 keypoints, even faster but less accurate).");
90 DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is"
91  " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"
92  " closest aspect ratio possible to the images or videos to be processed. Using `-1` in"
93  " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"
-
94  " input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"
-
95  " e.g. full HD (1980x1080) and HD (1280x720) resolutions.");
+
94  " input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"
+
95  " e.g., full HD (1980x1080) and HD (1280x720) resolutions.");
96 DEFINE_int32(scale_number, 1, "Number of scales to average.");
97 DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."
98  " If you want to change the initial scale, you actually want to multiply the"
99  " `net_resolution` by your desired initial scale.");
100 // OpenPose Body Pose Heatmaps and Part Candidates
-
101 DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"
+
101 DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"
102  " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps."
103  " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential"
104  " memory order: body parts + bkg + PAFs. It will follow the order on"
105  " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will"
106  " considerably decrease. Not required for OpenPose, enable it only if you intend to"
107  " explicitly use this information later.");
-
108 DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to"
+
108 DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to"
109  " background.");
-
110 DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs.");
+
110 DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs.");
111 DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer"
112  " rounded [0,255]; and 3 for no scaling.");
-
113 DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the"
+
113 DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the"
114  " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all"
115  " the detected body parts, before being assembled into people. Note that the number of"
116  " candidates is equal or higher than the number of final body parts (i.e. after being"
@@ -227,7 +227,7 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
118  " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly"
119  " use this information.");
120 // OpenPose Face
-
121 DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g."
+
121 DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g."
122  " `model_folder`. Note that this will considerable slow down the performance and increse"
123  " the required GPU memory. In addition, the greater number of people on the image, the"
124  " slower OpenPose will be.");
@@ -235,7 +235,7 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
126  " detector. 320x320 usually works fine while giving a substantial speed up when multiple"
127  " faces on the image.");
128 // OpenPose Hand
-
129 DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g."
+
129 DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g."
130  " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase"
131  " the required GPU memory and its speed depends on the number of people.");
132 DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint"
@@ -243,14 +243,14 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
134 DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"
135  " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4.");
136 DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"
-
137  " between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if"
+
137  " between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if"
138  " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.");
-
139 DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"
+
139 DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"
140  " is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it"
141  " simply looks for hands in positions at which hands were located in previous frames, but"
142  " it does not guarantee the same person ID among frames.");
143 // OpenPose 3-D Reconstruction
-
144 DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."
+
144 DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."
145  " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"
146  " results. Note that it will only display 1 person. If multiple people is present, it will"
147  " fail.");
@@ -261,7 +261,7 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
152  " `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the"
153  " parameter folder as this number indicates.");
154 // Extra algorithms
-
155 DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames.");
+
155 DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames.");
156 DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The"
157  " value indicates the number of frames where tracking is run between each OpenPose keypoint"
158  " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint"
@@ -273,7 +273,7 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
164 DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body"
165  " part heat map, 19 for the background heat map, 20 for all the body part heat maps"
166  " together, 21 for all the PAFs, 22-40 for each body part pair PAF.");
-
167 DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"
+
167 DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"
168  " background, instead of being rendered into the original image. Related: `part_to_show`,"
169  " `alpha_pose`, and `alpha_pose`.");
170 // OpenPose Rendering Pose
@@ -282,7 +282,7 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
173  " while small thresholds (~0.1) will also output guessed and occluded keypoints, but also"
174  " more false positives (i.e. wrong detections).");
175 DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"
-
176  " (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if"
+
176  " (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if"
177  " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render"
178  " both `outputData` and `cvOutputData` with the original image and desired body part to be"
179  " shown (i.e. keypoints, heat maps or PAFs).");
@@ -304,8 +304,8 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
195 DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand.");
196 #ifndef OPENPOSE_FLAGS_DISABLE_DISPLAY
197 // Display
-
198 DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle).");
-
199 DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g. number of current frame and people). It"
+
198 DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle).");
+
199 DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g., number of current frame and people). It"
200  " does not affect the pose rendering.");
201 DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server"
202  " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D"
@@ -313,7 +313,7 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
204 #endif // OPENPOSE_FLAGS_DISABLE_DISPLAY
205 // Result Saving
206 DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format.");
-
207 DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV"
+
207 DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV"
208  " function cv::imwrite for all compatible extensions.");
209 DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the"
210  " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"
@@ -322,27 +322,29 @@ $(document).ready(function(){initNavTree('flags_8hpp_source.html','');});
213  " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled).");
214 DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format.");
215 DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format.");
-
216 DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"
-
217  " must be enabled.");
-
218 DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`."
-
219  " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"
-
220  " floating values.");
-
221 DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"
-
222  " with `write_keypoint_format`.");
-
223 DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"
-
224  " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead.");
-
225 // Result Saving - Extra Algorithms
-
226 DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`"
-
227  " controls FPS.");
-
228 DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`.");
-
229 // UDP communication
-
230 DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`.");
-
231 DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication.");
-
232 #endif // OPENPOSE_FLAGS_DISABLE_POSE
-
233 
-
234 #endif // OPENPOSE_FLAGS_HPP
-
DEFINE_bool(disable_multi_thread, false,"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error.")
+
216 DEFINE_int32(write_coco_json_variant, 0, "Currently, this option is experimental and only makes effect on car JSON generation. It"
+
217  " selects the COCO variant for cocoJsonSaver.");
+
218 DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"
+
219  " must be enabled.");
+
220 DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`."
+
221  " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"
+
222  " floating values.");
+
223 DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"
+
224  " with `write_keypoint_format`.");
+
225 DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"
+
226  " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead.");
+
227 // Result Saving - Extra Algorithms
+
228 DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g., `~/Desktop/adamResult.avi`. Flag `camera_fps`"
+
229  " controls FPS.");
+
230 DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`.");
+
231 // UDP communication
+
232 DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`.");
+
233 DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication.");
+
234 #endif // OPENPOSE_FLAGS_DISABLE_POSE
+
235 
+
236 #endif // OPENPOSE_FLAGS_HPP
DEFINE_double(camera_fps, 30.0,"Frame rate for the webcam (also used when saving video). Set this value to the minimum"" value between the OpenPose displayed speed and the webcam real frame rate.")
+
DEFINE_bool(disable_multi_thread, false,"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error.")
DEFINE_string(camera_resolution,"-1x-1","Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the"" default 1280x720 for `--camera`, or the maximum flir camera resolution available for"" `--flir_camera`")
DEFINE_int32(logging_level, 3,"The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"" low priority messages and 4 for important ones.")
DEFINE_uint64(frame_first, 0,"Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0.")
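The flags.hpp listing above documents ordinary gflags macros, so each DEFINE_* entry becomes a FLAGS_* variable once the command line is parsed. The following self-contained sketch (not OpenPose code) parses two of those flags and reproduces the worked example from the `hand_scale_range` help text, where a range of 0.4 with 2 scales gives 0.8 and 1.2. The defaults chosen here, the single-scale case, and the centering formula are assumptions made for illustration, and a gflags build exposing the gflags:: namespace is assumed.

```
// Illustrative sketch only: parse two of the flags documented above with
// gflags and reproduce the hand-scale example from the help text.
#include <iostream>
#include <vector>
#include <gflags/gflags.h>

// Redefined here only so the sketch is self-contained; in OpenPose these
// macros live in flags.hpp with the longer help strings shown above.
// Note: OpenPose's own default for hand_scale_number is 1; 2 is used here
// to match the worked example in the help text.
DEFINE_int32(hand_scale_number, 2, "Number of scales for the hand keypoint detector.");
DEFINE_double(hand_scale_range, 0.4, "Total range between smallest and biggest scale, centered on ratio 1.");

int main(int argc, char* argv[])
{
    // Fill the FLAGS_* variables from argv (standard gflags behaviour).
    gflags::ParseCommandLineFlags(&argc, &argv, true);

    const int number = FLAGS_hand_scale_number;
    const double range = FLAGS_hand_scale_range;

    // Spread `number` scales evenly over `range`, centered on 1, as the help
    // text describes: range 0.4 with 2 scales yields 0.8 and 1.2. This is an
    // illustrative reconstruction of that example, not OpenPose's code path.
    std::vector<double> scales;
    if (number <= 1)
        scales.push_back(1.); // assume a single scale means the original ratio
    else
        for (int i = 0; i < number; ++i)
            scales.push_back(1. - range / 2. + i * range / (number - 1));

    for (const auto scale : scales)
        std::cout << scale << "\n"; // prints 0.8 and 1.2 with the defaults above
    return 0;
}
```

Built against gflags and run with no arguments, the sketch prints 0.8 and 1.2, matching the example given in the `hand_scale_range` help string.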
diff --git a/html/functions_c.html b/html/functions_c.html index 21a1c46a..ff829004 100644 --- a/html/functions_c.html +++ b/html/functions_c.html @@ -194,7 +194,7 @@ $(document).ready(function(){initNavTree('functions_c.html','');}); , op::Datum
  • CocoJsonSaver() -: op::CocoJsonSaver +: op::CocoJsonSaver
  • comma() : op::JsonOfstream diff --git a/html/functions_func_c.html b/html/functions_func_c.html index af1a725c..f898c911 100644 --- a/html/functions_func_c.html +++ b/html/functions_func_c.html @@ -173,7 +173,7 @@ $(document).ready(function(){initNavTree('functions_func_c.html','');}); , op::Datum
  • CocoJsonSaver() -: op::CocoJsonSaver +: op::CocoJsonSaver
  • comma() : op::JsonOfstream diff --git a/html/functions_func_w.html b/html/functions_func_w.html index 2468c06e..a7881e1f 100644 --- a/html/functions_func_w.html +++ b/html/functions_func_w.html @@ -348,7 +348,7 @@ $(document).ready(function(){initNavTree('functions_func_w.html','');}); : op::WrapperStructInput
  • WrapperStructOutput() -: op::WrapperStructOutput +: op::WrapperStructOutput
  • WrapperStructPose() : op::WrapperStructPose diff --git a/html/functions_vars.html b/html/functions_vars.html index 1daaae17..369a009a 100644 --- a/html/functions_vars.html +++ b/html/functions_vars.html @@ -664,6 +664,9 @@ $(document).ready(function(){initNavTree('functions_vars.html','');});
  • writeCocoJson : op::WrapperStructOutput
  • +
  • writeCocoJsonVariant +: op::WrapperStructOutput +
  • writeHeatMaps : op::WrapperStructOutput
  • diff --git a/html/functions_w.html b/html/functions_w.html index 03e6a447..b5d27846 100644 --- a/html/functions_w.html +++ b/html/functions_w.html @@ -340,8 +340,8 @@ $(document).ready(function(){initNavTree('functions_w.html','');}); : op::WQueueOrderer< TDatums >
  • Wrapper() -: cl::detail::Wrapper< T > -, cl::detail::Wrapper< cl_device_id > +: cl::detail::Wrapper< T > +, cl::detail::Wrapper< cl_device_id >
  • WrapperStructExtra() : op::WrapperStructExtra @@ -356,7 +356,7 @@ $(document).ready(function(){initNavTree('functions_w.html','');}); : op::WrapperStructInput
  • WrapperStructOutput() -: op::WrapperStructOutput +: op::WrapperStructOutput
  • WrapperStructPose() : op::WrapperStructPose @@ -365,7 +365,7 @@ $(document).ready(function(){initNavTree('functions_w.html','');}); : op::WrapperT< TDatums, TDatumsSP, TWorker >
  • write() -: op::VideoSaver +: op::VideoSaver
  • writeBvh : op::WrapperStructOutput @@ -376,6 +376,9 @@ $(document).ready(function(){initNavTree('functions_w.html','');});
  • writeCocoJson : op::WrapperStructOutput
  • +
  • writeCocoJsonVariant +: op::WrapperStructOutput +
  • writeHeatMaps : op::WrapperStructOutput
  • diff --git a/html/globals.html b/html/globals.html index 15d3239b..e3182fa1 100644 --- a/html/globals.html +++ b/html/globals.html @@ -223,7 +223,7 @@ $(document).ready(function(){initNavTree('globals.html','');}); : datum.hpp
  • DEFINE_bool() -: flags.hpp +: flags.hpp
  • DEFINE_double() : flags.hpp @@ -232,13 +232,13 @@ $(document).ready(function(){initNavTree('globals.html','');}); : flags.hpp
  • DEFINE_string() -: flags.hpp +: flags.hpp
  • DEFINE_TEMPLATE_DATUM : datum.hpp
  • DEFINE_uint64() -: flags.hpp +: flags.hpp
  • DELETE_COPY : macros.hpp diff --git a/html/globals_func.html b/html/globals_func.html index 6fb1e6d1..004e822b 100644 --- a/html/globals_func.html +++ b/html/globals_func.html @@ -120,7 +120,7 @@ $(document).ready(function(){initNavTree('globals_func.html','');});

    - d -

    • DEFINE_bool() -: flags.hpp +: flags.hpp
    • DEFINE_double() : flags.hpp @@ -129,7 +129,7 @@ $(document).ready(function(){initNavTree('globals_func.html','');}); : flags.hpp
    • DEFINE_string() -: flags.hpp +: flags.hpp
    • DEFINE_uint64() : flags.hpp diff --git a/html/index.html b/html/index.html index 4458987e..936a368d 100644 --- a/html/index.html +++ b/html/index.html @@ -184,7 +184,7 @@ $(document).ready(function(){initNavTree('index.html','');});

      Quick Start

      Most users do not need the OpenPose C++/Python API, but can simply use the OpenPose Demo:

        -
      • OpenPose Demo: To easily process images/video/webcam and display/save the results. See doc/demo_overview.md. E.g. run OpenPose in a video with: ```

        Ubuntu

        +
      • OpenPose Demo: To easily process images/video/webcam and display/save the results. See doc/demo_overview.md. E.g., run OpenPose in a video with: ```

        Ubuntu

      ./build/examples/openpose/openpose.bin --video examples/media/video.avi :: Windows - Portable Demo bin\OpenPoseDemo.exe --video examples\media\video.avi ```

      @@ -195,7 +195,7 @@ $(document).ready(function(){initNavTree('index.html','');});
    • Adding an extra module: Check ./doc/library_add_new_module.md "doc/library_add_new_module.md".
    • Standalone face or hand detector:
      • Face keypoint detection without body keypoint detection: If you want to speed it up (but also reduce amount of detected faces), check the OpenCV-face-detector approach in doc/standalone_face_or_hand_keypoint_detector.md.
      • -
      • Use your own face/hand detector: You can use the hand and/or face keypoint detectors with your own face or hand detectors, rather than using the body detector. E.g. useful for camera views at which the hands are visible but not the body (OpenPose detector would fail). See doc/standalone_face_or_hand_keypoint_detector.md.
      • +
      • Use your own face/hand detector: You can use the hand and/or face keypoint detectors with your own face or hand detectors, rather than using the body detector. E.g., useful for camera views at which the hands are visible but not the body (OpenPose detector would fail). See doc/standalone_face_or_hand_keypoint_detector.md.
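For users who do go through the C++ API rather than the demo binary, the substantive change behind this documentation update is the new writeCocoJsonVariant member of op::WrapperStructOutput, added alongside the --write_coco_json_variant flag (see the functions_vars.html, search-index and WrapperStructOutput hunks elsewhere in this patch). Below is a minimal sketch of filling in the COCO JSON members on a default-constructed struct; it assumes the struct stays default-constructible with public members, that writeCocoJson holds a string path and writeCocoJsonVariant an int as the corresponding flags suggest, and the usual openpose/wrapper include path. It is not the documented constructor call, and the output path is purely hypothetical.

```
// Illustrative only: configuring the COCO JSON output members of
// op::WrapperStructOutput, including the writeCocoJsonVariant member
// introduced by the commit this documentation update corresponds to.
#include <openpose/wrapper/wrapperStructOutput.hpp>

op::WrapperStructOutput makeOutputConfig()
{
    // Default-construct, then override only the COCO JSON related members
    // (assumes public members, as in the rest of the WrapperStruct* family).
    op::WrapperStructOutput outputConfig;
    outputConfig.writeCocoJson = "output/coco_results.json"; // hypothetical output path
    outputConfig.writeCocoJsonVariant = 0;                   // default variant, mirroring --write_coco_json_variant
    return outputConfig;
}
```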
    diff --git a/html/navtree.js b/html/navtree.js index 11ba6219..a95eca14 100644 --- a/html/navtree.js +++ b/html/navtree.js @@ -46,10 +46,10 @@ var NAVTREEINDEX = "classop_1_1_queue_base.html#a32ac0e4b14a310aee62ce817e86c0356", "classop_1_1_w_heat_map_saver.html#a20e82b121a580c578f69cbb0401c4cb0", "fast_math_8hpp.html", -"gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd", -"pose_parameters_8hpp.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7", -"structop_1_1_point.html#a9f80114d18ec8055360222d975bcd5a8", -"w_keep_top_n_people_8hpp.html" +"gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bf", +"pose_parameters_8hpp.html#a692472b562fba051964f9ddea07b97d7", +"structop_1_1_point.html#a9759f95e852025014cb071aefb242cef", +"w_joint_angle_estimation_8hpp.html" ]; var SYNCONMSG = 'click to disable panel synchronisation'; diff --git a/html/navtreeindex10.js b/html/navtreeindex10.js index f463c149..061321cb 100644 --- a/html/navtreeindex10.js +++ b/html/navtreeindex10.js @@ -1,5 +1,7 @@ var NAVTREEINDEX10 = { +"w_joint_angle_estimation_8hpp.html":[2,0,0,0,0,4], +"w_joint_angle_estimation_8hpp_source.html":[2,0,0,0,0,4], "w_keep_top_n_people_8hpp.html":[2,0,0,0,2,18], "w_keep_top_n_people_8hpp.html#aaee32c4c68404e5086844bcb911b7a20":[2,0,0,0,2,18,1], "w_keep_top_n_people_8hpp_source.html":[2,0,0,0,2,18], diff --git a/html/navtreeindex2.js b/html/navtreeindex2.js index 8e709ddc..8ef30316 100644 --- a/html/navtreeindex2.js +++ b/html/navtreeindex2.js @@ -119,7 +119,7 @@ var NAVTREEINDEX2 = "classop_1_1_camera_parameter_reader.html#ade743d816f255e3278bb72c761d82ba6":[1,0,3,2,4], "classop_1_1_camera_parameter_reader.html#adf7ad0ef26603129a251fdc166ffa548":[1,0,3,2,5], "classop_1_1_coco_json_saver.html":[1,0,3,34], -"classop_1_1_coco_json_saver.html#a0ce96fcd7dfaa2791f514a8363d018ff":[1,0,3,34,0], +"classop_1_1_coco_json_saver.html#a380d4638d09cfe9c26551cc2efb7c3a8":[1,0,3,34,0], "classop_1_1_coco_json_saver.html#a3a7fd417aa5d85044fb0703379af1a23":[1,0,3,34,2], "classop_1_1_coco_json_saver.html#a8bbfab84a7816cb0f189f243246f744b":[1,0,3,34,1], "classop_1_1_cv_mat_to_op_input.html":[1,0,3,6], diff --git a/html/navtreeindex6.js b/html/navtreeindex6.js index e68486c4..5eabbf8e 100644 --- a/html/navtreeindex6.js +++ b/html/navtreeindex6.js @@ -62,86 +62,87 @@ var NAVTREEINDEX6 = "filestream_2headers_8hpp.html":[2,0,0,0,4,5], "filestream_2headers_8hpp_source.html":[2,0,0,0,4,5], "flags_8hpp.html":[2,0,0,0,15], +"flags_8hpp.html#a035ba7d578f36103d096569f1984ab61":[2,0,0,0,15,0], +"flags_8hpp.html#a055e1e0a1618e96f156f32278b41d3e4":[2,0,0,0,15,60], "flags_8hpp.html#a072bd893f4003b48bc7c99735eeeed39":[2,0,0,0,15,46], "flags_8hpp.html#a0a28dd6ee5503f070d08cc9e4a3aff05":[2,0,0,0,15,13], -"flags_8hpp.html#a0e9cb0627d386e4d69e5eae0593d70ad":[2,0,0,0,15,0], +"flags_8hpp.html#a0ee4f86d572ef52ec26dcbb37efb5e65":[2,0,0,0,15,18], "flags_8hpp.html#a10efaeb1ea3a8478388cc7d0bfd4e59e":[2,0,0,0,15,33], -"flags_8hpp.html#a1433eaf1c5eb42e406e76bc6f8e517c3":[2,0,0,0,15,77], -"flags_8hpp.html#a14f23a4c6dd6c9290e3ee44bd79f96bf":[2,0,0,0,15,18], +"flags_8hpp.html#a1433eaf1c5eb42e406e76bc6f8e517c3":[2,0,0,0,15,78], +"flags_8hpp.html#a158227abc2af90abaec523a784b40fa8":[2,0,0,0,15,80], "flags_8hpp.html#a18fc454ffeef53c7c73d69c67d1a73fc":[2,0,0,0,15,38], "flags_8hpp.html#a1a9ffc302eac69c8ba024da3edf01a5f":[2,0,0,0,15,16], "flags_8hpp.html#a1edea5f45026b353b6e6cc3b196767a0":[2,0,0,0,15,40], "flags_8hpp.html#a20c481950df0272f0b7b0cde67d8e72a":[2,0,0,0,15,44], 
-"flags_8hpp.html#a22572531e5a4896c510639ac57cf522c":[2,0,0,0,15,78], +"flags_8hpp.html#a22572531e5a4896c510639ac57cf522c":[2,0,0,0,15,79], "flags_8hpp.html#a231bec7b2113f31cebb760c849ca2841":[2,0,0,0,15,17], -"flags_8hpp.html#a242473077549869f06534e8a9ea1ddd6":[2,0,0,0,15,70], -"flags_8hpp.html#a26d1c7340fc87d4593dda754d54145a2":[2,0,0,0,15,67], +"flags_8hpp.html#a242473077549869f06534e8a9ea1ddd6":[2,0,0,0,15,71], +"flags_8hpp.html#a26d1c7340fc87d4593dda754d54145a2":[2,0,0,0,15,68], "flags_8hpp.html#a284252d3d255ad5e5c35815d720fd67a":[2,0,0,0,15,36], "flags_8hpp.html#a29f9621cd9361deec9512f8d8b53e6ba":[2,0,0,0,15,19], -"flags_8hpp.html#a3051d21eb51cc39eed5f781d8eaed960":[2,0,0,0,15,65], +"flags_8hpp.html#a3051d21eb51cc39eed5f781d8eaed960":[2,0,0,0,15,66], "flags_8hpp.html#a311e487137e17445e6939abb7cc0cf8f":[2,0,0,0,15,42], +"flags_8hpp.html#a39756aa6d6911435a326e18541db970a":[2,0,0,0,15,52], "flags_8hpp.html#a4c800b6a91b02874d006da33ab8432cd":[2,0,0,0,15,14], "flags_8hpp.html#a4cead735de5b43cfcae5c1139df3be1a":[2,0,0,0,15,47], "flags_8hpp.html#a4da8f1bc0afb58f2a757ab6d90c6042e":[2,0,0,0,15,12], "flags_8hpp.html#a53eb4c15af968876f31ca0be54d251b3":[2,0,0,0,15,25], -"flags_8hpp.html#a5aaba99bdb163516d0297d2e09dd0c7d":[2,0,0,0,15,72], -"flags_8hpp.html#a5c66f20b5a9a1bc44d3c685fd37ff7f9":[2,0,0,0,15,4], -"flags_8hpp.html#a5db17b8bc6df4fe40b556af7157dcbf7":[2,0,0,0,15,55], +"flags_8hpp.html#a5aaba99bdb163516d0297d2e09dd0c7d":[2,0,0,0,15,73], +"flags_8hpp.html#a5db17b8bc6df4fe40b556af7157dcbf7":[2,0,0,0,15,56], +"flags_8hpp.html#a5e350d38962a702d8e0c658fbbb5cc64":[2,0,0,0,15,4], "flags_8hpp.html#a5e8200fc5fd8bc1e80b94b029ea8e18d":[2,0,0,0,15,6], "flags_8hpp.html#a62bfc56794bb8ceaedd0eb7bc32a0f22":[2,0,0,0,15,24], -"flags_8hpp.html#a63936bd2bc53a453ba74066289ab9d29":[2,0,0,0,15,75], -"flags_8hpp.html#a64c46584d79ef0b947ecd0a98b282839":[2,0,0,0,15,2], +"flags_8hpp.html#a63936bd2bc53a453ba74066289ab9d29":[2,0,0,0,15,76], +"flags_8hpp.html#a65152cde78f8e7da1d33f557ec392312":[2,0,0,0,15,48], "flags_8hpp.html#a669c6f4820af0275f40808b18147b98c":[2,0,0,0,15,7], +"flags_8hpp.html#a66d6d5c07371179c3702dbd1da9d9bd3":[2,0,0,0,15,61], +"flags_8hpp.html#a6957bf419a0ca3478948a62a3ce5ecf0":[2,0,0,0,15,65], "flags_8hpp.html#a6a501f229de1ba19801b77fd0565527b":[2,0,0,0,15,3], "flags_8hpp.html#a6b12d681ace972ae8eede484505c50af":[2,0,0,0,15,28], -"flags_8hpp.html#a6c5341914694863528d3d93b23b45f01":[2,0,0,0,15,73], "flags_8hpp.html#a6c9c8a5843dd8b93e009bf29dc31cde2":[2,0,0,0,15,35], "flags_8hpp.html#a71043931875d1f5ec677fd69ae96c632":[2,0,0,0,15,27], +"flags_8hpp.html#a72c09e09c94b67f88b53e1a94e7bfe0f":[2,0,0,0,15,75], "flags_8hpp.html#a73ee51843ead02ee6358fe39dcbeffde":[2,0,0,0,15,31], -"flags_8hpp.html#a7773e867133822f5601899975dc06adb":[2,0,0,0,15,54], -"flags_8hpp.html#a7a3597e9216885470199ca1578eb7f69":[2,0,0,0,15,63], +"flags_8hpp.html#a7773e867133822f5601899975dc06adb":[2,0,0,0,15,55], +"flags_8hpp.html#a7a3597e9216885470199ca1578eb7f69":[2,0,0,0,15,64], "flags_8hpp.html#a7fc3e871114ac35447499ffda035d51e":[2,0,0,0,15,1], -"flags_8hpp.html#a830bcfa6645bf39a18f59d3b72f75edf":[2,0,0,0,15,60], "flags_8hpp.html#a8511765700f652000f2c1c2b1a5df9f9":[2,0,0,0,15,37], -"flags_8hpp.html#a85be61a31eaa438a7e9c7d2baf51da47":[2,0,0,0,15,58], +"flags_8hpp.html#a85be61a31eaa438a7e9c7d2baf51da47":[2,0,0,0,15,59], "flags_8hpp.html#a890b2b8df8a57fe4e9baa465c6584ccf":[2,0,0,0,15,29], "flags_8hpp.html#a8e9de971b409cfe7fdded7f0d47c502d":[2,0,0,0,15,32], -"flags_8hpp.html#a94efdbeb7b9adb554d34399279312b8f":[2,0,0,0,15,21], 
"flags_8hpp.html#a9a34e10f75069cf3283e535a77006775":[2,0,0,0,15,51], -"flags_8hpp.html#a9aa48b2ab293842bc42b96df9e97c9b8":[2,0,0,0,15,66], -"flags_8hpp.html#a9ab689ebe20a261b20587af79123e79a":[2,0,0,0,15,57], -"flags_8hpp.html#a9e7b1394aea185360a1fe19cf4e20a89":[2,0,0,0,15,52], -"flags_8hpp.html#aa3fe7c4c07492e6553a6c2d25ebd76b4":[2,0,0,0,15,69], +"flags_8hpp.html#a9aa48b2ab293842bc42b96df9e97c9b8":[2,0,0,0,15,67], +"flags_8hpp.html#a9ab689ebe20a261b20587af79123e79a":[2,0,0,0,15,58], +"flags_8hpp.html#a9d45877ef21b9042f1b307d23d404e40":[2,0,0,0,15,2], +"flags_8hpp.html#a9e7b1394aea185360a1fe19cf4e20a89":[2,0,0,0,15,53], +"flags_8hpp.html#aa3fe7c4c07492e6553a6c2d25ebd76b4":[2,0,0,0,15,70], "flags_8hpp.html#aa5a1826a500d7131fefb480ccd1713fb":[2,0,0,0,15,39], "flags_8hpp.html#aa797dd033c6f3c4d2654e7000939d270":[2,0,0,0,15,43], -"flags_8hpp.html#aa7b1c758c6d6e35c3227994a9ced0236":[2,0,0,0,15,76], +"flags_8hpp.html#aa7b1c758c6d6e35c3227994a9ced0236":[2,0,0,0,15,77], "flags_8hpp.html#aa80e5f9914dd35c852941282aa229b21":[2,0,0,0,15,41], "flags_8hpp.html#aa941a6193b13f3d138437cf2a84e73ff":[2,0,0,0,15,10], -"flags_8hpp.html#aab02d4078b5f7999a582d9c5f4248676":[2,0,0,0,15,53], -"flags_8hpp.html#ab077893503ebb5fba8cb300bd5f93d62":[2,0,0,0,15,64], -"flags_8hpp.html#ab2af299b6380dcd6dc06a95cceb056d4":[2,0,0,0,15,48], +"flags_8hpp.html#aab02d4078b5f7999a582d9c5f4248676":[2,0,0,0,15,54], "flags_8hpp.html#ab41c02abe3634f0db65123ecda964a31":[2,0,0,0,15,34], +"flags_8hpp.html#abf5c8a05ae8f86c8e4d8eedbceb84516":[2,0,0,0,15,21], "flags_8hpp.html#ac17a7af1030d7f5bd69672fec961c083":[2,0,0,0,15,30], "flags_8hpp.html#ac210b56a3c46ff06143a62b0bb725cc6":[2,0,0,0,15,15], -"flags_8hpp.html#ac6c2099e630e05f867ee10b43f35dc65":[2,0,0,0,15,68], +"flags_8hpp.html#ac6c2099e630e05f867ee10b43f35dc65":[2,0,0,0,15,69], "flags_8hpp.html#ac881df85c5be736d05822ad98ac0496f":[2,0,0,0,15,9], -"flags_8hpp.html#ac8fef8bb0234286e74b75214a750d674":[2,0,0,0,15,79], -"flags_8hpp.html#acfd124be44003a59f2591c7584fd3c75":[2,0,0,0,15,59], "flags_8hpp.html#ad0269da28dc2033e23b8ea84b7e793a2":[2,0,0,0,15,49], -"flags_8hpp.html#ad0c09dd7ede747d69d36dc86c7ffa11c":[2,0,0,0,15,74], "flags_8hpp.html#ad739178d4295e31edd2f1e468bd2e600":[2,0,0,0,15,5], -"flags_8hpp.html#ad8c5173beb83c0f9996362b3a3fba820":[2,0,0,0,15,71], +"flags_8hpp.html#ad7a48143249d608e2ad3a8413eb19c34":[2,0,0,0,15,74], +"flags_8hpp.html#ad8c5173beb83c0f9996362b3a3fba820":[2,0,0,0,15,72], "flags_8hpp.html#ad971ae1068651177e7a78ae68c022792":[2,0,0,0,15,23], "flags_8hpp.html#ada5b7fef5063818fd668359e9e0b0504":[2,0,0,0,15,50], "flags_8hpp.html#adf35e9261afec1673a42de6197f07f42":[2,0,0,0,15,20], "flags_8hpp.html#aea11a0489c2af278990d3ddff10960fd":[2,0,0,0,15,11], "flags_8hpp.html#af57ca90aafa15b707af299527b0aff6f":[2,0,0,0,15,8], -"flags_8hpp.html#af5ee5f61f0d36a03bb8647408f5e236b":[2,0,0,0,15,61], +"flags_8hpp.html#af5ee5f61f0d36a03bb8647408f5e236b":[2,0,0,0,15,62], "flags_8hpp.html#af654373ad667b1683f30e350331ea709":[2,0,0,0,15,26], -"flags_8hpp.html#afaf97bbf6a49576782d25147bc865bed":[2,0,0,0,15,62], +"flags_8hpp.html#afaf97bbf6a49576782d25147bc865bed":[2,0,0,0,15,63], "flags_8hpp.html#afd21fc201e4b269bf0b07b8fce886173":[2,0,0,0,15,22], "flags_8hpp.html#afdfef14901f7b5e324d6983845f5ab50":[2,0,0,0,15,45], -"flags_8hpp.html#aff61246512375ff5941dc4110e127ca3":[2,0,0,0,15,56], +"flags_8hpp.html#aff61246512375ff5941dc4110e127ca3":[2,0,0,0,15,57], "flags_8hpp_source.html":[2,0,0,0,15], "flags_to_open_pose_8hpp.html":[2,0,0,0,13,5], 
"flags_to_open_pose_8hpp.html#a1fbdd7c33f90d0237582c9441162a7d8":[2,0,0,0,13,5,6], @@ -248,6 +249,5 @@ var NAVTREEINDEX6 = "grid_pattern_functions_8hpp_source.html":[2,0,0,0,1,1], "gui3_d_8hpp.html":[2,0,0,0,6,3], "gui3_d_8hpp_source.html":[2,0,0,0,6,3], -"gui_2enum_classes_8hpp.html":[2,0,0,0,6,0], -"gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bf":[2,0,0,0,6,0,1] +"gui_2enum_classes_8hpp.html":[2,0,0,0,6,0] }; diff --git a/html/navtreeindex7.js b/html/navtreeindex7.js index 4fd32b2d..62d15238 100644 --- a/html/navtreeindex7.js +++ b/html/navtreeindex7.js @@ -1,5 +1,6 @@ var NAVTREEINDEX7 = { +"gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bf":[2,0,0,0,6,0,1], "gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd":[2,0,0,0,6,0,1,1], "gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bfae7ec409749889353b8f83a6b04159420":[2,0,0,0,6,0,1,0], "gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6":[2,0,0,0,6,0,0], @@ -107,12 +108,12 @@ var NAVTREEINDEX7 = "namespaceboost.html":[1,0,0], "namespacecaffe.html":[1,0,1], "namespacecaffe.html":[0,0,1], -"namespacecl.html":[0,0,2], "namespacecl.html":[1,0,2], -"namespacecl_1_1compatibility.html":[1,0,2,0], +"namespacecl.html":[0,0,2], "namespacecl_1_1compatibility.html":[0,0,2,0], -"namespacecl_1_1detail.html":[1,0,2,1], +"namespacecl_1_1compatibility.html":[1,0,2,0], "namespacecl_1_1detail.html":[0,0,2,1], +"namespacecl_1_1detail.html":[1,0,2,1], "namespacemembers.html":[0,1,0,0], "namespacemembers.html":[0,1,0], "namespacemembers_b.html":[0,1,0,1], @@ -122,8 +123,8 @@ var NAVTREEINDEX7 = "namespacemembers_enum.html":[0,1,4], "namespacemembers_eval.html":[0,1,5], "namespacemembers_f.html":[0,1,0,5], -"namespacemembers_func.html":[0,1,1], "namespacemembers_func.html":[0,1,1,0], +"namespacemembers_func.html":[0,1,1], "namespacemembers_func_c.html":[0,1,1,1], "namespacemembers_func_d.html":[0,1,1,2], "namespacemembers_func_e.html":[0,1,1,3], @@ -164,8 +165,8 @@ var NAVTREEINDEX7 = "namespacemembers_z.html":[0,1,0,24], "namespaceop.html":[1,0,3], "namespaceop.html":[0,0,3], -"namespaceop_1_1_configure_error.html":[1,0,3,0], "namespaceop_1_1_configure_error.html":[0,0,3,0], +"namespaceop_1_1_configure_error.html":[1,0,3,0], "namespaceop_1_1_configure_log.html":[1,0,3,1], "namespaceop_1_1_configure_log.html":[0,0,3,1], "namespaces.html":[0,0], @@ -248,6 +249,5 @@ var NAVTREEINDEX7 = "pose_parameters_8hpp.html#a3df938ef93037c534c5d342720d5fb70":[2,0,0,0,9,7,13], "pose_parameters_8hpp.html#a522d4552d2aeabe367f4d3bf371e6b3e":[2,0,0,0,9,7,16], "pose_parameters_8hpp.html#a54a6c42a42a0a7e539061f5e30abb4bc":[2,0,0,0,9,7,9], -"pose_parameters_8hpp.html#a59616e83eefc182f8eb6ae51d6425938":[2,0,0,0,9,7,15], -"pose_parameters_8hpp.html#a692472b562fba051964f9ddea07b97d7":[2,0,0,0,9,7,2] +"pose_parameters_8hpp.html#a59616e83eefc182f8eb6ae51d6425938":[2,0,0,0,9,7,15] }; diff --git a/html/navtreeindex8.js b/html/navtreeindex8.js index 16c6a7ce..81a26297 100644 --- a/html/navtreeindex8.js +++ b/html/navtreeindex8.js @@ -1,5 +1,6 @@ var NAVTREEINDEX8 = { +"pose_parameters_8hpp.html#a692472b562fba051964f9ddea07b97d7":[2,0,0,0,9,7,2], "pose_parameters_8hpp.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7":[2,0,0,0,9,7,6], "pose_parameters_8hpp.html#aa170c3107396e4d0e4214bb0c1b78f08":[2,0,0,0,9,7,5], "pose_parameters_8hpp.html#aab3de911b04b96c1850cc05c6947e184":[2,0,0,0,9,7,0], @@ -248,6 +249,5 @@ var NAVTREEINDEX8 = "structop_1_1_point.html#a767e96d42e4653ac09ada5c5cdceb776":[1,0,3,13,21], 
"structop_1_1_point.html#a7688d4f0cadcf1970380519df4cfffd7":[1,0,3,13,7], "structop_1_1_point.html#a812d4ef29d102f4ad18f32ae54eb17ec":[1,0,3,13,25], -"structop_1_1_point.html#a81a09bde4e8e27ec6e8b4808f0f60f57":[1,0,3,13,17], -"structop_1_1_point.html#a9759f95e852025014cb071aefb242cef":[1,0,3,13,5] +"structop_1_1_point.html#a81a09bde4e8e27ec6e8b4808f0f60f57":[1,0,3,13,17] }; diff --git a/html/navtreeindex9.js b/html/navtreeindex9.js index 3157968a..cdb075a6 100644 --- a/html/navtreeindex9.js +++ b/html/navtreeindex9.js @@ -1,5 +1,6 @@ var NAVTREEINDEX9 = { +"structop_1_1_point.html#a9759f95e852025014cb071aefb242cef":[1,0,3,13,5], "structop_1_1_point.html#a9f80114d18ec8055360222d975bcd5a8":[1,0,3,13,0], "structop_1_1_point.html#aa389e5c56126074997b4a6fb1a471b45":[1,0,3,13,12], "structop_1_1_point.html#aa50152ec0736b622c159eb0ec8f7ad2b":[1,0,3,13,8], @@ -79,24 +80,25 @@ var NAVTREEINDEX9 = "structop_1_1_wrapper_struct_input.html#ae2078c540324a9cdc8500dce5d361bee":[1,0,3,125,2], "structop_1_1_wrapper_struct_input.html#ae7183e10862dbdbed422f042f1a71ed1":[1,0,3,125,13], "structop_1_1_wrapper_struct_output.html":[1,0,3,126], -"structop_1_1_wrapper_struct_output.html#a34d5796e02b8afaaaef99fc26e7f3056":[1,0,3,126,10], -"structop_1_1_wrapper_struct_output.html#a46207e61b143191a6cb79d397af43989":[1,0,3,126,0], +"structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de":[1,0,3,126,9], +"structop_1_1_wrapper_struct_output.html#a34d5796e02b8afaaaef99fc26e7f3056":[1,0,3,126,11], "structop_1_1_wrapper_struct_output.html#a4e18a93cfa9c6b47151427152a745817":[1,0,3,126,1], -"structop_1_1_wrapper_struct_output.html#a5f56c15d410645442b5ac21c316b9c85":[1,0,3,126,11], +"structop_1_1_wrapper_struct_output.html#a5f56c15d410645442b5ac21c316b9c85":[1,0,3,126,12], "structop_1_1_wrapper_struct_output.html#a6efbd77da8ebcea3abc4109bb2f39c0b":[1,0,3,126,8], -"structop_1_1_wrapper_struct_output.html#a73d6a5687ab8abac22f5cd29690b6277":[1,0,3,126,16], -"structop_1_1_wrapper_struct_output.html#a8a2f29bae4ff55195b10a83717a0f738":[1,0,3,126,13], +"structop_1_1_wrapper_struct_output.html#a73d6a5687ab8abac22f5cd29690b6277":[1,0,3,126,17], +"structop_1_1_wrapper_struct_output.html#a8a2f29bae4ff55195b10a83717a0f738":[1,0,3,126,14], "structop_1_1_wrapper_struct_output.html#a92040f009e2f9344037d93d6442ce117":[1,0,3,126,4], -"structop_1_1_wrapper_struct_output.html#a94538578a465c78788760cadb8ea86f6":[1,0,3,126,17], +"structop_1_1_wrapper_struct_output.html#a94538578a465c78788760cadb8ea86f6":[1,0,3,126,18], "structop_1_1_wrapper_struct_output.html#aaadbe4f8f765a751973ed1b405843c74":[1,0,3,126,7], "structop_1_1_wrapper_struct_output.html#aaf5df884418d2bf1df36505009264ece":[1,0,3,126,2], -"structop_1_1_wrapper_struct_output.html#aba2aed26f1d01771a0c896bc50e477a5":[1,0,3,126,12], +"structop_1_1_wrapper_struct_output.html#aba2aed26f1d01771a0c896bc50e477a5":[1,0,3,126,13], "structop_1_1_wrapper_struct_output.html#ac72ad601daf957b9b000206e9f1fe0ad":[1,0,3,126,3], -"structop_1_1_wrapper_struct_output.html#ad338fd4719d6f243bb64bc67f68bc7c9":[1,0,3,126,15], -"structop_1_1_wrapper_struct_output.html#ad595edffced2bfd80c3bee183f32f505":[1,0,3,126,18], -"structop_1_1_wrapper_struct_output.html#ad9ca696781cb7c250752f078f5c937fc":[1,0,3,126,14], -"structop_1_1_wrapper_struct_output.html#adca0d08aa43e2280ca06a8db0deb81bc":[1,0,3,126,9], +"structop_1_1_wrapper_struct_output.html#ad338fd4719d6f243bb64bc67f68bc7c9":[1,0,3,126,16], +"structop_1_1_wrapper_struct_output.html#ad595edffced2bfd80c3bee183f32f505":[1,0,3,126,19], 
+"structop_1_1_wrapper_struct_output.html#ad9ca696781cb7c250752f078f5c937fc":[1,0,3,126,15], +"structop_1_1_wrapper_struct_output.html#adca0d08aa43e2280ca06a8db0deb81bc":[1,0,3,126,10], "structop_1_1_wrapper_struct_output.html#add74785fc0cb1fc7d2c6b5f88b622d53":[1,0,3,126,6], +"structop_1_1_wrapper_struct_output.html#ae6af809e1b560ca018861ee8221bad21":[1,0,3,126,0], "structop_1_1_wrapper_struct_output.html#aec3a7ec85be0779930427ba3e806ea2c":[1,0,3,126,5], "structop_1_1_wrapper_struct_pose.html":[1,0,3,127], "structop_1_1_wrapper_struct_pose.html#a02c4ab6b56e4da4b3ed0da4eae8ac0fc":[1,0,3,127,15], @@ -247,7 +249,5 @@ var NAVTREEINDEX9 = "w_id_generator_8hpp_source.html":[2,0,0,0,11,12], "w_image_saver_8hpp.html":[2,0,0,0,4,18], "w_image_saver_8hpp.html#a505ea16cc6c2c0068bbf4e7269dc8e0a":[2,0,0,0,4,18,1], -"w_image_saver_8hpp_source.html":[2,0,0,0,4,18], -"w_joint_angle_estimation_8hpp.html":[2,0,0,0,0,4], -"w_joint_angle_estimation_8hpp_source.html":[2,0,0,0,0,4] +"w_image_saver_8hpp_source.html":[2,0,0,0,4,18] }; diff --git a/html/search/all_16.js b/html/search/all_16.js index 2f035d46..2a9ca419 100644 --- a/html/search/all_16.js +++ b/html/search/all_16.js @@ -87,8 +87,8 @@ var searchData= ['wkeypointscaler',['WKeypointScaler',['../classop_1_1_w_keypoint_scaler.html#a31624e262988b0840a8ddbf098e56e9b',1,'op::WKeypointScaler']]], ['wkeypointscaler',['WKeypointScaler',['../classop_1_1_w_keypoint_scaler.html',1,'op']]], ['wkeypointscaler_2ehpp',['wKeypointScaler.hpp',['../w_keypoint_scaler_8hpp.html',1,'']]], - ['wopoutputtocvmat',['WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html#a6f632a83de4cdc731c3f52d1541060f3',1,'op::WOpOutputToCvMat']]], ['wopoutputtocvmat',['WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html',1,'op']]], + ['wopoutputtocvmat',['WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html#a6f632a83de4cdc731c3f52d1541060f3',1,'op::WOpOutputToCvMat']]], ['wopoutputtocvmat_2ehpp',['wOpOutputToCvMat.hpp',['../w_op_output_to_cv_mat_8hpp.html',1,'']]], 
['work',['work',['../classop_1_1_w_pose_triangulation.html#a495b29e03933d750827acc0531c72c78',1,'op::WPoseTriangulation::work()'],['../classop_1_1_w_cv_mat_to_op_input.html#aa7faa9e2671a85d36aad3366a7958f58',1,'op::WCvMatToOpInput::work()'],['../classop_1_1_w_cv_mat_to_op_output.html#a0bf2e43d2586c83fdd5cb0b1b54aefca',1,'op::WCvMatToOpOutput::work()'],['../classop_1_1_w_keep_top_n_people.html#a5928a091e0990706ab2ea5e5e07629dd',1,'op::WKeepTopNPeople::work()'],['../classop_1_1_w_keypoint_scaler.html#aacad5116921e2ff746fbdf9f6c0cbb25',1,'op::WKeypointScaler::work()'],['../classop_1_1_w_op_output_to_cv_mat.html#ae3fc21569d56a648c606b23fcc016349',1,'op::WOpOutputToCvMat::work()'],['../classop_1_1_w_scale_and_size_extractor.html#afddf54d061dc5325e78252a3bba482b9',1,'op::WScaleAndSizeExtractor::work()'],['../classop_1_1_w_face_detector.html#a721ced99378516c04cb3cff296cc274a',1,'op::WFaceDetector::work()'],['../classop_1_1_w_face_detector_open_c_v.html#a4d3a4a29bcb7b8c141ae1917634ca4c9',1,'op::WFaceDetectorOpenCV::work()'],['../classop_1_1_w_face_extractor_net.html#aa47940fb2ed940a53c7a305ce45817a3',1,'op::WFaceExtractorNet::work()'],['../classop_1_1_w_face_renderer.html#aa52166ea2d5e0f201c94d5c4fe74216e',1,'op::WFaceRenderer::work()'],['../classop_1_1_w_gui_info_adder.html#ae90a68c6ef7b4f45595a020efd232612',1,'op::WGuiInfoAdder::work()'],['../classop_1_1_w_hand_detector.html#aa82ef40fad1d343b5856b41ec4dbcd5c',1,'op::WHandDetector::work()'],['../classop_1_1_w_hand_detector_from_txt.html#a51ebff94734350463fcf507a84eeefdc',1,'op::WHandDetectorFromTxt::work()'],['../classop_1_1_w_hand_detector_tracking.html#a7c849c5a423ffc150c6a4aee9055d34e',1,'op::WHandDetectorTracking::work()'],['../classop_1_1_w_hand_detector_update.html#af9287dc0a3c67abd35974c1c74614f3c',1,'op::WHandDetectorUpdate::work()'],['../classop_1_1_w_hand_extractor_net.html#a21ffee48567b1c7c8994e4effef6cffe',1,'op::WHandExtractorNet::work()'],['../classop_1_1_w_hand_renderer.html#ad178e8d413b3b15edc53625e1f5119d7',1,'op::WHandRenderer::work()'],['../classop_1_1_w_pose_extractor.html#ae0f02aaefccab05bbbd919dd7a9e0f61',1,'op::WPoseExtractor::work()'],['../classop_1_1_w_pose_extractor_net.html#a3d691e30c419c70e23a4d7b3c92adb4b',1,'op::WPoseExtractorNet::work()'],['../classop_1_1_w_pose_renderer.html#a10b1631d78d8270ed2a16e538b30eb76',1,'op::WPoseRenderer::work()'],['../classop_1_1_sub_thread.html#a14330cbc1117f32b6d69c1733ccdeb61',1,'op::SubThread::work()'],['../classop_1_1_sub_thread_no_queue.html#acb7edd02e1724e0fd131235666009f42',1,'op::SubThreadNoQueue::work()'],['../classop_1_1_sub_thread_queue_in.html#a7e9bd6ca09bb77a8de76ae8a02ee8ed4',1,'op::SubThreadQueueIn::work()'],['../classop_1_1_sub_thread_queue_in_out.html#abb65911e9d9b6d5efe782ca0e599be3b',1,'op::SubThreadQueueInOut::work()'],['../classop_1_1_sub_thread_queue_out.html#a0ff5f79e63038ffa5b4aca24cfea7e7c',1,'op::SubThreadQueueOut::work()'],['../classop_1_1_w_id_generator.html#a03bd005cf88749702fb8a29c20d4cb91',1,'op::WIdGenerator::work()'],['../classop_1_1_worker.html#a9acadd6df7af03b31b9e354ae815f781',1,'op::Worker::work()'],['../classop_1_1_worker_consumer.html#a7383747b3bdc6ac79e6f9afbf2c28d27',1,'op::WorkerConsumer::work()'],['../classop_1_1_worker_producer.html#a0259f0b387e2b868388ba0a6769f4691',1,'op::WorkerProducer::work()'],['../classop_1_1_w_queue_assembler.html#a19d61a7a4d44c7a3cf78ecdb461be769',1,'op::WQueueAssembler::work()'],['../classop_1_1_w_queue_orderer.html#a1ea314eeaa8d99fbf33885d9a4c6d044',1,'op::WQueueOrderer::work()'],['../classop_1_1_w_person_id_extractor.
html#a4066bf1c8cad753c74de1ceabdd76505',1,'op::WPersonIdExtractor::work()']]], ['workconsumer',['workConsumer',['../classop_1_1_w_coco_json_saver.html#af152a61abc9ab46da651c9d87e6775f0',1,'op::WCocoJsonSaver::workConsumer()'],['../classop_1_1_w_face_saver.html#a026bfad8cd9e0d1289a1db473cef34a0',1,'op::WFaceSaver::workConsumer()'],['../classop_1_1_w_hand_saver.html#afc3976b394070927b9396163137317e5',1,'op::WHandSaver::workConsumer()'],['../classop_1_1_w_heat_map_saver.html#a5fd729a47f0cdbe94001219f971f8f51',1,'op::WHeatMapSaver::workConsumer()'],['../classop_1_1_w_image_saver.html#a198bbfcf625354ddda419e0121d0cb33',1,'op::WImageSaver::workConsumer()'],['../classop_1_1_w_people_json_saver.html#af874a16a06a9a3452a0e3792ac15647e',1,'op::WPeopleJsonSaver::workConsumer()'],['../classop_1_1_w_pose_saver.html#a039027281498168b57df8dfeefd82cd8',1,'op::WPoseSaver::workConsumer()'],['../classop_1_1_w_udp_sender.html#a615fc6a537ca9f624022698391c11a54',1,'op::WUdpSender::workConsumer()'],['../classop_1_1_w_video_saver.html#a40bcb8ccf137c6cbee3ca31e6cc3bfbf',1,'op::WVideoSaver::workConsumer()'],['../classop_1_1_w_gui.html#a664e1f76211510e38b8d5f5bed37ffcb',1,'op::WGui::workConsumer()'],['../classop_1_1_w_gui3_d.html#afe019cff8fd5ed2f59f59d886de7473a',1,'op::WGui3D::workConsumer()'],['../classop_1_1_worker_consumer.html#a26cf5c40df363d94d603fce92a5b69eb',1,'op::WorkerConsumer::workConsumer()']]], @@ -108,26 +108,26 @@ var searchData= ['wpersonidextractor',['WPersonIdExtractor',['../classop_1_1_w_person_id_extractor.html#a14a6cc9c6c70acd4847482fd71e4972b',1,'op::WPersonIdExtractor']]], ['wpersonidextractor',['WPersonIdExtractor',['../classop_1_1_w_person_id_extractor.html',1,'op']]], ['wpersonidextractor_2ehpp',['wPersonIdExtractor.hpp',['../w_person_id_extractor_8hpp.html',1,'']]], - ['wposeextractor',['WPoseExtractor',['../classop_1_1_w_pose_extractor.html',1,'op']]], ['wposeextractor',['WPoseExtractor',['../classop_1_1_w_pose_extractor.html#ae85b1ec41bf47dcf1aed7bdae1d91915',1,'op::WPoseExtractor']]], + ['wposeextractor',['WPoseExtractor',['../classop_1_1_w_pose_extractor.html',1,'op']]], ['wposeextractor_2ehpp',['wPoseExtractor.hpp',['../w_pose_extractor_8hpp.html',1,'']]], - ['wposeextractornet',['WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html',1,'op']]], ['wposeextractornet',['WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html#aa0f6b7ec6f36fe2a27649ac2c7490c09',1,'op::WPoseExtractorNet']]], + ['wposeextractornet',['WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html',1,'op']]], ['wposeextractornet_2ehpp',['wPoseExtractorNet.hpp',['../w_pose_extractor_net_8hpp.html',1,'']]], - ['wposerenderer',['WPoseRenderer',['../classop_1_1_w_pose_renderer.html#ae74189143175b89ccd36662cec4de72e',1,'op::WPoseRenderer']]], ['wposerenderer',['WPoseRenderer',['../classop_1_1_w_pose_renderer.html',1,'op']]], + ['wposerenderer',['WPoseRenderer',['../classop_1_1_w_pose_renderer.html#ae74189143175b89ccd36662cec4de72e',1,'op::WPoseRenderer']]], ['wposerenderer_2ehpp',['wPoseRenderer.hpp',['../w_pose_renderer_8hpp.html',1,'']]], - ['wposesaver',['WPoseSaver',['../classop_1_1_w_pose_saver.html',1,'op']]], ['wposesaver',['WPoseSaver',['../classop_1_1_w_pose_saver.html#aa9dd0f4649c9e8efef10201caf9e4cfd',1,'op::WPoseSaver']]], + ['wposesaver',['WPoseSaver',['../classop_1_1_w_pose_saver.html',1,'op']]], ['wposesaver_2ehpp',['wPoseSaver.hpp',['../w_pose_saver_8hpp.html',1,'']]], ['wposetriangulation',['WPoseTriangulation',['../classop_1_1_w_pose_triangulation.html',1,'op']]], 
['wposetriangulation',['WPoseTriangulation',['../classop_1_1_w_pose_triangulation.html#a439c75d19eae34fdd20f2f1c4ee18e48',1,'op::WPoseTriangulation']]], ['wposetriangulation_2ehpp',['wPoseTriangulation.hpp',['../w_pose_triangulation_8hpp.html',1,'']]], - ['wqueueassembler',['WQueueAssembler',['../classop_1_1_w_queue_assembler.html',1,'op']]], ['wqueueassembler',['WQueueAssembler',['../classop_1_1_w_queue_assembler.html#a8d73df6a562bd797e16d2bb3ced83b6d',1,'op::WQueueAssembler']]], + ['wqueueassembler',['WQueueAssembler',['../classop_1_1_w_queue_assembler.html',1,'op']]], ['wqueueassembler_2ehpp',['wQueueAssembler.hpp',['../w_queue_assembler_8hpp.html',1,'']]], - ['wqueueorderer',['WQueueOrderer',['../classop_1_1_w_queue_orderer.html#a3303add5fa8cc36593d3d859ffdd8ae0',1,'op::WQueueOrderer']]], ['wqueueorderer',['WQueueOrderer',['../classop_1_1_w_queue_orderer.html',1,'op']]], + ['wqueueorderer',['WQueueOrderer',['../classop_1_1_w_queue_orderer.html#a3303add5fa8cc36593d3d859ffdd8ae0',1,'op::WQueueOrderer']]], ['wqueueorderer_2ehpp',['wQueueOrderer.hpp',['../w_queue_orderer_8hpp.html',1,'']]], ['wrapper',['Wrapper',['../classcl_1_1detail_1_1_wrapper.html#aa1b916a75114fe3217018378d5f1eb71',1,'cl::detail::Wrapper::Wrapper()'],['../classcl_1_1detail_1_1_wrapper.html#a98dce667e0f400f2ba15a8a8d65fdea0',1,'cl::detail::Wrapper::Wrapper(const cl_type &obj, bool retainObject)'],['../classcl_1_1detail_1_1_wrapper.html#af91bafdf8593c34e0d0722cdd93010be',1,'cl::detail::Wrapper::Wrapper(const Wrapper< cl_type > &rhs)'],['../classcl_1_1detail_1_1_wrapper.html#a115618e2baf10ec5c09513394d985ea6',1,'cl::detail::Wrapper::Wrapper(Wrapper< cl_type > &&rhs) CL_HPP_NOEXCEPT_'],['../classcl_1_1detail_1_1_wrapper_3_01cl__device__id_01_4.html#a6881f47b159f53e2efefc3325127bdc5',1,'cl::detail::Wrapper< cl_device_id >::Wrapper()'],['../classcl_1_1detail_1_1_wrapper_3_01cl__device__id_01_4.html#a69a1b6a912ec468216ddfec1f5b6598e',1,'cl::detail::Wrapper< cl_device_id >::Wrapper(const cl_type &obj, bool retainObject)'],['../classcl_1_1detail_1_1_wrapper_3_01cl__device__id_01_4.html#a0abf5113a5123b8de94685ddfa214bd0',1,'cl::detail::Wrapper< cl_device_id >::Wrapper(const Wrapper< cl_type > &rhs)'],['../classcl_1_1detail_1_1_wrapper_3_01cl__device__id_01_4.html#a6a4506b8f3920fb2487b1a5448a6f10c',1,'cl::detail::Wrapper< cl_device_id >::Wrapper(Wrapper< cl_type > &&rhs) CL_HPP_NOEXCEPT_'],['../namespaceop.html#a3434981351c0d4d04eac3b29dfc22fbd',1,'op::Wrapper()']]], ['wrapper',['Wrapper',['../classcl_1_1detail_1_1_wrapper.html',1,'cl::detail']]], @@ -143,20 +143,20 @@ var searchData= ['wrapper_3c_20cl_5fsampler_20_3e',['Wrapper< cl_sampler >',['../classcl_1_1detail_1_1_wrapper.html',1,'cl::detail']]], ['wrapperauxiliary_2ehpp',['wrapperAuxiliary.hpp',['../wrapper_auxiliary_8hpp.html',1,'']]], ['wrapperconfiguresanitychecks',['wrapperConfigureSanityChecks',['../namespaceop.html#a8d9107e83f9cbc69f195199f0aef4385',1,'op']]], - ['wrapperstructextra',['WrapperStructExtra',['../structop_1_1_wrapper_struct_extra.html#a70cdc27c953962810333fafe011f86dd',1,'op::WrapperStructExtra']]], ['wrapperstructextra',['WrapperStructExtra',['../structop_1_1_wrapper_struct_extra.html',1,'op']]], + ['wrapperstructextra',['WrapperStructExtra',['../structop_1_1_wrapper_struct_extra.html#a70cdc27c953962810333fafe011f86dd',1,'op::WrapperStructExtra']]], ['wrapperstructextra_2ehpp',['wrapperStructExtra.hpp',['../wrapper_struct_extra_8hpp.html',1,'']]], ['wrapperstructface',['WrapperStructFace',['../structop_1_1_wrapper_struct_face.html',1,'op']]], 
['wrapperstructface',['WrapperStructFace',['../structop_1_1_wrapper_struct_face.html#ac60accbce8d798adbc3b8a76e6e4f879',1,'op::WrapperStructFace']]], ['wrapperstructface_2ehpp',['wrapperStructFace.hpp',['../wrapper_struct_face_8hpp.html',1,'']]], - ['wrapperstructhand',['WrapperStructHand',['../structop_1_1_wrapper_struct_hand.html',1,'op']]], ['wrapperstructhand',['WrapperStructHand',['../structop_1_1_wrapper_struct_hand.html#aaa42d271c3cb7fa3a071d4785644c0a8',1,'op::WrapperStructHand']]], + ['wrapperstructhand',['WrapperStructHand',['../structop_1_1_wrapper_struct_hand.html',1,'op']]], ['wrapperstructhand_2ehpp',['wrapperStructHand.hpp',['../wrapper_struct_hand_8hpp.html',1,'']]], - ['wrapperstructinput',['WrapperStructInput',['../structop_1_1_wrapper_struct_input.html',1,'op']]], ['wrapperstructinput',['WrapperStructInput',['../structop_1_1_wrapper_struct_input.html#a529d945c83bb69ac1db7b8db371e6622',1,'op::WrapperStructInput']]], + ['wrapperstructinput',['WrapperStructInput',['../structop_1_1_wrapper_struct_input.html',1,'op']]], ['wrapperstructinput_2ehpp',['wrapperStructInput.hpp',['../wrapper_struct_input_8hpp.html',1,'']]], - ['wrapperstructoutput',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html#a46207e61b143191a6cb79d397af43989',1,'op::WrapperStructOutput']]], ['wrapperstructoutput',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html',1,'op']]], + ['wrapperstructoutput',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html#ae6af809e1b560ca018861ee8221bad21',1,'op::WrapperStructOutput']]], ['wrapperstructoutput_2ehpp',['wrapperStructOutput.hpp',['../wrapper_struct_output_8hpp.html',1,'']]], ['wrapperstructpose',['WrapperStructPose',['../structop_1_1_wrapper_struct_pose.html',1,'op']]], ['wrapperstructpose',['WrapperStructPose',['../structop_1_1_wrapper_struct_pose.html#a5ea7564df25975c4279547acb885376a',1,'op::WrapperStructPose']]], @@ -167,6 +167,7 @@ var searchData= ['writebvh',['writeBvh',['../structop_1_1_wrapper_struct_output.html#add74785fc0cb1fc7d2c6b5f88b622d53',1,'op::WrapperStructOutput']]], ['writecocofootjson',['writeCocoFootJson',['../structop_1_1_wrapper_struct_output.html#aaadbe4f8f765a751973ed1b405843c74',1,'op::WrapperStructOutput']]], ['writecocojson',['writeCocoJson',['../structop_1_1_wrapper_struct_output.html#a6efbd77da8ebcea3abc4109bb2f39c0b',1,'op::WrapperStructOutput']]], + ['writecocojsonvariant',['writeCocoJsonVariant',['../structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de',1,'op::WrapperStructOutput']]], ['writeheatmaps',['writeHeatMaps',['../structop_1_1_wrapper_struct_output.html#adca0d08aa43e2280ca06a8db0deb81bc',1,'op::WrapperStructOutput']]], ['writeheatmapsformat',['writeHeatMapsFormat',['../structop_1_1_wrapper_struct_output.html#a34d5796e02b8afaaaef99fc26e7f3056',1,'op::WrapperStructOutput']]], ['writeimages',['writeImages',['../structop_1_1_wrapper_struct_output.html#a5f56c15d410645442b5ac21c316b9c85',1,'op::WrapperStructOutput']]], @@ -184,7 +185,7 @@ var searchData= ['wudpsender',['WUdpSender',['../classop_1_1_w_udp_sender.html',1,'op']]], ['wudpsender',['WUdpSender',['../classop_1_1_w_udp_sender.html#a22a5ec90fe83ed654bd0aef112fac98b',1,'op::WUdpSender']]], ['wudpsender_2ehpp',['wUdpSender.hpp',['../w_udp_sender_8hpp.html',1,'']]], - ['wvideosaver',['WVideoSaver',['../classop_1_1_w_video_saver.html',1,'op']]], ['wvideosaver',['WVideoSaver',['../classop_1_1_w_video_saver.html#a04dc4e6f039d047a0da6f94283c145d9',1,'op::WVideoSaver']]], + 
['wvideosaver',['WVideoSaver',['../classop_1_1_w_video_saver.html',1,'op']]], ['wvideosaver_2ehpp',['wVideoSaver.hpp',['../w_video_saver_8hpp.html',1,'']]] ]; diff --git a/html/search/all_2.js b/html/search/all_2.js index a80974d6..0500d790 100644 --- a/html/search/all_2.js +++ b/html/search/all_2.js @@ -60,7 +60,7 @@ var searchData= ['coco_5fchallenge',['COCO_CHALLENGE',['../namespaceop.html#a59616e83eefc182f8eb6ae51d6425938',1,'op']]], ['cocojsonformat',['CocoJsonFormat',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715ac',1,'op']]], ['cocojsonsaver',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html',1,'op']]], - ['cocojsonsaver',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a0ce96fcd7dfaa2791f514a8363d018ff',1,'op::CocoJsonSaver']]], + ['cocojsonsaver',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a380d4638d09cfe9c26551cc2efb7c3a8',1,'op::CocoJsonSaver']]], ['cocojsonsaver_2ehpp',['cocoJsonSaver.hpp',['../coco_json_saver_8hpp.html',1,'']]], ['comma',['comma',['../classop_1_1_json_ofstream.html#ae4468279f789c8026d431b2ef62646f9',1,'op::JsonOfstream']]], ['commandqueue',['CommandQueue',['../classcl_1_1_command_queue.html#a8462de408ebfaf6332429a92b7938490',1,'cl::CommandQueue::CommandQueue(cl_command_queue_properties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a05ae8e9e44e9b28fc755937490535550',1,'cl::CommandQueue::CommandQueue(QueueProperties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#adcb116a4e1a53417dad3d3850c33d42f',1,'cl::CommandQueue::CommandQueue(const Context &context, cl_command_queue_properties properties=0, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a975c3dce399cb77a5c6b0e294e4778fe',1,'cl::CommandQueue::CommandQueue(const Context &context, QueueProperties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a887826e515b03224aec87b33ac59f327',1,'cl::CommandQueue::CommandQueue(const Context &context, const Device &device, cl_command_queue_properties properties=0, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a8dd467bd94839921b7318cb30f92b5a4',1,'cl::CommandQueue::CommandQueue(const Context &context, const Device &device, QueueProperties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#ab1beb7ab6e619a4e050d87c70bb001a6',1,'cl::CommandQueue::CommandQueue()'],['../classcl_1_1_command_queue.html#a09be675998c51f36aa7744b47eabebd3',1,'cl::CommandQueue::CommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)'],['../classcl_1_1_command_queue.html#ae2ce434c5100e5b467ea8c6561e1f11e',1,'cl::CommandQueue::CommandQueue(const CommandQueue &queue)'],['../classcl_1_1_command_queue.html#a1c5830678be567e34dccbb0065c89b21',1,'cl::CommandQueue::CommandQueue(CommandQueue &&queue) CL_HPP_NOEXCEPT_']]], diff --git a/html/search/all_3.js b/html/search/all_3.js index 87aa4ea1..95b95c85 100644 --- a/html/search/all_3.js +++ b/html/search/all_3.js @@ -13,12 +13,12 @@ var searchData= ['deallocate',['deallocate',['../classcl_1_1_s_v_m_allocator.html#a695f4693c4245c66285e7f4a0405cf84',1,'cl::SVMAllocator']]], ['default_5fx',['DEFAULT_X',['../classop_1_1_profiler.html#a13de5fe55b2599c0626d5071d3851dec',1,'op::Profiler']]], ['defaultparttorender',['defaultPartToRender',['../structop_1_1_wrapper_struct_pose.html#ab6810e97aa62a728aa09dbbe6b9b6c06',1,'op::WrapperStructPose']]], - ['define_5fbool',['DEFINE_bool',['../flags_8hpp.html#a0e9cb0627d386e4d69e5eae0593d70ad',1,'DEFINE_bool(disable_multi_thread, false,"It would slightly reduce the frame rate in order to 
highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error."): flags.hpp'],['../flags_8hpp.html#a7fc3e871114ac35447499ffda035d51e',1,'DEFINE_bool(flir_camera, false,"Whether to use FLIR (Point-Grey) stereo camera."): flags.hpp'],['../flags_8hpp.html#a64c46584d79ef0b947ecd0a98b282839',1,'DEFINE_bool(frame_flip, false,"Flip/mirror each frame (e.g. for real time webcam demonstrations)."): flags.hpp'],['../flags_8hpp.html#a6a501f229de1ba19801b77fd0565527b',1,'DEFINE_bool(frames_repeat, false,"Repeat frames when finished."): flags.hpp'],['../flags_8hpp.html#a5c66f20b5a9a1bc44d3c685fd37ff7f9',1,'DEFINE_bool(process_real_time, false,"Enable to keep the original source frame rate (e.g. for video). If the processing time is"" too long, it will skip frames. If it is too fast, it will slow it down."): flags.hpp'],['../flags_8hpp.html#ad739178d4295e31edd2f1e468bd2e600',1,'DEFINE_bool(frame_keep_distortion, false,"If false (default), it will undistortionate the image based on the"" `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.,"" it will leave it as it is."): flags.hpp'],['../flags_8hpp.html#a5e8200fc5fd8bc1e80b94b029ea8e18d',1,'DEFINE_bool(body_disable, false,"Disable body keypoint detection. Option only possible for faster (but less accurate) face"" keypoint detection."): flags.hpp'],['../flags_8hpp.html#a669c6f4820af0275f40808b18147b98c',1,'DEFINE_bool(heatmaps_add_parts, false,"If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"" analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps."" If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential"" memory order: body parts + bkg + PAFs. It will follow the order on"" POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will"" considerably decrease. Not required for OpenPose, enable it only if you intend to"" explicitly use this information later."): flags.hpp'],['../flags_8hpp.html#af57ca90aafa15b707af299527b0aff6f',1,'DEFINE_bool(heatmaps_add_bkg, false,"Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to"" background."): flags.hpp'],['../flags_8hpp.html#ac881df85c5be736d05822ad98ac0496f',1,'DEFINE_bool(heatmaps_add_PAFs, false,"Same functionality as `add_heatmaps_parts`, but adding the PAFs."): flags.hpp'],['../flags_8hpp.html#aa941a6193b13f3d138437cf2a84e73ff',1,'DEFINE_bool(part_candidates, false,"Also enable `write_json` in order to save this information. If true, it will fill the"" op::Datum::poseCandidates array with the body part candidates. Candidates refer to all"" the detected body parts, before being assembled into people. Note that the number of"" candidates is equal or higher than the number of final body parts (i.e. after being"" assembled into people). The empty body parts are filled with 0s. Program speed will"" slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly"" use this information."): flags.hpp'],['../flags_8hpp.html#aea11a0489c2af278990d3ddff10960fd',1,'DEFINE_bool(face, false,"Enables face keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. Note that this will considerable slow down the performance and increse"" the required GPU memory. 
In addition, the greater number of people on the image, the"" slower OpenPose will be."): flags.hpp'],['../flags_8hpp.html#a4da8f1bc0afb58f2a757ab6d90c6042e',1,'DEFINE_bool(hand, false,"Enables hand keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. Analogously to `--face`, it will also slow down the performance, increase"" the required GPU memory and its speed depends on the number of people."): flags.hpp'],['../flags_8hpp.html#a0a28dd6ee5503f070d08cc9e4a3aff05',1,'DEFINE_bool(hand_tracking, false,"Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"" is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it"" simply looks for hands in positions at which hands were located in previous frames, but"" it does not guarantee the same person ID among frames."): flags.hpp'],['../flags_8hpp.html#a4c800b6a91b02874d006da33ab8432cd',1,'DEFINE_bool(3d, false,"Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."" 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"" results. Note that it will only display 1 person. If multiple people is present, it will"" fail."): flags.hpp'],['../flags_8hpp.html#ac210b56a3c46ff06143a62b0bb725cc6',1,'DEFINE_bool(identification, false,"Experimental, not available yet. Whether to enable people identification across frames."): flags.hpp'],['../flags_8hpp.html#a1a9ffc302eac69c8ba024da3edf01a5f',1,'DEFINE_bool(disable_blending, false,"If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"" background, instead of being rendered into the original image. Related: `part_to_show`,"" `alpha_pose`, and `alpha_pose`."): flags.hpp'],['../flags_8hpp.html#a231bec7b2113f31cebb760c849ca2841',1,'DEFINE_bool(fullscreen, false,"Run in full-screen mode (press f during runtime to toggle)."): flags.hpp'],['../flags_8hpp.html#a14f23a4c6dd6c9290e3ee44bd79f96bf',1,'DEFINE_bool(no_gui_verbose, false,"Do not write text on output images on GUI (e.g. number of current frame and people). It"" does not affect the pose rendering."): flags.hpp']]], - ['define_5fdouble',['DEFINE_double',['../flags_8hpp.html#a29f9621cd9361deec9512f8d8b53e6ba',1,'DEFINE_double(camera_fps, 30.0,"Frame rate for the webcam (also used when saving video). Set this value to the minimum"" value between the OpenPose displayed speed and the webcam real frame rate."): flags.hpp'],['../flags_8hpp.html#adf35e9261afec1673a42de6197f07f42',1,'DEFINE_double(scale_gap, 0.3,"Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."" If you want to change the initial scale, you actually want to multiply the"" `net_resolution` by your desired initial scale."): flags.hpp'],['../flags_8hpp.html#a94efdbeb7b9adb554d34399279312b8f',1,'DEFINE_double(hand_scale_range, 0.4,"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if"" scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."): flags.hpp'],['../flags_8hpp.html#afd21fc201e4b269bf0b07b8fce886173',1,'DEFINE_double(render_threshold, 0.05,"Only estimated keypoints whose score confidences are higher than this threshold will be"" rendered. 
Generally, a high threshold (> 0.5) will only render very clear body parts;"" while small thresholds (~0.1) will also output guessed and occluded keypoints, but also"" more false positives (i.e. wrong detections)."): flags.hpp'],['../flags_8hpp.html#ad971ae1068651177e7a78ae68c022792',1,'DEFINE_double(alpha_pose, 0.6,"Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will"" hide it. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a62bfc56794bb8ceaedd0eb7bc32a0f22',1,'DEFINE_double(alpha_heatmap, 0.7,"Blending factor (range 0-1) between heatmap and original frame. 1 will only show the"" heatmap, 0 will only show the frame. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a53eb4c15af968876f31ca0be54d251b3',1,'DEFINE_double(face_render_threshold, 0.4,"Analogous to `render_threshold`, but applied to the face keypoints."): flags.hpp'],['../flags_8hpp.html#af654373ad667b1683f30e350331ea709',1,'DEFINE_double(face_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to face."): flags.hpp'],['../flags_8hpp.html#a71043931875d1f5ec677fd69ae96c632',1,'DEFINE_double(face_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to face."): flags.hpp'],['../flags_8hpp.html#a6b12d681ace972ae8eede484505c50af',1,'DEFINE_double(hand_render_threshold, 0.2,"Analogous to `render_threshold`, but applied to the hand keypoints."): flags.hpp'],['../flags_8hpp.html#a890b2b8df8a57fe4e9baa465c6584ccf',1,'DEFINE_double(hand_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to hand."): flags.hpp'],['../flags_8hpp.html#ac17a7af1030d7f5bd69672fec961c083',1,'DEFINE_double(hand_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to hand."): flags.hpp']]], - ['define_5fint32',['DEFINE_int32',['../flags_8hpp.html#a73ee51843ead02ee6358fe39dcbeffde',1,'DEFINE_int32(logging_level, 3,"The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"" low priority messages and 4 for important ones."): flags.hpp'],['../flags_8hpp.html#a8e9de971b409cfe7fdded7f0d47c502d',1,'DEFINE_int32(profile_speed, 1000,"If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"" runtime statistics at this frame number."): flags.hpp'],['../flags_8hpp.html#a10efaeb1ea3a8478388cc7d0bfd4e59e',1,'DEFINE_int32(camera,-1,"The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative"" number (by default), to auto-detect and open the first available camera."): flags.hpp'],['../flags_8hpp.html#ab41c02abe3634f0db65123ecda964a31',1,'DEFINE_int32(flir_camera_index,-1,"Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir"" camera index to run, where 0 corresponds to the detected flir camera with the lowest"" serial number, and `n` to the `n`-th lowest serial number camera."): flags.hpp'],['../flags_8hpp.html#a6c9c8a5843dd8b93e009bf29dc31cde2',1,'DEFINE_int32(frame_rotate, 0,"Rotate each frame, 4 possible values: 0, 90, 180, 270."): flags.hpp'],['../flags_8hpp.html#a284252d3d255ad5e5c35815d720fd67a',1,'DEFINE_int32(num_gpu,-1,"The number of GPU devices to use. 
If negative, it will use all the available GPUs in your"" machine."): flags.hpp'],['../flags_8hpp.html#a8511765700f652000f2c1c2b1a5df9f9',1,'DEFINE_int32(num_gpu_start, 0,"GPU device start number."): flags.hpp'],['../flags_8hpp.html#a18fc454ffeef53c7c73d69c67d1a73fc',1,'DEFINE_int32(keypoint_scale, 0,"Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)"" coordinates that will be saved with the `write_json` & `write_keypoint` flags."" Select `0` to scale it to the original source resolution; `1`to scale it to the net output"" size (set with `net_resolution`); `2` to scale it to the final output size (set with"" `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left"" corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where"" (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non"" related with `scale_number` and `scale_gap`."): flags.hpp'],['../flags_8hpp.html#aa5a1826a500d7131fefb480ccd1713fb',1,'DEFINE_int32(number_people_max,-1,"This parameter will limit the maximum number of people detected, by keeping the people with"" top scores. The score is based in person area over the image, body part score, as well as"" joint score (between each pair of connected body parts). Useful if you know the exact"" number of people in the scene, so it can remove false positives (if all the people have"" been detected. However, it might also include false negatives by removing very small or"" highly occluded people. -1 will keep them all."): flags.hpp'],['../flags_8hpp.html#a1edea5f45026b353b6e6cc3b196767a0',1,'DEFINE_int32(scale_number, 1,"Number of scales to average."): flags.hpp'],['../flags_8hpp.html#aa80e5f9914dd35c852941282aa229b21',1,'DEFINE_int32(heatmaps_scale, 2,"Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer"" rounded [0,255]; and 3 for no scaling."): flags.hpp'],['../flags_8hpp.html#a311e487137e17445e6939abb7cc0cf8f',1,'DEFINE_int32(hand_scale_number, 1,"Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"" were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."): flags.hpp'],['../flags_8hpp.html#aa797dd033c6f3c4d2654e7000939d270',1,'DEFINE_int32(3d_min_views,-1,"Minimum number of views required to reconstruct each keypoint. By default (-1), it will"" require all the cameras to see the keypoint in order to reconstruct it."): flags.hpp'],['../flags_8hpp.html#a20c481950df0272f0b7b0cde67d8e72a',1,'DEFINE_int32(3d_views, 1,"Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per"" iteration, allowing tasks such as stereo camera processing (`--3d`). Note that"" `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the"" parameter folder as this number indicates."): flags.hpp'],['../flags_8hpp.html#afdfef14901f7b5e324d6983845f5ab50',1,'DEFINE_int32(tracking,-1,"Experimental, not available yet. Whether to enable people tracking across frames. The"" value indicates the number of frames where tracking is run between each OpenPose keypoint"" detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint"" detector and tracking for potentially higher accurary than only OpenPose."): flags.hpp'],['../flags_8hpp.html#a072bd893f4003b48bc7c99735eeeed39',1,'DEFINE_int32(ik_threads, 0,"Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D"" keypoints to obtain 3-D joint angles. 
By default (0 threads), it is disabled. Increasing"" the number of threads will increase the speed but also the global system latency."): flags.hpp'],['../flags_8hpp.html#a4cead735de5b43cfcae5c1139df3be1a',1,'DEFINE_int32(part_to_show, 0,"Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body"" part heat map, 19 for the background heat map, 20 for all the body part heat maps"" together, 21 for all the PAFs, 22-40 for each body part pair PAF."): flags.hpp'],['../flags_8hpp.html#ab2af299b6380dcd6dc06a95cceb056d4',1,'DEFINE_int32(render_pose,-1,"Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"" (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if"" CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render"" both `outputData` and `cvOutputData` with the original image and desired body part to be"" shown (i.e. keypoints, heat maps or PAFs)."): flags.hpp'],['../flags_8hpp.html#ad0269da28dc2033e23b8ea84b7e793a2',1,'DEFINE_int32(face_render,-1,"Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#ada5b7fef5063818fd668359e9e0b0504',1,'DEFINE_int32(hand_render,-1,"Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#a9a34e10f75069cf3283e535a77006775',1,'DEFINE_int32(display,-1,"Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server"" and/or to slightly speed up the processing if visual output is not required); 2 for 2-D"" display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."): flags.hpp']]], - ['define_5fstring',['DEFINE_string',['../flags_8hpp.html#a9e7b1394aea185360a1fe19cf4e20a89',1,'DEFINE_string(camera_resolution,"-1x-1","Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the"" default 1280x720 for `--camera`, or the maximum flir camera resolution available for"" `--flir_camera`"): flags.hpp'],['../flags_8hpp.html#aab02d4078b5f7999a582d9c5f4248676',1,'DEFINE_string(video,"","Use a video file instead of the camera. Use `examples/media/video.avi` for our default"" example video."): flags.hpp'],['../flags_8hpp.html#a7773e867133822f5601899975dc06adb',1,'DEFINE_string(image_dir,"","Process a directory of images. Use `examples/media/` for our default example folder with 20"" images. Read all standard formats (jpg, png, bmp, etc.)."): flags.hpp'],['../flags_8hpp.html#a5db17b8bc6df4fe40b556af7157dcbf7',1,'DEFINE_string(ip_camera,"","String with the IP camera URL. It supports protocols like RTSP and HTTP."): flags.hpp'],['../flags_8hpp.html#aff61246512375ff5941dc4110e127ca3',1,'DEFINE_string(camera_parameter_folder,"models/cameraParameters/flir/","String with the folder where the camera parameters are located."): flags.hpp'],['../flags_8hpp.html#a9ab689ebe20a261b20587af79123e79a',1,'DEFINE_string(model_folder,"models/","Folder path (absolute or relative) where the models (pose, face, ...) are located."): flags.hpp'],['../flags_8hpp.html#a85be61a31eaa438a7e9c7d2baf51da47',1,'DEFINE_string(output_resolution,"-1x-1","The image resolution (display and output). 
Use \"-1x-1\" to force the program to use the"" input image resolution."): flags.hpp'],['../flags_8hpp.html#acfd124be44003a59f2591c7584fd3c75',1,'DEFINE_string(model_pose,"BODY_25","Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), ""`MPI_4_layers` (15 keypoints, even faster but less accurate)."): flags.hpp'],['../flags_8hpp.html#a830bcfa6645bf39a18f59d3b72f75edf',1,'DEFINE_string(net_resolution,"-1x368","Multiples of 16. If it is increased, the accuracy potentially increases. If it is"" decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"" closest aspect ratio possible to the images or videos to be processed. Using `-1` in"" any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"" input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"" e.g. full HD (1980x1080) and HD (1280x720) resolutions."): flags.hpp'],['../flags_8hpp.html#af5ee5f61f0d36a03bb8647408f5e236b',1,'DEFINE_string(face_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint"" detector. 320x320 usually works fine while giving a substantial speed up when multiple"" faces on the image."): flags.hpp'],['../flags_8hpp.html#afaf97bbf6a49576782d25147bc865bed',1,'DEFINE_string(hand_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint"" detector."): flags.hpp'],['../flags_8hpp.html#a7a3597e9216885470199ca1578eb7f69',1,'DEFINE_string(write_images,"","Directory to write rendered frames in `write_images_format` image format."): flags.hpp'],['../flags_8hpp.html#ab077893503ebb5fba8cb300bd5f93d62',1,'DEFINE_string(write_images_format,"png","File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV"" function cv::imwrite for all compatible extensions."): flags.hpp'],['../flags_8hpp.html#a3051d21eb51cc39eed5f781d8eaed960',1,'DEFINE_string(write_video,"","Full file path to write rendered frames in motion JPEG video format. It might fail if the"" final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"" `camera_fps` controls FPS."): flags.hpp'],['../flags_8hpp.html#a9aa48b2ab293842bc42b96df9e97c9b8',1,'DEFINE_string(write_json,"","Directory to write OpenPose output in JSON format. It includes body, hand, and face pose"" keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."): flags.hpp'],['../flags_8hpp.html#a26d1c7340fc87d4593dda754d54145a2',1,'DEFINE_string(write_coco_json,"","Full file path to write people pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#ac6c2099e630e05f867ee10b43f35dc65',1,'DEFINE_string(write_coco_foot_json,"","Full file path to write people foot pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#aa3fe7c4c07492e6553a6c2d25ebd76b4',1,'DEFINE_string(write_heatmaps,"","Directory to write body pose heatmaps in PNG format. 
At least 1 `add_heatmaps_X` flag"" must be enabled."): flags.hpp'],['../flags_8hpp.html#a242473077549869f06534e8a9ea1ddd6',1,'DEFINE_string(write_heatmaps_format,"png","File extension and format for `write_heatmaps`, analogous to `write_images_format`."" For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"" floating values."): flags.hpp'],['../flags_8hpp.html#ad8c5173beb83c0f9996362b3a3fba820',1,'DEFINE_string(write_keypoint,"","(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"" with `write_keypoint_format`."): flags.hpp'],['../flags_8hpp.html#a5aaba99bdb163516d0297d2e09dd0c7d',1,'DEFINE_string(write_keypoint_format,"yml","(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"" yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."): flags.hpp'],['../flags_8hpp.html#a6c5341914694863528d3d93b23b45f01',1,'DEFINE_string(write_video_adam,"","Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`"" controls FPS."): flags.hpp'],['../flags_8hpp.html#ad0c09dd7ede747d69d36dc86c7ffa11c',1,'DEFINE_string(write_bvh,"","Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."): flags.hpp'],['../flags_8hpp.html#a63936bd2bc53a453ba74066289ab9d29',1,'DEFINE_string(udp_host,"","Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."): flags.hpp'],['../flags_8hpp.html#aa7b1c758c6d6e35c3227994a9ced0236',1,'DEFINE_string(udp_port,"8051","Experimental, not available yet. Port number for UDP communication."): flags.hpp']]], + ['define_5fbool',['DEFINE_bool',['../flags_8hpp.html#a035ba7d578f36103d096569f1984ab61',1,'DEFINE_bool(disable_multi_thread, false,"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error."): flags.hpp'],['../flags_8hpp.html#a7fc3e871114ac35447499ffda035d51e',1,'DEFINE_bool(flir_camera, false,"Whether to use FLIR (Point-Grey) stereo camera."): flags.hpp'],['../flags_8hpp.html#a9d45877ef21b9042f1b307d23d404e40',1,'DEFINE_bool(frame_flip, false,"Flip/mirror each frame (e.g., for real time webcam demonstrations)."): flags.hpp'],['../flags_8hpp.html#a6a501f229de1ba19801b77fd0565527b',1,'DEFINE_bool(frames_repeat, false,"Repeat frames when finished."): flags.hpp'],['../flags_8hpp.html#a5e350d38962a702d8e0c658fbbb5cc64',1,'DEFINE_bool(process_real_time, false,"Enable to keep the original source frame rate (e.g., for video). If the processing time is"" too long, it will skip frames. If it is too fast, it will slow it down."): flags.hpp'],['../flags_8hpp.html#ad739178d4295e31edd2f1e468bd2e600',1,'DEFINE_bool(frame_keep_distortion, false,"If false (default), it will undistortionate the image based on the"" `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.,"" it will leave it as it is."): flags.hpp'],['../flags_8hpp.html#a5e8200fc5fd8bc1e80b94b029ea8e18d',1,'DEFINE_bool(body_disable, false,"Disable body keypoint detection. 
Option only possible for faster (but less accurate) face"" keypoint detection."): flags.hpp'],['../flags_8hpp.html#a669c6f4820af0275f40808b18147b98c',1,'DEFINE_bool(heatmaps_add_parts, false,"If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"" analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps."" If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential"" memory order: body parts + bkg + PAFs. It will follow the order on"" POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will"" considerably decrease. Not required for OpenPose, enable it only if you intend to"" explicitly use this information later."): flags.hpp'],['../flags_8hpp.html#af57ca90aafa15b707af299527b0aff6f',1,'DEFINE_bool(heatmaps_add_bkg, false,"Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to"" background."): flags.hpp'],['../flags_8hpp.html#ac881df85c5be736d05822ad98ac0496f',1,'DEFINE_bool(heatmaps_add_PAFs, false,"Same functionality as `add_heatmaps_parts`, but adding the PAFs."): flags.hpp'],['../flags_8hpp.html#aa941a6193b13f3d138437cf2a84e73ff',1,'DEFINE_bool(part_candidates, false,"Also enable `write_json` in order to save this information. If true, it will fill the"" op::Datum::poseCandidates array with the body part candidates. Candidates refer to all"" the detected body parts, before being assembled into people. Note that the number of"" candidates is equal or higher than the number of final body parts (i.e. after being"" assembled into people). The empty body parts are filled with 0s. Program speed will"" slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly"" use this information."): flags.hpp'],['../flags_8hpp.html#aea11a0489c2af278990d3ddff10960fd',1,'DEFINE_bool(face, false,"Enables face keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. Note that this will considerable slow down the performance and increse"" the required GPU memory. In addition, the greater number of people on the image, the"" slower OpenPose will be."): flags.hpp'],['../flags_8hpp.html#a4da8f1bc0afb58f2a757ab6d90c6042e',1,'DEFINE_bool(hand, false,"Enables hand keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. Analogously to `--face`, it will also slow down the performance, increase"" the required GPU memory and its speed depends on the number of people."): flags.hpp'],['../flags_8hpp.html#a0a28dd6ee5503f070d08cc9e4a3aff05',1,'DEFINE_bool(hand_tracking, false,"Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"" is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it"" simply looks for hands in positions at which hands were located in previous frames, but"" it does not guarantee the same person ID among frames."): flags.hpp'],['../flags_8hpp.html#a4c800b6a91b02874d006da33ab8432cd',1,'DEFINE_bool(3d, false,"Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."" 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"" results. Note that it will only display 1 person. If multiple people is present, it will"" fail."): flags.hpp'],['../flags_8hpp.html#ac210b56a3c46ff06143a62b0bb725cc6',1,'DEFINE_bool(identification, false,"Experimental, not available yet. 
Whether to enable people identification across frames."): flags.hpp'],['../flags_8hpp.html#a1a9ffc302eac69c8ba024da3edf01a5f',1,'DEFINE_bool(disable_blending, false,"If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"" background, instead of being rendered into the original image. Related: `part_to_show`,"" `alpha_pose`, and `alpha_pose`."): flags.hpp'],['../flags_8hpp.html#a231bec7b2113f31cebb760c849ca2841',1,'DEFINE_bool(fullscreen, false,"Run in full-screen mode (press f during runtime to toggle)."): flags.hpp'],['../flags_8hpp.html#a0ee4f86d572ef52ec26dcbb37efb5e65',1,'DEFINE_bool(no_gui_verbose, false,"Do not write text on output images on GUI (e.g., number of current frame and people). It"" does not affect the pose rendering."): flags.hpp']]], + ['define_5fdouble',['DEFINE_double',['../flags_8hpp.html#a29f9621cd9361deec9512f8d8b53e6ba',1,'DEFINE_double(camera_fps, 30.0,"Frame rate for the webcam (also used when saving video). Set this value to the minimum"" value between the OpenPose displayed speed and the webcam real frame rate."): flags.hpp'],['../flags_8hpp.html#adf35e9261afec1673a42de6197f07f42',1,'DEFINE_double(scale_gap, 0.3,"Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."" If you want to change the initial scale, you actually want to multiply the"" `net_resolution` by your desired initial scale."): flags.hpp'],['../flags_8hpp.html#abf5c8a05ae8f86c8e4d8eedbceb84516',1,'DEFINE_double(hand_scale_range, 0.4,"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if"" scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."): flags.hpp'],['../flags_8hpp.html#afd21fc201e4b269bf0b07b8fce886173',1,'DEFINE_double(render_threshold, 0.05,"Only estimated keypoints whose score confidences are higher than this threshold will be"" rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;"" while small thresholds (~0.1) will also output guessed and occluded keypoints, but also"" more false positives (i.e. wrong detections)."): flags.hpp'],['../flags_8hpp.html#ad971ae1068651177e7a78ae68c022792',1,'DEFINE_double(alpha_pose, 0.6,"Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will"" hide it. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a62bfc56794bb8ceaedd0eb7bc32a0f22',1,'DEFINE_double(alpha_heatmap, 0.7,"Blending factor (range 0-1) between heatmap and original frame. 1 will only show the"" heatmap, 0 will only show the frame. 
Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a53eb4c15af968876f31ca0be54d251b3',1,'DEFINE_double(face_render_threshold, 0.4,"Analogous to `render_threshold`, but applied to the face keypoints."): flags.hpp'],['../flags_8hpp.html#af654373ad667b1683f30e350331ea709',1,'DEFINE_double(face_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to face."): flags.hpp'],['../flags_8hpp.html#a71043931875d1f5ec677fd69ae96c632',1,'DEFINE_double(face_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to face."): flags.hpp'],['../flags_8hpp.html#a6b12d681ace972ae8eede484505c50af',1,'DEFINE_double(hand_render_threshold, 0.2,"Analogous to `render_threshold`, but applied to the hand keypoints."): flags.hpp'],['../flags_8hpp.html#a890b2b8df8a57fe4e9baa465c6584ccf',1,'DEFINE_double(hand_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to hand."): flags.hpp'],['../flags_8hpp.html#ac17a7af1030d7f5bd69672fec961c083',1,'DEFINE_double(hand_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to hand."): flags.hpp']]], + ['define_5fint32',['DEFINE_int32',['../flags_8hpp.html#a73ee51843ead02ee6358fe39dcbeffde',1,'DEFINE_int32(logging_level, 3,"The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"" low priority messages and 4 for important ones."): flags.hpp'],['../flags_8hpp.html#a8e9de971b409cfe7fdded7f0d47c502d',1,'DEFINE_int32(profile_speed, 1000,"If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"" runtime statistics at this frame number."): flags.hpp'],['../flags_8hpp.html#a10efaeb1ea3a8478388cc7d0bfd4e59e',1,'DEFINE_int32(camera,-1,"The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative"" number (by default), to auto-detect and open the first available camera."): flags.hpp'],['../flags_8hpp.html#ab41c02abe3634f0db65123ecda964a31',1,'DEFINE_int32(flir_camera_index,-1,"Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir"" camera index to run, where 0 corresponds to the detected flir camera with the lowest"" serial number, and `n` to the `n`-th lowest serial number camera."): flags.hpp'],['../flags_8hpp.html#a6c9c8a5843dd8b93e009bf29dc31cde2',1,'DEFINE_int32(frame_rotate, 0,"Rotate each frame, 4 possible values: 0, 90, 180, 270."): flags.hpp'],['../flags_8hpp.html#a284252d3d255ad5e5c35815d720fd67a',1,'DEFINE_int32(num_gpu,-1,"The number of GPU devices to use. If negative, it will use all the available GPUs in your"" machine."): flags.hpp'],['../flags_8hpp.html#a8511765700f652000f2c1c2b1a5df9f9',1,'DEFINE_int32(num_gpu_start, 0,"GPU device start number."): flags.hpp'],['../flags_8hpp.html#a18fc454ffeef53c7c73d69c67d1a73fc',1,'DEFINE_int32(keypoint_scale, 0,"Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)"" coordinates that will be saved with the `write_json` & `write_keypoint` flags."" Select `0` to scale it to the original source resolution; `1`to scale it to the net output"" size (set with `net_resolution`); `2` to scale it to the final output size (set with"" `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left"" corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where"" (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. 
Non"" related with `scale_number` and `scale_gap`."): flags.hpp'],['../flags_8hpp.html#aa5a1826a500d7131fefb480ccd1713fb',1,'DEFINE_int32(number_people_max,-1,"This parameter will limit the maximum number of people detected, by keeping the people with"" top scores. The score is based in person area over the image, body part score, as well as"" joint score (between each pair of connected body parts). Useful if you know the exact"" number of people in the scene, so it can remove false positives (if all the people have"" been detected. However, it might also include false negatives by removing very small or"" highly occluded people. -1 will keep them all."): flags.hpp'],['../flags_8hpp.html#a1edea5f45026b353b6e6cc3b196767a0',1,'DEFINE_int32(scale_number, 1,"Number of scales to average."): flags.hpp'],['../flags_8hpp.html#aa80e5f9914dd35c852941282aa229b21',1,'DEFINE_int32(heatmaps_scale, 2,"Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer"" rounded [0,255]; and 3 for no scaling."): flags.hpp'],['../flags_8hpp.html#a311e487137e17445e6939abb7cc0cf8f',1,'DEFINE_int32(hand_scale_number, 1,"Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"" were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."): flags.hpp'],['../flags_8hpp.html#aa797dd033c6f3c4d2654e7000939d270',1,'DEFINE_int32(3d_min_views,-1,"Minimum number of views required to reconstruct each keypoint. By default (-1), it will"" require all the cameras to see the keypoint in order to reconstruct it."): flags.hpp'],['../flags_8hpp.html#a20c481950df0272f0b7b0cde67d8e72a',1,'DEFINE_int32(3d_views, 1,"Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per"" iteration, allowing tasks such as stereo camera processing (`--3d`). Note that"" `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the"" parameter folder as this number indicates."): flags.hpp'],['../flags_8hpp.html#afdfef14901f7b5e324d6983845f5ab50',1,'DEFINE_int32(tracking,-1,"Experimental, not available yet. Whether to enable people tracking across frames. The"" value indicates the number of frames where tracking is run between each OpenPose keypoint"" detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint"" detector and tracking for potentially higher accurary than only OpenPose."): flags.hpp'],['../flags_8hpp.html#a072bd893f4003b48bc7c99735eeeed39',1,'DEFINE_int32(ik_threads, 0,"Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D"" keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing"" the number of threads will increase the speed but also the global system latency."): flags.hpp'],['../flags_8hpp.html#a4cead735de5b43cfcae5c1139df3be1a',1,'DEFINE_int32(part_to_show, 0,"Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body"" part heat map, 19 for the background heat map, 20 for all the body part heat maps"" together, 21 for all the PAFs, 22-40 for each body part pair PAF."): flags.hpp'],['../flags_8hpp.html#a65152cde78f8e7da1d33f557ec392312',1,'DEFINE_int32(render_pose,-1,"Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"" (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if"" CPU_ONLY is enabled, or GPU if CUDA is enabled. 
If rendering is enabled, it will render"" both `outputData` and `cvOutputData` with the original image and desired body part to be"" shown (i.e. keypoints, heat maps or PAFs)."): flags.hpp'],['../flags_8hpp.html#ad0269da28dc2033e23b8ea84b7e793a2',1,'DEFINE_int32(face_render,-1,"Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#ada5b7fef5063818fd668359e9e0b0504',1,'DEFINE_int32(hand_render,-1,"Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#a9a34e10f75069cf3283e535a77006775',1,'DEFINE_int32(display,-1,"Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server"" and/or to slightly speed up the processing if visual output is not required); 2 for 2-D"" display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."): flags.hpp'],['../flags_8hpp.html#a39756aa6d6911435a326e18541db970a',1,'DEFINE_int32(write_coco_json_variant, 0,"Currently, this option is experimental and only makes effect on car JSON generation. It"" selects the COCO variant for cocoJsonSaver."): flags.hpp']]], + ['define_5fstring',['DEFINE_string',['../flags_8hpp.html#a9e7b1394aea185360a1fe19cf4e20a89',1,'DEFINE_string(camera_resolution,"-1x-1","Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the"" default 1280x720 for `--camera`, or the maximum flir camera resolution available for"" `--flir_camera`"): flags.hpp'],['../flags_8hpp.html#aab02d4078b5f7999a582d9c5f4248676',1,'DEFINE_string(video,"","Use a video file instead of the camera. Use `examples/media/video.avi` for our default"" example video."): flags.hpp'],['../flags_8hpp.html#a7773e867133822f5601899975dc06adb',1,'DEFINE_string(image_dir,"","Process a directory of images. Use `examples/media/` for our default example folder with 20"" images. Read all standard formats (jpg, png, bmp, etc.)."): flags.hpp'],['../flags_8hpp.html#a5db17b8bc6df4fe40b556af7157dcbf7',1,'DEFINE_string(ip_camera,"","String with the IP camera URL. It supports protocols like RTSP and HTTP."): flags.hpp'],['../flags_8hpp.html#aff61246512375ff5941dc4110e127ca3',1,'DEFINE_string(camera_parameter_folder,"models/cameraParameters/flir/","String with the folder where the camera parameters are located."): flags.hpp'],['../flags_8hpp.html#a9ab689ebe20a261b20587af79123e79a',1,'DEFINE_string(model_folder,"models/","Folder path (absolute or relative) where the models (pose, face, ...) are located."): flags.hpp'],['../flags_8hpp.html#a85be61a31eaa438a7e9c7d2baf51da47',1,'DEFINE_string(output_resolution,"-1x-1","The image resolution (display and output). Use \"-1x-1\" to force the program to use the"" input image resolution."): flags.hpp'],['../flags_8hpp.html#a055e1e0a1618e96f156f32278b41d3e4',1,'DEFINE_string(model_pose,"BODY_25","Model to be used. E.g., `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), ""`MPI_4_layers` (15 keypoints, even faster but less accurate)."): flags.hpp'],['../flags_8hpp.html#a66d6d5c07371179c3702dbd1da9d9bd3',1,'DEFINE_string(net_resolution,"-1x368","Multiples of 16. If it is increased, the accuracy potentially increases. If it is"" decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"" closest aspect ratio possible to the images or videos to be processed. 
Using `-1` in"" any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"" input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"" e.g., full HD (1980x1080) and HD (1280x720) resolutions."): flags.hpp'],['../flags_8hpp.html#af5ee5f61f0d36a03bb8647408f5e236b',1,'DEFINE_string(face_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint"" detector. 320x320 usually works fine while giving a substantial speed up when multiple"" faces on the image."): flags.hpp'],['../flags_8hpp.html#afaf97bbf6a49576782d25147bc865bed',1,'DEFINE_string(hand_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint"" detector."): flags.hpp'],['../flags_8hpp.html#a7a3597e9216885470199ca1578eb7f69',1,'DEFINE_string(write_images,"","Directory to write rendered frames in `write_images_format` image format."): flags.hpp'],['../flags_8hpp.html#a6957bf419a0ca3478948a62a3ce5ecf0',1,'DEFINE_string(write_images_format,"png","File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV"" function cv::imwrite for all compatible extensions."): flags.hpp'],['../flags_8hpp.html#a3051d21eb51cc39eed5f781d8eaed960',1,'DEFINE_string(write_video,"","Full file path to write rendered frames in motion JPEG video format. It might fail if the"" final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"" `camera_fps` controls FPS."): flags.hpp'],['../flags_8hpp.html#a9aa48b2ab293842bc42b96df9e97c9b8',1,'DEFINE_string(write_json,"","Directory to write OpenPose output in JSON format. It includes body, hand, and face pose"" keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."): flags.hpp'],['../flags_8hpp.html#a26d1c7340fc87d4593dda754d54145a2',1,'DEFINE_string(write_coco_json,"","Full file path to write people pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#ac6c2099e630e05f867ee10b43f35dc65',1,'DEFINE_string(write_coco_foot_json,"","Full file path to write people foot pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#aa3fe7c4c07492e6553a6c2d25ebd76b4',1,'DEFINE_string(write_heatmaps,"","Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"" must be enabled."): flags.hpp'],['../flags_8hpp.html#a242473077549869f06534e8a9ea1ddd6',1,'DEFINE_string(write_heatmaps_format,"png","File extension and format for `write_heatmaps`, analogous to `write_images_format`."" For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"" floating values."): flags.hpp'],['../flags_8hpp.html#ad8c5173beb83c0f9996362b3a3fba820',1,'DEFINE_string(write_keypoint,"","(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"" with `write_keypoint_format`."): flags.hpp'],['../flags_8hpp.html#a5aaba99bdb163516d0297d2e09dd0c7d',1,'DEFINE_string(write_keypoint_format,"yml","(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"" yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."): flags.hpp'],['../flags_8hpp.html#ad7a48143249d608e2ad3a8413eb19c34',1,'DEFINE_string(write_video_adam,"","Experimental, not available yet. E.g., `~/Desktop/adamResult.avi`. 
Flag `camera_fps`"" controls FPS."): flags.hpp'],['../flags_8hpp.html#a72c09e09c94b67f88b53e1a94e7bfe0f',1,'DEFINE_string(write_bvh,"","Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`."): flags.hpp'],['../flags_8hpp.html#a63936bd2bc53a453ba74066289ab9d29',1,'DEFINE_string(udp_host,"","Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."): flags.hpp'],['../flags_8hpp.html#aa7b1c758c6d6e35c3227994a9ced0236',1,'DEFINE_string(udp_port,"8051","Experimental, not available yet. Port number for UDP communication."): flags.hpp']]], ['define_5ftemplate_5fdatum',['DEFINE_TEMPLATE_DATUM',['../datum_8hpp.html#ad11d52b69bc54e48ceb2f5787f700431',1,'datum.hpp']]], - ['define_5fuint64',['DEFINE_uint64',['../flags_8hpp.html#a1433eaf1c5eb42e406e76bc6f8e517c3',1,'DEFINE_uint64(frame_first, 0,"Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."): flags.hpp'],['../flags_8hpp.html#a22572531e5a4896c510639ac57cf522c',1,'DEFINE_uint64(frame_step, 1,"Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames"" 0, 5, 10, etc.."): flags.hpp'],['../flags_8hpp.html#ac8fef8bb0234286e74b75214a750d674',1,'DEFINE_uint64(frame_last,-1,"Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to"" 10, it will process 11 frames (0-10)."): flags.hpp']]], + ['define_5fuint64',['DEFINE_uint64',['../flags_8hpp.html#a1433eaf1c5eb42e406e76bc6f8e517c3',1,'DEFINE_uint64(frame_first, 0,"Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."): flags.hpp'],['../flags_8hpp.html#a22572531e5a4896c510639ac57cf522c',1,'DEFINE_uint64(frame_step, 1,"Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames"" 0, 5, 10, etc.."): flags.hpp'],['../flags_8hpp.html#a158227abc2af90abaec523a784b40fa8',1,'DEFINE_uint64(frame_last,-1,"Finish on desired frame number. Select -1 to disable. 
Indexes are 0-based, e.g., if set to"" 10, it will process 11 frames (0-10)."): flags.hpp']]], ['delete_5fcopy',['DELETE_COPY',['../classop_1_1_face_cpu_renderer.html#a233f2a83930d07e4d420b43c8a660f32',1,'op::FaceCpuRenderer::DELETE_COPY()'],['../classop_1_1_hand_cpu_renderer.html#a66a7d318b240c73687320bf092363409',1,'op::HandCpuRenderer::DELETE_COPY()'],['../classop_1_1_sub_thread_no_queue.html#a43504502c36461305d656fb87b914749',1,'op::SubThreadNoQueue::DELETE_COPY()'],['../macros_8hpp.html#abef96b5dd35dd9d44ad27ddf0e2f5f2e',1,'DELETE_COPY(): macros.hpp']]], ['deleter',['Deleter',['../classcl_1_1detail_1_1_deleter.html',1,'cl::detail']]], ['deleter',['Deleter',['../classcl_1_1detail_1_1_deleter.html#a899859732bb1c5f296f3e4f05ca6a39d',1,'cl::detail::Deleter']]], diff --git a/html/search/functions_16.js b/html/search/functions_16.js index 94fd296b..8cc7bd9e 100644 --- a/html/search/functions_16.js +++ b/html/search/functions_16.js @@ -51,7 +51,7 @@ var searchData= ['wrapperstructface',['WrapperStructFace',['../structop_1_1_wrapper_struct_face.html#ac60accbce8d798adbc3b8a76e6e4f879',1,'op::WrapperStructFace']]], ['wrapperstructhand',['WrapperStructHand',['../structop_1_1_wrapper_struct_hand.html#aaa42d271c3cb7fa3a071d4785644c0a8',1,'op::WrapperStructHand']]], ['wrapperstructinput',['WrapperStructInput',['../structop_1_1_wrapper_struct_input.html#a529d945c83bb69ac1db7b8db371e6622',1,'op::WrapperStructInput']]], - ['wrapperstructoutput',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html#a46207e61b143191a6cb79d397af43989',1,'op::WrapperStructOutput']]], + ['wrapperstructoutput',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html#ae6af809e1b560ca018861ee8221bad21',1,'op::WrapperStructOutput']]], ['wrapperstructpose',['WrapperStructPose',['../structop_1_1_wrapper_struct_pose.html#a5ea7564df25975c4279547acb885376a',1,'op::WrapperStructPose']]], ['wrappert',['WrapperT',['../classop_1_1_wrapper_t.html#af1a56f0542a2cbcc11d6b185adeb5626',1,'op::WrapperT']]], ['write',['write',['../classop_1_1_video_saver.html#a7c79397b9be8b6d6d12b9f3e78e6c794',1,'op::VideoSaver::write(const cv::Mat &cvMat)'],['../classop_1_1_video_saver.html#a3cdbef8bada237edffaf7793eaa0fe8e',1,'op::VideoSaver::write(const std::vector< cv::Mat > &cvMats)']]], diff --git a/html/search/functions_2.js b/html/search/functions_2.js index 0aeab9f5..a117d3ba 100644 --- a/html/search/functions_2.js +++ b/html/search/functions_2.js @@ -17,7 +17,7 @@ var searchData= ['checkthread',['checkThread',['../classop_1_1_pose_extractor_net.html#a073db5b9847c8afbc10ce534ea533a84',1,'op::PoseExtractorNet']]], ['clear',['clear',['../classop_1_1_pose_extractor_net.html#a3fe7256d9860f4c624f5cf928556bc28',1,'op::PoseExtractorNet::clear()'],['../classop_1_1_queue_base.html#a247f435c95709f3246d352eee4f757af',1,'op::QueueBase::clear()']]], ['clone',['clone',['../classop_1_1_array.html#af42f4570122d1b8259c211f52335909b',1,'op::Array::clone()'],['../structop_1_1_datum.html#a847f62b3060c5d0d106e60c29df87a0b',1,'op::Datum::clone()']]], - ['cocojsonsaver',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a0ce96fcd7dfaa2791f514a8363d018ff',1,'op::CocoJsonSaver']]], + ['cocojsonsaver',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a380d4638d09cfe9c26551cc2efb7c3a8',1,'op::CocoJsonSaver']]], ['comma',['comma',['../classop_1_1_json_ofstream.html#ae4468279f789c8026d431b2ef62646f9',1,'op::JsonOfstream']]], 
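The hunks above and below update the Doxygen search-index entries for the command-line flags that `flags.hpp` declares with the gflags `DEFINE_*` macros, including the newly added `write_coco_json_variant` option. As a rough standalone sketch of the pattern those entries document (it is not part of this patch or of OpenPose itself), the snippet below re-declares a few of the quoted flags with the same macros and reads them back through the generated `FLAGS_*` globals. It assumes the gflags library is installed; the `main()` driver and shortened descriptions are purely illustrative.

```cpp
// Illustrative sketch only: mirrors how flags.hpp declares its options with gflags.
// Flag names and defaults are copied from the DEFINE_* entries quoted above;
// the descriptions are shortened and the main() wrapper is hypothetical.
#include <iostream>
#include <gflags/gflags.h>

DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera.");
DEFINE_int32(write_coco_json_variant, 0, "COCO variant for cocoJsonSaver (experimental).");
DEFINE_double(render_threshold, 0.05, "Only keypoints above this confidence are rendered.");
DEFINE_string(model_pose, "BODY_25", "Model to be used, e.g. COCO or MPI.");

int main(int argc, char* argv[])
{
    // gflags parses `--flag value` / `--flag=value` arguments and fills the FLAGS_* globals.
    gflags::ParseCommandLineFlags(&argc, &argv, true);

    // Each DEFINE_<type>(name, ...) macro exposes a FLAGS_<name> variable of that type.
    std::cout << "flir_camera: "             << FLAGS_flir_camera             << "\n"
              << "write_coco_json_variant: " << FLAGS_write_coco_json_variant << "\n"
              << "render_threshold: "        << FLAGS_render_threshold        << "\n"
              << "model_pose: "              << FLAGS_model_pose              << "\n";
    return 0;
}
```

Invoking the sketch as, say, `./flags_sketch --flir_camera --write_coco_json_variant 1 --model_pose COCO` would print the parsed values, which is the same `FLAGS_<name>` access pattern the flag descriptions indexed above assume.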
['commandqueue',['CommandQueue',['../classcl_1_1_command_queue.html#a8462de408ebfaf6332429a92b7938490',1,'cl::CommandQueue::CommandQueue(cl_command_queue_properties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a05ae8e9e44e9b28fc755937490535550',1,'cl::CommandQueue::CommandQueue(QueueProperties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#adcb116a4e1a53417dad3d3850c33d42f',1,'cl::CommandQueue::CommandQueue(const Context &context, cl_command_queue_properties properties=0, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a975c3dce399cb77a5c6b0e294e4778fe',1,'cl::CommandQueue::CommandQueue(const Context &context, QueueProperties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a887826e515b03224aec87b33ac59f327',1,'cl::CommandQueue::CommandQueue(const Context &context, const Device &device, cl_command_queue_properties properties=0, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#a8dd467bd94839921b7318cb30f92b5a4',1,'cl::CommandQueue::CommandQueue(const Context &context, const Device &device, QueueProperties properties, cl_int *err=NULL)'],['../classcl_1_1_command_queue.html#ab1beb7ab6e619a4e050d87c70bb001a6',1,'cl::CommandQueue::CommandQueue()'],['../classcl_1_1_command_queue.html#a09be675998c51f36aa7744b47eabebd3',1,'cl::CommandQueue::CommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)'],['../classcl_1_1_command_queue.html#ae2ce434c5100e5b467ea8c6561e1f11e',1,'cl::CommandQueue::CommandQueue(const CommandQueue &queue)'],['../classcl_1_1_command_queue.html#a1c5830678be567e34dccbb0065c89b21',1,'cl::CommandQueue::CommandQueue(CommandQueue &&queue) CL_HPP_NOEXCEPT_']]], ['compile',['compile',['../classcl_1_1_program.html#a8da346ecc73cf9df257690815039ddb5',1,'cl::Program']]], diff --git a/html/search/functions_3.js b/html/search/functions_3.js index 645745f9..40e9f495 100644 --- a/html/search/functions_3.js +++ b/html/search/functions_3.js @@ -4,11 +4,11 @@ var searchData= ['datum',['Datum',['../structop_1_1_datum.html#a72c75834671aebe44705738fb5efc3c5',1,'op::Datum::Datum()'],['../structop_1_1_datum.html#a42f9aef848c6335c5a81cad374319f0b',1,'op::Datum::Datum(const Datum &datum)'],['../structop_1_1_datum.html#a2d4940d8cb12d95b8588cd0280f6524c',1,'op::Datum::Datum(Datum &&datum)']]], ['datumproducer',['DatumProducer',['../classop_1_1_datum_producer.html#a308c34ca52e7d6db2d85e27a348a15bd',1,'op::DatumProducer']]], ['deallocate',['deallocate',['../classcl_1_1_s_v_m_allocator.html#a695f4693c4245c66285e7f4a0405cf84',1,'cl::SVMAllocator']]], - ['define_5fbool',['DEFINE_bool',['../flags_8hpp.html#a0e9cb0627d386e4d69e5eae0593d70ad',1,'DEFINE_bool(disable_multi_thread, false,"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g. webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error."): flags.hpp'],['../flags_8hpp.html#a7fc3e871114ac35447499ffda035d51e',1,'DEFINE_bool(flir_camera, false,"Whether to use FLIR (Point-Grey) stereo camera."): flags.hpp'],['../flags_8hpp.html#a64c46584d79ef0b947ecd0a98b282839',1,'DEFINE_bool(frame_flip, false,"Flip/mirror each frame (e.g. 
for real time webcam demonstrations)."): flags.hpp'],['../flags_8hpp.html#a6a501f229de1ba19801b77fd0565527b',1,'DEFINE_bool(frames_repeat, false,"Repeat frames when finished."): flags.hpp'],['../flags_8hpp.html#a5c66f20b5a9a1bc44d3c685fd37ff7f9',1,'DEFINE_bool(process_real_time, false,"Enable to keep the original source frame rate (e.g. for video). If the processing time is"" too long, it will skip frames. If it is too fast, it will slow it down."): flags.hpp'],['../flags_8hpp.html#ad739178d4295e31edd2f1e468bd2e600',1,'DEFINE_bool(frame_keep_distortion, false,"If false (default), it will undistortionate the image based on the"" `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.,"" it will leave it as it is."): flags.hpp'],['../flags_8hpp.html#a5e8200fc5fd8bc1e80b94b029ea8e18d',1,'DEFINE_bool(body_disable, false,"Disable body keypoint detection. Option only possible for faster (but less accurate) face"" keypoint detection."): flags.hpp'],['../flags_8hpp.html#a669c6f4820af0275f40808b18147b98c',1,'DEFINE_bool(heatmaps_add_parts, false,"If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"" analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps."" If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential"" memory order: body parts + bkg + PAFs. It will follow the order on"" POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will"" considerably decrease. Not required for OpenPose, enable it only if you intend to"" explicitly use this information later."): flags.hpp'],['../flags_8hpp.html#af57ca90aafa15b707af299527b0aff6f',1,'DEFINE_bool(heatmaps_add_bkg, false,"Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to"" background."): flags.hpp'],['../flags_8hpp.html#ac881df85c5be736d05822ad98ac0496f',1,'DEFINE_bool(heatmaps_add_PAFs, false,"Same functionality as `add_heatmaps_parts`, but adding the PAFs."): flags.hpp'],['../flags_8hpp.html#aa941a6193b13f3d138437cf2a84e73ff',1,'DEFINE_bool(part_candidates, false,"Also enable `write_json` in order to save this information. If true, it will fill the"" op::Datum::poseCandidates array with the body part candidates. Candidates refer to all"" the detected body parts, before being assembled into people. Note that the number of"" candidates is equal or higher than the number of final body parts (i.e. after being"" assembled into people). The empty body parts are filled with 0s. Program speed will"" slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly"" use this information."): flags.hpp'],['../flags_8hpp.html#aea11a0489c2af278990d3ddff10960fd',1,'DEFINE_bool(face, false,"Enables face keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. Note that this will considerable slow down the performance and increse"" the required GPU memory. In addition, the greater number of people on the image, the"" slower OpenPose will be."): flags.hpp'],['../flags_8hpp.html#a4da8f1bc0afb58f2a757ab6d90c6042e',1,'DEFINE_bool(hand, false,"Enables hand keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. 
Analogously to `--face`, it will also slow down the performance, increase"" the required GPU memory and its speed depends on the number of people."): flags.hpp'],['../flags_8hpp.html#a0a28dd6ee5503f070d08cc9e4a3aff05',1,'DEFINE_bool(hand_tracking, false,"Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"" is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it"" simply looks for hands in positions at which hands were located in previous frames, but"" it does not guarantee the same person ID among frames."): flags.hpp'],['../flags_8hpp.html#a4c800b6a91b02874d006da33ab8432cd',1,'DEFINE_bool(3d, false,"Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."" 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"" results. Note that it will only display 1 person. If multiple people is present, it will"" fail."): flags.hpp'],['../flags_8hpp.html#ac210b56a3c46ff06143a62b0bb725cc6',1,'DEFINE_bool(identification, false,"Experimental, not available yet. Whether to enable people identification across frames."): flags.hpp'],['../flags_8hpp.html#a1a9ffc302eac69c8ba024da3edf01a5f',1,'DEFINE_bool(disable_blending, false,"If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"" background, instead of being rendered into the original image. Related: `part_to_show`,"" `alpha_pose`, and `alpha_pose`."): flags.hpp'],['../flags_8hpp.html#a231bec7b2113f31cebb760c849ca2841',1,'DEFINE_bool(fullscreen, false,"Run in full-screen mode (press f during runtime to toggle)."): flags.hpp'],['../flags_8hpp.html#a14f23a4c6dd6c9290e3ee44bd79f96bf',1,'DEFINE_bool(no_gui_verbose, false,"Do not write text on output images on GUI (e.g. number of current frame and people). It"" does not affect the pose rendering."): flags.hpp']]], - ['define_5fdouble',['DEFINE_double',['../flags_8hpp.html#a29f9621cd9361deec9512f8d8b53e6ba',1,'DEFINE_double(camera_fps, 30.0,"Frame rate for the webcam (also used when saving video). Set this value to the minimum"" value between the OpenPose displayed speed and the webcam real frame rate."): flags.hpp'],['../flags_8hpp.html#adf35e9261afec1673a42de6197f07f42',1,'DEFINE_double(scale_gap, 0.3,"Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."" If you want to change the initial scale, you actually want to multiply the"" `net_resolution` by your desired initial scale."): flags.hpp'],['../flags_8hpp.html#a94efdbeb7b9adb554d34399279312b8f',1,'DEFINE_double(hand_scale_range, 0.4,"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if"" scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."): flags.hpp'],['../flags_8hpp.html#afd21fc201e4b269bf0b07b8fce886173',1,'DEFINE_double(render_threshold, 0.05,"Only estimated keypoints whose score confidences are higher than this threshold will be"" rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;"" while small thresholds (~0.1) will also output guessed and occluded keypoints, but also"" more false positives (i.e. wrong detections)."): flags.hpp'],['../flags_8hpp.html#ad971ae1068651177e7a78ae68c022792',1,'DEFINE_double(alpha_pose, 0.6,"Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will"" hide it. 
Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a62bfc56794bb8ceaedd0eb7bc32a0f22',1,'DEFINE_double(alpha_heatmap, 0.7,"Blending factor (range 0-1) between heatmap and original frame. 1 will only show the"" heatmap, 0 will only show the frame. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a53eb4c15af968876f31ca0be54d251b3',1,'DEFINE_double(face_render_threshold, 0.4,"Analogous to `render_threshold`, but applied to the face keypoints."): flags.hpp'],['../flags_8hpp.html#af654373ad667b1683f30e350331ea709',1,'DEFINE_double(face_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to face."): flags.hpp'],['../flags_8hpp.html#a71043931875d1f5ec677fd69ae96c632',1,'DEFINE_double(face_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to face."): flags.hpp'],['../flags_8hpp.html#a6b12d681ace972ae8eede484505c50af',1,'DEFINE_double(hand_render_threshold, 0.2,"Analogous to `render_threshold`, but applied to the hand keypoints."): flags.hpp'],['../flags_8hpp.html#a890b2b8df8a57fe4e9baa465c6584ccf',1,'DEFINE_double(hand_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to hand."): flags.hpp'],['../flags_8hpp.html#ac17a7af1030d7f5bd69672fec961c083',1,'DEFINE_double(hand_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to hand."): flags.hpp']]], - ['define_5fint32',['DEFINE_int32',['../flags_8hpp.html#a73ee51843ead02ee6358fe39dcbeffde',1,'DEFINE_int32(logging_level, 3,"The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"" low priority messages and 4 for important ones."): flags.hpp'],['../flags_8hpp.html#a8e9de971b409cfe7fdded7f0d47c502d',1,'DEFINE_int32(profile_speed, 1000,"If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"" runtime statistics at this frame number."): flags.hpp'],['../flags_8hpp.html#a10efaeb1ea3a8478388cc7d0bfd4e59e',1,'DEFINE_int32(camera,-1,"The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative"" number (by default), to auto-detect and open the first available camera."): flags.hpp'],['../flags_8hpp.html#ab41c02abe3634f0db65123ecda964a31',1,'DEFINE_int32(flir_camera_index,-1,"Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir"" camera index to run, where 0 corresponds to the detected flir camera with the lowest"" serial number, and `n` to the `n`-th lowest serial number camera."): flags.hpp'],['../flags_8hpp.html#a6c9c8a5843dd8b93e009bf29dc31cde2',1,'DEFINE_int32(frame_rotate, 0,"Rotate each frame, 4 possible values: 0, 90, 180, 270."): flags.hpp'],['../flags_8hpp.html#a284252d3d255ad5e5c35815d720fd67a',1,'DEFINE_int32(num_gpu,-1,"The number of GPU devices to use. If negative, it will use all the available GPUs in your"" machine."): flags.hpp'],['../flags_8hpp.html#a8511765700f652000f2c1c2b1a5df9f9',1,'DEFINE_int32(num_gpu_start, 0,"GPU device start number."): flags.hpp'],['../flags_8hpp.html#a18fc454ffeef53c7c73d69c67d1a73fc',1,'DEFINE_int32(keypoint_scale, 0,"Scaling of the (x,y) coordinates of the final pose data array, i.e. 
the scale of the (x,y)"" coordinates that will be saved with the `write_json` & `write_keypoint` flags."" Select `0` to scale it to the original source resolution; `1`to scale it to the net output"" size (set with `net_resolution`); `2` to scale it to the final output size (set with"" `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left"" corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where"" (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non"" related with `scale_number` and `scale_gap`."): flags.hpp'],['../flags_8hpp.html#aa5a1826a500d7131fefb480ccd1713fb',1,'DEFINE_int32(number_people_max,-1,"This parameter will limit the maximum number of people detected, by keeping the people with"" top scores. The score is based in person area over the image, body part score, as well as"" joint score (between each pair of connected body parts). Useful if you know the exact"" number of people in the scene, so it can remove false positives (if all the people have"" been detected. However, it might also include false negatives by removing very small or"" highly occluded people. -1 will keep them all."): flags.hpp'],['../flags_8hpp.html#a1edea5f45026b353b6e6cc3b196767a0',1,'DEFINE_int32(scale_number, 1,"Number of scales to average."): flags.hpp'],['../flags_8hpp.html#aa80e5f9914dd35c852941282aa229b21',1,'DEFINE_int32(heatmaps_scale, 2,"Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer"" rounded [0,255]; and 3 for no scaling."): flags.hpp'],['../flags_8hpp.html#a311e487137e17445e6939abb7cc0cf8f',1,'DEFINE_int32(hand_scale_number, 1,"Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"" were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."): flags.hpp'],['../flags_8hpp.html#aa797dd033c6f3c4d2654e7000939d270',1,'DEFINE_int32(3d_min_views,-1,"Minimum number of views required to reconstruct each keypoint. By default (-1), it will"" require all the cameras to see the keypoint in order to reconstruct it."): flags.hpp'],['../flags_8hpp.html#a20c481950df0272f0b7b0cde67d8e72a',1,'DEFINE_int32(3d_views, 1,"Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per"" iteration, allowing tasks such as stereo camera processing (`--3d`). Note that"" `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the"" parameter folder as this number indicates."): flags.hpp'],['../flags_8hpp.html#afdfef14901f7b5e324d6983845f5ab50',1,'DEFINE_int32(tracking,-1,"Experimental, not available yet. Whether to enable people tracking across frames. The"" value indicates the number of frames where tracking is run between each OpenPose keypoint"" detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint"" detector and tracking for potentially higher accurary than only OpenPose."): flags.hpp'],['../flags_8hpp.html#a072bd893f4003b48bc7c99735eeeed39',1,'DEFINE_int32(ik_threads, 0,"Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D"" keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing"" the number of threads will increase the speed but also the global system latency."): flags.hpp'],['../flags_8hpp.html#a4cead735de5b43cfcae5c1139df3be1a',1,'DEFINE_int32(part_to_show, 0,"Prediction channel to visualize (default: 0). 
0 for all the body parts, 1-18 for each body"" part heat map, 19 for the background heat map, 20 for all the body part heat maps"" together, 21 for all the PAFs, 22-40 for each body part pair PAF."): flags.hpp'],['../flags_8hpp.html#ab2af299b6380dcd6dc06a95cceb056d4',1,'DEFINE_int32(render_pose,-1,"Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"" (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if"" CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render"" both `outputData` and `cvOutputData` with the original image and desired body part to be"" shown (i.e. keypoints, heat maps or PAFs)."): flags.hpp'],['../flags_8hpp.html#ad0269da28dc2033e23b8ea84b7e793a2',1,'DEFINE_int32(face_render,-1,"Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#ada5b7fef5063818fd668359e9e0b0504',1,'DEFINE_int32(hand_render,-1,"Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#a9a34e10f75069cf3283e535a77006775',1,'DEFINE_int32(display,-1,"Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server"" and/or to slightly speed up the processing if visual output is not required); 2 for 2-D"" display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."): flags.hpp']]], - ['define_5fstring',['DEFINE_string',['../flags_8hpp.html#a9e7b1394aea185360a1fe19cf4e20a89',1,'DEFINE_string(camera_resolution,"-1x-1","Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the"" default 1280x720 for `--camera`, or the maximum flir camera resolution available for"" `--flir_camera`"): flags.hpp'],['../flags_8hpp.html#aab02d4078b5f7999a582d9c5f4248676',1,'DEFINE_string(video,"","Use a video file instead of the camera. Use `examples/media/video.avi` for our default"" example video."): flags.hpp'],['../flags_8hpp.html#a7773e867133822f5601899975dc06adb',1,'DEFINE_string(image_dir,"","Process a directory of images. Use `examples/media/` for our default example folder with 20"" images. Read all standard formats (jpg, png, bmp, etc.)."): flags.hpp'],['../flags_8hpp.html#a5db17b8bc6df4fe40b556af7157dcbf7',1,'DEFINE_string(ip_camera,"","String with the IP camera URL. It supports protocols like RTSP and HTTP."): flags.hpp'],['../flags_8hpp.html#aff61246512375ff5941dc4110e127ca3',1,'DEFINE_string(camera_parameter_folder,"models/cameraParameters/flir/","String with the folder where the camera parameters are located."): flags.hpp'],['../flags_8hpp.html#a9ab689ebe20a261b20587af79123e79a',1,'DEFINE_string(model_folder,"models/","Folder path (absolute or relative) where the models (pose, face, ...) are located."): flags.hpp'],['../flags_8hpp.html#a85be61a31eaa438a7e9c7d2baf51da47',1,'DEFINE_string(output_resolution,"-1x-1","The image resolution (display and output). Use \"-1x-1\" to force the program to use the"" input image resolution."): flags.hpp'],['../flags_8hpp.html#acfd124be44003a59f2591c7584fd3c75',1,'DEFINE_string(model_pose,"BODY_25","Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), ""`MPI_4_layers` (15 keypoints, even faster but less accurate)."): flags.hpp'],['../flags_8hpp.html#a830bcfa6645bf39a18f59d3b72f75edf',1,'DEFINE_string(net_resolution,"-1x368","Multiples of 16. 
If it is increased, the accuracy potentially increases. If it is"" decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"" closest aspect ratio possible to the images or videos to be processed. Using `-1` in"" any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"" input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"" e.g. full HD (1980x1080) and HD (1280x720) resolutions."): flags.hpp'],['../flags_8hpp.html#af5ee5f61f0d36a03bb8647408f5e236b',1,'DEFINE_string(face_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint"" detector. 320x320 usually works fine while giving a substantial speed up when multiple"" faces on the image."): flags.hpp'],['../flags_8hpp.html#afaf97bbf6a49576782d25147bc865bed',1,'DEFINE_string(hand_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint"" detector."): flags.hpp'],['../flags_8hpp.html#a7a3597e9216885470199ca1578eb7f69',1,'DEFINE_string(write_images,"","Directory to write rendered frames in `write_images_format` image format."): flags.hpp'],['../flags_8hpp.html#ab077893503ebb5fba8cb300bd5f93d62',1,'DEFINE_string(write_images_format,"png","File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV"" function cv::imwrite for all compatible extensions."): flags.hpp'],['../flags_8hpp.html#a3051d21eb51cc39eed5f781d8eaed960',1,'DEFINE_string(write_video,"","Full file path to write rendered frames in motion JPEG video format. It might fail if the"" final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"" `camera_fps` controls FPS."): flags.hpp'],['../flags_8hpp.html#a9aa48b2ab293842bc42b96df9e97c9b8',1,'DEFINE_string(write_json,"","Directory to write OpenPose output in JSON format. It includes body, hand, and face pose"" keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."): flags.hpp'],['../flags_8hpp.html#a26d1c7340fc87d4593dda754d54145a2',1,'DEFINE_string(write_coco_json,"","Full file path to write people pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#ac6c2099e630e05f867ee10b43f35dc65',1,'DEFINE_string(write_coco_foot_json,"","Full file path to write people foot pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#aa3fe7c4c07492e6553a6c2d25ebd76b4',1,'DEFINE_string(write_heatmaps,"","Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"" must be enabled."): flags.hpp'],['../flags_8hpp.html#a242473077549869f06534e8a9ea1ddd6',1,'DEFINE_string(write_heatmaps_format,"png","File extension and format for `write_heatmaps`, analogous to `write_images_format`."" For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"" floating values."): flags.hpp'],['../flags_8hpp.html#ad8c5173beb83c0f9996362b3a3fba820',1,'DEFINE_string(write_keypoint,"","(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"" with `write_keypoint_format`."): flags.hpp'],['../flags_8hpp.html#a5aaba99bdb163516d0297d2e09dd0c7d',1,'DEFINE_string(write_keypoint_format,"yml","(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"" yaml & yml. 
Json not available for OpenCV < 3.0, use `write_json` instead."): flags.hpp'],['../flags_8hpp.html#a6c5341914694863528d3d93b23b45f01',1,'DEFINE_string(write_video_adam,"","Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`"" controls FPS."): flags.hpp'],['../flags_8hpp.html#ad0c09dd7ede747d69d36dc86c7ffa11c',1,'DEFINE_string(write_bvh,"","Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`."): flags.hpp'],['../flags_8hpp.html#a63936bd2bc53a453ba74066289ab9d29',1,'DEFINE_string(udp_host,"","Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."): flags.hpp'],['../flags_8hpp.html#aa7b1c758c6d6e35c3227994a9ced0236',1,'DEFINE_string(udp_port,"8051","Experimental, not available yet. Port number for UDP communication."): flags.hpp']]], - ['define_5fuint64',['DEFINE_uint64',['../flags_8hpp.html#a1433eaf1c5eb42e406e76bc6f8e517c3',1,'DEFINE_uint64(frame_first, 0,"Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."): flags.hpp'],['../flags_8hpp.html#a22572531e5a4896c510639ac57cf522c',1,'DEFINE_uint64(frame_step, 1,"Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames"" 0, 5, 10, etc.."): flags.hpp'],['../flags_8hpp.html#ac8fef8bb0234286e74b75214a750d674',1,'DEFINE_uint64(frame_last,-1,"Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g. if set to"" 10, it will process 11 frames (0-10)."): flags.hpp']]], + ['define_5fbool',['DEFINE_bool',['../flags_8hpp.html#a035ba7d578f36103d096569f1984ab61',1,'DEFINE_bool(disable_multi_thread, false,"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"" for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with"" low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"" error."): flags.hpp'],['../flags_8hpp.html#a7fc3e871114ac35447499ffda035d51e',1,'DEFINE_bool(flir_camera, false,"Whether to use FLIR (Point-Grey) stereo camera."): flags.hpp'],['../flags_8hpp.html#a9d45877ef21b9042f1b307d23d404e40',1,'DEFINE_bool(frame_flip, false,"Flip/mirror each frame (e.g., for real time webcam demonstrations)."): flags.hpp'],['../flags_8hpp.html#a6a501f229de1ba19801b77fd0565527b',1,'DEFINE_bool(frames_repeat, false,"Repeat frames when finished."): flags.hpp'],['../flags_8hpp.html#a5e350d38962a702d8e0c658fbbb5cc64',1,'DEFINE_bool(process_real_time, false,"Enable to keep the original source frame rate (e.g., for video). If the processing time is"" too long, it will skip frames. If it is too fast, it will slow it down."): flags.hpp'],['../flags_8hpp.html#ad739178d4295e31edd2f1e468bd2e600',1,'DEFINE_bool(frame_keep_distortion, false,"If false (default), it will undistortionate the image based on the"" `camera_parameter_folder` camera parameters; if true, it will not undistortionate, i.e.,"" it will leave it as it is."): flags.hpp'],['../flags_8hpp.html#a5e8200fc5fd8bc1e80b94b029ea8e18d',1,'DEFINE_bool(body_disable, false,"Disable body keypoint detection. 
Option only possible for faster (but less accurate) face"" keypoint detection."): flags.hpp'],['../flags_8hpp.html#a669c6f4820af0275f40808b18147b98c',1,'DEFINE_bool(heatmaps_add_parts, false,"If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"" analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps."" If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential"" memory order: body parts + bkg + PAFs. It will follow the order on"" POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will"" considerably decrease. Not required for OpenPose, enable it only if you intend to"" explicitly use this information later."): flags.hpp'],['../flags_8hpp.html#af57ca90aafa15b707af299527b0aff6f',1,'DEFINE_bool(heatmaps_add_bkg, false,"Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to"" background."): flags.hpp'],['../flags_8hpp.html#ac881df85c5be736d05822ad98ac0496f',1,'DEFINE_bool(heatmaps_add_PAFs, false,"Same functionality as `add_heatmaps_parts`, but adding the PAFs."): flags.hpp'],['../flags_8hpp.html#aa941a6193b13f3d138437cf2a84e73ff',1,'DEFINE_bool(part_candidates, false,"Also enable `write_json` in order to save this information. If true, it will fill the"" op::Datum::poseCandidates array with the body part candidates. Candidates refer to all"" the detected body parts, before being assembled into people. Note that the number of"" candidates is equal or higher than the number of final body parts (i.e. after being"" assembled into people). The empty body parts are filled with 0s. Program speed will"" slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly"" use this information."): flags.hpp'],['../flags_8hpp.html#aea11a0489c2af278990d3ddff10960fd',1,'DEFINE_bool(face, false,"Enables face keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. Note that this will considerable slow down the performance and increse"" the required GPU memory. In addition, the greater number of people on the image, the"" slower OpenPose will be."): flags.hpp'],['../flags_8hpp.html#a4da8f1bc0afb58f2a757ab6d90c6042e',1,'DEFINE_bool(hand, false,"Enables hand keypoint detection. It will share some parameters from the body pose, e.g."" `model_folder`. Analogously to `--face`, it will also slow down the performance, increase"" the required GPU memory and its speed depends on the number of people."): flags.hpp'],['../flags_8hpp.html#a0a28dd6ee5503f070d08cc9e4a3aff05',1,'DEFINE_bool(hand_tracking, false,"Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"" is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it"" simply looks for hands in positions at which hands were located in previous frames, but"" it does not guarantee the same person ID among frames."): flags.hpp'],['../flags_8hpp.html#a4c800b6a91b02874d006da33ab8432cd',1,'DEFINE_bool(3d, false,"Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."" 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"" results. Note that it will only display 1 person. If multiple people is present, it will"" fail."): flags.hpp'],['../flags_8hpp.html#ac210b56a3c46ff06143a62b0bb725cc6',1,'DEFINE_bool(identification, false,"Experimental, not available yet. 
Whether to enable people identification across frames."): flags.hpp'],['../flags_8hpp.html#a1a9ffc302eac69c8ba024da3edf01a5f',1,'DEFINE_bool(disable_blending, false,"If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"" background, instead of being rendered into the original image. Related: `part_to_show`,"" `alpha_pose`, and `alpha_pose`."): flags.hpp'],['../flags_8hpp.html#a231bec7b2113f31cebb760c849ca2841',1,'DEFINE_bool(fullscreen, false,"Run in full-screen mode (press f during runtime to toggle)."): flags.hpp'],['../flags_8hpp.html#a0ee4f86d572ef52ec26dcbb37efb5e65',1,'DEFINE_bool(no_gui_verbose, false,"Do not write text on output images on GUI (e.g., number of current frame and people). It"" does not affect the pose rendering."): flags.hpp']]], + ['define_5fdouble',['DEFINE_double',['../flags_8hpp.html#a29f9621cd9361deec9512f8d8b53e6ba',1,'DEFINE_double(camera_fps, 30.0,"Frame rate for the webcam (also used when saving video). Set this value to the minimum"" value between the OpenPose displayed speed and the webcam real frame rate."): flags.hpp'],['../flags_8hpp.html#adf35e9261afec1673a42de6197f07f42',1,'DEFINE_double(scale_gap, 0.3,"Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."" If you want to change the initial scale, you actually want to multiply the"" `net_resolution` by your desired initial scale."): flags.hpp'],['../flags_8hpp.html#abf5c8a05ae8f86c8e4d8eedbceb84516',1,'DEFINE_double(hand_scale_range, 0.4,"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"" between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if"" scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."): flags.hpp'],['../flags_8hpp.html#afd21fc201e4b269bf0b07b8fce886173',1,'DEFINE_double(render_threshold, 0.05,"Only estimated keypoints whose score confidences are higher than this threshold will be"" rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;"" while small thresholds (~0.1) will also output guessed and occluded keypoints, but also"" more false positives (i.e. wrong detections)."): flags.hpp'],['../flags_8hpp.html#ad971ae1068651177e7a78ae68c022792',1,'DEFINE_double(alpha_pose, 0.6,"Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will"" hide it. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a62bfc56794bb8ceaedd0eb7bc32a0f22',1,'DEFINE_double(alpha_heatmap, 0.7,"Blending factor (range 0-1) between heatmap and original frame. 1 will only show the"" heatmap, 0 will only show the frame. 
Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a53eb4c15af968876f31ca0be54d251b3',1,'DEFINE_double(face_render_threshold, 0.4,"Analogous to `render_threshold`, but applied to the face keypoints."): flags.hpp'],['../flags_8hpp.html#af654373ad667b1683f30e350331ea709',1,'DEFINE_double(face_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to face."): flags.hpp'],['../flags_8hpp.html#a71043931875d1f5ec677fd69ae96c632',1,'DEFINE_double(face_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to face."): flags.hpp'],['../flags_8hpp.html#a6b12d681ace972ae8eede484505c50af',1,'DEFINE_double(hand_render_threshold, 0.2,"Analogous to `render_threshold`, but applied to the hand keypoints."): flags.hpp'],['../flags_8hpp.html#a890b2b8df8a57fe4e9baa465c6584ccf',1,'DEFINE_double(hand_alpha_pose, 0.6,"Analogous to `alpha_pose` but applied to hand."): flags.hpp'],['../flags_8hpp.html#ac17a7af1030d7f5bd69672fec961c083',1,'DEFINE_double(hand_alpha_heatmap, 0.7,"Analogous to `alpha_heatmap` but applied to hand."): flags.hpp']]], + ['define_5fint32',['DEFINE_int32',['../flags_8hpp.html#a73ee51843ead02ee6358fe39dcbeffde',1,'DEFINE_int32(logging_level, 3,"The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"" low priority messages and 4 for important ones."): flags.hpp'],['../flags_8hpp.html#a8e9de971b409cfe7fdded7f0d47c502d',1,'DEFINE_int32(profile_speed, 1000,"If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"" runtime statistics at this frame number."): flags.hpp'],['../flags_8hpp.html#a10efaeb1ea3a8478388cc7d0bfd4e59e',1,'DEFINE_int32(camera,-1,"The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative"" number (by default), to auto-detect and open the first available camera."): flags.hpp'],['../flags_8hpp.html#ab41c02abe3634f0db65123ecda964a31',1,'DEFINE_int32(flir_camera_index,-1,"Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir"" camera index to run, where 0 corresponds to the detected flir camera with the lowest"" serial number, and `n` to the `n`-th lowest serial number camera."): flags.hpp'],['../flags_8hpp.html#a6c9c8a5843dd8b93e009bf29dc31cde2',1,'DEFINE_int32(frame_rotate, 0,"Rotate each frame, 4 possible values: 0, 90, 180, 270."): flags.hpp'],['../flags_8hpp.html#a284252d3d255ad5e5c35815d720fd67a',1,'DEFINE_int32(num_gpu,-1,"The number of GPU devices to use. If negative, it will use all the available GPUs in your"" machine."): flags.hpp'],['../flags_8hpp.html#a8511765700f652000f2c1c2b1a5df9f9',1,'DEFINE_int32(num_gpu_start, 0,"GPU device start number."): flags.hpp'],['../flags_8hpp.html#a18fc454ffeef53c7c73d69c67d1a73fc',1,'DEFINE_int32(keypoint_scale, 0,"Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)"" coordinates that will be saved with the `write_json` & `write_keypoint` flags."" Select `0` to scale it to the original source resolution; `1`to scale it to the net output"" size (set with `net_resolution`); `2` to scale it to the final output size (set with"" `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left"" corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where"" (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. 
Non"" related with `scale_number` and `scale_gap`."): flags.hpp'],['../flags_8hpp.html#aa5a1826a500d7131fefb480ccd1713fb',1,'DEFINE_int32(number_people_max,-1,"This parameter will limit the maximum number of people detected, by keeping the people with"" top scores. The score is based in person area over the image, body part score, as well as"" joint score (between each pair of connected body parts). Useful if you know the exact"" number of people in the scene, so it can remove false positives (if all the people have"" been detected. However, it might also include false negatives by removing very small or"" highly occluded people. -1 will keep them all."): flags.hpp'],['../flags_8hpp.html#a1edea5f45026b353b6e6cc3b196767a0',1,'DEFINE_int32(scale_number, 1,"Number of scales to average."): flags.hpp'],['../flags_8hpp.html#aa80e5f9914dd35c852941282aa229b21',1,'DEFINE_int32(heatmaps_scale, 2,"Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer"" rounded [0,255]; and 3 for no scaling."): flags.hpp'],['../flags_8hpp.html#a311e487137e17445e6939abb7cc0cf8f',1,'DEFINE_int32(hand_scale_number, 1,"Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"" were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."): flags.hpp'],['../flags_8hpp.html#aa797dd033c6f3c4d2654e7000939d270',1,'DEFINE_int32(3d_min_views,-1,"Minimum number of views required to reconstruct each keypoint. By default (-1), it will"" require all the cameras to see the keypoint in order to reconstruct it."): flags.hpp'],['../flags_8hpp.html#a20c481950df0272f0b7b0cde67d8e72a',1,'DEFINE_int32(3d_views, 1,"Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per"" iteration, allowing tasks such as stereo camera processing (`--3d`). Note that"" `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the"" parameter folder as this number indicates."): flags.hpp'],['../flags_8hpp.html#afdfef14901f7b5e324d6983845f5ab50',1,'DEFINE_int32(tracking,-1,"Experimental, not available yet. Whether to enable people tracking across frames. The"" value indicates the number of frames where tracking is run between each OpenPose keypoint"" detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint"" detector and tracking for potentially higher accurary than only OpenPose."): flags.hpp'],['../flags_8hpp.html#a072bd893f4003b48bc7c99735eeeed39',1,'DEFINE_int32(ik_threads, 0,"Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D"" keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing"" the number of threads will increase the speed but also the global system latency."): flags.hpp'],['../flags_8hpp.html#a4cead735de5b43cfcae5c1139df3be1a',1,'DEFINE_int32(part_to_show, 0,"Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body"" part heat map, 19 for the background heat map, 20 for all the body part heat maps"" together, 21 for all the PAFs, 22-40 for each body part pair PAF."): flags.hpp'],['../flags_8hpp.html#a65152cde78f8e7da1d33f557ec392312',1,'DEFINE_int32(render_pose,-1,"Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"" (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if"" CPU_ONLY is enabled, or GPU if CUDA is enabled. 
If rendering is enabled, it will render"" both `outputData` and `cvOutputData` with the original image and desired body part to be"" shown (i.e. keypoints, heat maps or PAFs)."): flags.hpp'],['../flags_8hpp.html#ad0269da28dc2033e23b8ea84b7e793a2',1,'DEFINE_int32(face_render,-1,"Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#ada5b7fef5063818fd668359e9e0b0504',1,'DEFINE_int32(hand_render,-1,"Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same"" configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#a9a34e10f75069cf3283e535a77006775',1,'DEFINE_int32(display,-1,"Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server"" and/or to slightly speed up the processing if visual output is not required); 2 for 2-D"" display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."): flags.hpp'],['../flags_8hpp.html#a39756aa6d6911435a326e18541db970a',1,'DEFINE_int32(write_coco_json_variant, 0,"Currently, this option is experimental and only makes effect on car JSON generation. It"" selects the COCO variant for cocoJsonSaver."): flags.hpp']]], + ['define_5fstring',['DEFINE_string',['../flags_8hpp.html#a9e7b1394aea185360a1fe19cf4e20a89',1,'DEFINE_string(camera_resolution,"-1x-1","Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the"" default 1280x720 for `--camera`, or the maximum flir camera resolution available for"" `--flir_camera`"): flags.hpp'],['../flags_8hpp.html#aab02d4078b5f7999a582d9c5f4248676',1,'DEFINE_string(video,"","Use a video file instead of the camera. Use `examples/media/video.avi` for our default"" example video."): flags.hpp'],['../flags_8hpp.html#a7773e867133822f5601899975dc06adb',1,'DEFINE_string(image_dir,"","Process a directory of images. Use `examples/media/` for our default example folder with 20"" images. Read all standard formats (jpg, png, bmp, etc.)."): flags.hpp'],['../flags_8hpp.html#a5db17b8bc6df4fe40b556af7157dcbf7',1,'DEFINE_string(ip_camera,"","String with the IP camera URL. It supports protocols like RTSP and HTTP."): flags.hpp'],['../flags_8hpp.html#aff61246512375ff5941dc4110e127ca3',1,'DEFINE_string(camera_parameter_folder,"models/cameraParameters/flir/","String with the folder where the camera parameters are located."): flags.hpp'],['../flags_8hpp.html#a9ab689ebe20a261b20587af79123e79a',1,'DEFINE_string(model_folder,"models/","Folder path (absolute or relative) where the models (pose, face, ...) are located."): flags.hpp'],['../flags_8hpp.html#a85be61a31eaa438a7e9c7d2baf51da47',1,'DEFINE_string(output_resolution,"-1x-1","The image resolution (display and output). Use \"-1x-1\" to force the program to use the"" input image resolution."): flags.hpp'],['../flags_8hpp.html#a055e1e0a1618e96f156f32278b41d3e4',1,'DEFINE_string(model_pose,"BODY_25","Model to be used. E.g., `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), ""`MPI_4_layers` (15 keypoints, even faster but less accurate)."): flags.hpp'],['../flags_8hpp.html#a66d6d5c07371179c3702dbd1da9d9bd3',1,'DEFINE_string(net_resolution,"-1x368","Multiples of 16. If it is increased, the accuracy potentially increases. If it is"" decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"" closest aspect ratio possible to the images or videos to be processed. 
Using `-1` in"" any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"" input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"" e.g., full HD (1980x1080) and HD (1280x720) resolutions."): flags.hpp'],['../flags_8hpp.html#af5ee5f61f0d36a03bb8647408f5e236b',1,'DEFINE_string(face_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint"" detector. 320x320 usually works fine while giving a substantial speed up when multiple"" faces on the image."): flags.hpp'],['../flags_8hpp.html#afaf97bbf6a49576782d25147bc865bed',1,'DEFINE_string(hand_net_resolution,"368x368","Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint"" detector."): flags.hpp'],['../flags_8hpp.html#a7a3597e9216885470199ca1578eb7f69',1,'DEFINE_string(write_images,"","Directory to write rendered frames in `write_images_format` image format."): flags.hpp'],['../flags_8hpp.html#a6957bf419a0ca3478948a62a3ce5ecf0',1,'DEFINE_string(write_images_format,"png","File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV"" function cv::imwrite for all compatible extensions."): flags.hpp'],['../flags_8hpp.html#a3051d21eb51cc39eed5f781d8eaed960',1,'DEFINE_string(write_video,"","Full file path to write rendered frames in motion JPEG video format. It might fail if the"" final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"" `camera_fps` controls FPS."): flags.hpp'],['../flags_8hpp.html#a9aa48b2ab293842bc42b96df9e97c9b8',1,'DEFINE_string(write_json,"","Directory to write OpenPose output in JSON format. It includes body, hand, and face pose"" keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."): flags.hpp'],['../flags_8hpp.html#a26d1c7340fc87d4593dda754d54145a2',1,'DEFINE_string(write_coco_json,"","Full file path to write people pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#ac6c2099e630e05f867ee10b43f35dc65',1,'DEFINE_string(write_coco_foot_json,"","Full file path to write people foot pose data with JSON COCO validation format."): flags.hpp'],['../flags_8hpp.html#aa3fe7c4c07492e6553a6c2d25ebd76b4',1,'DEFINE_string(write_heatmaps,"","Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"" must be enabled."): flags.hpp'],['../flags_8hpp.html#a242473077549869f06534e8a9ea1ddd6',1,'DEFINE_string(write_heatmaps_format,"png","File extension and format for `write_heatmaps`, analogous to `write_images_format`."" For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"" floating values."): flags.hpp'],['../flags_8hpp.html#ad8c5173beb83c0f9996362b3a3fba820',1,'DEFINE_string(write_keypoint,"","(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"" with `write_keypoint_format`."): flags.hpp'],['../flags_8hpp.html#a5aaba99bdb163516d0297d2e09dd0c7d',1,'DEFINE_string(write_keypoint_format,"yml","(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"" yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."): flags.hpp'],['../flags_8hpp.html#ad7a48143249d608e2ad3a8413eb19c34',1,'DEFINE_string(write_video_adam,"","Experimental, not available yet. E.g., `~/Desktop/adamResult.avi`. 
Flag `camera_fps`"" controls FPS."): flags.hpp'],['../flags_8hpp.html#a72c09e09c94b67f88b53e1a94e7bfe0f',1,'DEFINE_string(write_bvh,"","Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`."): flags.hpp'],['../flags_8hpp.html#a63936bd2bc53a453ba74066289ab9d29',1,'DEFINE_string(udp_host,"","Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."): flags.hpp'],['../flags_8hpp.html#aa7b1c758c6d6e35c3227994a9ced0236',1,'DEFINE_string(udp_port,"8051","Experimental, not available yet. Port number for UDP communication."): flags.hpp']]], + ['define_5fuint64',['DEFINE_uint64',['../flags_8hpp.html#a1433eaf1c5eb42e406e76bc6f8e517c3',1,'DEFINE_uint64(frame_first, 0,"Start on desired frame number. Indexes are 0-based, i.e. the first frame has index 0."): flags.hpp'],['../flags_8hpp.html#a22572531e5a4896c510639ac57cf522c',1,'DEFINE_uint64(frame_step, 1,"Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames"" 0, 5, 10, etc.."): flags.hpp'],['../flags_8hpp.html#a158227abc2af90abaec523a784b40fa8',1,'DEFINE_uint64(frame_last,-1,"Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to"" 10, it will process 11 frames (0-10)."): flags.hpp']]], ['delete_5fcopy',['DELETE_COPY',['../classop_1_1_face_cpu_renderer.html#a233f2a83930d07e4d420b43c8a660f32',1,'op::FaceCpuRenderer::DELETE_COPY()'],['../classop_1_1_hand_cpu_renderer.html#a66a7d318b240c73687320bf092363409',1,'op::HandCpuRenderer::DELETE_COPY()'],['../classop_1_1_sub_thread_no_queue.html#a43504502c36461305d656fb87b914749',1,'op::SubThreadNoQueue::DELETE_COPY()']]], ['deleter',['Deleter',['../classcl_1_1detail_1_1_deleter.html#a899859732bb1c5f296f3e4f05ca6a39d',1,'cl::detail::Deleter']]], ['destroy',['destroy',['../classcl_1_1_s_v_m_allocator.html#a86498cce5cce2d8ac1c509862eb28977',1,'cl::SVMAllocator']]], diff --git a/html/search/variables_15.js b/html/search/variables_15.js index 9618a0fa..ca38e2a0 100644 --- a/html/search/variables_15.js +++ b/html/search/variables_15.js @@ -5,6 +5,7 @@ var searchData= ['writebvh',['writeBvh',['../structop_1_1_wrapper_struct_output.html#add74785fc0cb1fc7d2c6b5f88b622d53',1,'op::WrapperStructOutput']]], ['writecocofootjson',['writeCocoFootJson',['../structop_1_1_wrapper_struct_output.html#aaadbe4f8f765a751973ed1b405843c74',1,'op::WrapperStructOutput']]], ['writecocojson',['writeCocoJson',['../structop_1_1_wrapper_struct_output.html#a6efbd77da8ebcea3abc4109bb2f39c0b',1,'op::WrapperStructOutput']]], + ['writecocojsonvariant',['writeCocoJsonVariant',['../structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de',1,'op::WrapperStructOutput']]], ['writeheatmaps',['writeHeatMaps',['../structop_1_1_wrapper_struct_output.html#adca0d08aa43e2280ca06a8db0deb81bc',1,'op::WrapperStructOutput']]], ['writeheatmapsformat',['writeHeatMapsFormat',['../structop_1_1_wrapper_struct_output.html#a34d5796e02b8afaaaef99fc26e7f3056',1,'op::WrapperStructOutput']]], ['writeimages',['writeImages',['../structop_1_1_wrapper_struct_output.html#a5f56c15d410645442b5ac21c316b9c85',1,'op::WrapperStructOutput']]], diff --git a/html/structop_1_1_datum.html b/html/structop_1_1_datum.html index 6d24a7ad..8bd0ff17 100644 --- a/html/structop_1_1_datum.html +++ b/html/structop_1_1_datum.html @@ -709,7 +709,7 @@ Public Attributes
    -

    Rendered image in cv::Mat uchar format. It has been resized to the desired output resolution (e.g. resolution flag in the demo). If outputData is empty, cvOutputData will also be empty. Size: (output_height x output_width) x 3 channels

    +

    Rendered image in cv::Mat uchar format. It has been resized to the desired output resolution (e.g., resolution flag in the demo). If outputData is empty, cvOutputData will also be empty. Size: (output_height x output_width) x 3 channels

    @@ -902,7 +902,7 @@ Public Attributes
    -

    Name used when saving the data to disk (e.g. write_images or write_keypoint flags in the demo).

    +

    Name used when saving the data to disk (e.g., write_images or write_keypoint flags in the demo).

    @@ -941,7 +941,7 @@ Public Attributes
    -

    Rendered image in Array<float> format. It consists of a blending of the cvInputData and the pose/body part(s) heatmap/PAF(s). If rendering is disabled (e.g. no_render_pose flag in the demo), outputData will be empty. Size: 3 x output_net_height x output_net_width

    +

    Rendered image in Array<float> format. It consists of a blending of the cvInputData and the pose/body part(s) heatmap/PAF(s). If rendering is disabled (e.g., no_render_pose flag in the demo), outputData will be empty. Size: 3 x output_net_height x output_net_width

    @@ -993,7 +993,7 @@ Public Attributes
    -

    Body pose (x,y,score) locations for each person in the image. It has been resized to the desired output resolution (e.g. resolution flag in the demo). Size: #people x #body parts (e.g. 18 for COCO or 15 for MPI) x 3 ((x,y) coordinates + score)

    +

    Body pose (x,y,score) locations for each person in the image. It has been resized to the desired output resolution (e.g., resolution flag in the demo). Size: #people x #body parts (e.g., 18 for COCO or 15 for MPI) x 3 ((x,y) coordinates + score)

    @@ -1006,7 +1006,7 @@ Public Attributes
    -

    Body pose (x,y,z,score) locations for each person in the image. Size: #people x #body parts (e.g. 18 for COCO or 15 for MPI) x 4 ((x,y,z) coordinates + score)

    +

    Body pose (x,y,z,score) locations for each person in the image. Size: #people x #body parts (e.g., 18 for COCO or 15 for MPI) x 4 ((x,y,z) coordinates + score)

    @@ -1019,7 +1019,7 @@ Public Attributes
    -

    Body pose global confidence/score for each person in the image. It does not only consider the score of each body keypoint, but also the score of each PAF association. Optimized for COCO evaluation metric. It will highly penalyze people with missing body parts (e.g. cropped people on the borders of the image). If poseKeypoints is empty, poseScores will also be empty. Size: #people

    +

    Body pose global confidence/score for each person in the image. It considers not only the score of each body keypoint, but also the score of each PAF association. Optimized for the COCO evaluation metric. It will heavily penalize people with missing body parts (e.g., cropped people on the borders of the image). If poseKeypoints is empty, poseScores will also be empty. Size: #people

    diff --git a/html/structop_1_1_wrapper_struct_hand.html b/html/structop_1_1_wrapper_struct_hand.html index e9498439..f23ef3bc 100644 --- a/html/structop_1_1_wrapper_struct_hand.html +++ b/html/structop_1_1_wrapper_struct_hand.html @@ -302,7 +302,7 @@ Public Attributes
    -

    Total range between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.

    +

    Total range between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.

    diff --git a/html/structop_1_1_wrapper_struct_input.html b/html/structop_1_1_wrapper_struct_input.html index 1f838e01..6f2dc835 100644 --- a/html/structop_1_1_wrapper_struct_input.html +++ b/html/structop_1_1_wrapper_struct_input.html @@ -342,7 +342,7 @@ Public Attributes
    -

    Whether to re-open the producer if it reaches the end (e.g. video or image directory after the last frame).

    +

    Whether to re-open the producer if it reaches the end (e.g., video or image directory after the last frame).

    diff --git a/html/structop_1_1_wrapper_struct_output-members.html b/html/structop_1_1_wrapper_struct_output-members.html index e926e31a..85283722 100644 --- a/html/structop_1_1_wrapper_struct_output-members.html +++ b/html/structop_1_1_wrapper_struct_output-members.html @@ -117,20 +117,21 @@ $(document).ready(function(){initNavTree('structop_1_1_wrapper_struct_output.htm guiVerboseop::WrapperStructOutput udpHostop::WrapperStructOutput udpPortop::WrapperStructOutput - WrapperStructOutput(const DisplayMode displayMode=DisplayMode::NoDisplay, const bool guiVerbose=false, const bool fullScreen=false, const std::string &writeKeypoint="", const DataFormat writeKeypointFormat=DataFormat::Xml, const std::string &writeJson="", const std::string &writeCocoJson="", const std::string &writeCocoFootJson="", const std::string &writeImages="", const std::string &writeImagesFormat="", const std::string &writeVideo="", const double writeVideoFps=30., const std::string &writeHeatMaps="", const std::string &writeHeatMapsFormat="", const std::string &writeVideoAdam="", const std::string &writeBvh="", const std::string &udpHost="", const std::string &udpPort="")op::WrapperStructOutput + WrapperStructOutput(const DisplayMode displayMode=DisplayMode::NoDisplay, const bool guiVerbose=false, const bool fullScreen=false, const std::string &writeKeypoint="", const DataFormat writeKeypointFormat=DataFormat::Xml, const std::string &writeJson="", const std::string &writeCocoJson="", const std::string &writeCocoFootJson="", const int writeCocoJsonVariant=1, const std::string &writeImages="", const std::string &writeImagesFormat="", const std::string &writeVideo="", const double writeVideoFps=30., const std::string &writeHeatMaps="", const std::string &writeHeatMapsFormat="", const std::string &writeVideoAdam="", const std::string &writeBvh="", const std::string &udpHost="", const std::string &udpPort="")op::WrapperStructOutput writeBvhop::WrapperStructOutput writeCocoFootJsonop::WrapperStructOutput writeCocoJsonop::WrapperStructOutput - writeHeatMapsop::WrapperStructOutput - writeHeatMapsFormatop::WrapperStructOutput - writeImagesop::WrapperStructOutput - writeImagesFormatop::WrapperStructOutput - writeJsonop::WrapperStructOutput - writeKeypointop::WrapperStructOutput - writeKeypointFormatop::WrapperStructOutput - writeVideoop::WrapperStructOutput - writeVideoAdamop::WrapperStructOutput - writeVideoFpsop::WrapperStructOutput + writeCocoJsonVariantop::WrapperStructOutput + writeHeatMapsop::WrapperStructOutput + writeHeatMapsFormatop::WrapperStructOutput + writeImagesop::WrapperStructOutput + writeImagesFormatop::WrapperStructOutput + writeJsonop::WrapperStructOutput + writeKeypointop::WrapperStructOutput + writeKeypointFormatop::WrapperStructOutput + writeVideoop::WrapperStructOutput + writeVideoAdamop::WrapperStructOutput + writeVideoFpsop::WrapperStructOutput diff --git a/html/structop_1_1_wrapper_struct_output.html b/html/structop_1_1_wrapper_struct_output.html index b3075b95..6e79d25c 100644 --- a/html/structop_1_1_wrapper_struct_output.html +++ b/html/structop_1_1_wrapper_struct_output.html @@ -118,8 +118,8 @@ $(document).ready(function(){initNavTree('structop_1_1_wrapper_struct_output.htm - - + +

    Public Member Functions

     WrapperStructOutput (const DisplayMode displayMode=DisplayMode::NoDisplay, const bool guiVerbose=false, const bool fullScreen=false, const std::string &writeKeypoint="", const DataFormat writeKeypointFormat=DataFormat::Xml, const std::string &writeJson="", const std::string &writeCocoJson="", const std::string &writeCocoFootJson="", const std::string &writeImages="", const std::string &writeImagesFormat="", const std::string &writeVideo="", const double writeVideoFps=30., const std::string &writeHeatMaps="", const std::string &writeHeatMapsFormat="", const std::string &writeVideoAdam="", const std::string &writeBvh="", const std::string &udpHost="", const std::string &udpPort="")
     
     WrapperStructOutput (const DisplayMode displayMode=DisplayMode::NoDisplay, const bool guiVerbose=false, const bool fullScreen=false, const std::string &writeKeypoint="", const DataFormat writeKeypointFormat=DataFormat::Xml, const std::string &writeJson="", const std::string &writeCocoJson="", const std::string &writeCocoFootJson="", const int writeCocoJsonVariant=1, const std::string &writeImages="", const std::string &writeImagesFormat="", const std::string &writeVideo="", const double writeVideoFps=30., const std::string &writeHeatMaps="", const std::string &writeHeatMapsFormat="", const std::string &writeVideoAdam="", const std::string &writeBvh="", const std::string &udpHost="", const std::string &udpPort="")
     
    @@ -139,6 +139,8 @@ Public Attributes + + @@ -163,7 +165,7 @@ Public Attributes

    Detailed Description

    WrapperStructOutput: Output (small GUI, writing rendered results and/or pose data, etc.) configuration struct. WrapperStructOutput allows the user to set up the desired output options (GUI display and result/data writing).

    Constructor & Destructor Documentation

    - +

    Public Attributes

     
    std::string writeCocoFootJson
     
    int writeCocoJsonVariant
     
    std::string writeImages
     
    std::string writeImagesFormat
    @@ -215,6 +217,12 @@ Public Attributes + + + + + + @@ -389,6 +397,19 @@ Public Attributes

    Pose (x, y, score) locations saving folder location in JSON COCO validation format. If it is empty (default), it is disabled.

    +
    + + +
    +
    +
    const std::string &  writeCocoFootJson = "",
    const int writeCocoJsonVariant = 1,
    + + + +
    int op::WrapperStructOutput::writeCocoJsonVariant
    +
    +

    Experimental option (it only takes effect on car JSON generation). It selects the COCO variant for cocoJsonSaver.

    +
    @@ -439,7 +460,7 @@ Public Attributes
    -

    Rendered image saving folder format. Check your OpenCV version documentation for a list of compatible formats. E.g. png, jpg, etc. If writeImages is empty (default), it makes no effect.

    +

    Rendered image saving folder format. Check your OpenCV version documentation for a list of compatible formats. E.g., png, jpg, etc. If writeImages is empty (default), it has no effect.

    diff --git a/html/structop_1_1_wrapper_struct_output.js b/html/structop_1_1_wrapper_struct_output.js index eafd6ac5..39424331 100644 --- a/html/structop_1_1_wrapper_struct_output.js +++ b/html/structop_1_1_wrapper_struct_output.js @@ -1,6 +1,6 @@ var structop_1_1_wrapper_struct_output = [ - [ "WrapperStructOutput", "structop_1_1_wrapper_struct_output.html#a46207e61b143191a6cb79d397af43989", null ], + [ "WrapperStructOutput", "structop_1_1_wrapper_struct_output.html#ae6af809e1b560ca018861ee8221bad21", null ], [ "displayMode", "structop_1_1_wrapper_struct_output.html#a4e18a93cfa9c6b47151427152a745817", null ], [ "fullScreen", "structop_1_1_wrapper_struct_output.html#aaf5df884418d2bf1df36505009264ece", null ], [ "guiVerbose", "structop_1_1_wrapper_struct_output.html#ac72ad601daf957b9b000206e9f1fe0ad", null ], @@ -9,6 +9,7 @@ var structop_1_1_wrapper_struct_output = [ "writeBvh", "structop_1_1_wrapper_struct_output.html#add74785fc0cb1fc7d2c6b5f88b622d53", null ], [ "writeCocoFootJson", "structop_1_1_wrapper_struct_output.html#aaadbe4f8f765a751973ed1b405843c74", null ], [ "writeCocoJson", "structop_1_1_wrapper_struct_output.html#a6efbd77da8ebcea3abc4109bb2f39c0b", null ], + [ "writeCocoJsonVariant", "structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de", null ], [ "writeHeatMaps", "structop_1_1_wrapper_struct_output.html#adca0d08aa43e2280ca06a8db0deb81bc", null ], [ "writeHeatMapsFormat", "structop_1_1_wrapper_struct_output.html#a34d5796e02b8afaaaef99fc26e7f3056", null ], [ "writeImages", "structop_1_1_wrapper_struct_output.html#a5f56c15d410645442b5ac21c316b9c85", null ], diff --git a/html/worker_8hpp_source.html b/html/worker_8hpp_source.html index ea306089..168a0f23 100644 --- a/html/worker_8hpp_source.html +++ b/html/worker_8hpp_source.html @@ -136,62 +136,63 @@ $(document).ready(function(){initNavTree('worker_8hpp_source.html','');});
    27  mIsRunning = false;
    28  }
    29 
    -
    30  // Virtual in case some function needs spetial stopping (e.g. buffers might not stop inmediately and need a few iterations)
    -
    31  inline virtual void tryStop()
    -
    32  {
    -
    33  stop();
    -
    34  }
    -
    35 
    -
    36  protected:
    -
    37  virtual void work(TDatums& tDatums) = 0;
    -
    38 
    -
    39  private:
    -
    40  bool mIsRunning;
    -
    41 
    -
    42  DELETE_COPY(Worker);
    -
    43  };
    -
    44 }
    -
    45 
    +
    30  // Virtual in case some function needs special stopping (e.g., buffers might not stop immediately and need a
    +
    31  // few iterations)
    +
    32  inline virtual void tryStop()
    +
    33  {
    +
    34  stop();
    +
    35  }
    +
    36 
    +
    37  protected:
    +
    38  virtual void work(TDatums& tDatums) = 0;
    +
    39 
    +
    40  private:
    +
    41  bool mIsRunning;
    +
    42 
    +
    43  DELETE_COPY(Worker);
    +
    44  };
    +
    45 }
    46 
    47 
    48 
    49 
    -
    50 // Implementation
    -
    51 namespace op
    -
    52 {
    -
    53  template<typename TDatums>
    - -
    55  mIsRunning{true}
    -
    56  {
    -
    57  }
    -
    58 
    -
    59  template<typename TDatums>
    - -
    61  {
    -
    62  }
    -
    63 
    -
    64  template<typename TDatums>
    -
    65  bool Worker<TDatums>::checkAndWork(TDatums& tDatums)
    -
    66  {
    -
    67  if (mIsRunning)
    -
    68  work(tDatums);
    -
    69  return mIsRunning;
    -
    70  }
    -
    71 
    - -
    73 }
    -
    74 
    -
    75 #endif // OPENPOSE_THREAD_WORKER_HPP
    +
    50 
    +
    51 // Implementation
    +
    52 namespace op
    +
    53 {
    +
    54  template<typename TDatums>
    + +
    56  mIsRunning{true}
    +
    57  {
    +
    58  }
    +
    59 
    +
    60  template<typename TDatums>
    + +
    62  {
    +
    63  }
    +
    64 
    +
    65  template<typename TDatums>
    +
    66  bool Worker<TDatums>::checkAndWork(TDatums& tDatums)
    +
    67  {
    +
    68  if (mIsRunning)
    +
    69  work(tDatums);
    +
    70  return mIsRunning;
    +
    71  }
    +
    72 
    + +
    74 }
    +
    75 
    +
    76 #endif // OPENPOSE_THREAD_WORKER_HPP
    Definition: worker.hpp:9
    -
    virtual ~Worker()
    Definition: worker.hpp:60
    +
    virtual ~Worker()
    Definition: worker.hpp:61
    -
    Worker()
    Definition: worker.hpp:54
    -
    virtual void tryStop()
    Definition: worker.hpp:31
    +
    Worker()
    Definition: worker.hpp:55
    +
    virtual void tryStop()
    Definition: worker.hpp:32
    bool isRunning() const
    Definition: worker.hpp:20
    virtual void initializationOnThread()=0
    virtual void work(TDatums &tDatums)=0
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    -
    bool checkAndWork(TDatums &tDatums)
    Definition: worker.hpp:65
    +
    bool checkAndWork(TDatums &tDatums)
    Definition: worker.hpp:66
    void stop()
    Definition: worker.hpp:25
    diff --git a/html/wrapper_auxiliary_8hpp_source.html b/html/wrapper_auxiliary_8hpp_source.html index 2e21e00e..c8cae745 100644 --- a/html/wrapper_auxiliary_8hpp_source.html +++ b/html/wrapper_auxiliary_8hpp_source.html @@ -609,7 +609,7 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    527  poseTriangulation)};
    528  }
    529  }
    -
    530  // Itermediate workers (e.g. OpenPose format to cv::Mat, json & frames recorder, ...)
    +
    530  // Intermediate workers (e.g., OpenPose format to cv::Mat, json & frames recorder, ...)
    531  postProcessingWs.clear();
    532  // // Person ID identification (when no multi-thread and no dependency on tracking)
    533  // if (wrapperStructExtra.identification)
    @@ -699,374 +699,376 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    617  wrapperStructOutput.writeCocoJson, humanFormat,
    618  (wrapperStructPose.poseModel != PoseModel::CAR_22
    619  && wrapperStructPose.poseModel != PoseModel::CAR_12
    - -
    621  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
    -
    622  }
    -
    623  // Write people foot pose data on disk (COCO validation json format for foot data)
    -
    624  if (!wrapperStructOutput.writeCocoFootJson.empty())
    -
    625  {
    -
    626  // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it
    -
    627  const auto humanFormat = true;
    -
    628  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(wrapperStructOutput.writeCocoFootJson,
    -
    629  humanFormat, CocoJsonFormat::Foot);
    -
    630  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
    -
    631  }
    -
    632  // Write frames as desired image format on hard disk
    -
    633  if (!writeImagesCleaned.empty())
    -
    634  {
    -
    635  const auto imageSaver = std::make_shared<ImageSaver>(writeImagesCleaned,
    -
    636  wrapperStructOutput.writeImagesFormat);
    -
    637  outputWs.emplace_back(std::make_shared<WImageSaver<TDatumsSP>>(imageSaver));
    -
    638  }
    -
    639  // Write frames as *.avi video on hard disk
    -
    640  const auto producerFps = (producerSharedPtr == nullptr ?
    -
    641  0. : producerSharedPtr->get(CV_CAP_PROP_FPS));
    -
    642  const auto originalVideoFps = (wrapperStructOutput.writeVideoFps > 0 ?
    -
    643  wrapperStructOutput.writeVideoFps
    -
    644  : producerFps);
    -
    645  if (!wrapperStructOutput.writeVideo.empty())
    -
    646  {
    -
    647  if (!oPProducer)
    -
    648  error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer"
    -
    649  " is one of the default ones (e.g. video, webcam, ...).",
    -
    650  __LINE__, __FUNCTION__, __FILE__);
    -
    651  if (finalOutputSize.x <= 0 || finalOutputSize.y <= 0)
    -
    652  error("Video can only be recorded if outputSize is fixed (e.g. video, webcam, IP camera),"
    -
    653  "but not for a image directory.", __LINE__, __FUNCTION__, __FILE__);
    -
    654  const auto videoSaver = std::make_shared<VideoSaver>(
    -
    655  wrapperStructOutput.writeVideo, CV_FOURCC('M','J','P','G'), originalVideoFps, finalOutputSize
    -
    656  );
    -
    657  outputWs.emplace_back(std::make_shared<WVideoSaver<TDatumsSP>>(videoSaver));
    -
    658  }
    -
    659  // Write joint angles as *.bvh file on hard disk
    -
    660 #ifdef USE_3D_ADAM_MODEL
    -
    661  if (!wrapperStructOutput.writeBvh.empty())
    -
    662  {
    -
    663  const auto bvhSaver = std::make_shared<BvhSaver>(
    -
    664  wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps
    -
    665  );
    -
    666  outputWs.emplace_back(std::make_shared<WBvhSaver<TDatumsSP>>(bvhSaver));
    -
    667  }
    -
    668 #endif
    -
    669  // Write heat maps as desired image format on hard disk
    -
    670  if (!writeHeatMapsCleaned.empty())
    -
    671  {
    -
    672  const auto heatMapSaver = std::make_shared<HeatMapSaver>(writeHeatMapsCleaned,
    -
    673  wrapperStructOutput.writeHeatMapsFormat);
    -
    674  outputWs.emplace_back(std::make_shared<WHeatMapSaver<TDatumsSP>>(heatMapSaver));
    -
    675  }
    -
    676  // Add frame information for GUI
    -
    677  const bool guiEnabled = (wrapperStructOutput.displayMode != DisplayMode::NoDisplay);
    -
    678  // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting
    -
    679  // recorded frames will look exactly as the final displayed image by the GUI
    -
    680  if (wrapperStructOutput.guiVerbose && (guiEnabled || !userOutputWs.empty()
    -
    681  || threadManagerMode == ThreadManagerMode::Asynchronous
    -
    682  || threadManagerMode == ThreadManagerMode::AsynchronousOut))
    -
    683  {
    -
    684  const auto guiInfoAdder = std::make_shared<GuiInfoAdder>(numberThreads, guiEnabled);
    -
    685  outputWs.emplace_back(std::make_shared<WGuiInfoAdder<TDatumsSP>>(guiInfoAdder));
    -
    686  }
    -
    687  // Minimal graphical user interface (GUI)
    -
    688  guiW = nullptr;
    -
    689  if (guiEnabled)
    -
    690  {
    -
    691  // PoseRenderers to Renderers
    -
    692  std::vector<std::shared_ptr<Renderer>> renderers;
    -
    693  if (wrapperStructPose.renderMode == RenderMode::Cpu)
    -
    694  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseCpuRenderer));
    -
    695  else
    -
    696  for (const auto& poseGpuRenderer : poseGpuRenderers)
    -
    697  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseGpuRenderer));
    -
    698  // Display
    -
    699  // Adam (+3-D/2-D) display
    -
    700  if (displayAdam)
    -
    701  {
    -
    702 #ifdef USE_3D_ADAM_MODEL
    -
    703  // Gui
    -
    704  const auto gui = std::make_shared<GuiAdam>(
    -
    705  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
    -
    706  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
    -
    707  wrapperStructOutput.displayMode, JointAngleEstimation::getTotalModel(),
    -
    708  wrapperStructOutput.writeVideoAdam
    -
    709  );
    -
    710  // WGui
    -
    711  guiW = {std::make_shared<WGuiAdam<TDatumsSP>>(gui)};
    -
    712 #endif
    -
    713  }
    -
    714  // 3-D (+2-D) display
    -
    715  else if (wrapperStructOutput.displayMode == DisplayMode::Display3D
    -
    716  || wrapperStructOutput.displayMode == DisplayMode::DisplayAll)
    -
    717  {
    -
    718  // Gui
    -
    719  const auto gui = std::make_shared<Gui3D>(
    -
    720  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
    -
    721  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
    -
    722  wrapperStructPose.poseModel, wrapperStructOutput.displayMode
    -
    723  );
    -
    724  // WGui
    -
    725  guiW = {std::make_shared<WGui3D<TDatumsSP>>(gui)};
    -
    726  }
    -
    727  // 2-D display
    -
    728  else if (wrapperStructOutput.displayMode == DisplayMode::Display2D)
    -
    729  {
    -
    730  // Gui
    -
    731  const auto gui = std::make_shared<Gui>(
    -
    732  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
    -
    733  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers
    -
    734  );
    -
    735  // WGui
    -
    736  guiW = {std::make_shared<WGui<TDatumsSP>>(gui)};
    -
    737  }
    -
    738  else
    -
    739  error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__);
    -
    740  }
    -
    741  // Set wrapper as configured
    -
    742  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    743 
    + +
    621  wrapperStructOutput.writeCocoJsonVariant);
    +
    622  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
    +
    623  }
    +
    624  // Write people foot pose data on disk (COCO validation json format for foot data)
    +
    625  if (!wrapperStructOutput.writeCocoFootJson.empty())
    +
    626  {
    +
627  // If humanFormat: bigger size (& maybe slower to process), but easier for the user to read
    +
    628  const auto humanFormat = true;
    +
    629  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(wrapperStructOutput.writeCocoFootJson,
    +
    630  humanFormat, CocoJsonFormat::Foot);
    +
    631  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
    +
    632  }
    +
    633  // Write frames as desired image format on hard disk
    +
    634  if (!writeImagesCleaned.empty())
    +
    635  {
    +
    636  const auto imageSaver = std::make_shared<ImageSaver>(writeImagesCleaned,
    +
    637  wrapperStructOutput.writeImagesFormat);
    +
    638  outputWs.emplace_back(std::make_shared<WImageSaver<TDatumsSP>>(imageSaver));
    +
    639  }
    +
    640  // Write frames as *.avi video on hard disk
    +
    641  const auto producerFps = (producerSharedPtr == nullptr ?
    +
    642  0. : producerSharedPtr->get(CV_CAP_PROP_FPS));
    +
    643  const auto originalVideoFps = (wrapperStructOutput.writeVideoFps > 0 ?
    +
    644  wrapperStructOutput.writeVideoFps
    +
    645  : producerFps);
    +
    646  if (!wrapperStructOutput.writeVideo.empty())
    +
    647  {
    +
    648  if (!oPProducer)
    +
    649  error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer"
    +
    650  " is one of the default ones (e.g., video, webcam, ...).",
    +
    651  __LINE__, __FUNCTION__, __FILE__);
    +
    652  if (finalOutputSize.x <= 0 || finalOutputSize.y <= 0)
    +
    653  error("Video can only be recorded if outputSize is fixed (e.g., video, webcam, IP camera),"
    +
    654  "but not for a image directory.", __LINE__, __FUNCTION__, __FILE__);
    +
    655  const auto videoSaver = std::make_shared<VideoSaver>(
    +
    656  wrapperStructOutput.writeVideo, CV_FOURCC('M','J','P','G'), originalVideoFps, finalOutputSize
    +
    657  );
    +
    658  outputWs.emplace_back(std::make_shared<WVideoSaver<TDatumsSP>>(videoSaver));
    +
    659  }
    +
    660  // Write joint angles as *.bvh file on hard disk
    +
    661 #ifdef USE_3D_ADAM_MODEL
    +
    662  if (!wrapperStructOutput.writeBvh.empty())
    +
    663  {
    +
    664  const auto bvhSaver = std::make_shared<BvhSaver>(
    +
    665  wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps
    +
    666  );
    +
    667  outputWs.emplace_back(std::make_shared<WBvhSaver<TDatumsSP>>(bvhSaver));
    +
    668  }
    +
    669 #endif
    +
    670  // Write heat maps as desired image format on hard disk
    +
    671  if (!writeHeatMapsCleaned.empty())
    +
    672  {
    +
    673  const auto heatMapSaver = std::make_shared<HeatMapSaver>(writeHeatMapsCleaned,
    +
    674  wrapperStructOutput.writeHeatMapsFormat);
    +
    675  outputWs.emplace_back(std::make_shared<WHeatMapSaver<TDatumsSP>>(heatMapSaver));
    +
    676  }
    +
    677  // Add frame information for GUI
    +
    678  const bool guiEnabled = (wrapperStructOutput.displayMode != DisplayMode::NoDisplay);
    +
    679  // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting
    +
680  // recorded frames will look exactly like the final image displayed by the GUI
    +
    681  if (wrapperStructOutput.guiVerbose && (guiEnabled || !userOutputWs.empty()
    +
    682  || threadManagerMode == ThreadManagerMode::Asynchronous
    +
    683  || threadManagerMode == ThreadManagerMode::AsynchronousOut))
    +
    684  {
    +
    685  const auto guiInfoAdder = std::make_shared<GuiInfoAdder>(numberThreads, guiEnabled);
    +
    686  outputWs.emplace_back(std::make_shared<WGuiInfoAdder<TDatumsSP>>(guiInfoAdder));
    +
    687  }
    +
    688  // Minimal graphical user interface (GUI)
    +
    689  guiW = nullptr;
    +
    690  if (guiEnabled)
    +
    691  {
    +
    692  // PoseRenderers to Renderers
    +
    693  std::vector<std::shared_ptr<Renderer>> renderers;
    +
    694  if (wrapperStructPose.renderMode == RenderMode::Cpu)
    +
    695  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseCpuRenderer));
    +
    696  else
    +
    697  for (const auto& poseGpuRenderer : poseGpuRenderers)
    +
    698  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseGpuRenderer));
    +
    699  // Display
    +
    700  // Adam (+3-D/2-D) display
    +
    701  if (displayAdam)
    +
    702  {
    +
    703 #ifdef USE_3D_ADAM_MODEL
    +
    704  // Gui
    +
    705  const auto gui = std::make_shared<GuiAdam>(
    +
    706  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
    +
    707  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
    +
    708  wrapperStructOutput.displayMode, JointAngleEstimation::getTotalModel(),
    +
    709  wrapperStructOutput.writeVideoAdam
    +
    710  );
    +
    711  // WGui
    +
    712  guiW = {std::make_shared<WGuiAdam<TDatumsSP>>(gui)};
    +
    713 #endif
    +
    714  }
    +
    715  // 3-D (+2-D) display
    +
    716  else if (wrapperStructOutput.displayMode == DisplayMode::Display3D
    +
    717  || wrapperStructOutput.displayMode == DisplayMode::DisplayAll)
    +
    718  {
    +
    719  // Gui
    +
    720  const auto gui = std::make_shared<Gui3D>(
    +
    721  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
    +
    722  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
    +
    723  wrapperStructPose.poseModel, wrapperStructOutput.displayMode
    +
    724  );
    +
    725  // WGui
    +
    726  guiW = {std::make_shared<WGui3D<TDatumsSP>>(gui)};
    +
    727  }
    +
    728  // 2-D display
    +
    729  else if (wrapperStructOutput.displayMode == DisplayMode::Display2D)
    +
    730  {
    +
    731  // Gui
    +
    732  const auto gui = std::make_shared<Gui>(
    +
    733  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
    +
    734  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers
    +
    735  );
    +
    736  // WGui
    +
    737  guiW = {std::make_shared<WGui<TDatumsSP>>(gui)};
    +
    738  }
    +
    739  else
    +
    740  error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    741  }
    +
    742  // Set wrapper as configured
    +
    743  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    744 
    745 
    746 
    747 
    -
    748  // The less number of queues -> the less threads opened, and potentially the less lag
    -
    749 
    -
    750  // Sanity checks
    -
    751  if ((datumProducerW == nullptr) == (userInputWs.empty())
    -
    752  && threadManagerMode != ThreadManagerMode::Asynchronous
    -
    753  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
    -
    754  {
    -
    755  const auto message = "You need to have 1 and only 1 producer selected. You can introduce your own"
    -
    756  " producer by using setWorker(WorkerType::Input, ...) or use the OpenPose"
    -
    757  " default producer by configuring it in the configure function) or use the"
    -
    758  " ThreadManagerMode::Asynchronous(In) mode.";
    -
    759  error(message, __LINE__, __FUNCTION__, __FILE__);
    -
    760  }
    -
    761  if (outputWs.empty() && userOutputWs.empty() && guiW == nullptr
    -
    762  && threadManagerMode != ThreadManagerMode::Asynchronous
    -
    763  && threadManagerMode != ThreadManagerMode::AsynchronousOut)
    -
    764  {
    -
    765  error("No output selected.", __LINE__, __FUNCTION__, __FILE__);
    -
    766  }
    -
    767 
    -
    768  // Thread Manager
    -
    769  // Clean previous thread manager (avoid configure to crash the program if used more than once)
    -
    770  threadManager.reset();
    -
    771  unsigned long long threadId = 0ull;
    -
    772  auto queueIn = 0ull;
    -
    773  auto queueOut = 1ull;
    -
    774  // After producer
    -
    775  // ID generator (before any multi-threading or any function that requires the ID)
    -
    776  const auto wIdGenerator = std::make_shared<WIdGenerator<TDatumsSP>>();
    -
    777  std::vector<TWorker> workersAux{wIdGenerator};
    -
    778  // Scale & cv::Mat to OP format
    -
    779  if (scaleAndSizeExtractorW != nullptr)
    -
    780  workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW});
    -
    781  if (cvMatToOpInputW != nullptr)
    -
    782  workersAux = mergeVectors(workersAux, {cvMatToOpInputW});
    -
    783  // cv::Mat to output format
    -
    784  if (cvMatToOpOutputW != nullptr)
    -
    785  workersAux = mergeVectors(workersAux, {cvMatToOpOutputW});
    -
    786 
    -
    787  // Producer
    -
    788  // If custom user Worker and uses its own thread
    -
    789  if (!userInputWs.empty() && userInputWsOnNewThread)
    -
    790  {
    -
    791  // Thread 0, queues 0 -> 1
    -
    792  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    793  threadManager.add(threadId, userInputWs, queueIn++, queueOut++);
    -
    794  threadIdPP(threadId, multiThreadEnabled);
    -
    795  }
    -
    796  // If custom user Worker in same thread
    -
    797  else if (!userInputWs.empty())
    -
    798  workersAux = mergeVectors(userInputWs, workersAux);
    -
    799  // If OpenPose producer (same thread)
    -
    800  else if (datumProducerW != nullptr)
    -
    801  workersAux = mergeVectors({datumProducerW}, workersAux);
    -
    802  // Otherwise
    -
    803  else if (threadManagerMode != ThreadManagerMode::Asynchronous
    -
    804  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
    -
    805  error("No input selected.", __LINE__, __FUNCTION__, __FILE__);
    -
    806  // Thread 0 or 1, queues 0 -> 1
    -
    807  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    808  threadManager.add(threadId, workersAux, queueIn++, queueOut++);
    -
    809  // Increase thread
    -
    810  threadIdPP(threadId, multiThreadEnabled);
    -
    811 
    -
    812  // Pose estimation & rendering
    -
    813  // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs
    -
    814  if (!poseExtractorsWs.empty())
    -
    815  {
    -
    816  if (multiThreadEnabled)
    -
    817  {
    -
    818  for (auto& wPose : poseExtractorsWs)
    -
    819  {
    -
    820  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    821  threadManager.add(threadId, wPose, queueIn, queueOut);
    -
    822  threadIdPP(threadId, multiThreadEnabled);
    -
    823  }
    -
    824  queueIn++;
    -
    825  queueOut++;
    -
    826  // Sort frames - Required own thread
    -
    827  if (poseExtractorsWs.size() > 1u)
    -
    828  {
    -
    829  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    -
    830  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    831  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    -
    832  threadIdPP(threadId, multiThreadEnabled);
    -
    833  }
    -
    834  }
    -
    835  else
    -
    836  {
    -
    837  if (poseExtractorsWs.size() > 1)
    -
    838  log("Multi-threading disabled, only 1 thread running. All GPUs have been disabled but the"
    -
    839  " first one, which is defined by gpuNumberStart (e.g. in the OpenPose demo, it is set"
    -
    840  " with the `--num_gpu_start` flag).", Priority::High);
    -
    841  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    842  threadManager.add(threadId, poseExtractorsWs.at(0), queueIn++, queueOut++);
    -
    843  }
    -
    844  }
    -
    845  // Assemble all frames from same time instant (3-D module)
    -
    846  const auto wQueueAssembler = std::make_shared<WQueueAssembler<TDatumsSP, TDatums>>();
    -
    847  // 3-D reconstruction
    -
    848  if (!poseTriangulationsWs.empty())
    -
    849  {
    -
    850  // Assemble frames
    -
    851  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    852  threadManager.add(threadId, wQueueAssembler, queueIn++, queueOut++);
    -
    853  threadIdPP(threadId, multiThreadEnabled);
    -
    854  // 3-D reconstruction
    -
    855  if (multiThreadEnabled)
    -
    856  {
    -
    857  for (auto& wPoseTriangulations : poseTriangulationsWs)
    -
    858  {
    -
    859  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    860  threadManager.add(threadId, wPoseTriangulations, queueIn, queueOut);
    -
    861  threadIdPP(threadId, multiThreadEnabled);
    -
    862  }
    -
    863  queueIn++;
    -
    864  queueOut++;
    -
    865  // Sort frames
    -
    866  if (poseTriangulationsWs.size() > 1u)
    -
    867  {
    -
    868  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    -
    869  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    870  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    -
    871  threadIdPP(threadId, multiThreadEnabled);
    -
    872  }
    -
    873  }
    -
    874  else
    -
    875  {
    -
    876  if (poseTriangulationsWs.size() > 1)
    -
    877  log("Multi-threading disabled, only 1 thread running for 3-D triangulation.",
-
878  Priority::High);
-
    879  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    880  threadManager.add(threadId, poseTriangulationsWs.at(0), queueIn++, queueOut++);
    -
    881  }
    -
    882  }
    -
    883  else
    -
    884  postProcessingWs = mergeVectors({wQueueAssembler}, postProcessingWs);
    -
    885  // Adam/IK step
    -
    886  if (!jointAngleEstimationsWs.empty())
    -
    887  {
    -
    888  if (multiThreadEnabled)
    -
    889  {
    -
    890  for (auto& wJointAngleEstimator : jointAngleEstimationsWs)
    -
    891  {
    -
    892  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    893  threadManager.add(threadId, wJointAngleEstimator, queueIn, queueOut);
    -
    894  threadIdPP(threadId, multiThreadEnabled);
    -
    895  }
    -
    896  queueIn++;
    -
    897  queueOut++;
    -
    898  // Sort frames
    -
    899  if (jointAngleEstimationsWs.size() > 1)
    -
    900  {
    -
    901  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    -
    902  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    903  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    -
    904  threadIdPP(threadId, multiThreadEnabled);
    -
    905  }
    -
    906  }
    -
    907  else
    -
    908  {
    -
    909  if (jointAngleEstimationsWs.size() > 1)
    -
    910  log("Multi-threading disabled, only 1 thread running for joint angle estimation.",
-
911  Priority::High);
-
    912  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    913  threadManager.add(threadId, jointAngleEstimationsWs.at(0), queueIn++, queueOut++);
    -
    914  }
    -
    915  }
    -
    916  // Post processing workers
    -
    917  if (!postProcessingWs.empty())
    -
    918  {
    -
    919  // Combining postProcessingWs and outputWs
    -
    920  outputWs = mergeVectors(postProcessingWs, outputWs);
    -
    921  // // If I wanna split them
    -
    922  // log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    923  // threadManager.add(threadId, postProcessingWs, queueIn++, queueOut++);
    -
    924  // threadIdPP(threadId, multiThreadEnabled);
    -
    925  }
    -
    926  // If custom user Worker and uses its own thread
    -
    927  if (!userPostProcessingWs.empty())
    -
    928  {
    -
    929  // If custom user Worker in its own thread
    -
    930  if (userPostProcessingWsOnNewThread)
    -
    931  {
    -
    932  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    933  threadManager.add(threadId, userPostProcessingWs, queueIn++, queueOut++);
    -
    934  threadIdPP(threadId, multiThreadEnabled);
    -
    935  }
    -
    936  // If custom user Worker in same thread
    -
    937  // Merge with outputWs
    -
    938  else
    -
    939  outputWs = mergeVectors(outputWs, userPostProcessingWs);
    -
    940  }
    -
    941  // Output workers
    -
    942  if (!outputWs.empty())
    -
    943  {
    -
    944  // Thread 4 or 5, queues 4 -> 5
    -
    945  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    946  threadManager.add(threadId, outputWs, queueIn++, queueOut++);
    -
    947  threadIdPP(threadId, multiThreadEnabled);
    -
    948  }
    -
    949  // User output worker
    -
    950  // Thread Y, queues Q -> Q+1
    -
    951  if (!userOutputWs.empty())
    -
    952  {
    -
    953  if (userOutputWsOnNewThread)
    -
    954  {
    -
    955  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    956  threadManager.add(threadId, userOutputWs, queueIn++, queueOut++);
    -
    957  threadIdPP(threadId, multiThreadEnabled);
    -
    958  }
    -
    959  else
    -
    960  {
    -
    961  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    962  threadManager.add(threadId-1, userOutputWs, queueIn++, queueOut++);
    -
    963  }
    -
    964  }
    -
    965  // OpenPose GUI
    -
    966  if (guiW != nullptr)
    -
    967  {
    -
    968  // Thread Y+1, queues Q+1 -> Q+2
    -
    969  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    970  threadManager.add(threadId, guiW, queueIn++, queueOut++);
    -
    971  threadIdPP(threadId, multiThreadEnabled);
    -
    972  }
    -
    973  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    -
    974  }
    -
    975  catch (const std::exception& e)
    -
    976  {
    -
    977  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    -
    978  }
    -
    979  }
    -
    980 }
    -
    981 
    -
    982 #endif // OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
    +
    748 
    +
749  // The fewer the queues -> the fewer threads opened, and potentially the lower the lag
    +
    750 
    +
    751  // Sanity checks
    +
    752  if ((datumProducerW == nullptr) == (userInputWs.empty())
    +
    753  && threadManagerMode != ThreadManagerMode::Asynchronous
    +
    754  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    755  {
    +
    756  const auto message = "You need to have 1 and only 1 producer selected. You can introduce your own"
    +
    757  " producer by using setWorker(WorkerType::Input, ...) or use the OpenPose"
    +
    758  " default producer by configuring it in the configure function) or use the"
    +
    759  " ThreadManagerMode::Asynchronous(In) mode.";
    +
    760  error(message, __LINE__, __FUNCTION__, __FILE__);
    +
    761  }
    +
    762  if (outputWs.empty() && userOutputWs.empty() && guiW == nullptr
    +
    763  && threadManagerMode != ThreadManagerMode::Asynchronous
    +
    764  && threadManagerMode != ThreadManagerMode::AsynchronousOut)
    +
    765  {
    +
    766  error("No output selected.", __LINE__, __FUNCTION__, __FILE__);
    +
    767  }
    +
    768 
    +
    769  // Thread Manager
    +
    770  // Clean previous thread manager (avoid configure to crash the program if used more than once)
    +
    771  threadManager.reset();
    +
    772  unsigned long long threadId = 0ull;
    +
    773  auto queueIn = 0ull;
    +
    774  auto queueOut = 1ull;
    +
    775  // After producer
    +
    776  // ID generator (before any multi-threading or any function that requires the ID)
    +
    777  const auto wIdGenerator = std::make_shared<WIdGenerator<TDatumsSP>>();
    +
    778  std::vector<TWorker> workersAux{wIdGenerator};
    +
    779  // Scale & cv::Mat to OP format
    +
    780  if (scaleAndSizeExtractorW != nullptr)
    +
    781  workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW});
    +
    782  if (cvMatToOpInputW != nullptr)
    +
    783  workersAux = mergeVectors(workersAux, {cvMatToOpInputW});
    +
    784  // cv::Mat to output format
    +
    785  if (cvMatToOpOutputW != nullptr)
    +
    786  workersAux = mergeVectors(workersAux, {cvMatToOpOutputW});
    +
    787 
    +
    788  // Producer
    +
    789  // If custom user Worker and uses its own thread
    +
    790  if (!userInputWs.empty() && userInputWsOnNewThread)
    +
    791  {
    +
    792  // Thread 0, queues 0 -> 1
    +
    793  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    794  threadManager.add(threadId, userInputWs, queueIn++, queueOut++);
    +
    795  threadIdPP(threadId, multiThreadEnabled);
    +
    796  }
    +
    797  // If custom user Worker in same thread
    +
    798  else if (!userInputWs.empty())
    +
    799  workersAux = mergeVectors(userInputWs, workersAux);
    +
    800  // If OpenPose producer (same thread)
    +
    801  else if (datumProducerW != nullptr)
    +
    802  workersAux = mergeVectors({datumProducerW}, workersAux);
    +
    803  // Otherwise
    +
    804  else if (threadManagerMode != ThreadManagerMode::Asynchronous
    +
    805  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    806  error("No input selected.", __LINE__, __FUNCTION__, __FILE__);
    +
    807  // Thread 0 or 1, queues 0 -> 1
    +
    808  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    809  threadManager.add(threadId, workersAux, queueIn++, queueOut++);
    +
    810  // Increase thread
    +
    811  threadIdPP(threadId, multiThreadEnabled);
    +
    812 
    +
    813  // Pose estimation & rendering
    +
    814  // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs
    +
    815  if (!poseExtractorsWs.empty())
    +
    816  {
    +
    817  if (multiThreadEnabled)
    +
    818  {
    +
    819  for (auto& wPose : poseExtractorsWs)
    +
    820  {
    +
    821  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    822  threadManager.add(threadId, wPose, queueIn, queueOut);
    +
    823  threadIdPP(threadId, multiThreadEnabled);
    +
    824  }
    +
    825  queueIn++;
    +
    826  queueOut++;
    +
827  // Sort frames - requires its own thread
    +
    828  if (poseExtractorsWs.size() > 1u)
    +
    829  {
    +
    830  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    +
    831  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    832  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    +
    833  threadIdPP(threadId, multiThreadEnabled);
    +
    834  }
    +
    835  }
    +
    836  else
    +
    837  {
    +
    838  if (poseExtractorsWs.size() > 1)
    +
    839  log("Multi-threading disabled, only 1 thread running. All GPUs have been disabled but the"
    +
    840  " first one, which is defined by gpuNumberStart (e.g., in the OpenPose demo, it is set"
    +
    841  " with the `--num_gpu_start` flag).", Priority::High);
    +
    842  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    843  threadManager.add(threadId, poseExtractorsWs.at(0), queueIn++, queueOut++);
    +
    844  }
    +
    845  }
    +
    846  // Assemble all frames from same time instant (3-D module)
    +
    847  const auto wQueueAssembler = std::make_shared<WQueueAssembler<TDatumsSP, TDatums>>();
    +
    848  // 3-D reconstruction
    +
    849  if (!poseTriangulationsWs.empty())
    +
    850  {
    +
    851  // Assemble frames
    +
    852  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    853  threadManager.add(threadId, wQueueAssembler, queueIn++, queueOut++);
    +
    854  threadIdPP(threadId, multiThreadEnabled);
    +
    855  // 3-D reconstruction
    +
    856  if (multiThreadEnabled)
    +
    857  {
    +
    858  for (auto& wPoseTriangulations : poseTriangulationsWs)
    +
    859  {
    +
    860  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    861  threadManager.add(threadId, wPoseTriangulations, queueIn, queueOut);
    +
    862  threadIdPP(threadId, multiThreadEnabled);
    +
    863  }
    +
    864  queueIn++;
    +
    865  queueOut++;
    +
    866  // Sort frames
    +
    867  if (poseTriangulationsWs.size() > 1u)
    +
    868  {
    +
    869  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    +
    870  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    871  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    +
    872  threadIdPP(threadId, multiThreadEnabled);
    +
    873  }
    +
    874  }
    +
    875  else
    +
    876  {
    +
    877  if (poseTriangulationsWs.size() > 1)
    +
    878  log("Multi-threading disabled, only 1 thread running for 3-D triangulation.",
+
879  Priority::High);
+
    880  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    881  threadManager.add(threadId, poseTriangulationsWs.at(0), queueIn++, queueOut++);
    +
    882  }
    +
    883  }
    +
    884  else
    +
    885  postProcessingWs = mergeVectors({wQueueAssembler}, postProcessingWs);
    +
    886  // Adam/IK step
    +
    887  if (!jointAngleEstimationsWs.empty())
    +
    888  {
    +
    889  if (multiThreadEnabled)
    +
    890  {
    +
    891  for (auto& wJointAngleEstimator : jointAngleEstimationsWs)
    +
    892  {
    +
    893  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    894  threadManager.add(threadId, wJointAngleEstimator, queueIn, queueOut);
    +
    895  threadIdPP(threadId, multiThreadEnabled);
    +
    896  }
    +
    897  queueIn++;
    +
    898  queueOut++;
    +
    899  // Sort frames
    +
    900  if (jointAngleEstimationsWs.size() > 1)
    +
    901  {
    +
    902  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    +
    903  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    904  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    +
    905  threadIdPP(threadId, multiThreadEnabled);
    +
    906  }
    +
    907  }
    +
    908  else
    +
    909  {
    +
    910  if (jointAngleEstimationsWs.size() > 1)
    +
    911  log("Multi-threading disabled, only 1 thread running for joint angle estimation.",
+
912  Priority::High);
+
    913  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    914  threadManager.add(threadId, jointAngleEstimationsWs.at(0), queueIn++, queueOut++);
    +
    915  }
    +
    916  }
    +
    917  // Post processing workers
    +
    918  if (!postProcessingWs.empty())
    +
    919  {
    +
    920  // Combining postProcessingWs and outputWs
    +
    921  outputWs = mergeVectors(postProcessingWs, outputWs);
    +
922  // // If I want to split them
    +
    923  // log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    924  // threadManager.add(threadId, postProcessingWs, queueIn++, queueOut++);
    +
    925  // threadIdPP(threadId, multiThreadEnabled);
    +
    926  }
    +
    927  // If custom user Worker and uses its own thread
    +
    928  if (!userPostProcessingWs.empty())
    +
    929  {
    +
    930  // If custom user Worker in its own thread
    +
    931  if (userPostProcessingWsOnNewThread)
    +
    932  {
    +
    933  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    934  threadManager.add(threadId, userPostProcessingWs, queueIn++, queueOut++);
    +
    935  threadIdPP(threadId, multiThreadEnabled);
    +
    936  }
    +
    937  // If custom user Worker in same thread
    +
    938  // Merge with outputWs
    +
    939  else
    +
    940  outputWs = mergeVectors(outputWs, userPostProcessingWs);
    +
    941  }
    +
    942  // Output workers
    +
    943  if (!outputWs.empty())
    +
    944  {
    +
    945  // Thread 4 or 5, queues 4 -> 5
    +
    946  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    947  threadManager.add(threadId, outputWs, queueIn++, queueOut++);
    +
    948  threadIdPP(threadId, multiThreadEnabled);
    +
    949  }
    +
    950  // User output worker
    +
    951  // Thread Y, queues Q -> Q+1
    +
    952  if (!userOutputWs.empty())
    +
    953  {
    +
    954  if (userOutputWsOnNewThread)
    +
    955  {
    +
    956  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    957  threadManager.add(threadId, userOutputWs, queueIn++, queueOut++);
    +
    958  threadIdPP(threadId, multiThreadEnabled);
    +
    959  }
    +
    960  else
    +
    961  {
    +
    962  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    963  threadManager.add(threadId-1, userOutputWs, queueIn++, queueOut++);
    +
    964  }
    +
    965  }
    +
    966  // OpenPose GUI
    +
    967  if (guiW != nullptr)
    +
    968  {
    +
    969  // Thread Y+1, queues Q+1 -> Q+2
    +
    970  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    971  threadManager.add(threadId, guiW, queueIn++, queueOut++);
    +
    972  threadIdPP(threadId, multiThreadEnabled);
    +
    973  }
    +
    974  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    975  }
    +
    976  catch (const std::exception& e)
    +
    977  {
    +
    978  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    979  }
    +
    980  }
    +
    981 }
    +
    982 
    +
    983 #endif // OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
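A minimal sketch of the COCO body/foot JSON wiring shown in the listing above, assuming the op::CocoJsonSaver and op::WCocoJsonSaver types rendered on this page; the JSON file names and the humanFormat value are illustrative placeholders, not values taken from the commit, and TDatumsSP refers to the surrounding template parameter in the listing.

// Hedged sketch: mirrors the body/foot COCO JSON saver wiring in wrapperAuxiliary.hpp above.
// "coco_body.json"/"coco_foot.json" and humanFormat are assumptions for illustration only.
const auto humanFormat = true;
const auto bodyJsonSaver = std::make_shared<op::CocoJsonSaver>(
    "coco_body.json", humanFormat, op::CocoJsonFormat::Body,
    wrapperStructOutput.writeCocoJsonVariant);           // new variant argument in this commit
const auto footJsonSaver = std::make_shared<op::CocoJsonSaver>(
    "coco_foot.json", humanFormat, op::CocoJsonFormat::Foot);
outputWs.emplace_back(std::make_shared<op::WCocoJsonSaver<TDatumsSP>>(bodyJsonSaver));
outputWs.emplace_back(std::make_shared<op::WCocoJsonSaver<TDatumsSP>>(footJsonSaver));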
    OP_API void threadIdPP(unsigned long long &threadId, const bool multiThreadEnabled)
    +
    int writeCocoJsonVariant
    Definition: wrapperStructOutput.hpp:78
    Definition: wHandDetectorTracking.hpp:11
    Definition: wGuiInfoAdder.hpp:11
    @@ -1076,7 +1078,7 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    std::array< T, N > array
    Definition: cl2.hpp:594
    float alphaHeatMap
    Definition: wrapperStructFace.hpp:46
    bool guiVerbose
    Definition: wrapperStructOutput.hpp:31
    -
    std::string writeHeatMapsFormat
    Definition: wrapperStructOutput.hpp:106
    +
    std::string writeHeatMapsFormat
    Definition: wrapperStructOutput.hpp:112
    Definition: wPoseSaver.hpp:12
    @@ -1094,10 +1096,10 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    int tracking
    Definition: wrapperStructExtra.hpp:39
    Definition: wPeopleJsonSaver.hpp:11
    Definition: wHandRenderer.hpp:11
    -
    double writeVideoFps
    Definition: wrapperStructOutput.hpp:111
    +
    double writeVideoFps
    Definition: wrapperStructOutput.hpp:117
    Definition: wImageSaver.hpp:11
    -
    std::string udpPort
    Definition: wrapperStructOutput.hpp:135
    -
    std::string writeImages
    Definition: wrapperStructOutput.hpp:78
    +
    std::string udpPort
    Definition: wrapperStructOutput.hpp:141
    +
    std::string writeImages
    Definition: wrapperStructOutput.hpp:84
    std::shared_ptr< std::atomic< bool > > getIsRunningSharedPtr()
    Definition: threadManager.hpp:40
    @@ -1111,7 +1113,7 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    Definition: wKeypointScaler.hpp:11
    int minViews3d
    Definition: wrapperStructExtra.hpp:27
    -
    std::string writeHeatMaps
    Definition: wrapperStructOutput.hpp:100
    +
    std::string writeHeatMaps
    Definition: wrapperStructOutput.hpp:106
    std::string writeCocoJson
    Definition: wrapperStructOutput.hpp:67
    void add(const unsigned long long threadId, const std::vector< TWorker > &tWorkers, const unsigned long long queueInId, const unsigned long long queueOutId)
    Definition: threadManager.hpp:125
    @@ -1123,7 +1125,7 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    bool fullScreen
    Definition: wrapperStructOutput.hpp:37
    -
    std::string udpHost
    Definition: wrapperStructOutput.hpp:130
    +
    std::string udpHost
    Definition: wrapperStructOutput.hpp:136
    DataFormat writeKeypointFormat
    Definition: wrapperStructOutput.hpp:51
    @@ -1153,7 +1155,7 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    unsigned long long frameStep
    Definition: wrapperStructInput.hpp:40
    Definition: wHandExtractorNet.hpp:11
    -
    std::string writeBvh
    Definition: wrapperStructOutput.hpp:125
    +
    std::string writeBvh
    Definition: wrapperStructOutput.hpp:131
    Definition: wFaceRenderer.hpp:11
    @@ -1177,14 +1179,14 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    Definition: poseGpuRenderer.hpp:13
    Definition: wHandDetector.hpp:11
    -
    std::string writeVideoAdam
    Definition: wrapperStructOutput.hpp:118
    +
    std::string writeVideoAdam
    Definition: wrapperStructOutput.hpp:124
    std::vector< T > mergeVectors(const std::vector< T > &vectorA, const std::vector< T > &vectorB)
    Definition: standard.hpp:40
    Point< int > cameraResolution
    Definition: wrapperStructInput.hpp:72
    Definition: wHandSaver.hpp:12
    ProducerType producerType
    Definition: wrapperStructInput.hpp:20
    -
    std::string writeVideo
    Definition: wrapperStructOutput.hpp:93
    +
    std::string writeVideo
    Definition: wrapperStructOutput.hpp:99
    unsigned long long frameLast
    Definition: wrapperStructInput.hpp:46
    bool identification
    Definition: wrapperStructExtra.hpp:32
    DisplayMode displayMode
    Definition: wrapperStructOutput.hpp:25
    @@ -1203,7 +1205,7 @@ $(document).ready(function(){initNavTree('wrapper_auxiliary_8hpp_source.html',''
    #define OP_API
    Definition: macros.hpp:18
    float renderThreshold
    Definition: wrapperStructFace.hpp:54
    int frameRotate
    Definition: wrapperStructInput.hpp:62
    -
    std::string writeImagesFormat
    Definition: wrapperStructOutput.hpp:86
    +
    std::string writeImagesFormat
    Definition: wrapperStructOutput.hpp:92
    unsigned long long frameFirst
    Definition: wrapperStructInput.hpp:33
    Definition: wrapperStructOutput.hpp:14
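A short, hedged illustration of the thread/queue chaining idiom used throughout wrapperAuxiliary.hpp above, based only on the ThreadManager::add and threadIdPP signatures shown in the tooltips; the worker vectors are placeholders, not workers defined by the commit.

// Sketch of the linear pipeline wiring: each stage reads queueIn and writes queueOut, both
// indices then advance, and threadIdPP moves to a new thread only when multi-threading is on.
// inputWs, poseWs and outputWs here are placeholder std::vector<TWorker> instances.
unsigned long long threadId = 0ull;
auto queueIn  = 0ull;
auto queueOut = 1ull;
threadManager.add(threadId, inputWs,  queueIn++, queueOut++);   // queues 0 -> 1
threadIdPP(threadId, multiThreadEnabled);
threadManager.add(threadId, poseWs,   queueIn++, queueOut++);   // queues 1 -> 2
threadIdPP(threadId, multiThreadEnabled);
threadManager.add(threadId, outputWs, queueIn++, queueOut++);   // queues 2 -> 3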
diff --git a/html/wrapper_struct_output_8hpp_source.html b/html/wrapper_struct_output_8hpp_source.html
index e6c27052..88bce392 100644
--- a/html/wrapper_struct_output_8hpp_source.html
+++ b/html/wrapper_struct_output_8hpp_source.html
@@ -134,66 +134,69 @@ $(document).ready(function(){initNavTree('wrapper_struct_output_8hpp_source.html
    68 
    73 
    - +
    79 
    - -
    87 
    - -
    94 
    - -
    101 
    - + +
    85 
    + +
    93 
    + +
    100 
    +
    107 
    - -
    112 
    - -
    119 
    - -
    126 
    - -
    131 
    - -
    136 
    -
    142  WrapperStructOutput(const DisplayMode displayMode = DisplayMode::NoDisplay, const bool guiVerbose = false,
    -
    143  const bool fullScreen = false, const std::string& writeKeypoint = "",
    -
    144  const DataFormat writeKeypointFormat = DataFormat::Xml,
    -
    145  const std::string& writeJson = "", const std::string& writeCocoJson = "",
    -
    146  const std::string& writeCocoFootJson = "", const std::string& writeImages = "",
    -
    147  const std::string& writeImagesFormat = "", const std::string& writeVideo = "",
    -
    148  const double writeVideoFps = 30., const std::string& writeHeatMaps = "",
    -
    149  const std::string& writeHeatMapsFormat = "", const std::string& writeVideoAdam = "",
    -
    150  const std::string& writeBvh = "", const std::string& udpHost = "",
    -
    151  const std::string& udpPort = "");
    -
    152  };
    -
    153 }
    -
    154 
    -
    155 #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_OUTPUT_HPP
    + +
    113 
    + +
    118 
    + +
    125 
    + +
    132 
    + +
    137 
    + +
    142 
    +
    148  WrapperStructOutput(const DisplayMode displayMode = DisplayMode::NoDisplay, const bool guiVerbose = false,
    +
    149  const bool fullScreen = false, const std::string& writeKeypoint = "",
    +
    150  const DataFormat writeKeypointFormat = DataFormat::Xml,
    +
    151  const std::string& writeJson = "", const std::string& writeCocoJson = "",
    +
    152  const std::string& writeCocoFootJson = "", const int writeCocoJsonVariant = 1,
    +
    153  const std::string& writeImages = "", const std::string& writeImagesFormat = "",
    +
    154  const std::string& writeVideo = "", const double writeVideoFps = 30.,
    +
    155  const std::string& writeHeatMaps = "", const std::string& writeHeatMapsFormat = "",
    +
    156  const std::string& writeVideoAdam = "", const std::string& writeBvh = "",
    +
    157  const std::string& udpHost = "", const std::string& udpPort = "");
    +
    158  };
    +
    159 }
    +
    160 
    +
    161 #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_OUTPUT_HPP
    +
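A hedged usage sketch of the updated WrapperStructOutput constructor above, showing where the new writeCocoJsonVariant argument (9th parameter) sits; the concrete JSON file names and the variant value 1 are illustrative assumptions only, and the include path is inferred from the header guard shown in the listing.

// Assumes the OpenPose headers are on the include path; everything except the parameter
// order (taken from the declaration above) is an illustrative placeholder.
#include <openpose/wrapper/wrapperStructOutput.hpp>

const op::WrapperStructOutput wrapperStructOutput{
    op::DisplayMode::NoDisplay,   // displayMode
    false, false,                 // guiVerbose, fullScreen
    "", op::DataFormat::Xml,      // writeKeypoint, writeKeypointFormat
    "",                           // writeJson
    "coco_body.json",             // writeCocoJson
    "coco_foot.json",             // writeCocoFootJson
    1,                            // writeCocoJsonVariant (new in this commit)
    "", "",                       // writeImages, writeImagesFormat
    "", 30.,                      // writeVideo, writeVideoFps
    "", "",                       // writeHeatMaps, writeHeatMapsFormat
    "", "",                       // writeVideoAdam, writeBvh
    "", ""};                      // udpHost, udpPort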
    int writeCocoJsonVariant
    Definition: wrapperStructOutput.hpp:78
    bool guiVerbose
    Definition: wrapperStructOutput.hpp:31
    -
    std::string writeHeatMapsFormat
    Definition: wrapperStructOutput.hpp:106
    +
    std::string writeHeatMapsFormat
    Definition: wrapperStructOutput.hpp:112
    std::string writeKeypoint
    Definition: wrapperStructOutput.hpp:44
    -
    double writeVideoFps
    Definition: wrapperStructOutput.hpp:111
    -
    std::string udpPort
    Definition: wrapperStructOutput.hpp:135
    -
    std::string writeImages
    Definition: wrapperStructOutput.hpp:78
    +
    double writeVideoFps
    Definition: wrapperStructOutput.hpp:117
    +
    std::string udpPort
    Definition: wrapperStructOutput.hpp:141
    +
    std::string writeImages
    Definition: wrapperStructOutput.hpp:84
    DataFormat
    Definition: enumClasses.hpp:6
    -
    std::string writeHeatMaps
    Definition: wrapperStructOutput.hpp:100
    +
    std::string writeHeatMaps
    Definition: wrapperStructOutput.hpp:106
    std::string writeCocoJson
    Definition: wrapperStructOutput.hpp:67
    DisplayMode
    Definition: enumClasses.hpp:10
    bool fullScreen
    Definition: wrapperStructOutput.hpp:37
    -
    std::string udpHost
    Definition: wrapperStructOutput.hpp:130
    +
    std::string udpHost
    Definition: wrapperStructOutput.hpp:136
    DataFormat writeKeypointFormat
    Definition: wrapperStructOutput.hpp:51
    std::string writeCocoFootJson
    Definition: wrapperStructOutput.hpp:72
    -
    std::string writeBvh
    Definition: wrapperStructOutput.hpp:125
    +
    std::string writeBvh
    Definition: wrapperStructOutput.hpp:131
    std::string writeJson
    Definition: wrapperStructOutput.hpp:61
    -
    std::string writeVideoAdam
    Definition: wrapperStructOutput.hpp:118
    -
    std::string writeVideo
    Definition: wrapperStructOutput.hpp:93
    +
    std::string writeVideoAdam
    Definition: wrapperStructOutput.hpp:124
    +
    std::string writeVideo
    Definition: wrapperStructOutput.hpp:99
    DisplayMode displayMode
    Definition: wrapperStructOutput.hpp:25
    #define OP_API
    Definition: macros.hpp:18
    -
    std::string writeImagesFormat
    Definition: wrapperStructOutput.hpp:86
    +
    std::string writeImagesFormat
    Definition: wrapperStructOutput.hpp:92
    Definition: wrapperStructOutput.hpp:14
    std::string string
    Definition: cl2.hpp:574
    -- GitLab