OpenPose  1.0.0rc2
OpenPose: A Real-Time Multi-Person Key-Point Detection And Multi-Threading C++ Library
wrapperAuxiliary.hpp
1 #ifndef OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
2 #define OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
3 
4 #include <openpose/core/common.hpp>
5 #include <openpose/thread/headers.hpp>
6 #include <openpose/wrapper/wrapperStructExtra.hpp>
7 #include <openpose/wrapper/wrapperStructFace.hpp>
8 #include <openpose/wrapper/wrapperStructHand.hpp>
9 #include <openpose/wrapper/wrapperStructInput.hpp>
10 #include <openpose/wrapper/wrapperStructOutput.hpp>
11 #include <openpose/wrapper/wrapperStructPose.hpp>
12 
13 namespace op
14 {
28  OP_API void wrapperConfigureSanityChecks(
29  WrapperStructPose& wrapperStructPose, const WrapperStructFace& wrapperStructFace,
30  const WrapperStructHand& wrapperStructHand, const WrapperStructExtra& wrapperStructExtra,
31  const WrapperStructInput& wrapperStructInput, const WrapperStructOutput& wrapperStructOutput,
32  const bool renderOutput, const bool userOutputWsEmpty, const std::shared_ptr<Producer>& producerSharedPtr,
33  const ThreadManagerMode threadManagerMode);
34 
43  OP_API void threadIdPP(unsigned long long& threadId, const bool multiThreadEnabled);
44 
51  template<typename TDatums,
52  typename TDatumsSP = std::shared_ptr<TDatums>,
53  typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
54  void configureThreadManager(
55  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabled,
56  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPose,
57  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
58  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
59  const WrapperStructOutput& wrapperStructOutput,
60  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
61  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread);
62 }
63 
64 
65 
66 
67 
68 // Implementation
69 #include <openpose/3d/headers.hpp>
70 #include <openpose/core/headers.hpp>
71 #include <openpose/face/headers.hpp>
72 #include <openpose/filestream/headers.hpp>
73 #include <openpose/gpu/gpu.hpp>
74 #include <openpose/gui/headers.hpp>
75 #include <openpose/hand/headers.hpp>
76 #include <openpose/pose/headers.hpp>
77 #include <openpose/producer/headers.hpp>
78 #include <openpose/tracking/headers.hpp>
79 #include <openpose/utilities/fileSystem.hpp>
80 #include <openpose/utilities/standard.hpp>
81 namespace op
82 {
83  template<typename TDatums, typename TDatumsSP, typename TWorker>
84  void configureThreadManager(
85  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabledTemp,
86  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPoseTemp,
87  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
88  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
89  const WrapperStructOutput& wrapperStructOutput,
90  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
91  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread)
92  {
93  try
94  {
95  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
96 
97  // Create producer
98  auto producerSharedPtr = createProducer(
99  wrapperStructInput.producerType, wrapperStructInput.producerString,
100  wrapperStructInput.cameraResolution, wrapperStructInput.webcamFps,
101  wrapperStructInput.cameraParameterPath, wrapperStructInput.undistortImage,
102  wrapperStructInput.imageDirectoryStereo);
103 
104  // Editable arguments
105  auto wrapperStructPose = wrapperStructPoseTemp;
106  auto multiThreadEnabled = multiThreadEnabledTemp;
107 
108  // Workers
109  TWorker datumProducerW;
110  TWorker scaleAndSizeExtractorW;
111  TWorker cvMatToOpInputW;
112  TWorker cvMatToOpOutputW;
113  std::vector<std::vector<TWorker>> poseExtractorsWs;
114  std::vector<std::vector<TWorker>> poseTriangulationsWs;
115  std::vector<std::vector<TWorker>> jointAngleEstimationsWs;
116  std::vector<TWorker> postProcessingWs;
117  std::vector<TWorker> outputWs;
118  TWorker guiW;
119 
120  // User custom workers
121  const auto& userInputWs = userWs[int(WorkerType::Input)];
122  const auto& userPostProcessingWs = userWs[int(WorkerType::PostProcessing)];
123  const auto& userOutputWs = userWs[int(WorkerType::Output)];
124  const auto userInputWsOnNewThread = userWsOnNewThread[int(WorkerType::Input)];
125  const auto userPostProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PostProcessing)];
126  const auto userOutputWsOnNewThread = userWsOnNewThread[int(WorkerType::Output)];
127 
128  // Video seek
129  const auto spVideoSeek = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
130  // It cannot be directly included in the constructor (compiler error for copying std::atomic)
131  spVideoSeek->first = false;
132  spVideoSeek->second = 0;
133 
134  // Required parameters
135  const auto renderOutput = wrapperStructPose.renderMode != RenderMode::None
136  || wrapperStructFace.renderMode != RenderMode::None
137  || wrapperStructHand.renderMode != RenderMode::None;
138  const auto renderOutputGpu = wrapperStructPose.renderMode == RenderMode::Gpu
139  || wrapperStructFace.renderMode == RenderMode::Gpu
140  || wrapperStructHand.renderMode == RenderMode::Gpu;
141  const auto renderFace = wrapperStructFace.enable && wrapperStructFace.renderMode != RenderMode::None;
142  const auto renderHand = wrapperStructHand.enable && wrapperStructHand.renderMode != RenderMode::None;
143  const auto renderHandGpu = wrapperStructHand.enable && wrapperStructHand.renderMode == RenderMode::Gpu;
144 
145  // Check no wrong/contradictory flags enabled
146  const auto userOutputWsEmpty = userOutputWs.empty();
147  wrapperConfigureSanityChecks(
148  wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, wrapperStructInput,
149  wrapperStructOutput, renderOutput, userOutputWsEmpty, producerSharedPtr, threadManagerMode);
150 
151  // Get number threads
152  auto numberThreads = wrapperStructPose.gpuNumber;
153  auto gpuNumberStart = wrapperStructPose.gpuNumberStart;
154  // CPU --> 1 thread or no pose extraction
155  if (getGpuMode() == GpuMode::NoGpu)
156  {
157  numberThreads = (wrapperStructPose.gpuNumber == 0 ? 0 : 1);
158  gpuNumberStart = 0;
159  // Disabling multi-thread makes the code 400 ms faster (2.3 sec vs. 2.7 sec on an i7-6850K)
160  // and fixes the bug that the screen was not properly displayed and only refreshed sometimes
161  // Note: The screen bug could be also fixed by using waitKey(30) rather than waitKey(1)
162  multiThreadEnabled = false;
163  }
164  // GPU --> user picks (<= #GPUs)
165  else
166  {
167  // Get total number GPUs
168  const auto totalGpuNumber = getGpuNumber();
169  // If number GPU < 0 --> set it to all the available GPUs
170  if (numberThreads < 0)
171  {
172  if (totalGpuNumber <= gpuNumberStart)
173  error("Number of initial GPU (`--number_gpu_start`) must be lower than the total number of"
174  " used GPUs (`--number_gpu`)", __LINE__, __FUNCTION__, __FILE__);
175  numberThreads = totalGpuNumber - gpuNumberStart;
176  // Reset initial GPU to 0 (we want them all)
177  // Logging message
178  log("Auto-detecting all available GPUs... Detected " + std::to_string(totalGpuNumber)
179  + " GPU(s), using " + std::to_string(numberThreads) + " of them starting at GPU "
180  + std::to_string(gpuNumberStart) + ".", Priority::High);
181  }
182  // Sanity check
183  if (gpuNumberStart + numberThreads > totalGpuNumber)
184  error("Initial GPU selected (`--number_gpu_start`) + number GPUs to use (`--number_gpu`) must"
185  " be less than or equal to the total number of GPUs in your machine ("
186  + std::to_string(gpuNumberStart) + " + "
187  + std::to_string(numberThreads) + " vs. "
188  + std::to_string(totalGpuNumber) + ").",
189  __LINE__, __FUNCTION__, __FILE__);
190  }
191 
192  // Proper format
193  const auto writeImagesCleaned = formatAsDirectory(wrapperStructOutput.writeImages);
194  const auto writeKeypointCleaned = formatAsDirectory(wrapperStructOutput.writeKeypoint);
195  const auto writeJsonCleaned = formatAsDirectory(wrapperStructOutput.writeJson);
196  const auto writeHeatMapsCleaned = formatAsDirectory(wrapperStructOutput.writeHeatMaps);
197  const auto modelFolder = formatAsDirectory(wrapperStructPose.modelFolder);
198 
199  // Common parameters
200  auto finalOutputSize = wrapperStructPose.outputSize;
201  Point<int> producerSize{-1,-1};
202  const auto oPProducer = (producerSharedPtr != nullptr);
203  if (oPProducer)
204  {
205  // 1. Set producer properties
206  const auto displayProducerFpsMode = (wrapperStructInput.realTimeProcessing
207  ? ProducerFpsMode::OriginalFps : ProducerFpsMode::RetrievalFps);
208  producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
209  producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip);
210  producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate);
211  producerSharedPtr->set(ProducerProperty::AutoRepeat, wrapperStructInput.framesRepeat);
212  // 2. Set finalOutputSize
213  producerSize = Point<int>{(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
214  (int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
215  // Set finalOutputSize to input size if desired
216  if (finalOutputSize.x == -1 || finalOutputSize.y == -1)
217  finalOutputSize = producerSize;
218  }
219 
220  // Producer
221  if (oPProducer)
222  {
223  const auto datumProducer = std::make_shared<DatumProducer<TDatums>>(
224  producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameStep,
225  wrapperStructInput.frameLast, spVideoSeek
226  );
227  datumProducerW = std::make_shared<WDatumProducer<TDatumsSP, TDatums>>(datumProducer);
228  }
229  else
230  datumProducerW = nullptr;
231 
232  std::vector<std::shared_ptr<PoseExtractorNet>> poseExtractorNets;
233  std::vector<std::shared_ptr<FaceExtractorNet>> faceExtractorNets;
234  std::vector<std::shared_ptr<HandExtractorNet>> handExtractorNets;
235  std::vector<std::shared_ptr<PoseGpuRenderer>> poseGpuRenderers;
236  std::shared_ptr<PoseCpuRenderer> poseCpuRenderer;
237  if (numberThreads > 0)
238  {
239  // Get input scales and sizes
240  const auto scaleAndSizeExtractor = std::make_shared<ScaleAndSizeExtractor>(
241  wrapperStructPose.netInputSize, finalOutputSize, wrapperStructPose.scalesNumber,
242  wrapperStructPose.scaleGap
243  );
244  scaleAndSizeExtractorW = std::make_shared<WScaleAndSizeExtractor<TDatumsSP>>(scaleAndSizeExtractor);
245 
246  // Input cvMat to OpenPose input & output format
247  const auto cvMatToOpInput = std::make_shared<CvMatToOpInput>(wrapperStructPose.poseModel);
248  cvMatToOpInputW = std::make_shared<WCvMatToOpInput<TDatumsSP>>(cvMatToOpInput);
249  if (renderOutput)
250  {
251  const auto cvMatToOpOutput = std::make_shared<CvMatToOpOutput>();
252  cvMatToOpOutputW = std::make_shared<WCvMatToOpOutput<TDatumsSP>>(cvMatToOpOutput);
253  }
254 
255  // Pose estimators & renderers
256  std::vector<TWorker> cpuRenderers;
257  poseExtractorsWs.clear();
258  poseExtractorsWs.resize(numberThreads);
259  if (wrapperStructPose.enable)
260  {
261  // Pose estimators
262  for (auto gpuId = 0; gpuId < numberThreads; gpuId++)
263  poseExtractorNets.emplace_back(std::make_shared<PoseExtractorCaffe>(
264  wrapperStructPose.poseModel, modelFolder, gpuId + gpuNumberStart,
265  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
266  wrapperStructPose.addPartCandidates, wrapperStructPose.enableGoogleLogging
267  ));
268 
269  // Pose renderers
270  if (renderOutputGpu || wrapperStructPose.renderMode == RenderMode::Cpu)
271  {
272  // If wrapperStructPose.renderMode != RenderMode::Gpu but renderOutput, then we create an
273  // alpha = 0 pose renderer in order to keep the removing background option
274  const auto alphaKeypoint = (wrapperStructPose.renderMode != RenderMode::None
275  ? wrapperStructPose.alphaKeypoint : 0.f);
276  const auto alphaHeatMap = (wrapperStructPose.renderMode != RenderMode::None
277  ? wrapperStructPose.alphaHeatMap : 0.f);
278  // GPU rendering
279  if (renderOutputGpu)
280  {
281  for (const auto& poseExtractorNet : poseExtractorNets)
282  {
283  poseGpuRenderers.emplace_back(std::make_shared<PoseGpuRenderer>(
284  wrapperStructPose.poseModel, poseExtractorNet, wrapperStructPose.renderThreshold,
285  wrapperStructPose.blendOriginalFrame, alphaKeypoint,
286  alphaHeatMap, wrapperStructPose.defaultPartToRender
287  ));
288  }
289  }
290  // CPU rendering
291  if (wrapperStructPose.renderMode == RenderMode::Cpu)
292  {
293  poseCpuRenderer = std::make_shared<PoseCpuRenderer>(
294  wrapperStructPose.poseModel, wrapperStructPose.renderThreshold,
295  wrapperStructPose.blendOriginalFrame, alphaKeypoint, alphaHeatMap,
296  wrapperStructPose.defaultPartToRender);
297  cpuRenderers.emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(poseCpuRenderer));
298  }
299  }
300  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
301 
302  // Pose extractor(s)
303  poseExtractorsWs.resize(poseExtractorNets.size());
304  const auto personIdExtractor = (wrapperStructExtra.identification
305  ? std::make_shared<PersonIdExtractor>() : nullptr);
306  // Keep top N people
307  // Added right after PoseExtractorNet to avoid:
308  // 1) Rendering people that are later deleted (wrong visualization).
309  // 2) Processing faces and hands on people that will be deleted (speed up).
310  // 3) Running tracking before deleting the people.
311  // Add KeepTopNPeople for each PoseExtractorNet
312  const auto keepTopNPeople = (wrapperStructPose.numberPeopleMax > 0 ?
313  std::make_shared<KeepTopNPeople>(wrapperStructPose.numberPeopleMax)
314  : nullptr);
315  // Person tracker
316  auto personTrackers = std::make_shared<std::vector<std::shared_ptr<PersonTracker>>>();
317  if (wrapperStructExtra.tracking > -1)
318  personTrackers->emplace_back(
319  std::make_shared<PersonTracker>(wrapperStructExtra.tracking == 0));
320  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
321  {
322  // OpenPose keypoint detector + keepTopNPeople
323  // + ID extractor (experimental) + tracking (experimental)
324  const auto poseExtractor = std::make_shared<PoseExtractor>(
325  poseExtractorNets.at(i), keepTopNPeople, personIdExtractor, personTrackers,
326  wrapperStructPose.numberPeopleMax, wrapperStructExtra.tracking);
327  poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractor<TDatumsSP>>(poseExtractor)};
328  // // Just OpenPose keypoint detector
329  // poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractorNet<TDatumsSP>>(
330  // poseExtractorNets.at(i))};
331  }
332 
333  // // (Before tracking / id extractor)
334  // // Added right after PoseExtractorNet to avoid:
335  // // 1) Rendering people that are later deleted (wrong visualization).
336  // // 2) Processing faces and hands on people that will be deleted (speed up).
337  // if (wrapperStructPose.numberPeopleMax > 0)
338  // {
339  // // Add KeepTopNPeople for each PoseExtractorNet
340  // const auto keepTopNPeople = std::make_shared<KeepTopNPeople>(
341  // wrapperStructPose.numberPeopleMax);
342  // for (auto& wPose : poseExtractorsWs)
343  // wPose.emplace_back(std::make_shared<WKeepTopNPeople<TDatumsSP>>(keepTopNPeople));
344  // }
345  }
346 
347 
348  // Face extractor(s)
349  if (wrapperStructFace.enable)
350  {
351  // Face detector
352  // OpenPose face detector
353  if (wrapperStructPose.enable)
354  {
355  const auto faceDetector = std::make_shared<FaceDetector>(wrapperStructPose.poseModel);
356  for (auto& wPose : poseExtractorsWs)
357  wPose.emplace_back(std::make_shared<WFaceDetector<TDatumsSP>>(faceDetector));
358  }
359  // OpenCV face detector
360  else
361  {
362  log("Body keypoint detection is disabled. Hence, using OpenCV face detector (much less"
363  " accurate but faster).", Priority::High);
364  for (auto& wPose : poseExtractorsWs)
365  {
366  // 1 FaceDetectorOpenCV per thread, OpenCV face detector is not thread-safe
367  const auto faceDetectorOpenCV = std::make_shared<FaceDetectorOpenCV>(modelFolder);
368  wPose.emplace_back(
369  std::make_shared<WFaceDetectorOpenCV<TDatumsSP>>(faceDetectorOpenCV)
370  );
371  }
372  }
373  // Face keypoint extractor
374  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
375  {
376  // Face keypoint extractor
377  const auto netOutputSize = wrapperStructFace.netInputSize;
378  const auto faceExtractorNet = std::make_shared<FaceExtractorCaffe>(
379  wrapperStructFace.netInputSize, netOutputSize, modelFolder,
380  gpu + gpuNumberStart, wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
381  wrapperStructPose.enableGoogleLogging
382  );
383  faceExtractorNets.emplace_back(faceExtractorNet);
384  poseExtractorsWs.at(gpu).emplace_back(
385  std::make_shared<WFaceExtractorNet<TDatumsSP>>(faceExtractorNet));
386  }
387  }
388 
389  // Hand extractor(s)
390  if (wrapperStructHand.enable)
391  {
392  const auto handDetector = std::make_shared<HandDetector>(wrapperStructPose.poseModel);
393  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
394  {
395  // Hand detector
396  // If tracking
397  if (wrapperStructHand.tracking)
398  poseExtractorsWs.at(gpu).emplace_back(
399  std::make_shared<WHandDetectorTracking<TDatumsSP>>(handDetector)
400  );
401  // If detection
402  else
403  poseExtractorsWs.at(gpu).emplace_back(
404  std::make_shared<WHandDetector<TDatumsSP>>(handDetector));
405  // Hand keypoint extractor
406  const auto netOutputSize = wrapperStructHand.netInputSize;
407  const auto handExtractorNet = std::make_shared<HandExtractorCaffe>(
408  wrapperStructHand.netInputSize, netOutputSize, modelFolder,
409  gpu + gpuNumberStart, wrapperStructHand.scalesNumber, wrapperStructHand.scaleRange,
410  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
411  wrapperStructPose.enableGoogleLogging
412  );
413  handExtractorNets.emplace_back(handExtractorNet);
414  poseExtractorsWs.at(gpu).emplace_back(
415  std::make_shared<WHandExtractorNet<TDatumsSP>>(handExtractorNet)
416  );
417  // If tracking
418  if (wrapperStructHand.tracking)
419  poseExtractorsWs.at(gpu).emplace_back(
420  std::make_shared<WHandDetectorUpdate<TDatumsSP>>(handDetector)
421  );
422  }
423  }
424 
425  // Pose renderer(s)
426  if (!poseGpuRenderers.empty())
427  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
428  poseExtractorsWs.at(i).emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(
429  poseGpuRenderers.at(i)
430  ));
431 
432  // Face renderer(s)
433  if (renderFace)
434  {
435  // CPU rendering
436  if (wrapperStructFace.renderMode == RenderMode::Cpu)
437  {
438  // Construct face renderer
439  const auto faceRenderer = std::make_shared<FaceCpuRenderer>(wrapperStructFace.renderThreshold,
440  wrapperStructFace.alphaKeypoint,
441  wrapperStructFace.alphaHeatMap);
442  // Add worker
443  cpuRenderers.emplace_back(std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
444  }
445  // GPU rendering
446  else if (wrapperStructFace.renderMode == RenderMode::Gpu)
447  {
448  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
449  {
450  // Construct face renderer
451  const auto faceRenderer = std::make_shared<FaceGpuRenderer>(
452  wrapperStructFace.renderThreshold, wrapperStructFace.alphaKeypoint,
453  wrapperStructFace.alphaHeatMap
454  );
455  // Performance boost -> share spGpuMemory for all renderers
456  if (!poseGpuRenderers.empty())
457  {
458  const bool isLastRenderer = !renderHandGpu;
459  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
460  poseGpuRenderers.at(i)
461  );
462  faceRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(),
463  isLastRenderer);
464  }
465  // Add worker
466  poseExtractorsWs.at(i).emplace_back(
467  std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
468  }
469  }
470  else
471  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
472  }
473 
474  // Hand renderer(s)
475  if (renderHand)
476  {
477  // CPU rendering
478  if (wrapperStructHand.renderMode == RenderMode::Cpu)
479  {
480  // Construct hand renderer
481  const auto handRenderer = std::make_shared<HandCpuRenderer>(wrapperStructHand.renderThreshold,
482  wrapperStructHand.alphaKeypoint,
483  wrapperStructHand.alphaHeatMap);
484  // Add worker
485  cpuRenderers.emplace_back(std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
486  }
487  // GPU rendering
488  else if (wrapperStructHand.renderMode == RenderMode::Gpu)
489  {
490  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
491  {
492  // Construct hands renderer
493  const auto handRenderer = std::make_shared<HandGpuRenderer>(
494  wrapperStructHand.renderThreshold, wrapperStructHand.alphaKeypoint,
495  wrapperStructHand.alphaHeatMap
496  );
497  // Performance boost -> share spGpuMemory for all renderers
498  if (!poseGpuRenderers.empty())
499  {
500  const bool isLastRenderer = true;
501  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
502  poseGpuRenderers.at(i)
503  );
504  handRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(),
505  isLastRenderer);
506  }
507  // Add worker
508  poseExtractorsWs.at(i).emplace_back(
509  std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
510  }
511  }
512  else
513  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
514  }
515 
516  // 3-D reconstruction
517  poseTriangulationsWs.clear();
518  if (wrapperStructExtra.reconstruct3d)
519  {
520  // For all (body/face/hands): PoseTriangulations ~30 msec, 8 GPUs ~30 msec for keypoint estimation
521  poseTriangulationsWs.resize(fastMax(1, int(poseExtractorsWs.size() / 4)));
522  for (auto i = 0u ; i < poseTriangulationsWs.size() ; i++)
523  {
524  const auto poseTriangulation = std::make_shared<PoseTriangulation>(
525  wrapperStructExtra.minViews3d);
526  poseTriangulationsWs.at(i) = {std::make_shared<WPoseTriangulation<TDatumsSP>>(
527  poseTriangulation)};
528  }
529  }
530  // Intermediate workers (e.g., OpenPose format to cv::Mat, json & frames recorder, ...)
531  postProcessingWs.clear();
532  // // Person ID identification (when no multi-thread and no dependency on tracking)
533  // if (wrapperStructExtra.identification)
534  // {
535  // const auto personIdExtractor = std::make_shared<PersonIdExtractor>();
536  // postProcessingWs.emplace_back(
537  // std::make_shared<WPersonIdExtractor<TDatumsSP>>(personIdExtractor)
538  // );
539  // }
540  // Frames processor (OpenPose format -> cv::Mat format)
541  if (renderOutput)
542  {
543  postProcessingWs = mergeVectors(postProcessingWs, cpuRenderers);
544  const auto opOutputToCvMat = std::make_shared<OpOutputToCvMat>();
545  postProcessingWs.emplace_back(std::make_shared<WOpOutputToCvMat<TDatumsSP>>(opOutputToCvMat));
546  }
547  // Re-scale pose if desired
548  // If desired scale is not the current input
549  if (wrapperStructPose.keypointScale != ScaleMode::InputResolution
550  // and desired scale is not output when size(input) = size(output)
551  && !(wrapperStructPose.keypointScale == ScaleMode::OutputResolution &&
552  (finalOutputSize == producerSize || finalOutputSize.x <= 0 || finalOutputSize.y <= 0))
553  // and desired scale is not net output when size(input) = size(net output)
554  && !(wrapperStructPose.keypointScale == ScaleMode::NetOutputResolution
555  && producerSize == wrapperStructPose.netInputSize))
556  {
557  // Then we must rescale the keypoints
558  auto keypointScaler = std::make_shared<KeypointScaler>(wrapperStructPose.keypointScale);
559  postProcessingWs.emplace_back(std::make_shared<WKeypointScaler<TDatumsSP>>(keypointScaler));
560  }
561  }
562 
563  // IK/Adam
564  const auto displayAdam = wrapperStructOutput.displayMode == DisplayMode::DisplayAdam
565  || (wrapperStructOutput.displayMode == DisplayMode::DisplayAll
566  && wrapperStructExtra.ikThreads > 0);
567  jointAngleEstimationsWs.clear();
568 #ifdef USE_3D_ADAM_MODEL
569  if (wrapperStructExtra.ikThreads > 0)
570  {
571  jointAngleEstimationsWs.resize(wrapperStructExtra.ikThreads);
572  // Pose extractor(s)
573  for (auto i = 0u; i < jointAngleEstimationsWs.size(); i++)
574  {
575  const auto jointAngleEstimation = std::make_shared<JointAngleEstimation>(displayAdam);
576  jointAngleEstimationsWs.at(i) = {std::make_shared<WJointAngleEstimation<TDatumsSP>>(
577  jointAngleEstimation)};
578  }
579  }
580 #endif
581 
582  // Output workers
583  outputWs.clear();
584  // Send information (e.g., to Unity) through UDP client-server communication
585 #ifdef USE_3D_ADAM_MODEL
586  if (!wrapperStructOutput.udpHost.empty() && !wrapperStructOutput.udpPort.empty())
587  {
588  const auto udpSender = std::make_shared<UdpSender>(wrapperStructOutput.udpHost,
589  wrapperStructOutput.udpPort);
590  outputWs.emplace_back(std::make_shared<WUdpSender<TDatumsSP>>(udpSender));
591  }
592 #endif
593  // Write people pose data on disk (json for OpenCV >= 3, xml, yml...)
594  if (!writeKeypointCleaned.empty())
595  {
596  const auto keypointSaver = std::make_shared<KeypointSaver>(writeKeypointCleaned,
597  wrapperStructOutput.writeKeypointFormat);
598  outputWs.emplace_back(std::make_shared<WPoseSaver<TDatumsSP>>(keypointSaver));
599  if (wrapperStructFace.enable)
600  outputWs.emplace_back(std::make_shared<WFaceSaver<TDatumsSP>>(keypointSaver));
601  if (wrapperStructHand.enable)
602  outputWs.emplace_back(std::make_shared<WHandSaver<TDatumsSP>>(keypointSaver));
603  }
604  // Write OpenPose output data on disk in json format (body/hand/face keypoints, body part locations if
605  // enabled, etc.)
606  if (!writeJsonCleaned.empty())
607  {
608  const auto peopleJsonSaver = std::make_shared<PeopleJsonSaver>(writeJsonCleaned);
609  outputWs.emplace_back(std::make_shared<WPeopleJsonSaver<TDatumsSP>>(peopleJsonSaver));
610  }
611  // Write people pose data on disk (COCO validation json format)
612  if (!wrapperStructOutput.writeCocoJson.empty())
613  {
614  // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it
615  const auto humanFormat = true;
616  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(
617  wrapperStructOutput.writeCocoJson, humanFormat,
618  (wrapperStructPose.poseModel != PoseModel::CAR_22
619  && wrapperStructPose.poseModel != PoseModel::CAR_12
620  ? CocoJsonFormat::Body : CocoJsonFormat::Car),
621  wrapperStructOutput.writeCocoJsonVariant);
622  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
623  }
624  // Write people foot pose data on disk (COCO validation json format for foot data)
625  if (!wrapperStructOutput.writeCocoFootJson.empty())
626  {
627  // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it
628  const auto humanFormat = true;
629  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(wrapperStructOutput.writeCocoFootJson,
630  humanFormat, CocoJsonFormat::Foot);
631  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
632  }
633  // Write frames as desired image format on hard disk
634  if (!writeImagesCleaned.empty())
635  {
636  const auto imageSaver = std::make_shared<ImageSaver>(writeImagesCleaned,
637  wrapperStructOutput.writeImagesFormat);
638  outputWs.emplace_back(std::make_shared<WImageSaver<TDatumsSP>>(imageSaver));
639  }
640  // Write frames as *.avi video on hard disk
641  const auto producerFps = (producerSharedPtr == nullptr ?
642  0. : producerSharedPtr->get(CV_CAP_PROP_FPS));
643  const auto originalVideoFps = (wrapperStructOutput.writeVideoFps > 0 ?
644  wrapperStructOutput.writeVideoFps
645  : producerFps);
646  if (!wrapperStructOutput.writeVideo.empty())
647  {
648  if (!oPProducer)
649  error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer"
650  " is one of the default ones (e.g., video, webcam, ...).",
651  __LINE__, __FUNCTION__, __FILE__);
652  if (finalOutputSize.x <= 0 || finalOutputSize.y <= 0)
653  error("Video can only be recorded if outputSize is fixed (e.g., video, webcam, IP camera),"
654  " but not for an image directory.", __LINE__, __FUNCTION__, __FILE__);
655  const auto videoSaver = std::make_shared<VideoSaver>(
656  wrapperStructOutput.writeVideo, CV_FOURCC('M','J','P','G'), originalVideoFps, finalOutputSize
657  );
658  outputWs.emplace_back(std::make_shared<WVideoSaver<TDatumsSP>>(videoSaver));
659  }
660  // Write joint angles as *.bvh file on hard disk
661 #ifdef USE_3D_ADAM_MODEL
662  if (!wrapperStructOutput.writeBvh.empty())
663  {
664  const auto bvhSaver = std::make_shared<BvhSaver>(
665  wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps
666  );
667  outputWs.emplace_back(std::make_shared<WBvhSaver<TDatumsSP>>(bvhSaver));
668  }
669 #endif
670  // Write heat maps as desired image format on hard disk
671  if (!writeHeatMapsCleaned.empty())
672  {
673  const auto heatMapSaver = std::make_shared<HeatMapSaver>(writeHeatMapsCleaned,
674  wrapperStructOutput.writeHeatMapsFormat);
675  outputWs.emplace_back(std::make_shared<WHeatMapSaver<TDatumsSP>>(heatMapSaver));
676  }
677  // Add frame information for GUI
678  const bool guiEnabled = (wrapperStructOutput.displayMode != DisplayMode::NoDisplay);
679  // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting
680  // recorded frames will look exactly like the final image displayed by the GUI
681  if (wrapperStructOutput.guiVerbose && (guiEnabled || !userOutputWs.empty()
682  || threadManagerMode == ThreadManagerMode::Asynchronous
683  || threadManagerMode == ThreadManagerMode::AsynchronousOut))
684  {
685  const auto guiInfoAdder = std::make_shared<GuiInfoAdder>(numberThreads, guiEnabled);
686  outputWs.emplace_back(std::make_shared<WGuiInfoAdder<TDatumsSP>>(guiInfoAdder));
687  }
688  // Minimal graphical user interface (GUI)
689  guiW = nullptr;
690  if (guiEnabled)
691  {
692  // PoseRenderers to Renderers
693  std::vector<std::shared_ptr<Renderer>> renderers;
694  if (wrapperStructPose.renderMode == RenderMode::Cpu)
695  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseCpuRenderer));
696  else
697  for (const auto& poseGpuRenderer : poseGpuRenderers)
698  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseGpuRenderer));
699  // Display
700  // Adam (+3-D/2-D) display
701  if (displayAdam)
702  {
703 #ifdef USE_3D_ADAM_MODEL
704  // Gui
705  const auto gui = std::make_shared<GuiAdam>(
706  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
707  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
708  wrapperStructOutput.displayMode, JointAngleEstimation::getTotalModel(),
709  wrapperStructOutput.writeVideoAdam
710  );
711  // WGui
712  guiW = {std::make_shared<WGuiAdam<TDatumsSP>>(gui)};
713 #endif
714  }
715  // 3-D (+2-D) display
716  else if (wrapperStructOutput.displayMode == DisplayMode::Display3D
717  || wrapperStructOutput.displayMode == DisplayMode::DisplayAll)
718  {
719  // Gui
720  const auto gui = std::make_shared<Gui3D>(
721  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
722  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
723  wrapperStructPose.poseModel, wrapperStructOutput.displayMode
724  );
725  // WGui
726  guiW = {std::make_shared<WGui3D<TDatumsSP>>(gui)};
727  }
728  // 2-D display
729  else if (wrapperStructOutput.displayMode == DisplayMode::Display2D)
730  {
731  // Gui
732  const auto gui = std::make_shared<Gui>(
733  finalOutputSize, wrapperStructOutput.fullScreen, threadManager.getIsRunningSharedPtr(),
734  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers
735  );
736  // WGui
737  guiW = {std::make_shared<WGui<TDatumsSP>>(gui)};
738  }
739  else
740  error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__);
741  }
742  // Set wrapper as configured
743  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
744 
745 
746 
747 
748 
749  // The fewer the queues, the fewer threads opened, and potentially the lower the lag
750 
751  // Sanity checks
752  if ((datumProducerW == nullptr) == (userInputWs.empty())
753  && threadManagerMode != ThreadManagerMode::Asynchronous
754  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
755  {
756  const auto message = "You need to have 1 and only 1 producer selected. You can introduce your own"
757  " producer by using setWorker(WorkerType::Input, ...) or use the OpenPose"
758  " default producer by configuring it in the configure function, or use the"
759  " ThreadManagerMode::Asynchronous(In) mode.";
760  error(message, __LINE__, __FUNCTION__, __FILE__);
761  }
762  if (outputWs.empty() && userOutputWs.empty() && guiW == nullptr
763  && threadManagerMode != ThreadManagerMode::Asynchronous
764  && threadManagerMode != ThreadManagerMode::AsynchronousOut)
765  {
766  error("No output selected.", __LINE__, __FUNCTION__, __FILE__);
767  }
768 
769  // Thread Manager
770  // Clean previous thread manager (avoid configure to crash the program if used more than once)
771  threadManager.reset();
772  unsigned long long threadId = 0ull;
773  auto queueIn = 0ull;
774  auto queueOut = 1ull;
775  // After producer
776  // ID generator (before any multi-threading or any function that requires the ID)
777  const auto wIdGenerator = std::make_shared<WIdGenerator<TDatumsSP>>();
778  std::vector<TWorker> workersAux{wIdGenerator};
779  // Scale & cv::Mat to OP format
780  if (scaleAndSizeExtractorW != nullptr)
781  workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW});
782  if (cvMatToOpInputW != nullptr)
783  workersAux = mergeVectors(workersAux, {cvMatToOpInputW});
784  // cv::Mat to output format
785  if (cvMatToOpOutputW != nullptr)
786  workersAux = mergeVectors(workersAux, {cvMatToOpOutputW});
787 
788  // Producer
789  // If custom user Worker and uses its own thread
790  if (!userInputWs.empty() && userInputWsOnNewThread)
791  {
792  // Thread 0, queues 0 -> 1
793  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
794  threadManager.add(threadId, userInputWs, queueIn++, queueOut++);
795  threadIdPP(threadId, multiThreadEnabled);
796  }
797  // If custom user Worker in same thread
798  else if (!userInputWs.empty())
799  workersAux = mergeVectors(userInputWs, workersAux);
800  // If OpenPose producer (same thread)
801  else if (datumProducerW != nullptr)
802  workersAux = mergeVectors({datumProducerW}, workersAux);
803  // Otherwise
804  else if (threadManagerMode != ThreadManagerMode::Asynchronous
805  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
806  error("No input selected.", __LINE__, __FUNCTION__, __FILE__);
807  // Thread 0 or 1, queues 0 -> 1
808  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
809  threadManager.add(threadId, workersAux, queueIn++, queueOut++);
810  // Increase thread
811  threadIdPP(threadId, multiThreadEnabled);
812 
813  // Pose estimation & rendering
814  // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs
815  if (!poseExtractorsWs.empty())
816  {
817  if (multiThreadEnabled)
818  {
819  for (auto& wPose : poseExtractorsWs)
820  {
821  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
822  threadManager.add(threadId, wPose, queueIn, queueOut);
823  threadIdPP(threadId, multiThreadEnabled);
824  }
825  queueIn++;
826  queueOut++;
827  // Sort frames - Required own thread
828  if (poseExtractorsWs.size() > 1u)
829  {
830  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
831  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
832  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
833  threadIdPP(threadId, multiThreadEnabled);
834  }
835  }
836  else
837  {
838  if (poseExtractorsWs.size() > 1)
839  log("Multi-threading disabled, only 1 thread running. All GPUs have been disabled but the"
840  " first one, which is defined by gpuNumberStart (e.g., in the OpenPose demo, it is set"
841  " with the `--num_gpu_start` flag).", Priority::High);
842  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
843  threadManager.add(threadId, poseExtractorsWs.at(0), queueIn++, queueOut++);
844  }
845  }
846  // Assemble all frames from same time instant (3-D module)
847  const auto wQueueAssembler = std::make_shared<WQueueAssembler<TDatumsSP, TDatums>>();
848  // 3-D reconstruction
849  if (!poseTriangulationsWs.empty())
850  {
851  // Assemble frames
852  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
853  threadManager.add(threadId, wQueueAssembler, queueIn++, queueOut++);
854  threadIdPP(threadId, multiThreadEnabled);
855  // 3-D reconstruction
856  if (multiThreadEnabled)
857  {
858  for (auto& wPoseTriangulations : poseTriangulationsWs)
859  {
860  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
861  threadManager.add(threadId, wPoseTriangulations, queueIn, queueOut);
862  threadIdPP(threadId, multiThreadEnabled);
863  }
864  queueIn++;
865  queueOut++;
866  // Sort frames
867  if (poseTriangulationsWs.size() > 1u)
868  {
869  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
870  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
871  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
872  threadIdPP(threadId, multiThreadEnabled);
873  }
874  }
875  else
876  {
877  if (poseTriangulationsWs.size() > 1)
878  log("Multi-threading disabled, only 1 thread running for 3-D triangulation.",
879  Priority::High);
880  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
881  threadManager.add(threadId, poseTriangulationsWs.at(0), queueIn++, queueOut++);
882  }
883  }
884  else
885  postProcessingWs = mergeVectors({wQueueAssembler}, postProcessingWs);
886  // Adam/IK step
887  if (!jointAngleEstimationsWs.empty())
888  {
889  if (multiThreadEnabled)
890  {
891  for (auto& wJointAngleEstimator : jointAngleEstimationsWs)
892  {
893  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
894  threadManager.add(threadId, wJointAngleEstimator, queueIn, queueOut);
895  threadIdPP(threadId, multiThreadEnabled);
896  }
897  queueIn++;
898  queueOut++;
899  // Sort frames
900  if (jointAngleEstimationsWs.size() > 1)
901  {
902  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
903  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
904  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
905  threadIdPP(threadId, multiThreadEnabled);
906  }
907  }
908  else
909  {
910  if (jointAngleEstimationsWs.size() > 1)
911  log("Multi-threading disabled, only 1 thread running for joint angle estimation.",
912  Priority::High);
913  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
914  threadManager.add(threadId, jointAngleEstimationsWs.at(0), queueIn++, queueOut++);
915  }
916  }
917  // Post processing workers
918  if (!postProcessingWs.empty())
919  {
920  // Combining postProcessingWs and outputWs
921  outputWs = mergeVectors(postProcessingWs, outputWs);
922  // // If I wanna split them
923  // log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
924  // threadManager.add(threadId, postProcessingWs, queueIn++, queueOut++);
925  // threadIdPP(threadId, multiThreadEnabled);
926  }
927  // If custom user Worker and uses its own thread
928  if (!userPostProcessingWs.empty())
929  {
930  // If custom user Worker in its own thread
931  if (userPostProcessingWsOnNewThread)
932  {
933  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
934  threadManager.add(threadId, userPostProcessingWs, queueIn++, queueOut++);
935  threadIdPP(threadId, multiThreadEnabled);
936  }
937  // If custom user Worker in same thread
938  // Merge with outputWs
939  else
940  outputWs = mergeVectors(outputWs, userPostProcessingWs);
941  }
942  // Output workers
943  if (!outputWs.empty())
944  {
945  // Thread 4 or 5, queues 4 -> 5
946  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
947  threadManager.add(threadId, outputWs, queueIn++, queueOut++);
948  threadIdPP(threadId, multiThreadEnabled);
949  }
950  // User output worker
951  // Thread Y, queues Q -> Q+1
952  if (!userOutputWs.empty())
953  {
954  if (userOutputWsOnNewThread)
955  {
956  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
957  threadManager.add(threadId, userOutputWs, queueIn++, queueOut++);
958  threadIdPP(threadId, multiThreadEnabled);
959  }
960  else
961  {
962  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
963  threadManager.add(threadId-1, userOutputWs, queueIn++, queueOut++);
964  }
965  }
966  // OpenPose GUI
967  if (guiW != nullptr)
968  {
969  // Thread Y+1, queues Q+1 -> Q+2
970  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
971  threadManager.add(threadId, guiW, queueIn++, queueOut++);
972  threadIdPP(threadId, multiThreadEnabled);
973  }
974  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
975  }
976  catch (const std::exception& e)
977  {
978  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
979  }
980  }
981 }
982 
983 #endif // OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
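Usage note (not part of the header above): the snippet below is a minimal, hypothetical sketch of how the configureThreadManager() helper declared in this file could be driven directly, rather than through the higher-level op::Wrapper class that normally performs this wiring. It assumes the WrapperStruct* types are default-constructible with reasonable defaults, and that ProducerType::Webcam, DisplayMode::Display2D, ThreadManagerMode::Synchronous and a blocking ThreadManager::exec() exist with these names in this OpenPose version; treat it as an illustration under those assumptions, not as the canonical API.

#include <array>
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

int main()
{
    using TDatums = std::vector<op::Datum>;
    using TDatumsSP = std::shared_ptr<TDatums>;
    using TWorker = std::shared_ptr<op::Worker<TDatumsSP>>;

    // No custom user workers: leave every WorkerType slot empty so that the default
    // OpenPose producer, post-processing and output workers are created instead.
    const std::array<std::vector<TWorker>, int(op::WorkerType::Size)> userWs{};
    const std::array<bool, int(op::WorkerType::Size)> userWsOnNewThread{};

    // Default configuration, except: read frames from the webcam and open the 2-D
    // display, so the sanity checks find both an input producer and an output.
    op::WrapperStructInput wrapperStructInput;
    wrapperStructInput.producerType = op::ProducerType::Webcam;   // assumed enum value
    op::WrapperStructOutput wrapperStructOutput;
    wrapperStructOutput.displayMode = op::DisplayMode::Display2D;

    // Build the whole worker/queue graph and run it until the producer stops
    // or the GUI window is closed (exec() assumed to block).
    op::ThreadManager<TDatumsSP> threadManager;
    op::configureThreadManager<TDatums, TDatumsSP, TWorker>(
        threadManager, /*multiThreadEnabled*/ true, op::ThreadManagerMode::Synchronous,
        op::WrapperStructPose{}, op::WrapperStructFace{}, op::WrapperStructHand{},
        op::WrapperStructExtra{}, wrapperStructInput, wrapperStructOutput,
        userWs, userWsOnNewThread);
    threadManager.exec();
    return 0;
}

Selecting a webcam producer and the 2-D display is what keeps the checks in the listing satisfied in Synchronous mode: with neither an input producer nor any output worker configured, the sanity checks around lines 752 and 762 would abort with "You need to have 1 and only 1 producer selected." or "No output selected.".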