OpenPose 1.0.0rc2
OpenPose: A Real-Time Multi-Person Keypoint Detection And Multi-Threading C++ Library
wrapperAuxiliary.hpp
1 #ifndef OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
2 #define OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
3 
13 
14 namespace op
15 {
29  OP_API void wrapperConfigureSanityChecks(
30  WrapperStructPose& wrapperStructPose, const WrapperStructFace& wrapperStructFace,
31  const WrapperStructHand& wrapperStructHand, const WrapperStructExtra& wrapperStructExtra,
32  const WrapperStructInput& wrapperStructInput, const WrapperStructOutput& wrapperStructOutput,
33  const WrapperStructGui& wrapperStructGui, const bool renderOutput, const bool userOutputWsEmpty,
34  const std::shared_ptr<Producer>& producerSharedPtr, const ThreadManagerMode threadManagerMode);
35 
44  OP_API void threadIdPP(unsigned long long& threadId, const bool multiThreadEnabled);
45 
52  template<typename TDatums,
53  typename TDatumsSP = std::shared_ptr<TDatums>,
54  typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
55  OP_API void configureThreadManager(
56  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabled,
57  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPose,
58  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
59  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
60  const WrapperStructOutput& wrapperStructOutput, const WrapperStructGui& wrapperStructGui,
61  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
62  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread);
63 }
64 
65 
66 
67 
68 
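Usage note (not part of the header): configureThreadManager() is normally driven internally by the higher-level op::Wrapper class, but it can also be called directly on an op::ThreadManager. The sketch below is a minimal, hypothetical example, assuming the umbrella include <openpose/headers.hpp> pulls in this header, that the wrapper structs are default-constructible, and that ThreadManagerMode::Synchronous is used; the sanity checks in this file require exactly one producer, so a real call must adapt WrapperStructInput (or supply a custom WorkerType::Input worker) before executing.

#include <array>
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

int main()
{
    using TDatums = std::vector<op::Datum>;
    using TDatumsSP = std::shared_ptr<TDatums>;
    using TWorker = std::shared_ptr<op::Worker<TDatumsSP>>;

    // Thread manager that will own the worker/queue graph
    op::ThreadManager<TDatumsSP> threadManager;
    // No custom user workers in this sketch
    const std::array<std::vector<TWorker>, int(op::WorkerType::Size)> userWs{};
    const std::array<bool, int(op::WorkerType::Size)> userWsOnNewThread{};

    // Build the full OpenPose pipeline; the wrapper structs here are default-constructed,
    // so in real use WrapperStructInput must be set up to select a producer (video, webcam, ...)
    op::configureThreadManager<TDatums>(
        threadManager, /*multiThreadEnabled*/ true, op::ThreadManagerMode::Synchronous,
        op::WrapperStructPose{}, op::WrapperStructFace{}, op::WrapperStructHand{},
        op::WrapperStructExtra{}, op::WrapperStructInput{}, op::WrapperStructOutput{},
        op::WrapperStructGui{}, userWs, userWsOnNewThread);

    // Blocks until the producer finishes or the GUI is closed
    threadManager.exec();
    return 0;
}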
69 // Implementation
70 #include <openpose/3d/headers.hpp>
74 #include <openpose/gpu/gpu.hpp>
75 #include <openpose/gui/headers.hpp>
82 namespace op
83 {
84  template<typename TDatums, typename TDatumsSP, typename TWorker>
85  void configureThreadManager(
86  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabledTemp,
87  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPoseTemp,
88  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
89  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
90  const WrapperStructOutput& wrapperStructOutput, const WrapperStructGui& wrapperStructGui,
91  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
92  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread)
93  {
94  try
95  {
96  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
97 
98  // Create producer
99  auto producerSharedPtr = createProducer(
100  wrapperStructInput.producerType, wrapperStructInput.producerString,
101  wrapperStructInput.cameraResolution, wrapperStructInput.webcamFps,
102  wrapperStructInput.cameraParameterPath, wrapperStructInput.undistortImage,
103  wrapperStructInput.imageDirectoryStereo);
104 
105  // Editable arguments
106  auto wrapperStructPose = wrapperStructPoseTemp;
107  auto multiThreadEnabled = multiThreadEnabledTemp;
108 
109  // Workers
110  TWorker datumProducerW;
111  TWorker scaleAndSizeExtractorW;
112  TWorker cvMatToOpInputW;
113  TWorker cvMatToOpOutputW;
114  std::vector<std::vector<TWorker>> poseExtractorsWs;
115  std::vector<std::vector<TWorker>> poseTriangulationsWs;
116  std::vector<std::vector<TWorker>> jointAngleEstimationsWs;
117  std::vector<TWorker> postProcessingWs;
118  std::vector<TWorker> outputWs;
119  TWorker guiW;
120 
121  // User custom workers
122  const auto& userInputWs = userWs[int(WorkerType::Input)];
123  const auto& userPostProcessingWs = userWs[int(WorkerType::PostProcessing)];
124  const auto& userOutputWs = userWs[int(WorkerType::Output)];
125  const auto userInputWsOnNewThread = userWsOnNewThread[int(WorkerType::Input)];
126  const auto userPostProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PostProcessing)];
127  const auto userOutputWsOnNewThread = userWsOnNewThread[int(WorkerType::Output)];
128 
129  // Video seek
130  const auto spVideoSeek = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
131  // It cannot be initialized directly in the constructor (compiler error: std::atomic is not copyable)
132  spVideoSeek->first = false;
133  spVideoSeek->second = 0;
134 
135  // Required parameters
136  const auto renderOutput = wrapperStructPose.renderMode != RenderMode::None
137  || wrapperStructFace.renderMode != RenderMode::None
138  || wrapperStructHand.renderMode != RenderMode::None;
139  const auto renderOutputGpu = wrapperStructPose.renderMode == RenderMode::Gpu
140  || wrapperStructFace.renderMode == RenderMode::Gpu
141  || wrapperStructHand.renderMode == RenderMode::Gpu;
142  const auto renderFace = wrapperStructFace.enable && wrapperStructFace.renderMode != RenderMode::None;
143  const auto renderHand = wrapperStructHand.enable && wrapperStructHand.renderMode != RenderMode::None;
144  const auto renderHandGpu = wrapperStructHand.enable && wrapperStructHand.renderMode == RenderMode::Gpu;
145 
146  // Check no wrong/contradictory flags enabled
147  const auto userOutputWsEmpty = userOutputWs.empty();
148  wrapperConfigureSanityChecks(
149  wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, wrapperStructInput,
150  wrapperStructOutput, wrapperStructGui, renderOutput, userOutputWsEmpty, producerSharedPtr,
151  threadManagerMode);
152 
153  // Get number of threads
154  auto numberThreads = wrapperStructPose.gpuNumber;
155  auto gpuNumberStart = wrapperStructPose.gpuNumberStart;
156  // CPU --> 1 thread or no pose extraction
157  if (getGpuMode() == GpuMode::NoGpu)
158  {
159  numberThreads = (wrapperStructPose.gpuNumber == 0 ? 0 : 1);
160  gpuNumberStart = 0;
161  // Disabling multi-threading makes the code 400 ms faster (2.3 sec vs. 2.7 sec on an i7-6850K)
162  // and fixes the bug where the screen was not properly displayed and only refreshed sometimes
163  // Note: The screen bug could also be fixed by using waitKey(30) rather than waitKey(1)
164  multiThreadEnabled = false;
165  }
166  // GPU --> user picks (<= #GPUs)
167  else
168  {
169  // Get the total number of GPUs
170  const auto totalGpuNumber = getGpuNumber();
171  // If the number of GPUs < 0 --> use all available GPUs
172  if (numberThreads < 0)
173  {
174  if (totalGpuNumber <= gpuNumberStart)
175  error("Number of initial GPU (`--number_gpu_start`) must be lower than the total number of"
176  " used GPUs (`--number_gpu`)", __LINE__, __FUNCTION__, __FILE__);
177  numberThreads = totalGpuNumber - gpuNumberStart;
178  // Reset initial GPU to 0 (we want them all)
179  // Logging message
180  log("Auto-detecting all available GPUs... Detected " + std::to_string(totalGpuNumber)
181  + " GPU(s), using " + std::to_string(numberThreads) + " of them starting at GPU "
182  + std::to_string(gpuNumberStart) + ".", Priority::High);
183  }
184  // Sanity check
185  if (gpuNumberStart + numberThreads > totalGpuNumber)
186  error("Initial GPU selected (`--number_gpu_start`) + number GPUs to use (`--number_gpu`) must"
187  " be lower or equal than the total number of GPUs in your machine ("
188  + std::to_string(gpuNumberStart) + " + "
189  + std::to_string(numberThreads) + " vs. "
190  + std::to_string(totalGpuNumber) + ").",
191  __LINE__, __FUNCTION__, __FILE__);
192  }
193 
194  // Proper format
195  const auto writeImagesCleaned = formatAsDirectory(wrapperStructOutput.writeImages);
196  const auto writeKeypointCleaned = formatAsDirectory(wrapperStructOutput.writeKeypoint);
197  const auto writeJsonCleaned = formatAsDirectory(wrapperStructOutput.writeJson);
198  const auto writeHeatMapsCleaned = formatAsDirectory(wrapperStructOutput.writeHeatMaps);
199  const auto modelFolder = formatAsDirectory(wrapperStructPose.modelFolder);
200 
201  // Common parameters
202  auto finalOutputSize = wrapperStructPose.outputSize;
203  Point<int> producerSize{-1,-1};
204  const auto oPProducer = (producerSharedPtr != nullptr);
205  if (oPProducer)
206  {
207  // 1. Set producer properties
208  const auto displayProducerFpsMode = (wrapperStructInput.realTimeProcessing
209  ? ProducerFpsMode::OriginalFps : ProducerFpsMode::RetrievalFps);
210  producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
211  producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip);
212  producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate);
213  producerSharedPtr->set(ProducerProperty::AutoRepeat, wrapperStructInput.framesRepeat);
214  // 2. Set finalOutputSize
215  producerSize = Point<int>{(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
216  (int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
217  // Set finalOutputSize to input size if desired
218  if (finalOutputSize.x == -1 || finalOutputSize.y == -1)
219  finalOutputSize = producerSize;
220  }
221 
222  // Producer
223  if (oPProducer)
224  {
225  const auto datumProducer = std::make_shared<DatumProducer<TDatums>>(
226  producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameStep,
227  wrapperStructInput.frameLast, spVideoSeek
228  );
229  datumProducerW = std::make_shared<WDatumProducer<TDatumsSP, TDatums>>(datumProducer);
230  }
231  else
232  datumProducerW = nullptr;
233 
234  std::vector<std::shared_ptr<PoseExtractorNet>> poseExtractorNets;
235  std::vector<std::shared_ptr<FaceExtractorNet>> faceExtractorNets;
236  std::vector<std::shared_ptr<HandExtractorNet>> handExtractorNets;
237  std::vector<std::shared_ptr<PoseGpuRenderer>> poseGpuRenderers;
238  std::shared_ptr<PoseCpuRenderer> poseCpuRenderer;
239  if (numberThreads > 0)
240  {
241  // Get input scales and sizes
242  const auto scaleAndSizeExtractor = std::make_shared<ScaleAndSizeExtractor>(
243  wrapperStructPose.netInputSize, finalOutputSize, wrapperStructPose.scalesNumber,
244  wrapperStructPose.scaleGap
245  );
246  scaleAndSizeExtractorW = std::make_shared<WScaleAndSizeExtractor<TDatumsSP>>(scaleAndSizeExtractor);
247 
248  // Input cvMat to OpenPose input & output format
249  const auto cvMatToOpInput = std::make_shared<CvMatToOpInput>(wrapperStructPose.poseModel);
250  cvMatToOpInputW = std::make_shared<WCvMatToOpInput<TDatumsSP>>(cvMatToOpInput);
251  if (renderOutput)
252  {
253  const auto cvMatToOpOutput = std::make_shared<CvMatToOpOutput>();
254  cvMatToOpOutputW = std::make_shared<WCvMatToOpOutput<TDatumsSP>>(cvMatToOpOutput);
255  }
256 
257  // Pose estimators & renderers
258  std::vector<TWorker> cpuRenderers;
259  poseExtractorsWs.clear();
260  poseExtractorsWs.resize(numberThreads);
261  if (wrapperStructPose.enable)
262  {
263  // Pose estimators
264  for (auto gpuId = 0; gpuId < numberThreads; gpuId++)
265  poseExtractorNets.emplace_back(std::make_shared<PoseExtractorCaffe>(
266  wrapperStructPose.poseModel, modelFolder, gpuId + gpuNumberStart,
267  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
268  wrapperStructPose.addPartCandidates, wrapperStructPose.maximizePositives,
269  wrapperStructPose.enableGoogleLogging
270  ));
271 
272  // Pose renderers
273  if (renderOutputGpu || wrapperStructPose.renderMode == RenderMode::Cpu)
274  {
275  // If wrapperStructPose.renderMode != RenderMode::Gpu but renderOutput, then we create an
276  // alpha = 0 pose renderer in order to keep the removing background option
277  const auto alphaKeypoint = (wrapperStructPose.renderMode != RenderMode::None
278  ? wrapperStructPose.alphaKeypoint : 0.f);
279  const auto alphaHeatMap = (wrapperStructPose.renderMode != RenderMode::None
280  ? wrapperStructPose.alphaHeatMap : 0.f);
281  // GPU rendering
282  if (renderOutputGpu)
283  {
284  for (const auto& poseExtractorNet : poseExtractorNets)
285  {
286  poseGpuRenderers.emplace_back(std::make_shared<PoseGpuRenderer>(
287  wrapperStructPose.poseModel, poseExtractorNet, wrapperStructPose.renderThreshold,
288  wrapperStructPose.blendOriginalFrame, alphaKeypoint,
289  alphaHeatMap, wrapperStructPose.defaultPartToRender
290  ));
291  }
292  }
293  // CPU rendering
294  if (wrapperStructPose.renderMode == RenderMode::Cpu)
295  {
296  poseCpuRenderer = std::make_shared<PoseCpuRenderer>(
297  wrapperStructPose.poseModel, wrapperStructPose.renderThreshold,
298  wrapperStructPose.blendOriginalFrame, alphaKeypoint, alphaHeatMap,
299  wrapperStructPose.defaultPartToRender);
300  cpuRenderers.emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(poseCpuRenderer));
301  }
302  }
303  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
304 
305  // Pose extractor(s)
306  poseExtractorsWs.resize(poseExtractorNets.size());
307  const auto personIdExtractor = (wrapperStructExtra.identification
308  ? std::make_shared<PersonIdExtractor>() : nullptr);
309  // Keep top N people
310  // Added right after PoseExtractorNet to avoid:
311  // 1) Rendering people that are later deleted (wrong visualization).
312  // 2) Processing faces and hands on people that will be deleted (speed up).
313  // 3) Running tracking before deleting the people.
314  // Add KeepTopNPeople for each PoseExtractorNet
315  const auto keepTopNPeople = (wrapperStructPose.numberPeopleMax > 0 ?
316  std::make_shared<KeepTopNPeople>(wrapperStructPose.numberPeopleMax)
317  : nullptr);
318  // Person tracker
319  auto personTrackers = std::make_shared<std::vector<std::shared_ptr<PersonTracker>>>();
320  if (wrapperStructExtra.tracking > -1)
321  personTrackers->emplace_back(
322  std::make_shared<PersonTracker>(wrapperStructExtra.tracking == 0));
323  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
324  {
325  // OpenPose keypoint detector + keepTopNPeople
326  // + ID extractor (experimental) + tracking (experimental)
327  const auto poseExtractor = std::make_shared<PoseExtractor>(
328  poseExtractorNets.at(i), keepTopNPeople, personIdExtractor, personTrackers,
329  wrapperStructPose.numberPeopleMax, wrapperStructExtra.tracking);
330  poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractor<TDatumsSP>>(poseExtractor)};
331  // // Just OpenPose keypoint detector
332  // poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractorNet<TDatumsSP>>(
333  // poseExtractorNets.at(i))};
334  }
335 
336  // // (Before tracking / id extractor)
337  // // Added right after PoseExtractorNet to avoid:
338  // // 1) Rendering people that are later deleted (wrong visualization).
339  // // 2) Processing faces and hands on people that will be deleted (speed up).
340  // if (wrapperStructPose.numberPeopleMax > 0)
341  // {
342  // // Add KeepTopNPeople for each PoseExtractorNet
343  // const auto keepTopNPeople = std::make_shared<KeepTopNPeople>(
344  // wrapperStructPose.numberPeopleMax);
345  // for (auto& wPose : poseExtractorsWs)
346  // wPose.emplace_back(std::make_shared<WKeepTopNPeople<TDatumsSP>>(keepTopNPeople));
347  // }
348  }
349 
350 
351  // Face extractor(s)
352  if (wrapperStructFace.enable)
353  {
354  // Face detector
355  // OpenPose face detector
356  if (wrapperStructPose.enable)
357  {
358  const auto faceDetector = std::make_shared<FaceDetector>(wrapperStructPose.poseModel);
359  for (auto& wPose : poseExtractorsWs)
360  wPose.emplace_back(std::make_shared<WFaceDetector<TDatumsSP>>(faceDetector));
361  }
362  // OpenCV face detector
363  else
364  {
365  log("Body keypoint detection is disabled. Hence, using OpenCV face detector (much less"
366  " accurate but faster).", Priority::High);
367  for (auto& wPose : poseExtractorsWs)
368  {
369  // 1 FaceDetectorOpenCV per thread, as the OpenCV face detector is not thread-safe
370  const auto faceDetectorOpenCV = std::make_shared<FaceDetectorOpenCV>(modelFolder);
371  wPose.emplace_back(
372  std::make_shared<WFaceDetectorOpenCV<TDatumsSP>>(faceDetectorOpenCV)
373  );
374  }
375  }
376  // Face keypoint extractor
377  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
378  {
379  // Face keypoint extractor
380  const auto netOutputSize = wrapperStructFace.netInputSize;
381  const auto faceExtractorNet = std::make_shared<FaceExtractorCaffe>(
382  wrapperStructFace.netInputSize, netOutputSize, modelFolder,
383  gpu + gpuNumberStart, wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
384  wrapperStructPose.enableGoogleLogging
385  );
386  faceExtractorNets.emplace_back(faceExtractorNet);
387  poseExtractorsWs.at(gpu).emplace_back(
388  std::make_shared<WFaceExtractorNet<TDatumsSP>>(faceExtractorNet));
389  }
390  }
391 
392  // Hand extractor(s)
393  if (wrapperStructHand.enable)
394  {
395  const auto handDetector = std::make_shared<HandDetector>(wrapperStructPose.poseModel);
396  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
397  {
398  // Hand detector
399  // If tracking
400  if (wrapperStructHand.tracking)
401  poseExtractorsWs.at(gpu).emplace_back(
402  std::make_shared<WHandDetectorTracking<TDatumsSP>>(handDetector)
403  );
404  // If detection
405  else
406  poseExtractorsWs.at(gpu).emplace_back(
407  std::make_shared<WHandDetector<TDatumsSP>>(handDetector));
408  // Hand keypoint extractor
409  const auto netOutputSize = wrapperStructHand.netInputSize;
410  const auto handExtractorNet = std::make_shared<HandExtractorCaffe>(
411  wrapperStructHand.netInputSize, netOutputSize, modelFolder,
412  gpu + gpuNumberStart, wrapperStructHand.scalesNumber, wrapperStructHand.scaleRange,
413  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
414  wrapperStructPose.enableGoogleLogging
415  );
416  handExtractorNets.emplace_back(handExtractorNet);
417  poseExtractorsWs.at(gpu).emplace_back(
418  std::make_shared<WHandExtractorNet<TDatumsSP>>(handExtractorNet)
419  );
420  // If tracking
421  if (wrapperStructHand.tracking)
422  poseExtractorsWs.at(gpu).emplace_back(
423  std::make_shared<WHandDetectorUpdate<TDatumsSP>>(handDetector)
424  );
425  }
426  }
427 
428  // Pose renderer(s)
429  if (!poseGpuRenderers.empty())
430  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
431  poseExtractorsWs.at(i).emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(
432  poseGpuRenderers.at(i)
433  ));
434 
435  // Face renderer(s)
436  if (renderFace)
437  {
438  // CPU rendering
439  if (wrapperStructFace.renderMode == RenderMode::Cpu)
440  {
441  // Construct face renderer
442  const auto faceRenderer = std::make_shared<FaceCpuRenderer>(wrapperStructFace.renderThreshold,
443  wrapperStructFace.alphaKeypoint,
444  wrapperStructFace.alphaHeatMap);
445  // Add worker
446  cpuRenderers.emplace_back(std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
447  }
448  // GPU rendering
449  else if (wrapperStructFace.renderMode == RenderMode::Gpu)
450  {
451  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
452  {
453  // Construct face renderer
454  const auto faceRenderer = std::make_shared<FaceGpuRenderer>(
455  wrapperStructFace.renderThreshold, wrapperStructFace.alphaKeypoint,
456  wrapperStructFace.alphaHeatMap
457  );
458  // Performance boost -> share spGpuMemory for all renderers
459  if (!poseGpuRenderers.empty())
460  {
461  const bool isLastRenderer = !renderHandGpu;
462  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
463  poseGpuRenderers.at(i)
464  );
465  faceRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(),
466  isLastRenderer);
467  }
468  // Add worker
469  poseExtractorsWs.at(i).emplace_back(
470  std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
471  }
472  }
473  else
474  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
475  }
476 
477  // Hand renderer(s)
478  if (renderHand)
479  {
480  // CPU rendering
481  if (wrapperStructHand.renderMode == RenderMode::Cpu)
482  {
483  // Construct hand renderer
484  const auto handRenderer = std::make_shared<HandCpuRenderer>(wrapperStructHand.renderThreshold,
485  wrapperStructHand.alphaKeypoint,
486  wrapperStructHand.alphaHeatMap);
487  // Add worker
488  cpuRenderers.emplace_back(std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
489  }
490  // GPU rendering
491  else if (wrapperStructHand.renderMode == RenderMode::Gpu)
492  {
493  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
494  {
495  // Construct hands renderer
496  const auto handRenderer = std::make_shared<HandGpuRenderer>(
497  wrapperStructHand.renderThreshold, wrapperStructHand.alphaKeypoint,
498  wrapperStructHand.alphaHeatMap
499  );
500  // Performance boost -> share spGpuMemory for all renderers
501  if (!poseGpuRenderers.empty())
502  {
503  const bool isLastRenderer = true;
504  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
505  poseGpuRenderers.at(i)
506  );
507  handRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(),
508  isLastRenderer);
509  }
510  // Add worker
511  poseExtractorsWs.at(i).emplace_back(
512  std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
513  }
514  }
515  else
516  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
517  }
518 
519  // 3-D reconstruction
520  poseTriangulationsWs.clear();
521  if (wrapperStructExtra.reconstruct3d)
522  {
523  // For all (body/face/hands): PoseTriangulations ~30 msec, 8 GPUs ~30 msec for keypoint estimation
524  poseTriangulationsWs.resize(fastMax(1, int(poseExtractorsWs.size() / 4)));
525  for (auto i = 0u ; i < poseTriangulationsWs.size() ; i++)
526  {
527  const auto poseTriangulation = std::make_shared<PoseTriangulation>(
528  wrapperStructExtra.minViews3d);
529  poseTriangulationsWs.at(i) = {std::make_shared<WPoseTriangulation<TDatumsSP>>(
530  poseTriangulation)};
531  }
532  }
533  // Intermediate workers (e.g., OpenPose format to cv::Mat, json & frames recorder, ...)
534  postProcessingWs.clear();
535  // // Person ID identification (when no multi-thread and no dependency on tracking)
536  // if (wrapperStructExtra.identification)
537  // {
538  // const auto personIdExtractor = std::make_shared<PersonIdExtractor>();
539  // postProcessingWs.emplace_back(
540  // std::make_shared<WPersonIdExtractor<TDatumsSP>>(personIdExtractor)
541  // );
542  // }
543  // Frames processor (OpenPose format -> cv::Mat format)
544  if (renderOutput)
545  {
546  postProcessingWs = mergeVectors(postProcessingWs, cpuRenderers);
547  const auto opOutputToCvMat = std::make_shared<OpOutputToCvMat>();
548  postProcessingWs.emplace_back(std::make_shared<WOpOutputToCvMat<TDatumsSP>>(opOutputToCvMat));
549  }
550  // Re-scale pose if desired
551  // If desired scale is not the current input
552  if (wrapperStructPose.keypointScale != ScaleMode::InputResolution
553  // and desired scale is not output when size(input) = size(output)
554  && !(wrapperStructPose.keypointScale == ScaleMode::OutputResolution &&
555  (finalOutputSize == producerSize || finalOutputSize.x <= 0 || finalOutputSize.y <= 0))
556  // and desired scale is not net output when size(input) = size(net output)
557  && !(wrapperStructPose.keypointScale == ScaleMode::NetOutputResolution
558  && producerSize == wrapperStructPose.netInputSize))
559  {
560  // Then we must rescale the keypoints
561  auto keypointScaler = std::make_shared<KeypointScaler>(wrapperStructPose.keypointScale);
562  postProcessingWs.emplace_back(std::make_shared<WKeypointScaler<TDatumsSP>>(keypointScaler));
563  }
564  }
565 
566  // IK/Adam
567  const auto displayAdam = wrapperStructGui.displayMode == DisplayMode::DisplayAdam
568  || (wrapperStructGui.displayMode == DisplayMode::DisplayAll
569  && wrapperStructExtra.ikThreads > 0);
570  jointAngleEstimationsWs.clear();
571 #ifdef USE_3D_ADAM_MODEL
572  if (wrapperStructExtra.ikThreads > 0)
573  {
574  jointAngleEstimationsWs.resize(wrapperStructExtra.ikThreads);
575  // Joint angle estimator(s)
576  for (auto i = 0u; i < jointAngleEstimationsWs.size(); i++)
577  {
578  const auto jointAngleEstimation = std::make_shared<JointAngleEstimation>(displayAdam);
579  jointAngleEstimationsWs.at(i) = {std::make_shared<WJointAngleEstimation<TDatumsSP>>(
580  jointAngleEstimation)};
581  }
582  }
583 #endif
584 
585  // Output workers
586  outputWs.clear();
587  // Print verbose
588  if (wrapperStructOutput.verbose > 0.)
589  {
590  const auto verbosePrinter = std::make_shared<VerbosePrinter>(
591  wrapperStructOutput.verbose, producerSharedPtr->get(CV_CAP_PROP_FRAME_COUNT));
592  outputWs.emplace_back(std::make_shared<WVerbosePrinter<TDatumsSP>>(verbosePrinter));
593  }
594  // Send information (e.g., to Unity) through UDP client-server communication
595 
596 #ifdef USE_3D_ADAM_MODEL
597  if (!wrapperStructOutput.udpHost.empty() && !wrapperStructOutput.udpPort.empty())
598  {
599  const auto udpSender = std::make_shared<UdpSender>(wrapperStructOutput.udpHost,
600  wrapperStructOutput.udpPort);
601  outputWs.emplace_back(std::make_shared<WUdpSender<TDatumsSP>>(udpSender));
602  }
603 #endif
604  // Write people pose data on disk (json for OpenCV >= 3, xml, yml...)
605  if (!writeKeypointCleaned.empty())
606  {
607  const auto keypointSaver = std::make_shared<KeypointSaver>(writeKeypointCleaned,
608  wrapperStructOutput.writeKeypointFormat);
609  outputWs.emplace_back(std::make_shared<WPoseSaver<TDatumsSP>>(keypointSaver));
610  if (wrapperStructFace.enable)
611  outputWs.emplace_back(std::make_shared<WFaceSaver<TDatumsSP>>(keypointSaver));
612  if (wrapperStructHand.enable)
613  outputWs.emplace_back(std::make_shared<WHandSaver<TDatumsSP>>(keypointSaver));
614  }
615  // Write OpenPose output data on disk in json format (body/hand/face keypoints, body part locations if
616  // enabled, etc.)
617  if (!writeJsonCleaned.empty())
618  {
619  const auto peopleJsonSaver = std::make_shared<PeopleJsonSaver>(writeJsonCleaned);
620  outputWs.emplace_back(std::make_shared<WPeopleJsonSaver<TDatumsSP>>(peopleJsonSaver));
621  }
622  // Write people pose data on disk (COCO validation json format)
623  if (!wrapperStructOutput.writeCocoJson.empty())
624  {
625  // If humanFormat: bigger size (& maybe slower to process), but easier for the user to read
626  const auto humanFormat = true;
627  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(
628  wrapperStructOutput.writeCocoJson, humanFormat,
629  (wrapperStructPose.poseModel != PoseModel::CAR_22
630  && wrapperStructPose.poseModel != PoseModel::CAR_12
631  ? CocoJsonFormat::Body : CocoJsonFormat::Car),
632  wrapperStructOutput.writeCocoJsonVariant);
633  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
634  }
635  // Write people foot pose data on disk (COCO validation json format for foot data)
636  if (!wrapperStructOutput.writeCocoFootJson.empty())
637  {
638  // If humanFormat: bigger size (& maybe slower to process), but easier for the user to read
639  const auto humanFormat = true;
640  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(wrapperStructOutput.writeCocoFootJson,
641  humanFormat, CocoJsonFormat::Foot);
642  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
643  }
644  // Write frames as desired image format on hard disk
645  if (!writeImagesCleaned.empty())
646  {
647  const auto imageSaver = std::make_shared<ImageSaver>(writeImagesCleaned,
648  wrapperStructOutput.writeImagesFormat);
649  outputWs.emplace_back(std::make_shared<WImageSaver<TDatumsSP>>(imageSaver));
650  }
651  // Write frames as *.avi video on hard disk
652  const auto producerFps = (producerSharedPtr == nullptr ?
653  0. : producerSharedPtr->get(CV_CAP_PROP_FPS));
654  const auto originalVideoFps = (wrapperStructOutput.writeVideoFps > 0 ?
655  wrapperStructOutput.writeVideoFps
656  : producerFps);
657  if (!wrapperStructOutput.writeVideo.empty())
658  {
659  if (!oPProducer)
660  error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer"
661  " is one of the default ones (e.g., video, webcam, ...).",
662  __LINE__, __FUNCTION__, __FILE__);
663  if (finalOutputSize.x <= 0 || finalOutputSize.y <= 0)
664  error("Video can only be recorded if outputSize is fixed (e.g., video, webcam, IP camera),"
665  "but not for a image directory.", __LINE__, __FUNCTION__, __FILE__);
666  const auto videoSaver = std::make_shared<VideoSaver>(
667  wrapperStructOutput.writeVideo, CV_FOURCC('M','J','P','G'), originalVideoFps, finalOutputSize
668  );
669  outputWs.emplace_back(std::make_shared<WVideoSaver<TDatumsSP>>(videoSaver));
670  }
671  // Write joint angles as *.bvh file on hard disk
672 #ifdef USE_3D_ADAM_MODEL
673  if (!wrapperStructOutput.writeBvh.empty())
674  {
675  const auto bvhSaver = std::make_shared<BvhSaver>(
676  wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps
677  );
678  outputWs.emplace_back(std::make_shared<WBvhSaver<TDatumsSP>>(bvhSaver));
679  }
680 #endif
681  // Write heat maps as desired image format on hard disk
682  if (!writeHeatMapsCleaned.empty())
683  {
684  const auto heatMapSaver = std::make_shared<HeatMapSaver>(writeHeatMapsCleaned,
685  wrapperStructOutput.writeHeatMapsFormat);
686  outputWs.emplace_back(std::make_shared<WHeatMapSaver<TDatumsSP>>(heatMapSaver));
687  }
688  // Add frame information for GUI
689  const bool guiEnabled = (wrapperStructGui.displayMode != DisplayMode::NoDisplay);
690  // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting
691  // recorded frames will look exactly like the final image displayed by the GUI
692  if (wrapperStructGui.guiVerbose && (guiEnabled || !userOutputWs.empty()
693  || threadManagerMode == ThreadManagerMode::Asynchronous
694  || threadManagerMode == ThreadManagerMode::AsynchronousOut))
695  {
696  const auto guiInfoAdder = std::make_shared<GuiInfoAdder>(numberThreads, guiEnabled);
697  outputWs.emplace_back(std::make_shared<WGuiInfoAdder<TDatumsSP>>(guiInfoAdder));
698  }
699  // Minimal graphical user interface (GUI)
700  guiW = nullptr;
701  if (guiEnabled)
702  {
703  // PoseRenderers to Renderers
704  std::vector<std::shared_ptr<Renderer>> renderers;
705  if (wrapperStructPose.renderMode == RenderMode::Cpu)
706  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseCpuRenderer));
707  else
708  for (const auto& poseGpuRenderer : poseGpuRenderers)
709  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseGpuRenderer));
710  // Display
711  // Adam (+3-D/2-D) display
712  if (displayAdam)
713  {
714 #ifdef USE_3D_ADAM_MODEL
715  // Gui
716  const auto gui = std::make_shared<GuiAdam>(
717  finalOutputSize, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
718  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
719  wrapperStructGui.displayMode, JointAngleEstimation::getTotalModel(),
720  wrapperStructOutput.writeVideoAdam
721  );
722  // WGui
723  guiW = {std::make_shared<WGuiAdam<TDatumsSP>>(gui)};
724 #endif
725  }
726  // 3-D (+2-D) display
727  else if (wrapperStructGui.displayMode == DisplayMode::Display3D
728  || wrapperStructGui.displayMode == DisplayMode::DisplayAll)
729  {
730  // Gui
731  const auto gui = std::make_shared<Gui3D>(
732  finalOutputSize, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
733  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
734  wrapperStructPose.poseModel, wrapperStructGui.displayMode
735  );
736  // WGui
737  guiW = {std::make_shared<WGui3D<TDatumsSP>>(gui)};
738  }
739  // 2-D display
740  else if (wrapperStructGui.displayMode == DisplayMode::Display2D)
741  {
742  // Gui
743  const auto gui = std::make_shared<Gui>(
744  finalOutputSize, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
745  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers
746  );
747  // WGui
748  guiW = {std::make_shared<WGui<TDatumsSP>>(gui)};
749  }
750  else
751  error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__);
752  }
753  // Set wrapper as configured
754  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
755 
756 
757 
758 
759 
760  // The fewer queues -> the fewer threads opened, and potentially the lower the lag
761 
762  // Sanity checks
763  if ((datumProducerW == nullptr) == (userInputWs.empty())
764  && threadManagerMode != ThreadManagerMode::Asynchronous
765  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
766  {
767  const auto message = "You need to have one and only one producer selected. You can introduce your own"
768  " producer by using setWorker(WorkerType::Input, ...), use the OpenPose"
769  " default producer (by configuring it in the configure function), or use the"
770  " ThreadManagerMode::Asynchronous(In) mode.";
771  error(message, __LINE__, __FUNCTION__, __FILE__);
772  }
773  if (outputWs.empty() && userOutputWs.empty() && guiW == nullptr
774  && threadManagerMode != ThreadManagerMode::Asynchronous
775  && threadManagerMode != ThreadManagerMode::AsynchronousOut)
776  {
777  error("No output selected.", __LINE__, __FUNCTION__, __FILE__);
778  }
779 
780  // Thread Manager
781  // Clean the previous thread manager (avoids configure crashing the program if called more than once)
782  threadManager.reset();
783  unsigned long long threadId = 0ull;
784  auto queueIn = 0ull;
785  auto queueOut = 1ull;
786  // After producer
787  // ID generator (before any multi-threading or any function that requires the ID)
788  const auto wIdGenerator = std::make_shared<WIdGenerator<TDatumsSP>>();
789  std::vector<TWorker> workersAux{wIdGenerator};
790  // Scale & cv::Mat to OP format
791  if (scaleAndSizeExtractorW != nullptr)
792  workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW});
793  if (cvMatToOpInputW != nullptr)
794  workersAux = mergeVectors(workersAux, {cvMatToOpInputW});
795  // cv::Mat to output format
796  if (cvMatToOpOutputW != nullptr)
797  workersAux = mergeVectors(workersAux, {cvMatToOpOutputW});
798 
799  // Producer
800  // If there is a custom user Worker and it runs on its own thread
801  if (!userInputWs.empty() && userInputWsOnNewThread)
802  {
803  // Thread 0, queues 0 -> 1
804  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
805  threadManager.add(threadId, userInputWs, queueIn++, queueOut++);
806  threadIdPP(threadId, multiThreadEnabled);
807  }
808  // If custom user Worker in same thread
809  else if (!userInputWs.empty())
810  workersAux = mergeVectors(userInputWs, workersAux);
811  // If OpenPose producer (same thread)
812  else if (datumProducerW != nullptr)
813  workersAux = mergeVectors({datumProducerW}, workersAux);
814  // Otherwise
815  else if (threadManagerMode != ThreadManagerMode::Asynchronous
816  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
817  error("No input selected.", __LINE__, __FUNCTION__, __FILE__);
818  // Thread 0 or 1, queues 0 -> 1
819  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
820  threadManager.add(threadId, workersAux, queueIn++, queueOut++);
821  // Increase thread
822  threadIdPP(threadId, multiThreadEnabled);
823 
824  // Pose estimation & rendering
825  // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs
826  if (!poseExtractorsWs.empty())
827  {
828  if (multiThreadEnabled)
829  {
830  for (auto& wPose : poseExtractorsWs)
831  {
832  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
833  threadManager.add(threadId, wPose, queueIn, queueOut);
834  threadIdPP(threadId, multiThreadEnabled);
835  }
836  queueIn++;
837  queueOut++;
838  // Sort frames - Requires its own thread
839  if (poseExtractorsWs.size() > 1u)
840  {
841  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
842  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
843  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
844  threadIdPP(threadId, multiThreadEnabled);
845  }
846  }
847  else
848  {
849  if (poseExtractorsWs.size() > 1)
850  log("Multi-threading disabled, only 1 thread running. All GPUs have been disabled but the"
851  " first one, which is defined by gpuNumberStart (e.g., in the OpenPose demo, it is set"
852  " with the `--num_gpu_start` flag).", Priority::High);
853  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
854  threadManager.add(threadId, poseExtractorsWs.at(0), queueIn++, queueOut++);
855  }
856  }
857  // Assemble all frames from the same time instant (3-D module)
858  const auto wQueueAssembler = std::make_shared<WQueueAssembler<TDatumsSP, TDatums>>();
859  // 3-D reconstruction
860  if (!poseTriangulationsWs.empty())
861  {
862  // Assemble frames
863  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
864  threadManager.add(threadId, wQueueAssembler, queueIn++, queueOut++);
865  threadIdPP(threadId, multiThreadEnabled);
866  // 3-D reconstruction
867  if (multiThreadEnabled)
868  {
869  for (auto& wPoseTriangulations : poseTriangulationsWs)
870  {
871  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
872  threadManager.add(threadId, wPoseTriangulations, queueIn, queueOut);
873  threadIdPP(threadId, multiThreadEnabled);
874  }
875  queueIn++;
876  queueOut++;
877  // Sort frames
878  if (poseTriangulationsWs.size() > 1u)
879  {
880  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
881  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
882  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
883  threadIdPP(threadId, multiThreadEnabled);
884  }
885  }
886  else
887  {
888  if (poseTriangulationsWs.size() > 1)
889  log("Multi-threading disabled, only 1 thread running for 3-D triangulation.",
891  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
892  threadManager.add(threadId, poseTriangulationsWs.at(0), queueIn++, queueOut++);
893  }
894  }
895  else
896  postProcessingWs = mergeVectors({wQueueAssembler}, postProcessingWs);
897  // Adam/IK step
898  if (!jointAngleEstimationsWs.empty())
899  {
900  if (multiThreadEnabled)
901  {
902  for (auto& wJointAngleEstimator : jointAngleEstimationsWs)
903  {
904  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
905  threadManager.add(threadId, wJointAngleEstimator, queueIn, queueOut);
906  threadIdPP(threadId, multiThreadEnabled);
907  }
908  queueIn++;
909  queueOut++;
910  // Sort frames
911  if (jointAngleEstimationsWs.size() > 1)
912  {
913  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
914  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
915  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
916  threadIdPP(threadId, multiThreadEnabled);
917  }
918  }
919  else
920  {
921  if (jointAngleEstimationsWs.size() > 1)
922  log("Multi-threading disabled, only 1 thread running for joint angle estimation.",
924  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
925  threadManager.add(threadId, jointAngleEstimationsWs.at(0), queueIn++, queueOut++);
926  }
927  }
928  // Post processing workers
929  if (!postProcessingWs.empty())
930  {
931  // Combining postProcessingWs and outputWs
932  outputWs = mergeVectors(postProcessingWs, outputWs);
933  // // If I wanna split them
934  // log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
935  // threadManager.add(threadId, postProcessingWs, queueIn++, queueOut++);
936  // threadIdPP(threadId, multiThreadEnabled);
937  }
938  // If there are custom user post-processing Workers
939  if (!userPostProcessingWs.empty())
940  {
941  // If custom user Worker in its own thread
942  if (userPostProcessingWsOnNewThread)
943  {
944  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
945  threadManager.add(threadId, userPostProcessingWs, queueIn++, queueOut++);
946  threadIdPP(threadId, multiThreadEnabled);
947  }
948  // If custom user Worker in same thread
949  // Merge with outputWs
950  else
951  outputWs = mergeVectors(outputWs, userPostProcessingWs);
952  }
953  // Output workers
954  if (!outputWs.empty())
955  {
956  // Thread 4 or 5, queues 4 -> 5
957  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
958  threadManager.add(threadId, outputWs, queueIn++, queueOut++);
959  threadIdPP(threadId, multiThreadEnabled);
960  }
961  // User output worker
962  // Thread Y, queues Q -> Q+1
963  if (!userOutputWs.empty())
964  {
965  if (userOutputWsOnNewThread)
966  {
967  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
968  threadManager.add(threadId, userOutputWs, queueIn++, queueOut++);
969  threadIdPP(threadId, multiThreadEnabled);
970  }
971  else
972  {
973  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
974  threadManager.add(threadId-1, userOutputWs, queueIn++, queueOut++);
975  }
976  }
977  // OpenPose GUI
978  if (guiW != nullptr)
979  {
980  // Thread Y+1, queues Q+1 -> Q+2
981  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
982  threadManager.add(threadId, guiW, queueIn++, queueOut++);
983  threadIdPP(threadId, multiThreadEnabled);
984  }
985  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
986  }
987  catch (const std::exception& e)
988  {
989  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
990  }
991  }
992 }
993 
994 #endif // OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
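Reading aid (not part of the header): every worker group added to the ThreadManager above consumes from queue N and produces to queue N+1, and threadIdPP() only advances the thread id when multi-threading is enabled, so with multiThreadEnabled == false all groups collapse onto thread 0. The following is a reduced, hypothetical sketch of that wiring pattern; wireMinimalPipeline, inputWorkers, poseWorkers and outputWorkers are illustrative names standing in for the worker vectors assembled by configureThreadManager().

#include <memory>
#include <vector>
#include <openpose/headers.hpp>

// Hypothetical helper: the three worker vectors are assumed to be already filled by the caller
template<typename TDatumsSP, typename TWorker = std::shared_ptr<op::Worker<TDatumsSP>>>
void wireMinimalPipeline(
    op::ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabled,
    const std::vector<TWorker>& inputWorkers, const std::vector<TWorker>& poseWorkers,
    const std::vector<TWorker>& outputWorkers)
{
    unsigned long long threadId = 0ull;
    auto queueIn = 0ull;
    auto queueOut = 1ull;
    // Thread 0, queues 0 -> 1: producer / input side
    threadManager.add(threadId, inputWorkers, queueIn++, queueOut++);
    op::threadIdPP(threadId, multiThreadEnabled);   // becomes thread 1 only if multi-threading
    // Thread 0 or 1, queues 1 -> 2: pose estimation & rendering
    threadManager.add(threadId, poseWorkers, queueIn++, queueOut++);
    op::threadIdPP(threadId, multiThreadEnabled);
    // Last thread, queues 2 -> 3: output, saving and GUI
    threadManager.add(threadId, outputWorkers, queueIn++, queueOut++);
}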