OpenPose 1.0.0rc2
OpenPose: A Real-Time Multi-Person Key-Point Detection And Multi-Threading C++ Library
wrapperAuxiliary.hpp
1 #ifndef OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
2 #define OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
3 
13 
14 namespace op
15 {
   29  OP_API void wrapperConfigureSanityChecks(
   30  WrapperStructPose& wrapperStructPose, const WrapperStructFace& wrapperStructFace,
31  const WrapperStructHand& wrapperStructHand, const WrapperStructExtra& wrapperStructExtra,
32  const WrapperStructInput& wrapperStructInput, const WrapperStructOutput& wrapperStructOutput,
33  const WrapperStructGui& wrapperStructGui, const bool renderOutput, const bool userOutputWsEmpty,
34  const std::shared_ptr<Producer>& producerSharedPtr, const ThreadManagerMode threadManagerMode);
35 
44  OP_API void threadIdPP(unsigned long long& threadId, const bool multiThreadEnabled);
45 
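The definition of threadIdPP lives in the corresponding .cpp file and is not shown on this page. For orientation, a minimal sketch of what it plausibly does, inferred from how configureThreadManager below reuses the same threadId whenever multi-threading is disabled (error handling of the real implementation omitted):

    // Hedged sketch only (not the shipped implementation): advance the thread ID
    // exclusively in multi-threaded mode; in single-threaded (debug) mode every
    // worker stays on thread 0.
    void threadIdPP(unsigned long long& threadId, const bool multiThreadEnabled)
    {
        if (multiThreadEnabled)
            threadId++;
    }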
52  template<typename TDatums,
53  typename TDatumsSP = std::shared_ptr<TDatums>,
54  typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
   55  void configureThreadManager(
   56  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabled,
57  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPose,
58  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
59  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
60  const WrapperStructOutput& wrapperStructOutput, const WrapperStructGui& wrapperStructGui,
61  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
62  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread);
63 }
64 
65 
66 
67 
68 
69 // Implementation
70 #include <openpose/3d/headers.hpp>
74 #include <openpose/gpu/gpu.hpp>
75 #include <openpose/gui/headers.hpp>
82 namespace op
83 {
84  template<typename TDatums, typename TDatumsSP, typename TWorker>
   85  void configureThreadManager(
   86  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabledTemp,
87  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPoseTemp,
88  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
89  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
90  const WrapperStructOutput& wrapperStructOutput, const WrapperStructGui& wrapperStructGui,
91  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
92  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread)
93  {
94  try
95  {
96  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
97 
98  // Create producer
99  auto producerSharedPtr = createProducer(
100  wrapperStructInput.producerType, wrapperStructInput.producerString,
101  wrapperStructInput.cameraResolution, wrapperStructInput.cameraParameterPath,
102  wrapperStructInput.undistortImage, wrapperStructInput.numberViews);
103 
104  // Editable arguments
105  auto wrapperStructPose = wrapperStructPoseTemp;
106  auto multiThreadEnabled = multiThreadEnabledTemp;
107 
108  // User custom workers
109  const auto& userInputWs = userWs[int(WorkerType::Input)];
110  const auto& userPostProcessingWs = userWs[int(WorkerType::PostProcessing)];
111  const auto& userOutputWs = userWs[int(WorkerType::Output)];
112  const auto userInputWsOnNewThread = userWsOnNewThread[int(WorkerType::Input)];
113  const auto userPostProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PostProcessing)];
114  const auto userOutputWsOnNewThread = userWsOnNewThread[int(WorkerType::Output)];
115 
116  // Video seek
117  const auto spVideoSeek = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
  118  // It cannot be initialized directly in the constructor (copying a std::atomic is a compiler error)
119  spVideoSeek->first = false;
120  spVideoSeek->second = 0;
121 
122  // Required parameters
123  const auto renderOutput = wrapperStructPose.renderMode != RenderMode::None
124  || wrapperStructFace.renderMode != RenderMode::None
125  || wrapperStructHand.renderMode != RenderMode::None;
126  const auto renderOutputGpu = wrapperStructPose.renderMode == RenderMode::Gpu
127  || wrapperStructFace.renderMode == RenderMode::Gpu
128  || wrapperStructHand.renderMode == RenderMode::Gpu;
129  const auto renderFace = wrapperStructFace.enable && wrapperStructFace.renderMode != RenderMode::None;
130  const auto renderHand = wrapperStructHand.enable && wrapperStructHand.renderMode != RenderMode::None;
131  const auto renderHandGpu = wrapperStructHand.enable && wrapperStructHand.renderMode == RenderMode::Gpu;
132 
133  // Check no wrong/contradictory flags enabled
134  const auto userOutputWsEmpty = userOutputWs.empty();
  135  wrapperConfigureSanityChecks(
  136  wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, wrapperStructInput,
137  wrapperStructOutput, wrapperStructGui, renderOutput, userOutputWsEmpty, producerSharedPtr,
138  threadManagerMode);
139 
  140  // Get number of threads
141  auto numberThreads = wrapperStructPose.gpuNumber;
142  auto gpuNumberStart = wrapperStructPose.gpuNumberStart;
143  // CPU --> 1 thread or no pose extraction
144  if (getGpuMode() == GpuMode::NoGpu)
145  {
146  numberThreads = (wrapperStructPose.gpuNumber == 0 ? 0 : 1);
147  gpuNumberStart = 0;
  148  // Disabling multi-thread makes the code 400 ms faster (2.3 sec vs. 2.7 sec on an i7-6850K)
  149  // and fixes the bug where the screen was not properly displayed and only refreshed occasionally
  150  // Note: The screen bug could also be fixed by using waitKey(30) rather than waitKey(1)
151  multiThreadEnabled = false;
152  }
153  // GPU --> user picks (<= #GPUs)
154  else
155  {
  156  // Get the total number of GPUs
157  const auto totalGpuNumber = getGpuNumber();
  158  // If number of GPUs < 0 --> use all available GPUs
159  if (numberThreads < 0)
160  {
161  if (totalGpuNumber <= gpuNumberStart)
  162  error("The initial GPU index (`--num_gpu_start`) must be lower than the total number of"
  163  " available GPUs.", __LINE__, __FUNCTION__, __FILE__);
164  numberThreads = totalGpuNumber - gpuNumberStart;
165  // Reset initial GPU to 0 (we want them all)
166  // Logging message
167  log("Auto-detecting all available GPUs... Detected " + std::to_string(totalGpuNumber)
168  + " GPU(s), using " + std::to_string(numberThreads) + " of them starting at GPU "
169  + std::to_string(gpuNumberStart) + ".", Priority::High);
170  }
171  // Sanity check
172  if (gpuNumberStart + numberThreads > totalGpuNumber)
  173  error("The initial GPU selected (`--num_gpu_start`) plus the number of GPUs to use (`--num_gpu`) must"
  174  " be less than or equal to the total number of GPUs in your machine ("
175  + std::to_string(gpuNumberStart) + " + "
176  + std::to_string(numberThreads) + " vs. "
177  + std::to_string(totalGpuNumber) + ").",
178  __LINE__, __FUNCTION__, __FILE__);
179  }
180 
181  // Proper format
182  const auto writeImagesCleaned = formatAsDirectory(wrapperStructOutput.writeImages);
183  const auto writeKeypointCleaned = formatAsDirectory(wrapperStructOutput.writeKeypoint);
184  const auto writeJsonCleaned = formatAsDirectory(wrapperStructOutput.writeJson);
185  const auto writeHeatMapsCleaned = formatAsDirectory(wrapperStructOutput.writeHeatMaps);
186  const auto modelFolder = formatAsDirectory(wrapperStructPose.modelFolder);
187 
188  // Common parameters
189  auto finalOutputSize = wrapperStructPose.outputSize;
190  Point<int> producerSize{-1,-1};
191  const auto oPProducer = (producerSharedPtr != nullptr);
192  if (oPProducer)
193  {
194  // 1. Set producer properties
195  const auto displayProducerFpsMode = (wrapperStructInput.realTimeProcessing
  196  ? ProducerFpsMode::OriginalFps : ProducerFpsMode::RetrievalFps);
  197  producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
198  producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip);
199  producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate);
200  producerSharedPtr->set(ProducerProperty::AutoRepeat, wrapperStructInput.framesRepeat);
201  // 2. Set finalOutputSize
202  producerSize = Point<int>{(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
203  (int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
204  // Set finalOutputSize to input size if desired
205  if (finalOutputSize.x == -1 || finalOutputSize.y == -1)
206  finalOutputSize = producerSize;
207  }
208 
209  // Producer
210  TWorker datumProducerW;
211  if (oPProducer)
212  {
213  const auto datumProducer = std::make_shared<DatumProducer<TDatums>>(
214  producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameStep,
215  wrapperStructInput.frameLast, spVideoSeek
216  );
217  datumProducerW = std::make_shared<WDatumProducer<TDatumsSP, TDatums>>(datumProducer);
218  }
219  else
220  datumProducerW = nullptr;
221 
222  std::vector<std::shared_ptr<PoseExtractorNet>> poseExtractorNets;
223  std::vector<std::shared_ptr<FaceExtractorNet>> faceExtractorNets;
224  std::vector<std::shared_ptr<HandExtractorNet>> handExtractorNets;
225  std::vector<std::shared_ptr<PoseGpuRenderer>> poseGpuRenderers;
226  std::shared_ptr<PoseCpuRenderer> poseCpuRenderer;
227  // Workers
228  TWorker scaleAndSizeExtractorW;
229  TWorker cvMatToOpInputW;
230  TWorker cvMatToOpOutputW;
231  std::vector<std::vector<TWorker>> poseExtractorsWs;
232  std::vector<std::vector<TWorker>> poseTriangulationsWs;
233  std::vector<std::vector<TWorker>> jointAngleEstimationsWs;
234  std::vector<TWorker> postProcessingWs;
235  if (numberThreads > 0)
236  {
237  // Get input scales and sizes
238  const auto scaleAndSizeExtractor = std::make_shared<ScaleAndSizeExtractor>(
239  wrapperStructPose.netInputSize, finalOutputSize, wrapperStructPose.scalesNumber,
240  wrapperStructPose.scaleGap
241  );
242  scaleAndSizeExtractorW = std::make_shared<WScaleAndSizeExtractor<TDatumsSP>>(scaleAndSizeExtractor);
243 
244  // Input cvMat to OpenPose input & output format
245  const auto cvMatToOpInput = std::make_shared<CvMatToOpInput>(wrapperStructPose.poseModel);
246  cvMatToOpInputW = std::make_shared<WCvMatToOpInput<TDatumsSP>>(cvMatToOpInput);
247  if (renderOutput)
248  {
249  const auto cvMatToOpOutput = std::make_shared<CvMatToOpOutput>();
250  cvMatToOpOutputW = std::make_shared<WCvMatToOpOutput<TDatumsSP>>(cvMatToOpOutput);
251  }
252 
253  // Pose estimators & renderers
254  std::vector<TWorker> cpuRenderers;
255  poseExtractorsWs.clear();
256  poseExtractorsWs.resize(numberThreads);
257  if (wrapperStructPose.enable)
258  {
259  // Pose estimators
260  for (auto gpuId = 0; gpuId < numberThreads; gpuId++)
261  poseExtractorNets.emplace_back(std::make_shared<PoseExtractorCaffe>(
262  wrapperStructPose.poseModel, modelFolder, gpuId + gpuNumberStart,
263  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
264  wrapperStructPose.addPartCandidates, wrapperStructPose.maximizePositives,
265  wrapperStructPose.enableGoogleLogging
266  ));
267 
268  // Pose renderers
269  if (renderOutputGpu || wrapperStructPose.renderMode == RenderMode::Cpu)
270  {
271  // If wrapperStructPose.renderMode != RenderMode::Gpu but renderOutput, then we create an
  272  // alpha = 0 pose renderer in order to keep the background-removal option
273  const auto alphaKeypoint = (wrapperStructPose.renderMode != RenderMode::None
274  ? wrapperStructPose.alphaKeypoint : 0.f);
275  const auto alphaHeatMap = (wrapperStructPose.renderMode != RenderMode::None
276  ? wrapperStructPose.alphaHeatMap : 0.f);
277  // GPU rendering
278  if (renderOutputGpu)
279  {
280  for (const auto& poseExtractorNet : poseExtractorNets)
281  {
282  poseGpuRenderers.emplace_back(std::make_shared<PoseGpuRenderer>(
283  wrapperStructPose.poseModel, poseExtractorNet, wrapperStructPose.renderThreshold,
284  wrapperStructPose.blendOriginalFrame, alphaKeypoint,
285  alphaHeatMap, wrapperStructPose.defaultPartToRender
286  ));
287  }
288  }
289  // CPU rendering
290  if (wrapperStructPose.renderMode == RenderMode::Cpu)
291  {
292  poseCpuRenderer = std::make_shared<PoseCpuRenderer>(
293  wrapperStructPose.poseModel, wrapperStructPose.renderThreshold,
294  wrapperStructPose.blendOriginalFrame, alphaKeypoint, alphaHeatMap,
295  wrapperStructPose.defaultPartToRender);
296  cpuRenderers.emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(poseCpuRenderer));
297  }
298  }
299  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
300 
301  // Pose extractor(s)
302  poseExtractorsWs.resize(poseExtractorNets.size());
303  const auto personIdExtractor = (wrapperStructExtra.identification
304  ? std::make_shared<PersonIdExtractor>() : nullptr);
305  // Keep top N people
306  // Added right after PoseExtractorNet to avoid:
307  // 1) Rendering people that are later deleted (wrong visualization).
308  // 2) Processing faces and hands on people that will be deleted (speed up).
309  // 3) Running tracking before deleting the people.
310  // Add KeepTopNPeople for each PoseExtractorNet
311  const auto keepTopNPeople = (wrapperStructPose.numberPeopleMax > 0 ?
312  std::make_shared<KeepTopNPeople>(wrapperStructPose.numberPeopleMax)
313  : nullptr);
314  // Person tracker
315  auto personTrackers = std::make_shared<std::vector<std::shared_ptr<PersonTracker>>>();
316  if (wrapperStructExtra.tracking > -1)
317  personTrackers->emplace_back(
318  std::make_shared<PersonTracker>(wrapperStructExtra.tracking == 0));
319  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
320  {
321  // OpenPose keypoint detector + keepTopNPeople
322  // + ID extractor (experimental) + tracking (experimental)
323  const auto poseExtractor = std::make_shared<PoseExtractor>(
324  poseExtractorNets.at(i), keepTopNPeople, personIdExtractor, personTrackers,
325  wrapperStructPose.numberPeopleMax, wrapperStructExtra.tracking);
326  poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractor<TDatumsSP>>(poseExtractor)};
327  // // Just OpenPose keypoint detector
328  // poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractorNet<TDatumsSP>>(
329  // poseExtractorNets.at(i))};
330  }
331 
332  // // (Before tracking / id extractor)
333  // // Added right after PoseExtractorNet to avoid:
334  // // 1) Rendering people that are later deleted (wrong visualization).
335  // // 2) Processing faces and hands on people that will be deleted (speed up).
336  // if (wrapperStructPose.numberPeopleMax > 0)
337  // {
338  // // Add KeepTopNPeople for each PoseExtractorNet
339  // const auto keepTopNPeople = std::make_shared<KeepTopNPeople>(
340  // wrapperStructPose.numberPeopleMax);
341  // for (auto& wPose : poseExtractorsWs)
342  // wPose.emplace_back(std::make_shared<WKeepTopNPeople<TDatumsSP>>(keepTopNPeople));
343  // }
344  }
345  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
346 
347  // Face extractor(s)
348  if (wrapperStructFace.enable)
349  {
350  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
351  // Face detector
352  // OpenPose face detector
353  if (wrapperStructPose.enable)
354  {
355  const auto faceDetector = std::make_shared<FaceDetector>(wrapperStructPose.poseModel);
356  for (auto& wPose : poseExtractorsWs)
357  wPose.emplace_back(std::make_shared<WFaceDetector<TDatumsSP>>(faceDetector));
358  }
359  // OpenCV face detector
360  else
361  {
362  log("Body keypoint detection is disabled. Hence, using OpenCV face detector (much less"
363  " accurate but faster).", Priority::High);
364  for (auto& wPose : poseExtractorsWs)
365  {
366  // 1 FaceDetectorOpenCV per thread, OpenCV face detector is not thread-safe
367  const auto faceDetectorOpenCV = std::make_shared<FaceDetectorOpenCV>(modelFolder);
368  wPose.emplace_back(
369  std::make_shared<WFaceDetectorOpenCV<TDatumsSP>>(faceDetectorOpenCV)
370  );
371  }
372  }
373  // Face keypoint extractor
374  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
375  {
376  // Face keypoint extractor
377  const auto netOutputSize = wrapperStructFace.netInputSize;
378  const auto faceExtractorNet = std::make_shared<FaceExtractorCaffe>(
379  wrapperStructFace.netInputSize, netOutputSize, modelFolder,
380  gpu + gpuNumberStart, wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
381  wrapperStructPose.enableGoogleLogging
382  );
383  faceExtractorNets.emplace_back(faceExtractorNet);
384  poseExtractorsWs.at(gpu).emplace_back(
385  std::make_shared<WFaceExtractorNet<TDatumsSP>>(faceExtractorNet));
386  }
387  }
388  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
389 
390  // Hand extractor(s)
391  if (wrapperStructHand.enable)
392  {
393  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
394  const auto handDetector = std::make_shared<HandDetector>(wrapperStructPose.poseModel);
395  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
396  {
397  // Hand detector
398  // If tracking
399  if (wrapperStructHand.tracking)
400  poseExtractorsWs.at(gpu).emplace_back(
401  std::make_shared<WHandDetectorTracking<TDatumsSP>>(handDetector)
402  );
403  // If detection
404  else
405  poseExtractorsWs.at(gpu).emplace_back(
406  std::make_shared<WHandDetector<TDatumsSP>>(handDetector));
407  // Hand keypoint extractor
408  const auto netOutputSize = wrapperStructHand.netInputSize;
409  const auto handExtractorNet = std::make_shared<HandExtractorCaffe>(
410  wrapperStructHand.netInputSize, netOutputSize, modelFolder,
411  gpu + gpuNumberStart, wrapperStructHand.scalesNumber, wrapperStructHand.scaleRange,
412  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScale,
413  wrapperStructPose.enableGoogleLogging
414  );
415  handExtractorNets.emplace_back(handExtractorNet);
416  poseExtractorsWs.at(gpu).emplace_back(
417  std::make_shared<WHandExtractorNet<TDatumsSP>>(handExtractorNet)
418  );
419  // If tracking
420  if (wrapperStructHand.tracking)
421  poseExtractorsWs.at(gpu).emplace_back(
422  std::make_shared<WHandDetectorUpdate<TDatumsSP>>(handDetector)
423  );
424  }
425  }
426  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
427 
428  // Pose renderer(s)
429  if (!poseGpuRenderers.empty())
430  {
431  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
432  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
433  poseExtractorsWs.at(i).emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(
434  poseGpuRenderers.at(i)
435  ));
436  }
437  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
438 
439  // Face renderer(s)
440  if (renderFace)
441  {
442  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
443  // CPU rendering
444  if (wrapperStructFace.renderMode == RenderMode::Cpu)
445  {
446  // Construct face renderer
447  const auto faceRenderer = std::make_shared<FaceCpuRenderer>(wrapperStructFace.renderThreshold,
448  wrapperStructFace.alphaKeypoint,
449  wrapperStructFace.alphaHeatMap);
450  // Add worker
451  cpuRenderers.emplace_back(std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
452  }
453  // GPU rendering
454  else if (wrapperStructFace.renderMode == RenderMode::Gpu)
455  {
456  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
457  {
458  // Construct face renderer
459  const auto faceRenderer = std::make_shared<FaceGpuRenderer>(
460  wrapperStructFace.renderThreshold, wrapperStructFace.alphaKeypoint,
461  wrapperStructFace.alphaHeatMap
462  );
463  // Performance boost -> share spGpuMemory for all renderers
464  if (!poseGpuRenderers.empty())
465  {
466  const bool isLastRenderer = !renderHandGpu;
467  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
468  poseGpuRenderers.at(i)
469  );
470  faceRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(),
471  isLastRenderer);
472  }
473  // Add worker
474  poseExtractorsWs.at(i).emplace_back(
475  std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
476  }
477  }
478  else
479  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
480  }
481  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
482 
483  // Hand renderer(s)
484  if (renderHand)
485  {
486  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
487  // CPU rendering
488  if (wrapperStructHand.renderMode == RenderMode::Cpu)
489  {
490  // Construct hand renderer
491  const auto handRenderer = std::make_shared<HandCpuRenderer>(wrapperStructHand.renderThreshold,
492  wrapperStructHand.alphaKeypoint,
493  wrapperStructHand.alphaHeatMap);
494  // Add worker
495  cpuRenderers.emplace_back(std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
496  }
497  // GPU rendering
498  else if (wrapperStructHand.renderMode == RenderMode::Gpu)
499  {
500  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
501  {
502  // Construct hands renderer
503  const auto handRenderer = std::make_shared<HandGpuRenderer>(
504  wrapperStructHand.renderThreshold, wrapperStructHand.alphaKeypoint,
505  wrapperStructHand.alphaHeatMap
506  );
507  // Performance boost -> share spGpuMemory for all renderers
508  if (!poseGpuRenderers.empty())
509  {
510  const bool isLastRenderer = true;
511  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
512  poseGpuRenderers.at(i)
513  );
514  handRenderer->setSharedParametersAndIfLast(renderer->getSharedParameters(),
515  isLastRenderer);
516  }
517  // Add worker
518  poseExtractorsWs.at(i).emplace_back(
519  std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
520  }
521  }
522  else
523  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
524  }
525  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
526 
527  // 3-D reconstruction
528  poseTriangulationsWs.clear();
529  if (wrapperStructExtra.reconstruct3d)
530  {
531  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
  532  // For all (body/face/hands): PoseTriangulations ~30 msec, 8 GPUs ~30 msec for keypoint estimation
533  poseTriangulationsWs.resize(fastMax(1, int(poseExtractorsWs.size() / 4)));
534  for (auto i = 0u ; i < poseTriangulationsWs.size() ; i++)
535  {
536  const auto poseTriangulation = std::make_shared<PoseTriangulation>(
537  wrapperStructExtra.minViews3d);
538  poseTriangulationsWs.at(i) = {std::make_shared<WPoseTriangulation<TDatumsSP>>(
539  poseTriangulation)};
540  }
541  }
542  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
  543  // Intermediate workers (e.g., OpenPose format to cv::Mat, JSON & frames recorder, ...)
544  postProcessingWs.clear();
545  // // Person ID identification (when no multi-thread and no dependency on tracking)
546  // if (wrapperStructExtra.identification)
547  // {
548  // const auto personIdExtractor = std::make_shared<PersonIdExtractor>();
549  // postProcessingWs.emplace_back(
550  // std::make_shared<WPersonIdExtractor<TDatumsSP>>(personIdExtractor)
551  // );
552  // }
553  // Frames processor (OpenPose format -> cv::Mat format)
554  if (renderOutput)
555  {
556  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
557  postProcessingWs = mergeVectors(postProcessingWs, cpuRenderers);
558  const auto opOutputToCvMat = std::make_shared<OpOutputToCvMat>();
559  postProcessingWs.emplace_back(std::make_shared<WOpOutputToCvMat<TDatumsSP>>(opOutputToCvMat));
560  }
561  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
562  // Re-scale pose if desired
563  // If desired scale is not the current input
564  if (wrapperStructPose.keypointScale != ScaleMode::InputResolution
565  // and desired scale is not output when size(input) = size(output)
566  && !(wrapperStructPose.keypointScale == ScaleMode::OutputResolution &&
567  (finalOutputSize == producerSize || finalOutputSize.x <= 0 || finalOutputSize.y <= 0))
568  // and desired scale is not net output when size(input) = size(net output)
569  && !(wrapperStructPose.keypointScale == ScaleMode::NetOutputResolution
570  && producerSize == wrapperStructPose.netInputSize))
571  {
572  // Then we must rescale the keypoints
573  auto keypointScaler = std::make_shared<KeypointScaler>(wrapperStructPose.keypointScale);
574  postProcessingWs.emplace_back(std::make_shared<WKeypointScaler<TDatumsSP>>(keypointScaler));
575  }
576  }
577  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
578 
579  // IK/Adam
580  const auto displayAdam = wrapperStructGui.displayMode == DisplayMode::DisplayAdam
581  || (wrapperStructGui.displayMode == DisplayMode::DisplayAll
582  && wrapperStructExtra.ikThreads > 0);
583  jointAngleEstimationsWs.clear();
584 #ifdef USE_3D_ADAM_MODEL
585  if (wrapperStructExtra.ikThreads > 0)
586  {
587  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
588  jointAngleEstimationsWs.resize(wrapperStructExtra.ikThreads);
589  // Pose extractor(s)
590  for (auto i = 0u; i < jointAngleEstimationsWs.size(); i++)
591  {
592  const auto jointAngleEstimation = std::make_shared<JointAngleEstimation>(displayAdam);
593  jointAngleEstimationsWs.at(i) = {std::make_shared<WJointAngleEstimation<TDatumsSP>>(
594  jointAngleEstimation)};
595  }
596  }
597  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
598 #endif
599 
600  // Output workers
601  std::vector<TWorker> outputWs;
602  // Print verbose
603  if (wrapperStructOutput.verbose > 0.)
604  {
605  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
606  const auto verbosePrinter = std::make_shared<VerbosePrinter>(
607  wrapperStructOutput.verbose, producerSharedPtr->get(CV_CAP_PROP_FRAME_COUNT));
608  outputWs.emplace_back(std::make_shared<WVerbosePrinter<TDatumsSP>>(verbosePrinter));
609  }
610  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
  611  // Send information (e.g., to Unity) through UDP client-server communication
612 
613 #ifdef USE_3D_ADAM_MODEL
614  if (!wrapperStructOutput.udpHost.empty() && !wrapperStructOutput.udpPort.empty())
615  {
616  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
617  const auto udpSender = std::make_shared<UdpSender>(wrapperStructOutput.udpHost,
618  wrapperStructOutput.udpPort);
619  outputWs.emplace_back(std::make_shared<WUdpSender<TDatumsSP>>(udpSender));
620  }
621  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
622 #endif
623  // Write people pose data on disk (json for OpenCV >= 3, xml, yml...)
624  if (!writeKeypointCleaned.empty())
625  {
626  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
627  const auto keypointSaver = std::make_shared<KeypointSaver>(writeKeypointCleaned,
628  wrapperStructOutput.writeKeypointFormat);
629  outputWs.emplace_back(std::make_shared<WPoseSaver<TDatumsSP>>(keypointSaver));
630  if (wrapperStructFace.enable)
631  outputWs.emplace_back(std::make_shared<WFaceSaver<TDatumsSP>>(keypointSaver));
632  if (wrapperStructHand.enable)
633  outputWs.emplace_back(std::make_shared<WHandSaver<TDatumsSP>>(keypointSaver));
634  }
635  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
636  // Write OpenPose output data on disk in json format (body/hand/face keypoints, body part locations if
637  // enabled, etc.)
638  if (!writeJsonCleaned.empty())
639  {
640  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
641  const auto peopleJsonSaver = std::make_shared<PeopleJsonSaver>(writeJsonCleaned);
642  outputWs.emplace_back(std::make_shared<WPeopleJsonSaver<TDatumsSP>>(peopleJsonSaver));
643  }
644  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
645  // Write people pose data on disk (COCO validation json format)
646  if (!wrapperStructOutput.writeCocoJson.empty())
647  {
648  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
  649  // If humanFormat: bigger file size (& maybe slower to process), but easier for the user to read
650  const auto humanFormat = true;
651  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(
652  wrapperStructOutput.writeCocoJson, wrapperStructPose.poseModel, humanFormat,
653  (wrapperStructPose.poseModel != PoseModel::CAR_22
654  && wrapperStructPose.poseModel != PoseModel::CAR_12
  655  ? CocoJsonFormat::Body : CocoJsonFormat::Car),
  656  wrapperStructOutput.writeCocoJsonVariant);
657  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
658  }
659  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
660  // Write people foot pose data on disk (COCO validation json format for foot data)
661  if (!wrapperStructOutput.writeCocoFootJson.empty())
662  {
  663  // If humanFormat: bigger file size (& maybe slower to process), but easier for the user to read
664  const auto humanFormat = true;
665  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(
666  wrapperStructOutput.writeCocoFootJson, wrapperStructPose.poseModel, humanFormat,
  667  CocoJsonFormat::Foot);
  668  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
669  }
670  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
671  // Write frames as desired image format on hard disk
672  if (!writeImagesCleaned.empty())
673  {
674  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
675  const auto imageSaver = std::make_shared<ImageSaver>(writeImagesCleaned,
676  wrapperStructOutput.writeImagesFormat);
677  outputWs.emplace_back(std::make_shared<WImageSaver<TDatumsSP>>(imageSaver));
678  }
679  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
680  auto originalVideoFps = 0.;
681  if (!wrapperStructOutput.writeVideo.empty() || !wrapperStructOutput.writeVideo3D.empty()
682  || !wrapperStructOutput.writeBvh.empty())
683  {
684  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
685  if (wrapperStructOutput.writeVideoFps <= 0
686  && (!oPProducer || producerSharedPtr->get(CV_CAP_PROP_FPS) <= 0))
  687  error("The frame rate of the frames producer is unknown. Set `--write_video_fps` to your desired"
  688  " FPS if you want to record a video (`--write_video`). E.g., if it is a folder of images, you"
  689  " will have to know or guess the frame rate; if it is a webcam, you should use the FPS"
  690  " displayed by OpenPose as the desired value. If you do not care, simply add `--write_video_fps 30`.",
691  __LINE__, __FUNCTION__, __FILE__);
692  originalVideoFps = (
693  wrapperStructOutput.writeVideoFps > 0 ?
694  wrapperStructOutput.writeVideoFps : producerSharedPtr->get(CV_CAP_PROP_FPS));
695  }
696  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
697  // Write frames as *.avi video on hard disk
698  if (!wrapperStructOutput.writeVideo.empty())
699  {
700  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
701  if (!oPProducer)
702  error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer"
703  " is one of the default ones (e.g., video, webcam, ...).",
704  __LINE__, __FUNCTION__, __FILE__);
705  const auto videoSaver = std::make_shared<VideoSaver>(
706  wrapperStructOutput.writeVideo, CV_FOURCC('M','J','P','G'), originalVideoFps);
707  outputWs.emplace_back(std::make_shared<WVideoSaver<TDatumsSP>>(videoSaver));
708  }
709  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
710  // Write joint angles as *.bvh file on hard disk
711 #ifdef USE_3D_ADAM_MODEL
712  if (!wrapperStructOutput.writeBvh.empty())
713  {
714  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
715  const auto bvhSaver = std::make_shared<BvhSaver>(
716  wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps
717  );
718  outputWs.emplace_back(std::make_shared<WBvhSaver<TDatumsSP>>(bvhSaver));
719  }
720  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
721 #endif
722  // Write heat maps as desired image format on hard disk
723  if (!writeHeatMapsCleaned.empty())
724  {
725  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
726  const auto heatMapSaver = std::make_shared<HeatMapSaver>(
727  writeHeatMapsCleaned, wrapperStructOutput.writeHeatMapsFormat);
728  outputWs.emplace_back(std::make_shared<WHeatMapSaver<TDatumsSP>>(heatMapSaver));
729  }
730  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
731  // Add frame information for GUI
732  const bool guiEnabled = (wrapperStructGui.displayMode != DisplayMode::NoDisplay);
733  // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting
  734  // recorded frames will look exactly like the final image displayed by the GUI
735  if (wrapperStructGui.guiVerbose && (guiEnabled || !userOutputWs.empty()
736  || threadManagerMode == ThreadManagerMode::Asynchronous
737  || threadManagerMode == ThreadManagerMode::AsynchronousOut))
738  {
739  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
740  const auto guiInfoAdder = std::make_shared<GuiInfoAdder>(numberThreads, guiEnabled);
741  outputWs.emplace_back(std::make_shared<WGuiInfoAdder<TDatumsSP>>(guiInfoAdder));
742  }
743  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
744  // Minimal graphical user interface (GUI)
745  TWorker guiW;
746  TWorker videoSaver3DW;
747  if (guiEnabled)
748  {
749  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
750  // PoseRenderers to Renderers
751  std::vector<std::shared_ptr<Renderer>> renderers;
752  if (wrapperStructPose.renderMode == RenderMode::Cpu)
753  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseCpuRenderer));
754  else
755  for (const auto& poseGpuRenderer : poseGpuRenderers)
756  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseGpuRenderer));
757  // Display
758  const auto numberViews = (producerSharedPtr != nullptr
759  ? intRound(producerSharedPtr->get(ProducerProperty::NumberViews)) : 1);
760  auto finalOutputSizeGui = finalOutputSize;
761  if (numberViews > 1 && finalOutputSizeGui.x > 0)
762  finalOutputSizeGui.x *= numberViews;
763  // Adam (+3-D/2-D) display
764  if (displayAdam)
765  {
766 #ifdef USE_3D_ADAM_MODEL
767  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
768  // Gui
769  const auto gui = std::make_shared<GuiAdam>(
770  finalOutputSizeGui, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
771  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
772  wrapperStructGui.displayMode, JointAngleEstimation::getTotalModel(),
773  wrapperStructOutput.writeVideoAdam
774  );
775  // WGui
776  guiW = {std::make_shared<WGuiAdam<TDatumsSP>>(gui)};
777  // Write 3D frames as *.avi video on hard disk
778  if (!wrapperStructOutput.writeVideo3D.empty())
779  error("3D video can only be recorded if 3D render is enabled.",
780  __LINE__, __FUNCTION__, __FILE__);
781 #endif
782  }
783  // 3-D (+2-D) display
784  else if (wrapperStructGui.displayMode == DisplayMode::Display3D
785  || wrapperStructGui.displayMode == DisplayMode::DisplayAll)
786  {
787  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
788  // Gui
789  const auto gui = std::make_shared<Gui3D>(
790  finalOutputSizeGui, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
791  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
792  wrapperStructPose.poseModel, wrapperStructGui.displayMode,
793  !wrapperStructOutput.writeVideo3D.empty()
794  );
795  // WGui
796  guiW = {std::make_shared<WGui3D<TDatumsSP>>(gui)};
797  // Write 3D frames as *.avi video on hard disk
798  if (!wrapperStructOutput.writeVideo3D.empty())
799  {
800  const auto videoSaver = std::make_shared<VideoSaver>(
801  wrapperStructOutput.writeVideo3D, CV_FOURCC('M','J','P','G'), originalVideoFps);
802  videoSaver3DW = std::make_shared<WVideoSaver3D<TDatumsSP>>(videoSaver);
803  }
804  }
805  // 2-D display
806  else if (wrapperStructGui.displayMode == DisplayMode::Display2D)
807  {
808  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
809  // Gui
810  const auto gui = std::make_shared<Gui>(
811  finalOutputSizeGui, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
812  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers
813  );
814  // WGui
815  guiW = {std::make_shared<WGui<TDatumsSP>>(gui)};
816  // Write 3D frames as *.avi video on hard disk
817  if (!wrapperStructOutput.writeVideo3D.empty())
818  error("3D video can only be recorded if 3D render is enabled.",
819  __LINE__, __FUNCTION__, __FILE__);
820  }
821  else
822  error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__);
823  }
824  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
825  // Set FpsMax
826  TWorker wFpsMax;
827  if (wrapperStructPose.fpsMax > 0.)
828  wFpsMax = std::make_shared<WFpsMax<TDatumsSP>>(wrapperStructPose.fpsMax);
829  // Set wrapper as configured
830  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
831 
832 
833 
834 
835 
  836  // The fewer the queues -> the fewer threads opened, and potentially the lower the lag
837 
838  // Sanity checks
839  if ((datumProducerW == nullptr) == (userInputWs.empty())
840  && threadManagerMode != ThreadManagerMode::Asynchronous
841  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
842  {
843  const auto message = "You need to have 1 and only 1 producer selected. You can introduce your own"
844  " producer by using setWorker(WorkerType::Input, ...) or use the OpenPose"
  845  " default producer by configuring it in the configure function, or use the"
846  " ThreadManagerMode::Asynchronous(In) mode.";
847  error(message, __LINE__, __FUNCTION__, __FILE__);
848  }
849  if (outputWs.empty() && userOutputWs.empty() && guiW == nullptr
850  && threadManagerMode != ThreadManagerMode::Asynchronous
851  && threadManagerMode != ThreadManagerMode::AsynchronousOut)
852  {
853  error("No output selected.", __LINE__, __FUNCTION__, __FILE__);
854  }
855 
856  // Thread Manager
  857  // Clean the previous thread manager (avoids crashing the program if configure is used more than once)
858  threadManager.reset();
859  unsigned long long threadId = 0ull;
860  auto queueIn = 0ull;
861  auto queueOut = 1ull;
862  // After producer
863  // ID generator (before any multi-threading or any function that requires the ID)
864  const auto wIdGenerator = std::make_shared<WIdGenerator<TDatumsSP>>();
865  std::vector<TWorker> workersAux{wIdGenerator};
866  // Scale & cv::Mat to OP format
867  if (scaleAndSizeExtractorW != nullptr)
868  workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW});
869  if (cvMatToOpInputW != nullptr)
870  workersAux = mergeVectors(workersAux, {cvMatToOpInputW});
871  // cv::Mat to output format
872  if (cvMatToOpOutputW != nullptr)
873  workersAux = mergeVectors(workersAux, {cvMatToOpOutputW});
874 
875  // Producer
876  // If custom user Worker and uses its own thread
877  if (!userInputWs.empty() && userInputWsOnNewThread)
878  {
879  // Thread 0, queues 0 -> 1
880  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
881  threadManager.add(threadId, userInputWs, queueIn++, queueOut++);
882  threadIdPP(threadId, multiThreadEnabled);
883  }
884  // If custom user Worker in same thread
885  else if (!userInputWs.empty())
886  workersAux = mergeVectors(userInputWs, workersAux);
887  // If OpenPose producer (same thread)
888  else if (datumProducerW != nullptr)
889  workersAux = mergeVectors({datumProducerW}, workersAux);
890  // Otherwise
891  else if (threadManagerMode != ThreadManagerMode::Asynchronous
892  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
893  error("No input selected.", __LINE__, __FUNCTION__, __FILE__);
894  // Thread 0 or 1, queues 0 -> 1
895  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
896  threadManager.add(threadId, workersAux, queueIn++, queueOut++);
897  // Increase thread
898  threadIdPP(threadId, multiThreadEnabled);
899 
900  // Pose estimation & rendering
901  // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs
902  if (!poseExtractorsWs.empty())
903  {
904  if (multiThreadEnabled)
905  {
906  for (auto& wPose : poseExtractorsWs)
907  {
908  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
909  threadManager.add(threadId, wPose, queueIn, queueOut);
910  threadIdPP(threadId, multiThreadEnabled);
911  }
912  queueIn++;
913  queueOut++;
  914  // Sort frames - requires its own thread
915  if (poseExtractorsWs.size() > 1u)
916  {
917  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
918  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
919  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
920  threadIdPP(threadId, multiThreadEnabled);
921  }
922  }
923  else
924  {
925  if (poseExtractorsWs.size() > 1)
  926  log("Multi-threading disabled, only 1 thread running. All GPUs except the first one have been"
  927  " disabled. The first one is defined by gpuNumberStart (e.g., in the OpenPose demo, it is set"
  928  " with the `--num_gpu_start` flag).", Priority::High);
929  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
930  threadManager.add(threadId, poseExtractorsWs.at(0), queueIn++, queueOut++);
931  }
932  }
933  // Assemble all frames from same time instant (3-D module)
934  const auto wQueueAssembler = std::make_shared<WQueueAssembler<TDatumsSP, TDatums>>();
935  // 3-D reconstruction
936  if (!poseTriangulationsWs.empty())
937  {
938  // Assemble frames
939  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
940  threadManager.add(threadId, wQueueAssembler, queueIn++, queueOut++);
941  threadIdPP(threadId, multiThreadEnabled);
942  // 3-D reconstruction
943  if (multiThreadEnabled)
944  {
945  for (auto& wPoseTriangulations : poseTriangulationsWs)
946  {
947  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
948  threadManager.add(threadId, wPoseTriangulations, queueIn, queueOut);
949  threadIdPP(threadId, multiThreadEnabled);
950  }
951  queueIn++;
952  queueOut++;
953  // Sort frames
954  if (poseTriangulationsWs.size() > 1u)
955  {
956  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
957  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
958  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
959  threadIdPP(threadId, multiThreadEnabled);
960  }
961  }
962  else
963  {
964  if (poseTriangulationsWs.size() > 1)
965  log("Multi-threading disabled, only 1 thread running for 3-D triangulation.",
  966  Priority::High);
  967  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
968  threadManager.add(threadId, poseTriangulationsWs.at(0), queueIn++, queueOut++);
969  }
970  }
971  else
972  postProcessingWs = mergeVectors({wQueueAssembler}, postProcessingWs);
973  // Adam/IK step
974  if (!jointAngleEstimationsWs.empty())
975  {
976  if (multiThreadEnabled)
977  {
978  for (auto& wJointAngleEstimator : jointAngleEstimationsWs)
979  {
980  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
981  threadManager.add(threadId, wJointAngleEstimator, queueIn, queueOut);
982  threadIdPP(threadId, multiThreadEnabled);
983  }
984  queueIn++;
985  queueOut++;
986  // Sort frames
987  if (jointAngleEstimationsWs.size() > 1)
988  {
989  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
990  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
991  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
992  threadIdPP(threadId, multiThreadEnabled);
993  }
994  }
995  else
996  {
997  if (jointAngleEstimationsWs.size() > 1)
998  log("Multi-threading disabled, only 1 thread running for joint angle estimation.",
  999  Priority::High);
 1000  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1001  threadManager.add(threadId, jointAngleEstimationsWs.at(0), queueIn++, queueOut++);
1002  }
1003  }
1004  // Post processing workers
1005  if (!postProcessingWs.empty())
1006  {
1007  // Combining postProcessingWs and outputWs
1008  outputWs = mergeVectors(postProcessingWs, outputWs);
1009  // // If I wanna split them
1010  // log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1011  // threadManager.add(threadId, postProcessingWs, queueIn++, queueOut++);
1012  // threadIdPP(threadId, multiThreadEnabled);
1013  }
 1014  // If custom user post-processing Worker(s)
1015  if (!userPostProcessingWs.empty())
1016  {
1017  // If custom user Worker in its own thread
1018  if (userPostProcessingWsOnNewThread)
1019  {
1020  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1021  threadManager.add(threadId, userPostProcessingWs, queueIn++, queueOut++);
1022  threadIdPP(threadId, multiThreadEnabled);
1023  }
1024  // If custom user Worker in same thread
1025  // Merge with outputWs
1026  else
1027  outputWs = mergeVectors(outputWs, userPostProcessingWs);
1028  }
1029  // Output workers
1030  if (!outputWs.empty())
1031  {
1032  // Thread 4 or 5, queues 4 -> 5
1033  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1034  threadManager.add(threadId, outputWs, queueIn++, queueOut++);
1035  threadIdPP(threadId, multiThreadEnabled);
1036  }
1037  // User output worker
1038  // Thread Y, queues Q -> Q+1
1039  if (!userOutputWs.empty())
1040  {
1041  if (userOutputWsOnNewThread)
1042  {
1043  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1044  threadManager.add(threadId, userOutputWs, queueIn++, queueOut++);
1045  threadIdPP(threadId, multiThreadEnabled);
1046  }
1047  else
1048  {
1049  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1050  threadManager.add(threadId-1, userOutputWs, queueIn++, queueOut++);
1051  }
1052  }
1053  // OpenPose GUI
1054  if (guiW != nullptr)
1055  {
1056  // Thread Y+1, queues Q+1 -> Q+2
1057  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1058  threadManager.add(threadId, guiW, queueIn++, queueOut++);
1059  // Saving 3D output
1060  if (videoSaver3DW != nullptr)
1061  threadManager.add(threadId, videoSaver3DW, queueIn++, queueOut++);
1062  threadIdPP(threadId, multiThreadEnabled);
1063  }
1064  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1065  // Setting maximum speed
1066  if (wFpsMax != nullptr)
1067  {
1068  log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
1069  threadManager.add(threadId, wFpsMax, queueIn++, queueOut++);
1070  threadIdPP(threadId, multiThreadEnabled);
1071  }
1072  }
1073  catch (const std::exception& e)
1074  {
1075  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
1076  }
1077  }
1078 }
1079 
1080 #endif // OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
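For orientation, the sketch below shows roughly how the templated configureThreadManager declared above could be invoked with default-constructed configuration structs and no custom user workers. It is illustrative only: the struct defaults, the ThreadManagerMode::Synchronous value, and the exec() call follow the OpenPose API as used elsewhere in the library, but a real caller (the op::Wrapper class) also fills in at least a producer via WrapperStructInput before running.

    #include <array>
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    // Illustrative sketch, not the library's own wrapper code.
    void configureThreadManagerExample()
    {
        using TDatums = std::vector<op::Datum>;
        using TDatumsSP = std::shared_ptr<TDatums>;
        using TWorker = std::shared_ptr<op::Worker<TDatumsSP>>;

        op::ThreadManager<TDatumsSP> threadManager;
        // No custom input/post-processing/output workers in this sketch.
        const std::array<std::vector<TWorker>, int(op::WorkerType::Size)> userWs{};
        const std::array<bool, int(op::WorkerType::Size)> userWsOnNewThread{};

        // Default-constructed structs; a real configuration would at least set the
        // producer fields of WrapperStructInput (video, webcam, image folder, ...).
        op::configureThreadManager<TDatums>(
            threadManager, true, op::ThreadManagerMode::Synchronous,
            op::WrapperStructPose{}, op::WrapperStructFace{}, op::WrapperStructHand{},
            op::WrapperStructExtra{}, op::WrapperStructInput{}, op::WrapperStructOutput{},
            op::WrapperStructGui{}, userWs, userWsOnNewThread);

        // Blocking call that runs all configured workers until the producer finishes.
        threadManager.exec();
    }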