提交 1f476d4d 编写于 作者: G gineshidalgo99

Exception if input channels != 3

上级 6648027c
......@@ -33,11 +33,11 @@ You might select multiple topics, delete the rest:
### Your system configuration
**Operating system** (`lsb_release -a` on Ubuntu):
**Operating system** (`lsb_release -a` in Ubuntu):
**CUDA version** (`cat /usr/local/cuda/version.txt` in most cases):
**cuDNN version**:
**GPU model** (`nvidia-smi`):
**GPU model** (`nvidia-smi` in Ubuntu):
**Caffe version**: Default from OpenPose or custom version.
**OpenCV version**: installed with `apt-get install libopencv-dev` or OpenCV 2.X or OpenCV 3.X.
Generation mode (only for Ubuntu): Makefile + Makefile.config (default) or CMake.
Compiler (`gcc --version` on Ubuntu):
**OpenCV version**: installed with `apt-get install libopencv-dev` (Ubuntu) or default from OpenPose (Windows) or OpenCV 2.X or OpenCV 3.X.
Generation mode (only for Ubuntu): Makefile + Makefile.config (default, Ubuntu) or CMake (Ubuntu, Windows) or Visual Studio (Windows).
Compiler (`gcc --version` in Ubuntu):
......@@ -11,7 +11,7 @@ In order to learn how to use it, run `./build/examples/openpose/openpose.bin --h
Each flag is divided into flag name, default value, and description.
1. Debugging
- DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for low priority messages and 4 for important ones.");
- DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for low priority messages and 4 for important ones.");
2. Producer
- DEFINE_int32(camera, 0, "The camera index for cv::VideoCapture. Integer in the range [0, 9].");
- DEFINE_string(camera_resolution, "1280x720", "Size of the camera frames to ask for.");
......
......@@ -116,5 +116,5 @@ LIBS += -Wpedantic -Wall -Wextra -Wfatal-errors
## Expected Visual Results
The visual GUI should show 3 screens: the Windows command line or Ubuntu bash terminal, the different cameras' 2-D keypoint estimations, and the final 3-D reconstruction, similar to the following image:
<p align="center">
<img src="media/openpose3d.png", width="720">
<img src="media/openpose3d.png">
</p>
......@@ -5,7 +5,7 @@ OpenPose Library - Release Notes
## OpenPose 1.0.0rc1
1. Initial version, main functionality:
1. Body keypoint detection and rendering on Ubuntu 14 and 16.
1. Body keypoint detection and rendering in Ubuntu 14 and 16.
2. It can read an image directory, video or webcam.
3. It can display the results or store them on disk.
......
......@@ -43,7 +43,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
// ------------------------- OpenPose Library Tutorial - Hand Keypoint Detection from JSON Ground-Truth Data -------------------------
// Example to test hands accuracy given ground-truth bounding boxes.
#include <chrono> // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds
#include <gflags/gflags.h> // DEFINE_bool, DEFINE_int32, DEFINE_int64, DEFINE_uint64, DEFINE_double, DEFINE_string
#include <glog/logging.h> // google::InitGoogleLogging
#include <openpose/headers.hpp>
#include "wrapperHandFromJsonTest.hpp"
// Command-line flags. Descriptions are intentionally left empty here; for the meaning of each
// homonymous flag, check `examples/openpose/openpose.bin`.
// Debugging
DEFINE_int32(logging_level, 3, "");                 // Logging level in [0, 255]; 0 logs everything, 255 logs nothing
// Producer
DEFINE_string(image_dir, "", "");                   // Folder with the input images to process
DEFINE_string(hand_ground_truth, "", "");           // Folder with the ground-truth hand bounding-box files
// OpenPose
DEFINE_string(model_folder, "models/", "");         // Folder containing the OpenPose models
DEFINE_int32(num_gpu, -1, "");                      // Number of GPUs to use; -1 = auto-detect and use all
DEFINE_int32(num_gpu_start, 0, "");                 // Index of the first GPU to use
// OpenPose Hand
DEFINE_bool(hand, true, "");                        // Enable hand keypoint estimation
DEFINE_string(hand_net_resolution, "368x368", ""); // Hand network input resolution (multiples of 16)
DEFINE_int32(hand_scale_number, 1, "");             // Number of scales averaged per hand
DEFINE_double(hand_scale_range, 0.4, "");           // Total scale range covered by the scales
DEFINE_bool(hand_tracking, false, "");              // Hand tracking (rejected by this JSON-based test)
// Display
DEFINE_bool(no_display, false, "");                 // Disable the visual GUI
// Result Saving
DEFINE_string(write_keypoint_json, "", "");         // Folder where the estimated keypoint JSON files are saved
// Runs the hand-accuracy test: reads images plus ground-truth hand bounding boxes,
// estimates hand keypoints with OpenPose, and saves and/or displays the results.
// Returns 0 on success; failures are reported through op::check / op::error.
int handFromJsonTest()
{
    // Validate and apply the requested logging verbosity.
    op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.", __LINE__, __FUNCTION__, __FILE__);
    op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
    // op::ConfigureLog::setPriorityThreshold(op::Priority::None); // Uncomment to print every logging message
    op::log("Starting pose estimation demo.", op::Priority::High);
    const auto startTime = std::chrono::high_resolution_clock::now();

    // Translate the Google flags into OpenPose configuration values.
    // Hand network input resolution
    const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
    // Frame producer over the image directory (webcam index 0 is ignored since image_dir is used)
    const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, "", 0);
    op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__);

    // Build and configure the OpenPose test wrapper.
    op::log("Configuring OpenPose wrapper.", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    op::WrapperHandFromJsonTest<std::vector<op::Datum>> opWrapper;
    // Pose configuration (use WrapperStructPose{} for the default, recommended configuration)
    op::WrapperStructPose wrapperStructPose{op::flagsToPoint("656x368"), op::flagsToPoint("1280x720"),
                                            op::ScaleMode::InputResolution, FLAGS_num_gpu, FLAGS_num_gpu_start};
    wrapperStructPose.modelFolder = FLAGS_model_folder;
    // Hand configuration (use op::WrapperStructHand{} to disable it)
    const op::WrapperStructHand wrapperStructHand{FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number,
                                                  (float)FLAGS_hand_scale_range, FLAGS_hand_tracking,
                                                  op::flagsToRenderMode(1)};
    opWrapper.configure(wrapperStructPose, wrapperStructHand, producerSharedPtr, FLAGS_hand_ground_truth,
                        FLAGS_write_keypoint_json, !FLAGS_no_display);

    // Run the pipeline; exec() blocks this thread until every worker thread has finished.
    op::log("Starting thread(s)", op::Priority::High);
    opWrapper.exec();

    // Report total wall-clock time.
    const auto endTime = std::chrono::high_resolution_clock::now();
    const auto totalTimeSec = (double)std::chrono::duration_cast<std::chrono::nanoseconds>(endTime-startTime).count() * 1e-9;
    const auto message = "Real-time pose estimation demo successfully finished. Total time: " + std::to_string(totalTimeSec) + " seconds.";
    op::log(message, op::Priority::High);
    return 0;
}
// Program entry point: initializes logging, parses the flags, and runs the test.
int main(int argc, char *argv[])
{
    // Initialize Google logging (Caffe uses it internally).
    google::InitGoogleLogging("handFromJsonTest");
    // Parse the command-line flags into the FLAGS_* globals.
    gflags::ParseCommandLineFlags(&argc, &argv, true);
    // Run the hand-from-JSON accuracy test and propagate its exit code.
    const auto exitCode = handFromJsonTest();
    return exitCode;
}
# Script for internal use. We might completely change it continuously and we will not answer questions about it.
# Runs the hand-keypoint accuracy test twice: once with ground-truth bounding boxes
# (handFromJsonTest.bin) and once letting OpenPose find the hands (openpose.bin).
# Clear terminal screen
clear && clear

# Dataset paths
HAND_TESTING_FOLDER="/media/posefs3b/Users/gines/openpose_training/dataset/hand_testing/5_keypointJson/"
IMAGES_FOLDER=${HAND_TESTING_FOLDER}"0_images/"
# NOTE: the first assignment is immediately overridden by the second one; it is kept
# only to ease switching between the two detection sources.
IMAGES_BB_FOLDER=${HAND_TESTING_FOLDER}"3_images_bounding_box"
IMAGES_BB_FOLDER=${HAND_TESTING_FOLDER}"4_hand_detections"
KEYPOINT_JSON_FOLDER=${HAND_TESTING_FOLDER}"5_keypointJson/"
SCALES=6
SUFFIX="_test${SCALES}"

# Run 1: hands given by ground-truth bounding boxes
HAND_RESULTS_FOLDER_BB=${KEYPOINT_JSON_FOLDER}"hand_keypoints_estimated"${SUFFIX}"_bounding_box"
echo "Output on ${HAND_RESULTS_FOLDER_BB}"
rm -rf "$HAND_RESULTS_FOLDER_BB"
# ${SCALES} scales
./build/examples/tests/handFromJsonTest.bin \
    --hand_scale_number ${SCALES} --hand_scale_range 0.4 \
    --image_dir ${IMAGES_BB_FOLDER} \
    --hand_ground_truth ${IMAGES_BB_FOLDER} \
    --write_keypoint_json "$HAND_RESULTS_FOLDER_BB" \
    --no_display

# Run 2: no bounding box (full OpenPose pipeline)
HAND_RESULTS_FOLDER_NO_BB=${KEYPOINT_JSON_FOLDER}"hand_keypoints_estimated"${SUFFIX}
echo "Output on ${HAND_RESULTS_FOLDER_NO_BB}"
rm -rf "$HAND_RESULTS_FOLDER_NO_BB"
# ${SCALES} scales
# Fix: `--hand logging_level 3` passed `logging_level` as a positional argument;
# the intended flag is `--logging_level 3`.
./build/examples/openpose/openpose.bin \
    --hand --logging_level 3 \
    --hand_scale_number ${SCALES} --hand_scale_range 0.4 \
    --image_dir ${IMAGES_FOLDER} \
    --write_keypoint_json "$HAND_RESULTS_FOLDER_NO_BB" \
    --no_display
# Script for internal use. We might completely change it continuously and we will not answer questions about it.
# Writes COCO-format JSON results for the COCO val2014 set so they can be scored offline.
# USAGE EXAMPLE
# clear && clear && make all -j24 && bash ./examples/tests/pose_accuracy_coco_test.sh

# # Go back to main folder
# cd ../../

# Write COCO-format JSON
# Note: `--frame_last 3558` --> total = 3559 frames
# Last id:
# ID 20671 <--> #frames = 1471 --> ~ 1.5 min at 15fps
# ID 50006 <--> #frames = 3559 --> ~ 4 min at 15fps

# 1 scale (rendering disabled for speed; only the JSON is produced)
./build/examples/openpose/openpose.bin --image_dir "/home/gines/devel/images/val2014" --write_coco_json ../evaluation/coco/results/openpose/1.json --no_display --render_pose 0 --frame_last 3558

# # 3 scales
# ./build/examples/openpose/openpose.bin --image_dir "/home/gines/devel/images/val2014" --write_coco_json ../evaluation/coco/results/openpose/1_3.json --no_display --render_pose 0 --scale_number 3 --scale_gap 0.25 --frame_last 3558

# # 4 scales (bigger net resolution; single GPU, presumably to fit in memory -- confirm)
# ./build/examples/openpose/openpose.bin --num_gpu 1 --image_dir "/home/gines/devel/images/val2014" --write_coco_json ../evaluation/coco/results/openpose/1_4.json --no_display --render_pose 0 --scale_number 4 --scale_gap 0.25 --net_resolution "1312x736" --frame_last 3558

# Debugging - Rendered frames saved
# ./build/examples/openpose/openpose.bin --image_dir "/home/gines/devel/images/val2014" --write_images ../evaluation/coco/results/openpose/frameOutput --no_display
# Script for internal use. We might completely change it continuously and we will not answer questions about it.
# Measures the raw Caffe forward/backward time of the COCO pose model (no OpenPose overhead).
# # Go back to main folder
# cd ../../

# Performance results
PROTOTXT_PATH=/home/gines/Dropbox/Perceptual_Computing_Lab/openpose/openpose/models/pose/coco/pose_deploy_linevec.prototxt
# Opens this very script for editing ($0 is the script path) -- kept for the author's workflow.
gedit $0
# First: Add 656 x 368 as input_dim in:
gedit $PROTOTXT_PATH
# Run the Caffe benchmark on GPU 0, TEST phase
./3rdparty/caffe/build/tools/caffe time -model $PROTOTXT_PATH -gpu 0 -phase TEST
# Re-opens the prototxt, presumably to revert the temporary input_dim edit -- confirm
gedit $PROTOTXT_PATH
# Script for internal use. We might completely change it continuously and we will not answer questions about it.
# Quick end-to-end speed check of openpose.bin on a sample video.
# # Go back to main folder
# cd ../../

# Re-build
clear && clear && make all -j12
# Performance results (~1400)
./build/examples/openpose/openpose.bin --video soccer.mp4 --frame_last 1500
# Including 2nd graphics card (~3500)
# ./build/examples/openpose/openpose.bin --video soccer.mp4 --frame_last 3750
#ifndef OPENPOSE_WRAPPER_WRAPPER_HAND_FROM_JSON_TEST_HPP
#define OPENPOSE_WRAPPER_WRAPPER_HAND_FROM_JSON_TEST_HPP
#include <openpose/headers.hpp>
namespace op
{
    /**
     * Test-only variant of the OpenPose wrapper used to measure hand-keypoint accuracy
     * when hand locations are provided as ground-truth files (via HandDetectorFromTxt)
     * instead of being detected from the body pose.
     * @tparam TDatums Container with the frames to process, e.g. std::vector<op::Datum>.
     * @tparam TWorker Smart pointer to the worker type (default std::shared_ptr<Worker<...>>).
     * @tparam TQueue Queue type used to communicate between worker threads.
     */
    template<typename TDatums,
             typename TWorker = std::shared_ptr<Worker<std::shared_ptr<TDatums>>>,
             typename TQueue = Queue<std::shared_ptr<TDatums>>>
    class WrapperHandFromJsonTest
    {
    public:
        /**
         * Constructor.
         */
        explicit WrapperHandFromJsonTest();

        /**
         * Destructor.
         * It automatically frees resources (stops the threads and releases the workers).
         */
        ~WrapperHandFromJsonTest();

        /**
         * Configures the processing pipeline. Must be called before exec().
         * @param wrapperStructPose Pose configuration (GPUs, model folder, resolutions, ...).
         * @param wrapperStructHand Hand configuration (network resolution, scales, rendering, ...).
         * @param producerSharedPtr Frame producer; must not be null (it is queried for the frame size).
         * @param handGroundTruth Folder with the ground-truth hand bounding-box files.
         * @param writeKeypointJson Folder where estimated keypoints are saved as JSON ("" to disable).
         * @param displayGui Whether to show the visual GUI.
         */
        void configure(const WrapperStructPose& wrapperStructPose,
                       const WrapperStructHand& wrapperStructHand,
                       const std::shared_ptr<Producer>& producerSharedPtr,
                       const std::string& handGroundTruth,
                       const std::string& writeKeypointJson,
                       const bool displayGui = false);

        /**
         * Function to start multi-threading.
         * Similar to start(), but exec() blocks the thread that calls the function (it saves 1 thread).
         * Use exec() instead of start() if the calling thread would otherwise be waiting for the
         * WrapperHandFromJsonTest to end.
         */
        void exec();

    private:
        // Thread manager that owns and schedules all the worker threads below.
        ThreadManager<std::shared_ptr<TDatums>> mThreadManager;
        // Workers (one per pipeline stage)
        TWorker wDatumProducer;                     // Reads frames from the producer
        TWorker spWIdGenerator;                     // Assigns a unique id to each frame
        TWorker spWCvMatToOpInput;                  // cv::Mat -> network input format
        TWorker spWCvMatToOpOutput;                 // cv::Mat -> output (rendering) format
        std::vector<std::vector<TWorker>> spWPoses; // Per-GPU hand detector + extractor workers
        std::vector<TWorker> mPostProcessingWs;     // Rendering / format conversion / rescaling workers
        std::vector<TWorker> mOutputWs;             // JSON saving / GUI-info workers
        TWorker spWGui;                             // Optional GUI worker

        /**
         * Frees TWorker variables (private internal function).
         * For most cases, this function is unnecessary, since std::shared_ptr are automatically cleaned
         * on destruction of each class. However, it might be useful if the same WrapperHandFromJsonTest
         * is going to be started twice (not recommended in most cases).
         */
        void reset();

        /**
         * Set ThreadManager from TWorkers (private internal function).
         * After any configure() has been called, the TWorkers are initialized. This function resets the
         * ThreadManager and adds them. Common code for start() and exec().
         */
        void configureThreadManager();

        /**
         * TWorker concatenator (private internal function).
         * Auxiliary function that concatenates std::vectors of TWorker. Since TWorker is some kind of
         * smart pointer (usually std::shared_ptr), its copy still shares the same internal data. It will
         * not work for TWorker classes that do not share the data when moved.
         * @param workersA First std::vector<TWorker> to be concatenated.
         * @param workersB Second std::vector<TWorker> to be concatenated.
         * @return Concatenated std::vector<TWorker> of both workersA and workersB.
         */
        std::vector<TWorker> mergeWorkers(const std::vector<TWorker>& workersA, const std::vector<TWorker>& workersB);

        // Non-copyable (OpenPose macro deleting copy constructor and copy assignment).
        DELETE_COPY(WrapperHandFromJsonTest);
    };
}
// Implementation
#include <openpose/core/headers.hpp>
#include <openpose/face/headers.hpp>
#include <openpose/filestream/headers.hpp>
#include <openpose/gui/headers.hpp>
#include <openpose/hand/headers.hpp>
#include <openpose/pose/headers.hpp>
#include <openpose/producer/headers.hpp>
#include <openpose/utilities/cuda.hpp>
#include <openpose/utilities/errorAndLog.hpp>
#include <openpose/utilities/fileSystem.hpp>
namespace op
{
    template<typename TDatums, typename TWorker, typename TQueue>
    WrapperHandFromJsonTest<TDatums, TWorker, TQueue>::WrapperHandFromJsonTest()
    {
        // Nothing to do: all workers are created later, in configure().
    }

    template<typename TDatums, typename TWorker, typename TQueue>
    WrapperHandFromJsonTest<TDatums, TWorker, TQueue>::~WrapperHandFromJsonTest()
    {
        try
        {
            // Stop any running threads, then release every worker.
            mThreadManager.stop();
            reset();
        }
        catch (const std::exception& e)
        {
            // Forward any failure to the OpenPose error handler.
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
    template<typename TDatums, typename TWorker, typename TQueue>
    void WrapperHandFromJsonTest<TDatums, TWorker, TQueue>::configure(const WrapperStructPose& wrapperStructPose,
                                                                      const WrapperStructHand& wrapperStructHand,
                                                                      const std::shared_ptr<Producer>& producerSharedPtr,
                                                                      const std::string& handGroundTruth,
                                                                      const std::string& writeKeypointJson,
                                                                      const bool displayGui)
    {
        try
        {
            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            // Shortcut
            typedef std::shared_ptr<TDatums> TDatumsPtr;
            // Check no wrong/contradictory flags enabled
            if (wrapperStructPose.scaleGap <= 0.f && wrapperStructPose.scalesNumber > 1)
                error("The scale gap must be greater than 0 (it has no effect if the number of scales is 1).", __LINE__, __FUNCTION__, __FILE__);
            const std::string additionalMessage = " You could also set mThreadManagerMode = mThreadManagerMode::Asynchronous(Out) and/or add your own"
                " output worker class before calling this function.";
            // At least one output (GUI or JSON saving) must be enabled, otherwise running would produce nothing
            const auto savingSomething = !writeKeypointJson.empty();
            if (!displayGui && !savingSomething)
            {
                const auto message = "No output is selected (`no_display`) and no results are generated (no `write_X` flags enabled). Thus,"
                    " no output would be generated." + additionalMessage;
                error(message, __LINE__, __FUNCTION__, __FILE__);
            }
            // Get number GPUs
            auto gpuNumber = wrapperStructPose.gpuNumber;
            auto gpuNumberStart = wrapperStructPose.gpuNumberStart;
            // If number GPU < 0 --> set it to all the available GPUs
            if (gpuNumber < 0)
            {
                // Get total number GPUs
                gpuNumber = getGpuNumber();
                // Reset initial GPU to 0 (we want them all)
                gpuNumberStart = 0;
                // Logging message
                log("Auto-detecting GPUs... Detected " + std::to_string(gpuNumber) + " GPU(s), using them all.", Priority::High);
            }
            // Proper format (ensure the JSON path is formatted as a directory)
            const auto writeKeypointJsonCleaned = formatAsDirectory(writeKeypointJson);
            // Common parameters
            const auto finalOutputSize = wrapperStructPose.outputSize;
            // NOTE(review): producerSharedPtr is dereferenced here without a null check;
            // configureThreadManager() errors later if no producer was set -- confirm a
            // null producer cannot reach this point.
            const Point<int> producerSize{(int)producerSharedPtr->get(CV_CAP_PROP_FRAME_WIDTH),
                                          (int)producerSharedPtr->get(CV_CAP_PROP_FRAME_HEIGHT)};
            if (finalOutputSize.x == -1 || finalOutputSize.y == -1)
            {
                const auto message = "Output resolution cannot be (-1 x -1) unless producerSharedPtr is also set.";
                error(message, __LINE__, __FUNCTION__, __FILE__);
            }
            // Producer worker: reads the frames from the producer
            const auto datumProducer = std::make_shared<DatumProducer<TDatums>>(producerSharedPtr);
            wDatumProducer = std::make_shared<WDatumProducer<TDatumsPtr, TDatums>>(datumProducer);
            // Input cvMat to OpenPose format
            const auto cvMatToOpInput = std::make_shared<CvMatToOpInput>(
                wrapperStructPose.netInputSize, wrapperStructPose.scalesNumber, wrapperStructPose.scaleGap
            );
            spWCvMatToOpInput = std::make_shared<WCvMatToOpInput<TDatumsPtr>>(cvMatToOpInput);
            // Output cvMat conversion (rendering buffer); only generated when the GUI is shown
            const auto cvMatToOpOutput = std::make_shared<CvMatToOpOutput>(finalOutputSize, displayGui);
            spWCvMatToOpOutput = std::make_shared<WCvMatToOpOutput<TDatumsPtr>>(cvMatToOpOutput);
            // Hand extractor(s): one worker sequence per GPU
            if (wrapperStructHand.enable)
            {
                spWPoses.resize(gpuNumber);
                // Hand locations come from the ground-truth files, not from a detector
                const auto handDetector = std::make_shared<HandDetectorFromTxt>(handGroundTruth);
                for (auto gpuId = 0; gpuId < spWPoses.size(); gpuId++)
                {
                    // Hand detector
                    // If tracking
                    if (wrapperStructHand.tracking)
                        error("Tracking not valid for hand detector from JSON files.", __LINE__, __FUNCTION__, __FILE__);
                    // If detection
                    else
                        spWPoses.at(gpuId) = {std::make_shared<WHandDetectorFromTxt<TDatumsPtr>>(handDetector)};
                    // Hand keypoint extractor
                    const auto netOutputSize = wrapperStructHand.netInputSize;
                    const auto handExtractor = std::make_shared<HandExtractor>(
                        wrapperStructHand.netInputSize, netOutputSize, wrapperStructPose.modelFolder,
                        gpuId + gpuNumberStart, wrapperStructHand.scalesNumber, wrapperStructHand.scaleRange
                    );
                    spWPoses.at(gpuId).emplace_back(std::make_shared<WHandExtractor<TDatumsPtr>>(handExtractor));
                }
            }
            // Hand renderer(s): only needed when the GUI is displayed
            std::vector<TWorker> cpuRenderers;
            if (displayGui)
            {
                // Construct hand renderer
                const auto handRenderer = std::make_shared<HandRenderer>(finalOutputSize, wrapperStructHand.renderThreshold,
                                                                         wrapperStructHand.alphaKeypoint,
                                                                         wrapperStructHand.alphaHeatMap,
                                                                         wrapperStructHand.renderMode);
                // Add worker
                cpuRenderers.emplace_back(std::make_shared<WHandRenderer<TDatumsPtr>>(handRenderer));
            }
            // Intermediate workers (e.g. OpenPose format to cv::Mat, json & frames recorder, ...)
            mPostProcessingWs.clear();
            // Frame buffer and ordering: restores frame order when several GPUs run in parallel
            if (spWPoses.size() > 1)
                mPostProcessingWs.emplace_back(std::make_shared<WQueueOrderer<TDatumsPtr>>());
            // Frames processor (OpenPose format -> cv::Mat format)
            if (displayGui)
            {
                mPostProcessingWs = mergeWorkers(mPostProcessingWs, cpuRenderers);
                const auto opOutputToCvMat = std::make_shared<OpOutputToCvMat>(finalOutputSize);
                mPostProcessingWs.emplace_back(std::make_shared<WOpOutputToCvMat<TDatumsPtr>>(opOutputToCvMat));
            }
            // Re-scale pose if desired; only InputResolution is supported by this test
            if (wrapperStructPose.keypointScale != ScaleMode::InputResolution)
                error("Only wrapperStructPose.keypointScale == ScaleMode::InputResolution.", __LINE__, __FUNCTION__, __FILE__);
            if (finalOutputSize != producerSize)
            {
                auto keypointScaler = std::make_shared<KeypointScaler>(ScaleMode::InputResolution);
                mPostProcessingWs.emplace_back(std::make_shared<WKeypointScaler<TDatumsPtr>>(keypointScaler));
            }
            mOutputWs.clear();
            // Write people pose data on disk (json format)
            if (!writeKeypointJsonCleaned.empty())
            {
                const auto keypointJsonSaver = std::make_shared<KeypointJsonSaver>(writeKeypointJsonCleaned);
                mOutputWs.emplace_back(std::make_shared<WKeypointJsonSaver<TDatumsPtr>>(keypointJsonSaver));
            }
            // Minimal graphical user interface (GUI)
            spWGui = nullptr;
            if (displayGui)
            {
                const auto guiInfoAdder = std::make_shared<GuiInfoAdder>(finalOutputSize, gpuNumber, displayGui);
                mOutputWs.emplace_back(std::make_shared<WGuiInfoAdder<TDatumsPtr>>(guiInfoAdder));
                const auto gui = std::make_shared<Gui>(
                    false, finalOutputSize, mThreadManager.getIsRunningSharedPtr()
                );
                spWGui = {std::make_shared<WGui<TDatumsPtr>>(gui)};
            }
            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
    template<typename TDatums, typename TWorker, typename TQueue>
    void WrapperHandFromJsonTest<TDatums, TWorker, TQueue>::exec()
    {
        try
        {
            // Wire the configured workers into the ThreadManager, then run.
            // mThreadManager.exec() blocks the calling thread until processing finishes.
            configureThreadManager();
            mThreadManager.exec();
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
template<typename TDatums, typename TWorker, typename TQueue>
void WrapperHandFromJsonTest<TDatums, TWorker, TQueue>::reset()
{
try
{
mThreadManager.reset();
// Reset
wDatumProducer = nullptr;
spWCvMatToOpInput = nullptr;
spWCvMatToOpOutput = nullptr;
spWPoses.clear();
mPostProcessingWs.clear();
mOutputWs.clear();
spWGui = nullptr;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
}
}
    template<typename TDatums, typename TWorker, typename TQueue>
    void WrapperHandFromJsonTest<TDatums, TWorker, TQueue>::configureThreadManager()
    {
        try
        {
            // Security checks: configure() must have been called and must have produced outputs
            if (spWCvMatToOpInput == nullptr || spWCvMatToOpOutput == nullptr)
                error("Configure the WrapperHandFromJsonTest class before calling `start()`.", __LINE__, __FUNCTION__, __FILE__);
            if (wDatumProducer == nullptr)
            {
                const auto message = "You need to use the OpenPose default producer.";
                error(message, __LINE__, __FUNCTION__, __FILE__);
            }
            if (mOutputWs.empty() && spWGui == nullptr)
            {
                error("No output selected.", __LINE__, __FUNCTION__, __FILE__);
            }
            // Thread Manager:
            // Clean previous thread manager (avoid configure to crash the program if used more than once)
            mThreadManager.reset();
            auto threadId = 0ull;
            auto queueIn = 0ull;
            auto queueOut = 1ull;
            // Recreated on every call so each run starts with fresh frame ids
            spWIdGenerator = std::make_shared<WIdGenerator<std::shared_ptr<TDatums>>>();
            // OpenPose producer: one thread reads frames, assigns ids and converts formats
            mThreadManager.add(threadId++, {wDatumProducer, spWIdGenerator, spWCvMatToOpInput, spWCvMatToOpOutput}, queueIn++, queueOut++); // Thread 0 or 1, queues 0 -> 1
            // Pose estimation & rendering: every GPU worker sequence shares the same queue pair,
            // so queueIn/queueOut are only advanced once after the loop
            if (!spWPoses.empty()) // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs
            {
                for (auto& wPose : spWPoses)
                    mThreadManager.add(threadId++, wPose, queueIn, queueOut);
                queueIn++;
                queueOut++;
            }
            // Post processing workers + output workers, all on a single thread
            mThreadManager.add(threadId++, mergeWorkers(mPostProcessingWs, mOutputWs), queueIn++, queueOut++); // Thread 2 or 3, queues 2 -> 3
            // OpenPose GUI
            if (spWGui != nullptr)
                mThreadManager.add(threadId++, spWGui, queueIn++, queueOut++); // Thread Y+1, queues Q+1 -> Q+2
            log("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
template<typename TDatums, typename TWorker, typename TQueue>
std::vector<TWorker> WrapperHandFromJsonTest<TDatums, TWorker, TQueue>::mergeWorkers(const std::vector<TWorker>& workersA, const std::vector<TWorker>& workersB)
{
try
{
auto workersToReturn(workersA);
for (auto& worker : workersB)
workersToReturn.emplace_back(worker);
return workersToReturn;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return std::vector<TWorker>{};
}
}
}
#endif // OPENPOSE_WRAPPER_WRAPPER_HAND_FROM_JSON_TEST_HPP
......@@ -22,7 +22,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -22,7 +22,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -21,7 +21,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -22,7 +22,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -27,7 +27,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -27,7 +27,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -41,7 +41,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -41,7 +41,7 @@
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging
DEFINE_int32(logging_level, 4, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
// Producer
......
......@@ -14,7 +14,8 @@ namespace op
class DatumProducer
{
public:
explicit DatumProducer(const std::shared_ptr<Producer>& producerSharedPtr, const unsigned long long frameFirst = 0,
explicit DatumProducer(const std::shared_ptr<Producer>& producerSharedPtr,
const unsigned long long frameFirst = 0,
const unsigned long long frameLast = std::numeric_limits<unsigned long long>::max(),
const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr = nullptr);
......@@ -27,7 +28,8 @@ namespace op
unsigned int mNumberConsecutiveEmptyFrames;
std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>> spVideoSeek;
void checkIfTooManyConsecutiveEmptyFrames(unsigned int& numberConsecutiveEmptyFrames, const bool emptyFrame) const;
void checkIfTooManyConsecutiveEmptyFrames(unsigned int& numberConsecutiveEmptyFrames,
const bool emptyFrame) const;
DELETE_COPY(DatumProducer);
};
......@@ -38,13 +40,16 @@ namespace op
// Implementation
#include <opencv2/imgproc/imgproc.hpp> // cv::cvtColor
#include <openpose/producer/datumProducer.hpp>
namespace op
{
template<typename TDatumsNoPtr>
DatumProducer<TDatumsNoPtr>::DatumProducer(const std::shared_ptr<Producer>& producerSharedPtr, const unsigned long long frameFirst, const unsigned long long frameLast,
DatumProducer<TDatumsNoPtr>::DatumProducer(const std::shared_ptr<Producer>& producerSharedPtr,
const unsigned long long frameFirst, const unsigned long long frameLast,
const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr) :
mNumberFramesToProcess{(frameLast != std::numeric_limits<unsigned long long>::max() ? frameLast - frameFirst : frameLast)},
mNumberFramesToProcess{(frameLast != std::numeric_limits<unsigned long long>::max()
? frameLast - frameFirst : frameLast)},
spProducer{producerSharedPtr},
mGlobalCounter{0ll},
mNumberConsecutiveEmptyFrames{0u},
......@@ -67,8 +72,11 @@ namespace op
try
{
// Check last desired frame has not been reached
if (mNumberFramesToProcess != std::numeric_limits<unsigned long long>::max() && mGlobalCounter > mNumberFramesToProcess)
if (mNumberFramesToProcess != std::numeric_limits<unsigned long long>::max()
&& mGlobalCounter > mNumberFramesToProcess)
{
spProducer->release();
}
// If producer released -> it sends an empty cv::Mat + a datumProducerRunning signal
const bool datumProducerRunning = spProducer->isOpened();
auto datums = std::make_shared<TDatumsNoPtr>(1);
......@@ -91,6 +99,17 @@ namespace op
// Get cv::Mat
datum.name = spProducer->getFrameName();
datum.cvInputData = spProducer->getFrame();
if (datum.cvInputData.channels() != 3)
{
const std::string commonMessage{"Input images must be 3-channel BGR."};
if (datum.cvInputData.channels() == 1)
{
log(commonMessage + " Converting your grey image into BGR.", Priority::High, __LINE__, __FUNCTION__, __FILE__);
cv::cvtColor(datum.cvInputData, datum.cvInputData, CV_GRAY2BGR);
}
else
error(commonMessage, __LINE__, __FUNCTION__, __FILE__);
}
datum.cvOutputData = datum.cvInputData;
// Check frames are not empty
checkIfTooManyConsecutiveEmptyFrames(mNumberConsecutiveEmptyFrames, datum.cvInputData.empty());
......@@ -112,12 +131,14 @@ namespace op
}
template<typename TDatumsNoPtr>
void DatumProducer<TDatumsNoPtr>::checkIfTooManyConsecutiveEmptyFrames(unsigned int& numberConsecutiveEmptyFrames, const bool emptyFrame) const
void DatumProducer<TDatumsNoPtr>::checkIfTooManyConsecutiveEmptyFrames(unsigned int& numberConsecutiveEmptyFrames,
const bool emptyFrame) const
{
numberConsecutiveEmptyFrames = (emptyFrame ? numberConsecutiveEmptyFrames+1 : 0);
const auto threshold = 500u;
if (numberConsecutiveEmptyFrames >= threshold)
error("Detected too many (" + std::to_string(numberConsecutiveEmptyFrames) + ") empty frames in a row.", __LINE__, __FUNCTION__, __FILE__);
error("Detected too many (" + std::to_string(numberConsecutiveEmptyFrames) + ") empty frames in a row.",
__LINE__, __FUNCTION__, __FILE__);
}
extern template class DatumProducer<DATUM_BASE_NO_PTR>;
......
......@@ -28,6 +28,8 @@ namespace op
// Security checks
if (cvInputData.empty())
error("Wrong input element (empty cvInputData).", __LINE__, __FUNCTION__, __FILE__);
if (cvInputData.channels() != 3)
error("Input images must be 3-channel BGR.", __LINE__, __FUNCTION__, __FILE__);
// inputNetData - Reescale keeping aspect ratio and transform to float the input deep net image
Array<float> inputNetData{mInputNetSize4D};
......
......@@ -16,6 +16,8 @@ namespace op
// Security checks
if (cvInputData.empty())
error("Wrong input element (empty cvInputData).", __LINE__, __FUNCTION__, __FILE__);
if (cvInputData.channels() != 3)
error("Input images must be 3-channel BGR.", __LINE__, __FUNCTION__, __FILE__);
// outputData - Reescale keeping aspect ratio and transform to float the output image
const Point<int> outputResolution{mOutputSize3D[2], mOutputSize3D[1]};
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册