diff --git a/.nojekyll b/.nojekyll
new file mode 100644
index 000000000..e69de29bb
diff --git a/doxygen.log b/doxygen.log
new file mode 100644
index 000000000..81fe1d7b0
--- /dev/null
+++ b/doxygen.log
@@ -0,0 +1,1280 @@
+warning: Tag 'CLANG_ASSISTED_PARSING' at line 1161 of file '/home/runner/work/openpose/openpose/.doc_autogeneration.doxygen' belongs to an option that was not enabled at compile time.
+ To avoid this warning please remove this line from your configuration file or upgrade it using "doxygen -u", or recompile doxygen with this feature enabled.
+warning: Tag 'CLANG_ADD_INC_PATHS' at line 1167 of file '/home/runner/work/openpose/openpose/.doc_autogeneration.doxygen' belongs to an option that was not enabled at compile time.
+ To avoid this warning please remove this line from your configuration file or upgrade it using "doxygen -u", or recompile doxygen with this feature enabled.
+warning: Tag 'CLANG_OPTIONS' at line 1175 of file '/home/runner/work/openpose/openpose/.doc_autogeneration.doxygen' belongs to an option that was not enabled at compile time.
+ To avoid this warning please remove this line from your configuration file or upgrade it using "doxygen -u", or recompile doxygen with this feature enabled.
+warning: Tag 'CLANG_DATABASE_PATH' at line 1188 of file '/home/runner/work/openpose/openpose/.doc_autogeneration.doxygen' belongs to an option that was not enabled at compile time.
+ To avoid this warning please remove this line from your configuration file or upgrade it using "doxygen -u", or recompile doxygen with this feature enabled.
+warning: Tag 'MSCGEN_PATH' at line 2397 of file '/home/runner/work/openpose/openpose/.doc_autogeneration.doxygen' has become obsolete.
+ To avoid this warning please remove this line from your configuration file or upgrade it using "doxygen -u"
+Doxygen version used: 1.9.1 (ef9b20ac7f8a8621fcfc299f8bd0b80422390f4b)
+Searching for include files...
+Searching for example files...
+Searching for images...
+Searching for dot files...
+Searching for msc files...
+Searching for dia files...
+Searching for files to exclude
+Searching INPUT for files to process...
+Searching for files in directory /home/runner/work/openpose/openpose/doc +Searching for files in directory /home/runner/work/openpose/openpose/doc/advanced +Searching for files in directory /home/runner/work/openpose/openpose/doc/advanced/Chessboard_in_PDF +Searching for files in directory /home/runner/work/openpose/openpose/doc/GUI_help +Searching for files in directory /home/runner/work/openpose/openpose/doc/installation +Searching for files in directory /home/runner/work/openpose/openpose/doc/installation/deprecated +Searching for files in directory /home/runner/work/openpose/openpose/doc/installation/jetson_tx +Searching for files in directory /home/runner/work/openpose/openpose/doc/very_advanced +Searching for files in directory /home/runner/work/openpose/openpose/doc/very_advanced/library_structure +Searching for files in directory /home/runner/work/openpose/openpose/doc/very_advanced/library_structure/UML +Searching for files in directory /home/runner/work/openpose/openpose/doc/very_advanced/library_structure/UML/1_0_0rc3 +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/3d +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/calibration +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/core +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/face +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/filestream +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/gpu +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/gui +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/hand +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/net +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/pose +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/producer +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/thread +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/tracking +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/unity +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/utilities +Searching for files in directory /home/runner/work/openpose/openpose/include/openpose/wrapper +Reading and parsing tag files +Parsing files +Reading /home/runner/work/openpose/openpose/README.md... +Reading /home/runner/work/openpose/openpose/doc/00_index.md... +Reading /home/runner/work/openpose/openpose/doc/01_demo.md... +Reading /home/runner/work/openpose/openpose/doc/02_output.md... +Reading /home/runner/work/openpose/openpose/doc/03_python_api.md... +Reading /home/runner/work/openpose/openpose/doc/04_cpp_api.md... +Reading /home/runner/work/openpose/openpose/doc/05_faq.md... +Reading /home/runner/work/openpose/openpose/doc/06_maximizing_openpose_speed.md... +Reading /home/runner/work/openpose/openpose/doc/07_major_released_features.md... +Reading /home/runner/work/openpose/openpose/doc/08_release_notes.md... +Reading /home/runner/work/openpose/openpose/doc/09_authors_and_contributors.md... +Reading /home/runner/work/openpose/openpose/doc/10_community_projects.md... 
+Reading /home/runner/work/openpose/openpose/doc/advanced/3d_reconstruction_module.md... +Reading /home/runner/work/openpose/openpose/doc/advanced/calibration_module.md... +Reading /home/runner/work/openpose/openpose/doc/advanced/demo_advanced.md... +Reading /home/runner/work/openpose/openpose/doc/advanced/deployment.md... +Reading /home/runner/work/openpose/openpose/doc/advanced/heatmap_output.md... +Reading /home/runner/work/openpose/openpose/doc/advanced/standalone_face_or_hand_keypoint_detector.md... +Reading /home/runner/work/openpose/openpose/doc/installation/0_index.md... +Reading /home/runner/work/openpose/openpose/doc/installation/1_prerequisites.md... +Reading /home/runner/work/openpose/openpose/doc/installation/2_additional_settings.md... +Reading /home/runner/work/openpose/openpose/doc/installation/deprecated/installation_deprecated.md... +Reading /home/runner/work/openpose/openpose/doc/installation/jetson_tx/installation_jetson_tx1.md... +Reading /home/runner/work/openpose/openpose/doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.1.md... +Reading /home/runner/work/openpose/openpose/doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.3.md... +Reading /home/runner/work/openpose/openpose/doc/installation/README.md... +Reading /home/runner/work/openpose/openpose/doc/README.md... +Reading /home/runner/work/openpose/openpose/doc/very_advanced/library_structure/0_index.md... +Reading /home/runner/work/openpose/openpose/doc/very_advanced/library_structure/1_library_deep_overview.md... +Reading /home/runner/work/openpose/openpose/doc/very_advanced/library_structure/2_library_extend_functionality.md... +Reading /home/runner/work/openpose/openpose/doc/very_advanced/library_structure/3_library_add_new_module.md... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/3d/cameraParameterReader.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/3d/cameraParameterReader.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/3d/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/3d/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/3d/jointAngleEstimation.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/3d/jointAngleEstimation.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/3d/poseTriangulation.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/3d/poseTriangulation.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/3d/wJointAngleEstimation.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/3d/wJointAngleEstimation.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/3d/wPoseTriangulation.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/3d/wPoseTriangulation.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/calibration/cameraParameterEstimation.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/calibration/cameraParameterEstimation.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/calibration/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/calibration/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/array.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/array.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/arrayCpuGpu.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/arrayCpuGpu.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/common.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/common.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/cvMatToOpInput.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/cvMatToOpInput.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/cvMatToOpOutput.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/cvMatToOpOutput.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/datum.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/datum.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/gpuRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/gpuRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/keepTopNPeople.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/keepTopNPeople.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/keypointScaler.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/keypointScaler.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/macros.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/macros.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/matrix.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/matrix.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/opOutputToCvMat.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/opOutputToCvMat.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/point.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/point.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/rectangle.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/rectangle.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/renderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/renderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/scaleAndSizeExtractor.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/scaleAndSizeExtractor.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/string.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/string.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/verbosePrinter.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/verbosePrinter.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/wCvMatToOpInput.hpp... 
+Parsing file /home/runner/work/openpose/openpose/include/openpose/core/wCvMatToOpInput.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/wCvMatToOpOutput.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/wCvMatToOpOutput.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/wKeepTopNPeople.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/wKeepTopNPeople.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/wKeypointScaler.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/wKeypointScaler.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/wOpOutputToCvMat.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/wOpOutputToCvMat.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/wScaleAndSizeExtractor.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/wScaleAndSizeExtractor.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/core/wVerbosePrinter.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/core/wVerbosePrinter.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceCpuRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceCpuRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceDetector.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceDetector.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceDetectorOpenCV.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceDetectorOpenCV.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceExtractorCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceExtractorCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceExtractorNet.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceExtractorNet.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceGpuRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceGpuRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceParameters.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceParameters.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/faceRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/faceRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/renderFace.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/renderFace.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/wFaceDetector.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/wFaceDetector.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/wFaceDetectorOpenCV.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/wFaceDetectorOpenCV.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/wFaceExtractorNet.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/wFaceExtractorNet.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/face/wFaceRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/face/wFaceRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/bvhSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/bvhSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/cocoJsonSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/cocoJsonSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/fileSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/fileSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/fileStream.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/fileStream.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/heatMapSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/heatMapSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/imageSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/imageSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/jsonOfstream.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/jsonOfstream.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/keypointSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/keypointSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/peopleJsonSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/peopleJsonSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/udpSender.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/udpSender.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/videoSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/videoSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wBvhSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wBvhSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wCocoJsonSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wCocoJsonSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wFaceSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wFaceSaver.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wHandSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wHandSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wHeatMapSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wHeatMapSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wImageSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wImageSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wPeopleJsonSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wPeopleJsonSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wPoseSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wPoseSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wUdpSender.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wUdpSender.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wVideoSaver.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wVideoSaver.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/filestream/wVideoSaver3D.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/filestream/wVideoSaver3D.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/flags.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/flags.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gpu/cuda.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gpu/cuda.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gpu/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gpu/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gpu/gpu.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gpu/gpu.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gpu/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gpu/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/frameDisplayer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/frameDisplayer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/gui.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/gui.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/gui3D.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/gui3D.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/guiAdam.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/guiAdam.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/guiInfoAdder.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/guiInfoAdder.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/wGui.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/wGui.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/wGui3D.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/wGui3D.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/wGuiAdam.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/wGuiAdam.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/gui/wGuiInfoAdder.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/gui/wGuiInfoAdder.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handCpuRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handCpuRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handDetector.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handDetector.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handDetectorFromTxt.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handDetectorFromTxt.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handExtractorCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handExtractorCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handExtractorNet.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handExtractorNet.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handGpuRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handGpuRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handParameters.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handParameters.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/handRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/handRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/renderHand.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/renderHand.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetector.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetector.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetectorFromTxt.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetectorFromTxt.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetectorTracking.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetectorTracking.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetectorUpdate.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/wHandDetectorUpdate.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/wHandExtractorNet.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/wHandExtractorNet.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/hand/wHandRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/hand/wHandRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/bodyPartConnectorBase.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/bodyPartConnectorBase.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/bodyPartConnectorCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/bodyPartConnectorCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/maximumBase.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/maximumBase.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/maximumCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/maximumCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/net.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/net.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/netCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/netCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/netOpenCv.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/netOpenCv.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/nmsBase.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/nmsBase.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/nmsCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/nmsCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/resizeAndMergeBase.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/resizeAndMergeBase.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/net/resizeAndMergeCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/net/resizeAndMergeCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseCpuRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseCpuRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseExtractor.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseExtractor.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseExtractorCaffe.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseExtractorCaffe.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseExtractorNet.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseExtractorNet.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseGpuRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseGpuRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseParameters.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseParameters.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseParametersRender.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseParametersRender.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/poseRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/poseRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/renderPose.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/renderPose.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/wPoseExtractor.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/wPoseExtractor.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/wPoseExtractorNet.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/wPoseExtractorNet.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/pose/wPoseRenderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/pose/wPoseRenderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/datumProducer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/datumProducer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/flirReader.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/flirReader.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/imageDirectoryReader.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/imageDirectoryReader.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/ipCameraReader.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/ipCameraReader.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/producer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/producer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/spinnakerWrapper.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/spinnakerWrapper.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/videoCaptureReader.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/videoCaptureReader.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/videoReader.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/videoReader.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/wDatumProducer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/wDatumProducer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/producer/webcamReader.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/producer/webcamReader.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/priorityQueue.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/priorityQueue.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/queue.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/queue.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/queueBase.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/queueBase.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/subThread.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/subThread.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/subThreadNoQueue.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/subThreadNoQueue.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/subThreadQueueIn.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/subThreadQueueIn.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/subThreadQueueInOut.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/subThreadQueueInOut.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/subThreadQueueOut.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/subThreadQueueOut.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/thread.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/thread.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/threadManager.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/threadManager.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/wFpsMax.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/wFpsMax.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/wIdGenerator.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/wIdGenerator.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/worker.hpp... 
+Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/worker.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/workerConsumer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/workerConsumer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/workerProducer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/workerProducer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/wQueueAssembler.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/wQueueAssembler.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/thread/wQueueOrderer.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/thread/wQueueOrderer.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/tracking/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/tracking/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/tracking/personIdExtractor.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/tracking/personIdExtractor.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/tracking/personTracker.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/tracking/personTracker.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/tracking/wPersonIdExtractor.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/tracking/wPersonIdExtractor.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/unity/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/unity/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/unity/unityBinding.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/unity/unityBinding.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/check.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/check.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/enumClasses.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/enumClasses.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/errorAndLog.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/errorAndLog.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/fastMath.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/fastMath.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/fileSystem.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/fileSystem.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/flagsToOpenPose.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/flagsToOpenPose.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/headers.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/headers.hpp... +Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/keypoint.hpp... +Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/keypoint.hpp... 
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/openCv.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/openCv.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/pointerContainer.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/pointerContainer.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/profiler.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/profiler.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/standard.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/standard.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/utilities/string.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/utilities/string.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/enumClasses.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/enumClasses.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/headers.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/headers.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapper.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapper.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperAuxiliary.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperAuxiliary.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructExtra.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructExtra.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructFace.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructFace.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructGui.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructGui.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructHand.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructHand.hpp...
+/home/runner/work/openpose/openpose/doc/installation/README.md:1: warning: found more than one \mainpage comment block! (first occurrence: /home/runner/work/openpose/openpose/README.md, line 1), Skipping current block!
+/home/runner/work/openpose/openpose/doc/README.md:1: warning: found more than one \mainpage comment block! (first occurrence: /home/runner/work/openpose/openpose/README.md, line 1), Skipping current block!
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructInput.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructInput.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructOutput.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructOutput.hpp...
+Preprocessing /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp...
+Parsing file /home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp...
+Building macro definition list... +Building group list... +Building directory list... +Building namespace list... +Building file list... +Building class list... +Computing nesting relations for classes... +Associating documentation with classes... +Building example list... +Searching for enumerations... +Searching for documented typedefs... +Searching for members imported via using declarations... +Searching for included using directives... +Searching for documented variables... +Building interface member list... +Building member list... +Searching for friends... +Searching for documented defines... +Computing class inheritance relations... +Computing class usage relations... +Flushing cached template relations that have become invalid... +Computing class relations... +Add enum values to enums... +Searching for member function documentation... +Creating members for template instances... +Building page list... +Search for main page... +Computing page relations... +Determining the scope of groups... +Sorting lists... +Determining which enums are documented +Computing member relations... +Building full member lists recursively... +Adding members to member groups. +Computing member references... +Inheriting documentation... +Generating disk names... +Adding source references... +Adding xrefitems... +Sorting member lists... +Setting anonymous enum type... +Computing dependencies between directories... +Generating citations page... +Counting members... +Counting data structures... +Resolving user defined references... +Finding anchors and sections in the documentation... +Transferring function references... +Combining using relations... +Adding members to index pages... +Correcting members for VHDL... +Computing tooltip texts... +Generating style sheet... +Generating search indices... +Generating example documentation... +Generating file sources... +Generating code for file include/openpose/3d/cameraParameterReader.hpp... +Generating code for file include/openpose/3d/headers.hpp... +Generating code for file include/openpose/calibration/headers.hpp... +Generating code for file include/openpose/core/headers.hpp... +Generating code for file include/openpose/face/headers.hpp... +Generating code for file include/openpose/filestream/headers.hpp... +Generating code for file include/openpose/gpu/headers.hpp... +Generating code for file include/openpose/gui/headers.hpp... +Generating code for file include/openpose/hand/headers.hpp... +Generating code for file include/openpose/headers.hpp... +Generating code for file include/openpose/net/headers.hpp... +Generating code for file include/openpose/pose/headers.hpp... +Generating code for file include/openpose/producer/headers.hpp... +Generating code for file include/openpose/thread/headers.hpp... +Generating code for file include/openpose/tracking/headers.hpp... +Generating code for file include/openpose/unity/headers.hpp... +Generating code for file include/openpose/utilities/headers.hpp... +Generating code for file include/openpose/wrapper/headers.hpp... +Generating code for file include/openpose/3d/jointAngleEstimation.hpp... +Generating code for file include/openpose/3d/poseTriangulation.hpp... +Generating code for file include/openpose/3d/wJointAngleEstimation.hpp... +Generating code for file include/openpose/3d/wPoseTriangulation.hpp... +Generating code for file include/openpose/calibration/cameraParameterEstimation.hpp... +Generating code for file include/openpose/core/array.hpp... +Generating code for file include/openpose/core/arrayCpuGpu.hpp... 
+Generating code for file include/openpose/core/common.hpp... +Generating code for file include/openpose/core/cvMatToOpInput.hpp... +Generating code for file include/openpose/core/cvMatToOpOutput.hpp... +Generating code for file include/openpose/core/datum.hpp... +Generating code for file include/openpose/core/enumClasses.hpp... +Generating code for file include/openpose/filestream/enumClasses.hpp... +Generating code for file include/openpose/gpu/enumClasses.hpp... +Generating code for file include/openpose/gui/enumClasses.hpp... +Generating code for file include/openpose/pose/enumClasses.hpp... +Generating code for file include/openpose/producer/enumClasses.hpp... +Generating code for file include/openpose/thread/enumClasses.hpp... +Generating code for file include/openpose/utilities/enumClasses.hpp... +Generating code for file include/openpose/wrapper/enumClasses.hpp... +Generating code for file include/openpose/core/gpuRenderer.hpp... +Generating code for file include/openpose/core/keepTopNPeople.hpp... +Generating code for file include/openpose/core/keypointScaler.hpp... +Generating code for file include/openpose/core/macros.hpp... +Generating code for file include/openpose/core/matrix.hpp... +Generating code for file include/openpose/core/opOutputToCvMat.hpp... +Generating code for file include/openpose/core/point.hpp... +Generating code for file include/openpose/core/rectangle.hpp... +Generating code for file include/openpose/core/renderer.hpp... +Generating code for file include/openpose/core/scaleAndSizeExtractor.hpp... +Generating code for file include/openpose/core/string.hpp... +Generating code for file include/openpose/utilities/string.hpp... +Generating code for file include/openpose/core/verbosePrinter.hpp... +Generating code for file include/openpose/core/wCvMatToOpInput.hpp... +Generating code for file include/openpose/core/wCvMatToOpOutput.hpp... +Generating code for file include/openpose/core/wKeepTopNPeople.hpp... +Generating code for file include/openpose/core/wKeypointScaler.hpp... +Generating code for file include/openpose/core/wOpOutputToCvMat.hpp... +Generating code for file include/openpose/core/wScaleAndSizeExtractor.hpp... +Generating code for file include/openpose/core/wVerbosePrinter.hpp... +Generating code for file include/openpose/face/faceCpuRenderer.hpp... +Generating code for file include/openpose/face/faceDetector.hpp... +Generating code for file include/openpose/face/faceDetectorOpenCV.hpp... +Generating code for file include/openpose/face/faceExtractorCaffe.hpp... +Generating code for file include/openpose/face/faceExtractorNet.hpp... +Generating code for file include/openpose/face/faceGpuRenderer.hpp... +Generating code for file include/openpose/face/faceParameters.hpp... +Generating code for file include/openpose/face/faceRenderer.hpp... +Generating code for file include/openpose/face/renderFace.hpp... +Generating code for file include/openpose/face/wFaceDetector.hpp... +Generating code for file include/openpose/face/wFaceDetectorOpenCV.hpp... +Generating code for file include/openpose/face/wFaceExtractorNet.hpp... +Generating code for file include/openpose/face/wFaceRenderer.hpp... +Generating code for file include/openpose/filestream/bvhSaver.hpp... +Generating code for file include/openpose/filestream/cocoJsonSaver.hpp... +Generating code for file include/openpose/filestream/fileSaver.hpp... +Generating code for file include/openpose/filestream/fileStream.hpp... +Generating code for file include/openpose/filestream/heatMapSaver.hpp... 
+Generating code for file include/openpose/filestream/imageSaver.hpp... +Generating code for file include/openpose/filestream/jsonOfstream.hpp... +Generating code for file include/openpose/filestream/keypointSaver.hpp... +Generating code for file include/openpose/filestream/peopleJsonSaver.hpp... +Generating code for file include/openpose/filestream/udpSender.hpp... +Generating code for file include/openpose/filestream/videoSaver.hpp... +Generating code for file include/openpose/filestream/wBvhSaver.hpp... +Generating code for file include/openpose/filestream/wCocoJsonSaver.hpp... +Generating code for file include/openpose/filestream/wFaceSaver.hpp... +Generating code for file include/openpose/filestream/wHandSaver.hpp... +Generating code for file include/openpose/filestream/wHeatMapSaver.hpp... +Generating code for file include/openpose/filestream/wImageSaver.hpp... +Generating code for file include/openpose/filestream/wPeopleJsonSaver.hpp... +Generating code for file include/openpose/filestream/wPoseSaver.hpp... +Generating code for file include/openpose/filestream/wUdpSender.hpp... +Generating code for file include/openpose/filestream/wVideoSaver.hpp... +Generating code for file include/openpose/filestream/wVideoSaver3D.hpp... +Generating code for file include/openpose/flags.hpp... +Generating code for file include/openpose/gpu/cuda.hpp... +Generating code for file include/openpose/gpu/gpu.hpp... +Generating code for file include/openpose/gui/frameDisplayer.hpp... +Generating code for file include/openpose/gui/gui.hpp... +Generating code for file include/openpose/gui/gui3D.hpp... +Generating code for file include/openpose/gui/guiAdam.hpp... +Generating code for file include/openpose/gui/guiInfoAdder.hpp... +Generating code for file include/openpose/gui/wGui.hpp... +Generating code for file include/openpose/gui/wGui3D.hpp... +Generating code for file include/openpose/gui/wGuiAdam.hpp... +Generating code for file include/openpose/gui/wGuiInfoAdder.hpp... +Generating code for file include/openpose/hand/handCpuRenderer.hpp... +Generating code for file include/openpose/hand/handDetector.hpp... +Generating code for file include/openpose/hand/handDetectorFromTxt.hpp... +Generating code for file include/openpose/hand/handExtractorCaffe.hpp... +Generating code for file include/openpose/hand/handExtractorNet.hpp... +Generating code for file include/openpose/hand/handGpuRenderer.hpp... +Generating code for file include/openpose/hand/handParameters.hpp... +Generating code for file include/openpose/hand/handRenderer.hpp... +Generating code for file include/openpose/hand/renderHand.hpp... +Generating code for file include/openpose/hand/wHandDetector.hpp... +Generating code for file include/openpose/hand/wHandDetectorFromTxt.hpp... +Generating code for file include/openpose/hand/wHandDetectorTracking.hpp... +Generating code for file include/openpose/hand/wHandDetectorUpdate.hpp... +Generating code for file include/openpose/hand/wHandExtractorNet.hpp... +Generating code for file include/openpose/hand/wHandRenderer.hpp... +Generating code for file include/openpose/net/bodyPartConnectorBase.hpp... +Generating code for file include/openpose/net/bodyPartConnectorCaffe.hpp... +Generating code for file include/openpose/net/maximumBase.hpp... +Generating code for file include/openpose/net/maximumCaffe.hpp... +Generating code for file include/openpose/net/net.hpp... +Generating code for file include/openpose/net/netCaffe.hpp... +Generating code for file include/openpose/net/netOpenCv.hpp... 
+Generating code for file include/openpose/net/nmsBase.hpp... +Generating code for file include/openpose/net/nmsCaffe.hpp... +Generating code for file include/openpose/net/resizeAndMergeBase.hpp... +Generating code for file include/openpose/net/resizeAndMergeCaffe.hpp... +Generating code for file include/openpose/pose/poseCpuRenderer.hpp... +Generating code for file include/openpose/pose/poseExtractor.hpp... +Generating code for file include/openpose/pose/poseExtractorCaffe.hpp... +Generating code for file include/openpose/pose/poseExtractorNet.hpp... +Generating code for file include/openpose/pose/poseGpuRenderer.hpp... +Generating code for file include/openpose/pose/poseParameters.hpp... +Generating code for file include/openpose/pose/poseParametersRender.hpp... +Generating code for file include/openpose/pose/poseRenderer.hpp... +Generating code for file include/openpose/pose/renderPose.hpp... +Generating code for file include/openpose/pose/wPoseExtractor.hpp... +Generating code for file include/openpose/pose/wPoseExtractorNet.hpp... +Generating code for file include/openpose/pose/wPoseRenderer.hpp... +Generating code for file include/openpose/producer/datumProducer.hpp... +Generating code for file include/openpose/producer/flirReader.hpp... +Generating code for file include/openpose/producer/imageDirectoryReader.hpp... +Generating code for file include/openpose/producer/ipCameraReader.hpp... +Generating code for file include/openpose/producer/producer.hpp... +Generating code for file include/openpose/producer/spinnakerWrapper.hpp... +Generating code for file include/openpose/producer/videoCaptureReader.hpp... +Generating code for file include/openpose/producer/videoReader.hpp... +Generating code for file include/openpose/producer/wDatumProducer.hpp... +Generating code for file include/openpose/producer/webcamReader.hpp... +Generating code for file include/openpose/thread/priorityQueue.hpp... +Generating code for file include/openpose/thread/queue.hpp... +Generating code for file include/openpose/thread/queueBase.hpp... +Generating code for file include/openpose/thread/subThread.hpp... +Generating code for file include/openpose/thread/subThreadNoQueue.hpp... +Generating code for file include/openpose/thread/subThreadQueueIn.hpp... +Generating code for file include/openpose/thread/subThreadQueueInOut.hpp... +Generating code for file include/openpose/thread/subThreadQueueOut.hpp... +Generating code for file include/openpose/thread/thread.hpp... +Generating code for file include/openpose/thread/threadManager.hpp... +Generating code for file include/openpose/thread/wFpsMax.hpp... +Generating code for file include/openpose/thread/wIdGenerator.hpp... +Generating code for file include/openpose/thread/worker.hpp... +Generating code for file include/openpose/thread/workerConsumer.hpp... +Generating code for file include/openpose/thread/workerProducer.hpp... +Generating code for file include/openpose/thread/wQueueAssembler.hpp... +Generating code for file include/openpose/thread/wQueueOrderer.hpp... +Generating code for file include/openpose/tracking/personIdExtractor.hpp... +Generating code for file include/openpose/tracking/personTracker.hpp... +Generating code for file include/openpose/tracking/wPersonIdExtractor.hpp... +Generating code for file include/openpose/unity/unityBinding.hpp... +Generating code for file include/openpose/utilities/check.hpp... +Generating code for file include/openpose/utilities/errorAndLog.hpp... +Generating code for file include/openpose/utilities/fastMath.hpp... 
+Generating code for file include/openpose/utilities/fileSystem.hpp... +Generating code for file include/openpose/utilities/flagsToOpenPose.hpp... +Generating code for file include/openpose/utilities/keypoint.hpp... +Generating code for file include/openpose/utilities/openCv.hpp... +Generating code for file include/openpose/utilities/pointerContainer.hpp... +Generating code for file include/openpose/utilities/profiler.hpp... +Generating code for file include/openpose/utilities/standard.hpp... +Generating code for file include/openpose/wrapper/wrapper.hpp... +Generating code for file include/openpose/wrapper/wrapperAuxiliary.hpp... +Generating code for file include/openpose/wrapper/wrapperStructExtra.hpp... +Generating code for file include/openpose/wrapper/wrapperStructFace.hpp... +Generating code for file include/openpose/wrapper/wrapperStructGui.hpp... +Generating code for file include/openpose/wrapper/wrapperStructHand.hpp... +Generating code for file include/openpose/wrapper/wrapperStructInput.hpp... +Generating code for file include/openpose/wrapper/wrapperStructOutput.hpp... +Generating code for file include/openpose/wrapper/wrapperStructPose.hpp... +Generating file documentation... +Generating docs for file doc/00_index.md... +Generating docs for file doc/01_demo.md... +Generating docs for file doc/02_output.md... +Generating docs for file doc/03_python_api.md... +Generating docs for file doc/04_cpp_api.md... +Generating docs for file doc/05_faq.md... +Generating docs for file doc/06_maximizing_openpose_speed.md... +Generating docs for file doc/07_major_released_features.md... +Generating docs for file doc/08_release_notes.md... +Generating docs for file doc/09_authors_and_contributors.md... +Generating docs for file doc/10_community_projects.md... +Generating docs for file doc/advanced/3d_reconstruction_module.md... +Generating docs for file doc/advanced/calibration_module.md... +Generating docs for file doc/advanced/demo_advanced.md... +Generating docs for file doc/advanced/deployment.md... +Generating docs for file doc/advanced/heatmap_output.md... +Generating docs for file doc/advanced/standalone_face_or_hand_keypoint_detector.md... +Generating docs for file doc/installation/0_index.md... +Generating docs for file doc/very_advanced/library_structure/0_index.md... +Generating docs for file doc/installation/1_prerequisites.md... +Generating docs for file doc/installation/2_additional_settings.md... +Generating docs for file doc/installation/deprecated/installation_deprecated.md... +Generating docs for file doc/installation/jetson_tx/installation_jetson_tx1.md... +Generating docs for file doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.1.md... +Generating docs for file doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.3.md... +Generating docs for file doc/very_advanced/library_structure/1_library_deep_overview.md... +Generating docs for file doc/very_advanced/library_structure/2_library_extend_functionality.md... +Generating docs for file doc/very_advanced/library_structure/3_library_add_new_module.md... +Generating docs for file include/openpose/3d/cameraParameterReader.hpp... +Generating docs for file include/openpose/3d/headers.hpp... +Generating docs for file include/openpose/calibration/headers.hpp... +Generating docs for file include/openpose/core/headers.hpp... +Generating docs for file include/openpose/face/headers.hpp... +Generating docs for file include/openpose/filestream/headers.hpp... +Generating docs for file include/openpose/gpu/headers.hpp... 
+Generating docs for file include/openpose/gui/headers.hpp... +Generating docs for file include/openpose/hand/headers.hpp... +Generating docs for file include/openpose/headers.hpp... +Generating docs for file include/openpose/net/headers.hpp... +Generating docs for file include/openpose/pose/headers.hpp... +Generating docs for file include/openpose/producer/headers.hpp... +Generating docs for file include/openpose/thread/headers.hpp... +Generating docs for file include/openpose/tracking/headers.hpp... +Generating docs for file include/openpose/unity/headers.hpp... +Generating docs for file include/openpose/utilities/headers.hpp... +Generating docs for file include/openpose/wrapper/headers.hpp... +Generating docs for file include/openpose/3d/jointAngleEstimation.hpp... +Generating docs for file include/openpose/3d/poseTriangulation.hpp... +Generating docs for file include/openpose/3d/wJointAngleEstimation.hpp... +Generating docs for file include/openpose/3d/wPoseTriangulation.hpp... +Generating docs for file include/openpose/calibration/cameraParameterEstimation.hpp... +Generating docs for file include/openpose/core/array.hpp... +Generating docs for file include/openpose/core/arrayCpuGpu.hpp... +Generating docs for file include/openpose/core/common.hpp... +Generating docs for file include/openpose/core/cvMatToOpInput.hpp... +Generating docs for file include/openpose/core/cvMatToOpOutput.hpp... +Generating docs for file include/openpose/core/datum.hpp... +Generating docs for file include/openpose/core/enumClasses.hpp... +Generating docs for file include/openpose/filestream/enumClasses.hpp... +Generating docs for file include/openpose/gpu/enumClasses.hpp... +Generating docs for file include/openpose/gui/enumClasses.hpp... +Generating docs for file include/openpose/pose/enumClasses.hpp... +Generating docs for file include/openpose/producer/enumClasses.hpp... +Generating docs for file include/openpose/thread/enumClasses.hpp... +Generating docs for file include/openpose/utilities/enumClasses.hpp... +Generating docs for file include/openpose/wrapper/enumClasses.hpp... +Generating docs for file include/openpose/core/gpuRenderer.hpp... +Generating docs for file include/openpose/core/keepTopNPeople.hpp... +Generating docs for file include/openpose/core/keypointScaler.hpp... +Generating docs for file include/openpose/core/macros.hpp... +Generating docs for file include/openpose/core/matrix.hpp... +Generating docs for file include/openpose/core/opOutputToCvMat.hpp... +Generating docs for file include/openpose/core/point.hpp... +Generating docs for file include/openpose/core/rectangle.hpp... +Generating docs for file include/openpose/core/renderer.hpp... +Generating docs for file include/openpose/core/scaleAndSizeExtractor.hpp... +Generating docs for file include/openpose/core/string.hpp... +Generating docs for file include/openpose/utilities/string.hpp... +Generating docs for file include/openpose/core/verbosePrinter.hpp... +Generating docs for file include/openpose/core/wCvMatToOpInput.hpp... +Generating docs for file include/openpose/core/wCvMatToOpOutput.hpp... +Generating docs for file include/openpose/core/wKeepTopNPeople.hpp... +Generating docs for file include/openpose/core/wKeypointScaler.hpp... +Generating docs for file include/openpose/core/wOpOutputToCvMat.hpp... +Generating docs for file include/openpose/core/wScaleAndSizeExtractor.hpp... +Generating docs for file include/openpose/core/wVerbosePrinter.hpp... +Generating docs for file include/openpose/face/faceCpuRenderer.hpp... 
+Generating docs for file include/openpose/face/faceDetector.hpp... +Generating docs for file include/openpose/face/faceDetectorOpenCV.hpp... +Generating docs for file include/openpose/face/faceExtractorCaffe.hpp... +Generating docs for file include/openpose/face/faceExtractorNet.hpp... +Generating docs for file include/openpose/face/faceGpuRenderer.hpp... +Generating docs for file include/openpose/face/faceParameters.hpp... +Generating docs for file include/openpose/face/faceRenderer.hpp... +Generating docs for file include/openpose/face/renderFace.hpp... +Generating docs for file include/openpose/face/wFaceDetector.hpp... +Generating docs for file include/openpose/face/wFaceDetectorOpenCV.hpp... +Generating docs for file include/openpose/face/wFaceExtractorNet.hpp... +Generating docs for file include/openpose/face/wFaceRenderer.hpp... +Generating docs for file include/openpose/filestream/bvhSaver.hpp... +Generating docs for file include/openpose/filestream/cocoJsonSaver.hpp... +Generating docs for file include/openpose/filestream/fileSaver.hpp... +Generating docs for file include/openpose/filestream/fileStream.hpp... +Generating docs for file include/openpose/filestream/heatMapSaver.hpp... +Generating docs for file include/openpose/filestream/imageSaver.hpp... +Generating docs for file include/openpose/filestream/jsonOfstream.hpp... +Generating docs for file include/openpose/filestream/keypointSaver.hpp... +Generating docs for file include/openpose/filestream/peopleJsonSaver.hpp... +Generating docs for file include/openpose/filestream/udpSender.hpp... +Generating docs for file include/openpose/filestream/videoSaver.hpp... +Generating docs for file include/openpose/filestream/wBvhSaver.hpp... +Generating docs for file include/openpose/filestream/wCocoJsonSaver.hpp... +Generating docs for file include/openpose/filestream/wFaceSaver.hpp... +Generating docs for file include/openpose/filestream/wHandSaver.hpp... +Generating docs for file include/openpose/filestream/wHeatMapSaver.hpp... +Generating docs for file include/openpose/filestream/wImageSaver.hpp... +Generating docs for file include/openpose/filestream/wPeopleJsonSaver.hpp... +Generating docs for file include/openpose/filestream/wPoseSaver.hpp... +Generating docs for file include/openpose/filestream/wUdpSender.hpp... +Generating docs for file include/openpose/filestream/wVideoSaver.hpp... +Generating docs for file include/openpose/filestream/wVideoSaver3D.hpp... +Generating docs for file include/openpose/flags.hpp... +Generating docs for file include/openpose/gpu/cuda.hpp... +Generating docs for file include/openpose/gpu/gpu.hpp... +Generating docs for file include/openpose/gui/frameDisplayer.hpp... +Generating docs for file include/openpose/gui/gui.hpp... +Generating docs for file include/openpose/gui/gui3D.hpp... +Generating docs for file include/openpose/gui/guiAdam.hpp... +Generating docs for file include/openpose/gui/guiInfoAdder.hpp... +Generating docs for file include/openpose/gui/wGui.hpp... +Generating docs for file include/openpose/gui/wGui3D.hpp... +Generating docs for file include/openpose/gui/wGuiAdam.hpp... +Generating docs for file include/openpose/gui/wGuiInfoAdder.hpp... +Generating docs for file include/openpose/hand/handCpuRenderer.hpp... +Generating docs for file include/openpose/hand/handDetector.hpp... +Generating docs for file include/openpose/hand/handDetectorFromTxt.hpp... +Generating docs for file include/openpose/hand/handExtractorCaffe.hpp... 
+Generating docs for file include/openpose/hand/handExtractorNet.hpp... +Generating docs for file include/openpose/hand/handGpuRenderer.hpp... +Generating docs for file include/openpose/hand/handParameters.hpp... +Generating docs for file include/openpose/hand/handRenderer.hpp... +Generating docs for file include/openpose/hand/renderHand.hpp... +Generating docs for file include/openpose/hand/wHandDetector.hpp... +Generating docs for file include/openpose/hand/wHandDetectorFromTxt.hpp... +Generating docs for file include/openpose/hand/wHandDetectorTracking.hpp... +Generating docs for file include/openpose/hand/wHandDetectorUpdate.hpp... +Generating docs for file include/openpose/hand/wHandExtractorNet.hpp... +Generating docs for file include/openpose/hand/wHandRenderer.hpp... +Generating docs for file include/openpose/net/bodyPartConnectorBase.hpp... +Generating docs for file include/openpose/net/bodyPartConnectorCaffe.hpp... +Generating docs for file include/openpose/net/maximumBase.hpp... +Generating docs for file include/openpose/net/maximumCaffe.hpp... +Generating docs for file include/openpose/net/net.hpp... +Generating docs for file include/openpose/net/netCaffe.hpp... +Generating docs for file include/openpose/net/netOpenCv.hpp... +Generating docs for file include/openpose/net/nmsBase.hpp... +Generating docs for file include/openpose/net/nmsCaffe.hpp... +Generating docs for file include/openpose/net/resizeAndMergeBase.hpp... +Generating docs for file include/openpose/net/resizeAndMergeCaffe.hpp... +Generating docs for file include/openpose/pose/poseCpuRenderer.hpp... +Generating docs for file include/openpose/pose/poseExtractor.hpp... +Generating docs for file include/openpose/pose/poseExtractorCaffe.hpp... +Generating docs for file include/openpose/pose/poseExtractorNet.hpp... +Generating docs for file include/openpose/pose/poseGpuRenderer.hpp... +Generating docs for file include/openpose/pose/poseParameters.hpp... +Generating docs for file include/openpose/pose/poseParametersRender.hpp... +Generating docs for file include/openpose/pose/poseRenderer.hpp... +Generating docs for file include/openpose/pose/renderPose.hpp... +Generating docs for file include/openpose/pose/wPoseExtractor.hpp... +Generating docs for file include/openpose/pose/wPoseExtractorNet.hpp... +Generating docs for file include/openpose/pose/wPoseRenderer.hpp... +Generating docs for file include/openpose/producer/datumProducer.hpp... +Generating docs for file include/openpose/producer/flirReader.hpp... +Generating docs for file include/openpose/producer/imageDirectoryReader.hpp... +Generating docs for file include/openpose/producer/ipCameraReader.hpp... +Generating docs for file include/openpose/producer/producer.hpp... +Generating docs for file include/openpose/producer/spinnakerWrapper.hpp... +Generating docs for file include/openpose/producer/videoCaptureReader.hpp... +Generating docs for file include/openpose/producer/videoReader.hpp... +Generating docs for file include/openpose/producer/wDatumProducer.hpp... +Generating docs for file include/openpose/producer/webcamReader.hpp... +Generating docs for file include/openpose/thread/priorityQueue.hpp... +Generating docs for file include/openpose/thread/queue.hpp... +Generating docs for file include/openpose/thread/queueBase.hpp... +Generating docs for file include/openpose/thread/subThread.hpp... +Generating docs for file include/openpose/thread/subThreadNoQueue.hpp... +Generating docs for file include/openpose/thread/subThreadQueueIn.hpp... 
+Generating docs for file include/openpose/thread/subThreadQueueInOut.hpp... +Generating docs for file include/openpose/thread/subThreadQueueOut.hpp... +Generating docs for file include/openpose/thread/thread.hpp... +/home/runner/work/openpose/openpose/doc/00_index.md:10: warning: unable to resolve reference to 'installation/05_faq.md' for \ref command +/home/runner/work/openpose/openpose/doc/01_demo.md:33: warning: explicit link request to 'ui' could not be resolved +/home/runner/work/openpose/openpose/doc/01_demo.md:43: warning: explicit link request to 'windows' could not be resolved +/home/runner/work/openpose/openpose/doc/02_output.md:260: warning: explicit link request to 'camera' could not be resolved +/home/runner/work/openpose/openpose/doc/03_python_api.md:27: warning: explicit link request to 'L194' could not be resolved +/home/runner/work/openpose/openpose/doc/03_python_api.md:64: warning: explicit link request to 'cmake' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:45: warning: explicit link request to 'issuecomment' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:70: warning: explicit link request to 'windows' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:112: warning: explicit link request to 'maximum' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:139: warning: explicit link request to 'issuecomment' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:139: warning: explicit link request to 'custom' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:224: warning: explicit link request to 'cpu' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:229: warning: explicit link request to 'profiling' could not be resolved +/home/runner/work/openpose/openpose/doc/05_faq.md:236: warning: explicit link request to 'reinstallation' could not be resolved +/home/runner/work/openpose/openpose/doc/06_maximizing_openpose_speed.md:23: warning: explicit link request to 'faster' could not be resolved +/home/runner/work/openpose/openpose/doc/08_release_notes.md:379: warning: explicit link request to 'cameras' could not be resolved +/home/runner/work/openpose/openpose/doc/advanced/calibration_module.md:34: warning: explicit link request to 'calibration' could not be resolved +/home/runner/work/openpose/openpose/doc/advanced/calibration_module.md:183: warning: explicit link request to 'using' could not be resolved +/home/runner/work/openpose/openpose/doc/advanced/demo_advanced.md:28: warning: explicit link request to 'maximum' could not be resolved +/home/runner/work/openpose/openpose/doc/advanced/demo_advanced.md:68: warning: explicit link request to 'heatmaps' could not be resolved +/home/runner/work/openpose/openpose/doc/advanced/heatmap_output.md:39: warning: explicit link request to 'heat' could not be resolved +/home/runner/work/openpose/openpose/doc/advanced/heatmap_output.md:43: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/0_index.md:182: warning: Found unknown command '\mkdir' +/home/runner/work/openpose/openpose/doc/installation/0_index.md:186: warning: Found unknown command '\Release' +/home/runner/work/openpose/openpose/doc/installation/0_index.md:192: warning: Found unknown command '\Release' +/home/runner/work/openpose/openpose/doc/installation/1_prerequisites.md:38: warning: explicit link request to 'CMake' could not be resolved
+/home/runner/work/openpose/openpose/doc/installation/2_additional_settings.md:71: warning: explicit link request to 'difference' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/deprecated/installation_deprecated.md:20: warning: explicit link request to 'operating' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/deprecated/installation_deprecated.md:25: warning: explicit link request to 'requirements' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/deprecated/installation_deprecated.md:30: warning: explicit link request to 'clone' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/deprecated/installation_deprecated.md:35: warning: explicit link request to 'update' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/deprecated/installation_deprecated.md:178: warning: explicit link request to 'doxygen' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/deprecated/installation_deprecated.md:200: warning: explicit link request to 'compiling' could not be resolved +/home/runner/work/openpose/openpose/doc/installation/jetson_tx/installation_jetson_tx1.md:4: warning: Found unknown command '@dreinsdo' +/home/runner/work/openpose/openpose/doc/installation/jetson_tx/installation_jetson_tx1.md:4: warning: Found unknown command '@dreinsdo' +/home/runner/work/openpose/openpose/doc/very_advanced/library_structure/0_index.md:6: warning: unexpected token TK_EOF as the argument of ref +/home/runner/work/openpose/openpose/doc/very_advanced/library_structure/1_library_deep_overview.md:233: warning: Unsupported xml/html tag found +Generating docs for file include/openpose/thread/threadManager.hpp... +Generating docs for file include/openpose/thread/wFpsMax.hpp... +Generating docs for file include/openpose/thread/wIdGenerator.hpp... +Generating docs for file include/openpose/thread/worker.hpp... +Generating docs for file include/openpose/thread/workerConsumer.hpp... +Generating docs for file include/openpose/thread/workerProducer.hpp... +Generating docs for file include/openpose/thread/wQueueAssembler.hpp... +Generating docs for file include/openpose/thread/wQueueOrderer.hpp... +Generating docs for file include/openpose/tracking/personIdExtractor.hpp... +Generating docs for file include/openpose/tracking/personTracker.hpp... +Generating docs for file include/openpose/tracking/wPersonIdExtractor.hpp... +Generating docs for file include/openpose/unity/unityBinding.hpp... +Generating docs for file include/openpose/utilities/check.hpp... +Generating docs for file include/openpose/utilities/errorAndLog.hpp... +Generating docs for file include/openpose/utilities/fastMath.hpp... +Generating docs for file include/openpose/utilities/fileSystem.hpp... +Generating docs for file include/openpose/utilities/flagsToOpenPose.hpp... +Generating docs for file include/openpose/utilities/keypoint.hpp... +Generating docs for file include/openpose/utilities/openCv.hpp... +Generating docs for file include/openpose/utilities/pointerContainer.hpp... +Generating docs for file include/openpose/utilities/profiler.hpp... +Generating docs for file include/openpose/utilities/standard.hpp... +Generating docs for file include/openpose/wrapper/wrapper.hpp... +Generating docs for file include/openpose/wrapper/wrapperAuxiliary.hpp... +Generating docs for file include/openpose/wrapper/wrapperStructExtra.hpp... +Generating docs for file include/openpose/wrapper/wrapperStructFace.hpp...
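The "Found unknown command '\mkdir'" and "Found unknown command '@dreinsdo'" warnings above arise because Doxygen treats a leading backslash or at-sign as the start of a Doxygen command even inside the Markdown pages it processes, so literal backslashes and GitHub handles need to be escaped (or wrapped in a code span). A minimal sketch of the mechanism follows; the header name and comment text are hypothetical and not taken from the OpenPose sources.

    // doxygen_escape_sketch.hpp -- hypothetical header, used only to illustrate the
    // "Found unknown command" warnings; it is not part of OpenPose.
    #pragma once

    // BAD: Doxygen parses "@dreinsdo" and "\mkdir" as (unknown) commands and reports
    //   warning: Found unknown command '@dreinsdo'
    //   warning: Found unknown command '\mkdir'
    /**
     * Thanks to @dreinsdo for the Jetson TX1 notes. Run \mkdir to create the build folder.
     */
    void unescapedNotes();

    // GOOD: escape the special characters (\@ and \\) or wrap the text in a `code span`
    // so it is rendered literally instead of being interpreted as a Doxygen command.
    /**
     * Thanks to \@dreinsdo for the Jetson TX1 notes. Run `mkdir` to create the build folder.
     */
    void escapedNotes();

The "unable to resolve reference ... for \ref command" and "unexpected token TK_EOF as the argument of ref" warnings are the related case where a \ref command's argument is missing or cannot be matched to a generated page.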
+Generating docs for file include/openpose/wrapper/wrapperStructGui.hpp... +Generating docs for file include/openpose/wrapper/wrapperStructHand.hpp... +Generating docs for file include/openpose/wrapper/wrapperStructInput.hpp... +Generating docs for file include/openpose/wrapper/wrapperStructOutput.hpp... +Generating docs for file include/openpose/wrapper/wrapperStructPose.hpp... +Generating docs for file README.md... +Generating docs for file doc/installation/README.md... +Generating docs for file doc/README.md... +Generating page documentation... +Generating docs for page md_doc_00_index... +Generating docs for page md_doc_01_demo... +Generating docs for page md_doc_02_output... +Generating docs for page md_doc_03_python_api... +Generating docs for page md_doc_04_cpp_api... +Generating docs for page md_doc_05_faq... +Generating docs for page md_doc_06_maximizing_openpose_speed... +Generating docs for page md_doc_07_major_released_features... +Generating docs for page md_doc_08_release_notes... +Generating docs for page md_doc_09_authors_and_contributors... +Generating docs for page md_doc_10_community_projects... +Generating docs for page md_doc_advanced_3d_reconstruction_module... +Generating docs for page md_doc_advanced_calibration_module... +Generating docs for page md_doc_advanced_demo_advanced... +Generating docs for page md_doc_advanced_deployment... +Generating docs for page md_doc_advanced_heatmap_output... +Generating docs for page md_doc_advanced_standalone_face_or_hand_keypoint_detector... +Generating docs for page md_doc_installation_0_index... +Generating docs for page md_doc_installation_1_prerequisites... +Generating docs for page md_doc_installation_2_additional_settings... +Generating docs for page md_doc_installation_deprecated_installation_deprecated... +Generating docs for page md_doc_installation_jetson_tx_installation_jetson_tx1... +Generating docs for page md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1... +Generating docs for page md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3... +Generating docs for page md_doc_very_advanced_library_structure_0_index... +Generating docs for page md_doc_very_advanced_library_structure_1_library_deep_overview... 
+Generating docs for page md_doc_very_advanced_library_structure_2_library_extend_functionality... +/home/runner/work/openpose/openpose/doc/very_advanced/library_structure/2_library_extend_functionality.md:8: warning: unexpected token TK_EOF as the argument of ref +/home/runner/work/openpose/openpose/doc/very_advanced/library_structure/2_library_extend_functionality.md:8: warning: unexpected token TK_EOF as the argument of ref +/home/runner/work/openpose/openpose/include/openpose/calibration/cameraParameterEstimation.hpp:9: warning: argument 'outputFilePath' of command @param is not found in the argument list of op::estimateAndSaveIntrinsics(const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int flags, const std::string &outputParameterFolder, const std::string &imageFolder, const std::string &serialNumber, const bool saveImagesWithCorners=false) +/home/runner/work/openpose/openpose/include/openpose/calibration/cameraParameterEstimation.hpp:16: warning: The following parameters of op::estimateAndSaveIntrinsics(const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int flags, const std::string &outputParameterFolder, const std::string &imageFolder, const std::string &serialNumber, const bool saveImagesWithCorners=false) are not documented: + parameter 'outputParameterFolder' + parameter 'imageFolder' + parameter 'serialNumber' + parameter 'saveImagesWithCorners' +/home/runner/work/openpose/openpose/include/openpose/utilities/openCv.hpp:19: warning: argument 'cvMat' of command @param is not found in the argument list of op::rotateAndFlipFrame(Matrix &frame, const double rotationAngle, const bool flipFrame=false) +/home/runner/work/openpose/openpose/include/openpose/utilities/openCv.hpp:24: warning: The following parameter of op::rotateAndFlipFrame(Matrix &frame, const double rotationAngle, const bool flipFrame=false) is not documented: + parameter 'frame' +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperAuxiliary.hpp:45: warning: The following parameter of op::threadIdPP(unsigned long long &threadId, const bool multiThreadEnabled) is not documented: + parameter 'multiThreadEnabled' +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperAuxiliary.hpp:29: warning: The following parameters of op::wrapperConfigureSanityChecks(WrapperStructPose &wrapperStructPose, const WrapperStructFace &wrapperStructFace, const WrapperStructHand &wrapperStructHand, const WrapperStructExtra &wrapperStructExtra, const WrapperStructInput &wrapperStructInput, const WrapperStructOutput &wrapperStructOutput, const WrapperStructGui &wrapperStructGui, const bool renderOutput, const bool userInputAndPreprocessingWsEmpty, const bool userOutputWsEmpty, const std::shared_ptr< Producer > &producerSharedPtr, const ThreadManagerMode threadManagerMode) are not documented: + parameter 'wrapperStructGui' + parameter 'userInputAndPreprocessingWsEmpty' +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:148: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:148: warning: explicit link request to 'face' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:141: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:141: warning: explicit link request to 'face' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:184: warning: explicit link
request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:184: warning: explicit link request to 'face' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:134: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:170: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:170: warning: explicit link request to 'hand' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:163: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:163: warning: explicit link request to 'hand' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:192: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:192: warning: explicit link request to 'hand' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:155: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:53: warning: explicit link request to 'scales' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:125: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:127: warning: explicit link request to 'BP' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:110: warning: explicit link request to 'heatmaps' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:110: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:110: warning: explicit link request to 'PAFs' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:116: warning: explicit link request to 'heatmaps' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:92: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:83: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:83: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:177: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:177: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/datum.hpp:102: warning: explicit link request to 'people' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/core/matrix.hpp:121: warning: argument 'ucharPtr' of command @param is not found in the argument list of op::Matrix::Matrix(const int rows, const int cols, const int type, void *cvMatPtr) +/home/runner/work/openpose/openpose/include/openpose/core/matrix.hpp:127: warning: The following parameters of op::Matrix::Matrix(const int rows, const int cols, const int type, void *cvMatPtr) are not documented: + parameter 'rows' 
+ parameter 'cols' + parameter 'type' + parameter 'cvMatPtr' +/home/runner/work/openpose/openpose/include/openpose/face/faceExtractorCaffe.hpp:21: warning: The following parameters of op::FaceExtractorCaffe::FaceExtractorCaffe(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true) are not documented: + parameter 'modelFolder' + parameter 'gpuId' + parameter 'heatMapTypes' + parameter 'heatMapScaleMode' + parameter 'enableGoogleLogging' +/home/runner/work/openpose/openpose/include/openpose/face/faceExtractorCaffe.hpp:36: warning: argument 'cvInputData' of command @param is not found in the argument list of op::FaceExtractorCaffe::forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData) +/home/runner/work/openpose/openpose/include/openpose/face/faceExtractorCaffe.hpp:43: warning: The following parameter of op::FaceExtractorCaffe::forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData) is not documented: + parameter 'inputData' +/home/runner/work/openpose/openpose/include/openpose/face/faceExtractorNet.hpp:21: warning: The following parameters of op::FaceExtractorNet::FaceExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect) are not documented: + parameter 'heatMapTypes' + parameter 'heatMapScaleMode' +/home/runner/work/openpose/openpose/include/openpose/face/faceExtractorNet.hpp:38: warning: argument 'cvInputData' of command @param is not found in the argument list of op::FaceExtractorNet::forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)=0 +/home/runner/work/openpose/openpose/include/openpose/face/faceExtractorNet.hpp:45: warning: The following parameter of op::FaceExtractorNet::forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)=0 is not documented: + parameter 'inputData' +/home/runner/work/openpose/openpose/include/openpose/filestream/cocoJsonSaver.hpp:24: warning: The following parameters of op::CocoJsonSaver::CocoJsonSaver(const std::string &filePathToSave, const PoseModel poseModel, const bool humanReadable=true, const int cocoJsonVariants=1, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body, const int cocoJsonVariant=0) are not documented: + parameter 'poseModel' + parameter 'humanReadable' + parameter 'cocoJsonVariants' + parameter 'cocoJsonFormat' + parameter 'cocoJsonVariant' +/home/runner/work/openpose/openpose/include/openpose/filestream/jsonOfstream.hpp:15: warning: argument 'array' of command @param is not found in the argument list of op::JsonOfstream::JsonOfstream(JsonOfstream &&jsonOfstream) +/home/runner/work/openpose/openpose/include/openpose/filestream/jsonOfstream.hpp:19: warning: The following parameter of op::JsonOfstream::JsonOfstream(JsonOfstream &&jsonOfstream) is not documented: + parameter 'jsonOfstream' +/home/runner/work/openpose/openpose/include/openpose/filestream/jsonOfstream.hpp:22: warning: argument 'array' of command @param is not found in the argument list of op::JsonOfstream::operator=(JsonOfstream &&jsonOfstream) +/home/runner/work/openpose/openpose/include/openpose/filestream/jsonOfstream.hpp:27: warning: The following parameter of 
op::JsonOfstream::operator=(JsonOfstream &&jsonOfstream) is not documented: + parameter 'jsonOfstream' +/home/runner/work/openpose/openpose/include/openpose/hand/handExtractorCaffe.hpp:26: warning: The following parameters of op::HandExtractorCaffe::HandExtractorCaffe(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true) are not documented: + parameter 'heatMapTypes' + parameter 'heatMapScaleMode' + parameter 'enableGoogleLogging' +/home/runner/work/openpose/openpose/include/openpose/hand/handExtractorNet.hpp:24: warning: The following parameters of op::HandExtractorNet::HandExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect) are not documented: + parameter 'heatMapTypes' + parameter 'heatMapScaleMode' +Generating docs for page md_doc_very_advanced_library_structure_3_library_add_new_module... +Generating group documentation... +Generating class documentation... +Generating namespace index... +Generating docs for namespace op +Generating docs for compound op::CameraParameterReader... +Generating docs for compound op::PoseTriangulation... +Generating docs for compound op::WPoseTriangulation... +Generating docs for compound op::Array... +Generating docs for compound op::ArrayCpuGpu... +Generating docs for compound op::CvMatToOpInput... +Generating docs for compound op::CvMatToOpOutput... +Generating docs for compound op::Datum... +Generating docs for compound op::GpuRenderer... +Generating docs for compound op::KeepTopNPeople... +Generating docs for compound op::KeypointScaler... +Generating docs for compound op::Matrix... +Generating docs for compound op::OpOutputToCvMat... +Generating docs for compound op::Point... +Generating docs for compound op::Rectangle... +Generating docs for compound op::Renderer... +Generating docs for compound op::ScaleAndSizeExtractor... +Generating docs for compound op::String... +Generating docs for compound op::VerbosePrinter... +Generating docs for compound op::WCvMatToOpInput... +Generating docs for compound op::WCvMatToOpOutput... +Generating docs for compound op::WKeepTopNPeople... +Generating docs for compound op::WKeypointScaler... +Generating docs for compound op::WOpOutputToCvMat... +Generating docs for compound op::WScaleAndSizeExtractor... +Generating docs for compound op::WVerbosePrinter... +Generating docs for compound op::FaceCpuRenderer... +Generating docs for compound op::FaceDetector... +Generating docs for compound op::FaceDetectorOpenCV... +Generating docs for compound op::FaceExtractorCaffe... +Generating docs for compound op::FaceExtractorNet... +Generating docs for compound op::FaceGpuRenderer... +Generating docs for compound op::FaceRenderer... +Generating docs for compound op::WFaceDetector... +Generating docs for compound op::WFaceDetectorOpenCV... +Generating docs for compound op::WFaceExtractorNet... +Generating docs for compound op::WFaceRenderer... +Generating docs for compound op::CocoJsonSaver... +Generating docs for compound op::FileSaver... +Generating docs for compound op::HeatMapSaver...
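Most of the header warnings in this block report @param tags that have drifted out of sync with the function they document: either the tag names a parameter that no longer exists (e.g. 'cvMat', 'outputFilePath', 'array') or a parameter in the current signature has no tag at all. A minimal sketch of the pattern, using the op::rotateAndFlipFrame signature quoted in the warning above; the parameter descriptions are illustrative and not copied from the real header.

    // param_mismatch_sketch.hpp -- sketch only; the signature comes from the warning
    // text, the comment wording is illustrative.
    #pragma once

    namespace op
    {
        class Matrix;   // forward declaration is enough for a function declaration

        // The pair of warnings
        //   argument 'cvMat' of command @param is not found in the argument list
        //   The following parameter ... is not documented: parameter 'frame'
        // means the comment still said "@param cvMat ..." after the argument was
        // renamed to "frame". Renaming the tag silences both warnings at once:
        /**
         * Rotates and/or flips the frame.
         * @param frame Matrix with the frame to be rotated and/or flipped in place.
         * @param rotationAngle Rotation to apply, in degrees.
         * @param flipFrame Whether to additionally flip the frame.
         */
        void rotateAndFlipFrame(Matrix& frame, const double rotationAngle, const bool flipFrame = false);
    }

The constructor warnings for op::FaceExtractorCaffe, op::CocoJsonSaver, op::HandExtractorCaffe and op::HandExtractorNet are the second half of the same pattern: the signatures carry parameters (modelFolder, gpuId, heatMapTypes, heatMapScaleMode, ...) that never received matching @param entries.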
+Generating docs for compound op::ImageSaver... +Generating docs for compound op::JsonOfstream... +Generating docs for compound op::KeypointSaver... +Generating docs for compound op::PeopleJsonSaver... +Generating docs for compound op::UdpSender... +Generating docs for compound op::VideoSaver... +Generating docs for compound op::WCocoJsonSaver... +Generating docs for compound op::WFaceSaver... +Generating docs for compound op::WHandSaver... +Generating docs for compound op::WHeatMapSaver... +Generating docs for compound op::WImageSaver... +Generating docs for compound op::WPeopleJsonSaver... +Generating docs for compound op::WPoseSaver... +Generating docs for compound op::WUdpSender... +Generating docs for compound op::WVideoSaver... +Generating docs for compound op::WVideoSaver3D... +Generating docs for compound op::FrameDisplayer... +Generating docs for compound op::Gui... +Generating docs for compound op::Gui3D... +Generating docs for compound op::GuiInfoAdder... +Generating docs for compound op::WGui... +Generating docs for compound op::WGui3D... +Generating docs for compound op::WGuiInfoAdder... +Generating docs for compound op::HandCpuRenderer... +Generating docs for compound op::HandDetector... +Generating docs for compound op::HandDetectorFromTxt... +Generating docs for compound op::HandExtractorCaffe... +Generating docs for compound op::HandExtractorNet... +Generating docs for compound op::HandGpuRenderer... +Generating docs for compound op::HandRenderer... +Generating docs for compound op::WHandDetector... +Generating docs for compound op::WHandDetectorFromTxt... +Generating docs for compound op::WHandDetectorTracking... +Generating docs for compound op::WHandDetectorUpdate... +Generating docs for compound op::WHandExtractorNet... +Generating docs for compound op::WHandRenderer... 
+/home/runner/work/openpose/openpose/include/openpose/pose/poseExtractorCaffe.hpp:39: warning: The following parameters of op::PoseExtractorCaffe::forwardPass(const std::vector< Array< float >> &inputNetData, const Point< int > &inputDataSize, const std::vector< double > &scaleInputToNetInputs={1.f}, const Array< float > &poseNetOutput=Array< float >{}) are not documented: + parameter 'inputNetData' + parameter 'inputDataSize' + parameter 'scaleInputToNetInputs' +/home/runner/work/openpose/openpose/include/openpose/producer/imageDirectoryReader.hpp:26: warning: The following parameter of op::ImageDirectoryReader::ImageDirectoryReader(const std::string &imageDirectoryPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1) is not documented: + parameter 'undistortImage' +/home/runner/work/openpose/openpose/include/openpose/producer/ipCameraReader.hpp:19: warning: The following parameters of op::IpCameraReader::IpCameraReader(const std::string &cameraPath, const std::string &cameraParameterPath="", const bool undistortImage=false) are not documented: + parameter 'cameraParameterPath' + parameter 'undistortImage' +/home/runner/work/openpose/openpose/include/openpose/producer/videoCaptureReader.hpp:20: warning: The following parameters of op::VideoCaptureReader::VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews) are not documented: + parameter 'throwExceptionIfNoOpened' + parameter 'cameraParameterPath' + parameter 'undistortImage' + parameter 'numberViews' +/home/runner/work/openpose/openpose/include/openpose/producer/videoCaptureReader.hpp:29: warning: The following parameters of op::VideoCaptureReader::VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews) are not documented: + parameter 'cameraParameterPath' + parameter 'undistortImage' + parameter 'numberViews' +/home/runner/work/openpose/openpose/include/openpose/producer/videoReader.hpp:25: warning: The following parameter of op::VideoReader::VideoReader(const std::string &videoPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1) is not documented: + parameter 'undistortImage' +/home/runner/work/openpose/openpose/include/openpose/producer/webcamReader.hpp:27: warning: The following parameters of op::WebcamReader::WebcamReader(const int webcamIndex=0, const Point< int > &webcamResolution=Point< int >{}, const bool throwExceptionIfNoOpened=true, const std::string &cameraParameterPath="", const bool undistortImage=false) are not documented: + parameter 'cameraParameterPath' + parameter 'undistortImage' +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructExtra.hpp:25: warning: explicit link request to 'cameras' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp:120: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp:121: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp:121: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp:122:
warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp:122: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp:122: warning: explicit link request to 'body' could not be resolved +/home/runner/work/openpose/openpose/include/openpose/wrapper/wrapperStructPose.hpp:122: warning: explicit link request to 'pair' could not be resolved +Generating docs for compound op::BodyPartConnectorCaffe... +Generating docs for compound op::MaximumCaffe... +Generating docs for compound op::Net... +Generating docs for compound op::NetCaffe... +Generating docs for compound op::NetOpenCv... +Generating docs for compound op::NmsCaffe... +Generating docs for compound op::ResizeAndMergeCaffe... +Generating docs for compound op::PoseCpuRenderer... +Generating docs for compound op::PoseExtractor... +Generating docs for compound op::PoseExtractorCaffe... +Generating docs for compound op::PoseExtractorNet... +Generating docs for compound op::PoseGpuRenderer... +Generating docs for compound op::PoseRenderer... +Generating docs for compound op::WPoseExtractor... +Generating docs for compound op::WPoseExtractorNet... +Generating docs for compound op::WPoseRenderer... +Generating docs for compound op::DatumProducer... +Generating docs for compound op::FlirReader... +Generating docs for compound op::ImageDirectoryReader... +Generating docs for compound op::IpCameraReader... +Generating docs for compound op::Producer... +Generating docs for compound op::SpinnakerWrapper... +Generating docs for compound op::VideoCaptureReader... +Generating docs for compound op::VideoReader... +Generating docs for compound op::WDatumProducer... +Generating docs for compound op::WebcamReader... +Generating docs for compound op::PriorityQueue... +Generating docs for compound op::Queue... +Generating docs for compound op::QueueBase... +Generating docs for compound op::SubThread... +Generating docs for compound op::SubThreadNoQueue... +Generating docs for compound op::SubThreadQueueIn... +Generating docs for compound op::SubThreadQueueInOut... +Generating docs for compound op::SubThreadQueueOut... +Generating docs for compound op::Thread... +Generating docs for compound op::ThreadManager... +Generating docs for compound op::WFpsMax... +Generating docs for compound op::WIdGenerator... +Generating docs for compound op::Worker... +Generating docs for compound op::WorkerConsumer... +Generating docs for compound op::WorkerProducer... +Generating docs for compound op::WQueueAssembler... +Generating docs for compound op::WQueueOrderer... +Generating docs for compound op::PersonIdExtractor... +Generating docs for compound op::PersonTracker... +Generating docs for compound op::WPersonIdExtractor... +Generating docs for compound op::PointerContainerGreater... +Generating docs for compound op::PointerContainerLess... +Generating docs for compound op::Profiler... +Generating docs for compound op::WrapperT... +Generating docs for compound op::WrapperStructExtra... +Generating docs for compound op::WrapperStructFace... +Generating docs for compound op::WrapperStructGui... +Generating docs for compound op::WrapperStructHand... +Generating docs for compound op::WrapperStructInput... +Generating docs for compound op::WrapperStructOutput... +Generating docs for compound op::WrapperStructPose...
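The recurring "explicit link request to 'people' / 'body' / 'face' could not be resolved" warnings come from Doxygen's #word auto-link syntax: a word prefixed with '#' inside a comment or Markdown page is treated as a link to a documented symbol of that name, so plain text such as "#people x #body parts" or a URL fragment produces an unresolved-link warning when no such symbol exists. A hedged sketch of the trigger and the usual escape; the struct and comment text are hypothetical, not the real op::Datum or op::WrapperStructPose members.

    // hash_link_sketch.hpp -- hypothetical struct, used only to illustrate the
    // "explicit link request ... could not be resolved" warnings.
    #pragma once
    #include <vector>

    struct DatumSketch
    {
        /// BAD: "#people" is read as an explicit link to a symbol named "people",
        /// so Doxygen warns that the link request could not be resolved.
        /// Size: #people x 25 keypoints x 3 (x, y, score).
        std::vector<float> unescapedKeypoints;

        /// GOOD: escaping the hash (\#) renders it literally and keeps the text
        /// unchanged in the generated HTML.
        /// Size: \#people x 25 keypoints x 3 (x, y, score).
        std::vector<float> escapedKeypoints;
    };

The 'cameras', 'pair', 'heatmaps' and 'PAFs' variants earlier in the log are the same mechanism applied to other words in the comments.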
+Generating docs for namespace op::ConfigureError +Generating docs for namespace op::ConfigureLog +Generating graph info page... +Generating directory documentation... +Generating index page... +Generating page index... +Generating module index... +Generating namespace index... +Generating namespace member index... +Generating annotated compound index... +Generating alphabetical compound index... +Generating hierarchical class index... +Generating member index... +Generating file index... +Generating file member index... +Generating example index... +finalizing index lists... +writing tag file... +Running plantuml with JAVA... +lookup cache used 3559/65536 hits=16885 misses=3882 +finished... diff --git a/index.html b/index.html new file mode 100644 index 000000000..34d72f1ba --- /dev/null +++ b/index.html @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/web/.github/media/body_heat_maps.png b/web/.github/media/body_heat_maps.png new file mode 100644 index 000000000..024ea2802 Binary files /dev/null and b/web/.github/media/body_heat_maps.png differ diff --git a/web/.github/media/dance_foot.gif b/web/.github/media/dance_foot.gif new file mode 100644 index 000000000..352da5e01 Binary files /dev/null and b/web/.github/media/dance_foot.gif differ diff --git a/web/.github/media/installation/cmake_im_1.png b/web/.github/media/installation/cmake_im_1.png new file mode 100644 index 000000000..66fff4520 Binary files /dev/null and b/web/.github/media/installation/cmake_im_1.png differ diff --git a/web/.github/media/installation/cmake_im_1_windows.png b/web/.github/media/installation/cmake_im_1_windows.png new file mode 100644 index 000000000..af4d864b8 Binary files /dev/null and b/web/.github/media/installation/cmake_im_1_windows.png differ diff --git a/web/.github/media/installation/cmake_im_2.png b/web/.github/media/installation/cmake_im_2.png new file mode 100644 index 000000000..0b5135607 Binary files /dev/null and b/web/.github/media/installation/cmake_im_2.png differ diff --git a/web/.github/media/installation/cmake_im_2_windows.png b/web/.github/media/installation/cmake_im_2_windows.png new file mode 100644 index 000000000..82f79633d Binary files /dev/null and b/web/.github/media/installation/cmake_im_2_windows.png differ diff --git a/web/.github/media/installation/cmake_im_2_windows_new.png b/web/.github/media/installation/cmake_im_2_windows_new.png new file mode 100644 index 000000000..018058aba Binary files /dev/null and b/web/.github/media/installation/cmake_im_2_windows_new.png differ diff --git a/web/.github/media/installation/cmake_im_3.png b/web/.github/media/installation/cmake_im_3.png new file mode 100644 index 000000000..447443daa Binary files /dev/null and b/web/.github/media/installation/cmake_im_3.png differ diff --git a/web/.github/media/installation/cmake_im_3_windows.png b/web/.github/media/installation/cmake_im_3_windows.png new file mode 100644 index 000000000..90c89cdd0 Binary files /dev/null and b/web/.github/media/installation/cmake_im_3_windows.png differ diff --git a/web/.github/media/installation/cmake_im_5.png b/web/.github/media/installation/cmake_im_5.png new file mode 100644 index 000000000..959f19d6e Binary files /dev/null and b/web/.github/media/installation/cmake_im_5.png differ diff --git a/web/.github/media/keypoints_face.png b/web/.github/media/keypoints_face.png new file mode 100644 index 000000000..4af4f179d Binary files /dev/null and b/web/.github/media/keypoints_face.png differ diff --git a/web/.github/media/keypoints_hand.odt 
b/web/.github/media/keypoints_hand.odt new file mode 100644 index 000000000..362910e41 Binary files /dev/null and b/web/.github/media/keypoints_hand.odt differ diff --git a/web/.github/media/keypoints_hand.png b/web/.github/media/keypoints_hand.png new file mode 100644 index 000000000..3fb38a8dd Binary files /dev/null and b/web/.github/media/keypoints_hand.png differ diff --git a/web/.github/media/keypoints_pose_18.png b/web/.github/media/keypoints_pose_18.png new file mode 100644 index 000000000..10dd473c4 Binary files /dev/null and b/web/.github/media/keypoints_pose_18.png differ diff --git a/web/.github/media/keypoints_pose_25.png b/web/.github/media/keypoints_pose_25.png new file mode 100644 index 000000000..7f0c24524 Binary files /dev/null and b/web/.github/media/keypoints_pose_25.png differ diff --git a/web/.github/media/openpose3d.gif b/web/.github/media/openpose3d.gif new file mode 100644 index 000000000..fd860f5ba Binary files /dev/null and b/web/.github/media/openpose3d.gif differ diff --git a/web/.github/media/openpose_vs_competition.png b/web/.github/media/openpose_vs_competition.png new file mode 100644 index 000000000..e0c4f9374 Binary files /dev/null and b/web/.github/media/openpose_vs_competition.png differ diff --git a/web/.github/media/paf_heat_maps.png b/web/.github/media/paf_heat_maps.png new file mode 100644 index 000000000..ca2ebc452 Binary files /dev/null and b/web/.github/media/paf_heat_maps.png differ diff --git a/web/.github/media/pose_face.gif b/web/.github/media/pose_face.gif new file mode 100644 index 000000000..74aab23b9 Binary files /dev/null and b/web/.github/media/pose_face.gif differ diff --git a/web/.github/media/pose_face_hands.gif b/web/.github/media/pose_face_hands.gif new file mode 100644 index 000000000..6e43b4357 Binary files /dev/null and b/web/.github/media/pose_face_hands.gif differ diff --git a/web/.github/media/pose_hands.gif b/web/.github/media/pose_hands.gif new file mode 100644 index 000000000..695430046 Binary files /dev/null and b/web/.github/media/pose_hands.gif differ diff --git a/web/.github/media/shake.gif b/web/.github/media/shake.gif new file mode 100644 index 000000000..eb2f9cbf3 Binary files /dev/null and b/web/.github/media/shake.gif differ diff --git a/web/.github/media/unity_body_foot.png b/web/.github/media/unity_body_foot.png new file mode 100644 index 000000000..c5c0eee09 Binary files /dev/null and b/web/.github/media/unity_body_foot.png differ diff --git a/web/.github/media/unity_hand_face.png b/web/.github/media/unity_hand_face.png new file mode 100644 index 000000000..210e71c66 Binary files /dev/null and b/web/.github/media/unity_hand_face.png differ diff --git a/web/.github/media/unity_main.png b/web/.github/media/unity_main.png new file mode 100644 index 000000000..3ce9e3215 Binary files /dev/null and b/web/.github/media/unity_main.png differ diff --git a/web/html/.github/media/body_heat_maps.png b/web/html/.github/media/body_heat_maps.png new file mode 100644 index 000000000..024ea2802 Binary files /dev/null and b/web/html/.github/media/body_heat_maps.png differ diff --git a/web/html/.github/media/dance_foot.gif b/web/html/.github/media/dance_foot.gif new file mode 100644 index 000000000..352da5e01 Binary files /dev/null and b/web/html/.github/media/dance_foot.gif differ diff --git a/web/html/.github/media/installation/cmake_im_1.png b/web/html/.github/media/installation/cmake_im_1.png new file mode 100644 index 000000000..66fff4520 Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_1.png differ 
diff --git a/web/html/.github/media/installation/cmake_im_1_windows.png b/web/html/.github/media/installation/cmake_im_1_windows.png new file mode 100644 index 000000000..af4d864b8 Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_1_windows.png differ diff --git a/web/html/.github/media/installation/cmake_im_2.png b/web/html/.github/media/installation/cmake_im_2.png new file mode 100644 index 000000000..0b5135607 Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_2.png differ diff --git a/web/html/.github/media/installation/cmake_im_2_windows.png b/web/html/.github/media/installation/cmake_im_2_windows.png new file mode 100644 index 000000000..82f79633d Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_2_windows.png differ diff --git a/web/html/.github/media/installation/cmake_im_2_windows_new.png b/web/html/.github/media/installation/cmake_im_2_windows_new.png new file mode 100644 index 000000000..018058aba Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_2_windows_new.png differ diff --git a/web/html/.github/media/installation/cmake_im_3.png b/web/html/.github/media/installation/cmake_im_3.png new file mode 100644 index 000000000..447443daa Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_3.png differ diff --git a/web/html/.github/media/installation/cmake_im_3_windows.png b/web/html/.github/media/installation/cmake_im_3_windows.png new file mode 100644 index 000000000..90c89cdd0 Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_3_windows.png differ diff --git a/web/html/.github/media/installation/cmake_im_5.png b/web/html/.github/media/installation/cmake_im_5.png new file mode 100644 index 000000000..959f19d6e Binary files /dev/null and b/web/html/.github/media/installation/cmake_im_5.png differ diff --git a/web/html/.github/media/keypoints_face.png b/web/html/.github/media/keypoints_face.png new file mode 100644 index 000000000..4af4f179d Binary files /dev/null and b/web/html/.github/media/keypoints_face.png differ diff --git a/web/html/.github/media/keypoints_hand.odt b/web/html/.github/media/keypoints_hand.odt new file mode 100644 index 000000000..362910e41 Binary files /dev/null and b/web/html/.github/media/keypoints_hand.odt differ diff --git a/web/html/.github/media/keypoints_hand.png b/web/html/.github/media/keypoints_hand.png new file mode 100644 index 000000000..3fb38a8dd Binary files /dev/null and b/web/html/.github/media/keypoints_hand.png differ diff --git a/web/html/.github/media/keypoints_pose_18.png b/web/html/.github/media/keypoints_pose_18.png new file mode 100644 index 000000000..10dd473c4 Binary files /dev/null and b/web/html/.github/media/keypoints_pose_18.png differ diff --git a/web/html/.github/media/keypoints_pose_25.png b/web/html/.github/media/keypoints_pose_25.png new file mode 100644 index 000000000..7f0c24524 Binary files /dev/null and b/web/html/.github/media/keypoints_pose_25.png differ diff --git a/web/html/.github/media/openpose3d.gif b/web/html/.github/media/openpose3d.gif new file mode 100644 index 000000000..fd860f5ba Binary files /dev/null and b/web/html/.github/media/openpose3d.gif differ diff --git a/web/html/.github/media/openpose_vs_competition.png b/web/html/.github/media/openpose_vs_competition.png new file mode 100644 index 000000000..e0c4f9374 Binary files /dev/null and b/web/html/.github/media/openpose_vs_competition.png differ diff --git a/web/html/.github/media/paf_heat_maps.png 
b/web/html/.github/media/paf_heat_maps.png new file mode 100644 index 000000000..ca2ebc452 Binary files /dev/null and b/web/html/.github/media/paf_heat_maps.png differ diff --git a/web/html/.github/media/pose_face.gif b/web/html/.github/media/pose_face.gif new file mode 100644 index 000000000..74aab23b9 Binary files /dev/null and b/web/html/.github/media/pose_face.gif differ diff --git a/web/html/.github/media/pose_face_hands.gif b/web/html/.github/media/pose_face_hands.gif new file mode 100644 index 000000000..6e43b4357 Binary files /dev/null and b/web/html/.github/media/pose_face_hands.gif differ diff --git a/web/html/.github/media/pose_hands.gif b/web/html/.github/media/pose_hands.gif new file mode 100644 index 000000000..695430046 Binary files /dev/null and b/web/html/.github/media/pose_hands.gif differ diff --git a/web/html/.github/media/shake.gif b/web/html/.github/media/shake.gif new file mode 100644 index 000000000..eb2f9cbf3 Binary files /dev/null and b/web/html/.github/media/shake.gif differ diff --git a/web/html/.github/media/unity_body_foot.png b/web/html/.github/media/unity_body_foot.png new file mode 100644 index 000000000..c5c0eee09 Binary files /dev/null and b/web/html/.github/media/unity_body_foot.png differ diff --git a/web/html/.github/media/unity_hand_face.png b/web/html/.github/media/unity_hand_face.png new file mode 100644 index 000000000..210e71c66 Binary files /dev/null and b/web/html/.github/media/unity_hand_face.png differ diff --git a/web/html/.github/media/unity_main.png b/web/html/.github/media/unity_main.png new file mode 100644 index 000000000..3ce9e3215 Binary files /dev/null and b/web/html/.github/media/unity_main.png differ diff --git a/web/html/doc/.github/media/body_heat_maps.png b/web/html/doc/.github/media/body_heat_maps.png new file mode 100644 index 000000000..024ea2802 Binary files /dev/null and b/web/html/doc/.github/media/body_heat_maps.png differ diff --git a/web/html/doc/.github/media/dance_foot.gif b/web/html/doc/.github/media/dance_foot.gif new file mode 100644 index 000000000..352da5e01 Binary files /dev/null and b/web/html/doc/.github/media/dance_foot.gif differ diff --git a/web/html/doc/.github/media/installation/cmake_im_1.png b/web/html/doc/.github/media/installation/cmake_im_1.png new file mode 100644 index 000000000..66fff4520 Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_1.png differ diff --git a/web/html/doc/.github/media/installation/cmake_im_1_windows.png b/web/html/doc/.github/media/installation/cmake_im_1_windows.png new file mode 100644 index 000000000..af4d864b8 Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_1_windows.png differ diff --git a/web/html/doc/.github/media/installation/cmake_im_2.png b/web/html/doc/.github/media/installation/cmake_im_2.png new file mode 100644 index 000000000..0b5135607 Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_2.png differ diff --git a/web/html/doc/.github/media/installation/cmake_im_2_windows.png b/web/html/doc/.github/media/installation/cmake_im_2_windows.png new file mode 100644 index 000000000..82f79633d Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_2_windows.png differ diff --git a/web/html/doc/.github/media/installation/cmake_im_2_windows_new.png b/web/html/doc/.github/media/installation/cmake_im_2_windows_new.png new file mode 100644 index 000000000..018058aba Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_2_windows_new.png differ 
diff --git a/web/html/doc/.github/media/installation/cmake_im_3.png b/web/html/doc/.github/media/installation/cmake_im_3.png new file mode 100644 index 000000000..447443daa Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_3.png differ diff --git a/web/html/doc/.github/media/installation/cmake_im_3_windows.png b/web/html/doc/.github/media/installation/cmake_im_3_windows.png new file mode 100644 index 000000000..90c89cdd0 Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_3_windows.png differ diff --git a/web/html/doc/.github/media/installation/cmake_im_5.png b/web/html/doc/.github/media/installation/cmake_im_5.png new file mode 100644 index 000000000..959f19d6e Binary files /dev/null and b/web/html/doc/.github/media/installation/cmake_im_5.png differ diff --git a/web/html/doc/.github/media/keypoints_face.png b/web/html/doc/.github/media/keypoints_face.png new file mode 100644 index 000000000..4af4f179d Binary files /dev/null and b/web/html/doc/.github/media/keypoints_face.png differ diff --git a/web/html/doc/.github/media/keypoints_hand.odt b/web/html/doc/.github/media/keypoints_hand.odt new file mode 100644 index 000000000..362910e41 Binary files /dev/null and b/web/html/doc/.github/media/keypoints_hand.odt differ diff --git a/web/html/doc/.github/media/keypoints_hand.png b/web/html/doc/.github/media/keypoints_hand.png new file mode 100644 index 000000000..3fb38a8dd Binary files /dev/null and b/web/html/doc/.github/media/keypoints_hand.png differ diff --git a/web/html/doc/.github/media/keypoints_pose_18.png b/web/html/doc/.github/media/keypoints_pose_18.png new file mode 100644 index 000000000..10dd473c4 Binary files /dev/null and b/web/html/doc/.github/media/keypoints_pose_18.png differ diff --git a/web/html/doc/.github/media/keypoints_pose_25.png b/web/html/doc/.github/media/keypoints_pose_25.png new file mode 100644 index 000000000..7f0c24524 Binary files /dev/null and b/web/html/doc/.github/media/keypoints_pose_25.png differ diff --git a/web/html/doc/.github/media/openpose3d.gif b/web/html/doc/.github/media/openpose3d.gif new file mode 100644 index 000000000..fd860f5ba Binary files /dev/null and b/web/html/doc/.github/media/openpose3d.gif differ diff --git a/web/html/doc/.github/media/openpose_vs_competition.png b/web/html/doc/.github/media/openpose_vs_competition.png new file mode 100644 index 000000000..e0c4f9374 Binary files /dev/null and b/web/html/doc/.github/media/openpose_vs_competition.png differ diff --git a/web/html/doc/.github/media/paf_heat_maps.png b/web/html/doc/.github/media/paf_heat_maps.png new file mode 100644 index 000000000..ca2ebc452 Binary files /dev/null and b/web/html/doc/.github/media/paf_heat_maps.png differ diff --git a/web/html/doc/.github/media/pose_face.gif b/web/html/doc/.github/media/pose_face.gif new file mode 100644 index 000000000..74aab23b9 Binary files /dev/null and b/web/html/doc/.github/media/pose_face.gif differ diff --git a/web/html/doc/.github/media/pose_face_hands.gif b/web/html/doc/.github/media/pose_face_hands.gif new file mode 100644 index 000000000..6e43b4357 Binary files /dev/null and b/web/html/doc/.github/media/pose_face_hands.gif differ diff --git a/web/html/doc/.github/media/pose_hands.gif b/web/html/doc/.github/media/pose_hands.gif new file mode 100644 index 000000000..695430046 Binary files /dev/null and b/web/html/doc/.github/media/pose_hands.gif differ diff --git a/web/html/doc/.github/media/shake.gif b/web/html/doc/.github/media/shake.gif new file mode 100644 index 
000000000..eb2f9cbf3 Binary files /dev/null and b/web/html/doc/.github/media/shake.gif differ diff --git a/web/html/doc/.github/media/unity_body_foot.png b/web/html/doc/.github/media/unity_body_foot.png new file mode 100644 index 000000000..c5c0eee09 Binary files /dev/null and b/web/html/doc/.github/media/unity_body_foot.png differ diff --git a/web/html/doc/.github/media/unity_hand_face.png b/web/html/doc/.github/media/unity_hand_face.png new file mode 100644 index 000000000..210e71c66 Binary files /dev/null and b/web/html/doc/.github/media/unity_hand_face.png differ diff --git a/web/html/doc/.github/media/unity_main.png b/web/html/doc/.github/media/unity_main.png new file mode 100644 index 000000000..3ce9e3215 Binary files /dev/null and b/web/html/doc/.github/media/unity_main.png differ diff --git a/web/html/doc/00__index_8md.html b/web/html/doc/00__index_8md.html new file mode 100644 index 000000000..a9fcc6ecb --- /dev/null +++ b/web/html/doc/00__index_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/00_index.md File Reference + + + + + + + + + + + + + +
+ [Page body: the standard OpenPose 1.7.0 banner, project tagline, and Doxygen navigation, followed only by the page title "doc/00_index.md File Reference". The generated pages below repeat exactly the same boilerplate around their own titles, so each is reduced here to its diff header and title.]
diff --git a/web/html/doc/01__demo_8md.html b/web/html/doc/01__demo_8md.html new file mode 100644 index 000000000..dc3f783a1 --- /dev/null +++ b/web/html/doc/01__demo_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/01_demo.md File Reference [stub page]
diff --git a/web/html/doc/02__output_8md.html b/web/html/doc/02__output_8md.html new file mode 100644 index 000000000..a41d7f6db --- /dev/null +++ b/web/html/doc/02__output_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/02_output.md File Reference [stub page]
diff --git a/web/html/doc/03__python__api_8md.html b/web/html/doc/03__python__api_8md.html new file mode 100644 index 000000000..f9f374e8f --- /dev/null +++ b/web/html/doc/03__python__api_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/03_python_api.md File Reference [stub page]
diff --git a/web/html/doc/04__cpp__api_8md.html b/web/html/doc/04__cpp__api_8md.html new file mode 100644 index 000000000..54c802539 --- /dev/null +++ b/web/html/doc/04__cpp__api_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/04_cpp_api.md File Reference [stub page]
diff --git a/web/html/doc/05__faq_8md.html b/web/html/doc/05__faq_8md.html new file mode 100644 index 000000000..53a52ac0c --- /dev/null +++ b/web/html/doc/05__faq_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/05_faq.md File Reference [stub page]
diff --git a/web/html/doc/06__maximizing__openpose__speed_8md.html b/web/html/doc/06__maximizing__openpose__speed_8md.html new file mode 100644 index 000000000..036ff6073 --- /dev/null +++ b/web/html/doc/06__maximizing__openpose__speed_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/06_maximizing_openpose_speed.md File Reference [stub page]
diff --git a/web/html/doc/07__major__released__features_8md.html b/web/html/doc/07__major__released__features_8md.html new file mode 100644 index 000000000..712799f20 --- /dev/null +++ b/web/html/doc/07__major__released__features_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/07_major_released_features.md File Reference [stub page]
diff --git a/web/html/doc/08__release__notes_8md.html b/web/html/doc/08__release__notes_8md.html new file mode 100644 index 000000000..3ba998f40 --- /dev/null +++ b/web/html/doc/08__release__notes_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/08_release_notes.md File Reference [stub page]
diff --git a/web/html/doc/09__authors__and__contributors_8md.html b/web/html/doc/09__authors__and__contributors_8md.html new file mode 100644 index 000000000..8cf9fcf09 --- /dev/null +++ b/web/html/doc/09__authors__and__contributors_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/09_authors_and_contributors.md File Reference [stub page]
diff --git a/web/html/doc/10__community__projects_8md.html b/web/html/doc/10__community__projects_8md.html new file mode 100644 index 000000000..e6531875c --- /dev/null +++ b/web/html/doc/10__community__projects_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/10_community_projects.md File Reference [stub page]
diff --git a/web/html/doc/1__library__deep__overview_8md.html b/web/html/doc/1__library__deep__overview_8md.html new file mode 100644 index 000000000..cc24d5119 --- /dev/null +++ b/web/html/doc/1__library__deep__overview_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/very_advanced/library_structure/1_library_deep_overview.md File Reference [stub page]
diff --git a/web/html/doc/1__prerequisites_8md.html b/web/html/doc/1__prerequisites_8md.html new file mode 100644 index 000000000..0b2fb93da --- /dev/null +++ b/web/html/doc/1__prerequisites_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/installation/1_prerequisites.md File Reference [stub page]
diff --git a/web/html/doc/2__additional__settings_8md.html b/web/html/doc/2__additional__settings_8md.html new file mode 100644 index 000000000..127180417 --- /dev/null +++ b/web/html/doc/2__additional__settings_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/installation/2_additional_settings.md File Reference [stub page]
diff --git a/web/html/doc/2__library__extend__functionality_8md.html b/web/html/doc/2__library__extend__functionality_8md.html new file mode 100644 index 000000000..cdad3c3cb --- /dev/null +++ b/web/html/doc/2__library__extend__functionality_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/very_advanced/library_structure/2_library_extend_functionality.md File Reference [stub page]
diff --git a/web/html/doc/3__library__add__new__module_8md.html b/web/html/doc/3__library__add__new__module_8md.html new file mode 100644 index 000000000..ba328548c --- /dev/null +++ b/web/html/doc/3__library__add__new__module_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/very_advanced/library_structure/3_library_add_new_module.md File Reference [stub page]
diff --git a/web/html/doc/3d_2headers_8hpp.html b/web/html/doc/3d_2headers_8hpp.html new file mode 100644 index 000000000..8b3cffb0d --- /dev/null +++ b/web/html/doc/3d_2headers_8hpp.html @@ -0,0 +1,108 @@
+OpenPose: include/openpose/3d/headers.hpp File Reference [same boilerplate; page title "headers.hpp File Reference", plus links to the 3d-module headers it aggregates and to its source listing]
+ + + + diff --git a/web/html/doc/3d_2headers_8hpp_source.html b/web/html/doc/3d_2headers_8hpp_source.html new file mode 100644 index 000000000..e4ee49c45 --- /dev/null +++ b/web/html/doc/3d_2headers_8hpp_source.html @@ -0,0 +1,117 @@ + + + + + + + +OpenPose: include/openpose/3d/headers.hpp Source File + + + + + + + + + + + + + +
+ [Generated source page: standard header, tagline, and navigation; the annotated listing of headers.hpp, the umbrella include for the 3d module, follows.]
+ headers.hpp
+Go to the documentation of this file.
1 #ifndef OPENPOSE_3D_HEADERS_HPP
+
2 #define OPENPOSE_3D_HEADERS_HPP
+
3 
+
4 // 3d module
+ + + + + +
10 
+
11 #endif // OPENPOSE_3D_HEADERS_HPP
+ + + + + +
+
diff --git a/web/html/doc/3d__reconstruction__module_8md.html b/web/html/doc/3d__reconstruction__module_8md.html new file mode 100644 index 000000000..f9585c17e --- /dev/null +++ b/web/html/doc/3d__reconstruction__module_8md.html @@ -0,0 +1,101 @@
+OpenPose: doc/advanced/3d_reconstruction_module.md File Reference [stub page: standard header, tagline, navigation, and page title only]
+
+ + + + diff --git a/web/html/doc/LICENSE b/web/html/doc/LICENSE new file mode 100644 index 000000000..71739cad8 --- /dev/null +++ b/web/html/doc/LICENSE @@ -0,0 +1,108 @@ +OPENPOSE: MULTIPERSON KEYPOINT DETECTION +SOFTWARE LICENSE AGREEMENT +ACADEMIC OR NON-PROFIT ORGANIZATION NONCOMMERCIAL RESEARCH USE ONLY + +BY USING OR DOWNLOADING THE SOFTWARE, YOU ARE AGREEING TO THE TERMS OF THIS LICENSE AGREEMENT. IF YOU DO NOT AGREE WITH THESE TERMS, YOU MAY NOT USE OR DOWNLOAD THE SOFTWARE. + +This is a license agreement ("Agreement") between your academic institution or non-profit organization or self (called "Licensee" or "You" in this Agreement) and Carnegie Mellon University (called "Licensor" in this Agreement). All rights not specifically granted to you in this Agreement are reserved for Licensor. + +RESERVATION OF OWNERSHIP AND GRANT OF LICENSE: +Licensor retains exclusive ownership of any copy of the Software (as defined below) licensed under this Agreement and hereby grants to Licensee a personal, non-exclusive, +non-transferable license to use the Software for noncommercial research purposes, without the right to sublicense, pursuant to the terms and conditions of this Agreement. As used in this Agreement, the term "Software" means (i) the actual copy of all or any portion of code for program routines made accessible to Licensee by Licensor pursuant to this Agreement, inclusive of backups, updates, and/or merged copies permitted hereunder or subsequently supplied by Licensor, including all or any file structures, programming instructions, user interfaces and screen formats and sequences as well as any and all documentation and instructions related to it, and (ii) all or any derivatives and/or modifications created or made by You to any of the items specified in (i). + +CONFIDENTIALITY: Licensee acknowledges that the Software is proprietary to Licensor, and as such, Licensee agrees to receive all such materials in confidence and use the Software only in accordance with the terms of this Agreement. Licensee agrees to use reasonable effort to protect the Software from unauthorized use, reproduction, distribution, or publication. + +COPYRIGHT: The Software is owned by Licensor and is protected by United +States copyright laws and applicable international treaties and/or conventions. + +PERMITTED USES: The Software may be used for your own noncommercial internal research purposes. You understand and agree that Licensor is not obligated to implement any suggestions and/or feedback you might provide regarding the Software, but to the extent Licensor does so, you are not entitled to any compensation related thereto. + +DERIVATIVES: You may create derivatives of or make modifications to the Software, however, You agree that all and any such derivatives and modifications will be owned by Licensor and become a part of the Software licensed to You under this Agreement.  You may only use such derivatives and modifications for your own noncommercial internal research purposes, and you may not otherwise use, distribute or copy such derivatives and modifications in violation of this Agreement. + +BACKUPS: If Licensee is an organization, it may make that number of copies of the Software necessary for internal noncommercial use at a single site within its organization provided that all information appearing in or on the original labels, including the copyright and trademark notices are copied onto the labels of the copies. 
+ +USES NOT PERMITTED: You may not distribute, copy or use the Software except as explicitly permitted herein. Licensee has not been granted any trademark license as part of this Agreement and may not use the name or mark “OpenPose", "Carnegie Mellon" or any renditions thereof without the prior written permission of Licensor. + +You may not sell, rent, lease, sublicense, lend, time-share or transfer, in whole or in part, or provide third parties access to prior or present versions (or any parts thereof) of the Software. + +ASSIGNMENT: You may not assign this Agreement or your rights hereunder without the prior written consent of Licensor. Any attempted assignment without such consent shall be null and void. + +TERM: The term of the license granted by this Agreement is from Licensee's acceptance of this Agreement by downloading the Software or by using the Software until terminated as provided below. + +The Agreement automatically terminates without notice if you fail to comply with any provision of this Agreement. Licensee may terminate this Agreement by ceasing using the Software. Upon any termination of this Agreement, Licensee will delete any and all copies of the Software. You agree that all provisions which operate to protect the proprietary rights of Licensor shall remain in force should breach occur and that the obligation of confidentiality described in this Agreement is binding in perpetuity and, as such, survives the term of the Agreement. + +FEE: Provided Licensee abides completely by the terms and conditions of this Agreement, there is no fee due to Licensor for Licensee's use of the Software in accordance with this Agreement. + +DISCLAIMER OF WARRANTIES: THE SOFTWARE IS PROVIDED "AS-IS" WITHOUT WARRANTY OF ANY KIND INCLUDING ANY WARRANTIES OF PERFORMANCE OR MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE OR PURPOSE OR OF NON-INFRINGEMENT. LICENSEE BEARS ALL RISK RELATING TO QUALITY AND PERFORMANCE OF THE SOFTWARE AND RELATED MATERIALS. + +SUPPORT AND MAINTENANCE: No Software support or training by the Licensor is provided as part of this Agreement. + +EXCLUSIVE REMEDY AND LIMITATION OF LIABILITY: To the maximum extent permitted under applicable law, Licensor shall not be liable for direct, indirect, special, incidental, or consequential damages or lost profits related to Licensee's use of and/or inability to use the Software, even if Licensor is advised of the possibility of such damage. + +EXPORT REGULATION: Licensee agrees to comply with any and all applicable +U.S. export control laws, regulations, and/or other laws related to embargoes and sanction programs administered by the Office of Foreign Assets Control. + +SEVERABILITY: If any provision(s) of this Agreement shall be held to be invalid, illegal, or unenforceable by a court or other tribunal of competent jurisdiction, the validity, legality and enforceability of the remaining provisions shall not in any way be affected or impaired thereby. + +NO IMPLIED WAIVERS: No failure or delay by Licensor in enforcing any right or remedy under this Agreement shall be construed as a waiver of any future or other exercise of such right or remedy by Licensor. + +GOVERNING LAW: This Agreement shall be construed and enforced in accordance with the laws of the Commonwealth of Pennsylvania without reference to conflict of laws principles. You consent to the personal jurisdiction of the courts of this County and waive their rights to venue outside of Allegheny County, Pennsylvania. 
+ +ENTIRE AGREEMENT AND AMENDMENTS: This Agreement constitutes the sole and entire agreement between Licensee and Licensor as to the matter set forth herein and supersedes any previous agreements, understandings, and arrangements between the parties relating hereto. + + + +************************************************************************ + +THIRD-PARTY SOFTWARE NOTICES AND INFORMATION + +This project incorporates material from the project(s) listed below (collectively, "Third Party Code"). This Third Party Code is licensed to you under their original license terms set forth below. We reserves all other rights not expressly granted, whether by implication, estoppel or otherwise. + +1. Caffe, version 1.0.0, (https://github.com/BVLC/caffe/) + +COPYRIGHT + +All contributions by the University of California: +Copyright (c) 2014-2017 The Regents of the University of California (Regents) +All rights reserved. + +All other contributions: +Copyright (c) 2014-2017, the respective contributors +All rights reserved. + +Caffe uses a shared copyright model: each contributor holds copyright over +their contributions to Caffe. The project versioning records all such +contribution and copyright details. If a contributor wants to further mark +their specific copyright on a particular contribution, they should indicate +their copyright solely in the commit message of the change when it is +committed. + +LICENSE + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +CONTRIBUTION AGREEMENT + +By contributing to the BVLC/caffe repository through pull-request, comment, +or otherwise, the contributor releases their content to the +license and copyright terms herein. + +************END OF THIRD-PARTY SOFTWARE NOTICES AND INFORMATION********** diff --git a/web/html/doc/Logo_doxygen_black.png b/web/html/doc/Logo_doxygen_black.png new file mode 100644 index 000000000..aa4fc2e49 Binary files /dev/null and b/web/html/doc/Logo_doxygen_black.png differ diff --git a/web/html/doc/_r_e_a_d_m_e_8md.html b/web/html/doc/_r_e_a_d_m_e_8md.html new file mode 100644 index 000000000..9e05d434a --- /dev/null +++ b/web/html/doc/_r_e_a_d_m_e_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: README.md File Reference + + + + + + + + + + + + + +
+ [Generated Doxygen stub page: standard header, tagline, and navigation; page title "README.md File Reference".]
diff --git a/web/html/doc/annotated.html b/web/html/doc/annotated.html new file mode 100644 index 000000000..ca5f129f0 --- /dev/null +++ b/web/html/doc/annotated.html @@ -0,0 +1,238 @@
+OpenPose: Class List [standard header, tagline, and navigation, followed by:]
+ Class List
+ Here are the classes, structs, unions and interfaces with brief descriptions:
+ [detail level 1 2]
+ N op  [collapsible tree of all documented classes in namespace op; the same index appears in annotated_dup.js below]
+
+ + + + diff --git a/web/html/doc/annotated_dup.js b/web/html/doc/annotated_dup.js new file mode 100644 index 000000000..69eef14af --- /dev/null +++ b/web/html/doc/annotated_dup.js @@ -0,0 +1,138 @@ +var annotated_dup = +[ + [ "op", "namespaceop.html", [ + [ "CameraParameterReader", "classop_1_1_camera_parameter_reader.html", "classop_1_1_camera_parameter_reader" ], + [ "PoseTriangulation", "classop_1_1_pose_triangulation.html", "classop_1_1_pose_triangulation" ], + [ "WPoseTriangulation", "classop_1_1_w_pose_triangulation.html", "classop_1_1_w_pose_triangulation" ], + [ "Array", "classop_1_1_array.html", "classop_1_1_array" ], + [ "ArrayCpuGpu", "classop_1_1_array_cpu_gpu.html", "classop_1_1_array_cpu_gpu" ], + [ "CvMatToOpInput", "classop_1_1_cv_mat_to_op_input.html", "classop_1_1_cv_mat_to_op_input" ], + [ "CvMatToOpOutput", "classop_1_1_cv_mat_to_op_output.html", "classop_1_1_cv_mat_to_op_output" ], + [ "Datum", "structop_1_1_datum.html", "structop_1_1_datum" ], + [ "GpuRenderer", "classop_1_1_gpu_renderer.html", "classop_1_1_gpu_renderer" ], + [ "KeepTopNPeople", "classop_1_1_keep_top_n_people.html", "classop_1_1_keep_top_n_people" ], + [ "KeypointScaler", "classop_1_1_keypoint_scaler.html", "classop_1_1_keypoint_scaler" ], + [ "Matrix", "classop_1_1_matrix.html", "classop_1_1_matrix" ], + [ "OpOutputToCvMat", "classop_1_1_op_output_to_cv_mat.html", "classop_1_1_op_output_to_cv_mat" ], + [ "Point", "structop_1_1_point.html", "structop_1_1_point" ], + [ "Rectangle", "structop_1_1_rectangle.html", "structop_1_1_rectangle" ], + [ "Renderer", "classop_1_1_renderer.html", "classop_1_1_renderer" ], + [ "ScaleAndSizeExtractor", "classop_1_1_scale_and_size_extractor.html", "classop_1_1_scale_and_size_extractor" ], + [ "String", "classop_1_1_string.html", "classop_1_1_string" ], + [ "VerbosePrinter", "classop_1_1_verbose_printer.html", "classop_1_1_verbose_printer" ], + [ "WCvMatToOpInput", "classop_1_1_w_cv_mat_to_op_input.html", "classop_1_1_w_cv_mat_to_op_input" ], + [ "WCvMatToOpOutput", "classop_1_1_w_cv_mat_to_op_output.html", "classop_1_1_w_cv_mat_to_op_output" ], + [ "WKeepTopNPeople", "classop_1_1_w_keep_top_n_people.html", "classop_1_1_w_keep_top_n_people" ], + [ "WKeypointScaler", "classop_1_1_w_keypoint_scaler.html", "classop_1_1_w_keypoint_scaler" ], + [ "WOpOutputToCvMat", "classop_1_1_w_op_output_to_cv_mat.html", "classop_1_1_w_op_output_to_cv_mat" ], + [ "WScaleAndSizeExtractor", "classop_1_1_w_scale_and_size_extractor.html", "classop_1_1_w_scale_and_size_extractor" ], + [ "WVerbosePrinter", "classop_1_1_w_verbose_printer.html", "classop_1_1_w_verbose_printer" ], + [ "FaceCpuRenderer", "classop_1_1_face_cpu_renderer.html", "classop_1_1_face_cpu_renderer" ], + [ "FaceDetector", "classop_1_1_face_detector.html", "classop_1_1_face_detector" ], + [ "FaceDetectorOpenCV", "classop_1_1_face_detector_open_c_v.html", "classop_1_1_face_detector_open_c_v" ], + [ "FaceExtractorCaffe", "classop_1_1_face_extractor_caffe.html", "classop_1_1_face_extractor_caffe" ], + [ "FaceExtractorNet", "classop_1_1_face_extractor_net.html", "classop_1_1_face_extractor_net" ], + [ "FaceGpuRenderer", "classop_1_1_face_gpu_renderer.html", "classop_1_1_face_gpu_renderer" ], + [ "FaceRenderer", "classop_1_1_face_renderer.html", "classop_1_1_face_renderer" ], + [ "WFaceDetector", "classop_1_1_w_face_detector.html", "classop_1_1_w_face_detector" ], + [ "WFaceDetectorOpenCV", "classop_1_1_w_face_detector_open_c_v.html", "classop_1_1_w_face_detector_open_c_v" ], + [ "WFaceExtractorNet", 
"classop_1_1_w_face_extractor_net.html", "classop_1_1_w_face_extractor_net" ], + [ "WFaceRenderer", "classop_1_1_w_face_renderer.html", "classop_1_1_w_face_renderer" ], + [ "CocoJsonSaver", "classop_1_1_coco_json_saver.html", "classop_1_1_coco_json_saver" ], + [ "FileSaver", "classop_1_1_file_saver.html", "classop_1_1_file_saver" ], + [ "HeatMapSaver", "classop_1_1_heat_map_saver.html", "classop_1_1_heat_map_saver" ], + [ "ImageSaver", "classop_1_1_image_saver.html", "classop_1_1_image_saver" ], + [ "JsonOfstream", "classop_1_1_json_ofstream.html", "classop_1_1_json_ofstream" ], + [ "KeypointSaver", "classop_1_1_keypoint_saver.html", "classop_1_1_keypoint_saver" ], + [ "PeopleJsonSaver", "classop_1_1_people_json_saver.html", "classop_1_1_people_json_saver" ], + [ "UdpSender", "classop_1_1_udp_sender.html", "classop_1_1_udp_sender" ], + [ "VideoSaver", "classop_1_1_video_saver.html", "classop_1_1_video_saver" ], + [ "WCocoJsonSaver", "classop_1_1_w_coco_json_saver.html", "classop_1_1_w_coco_json_saver" ], + [ "WFaceSaver", "classop_1_1_w_face_saver.html", "classop_1_1_w_face_saver" ], + [ "WHandSaver", "classop_1_1_w_hand_saver.html", "classop_1_1_w_hand_saver" ], + [ "WHeatMapSaver", "classop_1_1_w_heat_map_saver.html", "classop_1_1_w_heat_map_saver" ], + [ "WImageSaver", "classop_1_1_w_image_saver.html", "classop_1_1_w_image_saver" ], + [ "WPeopleJsonSaver", "classop_1_1_w_people_json_saver.html", "classop_1_1_w_people_json_saver" ], + [ "WPoseSaver", "classop_1_1_w_pose_saver.html", "classop_1_1_w_pose_saver" ], + [ "WUdpSender", "classop_1_1_w_udp_sender.html", "classop_1_1_w_udp_sender" ], + [ "WVideoSaver", "classop_1_1_w_video_saver.html", "classop_1_1_w_video_saver" ], + [ "WVideoSaver3D", "classop_1_1_w_video_saver3_d.html", "classop_1_1_w_video_saver3_d" ], + [ "FrameDisplayer", "classop_1_1_frame_displayer.html", "classop_1_1_frame_displayer" ], + [ "Gui", "classop_1_1_gui.html", "classop_1_1_gui" ], + [ "Gui3D", "classop_1_1_gui3_d.html", "classop_1_1_gui3_d" ], + [ "GuiInfoAdder", "classop_1_1_gui_info_adder.html", "classop_1_1_gui_info_adder" ], + [ "WGui", "classop_1_1_w_gui.html", "classop_1_1_w_gui" ], + [ "WGui3D", "classop_1_1_w_gui3_d.html", "classop_1_1_w_gui3_d" ], + [ "WGuiInfoAdder", "classop_1_1_w_gui_info_adder.html", "classop_1_1_w_gui_info_adder" ], + [ "HandCpuRenderer", "classop_1_1_hand_cpu_renderer.html", "classop_1_1_hand_cpu_renderer" ], + [ "HandDetector", "classop_1_1_hand_detector.html", "classop_1_1_hand_detector" ], + [ "HandDetectorFromTxt", "classop_1_1_hand_detector_from_txt.html", "classop_1_1_hand_detector_from_txt" ], + [ "HandExtractorCaffe", "classop_1_1_hand_extractor_caffe.html", "classop_1_1_hand_extractor_caffe" ], + [ "HandExtractorNet", "classop_1_1_hand_extractor_net.html", "classop_1_1_hand_extractor_net" ], + [ "HandGpuRenderer", "classop_1_1_hand_gpu_renderer.html", "classop_1_1_hand_gpu_renderer" ], + [ "HandRenderer", "classop_1_1_hand_renderer.html", "classop_1_1_hand_renderer" ], + [ "WHandDetector", "classop_1_1_w_hand_detector.html", "classop_1_1_w_hand_detector" ], + [ "WHandDetectorFromTxt", "classop_1_1_w_hand_detector_from_txt.html", "classop_1_1_w_hand_detector_from_txt" ], + [ "WHandDetectorTracking", "classop_1_1_w_hand_detector_tracking.html", "classop_1_1_w_hand_detector_tracking" ], + [ "WHandDetectorUpdate", "classop_1_1_w_hand_detector_update.html", "classop_1_1_w_hand_detector_update" ], + [ "WHandExtractorNet", "classop_1_1_w_hand_extractor_net.html", "classop_1_1_w_hand_extractor_net" ], + [ "WHandRenderer", 
"classop_1_1_w_hand_renderer.html", "classop_1_1_w_hand_renderer" ], + [ "BodyPartConnectorCaffe", "classop_1_1_body_part_connector_caffe.html", "classop_1_1_body_part_connector_caffe" ], + [ "MaximumCaffe", "classop_1_1_maximum_caffe.html", "classop_1_1_maximum_caffe" ], + [ "Net", "classop_1_1_net.html", "classop_1_1_net" ], + [ "NetCaffe", "classop_1_1_net_caffe.html", "classop_1_1_net_caffe" ], + [ "NetOpenCv", "classop_1_1_net_open_cv.html", "classop_1_1_net_open_cv" ], + [ "NmsCaffe", "classop_1_1_nms_caffe.html", "classop_1_1_nms_caffe" ], + [ "ResizeAndMergeCaffe", "classop_1_1_resize_and_merge_caffe.html", "classop_1_1_resize_and_merge_caffe" ], + [ "PoseCpuRenderer", "classop_1_1_pose_cpu_renderer.html", "classop_1_1_pose_cpu_renderer" ], + [ "PoseExtractor", "classop_1_1_pose_extractor.html", "classop_1_1_pose_extractor" ], + [ "PoseExtractorCaffe", "classop_1_1_pose_extractor_caffe.html", "classop_1_1_pose_extractor_caffe" ], + [ "PoseExtractorNet", "classop_1_1_pose_extractor_net.html", "classop_1_1_pose_extractor_net" ], + [ "PoseGpuRenderer", "classop_1_1_pose_gpu_renderer.html", "classop_1_1_pose_gpu_renderer" ], + [ "PoseRenderer", "classop_1_1_pose_renderer.html", "classop_1_1_pose_renderer" ], + [ "WPoseExtractor", "classop_1_1_w_pose_extractor.html", "classop_1_1_w_pose_extractor" ], + [ "WPoseExtractorNet", "classop_1_1_w_pose_extractor_net.html", "classop_1_1_w_pose_extractor_net" ], + [ "WPoseRenderer", "classop_1_1_w_pose_renderer.html", "classop_1_1_w_pose_renderer" ], + [ "DatumProducer", "classop_1_1_datum_producer.html", "classop_1_1_datum_producer" ], + [ "FlirReader", "classop_1_1_flir_reader.html", "classop_1_1_flir_reader" ], + [ "ImageDirectoryReader", "classop_1_1_image_directory_reader.html", "classop_1_1_image_directory_reader" ], + [ "IpCameraReader", "classop_1_1_ip_camera_reader.html", "classop_1_1_ip_camera_reader" ], + [ "Producer", "classop_1_1_producer.html", "classop_1_1_producer" ], + [ "SpinnakerWrapper", "classop_1_1_spinnaker_wrapper.html", "classop_1_1_spinnaker_wrapper" ], + [ "VideoCaptureReader", "classop_1_1_video_capture_reader.html", "classop_1_1_video_capture_reader" ], + [ "VideoReader", "classop_1_1_video_reader.html", "classop_1_1_video_reader" ], + [ "WDatumProducer", "classop_1_1_w_datum_producer.html", "classop_1_1_w_datum_producer" ], + [ "WebcamReader", "classop_1_1_webcam_reader.html", "classop_1_1_webcam_reader" ], + [ "PriorityQueue", "classop_1_1_priority_queue.html", "classop_1_1_priority_queue" ], + [ "Queue", "classop_1_1_queue.html", "classop_1_1_queue" ], + [ "QueueBase", "classop_1_1_queue_base.html", "classop_1_1_queue_base" ], + [ "SubThread", "classop_1_1_sub_thread.html", "classop_1_1_sub_thread" ], + [ "SubThreadNoQueue", "classop_1_1_sub_thread_no_queue.html", "classop_1_1_sub_thread_no_queue" ], + [ "SubThreadQueueIn", "classop_1_1_sub_thread_queue_in.html", "classop_1_1_sub_thread_queue_in" ], + [ "SubThreadQueueInOut", "classop_1_1_sub_thread_queue_in_out.html", "classop_1_1_sub_thread_queue_in_out" ], + [ "SubThreadQueueOut", "classop_1_1_sub_thread_queue_out.html", "classop_1_1_sub_thread_queue_out" ], + [ "Thread", "classop_1_1_thread.html", "classop_1_1_thread" ], + [ "ThreadManager", "classop_1_1_thread_manager.html", "classop_1_1_thread_manager" ], + [ "WFpsMax", "classop_1_1_w_fps_max.html", "classop_1_1_w_fps_max" ], + [ "WIdGenerator", "classop_1_1_w_id_generator.html", "classop_1_1_w_id_generator" ], + [ "Worker", "classop_1_1_worker.html", "classop_1_1_worker" ], + [ "WorkerConsumer", 
"classop_1_1_worker_consumer.html", "classop_1_1_worker_consumer" ], + [ "WorkerProducer", "classop_1_1_worker_producer.html", "classop_1_1_worker_producer" ], + [ "WQueueAssembler", "classop_1_1_w_queue_assembler.html", "classop_1_1_w_queue_assembler" ], + [ "WQueueOrderer", "classop_1_1_w_queue_orderer.html", "classop_1_1_w_queue_orderer" ], + [ "PersonIdExtractor", "classop_1_1_person_id_extractor.html", "classop_1_1_person_id_extractor" ], + [ "PersonTracker", "classop_1_1_person_tracker.html", "classop_1_1_person_tracker" ], + [ "WPersonIdExtractor", "classop_1_1_w_person_id_extractor.html", "classop_1_1_w_person_id_extractor" ], + [ "PointerContainerGreater", "classop_1_1_pointer_container_greater.html", "classop_1_1_pointer_container_greater" ], + [ "PointerContainerLess", "classop_1_1_pointer_container_less.html", "classop_1_1_pointer_container_less" ], + [ "Profiler", "classop_1_1_profiler.html", null ], + [ "WrapperT", "classop_1_1_wrapper_t.html", "classop_1_1_wrapper_t" ], + [ "WrapperStructExtra", "structop_1_1_wrapper_struct_extra.html", "structop_1_1_wrapper_struct_extra" ], + [ "WrapperStructFace", "structop_1_1_wrapper_struct_face.html", "structop_1_1_wrapper_struct_face" ], + [ "WrapperStructGui", "structop_1_1_wrapper_struct_gui.html", "structop_1_1_wrapper_struct_gui" ], + [ "WrapperStructHand", "structop_1_1_wrapper_struct_hand.html", "structop_1_1_wrapper_struct_hand" ], + [ "WrapperStructInput", "structop_1_1_wrapper_struct_input.html", "structop_1_1_wrapper_struct_input" ], + [ "WrapperStructOutput", "structop_1_1_wrapper_struct_output.html", "structop_1_1_wrapper_struct_output" ], + [ "WrapperStructPose", "structop_1_1_wrapper_struct_pose.html", "structop_1_1_wrapper_struct_pose" ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/array_8hpp.html b/web/html/doc/array_8hpp.html new file mode 100644 index 000000000..7e2285faa --- /dev/null +++ b/web/html/doc/array_8hpp.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/core/array.hpp File Reference + + + + + + + + + + + + + +
+ [Generated page: standard header, tagline, and navigation; page title and contents follow.]
+ array.hpp File Reference
+
+ #include <memory>
+ #include <vector>
+ #include <openpose/core/macros.hpp>
+ #include <openpose/core/matrix.hpp>
+ #include <openpose/utilities/errorAndLog.hpp>
+
+ Go to the source code of this file.
+
+ Classes
+     class op::Array< T >
+
+ Namespaces
+     op
+
+ + + + diff --git a/web/html/doc/array_8hpp_source.html b/web/html/doc/array_8hpp_source.html new file mode 100644 index 000000000..a19e3fb44 --- /dev/null +++ b/web/html/doc/array_8hpp_source.html @@ -0,0 +1,349 @@ + + + + + + + +OpenPose: include/openpose/core/array.hpp Source File + + + + + + + + + + + + + +
+ [Generated source page: standard header, tagline, and navigation; the annotated listing of array.hpp follows.]
+ array.hpp
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_ARRAY_HPP
+
2 #define OPENPOSE_CORE_ARRAY_HPP
+
3 
+
4 #include <memory> // std::shared_ptr
+
5 #include <vector>
+ + + +
9 
+
10 namespace op
+
11 {
+
20  template<typename T>
+
21  class Array
+
22  {
+
23  public:
+
24  // ------------------------------ Constructors and Data Allocator Functions ------------------------------ //
+
31  explicit Array(const int size);
+
32 
+
39  explicit Array(const std::vector<int>& sizes = {});
+
40 
+
48  Array(const int size, const T value);
+
49 
+
57  Array(const std::vector<int>& sizes, const T value);
+
58 
+
66  Array(const int size, T* const dataPtr);
+
67 
+
75  Array(const std::vector<int>& sizes, T* const dataPtr);
+
76 
+
89  Array(const Array<T>& array, const int index, const bool noCopy = false);
+
90 
+
95  template<typename T2>
+
96  Array(const Array<T2>& array) :
+
97  Array{array.getSize()}
+
98  {
+
99  try
+
100  {
+
101  // Copy
+
102  for (auto i = 0u ; i < array.getVolume() ; i++)
+
103  pData[i] = T(array[i]);
+
104  }
+
105  catch (const std::exception& e)
+
106  {
+
107  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
+
108  }
+
109  }
+
110 
+
119  Array<T>(const Array<T>& array);
+
120 
+
127  Array<T>& operator=(const Array<T>& array);
+
128 
+
134  Array<T>(Array<T>&& array);
+
135 
+ +
143 
+
151  Array<T> clone() const;
+
152 
+
159  void reset(const int size);
+
160 
+
168  void reset(const std::vector<int>& sizes = {});
+
169 
+
177  void reset(const int size, const T value);
+
178 
+
187  void reset(const std::vector<int>& sizes, const T value);
+
188 
+
196  void reset(const int size, T* const dataPtr);
+
197 
+
205  void reset(const std::vector<int>& sizes, T* const dataPtr);
+
206 
+
212  void setFrom(const Matrix& cvMat);
+
213 
+
219  void setTo(const T value);
+
220 
+
221 
+
222 
+
223  // ------------------------------ Data Information Functions ------------------------------ //
+
228  inline bool empty() const
+
229  {
+
230  return (mVolume == 0);
+
231  }
+
232 
+
238  inline std::vector<int> getSize() const
+
239  {
+
240  return mSize;
+
241  }
+
242 
+
249  int getSize(const int index) const;
+
250 
+
256  std::string printSize() const;
+
257 
+
262  inline size_t getNumberDimensions() const
+
263  {
+
264  return mSize.size();
+
265  }
+
266 
+
272  inline size_t getVolume() const
+
273  {
+
274  return mVolume;
+
275  }
+
276 
+
286  size_t getVolume(const int indexA, const int indexB = -1) const;
+
287 
+
293  std::vector<int> getStride() const;
+
294 
+
299  int getStride(const int index) const;
+
300 
+
301 
+
302 
+
303  // ------------------------------ Data Access Functions And Operators ------------------------------ //
+
310  inline T* getPtr()
+
311  {
+
312  return pData; // spData.get()
+
313  }
+
314 
+
319  inline const T* getConstPtr() const
+
320  {
+
321  return pData; // spData.get()
+
322  }
+
323 
+
329  inline T* getPseudoConstPtr() const
+
330  {
+
331  return pData; // spData.get()
+
332  }
+
333 
+
346  const Matrix& getConstCvMat() const;
+
347 
+ +
356 
+
365  inline T& operator[](const int index)
+
366  {
+
367  #ifdef NDEBUG
+
368  return pData[index]; // spData.get()[index]
+
369  #else
+
370  return at(index);
+
371  #endif
+
372  }
+
373 
+
381  inline const T& operator[](const int index) const
+
382  {
+
383  #ifdef NDEBUG
+
384  return pData[index]; // spData.get()[index]
+
385  #else
+
386  return at(index);
+
387  #endif
+
388  }
+
389 
+
398  inline T& operator[](const std::vector<int>& indexes)
+
399  {
+
400  return operator[](getIndex(indexes));
+
401  }
+
402 
+
410  inline const T& operator[](const std::vector<int>& indexes) const
+
411  {
+
412  return operator[](getIndex(indexes));
+
413  }
+
414 
+
422  inline T& at(const int index)
+
423  {
+
424  return commonAt(index);
+
425  }
+
426 
+
434  inline const T& at(const int index) const
+
435  {
+
436  return commonAt(index);
+
437  }
+
438 
+
446  inline T& at(const std::vector<int>& indexes)
+
447  {
+
448  return at(getIndexAndCheck(indexes));
+
449  }
+
450 
+
458  inline const T& at(const std::vector<int>& indexes) const
+
459  {
+
460  return at(getIndexAndCheck(indexes));
+
461  }
+
462 
+
475  const std::string toString() const;
+
476 
+
477  private:
+
478  std::vector<int> mSize;
+
479  size_t mVolume;
+
480  std::shared_ptr<T> spData;
+
481  T* pData; // pData is a wrapper of spData. Used for Pybind11 binding.
+
482  std::pair<bool, Matrix> mCvMatData;
+
483 
+
491  int getIndex(const std::vector<int>& indexes) const;
+
492 
+
500  int getIndexAndCheck(const std::vector<int>& indexes) const;
+
501 
+
507  T& commonAt(const int index) const;
+
508 
+
509  void resetAuxiliary(const std::vector<int>& sizes, T* const dataPtr = nullptr);
+
510  };
+
511 
+
512  // Static methods
+ +
514 }
+
515 
+
516 #endif // OPENPOSE_CORE_ARRAY_HPP
+ +
+ [Symbol tooltips generated by Doxygen: they repeat the op::Array member declarations above together with their definition locations (e.g. "Definition: array.hpp:381"), plus the linked op::error and OVERLOAD_C_OUT declarations from errorAndLog.hpp and macros.hpp.]
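For orientation, the following is a minimal usage sketch of the op::Array interface declared in the listing above. It assumes an OpenPose build is installed and linked (headers on the include path, library available) and that the usual float/double instantiations are compiled in; it is illustrative only and not part of the generated page.

    #include <iostream>
    #include <openpose/core/array.hpp>

    int main()
    {
        // 3x4x2 array initialized to zero, via Array(const std::vector<int>& sizes, const T value)
        op::Array<float> keypoints({3, 4, 2}, 0.f);

        // Element access: at() is always bounds-checked; operator[] skips the check in NDEBUG builds
        keypoints.at({0, 1, 1}) = 7.5f;
        keypoints[{0, 1, 1}] += 0.5f;

        // Size queries declared above
        std::cout << keypoints.printSize() << ", volume = " << keypoints.getVolume()
                  << ", dims = " << keypoints.getNumberDimensions() << std::endl;

        // Element-type conversion via the template copy constructor Array(const Array<T2>&)
        op::Array<double> keypointsDouble(keypoints);
        std::cout << keypointsDouble.toString() << std::endl;

        return 0;
    }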
+ + + + diff --git a/web/html/doc/array_cpu_gpu_8hpp.html b/web/html/doc/array_cpu_gpu_8hpp.html new file mode 100644 index 000000000..998d5b304 --- /dev/null +++ b/web/html/doc/array_cpu_gpu_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/core/arrayCpuGpu.hpp File Reference + + + + + + + + + + + + + +
+ [Generated page: standard header, tagline, and navigation; page title and contents follow.]
+ arrayCpuGpu.hpp File Reference
+
+ #include <memory>
+ #include <vector>
+ #include <openpose/core/array.hpp>
+ #include <openpose/core/macros.hpp>
+
+ Go to the source code of this file.
+
+ Classes
+     class op::ArrayCpuGpu< T >
+
+ Namespaces
+     op
+
+ + + + diff --git a/web/html/doc/array_cpu_gpu_8hpp_source.html b/web/html/doc/array_cpu_gpu_8hpp_source.html new file mode 100644 index 000000000..a4791d344 --- /dev/null +++ b/web/html/doc/array_cpu_gpu_8hpp_source.html @@ -0,0 +1,246 @@ + + + + + + + +OpenPose: include/openpose/core/arrayCpuGpu.hpp Source File + + + + + + + + + + + + + +
+ [Generated source page: standard header, tagline, and navigation; the annotated listing of arrayCpuGpu.hpp follows.]
+ arrayCpuGpu.hpp
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_ARRAY_CPU_GPU_HPP
+
2 #define OPENPOSE_CORE_ARRAY_CPU_GPU_HPP
+
3 
+
4 #include <memory> // std::shared_ptr
+
5 #include <vector>
+ + +
8 
+
9 namespace op
+
10 {
+
14  template<typename T>
+ +
16  {
+
17  public:
+ +
23  explicit ArrayCpuGpu(const void* caffeBlobTPtr);
+
30  explicit ArrayCpuGpu(const Array<T>& array, const bool copyFromGpu);
+
31  explicit ArrayCpuGpu(const int num, const int channels, const int height, const int width);
+
32  // explicit ArrayCpuGpu(const std::vector<int>& shape);
+
33 
+
34  void Reshape(const int num, const int channels, const int height, const int width);
+
35  void Reshape(const std::vector<int>& shape);
+
36  // // void Reshape(const BlobShape& shape);
+
37  // // void ReshapeLike(const Blob& other);
+
38  // void ReshapeLike(const ArrayCpuGpu& other);
+
39  std::string shape_string() const;
+
40  const std::vector<int>& shape() const;
+
41  int shape(const int index) const;
+
42  int num_axes() const;
+
43  int count() const;
+
44  int count(const int start_axis, const int end_axis) const;
+
45  int count(const int start_axis) const;
+
46 
+
47  int CanonicalAxisIndex(const int axis_index) const;
+
48 
+
49  int num() const;
+
50  int channels() const;
+
51  int height() const;
+
52  int width() const;
+
53  int LegacyShape(const int index) const;
+
54 
+
55  int offset(const int n, const int c = 0, const int h = 0, const int w = 0) const;
+
56  // int offset(const std::vector<int>& indices) const; // Caffe warning
+
57 
+
58  // // void CopyFrom(const Blob<T>& source, bool copy_diff = false, bool reshape = false);
+
59  // void CopyFrom(const ArrayCpuGpu<T>& source, bool copy_diff = false, bool reshape = false);
+
60 
+
61  T data_at(const int n, const int c, const int h, const int w) const;
+
62  T diff_at(const int n, const int c, const int h, const int w) const;
+
63  // T data_at(const std::vector<int>& index) const; // Caffe warning
+
64  // T diff_at(const std::vector<int>& index) const; // Caffe warning
+
65 
+
66  // const boost::shared_ptr<SyncedMemory>& data() const;
+
67  // const boost::shared_ptr<SyncedMemory>& diff() const;
+
68 
+
69  const T* cpu_data() const;
+
70  void set_cpu_data(T* data);
+
71  const int* gpu_shape() const;
+
72  const T* gpu_data() const;
+
73  void set_gpu_data(T* data);
+
74  const T* cpu_diff() const;
+
75  const T* gpu_diff() const;
+ + + + +
80  void Update();
+
81  // void FromProto(const BlobProto& proto, bool reshape = true);
+
82  // void ToProto(BlobProto* proto, bool write_diff = false) const;
+
83 
+
84  T asum_data() const;
+
85  T asum_diff() const;
+
86  T sumsq_data() const;
+
87  T sumsq_diff() const;
+
88 
+
89  void scale_data(const T scale_factor);
+
90  void scale_diff(const T scale_factor);
+
91 
+
92  // void ShareData(const Blob& other);
+
93  // void ShareDiff(const Blob& other);
+
94 
+
95  // bool ShapeEquals(const BlobProto& other);
+
96 
+
97  private:
+
98  // PIMPL idiom
+
99  // http://www.cppsamples.com/common-tasks/pimpl.html
+
100  struct ImplArrayCpuGpu;
+
101  std::shared_ptr<ImplArrayCpuGpu> spImpl;
+
102 
+
103  // PIMP requires DELETE_COPY & destructor, or extra code
+
104  // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+ +
106  };
+
107 
+
108  // // Static methods
+
109  // OVERLOAD_C_OUT(ArrayCpuGpu)
+
110 }
+
111 
+
112 #endif // OPENPOSE_CORE_ARRAY_CPU_GPU_HPP
+ + +
ArrayCpuGpu(const int num, const int channels, const int height, const int width)
+
void scale_diff(const T scale_factor)
+
void scale_data(const T scale_factor)
+
int height() const
+
void Reshape(const std::vector< int > &shape)
+
T sumsq_diff() const
+
const T * gpu_data() const
+
int channels() const
+
int shape(const int index) const
+
T diff_at(const int n, const int c, const int h, const int w) const
+
std::string shape_string() const
+
T data_at(const int n, const int c, const int h, const int w) const
+
void set_gpu_data(T *data)
+
ArrayCpuGpu(const void *caffeBlobTPtr)
+
int width() const
+
int count(const int start_axis) const
+
T asum_diff() const
+
T * mutable_gpu_diff()
+
T sumsq_data() const
+
T * mutable_cpu_data()
+
const int * gpu_shape() const
+
T asum_data() const
+
const T * cpu_data() const
+
int LegacyShape(const int index) const
+
void Reshape(const int num, const int channels, const int height, const int width)
+
const T * gpu_diff() const
+
int count() const
+
int num() const
+
T * mutable_cpu_diff()
+
T * mutable_gpu_data()
+
const T * cpu_diff() const
+
int count(const int start_axis, const int end_axis) const
+
ArrayCpuGpu(const Array< T > &array, const bool copyFromGpu)
+
void set_cpu_data(T *data)
+
int num_axes() const
+
int CanonicalAxisIndex(const int axis_index) const
+ +
int offset(const int n, const int c=0, const int h=0, const int w=0) const
+
const std::vector< int > & shape() const
#define DELETE_COPY(className)
Definition: macros.hpp:32
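The ArrayCpuGpu<T> interface above closely mirrors Caffe's Blob. A minimal usage sketch, assuming the header resolves to <openpose/core/arrayCpuGpu.hpp>, OpenPose is linked, and its float instantiation is used; the blob dimensions are arbitrary:

#include <string>
#include <openpose/core/arrayCpuGpu.hpp> // assumed include path

void fillBlobWithOnes()
{
    // 1 image, 3 channels, 368x368 pixels (placeholder sizes)
    op::ArrayCpuGpu<float> blob{1, 3, 368, 368};
    // Writable CPU-side view of the underlying data
    float* cpuPtr = blob.mutable_cpu_data();
    for (auto i = 0; i < blob.count(); i++)
        cpuPtr[i] = 1.f;
    // Human-readable shape description, e.g. "1 3 368 368 (406272)"
    const std::string shapeDescription = blob.shape_string();
    (void)shapeDescription;
}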
+ +
+
+ + + + diff --git a/web/html/doc/bc_s.png b/web/html/doc/bc_s.png new file mode 100644 index 000000000..224b29aa9 Binary files /dev/null and b/web/html/doc/bc_s.png differ diff --git a/web/html/doc/bdwn.png b/web/html/doc/bdwn.png new file mode 100644 index 000000000..940a0b950 Binary files /dev/null and b/web/html/doc/bdwn.png differ diff --git a/web/html/doc/body_part_connector_base_8hpp.html b/web/html/doc/body_part_connector_base_8hpp.html new file mode 100644 index 000000000..d28c9c963 --- /dev/null +++ b/web/html/doc/body_part_connector_base_8hpp.html @@ -0,0 +1,141 @@ + + + + + + + +OpenPose: include/openpose/net/bodyPartConnectorBase.hpp File Reference + + + + + + + + + + + + + +
bodyPartConnectorBase.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Functions

template<typename T >
void op::connectBodyPartsCpu (Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor=1.f, const bool maximizePositives=false)
 
template<typename T >
void op::connectBodyPartsGpu (Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor, const bool maximizePositives, Array< T > pairScoresCpu, T *pairScoresGpuPtr, const unsigned int *const bodyPartPairsGpuPtr, const unsigned int *const mapIdxGpuPtr, const T *const peaksGpuPtr)
 
template<typename T >
void op::connectBodyPartsOcl (Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor=1.f, const bool maximizePositives=false, Array< T > pairScoresCpu=Array< T >{}, T *pairScoresGpuPtr=nullptr, const unsigned int *const bodyPartPairsGpuPtr=nullptr, const unsigned int *const mapIdxGpuPtr=nullptr, const T *const peaksGpuPtr=nullptr, const int gpuID=0)
 
template<typename T >
std::vector< std::pair< std::vector< int >, T > > op::createPeopleVector (const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interThreshold, const T interMinAboveThreshold, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs, const T defaultNmsThreshold, const Array< T > &precomputedPAFs=Array< T >())
 
template<typename T >
void op::removePeopleBelowThresholdsAndFillFaces (std::vector< int > &validSubsetIndexes, int &numberPeople, std::vector< std::pair< std::vector< int >, T >> &subsets, const unsigned int numberBodyParts, const int minSubsetCnt, const T minSubsetScore, const bool maximizePositives, const T *const peaksPtr)
 
template<typename T >
void op::peopleVectorToPeopleArray (Array< T > &poseKeypoints, Array< T > &poseScores, const T scaleFactor, const std::vector< std::pair< std::vector< int >, T >> &subsets, const std::vector< int > &validSubsetIndexes, const T *const peaksPtr, const int numberPeople, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs)
 
template<typename T >
std::vector< std::tuple< T, T, int, int, int > > op::pafPtrIntoVector (const Array< T > &pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyPartPairs)
 
template<typename T >
std::vector< std::pair< std::vector< int >, T > > op::pafVectorIntoPeopleVector (const std::vector< std::tuple< T, T, int, int, int >> &pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyParts)
 
+
+
+ + + + diff --git a/web/html/doc/body_part_connector_base_8hpp.js b/web/html/doc/body_part_connector_base_8hpp.js new file mode 100644 index 000000000..ce70985dd --- /dev/null +++ b/web/html/doc/body_part_connector_base_8hpp.js @@ -0,0 +1,11 @@ +var body_part_connector_base_8hpp = +[ + [ "connectBodyPartsCpu", "body_part_connector_base_8hpp.html#a2ae13dae91c41b29063b48158ccbcc4e", null ], + [ "connectBodyPartsGpu", "body_part_connector_base_8hpp.html#a927468f6931ddb1e7d1e6e6e59b8bd36", null ], + [ "connectBodyPartsOcl", "body_part_connector_base_8hpp.html#a77a4d87bbee791dfba0667aa10bcca99", null ], + [ "createPeopleVector", "body_part_connector_base_8hpp.html#ae5d883da8c8f11356d5e1b61bc3a99b6", null ], + [ "pafPtrIntoVector", "body_part_connector_base_8hpp.html#aaec4a34b015f898d28be2b9f2aba0d38", null ], + [ "pafVectorIntoPeopleVector", "body_part_connector_base_8hpp.html#a36f0207c6263e7174f4c79eba7c4df3f", null ], + [ "peopleVectorToPeopleArray", "body_part_connector_base_8hpp.html#a3dbd17f2f656a2bc751441a42b5b9516", null ], + [ "removePeopleBelowThresholdsAndFillFaces", "body_part_connector_base_8hpp.html#ae01dd412590493f5f732594e8332d3f0", null ] +]; \ No newline at end of file diff --git a/web/html/doc/body_part_connector_base_8hpp_source.html b/web/html/doc/body_part_connector_base_8hpp_source.html new file mode 100644 index 000000000..9704ecd17 --- /dev/null +++ b/web/html/doc/body_part_connector_base_8hpp_source.html @@ -0,0 +1,184 @@ + + + + + + + +OpenPose: include/openpose/net/bodyPartConnectorBase.hpp Source File + + + + + + + + + + + + + +
bodyPartConnectorBase.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_POSE_BODY_PARTS_CONNECTOR_HPP
+
2 #define OPENPOSE_POSE_BODY_PARTS_CONNECTOR_HPP
+
3 
6 
+
7 namespace op
+
8 {
+
9  template <typename T>
+ +
11  Array<T>& poseKeypoints, Array<T>& poseScores, const T* const heatMapPtr, const T* const peaksPtr,
+
12  const PoseModel poseModel, const Point<int>& heatMapSize, const int maxPeaks, const T interMinAboveThreshold,
+
13  const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold,
+
14  const T scaleFactor = 1.f, const bool maximizePositives = false);
+
15 
+
16  // Windows: Cuda functions do not include OP_API
+
17  template <typename T>
+ +
19  Array<T>& poseKeypoints, Array<T>& poseScores, const T* const heatMapGpuPtr, const T* const peaksPtr,
+
20  const PoseModel poseModel, const Point<int>& heatMapSize, const int maxPeaks, const T interMinAboveThreshold,
+
21  const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold,
+
22  const T scaleFactor, const bool maximizePositives, Array<T> pairScoresCpu, T* pairScoresGpuPtr,
+
23  const unsigned int* const bodyPartPairsGpuPtr, const unsigned int* const mapIdxGpuPtr,
+
24  const T* const peaksGpuPtr);
+
25 
+
26  template <typename T>
+ +
28  Array<T>& poseKeypoints, Array<T>& poseScores, const T* const heatMapGpuPtr, const T* const peaksPtr,
+
29  const PoseModel poseModel, const Point<int>& heatMapSize, const int maxPeaks, const T interMinAboveThreshold,
+
30  const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold,
+
31  const T scaleFactor = 1.f, const bool maximizePositives = false,
+
32  Array<T> pairScoresCpu = Array<T>{}, T* pairScoresGpuPtr = nullptr,
+
33  const unsigned int* const bodyPartPairsGpuPtr = nullptr, const unsigned int* const mapIdxGpuPtr = nullptr,
+
34  const T* const peaksGpuPtr = nullptr, const int gpuID = 0);
+
35 
+
36  // Private functions used by the 2 above functions
+
37  template <typename T>
+
38  std::vector<std::pair<std::vector<int>, T>> createPeopleVector(
+
39  const T* const heatMapPtr, const T* const peaksPtr, const PoseModel poseModel, const Point<int>& heatMapSize,
+
40  const int maxPeaks, const T interThreshold, const T interMinAboveThreshold,
+
41  const std::vector<unsigned int>& bodyPartPairs, const unsigned int numberBodyParts,
+
42  const unsigned int numberBodyPartPairs, const T defaultNmsThreshold,
+
43  const Array<T>& precomputedPAFs = Array<T>());
+
44 
+
45  template <typename T>
+ +
47  std::vector<int>& validSubsetIndexes, int& numberPeople,
+
48  std::vector<std::pair<std::vector<int>, T>>& subsets, const unsigned int numberBodyParts,
+
49  const int minSubsetCnt, const T minSubsetScore, const bool maximizePositives, const T* const peaksPtr);
+
50 
+
51  template <typename T>
+ +
53  Array<T>& poseKeypoints, Array<T>& poseScores, const T scaleFactor,
+
54  const std::vector<std::pair<std::vector<int>, T>>& subsets, const std::vector<int>& validSubsetIndexes,
+
55  const T* const peaksPtr, const int numberPeople, const unsigned int numberBodyParts,
+
56  const unsigned int numberBodyPartPairs);
+
57 
+
58  template <typename T>
+
59  std::vector<std::tuple<T, T, int, int, int>> pafPtrIntoVector(
+
60  const Array<T>& pairScores, const T* const peaksPtr, const int maxPeaks,
+
61  const std::vector<unsigned int>& bodyPartPairs, const unsigned int numberBodyPartPairs);
+
62 
+
63  template <typename T>
+
64  std::vector<std::pair<std::vector<int>, T>> pafVectorIntoPeopleVector(
+
65  const std::vector<std::tuple<T, T, int, int, int>>& pairScores, const T* const peaksPtr, const int maxPeaks,
+
66  const std::vector<unsigned int>& bodyPartPairs, const unsigned int numberBodyParts);
+
67 }
+
68 
+
69 #endif // OPENPOSE_POSE_BODY_PARTS_CONNECTOR_HPP
void connectBodyPartsCpu(Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor=1.f, const bool maximizePositives=false)
+
std::vector< std::pair< std::vector< int >, T > > pafVectorIntoPeopleVector(const std::vector< std::tuple< T, T, int, int, int >> &pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyParts)
+
void peopleVectorToPeopleArray(Array< T > &poseKeypoints, Array< T > &poseScores, const T scaleFactor, const std::vector< std::pair< std::vector< int >, T >> &subsets, const std::vector< int > &validSubsetIndexes, const T *const peaksPtr, const int numberPeople, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs)
+
void connectBodyPartsOcl(Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor=1.f, const bool maximizePositives=false, Array< T > pairScoresCpu=Array< T >{}, T *pairScoresGpuPtr=nullptr, const unsigned int *const bodyPartPairsGpuPtr=nullptr, const unsigned int *const mapIdxGpuPtr=nullptr, const T *const peaksGpuPtr=nullptr, const int gpuID=0)
+
void connectBodyPartsGpu(Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor, const bool maximizePositives, Array< T > pairScoresCpu, T *pairScoresGpuPtr, const unsigned int *const bodyPartPairsGpuPtr, const unsigned int *const mapIdxGpuPtr, const T *const peaksGpuPtr)
+
std::vector< std::tuple< T, T, int, int, int > > pafPtrIntoVector(const Array< T > &pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyPartPairs)
+
void removePeopleBelowThresholdsAndFillFaces(std::vector< int > &validSubsetIndexes, int &numberPeople, std::vector< std::pair< std::vector< int >, T >> &subsets, const unsigned int numberBodyParts, const int minSubsetCnt, const T minSubsetScore, const bool maximizePositives, const T *const peaksPtr)
+
std::vector< std::pair< std::vector< int >, T > > createPeopleVector(const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interThreshold, const T interMinAboveThreshold, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs, const T defaultNmsThreshold, const Array< T > &precomputedPAFs=Array< T >())
+
PoseModel
Definition: enumClasses.hpp:10
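A hedged sketch of calling the CPU variant declared above. The heat map and peak buffers would normally come from the network output and NMS stages, and the threshold values are illustrative placeholders rather than the library defaults:

#include <openpose/net/bodyPartConnectorBase.hpp>

void sketchConnectBodyParts(
    const float* const heatMapPtr, const float* const peaksPtr,
    const op::Point<int>& heatMapSize, const int maxPeaks)
{
    op::Array<float> poseKeypoints;
    op::Array<float> poseScores;
    op::connectBodyPartsCpu(
        poseKeypoints, poseScores, heatMapPtr, peaksPtr,
        op::PoseModel::BODY_25, heatMapSize, maxPeaks,
        0.95f,  // interMinAboveThreshold (placeholder)
        0.05f,  // interThreshold (placeholder)
        3,      // minSubsetCnt (placeholder)
        0.4f,   // minSubsetScore (placeholder)
        0.05f); // defaultNmsThreshold; scaleFactor and maximizePositives keep their defaults
    // poseKeypoints is filled as a {people, bodyParts, 3} array of (x, y, score).
}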
+
+ + + + diff --git a/web/html/doc/body_part_connector_caffe_8hpp.html b/web/html/doc/body_part_connector_caffe_8hpp.html new file mode 100644 index 000000000..3966bfccf --- /dev/null +++ b/web/html/doc/body_part_connector_caffe_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/net/bodyPartConnectorCaffe.hpp File Reference + + + + + + + + + + + + + +
bodyPartConnectorCaffe.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::BodyPartConnectorCaffe< T >
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/body_part_connector_caffe_8hpp_source.html b/web/html/doc/body_part_connector_caffe_8hpp_source.html new file mode 100644 index 000000000..368becf99 --- /dev/null +++ b/web/html/doc/body_part_connector_caffe_8hpp_source.html @@ -0,0 +1,207 @@ + + + + + + + +OpenPose: include/openpose/net/bodyPartConnectorCaffe.hpp Source File + + + + + + + + + + + + + +
bodyPartConnectorCaffe.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_POSE_BODY_PART_CONNECTOR_CAFFE_HPP
+
2 #define OPENPOSE_POSE_BODY_PART_CONNECTOR_CAFFE_HPP
+
3 
6 
+
7 namespace op
+
8 {
+
9  // It mostly follows the Caffe::layer implementation, so Caffe users can easily use it. However, in order to keep
+
10  // the compatibility with any generic Caffe version, we keep this 'layer' inside our library rather than in the
+
11  // Caffe code.
+
12  template <typename T>
+ +
14  {
+
15  public:
+ +
17 
+ +
19 
+
20  virtual void Reshape(const std::vector<ArrayCpuGpu<T>*>& bottom, const int gpuID = 0);
+
21 
+
22  virtual inline const char* type() const { return "BodyPartConnector"; }
+
23 
+
24  void setPoseModel(const PoseModel poseModel);
+
25 
+
26  void setMaximizePositives(const bool maximizePositives);
+
27 
+
28  void setDefaultNmsThreshold(const T defaultNmsThreshold);
+
29 
+
30  void setInterMinAboveThreshold(const T interMinAboveThreshold);
+
31 
+
32  void setInterThreshold(const T interThreshold);
+
33 
+
34  void setMinSubsetCnt(const int minSubsetCnt);
+
35 
+
36  void setMinSubsetScore(const T minSubsetScore);
+
37 
+
38  void setScaleNetToOutput(const T scaleNetToOutput);
+
39 
+
40  virtual void Forward(const std::vector<ArrayCpuGpu<T>*>& bottom, Array<T>& poseKeypoints,
+
41  Array<T>& poseScores);
+
42 
+
43  virtual void Forward_cpu(const std::vector<ArrayCpuGpu<T>*>& bottom, Array<T>& poseKeypoints,
+
44  Array<T>& poseScores);
+
45 
+
46  virtual void Forward_gpu(const std::vector<ArrayCpuGpu<T>*>& bottom, Array<T>& poseKeypoints,
+
47  Array<T>& poseScores);
+
48 
+
49  virtual void Forward_ocl(const std::vector<ArrayCpuGpu<T>*>& bottom, Array<T>& poseKeypoints,
+
50  Array<T>& poseScores);
+
51 
+
52  virtual void Backward_cpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
+
53  const std::vector<ArrayCpuGpu<T>*>& bottom);
+
54 
+
55  virtual void Backward_gpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
+
56  const std::vector<ArrayCpuGpu<T>*>& bottom);
+
57 
+
58  private:
+
59  PoseModel mPoseModel;
+
60  bool mMaximizePositives;
+
61  T mDefaultNmsThreshold;
+
62  T mInterMinAboveThreshold;
+
63  T mInterThreshold;
+
64  int mMinSubsetCnt;
+
65  T mMinSubsetScore;
+
66  T mScaleNetToOutput;
+
67  std::array<int, 4> mHeatMapsSize;
+
68  std::array<int, 4> mPeaksSize;
+
69  std::array<int, 4> mTopSize;
+
70  // GPU auxiliary
+
71  unsigned int* pBodyPartPairsGpuPtr;
+
72  unsigned int* pMapIdxGpuPtr;
+
73  Array<T> mFinalOutputCpu;
+
74  T* pFinalOutputGpuPtr;
+
75  int mGpuID;
+
76 
+
77  DELETE_COPY(BodyPartConnectorCaffe);
+
78  };
+
79 }
+
80 
+
81 #endif // OPENPOSE_POSE_BODY_PART_CONNECTOR_CAFFE_HPP
virtual void Forward_cpu(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
+
void setScaleNetToOutput(const T scaleNetToOutput)
+
void setPoseModel(const PoseModel poseModel)
+
virtual void Forward_ocl(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
+
virtual void Forward(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
+
void setMinSubsetCnt(const int minSubsetCnt)
+
void setMaximizePositives(const bool maximizePositives)
+
void setInterThreshold(const T interThreshold)
+
void setInterMinAboveThreshold(const T interMinAboveThreshold)
+
virtual void Backward_cpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
+ +
void setMinSubsetScore(const T minSubsetScore)
+
virtual void Forward_gpu(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
+ +
virtual void Reshape(const std::vector< ArrayCpuGpu< T > * > &bottom, const int gpuID=0)
+
virtual void Backward_gpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
+
virtual const char * type() const
+
void setDefaultNmsThreshold(const T defaultNmsThreshold)
PoseModel
Definition: enumClasses.hpp:10
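A sketch of wiring the layer up. It assumes the class is default-constructible (its constructor line is elided in the listing above), that the bottom blobs (heat maps and peaks) come from earlier pipeline stages, and that the threshold values are placeholders:

#include <vector>
#include <openpose/net/bodyPartConnectorCaffe.hpp>

void sketchBodyPartConnector(const std::vector<op::ArrayCpuGpu<float>*>& bottom)
{
    op::BodyPartConnectorCaffe<float> connector; // assumed default constructor
    connector.setPoseModel(op::PoseModel::BODY_25);
    connector.setMaximizePositives(false);
    connector.setDefaultNmsThreshold(0.05f);
    connector.setInterMinAboveThreshold(0.95f);
    connector.setInterThreshold(0.05f);
    connector.setMinSubsetCnt(3);
    connector.setMinSubsetScore(0.4f);
    connector.setScaleNetToOutput(1.f);
    connector.Reshape(bottom);  // gpuID defaults to 0

    op::Array<float> poseKeypoints, poseScores;
    connector.Forward(bottom, poseKeypoints, poseScores);
    // Forward_cpu, Forward_gpu and Forward_ocl can also be called directly.
}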
+ +
+
+ + + + diff --git a/web/html/doc/bvh_saver_8hpp.html b/web/html/doc/bvh_saver_8hpp.html new file mode 100644 index 000000000..ab4ee43b3 --- /dev/null +++ b/web/html/doc/bvh_saver_8hpp.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: include/openpose/filestream/bvhSaver.hpp File Reference + + + + + + + + + + + + + +
bvhSaver.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/bvh_saver_8hpp_source.html b/web/html/doc/bvh_saver_8hpp_source.html new file mode 100644 index 000000000..99aab5af3 --- /dev/null +++ b/web/html/doc/bvh_saver_8hpp_source.html @@ -0,0 +1,146 @@ + + + + + + + +OpenPose: include/openpose/filestream/bvhSaver.hpp Source File + + + + + + + + + + + + + +
bvhSaver.hpp
+
+
+Go to the documentation of this file.
1 #ifdef USE_3D_ADAM_MODEL
+
2 #ifndef OPENPOSE_FILESTREAM_BVH_SAVER_HPP
+
3 #define OPENPOSE_FILESTREAM_BVH_SAVER_HPP
+
4 
+
5 #ifdef USE_3D_ADAM_MODEL
+
6  #include <adam/totalmodel.h>
+
7 #endif
+ +
9 
+
10 namespace op
+
11 {
+
12  class OP_API BvhSaver
+
13  {
+
14  public:
+
15  BvhSaver(const std::string bvhFilePath,
+
16  const std::shared_ptr<const TotalModel>& totalModel = nullptr,
+
17  const double fps = 30.);
+
18 
+
19  virtual ~BvhSaver();
+
20 
+
21  void initializationOnThread();
+
22 
+
23  void updateBvh(const Eigen::Matrix<double, 62, 3, Eigen::RowMajor>& adamPose,
+
24  const Eigen::Vector3d& adamTranslation,
+
25  const Eigen::Matrix<double, Eigen::Dynamic, 1>& j0Vec);
+
26 
+
27 
+
28  private:
+
29  // PIMPL idiom
+
30  // http://www.cppsamples.com/common-tasks/pimpl.html
+
31  struct ImplBvhSaver;
+
32  std::shared_ptr<ImplBvhSaver> spImpl;
+
33 
+
34  // PIMP requires DELETE_COPY & destructor, or extra code
+
35  // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+
36  DELETE_COPY(BvhSaver);
+
37  };
+
38 }
+
39 
+
40 #endif // OPENPOSE_FILESTREAM_BVH_SAVER_HPP
+
41 #endif
+ +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
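BvhSaver is only compiled when OpenPose is built with USE_3D_ADAM_MODEL. A heavily hedged sketch of the intended call sequence, with the Adam pose inputs assumed to come from the 3-D fitting stage and the output path chosen arbitrarily:

#include <openpose/filestream/bvhSaver.hpp>

void sketchBvhExport(
    const Eigen::Matrix<double, 62, 3, Eigen::RowMajor>& adamPose,
    const Eigen::Vector3d& adamTranslation,
    const Eigen::Matrix<double, Eigen::Dynamic, 1>& j0Vec)
{
    // Output path and fps are placeholders; totalModel keeps its default (nullptr).
    op::BvhSaver bvhSaver{"output.bvh", nullptr, 30.};
    bvhSaver.initializationOnThread();
    // Typically called once per processed frame.
    bvhSaver.updateBvh(adamPose, adamTranslation, j0Vec);
}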
+ +
+
+ + + + diff --git a/web/html/doc/calibration_2headers_8hpp.html b/web/html/doc/calibration_2headers_8hpp.html new file mode 100644 index 000000000..dc5d3a2ed --- /dev/null +++ b/web/html/doc/calibration_2headers_8hpp.html @@ -0,0 +1,104 @@ + + + + + + + +OpenPose: include/openpose/calibration/headers.hpp File Reference + + + + + + + + + + + + + +
headers.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/calibration_2headers_8hpp_source.html b/web/html/doc/calibration_2headers_8hpp_source.html new file mode 100644 index 000000000..f674b0ee4 --- /dev/null +++ b/web/html/doc/calibration_2headers_8hpp_source.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: include/openpose/calibration/headers.hpp Source File + + + + + + + + + + + + + +
headers.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CALIBRATION_HEADERS_HPP
+
2 #define OPENPOSE_CALIBRATION_HEADERS_HPP
+
3 
+
4 // calibration module
+ +
6 
+
7 #endif // OPENPOSE_CALIBRATION_HEADERS_HPP
+ +
+
+ + + + diff --git a/web/html/doc/calibration__module_8md.html b/web/html/doc/calibration__module_8md.html new file mode 100644 index 000000000..8d2c01a7d --- /dev/null +++ b/web/html/doc/calibration__module_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/advanced/calibration_module.md File Reference + + + + + + + + + + + + + +
doc/advanced/calibration_module.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/camera_parameter_estimation_8hpp.html b/web/html/doc/camera_parameter_estimation_8hpp.html new file mode 100644 index 000000000..e7be92ef7 --- /dev/null +++ b/web/html/doc/camera_parameter_estimation_8hpp.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: include/openpose/calibration/cameraParameterEstimation.hpp File Reference + + + + + + + + + + + + + +
cameraParameterEstimation.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + +

+Functions

OP_API void op::estimateAndSaveIntrinsics (const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int flags, const std::string &outputParameterFolder, const std::string &imageFolder, const std::string &serialNumber, const bool saveImagesWithCorners=false)
 
OP_API void op::estimateAndSaveExtrinsics (const std::string &parameterFolder, const std::string &imageFolder, const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int index0, const int index1, const bool imagesAreUndistorted, const bool combineCam0Extrinsics)
 
OP_API void op::refineAndSaveExtrinsics (const std::string &parameterFolder, const std::string &imageFolder, const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int numberCameras, const bool imagesAreUndistorted, const bool saveImagesWithCorners=false)
 
OP_API void op::estimateAndSaveSiftFile (const Point< int > &gridInnerCorners, const std::string &imageFolder, const int numberCameras, const bool saveImagesWithCorners=false)
 
+
+
+ + + + diff --git a/web/html/doc/camera_parameter_estimation_8hpp.js b/web/html/doc/camera_parameter_estimation_8hpp.js new file mode 100644 index 000000000..227e7494f --- /dev/null +++ b/web/html/doc/camera_parameter_estimation_8hpp.js @@ -0,0 +1,7 @@ +var camera_parameter_estimation_8hpp = +[ + [ "estimateAndSaveExtrinsics", "camera_parameter_estimation_8hpp.html#aed964859fbd282bd29f2b818a3bf10dd", null ], + [ "estimateAndSaveIntrinsics", "camera_parameter_estimation_8hpp.html#a1fd317d44606181c63ef8a4e5676a09e", null ], + [ "estimateAndSaveSiftFile", "camera_parameter_estimation_8hpp.html#a37cdfa8dd466c3df9e7da5724a909143", null ], + [ "refineAndSaveExtrinsics", "camera_parameter_estimation_8hpp.html#a50526c188f2ba94b07e0945c0871fd2c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/camera_parameter_estimation_8hpp_source.html b/web/html/doc/camera_parameter_estimation_8hpp_source.html new file mode 100644 index 000000000..8503837ea --- /dev/null +++ b/web/html/doc/camera_parameter_estimation_8hpp_source.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: include/openpose/calibration/cameraParameterEstimation.hpp Source File + + + + + + + + + + + + + +
cameraParameterEstimation.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CALIBRATION_CAMERA_PARAMETER_ESTIMATION_HPP
+
2 #define OPENPOSE_CALIBRATION_CAMERA_PARAMETER_ESTIMATION_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+ +
17  const Point<int>& gridInnerCorners, const float gridSquareSizeMm, const int flags,
+
18  const std::string& outputParameterFolder, const std::string& imageFolder, const std::string& serialNumber,
+
19  const bool saveImagesWithCorners = false);
+
20 
+ +
22  const std::string& parameterFolder, const std::string& imageFolder, const Point<int>& gridInnerCorners,
+
23  const float gridSquareSizeMm, const int index0, const int index1, const bool imagesAreUndistorted,
+
24  const bool combineCam0Extrinsics);
+
25 
+ +
27  const std::string& parameterFolder, const std::string& imageFolder, const Point<int>& gridInnerCorners,
+
28  const float gridSquareSizeMm, const int numberCameras, const bool imagesAreUndistorted,
+
29  const bool saveImagesWithCorners = false);
+
30 
+ +
32  const Point<int>& gridInnerCorners, const std::string& imageFolder, const int numberCameras,
+
33  const bool saveImagesWithCorners = false);
+
34 }
+
35 
+
36 #endif // OPENPOSE_CALIBRATION_CAMERA_PARAMETER_ESTIMATION_HPP
+ +
#define OP_API
Definition: macros.hpp:18
+ +
OP_API void estimateAndSaveIntrinsics(const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int flags, const std::string &outputParameterFolder, const std::string &imageFolder, const std::string &serialNumber, const bool saveImagesWithCorners=false)
+
OP_API void estimateAndSaveSiftFile(const Point< int > &gridInnerCorners, const std::string &imageFolder, const int numberCameras, const bool saveImagesWithCorners=false)
+
OP_API void refineAndSaveExtrinsics(const std::string &parameterFolder, const std::string &imageFolder, const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int numberCameras, const bool imagesAreUndistorted, const bool saveImagesWithCorners=false)
+
OP_API void estimateAndSaveExtrinsics(const std::string &parameterFolder, const std::string &imageFolder, const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int index0, const int index1, const bool imagesAreUndistorted, const bool combineCam0Extrinsics)
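An illustrative call to the intrinsic-calibration routine declared above; the folder paths, serial number, and the 9x6 chessboard with 40 mm squares are placeholders:

#include <openpose/calibration/cameraParameterEstimation.hpp>

void sketchIntrinsicCalibration()
{
    op::estimateAndSaveIntrinsics(
        op::Point<int>{9, 6},        // inner corners of the calibration grid
        40.f,                        // grid square size in mm
        0,                           // calibration flags (placeholder)
        "output/cameraParameters/",  // output parameter folder (placeholder)
        "calibration_images/",       // folder with the chessboard images (placeholder)
        "18079958",                  // camera serial number (placeholder)
        false);                      // saveImagesWithCorners
}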
+ +
+
+ + + + diff --git a/web/html/doc/camera_parameter_reader_8hpp.html b/web/html/doc/camera_parameter_reader_8hpp.html new file mode 100644 index 000000000..3c813834a --- /dev/null +++ b/web/html/doc/camera_parameter_reader_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/3d/cameraParameterReader.hpp File Reference + + + + + + + + + + + + + +
cameraParameterReader.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::CameraParameterReader
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/camera_parameter_reader_8hpp_source.html b/web/html/doc/camera_parameter_reader_8hpp_source.html new file mode 100644 index 000000000..1a4beb774 --- /dev/null +++ b/web/html/doc/camera_parameter_reader_8hpp_source.html @@ -0,0 +1,186 @@ + + + + + + + +OpenPose: include/openpose/3d/cameraParameterReader.hpp Source File + + + + + + + + + + + + + +
cameraParameterReader.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_3D_CAMERA_PARAMETER_READER_HPP
+
2 #define OPENPOSE_3D_CAMERA_PARAMETER_READER_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+ +
9  {
+
10  public:
+ +
12 
+ +
14 
+
15  // cameraExtrinsics is optional
+
16  explicit CameraParameterReader(const std::string& serialNumber,
+
17  const Matrix& cameraIntrinsics,
+
18  const Matrix& cameraDistortion,
+
19  const Matrix& cameraExtrinsics = Matrix(),
+
20  const Matrix& cameraExtrinsicsInitial = Matrix());
+
21 
+
22  // serialNumbers is optional. If empty, it will load all the XML files available in the
+
23  // cameraParameterPath folder
+
24  void readParameters(const std::string& cameraParameterPath,
+
25  const std::vector<std::string>& serialNumbers = {});
+
26 
+
27  // It simply calls the previous readParameters with a single element
+
28  void readParameters(const std::string& cameraParameterPath,
+
29  const std::string& serialNumber);
+
30 
+
31  void writeParameters(const std::string& cameraParameterPath) const;
+
32 
+
33  unsigned long long getNumberCameras() const;
+
34 
+
35  const std::vector<std::string>& getCameraSerialNumbers() const;
+
36 
+
37  const std::vector<Matrix>& getCameraMatrices() const;
+
38 
+
39  const std::vector<Matrix>& getCameraDistortions() const;
+
40 
+
41  const std::vector<Matrix>& getCameraIntrinsics() const;
+
42 
+
43  const std::vector<Matrix>& getCameraExtrinsics() const;
+
44 
+
45  const std::vector<Matrix>& getCameraExtrinsicsInitial() const;
+
46 
+
47  bool getUndistortImage() const;
+
48 
+
49  void setUndistortImage(const bool undistortImage);
+
50 
+
51  void undistort(Matrix& frame, const unsigned int cameraIndex = 0u);
+
52 
+
53  private:
+
54  // PIMPL idiom
+
55  // http://www.cppsamples.com/common-tasks/pimpl.html
+
56  struct ImplCameraParameterReader;
+
57  std::shared_ptr<ImplCameraParameterReader> spImpl;
+
58 
+ +
60  };
+
61 }
+
62 
+
63 #endif // OPENPOSE_3D_CAMERA_PARAMETER_READER_HPP
+ +
void readParameters(const std::string &cameraParameterPath, const std::string &serialNumber)
+
bool getUndistortImage() const
+
const std::vector< Matrix > & getCameraIntrinsics() const
+
void writeParameters(const std::string &cameraParameterPath) const
+
const std::vector< Matrix > & getCameraExtrinsics() const
+
const std::vector< Matrix > & getCameraExtrinsicsInitial() const
+
unsigned long long getNumberCameras() const
+
const std::vector< Matrix > & getCameraDistortions() const
+
void readParameters(const std::string &cameraParameterPath, const std::vector< std::string > &serialNumbers={})
+
const std::vector< Matrix > & getCameraMatrices() const
+
CameraParameterReader(const std::string &serialNumber, const Matrix &cameraIntrinsics, const Matrix &cameraDistortion, const Matrix &cameraExtrinsics=Matrix(), const Matrix &cameraExtrinsicsInitial=Matrix())
+ +
const std::vector< std::string > & getCameraSerialNumbers() const
+ +
void setUndistortImage(const bool undistortImage)
+
void undistort(Matrix &frame, const unsigned int cameraIndex=0u)
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
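A minimal sketch of the reader: it loads every XML file found in the parameter folder (the path is a placeholder), queries a few getters, and undistorts a frame from the first camera. It assumes the default constructor that the full header provides (the corresponding line is elided in the listing above):

#include <openpose/3d/cameraParameterReader.hpp>

void sketchCameraParameters(op::Matrix& frame)
{
    op::CameraParameterReader reader;                        // assumed default constructor
    reader.readParameters("models/cameraParameters/flir/");  // placeholder folder
    const auto numberCameras = reader.getNumberCameras();
    const auto& intrinsics = reader.getCameraIntrinsics();
    const auto& extrinsics = reader.getCameraExtrinsics();
    reader.setUndistortImage(true);
    reader.undistort(frame, 0u);                             // camera index 0
    (void)numberCameras; (void)intrinsics; (void)extrinsics;
}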
+ +
+
+ + + + diff --git a/web/html/doc/check_8hpp.html b/web/html/doc/check_8hpp.html new file mode 100644 index 000000000..09d1f132b --- /dev/null +++ b/web/html/doc/check_8hpp.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: include/openpose/utilities/check.hpp File Reference + + + + + + + + + + + + + +
check.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + + + + + + + + + + +

+Functions

template<typename T >
void op::checkBool (const bool condition, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T , typename T1 , typename T2 >
void op::checkEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T , typename T1 , typename T2 >
void op::checkNotEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T , typename T1 , typename T2 >
void op::checkLessOrEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T , typename T1 , typename T2 >
void op::checkLessThan (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T , typename T1 , typename T2 >
void op::checkGreaterOrEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T , typename T1 , typename T2 >
void op::checkGreaterThan (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
 
+
+
+ + + + diff --git a/web/html/doc/check_8hpp.js b/web/html/doc/check_8hpp.js new file mode 100644 index 000000000..c92f36225 --- /dev/null +++ b/web/html/doc/check_8hpp.js @@ -0,0 +1,10 @@ +var check_8hpp = +[ + [ "checkBool", "check_8hpp.html#a410201fcc46274e24726c5a601bc1721", null ], + [ "checkEqual", "check_8hpp.html#aaff52f436911aa17bebb999cd91a44fd", null ], + [ "checkGreaterOrEqual", "check_8hpp.html#a92e8cd01741c90fbfdfaa33a13803f34", null ], + [ "checkGreaterThan", "check_8hpp.html#a3dd874d4341b99431819f9fa6b678ca9", null ], + [ "checkLessOrEqual", "check_8hpp.html#a7ecfc02dca25534a071acf3136ff175e", null ], + [ "checkLessThan", "check_8hpp.html#a1e71130dc8f280e4664c711128b18b42", null ], + [ "checkNotEqual", "check_8hpp.html#aaada2594361f6f929af5b1f9d50387eb", null ] +]; \ No newline at end of file diff --git a/web/html/doc/check_8hpp_source.html b/web/html/doc/check_8hpp_source.html new file mode 100644 index 000000000..08977194d --- /dev/null +++ b/web/html/doc/check_8hpp_source.html @@ -0,0 +1,191 @@ + + + + + + + +OpenPose: include/openpose/utilities/check.hpp Source File + + + + + + + + + + + + + +
check.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_UTILITIES_CHECK_HPP
+
2 #define OPENPOSE_UTILITIES_CHECK_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+
8  // CHECK, CHECK_EQ, CHECK_NE, CHECK_LE, CHECK_LT, CHECK_GE, and CHECK_GT
+
9  template<typename T>
+
10  void checkBool(
+
11  const bool condition, const T& message = "", const int line = -1, const std::string& function = "",
+
12  const std::string& file = "")
+
13  {
+
14  if (!condition)
+
15  error("Check failed: " + tToString(message), line, function, file);
+
16  }
+
17 
+
18  template<typename T, typename T1, typename T2>
+
19  void checkEqual(
+
20  const T1& conditionA, const T2& conditionB, const T& message = "", const int line = -1,
+
21  const std::string& function = "", const std::string& file = "")
+
22  {
+
23  if (conditionA != conditionB)
+
24  error("CheckE failed (" + tToString(conditionA) + " vs. " + tToString(conditionB) + "): "
+
25  + tToString(message), line, function, file);
+
26  }
+
27 
+
28  template<typename T, typename T1, typename T2>
+ +
30  const T1& conditionA, const T2& conditionB, const T& message = "", const int line = -1,
+
31  const std::string& function = "", const std::string& file = "")
+
32  {
+
33  if (conditionA == conditionB)
+
34  error("CheckNE failed (" + tToString(conditionA) + " vs. " + tToString(conditionB) + "): "
+
35  + tToString(message), line, function, file);
+
36  }
+
37 
+
38  template<typename T, typename T1, typename T2>
+ +
40  const T1& conditionA, const T2& conditionB, const T& message = "", const int line = -1,
+
41  const std::string& function = "", const std::string& file = "")
+
42  {
+
43  if (conditionA > conditionB)
+
44  error("CheckLE failed (" + tToString(conditionA) + " vs. " + tToString(conditionB) + "): "
+
45  + tToString(message), line, function, file);
+
46  }
+
47 
+
48  template<typename T, typename T1, typename T2>
+ +
50  const T1& conditionA, const T2& conditionB, const T& message = "", const int line = -1,
+
51  const std::string& function = "", const std::string& file = "")
+
52  {
+
53  if (conditionA >= conditionB)
+
54  error("CheckLT failed (" + tToString(conditionA) + " vs. " + tToString(conditionB) + "): "
+
55  + tToString(message), line, function, file);
+
56  }
+
57 
+
58  template<typename T, typename T1, typename T2>
+ +
60  const T1& conditionA, const T2& conditionB, const T& message = "", const int line = -1,
+
61  const std::string& function = "", const std::string& file = "")
+
62  {
+
63  if (conditionA < conditionB)
+
64  error("CheckGE failed (" + tToString(conditionA) + " vs. " + tToString(conditionB) + "): "
+
65  + tToString(message), line, function, file);
+
66  }
+
67 
+
68  template<typename T, typename T1, typename T2>
+ +
70  const T1& conditionA, const T2& conditionB, const T& message = "", const int line = -1,
+
71  const std::string& function = "", const std::string& file = "")
+
72  {
+
73  if (conditionA <= conditionB)
+
74  error("CheckGT failed (" + tToString(conditionA) + " vs. " + tToString(conditionB) + "): "
+
75  + tToString(message), line, function, file);
+
76  }
+
77 }
+
78 
+
79 #endif // OPENPOSE_UTILITIES_CHECK_HPP
void checkLessThan(const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
Definition: check.hpp:49
+
void checkGreaterThan(const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
Definition: check.hpp:69
+
void checkBool(const bool condition, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
Definition: check.hpp:10
+
OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
+
void checkLessOrEqual(const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
Definition: check.hpp:39
+
void checkGreaterOrEqual(const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
Definition: check.hpp:59
+
void checkNotEqual(const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
Definition: check.hpp:29
+
void checkEqual(const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
Definition: check.hpp:19
+
std::string tToString(const T &message)
Definition: errorAndLog.hpp:21
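A short sketch of the checks above; they throw through op::error() when the condition fails, and the message, line, function and file arguments are all optional:

#include <cstddef>
#include <vector>
#include <openpose/utilities/check.hpp>

void sketchChecks(const std::vector<float>& keypoints, const std::size_t expectedSize)
{
    // Fails (and throws) if the condition is false.
    op::checkBool(!keypoints.empty(), "Keypoints vector is empty.",
                  __LINE__, __FUNCTION__, __FILE__);
    // Fails if the two values differ; both values are included in the error message.
    op::checkEqual(keypoints.size(), expectedSize, "Unexpected keypoint count.",
                   __LINE__, __FUNCTION__, __FILE__);
    // Fails unless conditionA > conditionB.
    op::checkGreaterThan(expectedSize, std::size_t{0}, "Expected size must be positive.",
                         __LINE__, __FUNCTION__, __FILE__);
}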
+
+
+ + + + diff --git a/web/html/doc/classes.html b/web/html/doc/classes.html new file mode 100644 index 000000000..640c00ee4 --- /dev/null +++ b/web/html/doc/classes.html @@ -0,0 +1,166 @@ + + + + + + + +OpenPose: Class Index + + + + + + + + + + + + + +
Class Index
+
+
+
A | B | C | D | F | G | H | I | J | K | M | N | O | P | Q | R | S | T | U | V | W
+
+
+
A
+
Array (op)
ArrayCpuGpu (op)
+
+
B
+
BodyPartConnectorCaffe (op)
+
+
C
+
CameraParameterReader (op)
CocoJsonSaver (op)
CvMatToOpInput (op)
CvMatToOpOutput (op)
+
+
D
+
Datum (op)
DatumProducer (op)
+
+
F
+
FaceCpuRenderer (op)
FaceDetector (op)
FaceDetectorOpenCV (op)
FaceExtractorCaffe (op)
FaceExtractorNet (op)
FaceGpuRenderer (op)
FaceRenderer (op)
FileSaver (op)
FlirReader (op)
FrameDisplayer (op)
+
+
G
+
GpuRenderer (op)
Gui (op)
Gui3D (op)
GuiInfoAdder (op)
+
+
H
+
HandCpuRenderer (op)
HandDetector (op)
HandDetectorFromTxt (op)
HandExtractorCaffe (op)
HandExtractorNet (op)
HandGpuRenderer (op)
HandRenderer (op)
HeatMapSaver (op)
+
+
I
+
ImageDirectoryReader (op)
ImageSaver (op)
IpCameraReader (op)
+
+
J
+
JsonOfstream (op)
+
+
K
+
KeepTopNPeople (op)
KeypointSaver (op)
KeypointScaler (op)
+
+
M
+
Matrix (op)
MaximumCaffe (op)
+
+
N
+
Net (op)
NetCaffe (op)
NetOpenCv (op)
NmsCaffe (op)
+
+
O
+
OpOutputToCvMat (op)
+
+
P
+
PeopleJsonSaver (op)
PersonIdExtractor (op)
PersonTracker (op)
Point (op)
PointerContainerGreater (op)
PointerContainerLess (op)
PoseCpuRenderer (op)
PoseExtractor (op)
PoseExtractorCaffe (op)
PoseExtractorNet (op)
PoseGpuRenderer (op)
PoseRenderer (op)
PoseTriangulation (op)
PriorityQueue (op)
Producer (op)
Profiler (op)
+
+
Q
+
Queue (op)
QueueBase (op)
+
+
R
+
Rectangle (op)
Renderer (op)
ResizeAndMergeCaffe (op)
+
+
S
+
ScaleAndSizeExtractor (op)
SpinnakerWrapper (op)
String (op)
SubThread (op)
SubThreadNoQueue (op)
SubThreadQueueIn (op)
SubThreadQueueInOut (op)
SubThreadQueueOut (op)
+
+
T
+
Thread (op)
ThreadManager (op)
+
+
U
+
UdpSender (op)
+
+
V
+
VerbosePrinter (op)
VideoCaptureReader (op)
VideoReader (op)
VideoSaver (op)
+
+
W
+
WCocoJsonSaver (op)
WCvMatToOpInput (op)
WCvMatToOpOutput (op)
WDatumProducer (op)
WebcamReader (op)
WFaceDetector (op)
WFaceDetectorOpenCV (op)
WFaceExtractorNet (op)
WFaceRenderer (op)
WFaceSaver (op)
WFpsMax (op)
WGui (op)
WGui3D (op)
WGuiInfoAdder (op)
WHandDetector (op)
WHandDetectorFromTxt (op)
WHandDetectorTracking (op)
WHandDetectorUpdate (op)
WHandExtractorNet (op)
WHandRenderer (op)
WHandSaver (op)
WHeatMapSaver (op)
WIdGenerator (op)
WImageSaver (op)
WKeepTopNPeople (op)
WKeypointScaler (op)
WOpOutputToCvMat (op)
Worker (op)
WorkerConsumer (op)
WorkerProducer (op)
WPeopleJsonSaver (op)
WPersonIdExtractor (op)
WPoseExtractor (op)
WPoseExtractorNet (op)
WPoseRenderer (op)
WPoseSaver (op)
WPoseTriangulation (op)
WQueueAssembler (op)
WQueueOrderer (op)
WrapperStructExtra (op)
WrapperStructFace (op)
WrapperStructGui (op)
WrapperStructHand (op)
WrapperStructInput (op)
WrapperStructOutput (op)
WrapperStructPose (op)
WrapperT (op)
WScaleAndSizeExtractor (op)
WUdpSender (op)
WVerbosePrinter (op)
WVideoSaver (op)
WVideoSaver3D (op)
+
+
+
+ + + + diff --git a/web/html/doc/classop_1_1_array-members.html b/web/html/doc/classop_1_1_array-members.html new file mode 100644 index 000000000..d8a1caf83 --- /dev/null +++ b/web/html/doc/classop_1_1_array-members.html @@ -0,0 +1,147 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::Array< T > Member List
+
+
+ +

This is the complete list of members for op::Array< T >, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Array(const int size)op::Array< T >explicit
Array(const std::vector< int > &sizes={})op::Array< T >explicit
Array(const int size, const T value)op::Array< T >
Array(const std::vector< int > &sizes, const T value)op::Array< T >
Array(const int size, T *const dataPtr)op::Array< T >
Array(const std::vector< int > &sizes, T *const dataPtr)op::Array< T >
Array(const Array< T > &array, const int index, const bool noCopy=false)op::Array< T >
Array(const Array< T2 > &array)op::Array< T >inline
Array(const Array< T > &array)op::Array< T >
Array(Array< T > &&array)op::Array< T >
at(const int index)op::Array< T >inline
at(const int index) constop::Array< T >inline
at(const std::vector< int > &indexes)op::Array< T >inline
at(const std::vector< int > &indexes) constop::Array< T >inline
clone() constop::Array< T >
empty() constop::Array< T >inline
getConstCvMat() constop::Array< T >
getConstPtr() constop::Array< T >inline
getCvMat()op::Array< T >
getNumberDimensions() constop::Array< T >inline
getPseudoConstPtr() constop::Array< T >inline
getPtr()op::Array< T >inline
getSize() constop::Array< T >inline
getSize(const int index) constop::Array< T >
getStride() constop::Array< T >
getStride(const int index) constop::Array< T >
getVolume() constop::Array< T >inline
getVolume(const int indexA, const int indexB=-1) constop::Array< T >
operator=(const Array< T > &array)op::Array< T >
operator=(Array< T > &&array)op::Array< T >
operator[](const int index)op::Array< T >inline
operator[](const int index) constop::Array< T >inline
operator[](const std::vector< int > &indexes)op::Array< T >inline
operator[](const std::vector< int > &indexes) constop::Array< T >inline
printSize() constop::Array< T >
reset(const int size)op::Array< T >
reset(const std::vector< int > &sizes={})op::Array< T >
reset(const int size, const T value)op::Array< T >
reset(const std::vector< int > &sizes, const T value)op::Array< T >
reset(const int size, T *const dataPtr)op::Array< T >
reset(const std::vector< int > &sizes, T *const dataPtr)op::Array< T >
setFrom(const Matrix &cvMat)op::Array< T >
setTo(const T value)op::Array< T >
toString() constop::Array< T >
+
+ + + + diff --git a/web/html/doc/classop_1_1_array.html b/web/html/doc/classop_1_1_array.html new file mode 100644 index 000000000..bf5aecdcf --- /dev/null +++ b/web/html/doc/classop_1_1_array.html @@ -0,0 +1,1586 @@ + + + + + + + +OpenPose: op::Array< T > Class Template Reference + + + + + + + + + + + + + +
op::Array< T > Class Template Reference
+
+
+ +

#include <array.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 Array (const int size)
 
 Array (const std::vector< int > &sizes={})
 
 Array (const int size, const T value)
 
 Array (const std::vector< int > &sizes, const T value)
 
 Array (const int size, T *const dataPtr)
 
 Array (const std::vector< int > &sizes, T *const dataPtr)
 
 Array (const Array< T > &array, const int index, const bool noCopy=false)
 
template<typename T2 >
 Array (const Array< T2 > &array)
 
 Array (const Array< T > &array)
 
Array< T > & operator= (const Array< T > &array)
 
 Array (Array< T > &&array)
 
Array< T > & operator= (Array< T > &&array)
 
Array< T > clone () const
 
void reset (const int size)
 
void reset (const std::vector< int > &sizes={})
 
void reset (const int size, const T value)
 
void reset (const std::vector< int > &sizes, const T value)
 
void reset (const int size, T *const dataPtr)
 
void reset (const std::vector< int > &sizes, T *const dataPtr)
 
void setFrom (const Matrix &cvMat)
 
void setTo (const T value)
 
bool empty () const
 
std::vector< int > getSize () const
 
int getSize (const int index) const
 
std::string printSize () const
 
size_t getNumberDimensions () const
 
size_t getVolume () const
 
size_t getVolume (const int indexA, const int indexB=-1) const
 
std::vector< int > getStride () const
 
int getStride (const int index) const
 
T * getPtr ()
 
const T * getConstPtr () const
 
T * getPseudoConstPtr () const
 
const MatrixgetConstCvMat () const
 
MatrixgetCvMat ()
 
T & operator[] (const int index)
 
const T & operator[] (const int index) const
 
T & operator[] (const std::vector< int > &indexes)
 
const T & operator[] (const std::vector< int > &indexes) const
 
T & at (const int index)
 
const T & at (const int index) const
 
T & at (const std::vector< int > &indexes)
 
const T & at (const std::vector< int > &indexes) const
 
const std::string toString () const
 
+

Detailed Description

+

template<typename T>
+class op::Array< T >

+ +

Array<T>: The OpenPose Basic Raw Data Container. This template class implements a multidimensional data array. It is our basic data container, analogous to Mat in OpenCV, Tensor in Torch/TensorFlow, or Blob in Caffe. It wraps a Matrix and a std::shared_ptr, both pointing to the same raw data, i.e., they share the same memory, so this data can be read and modified in both formats with no performance impact. Hence, it keeps high performance while adding high-level functions.

+ +

Definition at line 21 of file array.hpp.
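A small usage sketch of the container itself, assuming the header resolves to <openpose/core/array.hpp>; the {2, 25, 3} shape mimics a keypoint array with 2 people, 25 parts, and (x, y, score) per part:

#include <vector>
#include <openpose/core/array.hpp> // assumed include path

void sketchArray()
{
    const std::vector<int> sizes{2, 25, 3};
    op::Array<float> keypoints(sizes);
    keypoints.setTo(0.f);                        // initialize every element
    keypoints[{0, 0, 0}] = 123.f;                // x-coordinate of part 0 of person 0
    const auto volume = keypoints.getVolume();   // 2 * 25 * 3 = 150
    const auto dimensions = keypoints.getNumberDimensions(); // 3
    (void)volume; (void)dimensions;
}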

+

Constructor & Destructor Documentation

+ +

◆ Array() [1/10]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
op::Array< T >::Array (const int size)
+
+explicit
+
+

Array constructor. Equivalent to default constructor + reset(const int size).

Parameters
+ + +
size: Integer with the number of T elements to be allocated. E.g., size = 5 is internally similar to new T[5].
+
+
+ +
+
+ +

◆ Array() [2/10]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
op::Array< T >::Array (const std::vector< int > & sizes = {})
+
+explicit
+
+

Array constructor. Equivalent to default constructor + reset(const std::vector<int>& size = {}).

Parameters
+ + +
sizesVector with the size of each dimension. E.g., size = {3, 5, 2} is internally similar to new T[3*5*2].
+
+
+ +
+
+ +

◆ Array() [3/10]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
op::Array< T >::Array (const int size,
const T value 
)
+
+

Array constructor. Equivalent to default constructor + reset(const int size, const T value).

Parameters
+ + + +
size: Integer with the number of T elements to be allocated. E.g., size = 5 is internally similar to new T[5].
valueInitial value for each component of the Array.
+
+
+ +
+
+ +

◆ Array() [4/10]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
op::Array< T >::Array (const std::vector< int > & sizes,
const T value 
)
+
+

Array constructor. Equivalent to default constructor + reset(const std::vector<int>& size, const T value).

Parameters
+ + + +
sizesVector with the size of each dimension. E.g., size = {3, 5, 2} is internally similar to: new T[3*5*2].
valueInitial value for each component of the Array.
+
+
+ +
+
+ +

◆ Array() [5/10]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
op::Array< T >::Array (const int size,
T *const dataPtr 
)
+
+

Array constructor. Equivalent to the default constructor, but it does not allocate memory; instead, it uses dataPtr.

Parameters
+ + + +
size: Integer with the number of T elements to be allocated. E.g., size = 5 is internally similar to new T[5].
dataPtrPointer to the memory to be used by the Array.
+
+
+ +
+
+ +

◆ Array() [6/10]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
op::Array< T >::Array (const std::vector< int > & sizes,
T *const dataPtr 
)
+
+

Array constructor. Equivalent to the default constructor, but it does not allocate memory; instead, it uses dataPtr.

Parameters
+ + + +
sizesVector with the size of each dimension. E.g., size = {3, 5, 2} is internally similar to: new T[3*5*2].
dataPtrPointer to the memory to be used by the Array.
+
+
+ +
+
+ +

◆ Array() [7/10]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::Array< T >::Array (const Array< T > & array,
const int index,
const bool noCopy = false 
)
+
+

Array constructor.

Parameters
+ + + + +
array: Array<T> with the original data array to slice.
index: indicates the index of the array to extract.
noCopy: indicates whether to perform a copy. The copy itself never leads to undefined behavior; however, if noCopy == true, then:
  1. It is faster, as no data copy is involved, but...
  2. If the Array array goes out of scope, the resulting Array will provoke undefined behavior.
  3. If the returned Array is modified, the information in the Array array will also be.
+
+
+
+
Returns
Array<T> with the same dimensions as array, except that the first dimension becomes 1. E.g., if array is {p,k,m}, the resulting Array<T> is {1,k,m}.
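A sketch of this slicing constructor, extracting one person from a {people, parts, 3} keypoint array without copying the underlying data; personIndex is assumed to be valid and the include is the same <openpose/core/array.hpp> assumed earlier:

void sketchSlice(const op::Array<float>& poseKeypoints, const int personIndex)
{
    // noCopy == true: fast, but singlePerson must not outlive poseKeypoints,
    // and modifying it would also modify poseKeypoints.
    const op::Array<float> singlePerson{poseKeypoints, personIndex, true};
    // singlePerson.getSize() is now {1, parts, 3}.
}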
+ +
+
+ +

◆ Array() [8/10]

+ +
+
+
+template<typename T >
+
+template<typename T2 >
+ + + + + +
+ + + + + + + + +
op::Array< T >::Array (const Array< T2 > & array)
+
+inline
+
+

Array constructor. It manually copies the Array<T2> into the new Array<T>.

Parameters
+ + +
arrayArray<T2> with a format T2 different to the current Array type T.
+
+
+ +

Definition at line 96 of file array.hpp.

+ +
+
+ +

◆ Array() [9/10]

+ +
+
+
+template<typename T >
+ + + + + + + + +
op::Array< T >::Array (const Array< T > & array)
+
+

Copy constructor. It performs a fast copy: for performance purposes, copying an Array<T>, Datum, or cv::Mat just copies the reference; the copy still shares the same internal data. Modifying the copied element will modify the original one. Use clone() for a slower but real copy, similarly to cv::Mat and Array<T>.

Parameters
+ + +
arrayArray to be copied.
+
+
+ +
+
+ +

◆ Array() [10/10]

+ +
+
+
+template<typename T >
+ + + + + + + + +
op::Array< T >::Array (Array< T > && array)
+
+

Move constructor. It destroys the original Array to be moved.

Parameters
+ + +
arrayArray to be moved.
+
+
+ +
+
+

Member Function Documentation

+ +

◆ at() [1/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
T& op::Array< T >::at (const int index)
+
+inline
+
+

at() function. Same functionality as operator[](const int index), but it always checks whether the index is within the data bounds; otherwise, it throws an error.

Parameters
+ + +
indexThe desired memory location.
+
+
+
Returns
An editable reference to the data at the desired index location.
+ +

Definition at line 422 of file array.hpp.

+ +
+
+ +

◆ at() [2/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
const T& op::Array< T >::at (const int index) const
+
+inline
+
+

at() function. Same functionality as operator[](const int index) const, but it always checks whether the index is within the data bounds; otherwise, it throws an error.

Parameters
+ + +
indexThe desired memory location.
+
+
+
Returns
A non-editable reference to the data on the desired index location.
+ +

Definition at line 434 of file array.hpp.

+ +
+
+ +

◆ at() [3/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
T& op::Array< T >::at (const std::vector< int > & indexes)
+
+inline
+
+

at() function. Same functionality as operator[](const std::vector<int>& indexes), but it always checks whether the indexes are within the data bounds; otherwise, it throws an error.

Parameters
+ + +
indexesVector with the desired memory location.
+
+
+
Returns
An editable reference to the data at the desired index location.
+ +

Definition at line 446 of file array.hpp.

+ +
+
+ +

◆ at() [4/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
const T& op::Array< T >::at (const std::vector< int > & indexes) const
+
+inline
+
+

at() function. Same functionality as operator[](const std::vector<int>& indexes) const, but it always checks whether the indexes are within the data bounds; otherwise, it throws an error.

Parameters
+ + +
indexesVector with the desired memory location.
+
+
+
Returns
A non-editable reference to the data on the desired index location.
+ +

Definition at line 458 of file array.hpp.

+ +
+
+ +
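A small sketch contrasting the bounds-checked accessors; the shape and values are illustrative:

    op::Array<float> a({2, 3}, 0.f);
    a.at(5) = 1.f;          // last valid flat index; a.at(6) would throw an error
    a.at({1, 2}) = 2.f;     // same element addressed with a multi-dimensional index
    // a.at({2, 0});        // out of bounds: throws instead of corrupting memory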

◆ clone()

+ +
+
+
+template<typename T >
+ + + + + + + +
Array<T> op::Array< T >::clone () const
+
+

Clone function. Similar to cv::Mat::clone and Datum::clone. It performs a real but slow copy of the data, i.e., even if the copied element is modified, the original one is not.

Returns
The resulting Array.
+ +
+
+ +
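A short sketch of the difference between the shallow copy constructor above and clone(); names and values are illustrative:

    op::Array<float> original({2, 3}, 1.f);
    op::Array<float> shallow = original;       // copies the reference: same underlying data
    op::Array<float> deep = original.clone();  // real copy: independent data
    shallow[0] = 5.f;                          // original[0] is now 5.f as well
    deep[0] = 9.f;                             // original is unaffected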

◆ empty()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
bool op::Array< T >::empty () const
+
+inline
+
+

Check whether memory has been allocated.

Returns
True if no memory has been allocated, false otherwise.
+ +

Definition at line 228 of file array.hpp.

+ +
+
+ +

◆ getConstCvMat()

+ +
+
+
+template<typename T >
+ + + + + + + +
const Matrix& op::Array< T >::getConstCvMat () const
+
+

Return a Matrix wrapper to the data. It forbids the data to be modified. OpenCV only admits unsigned char, signed char, int, float & double; if the type T is not supported by OpenCV, it will throw an error. Note: Array<T> does not return an editable Matrix because some OpenCV functions reallocate memory, in which case the Matrix would no longer point to the Array<T> instance. If you want to perform an OpenCV operation on the Array data, you can use: editedCvMat = array.getConstCvMat().clone(); /* modify editedCvMat */ array.setFrom(editedCvMat);

Returns
A const Matrix pointing to the data.
+ +
+
+ +
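A hedged sketch of the clone-edit-setFrom pattern suggested above; the shape is illustrative and the actual OpenCV processing step is only indicated by a comment:

    op::Array<float> heatMap({368, 368}, 0.f);
    op::Matrix edited = heatMap.getConstCvMat().clone();  // deep copy into an editable Matrix
    // ... run the desired OpenCV operation on the cv::Mat wrapped by `edited` ...
    heatMap.setFrom(edited);                              // copy the edited data back into the Array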

◆ getConstPtr()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
const T* op::Array< T >::getConstPtr () const
+
+inline
+
+

Similar to getPtr(), but it forbids the data to be edited.

Returns
A raw const pointer to the data.
+ +

Definition at line 319 of file array.hpp.

+ +
+
+ +

◆ getCvMat()

+ +
+
+
+template<typename T >
+ + + + + + + +
Matrix& op::Array< T >::getCvMat ()
+
+

Analogous to getConstCvMat, but in this case it returns an editable Matrix. Very important: only functions that do not provoke data reallocation are allowed. E.g., resizing functions will not work; they would provoke undefined behavior and/or execution crashes.

Returns
A Matrix pointing to the data.
+ +
+
+ +

◆ getNumberDimensions()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
size_t op::Array< T >::getNumberDimensions () const
+
+inline
+
+

Return the total number of dimensions, equivalent to getSize().size().

Returns
The number of dimensions. If no memory is allocated, it returns 0.
+ +

Definition at line 262 of file array.hpp.

+ +
+
+ +

◆ getPseudoConstPtr()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
T* op::Array< T >::getPseudoConstPtr () const
+
+inline
+
+

Similar to getConstPtr(), but it allows the data to be edited. This function is only implemented for Pybind11 usage.

Returns
A raw pointer to the data.
+ +

Definition at line 329 of file array.hpp.

+ +
+
+ +

◆ getPtr()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
T* op::Array< T >::getPtr ()
+
+inline
+
+

Return a raw pointer to the data. Similar to: std::shared_ptr::get(). Note: if you modify the pointer data, you will directly modify it in the Array<T> instance too. If you know you do not want to modify the data, then use getConstPtr() instead.

Returns
A raw pointer to the data.
+ +

Definition at line 310 of file array.hpp.

+ +
+
+ +
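A short raw-pointer sketch; <algorithm> is assumed for std::fill, and getConstPtr() is preferable when the data is only read:

    #include <algorithm>

    op::Array<float> a({4, 4}, 1.f);
    std::fill(a.getPtr(), a.getPtr() + a.getVolume(), 0.f);  // zero every element in place
    const float* readOnly = a.getConstPtr();                 // read-only view of the same buffer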

◆ getSize() [1/2]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
std::vector<int> op::Array< T >::getSize () const
+
+inline
+
+

Return a vector with the size of each dimension allocated.

Returns
A std::vector<int> with the size of each dimension. If no memory has been allocated, it will return an empty std::vector.
+ +

Definition at line 238 of file array.hpp.

+ +
+
+ +

◆ getSize() [2/2]

+ +
+
+
+template<typename T >
+ + + + + + + + +
int op::Array< T >::getSize (const int index) const
+
+

Return the size of the desired dimension.

Parameters
+ + +
index: Dimension whose size is to be checked.
+
+
+
Returns
Size of the desired dimension. It will return 0 if the requested dimension is higher than the number of dimensions.
+ +
+
+ +

◆ getStride() [1/2]

+ +
+
+
+template<typename T >
+ + + + + + + +
std::vector<int> op::Array< T >::getStride () const
+
+

Return the stride or step size of the array. E.g., given an Array<T> of size 5x3, getStride() would return the following vector: {5 x 3 x sizeof(T), 3 x sizeof(T), sizeof(T)}.

+ +
+
+ +

◆ getStride() [2/2]

+ +
+
+
+template<typename T >
+ + + + + + + + +
int op::Array< T >::getStride (const int index) const
+
+

Return the stride or step size of the array at the index-th dimension. E.g., given an Array<T> of size 5x3, getStride(2) would return sizeof(T).

+ +
+
+ +

◆ getVolume() [1/2]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
size_t op::Array< T >::getVolume () const
+
+inline
+
+

Return the total number of elements allocated, equivalent to multiplying all the components of getSize(). E.g., for an Array<T> of size = {2,5,3}, the volume or total number of elements is 2x5x3 = 30.

Returns
The total volume of the allocated data. If no memory is allocated, it returns 0.
+ +

Definition at line 272 of file array.hpp.

+ +
+
+ +

◆ getVolume() [2/2]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
size_t op::Array< T >::getVolume (const int indexA,
const int indexB = -1 
) const
+
+

Similar to getVolume(), but it returns only the volume between the desired dimensions. E.g., for an Array<T> of size = {2,5,3}, the volume or total number of elements for getVolume(1,2) is 5x3 = 15.

Parameters
+ + + +
indexA: Dimension where to start.
indexB: Dimension where to stop. If indexB == -1, it will take up to the last dimension.
+
+
+
Returns
The total volume of the allocated data between the desired dimensions. If the indexes are out of bounds, it throws an error.
+ +
+
+ +
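A small sketch of how the size and volume queries relate to each other; the shape is illustrative:

    op::Array<float> keypoints({2, 25, 3}, 0.f);                   // 2 people x 25 body parts x (x, y, score)
    const auto numberDimensions = keypoints.getNumberDimensions(); // 3
    const auto sizes            = keypoints.getSize();             // {2, 25, 3}
    const auto parts            = keypoints.getSize(1);            // 25
    const auto totalElements    = keypoints.getVolume();           // 2*25*3 = 150
    const auto perPerson        = keypoints.getVolume(1, 2);       // 25*3 = 75 elements per person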

◆ operator=() [1/2]

+ +
+
+
+template<typename T >
+ + + + + + + + +
Array<T>& op::Array< T >::operator= (Array< T > && array)
+
+

Move assignment. Similar to Array<T>(Array<T>&& array).

Parameters
+ + +
arrayArray to be moved.
+
+
+
Returns
The resulting Array.
+ +
+
+ +

◆ operator=() [2/2]

+ +
+
+
+template<typename T >
+ + + + + + + + +
Array<T>& op::Array< T >::operator= (const Array< T > & array)
+
+

Copy assignment. Similar to Array<T>(const Array<T>& array).

Parameters
+ + +
arrayArray to be copied.
+
+
+
Returns
The resulting Array.
+ +
+
+ +

◆ operator[]() [1/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
T& op::Array< T >::operator[] (const int index)
+
+inline
+
+

[] operator Similar to the [] operator for raw pointer data. If debug mode is enabled, then it will check that the desired index is in the data range, and it will throw an exception otherwise (similar to the at operator).

Parameters
+ + +
indexThe desired memory location.
+
+
+
Returns
An editable reference to the data at the desired index location.
+ +

Definition at line 365 of file array.hpp.

+ +
+
+ +

◆ operator[]() [2/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
const T& op::Array< T >::operator[] (const int index) const
+
+inline
+
+

[] operator Same functionality as operator[](const int index), but it forbids modifying the value. Otherwise, const functions would not be able to call the [] operator.

Parameters
+ + +
indexThe desired memory location.
+
+
+
Returns
A non-editable reference to the data on the desired index location.
+ +

Definition at line 381 of file array.hpp.

+ +
+
+ +

◆ operator[]() [3/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
T& op::Array< T >::operator[] (const std::vector< int > & indexes)
+
+inline
+
+

[] operator. Same functionality as operator[](const int index), but it lets the user provide the multi-dimensional index. E.g., given a (10 x 10 x 10) array, array[11] is equivalent to array[{0,1,1}].

Parameters
+ + +
indexesVector with the desired memory location.
+
+
+
Returns
An editable reference to the data at the desired index location.
+ +

Definition at line 398 of file array.hpp.

+ +
+
+ +
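A typical keypoint-access sketch with the multi-dimensional [] operator; the shape and the person/part indices are illustrative:

    op::Array<float> poseKeypoints({2, 25, 3}, 0.f);  // as produced by the pose estimator
    const int person = 0, part = 1;
    const float x     = poseKeypoints[{person, part, 0}];
    const float y     = poseKeypoints[{person, part, 1}];
    const float score = poseKeypoints[{person, part, 2}];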

◆ operator[]() [4/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
const T& op::Array< T >::operator[] (const std::vector< int > & indexes) const
+
+inline
+
+

[] operator Same functionality as operator[](const std::vector<int>& indexes), but it forbids modifying the value. Otherwise, const functions would not be able to call the [] operator.

Parameters
+ + +
indexesVector with the desired memory location.
+
+
+
Returns
A non-editable reference to the data on the desired index location.
+ +

Definition at line 410 of file array.hpp.

+ +
+
+ +

◆ printSize()

+ +
+
+
+template<typename T >
+ + + + + + + +
std::string op::Array< T >::printSize () const
+
+

Return a string with the size of each dimension allocated.

Returns
A std::string with the size of each dimension. If no memory has been allocated, it will return an empty string.
+ +
+
+ +

◆ reset() [1/6]

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::Array< T >::reset (const int size)
+
+

Data allocation function. It allocates the required space for the memory (it does not initialize that memory).

Parameters
+ + +
size: Integer with the number of T elements to be allocated. E.g., size = 5 is internally similar to new T[5].
+
+
+ +
+
+ +

◆ reset() [2/6]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
void op::Array< T >::reset (const int size,
const T value 
)
+
+

Data allocation function. Similar to reset(const int size), but initializing the data to the value specified by the second argument.

Parameters
+ + + +
size: Integer with the number of T elements to be allocated. E.g., size = 5 is internally similar to new T[5].
value: Initial value for each component of the Array.
+
+
+ +
+
+ +
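A brief sketch of reusing one Array through the reset() overloads; sizes and values are illustrative:

    op::Array<float> buffer;        // default-constructed: no memory allocated
    buffer.reset(10);               // allocate 10 uninitialized floats
    buffer.reset(10, 0.f);          // re-allocate and initialize every element to 0
    buffer.reset({3, 5, 2}, 1.f);   // re-allocate as a 3x5x2 array of ones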

◆ reset() [3/6]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
void op::Array< T >::reset (const int size,
T *const dataPtr 
)
+
+

Data allocation function. Equivalent to the default constructor, but it does not allocate memory; instead, it uses dataPtr as the underlying memory.

Parameters
+ + + +
size: Integer with the number of T elements to be used. E.g., size = 5 is internally similar to new T[5].
dataPtr: Pointer to the memory to be used by the Array.
+
+
+ +
+
+ +

◆ reset() [4/6]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
void op::Array< T >::reset (const std::vector< int > & sizes,
const T value 
)
+
+

Data allocation function. Similar to reset(const std::vector<int>& size), but initializing the data to the value specified by the second argument.

Parameters
+ + + +
sizes: Vector with the size of each dimension. E.g., sizes = {3, 5, 2} is internally similar to new T[3*5*2].
value: Initial value for each component of the Array.
+
+
+ +
+
+ +

◆ reset() [5/6]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
void op::Array< T >::reset (const std::vector< int > & sizes,
T *const dataPtr 
)
+
+

Data allocation function. Equivalent to the default constructor, but it does not allocate memory; instead, it uses dataPtr as the underlying memory.

Parameters
+ + + +
sizes: Vector with the size of each dimension. E.g., sizes = {3, 5, 2} is internally similar to new T[3*5*2].
dataPtr: Pointer to the memory to be used by the Array.
+
+
+ +
+
+ +

◆ reset() [6/6]

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::Array< T >::reset (const std::vector< int > & sizes = {})
+
+

Data allocation function. Similar to reset(const int size), but it allocates a multi-dimensional array whose dimensions are the values given in the argument.

Parameters
+ + +
sizes: Vector with the size of each dimension. E.g., sizes = {3, 5, 2} is internally similar to new T[3*5*2].
+
+
+ +
+
+ +

◆ setFrom()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::Array< T >::setFrom (const Matrix & cvMat)
+
+

Data allocation function. It internally allocates memory and copies the data of the argument to the Array allocated memory.

Parameters
+ + +
cvMatMatrix to be copied.
+
+
+ +
+
+ +

◆ setTo()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::Array< T >::setTo (const T value)
+
+

Data allocation function. It internally assigns all the allocated memory to the value indicated by the argument.

Parameters
+ + +
valueValue for each component of the Array.
+
+
+ +
+
+ +

◆ toString()

+ +
+
+
+template<typename T >
+ + + + + + + +
const std::string op::Array< T >::toString () const
+
+

It returns a string with the whole array data. Useful for debugging. The format is: values separated by a space, with a newline for each dimension. E.g., for the Array{2, 2, 3}, it will print:

Array<T>::toString():
x1 x2 x3
x4 x5 x6

x7 x8 x9
x10 x11 x12

Returns
A string with the array values in the above format.
+ +
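For example, a small debugging sketch (assuming <iostream>):

    #include <iostream>

    op::Array<int> a({2, 2, 3}, 0);
    std::cout << a.toString() << std::endl;  // prints the 12 values in the layout described above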
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_array.js b/web/html/doc/classop_1_1_array.js new file mode 100644 index 000000000..57f0c2434 --- /dev/null +++ b/web/html/doc/classop_1_1_array.js @@ -0,0 +1,47 @@ +var classop_1_1_array = +[ + [ "Array", "classop_1_1_array.html#a793b9851c7490bc98d4dd52020c0cd3c", null ], + [ "Array", "classop_1_1_array.html#a48c1ba1f7017b5aa8e0451079dd3a6d3", null ], + [ "Array", "classop_1_1_array.html#ac833fdcb245fcc3135ce65227bb9e4b2", null ], + [ "Array", "classop_1_1_array.html#a959ede0df7e535d2d3ac40d098541c27", null ], + [ "Array", "classop_1_1_array.html#a9cd386050e94c29b3c4ee40cafcacc46", null ], + [ "Array", "classop_1_1_array.html#a90895562def04a81db0b3e7eaa3722c7", null ], + [ "Array", "classop_1_1_array.html#a416e95541761c557c50b79b5e1b33389", null ], + [ "Array", "classop_1_1_array.html#afb4e1f55747898d29aa13606ded9991f", null ], + [ "Array", "classop_1_1_array.html#a5a68cca98a3ebaf565f1e546eebd9f01", null ], + [ "Array", "classop_1_1_array.html#a7a7d854d63815e10e158fe889d17a88e", null ], + [ "at", "classop_1_1_array.html#a6e0afd5f447efbfc29efbeac62716eff", null ], + [ "at", "classop_1_1_array.html#a4a24dfa0d0f1f3769bf3bfcea47e2220", null ], + [ "at", "classop_1_1_array.html#a8eebb6c34642cdf19ac74c7ed38d128b", null ], + [ "at", "classop_1_1_array.html#ae74dec8220582072f85fb3ec430238ce", null ], + [ "clone", "classop_1_1_array.html#ab0b95bf5488cccad3bce7413251b04de", null ], + [ "empty", "classop_1_1_array.html#aa173085fa7ec7c7af3a443c617edd97a", null ], + [ "getConstCvMat", "classop_1_1_array.html#a9b43d8d495a233c384a75a3f33eae75f", null ], + [ "getConstPtr", "classop_1_1_array.html#ac5e77d6926d1d344cf54c88036fc8a9c", null ], + [ "getCvMat", "classop_1_1_array.html#a530010928025b3f64743505d732b1308", null ], + [ "getNumberDimensions", "classop_1_1_array.html#a5eff0723f0bbd192248e602bfbb6956f", null ], + [ "getPseudoConstPtr", "classop_1_1_array.html#a85d749e637a7528325f86b80595a91d1", null ], + [ "getPtr", "classop_1_1_array.html#af4715967fd2b028a97fd30257e697275", null ], + [ "getSize", "classop_1_1_array.html#a4568f646a97fa8cea443b864d91a28df", null ], + [ "getSize", "classop_1_1_array.html#ab4123b36e0816793e206365397dd8f79", null ], + [ "getStride", "classop_1_1_array.html#a38de9c4ba539b8134fcac91287722044", null ], + [ "getStride", "classop_1_1_array.html#ab033fba3d9140020dd89edb10fe8b109", null ], + [ "getVolume", "classop_1_1_array.html#aee364306687e39e754117c98ad844157", null ], + [ "getVolume", "classop_1_1_array.html#a5ed838d2b9933b6a80906d0e0db39742", null ], + [ "operator=", "classop_1_1_array.html#ae388368128afac05369172198911e05d", null ], + [ "operator=", "classop_1_1_array.html#a9c8e006e0eea472485f37971330ecbab", null ], + [ "operator[]", "classop_1_1_array.html#aa40dc59e800d3c4cce623d560c0e0fad", null ], + [ "operator[]", "classop_1_1_array.html#a0e1d5ce14d11caa3b92306ee677af4cc", null ], + [ "operator[]", "classop_1_1_array.html#aada0f1bd6e9eb73b4f977e62da536f58", null ], + [ "operator[]", "classop_1_1_array.html#ac4e9514cfee78a3a0236c1a6265376d8", null ], + [ "printSize", "classop_1_1_array.html#a3f26a48c35cde008970078a66ff6e5c7", null ], + [ "reset", "classop_1_1_array.html#a12e538b09e98bf0900163031602ed2ed", null ], + [ "reset", "classop_1_1_array.html#ac7183eb2f4e78a6941da3a2079b9ed32", null ], + [ "reset", "classop_1_1_array.html#ae0c3d1a662f6c213da16ac87e53120fc", null ], + [ "reset", "classop_1_1_array.html#add2eeccd967cdf0900449649cb6f5afb", null ], + [ "reset", "classop_1_1_array.html#a3252c38318d81a8b8fb6f71f4d4c2642", 
null ], + [ "reset", "classop_1_1_array.html#a0ad0232daa69783cf2c8f7a0ff5b3b0c", null ], + [ "setFrom", "classop_1_1_array.html#a9f4b51216faaa967d81598a0cedcf78f", null ], + [ "setTo", "classop_1_1_array.html#a28f09d11de753a741334ee8094296acb", null ], + [ "toString", "classop_1_1_array.html#af488c66ddac6cb75f7690ba8207599ed", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_array_cpu_gpu-members.html b/web/html/doc/classop_1_1_array_cpu_gpu-members.html new file mode 100644 index 000000000..9065a3fbd --- /dev/null +++ b/web/html/doc/classop_1_1_array_cpu_gpu-members.html @@ -0,0 +1,143 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::ArrayCpuGpu< T > Member List
+
+
+ +

This is the complete list of members for op::ArrayCpuGpu< T >, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ArrayCpuGpu()op::ArrayCpuGpu< T >
ArrayCpuGpu(const void *caffeBlobTPtr)op::ArrayCpuGpu< T >explicit
ArrayCpuGpu(const Array< T > &array, const bool copyFromGpu)op::ArrayCpuGpu< T >explicit
ArrayCpuGpu(const int num, const int channels, const int height, const int width)op::ArrayCpuGpu< T >explicit
asum_data() constop::ArrayCpuGpu< T >
asum_diff() constop::ArrayCpuGpu< T >
CanonicalAxisIndex(const int axis_index) constop::ArrayCpuGpu< T >
channels() constop::ArrayCpuGpu< T >
count() constop::ArrayCpuGpu< T >
count(const int start_axis, const int end_axis) constop::ArrayCpuGpu< T >
count(const int start_axis) constop::ArrayCpuGpu< T >
cpu_data() constop::ArrayCpuGpu< T >
cpu_diff() constop::ArrayCpuGpu< T >
data_at(const int n, const int c, const int h, const int w) constop::ArrayCpuGpu< T >
diff_at(const int n, const int c, const int h, const int w) constop::ArrayCpuGpu< T >
gpu_data() constop::ArrayCpuGpu< T >
gpu_diff() constop::ArrayCpuGpu< T >
gpu_shape() constop::ArrayCpuGpu< T >
height() constop::ArrayCpuGpu< T >
LegacyShape(const int index) constop::ArrayCpuGpu< T >
mutable_cpu_data()op::ArrayCpuGpu< T >
mutable_cpu_diff()op::ArrayCpuGpu< T >
mutable_gpu_data()op::ArrayCpuGpu< T >
mutable_gpu_diff()op::ArrayCpuGpu< T >
num() constop::ArrayCpuGpu< T >
num_axes() constop::ArrayCpuGpu< T >
offset(const int n, const int c=0, const int h=0, const int w=0) constop::ArrayCpuGpu< T >
Reshape(const int num, const int channels, const int height, const int width)op::ArrayCpuGpu< T >
Reshape(const std::vector< int > &shape)op::ArrayCpuGpu< T >
scale_data(const T scale_factor)op::ArrayCpuGpu< T >
scale_diff(const T scale_factor)op::ArrayCpuGpu< T >
set_cpu_data(T *data)op::ArrayCpuGpu< T >
set_gpu_data(T *data)op::ArrayCpuGpu< T >
shape() constop::ArrayCpuGpu< T >
shape(const int index) constop::ArrayCpuGpu< T >
shape_string() constop::ArrayCpuGpu< T >
sumsq_data() constop::ArrayCpuGpu< T >
sumsq_diff() constop::ArrayCpuGpu< T >
Update()op::ArrayCpuGpu< T >
width() constop::ArrayCpuGpu< T >
+
+ + + + diff --git a/web/html/doc/classop_1_1_array_cpu_gpu.html b/web/html/doc/classop_1_1_array_cpu_gpu.html new file mode 100644 index 000000000..9615434c4 --- /dev/null +++ b/web/html/doc/classop_1_1_array_cpu_gpu.html @@ -0,0 +1,1146 @@ + + + + + + + +OpenPose: op::ArrayCpuGpu< T > Class Template Reference + + + + + + + + + + + + + +
op::ArrayCpuGpu< T > Class Template Reference
+
+
+ +

#include <arrayCpuGpu.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 ArrayCpuGpu ()
 
 ArrayCpuGpu (const void *caffeBlobTPtr)
 
 ArrayCpuGpu (const Array< T > &array, const bool copyFromGpu)
 
 ArrayCpuGpu (const int num, const int channels, const int height, const int width)
 
void Reshape (const int num, const int channels, const int height, const int width)
 
void Reshape (const std::vector< int > &shape)
 
std::string shape_string () const
 
const std::vector< int > & shape () const
 
int shape (const int index) const
 
int num_axes () const
 
int count () const
 
int count (const int start_axis, const int end_axis) const
 
int count (const int start_axis) const
 
int CanonicalAxisIndex (const int axis_index) const
 
int num () const
 
int channels () const
 
int height () const
 
int width () const
 
int LegacyShape (const int index) const
 
int offset (const int n, const int c=0, const int h=0, const int w=0) const
 
data_at (const int n, const int c, const int h, const int w) const
 
diff_at (const int n, const int c, const int h, const int w) const
 
const T * cpu_data () const
 
void set_cpu_data (T *data)
 
const int * gpu_shape () const
 
const T * gpu_data () const
 
void set_gpu_data (T *data)
 
const T * cpu_diff () const
 
const T * gpu_diff () const
 
T * mutable_cpu_data ()
 
T * mutable_gpu_data ()
 
T * mutable_cpu_diff ()
 
T * mutable_gpu_diff ()
 
void Update ()
 
asum_data () const
 
asum_diff () const
 
sumsq_data () const
 
sumsq_diff () const
 
void scale_data (const T scale_factor)
 
void scale_diff (const T scale_factor)
 
+

Detailed Description

+

template<typename T>
+class op::ArrayCpuGpu< T >

+ +

ArrayCpuGpu<T>: binding of caffe::Blob<T>, used to avoid having Caffe as a dependency in the headers.

+ +

Definition at line 15 of file arrayCpuGpu.hpp.

+

Constructor & Destructor Documentation

+ +

◆ ArrayCpuGpu() [1/4]

+ +
+
+
+template<typename T >
+ + + + + + + +
op::ArrayCpuGpu< T >::ArrayCpuGpu ()
+
+ +
+
+ +

◆ ArrayCpuGpu() [2/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
op::ArrayCpuGpu< T >::ArrayCpuGpu (const void * caffeBlobTPtr)
+
+explicit
+
+
Parameters
+ + +
caffeBlobTPtr: must be a caffe::Blob<T>* element, or it will provoke a core dump. Done to avoid explicitly exposing 3rd-party libraries in the headers.
+
+
+ +
+
+ +

◆ ArrayCpuGpu() [3/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
op::ArrayCpuGpu< T >::ArrayCpuGpu (const Array< T > & array,
const bool copyFromGpu 
)
+
+explicit
+
+

Create an ArrayCpuGpu from the data in the Array element (it will automatically copy that data).

Parameters
+ + + +
array: Array<T> where the data to be copied is.
copyFromGpu: If false (default), it will copy the data to the CPU. If true, it will copy it to the GPU memory (using the CUDA copy function).
+
+
+ +
+
+ +
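A hedged sketch of wrapping an existing Array in an ArrayCpuGpu; the shape is illustrative and the relevant OpenPose headers are assumed to be included:

    op::Array<float> netOutput({1, 57, 46, 46}, 0.f);    // hypothetical heat-map shape
    op::ArrayCpuGpu<float> cpuBlob(netOutput, false);    // copy the data into CPU memory
    // op::ArrayCpuGpu<float> gpuBlob(netOutput, true);  // or copy it to GPU memory (CUDA builds)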

◆ ArrayCpuGpu() [4/4]

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::ArrayCpuGpu< T >::ArrayCpuGpu (const int num,
const int channels,
const int height,
const int width 
)
+
+explicit
+
+ +
+
+

Member Function Documentation

+ +

◆ asum_data()

+ +
+
+
+template<typename T >
+ + + + + + + +
T op::ArrayCpuGpu< T >::asum_data () const
+
+ +
+
+ +

◆ asum_diff()

+ +
+
+
+template<typename T >
+ + + + + + + +
T op::ArrayCpuGpu< T >::asum_diff () const
+
+ +
+
+ +

◆ CanonicalAxisIndex()

+ +
+
+
+template<typename T >
+ + + + + + + + +
int op::ArrayCpuGpu< T >::CanonicalAxisIndex (const int axis_index) const
+
+ +
+
+ +

◆ channels()

+ +
+
+
+template<typename T >
+ + + + + + + +
int op::ArrayCpuGpu< T >::channels () const
+
+ +
+
+ +

◆ count() [1/3]

+ +
+
+
+template<typename T >
+ + + + + + + +
int op::ArrayCpuGpu< T >::count () const
+
+ +
+
+ +

◆ count() [2/3]

+ +
+
+
+template<typename T >
+ + + + + + + + +
int op::ArrayCpuGpu< T >::count (const int start_axis) const
+
+ +
+
+ +

◆ count() [3/3]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + +
int op::ArrayCpuGpu< T >::count (const int start_axis,
const int end_axis 
) const
+
+ +
+
+ +

◆ cpu_data()

+ +
+
+
+template<typename T >
+ + + + + + + +
const T* op::ArrayCpuGpu< T >::cpu_data () const
+
+ +
+
+ +

◆ cpu_diff()

+ +
+
+
+template<typename T >
+ + + + + + + +
const T* op::ArrayCpuGpu< T >::cpu_diff () const
+
+ +
+
+ +

◆ data_at()

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
T op::ArrayCpuGpu< T >::data_at (const int n,
const int c,
const int h,
const int w 
) const
+
+ +
+
+ +

◆ diff_at()

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
T op::ArrayCpuGpu< T >::diff_at (const int n,
const int c,
const int h,
const int w 
) const
+
+ +
+
+ +

◆ gpu_data()

+ +
+
+
+template<typename T >
+ + + + + + + +
const T* op::ArrayCpuGpu< T >::gpu_data () const
+
+ +
+
+ +

◆ gpu_diff()

+ +
+
+
+template<typename T >
+ + + + + + + +
const T* op::ArrayCpuGpu< T >::gpu_diff () const
+
+ +
+
+ +

◆ gpu_shape()

+ +
+
+
+template<typename T >
+ + + + + + + +
const int* op::ArrayCpuGpu< T >::gpu_shape () const
+
+ +
+
+ +

◆ height()

+ +
+
+
+template<typename T >
+ + + + + + + +
int op::ArrayCpuGpu< T >::height () const
+
+ +
+
+ +

◆ LegacyShape()

+ +
+
+
+template<typename T >
+ + + + + + + + +
int op::ArrayCpuGpu< T >::LegacyShape (const int index) const
+
+ +
+
+ +

◆ mutable_cpu_data()

+ +
+
+
+template<typename T >
+ + + + + + + +
T* op::ArrayCpuGpu< T >::mutable_cpu_data ()
+
+ +
+
+ +

◆ mutable_cpu_diff()

+ +
+
+
+template<typename T >
+ + + + + + + +
T* op::ArrayCpuGpu< T >::mutable_cpu_diff ()
+
+ +
+
+ +

◆ mutable_gpu_data()

+ +
+
+
+template<typename T >
+ + + + + + + +
T* op::ArrayCpuGpu< T >::mutable_gpu_data ()
+
+ +
+
+ +

◆ mutable_gpu_diff()

+ +
+
+
+template<typename T >
+ + + + + + + +
T* op::ArrayCpuGpu< T >::mutable_gpu_diff ()
+
+ +
+
+ +

◆ num()

+ +
+
+
+template<typename T >
+ + + + + + + +
int op::ArrayCpuGpu< T >::num () const
+
+ +
+
+ +

◆ num_axes()

+ +
+
+
+template<typename T >
+ + + + + + + +
int op::ArrayCpuGpu< T >::num_axes () const
+
+ +
+
+ +

◆ offset()

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
int op::ArrayCpuGpu< T >::offset (const int n,
const int c = 0,
const int h = 0,
const int w = 0 
) const
+
+ +
+
+ +

◆ Reshape() [1/2]

+ +
+
+
+template<typename T >
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::ArrayCpuGpu< T >::Reshape (const int num,
const int channels,
const int height,
const int width 
)
+
+ +
+
+ +

◆ Reshape() [2/2]

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::ArrayCpuGpu< T >::Reshape (const std::vector< int > & shape)
+
+ +
+
+ +

◆ scale_data()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::ArrayCpuGpu< T >::scale_data (const T scale_factor)
+
+ +
+
+ +

◆ scale_diff()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::ArrayCpuGpu< T >::scale_diff (const T scale_factor)
+
+ +
+
+ +

◆ set_cpu_data()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::ArrayCpuGpu< T >::set_cpu_data (T * data)
+
+ +
+
+ +

◆ set_gpu_data()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::ArrayCpuGpu< T >::set_gpu_data (T * data)
+
+ +
+
+ +

◆ shape() [1/2]

+ +
+
+
+template<typename T >
+ + + + + + + +
const std::vector<int>& op::ArrayCpuGpu< T >::shape () const
+
+ +
+
+ +

◆ shape() [2/2]

+ +
+
+
+template<typename T >
+ + + + + + + + +
int op::ArrayCpuGpu< T >::shape (const int index) const
+
+ +
+
+ +

◆ shape_string()

+ +
+
+
+template<typename T >
+ + + + + + + +
std::string op::ArrayCpuGpu< T >::shape_string () const
+
+ +
+
+ +

◆ sumsq_data()

+ +
+
+
+template<typename T >
+ + + + + + + +
T op::ArrayCpuGpu< T >::sumsq_data () const
+
+ +
+
+ +

◆ sumsq_diff()

+ +
+
+
+template<typename T >
+ + + + + + + +
T op::ArrayCpuGpu< T >::sumsq_diff () const
+
+ +
+
+ +

◆ Update()

+ +
+
+
+template<typename T >
+ + + + + + + +
void op::ArrayCpuGpu< T >::Update ()
+
+ +
+
+ +

◆ width()

+ +
+
+
+template<typename T >
+ + + + + + + +
int op::ArrayCpuGpu< T >::width () const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_array_cpu_gpu.js b/web/html/doc/classop_1_1_array_cpu_gpu.js new file mode 100644 index 000000000..621de6bfc --- /dev/null +++ b/web/html/doc/classop_1_1_array_cpu_gpu.js @@ -0,0 +1,43 @@ +var classop_1_1_array_cpu_gpu = +[ + [ "ArrayCpuGpu", "classop_1_1_array_cpu_gpu.html#aee39459d54376c7ec98155b4add7f961", null ], + [ "ArrayCpuGpu", "classop_1_1_array_cpu_gpu.html#a4fb245f1557f61192ab54c24f4baf487", null ], + [ "ArrayCpuGpu", "classop_1_1_array_cpu_gpu.html#ad5d631890ff61a2e70695b797f1a6f34", null ], + [ "ArrayCpuGpu", "classop_1_1_array_cpu_gpu.html#a0234d36fab318cc2c672134fd4009301", null ], + [ "asum_data", "classop_1_1_array_cpu_gpu.html#a7cf928cf41b4477c59c91a0f45c0443c", null ], + [ "asum_diff", "classop_1_1_array_cpu_gpu.html#a675012bf11b17cb7b7c41b5dc1beccf6", null ], + [ "CanonicalAxisIndex", "classop_1_1_array_cpu_gpu.html#adeb393edfae4967cb510a8c7a2d07d80", null ], + [ "channels", "classop_1_1_array_cpu_gpu.html#a2eb57d0cb4f902b275d126e4b6f706f2", null ], + [ "count", "classop_1_1_array_cpu_gpu.html#aa3e701c15f11e563e0b442c28143188d", null ], + [ "count", "classop_1_1_array_cpu_gpu.html#a50f82490bab162626760d420f5f6779c", null ], + [ "count", "classop_1_1_array_cpu_gpu.html#acd9ea6e75dd2eb516d6a91bac91e43e4", null ], + [ "cpu_data", "classop_1_1_array_cpu_gpu.html#a7e982b668191924e6665645790fa18a2", null ], + [ "cpu_diff", "classop_1_1_array_cpu_gpu.html#ac5d005ccb8a3b8aba935e5276fcd20e4", null ], + [ "data_at", "classop_1_1_array_cpu_gpu.html#a4836fabbedf7e1ef97bfbd4d33db3d96", null ], + [ "diff_at", "classop_1_1_array_cpu_gpu.html#a3f10532b10ec840aa9e1dac3ccc7ee25", null ], + [ "gpu_data", "classop_1_1_array_cpu_gpu.html#a292b819460cbf56fd36e7435cd99c49a", null ], + [ "gpu_diff", "classop_1_1_array_cpu_gpu.html#aa0717b11c87da804e6da0d7aca4a5414", null ], + [ "gpu_shape", "classop_1_1_array_cpu_gpu.html#a7c92a38509887af087eafd7522047429", null ], + [ "height", "classop_1_1_array_cpu_gpu.html#a1ae24508718592592421f9491bcf50f0", null ], + [ "LegacyShape", "classop_1_1_array_cpu_gpu.html#a9aa5001613f7199de933eef152db40b0", null ], + [ "mutable_cpu_data", "classop_1_1_array_cpu_gpu.html#a6eafc0638925b776bb78c68c1fef972a", null ], + [ "mutable_cpu_diff", "classop_1_1_array_cpu_gpu.html#aba03b602ed1c745b3ba344d7ccedfd30", null ], + [ "mutable_gpu_data", "classop_1_1_array_cpu_gpu.html#ac0bb37052ae356e85d681f52a4716f3c", null ], + [ "mutable_gpu_diff", "classop_1_1_array_cpu_gpu.html#a678e65cb71d2cc2e1070499465894892", null ], + [ "num", "classop_1_1_array_cpu_gpu.html#aa87f658e2ff9699908f5cb823e988188", null ], + [ "num_axes", "classop_1_1_array_cpu_gpu.html#ad79b3b3cf4180535211e20e086262837", null ], + [ "offset", "classop_1_1_array_cpu_gpu.html#af24813492bad97de4e4c628fe356abe7", null ], + [ "Reshape", "classop_1_1_array_cpu_gpu.html#a9e3c6d812430d638187441e9d5cacfcc", null ], + [ "Reshape", "classop_1_1_array_cpu_gpu.html#a1cc1cc3226543f5a2eb4c8ddcb5ec8a5", null ], + [ "scale_data", "classop_1_1_array_cpu_gpu.html#a16dc8c19cc0b0442c1be6c859fe7d33c", null ], + [ "scale_diff", "classop_1_1_array_cpu_gpu.html#a0a307838959472e8e8815d76305c1bf6", null ], + [ "set_cpu_data", "classop_1_1_array_cpu_gpu.html#ad6e6a4da3987e9558d89b877f9ec7e82", null ], + [ "set_gpu_data", "classop_1_1_array_cpu_gpu.html#a4dd6f5e4d7f54e921c7c296078a594f0", null ], + [ "shape", "classop_1_1_array_cpu_gpu.html#af817bde68da318a8f9dd08feabe3c286", null ], + [ "shape", "classop_1_1_array_cpu_gpu.html#a3e44f7ede3ff5ef0db6df30ecd4562c5", null ], + [ "shape_string", 
"classop_1_1_array_cpu_gpu.html#a425d12f8d702ac9a57fb9a5f48cea152", null ], + [ "sumsq_data", "classop_1_1_array_cpu_gpu.html#a6dd38e027220beada2f8f55f9d073d53", null ], + [ "sumsq_diff", "classop_1_1_array_cpu_gpu.html#a280202f2a968ea68795d31accf5072bb", null ], + [ "Update", "classop_1_1_array_cpu_gpu.html#af9f32307732772c708ff45c52b4e7dd0", null ], + [ "width", "classop_1_1_array_cpu_gpu.html#a5011662a5cf4bc7f7c1a2d966dcc44cd", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_body_part_connector_caffe-members.html b/web/html/doc/classop_1_1_body_part_connector_caffe-members.html new file mode 100644 index 000000000..f46fdcd52 --- /dev/null +++ b/web/html/doc/classop_1_1_body_part_connector_caffe-members.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::BodyPartConnectorCaffe< T > Member List
+
+
+ +

This is the complete list of members for op::BodyPartConnectorCaffe< T >, including all inherited members.

+ + + + + + + + + + + + + + + + + + + +
Backward_cpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)op::BodyPartConnectorCaffe< T >virtual
Backward_gpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)op::BodyPartConnectorCaffe< T >virtual
BodyPartConnectorCaffe()op::BodyPartConnectorCaffe< T >explicit
Forward(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)op::BodyPartConnectorCaffe< T >virtual
Forward_cpu(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)op::BodyPartConnectorCaffe< T >virtual
Forward_gpu(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)op::BodyPartConnectorCaffe< T >virtual
Forward_ocl(const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)op::BodyPartConnectorCaffe< T >virtual
Reshape(const std::vector< ArrayCpuGpu< T > * > &bottom, const int gpuID=0)op::BodyPartConnectorCaffe< T >virtual
setDefaultNmsThreshold(const T defaultNmsThreshold)op::BodyPartConnectorCaffe< T >
setInterMinAboveThreshold(const T interMinAboveThreshold)op::BodyPartConnectorCaffe< T >
setInterThreshold(const T interThreshold)op::BodyPartConnectorCaffe< T >
setMaximizePositives(const bool maximizePositives)op::BodyPartConnectorCaffe< T >
setMinSubsetCnt(const int minSubsetCnt)op::BodyPartConnectorCaffe< T >
setMinSubsetScore(const T minSubsetScore)op::BodyPartConnectorCaffe< T >
setPoseModel(const PoseModel poseModel)op::BodyPartConnectorCaffe< T >
setScaleNetToOutput(const T scaleNetToOutput)op::BodyPartConnectorCaffe< T >
type() constop::BodyPartConnectorCaffe< T >inlinevirtual
~BodyPartConnectorCaffe()op::BodyPartConnectorCaffe< T >virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_body_part_connector_caffe.html b/web/html/doc/classop_1_1_body_part_connector_caffe.html new file mode 100644 index 000000000..ce4f4567a --- /dev/null +++ b/web/html/doc/classop_1_1_body_part_connector_caffe.html @@ -0,0 +1,702 @@ + + + + + + + +OpenPose: op::BodyPartConnectorCaffe< T > Class Template Reference + + + + + + + + + + + + + +
op::BodyPartConnectorCaffe< T > Class Template Reference
+
+
+ +

#include <bodyPartConnectorCaffe.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 BodyPartConnectorCaffe ()
 
virtual ~BodyPartConnectorCaffe ()
 
virtual void Reshape (const std::vector< ArrayCpuGpu< T > * > &bottom, const int gpuID=0)
 
virtual const char * type () const
 
void setPoseModel (const PoseModel poseModel)
 
void setMaximizePositives (const bool maximizePositives)
 
void setDefaultNmsThreshold (const T defaultNmsThreshold)
 
void setInterMinAboveThreshold (const T interMinAboveThreshold)
 
void setInterThreshold (const T interThreshold)
 
void setMinSubsetCnt (const int minSubsetCnt)
 
void setMinSubsetScore (const T minSubsetScore)
 
void setScaleNetToOutput (const T scaleNetToOutput)
 
virtual void Forward (const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
 
virtual void Forward_cpu (const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
 
virtual void Forward_gpu (const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
 
virtual void Forward_ocl (const std::vector< ArrayCpuGpu< T > * > &bottom, Array< T > &poseKeypoints, Array< T > &poseScores)
 
virtual void Backward_cpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
virtual void Backward_gpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
+

Detailed Description

+

template<typename T>
+class op::BodyPartConnectorCaffe< T >

+ + +

Definition at line 13 of file bodyPartConnectorCaffe.hpp.

+

Constructor & Destructor Documentation

+ +

◆ BodyPartConnectorCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
op::BodyPartConnectorCaffe< T >::BodyPartConnectorCaffe ()
+
+explicit
+
+ +
+
+ +

◆ ~BodyPartConnectorCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual op::BodyPartConnectorCaffe< T >::~BodyPartConnectorCaffe ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ Backward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::BodyPartConnectorCaffe< T >::Backward_cpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Backward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::BodyPartConnectorCaffe< T >::Backward_gpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Forward()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::BodyPartConnectorCaffe< T >::Forward (const std::vector< ArrayCpuGpu< T > * > & bottom,
Array< T > & poseKeypoints,
Array< T > & poseScores 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::BodyPartConnectorCaffe< T >::Forward_cpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
Array< T > & poseKeypoints,
Array< T > & poseScores 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::BodyPartConnectorCaffe< T >::Forward_gpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
Array< T > & poseKeypoints,
Array< T > & poseScores 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_ocl()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::BodyPartConnectorCaffe< T >::Forward_ocl (const std::vector< ArrayCpuGpu< T > * > & bottom,
Array< T > & poseKeypoints,
Array< T > & poseScores 
)
+
+virtual
+
+ +
+
+ +

◆ Reshape()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::BodyPartConnectorCaffe< T >::Reshape (const std::vector< ArrayCpuGpu< T > * > & bottom,
const int gpuID = 0 
)
+
+virtual
+
+ +
+
+ +

◆ setDefaultNmsThreshold()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setDefaultNmsThreshold (const T defaultNmsThreshold)
+
+ +
+
+ +

◆ setInterMinAboveThreshold()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setInterMinAboveThreshold (const T interMinAboveThreshold)
+
+ +
+
+ +

◆ setInterThreshold()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setInterThreshold (const T interThreshold)
+
+ +
+
+ +

◆ setMaximizePositives()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setMaximizePositives (const bool maximizePositives)
+
+ +
+
+ +

◆ setMinSubsetCnt()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setMinSubsetCnt (const int minSubsetCnt)
+
+ +
+
+ +

◆ setMinSubsetScore()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setMinSubsetScore (const T minSubsetScore)
+
+ +
+
+ +

◆ setPoseModel()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setPoseModel (const PoseModel poseModel)
+
+ +
+
+ +

◆ setScaleNetToOutput()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::BodyPartConnectorCaffe< T >::setScaleNetToOutput (const T scaleNetToOutput)
+
+ +
+
+ +

◆ type()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual const char* op::BodyPartConnectorCaffe< T >::type () const
+
+inlinevirtual
+
+ +

Definition at line 22 of file bodyPartConnectorCaffe.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_body_part_connector_caffe.js b/web/html/doc/classop_1_1_body_part_connector_caffe.js new file mode 100644 index 000000000..8e974fa73 --- /dev/null +++ b/web/html/doc/classop_1_1_body_part_connector_caffe.js @@ -0,0 +1,21 @@ +var classop_1_1_body_part_connector_caffe = +[ + [ "BodyPartConnectorCaffe", "classop_1_1_body_part_connector_caffe.html#a94e2364fa13ea79b2d6fd72c5db34765", null ], + [ "~BodyPartConnectorCaffe", "classop_1_1_body_part_connector_caffe.html#ab0beade5f7d8e56e881231e46f9306ec", null ], + [ "Backward_cpu", "classop_1_1_body_part_connector_caffe.html#a8ad522722607c9ff6299337019f04175", null ], + [ "Backward_gpu", "classop_1_1_body_part_connector_caffe.html#ace9cba081581a4c58fcfbef73b6dd11b", null ], + [ "Forward", "classop_1_1_body_part_connector_caffe.html#a52cc46828bc7720a62fbbe841022331e", null ], + [ "Forward_cpu", "classop_1_1_body_part_connector_caffe.html#a03364fbed0c71e76eb5fb1f61a397de8", null ], + [ "Forward_gpu", "classop_1_1_body_part_connector_caffe.html#a9dbcac7de4a57a58733462f3ce1db10c", null ], + [ "Forward_ocl", "classop_1_1_body_part_connector_caffe.html#a51324177e60bf260f6c2def76e9e3d7d", null ], + [ "Reshape", "classop_1_1_body_part_connector_caffe.html#abf26360f5d25fab82705270dae5f5d86", null ], + [ "setDefaultNmsThreshold", "classop_1_1_body_part_connector_caffe.html#af4520659b0cfb644a3c2d6ceb0e81a21", null ], + [ "setInterMinAboveThreshold", "classop_1_1_body_part_connector_caffe.html#a789c77e69e5590a78b22e1e5f5cc4efc", null ], + [ "setInterThreshold", "classop_1_1_body_part_connector_caffe.html#a75d0a3f3c8dca99c8a9e1b680098da16", null ], + [ "setMaximizePositives", "classop_1_1_body_part_connector_caffe.html#a6d859f2e218b1ea707fddcaf0911886d", null ], + [ "setMinSubsetCnt", "classop_1_1_body_part_connector_caffe.html#a6442721373481873ddeb9ffd7c6fdb7b", null ], + [ "setMinSubsetScore", "classop_1_1_body_part_connector_caffe.html#a9b9fa9490fef0121a70c3d6d749272f7", null ], + [ "setPoseModel", "classop_1_1_body_part_connector_caffe.html#a104744fdab14d4c1335eb8778edea21e", null ], + [ "setScaleNetToOutput", "classop_1_1_body_part_connector_caffe.html#a0bad959b2da005b62cab962327ccba01", null ], + [ "type", "classop_1_1_body_part_connector_caffe.html#aec0d6f32107a6222406e73ca9ae4942d", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_camera_parameter_reader-members.html b/web/html/doc/classop_1_1_camera_parameter_reader-members.html new file mode 100644 index 000000000..985f7f8e0 --- /dev/null +++ b/web/html/doc/classop_1_1_camera_parameter_reader-members.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::CameraParameterReader Member List
+
+
+ +

This is the complete list of members for op::CameraParameterReader, including all inherited members.

+ + + + + + + + + + + + + + + + + +
CameraParameterReader()op::CameraParameterReaderexplicit
CameraParameterReader(const std::string &serialNumber, const Matrix &cameraIntrinsics, const Matrix &cameraDistortion, const Matrix &cameraExtrinsics=Matrix(), const Matrix &cameraExtrinsicsInitial=Matrix())op::CameraParameterReaderexplicit
getCameraDistortions() constop::CameraParameterReader
getCameraExtrinsics() constop::CameraParameterReader
getCameraExtrinsicsInitial() constop::CameraParameterReader
getCameraIntrinsics() constop::CameraParameterReader
getCameraMatrices() constop::CameraParameterReader
getCameraSerialNumbers() constop::CameraParameterReader
getNumberCameras() constop::CameraParameterReader
getUndistortImage() constop::CameraParameterReader
readParameters(const std::string &cameraParameterPath, const std::vector< std::string > &serialNumbers={})op::CameraParameterReader
readParameters(const std::string &cameraParameterPath, const std::string &serialNumber)op::CameraParameterReader
setUndistortImage(const bool undistortImage)op::CameraParameterReader
undistort(Matrix &frame, const unsigned int cameraIndex=0u)op::CameraParameterReader
writeParameters(const std::string &cameraParameterPath) constop::CameraParameterReader
~CameraParameterReader()op::CameraParameterReadervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_camera_parameter_reader.html b/web/html/doc/classop_1_1_camera_parameter_reader.html new file mode 100644 index 000000000..4f7962f1e --- /dev/null +++ b/web/html/doc/classop_1_1_camera_parameter_reader.html @@ -0,0 +1,510 @@ + + + + + + + +OpenPose: op::CameraParameterReader Class Reference + + + + + + + + + + + + + +
op::CameraParameterReader Class Reference
+
+
+ +

#include <cameraParameterReader.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 CameraParameterReader ()
 
virtual ~CameraParameterReader ()
 
 CameraParameterReader (const std::string &serialNumber, const Matrix &cameraIntrinsics, const Matrix &cameraDistortion, const Matrix &cameraExtrinsics=Matrix(), const Matrix &cameraExtrinsicsInitial=Matrix())
 
void readParameters (const std::string &cameraParameterPath, const std::vector< std::string > &serialNumbers={})
 
void readParameters (const std::string &cameraParameterPath, const std::string &serialNumber)
 
void writeParameters (const std::string &cameraParameterPath) const
 
unsigned long long getNumberCameras () const
 
const std::vector< std::string > & getCameraSerialNumbers () const
 
const std::vector< Matrix > & getCameraMatrices () const
 
const std::vector< Matrix > & getCameraDistortions () const
 
const std::vector< Matrix > & getCameraIntrinsics () const
 
const std::vector< Matrix > & getCameraExtrinsics () const
 
const std::vector< Matrix > & getCameraExtrinsicsInitial () const
 
bool getUndistortImage () const
 
void setUndistortImage (const bool undistortImage)
 
void undistort (Matrix &frame, const unsigned int cameraIndex=0u)
 
+

Detailed Description

+
+

Definition at line 8 of file cameraParameterReader.hpp.

+

Constructor & Destructor Documentation

+ +

◆ CameraParameterReader() [1/2]

+ +
+
+ + + + + +
+ + + + + + + +
op::CameraParameterReader::CameraParameterReader ()
+
+explicit
+
+ +
+
+ +

◆ ~CameraParameterReader()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::CameraParameterReader::~CameraParameterReader ()
+
+virtual
+
+ +
+
+ +

◆ CameraParameterReader() [2/2]

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::CameraParameterReader::CameraParameterReader (const std::string & serialNumber,
const MatrixcameraIntrinsics,
const MatrixcameraDistortion,
const MatrixcameraExtrinsics = Matrix(),
const MatrixcameraExtrinsicsInitial = Matrix() 
)
+
+explicit
+
+ +
+
+

Member Function Documentation

+ +

◆ getCameraDistortions()

+ +
+
+ + + + + + + +
const std::vector<Matrix>& op::CameraParameterReader::getCameraDistortions () const
+
+ +
+
+ +

◆ getCameraExtrinsics()

+ +
+
+ + + + + + + +
const std::vector<Matrix>& op::CameraParameterReader::getCameraExtrinsics () const
+
+ +
+
+ +

◆ getCameraExtrinsicsInitial()

+ +
+
+ + + + + + + +
const std::vector<Matrix>& op::CameraParameterReader::getCameraExtrinsicsInitial () const
+
+ +
+
+ +

◆ getCameraIntrinsics()

+ +
+
+ + + + + + + +
const std::vector<Matrix>& op::CameraParameterReader::getCameraIntrinsics () const
+
+ +
+
+ +

◆ getCameraMatrices()

+ +
+
+ + + + + + + +
const std::vector<Matrix>& op::CameraParameterReader::getCameraMatrices () const
+
+ +
+
+ +

◆ getCameraSerialNumbers()

+ +
+
+ + + + + + + +
const std::vector<std::string>& op::CameraParameterReader::getCameraSerialNumbers () const
+
+ +
+
+ +

◆ getNumberCameras()

+ +
+
+ + + + + + + +
unsigned long long op::CameraParameterReader::getNumberCameras () const
+
+ +
+
+ +

◆ getUndistortImage()

+ +
+
+ + + + + + + +
bool op::CameraParameterReader::getUndistortImage () const
+
+ +
+
+ +

◆ readParameters() [1/2]

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::CameraParameterReader::readParameters (const std::string & cameraParameterPath,
const std::string & serialNumber 
)
+
+ +
+
+ +

◆ readParameters() [2/2]

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::CameraParameterReader::readParameters (const std::string & cameraParameterPath,
const std::vector< std::string > & serialNumbers = {} 
)
+
+ +
+
+ +
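A hedged usage sketch; the parameter folder path is illustrative, and readParameters() expects one parameter file per camera serial number in that folder:

    op::CameraParameterReader reader;
    reader.readParameters("models/cameraParameters/flir/");   // illustrative path
    const auto numberCameras = reader.getNumberCameras();
    const auto& intrinsics   = reader.getCameraIntrinsics();  // one Matrix per camera
    const auto& extrinsics   = reader.getCameraExtrinsics();
    reader.setUndistortImage(true);                           // enable undistort() on incoming frames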

◆ setUndistortImage()

+ +
+
+ + + + + + + + +
void op::CameraParameterReader::setUndistortImage (const bool undistortImage)
+
+ +
+
+ +

◆ undistort()

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::CameraParameterReader::undistort (Matrixframe,
const unsigned int cameraIndex = 0u 
)
+
+ +
+
+ +

◆ writeParameters()

+ +
+
+ + + + + + + + +
void op::CameraParameterReader::writeParameters (const std::string & cameraParameterPath) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_camera_parameter_reader.js b/web/html/doc/classop_1_1_camera_parameter_reader.js new file mode 100644 index 000000000..c88c8ac08 --- /dev/null +++ b/web/html/doc/classop_1_1_camera_parameter_reader.js @@ -0,0 +1,19 @@ +var classop_1_1_camera_parameter_reader = +[ + [ "CameraParameterReader", "classop_1_1_camera_parameter_reader.html#ab7a4c3ef7ac8d8a41e5711ec85b7be4b", null ], + [ "~CameraParameterReader", "classop_1_1_camera_parameter_reader.html#acfa701389b1e566e1ea49cfd2605bbf8", null ], + [ "CameraParameterReader", "classop_1_1_camera_parameter_reader.html#aae3c60cbed99e1b5706c96577732ddae", null ], + [ "getCameraDistortions", "classop_1_1_camera_parameter_reader.html#a8edb22b20d1ed044335ec0d2175eeabf", null ], + [ "getCameraExtrinsics", "classop_1_1_camera_parameter_reader.html#a8122bb2a8a07555b5341141356fa37c7", null ], + [ "getCameraExtrinsicsInitial", "classop_1_1_camera_parameter_reader.html#a88c9f18f6b0f3e5d09240e65a5e04beb", null ], + [ "getCameraIntrinsics", "classop_1_1_camera_parameter_reader.html#a6db1e0c2b4ed63407d12ff0de97cb098", null ], + [ "getCameraMatrices", "classop_1_1_camera_parameter_reader.html#a975e5a340bd1b77d680007797ec9eeea", null ], + [ "getCameraSerialNumbers", "classop_1_1_camera_parameter_reader.html#acf2b4d428d18f2663f4df640171b254d", null ], + [ "getNumberCameras", "classop_1_1_camera_parameter_reader.html#a8d97033970f3e71657da070cd87fd70c", null ], + [ "getUndistortImage", "classop_1_1_camera_parameter_reader.html#a4c819945b0df95bcfb6f8d79451290d5", null ], + [ "readParameters", "classop_1_1_camera_parameter_reader.html#a2be8ff6d89e5f623f476c75afe3c5c3b", null ], + [ "readParameters", "classop_1_1_camera_parameter_reader.html#a906fd316f09d901280a5fe10a4a54541", null ], + [ "setUndistortImage", "classop_1_1_camera_parameter_reader.html#ae33e8637012413719b389649d1e5448a", null ], + [ "undistort", "classop_1_1_camera_parameter_reader.html#aee02b82d0c5fd51dd3ba5a2267f7b370", null ], + [ "writeParameters", "classop_1_1_camera_parameter_reader.html#a7210bc5ebfe6e90a52524b7f0f3f3991", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_coco_json_saver-members.html b/web/html/doc/classop_1_1_coco_json_saver-members.html new file mode 100644 index 000000000..ab8664a7f --- /dev/null +++ b/web/html/doc/classop_1_1_coco_json_saver-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::CocoJsonSaver Member List
+
+
+ +

This is the complete list of members for op::CocoJsonSaver, including all inherited members.

+ + + + +
CocoJsonSaver(const std::string &filePathToSave, const PoseModel poseModel, const bool humanReadable=true, const int cocoJsonVariants=1, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body, const int cocoJsonVariant=0)op::CocoJsonSaverexplicit
record(const Array< float > &poseKeypoints, const Array< float > &poseScores, const std::string &imageName, const unsigned long long frameNumber)op::CocoJsonSaver
~CocoJsonSaver()op::CocoJsonSavervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_coco_json_saver.html b/web/html/doc/classop_1_1_coco_json_saver.html new file mode 100644 index 000000000..fccb44d47 --- /dev/null +++ b/web/html/doc/classop_1_1_coco_json_saver.html @@ -0,0 +1,256 @@ + + + + + + + +OpenPose: op::CocoJsonSaver Class Reference + + + + + + + + + + + + + +
op::CocoJsonSaver Class Reference
+
+
+ +

#include <cocoJsonSaver.hpp>

+ + + + + + + + +

+Public Member Functions

 CocoJsonSaver (const std::string &filePathToSave, const PoseModel poseModel, const bool humanReadable=true, const int cocoJsonVariants=1, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body, const int cocoJsonVariant=0)
 
virtual ~CocoJsonSaver ()
 
void record (const Array< float > &poseKeypoints, const Array< float > &poseScores, const std::string &imageName, const unsigned long long frameNumber)
 
+

Detailed Description

+

The CocoJsonSaver class creates a COCO validation json file with details about the processed images. It inherits from Recorder.

+ +

Definition at line 15 of file cocoJsonSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ CocoJsonSaver()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::CocoJsonSaver::CocoJsonSaver (const std::string & filePathToSave,
const PoseModel poseModel,
const bool humanReadable = true,
const int cocoJsonVariants = 1,
const CocoJsonFormat cocoJsonFormat = CocoJsonFormat::Body,
const int cocoJsonVariant = 0 
)
+
+explicit
+
+

This constructor of CocoJsonSaver extends the Recorder::Recorder(const std::string & filePathToSave) constructor.

Parameters
+ + +
filePathToSave: const std::string parameter with the final file path where the generated JSON file will be saved.
+
+
+ +
+
+ +

◆ ~CocoJsonSaver()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::CocoJsonSaver::~CocoJsonSaver ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ record()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::CocoJsonSaver::record (const Array< float > & poseKeypoints,
const Array< float > & poseScores,
const std::string & imageName,
const unsigned long long frameNumber 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
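A minimal usage sketch for the op::CocoJsonSaver API documented above (not part of the generated pages). The output path, image name and frame number are illustrative, <openpose/headers.hpp> is assumed as the umbrella include, and poseKeypoints/poseScores would come from the pose extractor.

    #include <openpose/headers.hpp>

    // Writes one entry per processed image into a COCO-style validation JSON file.
    void exportCocoJson(const op::Array<float>& poseKeypoints, const op::Array<float>& poseScores)
    {
        // Remaining constructor arguments keep their documented defaults
        // (humanReadable=true, cocoJsonVariants=1, CocoJsonFormat::Body, cocoJsonVariant=0).
        op::CocoJsonSaver cocoJsonSaver{"coco_results.json", op::PoseModel::BODY_25};
        // Call record() once per processed image.
        cocoJsonSaver.record(poseKeypoints, poseScores, "000000000001.jpg", 0ull);
    }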
+ + + + diff --git a/web/html/doc/classop_1_1_coco_json_saver.js b/web/html/doc/classop_1_1_coco_json_saver.js new file mode 100644 index 000000000..b60a17ca2 --- /dev/null +++ b/web/html/doc/classop_1_1_coco_json_saver.js @@ -0,0 +1,6 @@ +var classop_1_1_coco_json_saver = +[ + [ "CocoJsonSaver", "classop_1_1_coco_json_saver.html#a6d596768658b4b32430d3686be557e33", null ], + [ "~CocoJsonSaver", "classop_1_1_coco_json_saver.html#a8bbfab84a7816cb0f189f243246f744b", null ], + [ "record", "classop_1_1_coco_json_saver.html#a2c3777cb55d09ee3911d4ed69334b17f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_cv_mat_to_op_input-members.html b/web/html/doc/classop_1_1_cv_mat_to_op_input-members.html new file mode 100644 index 000000000..07016254f --- /dev/null +++ b/web/html/doc/classop_1_1_cv_mat_to_op_input-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::CvMatToOpInput Member List
+
+
+ +

This is the complete list of members for op::CvMatToOpInput, including all inherited members.

+ + + + +
createArray(const Matrix &inputData, const std::vector< double > &scaleInputToNetInputs, const std::vector< Point< int >> &netInputSizes) | op::CvMatToOpInput
CvMatToOpInput(const PoseModel poseModel=PoseModel::BODY_25, const bool gpuResize=false) | op::CvMatToOpInput
~CvMatToOpInput() | op::CvMatToOpInput | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_cv_mat_to_op_input.html b/web/html/doc/classop_1_1_cv_mat_to_op_input.html new file mode 100644 index 000000000..841dba7b1 --- /dev/null +++ b/web/html/doc/classop_1_1_cv_mat_to_op_input.html @@ -0,0 +1,211 @@ + + + + + + + +OpenPose: op::CvMatToOpInput Class Reference + + + + + + + + + + + + + +
op::CvMatToOpInput Class Reference
+
+
+ +

#include <cvMatToOpInput.hpp>

+ + + + + + + + +

+Public Member Functions

 CvMatToOpInput (const PoseModel poseModel=PoseModel::BODY_25, const bool gpuResize=false)
 
virtual ~CvMatToOpInput ()
 
std::vector< Array< float > > createArray (const Matrix &inputData, const std::vector< double > &scaleInputToNetInputs, const std::vector< Point< int >> &netInputSizes)
 
+

Detailed Description

+
+

Definition at line 9 of file cvMatToOpInput.hpp.

+

Constructor & Destructor Documentation

+ +

◆ CvMatToOpInput()

+ +
+
+ + + + + + + + + + + + + + + + + + +
op::CvMatToOpInput::CvMatToOpInput (const PoseModel poseModel = PoseModel::BODY_25,
const bool gpuResize = false 
)
+
+ +
+
+ +

◆ ~CvMatToOpInput()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::CvMatToOpInput::~CvMatToOpInput ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ createArray()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
std::vector<Array<float> > op::CvMatToOpInput::createArray (const MatrixinputData,
const std::vector< double > & scaleInputToNetInputs,
const std::vector< Point< int >> & netInputSizes 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
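A short sketch of the createArray() call documented above, assuming an op::Matrix frame obtained elsewhere; the scale and net input size values are illustrative and <openpose/headers.hpp> is assumed as the umbrella include.

    #include <openpose/headers.hpp>

    // Converts an op::Matrix frame into the float arrays fed to the pose network.
    std::vector<op::Array<float>> toNetInput(const op::Matrix& inputData)
    {
        op::CvMatToOpInput cvMatToOpInput{op::PoseModel::BODY_25, /*gpuResize*/ false};
        const std::vector<double> scaleInputToNetInputs{1.0};
        const std::vector<op::Point<int>> netInputSizes{op::Point<int>{656, 368}};
        return cvMatToOpInput.createArray(inputData, scaleInputToNetInputs, netInputSizes);
    }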
+ + + + diff --git a/web/html/doc/classop_1_1_cv_mat_to_op_input.js b/web/html/doc/classop_1_1_cv_mat_to_op_input.js new file mode 100644 index 000000000..2b8cc9fac --- /dev/null +++ b/web/html/doc/classop_1_1_cv_mat_to_op_input.js @@ -0,0 +1,6 @@ +var classop_1_1_cv_mat_to_op_input = +[ + [ "CvMatToOpInput", "classop_1_1_cv_mat_to_op_input.html#a449eacb6cce9678c3ae399c68a45a2e5", null ], + [ "~CvMatToOpInput", "classop_1_1_cv_mat_to_op_input.html#adbe9ae80914d9c1d224c1fe753519090", null ], + [ "createArray", "classop_1_1_cv_mat_to_op_input.html#ad7c70d7843d64ab0dce9a8a1d993e5b5", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_cv_mat_to_op_output-members.html b/web/html/doc/classop_1_1_cv_mat_to_op_output-members.html new file mode 100644 index 000000000..7fdfb8060 --- /dev/null +++ b/web/html/doc/classop_1_1_cv_mat_to_op_output-members.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::CvMatToOpOutput Member List
+
+
+ +

This is the complete list of members for op::CvMatToOpOutput, including all inherited members.

+ + + + + +
createArray(const Matrix &inputData, const double scaleInputToOutput, const Point< int > &outputResolution) | op::CvMatToOpOutput
CvMatToOpOutput(const bool gpuResize=false) | op::CvMatToOpOutput
getSharedParameters() | op::CvMatToOpOutput
~CvMatToOpOutput() | op::CvMatToOpOutput | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_cv_mat_to_op_output.html b/web/html/doc/classop_1_1_cv_mat_to_op_output.html new file mode 100644 index 000000000..19093eea9 --- /dev/null +++ b/web/html/doc/classop_1_1_cv_mat_to_op_output.html @@ -0,0 +1,220 @@ + + + + + + + +OpenPose: op::CvMatToOpOutput Class Reference + + + + + + + + + + + + + +
op::CvMatToOpOutput Class Reference
+
+
+ +

#include <cvMatToOpOutput.hpp>

+ + + + + + + + + + +

+Public Member Functions

 CvMatToOpOutput (const bool gpuResize=false)
 
virtual ~CvMatToOpOutput ()
 
std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long > > getSharedParameters ()
 
Array< float > createArray (const Matrix &inputData, const double scaleInputToOutput, const Point< int > &outputResolution)
 
+

Detailed Description

+
+

Definition at line 8 of file cvMatToOpOutput.hpp.

+

Constructor & Destructor Documentation

+ +

◆ CvMatToOpOutput()

+ +
+
+ + + + + + + + +
op::CvMatToOpOutput::CvMatToOpOutput (const bool gpuResize = false)
+
+ +
+
+ +

◆ ~CvMatToOpOutput()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::CvMatToOpOutput::~CvMatToOpOutput ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ createArray()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
Array<float> op::CvMatToOpOutput::createArray (const MatrixinputData,
const double scaleInputToOutput,
const Point< int > & outputResolution 
)
+
+ +
+
+ +

◆ getSharedParameters()

+ +
+
+ + + + + + + +
std::tuple<std::shared_ptr<float*>, std::shared_ptr<bool>, std::shared_ptr<unsigned long long> > op::CvMatToOpOutput::getSharedParameters ()
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
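A companion sketch for op::CvMatToOpOutput::createArray(), documented above; the scale and output resolution are illustrative values and <openpose/headers.hpp> is assumed as the umbrella include.

    #include <openpose/headers.hpp>

    // Produces the float image buffer that the keypoint renderers later draw on.
    op::Array<float> toOutputArray(const op::Matrix& inputData)
    {
        op::CvMatToOpOutput cvMatToOpOutput{/*gpuResize*/ false};
        const double scaleInputToOutput = 1.0;               // illustrative value
        const op::Point<int> outputResolution{1280, 720};    // illustrative value
        return cvMatToOpOutput.createArray(inputData, scaleInputToOutput, outputResolution);
    }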
+ + + + diff --git a/web/html/doc/classop_1_1_cv_mat_to_op_output.js b/web/html/doc/classop_1_1_cv_mat_to_op_output.js new file mode 100644 index 000000000..77b84edb7 --- /dev/null +++ b/web/html/doc/classop_1_1_cv_mat_to_op_output.js @@ -0,0 +1,7 @@ +var classop_1_1_cv_mat_to_op_output = +[ + [ "CvMatToOpOutput", "classop_1_1_cv_mat_to_op_output.html#a054c4dd7892ad540405413b071459b42", null ], + [ "~CvMatToOpOutput", "classop_1_1_cv_mat_to_op_output.html#abc8953e080adc30fa52345322ae8445a", null ], + [ "createArray", "classop_1_1_cv_mat_to_op_output.html#ad15a20bf40389e7dea888e982bd64e8b", null ], + [ "getSharedParameters", "classop_1_1_cv_mat_to_op_output.html#ad0ac01a9866ea00c873da7e2552c5b08", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_datum_producer-members.html b/web/html/doc/classop_1_1_datum_producer-members.html new file mode 100644 index 000000000..49a6c0b16 --- /dev/null +++ b/web/html/doc/classop_1_1_datum_producer-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::DatumProducer< TDatum > Member List
+
+
+ +

This is the complete list of members for op::DatumProducer< TDatum >, including all inherited members.

+ + + + +
checkIfRunningAndGetDatum() | op::DatumProducer< TDatum >
DatumProducer(const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long frameFirst=0, const unsigned long long frameStep=1, const unsigned long long frameLast=std::numeric_limits< unsigned long long >::max(), const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr) | op::DatumProducer< TDatum > | explicit
~DatumProducer() | op::DatumProducer< TDatum > | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_datum_producer.html b/web/html/doc/classop_1_1_datum_producer.html new file mode 100644 index 000000000..afe0939b9 --- /dev/null +++ b/web/html/doc/classop_1_1_datum_producer.html @@ -0,0 +1,229 @@ + + + + + + + +OpenPose: op::DatumProducer< TDatum > Class Template Reference + + + + + + + + + + + + + +
op::DatumProducer< TDatum > Class Template Reference
+
+
+ +

#include <datumProducer.hpp>

+ + + + + + + + +

+Public Member Functions

 DatumProducer (const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long frameFirst=0, const unsigned long long frameStep=1, const unsigned long long frameLast=std::numeric_limits< unsigned long long >::max(), const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr)
 
virtual ~DatumProducer ()
 
std::pair< bool, std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > > checkIfRunningAndGetDatum ()
 
+

Detailed Description

+

template<typename TDatum>
+class op::DatumProducer< TDatum >

+ + +

Definition at line 14 of file datumProducer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ DatumProducer()

+ +
+
+
+template<typename TDatum >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::DatumProducer< TDatum >::DatumProducer (const std::shared_ptr< Producer > & producerSharedPtr,
const unsigned long long frameFirst = 0,
const unsigned long long frameStep = 1,
const unsigned long long frameLast = std::numeric_limits<unsigned long long>::max(),
const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> & videoSeekSharedPtr = nullptr 
)
+
+explicit
+
+ +

Definition at line 68 of file datumProducer.hpp.

+ +
+
+ +

◆ ~DatumProducer()

+ +
+
+
+template<typename TDatum >
+ + + + + +
+ + + + +
op::DatumProducer< TDatum >::~DatumProducer
+
+virtual
+
+ +

Definition at line 92 of file datumProducer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ checkIfRunningAndGetDatum()

+ +
+
+
+template<typename TDatum >
+ + + + +
std::pair< bool, std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > > op::DatumProducer< TDatum >::checkIfRunningAndGetDatum
+
+ +

Definition at line 97 of file datumProducer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
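A sketch of the checkIfRunningAndGetDatum() loop based on the signatures documented above. The producer is assumed to be an already-created std::shared_ptr<op::Producer> (e.g., webcam or video), and the meaning of the returned pair is inferred from the member names, so treat it as an assumption.

    #include <openpose/headers.hpp>

    void readAllFrames(const std::shared_ptr<op::Producer>& producerSharedPtr)
    {
        op::DatumProducer<op::Datum> datumProducer{producerSharedPtr};
        while (true)
        {
            // pair.first: whether the producer is still running; pair.second: new datums (may be empty).
            const auto runningAndDatums = datumProducer.checkIfRunningAndGetDatum();
            if (!runningAndDatums.first)
                break;
            // ... pass runningAndDatums.second to the rest of the pipeline ...
        }
    }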
+ + + + diff --git a/web/html/doc/classop_1_1_datum_producer.js b/web/html/doc/classop_1_1_datum_producer.js new file mode 100644 index 000000000..d58359867 --- /dev/null +++ b/web/html/doc/classop_1_1_datum_producer.js @@ -0,0 +1,6 @@ +var classop_1_1_datum_producer = +[ + [ "DatumProducer", "classop_1_1_datum_producer.html#a4d52ee6961e2c5c9564f49d203a2865e", null ], + [ "~DatumProducer", "classop_1_1_datum_producer.html#ad12f3202a265c989430d15bf7476a326", null ], + [ "checkIfRunningAndGetDatum", "classop_1_1_datum_producer.html#a39da4822705d23ca7e600b69f39e69be", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_cpu_renderer-members.html b/web/html/doc/classop_1_1_face_cpu_renderer-members.html new file mode 100644 index 000000000..225fb8828 --- /dev/null +++ b/web/html/doc/classop_1_1_face_cpu_renderer-members.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::FaceCpuRenderer Member List
+
+
+ +

This is the complete list of members for op::FaceCpuRenderer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
DELETE_COPY(FaceCpuRenderer)op::FaceCpuRenderer
FaceCpuRenderer(const float renderThreshold, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP)op::FaceCpuRenderer
getAlphaHeatMap() constop::Renderer
getAlphaKeypoint() constop::Renderer
getBlendOriginalFrame() constop::Renderer
getShowGooglyEyes() constop::Renderer
increaseElementToRender(const int increment)op::Renderer
initializationOnThread()op::FaceRendererinlinevirtual
mBlendOriginalFrameop::Rendererprotected
mRenderThresholdop::Rendererprotected
mShowGooglyEyesop::Rendererprotected
Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::Rendererexplicit
renderFace(Array< float > &outputData, const Array< float > &faceKeypoints, const float scaleInputToOutput)op::FaceRenderer
renderFaceInherited(Array< float > &outputData, const Array< float > &faceKeypoints)op::FaceCpuRenderervirtual
setAlphaHeatMap(const float alphaHeatMap)op::Renderer
setAlphaKeypoint(const float alphaKeypoint)op::Renderer
setBlendOriginalFrame(const bool blendOriginalFrame)op::Renderer
setElementToRender(const int elementToRender)op::Renderer
setElementToRender(const ElementToRender elementToRender)op::Renderer
setShowGooglyEyes(const bool showGooglyEyes)op::Renderer
spElementToRenderop::Rendererprotected
spNumberElementsToRenderop::Rendererprotected
~FaceCpuRenderer()op::FaceCpuRenderervirtual
~FaceRenderer()op::FaceRendererinlinevirtual
~Renderer()op::Renderervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_cpu_renderer.html b/web/html/doc/classop_1_1_face_cpu_renderer.html new file mode 100644 index 000000000..28cd53c00 --- /dev/null +++ b/web/html/doc/classop_1_1_face_cpu_renderer.html @@ -0,0 +1,299 @@ + + + + + + + +OpenPose: op::FaceCpuRenderer Class Reference + + + + + + + + + + + + + +
op::FaceCpuRenderer Class Reference
+
+
+ +

#include <faceCpuRenderer.hpp>

+
+Inheritance diagram for op::FaceCpuRenderer:
+
+
+ + +op::Renderer +op::FaceRenderer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 FaceCpuRenderer (const float renderThreshold, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP)
 
virtual ~FaceCpuRenderer ()
 
void renderFaceInherited (Array< float > &outputData, const Array< float > &faceKeypoints)
 
 DELETE_COPY (FaceCpuRenderer)
 
- Public Member Functions inherited from op::Renderer
 Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~Renderer ()
 
void increaseElementToRender (const int increment)
 
void setElementToRender (const int elementToRender)
 
void setElementToRender (const ElementToRender elementToRender)
 
bool getBlendOriginalFrame () const
 
void setBlendOriginalFrame (const bool blendOriginalFrame)
 
float getAlphaKeypoint () const
 
void setAlphaKeypoint (const float alphaKeypoint)
 
float getAlphaHeatMap () const
 
void setAlphaHeatMap (const float alphaHeatMap)
 
bool getShowGooglyEyes () const
 
void setShowGooglyEyes (const bool showGooglyEyes)
 
- Public Member Functions inherited from op::FaceRenderer
virtual ~FaceRenderer ()
 
virtual void initializationOnThread ()
 
void renderFace (Array< float > &outputData, const Array< float > &faceKeypoints, const float scaleInputToOutput)
 
+ + + + + + + + + + + + +

+Additional Inherited Members

- Protected Attributes inherited from op::Renderer
const float mRenderThreshold
 
std::atomic< bool > mBlendOriginalFrame
 
std::shared_ptr< std::atomic< unsigned int > > spElementToRender
 
std::shared_ptr< const unsigned int > spNumberElementsToRender
 
std::atomic< bool > mShowGooglyEyes
 
+

Detailed Description

+
+

Definition at line 11 of file faceCpuRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ FaceCpuRenderer()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::FaceCpuRenderer::FaceCpuRenderer (const float renderThreshold,
const float alphaKeypoint = FACE_DEFAULT_ALPHA_KEYPOINT,
const float alphaHeatMap = FACE_DEFAULT_ALPHA_HEAT_MAP 
)
+
+ +
+
+ +

◆ ~FaceCpuRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FaceCpuRenderer::~FaceCpuRenderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ DELETE_COPY()

+ +
+
+ + + + + + + + +
op::FaceCpuRenderer::DELETE_COPY (FaceCpuRenderer )
+
+ +
+
+ +

◆ renderFaceInherited()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::FaceCpuRenderer::renderFaceInherited (Array< float > & outputData,
const Array< float > & faceKeypoints 
)
+
+virtual
+
+ +

Implements op::FaceRenderer.

+ +
+
+
The documentation for this class was generated from the following file: +
+
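A brief CPU rendering sketch using the public renderFace() inherited from op::FaceRenderer; the render threshold and scale are illustrative values, and <openpose/headers.hpp> is assumed as the umbrella include.

    #include <openpose/headers.hpp>

    // Draws the detected face keypoints on top of the output buffer (CPU implementation).
    void drawFaces(op::Array<float>& outputData, const op::Array<float>& faceKeypoints)
    {
        // Alpha values keep their documented FACE_DEFAULT_* defaults.
        op::FaceCpuRenderer faceCpuRenderer{/*renderThreshold*/ 0.4f};
        faceCpuRenderer.renderFace(outputData, faceKeypoints, /*scaleInputToOutput*/ 1.f);
    }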
+ + + + diff --git a/web/html/doc/classop_1_1_face_cpu_renderer.js b/web/html/doc/classop_1_1_face_cpu_renderer.js new file mode 100644 index 000000000..25b4db622 --- /dev/null +++ b/web/html/doc/classop_1_1_face_cpu_renderer.js @@ -0,0 +1,7 @@ +var classop_1_1_face_cpu_renderer = +[ + [ "FaceCpuRenderer", "classop_1_1_face_cpu_renderer.html#afb0dcfff75c4a89d5971d7b0bbd0b51b", null ], + [ "~FaceCpuRenderer", "classop_1_1_face_cpu_renderer.html#a5c5e1e9b016bd33b5740beb04fc0fb49", null ], + [ "DELETE_COPY", "classop_1_1_face_cpu_renderer.html#a233f2a83930d07e4d420b43c8a660f32", null ], + [ "renderFaceInherited", "classop_1_1_face_cpu_renderer.html#aa2f7c7f0a5a8df2dbb94c8a01fa41e2a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_cpu_renderer.png b/web/html/doc/classop_1_1_face_cpu_renderer.png new file mode 100644 index 000000000..d38541678 Binary files /dev/null and b/web/html/doc/classop_1_1_face_cpu_renderer.png differ diff --git a/web/html/doc/classop_1_1_face_detector-members.html b/web/html/doc/classop_1_1_face_detector-members.html new file mode 100644 index 000000000..de8930da8 --- /dev/null +++ b/web/html/doc/classop_1_1_face_detector-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::FaceDetector Member List
+
+
+ +

This is the complete list of members for op::FaceDetector, including all inherited members.

+ + + + +
detectFaces(const Array< float > &poseKeypoints) const | op::FaceDetector
FaceDetector(const PoseModel poseModel) | op::FaceDetector | explicit
~FaceDetector() | op::FaceDetector | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_detector.html b/web/html/doc/classop_1_1_face_detector.html new file mode 100644 index 000000000..ff8cf8ac5 --- /dev/null +++ b/web/html/doc/classop_1_1_face_detector.html @@ -0,0 +1,193 @@ + + + + + + + +OpenPose: op::FaceDetector Class Reference + + + + + + + + + + + + + +
op::FaceDetector Class Reference
+
+
+ +

#include <faceDetector.hpp>

+ + + + + + + + +

+Public Member Functions

 FaceDetector (const PoseModel poseModel)
 
virtual ~FaceDetector ()
 
std::vector< Rectangle< float > > detectFaces (const Array< float > &poseKeypoints) const
 
+

Detailed Description

+
+

Definition at line 9 of file faceDetector.hpp.

+

Constructor & Destructor Documentation

+ +

◆ FaceDetector()

+ +
+
+ + + + + +
+ + + + + + + + +
op::FaceDetector::FaceDetector (const PoseModel poseModel)
+
+explicit
+
+ +
+
+ +

◆ ~FaceDetector()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FaceDetector::~FaceDetector ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ detectFaces()

+ +
+
+ + + + + + + + +
std::vector<Rectangle<float> > op::FaceDetector::detectFaces (const Array< float > & poseKeypoints) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
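A sketch of detectFaces(), which estimates face bounding boxes from previously extracted body keypoints; the pose model choice is an assumption.

    #include <openpose/headers.hpp>

    std::vector<op::Rectangle<float>> facesFromPose(const op::Array<float>& poseKeypoints)
    {
        // The pose model must match the one used by the pose extractor.
        const op::FaceDetector faceDetector{op::PoseModel::BODY_25};
        return faceDetector.detectFaces(poseKeypoints);
    }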
+ + + + diff --git a/web/html/doc/classop_1_1_face_detector.js b/web/html/doc/classop_1_1_face_detector.js new file mode 100644 index 000000000..d636697bc --- /dev/null +++ b/web/html/doc/classop_1_1_face_detector.js @@ -0,0 +1,6 @@ +var classop_1_1_face_detector = +[ + [ "FaceDetector", "classop_1_1_face_detector.html#adfeab6977c93b7bef66c1dfbcf6f8150", null ], + [ "~FaceDetector", "classop_1_1_face_detector.html#a66ff3806053a5f86d01724f5029e0859", null ], + [ "detectFaces", "classop_1_1_face_detector.html#a6db84197d64104da0c26f49ecf8facd1", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_detector_open_c_v-members.html b/web/html/doc/classop_1_1_face_detector_open_c_v-members.html new file mode 100644 index 000000000..9224e60fb --- /dev/null +++ b/web/html/doc/classop_1_1_face_detector_open_c_v-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::FaceDetectorOpenCV Member List
+
+
+ +

This is the complete list of members for op::FaceDetectorOpenCV, including all inherited members.

+ + + + +
detectFaces(const Matrix &inputData) | op::FaceDetectorOpenCV
FaceDetectorOpenCV(const std::string &modelFolder) | op::FaceDetectorOpenCV | explicit
~FaceDetectorOpenCV() | op::FaceDetectorOpenCV | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_detector_open_c_v.html b/web/html/doc/classop_1_1_face_detector_open_c_v.html new file mode 100644 index 000000000..5064c456b --- /dev/null +++ b/web/html/doc/classop_1_1_face_detector_open_c_v.html @@ -0,0 +1,193 @@ + + + + + + + +OpenPose: op::FaceDetectorOpenCV Class Reference + + + + + + + + + + + + + +
op::FaceDetectorOpenCV Class Reference
+
+
+ +

#include <faceDetectorOpenCV.hpp>

+ + + + + + + + +

+Public Member Functions

 FaceDetectorOpenCV (const std::string &modelFolder)
 
virtual ~FaceDetectorOpenCV ()
 
std::vector< Rectangle< float > > detectFaces (const Matrix &inputData)
 
+

Detailed Description

+
+

Definition at line 8 of file faceDetectorOpenCV.hpp.

+

Constructor & Destructor Documentation

+ +

◆ FaceDetectorOpenCV()

+ +
+
+ + + + + +
+ + + + + + + + +
op::FaceDetectorOpenCV::FaceDetectorOpenCV (const std::string & modelFolder)
+
+explicit
+
+ +
+
+ +

◆ ~FaceDetectorOpenCV()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FaceDetectorOpenCV::~FaceDetectorOpenCV ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ detectFaces()

+ +
+
+ + + + + + + + +
std::vector<Rectangle<float> > op::FaceDetectorOpenCV::detectFaces (const MatrixinputData)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
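A sketch of the image-based detector, which works directly on the frame instead of on body keypoints; the model folder path is a placeholder.

    #include <openpose/headers.hpp>

    std::vector<op::Rectangle<float>> facesFromImage(const op::Matrix& inputData)
    {
        // "models/" stands in for the OpenPose model folder on your machine.
        op::FaceDetectorOpenCV faceDetectorOpenCV{"models/"};
        return faceDetectorOpenCV.detectFaces(inputData);
    }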
+ + + + diff --git a/web/html/doc/classop_1_1_face_detector_open_c_v.js b/web/html/doc/classop_1_1_face_detector_open_c_v.js new file mode 100644 index 000000000..45217817a --- /dev/null +++ b/web/html/doc/classop_1_1_face_detector_open_c_v.js @@ -0,0 +1,6 @@ +var classop_1_1_face_detector_open_c_v = +[ + [ "FaceDetectorOpenCV", "classop_1_1_face_detector_open_c_v.html#a8c4d55863b726448762a142fa91bb69d", null ], + [ "~FaceDetectorOpenCV", "classop_1_1_face_detector_open_c_v.html#a88eae893ff7f7664243cadf0f84500da", null ], + [ "detectFaces", "classop_1_1_face_detector_open_c_v.html#aba2826bad3f87ce3967e1f999f941fc5", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_extractor_caffe-members.html b/web/html/doc/classop_1_1_face_extractor_caffe-members.html new file mode 100644 index 000000000..12843237e --- /dev/null +++ b/web/html/doc/classop_1_1_face_extractor_caffe-members.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::FaceExtractorCaffe Member List
+
+
+ +

This is the complete list of members for op::FaceExtractorCaffe, including all inherited members.

+ + + + + + + + + + + + + + + + + + + +
FaceExtractorCaffe(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true)op::FaceExtractorCaffe
FaceExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)op::FaceExtractorNetexplicit
forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)op::FaceExtractorCaffevirtual
getEnabled() constop::FaceExtractorNet
getFaceKeypoints() constop::FaceExtractorNet
getHeatMaps() constop::FaceExtractorNet
initializationOnThread()op::FaceExtractorNet
mEnabledop::FaceExtractorNetprotected
mFaceImageCropop::FaceExtractorNetprotected
mFaceKeypointsop::FaceExtractorNetprotected
mHeatMapsop::FaceExtractorNetprotected
mHeatMapScaleModeop::FaceExtractorNetprotected
mHeatMapTypesop::FaceExtractorNetprotected
mNetOutputSizeop::FaceExtractorNetprotected
netInitializationOnThread()op::FaceExtractorCaffevirtual
setEnabled(const bool enabled)op::FaceExtractorNet
~FaceExtractorCaffe()op::FaceExtractorCaffevirtual
~FaceExtractorNet()op::FaceExtractorNetvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_extractor_caffe.html b/web/html/doc/classop_1_1_face_extractor_caffe.html new file mode 100644 index 000000000..300040ae8 --- /dev/null +++ b/web/html/doc/classop_1_1_face_extractor_caffe.html @@ -0,0 +1,332 @@ + + + + + + + +OpenPose: op::FaceExtractorCaffe Class Reference + + + + + + + + + + + + + +
op::FaceExtractorCaffe Class Reference
+
+
+ +

#include <faceExtractorCaffe.hpp>

+
+Inheritance diagram for op::FaceExtractorCaffe:
+
+
+ + +op::FaceExtractorNet + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 FaceExtractorCaffe (const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true)
 
virtual ~FaceExtractorCaffe ()
 
void netInitializationOnThread ()
 
void forwardPass (const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)
 
- Public Member Functions inherited from op::FaceExtractorNet
 FaceExtractorNet (const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)
 
virtual ~FaceExtractorNet ()
 
void initializationOnThread ()
 
Array< float > getHeatMaps () const
 
Array< float > getFaceKeypoints () const
 
bool getEnabled () const
 
void setEnabled (const bool enabled)
 
+ + + + + + + + + + + + + + + + +

+Additional Inherited Members

- Protected Attributes inherited from op::FaceExtractorNet
const Point< int > mNetOutputSize
 
Array< float > mFaceImageCrop
 
Array< float > mFaceKeypoints
 
Array< float > mHeatMaps
 
const ScaleMode mHeatMapScaleMode
 
const std::vector< HeatMapTypemHeatMapTypes
 
std::atomic< bool > mEnabled
 
+

Detailed Description

+

Face keypoint extractor class for the Caffe framework.

+ +

Definition at line 13 of file faceExtractorCaffe.hpp.

+

Constructor & Destructor Documentation

+ +

◆ FaceExtractorCaffe()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::FaceExtractorCaffe::FaceExtractorCaffe (const Point< int > & netInputSize,
const Point< int > & netOutputSize,
const std::string & modelFolder,
const int gpuId,
const std::vector< HeatMapType > & heatMapTypes = {},
const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect,
const bool enableGoogleLogging = true 
)
+
+

Constructor of the FaceExtractorCaffe class.

Parameters
+ + + +
netInputSize: Size at which the cropped image (where the face is located) is resized.
netOutputSize: Size of the final results. At the moment, it must be equal to netInputSize.
+
+
+ +
+
+ +

◆ ~FaceExtractorCaffe()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FaceExtractorCaffe::~FaceExtractorCaffe ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ forwardPass()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::FaceExtractorCaffe::forwardPass (const std::vector< Rectangle< float >> & faceRectangles,
const MatrixinputData 
)
+
+virtual
+
+

This function extracts the face keypoints for each detected face in the image.

Parameters
+ + + +
faceRectangles: Location of the faces in the image. It is a variable-length std::vector, where each index corresponds to a different person in the image. Each element is an op::Rectangle<float> (similar to cv::Rect, but for floating-point values) with the position of that face, or 0,0,0,0 if the face is missing, e.g., if a specific person has only half of the body inside the image.
inputData: Original image in op::Matrix format with BGR channel order.
+
+
+ +

Implements op::FaceExtractorNet.

+ +
+
+ +

◆ netInitializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
void op::FaceExtractorCaffe::netInitializationOnThread ()
+
+virtual
+
+

This function must be called before using any other function. It must also be called inside the thread in which the functions are going to be used.

+ +

Implements op::FaceExtractorNet.

+ +
+
+
The documentation for this class was generated from the following file: +
+
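A sketch of the typical detect-then-extract flow implied by forwardPass() above: face rectangles come from op::FaceDetector, then the Caffe extractor returns the face keypoints. Model folder, GPU id and network size are placeholders.

    #include <openpose/headers.hpp>

    op::Array<float> extractFaceKeypoints(const op::Matrix& inputData, const op::Array<float>& poseKeypoints)
    {
        const op::Point<int> netInputSize{368, 368};
        // netOutputSize must equal netInputSize, as noted in the constructor documentation.
        op::FaceExtractorCaffe faceExtractorCaffe{netInputSize, netInputSize, "models/", /*gpuId*/ 0};
        faceExtractorCaffe.initializationOnThread();   // must run in the thread that will use it

        const op::FaceDetector faceDetector{op::PoseModel::BODY_25};
        const auto faceRectangles = faceDetector.detectFaces(poseKeypoints);

        faceExtractorCaffe.forwardPass(faceRectangles, inputData);
        return faceExtractorCaffe.getFaceKeypoints();
    }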
+ + + + diff --git a/web/html/doc/classop_1_1_face_extractor_caffe.js b/web/html/doc/classop_1_1_face_extractor_caffe.js new file mode 100644 index 000000000..6aa69a913 --- /dev/null +++ b/web/html/doc/classop_1_1_face_extractor_caffe.js @@ -0,0 +1,7 @@ +var classop_1_1_face_extractor_caffe = +[ + [ "FaceExtractorCaffe", "classop_1_1_face_extractor_caffe.html#adedc0e50f2eacd8e02c5bd8b0563b2ee", null ], + [ "~FaceExtractorCaffe", "classop_1_1_face_extractor_caffe.html#a4450e656f21a8cb7f1d9bf5f545012f1", null ], + [ "forwardPass", "classop_1_1_face_extractor_caffe.html#ad78fc3e86428d89a513e8e3be10fc47f", null ], + [ "netInitializationOnThread", "classop_1_1_face_extractor_caffe.html#add2a24d9bd5e03ff90034239e90523c2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_extractor_caffe.png b/web/html/doc/classop_1_1_face_extractor_caffe.png new file mode 100644 index 000000000..a8663b483 Binary files /dev/null and b/web/html/doc/classop_1_1_face_extractor_caffe.png differ diff --git a/web/html/doc/classop_1_1_face_extractor_net-members.html b/web/html/doc/classop_1_1_face_extractor_net-members.html new file mode 100644 index 000000000..0e05cfcf0 --- /dev/null +++ b/web/html/doc/classop_1_1_face_extractor_net-members.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::FaceExtractorNet Member List
+
+
+ +

This is the complete list of members for op::FaceExtractorNet, including all inherited members.

+ + + + + + + + + + + + + + + + + +
FaceExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)op::FaceExtractorNetexplicit
forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)=0op::FaceExtractorNetpure virtual
getEnabled() constop::FaceExtractorNet
getFaceKeypoints() constop::FaceExtractorNet
getHeatMaps() constop::FaceExtractorNet
initializationOnThread()op::FaceExtractorNet
mEnabledop::FaceExtractorNetprotected
mFaceImageCropop::FaceExtractorNetprotected
mFaceKeypointsop::FaceExtractorNetprotected
mHeatMapsop::FaceExtractorNetprotected
mHeatMapScaleModeop::FaceExtractorNetprotected
mHeatMapTypesop::FaceExtractorNetprotected
mNetOutputSizeop::FaceExtractorNetprotected
netInitializationOnThread()=0op::FaceExtractorNetprotectedpure virtual
setEnabled(const bool enabled)op::FaceExtractorNet
~FaceExtractorNet()op::FaceExtractorNetvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_extractor_net.html b/web/html/doc/classop_1_1_face_extractor_net.html new file mode 100644 index 000000000..633369efc --- /dev/null +++ b/web/html/doc/classop_1_1_face_extractor_net.html @@ -0,0 +1,578 @@ + + + + + + + +OpenPose: op::FaceExtractorNet Class Reference + + + + + + + + + + + + + +
op::FaceExtractorNet Class Reference (abstract)
+
+
+ +

#include <faceExtractorNet.hpp>

+
+Inheritance diagram for op::FaceExtractorNet:
+
+
+ + +op::FaceExtractorCaffe + +
+ + + + + + + + + + + + + + + + + + +

+Public Member Functions

 FaceExtractorNet (const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)
 
virtual ~FaceExtractorNet ()
 
void initializationOnThread ()
 
virtual void forwardPass (const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)=0
 
Array< float > getHeatMaps () const
 
Array< float > getFaceKeypoints () const
 
bool getEnabled () const
 
void setEnabled (const bool enabled)
 
+ + + +

+Protected Member Functions

virtual void netInitializationOnThread ()=0
 
+ + + + + + + + + + + + + + + +

+Protected Attributes

const Point< int > mNetOutputSize
 
Array< float > mFaceImageCrop
 
Array< float > mFaceKeypoints
 
Array< float > mHeatMaps
 
const ScaleMode mHeatMapScaleMode
 
const std::vector< HeatMapTypemHeatMapTypes
 
std::atomic< bool > mEnabled
 
+

Detailed Description

+

Face keypoint extractor class.

+ +

Definition at line 13 of file faceExtractorNet.hpp.

+

Constructor & Destructor Documentation

+ +

◆ FaceExtractorNet()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::FaceExtractorNet::FaceExtractorNet (const Point< int > & netInputSize,
const Point< int > & netOutputSize,
const std::vector< HeatMapType > & heatMapTypes = {},
const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect 
)
+
+explicit
+
+

Constructor of the FaceExtractorNet class.

Parameters
+ + + +
netInputSize: Size at which the cropped image (where the face is located) is resized.
netOutputSize: Size of the final results. At the moment, it must be equal to netInputSize.
+
+
+ +
+
+ +

◆ ~FaceExtractorNet()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FaceExtractorNet::~FaceExtractorNet ()
+
+virtual
+
+

Virtual destructor of the FaceExtractorNet class. Required to allow inheritance.

+ +
+
+

Member Function Documentation

+ +

◆ forwardPass()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::FaceExtractorNet::forwardPass (const std::vector< Rectangle< float >> & faceRectangles,
const MatrixinputData 
)
+
+pure virtual
+
+

This function extracts the face keypoints for each detected face in the image.

Parameters
+ + + +
faceRectangles: Location of the faces in the image. It is a variable-length std::vector, where each index corresponds to a different person in the image. Each element is an op::Rectangle<float> (similar to cv::Rect, but for floating-point values) with the position of that face, or 0,0,0,0 if the face is missing, e.g., if a specific person has only half of the body inside the image.
inputData: Original image in op::Matrix format with BGR channel order.
+
+
+ +

Implemented in op::FaceExtractorCaffe.

+ +
+
+ +

◆ getEnabled()

+ +
+
+ + + + + + + +
bool op::FaceExtractorNet::getEnabled () const
+
+ +
+
+ +

◆ getFaceKeypoints()

+ +
+
+ + + + + + + +
Array<float> op::FaceExtractorNet::getFaceKeypoints () const
+
+

This function returns the face keypoints. VERY IMPORTANT: use getFaceKeypoints().clone() if the keypoints are going to be edited in a different thread.

Returns
An Array with all the face keypoints. It follows the pose structure, i.e., the first dimension corresponds to all the people in the image, the second to each specific keypoint, and the third one to (x, y, score).
+ +
+
+ +

◆ getHeatMaps()

+ +
+
+ + + + + + + +
Array<float> op::FaceExtractorNet::getHeatMaps () const
+
+ +
+
+ +

◆ initializationOnThread()

+ +
+
+ + + + + + + +
void op::FaceExtractorNet::initializationOnThread ()
+
+

This function must be called before using any other function. It must also be called inside the thread in which the functions are going to be used.

+ +
+
+ +

◆ netInitializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::FaceExtractorNet::netInitializationOnThread ()
+
+protectedpure virtual
+
+ +

Implemented in op::FaceExtractorCaffe.

+ +
+
+ +

◆ setEnabled()

+ +
+
+ + + + + + + + +
void op::FaceExtractorNet::setEnabled (const bool enabled)
+
+ +
+
+

Member Data Documentation

+ +

◆ mEnabled

+ +
+
+ + + + + +
+ + + + +
std::atomic<bool> op::FaceExtractorNet::mEnabled
+
+protected
+
+ +

Definition at line 71 of file faceExtractorNet.hpp.

+ +
+
+ +

◆ mFaceImageCrop

+ +
+
+ + + + + +
+ + + + +
Array<float> op::FaceExtractorNet::mFaceImageCrop
+
+protected
+
+ +

Definition at line 64 of file faceExtractorNet.hpp.

+ +
+
+ +

◆ mFaceKeypoints

+ +
+
+ + + + + +
+ + + + +
Array<float> op::FaceExtractorNet::mFaceKeypoints
+
+protected
+
+ +

Definition at line 65 of file faceExtractorNet.hpp.

+ +
+
+ +

◆ mHeatMaps

+ +
+
+ + + + + +
+ + + + +
Array<float> op::FaceExtractorNet::mHeatMaps
+
+protected
+
+ +

Definition at line 67 of file faceExtractorNet.hpp.

+ +
+
+ +

◆ mHeatMapScaleMode

+ +
+
+ + + + + +
+ + + + +
const ScaleMode op::FaceExtractorNet::mHeatMapScaleMode
+
+protected
+
+ +

Definition at line 68 of file faceExtractorNet.hpp.

+ +
+
+ +

◆ mHeatMapTypes

+ +
+
+ + + + + +
+ + + + +
const std::vector<HeatMapType> op::FaceExtractorNet::mHeatMapTypes
+
+protected
+
+ +

Definition at line 69 of file faceExtractorNet.hpp.

+ +
+
+ +

◆ mNetOutputSize

+ +
+
+ + + + + +
+ + + + +
const Point<int> op::FaceExtractorNet::mNetOutputSize
+
+protected
+
+ +

Definition at line 63 of file faceExtractorNet.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
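A small sketch of the base-class interface shared by all face extractors (any concrete subclass, e.g. op::FaceExtractorCaffe, can be passed in); the clone() call follows the getFaceKeypoints() note above.

    #include <openpose/headers.hpp>

    op::Array<float> copyKeypointsForAnotherThread(op::FaceExtractorNet& faceExtractorNet)
    {
        if (!faceExtractorNet.getEnabled())
            faceExtractorNet.setEnabled(true);
        // clone() the keypoints if another thread is going to edit them.
        return faceExtractorNet.getFaceKeypoints().clone();
    }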
+ + + + diff --git a/web/html/doc/classop_1_1_face_extractor_net.js b/web/html/doc/classop_1_1_face_extractor_net.js new file mode 100644 index 000000000..b19cec72d --- /dev/null +++ b/web/html/doc/classop_1_1_face_extractor_net.js @@ -0,0 +1,19 @@ +var classop_1_1_face_extractor_net = +[ + [ "FaceExtractorNet", "classop_1_1_face_extractor_net.html#a125b052c75a5e39890e140e962b37838", null ], + [ "~FaceExtractorNet", "classop_1_1_face_extractor_net.html#a4cd488333e450cfbb19aab8910e7f138", null ], + [ "forwardPass", "classop_1_1_face_extractor_net.html#a6c5d14660690396edb1a939b11962a68", null ], + [ "getEnabled", "classop_1_1_face_extractor_net.html#a18911596f5ba442d50718f54a3d64fe0", null ], + [ "getFaceKeypoints", "classop_1_1_face_extractor_net.html#aee0d7b760214c805466ae515938b5190", null ], + [ "getHeatMaps", "classop_1_1_face_extractor_net.html#a1ba97136b2cc006cd066e3e950f0c179", null ], + [ "initializationOnThread", "classop_1_1_face_extractor_net.html#a6d6d5d6bd912bb940058a2b958aadf61", null ], + [ "netInitializationOnThread", "classop_1_1_face_extractor_net.html#a6a9a02b46596283cab6f8a4640161081", null ], + [ "setEnabled", "classop_1_1_face_extractor_net.html#a6c00e96ddf7465062d6f0b51a7a1348d", null ], + [ "mEnabled", "classop_1_1_face_extractor_net.html#a637f9c4c19e110be435cd05052248f86", null ], + [ "mFaceImageCrop", "classop_1_1_face_extractor_net.html#ae18226cef1478a929df9061c7d699c6f", null ], + [ "mFaceKeypoints", "classop_1_1_face_extractor_net.html#a5d3437e6a4a0fd834232b0afaab95a8a", null ], + [ "mHeatMaps", "classop_1_1_face_extractor_net.html#a43bd29f8c1fc0dbef051bd574df2deca", null ], + [ "mHeatMapScaleMode", "classop_1_1_face_extractor_net.html#aa3f6566e8b857262f57e18a88c90b9be", null ], + [ "mHeatMapTypes", "classop_1_1_face_extractor_net.html#a3bf177dbf1a3effbe6b15545e6102d6e", null ], + [ "mNetOutputSize", "classop_1_1_face_extractor_net.html#acf72945f62375b6ac8939c463a616f4a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_extractor_net.png b/web/html/doc/classop_1_1_face_extractor_net.png new file mode 100644 index 000000000..eda4a4997 Binary files /dev/null and b/web/html/doc/classop_1_1_face_extractor_net.png differ diff --git a/web/html/doc/classop_1_1_face_gpu_renderer-members.html b/web/html/doc/classop_1_1_face_gpu_renderer-members.html new file mode 100644 index 000000000..c23ce0c79 --- /dev/null +++ b/web/html/doc/classop_1_1_face_gpu_renderer-members.html @@ -0,0 +1,135 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::FaceGpuRenderer Member List
+
+
+ +

This is the complete list of members for op::FaceGpuRenderer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
cpuToGpuMemoryIfNotCopiedYet(const float *const cpuMemory, const unsigned long long memoryVolume)op::GpuRendererprotected
FaceGpuRenderer(const float renderThreshold, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP)op::FaceGpuRenderer
getAlphaHeatMap() constop::Renderer
getAlphaKeypoint() constop::Renderer
getBlendOriginalFrame() constop::Renderer
getSharedParameters()op::GpuRenderer
getShowGooglyEyes() constop::Renderer
GpuRenderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::GpuRendererexplicit
gpuToCpuMemoryIfLastRenderer(float *cpuMemory, const unsigned long long memoryVolume)op::GpuRendererprotected
increaseElementToRender(const int increment)op::Renderer
initializationOnThread()op::FaceGpuRenderervirtual
mBlendOriginalFrameop::Rendererprotected
mRenderThresholdop::Rendererprotected
mShowGooglyEyesop::Rendererprotected
Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::Rendererexplicit
renderFace(Array< float > &outputData, const Array< float > &faceKeypoints, const float scaleInputToOutput)op::FaceRenderer
renderFaceInherited(Array< float > &outputData, const Array< float > &faceKeypoints)op::FaceGpuRenderervirtual
setAlphaHeatMap(const float alphaHeatMap)op::Renderer
setAlphaKeypoint(const float alphaKeypoint)op::Renderer
setBlendOriginalFrame(const bool blendOriginalFrame)op::Renderer
setElementToRender(const int elementToRender)op::Renderer
setElementToRender(const ElementToRender elementToRender)op::Renderer
setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)op::GpuRenderer
setSharedParametersAndIfLast(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)op::GpuRenderer
setShowGooglyEyes(const bool showGooglyEyes)op::Renderer
spElementToRenderop::Rendererprotected
spGpuMemoryop::GpuRendererprotected
spNumberElementsToRenderop::Rendererprotected
~FaceGpuRenderer()op::FaceGpuRenderervirtual
~FaceRenderer()op::FaceRendererinlinevirtual
~GpuRenderer()op::GpuRenderervirtual
~Renderer()op::Renderervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_gpu_renderer.html b/web/html/doc/classop_1_1_face_gpu_renderer.html new file mode 100644 index 000000000..122efe20b --- /dev/null +++ b/web/html/doc/classop_1_1_face_gpu_renderer.html @@ -0,0 +1,326 @@ + + + + + + + +OpenPose: op::FaceGpuRenderer Class Reference + + + + + + + + + + + + + +
op::FaceGpuRenderer Class Reference
+
+
+ +

#include <faceGpuRenderer.hpp>

+
+Inheritance diagram for op::FaceGpuRenderer:
+
+
+ + +op::GpuRenderer +op::FaceRenderer +op::Renderer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 FaceGpuRenderer (const float renderThreshold, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP)
 
virtual ~FaceGpuRenderer ()
 
void initializationOnThread ()
 
void renderFaceInherited (Array< float > &outputData, const Array< float > &faceKeypoints)
 
- Public Member Functions inherited from op::GpuRenderer
 GpuRenderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~GpuRenderer ()
 
std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int > >, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int > > getSharedParameters ()
 
void setSharedParametersAndIfLast (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)
 
void setSharedParameters (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)
 
- Public Member Functions inherited from op::Renderer
 Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~Renderer ()
 
void increaseElementToRender (const int increment)
 
void setElementToRender (const int elementToRender)
 
void setElementToRender (const ElementToRender elementToRender)
 
bool getBlendOriginalFrame () const
 
void setBlendOriginalFrame (const bool blendOriginalFrame)
 
float getAlphaKeypoint () const
 
void setAlphaKeypoint (const float alphaKeypoint)
 
float getAlphaHeatMap () const
 
void setAlphaHeatMap (const float alphaHeatMap)
 
bool getShowGooglyEyes () const
 
void setShowGooglyEyes (const bool showGooglyEyes)
 
- Public Member Functions inherited from op::FaceRenderer
virtual ~FaceRenderer ()
 
void renderFace (Array< float > &outputData, const Array< float > &faceKeypoints, const float scaleInputToOutput)
 
+ + + + + + + + + + + + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::GpuRenderer
void cpuToGpuMemoryIfNotCopiedYet (const float *const cpuMemory, const unsigned long long memoryVolume)
 
void gpuToCpuMemoryIfLastRenderer (float *cpuMemory, const unsigned long long memoryVolume)
 
- Protected Attributes inherited from op::GpuRenderer
std::shared_ptr< float * > spGpuMemory
 
- Protected Attributes inherited from op::Renderer
const float mRenderThreshold
 
std::atomic< bool > mBlendOriginalFrame
 
std::shared_ptr< std::atomic< unsigned int > > spElementToRender
 
std::shared_ptr< const unsigned int > spNumberElementsToRender
 
std::atomic< bool > mShowGooglyEyes
 
+

Detailed Description

+
+

Definition at line 11 of file faceGpuRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ FaceGpuRenderer()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::FaceGpuRenderer::FaceGpuRenderer (const float renderThreshold,
const float alphaKeypoint = FACE_DEFAULT_ALPHA_KEYPOINT,
const float alphaHeatMap = FACE_DEFAULT_ALPHA_HEAT_MAP 
)
+
+ +
+
+ +

◆ ~FaceGpuRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FaceGpuRenderer::~FaceGpuRenderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
void op::FaceGpuRenderer::initializationOnThread ()
+
+virtual
+
+ +

Reimplemented from op::FaceRenderer.

+ +
+
+ +

◆ renderFaceInherited()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::FaceGpuRenderer::renderFaceInherited (Array< float > & outputData,
const Array< float > & faceKeypoints 
)
+
+virtual
+
+ +

Implements op::FaceRenderer.

+ +
+
+
The documentation for this class was generated from the following file: +
+
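The GPU counterpart of the CPU renderer sketched earlier; initializationOnThread() is called first since it must run in the rendering thread. Threshold and scale are illustrative.

    #include <openpose/headers.hpp>

    void drawFacesGpu(op::Array<float>& outputData, const op::Array<float>& faceKeypoints)
    {
        op::FaceGpuRenderer faceGpuRenderer{/*renderThreshold*/ 0.4f};
        faceGpuRenderer.initializationOnThread();   // presumably sets up the GPU resources
        faceGpuRenderer.renderFace(outputData, faceKeypoints, /*scaleInputToOutput*/ 1.f);
    }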
+ + + + diff --git a/web/html/doc/classop_1_1_face_gpu_renderer.js b/web/html/doc/classop_1_1_face_gpu_renderer.js new file mode 100644 index 000000000..95e5363b1 --- /dev/null +++ b/web/html/doc/classop_1_1_face_gpu_renderer.js @@ -0,0 +1,7 @@ +var classop_1_1_face_gpu_renderer = +[ + [ "FaceGpuRenderer", "classop_1_1_face_gpu_renderer.html#a344b4f1d256d6ad805273eb8ba29cde1", null ], + [ "~FaceGpuRenderer", "classop_1_1_face_gpu_renderer.html#a94758beab4bfbfed02cc8330a63abaeb", null ], + [ "initializationOnThread", "classop_1_1_face_gpu_renderer.html#a6ebd9287927529ffaa4200890190896b", null ], + [ "renderFaceInherited", "classop_1_1_face_gpu_renderer.html#ae54b7538a6ed6a5eaedcbdc117a0d61c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_gpu_renderer.png b/web/html/doc/classop_1_1_face_gpu_renderer.png new file mode 100644 index 000000000..17844f529 Binary files /dev/null and b/web/html/doc/classop_1_1_face_gpu_renderer.png differ diff --git a/web/html/doc/classop_1_1_face_renderer-members.html b/web/html/doc/classop_1_1_face_renderer-members.html new file mode 100644 index 000000000..691d99395 --- /dev/null +++ b/web/html/doc/classop_1_1_face_renderer-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::FaceRenderer Member List
+
+
+ +

This is the complete list of members for op::FaceRenderer, including all inherited members.

+ + + + +
initializationOnThread() | op::FaceRenderer | inline virtual
renderFace(Array< float > &outputData, const Array< float > &faceKeypoints, const float scaleInputToOutput) | op::FaceRenderer
~FaceRenderer() | op::FaceRenderer | inline virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_renderer.html b/web/html/doc/classop_1_1_face_renderer.html new file mode 100644 index 000000000..61a757e2f --- /dev/null +++ b/web/html/doc/classop_1_1_face_renderer.html @@ -0,0 +1,224 @@ + + + + + + + +OpenPose: op::FaceRenderer Class Reference + + + + + + + + + + + + + +
op::FaceRenderer Class Reference (abstract)
+
+
+ +

#include <faceRenderer.hpp>

+
+Inheritance diagram for op::FaceRenderer:
+
+
+ + +op::FaceCpuRenderer +op::FaceGpuRenderer + +
+ + + + + + + + +

+Public Member Functions

virtual ~FaceRenderer ()
 
virtual void initializationOnThread ()
 
void renderFace (Array< float > &outputData, const Array< float > &faceKeypoints, const float scaleInputToOutput)
 
+

Detailed Description

+
+

Definition at line 8 of file faceRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ ~FaceRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FaceRenderer::~FaceRenderer ()
+
+inlinevirtual
+
+ +

Definition at line 11 of file faceRenderer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::FaceRenderer::initializationOnThread ()
+
+inlinevirtual
+
+ +

Reimplemented in op::FaceGpuRenderer.

+ +

Definition at line 13 of file faceRenderer.hpp.

+ +
+
+ +

◆ renderFace()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
void op::FaceRenderer::renderFace (Array< float > & outputData,
const Array< float > & faceKeypoints,
const float scaleInputToOutput 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_face_renderer.js b/web/html/doc/classop_1_1_face_renderer.js new file mode 100644 index 000000000..6f291db3f --- /dev/null +++ b/web/html/doc/classop_1_1_face_renderer.js @@ -0,0 +1,6 @@ +var classop_1_1_face_renderer = +[ + [ "~FaceRenderer", "classop_1_1_face_renderer.html#a8ba7bad616bd2cf673d8faa846bf95b5", null ], + [ "initializationOnThread", "classop_1_1_face_renderer.html#aa34ce7a0602b0994cc3043b80627a31c", null ], + [ "renderFace", "classop_1_1_face_renderer.html#acbbdaca16f4115a5a68d006f4f325397", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_face_renderer.png b/web/html/doc/classop_1_1_face_renderer.png new file mode 100644 index 000000000..57001bbd7 Binary files /dev/null and b/web/html/doc/classop_1_1_face_renderer.png differ diff --git a/web/html/doc/classop_1_1_file_saver-members.html b/web/html/doc/classop_1_1_file_saver-members.html new file mode 100644 index 000000000..32dc75b1d --- /dev/null +++ b/web/html/doc/classop_1_1_file_saver-members.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::FileSaver Member List
+
+
+ +

This is the complete list of members for op::FileSaver, including all inherited members.

+ + + + + +
FileSaver(const std::string &directoryPath)op::FileSaverexplicitprotected
getNextFileName(const unsigned long long index) constop::FileSaverprotected
getNextFileName(const std::string &fileNameNoExtension) constop::FileSaverprotected
~FileSaver()op::FileSaverprotectedvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_file_saver.html b/web/html/doc/classop_1_1_file_saver.html new file mode 100644 index 000000000..7a38b7464 --- /dev/null +++ b/web/html/doc/classop_1_1_file_saver.html @@ -0,0 +1,241 @@ + + + + + + + +OpenPose: op::FileSaver Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::FileSaver Class Reference
+
+
+ +

#include <fileSaver.hpp>

+
+Inheritance diagram for op::FileSaver:
+
+
+ + +op::HeatMapSaver +op::ImageSaver +op::KeypointSaver +op::PeopleJsonSaver + +
+ + + + + + + + + + +

+Protected Member Functions

 FileSaver (const std::string &directoryPath)
 
virtual ~FileSaver ()
 
std::string getNextFileName (const unsigned long long index) const
 
std::string getNextFileName (const std::string &fileNameNoExtension) const
 
+

Detailed Description

+
+

Definition at line 9 of file fileSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ FileSaver()

+ +
+
+ + + + + +
+ + + + + + + + +
op::FileSaver::FileSaver (const std::string & directoryPath)
+
+explicitprotected
+
+ +
+
+ +

◆ ~FileSaver()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FileSaver::~FileSaver ()
+
+protectedvirtual
+
+ +
+
+

Member Function Documentation

+ +

◆ getNextFileName() [1/2]

+ +
+
+ + + + + +
+ + + + + + + + +
std::string op::FileSaver::getNextFileName (const std::string & fileNameNoExtension) const
+
+protected
+
+ +
+
+ +

◆ getNextFileName() [2/2]

+ +
+
+ + + + + +
+ + + + + + + + +
std::string op::FileSaver::getNextFileName (const unsigned long long index) const
+
+protected
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_file_saver.js b/web/html/doc/classop_1_1_file_saver.js new file mode 100644 index 000000000..ecc847955 --- /dev/null +++ b/web/html/doc/classop_1_1_file_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_file_saver = +[ + [ "FileSaver", "classop_1_1_file_saver.html#aa4632ae62ac77dbad85523845ce79999", null ], + [ "~FileSaver", "classop_1_1_file_saver.html#a080e6bb80adad7a3d534356cdfe40211", null ], + [ "getNextFileName", "classop_1_1_file_saver.html#a5940f007f3346580124cd1b6b27492e6", null ], + [ "getNextFileName", "classop_1_1_file_saver.html#a52aab3187cefc2e878790aa440a842aa", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_file_saver.png b/web/html/doc/classop_1_1_file_saver.png new file mode 100644 index 000000000..c023ee65f Binary files /dev/null and b/web/html/doc/classop_1_1_file_saver.png differ diff --git a/web/html/doc/classop_1_1_flir_reader-members.html b/web/html/doc/classop_1_1_flir_reader-members.html new file mode 100644 index 000000000..8724d5eea --- /dev/null +++ b/web/html/doc/classop_1_1_flir_reader-members.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::FlirReader Member List
+
+
+ +

This is the complete list of members for op::FlirReader, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + +
checkFrameIntegrity(Matrix &frame)op::Producerprotected
FlirReader(const std::string &cameraParametersPath, const Point< int > &cameraResolution, const bool undistortImage=true, const int cameraIndex=-1)op::FlirReaderexplicit
get(const int capProperty)op::FlirReadervirtual
op::Producer::get(const ProducerProperty property)op::Producer
getCameraExtrinsics()op::FlirReadervirtual
getCameraIntrinsics()op::FlirReadervirtual
getCameraMatrices()op::FlirReadervirtual
getFrame()op::Producer
getFrames()op::Producer
getNextFrameName()op::FlirReadervirtual
getType()op::Producerinline
ifEndedResetOrRelease()op::Producerprotected
isOpened() constop::FlirReadervirtual
keepDesiredFrameRate()op::Producerprotected
Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)op::Producerexplicit
release()op::FlirReadervirtual
set(const int capProperty, const double value)op::FlirReadervirtual
op::Producer::set(const ProducerProperty property, const double value)op::Producer
setProducerFpsMode(const ProducerFpsMode fpsMode)op::Producer
~FlirReader()op::FlirReadervirtual
~Producer()op::Producervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_flir_reader.html b/web/html/doc/classop_1_1_flir_reader.html new file mode 100644 index 000000000..922f2a9db --- /dev/null +++ b/web/html/doc/classop_1_1_flir_reader.html @@ -0,0 +1,497 @@ + + + + + + + +OpenPose: op::FlirReader Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::FlirReader Class Reference
+
+
+ +

#include <flirReader.hpp>

+
+Inheritance diagram for op::FlirReader:
+
+
+ + +op::Producer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 FlirReader (const std::string &cameraParametersPath, const Point< int > &cameraResolution, const bool undistortImage=true, const int cameraIndex=-1)
 
virtual ~FlirReader ()
 
std::vector< MatrixgetCameraMatrices ()
 
std::vector< MatrixgetCameraExtrinsics ()
 
std::vector< MatrixgetCameraIntrinsics ()
 
std::string getNextFrameName ()
 
bool isOpened () const
 
void release ()
 
double get (const int capProperty)
 
void set (const int capProperty, const double value)
 
- Public Member Functions inherited from op::Producer
 Producer (const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
 
virtual ~Producer ()
 
Matrix getFrame ()
 
std::vector< MatrixgetFrames ()
 
void setProducerFpsMode (const ProducerFpsMode fpsMode)
 
ProducerType getType ()
 
double get (const ProducerProperty property)
 
void set (const ProducerProperty property, const double value)
 
+ + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::Producer
void checkFrameIntegrity (Matrix &frame)
 
void ifEndedResetOrRelease ()
 
void keepDesiredFrameRate ()
 
+

Detailed Description

+

FlirReader is an abstract class to extract frames from a FLIR stereo-camera system. Its interface imitates the cv::VideoCapture class, so it can be used in much the same way; in that sense, it is quite similar to VideoReader and WebcamReader.

+ +

Definition at line 15 of file flirReader.hpp.
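A minimal usage sketch, not taken from the generated page above: the include path openpose/producer/flirReader.hpp, the calibration folder models/cameraParameters/flir/ and the 1280x1024 resolution are assumptions chosen for illustration; only members documented on this page (or inherited from op::Producer) are called.

    #include <vector>
    #include <openpose/producer/flirReader.hpp>

    int main()
    {
        // Opens every available FLIR camera (calibration path and resolution are example values)
        op::FlirReader flirReader{"models/cameraParameters/flir/", op::Point<int>{1280, 1024}};
        while (flirReader.isOpened())
        {
            // One op::Matrix per camera view, inherited from op::Producer
            const std::vector<op::Matrix> frames = flirReader.getFrames();
            if (frames.empty())
                break;
            // ... process the synchronized views here ...
        }
        flirReader.release();
        return 0;
    }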

+

Constructor & Destructor Documentation

+ +

◆ FlirReader()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::FlirReader::FlirReader (const std::string & cameraParametersPath,
const Point< int > & cameraResolution,
const bool undistortImage = true,
const int cameraIndex = -1 
)
+
+explicit
+
+

Constructor of FlirReader. It opens all the available FLIR cameras.

+ +
+
+ +

◆ ~FlirReader()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FlirReader::~FlirReader ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ get()

+ +
+
+ + + + + +
+ + + + + + + + +
double op::FlirReader::get (const int capProperty)
+
+virtual
+
+

This function is a wrapper of cv::VideoCapture::get. It allows getting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + +
capPropertyint indicating the property to be read.
+
+
+
Returns
double returning the property value.
+ +

Implements op::Producer.

+ +
+
+ +

◆ getCameraExtrinsics()

+ +
+
+ + + + + +
+ + + + + + + +
std::vector<Matrix> op::FlirReader::getCameraExtrinsics ()
+
+virtual
+
+

It retrieves and returns the camera extrinsic parameters from the frames producer. It is a virtual function so that FlirReader can implement its own version.

Returns
std::vector<Matrix> with the camera extrinsic parameters.
+ +

Reimplemented from op::Producer.

+ +
+
+ +

◆ getCameraIntrinsics()

+ +
+
+ + + + + +
+ + + + + + + +
std::vector<Matrix> op::FlirReader::getCameraIntrinsics ()
+
+virtual
+
+

It retrieves and returns the camera intrinsic parameters from the frames producer. It is a virtual function so that FlirReader can implement its own version.

Returns
std::vector<Matrix> with the camera intrinsic parameters.
+ +

Reimplemented from op::Producer.

+ +
+
+ +

◆ getCameraMatrices()

+ +
+
+ + + + + +
+ + + + + + + +
std::vector<Matrix> op::FlirReader::getCameraMatrices ()
+
+virtual
+
+

It retrieves and returns the camera matrices from the frames producer. It is a virtual function so that FlirReader can implement its own version.

Returns
std::vector<Matrix> with the camera matrices.
+ +

Reimplemented from op::Producer.

+ +
+
+ +

◆ getNextFrameName()

+ +
+
+ + + + + +
+ + + + + + + +
std::string op::FlirReader::getNextFrameName ()
+
+virtual
+
+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.
+ +

Implements op::Producer.
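A short, hedged sketch of pairing the unique frame name with the corresponding frame; getFrame is inherited from op::Producer, and the saving step is only indicated by a comment.

    #include <string>
    #include <openpose/producer/flirReader.hpp>

    // Grab one frame together with its unique name, e.g., to label an output file
    void grabNamedFrame(op::FlirReader& flirReader)
    {
        const std::string frameName = flirReader.getNextFrameName();
        const op::Matrix frame = flirReader.getFrame();   // inherited from op::Producer
        // ... save or display `frame`, using frameName to build the output file name ...
    }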

+ +
+
+ +

◆ isOpened()

+ +
+
+ + + + + +
+ + + + + + + +
bool op::FlirReader::isOpened () const
+
+virtual
+
+

This function returns whether the Producer instance is still opened and able to retrieve more frames.

Returns
bool indicating whether the Producer is opened.
+ +

Implements op::Producer.

+ +
+
+ +

◆ release()

+ +
+
+ + + + + +
+ + + + + + + +
void op::FlirReader::release ()
+
+virtual
+
+

This function releases and closes the Producer. After it is called, no more frames can be retrieved from Producer::getFrames.

+ +

Implements op::Producer.

+ +
+
+ +

◆ set()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::FlirReader::set (const int capProperty,
const double value 
)
+
+virtual
+
+

This function is a wrapper of cv::VideoCapture::set. It allows setting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + + +
capPropertyint indicating the property to be modified.
valuedouble indicating the new value to be assigned.
+
+
+ +

Implements op::Producer.
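Because get() and set() forward to the cv::VideoCapture property mechanism, the usual OpenCV property ids can be used. A hedged sketch follows; the cv::CAP_PROP_* constants come from <opencv2/videoio.hpp>, not from this page, and the 30 fps value is only an example.

    #include <cstdio>
    #include <opencv2/videoio.hpp>
    #include <openpose/producer/flirReader.hpp>

    void printResolutionAndCapFps(op::FlirReader& flirReader)
    {
        // Wraps cv::VideoCapture::get
        const double width  = flirReader.get(cv::CAP_PROP_FRAME_WIDTH);
        const double height = flirReader.get(cv::CAP_PROP_FRAME_HEIGHT);
        // Wraps cv::VideoCapture::set
        flirReader.set(cv::CAP_PROP_FPS, 30.);
        std::printf("Capture resolution: %.0f x %.0f\n", width, height);
    }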

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_flir_reader.js b/web/html/doc/classop_1_1_flir_reader.js new file mode 100644 index 000000000..9094f3193 --- /dev/null +++ b/web/html/doc/classop_1_1_flir_reader.js @@ -0,0 +1,13 @@ +var classop_1_1_flir_reader = +[ + [ "FlirReader", "classop_1_1_flir_reader.html#a8fa5c03b6ce95372ce47013c01c782a5", null ], + [ "~FlirReader", "classop_1_1_flir_reader.html#a66d6144c5dcb0dd3cbadcd6f8eefa9e0", null ], + [ "get", "classop_1_1_flir_reader.html#a5101cdbcd46e51bf7f35995a3d87e900", null ], + [ "getCameraExtrinsics", "classop_1_1_flir_reader.html#ad3b940d5ed672ef17406843b102e9715", null ], + [ "getCameraIntrinsics", "classop_1_1_flir_reader.html#acb45c9a89ebc92c0a8ee69a0ec4d0476", null ], + [ "getCameraMatrices", "classop_1_1_flir_reader.html#a7ddcdf533c778df342a50c24c280499b", null ], + [ "getNextFrameName", "classop_1_1_flir_reader.html#a711db0919bd7516fde3e641c13259637", null ], + [ "isOpened", "classop_1_1_flir_reader.html#a3d383e03a405dcbff566a86253db90af", null ], + [ "release", "classop_1_1_flir_reader.html#ab28f40422c9edff8594d855bbef91f58", null ], + [ "set", "classop_1_1_flir_reader.html#af14f63c79272781429341dc3a0720485", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_flir_reader.png b/web/html/doc/classop_1_1_flir_reader.png new file mode 100644 index 000000000..62f3bf030 Binary files /dev/null and b/web/html/doc/classop_1_1_flir_reader.png differ diff --git a/web/html/doc/classop_1_1_frame_displayer-members.html b/web/html/doc/classop_1_1_frame_displayer-members.html new file mode 100644 index 000000000..84903586c --- /dev/null +++ b/web/html/doc/classop_1_1_frame_displayer-members.html @@ -0,0 +1,110 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::FrameDisplayer Member List
+
+
+ +

This is the complete list of members for op::FrameDisplayer, including all inherited members.

+ + + + + + + + +
displayFrame(const Matrix &frame, const int waitKeyValue=-1)op::FrameDisplayer
displayFrame(const std::vector< Matrix > &frames, const int waitKeyValue=-1)op::FrameDisplayer
FrameDisplayer(const std::string &windowedName=OPEN_POSE_NAME_AND_VERSION, const Point< int > &initialWindowedSize=Point< int >{}, const bool fullScreen=false)op::FrameDisplayer
initializationOnThread()op::FrameDisplayer
setFullScreenMode(const FullScreenMode fullScreenMode)op::FrameDisplayer
switchFullScreenMode()op::FrameDisplayer
~FrameDisplayer()op::FrameDisplayervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_frame_displayer.html b/web/html/doc/classop_1_1_frame_displayer.html new file mode 100644 index 000000000..4ad3dff05 --- /dev/null +++ b/web/html/doc/classop_1_1_frame_displayer.html @@ -0,0 +1,323 @@ + + + + + + + +OpenPose: op::FrameDisplayer Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::FrameDisplayer Class Reference
+
+
+ +

#include <frameDisplayer.hpp>

+ + + + + + + + + + + + + + + + +

+Public Member Functions

 FrameDisplayer (const std::string &windowedName=OPEN_POSE_NAME_AND_VERSION, const Point< int > &initialWindowedSize=Point< int >{}, const bool fullScreen=false)
 
virtual ~FrameDisplayer ()
 
void initializationOnThread ()
 
void setFullScreenMode (const FullScreenMode fullScreenMode)
 
void switchFullScreenMode ()
 
void displayFrame (const Matrix &frame, const int waitKeyValue=-1)
 
void displayFrame (const std::vector< Matrix > &frames, const int waitKeyValue=-1)
 
+

Detailed Description

+

The FrameDisplayer class visually presents the processed frames to the user.

+ +

Definition at line 12 of file frameDisplayer.hpp.
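A minimal sketch of the typical call order; the include path openpose/gui/frameDisplayer.hpp, the window name and the window size are assumptions, and `frame` is an op::Matrix produced elsewhere (e.g., by a Producer).

    #include <openpose/gui/frameDisplayer.hpp>

    void showFrame(const op::Matrix& frame)
    {
        op::FrameDisplayer frameDisplayer{"OpenPose - example window", op::Point<int>{640, 480}};
        // Per-thread initialization (e.g., window creation) before the first display call
        frameDisplayer.initializationOnThread();
        // waitKeyValue = 0 maps to cv::waitKey(0) (block until a key is pressed); -1 skips cv::waitKey
        frameDisplayer.displayFrame(frame, 0);
    }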

+

Constructor & Destructor Documentation

+ +

◆ FrameDisplayer()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::FrameDisplayer::FrameDisplayer (const std::string & windowedName = OPEN_POSE_NAME_AND_VERSION,
const Point< int > & initialWindowedSize = Point< int >{},
const bool fullScreen = false 
)
+
+

Constructor of the FrameDisplayer class.

Parameters
+ + + + +
windowedNameconst std::string value with the name of the resulting OpenCV display window, shown at its top-left corner.
initialWindowedSizeconst Point<int> with the initial window output resolution (width and height).
fullScreenbool from which the FrameDisplayer::FullScreenMode property mFullScreenMode will be set, i.e., specifying the type of initial display (it can be changed later).
+
+
+ +
+
+ +

◆ ~FrameDisplayer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::FrameDisplayer::~FrameDisplayer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ displayFrame() [1/2]

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::FrameDisplayer::displayFrame (const Matrixframe,
const int waitKeyValue = -1 
)
+
+

This function displays an image in the display window.

Parameters
+ + + +
frameMatrix image to display.
waitKeyValueint value that specifies the argument passed to cv::waitKey (see OpenCV documentation for more information). Special cases: select -1 not to use cv::waitKey, or 0 for cv::waitKey(0). OpenCV doc: http://docs.opencv.org/2.4/modules/highgui/doc/user_interface.html?highlight=waitkey
+
+
+ +
+
+ +

◆ displayFrame() [2/2]

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::FrameDisplayer::displayFrame (const std::vector< Matrix > & frames,
const int waitKeyValue = -1 
)
+
+

Analogous to the previous displayFrame, but it first concatenates all the frames horizontally.

+ +
+
+ +

◆ initializationOnThread()

+ +
+
+ + + + + + + +
void op::FrameDisplayer::initializationOnThread ()
+
+ +
+
+ +

◆ setFullScreenMode()

+ +
+
+ + + + + + + + +
void op::FrameDisplayer::setFullScreenMode (const FullScreenMode fullScreenMode)
+
+

This function sets the new FrameDisplayer::FullScreenMode (e.g., full screen).

Parameters
+ + +
fullScreenModeNew FrameDisplayer::FullScreenMode state.
+
+
+ +
+
+ +

◆ switchFullScreenMode()

+ +
+
+ + + + + + + +
void op::FrameDisplayer::switchFullScreenMode ()
+
+

This function switches between full-screen and windowed modes (e.g., when double-clicking on a video player or pressing Ctrl+Enter).

+ +
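A hedged fragment illustrating both full-screen calls; it assumes the op::FullScreenMode enum exposes FullScreen and Windowed states, which is not stated on this page.

    #include <openpose/gui/frameDisplayer.hpp>

    void toggleDisplayMode(op::FrameDisplayer& frameDisplayer)
    {
        // Force full screen (assumed enumerator name), then toggle back to windowed mode
        frameDisplayer.setFullScreenMode(op::FullScreenMode::FullScreen);
        frameDisplayer.switchFullScreenMode();
    }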
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_frame_displayer.js b/web/html/doc/classop_1_1_frame_displayer.js new file mode 100644 index 000000000..e7087cdd6 --- /dev/null +++ b/web/html/doc/classop_1_1_frame_displayer.js @@ -0,0 +1,10 @@ +var classop_1_1_frame_displayer = +[ + [ "FrameDisplayer", "classop_1_1_frame_displayer.html#a21a746ef46172c6a18ea72da6e7b5721", null ], + [ "~FrameDisplayer", "classop_1_1_frame_displayer.html#ab3dea1eefac57cf129b4828ecd856fb4", null ], + [ "displayFrame", "classop_1_1_frame_displayer.html#aa99517efbef90cd8a6e171a713c37501", null ], + [ "displayFrame", "classop_1_1_frame_displayer.html#a23263864af418160f489072716ba9951", null ], + [ "initializationOnThread", "classop_1_1_frame_displayer.html#af5d2e1c8bcd2012c66347252e8dbc543", null ], + [ "setFullScreenMode", "classop_1_1_frame_displayer.html#a2df856e4cf7542c7cda2757553674fb8", null ], + [ "switchFullScreenMode", "classop_1_1_frame_displayer.html#ad83a47005c52f066587f49d62c109802", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_gpu_renderer-members.html b/web/html/doc/classop_1_1_gpu_renderer-members.html new file mode 100644 index 000000000..537a9bb16 --- /dev/null +++ b/web/html/doc/classop_1_1_gpu_renderer-members.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::GpuRenderer Member List
+
+
+ +

This is the complete list of members for op::GpuRenderer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
cpuToGpuMemoryIfNotCopiedYet(const float *const cpuMemory, const unsigned long long memoryVolume)op::GpuRendererprotected
getAlphaHeatMap() constop::Renderer
getAlphaKeypoint() constop::Renderer
getBlendOriginalFrame() constop::Renderer
getSharedParameters()op::GpuRenderer
getShowGooglyEyes() constop::Renderer
GpuRenderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::GpuRendererexplicit
gpuToCpuMemoryIfLastRenderer(float *cpuMemory, const unsigned long long memoryVolume)op::GpuRendererprotected
increaseElementToRender(const int increment)op::Renderer
mBlendOriginalFrameop::Rendererprotected
mRenderThresholdop::Rendererprotected
mShowGooglyEyesop::Rendererprotected
Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::Rendererexplicit
setAlphaHeatMap(const float alphaHeatMap)op::Renderer
setAlphaKeypoint(const float alphaKeypoint)op::Renderer
setBlendOriginalFrame(const bool blendOriginalFrame)op::Renderer
setElementToRender(const int elementToRender)op::Renderer
setElementToRender(const ElementToRender elementToRender)op::Renderer
setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)op::GpuRenderer
setSharedParametersAndIfLast(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)op::GpuRenderer
setShowGooglyEyes(const bool showGooglyEyes)op::Renderer
spElementToRenderop::Rendererprotected
spGpuMemoryop::GpuRendererprotected
spNumberElementsToRenderop::Rendererprotected
~GpuRenderer()op::GpuRenderervirtual
~Renderer()op::Renderervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_gpu_renderer.html b/web/html/doc/classop_1_1_gpu_renderer.html new file mode 100644 index 000000000..ee91c8212 --- /dev/null +++ b/web/html/doc/classop_1_1_gpu_renderer.html @@ -0,0 +1,437 @@ + + + + + + + +OpenPose: op::GpuRenderer Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ + +
+ +

#include <gpuRenderer.hpp>

+
+Inheritance diagram for op::GpuRenderer:
+
+
+ + +op::Renderer +op::FaceGpuRenderer +op::HandGpuRenderer +op::PoseGpuRenderer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 GpuRenderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~GpuRenderer ()
 
std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int > >, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int > > getSharedParameters ()
 
void setSharedParametersAndIfLast (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)
 
void setSharedParameters (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)
 
- Public Member Functions inherited from op::Renderer
 Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~Renderer ()
 
void increaseElementToRender (const int increment)
 
void setElementToRender (const int elementToRender)
 
void setElementToRender (const ElementToRender elementToRender)
 
bool getBlendOriginalFrame () const
 
void setBlendOriginalFrame (const bool blendOriginalFrame)
 
float getAlphaKeypoint () const
 
void setAlphaKeypoint (const float alphaKeypoint)
 
float getAlphaHeatMap () const
 
void setAlphaHeatMap (const float alphaHeatMap)
 
bool getShowGooglyEyes () const
 
void setShowGooglyEyes (const bool showGooglyEyes)
 
+ + + + + +

+Protected Member Functions

void cpuToGpuMemoryIfNotCopiedYet (const float *const cpuMemory, const unsigned long long memoryVolume)
 
void gpuToCpuMemoryIfLastRenderer (float *cpuMemory, const unsigned long long memoryVolume)
 
+ + + + + + + + + + + + + + +

+Protected Attributes

std::shared_ptr< float * > spGpuMemory
 
- Protected Attributes inherited from op::Renderer
const float mRenderThreshold
 
std::atomic< bool > mBlendOriginalFrame
 
std::shared_ptr< std::atomic< unsigned int > > spElementToRender
 
std::shared_ptr< const unsigned int > spNumberElementsToRender
 
std::atomic< bool > mShowGooglyEyes
 
+

Detailed Description

+
+

Definition at line 11 of file gpuRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ GpuRenderer()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::GpuRenderer::GpuRenderer (const float renderThreshold,
const float alphaKeypoint,
const float alphaHeatMap,
const bool blendOriginalFrame = true,
const unsigned int elementToRender = 0u,
const unsigned int numberElementsToRender = 0u 
)
+
+explicit
+
+ +
+
+ +

◆ ~GpuRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::GpuRenderer::~GpuRenderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ cpuToGpuMemoryIfNotCopiedYet()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::GpuRenderer::cpuToGpuMemoryIfNotCopiedYet (const float *const cpuMemory,
const unsigned long long memoryVolume 
)
+
+protected
+
+ +
+
+ +

◆ getSharedParameters()

+ +
+
+ + + + + + + +
std::tuple<std::shared_ptr<float*>, std::shared_ptr<bool>, std::shared_ptr<std::atomic<unsigned int> >, std::shared_ptr<unsigned long long>, std::shared_ptr<const unsigned int> > op::GpuRenderer::getSharedParameters ()
+
+ +
+
+ +

◆ gpuToCpuMemoryIfLastRenderer()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::GpuRenderer::gpuToCpuMemoryIfLastRenderer (float * cpuMemory,
const unsigned long long memoryVolume 
)
+
+protected
+
+ +
+
+ +

◆ setSharedParameters()

+ +
+
+ + + + + + + + +
void op::GpuRenderer::setSharedParameters (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> & tuple)
+
+ +
+
+ +

◆ setSharedParametersAndIfLast()

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::GpuRenderer::setSharedParametersAndIfLast (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> & tuple,
const bool isLast 
)
+
+ +
+
+

Member Data Documentation

+ +

◆ spGpuMemory

+ +
+
+ + + + + +
+ + + + +
std::shared_ptr<float*> op::GpuRenderer::spGpuMemory
+
+protected
+
+ +

Definition at line 35 of file gpuRenderer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_gpu_renderer.js b/web/html/doc/classop_1_1_gpu_renderer.js new file mode 100644 index 000000000..e7d13cc72 --- /dev/null +++ b/web/html/doc/classop_1_1_gpu_renderer.js @@ -0,0 +1,11 @@ +var classop_1_1_gpu_renderer = +[ + [ "GpuRenderer", "classop_1_1_gpu_renderer.html#a9852b2017e972637b47250bb7fbc53ea", null ], + [ "~GpuRenderer", "classop_1_1_gpu_renderer.html#a3ef06d85a62cd4049d5e8ac1e94d8fd8", null ], + [ "cpuToGpuMemoryIfNotCopiedYet", "classop_1_1_gpu_renderer.html#ac7c1ab0eebf1d54b55cc65a5560bad7b", null ], + [ "getSharedParameters", "classop_1_1_gpu_renderer.html#a63eb7ae0b440a5552ed9342043a8f369", null ], + [ "gpuToCpuMemoryIfLastRenderer", "classop_1_1_gpu_renderer.html#a6355f70d16c6427b028fa4596ce5d985", null ], + [ "setSharedParameters", "classop_1_1_gpu_renderer.html#acc83c7b857db7d35132febaebfcb84df", null ], + [ "setSharedParametersAndIfLast", "classop_1_1_gpu_renderer.html#afa58647bfd9efa02629e4b81bbe48c6e", null ], + [ "spGpuMemory", "classop_1_1_gpu_renderer.html#a5d729aab549908c758953be742dd0115", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_gpu_renderer.png b/web/html/doc/classop_1_1_gpu_renderer.png new file mode 100644 index 000000000..ac4ac4f06 Binary files /dev/null and b/web/html/doc/classop_1_1_gpu_renderer.png differ diff --git a/web/html/doc/classop_1_1_gui-members.html b/web/html/doc/classop_1_1_gui-members.html new file mode 100644 index 000000000..8306e6072 --- /dev/null +++ b/web/html/doc/classop_1_1_gui-members.html @@ -0,0 +1,112 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::Gui Member List
+
+
+ +

This is the complete list of members for op::Gui, including all inherited members.

+ + + + + + + + + + +
Gui(const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const DisplayMode displayMode=DisplayMode::Display2D)op::Gui
initializationOnThread()op::Guivirtual
mDisplayModeop::Guiprotected
mDisplayModeOriginalop::Guiprotected
setImage(const Matrix &cvMatOutput)op::Gui
setImage(const std::vector< Matrix > &cvMatOutputs)op::Gui
spIsRunningop::Guiprotected
update()op::Guivirtual
~Gui()op::Guivirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_gui.html b/web/html/doc/classop_1_1_gui.html new file mode 100644 index 000000000..10887627c --- /dev/null +++ b/web/html/doc/classop_1_1_gui.html @@ -0,0 +1,407 @@ + + + + + + + +OpenPose: op::Gui Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ + +
+ +

#include <gui.hpp>

+
+Inheritance diagram for op::Gui:
+
+
+ + +op::Gui3D + +
+ + + + + + + + + + + + + + +

+Public Member Functions

 Gui (const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const DisplayMode displayMode=DisplayMode::Display2D)
 
virtual ~Gui ()
 
virtual void initializationOnThread ()
 
void setImage (const Matrix &cvMatOutput)
 
void setImage (const std::vector< Matrix > &cvMatOutputs)
 
virtual void update ()
 
+ + + + + + + +

+Protected Attributes

std::shared_ptr< std::atomic< bool > > spIsRunning
 
DisplayMode mDisplayMode
 
DisplayMode mDisplayModeOriginal
 
+

Detailed Description

+
+

Definition at line 14 of file gui.hpp.

+

Constructor & Destructor Documentation

+ +

◆ Gui()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::Gui::Gui (const Point< int > & outputSize,
const bool fullScreen,
const std::shared_ptr< std::atomic< bool >> & isRunningSharedPtr,
const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> & videoSeekSharedPtr = nullptr,
const std::vector< std::shared_ptr< PoseExtractorNet >> & poseExtractorNets = {},
const std::vector< std::shared_ptr< FaceExtractorNet >> & faceExtractorNets = {},
const std::vector< std::shared_ptr< HandExtractorNet >> & handExtractorNets = {},
const std::vector< std::shared_ptr< Renderer >> & renderers = {},
const DisplayMode displayMode = DisplayMode::Display2D 
)
+
+ +
+
+ +

◆ ~Gui()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::Gui::~Gui ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::Gui::initializationOnThread ()
+
+virtual
+
+ +

Reimplemented in op::Gui3D.

+ +
+
+ +

◆ setImage() [1/2]

+ +
+
+ + + + + + + + +
void op::Gui::setImage (const MatrixcvMatOutput)
+
+ +
+
+ +

◆ setImage() [2/2]

+ +
+
+ + + + + + + + +
void op::Gui::setImage (const std::vector< Matrix > & cvMatOutputs)
+
+ +
+
+ +

◆ update()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::Gui::update ()
+
+virtual
+
+ +

Reimplemented in op::Gui3D.

+ +
+
+

Member Data Documentation

+ +

◆ mDisplayMode

+ +
+
+ + + + + +
+ + + + +
DisplayMode op::Gui::mDisplayMode
+
+protected
+
+ +

Definition at line 38 of file gui.hpp.

+ +
+
+ +

◆ mDisplayModeOriginal

+ +
+
+ + + + + +
+ + + + +
DisplayMode op::Gui::mDisplayModeOriginal
+
+protected
+
+ +

Definition at line 39 of file gui.hpp.

+ +
+
+ +

◆ spIsRunning

+ +
+
+ + + + + +
+ + + + +
std::shared_ptr<std::atomic<bool> > op::Gui::spIsRunning
+
+protected
+
+ +

Definition at line 37 of file gui.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_gui.js b/web/html/doc/classop_1_1_gui.js new file mode 100644 index 000000000..35b523a37 --- /dev/null +++ b/web/html/doc/classop_1_1_gui.js @@ -0,0 +1,12 @@ +var classop_1_1_gui = +[ + [ "Gui", "classop_1_1_gui.html#a1084d79f61d08f0551832de1ca337c70", null ], + [ "~Gui", "classop_1_1_gui.html#a5e7e30073c0f7ee18904b25fc638b4e2", null ], + [ "initializationOnThread", "classop_1_1_gui.html#a07cf9b4e7757979666d097278df02c20", null ], + [ "setImage", "classop_1_1_gui.html#a8fc6182d0124dd24e26e0fc139074061", null ], + [ "setImage", "classop_1_1_gui.html#abeff19fe8eceeacfb9115a059cdde4ad", null ], + [ "update", "classop_1_1_gui.html#a8e9a67dd507598654a5db06273d50c94", null ], + [ "mDisplayMode", "classop_1_1_gui.html#a5b95cbfa7cd4018977f4eb1fc095823b", null ], + [ "mDisplayModeOriginal", "classop_1_1_gui.html#a94cfbf759e88467bfcab18fcd2c987f2", null ], + [ "spIsRunning", "classop_1_1_gui.html#a0ad7be7018e634769da8d22d60e7edc0", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_gui.png b/web/html/doc/classop_1_1_gui.png new file mode 100644 index 000000000..089c185bc Binary files /dev/null and b/web/html/doc/classop_1_1_gui.png differ diff --git a/web/html/doc/classop_1_1_gui3_d-members.html b/web/html/doc/classop_1_1_gui3_d-members.html new file mode 100644 index 000000000..ce6326615 --- /dev/null +++ b/web/html/doc/classop_1_1_gui3_d-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::Gui3D Member List
+
+
+ +

This is the complete list of members for op::Gui3D, including all inherited members.

+ + + + + + + + + + + + + + +
Gui(const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const DisplayMode displayMode=DisplayMode::Display2D)op::Gui
Gui3D(const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const PoseModel poseModel=PoseModel::BODY_25, const DisplayMode displayMode=DisplayMode::DisplayAll, const bool copyGlToCvMat=false)op::Gui3D
initializationOnThread()op::Gui3Dvirtual
mDisplayModeop::Guiprotected
mDisplayModeOriginalop::Guiprotected
readCvMat()op::Gui3Dvirtual
setImage(const Matrix &cvMatOutput)op::Gui
setImage(const std::vector< Matrix > &cvMatOutputs)op::Gui
setKeypoints(const Array< float > &poseKeypoints3D, const Array< float > &faceKeypoints3D, const Array< float > &leftHandKeypoints3D, const Array< float > &rightHandKeypoints3D)op::Gui3D
spIsRunningop::Guiprotected
update()op::Gui3Dvirtual
~Gui()op::Guivirtual
~Gui3D()op::Gui3Dvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_gui3_d.html b/web/html/doc/classop_1_1_gui3_d.html new file mode 100644 index 000000000..436a27177 --- /dev/null +++ b/web/html/doc/classop_1_1_gui3_d.html @@ -0,0 +1,384 @@ + + + + + + + +OpenPose: op::Gui3D Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::Gui3D Class Reference
+
+
+ +

#include <gui3D.hpp>

+
+Inheritance diagram for op::Gui3D:
+
+
+ + +op::Gui + +
+ + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 Gui3D (const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const PoseModel poseModel=PoseModel::BODY_25, const DisplayMode displayMode=DisplayMode::DisplayAll, const bool copyGlToCvMat=false)
 
virtual ~Gui3D ()
 
virtual void initializationOnThread ()
 
void setKeypoints (const Array< float > &poseKeypoints3D, const Array< float > &faceKeypoints3D, const Array< float > &leftHandKeypoints3D, const Array< float > &rightHandKeypoints3D)
 
virtual void update ()
 
virtual Matrix readCvMat ()
 
- Public Member Functions inherited from op::Gui
 Gui (const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const DisplayMode displayMode=DisplayMode::Display2D)
 
virtual ~Gui ()
 
void setImage (const Matrix &cvMatOutput)
 
void setImage (const std::vector< Matrix > &cvMatOutputs)
 
+ + + + + + + + +

+Additional Inherited Members

- Protected Attributes inherited from op::Gui
std::shared_ptr< std::atomic< bool > > spIsRunning
 
DisplayMode mDisplayMode
 
DisplayMode mDisplayModeOriginal
 
+

Detailed Description

+
+

Definition at line 12 of file gui3D.hpp.

+

Constructor & Destructor Documentation

+ +

◆ Gui3D()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::Gui3D::Gui3D (const Point< int > & outputSize,
const bool fullScreen,
const std::shared_ptr< std::atomic< bool >> & isRunningSharedPtr,
const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> & videoSeekSharedPtr = nullptr,
const std::vector< std::shared_ptr< PoseExtractorNet >> & poseExtractorNets = {},
const std::vector< std::shared_ptr< FaceExtractorNet >> & faceExtractorNets = {},
const std::vector< std::shared_ptr< HandExtractorNet >> & handExtractorNets = {},
const std::vector< std::shared_ptr< Renderer >> & renderers = {},
const PoseModel poseModel = PoseModel::BODY_25,
const DisplayMode displayMode = DisplayMode::DisplayAll,
const bool copyGlToCvMat = false 
)
+
+ +
+
+ +

◆ ~Gui3D()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::Gui3D::~Gui3D ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::Gui3D::initializationOnThread ()
+
+virtual
+
+ +

Reimplemented from op::Gui.

+ +
+
+ +

◆ readCvMat()

+ +
+
+ + + + + +
+ + + + + + + +
virtual Matrix op::Gui3D::readCvMat ()
+
+virtual
+
+ +
+
+ +

◆ setKeypoints()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::Gui3D::setKeypoints (const Array< float > & poseKeypoints3D,
const Array< float > & faceKeypoints3D,
const Array< float > & leftHandKeypoints3D,
const Array< float > & rightHandKeypoints3D 
)
+
+ +
+
+ +

◆ update()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::Gui3D::update ()
+
+virtual
+
+ +

Reimplemented from op::Gui.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_gui3_d.js b/web/html/doc/classop_1_1_gui3_d.js new file mode 100644 index 000000000..381ceb92b --- /dev/null +++ b/web/html/doc/classop_1_1_gui3_d.js @@ -0,0 +1,9 @@ +var classop_1_1_gui3_d = +[ + [ "Gui3D", "classop_1_1_gui3_d.html#a23ead7d9d09b3f0b3ba81b284d49b4a4", null ], + [ "~Gui3D", "classop_1_1_gui3_d.html#a2fff0519028b406fe9ffc984ecd1dfa9", null ], + [ "initializationOnThread", "classop_1_1_gui3_d.html#a4247c56f90a535944b8aa14def754eaa", null ], + [ "readCvMat", "classop_1_1_gui3_d.html#a04abf8036928d58daf9417c5b5a41693", null ], + [ "setKeypoints", "classop_1_1_gui3_d.html#abd245c07a53d1d25c237aff22a2b6e6f", null ], + [ "update", "classop_1_1_gui3_d.html#af10162684889706894f13a308970ba32", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_gui3_d.png b/web/html/doc/classop_1_1_gui3_d.png new file mode 100644 index 000000000..d12cb7846 Binary files /dev/null and b/web/html/doc/classop_1_1_gui3_d.png differ diff --git a/web/html/doc/classop_1_1_gui_info_adder-members.html b/web/html/doc/classop_1_1_gui_info_adder-members.html new file mode 100644 index 000000000..0d5295f87 --- /dev/null +++ b/web/html/doc/classop_1_1_gui_info_adder-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::GuiInfoAdder Member List
+
+
+ +

This is the complete list of members for op::GuiInfoAdder, including all inherited members.

+ + + + +
addInfo(Matrix &outputData, const int numberPeople, const unsigned long long id, const std::string &elementRenderedName, const unsigned long long frameNumber, const Array< long long > &poseIds=Array< long long >{}, const Array< float > &poseKeypoints=Array< float >{})op::GuiInfoAdder
GuiInfoAdder(const int numberGpus, const bool guiEnabled=false)op::GuiInfoAdder
~GuiInfoAdder()op::GuiInfoAddervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_gui_info_adder.html b/web/html/doc/classop_1_1_gui_info_adder.html new file mode 100644 index 000000000..b0ce214d9 --- /dev/null +++ b/web/html/doc/classop_1_1_gui_info_adder.html @@ -0,0 +1,235 @@ + + + + + + + +OpenPose: op::GuiInfoAdder Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::GuiInfoAdder Class Reference
+
+
+ +

#include <guiInfoAdder.hpp>

+ + + + + + + + +

+Public Member Functions

 GuiInfoAdder (const int numberGpus, const bool guiEnabled=false)
 
virtual ~GuiInfoAdder ()
 
void addInfo (Matrix &outputData, const int numberPeople, const unsigned long long id, const std::string &elementRenderedName, const unsigned long long frameNumber, const Array< long long > &poseIds=Array< long long >{}, const Array< float > &poseKeypoints=Array< float >{})
 
+

Detailed Description

+
+

Definition at line 9 of file guiInfoAdder.hpp.

+

Constructor & Destructor Documentation

+ +

◆ GuiInfoAdder()

+ +
+
+ + + + + + + + + + + + + + + + + + +
op::GuiInfoAdder::GuiInfoAdder (const int numberGpus,
const bool guiEnabled = false 
)
+
+ +
+
+ +

◆ ~GuiInfoAdder()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::GuiInfoAdder::~GuiInfoAdder ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ addInfo()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::GuiInfoAdder::addInfo (MatrixoutputData,
const int numberPeople,
const unsigned long long id,
const std::string & elementRenderedName,
const unsigned long long frameNumber,
const Array< long long > & poseIds = Array< long long >{},
const Array< float > & poseKeypoints = Array< float >{} 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_gui_info_adder.js b/web/html/doc/classop_1_1_gui_info_adder.js new file mode 100644 index 000000000..88d88c869 --- /dev/null +++ b/web/html/doc/classop_1_1_gui_info_adder.js @@ -0,0 +1,6 @@ +var classop_1_1_gui_info_adder = +[ + [ "GuiInfoAdder", "classop_1_1_gui_info_adder.html#af23e17f9eeb51c7473cd0940292efa61", null ], + [ "~GuiInfoAdder", "classop_1_1_gui_info_adder.html#a942af111d6bc41991db4bca3e573b8e9", null ], + [ "addInfo", "classop_1_1_gui_info_adder.html#a6f2f2d449d48ca7e21729d03796a540c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_hand_cpu_renderer-members.html b/web/html/doc/classop_1_1_hand_cpu_renderer-members.html new file mode 100644 index 000000000..ad1fa9a64 --- /dev/null +++ b/web/html/doc/classop_1_1_hand_cpu_renderer-members.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::HandCpuRenderer Member List
+
+
+ +

This is the complete list of members for op::HandCpuRenderer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
DELETE_COPY(HandCpuRenderer)op::HandCpuRenderer
getAlphaHeatMap() constop::Renderer
getAlphaKeypoint() constop::Renderer
getBlendOriginalFrame() constop::Renderer
getShowGooglyEyes() constop::Renderer
HandCpuRenderer(const float renderThreshold, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP)op::HandCpuRenderer
increaseElementToRender(const int increment)op::Renderer
initializationOnThread()op::HandRendererinlinevirtual
mBlendOriginalFrameop::Rendererprotected
mRenderThresholdop::Rendererprotected
mShowGooglyEyesop::Rendererprotected
Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::Rendererexplicit
renderHand(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints, const float scaleInputToOutput)op::HandRenderer
renderHandInherited(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints)op::HandCpuRenderervirtual
setAlphaHeatMap(const float alphaHeatMap)op::Renderer
setAlphaKeypoint(const float alphaKeypoint)op::Renderer
setBlendOriginalFrame(const bool blendOriginalFrame)op::Renderer
setElementToRender(const int elementToRender)op::Renderer
setElementToRender(const ElementToRender elementToRender)op::Renderer
setShowGooglyEyes(const bool showGooglyEyes)op::Renderer
spElementToRenderop::Rendererprotected
spNumberElementsToRenderop::Rendererprotected
~HandCpuRenderer()op::HandCpuRenderervirtual
~HandRenderer()op::HandRendererinlinevirtual
~Renderer()op::Renderervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_hand_cpu_renderer.html b/web/html/doc/classop_1_1_hand_cpu_renderer.html new file mode 100644 index 000000000..48cef5899 --- /dev/null +++ b/web/html/doc/classop_1_1_hand_cpu_renderer.html @@ -0,0 +1,299 @@ + + + + + + + +OpenPose: op::HandCpuRenderer Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::HandCpuRenderer Class Reference
+
+
+ +

#include <handCpuRenderer.hpp>

+
+Inheritance diagram for op::HandCpuRenderer:
+
+
+ + +op::Renderer +op::HandRenderer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 HandCpuRenderer (const float renderThreshold, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP)
 
virtual ~HandCpuRenderer ()
 
void renderHandInherited (Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints)
 
 DELETE_COPY (HandCpuRenderer)
 
- Public Member Functions inherited from op::Renderer
 Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~Renderer ()
 
void increaseElementToRender (const int increment)
 
void setElementToRender (const int elementToRender)
 
void setElementToRender (const ElementToRender elementToRender)
 
bool getBlendOriginalFrame () const
 
void setBlendOriginalFrame (const bool blendOriginalFrame)
 
float getAlphaKeypoint () const
 
void setAlphaKeypoint (const float alphaKeypoint)
 
float getAlphaHeatMap () const
 
void setAlphaHeatMap (const float alphaHeatMap)
 
bool getShowGooglyEyes () const
 
void setShowGooglyEyes (const bool showGooglyEyes)
 
- Public Member Functions inherited from op::HandRenderer
virtual ~HandRenderer ()
 
virtual void initializationOnThread ()
 
void renderHand (Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints, const float scaleInputToOutput)
 
+ + + + + + + + + + + + +

+Additional Inherited Members

- Protected Attributes inherited from op::Renderer
const float mRenderThreshold
 
std::atomic< bool > mBlendOriginalFrame
 
std::shared_ptr< std::atomic< unsigned int > > spElementToRender
 
std::shared_ptr< const unsigned int > spNumberElementsToRender
 
std::atomic< bool > mShowGooglyEyes
 
+

Detailed Description

+
+

Definition at line 11 of file handCpuRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ HandCpuRenderer()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::HandCpuRenderer::HandCpuRenderer (const float renderThreshold,
const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT,
const float alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP 
)
+
+ +
+
+ +

◆ ~HandCpuRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::HandCpuRenderer::~HandCpuRenderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ DELETE_COPY()

+ +
+
+ + + + + + + + +
op::HandCpuRenderer::DELETE_COPY (HandCpuRenderer )
+
+ +
+
+ +

◆ renderHandInherited()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::HandCpuRenderer::renderHandInherited (Array< float > & outputData,
const std::array< Array< float >, 2 > & handKeypoints 
)
+
+virtual
+
+ +

Implements op::HandRenderer.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_hand_cpu_renderer.js b/web/html/doc/classop_1_1_hand_cpu_renderer.js new file mode 100644 index 000000000..c1e60db5a --- /dev/null +++ b/web/html/doc/classop_1_1_hand_cpu_renderer.js @@ -0,0 +1,7 @@ +var classop_1_1_hand_cpu_renderer = +[ + [ "HandCpuRenderer", "classop_1_1_hand_cpu_renderer.html#a3145d482c0378288e7ba3e42091a56c2", null ], + [ "~HandCpuRenderer", "classop_1_1_hand_cpu_renderer.html#a8269f1879939d1b403787f982f10258d", null ], + [ "DELETE_COPY", "classop_1_1_hand_cpu_renderer.html#a66a7d318b240c73687320bf092363409", null ], + [ "renderHandInherited", "classop_1_1_hand_cpu_renderer.html#ae9e43ff22b0aae81dd88df3a313b0b0f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_hand_cpu_renderer.png b/web/html/doc/classop_1_1_hand_cpu_renderer.png new file mode 100644 index 000000000..5da0b89fc Binary files /dev/null and b/web/html/doc/classop_1_1_hand_cpu_renderer.png differ diff --git a/web/html/doc/classop_1_1_hand_detector-members.html b/web/html/doc/classop_1_1_hand_detector-members.html new file mode 100644 index 000000000..8f8873634 --- /dev/null +++ b/web/html/doc/classop_1_1_hand_detector-members.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::HandDetector Member List
+
+
+ +

This is the complete list of members for op::HandDetector, including all inherited members.

+ + + + + + +
detectHands(const Array< float > &poseKeypoints) constop::HandDetector
HandDetector(const PoseModel poseModel)op::HandDetectorexplicit
trackHands(const Array< float > &poseKeypoints)op::HandDetector
updateTracker(const std::array< Array< float >, 2 > &handKeypoints, const unsigned long long id)op::HandDetector
~HandDetector()op::HandDetectorvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_hand_detector.html b/web/html/doc/classop_1_1_hand_detector.html new file mode 100644 index 000000000..bbd42f3f4 --- /dev/null +++ b/web/html/doc/classop_1_1_hand_detector.html @@ -0,0 +1,243 @@ + + + + + + + +OpenPose: op::HandDetector Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::HandDetector Class Reference
+
+
+ +

#include <handDetector.hpp>

+ + + + + + + + + + + + +

+Public Member Functions

 HandDetector (const PoseModel poseModel)
 
virtual ~HandDetector ()
 
std::vector< std::array< Rectangle< float >, 2 > > detectHands (const Array< float > &poseKeypoints) const
 
std::vector< std::array< Rectangle< float >, 2 > > trackHands (const Array< float > &poseKeypoints)
 
void updateTracker (const std::array< Array< float >, 2 > &handKeypoints, const unsigned long long id)
 
+

Detailed Description

+
+

Definition at line 12 of file handDetector.hpp.

+

Constructor & Destructor Documentation

+ +

◆ HandDetector()

+ +
+
+ + + + + +
+ + + + + + + + +
op::HandDetector::HandDetector (const PoseModel poseModel)
+
+explicit
+
+ +
+
+ +

◆ ~HandDetector()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::HandDetector::~HandDetector ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ detectHands()

+ +
+
+ + + + + + + + +
std::vector<std::array<Rectangle<float>, 2> > op::HandDetector::detectHands (const Array< float > & poseKeypoints) const
+
+ +
+
+ +

◆ trackHands()

+ +
+
+ + + + + + + + +
std::vector<std::array<Rectangle<float>, 2> > op::HandDetector::trackHands (const Array< float > & poseKeypoints)
+
+ +
+
+ +

◆ updateTracker()

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::HandDetector::updateTracker (const std::array< Array< float >, 2 > & handKeypoints,
const unsigned long long id 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +

diff --git a/web/html/doc/classop_1_1_hand_detector.js b/web/html/doc/classop_1_1_hand_detector.js (new file)
diff --git a/web/html/doc/classop_1_1_hand_detector_from_txt-members.html b/web/html/doc/classop_1_1_hand_detector_from_txt-members.html (new file)

op::HandDetectorFromTxt Member List

This is the complete list of members for op::HandDetectorFromTxt, including all inherited members.

    detectHands()   (op::HandDetectorFromTxt)
    HandDetectorFromTxt(const std::string &txtDirectoryPath)   (op::HandDetectorFromTxt, explicit)
    ~HandDetectorFromTxt()   (op::HandDetectorFromTxt, virtual)

diff --git a/web/html/doc/classop_1_1_hand_detector_from_txt.html b/web/html/doc/classop_1_1_hand_detector_from_txt.html (new file)

op::HandDetectorFromTxt Class Reference

#include <handDetectorFromTxt.hpp>

Public Member Functions
     HandDetectorFromTxt (const std::string &txtDirectoryPath)
    virtual ~HandDetectorFromTxt ()
    std::vector< std::array< Rectangle< float >, 2 > > detectHands ()

Detailed Description

Definition at line 9 of file handDetectorFromTxt.hpp.

Constructor & Destructor Documentation

◆ HandDetectorFromTxt()
    op::HandDetectorFromTxt::HandDetectorFromTxt (const std::string & txtDirectoryPath)   [explicit]

◆ ~HandDetectorFromTxt()
    virtual op::HandDetectorFromTxt::~HandDetectorFromTxt ()   [virtual]

Member Function Documentation

◆ detectHands()
    std::vector< std::array< Rectangle< float >, 2 > > op::HandDetectorFromTxt::detectHands ()

The documentation for this class was generated from the following file: handDetectorFromTxt.hpp

diff --git a/web/html/doc/classop_1_1_hand_detector_from_txt.js b/web/html/doc/classop_1_1_hand_detector_from_txt.js (new file)
diff --git a/web/html/doc/classop_1_1_hand_extractor_caffe-members.html b/web/html/doc/classop_1_1_hand_extractor_caffe-members.html (new file)

op::HandExtractorCaffe Member List

This is the complete list of members for op::HandExtractorCaffe, including all inherited members.

    forwardPass(const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix &inputData)   (op::HandExtractorCaffe, virtual)
    getEnabled() const   (op::HandExtractorNet)
    getHandKeypoints() const   (op::HandExtractorNet)
    getHeatMaps() const   (op::HandExtractorNet)
    HandExtractorCaffe(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true)   (op::HandExtractorCaffe)
    HandExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)   (op::HandExtractorNet, explicit)
    initializationOnThread()   (op::HandExtractorNet)
    mEnabled   (op::HandExtractorNet, protected)
    mHandImageCrop   (op::HandExtractorNet, protected)
    mHandKeypoints   (op::HandExtractorNet, protected)
    mHeatMaps   (op::HandExtractorNet, protected)
    mHeatMapScaleMode   (op::HandExtractorNet, protected)
    mHeatMapTypes   (op::HandExtractorNet, protected)
    mMultiScaleNumberAndRange   (op::HandExtractorNet, protected)
    mNetOutputSize   (op::HandExtractorNet, protected)
    netInitializationOnThread()   (op::HandExtractorCaffe, virtual)
    setEnabled(const bool enabled)   (op::HandExtractorNet)
    ~HandExtractorCaffe()   (op::HandExtractorCaffe, virtual)
    ~HandExtractorNet()   (op::HandExtractorNet, virtual)

diff --git a/web/html/doc/classop_1_1_hand_extractor_caffe.html b/web/html/doc/classop_1_1_hand_extractor_caffe.html (new file)

op::HandExtractorCaffe Class Reference

#include <handExtractorCaffe.hpp>

Inheritance diagram for op::HandExtractorCaffe: derives from op::HandExtractorNet.

Public Member Functions
     HandExtractorCaffe (const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true)
    virtual ~HandExtractorCaffe ()
    void netInitializationOnThread ()
    void forwardPass (const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix &inputData)

Public Member Functions inherited from op::HandExtractorNet
     HandExtractorNet (const Point< int > &netInputSize, const Point< int > &netOutputSize, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)
    virtual ~HandExtractorNet ()
    void initializationOnThread ()
    std::array< Array< float >, 2 > getHeatMaps () const
    std::array< Array< float >, 2 > getHandKeypoints () const
    bool getEnabled () const
    void setEnabled (const bool enabled)

Additional Inherited Members (Protected Attributes inherited from op::HandExtractorNet)
    const std::pair< int, float > mMultiScaleNumberAndRange
    const Point< int > mNetOutputSize
    Array< float > mHandImageCrop
    std::array< Array< float >, 2 > mHandKeypoints
    const ScaleMode mHeatMapScaleMode
    const std::vector< HeatMapType > mHeatMapTypes
    std::array< Array< float >, 2 > mHeatMaps
    std::atomic< bool > mEnabled

Detailed Description

Hand keypoint extractor class for Caffe framework.

Definition at line 13 of file handExtractorCaffe.hpp.
Constructor & Destructor Documentation

◆ HandExtractorCaffe()
    op::HandExtractorCaffe::HandExtractorCaffe (const Point< int > & netInputSize, const Point< int > & netOutputSize, const std::string & modelFolder, const int gpuId, const int numberScales = 1, const float rangeScales = 0.4f, const std::vector< HeatMapType > & heatMapTypes = {}, const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging = true)

    Constructor of the HandExtractorCaffe class.

    Parameters
        netInputSize    Size at which the cropped image (where the hand is located) is resized.
        netOutputSize   Size of the final results. At the moment, it must be equal to netInputSize.
        modelFolder     Folder where the models are located.
        gpuId           The GPU index (0-based) which the deep net will use.
        numberScales    Number of scales to run. The more scales, the slower it will be but possibly also more accurate.
        rangeScales     The range between the smaller and bigger scale.

◆ ~HandExtractorCaffe()
    virtual op::HandExtractorCaffe::~HandExtractorCaffe ()   [virtual]

    Virtual destructor of the HandExtractorCaffe class. Required to allow inheritance.

Member Function Documentation

◆ forwardPass()
    void op::HandExtractorCaffe::forwardPass (const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix & inputData)   [virtual]

    This function extracts the hand keypoints for each detected hand in the image.

    Parameters
        handRectangles  Location of the hands in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Each element is a std::array of 2 elements: index 0 for the left hand and index 1 for the right hand. Each array element contains an op::Rectangle<float> (similar to cv::Rect, but for floating-point values) with the position of that hand, or (0,0,0,0) if the hand is missing (e.g., if a specific person has only half of the body inside the image).
        inputData       Original image in Matrix (BGR) format.

    Implements op::HandExtractorNet.

◆ netInitializationOnThread()
    void op::HandExtractorCaffe::netInitializationOnThread ()   [virtual]

    This function must be called before using any other function. It must also be called inside the thread in which the functions are going to be used.

    Implements op::HandExtractorNet.

The documentation for this class was generated from the following file: handExtractorCaffe.hpp
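Taken together, the calls documented above suggest the following usage sketch. It is only an illustration: the include paths assume the standard openpose/ header layout, `poseKeypoints` is assumed to come from OpenPose's body pose extractor, `inputData` is assumed to already wrap the BGR frame as an op::Matrix, and the extractor is assumed to have been constructed with the parameters above and initialized via initializationOnThread() on the calling thread.

```cpp
#include <array>
#include <openpose/hand/handDetector.hpp>
#include <openpose/hand/handExtractorCaffe.hpp>

// Hypothetical helper: extract left/right hand keypoints for one frame.
// `poseKeypoints` and `inputData` are assumed to be produced elsewhere.
std::array<op::Array<float>, 2> extractHands(
    const op::Array<float>& poseKeypoints,
    const op::Matrix& inputData,
    const op::HandDetector& handDetector,
    op::HandExtractorCaffe& handExtractor)
{
    // Left/right hand rectangles per person, estimated from the body keypoints.
    const auto handRectangles = handDetector.detectHands(poseKeypoints);
    // Run the Caffe hand network on the cropped hand regions of the BGR frame.
    handExtractor.forwardPass(handRectangles, inputData);
    // Index 0: left-hand keypoints, index 1: right-hand keypoints.
    return handExtractor.getHandKeypoints();
}
```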

diff --git a/web/html/doc/classop_1_1_hand_extractor_caffe.js b/web/html/doc/classop_1_1_hand_extractor_caffe.js (new file)
diff --git a/web/html/doc/classop_1_1_hand_extractor_caffe.png b/web/html/doc/classop_1_1_hand_extractor_caffe.png (new binary file)
diff --git a/web/html/doc/classop_1_1_hand_extractor_net-members.html b/web/html/doc/classop_1_1_hand_extractor_net-members.html (new file)

op::HandExtractorNet Member List

This is the complete list of members for op::HandExtractorNet, including all inherited members.

    forwardPass(const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix &cvInputData)=0   (op::HandExtractorNet, pure virtual)
    getEnabled() const   (op::HandExtractorNet)
    getHandKeypoints() const   (op::HandExtractorNet)
    getHeatMaps() const   (op::HandExtractorNet)
    HandExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)   (op::HandExtractorNet, explicit)
    initializationOnThread()   (op::HandExtractorNet)
    mEnabled   (op::HandExtractorNet, protected)
    mHandImageCrop   (op::HandExtractorNet, protected)
    mHandKeypoints   (op::HandExtractorNet, protected)
    mHeatMaps   (op::HandExtractorNet, protected)
    mHeatMapScaleMode   (op::HandExtractorNet, protected)
    mHeatMapTypes   (op::HandExtractorNet, protected)
    mMultiScaleNumberAndRange   (op::HandExtractorNet, protected)
    mNetOutputSize   (op::HandExtractorNet, protected)
    netInitializationOnThread()=0   (op::HandExtractorNet, protected, pure virtual)
    setEnabled(const bool enabled)   (op::HandExtractorNet)
    ~HandExtractorNet()   (op::HandExtractorNet, virtual)

diff --git a/web/html/doc/classop_1_1_hand_extractor_net.html b/web/html/doc/classop_1_1_hand_extractor_net.html (new file)

op::HandExtractorNet Class Reference   [abstract]

#include <handExtractorNet.hpp>

Inheritance diagram for op::HandExtractorNet: inherited by op::HandExtractorCaffe.

Public Member Functions
     HandExtractorNet (const Point< int > &netInputSize, const Point< int > &netOutputSize, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)
    virtual ~HandExtractorNet ()
    void initializationOnThread ()
    virtual void forwardPass (const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix &cvInputData)=0
    std::array< Array< float >, 2 > getHeatMaps () const
    std::array< Array< float >, 2 > getHandKeypoints () const
    bool getEnabled () const
    void setEnabled (const bool enabled)

Protected Member Functions
    virtual void netInitializationOnThread ()=0

Protected Attributes
    const std::pair< int, float > mMultiScaleNumberAndRange
    const Point< int > mNetOutputSize
    Array< float > mHandImageCrop
    std::array< Array< float >, 2 > mHandKeypoints
    const ScaleMode mHeatMapScaleMode
    const std::vector< HeatMapType > mHeatMapTypes
    std::array< Array< float >, 2 > mHeatMaps
    std::atomic< bool > mEnabled

Detailed Description

Hand keypoint extractor class.

Definition at line 13 of file handExtractorNet.hpp.
Constructor & Destructor Documentation

◆ HandExtractorNet()
    op::HandExtractorNet::HandExtractorNet (const Point< int > & netInputSize, const Point< int > & netOutputSize, const int numberScales = 1, const float rangeScales = 0.4f, const std::vector< HeatMapType > & heatMapTypes = {}, const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect)   [explicit]

    Constructor of the HandExtractorNet class.

    Parameters
        netInputSize    Size at which the cropped image (where the hand is located) is resized.
        netOutputSize   Size of the final results. At the moment, it must be equal to netInputSize.
        numberScales    Number of scales to run. The more scales, the slower it will be but possibly also more accurate.
        rangeScales     The range between the smaller and bigger scale.

◆ ~HandExtractorNet()
    virtual op::HandExtractorNet::~HandExtractorNet ()   [virtual]

    Virtual destructor of the HandExtractorNet class. Required to allow inheritance.

Member Function Documentation

◆ forwardPass()
    virtual void op::HandExtractorNet::forwardPass (const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix & cvInputData)   [pure virtual]

    This function extracts the hand keypoints for each detected hand in the image.

    Parameters
        handRectangles  Location of the hands in the image. It is a length-variable std::vector, where each index corresponds to a different person in the image. Each element is a std::array of 2 elements: index 0 for the left hand and index 1 for the right hand. Each array element contains an op::Rectangle<float> (similar to cv::Rect, but for floating-point values) with the position of that hand, or (0,0,0,0) if the hand is missing (e.g., if a specific person has only half of the body inside the image).
        cvInputData     Original image in Matrix (BGR) format.

    Implemented in op::HandExtractorCaffe.

◆ getEnabled()
    bool op::HandExtractorNet::getEnabled () const

◆ getHandKeypoints()
    std::array<Array<float>, 2> op::HandExtractorNet::getHandKeypoints () const

    This function returns the hand keypoints. VERY IMPORTANT: use getHandKeypoints().clone() if the keypoints are going to be edited in a different thread.

    Returns
        A std::array with all the left hand keypoints (index 0) and all the right ones (index 1). Each Array<float> follows the pose structure, i.e., the first dimension corresponds to all the people in the image, the second to each specific keypoint, and the third one to (x, y, score).

◆ getHeatMaps()
    std::array<Array<float>, 2> op::HandExtractorNet::getHeatMaps () const

◆ initializationOnThread()
    void op::HandExtractorNet::initializationOnThread ()

    This function must be called before using any other function. It must also be called inside the thread in which the functions are going to be used.

◆ netInitializationOnThread()
    virtual void op::HandExtractorNet::netInitializationOnThread ()   [protected, pure virtual]

    Implemented in op::HandExtractorCaffe.

◆ setEnabled()
    void op::HandExtractorNet::setEnabled (const bool enabled)

Member Data Documentation

◆ mEnabled
    std::atomic<bool> op::HandExtractorNet::mEnabled   [protected]
    Definition at line 78 of file handExtractorNet.hpp.

◆ mHandImageCrop
    Array<float> op::HandExtractorNet::mHandImageCrop   [protected]
    Definition at line 71 of file handExtractorNet.hpp.

◆ mHandKeypoints
    std::array<Array<float>, 2> op::HandExtractorNet::mHandKeypoints   [protected]
    Definition at line 72 of file handExtractorNet.hpp.

◆ mHeatMaps
    std::array<Array<float>, 2> op::HandExtractorNet::mHeatMaps   [protected]
    Definition at line 76 of file handExtractorNet.hpp.

◆ mHeatMapScaleMode
    const ScaleMode op::HandExtractorNet::mHeatMapScaleMode   [protected]
    Definition at line 74 of file handExtractorNet.hpp.

◆ mHeatMapTypes
    const std::vector<HeatMapType> op::HandExtractorNet::mHeatMapTypes   [protected]
    Definition at line 75 of file handExtractorNet.hpp.

◆ mMultiScaleNumberAndRange
    const std::pair<int, float> op::HandExtractorNet::mMultiScaleNumberAndRange   [protected]
    Definition at line 69 of file handExtractorNet.hpp.

◆ mNetOutputSize
    const Point<int> op::HandExtractorNet::mNetOutputSize   [protected]
    Definition at line 70 of file handExtractorNet.hpp.

The documentation for this class was generated from the following file: handExtractorNet.hpp
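A small sketch of how the returned keypoint structure can be traversed, under the assumptions that the extractor has already run forwardPass() on the current frame and that op::Array's getSize()/operator[] accessors behave as in the OpenPose C++ tutorials (flat indexing over person, keypoint and x/y/score); the include path and helper name are illustrative.

```cpp
#include <array>
#include <cstdio>
#include <openpose/core/array.hpp>

// Hypothetical helper: print the confidence of every left-hand keypoint.
// `handKeypoints` is assumed to be the result of getHandKeypoints(), laid out
// as (person, keypoint, x/y/score) as described above.
void printLeftHandScores(const std::array<op::Array<float>, 2>& handKeypoints)
{
    const auto& leftHands = handKeypoints[0];     // index 0: left hands
    const auto numberPeople = leftHands.getSize(0);
    const auto numberParts  = leftHands.getSize(1);
    const auto channels     = leftHands.getSize(2); // 3: x, y, score
    for (auto person = 0; person < numberPeople; person++)
        for (auto part = 0; part < numberParts; part++)
        {
            const auto baseIndex = channels * (person * numberParts + part);
            const auto score = leftHands[baseIndex + 2]; // +0: x, +1: y, +2: score
            std::printf("person %d, keypoint %d, score %.3f\n", person, part, score);
        }
}
```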

diff --git a/web/html/doc/classop_1_1_hand_extractor_net.js b/web/html/doc/classop_1_1_hand_extractor_net.js (new file)
diff --git a/web/html/doc/classop_1_1_hand_extractor_net.png b/web/html/doc/classop_1_1_hand_extractor_net.png (new binary file)
diff --git a/web/html/doc/classop_1_1_hand_gpu_renderer-members.html b/web/html/doc/classop_1_1_hand_gpu_renderer-members.html (new file)

op::HandGpuRenderer Member List

This is the complete list of members for op::HandGpuRenderer, including all inherited members.

    cpuToGpuMemoryIfNotCopiedYet(const float *const cpuMemory, const unsigned long long memoryVolume)   (op::GpuRenderer, protected)
    getAlphaHeatMap() const   (op::Renderer)
    getAlphaKeypoint() const   (op::Renderer)
    getBlendOriginalFrame() const   (op::Renderer)
    getSharedParameters()   (op::GpuRenderer)
    getShowGooglyEyes() const   (op::Renderer)
    GpuRenderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)   (op::GpuRenderer, explicit)
    gpuToCpuMemoryIfLastRenderer(float *cpuMemory, const unsigned long long memoryVolume)   (op::GpuRenderer, protected)
    HandGpuRenderer(const float renderThreshold, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP)   (op::HandGpuRenderer)
    increaseElementToRender(const int increment)   (op::Renderer)
    initializationOnThread()   (op::HandGpuRenderer, virtual)
    mBlendOriginalFrame   (op::Renderer, protected)
    mRenderThreshold   (op::Renderer, protected)
    mShowGooglyEyes   (op::Renderer, protected)
    Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)   (op::Renderer, explicit)
    renderHand(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints, const float scaleInputToOutput)   (op::HandRenderer)
    renderHandInherited(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints)   (op::HandGpuRenderer, virtual)
    setAlphaHeatMap(const float alphaHeatMap)   (op::Renderer)
    setAlphaKeypoint(const float alphaKeypoint)   (op::Renderer)
    setBlendOriginalFrame(const bool blendOriginalFrame)   (op::Renderer)
    setElementToRender(const int elementToRender)   (op::Renderer)
    setElementToRender(const ElementToRender elementToRender)   (op::Renderer)
    setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)   (op::GpuRenderer)
    setSharedParametersAndIfLast(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)   (op::GpuRenderer)
    setShowGooglyEyes(const bool showGooglyEyes)   (op::Renderer)
    spElementToRender   (op::Renderer, protected)
    spGpuMemory   (op::GpuRenderer, protected)
    spNumberElementsToRender   (op::Renderer, protected)
    ~GpuRenderer()   (op::GpuRenderer, virtual)
    ~HandGpuRenderer()   (op::HandGpuRenderer, virtual)
    ~HandRenderer()   (op::HandRenderer, inline, virtual)
    ~Renderer()   (op::Renderer, virtual)

diff --git a/web/html/doc/classop_1_1_hand_gpu_renderer.html b/web/html/doc/classop_1_1_hand_gpu_renderer.html (new file)

op::HandGpuRenderer Class Reference

#include <handGpuRenderer.hpp>

Inheritance diagram for op::HandGpuRenderer: derives from op::GpuRenderer and op::HandRenderer; op::GpuRenderer in turn derives from op::Renderer.

Public Member Functions
     HandGpuRenderer (const float renderThreshold, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP)
    virtual ~HandGpuRenderer ()
    void initializationOnThread ()
    void renderHandInherited (Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints)

Public Member Functions inherited from op::GpuRenderer
     GpuRenderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
    virtual ~GpuRenderer ()
    std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int > >, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int > > getSharedParameters ()
    void setSharedParametersAndIfLast (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)
    void setSharedParameters (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)

Public Member Functions inherited from op::Renderer
     Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
    virtual ~Renderer ()
    void increaseElementToRender (const int increment)
    void setElementToRender (const int elementToRender)
    void setElementToRender (const ElementToRender elementToRender)
    bool getBlendOriginalFrame () const
    void setBlendOriginalFrame (const bool blendOriginalFrame)
    float getAlphaKeypoint () const
    void setAlphaKeypoint (const float alphaKeypoint)
    float getAlphaHeatMap () const
    void setAlphaHeatMap (const float alphaHeatMap)
    bool getShowGooglyEyes () const
    void setShowGooglyEyes (const bool showGooglyEyes)

Public Member Functions inherited from op::HandRenderer
    virtual ~HandRenderer ()
    void renderHand (Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints, const float scaleInputToOutput)

Additional Inherited Members
    Protected Member Functions inherited from op::GpuRenderer: void cpuToGpuMemoryIfNotCopiedYet (const float *const cpuMemory, const unsigned long long memoryVolume), void gpuToCpuMemoryIfLastRenderer (float *cpuMemory, const unsigned long long memoryVolume)
    Protected Attributes inherited from op::GpuRenderer: std::shared_ptr< float * > spGpuMemory
    Protected Attributes inherited from op::Renderer: const float mRenderThreshold, std::atomic< bool > mBlendOriginalFrame, std::shared_ptr< std::atomic< unsigned int > > spElementToRender, std::shared_ptr< const unsigned int > spNumberElementsToRender, std::atomic< bool > mShowGooglyEyes

Detailed Description

Definition at line 11 of file handGpuRenderer.hpp.
Constructor & Destructor Documentation

◆ HandGpuRenderer()
    op::HandGpuRenderer::HandGpuRenderer (const float renderThreshold, const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP)

◆ ~HandGpuRenderer()
    virtual op::HandGpuRenderer::~HandGpuRenderer ()   [virtual]

Member Function Documentation

◆ initializationOnThread()
    void op::HandGpuRenderer::initializationOnThread ()   [virtual]

    Reimplemented from op::HandRenderer.

◆ renderHandInherited()
    void op::HandGpuRenderer::renderHandInherited (Array< float > & outputData, const std::array< Array< float >, 2 > & handKeypoints)   [virtual]

    Implements op::HandRenderer.

The documentation for this class was generated from the following file: handGpuRenderer.hpp
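The page gives no usage text, so the following is only a hedged sketch of how the renderer API above could be driven outside the usual op::Wrapper pipeline (which normally owns and configures its renderers). The threshold value, the standalone construction, and the include path are illustrative assumptions.

```cpp
#include <array>
#include <openpose/hand/handGpuRenderer.hpp>

// Hypothetical sketch: draw previously extracted hand keypoints onto the
// output frame. `outputData` is assumed to already hold the frame to draw on,
// `handKeypoints` to come from a hand extractor, and `scaleInputToOutput` to
// map keypoints from the input resolution to the output resolution.
void drawHands(op::Array<float>& outputData,
               const std::array<op::Array<float>, 2>& handKeypoints,
               const float scaleInputToOutput)
{
    // Render threshold of 0.2f is an example; alpha values use the defaults above.
    op::HandGpuRenderer handRenderer{0.2f};
    handRenderer.initializationOnThread(); // call once, on the rendering thread
    handRenderer.renderHand(outputData, handKeypoints, scaleInputToOutput);
}
```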

diff --git a/web/html/doc/classop_1_1_hand_gpu_renderer.js b/web/html/doc/classop_1_1_hand_gpu_renderer.js (new file)
diff --git a/web/html/doc/classop_1_1_hand_gpu_renderer.png b/web/html/doc/classop_1_1_hand_gpu_renderer.png (new binary file)
diff --git a/web/html/doc/classop_1_1_hand_renderer-members.html b/web/html/doc/classop_1_1_hand_renderer-members.html (new file)

op::HandRenderer Member List

This is the complete list of members for op::HandRenderer, including all inherited members.

    initializationOnThread()   (op::HandRenderer, inline, virtual)
    renderHand(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints, const float scaleInputToOutput)   (op::HandRenderer)
    ~HandRenderer()   (op::HandRenderer, inline, virtual)

diff --git a/web/html/doc/classop_1_1_hand_renderer.html b/web/html/doc/classop_1_1_hand_renderer.html (new file)

op::HandRenderer Class Reference   [abstract]

#include <handRenderer.hpp>

Inheritance diagram for op::HandRenderer: inherited by op::HandCpuRenderer and op::HandGpuRenderer.

Public Member Functions
    virtual ~HandRenderer ()
    virtual void initializationOnThread ()
    void renderHand (Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints, const float scaleInputToOutput)

Detailed Description

Definition at line 8 of file handRenderer.hpp.

Constructor & Destructor Documentation

◆ ~HandRenderer()
    virtual op::HandRenderer::~HandRenderer ()   [inline, virtual]
    Definition at line 11 of file handRenderer.hpp.

Member Function Documentation

◆ initializationOnThread()
    virtual void op::HandRenderer::initializationOnThread ()   [inline, virtual]

    Reimplemented in op::HandGpuRenderer.

    Definition at line 13 of file handRenderer.hpp.

◆ renderHand()
    void op::HandRenderer::renderHand (Array< float > & outputData, const std::array< Array< float >, 2 > & handKeypoints, const float scaleInputToOutput)

The documentation for this class was generated from the following file: handRenderer.hpp

diff --git a/web/html/doc/classop_1_1_hand_renderer.js b/web/html/doc/classop_1_1_hand_renderer.js (new file)
diff --git a/web/html/doc/classop_1_1_hand_renderer.png b/web/html/doc/classop_1_1_hand_renderer.png (new binary file)
diff --git a/web/html/doc/classop_1_1_heat_map_saver-members.html b/web/html/doc/classop_1_1_heat_map_saver-members.html (new file)

op::HeatMapSaver Member List

This is the complete list of members for op::HeatMapSaver, including all inherited members.

    FileSaver(const std::string &directoryPath)   (op::FileSaver, explicit, protected)
    getNextFileName(const unsigned long long index) const   (op::FileSaver, protected)
    getNextFileName(const std::string &fileNameNoExtension) const   (op::FileSaver, protected)
    HeatMapSaver(const std::string &directoryPath, const std::string &imageFormat)   (op::HeatMapSaver)
    saveHeatMaps(const std::vector< Array< float >> &heatMaps, const std::string &fileName) const   (op::HeatMapSaver)
    ~FileSaver()   (op::FileSaver, protected, virtual)
    ~HeatMapSaver()   (op::HeatMapSaver, virtual)

diff --git a/web/html/doc/classop_1_1_heat_map_saver.html b/web/html/doc/classop_1_1_heat_map_saver.html (new file)

op::HeatMapSaver Class Reference

#include <heatMapSaver.hpp>

Inheritance diagram for op::HeatMapSaver: derives from op::FileSaver.

Public Member Functions
     HeatMapSaver (const std::string &directoryPath, const std::string &imageFormat)
    virtual ~HeatMapSaver ()
    void saveHeatMaps (const std::vector< Array< float >> &heatMaps, const std::string &fileName) const

Additional Inherited Members (Protected Member Functions inherited from op::FileSaver)
     FileSaver (const std::string &directoryPath)
    virtual ~FileSaver ()
    std::string getNextFileName (const unsigned long long index) const
    std::string getNextFileName (const std::string &fileNameNoExtension) const

Detailed Description

Definition at line 9 of file heatMapSaver.hpp.

Constructor & Destructor Documentation

◆ HeatMapSaver()
    op::HeatMapSaver::HeatMapSaver (const std::string & directoryPath, const std::string & imageFormat)

◆ ~HeatMapSaver()
    virtual op::HeatMapSaver::~HeatMapSaver ()   [virtual]

Member Function Documentation

◆ saveHeatMaps()
    void op::HeatMapSaver::saveHeatMaps (const std::vector< Array< float >> & heatMaps, const std::string & fileName) const

The documentation for this class was generated from the following file: heatMapSaver.hpp

diff --git a/web/html/doc/classop_1_1_heat_map_saver.js b/web/html/doc/classop_1_1_heat_map_saver.js (new file)
diff --git a/web/html/doc/classop_1_1_heat_map_saver.png b/web/html/doc/classop_1_1_heat_map_saver.png (new binary file)
diff --git a/web/html/doc/classop_1_1_image_directory_reader-members.html b/web/html/doc/classop_1_1_image_directory_reader-members.html (new file)

op::ImageDirectoryReader Member List

This is the complete list of members for op::ImageDirectoryReader, including all inherited members.

    checkFrameIntegrity(Matrix &frame)   (op::Producer, protected)
    get(const int capProperty)   (op::ImageDirectoryReader, virtual)
    op::Producer::get(const ProducerProperty property)   (op::Producer)
    getCameraExtrinsics()   (op::Producer, virtual)
    getCameraIntrinsics()   (op::Producer, virtual)
    getCameraMatrices()   (op::Producer, virtual)
    getFrame()   (op::Producer)
    getFrames()   (op::Producer)
    getNextFrameName()   (op::ImageDirectoryReader, virtual)
    getType()   (op::Producer, inline)
    ifEndedResetOrRelease()   (op::Producer, protected)
    ImageDirectoryReader(const std::string &imageDirectoryPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1)   (op::ImageDirectoryReader, explicit)
    isOpened() const   (op::ImageDirectoryReader, inline, virtual)
    keepDesiredFrameRate()   (op::Producer, protected)
    Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)   (op::Producer, explicit)
    release()   (op::ImageDirectoryReader, inline, virtual)
    set(const int capProperty, const double value)   (op::ImageDirectoryReader, virtual)
    op::Producer::set(const ProducerProperty property, const double value)   (op::Producer)
    setProducerFpsMode(const ProducerFpsMode fpsMode)   (op::Producer)
    ~ImageDirectoryReader()   (op::ImageDirectoryReader, virtual)
    ~Producer()   (op::Producer, virtual)

diff --git a/web/html/doc/classop_1_1_image_directory_reader.html b/web/html/doc/classop_1_1_image_directory_reader.html (new file)

op::ImageDirectoryReader Class Reference

#include <imageDirectoryReader.hpp>

Inheritance diagram for op::ImageDirectoryReader: derives from op::Producer.

Public Member Functions
     ImageDirectoryReader (const std::string &imageDirectoryPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1)
    virtual ~ImageDirectoryReader ()
    std::string getNextFrameName ()
    bool isOpened () const
    void release ()
    double get (const int capProperty)
    void set (const int capProperty, const double value)

Public Member Functions inherited from op::Producer
     Producer (const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
    virtual ~Producer ()
    Matrix getFrame ()
    std::vector< Matrix > getFrames ()
    virtual std::vector< Matrix > getCameraMatrices ()
    virtual std::vector< Matrix > getCameraExtrinsics ()
    virtual std::vector< Matrix > getCameraIntrinsics ()
    void setProducerFpsMode (const ProducerFpsMode fpsMode)
    ProducerType getType ()
    double get (const ProducerProperty property)
    void set (const ProducerProperty property, const double value)

Additional Inherited Members (Protected Member Functions inherited from op::Producer)
    void checkFrameIntegrity (Matrix &frame)
    void ifEndedResetOrRelease ()
    void keepDesiredFrameRate ()

Detailed Description

ImageDirectoryReader is a class to extract frames from an image directory. Its interface imitates the cv::VideoCapture class, so it can be used quite similarly. Thus, it is quite similar to VideoReader and WebcamReader.

Definition at line 14 of file imageDirectoryReader.hpp.
Constructor & Destructor Documentation

◆ ImageDirectoryReader()
    op::ImageDirectoryReader::ImageDirectoryReader (const std::string & imageDirectoryPath, const std::string & cameraParameterPath = "", const bool undistortImage = false, const int numberViews = -1)   [explicit]

    Constructor of ImageDirectoryReader. It sets the image directory path from which the images will be loaded and generates a std::vector<std::string> with the list of images in that directory.

    Parameters
        imageDirectoryPath   const std::string parameter with the folder path containing the images.
        cameraParameterPath  const std::string parameter with the folder path containing the camera parameters (only required if imageDirectorystereo > 1).
        numberViews          const int parameter with the number of images per iteration (>1 would represent stereo processing).

◆ ~ImageDirectoryReader()
    virtual op::ImageDirectoryReader::~ImageDirectoryReader ()   [virtual]

Member Function Documentation

◆ get()
    double op::ImageDirectoryReader::get (const int capProperty)   [virtual]

    This function is a wrapper of cv::VideoCapture::get. It allows getting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

    Parameters
        capProperty  int indicating the property to be retrieved.

    Returns
        double with the property value.

    Implements op::Producer.

◆ getNextFrameName()
    std::string op::ImageDirectoryReader::getNextFrameName ()   [virtual]

    This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

    Returns
        std::string with a unique frame name.

    Implements op::Producer.

◆ isOpened()
    bool op::ImageDirectoryReader::isOpened () const   [inline, virtual]

    This function returns whether the Producer instance is still opened and able to retrieve more frames.

    Returns
        bool indicating whether the Producer is opened.

    Implements op::Producer.

    Definition at line 34 of file imageDirectoryReader.hpp.

◆ release()
    void op::ImageDirectoryReader::release ()   [inline, virtual]

    This function releases and closes the Producer. After it is called, no more frames can be retrieved from Producer::getFrames.

    Implements op::Producer.

    Definition at line 39 of file imageDirectoryReader.hpp.

◆ set()
    void op::ImageDirectoryReader::set (const int capProperty, const double value)   [virtual]

    This function is a wrapper of cv::VideoCapture::set. It allows setting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

    Parameters
        capProperty  int indicating the property to be modified.
        value        double indicating the new value to be assigned.

    Implements op::Producer.

The documentation for this class was generated from the following file: imageDirectoryReader.hpp
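A minimal sketch of the cv::VideoCapture-style loop described above. It assumes the standard openpose/ include layout, that op::Matrix exposes an empty() check, and that the reader stops reporting isOpened() or returns an empty frame once the directory is exhausted; the folder path is an example.

```cpp
#include <openpose/producer/imageDirectoryReader.hpp>

// Hypothetical sketch: iterate over all images in a folder, one op::Matrix
// (BGR) frame per call to getFrame().
int main()
{
    op::ImageDirectoryReader reader{"examples/media/"};
    while (reader.isOpened())
    {
        const auto frame = reader.getFrame(); // next image from the directory
        if (frame.empty())
            break; // no more images (or the reader was released)
        // ... process `frame`, e.g., feed it to the OpenPose pipeline ...
    }
    reader.release();
    return 0;
}
```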

diff --git a/web/html/doc/classop_1_1_image_directory_reader.js b/web/html/doc/classop_1_1_image_directory_reader.js (new file)
diff --git a/web/html/doc/classop_1_1_image_directory_reader.png b/web/html/doc/classop_1_1_image_directory_reader.png (new binary file)
diff --git a/web/html/doc/classop_1_1_image_saver-members.html b/web/html/doc/classop_1_1_image_saver-members.html (new file)

op::ImageSaver Member List

This is the complete list of members for op::ImageSaver, including all inherited members.

    FileSaver(const std::string &directoryPath)   (op::FileSaver, explicit, protected)
    getNextFileName(const unsigned long long index) const   (op::FileSaver, protected)
    getNextFileName(const std::string &fileNameNoExtension) const   (op::FileSaver, protected)
    ImageSaver(const std::string &directoryPath, const std::string &imageFormat)   (op::ImageSaver)
    saveImages(const Matrix &cvOutputData, const std::string &fileName) const   (op::ImageSaver)
    saveImages(const std::vector< Matrix > &matOutputDatas, const std::string &fileName) const   (op::ImageSaver)
    ~FileSaver()   (op::FileSaver, protected, virtual)
    ~ImageSaver()   (op::ImageSaver, virtual)

diff --git a/web/html/doc/classop_1_1_image_saver.html b/web/html/doc/classop_1_1_image_saver.html (new file)

op::ImageSaver Class Reference

#include <imageSaver.hpp>

Inheritance diagram for op::ImageSaver: derives from op::FileSaver.

Public Member Functions
     ImageSaver (const std::string &directoryPath, const std::string &imageFormat)
    virtual ~ImageSaver ()
    void saveImages (const Matrix &cvOutputData, const std::string &fileName) const
    void saveImages (const std::vector< Matrix > &matOutputDatas, const std::string &fileName) const

Additional Inherited Members (Protected Member Functions inherited from op::FileSaver)
     FileSaver (const std::string &directoryPath)
    virtual ~FileSaver ()
    std::string getNextFileName (const unsigned long long index) const
    std::string getNextFileName (const std::string &fileNameNoExtension) const

Detailed Description

Definition at line 9 of file imageSaver.hpp.

Constructor & Destructor Documentation

◆ ImageSaver()
    op::ImageSaver::ImageSaver (const std::string & directoryPath, const std::string & imageFormat)

◆ ~ImageSaver()
    virtual op::ImageSaver::~ImageSaver ()   [virtual]

Member Function Documentation

◆ saveImages() [1/2]
    void op::ImageSaver::saveImages (const Matrix & cvOutputData, const std::string & fileName) const

◆ saveImages() [2/2]
    void op::ImageSaver::saveImages (const std::vector< Matrix > & matOutputDatas, const std::string & fileName) const

The documentation for this class was generated from the following file: imageSaver.hpp
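No usage text is given on this page; the following hedged sketch only shows how the constructor and the single-image saveImages() overload above fit together. The output directory, the image format, and the way the final file path is composed are assumptions.

```cpp
#include <string>
#include <openpose/filestream/imageSaver.hpp>

// Hypothetical helper: write one rendered frame to disk.
// `outputFrame` is assumed to be an op::Matrix produced elsewhere
// (e.g., the rendered output of the OpenPose pipeline).
void saveFrame(const op::Matrix& outputFrame, const std::string& frameName)
{
    // Directory and format are examples; the saver is assumed to compose the
    // final path from the directory, the file name and the image format.
    const op::ImageSaver imageSaver{"output/", "png"};
    imageSaver.saveImages(outputFrame, frameName);
}
```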

diff --git a/web/html/doc/classop_1_1_image_saver.js b/web/html/doc/classop_1_1_image_saver.js (new file)
diff --git a/web/html/doc/classop_1_1_image_saver.png b/web/html/doc/classop_1_1_image_saver.png (new binary file)
diff --git a/web/html/doc/classop_1_1_ip_camera_reader-members.html b/web/html/doc/classop_1_1_ip_camera_reader-members.html (new file)

op::IpCameraReader Member List

This is the complete list of members for op::IpCameraReader, including all inherited members.

    checkFrameIntegrity(Matrix &frame)   (op::Producer, protected)
    get(const int capProperty)   (op::IpCameraReader, inline, virtual)
    op::Producer::get(const ProducerProperty property)   (op::Producer)
    getCameraExtrinsics()   (op::Producer, virtual)
    getCameraIntrinsics()   (op::Producer, virtual)
    getCameraMatrices()   (op::Producer, virtual)
    getFrame()   (op::Producer)
    getFrames()   (op::Producer)
    getNextFrameName()   (op::IpCameraReader, virtual)
    getType()   (op::Producer, inline)
    ifEndedResetOrRelease()   (op::Producer, protected)
    IpCameraReader(const std::string &cameraPath, const std::string &cameraParameterPath="", const bool undistortImage=false)   (op::IpCameraReader, explicit)
    isOpened() const   (op::IpCameraReader, inline, virtual)
    keepDesiredFrameRate()   (op::Producer, protected)
    Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)   (op::Producer, explicit)
    release()   (op::VideoCaptureReader, virtual)
    resetWebcam(const int index, const bool throwExceptionIfNoOpened)   (op::VideoCaptureReader, protected)
    set(const int capProperty, const double value)   (op::IpCameraReader, inline, virtual)
    op::Producer::set(const ProducerProperty property, const double value)   (op::Producer)
    setProducerFpsMode(const ProducerFpsMode fpsMode)   (op::Producer)
    VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)   (op::VideoCaptureReader, explicit)
    VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)   (op::VideoCaptureReader, explicit)
    ~IpCameraReader()   (op::IpCameraReader, virtual)
    ~Producer()   (op::Producer, virtual)
    ~VideoCaptureReader()   (op::VideoCaptureReader, virtual)

diff --git a/web/html/doc/classop_1_1_ip_camera_reader.html b/web/html/doc/classop_1_1_ip_camera_reader.html (new file)

op::IpCameraReader Class Reference
+
+
+ +

#include <ipCameraReader.hpp>

+
+Inheritance diagram for op::IpCameraReader:
+
+
+ + +op::VideoCaptureReader +op::Producer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 IpCameraReader (const std::string &cameraPath, const std::string &cameraParameterPath="", const bool undistortImage=false)
 
virtual ~IpCameraReader ()
 
std::string getNextFrameName ()
 
bool isOpened () const
 
double get (const int capProperty)
 
void set (const int capProperty, const double value)
 
- Public Member Functions inherited from op::VideoCaptureReader
 VideoCaptureReader (const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
 VideoCaptureReader (const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
virtual ~VideoCaptureReader ()
 
void release ()
 
- Public Member Functions inherited from op::Producer
 Producer (const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
 
virtual ~Producer ()
 
Matrix getFrame ()
 
std::vector< Matrix > getFrames ()

virtual std::vector< Matrix > getCameraMatrices ()

virtual std::vector< Matrix > getCameraExtrinsics ()

virtual std::vector< Matrix > getCameraIntrinsics ()
 
void setProducerFpsMode (const ProducerFpsMode fpsMode)
 
ProducerType getType ()
 
double get (const ProducerProperty property)
 
void set (const ProducerProperty property, const double value)
 
+ + + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::VideoCaptureReader
void resetWebcam (const int index, const bool throwExceptionIfNoOpened)
 
- Protected Member Functions inherited from op::Producer
void checkFrameIntegrity (Matrix &frame)
 
void ifEndedResetOrRelease ()
 
void keepDesiredFrameRate ()
 
+

Detailed Description

+

IpCameraReader is a wrapper of the cv::VideoCapture class for IP camera streaming.

+ +

Definition at line 12 of file ipCameraReader.hpp.
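A minimal usage sketch, not taken from the official examples (the include paths and the RTSP URL are assumptions/placeholders): construct the reader with an IP camera link and pull frames through the inherited op::Producer interface.

#include <openpose/core/matrix.hpp>               // op::Matrix (path assumed)
#include <openpose/producer/ipCameraReader.hpp>   // op::IpCameraReader (path assumed)

int main()
{
    // Placeholder RTSP endpoint; replace with a real camera URL.
    op::IpCameraReader ipCameraReader{"rtsp://user:password@192.168.0.10/stream1"};
    while (ipCameraReader.isOpened())
    {
        const op::Matrix frame = ipCameraReader.getFrame(); // inherited from op::Producer
        if (frame.empty())
            break;
        // ... hand `frame` to the rest of the pipeline ...
    }
    return 0;
}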

+

Constructor & Destructor Documentation

+ +

◆ IpCameraReader()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::IpCameraReader::IpCameraReader (const std::string & cameraPath,
const std::string & cameraParameterPath = "",
const bool undistortImage = false 
)
+
+explicit
+
+

Constructor of IpCameraReader. It opens the IP camera as a wrapper of cv::VideoCapture.

Parameters
+ + +
cameraPath: const std::string parameter with the full IP camera link (URL).
+
+
+ +
+
+ +

◆ ~IpCameraReader()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::IpCameraReader::~IpCameraReader ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ get()

+ +
+
+ + + + + +
+ + + + + + + + +
double op::IpCameraReader::get (const int capProperty)
+
+inlinevirtual
+
+

This function is a wrapper of cv::VideoCapture::get. It allows getting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + +
capProperty: int indicating the property to be read.
+
+
+
Returns
double returning the property value.
+ +

Implements op::VideoCaptureReader.

+ +

Definition at line 31 of file ipCameraReader.hpp.

+ +
+
+ +

◆ getNextFrameName()

+ +
+
+ + + + + +
+ + + + + + + +
std::string op::IpCameraReader::getNextFrameName ()
+
+virtual
+
+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.
+ +

Implements op::VideoCaptureReader.

+ +
+
+ +

◆ isOpened()

+ +
+
+ + + + + +
+ + + + + + + +
bool op::IpCameraReader::isOpened () const
+
+inlinevirtual
+
+

This function returns whether the Producer instance is still opened and able to retrieve more frames.

Returns
bool indicating whether the Producer is opened.
+ +

Reimplemented from op::VideoCaptureReader.

+ +

Definition at line 26 of file ipCameraReader.hpp.

+ +
+
+ +

◆ set()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::IpCameraReader::set (const int capProperty,
const double value 
)
+
+inlinevirtual
+
+

This function is a wrapper of cv::VideoCapture::set. It allows setting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + + +
capProperty: int indicating the property to be modified.
value: double indicating the new value to be assigned.
+
+
+ +

Implements op::VideoCaptureReader.

+ +

Definition at line 36 of file ipCameraReader.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_ip_camera_reader.js b/web/html/doc/classop_1_1_ip_camera_reader.js new file mode 100644 index 000000000..c4a9a78f7 --- /dev/null +++ b/web/html/doc/classop_1_1_ip_camera_reader.js @@ -0,0 +1,9 @@ +var classop_1_1_ip_camera_reader = +[ + [ "IpCameraReader", "classop_1_1_ip_camera_reader.html#af3a67a2705107e04e79672fa087847c5", null ], + [ "~IpCameraReader", "classop_1_1_ip_camera_reader.html#ad90e52c898ddf32503ce94685977aae0", null ], + [ "get", "classop_1_1_ip_camera_reader.html#aa7ad6adac6e401193e03d279176dd889", null ], + [ "getNextFrameName", "classop_1_1_ip_camera_reader.html#a0c1582090cc7c54dd9cb752207b52986", null ], + [ "isOpened", "classop_1_1_ip_camera_reader.html#ac26913b4ff841f56f43bb53b012a2401", null ], + [ "set", "classop_1_1_ip_camera_reader.html#ac13cc7da97a31d8e69eaa64b2a7b31ba", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_ip_camera_reader.png b/web/html/doc/classop_1_1_ip_camera_reader.png new file mode 100644 index 000000000..dd7d6d530 Binary files /dev/null and b/web/html/doc/classop_1_1_ip_camera_reader.png differ diff --git a/web/html/doc/classop_1_1_json_ofstream-members.html b/web/html/doc/classop_1_1_json_ofstream-members.html new file mode 100644 index 000000000..b27805d0c --- /dev/null +++ b/web/html/doc/classop_1_1_json_ofstream-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::JsonOfstream Member List
+
+
+ +

This is the complete list of members for op::JsonOfstream, including all inherited members.

+ + + + + + + + + + + + + + +
arrayClose() | op::JsonOfstream
arrayOpen() | op::JsonOfstream
comma() | op::JsonOfstream | inline
enter() | op::JsonOfstream
JsonOfstream(const std::string &filePath, const bool humanReadable=true) | op::JsonOfstream | explicit
JsonOfstream(JsonOfstream &&jsonOfstream) | op::JsonOfstream
key(const std::string &string) | op::JsonOfstream
objectClose() | op::JsonOfstream
objectOpen() | op::JsonOfstream
operator=(JsonOfstream &&jsonOfstream) | op::JsonOfstream
plainText(const T &value) | op::JsonOfstream | inline
version(const std::string &version) | op::JsonOfstream
~JsonOfstream() | op::JsonOfstream | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_json_ofstream.html b/web/html/doc/classop_1_1_json_ofstream.html new file mode 100644 index 000000000..17044fe86 --- /dev/null +++ b/web/html/doc/classop_1_1_json_ofstream.html @@ -0,0 +1,433 @@ + + + + + + + +OpenPose: op::JsonOfstream Class Reference + + + + + + + + + + + + + +
op::JsonOfstream Class Reference
+
+
+ +

#include <jsonOfstream.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 JsonOfstream (const std::string &filePath, const bool humanReadable=true)
 
 JsonOfstream (JsonOfstream &&jsonOfstream)
 
JsonOfstream & operator= (JsonOfstream &&jsonOfstream)
 
virtual ~JsonOfstream ()
 
void objectOpen ()
 
void objectClose ()
 
void arrayOpen ()
 
void arrayClose ()
 
void version (const std::string &version)
 
void key (const std::string &string)
 
template<typename T >
void plainText (const T &value)
 
void comma ()
 
void enter ()
 
+

Detailed Description

+
+

Definition at line 9 of file jsonOfstream.hpp.
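JsonOfstream is a thin streaming writer, so the caller emits separators in the right order. A hedged sketch of how the calls listed above might compose a small file (the include path, file name and version value are assumptions, and the exact text produced by version() is not specified here):

#include <openpose/filestream/jsonOfstream.hpp>   // include path assumed

void writeMinimalJson()
{
    op::JsonOfstream json{"output/example.json", /*humanReadable*/ true};
    json.objectOpen();
    json.version("1.3");          // presumably emits a "version" entry
    json.comma();
    json.key("people");
    json.arrayOpen();
    // one object per person would be written here via key()/plainText()/comma()
    json.arrayClose();
    json.objectClose();
}   // the underlying file stream is closed when `json` is destroyed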

+

Constructor & Destructor Documentation

+ +

◆ JsonOfstream() [1/2]

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
op::JsonOfstream::JsonOfstream (const std::string & filePath,
const bool humanReadable = true 
)
+
+explicit
+
+ +
+
+ +

◆ JsonOfstream() [2/2]

+ +
+
+ + + + + + + + +
op::JsonOfstream::JsonOfstream (JsonOfstream && jsonOfstream)
+
+

Move constructor. It leaves the original JsonOfstream in a moved-from state.

Parameters
+ + +
jsonOfstream: the JsonOfstream to be moved.
+
+
+ +
+
+ +

◆ ~JsonOfstream()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::JsonOfstream::~JsonOfstream ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ arrayClose()

+ +
+
+ + + + + + + +
void op::JsonOfstream::arrayClose ()
+
+ +
+
+ +

◆ arrayOpen()

+ +
+
+ + + + + + + +
void op::JsonOfstream::arrayOpen ()
+
+ +
+
+ +

◆ comma()

+ +
+
+ + + + + +
+ + + + + + + +
void op::JsonOfstream::comma ()
+
+inline
+
+ +

Definition at line 49 of file jsonOfstream.hpp.

+ +
+
+ +

◆ enter()

+ +
+
+ + + + + + + +
void op::JsonOfstream::enter ()
+
+ +
+
+ +

◆ key()

+ +
+
+ + + + + + + + +
void op::JsonOfstream::key (const std::string & string)
+
+ +
+
+ +

◆ objectClose()

+ +
+
+ + + + + + + +
void op::JsonOfstream::objectClose ()
+
+ +
+
+ +

◆ objectOpen()

+ +
+
+ + + + + + + +
void op::JsonOfstream::objectOpen ()
+
+ +
+
+ +

◆ operator=()

+ +
+
+ + + + + + + + +
JsonOfstream& op::JsonOfstream::operator= (JsonOfstream && jsonOfstream)
+
+

Move assignment. Similar to JsonOfstream(JsonOfstream&& jsonOfstream).

Parameters
+ + +
jsonOfstream: the JsonOfstream to be moved.
+
+
+
Returns
The resulting JsonOfstream.
+ +
+
+ +

◆ plainText()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + +
void op::JsonOfstream::plainText (const T & value)
+
+inline
+
+ +

Definition at line 44 of file jsonOfstream.hpp.

+ +
+
+ +

◆ version()

+ +
+
+ + + + + + + + +
void op::JsonOfstream::version (const std::string & version)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_json_ofstream.js b/web/html/doc/classop_1_1_json_ofstream.js new file mode 100644 index 000000000..c999fa9b9 --- /dev/null +++ b/web/html/doc/classop_1_1_json_ofstream.js @@ -0,0 +1,16 @@ +var classop_1_1_json_ofstream = +[ + [ "JsonOfstream", "classop_1_1_json_ofstream.html#afa4b3e1dee27f5afd0017b95c0f5e364", null ], + [ "JsonOfstream", "classop_1_1_json_ofstream.html#ab8220b4336ccc8998cc38f0fa5c36918", null ], + [ "~JsonOfstream", "classop_1_1_json_ofstream.html#a5c4b866df81cf36d8f6dcdfc8414de8f", null ], + [ "arrayClose", "classop_1_1_json_ofstream.html#a3f940d3ad51d0acb7126d62a5617fd69", null ], + [ "arrayOpen", "classop_1_1_json_ofstream.html#a4cc6d50256354b3dc7385e2db01aabc0", null ], + [ "comma", "classop_1_1_json_ofstream.html#ae4468279f789c8026d431b2ef62646f9", null ], + [ "enter", "classop_1_1_json_ofstream.html#a32f058b961605d418df1258a1dc5e6a0", null ], + [ "key", "classop_1_1_json_ofstream.html#af0c7f763e7e809810c00b394a260672e", null ], + [ "objectClose", "classop_1_1_json_ofstream.html#a45eeb25d9413fc31786f315b46c341cc", null ], + [ "objectOpen", "classop_1_1_json_ofstream.html#a5c38e36c1449d808dd4ab6558d65289c", null ], + [ "operator=", "classop_1_1_json_ofstream.html#aff76578c824c0314e33231884b40147e", null ], + [ "plainText", "classop_1_1_json_ofstream.html#aa432ff172e10bb9e3b6e2bfa6124c648", null ], + [ "version", "classop_1_1_json_ofstream.html#a6a5e0e4f3cdf8f93fb1bdef8cb63b0a2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_keep_top_n_people-members.html b/web/html/doc/classop_1_1_keep_top_n_people-members.html new file mode 100644 index 000000000..0382e7e9c --- /dev/null +++ b/web/html/doc/classop_1_1_keep_top_n_people-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::KeepTopNPeople Member List
+
+
+ +

This is the complete list of members for op::KeepTopNPeople, including all inherited members.

+ + + + +
KeepTopNPeople(const int numberPeopleMax) | op::KeepTopNPeople | explicit
keepTopPeople(const Array< float > &peopleArrays, const Array< float > &poseScores) const | op::KeepTopNPeople
~KeepTopNPeople() | op::KeepTopNPeople | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_keep_top_n_people.html b/web/html/doc/classop_1_1_keep_top_n_people.html new file mode 100644 index 000000000..cae9412d0 --- /dev/null +++ b/web/html/doc/classop_1_1_keep_top_n_people.html @@ -0,0 +1,203 @@ + + + + + + + +OpenPose: op::KeepTopNPeople Class Reference + + + + + + + + + + + + + +
op::KeepTopNPeople Class Reference
+
+
+ +

#include <keepTopNPeople.hpp>

+ + + + + + + + +

+Public Member Functions

 KeepTopNPeople (const int numberPeopleMax)
 
virtual ~KeepTopNPeople ()
 
Array< float > keepTopPeople (const Array< float > &peopleArrays, const Array< float > &poseScores) const
 
+

Detailed Description

+
+

Definition at line 8 of file keepTopNPeople.hpp.
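A short sketch of the intended use (the include paths are assumptions, and the inputs would normally come from the pose extractor): keep only the N best-scored people from a keypoint Array.

#include <openpose/core/array.hpp>               // op::Array (path assumed)
#include <openpose/core/keepTopNPeople.hpp>      // include path assumed

op::Array<float> keepBestTwo(const op::Array<float>& poseKeypoints,
                             const op::Array<float>& poseScores)
{
    const op::KeepTopNPeople keepTopNPeople{2};  // numberPeopleMax = 2
    return keepTopNPeople.keepTopPeople(poseKeypoints, poseScores);
}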

+

Constructor & Destructor Documentation

+ +

◆ KeepTopNPeople()

+ +
+
+ + + + + +
+ + + + + + + + +
op::KeepTopNPeople::KeepTopNPeople (const int numberPeopleMax)
+
+explicit
+
+ +
+
+ +

◆ ~KeepTopNPeople()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::KeepTopNPeople::~KeepTopNPeople ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ keepTopPeople()

+ +
+
+ + + + + + + + + + + + + + + + + + +
Array<float> op::KeepTopNPeople::keepTopPeople (const Array< float > & peopleArrays,
const Array< float > & poseScores 
) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_keep_top_n_people.js b/web/html/doc/classop_1_1_keep_top_n_people.js new file mode 100644 index 000000000..ed31f4147 --- /dev/null +++ b/web/html/doc/classop_1_1_keep_top_n_people.js @@ -0,0 +1,6 @@ +var classop_1_1_keep_top_n_people = +[ + [ "KeepTopNPeople", "classop_1_1_keep_top_n_people.html#ae9419ae35da5b0547989f19795a26808", null ], + [ "~KeepTopNPeople", "classop_1_1_keep_top_n_people.html#a7675c9c3668a2610827da67818a67741", null ], + [ "keepTopPeople", "classop_1_1_keep_top_n_people.html#a556a0d8d97985e0b73fc78e372be6ea8", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_keypoint_saver-members.html b/web/html/doc/classop_1_1_keypoint_saver-members.html new file mode 100644 index 000000000..613ca6f0e --- /dev/null +++ b/web/html/doc/classop_1_1_keypoint_saver-members.html @@ -0,0 +1,110 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::KeypointSaver Member List
+
+
+ +

This is the complete list of members for op::KeypointSaver, including all inherited members.

+ + + + + + + + +
FileSaver(const std::string &directoryPath) | op::FileSaver | explicit, protected
getNextFileName(const unsigned long long index) const | op::FileSaver | protected
getNextFileName(const std::string &fileNameNoExtension) const | op::FileSaver | protected
KeypointSaver(const std::string &directoryPath, const DataFormat format) | op::KeypointSaver
saveKeypoints(const std::vector< Array< float >> &keypointVector, const std::string &fileName, const std::string &keypointName) const | op::KeypointSaver
~FileSaver() | op::FileSaver | protected, virtual
~KeypointSaver() | op::KeypointSaver | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_keypoint_saver.html b/web/html/doc/classop_1_1_keypoint_saver.html new file mode 100644 index 000000000..1164e664e --- /dev/null +++ b/web/html/doc/classop_1_1_keypoint_saver.html @@ -0,0 +1,232 @@ + + + + + + + +OpenPose: op::KeypointSaver Class Reference + + + + + + + + + + + + + +
op::KeypointSaver Class Reference
+
+
+ +

#include <keypointSaver.hpp>

+
+Inheritance diagram for op::KeypointSaver:
+
+
+ + +op::FileSaver + +
+ + + + + + + + +

+Public Member Functions

 KeypointSaver (const std::string &directoryPath, const DataFormat format)
 
virtual ~KeypointSaver ()
 
void saveKeypoints (const std::vector< Array< float >> &keypointVector, const std::string &fileName, const std::string &keypointName) const
 
+ + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::FileSaver
 FileSaver (const std::string &directoryPath)
 
virtual ~FileSaver ()
 
std::string getNextFileName (const unsigned long long index) const
 
std::string getNextFileName (const std::string &fileNameNoExtension) const
 
+

Detailed Description

+
+

Definition at line 10 of file keypointSaver.hpp.
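A hedged sketch of how the saver might be driven (the include paths, output directory, file name and the DataFormat enumerator are assumptions): write the pose keypoints of one frame to disk.

#include <openpose/core/array.hpp>                  // op::Array (path assumed)
#include <openpose/filestream/keypointSaver.hpp>    // include path assumed

void saveFrameKeypoints(const op::Array<float>& poseKeypoints)
{
    // op::DataFormat::Json is assumed to be one of the available formats.
    const op::KeypointSaver keypointSaver{"output/", op::DataFormat::Json};
    keypointSaver.saveKeypoints({poseKeypoints}, "frame_000000", "pose");
}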

+

Constructor & Destructor Documentation

+ +

◆ KeypointSaver()

+ +
+
+ + + + + + + + + + + + + + + + + + +
op::KeypointSaver::KeypointSaver (const std::string & directoryPath,
const DataFormat format 
)
+
+ +
+
+ +

◆ ~KeypointSaver()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::KeypointSaver::~KeypointSaver ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ saveKeypoints()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
void op::KeypointSaver::saveKeypoints (const std::vector< Array< float >> & keypointVector,
const std::string & fileName,
const std::string & keypointName 
) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_keypoint_saver.js b/web/html/doc/classop_1_1_keypoint_saver.js new file mode 100644 index 000000000..a7944eb4e --- /dev/null +++ b/web/html/doc/classop_1_1_keypoint_saver.js @@ -0,0 +1,6 @@ +var classop_1_1_keypoint_saver = +[ + [ "KeypointSaver", "classop_1_1_keypoint_saver.html#aa6d9eb36cfd40c5cfa3995420cdf3dfa", null ], + [ "~KeypointSaver", "classop_1_1_keypoint_saver.html#a903a4fa8be0b0cb5008d015126ac0e59", null ], + [ "saveKeypoints", "classop_1_1_keypoint_saver.html#aad663949dc5f2262ebdc69ed0d0caa1b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_keypoint_saver.png b/web/html/doc/classop_1_1_keypoint_saver.png new file mode 100644 index 000000000..b2cf42ce9 Binary files /dev/null and b/web/html/doc/classop_1_1_keypoint_saver.png differ diff --git a/web/html/doc/classop_1_1_keypoint_scaler-members.html b/web/html/doc/classop_1_1_keypoint_scaler-members.html new file mode 100644 index 000000000..895b32430 --- /dev/null +++ b/web/html/doc/classop_1_1_keypoint_scaler-members.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::KeypointScaler Member List
+
+
+ +

This is the complete list of members for op::KeypointScaler, including all inherited members.

+ + + + + + +
KeypointScaler(const ScaleMode scaleMode) | op::KeypointScaler | explicit
scale(Array< float > &arrayToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const | op::KeypointScaler
scale(std::vector< Array< float >> &arraysToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const | op::KeypointScaler
scale(std::vector< std::vector< std::array< float, 3 >>> &poseCandidates, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const | op::KeypointScaler
~KeypointScaler() | op::KeypointScaler | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_keypoint_scaler.html b/web/html/doc/classop_1_1_keypoint_scaler.html new file mode 100644 index 000000000..b18d721c0 --- /dev/null +++ b/web/html/doc/classop_1_1_keypoint_scaler.html @@ -0,0 +1,299 @@ + + + + + + + +OpenPose: op::KeypointScaler Class Reference + + + + + + + + + + + + + +
op::KeypointScaler Class Reference
+
+
+ +

#include <keypointScaler.hpp>

+ + + + + + + + + + + + +

+Public Member Functions

 KeypointScaler (const ScaleMode scaleMode)
 
virtual ~KeypointScaler ()
 
void scale (Array< float > &arrayToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const
 
void scale (std::vector< Array< float >> &arraysToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const
 
void scale (std::vector< std::vector< std::array< float, 3 >>> &poseCandidates, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const
 
+

Detailed Description

+
+

Definition at line 9 of file keypointScaler.hpp.
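A sketch of the typical call (the include paths and the ScaleMode enumerator are assumptions; the scale factors would come from the earlier resize steps of the pipeline): map keypoints produced at net resolution back to the chosen output coordinates.

#include <openpose/core/array.hpp>               // op::Array (path assumed)
#include <openpose/core/point.hpp>               // op::Point (path assumed)
#include <openpose/core/keypointScaler.hpp>      // include path assumed

void rescaleToOutput(op::Array<float>& poseKeypoints,
                     const double scaleInputToOutput,
                     const double scaleNetToOutput,
                     const op::Point<int>& producerSize)
{
    // op::ScaleMode::InputResolution is assumed to be one of the available modes.
    const op::KeypointScaler keypointScaler{op::ScaleMode::InputResolution};
    keypointScaler.scale(poseKeypoints, scaleInputToOutput, scaleNetToOutput, producerSize);
}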

+

Constructor & Destructor Documentation

+ +

◆ KeypointScaler()

+ +
+
+ + + + + +
+ + + + + + + + +
op::KeypointScaler::KeypointScaler (const ScaleMode scaleMode)
+
+explicit
+
+ +
+
+ +

◆ ~KeypointScaler()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::KeypointScaler::~KeypointScaler ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ scale() [1/3]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::KeypointScaler::scale (Array< float > & arrayToScale,
const double scaleInputToOutput,
const double scaleNetToOutput,
const Point< int > & producerSize 
) const
+
+ +
+
+ +

◆ scale() [2/3]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::KeypointScaler::scale (std::vector< Array< float >> & arraysToScale,
const double scaleInputToOutput,
const double scaleNetToOutput,
const Point< int > & producerSize 
) const
+
+ +
+
+ +

◆ scale() [3/3]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::KeypointScaler::scale (std::vector< std::vector< std::array< float, 3 >>> & poseCandidates,
const double scaleInputToOutput,
const double scaleNetToOutput,
const Point< int > & producerSize 
) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_keypoint_scaler.js b/web/html/doc/classop_1_1_keypoint_scaler.js new file mode 100644 index 000000000..b29a759d6 --- /dev/null +++ b/web/html/doc/classop_1_1_keypoint_scaler.js @@ -0,0 +1,8 @@ +var classop_1_1_keypoint_scaler = +[ + [ "KeypointScaler", "classop_1_1_keypoint_scaler.html#a0f556c1b0fad63c7c3551a5d4fd72219", null ], + [ "~KeypointScaler", "classop_1_1_keypoint_scaler.html#a5797e76ffea7e3b6a4080b04f50f0c0f", null ], + [ "scale", "classop_1_1_keypoint_scaler.html#a9c2d575ce49bb6112b2a099cb92f07cc", null ], + [ "scale", "classop_1_1_keypoint_scaler.html#a687a64bbca93d54292f191762efe20d7", null ], + [ "scale", "classop_1_1_keypoint_scaler.html#a42e46aea4d43fcf0886f06c9700148ea", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_matrix-members.html b/web/html/doc/classop_1_1_matrix-members.html new file mode 100644 index 000000000..6c46e0aef --- /dev/null +++ b/web/html/doc/classop_1_1_matrix-members.html @@ -0,0 +1,132 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::Matrix Member List
+
+
+ +

This is the complete list of members for op::Matrix, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
channels() const | op::Matrix
checkVector(const int elemChannels, const int depth=-1, const bool requireContinuous=true) const | op::Matrix
clone() const | op::Matrix
cols() const | op::Matrix
copyTo(Matrix &outputMat) const | op::Matrix
data() | op::Matrix
dataConst() const | op::Matrix
dataPseudoConst() const | op::Matrix
depth() const | op::Matrix
dims() const | op::Matrix
elemSize() const | op::Matrix
elemSize1() const | op::Matrix
empty() const | op::Matrix
eye(const int rows, const int cols, const int type) | op::Matrix | static
getConstCvMat() const | op::Matrix
getCvMat() | op::Matrix
isContinuous() const | op::Matrix
isSubmatrix() const | op::Matrix
Matrix() | op::Matrix
Matrix(const void *cvMatPtr) | op::Matrix | explicit
Matrix(const int rows, const int cols, const int type) | op::Matrix | explicit
Matrix(const int rows, const int cols, const int type, void *cvMatPtr) | op::Matrix | explicit
rows() const | op::Matrix
setTo(const double value) | op::Matrix
size(const int dimension) const | op::Matrix
splitCvMatIntoVectorMatrix(std::vector< Matrix > &matrixesResized, const void *const cvMatPtr) | op::Matrix | static
step1(const int i=0) const | op::Matrix
total() const | op::Matrix
type() const | op::Matrix
+
+ + + + diff --git a/web/html/doc/classop_1_1_matrix.html b/web/html/doc/classop_1_1_matrix.html new file mode 100644 index 000000000..502469658 --- /dev/null +++ b/web/html/doc/classop_1_1_matrix.html @@ -0,0 +1,836 @@ + + + + + + + +OpenPose: op::Matrix Class Reference + + + + + + + + + + + + + +
op::Matrix Class Reference

#include <matrix.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 Matrix ()
 
 Matrix (const void *cvMatPtr)
 
 Matrix (const int rows, const int cols, const int type)
 
 Matrix (const int rows, const int cols, const int type, void *cvMatPtr)
 
Matrix clone () const
 
void * getCvMat ()
 
const void * getConstCvMat () const
 
unsigned char * data ()
 
const unsigned char * dataConst () const
 
unsigned char * dataPseudoConst () const
 
int cols () const
 
int rows () const
 
int size (const int dimension) const
 
int dims () const
 
bool isContinuous () const
 
bool isSubmatrix () const
 
size_t elemSize () const
 
size_t elemSize1 () const
 
int type () const
 
int depth () const
 
int channels () const
 
size_t step1 (const int i=0) const
 
bool empty () const
 
size_t total () const
 
int checkVector (const int elemChannels, const int depth=-1, const bool requireContinuous=true) const
 
void setTo (const double value)
 
void copyTo (Matrix &outputMat) const
 
+ + + + + +

+Static Public Member Functions

static void splitCvMatIntoVectorMatrix (std::vector< Matrix > &matrixesResized, const void *const cvMatPtr)
 
static Matrix eye (const int rows, const int cols, const int type)
 
+

Detailed Description

+

Matrix: a binding of cv::Mat used to avoid exposing OpenCV as a dependency in the public headers.

+ +

Definition at line 95 of file matrix.hpp.
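A sketch of the round trip between cv::Mat and op::Matrix that this binding enables (the include path for matrix.hpp is assumed; as the constructor documentation below explains, the Matrix only borrows the cv::Mat):

#include <opencv2/core.hpp>
#include <openpose/core/matrix.hpp>   // include path assumed

void wrapAndUnwrap()
{
    cv::Mat cvFrame = cv::Mat::zeros(480, 640, CV_8UC3);
    op::Matrix opFrame(&cvFrame);                 // borrows the cv::Mat, no copy
    // Basic properties are available without including OpenCV in public headers:
    const int rows = opFrame.rows();              // 480
    const int cols = opFrame.cols();              // 640
    // When OpenCV is available, the original cv::Mat can be recovered:
    cv::Mat* recovered = (cv::Mat*)opFrame.getCvMat();
    (void)rows; (void)cols; (void)recovered;
}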

+

Constructor & Destructor Documentation

+ +

◆ Matrix() [1/4]

+ +
+
+ + + + + + + +
op::Matrix::Matrix ()
+
+ +
+
+ +

◆ Matrix() [2/4]

+ +
+
+ + + + + +
+ + + + + + + + +
op::Matrix::Matrix (const void * cvMatPtr)
+
+explicit
+
+
Parameters
+ + +
cvMatPtr: should point to a cv::Mat element, otherwise it will cause a crash (core dump). A void pointer is used to avoid explicitly exposing 3rd-party libraries in the headers.
+
+
+ +
+
+ +

◆ Matrix() [3/4]

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::Matrix::Matrix (const int rows,
const int cols,
const int type 
)
+
+explicit
+
+

Analogous to cv::Mat(int rows, int cols, int type).

+ +
+
+ +

◆ Matrix() [4/4]

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::Matrix::Matrix (const int rows,
const int cols,
const int type,
void * cvMatPtr 
)
+
+explicit
+
+

Analogous to cv::Mat(int rows, int cols, int type, void *data, size_t step=AUTO_STEP). Very important: this Matrix only "borrows" the pointer, so the caller must keep the allocated memory alive until this Matrix destructor is called, and is also responsible for deallocating that memory.

Parameters
+ + +
cvMatPtr: should be a cv::Mat::data (or analogous) element, otherwise it will cause a crash (core dump). A void pointer is used to avoid explicitly exposing 3rd-party libraries in the headers.
+
+
+ +
+
+

Member Function Documentation

+ +

◆ channels()

+ +
+
+ + + + + + + +
int op::Matrix::channels () const
+
+ +
+
+ +

◆ checkVector()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
int op::Matrix::checkVector (const int elemChannels,
const int depth = -1,
const bool requireContinuous = true 
) const
+
+ +
+
+ +

◆ clone()

+ +
+
+ + + + + + + +
Matrix op::Matrix::clone () const
+
+ +
+
+ +

◆ cols()

+ +
+
+ + + + + + + +
int op::Matrix::cols () const
+
+

Equivalent to cv::Mat::cols

+ +
+
+ +

◆ copyTo()

+ +
+
+ + + + + + + + +
void op::Matrix::copyTo (MatrixoutputMat) const
+
+ +
+
+ +

◆ data()

+ +
+
+ + + + + + + +
unsigned char* op::Matrix::data ()
+
+

Equivalent to cv::Mat::data

Returns
A raw pointer to the internal data of cv::Mat.
+ +
+
+ +

◆ dataConst()

+ +
+
+ + + + + + + +
const unsigned char* op::Matrix::dataConst () const
+
+

Equivalent to cv::Mat::data

Returns
A raw pointer to the internal data of cv::Mat.
+ +
+
+ +

◆ dataPseudoConst()

+ +
+
+ + + + + + + +
unsigned char* op::Matrix::dataPseudoConst () const
+
+

Similar to dataConst(), but it allows the data to be edited. This function is only implemented for Pybind11 usage.

Returns
A raw pointer to the internal data of cv::Mat.
+ +
+
+ +

◆ depth()

+ +
+
+ + + + + + + +
int op::Matrix::depth () const
+
+ +
+
+ +

◆ dims()

+ +
+
+ + + + + + + +
int op::Matrix::dims () const
+
+

Equivalent to cv::Mat::dims

+ +
+
+ +

◆ elemSize()

+ +
+
+ + + + + + + +
size_t op::Matrix::elemSize () const
+
+ +
+
+ +

◆ elemSize1()

+ +
+
+ + + + + + + +
size_t op::Matrix::elemSize1 () const
+
+ +
+
+ +

◆ empty()

+ +
+
+ + + + + + + +
bool op::Matrix::empty () const
+
+ +
+
+ +

◆ eye()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
static Matrix op::Matrix::eye (const int rows,
const int cols,
const int type 
)
+
+static
+
+

Equivalent to cv::Mat::eye

+ +
+
+ +

◆ getConstCvMat()

+ +
+
+ + + + + + + +
const void* op::Matrix::getConstCvMat () const
+
+
Returns
const cv::Mat*.
+ +
+
+ +

◆ getCvMat()

+ +
+
+ + + + + + + +
void* op::Matrix::getCvMat ()
+
+
Returns
cv::Mat*.
+ +
+
+ +

◆ isContinuous()

+ +
+
+ + + + + + + +
bool op::Matrix::isContinuous () const
+
+

Equivalent to the analogous cv::Mat functions.

+ +
+
+ +

◆ isSubmatrix()

+ +
+
+ + + + + + + +
bool op::Matrix::isSubmatrix () const
+
+ +
+
+ +

◆ rows()

+ +
+
+ + + + + + + +
int op::Matrix::rows () const
+
+

Equivalent to cv::Mat::rows

+ +
+
+ +

◆ setTo()

+ +
+
+ + + + + + + + +
void op::Matrix::setTo (const double value)
+
+

Similar to the analogous cv::Mat functions.

+ +
+
+ +

◆ size()

+ +
+
+ + + + + + + + +
int op::Matrix::size (const int dimension) const
+
+

Equivalent to cv::Mat::size[dimension]

+ +
+
+ +

◆ splitCvMatIntoVectorMatrix()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
static void op::Matrix::splitCvMatIntoVectorMatrix (std::vector< Matrix > & matrixesResized,
const void *const cvMatPtr 
)
+
+static
+
+
Parameters
+ + + +
matrixesResized: output std::vector< Matrix >. For 3-D OpenPose, if its size is greater than 1, the image is assumed to be composed of that many horizontally stacked images. It must already be resized to the final number of views, to avoid internally allocating/removing std::vector elements (which can cause errors when different std DLLs are used).
cvMatPtr: should point to a cv::Mat element, otherwise it will cause a crash (core dump). A void pointer is used to avoid explicitly exposing 3rd-party libraries in the headers.
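A sketch under those constraints (the include paths and the image file are assumptions): the destination vector is sized to the number of horizontally stacked views before the call.

#include <vector>
#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <openpose/core/matrix.hpp>   // include path assumed

std::vector<op::Matrix> splitThreeViews()
{
    // Placeholder file: three camera views stacked side by side in one image.
    cv::Mat stacked = cv::imread("three_views_side_by_side.png");
    std::vector<op::Matrix> views(3);                          // must be pre-sized, as documented
    op::Matrix::splitCvMatIntoVectorMatrix(views, &stacked);   // fills one Matrix per view
    return views;
}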
+
+
+ +
+
+ +

◆ step1()

+ +
+
+ + + + + + + + +
size_t op::Matrix::step1 (const int i = 0) const
+
+ +
+
+ +

◆ total()

+ +
+
+ + + + + + + +
size_t op::Matrix::total () const
+
+ +
+
+ +

◆ type()

+ +
+
+ + + + + + + +
int op::Matrix::type () const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_matrix.js b/web/html/doc/classop_1_1_matrix.js new file mode 100644 index 000000000..f270b6156 --- /dev/null +++ b/web/html/doc/classop_1_1_matrix.js @@ -0,0 +1,30 @@ +var classop_1_1_matrix = +[ + [ "Matrix", "classop_1_1_matrix.html#adbdc98003dd0f666c845ac2acf592bd8", null ], + [ "Matrix", "classop_1_1_matrix.html#af9dc44c30ec3ae5f8e7ba8f76516985a", null ], + [ "Matrix", "classop_1_1_matrix.html#a770bbf0242b96b2e746d7f1e30dbf8fc", null ], + [ "Matrix", "classop_1_1_matrix.html#a53786b5c97e1cded5accbcb3cd6b992d", null ], + [ "channels", "classop_1_1_matrix.html#a4555d0f39c54ad5f7adcb39fe06503cc", null ], + [ "checkVector", "classop_1_1_matrix.html#a77cd25c8e23a4eec148039ab4832cde1", null ], + [ "clone", "classop_1_1_matrix.html#abc101fe6c039f6ef2311c5e9cef4c293", null ], + [ "cols", "classop_1_1_matrix.html#ac2171dc14ef5480496c05c115b6dd579", null ], + [ "copyTo", "classop_1_1_matrix.html#a6714cef92d6dce3089841ea124cd2b7d", null ], + [ "data", "classop_1_1_matrix.html#a69d3316b25c1fce55f067e92b31e4d57", null ], + [ "dataConst", "classop_1_1_matrix.html#a9af637b50e808c1d84e179cc6acb45b4", null ], + [ "dataPseudoConst", "classop_1_1_matrix.html#ab65ba706b58675da9a4512d448d44370", null ], + [ "depth", "classop_1_1_matrix.html#ae33558724a713e9a36f8dc0062d267a8", null ], + [ "dims", "classop_1_1_matrix.html#aabfd2f25b2459aac510e1e31b207fcf3", null ], + [ "elemSize", "classop_1_1_matrix.html#a401c028c88a65b69c0c029cfc990f631", null ], + [ "elemSize1", "classop_1_1_matrix.html#ae459fb36ef45c1215a7db39af8a8e6cf", null ], + [ "empty", "classop_1_1_matrix.html#a7f9df7fbdc9ef76e158f72d306f88ec2", null ], + [ "getConstCvMat", "classop_1_1_matrix.html#a1beb13525ec86c9827a7116eb2d175b7", null ], + [ "getCvMat", "classop_1_1_matrix.html#a9326d59a12659563d123ea6587fd4415", null ], + [ "isContinuous", "classop_1_1_matrix.html#ae82b851dd176317d72df95461a4bad76", null ], + [ "isSubmatrix", "classop_1_1_matrix.html#aa0ab094e21bab6757f502866bce7e79c", null ], + [ "rows", "classop_1_1_matrix.html#a3099b24c0ee295014d95c99a20615fdd", null ], + [ "setTo", "classop_1_1_matrix.html#ad74c013aa1aaed93271275cce6c77972", null ], + [ "size", "classop_1_1_matrix.html#a93188dad84f0f0a20f7a631edd32a620", null ], + [ "step1", "classop_1_1_matrix.html#a41ec72e2d80f73025c4c0837364c8193", null ], + [ "total", "classop_1_1_matrix.html#a09859c3f88b8c75c7d12f53667304450", null ], + [ "type", "classop_1_1_matrix.html#aac1572705e72a18198a8b2d32d1b5c24", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_maximum_caffe-members.html b/web/html/doc/classop_1_1_maximum_caffe-members.html new file mode 100644 index 000000000..084adf0d9 --- /dev/null +++ b/web/html/doc/classop_1_1_maximum_caffe-members.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::MaximumCaffe< T > Member List
+
+
+ +

This is the complete list of members for op::MaximumCaffe< T >, including all inherited members.

+ + + + + + + + + + + +
Backward_cpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom) | op::MaximumCaffe< T > | virtual
Backward_gpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom) | op::MaximumCaffe< T > | virtual
Forward(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::MaximumCaffe< T > | virtual
Forward_cpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::MaximumCaffe< T > | virtual
Forward_gpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::MaximumCaffe< T > | virtual
LayerSetUp(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::MaximumCaffe< T > | virtual
MaximumCaffe() | op::MaximumCaffe< T > | explicit
Reshape(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::MaximumCaffe< T > | virtual
type() const | op::MaximumCaffe< T > | inline, virtual
~MaximumCaffe() | op::MaximumCaffe< T > | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_maximum_caffe.html b/web/html/doc/classop_1_1_maximum_caffe.html new file mode 100644 index 000000000..ee582e4ed --- /dev/null +++ b/web/html/doc/classop_1_1_maximum_caffe.html @@ -0,0 +1,502 @@ + + + + + + + +OpenPose: op::MaximumCaffe< T > Class Template Reference + + + + + + + + + + + + + +
op::MaximumCaffe< T > Class Template Reference
+
+
+ +

#include <maximumCaffe.hpp>

+ + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 MaximumCaffe ()
 
virtual ~MaximumCaffe ()
 
virtual void LayerSetUp (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Reshape (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual const char * type () const
 
virtual void Forward (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_cpu (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_gpu (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Backward_cpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
virtual void Backward_gpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
+

Detailed Description

+

template<typename T> class op::MaximumCaffe< T >

+ + +

Definition at line 12 of file maximumCaffe.hpp.

+

Constructor & Destructor Documentation

+ +

◆ MaximumCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
op::MaximumCaffe< T >::MaximumCaffe ()
+
+explicit
+
+ +
+
+ +

◆ ~MaximumCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual op::MaximumCaffe< T >::~MaximumCaffe ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ Backward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::MaximumCaffe< T >::Backward_cpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Backward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::MaximumCaffe< T >::Backward_gpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Forward()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::MaximumCaffe< T >::Forward (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::MaximumCaffe< T >::Forward_cpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::MaximumCaffe< T >::Forward_gpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ LayerSetUp()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::MaximumCaffe< T >::LayerSetUp (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Reshape()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::MaximumCaffe< T >::Reshape (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ type()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual const char* op::MaximumCaffe< T >::type () const
+
+inlinevirtual
+
+ +

Definition at line 23 of file maximumCaffe.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_maximum_caffe.js b/web/html/doc/classop_1_1_maximum_caffe.js new file mode 100644 index 000000000..befc6841d --- /dev/null +++ b/web/html/doc/classop_1_1_maximum_caffe.js @@ -0,0 +1,13 @@ +var classop_1_1_maximum_caffe = +[ + [ "MaximumCaffe", "classop_1_1_maximum_caffe.html#a66bd0aa863a97647ae6350d1f886ea51", null ], + [ "~MaximumCaffe", "classop_1_1_maximum_caffe.html#a0b438980e5c2fce978e9de80f75afcd3", null ], + [ "Backward_cpu", "classop_1_1_maximum_caffe.html#ad21700230d1f1ac1139e8ce76574232c", null ], + [ "Backward_gpu", "classop_1_1_maximum_caffe.html#a91989f6e0a2c1349c33815a8cd659e52", null ], + [ "Forward", "classop_1_1_maximum_caffe.html#a51604d40efcfa63c5a46dc257c72cf9c", null ], + [ "Forward_cpu", "classop_1_1_maximum_caffe.html#ae88c10cadaef2e4e7347ef7f8c101b67", null ], + [ "Forward_gpu", "classop_1_1_maximum_caffe.html#a6e44cdf4dc3fce4d1dcc75ce29bc051e", null ], + [ "LayerSetUp", "classop_1_1_maximum_caffe.html#a47047083e35d2af5a969acbf7cb55674", null ], + [ "Reshape", "classop_1_1_maximum_caffe.html#ab9fb5ce2358801ac4e85fa25f052baa4", null ], + [ "type", "classop_1_1_maximum_caffe.html#a8d047aa2e08e49199f9f422191e9bdd2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_net-members.html b/web/html/doc/classop_1_1_net-members.html new file mode 100644 index 000000000..da769daa3 --- /dev/null +++ b/web/html/doc/classop_1_1_net-members.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::Net Member List
+
+
+ +

This is the complete list of members for op::Net, including all inherited members.

+ + + + + +
forwardPass(const Array< float > &inputData) const =0 | op::Net | pure virtual
getOutputBlobArray() const =0 | op::Net | pure virtual
initializationOnThread()=0 | op::Net | pure virtual
~Net() | op::Net | inline, virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_net.html b/web/html/doc/classop_1_1_net.html new file mode 100644 index 000000000..dd415c7e1 --- /dev/null +++ b/web/html/doc/classop_1_1_net.html @@ -0,0 +1,245 @@ + + + + + + + +OpenPose: op::Net Class Reference + + + + + + + + + + + + + +
op::Net Class Reference (abstract)
+
+
+ +

#include <net.hpp>

+
+Inheritance diagram for op::Net:
+
+
+ + +op::NetCaffe +op::NetOpenCv + +
+ + + + + + + + + + +

+Public Member Functions

virtual ~Net ()
 
virtual void initializationOnThread ()=0
 
virtual void forwardPass (const Array< float > &inputData) const =0
 
virtual std::shared_ptr< ArrayCpuGpu< float > > getOutputBlobArray () const =0
 
+

Detailed Description

+
+

Definition at line 8 of file net.hpp.
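The interface implies a fixed call order, sketched below with the op::NetCaffe implementation (the include path and model paths are placeholders/assumptions): construct, initialize on the worker thread, run the forward pass, then read the output blob.

#include <openpose/net/netCaffe.hpp>   // include path assumed

void runForwardPass(const op::Array<float>& inputNetData)
{
    op::NetCaffe netCaffe{"models/pose_deploy.prototxt",      // placeholder paths
                          "models/pose_iter.caffemodel",
                          /*gpuId*/ 0};
    netCaffe.initializationOnThread();    // typically called from the thread that will use the net
    netCaffe.forwardPass(inputNetData);   // fills the internal output blob
    const auto outputBlob = netCaffe.getOutputBlobArray();
    // `outputBlob` (std::shared_ptr<op::ArrayCpuGpu<float>>) exposes the raw network output.
    (void)outputBlob;
}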

+

Constructor & Destructor Documentation

+ +

◆ ~Net()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::Net::~Net ()
+
+inlinevirtual
+
+ +

Definition at line 11 of file net.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ forwardPass()

+ +
+
+ + + + + +
+ + + + + + + + +
virtual void op::Net::forwardPass (const Array< float > & inputData) const
+
+pure virtual
+
+ +

Implemented in op::NetOpenCv, and op::NetCaffe.

+ +
+
+ +

◆ getOutputBlobArray()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::shared_ptr<ArrayCpuGpu<float> > op::Net::getOutputBlobArray () const
+
+pure virtual
+
+ +

Implemented in op::NetOpenCv, and op::NetCaffe.

+ +
+
+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::Net::initializationOnThread ()
+
+pure virtual
+
+ +

Implemented in op::NetOpenCv, and op::NetCaffe.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_net.js b/web/html/doc/classop_1_1_net.js new file mode 100644 index 000000000..ef764b7fc --- /dev/null +++ b/web/html/doc/classop_1_1_net.js @@ -0,0 +1,7 @@ +var classop_1_1_net = +[ + [ "~Net", "classop_1_1_net.html#ae20a74df1a401eb17d5b75b406574919", null ], + [ "forwardPass", "classop_1_1_net.html#a65193e857c721f2f606ea6b010953dbc", null ], + [ "getOutputBlobArray", "classop_1_1_net.html#a222cfe3d19800824b742b218b466586b", null ], + [ "initializationOnThread", "classop_1_1_net.html#a6e9e801f2c9950a798d0d2fa94a6c8f2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_net.png b/web/html/doc/classop_1_1_net.png new file mode 100644 index 000000000..3220def24 Binary files /dev/null and b/web/html/doc/classop_1_1_net.png differ diff --git a/web/html/doc/classop_1_1_net_caffe-members.html b/web/html/doc/classop_1_1_net_caffe-members.html new file mode 100644 index 000000000..5d4d94daf --- /dev/null +++ b/web/html/doc/classop_1_1_net_caffe-members.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::NetCaffe Member List
+
+
+ +

This is the complete list of members for op::NetCaffe, including all inherited members.

+ + + + + + + +
forwardPass(const Array< float > &inputNetData) const | op::NetCaffe | virtual
getOutputBlobArray() const | op::NetCaffe | virtual
initializationOnThread() | op::NetCaffe | virtual
NetCaffe(const std::string &caffeProto, const std::string &caffeTrainedModel, const int gpuId=0, const bool enableGoogleLogging=true, const std::string &lastBlobName="net_output") | op::NetCaffe
~Net() | op::Net | inline, virtual
~NetCaffe() | op::NetCaffe | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_net_caffe.html b/web/html/doc/classop_1_1_net_caffe.html new file mode 100644 index 000000000..c4babdcd2 --- /dev/null +++ b/web/html/doc/classop_1_1_net_caffe.html @@ -0,0 +1,293 @@ + + + + + + + +OpenPose: op::NetCaffe Class Reference + + + + + + + + + + + + + +
op::NetCaffe Class Reference
+
+
+ +

#include <netCaffe.hpp>

+
+Inheritance diagram for op::NetCaffe:
+
+
+ + +op::Net + +
+ + + + + + + + + + + + + + + +

+Public Member Functions

 NetCaffe (const std::string &caffeProto, const std::string &caffeTrainedModel, const int gpuId=0, const bool enableGoogleLogging=true, const std::string &lastBlobName="net_output")
 
virtual ~NetCaffe ()
 
void initializationOnThread ()
 
void forwardPass (const Array< float > &inputNetData) const
 
std::shared_ptr< ArrayCpuGpu< float > > getOutputBlobArray () const
 
- Public Member Functions inherited from op::Net
virtual ~Net ()
 
+

Detailed Description

+
+

Definition at line 9 of file netCaffe.hpp.

+

Constructor & Destructor Documentation

+ +

◆ NetCaffe()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::NetCaffe::NetCaffe (const std::string & caffeProto,
const std::string & caffeTrainedModel,
const int gpuId = 0,
const bool enableGoogleLogging = true,
const std::string & lastBlobName = "net_output" 
)
+
+ +
+
+ +

◆ ~NetCaffe()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::NetCaffe::~NetCaffe ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ forwardPass()

+ +
+
+ + + + + +
+ + + + + + + + +
void op::NetCaffe::forwardPass (const Array< float > & inputNetData) const
+
+virtual
+
+ +

Implements op::Net.

+ +
+
+ +

◆ getOutputBlobArray()

+ +
+
+ + + + + +
+ + + + + + + +
std::shared_ptr<ArrayCpuGpu<float> > op::NetCaffe::getOutputBlobArray () const
+
+virtual
+
+ +

Implements op::Net.

+ +
+
+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
void op::NetCaffe::initializationOnThread ()
+
+virtual
+
+ +

Implements op::Net.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_net_caffe.js b/web/html/doc/classop_1_1_net_caffe.js new file mode 100644 index 000000000..8ad1d28ea --- /dev/null +++ b/web/html/doc/classop_1_1_net_caffe.js @@ -0,0 +1,8 @@ +var classop_1_1_net_caffe = +[ + [ "NetCaffe", "classop_1_1_net_caffe.html#af6d9ee03568d2783e0e4ed0b78a21c3d", null ], + [ "~NetCaffe", "classop_1_1_net_caffe.html#a84007645c88de286e9d306461a044e8d", null ], + [ "forwardPass", "classop_1_1_net_caffe.html#a439b30ec5d10c68cb620130ff5e1812a", null ], + [ "getOutputBlobArray", "classop_1_1_net_caffe.html#a37648c14f06ee46ca395c9d38635fade", null ], + [ "initializationOnThread", "classop_1_1_net_caffe.html#a08b71387287339e68327dd6d4cb1e8b3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_net_caffe.png b/web/html/doc/classop_1_1_net_caffe.png new file mode 100644 index 000000000..62f949230 Binary files /dev/null and b/web/html/doc/classop_1_1_net_caffe.png differ diff --git a/web/html/doc/classop_1_1_net_open_cv-members.html b/web/html/doc/classop_1_1_net_open_cv-members.html new file mode 100644 index 000000000..8f39153d7 --- /dev/null +++ b/web/html/doc/classop_1_1_net_open_cv-members.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::NetOpenCv Member List
+
+
+ +

This is the complete list of members for op::NetOpenCv, including all inherited members.

+ + + + + + + +
forwardPass(const Array< float > &inputNetData) const | op::NetOpenCv | virtual
getOutputBlobArray() const | op::NetOpenCv | virtual
initializationOnThread() | op::NetOpenCv | virtual
NetOpenCv(const std::string &caffeProto, const std::string &caffeTrainedModel, const int gpuId=0) | op::NetOpenCv
~Net() | op::Net | inline, virtual
~NetOpenCv() | op::NetOpenCv | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_net_open_cv.html b/web/html/doc/classop_1_1_net_open_cv.html new file mode 100644 index 000000000..ee7967289 --- /dev/null +++ b/web/html/doc/classop_1_1_net_open_cv.html @@ -0,0 +1,281 @@ + + + + + + + +OpenPose: op::NetOpenCv Class Reference + + + + + + + + + + + + + +
op::NetOpenCv Class Reference
+
+
+ +

#include <netOpenCv.hpp>

+
+Inheritance diagram for op::NetOpenCv:
+
+
+ + +op::Net + +
+ + + + + + + + + + + + + + + +

+Public Member Functions

 NetOpenCv (const std::string &caffeProto, const std::string &caffeTrainedModel, const int gpuId=0)
 
virtual ~NetOpenCv ()
 
void initializationOnThread ()
 
void forwardPass (const Array< float > &inputNetData) const
 
std::shared_ptr< ArrayCpuGpu< float > > getOutputBlobArray () const
 
- Public Member Functions inherited from op::Net
virtual ~Net ()
 
+

Detailed Description

+
+

Definition at line 9 of file netOpenCv.hpp.

+

Constructor & Destructor Documentation

+ +

◆ NetOpenCv()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::NetOpenCv::NetOpenCv (const std::string & caffeProto,
const std::string & caffeTrainedModel,
const int gpuId = 0 
)
+
+ +
+
+ +

◆ ~NetOpenCv()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::NetOpenCv::~NetOpenCv ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ forwardPass()

+ +
+
+ + + + + +
+ + + + + + + + +
void op::NetOpenCv::forwardPass (const Array< float > & inputNetData) const
+
+virtual
+
+ +

Implements op::Net.

+ +
+
+ +

◆ getOutputBlobArray()

+ +
+
+ + + + + +
+ + + + + + + +
std::shared_ptr<ArrayCpuGpu<float> > op::NetOpenCv::getOutputBlobArray () const
+
+virtual
+
+ +

Implements op::Net.

+ +
+
+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
void op::NetOpenCv::initializationOnThread ()
+
+virtual
+
+ +

Implements op::Net.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_net_open_cv.js b/web/html/doc/classop_1_1_net_open_cv.js new file mode 100644 index 000000000..89bfa6587 --- /dev/null +++ b/web/html/doc/classop_1_1_net_open_cv.js @@ -0,0 +1,8 @@ +var classop_1_1_net_open_cv = +[ + [ "NetOpenCv", "classop_1_1_net_open_cv.html#af46f57f8a4093c927dd39109ad0411e9", null ], + [ "~NetOpenCv", "classop_1_1_net_open_cv.html#a30ec3c3ee2ffe0a95656f6b11151243f", null ], + [ "forwardPass", "classop_1_1_net_open_cv.html#aa62d557f44d2d44f08b8b1dd3efd54fb", null ], + [ "getOutputBlobArray", "classop_1_1_net_open_cv.html#a9f4981ac196b094183c52caa6ce283db", null ], + [ "initializationOnThread", "classop_1_1_net_open_cv.html#a932f2f53f61e05bc0fd164a707f692b9", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_net_open_cv.png b/web/html/doc/classop_1_1_net_open_cv.png new file mode 100644 index 000000000..075e88a99 Binary files /dev/null and b/web/html/doc/classop_1_1_net_open_cv.png differ diff --git a/web/html/doc/classop_1_1_nms_caffe-members.html b/web/html/doc/classop_1_1_nms_caffe-members.html new file mode 100644 index 000000000..8c7cd03fd --- /dev/null +++ b/web/html/doc/classop_1_1_nms_caffe-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::NmsCaffe< T > Member List
+
+
+ +

This is the complete list of members for op::NmsCaffe< T >, including all inherited members.

+ + + + + + + + + + + + + + +
Backward_cpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom) | op::NmsCaffe< T > | virtual
Backward_gpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom) | op::NmsCaffe< T > | virtual
Forward(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::NmsCaffe< T > | virtual
Forward_cpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::NmsCaffe< T > | virtual
Forward_gpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::NmsCaffe< T > | virtual
Forward_ocl(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::NmsCaffe< T > | virtual
LayerSetUp(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top) | op::NmsCaffe< T > | virtual
NmsCaffe() | op::NmsCaffe< T > | explicit
Reshape(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top, const int maxPeaks, const int outputChannels=-1, const int gpuID=0) | op::NmsCaffe< T > | virtual
setOffset(const Point< T > &offset) | op::NmsCaffe< T >
setThreshold(const T threshold) | op::NmsCaffe< T >
type() const | op::NmsCaffe< T > | inline, virtual
~NmsCaffe() | op::NmsCaffe< T > | virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_nms_caffe.html b/web/html/doc/classop_1_1_nms_caffe.html new file mode 100644 index 000000000..832b6bfc6 --- /dev/null +++ b/web/html/doc/classop_1_1_nms_caffe.html @@ -0,0 +1,604 @@ + + + + + + + +OpenPose: op::NmsCaffe< T > Class Template Reference + + + + + + + + + + + + + +
op::NmsCaffe< T > Class Template Reference
+
+
+ +

#include <nmsCaffe.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 NmsCaffe ()
 
virtual ~NmsCaffe ()
 
virtual void LayerSetUp (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Reshape (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top, const int maxPeaks, const int outputChannels=-1, const int gpuID=0)
 
virtual const char * type () const
 
void setThreshold (const T threshold)
 
void setOffset (const Point< T > &offset)
 
virtual void Forward (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_cpu (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_gpu (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_ocl (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Backward_cpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
virtual void Backward_gpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
+

Detailed Description

+

template<typename T>
+class op::NmsCaffe< T >

+ + +

Definition at line 12 of file nmsCaffe.hpp.

+

Constructor & Destructor Documentation

+ +

◆ NmsCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
op::NmsCaffe< T >::NmsCaffe ()
+
+explicit
+
+ +
+
+ +

◆ ~NmsCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual op::NmsCaffe< T >::~NmsCaffe ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ Backward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::Backward_cpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Backward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::Backward_gpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Forward()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::Forward (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::Forward_cpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::Forward_gpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_ocl()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::Forward_ocl (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ LayerSetUp()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::LayerSetUp (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Reshape()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::NmsCaffe< T >::Reshape (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top,
const int maxPeaks,
const int outputChannels = -1,
const int gpuID = 0 
)
+
+virtual
+
+ +
+
+ +

◆ setOffset()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::NmsCaffe< T >::setOffset (const Point< T > & offset)
+
+ +
+
+ +

◆ setThreshold()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::NmsCaffe< T >::setThreshold (const T threshold)
+
+ +
+
+ +

◆ type()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual const char* op::NmsCaffe< T >::type () const
+
+inlinevirtual
+
+ +

Definition at line 24 of file nmsCaffe.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
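The page above only lists signatures, so here is a minimal, hedged usage sketch for op::NmsCaffe. It assumes the umbrella header <openpose/headers.hpp> from an OpenPose build and that the caller already owns the input/output op::ArrayCpuGpu blobs (their construction is not covered on this page); only members listed above are called, and the threshold/offset values are arbitrary examples.

```cpp
#include <vector>
#include <openpose/headers.hpp>

// Runs non-maximum suppression on pre-allocated heat-map and peak blobs.
void runNms(op::ArrayCpuGpu<float>* heatMapsBlob,  // bottom: network heat maps
            op::ArrayCpuGpu<float>* peaksBlob,     // top: resulting peaks
            const int maxPeaks)
{
    const std::vector<op::ArrayCpuGpu<float>*> bottom{heatMapsBlob};
    const std::vector<op::ArrayCpuGpu<float>*> top{peaksBlob};

    op::NmsCaffe<float> nmsCaffe;
    nmsCaffe.LayerSetUp(bottom, top);
    nmsCaffe.Reshape(bottom, top, maxPeaks);           // outputChannels = -1, gpuID = 0 (defaults)
    nmsCaffe.setThreshold(0.05f);                      // example confidence threshold
    nmsCaffe.setOffset(op::Point<float>{0.5f, 0.5f});  // example sub-pixel offset
    nmsCaffe.Forward(bottom, top);                     // Forward_cpu/_gpu/_ocl are also listed above
}
```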
+ + + + diff --git a/web/html/doc/classop_1_1_nms_caffe.js b/web/html/doc/classop_1_1_nms_caffe.js new file mode 100644 index 000000000..45e0d1231 --- /dev/null +++ b/web/html/doc/classop_1_1_nms_caffe.js @@ -0,0 +1,16 @@ +var classop_1_1_nms_caffe = +[ + [ "NmsCaffe", "classop_1_1_nms_caffe.html#afb808d9a264ce50664f8641e477d9e2d", null ], + [ "~NmsCaffe", "classop_1_1_nms_caffe.html#a0702488e5d899a6610535f6741601978", null ], + [ "Backward_cpu", "classop_1_1_nms_caffe.html#abbaee841e5cb64f97c94da67ef4349c9", null ], + [ "Backward_gpu", "classop_1_1_nms_caffe.html#a3d1d4cee2b93d5bc0d88c25019b17715", null ], + [ "Forward", "classop_1_1_nms_caffe.html#a263d87a3282cbc03182e4d8759ca9f3a", null ], + [ "Forward_cpu", "classop_1_1_nms_caffe.html#a8289f4e680cd16405555002a61de735b", null ], + [ "Forward_gpu", "classop_1_1_nms_caffe.html#a8520f4df4fb2d26a1289b1bcaa814e93", null ], + [ "Forward_ocl", "classop_1_1_nms_caffe.html#ad1719736dc5e459a1d8b28837e94f989", null ], + [ "LayerSetUp", "classop_1_1_nms_caffe.html#a8c7e69c32f1fff92893284ed70278f48", null ], + [ "Reshape", "classop_1_1_nms_caffe.html#abe113059484596e82efd8b5f6d346ab5", null ], + [ "setOffset", "classop_1_1_nms_caffe.html#a1dd658e4bc9e080867a99e9b57f1baa8", null ], + [ "setThreshold", "classop_1_1_nms_caffe.html#a5f257eb561fc705c2b74489b12269b49", null ], + [ "type", "classop_1_1_nms_caffe.html#adc88733fceaefc359a95f067c62c3b07", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_op_output_to_cv_mat-members.html b/web/html/doc/classop_1_1_op_output_to_cv_mat-members.html new file mode 100644 index 000000000..421ab25ba --- /dev/null +++ b/web/html/doc/classop_1_1_op_output_to_cv_mat-members.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::OpOutputToCvMat Member List
+
+
+ +

This is the complete list of members for op::OpOutputToCvMat, including all inherited members.

+ + + + + +
formatToCvMat(const Array< float > &outputData)op::OpOutputToCvMat
OpOutputToCvMat(const bool gpuResize=false)op::OpOutputToCvMat
setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)op::OpOutputToCvMat
~OpOutputToCvMat()op::OpOutputToCvMatvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_op_output_to_cv_mat.html b/web/html/doc/classop_1_1_op_output_to_cv_mat.html new file mode 100644 index 000000000..0e11c8dbd --- /dev/null +++ b/web/html/doc/classop_1_1_op_output_to_cv_mat.html @@ -0,0 +1,205 @@ + + + + + + + +OpenPose: op::OpOutputToCvMat Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::OpOutputToCvMat Class Reference
+
+
+ +

#include <opOutputToCvMat.hpp>

+ + + + + + + + + + +

+Public Member Functions

 OpOutputToCvMat (const bool gpuResize=false)
 
virtual ~OpOutputToCvMat ()
 
void setSharedParameters (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)
 
Matrix formatToCvMat (const Array< float > &outputData)
 
+

Detailed Description

+
+

Definition at line 8 of file opOutputToCvMat.hpp.

+

Constructor & Destructor Documentation

+ +

◆ OpOutputToCvMat()

+ +
+
+ + + + + + + + +
op::OpOutputToCvMat::OpOutputToCvMat (const bool gpuResize = false)
+
+ +
+
+ +

◆ ~OpOutputToCvMat()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::OpOutputToCvMat::~OpOutputToCvMat ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ formatToCvMat()

+ +
+
+ + + + + + + + +
Matrix op::OpOutputToCvMat::formatToCvMat (const Array< float > & outputData)
+
+ +
+
+ +

◆ setSharedParameters()

+ +
+
+ + + + + + + + +
void op::OpOutputToCvMat::setSharedParameters (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> & tuple)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
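As a brief, hedged sketch (assuming the <openpose/headers.hpp> umbrella header), op::OpOutputToCvMat converts the rendered output op::Array<float> of a frame back into an op::Matrix:

```cpp
#include <openpose/headers.hpp>

// Converts OpenPose's rendered output array into an op::Matrix (CPU path).
op::Matrix toMatrix(const op::Array<float>& outputData)
{
    op::OpOutputToCvMat opOutputToCvMat{/*gpuResize*/ false};
    return opOutputToCvMat.formatToCvMat(outputData);
}
```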
+ + + + diff --git a/web/html/doc/classop_1_1_op_output_to_cv_mat.js b/web/html/doc/classop_1_1_op_output_to_cv_mat.js new file mode 100644 index 000000000..5709b9385 --- /dev/null +++ b/web/html/doc/classop_1_1_op_output_to_cv_mat.js @@ -0,0 +1,7 @@ +var classop_1_1_op_output_to_cv_mat = +[ + [ "OpOutputToCvMat", "classop_1_1_op_output_to_cv_mat.html#a60affeb41b26b1357cf8c797c7e16ecb", null ], + [ "~OpOutputToCvMat", "classop_1_1_op_output_to_cv_mat.html#afe99e538dfcca6396b0672db1ec2f17f", null ], + [ "formatToCvMat", "classop_1_1_op_output_to_cv_mat.html#aaee9dc07945e0857de33308b12c9bd09", null ], + [ "setSharedParameters", "classop_1_1_op_output_to_cv_mat.html#af150c89ff5edbe4f4bd727b7162e9b36", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_people_json_saver-members.html b/web/html/doc/classop_1_1_people_json_saver-members.html new file mode 100644 index 000000000..2b06cd4e2 --- /dev/null +++ b/web/html/doc/classop_1_1_people_json_saver-members.html @@ -0,0 +1,110 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PeopleJsonSaver Member List
+
+
+ +

This is the complete list of members for op::PeopleJsonSaver, including all inherited members.

+ + + + + + + + +
FileSaver(const std::string &directoryPath)op::FileSaverexplicitprotected
getNextFileName(const unsigned long long index) constop::FileSaverprotected
getNextFileName(const std::string &fileNameNoExtension) constop::FileSaverprotected
PeopleJsonSaver(const std::string &directoryPath)op::PeopleJsonSaver
save(const std::vector< std::pair< Array< float >, std::string >> &keypointVector, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &fileName, const bool humanReadable=true) constop::PeopleJsonSaver
~FileSaver()op::FileSaverprotectedvirtual
~PeopleJsonSaver()op::PeopleJsonSavervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_people_json_saver.html b/web/html/doc/classop_1_1_people_json_saver.html new file mode 100644 index 000000000..b87085ebf --- /dev/null +++ b/web/html/doc/classop_1_1_people_json_saver.html @@ -0,0 +1,228 @@ + + + + + + + +OpenPose: op::PeopleJsonSaver Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PeopleJsonSaver Class Reference
+
+
+ +

#include <peopleJsonSaver.hpp>

+
+Inheritance diagram for op::PeopleJsonSaver:
+
+
+ + +op::FileSaver + +
+ + + + + + + + +

+Public Member Functions

 PeopleJsonSaver (const std::string &directoryPath)
 
virtual ~PeopleJsonSaver ()
 
void save (const std::vector< std::pair< Array< float >, std::string >> &keypointVector, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &fileName, const bool humanReadable=true) const
 
+ + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::FileSaver
 FileSaver (const std::string &directoryPath)
 
virtual ~FileSaver ()
 
std::string getNextFileName (const unsigned long long index) const
 
std::string getNextFileName (const std::string &fileNameNoExtension) const
 
+

Detailed Description

+
+

Definition at line 9 of file peopleJsonSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ PeopleJsonSaver()

+ +
+
+ + + + + + + + +
op::PeopleJsonSaver::PeopleJsonSaver (const std::string & directoryPath)
+
+ +
+
+ +

◆ ~PeopleJsonSaver()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PeopleJsonSaver::~PeopleJsonSaver ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ save()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::PeopleJsonSaver::save (const std::vector< std::pair< Array< float >, std::string >> & keypointVector,
const std::vector< std::vector< std::array< float, 3 >>> & candidates,
const std::string & fileName,
const bool humanReadable = true 
) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
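A hedged sketch of the typical call (assuming <openpose/headers.hpp>; the directory, file name, and the "pose_keypoints_2d" key are illustrative choices, not values mandated by this class):

```cpp
#include <array>
#include <string>
#include <utility>
#include <vector>
#include <openpose/headers.hpp>

// Writes one frame's keypoints as a JSON file under the chosen directory.
void saveFrameJson(const op::Array<float>& poseKeypoints)
{
    const op::PeopleJsonSaver peopleJsonSaver{"output_json/"};
    // Each pair is (keypoint array, JSON key under which it is stored).
    const std::vector<std::pair<op::Array<float>, std::string>> keypointVector{
        {poseKeypoints, "pose_keypoints_2d"}};
    const std::vector<std::vector<std::array<float, 3>>> candidates{};  // no part candidates here
    peopleJsonSaver.save(keypointVector, candidates, "frame_000000", /*humanReadable*/ true);
}
```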
+ + + + diff --git a/web/html/doc/classop_1_1_people_json_saver.js b/web/html/doc/classop_1_1_people_json_saver.js new file mode 100644 index 000000000..6a4fb8d46 --- /dev/null +++ b/web/html/doc/classop_1_1_people_json_saver.js @@ -0,0 +1,6 @@ +var classop_1_1_people_json_saver = +[ + [ "PeopleJsonSaver", "classop_1_1_people_json_saver.html#aa6e2f479d959752c5c0f71cd8b4427c2", null ], + [ "~PeopleJsonSaver", "classop_1_1_people_json_saver.html#a4a84666529a0418ccf9256c9942ea3f8", null ], + [ "save", "classop_1_1_people_json_saver.html#ac0c0609652f89a3de44bcc940a82e235", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_people_json_saver.png b/web/html/doc/classop_1_1_people_json_saver.png new file mode 100644 index 000000000..590485233 Binary files /dev/null and b/web/html/doc/classop_1_1_people_json_saver.png differ diff --git a/web/html/doc/classop_1_1_person_id_extractor-members.html b/web/html/doc/classop_1_1_person_id_extractor-members.html new file mode 100644 index 000000000..4da425371 --- /dev/null +++ b/web/html/doc/classop_1_1_person_id_extractor-members.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PersonIdExtractor Member List
+
+
+ +

This is the complete list of members for op::PersonIdExtractor, including all inherited members.

+ + + + + +
extractIds(const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageViewIndex=0ull)op::PersonIdExtractor
extractIdsLockThread(const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageViewIndex, const long long frameId)op::PersonIdExtractor
PersonIdExtractor(const float confidenceThreshold=0.1f, const float inlierRatioThreshold=0.5f, const float distanceThreshold=30.f, const int numberFramesToDeletePerson=10)op::PersonIdExtractor
~PersonIdExtractor()op::PersonIdExtractorvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_person_id_extractor.html b/web/html/doc/classop_1_1_person_id_extractor.html new file mode 100644 index 000000000..87fd7a9b1 --- /dev/null +++ b/web/html/doc/classop_1_1_person_id_extractor.html @@ -0,0 +1,265 @@ + + + + + + + +OpenPose: op::PersonIdExtractor Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PersonIdExtractor Class Reference
+
+
+ +

#include <personIdExtractor.hpp>

+ + + + + + + + + + +

+Public Member Functions

 PersonIdExtractor (const float confidenceThreshold=0.1f, const float inlierRatioThreshold=0.5f, const float distanceThreshold=30.f, const int numberFramesToDeletePerson=10)
 
virtual ~PersonIdExtractor ()
 
Array< long long > extractIds (const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageViewIndex=0ull)
 
Array< long long > extractIdsLockThread (const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageViewIndex, const long long frameId)
 
+

Detailed Description

+
+

Definition at line 8 of file personIdExtractor.hpp.

+

Constructor & Destructor Documentation

+ +

◆ PersonIdExtractor()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::PersonIdExtractor::PersonIdExtractor (const float confidenceThreshold = 0.1f,
const float inlierRatioThreshold = 0.5f,
const float distanceThreshold = 30.f,
const int numberFramesToDeletePerson = 10 
)
+
+ +
+
+ +

◆ ~PersonIdExtractor()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PersonIdExtractor::~PersonIdExtractor ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ extractIds()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
Array<long long> op::PersonIdExtractor::extractIds (const Array< float > & poseKeypoints,
const MatrixcvMatInput,
const unsigned long long imageViewIndex = 0ull 
)
+
+ +
+
+ +

◆ extractIdsLockThread()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Array<long long> op::PersonIdExtractor::extractIdsLockThread (const Array< float > & poseKeypoints,
const MatrixcvMatInput,
const unsigned long long imageViewIndex,
const long long frameId 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
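A minimal sketch (assuming <openpose/headers.hpp>): the extractor is kept alive across frames so that identities persist, and the constructor arguments below simply repeat the defaults listed above.

```cpp
#include <openpose/headers.hpp>

// Returns one ID per detected person, reusing IDs across consecutive frames.
op::Array<long long> assignPersonIds(const op::Array<float>& poseKeypoints,
                                     const op::Matrix& inputFrame)
{
    // Defaults from the constructor above: confidence 0.1, inlier ratio 0.5,
    // distance threshold 30, delete a person after 10 missing frames.
    static op::PersonIdExtractor personIdExtractor{0.1f, 0.5f, 30.f, 10};
    return personIdExtractor.extractIds(poseKeypoints, inputFrame);
}
```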
+ + + + diff --git a/web/html/doc/classop_1_1_person_id_extractor.js b/web/html/doc/classop_1_1_person_id_extractor.js new file mode 100644 index 000000000..dfd66df6f --- /dev/null +++ b/web/html/doc/classop_1_1_person_id_extractor.js @@ -0,0 +1,7 @@ +var classop_1_1_person_id_extractor = +[ + [ "PersonIdExtractor", "classop_1_1_person_id_extractor.html#a5916ec673bdbe127386b7f496b188828", null ], + [ "~PersonIdExtractor", "classop_1_1_person_id_extractor.html#a7ff9f8faf42bff0dbd7207105c149a1e", null ], + [ "extractIds", "classop_1_1_person_id_extractor.html#a8d0b309bdf1ce96ed1aa2bd3df6f6dbc", null ], + [ "extractIdsLockThread", "classop_1_1_person_id_extractor.html#a1aebf8006d814a02d7fa55f0609a7ab7", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_person_tracker-members.html b/web/html/doc/classop_1_1_person_tracker-members.html new file mode 100644 index 000000000..b0a430f64 --- /dev/null +++ b/web/html/doc/classop_1_1_person_tracker-members.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PersonTracker Member List
+
+
+ +

This is the complete list of members for op::PersonTracker, including all inherited members.

+ + + + + + +
getMergeResults() constop::PersonTracker
PersonTracker(const bool mergeResults, const int levels=3, const int patchSize=31, const float confidenceThreshold=0.05f, const bool trackVelocity=false, const bool scaleVarying=false, const float rescale=640)op::PersonTracker
track(Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput)op::PersonTracker
trackLockThread(Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput, const long long frameId)op::PersonTracker
~PersonTracker()op::PersonTrackervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_person_tracker.html b/web/html/doc/classop_1_1_person_tracker.html new file mode 100644 index 000000000..45fdcebf6 --- /dev/null +++ b/web/html/doc/classop_1_1_person_tracker.html @@ -0,0 +1,302 @@ + + + + + + + +OpenPose: op::PersonTracker Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PersonTracker Class Reference
+
+
+ +

#include <personTracker.hpp>

+ + + + + + + + + + + + +

+Public Member Functions

 PersonTracker (const bool mergeResults, const int levels=3, const int patchSize=31, const float confidenceThreshold=0.05f, const bool trackVelocity=false, const bool scaleVarying=false, const float rescale=640)
 
virtual ~PersonTracker ()
 
void track (Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput)
 
void trackLockThread (Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput, const long long frameId)
 
bool getMergeResults () const
 
+

Detailed Description

+
+

Definition at line 8 of file personTracker.hpp.

+

Constructor & Destructor Documentation

+ +

◆ PersonTracker()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::PersonTracker::PersonTracker (const bool mergeResults,
const int levels = 3,
const int patchSize = 31,
const float confidenceThreshold = 0.05f,
const bool trackVelocity = false,
const bool scaleVarying = false,
const float rescale = 640 
)
+
+ +
+
+ +

◆ ~PersonTracker()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PersonTracker::~PersonTracker ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ getMergeResults()

+ +
+
+ + + + + + + +
bool op::PersonTracker::getMergeResults () const
+
+ +
+
+ +

◆ track()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
void op::PersonTracker::track (Array< float > & poseKeypoints,
Array< long long > & poseIds,
const MatrixcvMatInput 
)
+
+ +
+
+ +

◆ trackLockThread()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::PersonTracker::trackLockThread (Array< float > & poseKeypoints,
Array< long long > & poseIds,
const MatrixcvMatInput,
const long long frameId 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
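A hedged single-threaded sketch (assuming <openpose/headers.hpp>): the tracker is constructed once with mergeResults enabled and then updates the keypoints of each incoming frame in place.

```cpp
#include <openpose/headers.hpp>

// Refines/propagates keypoints for the current frame; poseKeypoints and
// poseIds are modified in place by track().
void trackFrame(op::Array<float>& poseKeypoints,
                op::Array<long long>& poseIds,
                const op::Matrix& inputFrame)
{
    static op::PersonTracker personTracker{/*mergeResults*/ true};  // remaining args keep the listed defaults
    personTracker.track(poseKeypoints, poseIds, inputFrame);
}
```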
+ + + + diff --git a/web/html/doc/classop_1_1_person_tracker.js b/web/html/doc/classop_1_1_person_tracker.js new file mode 100644 index 000000000..656c32682 --- /dev/null +++ b/web/html/doc/classop_1_1_person_tracker.js @@ -0,0 +1,8 @@ +var classop_1_1_person_tracker = +[ + [ "PersonTracker", "classop_1_1_person_tracker.html#aa88f617ff9f1ff509c54b2cbf51e764a", null ], + [ "~PersonTracker", "classop_1_1_person_tracker.html#a840ed2e06c1cc4dfc89e6083b2a8bc37", null ], + [ "getMergeResults", "classop_1_1_person_tracker.html#a68f46367bd719196974aa5b1bd23cb7d", null ], + [ "track", "classop_1_1_person_tracker.html#a05eaf85bd389ad965f9960c9db31d873", null ], + [ "trackLockThread", "classop_1_1_person_tracker.html#a35cd3cd6c946f560220c9459a5dd7ee7", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pointer_container_greater-members.html b/web/html/doc/classop_1_1_pointer_container_greater-members.html new file mode 100644 index 000000000..3639a0ffb --- /dev/null +++ b/web/html/doc/classop_1_1_pointer_container_greater-members.html @@ -0,0 +1,104 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PointerContainerGreater< TDatumsSP > Member List
+
+
+ +

This is the complete list of members for op::PointerContainerGreater< TDatumsSP >, including all inherited members.

+ + +
operator()(const TDatumsSP &a, const TDatumsSP &b)op::PointerContainerGreater< TDatumsSP >inline
+
+ + + + diff --git a/web/html/doc/classop_1_1_pointer_container_greater.html b/web/html/doc/classop_1_1_pointer_container_greater.html new file mode 100644 index 000000000..1b8ddbeb7 --- /dev/null +++ b/web/html/doc/classop_1_1_pointer_container_greater.html @@ -0,0 +1,162 @@ + + + + + + + +OpenPose: op::PointerContainerGreater< TDatumsSP > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PointerContainerGreater< TDatumsSP > Class Template Reference
+
+
+ +

#include <pointerContainer.hpp>

+ + + + +

+Public Member Functions

bool operator() (const TDatumsSP &a, const TDatumsSP &b)
 
+

Detailed Description

+

template<typename TDatumsSP>
+class op::PointerContainerGreater< TDatumsSP >

+ + +

Definition at line 13 of file pointerContainer.hpp.

+

Member Function Documentation

+ +

◆ operator()()

+ +
+
+
+template<typename TDatumsSP >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
bool op::PointerContainerGreater< TDatumsSP >::operator() (const TDatumsSP & a,
const TDatumsSP & b 
)
+
+inline
+
+ +

Definition at line 16 of file pointerContainer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
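Since only operator() is exposed, the natural (hedged) usage sketch is as the comparator of a std::priority_queue holding shared pointers to datum containers. The TDatums/TDatumsSP aliases below are assumptions for illustration, and the exact key that operator() compares is defined in pointerContainer.hpp rather than on this page.

```cpp
#include <memory>
#include <queue>
#include <vector>
#include <openpose/headers.hpp>

using TDatums   = std::vector<std::shared_ptr<op::Datum>>;  // assumed datum container type
using TDatumsSP = std::shared_ptr<TDatums>;

// A queue that pops the "smallest" element first, as decided by
// op::PointerContainerGreater<TDatumsSP>::operator().
using OrderedQueue = std::priority_queue<TDatumsSP, std::vector<TDatumsSP>,
                                         op::PointerContainerGreater<TDatumsSP>>;
```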
+ + + + diff --git a/web/html/doc/classop_1_1_pointer_container_greater.js b/web/html/doc/classop_1_1_pointer_container_greater.js new file mode 100644 index 000000000..c169b66c1 --- /dev/null +++ b/web/html/doc/classop_1_1_pointer_container_greater.js @@ -0,0 +1,4 @@ +var classop_1_1_pointer_container_greater = +[ + [ "operator()", "classop_1_1_pointer_container_greater.html#a7c571ddbcfd6eaaaf33bb6abe4b22aaa", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pointer_container_less-members.html b/web/html/doc/classop_1_1_pointer_container_less-members.html new file mode 100644 index 000000000..08b55e10c --- /dev/null +++ b/web/html/doc/classop_1_1_pointer_container_less-members.html @@ -0,0 +1,104 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PointerContainerLess< TDatumsSP > Member List
+
+
+ +

This is the complete list of members for op::PointerContainerLess< TDatumsSP >, including all inherited members.

+ + +
operator()(const TDatumsSP &a, const TDatumsSP &b)op::PointerContainerLess< TDatumsSP >inline
+
+ + + + diff --git a/web/html/doc/classop_1_1_pointer_container_less.html b/web/html/doc/classop_1_1_pointer_container_less.html new file mode 100644 index 000000000..5cd864536 --- /dev/null +++ b/web/html/doc/classop_1_1_pointer_container_less.html @@ -0,0 +1,162 @@ + + + + + + + +OpenPose: op::PointerContainerLess< TDatumsSP > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PointerContainerLess< TDatumsSP > Class Template Reference
+
+
+ +

#include <pointerContainer.hpp>

+ + + + +

+Public Member Functions

bool operator() (const TDatumsSP &a, const TDatumsSP &b)
 
+

Detailed Description

+

template<typename TDatumsSP>
+class op::PointerContainerLess< TDatumsSP >

+ + +

Definition at line 28 of file pointerContainer.hpp.

+

Member Function Documentation

+ +

◆ operator()()

+ +
+
+
+template<typename TDatumsSP >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
bool op::PointerContainerLess< TDatumsSP >::operator() (const TDatumsSP & a,
const TDatumsSP & b 
)
+
+inline
+
+ +

Definition at line 31 of file pointerContainer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
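op::PointerContainerLess is the mirror image of op::PointerContainerGreater above; a hedged sketch is sorting a buffered batch of shared-pointer datum containers with it (the TDatumsSP template parameter stands in for whatever pointer-container type the caller uses):

```cpp
#include <algorithm>
#include <vector>
#include <openpose/headers.hpp>

// Sorts a buffered batch with the ordering defined by PointerContainerLess.
template <typename TDatumsSP>
void sortBuffer(std::vector<TDatumsSP>& buffer)
{
    std::sort(buffer.begin(), buffer.end(), op::PointerContainerLess<TDatumsSP>{});
}
```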
+ + + + diff --git a/web/html/doc/classop_1_1_pointer_container_less.js b/web/html/doc/classop_1_1_pointer_container_less.js new file mode 100644 index 000000000..0ad2a2048 --- /dev/null +++ b/web/html/doc/classop_1_1_pointer_container_less.js @@ -0,0 +1,4 @@ +var classop_1_1_pointer_container_less = +[ + [ "operator()", "classop_1_1_pointer_container_less.html#af34bafbf659ff4768dbb33fe7454cb21", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pose_cpu_renderer-members.html b/web/html/doc/classop_1_1_pose_cpu_renderer-members.html new file mode 100644 index 000000000..865cb7e1e --- /dev/null +++ b/web/html/doc/classop_1_1_pose_cpu_renderer-members.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PoseCpuRenderer Member List
+
+
+ +

This is the complete list of members for op::PoseCpuRenderer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
getAlphaHeatMap() constop::Renderer
getAlphaKeypoint() constop::Renderer
getBlendOriginalFrame() constop::Renderer
getShowGooglyEyes() constop::Renderer
increaseElementToRender(const int increment)op::Renderer
initializationOnThread()op::PoseRendererinlinevirtual
mBlendOriginalFrameop::Rendererprotected
mPartIndexToNameop::PoseRendererprotected
mPoseModelop::PoseRendererprotected
mRenderThresholdop::Rendererprotected
mShowGooglyEyesop::Rendererprotected
PoseCpuRenderer(const PoseModel poseModel, const float renderThreshold, const bool blendOriginalFrame=true, const float alphaKeypoint=POSE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=POSE_DEFAULT_ALPHA_HEAT_MAP, const unsigned int elementToRender=0u)op::PoseCpuRenderer
PoseRenderer(const PoseModel poseModel)op::PoseRenderer
Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::Rendererexplicit
renderPose(Array< float > &outputData, const Array< float > &poseKeypoints, const float scaleInputToOutput, const float scaleNetToOutput=-1.f)op::PoseCpuRenderervirtual
setAlphaHeatMap(const float alphaHeatMap)op::Renderer
setAlphaKeypoint(const float alphaKeypoint)op::Renderer
setBlendOriginalFrame(const bool blendOriginalFrame)op::Renderer
setElementToRender(const int elementToRender)op::Renderer
setElementToRender(const ElementToRender elementToRender)op::Renderer
setShowGooglyEyes(const bool showGooglyEyes)op::Renderer
spElementToRenderop::Rendererprotected
spNumberElementsToRenderop::Rendererprotected
~PoseCpuRenderer()op::PoseCpuRenderervirtual
~PoseRenderer()op::PoseRenderervirtual
~Renderer()op::Renderervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_pose_cpu_renderer.html b/web/html/doc/classop_1_1_pose_cpu_renderer.html new file mode 100644 index 000000000..23bc6b8f0 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_cpu_renderer.html @@ -0,0 +1,314 @@ + + + + + + + +OpenPose: op::PoseCpuRenderer Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PoseCpuRenderer Class Reference
+
+
+ +

#include <poseCpuRenderer.hpp>

+
+Inheritance diagram for op::PoseCpuRenderer:
+
+
+ + +op::Renderer +op::PoseRenderer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 PoseCpuRenderer (const PoseModel poseModel, const float renderThreshold, const bool blendOriginalFrame=true, const float alphaKeypoint=POSE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=POSE_DEFAULT_ALPHA_HEAT_MAP, const unsigned int elementToRender=0u)
 
virtual ~PoseCpuRenderer ()
 
std::pair< int, std::string > renderPose (Array< float > &outputData, const Array< float > &poseKeypoints, const float scaleInputToOutput, const float scaleNetToOutput=-1.f)
 
- Public Member Functions inherited from op::Renderer
 Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~Renderer ()
 
void increaseElementToRender (const int increment)
 
void setElementToRender (const int elementToRender)
 
void setElementToRender (const ElementToRender elementToRender)
 
bool getBlendOriginalFrame () const
 
void setBlendOriginalFrame (const bool blendOriginalFrame)
 
float getAlphaKeypoint () const
 
void setAlphaKeypoint (const float alphaKeypoint)
 
float getAlphaHeatMap () const
 
void setAlphaHeatMap (const float alphaHeatMap)
 
bool getShowGooglyEyes () const
 
void setShowGooglyEyes (const bool showGooglyEyes)
 
- Public Member Functions inherited from op::PoseRenderer
 PoseRenderer (const PoseModel poseModel)
 
virtual ~PoseRenderer ()
 
virtual void initializationOnThread ()
 
+ + + + + + + + + + + + + + + + + +

+Additional Inherited Members

- Protected Attributes inherited from op::Renderer
const float mRenderThreshold
 
std::atomic< bool > mBlendOriginalFrame
 
std::shared_ptr< std::atomic< unsigned int > > spElementToRender
 
std::shared_ptr< const unsigned int > spNumberElementsToRender
 
std::atomic< bool > mShowGooglyEyes
 
- Protected Attributes inherited from op::PoseRenderer
const PoseModel mPoseModel
 
const std::map< unsigned int, std::string > mPartIndexToName
 
+

Detailed Description

+
+

Definition at line 12 of file poseCpuRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ PoseCpuRenderer()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::PoseCpuRenderer::PoseCpuRenderer (const PoseModel poseModel,
const float renderThreshold,
const bool blendOriginalFrame = true,
const float alphaKeypoint = POSE_DEFAULT_ALPHA_KEYPOINT,
const float alphaHeatMap = POSE_DEFAULT_ALPHA_HEAT_MAP,
const unsigned int elementToRender = 0u 
)
+
+ +
+
+ +

◆ ~PoseCpuRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PoseCpuRenderer::~PoseCpuRenderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ renderPose()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
std::pair<int, std::string> op::PoseCpuRenderer::renderPose (Array< float > & outputData,
const Array< float > & poseKeypoints,
const float scaleInputToOutput,
const float scaleNetToOutput = -1.f 
)
+
+virtual
+
+ +

Implements op::PoseRenderer.

+ +
+
+
The documentation for this class was generated from the following file: +
+
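A hedged CPU-rendering sketch (assuming <openpose/headers.hpp>; op::PoseModel::BODY_25 and the 0.05 render threshold are example choices). outputData is the output image buffer produced by an earlier input-to-output conversion step, which is not shown here.

```cpp
#include <string>
#include <utility>
#include <openpose/headers.hpp>

// Draws the detected keypoints into outputData on the CPU.
void renderOnCpu(op::Array<float>& outputData,
                 const op::Array<float>& poseKeypoints,
                 const float scaleInputToOutput)
{
    op::PoseCpuRenderer poseCpuRenderer{op::PoseModel::BODY_25, /*renderThreshold*/ 0.05f};
    poseCpuRenderer.initializationOnThread();  // inherited from op::PoseRenderer
    const std::pair<int, std::string> result =
        poseCpuRenderer.renderPose(outputData, poseKeypoints, scaleInputToOutput);
    (void)result;  // the returned pair may describe the rendered element
}
```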
+ + + + diff --git a/web/html/doc/classop_1_1_pose_cpu_renderer.js b/web/html/doc/classop_1_1_pose_cpu_renderer.js new file mode 100644 index 000000000..573ba8c2a --- /dev/null +++ b/web/html/doc/classop_1_1_pose_cpu_renderer.js @@ -0,0 +1,6 @@ +var classop_1_1_pose_cpu_renderer = +[ + [ "PoseCpuRenderer", "classop_1_1_pose_cpu_renderer.html#a5863733d560345d4890557b0f8c0d08e", null ], + [ "~PoseCpuRenderer", "classop_1_1_pose_cpu_renderer.html#ad4994dcc005a5e283abc012e8889c481", null ], + [ "renderPose", "classop_1_1_pose_cpu_renderer.html#a98541b982847713472411402314efd96", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pose_cpu_renderer.png b/web/html/doc/classop_1_1_pose_cpu_renderer.png new file mode 100644 index 000000000..b41d4be01 Binary files /dev/null and b/web/html/doc/classop_1_1_pose_cpu_renderer.png differ diff --git a/web/html/doc/classop_1_1_pose_extractor-members.html b/web/html/doc/classop_1_1_pose_extractor-members.html new file mode 100644 index 000000000..9ca173bf9 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor-members.html @@ -0,0 +1,117 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PoseExtractor Member List
+
+
+ +

This is the complete list of members for op::PoseExtractor, including all inherited members.

+ + + + + + + + + + + + + + + +
extractIds(const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageIndex=0ull)op::PoseExtractor
extractIdsLockThread(const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageIndex, const long long frameId)op::PoseExtractor
forwardPass(const std::vector< Array< float >> &inputNetData, const Point< int > &inputDataSize, const std::vector< double > &scaleRatios, const Array< float > &poseNetOutput=Array< float >{}, const long long frameId=-1ll)op::PoseExtractor
getCandidatesCopy() constop::PoseExtractor
getHeatMapsCopy() constop::PoseExtractor
getPoseKeypoints() constop::PoseExtractor
getPoseScores() constop::PoseExtractor
getScaleNetToOutput() constop::PoseExtractor
initializationOnThread()op::PoseExtractor
keepTopPeople(Array< float > &poseKeypoints, const Array< float > &poseScores) constop::PoseExtractor
PoseExtractor(const std::shared_ptr< PoseExtractorNet > &poseExtractorNet, const std::shared_ptr< KeepTopNPeople > &keepTopNPeople=nullptr, const std::shared_ptr< PersonIdExtractor > &personIdExtractor=nullptr, const std::shared_ptr< std::vector< std::shared_ptr< PersonTracker >>> &personTracker={}, const int numberPeopleMax=-1, const int tracking=-1)op::PoseExtractor
track(Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput, const unsigned long long imageViewIndex=0ull)op::PoseExtractor
trackLockThread(Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput, const unsigned long long imageViewIndex, const long long frameId)op::PoseExtractor
~PoseExtractor()op::PoseExtractorvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_pose_extractor.html b/web/html/doc/classop_1_1_pose_extractor.html new file mode 100644 index 000000000..51cd3d5f1 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor.html @@ -0,0 +1,559 @@ + + + + + + + +OpenPose: op::PoseExtractor Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PoseExtractor Class Reference
+
+
+ +

#include <poseExtractor.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 PoseExtractor (const std::shared_ptr< PoseExtractorNet > &poseExtractorNet, const std::shared_ptr< KeepTopNPeople > &keepTopNPeople=nullptr, const std::shared_ptr< PersonIdExtractor > &personIdExtractor=nullptr, const std::shared_ptr< std::vector< std::shared_ptr< PersonTracker >>> &personTracker={}, const int numberPeopleMax=-1, const int tracking=-1)
 
virtual ~PoseExtractor ()
 
void initializationOnThread ()
 
void forwardPass (const std::vector< Array< float >> &inputNetData, const Point< int > &inputDataSize, const std::vector< double > &scaleRatios, const Array< float > &poseNetOutput=Array< float >{}, const long long frameId=-1ll)
 
Array< float > getHeatMapsCopy () const
 
std::vector< std::vector< std::array< float, 3 > > > getCandidatesCopy () const
 
Array< float > getPoseKeypoints () const
 
Array< float > getPoseScores () const
 
float getScaleNetToOutput () const
 
void keepTopPeople (Array< float > &poseKeypoints, const Array< float > &poseScores) const
 
Array< long long > extractIds (const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageIndex=0ull)
 
Array< long long > extractIdsLockThread (const Array< float > &poseKeypoints, const Matrix &cvMatInput, const unsigned long long imageIndex, const long long frameId)
 
void track (Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput, const unsigned long long imageViewIndex=0ull)
 
void trackLockThread (Array< float > &poseKeypoints, Array< long long > &poseIds, const Matrix &cvMatInput, const unsigned long long imageViewIndex, const long long frameId)
 
+

Detailed Description

+
+

Definition at line 14 of file poseExtractor.hpp.

+

Constructor & Destructor Documentation

+ +

◆ PoseExtractor()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::PoseExtractor::PoseExtractor (const std::shared_ptr< PoseExtractorNet > & poseExtractorNet,
const std::shared_ptr< KeepTopNPeople > & keepTopNPeople = nullptr,
const std::shared_ptr< PersonIdExtractor > & personIdExtractor = nullptr,
const std::shared_ptr< std::vector< std::shared_ptr< PersonTracker >>> & personTracker = {},
const int numberPeopleMax = -1,
const int tracking = -1 
)
+
+ +
+
+ +

◆ ~PoseExtractor()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PoseExtractor::~PoseExtractor ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ extractIds()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
Array<long long> op::PoseExtractor::extractIds (const Array< float > & poseKeypoints,
const MatrixcvMatInput,
const unsigned long long imageIndex = 0ull 
)
+
+ +
+
+ +

◆ extractIdsLockThread()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Array<long long> op::PoseExtractor::extractIdsLockThread (const Array< float > & poseKeypoints,
const MatrixcvMatInput,
const unsigned long long imageIndex,
const long long frameId 
)
+
+ +
+
+ +

◆ forwardPass()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::PoseExtractor::forwardPass (const std::vector< Array< float >> & inputNetData,
const Point< int > & inputDataSize,
const std::vector< double > & scaleRatios,
const Array< float > & poseNetOutput = Array< float >{},
const long long frameId = -1ll 
)
+
+ +
+
+ +

◆ getCandidatesCopy()

+ +
+
+ + + + + + + +
std::vector<std::vector<std::array<float, 3> > > op::PoseExtractor::getCandidatesCopy () const
+
+ +
+
+ +

◆ getHeatMapsCopy()

+ +
+
+ + + + + + + +
Array<float> op::PoseExtractor::getHeatMapsCopy () const
+
+ +
+
+ +

◆ getPoseKeypoints()

+ +
+
+ + + + + + + +
Array<float> op::PoseExtractor::getPoseKeypoints () const
+
+ +
+
+ +

◆ getPoseScores()

+ +
+
+ + + + + + + +
Array<float> op::PoseExtractor::getPoseScores () const
+
+ +
+
+ +

◆ getScaleNetToOutput()

+ +
+
+ + + + + + + +
float op::PoseExtractor::getScaleNetToOutput () const
+
+ +
+
+ +

◆ initializationOnThread()

+ +
+
+ + + + + + + +
void op::PoseExtractor::initializationOnThread ()
+
+ +
+
+ +

◆ keepTopPeople()

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::PoseExtractor::keepTopPeople (Array< float > & poseKeypoints,
const Array< float > & poseScores 
) const
+
+ +
+
+ +

◆ track()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::PoseExtractor::track (Array< float > & poseKeypoints,
Array< long long > & poseIds,
const MatrixcvMatInput,
const unsigned long long imageViewIndex = 0ull 
)
+
+ +
+
+ +

◆ trackLockThread()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::PoseExtractor::trackLockThread (Array< float > & poseKeypoints,
Array< long long > & poseIds,
const MatrixcvMatInput,
const unsigned long long imageViewIndex,
const long long frameId 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
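A hedged end-to-end sketch (assuming <openpose/headers.hpp>): op::PoseExtractor wraps a PoseExtractorNet backend, here a PoseExtractorCaffe with example arguments (BODY_25, the usual "models/" folder, GPU 0), and exposes the forward pass plus the keypoint getters listed above. inputNetData, inputDataSize and scaleRatios would come from OpenPose's scaling/resizing stage, which is not shown.

```cpp
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

// Runs one pose-estimation forward pass and returns the detected keypoints.
op::Array<float> estimatePose(const std::vector<op::Array<float>>& inputNetData,
                              const op::Point<int>& inputDataSize,
                              const std::vector<double>& scaleRatios)
{
    const auto poseExtractorNet = std::make_shared<op::PoseExtractorCaffe>(
        op::PoseModel::BODY_25, "models/", /*gpuId*/ 0);
    op::PoseExtractor poseExtractor{poseExtractorNet};  // no top-N filter, ID extractor, or tracker
    poseExtractor.initializationOnThread();
    poseExtractor.forwardPass(inputNetData, inputDataSize, scaleRatios);
    return poseExtractor.getPoseKeypoints();
}
```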
+ + + + diff --git a/web/html/doc/classop_1_1_pose_extractor.js b/web/html/doc/classop_1_1_pose_extractor.js new file mode 100644 index 000000000..1a21099d7 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor.js @@ -0,0 +1,17 @@ +var classop_1_1_pose_extractor = +[ + [ "PoseExtractor", "classop_1_1_pose_extractor.html#acd50fa337aef1d658b6fed3edc717ada", null ], + [ "~PoseExtractor", "classop_1_1_pose_extractor.html#a9f98eef4ac08cacefe74e002ac086582", null ], + [ "extractIds", "classop_1_1_pose_extractor.html#a15d81f74033c643465864f8ab6e48bba", null ], + [ "extractIdsLockThread", "classop_1_1_pose_extractor.html#aa7b59f4bfe89219e75995bc048efe4de", null ], + [ "forwardPass", "classop_1_1_pose_extractor.html#a6c0abd998181d03d7890ec7abdee5efe", null ], + [ "getCandidatesCopy", "classop_1_1_pose_extractor.html#adc430a6b1b2bf4df75ebf088f97be8af", null ], + [ "getHeatMapsCopy", "classop_1_1_pose_extractor.html#a95f6235ab496ada0b8cbc4b614637ac0", null ], + [ "getPoseKeypoints", "classop_1_1_pose_extractor.html#a487be38105b0d3f310142d99e0ca6b12", null ], + [ "getPoseScores", "classop_1_1_pose_extractor.html#aee77aa0ca773abe442a278d9e9e69376", null ], + [ "getScaleNetToOutput", "classop_1_1_pose_extractor.html#ae798335b1606804c87220d3c72423dad", null ], + [ "initializationOnThread", "classop_1_1_pose_extractor.html#aab1cccc9ad99f6b007abaa14600ea6df", null ], + [ "keepTopPeople", "classop_1_1_pose_extractor.html#a291521decad2465df13dc769fe9cc4e5", null ], + [ "track", "classop_1_1_pose_extractor.html#ab464ae97522336cf69dec1c1561c431d", null ], + [ "trackLockThread", "classop_1_1_pose_extractor.html#adab126d32216aa9a27cc78d7158d6616", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pose_extractor_caffe-members.html b/web/html/doc/classop_1_1_pose_extractor_caffe-members.html new file mode 100644 index 000000000..995086d09 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor_caffe-members.html @@ -0,0 +1,130 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PoseExtractorCaffe Member List
+
+
+ +

This is the complete list of members for op::PoseExtractorCaffe, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + +
checkThread() constop::PoseExtractorNetprotected
clear()op::PoseExtractorNet
forwardPass(const std::vector< Array< float >> &inputNetData, const Point< int > &inputDataSize, const std::vector< double > &scaleInputToNetInputs={1.f}, const Array< float > &poseNetOutput=Array< float >{})op::PoseExtractorCaffevirtual
get(const PoseProperty property) constop::PoseExtractorNet
getCandidatesCopy() constop::PoseExtractorNet
getCandidatesCpuConstPtr() constop::PoseExtractorCaffevirtual
getCandidatesGpuConstPtr() constop::PoseExtractorCaffevirtual
getHeatMapCpuConstPtr() constop::PoseExtractorCaffevirtual
getHeatMapGpuConstPtr() constop::PoseExtractorCaffevirtual
getHeatMapsCopy() constop::PoseExtractorNet
getHeatMapSize() constop::PoseExtractorCaffevirtual
getPoseGpuConstPtr() constop::PoseExtractorCaffevirtual
getPoseKeypoints() constop::PoseExtractorNet
getPoseScores() constop::PoseExtractorNet
getScaleNetToOutput() constop::PoseExtractorNet
increase(const PoseProperty property, const double value)op::PoseExtractorNet
initializationOnThread()op::PoseExtractorNet
mNetOutputSizeop::PoseExtractorNetprotected
mPoseKeypointsop::PoseExtractorNetprotected
mPoseScoresop::PoseExtractorNetprotected
mScaleNetToOutputop::PoseExtractorNetprotected
netInitializationOnThread()op::PoseExtractorCaffevirtual
PoseExtractorCaffe(const PoseModel poseModel, const std::string &modelFolder, const int gpuId, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool addPartCandidates=false, const bool maximizePositives=false, const std::string &protoTxtPath="", const std::string &caffeModelPath="", const float upsamplingRatio=0.f, const bool enableNet=true, const bool enableGoogleLogging=true)op::PoseExtractorCaffe
PoseExtractorNet(const PoseModel poseModel, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool addPartCandidates=false, const bool maximizePositives=false)op::PoseExtractorNet
set(const PoseProperty property, const double value)op::PoseExtractorNet
~PoseExtractorCaffe()op::PoseExtractorCaffevirtual
~PoseExtractorNet()op::PoseExtractorNetvirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_pose_extractor_caffe.html b/web/html/doc/classop_1_1_pose_extractor_caffe.html new file mode 100644 index 000000000..1c49ed712 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor_caffe.html @@ -0,0 +1,547 @@ + + + + + + + +OpenPose: op::PoseExtractorCaffe Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::PoseExtractorCaffe Class Reference
+
+
+ +

#include <poseExtractorCaffe.hpp>

+
+Inheritance diagram for op::PoseExtractorCaffe:
+
+
+ + +op::PoseExtractorNet + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 PoseExtractorCaffe (const PoseModel poseModel, const std::string &modelFolder, const int gpuId, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool addPartCandidates=false, const bool maximizePositives=false, const std::string &protoTxtPath="", const std::string &caffeModelPath="", const float upsamplingRatio=0.f, const bool enableNet=true, const bool enableGoogleLogging=true)
 
virtual ~PoseExtractorCaffe ()
 
virtual void netInitializationOnThread ()
 
virtual void forwardPass (const std::vector< Array< float >> &inputNetData, const Point< int > &inputDataSize, const std::vector< double > &scaleInputToNetInputs={1.f}, const Array< float > &poseNetOutput=Array< float >{})
 
const float * getCandidatesCpuConstPtr () const
 
const float * getCandidatesGpuConstPtr () const
 
const float * getHeatMapCpuConstPtr () const
 
const float * getHeatMapGpuConstPtr () const
 
std::vector< int > getHeatMapSize () const
 
const float * getPoseGpuConstPtr () const
 
- Public Member Functions inherited from op::PoseExtractorNet
 PoseExtractorNet (const PoseModel poseModel, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool addPartCandidates=false, const bool maximizePositives=false)
 
virtual ~PoseExtractorNet ()
 
void initializationOnThread ()
 
Array< float > getHeatMapsCopy () const
 
std::vector< std::vector< std::array< float, 3 > > > getCandidatesCopy () const
 
Array< float > getPoseKeypoints () const
 
Array< float > getPoseScores () const
 
float getScaleNetToOutput () const
 
double get (const PoseProperty property) const
 
void set (const PoseProperty property, const double value)
 
void increase (const PoseProperty property, const double value)
 
void clear ()
 
+ + + + + + + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::PoseExtractorNet
void checkThread () const
 
- Protected Attributes inherited from op::PoseExtractorNet
const PoseModel mPoseModel
 
Point< int > mNetOutputSize
 
Array< float > mPoseKeypoints
 
Array< float > mPoseScores
 
float mScaleNetToOutput
 
+

Detailed Description

+
+

Definition at line 16 of file poseExtractorCaffe.hpp.

+

Constructor & Destructor Documentation

+ +

◆ PoseExtractorCaffe()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::PoseExtractorCaffe::PoseExtractorCaffe (const PoseModel poseModel,
const std::string & modelFolder,
const int gpuId,
const std::vector< HeatMapType > & heatMapTypes = {},
const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect,
const bool addPartCandidates = false,
const bool maximizePositives = false,
const std::string & protoTxtPath = "",
const std::string & caffeModelPath = "",
const float upsamplingRatio = 0.f,
const bool enableNet = true,
const bool enableGoogleLogging = true 
)
+
+ +
+
+ +

◆ ~PoseExtractorCaffe()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PoseExtractorCaffe::~PoseExtractorCaffe ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ forwardPass()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::PoseExtractorCaffe::forwardPass (const std::vector< Array< float >> & inputNetData,
const Point< int > & inputDataSize,
const std::vector< double > & scaleInputToNetInputs = {1.f},
const Array< float > & poseNetOutput = Array< float >{} 
)
+
+virtual
+
+
Parameters
+ + +
poseNetOutput: If it is not empty, OpenPose will not run its internal body pose estimation network; it will use this data in place of the network output. The size of this element must match the size of the internal network output, otherwise it will lead to segmentation faults (core dumps). The pose estimation flags (e.g., --net_resolution, --scale_number) can be adjusted so that the dimensions of both elements match.
+
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+ +

◆ getCandidatesCpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
const float* op::PoseExtractorCaffe::getCandidatesCpuConstPtr () const
+
+virtual
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+ +

◆ getCandidatesGpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
const float* op::PoseExtractorCaffe::getCandidatesGpuConstPtr () const
+
+virtual
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+ +

◆ getHeatMapCpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
const float* op::PoseExtractorCaffe::getHeatMapCpuConstPtr () const
+
+virtual
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+ +

◆ getHeatMapGpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
const float* op::PoseExtractorCaffe::getHeatMapGpuConstPtr () const
+
+virtual
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+ +

◆ getHeatMapSize()

+ +
+
+ + + + + +
+ + + + + + + +
std::vector<int> op::PoseExtractorCaffe::getHeatMapSize () const
+
+virtual
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+ +

◆ getPoseGpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
const float* op::PoseExtractorCaffe::getPoseGpuConstPtr () const
+
+virtual
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+ +

◆ netInitializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::PoseExtractorCaffe::netInitializationOnThread ()
+
+virtual
+
+ +

Implements op::PoseExtractorNet.

+ +
+
+
The documentation for this class was generated from the following file: +
+
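Tying back to the poseNetOutput note under forwardPass above, here is a hedged sketch of feeding an externally produced network output so that the internal Caffe network is not run (assuming <openpose/headers.hpp>; "models/" and BODY_25 are example arguments, the middle constructor arguments repeat the defaults listed above, and enableNet is switched to false):

```cpp
#include <string>
#include <vector>
#include <openpose/headers.hpp>

// Parses keypoints from a precomputed heat-map volume instead of running Caffe.
op::Array<float> parseExternalNetOutput(const op::Array<float>& poseNetOutput,
                                        const std::vector<op::Array<float>>& inputNetData,
                                        const op::Point<int>& inputDataSize)
{
    op::PoseExtractorCaffe poseExtractorCaffe{
        op::PoseModel::BODY_25, "models/", /*gpuId*/ 0,
        /*heatMapTypes*/ {}, op::ScaleMode::ZeroToOneFixedAspect,
        /*addPartCandidates*/ false, /*maximizePositives*/ false,
        /*protoTxtPath*/ "", /*caffeModelPath*/ "", /*upsamplingRatio*/ 0.f,
        /*enableNet*/ false};                     // constructor flag listed above
    poseExtractorCaffe.initializationOnThread();  // inherited from op::PoseExtractorNet
    poseExtractorCaffe.forwardPass(inputNetData, inputDataSize,
                                   /*scaleInputToNetInputs*/ {1.}, poseNetOutput);
    return poseExtractorCaffe.getPoseKeypoints();
}
```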
+ + + + diff --git a/web/html/doc/classop_1_1_pose_extractor_caffe.js b/web/html/doc/classop_1_1_pose_extractor_caffe.js new file mode 100644 index 000000000..a66863e98 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor_caffe.js @@ -0,0 +1,13 @@ +var classop_1_1_pose_extractor_caffe = +[ + [ "PoseExtractorCaffe", "classop_1_1_pose_extractor_caffe.html#a682152a072d07b1b0764c2f7aab09ab7", null ], + [ "~PoseExtractorCaffe", "classop_1_1_pose_extractor_caffe.html#a3359641c1199c712a07859dcb76b7dcf", null ], + [ "forwardPass", "classop_1_1_pose_extractor_caffe.html#a9f8677779c9c07c0fd4ac265cd8d2d8f", null ], + [ "getCandidatesCpuConstPtr", "classop_1_1_pose_extractor_caffe.html#a1444ad1ee245a5bcd9e0b5b55395d6d8", null ], + [ "getCandidatesGpuConstPtr", "classop_1_1_pose_extractor_caffe.html#a499d975f7b6add768425271b2af19a2e", null ], + [ "getHeatMapCpuConstPtr", "classop_1_1_pose_extractor_caffe.html#a9e8056cd50ba679636c5d5055f5a563b", null ], + [ "getHeatMapGpuConstPtr", "classop_1_1_pose_extractor_caffe.html#ac4737f29b467f6c0daad5f54aa20524b", null ], + [ "getHeatMapSize", "classop_1_1_pose_extractor_caffe.html#a350900a3b326f4ed7d3dcb9531055523", null ], + [ "getPoseGpuConstPtr", "classop_1_1_pose_extractor_caffe.html#a6ffc941073b66868177c91cc9e025098", null ], + [ "netInitializationOnThread", "classop_1_1_pose_extractor_caffe.html#ae5d41065ea3eaf37d2c9663aa35554d6", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pose_extractor_caffe.png b/web/html/doc/classop_1_1_pose_extractor_caffe.png new file mode 100644 index 000000000..8533b4bbc Binary files /dev/null and b/web/html/doc/classop_1_1_pose_extractor_caffe.png differ diff --git a/web/html/doc/classop_1_1_pose_extractor_net-members.html b/web/html/doc/classop_1_1_pose_extractor_net-members.html new file mode 100644 index 000000000..910c9ba0a --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor_net-members.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::PoseExtractorNet Member List
+
+
+ +

This is the complete list of members for op::PoseExtractorNet, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
checkThread() constop::PoseExtractorNetprotected
clear()op::PoseExtractorNet
forwardPass(const std::vector< Array< float >> &inputNetData, const Point< int > &inputDataSize, const std::vector< double > &scaleRatios={1.f}, const Array< float > &poseNetOutput=Array< float >{})=0op::PoseExtractorNetpure virtual
get(const PoseProperty property) constop::PoseExtractorNet
getCandidatesCopy() constop::PoseExtractorNet
getCandidatesCpuConstPtr() const =0op::PoseExtractorNetpure virtual
getCandidatesGpuConstPtr() const =0op::PoseExtractorNetpure virtual
getHeatMapCpuConstPtr() const =0op::PoseExtractorNetpure virtual
getHeatMapGpuConstPtr() const =0op::PoseExtractorNetpure virtual
getHeatMapsCopy() constop::PoseExtractorNet
getHeatMapSize() const =0op::PoseExtractorNetpure virtual
getPoseGpuConstPtr() const =0op::PoseExtractorNetpure virtual
getPoseKeypoints() constop::PoseExtractorNet
getPoseScores() constop::PoseExtractorNet
getScaleNetToOutput() constop::PoseExtractorNet
increase(const PoseProperty property, const double value)op::PoseExtractorNet
initializationOnThread()op::PoseExtractorNet
mNetOutputSizeop::PoseExtractorNetprotected
mPoseKeypointsop::PoseExtractorNetprotected
mPoseModelop::PoseExtractorNetprotected
mPoseScoresop::PoseExtractorNetprotected
mScaleNetToOutputop::PoseExtractorNetprotected
netInitializationOnThread()=0op::PoseExtractorNetprotectedpure virtual
PoseExtractorNet(const PoseModel poseModel, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool addPartCandidates=false, const bool maximizePositives=false)op::PoseExtractorNet
set(const PoseProperty property, const double value)op::PoseExtractorNet
~PoseExtractorNet()op::PoseExtractorNetvirtual
+
diff --git a/web/html/doc/classop_1_1_pose_extractor_net.html b/web/html/doc/classop_1_1_pose_extractor_net.html
new file mode 100644
index 000000000..48dbf3816
--- /dev/null
+++ b/web/html/doc/classop_1_1_pose_extractor_net.html
@@ -0,0 +1,836 @@
op::PoseExtractorNet Class Reference (abstract)
+
+
+ +

#include <poseExtractorNet.hpp>

+
+Inheritance diagram for op::PoseExtractorNet:
+
+
+ + +op::PoseExtractorCaffe + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 PoseExtractorNet (const PoseModel poseModel, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool addPartCandidates=false, const bool maximizePositives=false)
 
virtual ~PoseExtractorNet ()
 
void initializationOnThread ()
 
virtual void forwardPass (const std::vector< Array< float >> &inputNetData, const Point< int > &inputDataSize, const std::vector< double > &scaleRatios={1.f}, const Array< float > &poseNetOutput=Array< float >{})=0
 
virtual const float * getCandidatesCpuConstPtr () const =0
 
virtual const float * getCandidatesGpuConstPtr () const =0
 
virtual const float * getHeatMapCpuConstPtr () const =0
 
virtual const float * getHeatMapGpuConstPtr () const =0
 
virtual std::vector< int > getHeatMapSize () const =0
 
Array< float > getHeatMapsCopy () const
 
std::vector< std::vector< std::array< float, 3 > > > getCandidatesCopy () const
 
virtual const float * getPoseGpuConstPtr () const =0
 
Array< float > getPoseKeypoints () const
 
Array< float > getPoseScores () const
 
float getScaleNetToOutput () const
 
double get (const PoseProperty property) const
 
void set (const PoseProperty property, const double value)
 
void increase (const PoseProperty property, const double value)
 
void clear ()
 
+ + + + + +

+Protected Member Functions

void checkThread () const
 
virtual void netInitializationOnThread ()=0
 
+ + + + + + + + + + + +

+Protected Attributes

const PoseModel mPoseModel
 
Point< int > mNetOutputSize
 
Array< float > mPoseKeypoints
 
Array< float > mPoseScores
 
float mScaleNetToOutput
 
+

Detailed Description

+
+

Definition at line 11 of file poseExtractorNet.hpp.
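For orientation, the following is a minimal sketch of how a concrete extractor (e.g. op::PoseExtractorCaffe, the only subclass shown in the inheritance diagram above) is typically driven through this interface. It is not an official OpenPose sample; the include path and the preprocessing that produces inputNetData are assumptions.

#include <memory>
#include <vector>
#include <openpose/pose/poseExtractorNet.hpp> // assumed include path

// Runs one forward pass over already-preprocessed network input and reads back the results.
void runPoseExtractor(const std::shared_ptr<op::PoseExtractorNet>& poseExtractorNet,
                      const std::vector<op::Array<float>>& inputNetData, // preprocessed input, one Array per scale
                      const op::Point<int>& inputDataSize)               // original image resolution
{
    // Must run on the thread that owns the network (see initializationOnThread()).
    poseExtractorNet->initializationOnThread();
    // scaleRatios and poseNetOutput keep their documented default values here.
    poseExtractorNet->forwardPass(inputNetData, inputDataSize);
    const op::Array<float> poseKeypoints = poseExtractorNet->getPoseKeypoints();
    const op::Array<float> poseScores = poseExtractorNet->getPoseScores();
    const float scaleNetToOutput = poseExtractorNet->getScaleNetToOutput();
    (void)poseKeypoints; (void)poseScores; (void)scaleNetToOutput; // processed downstream in a real pipeline
}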

+

Constructor & Destructor Documentation

+ +

◆ PoseExtractorNet()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::PoseExtractorNet::PoseExtractorNet (const PoseModel poseModel,
const std::vector< HeatMapType > & heatMapTypes = {},
const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect,
const bool addPartCandidates = false,
const bool maximizePositives = false 
)
+
+ +
+
+ +

◆ ~PoseExtractorNet()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PoseExtractorNet::~PoseExtractorNet ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ checkThread()

+ +
+
+ + + + + +
+ + + + + + + +
void op::PoseExtractorNet::checkThread () const
+
+protected
+
+ +
+
+ +

◆ clear()

+ +
+
+ + + + + + + +
void op::PoseExtractorNet::clear ()
+
+ +
+
+ +

◆ forwardPass()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::PoseExtractorNet::forwardPass (const std::vector< Array< float >> & inputNetData,
const Point< int > & inputDataSize,
const std::vector< double > & scaleRatios = {1.f},
const Array< float > & poseNetOutput = Array< float >{} 
)
+
+pure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ get()

+ +
+
+ + + + + + + + +
double op::PoseExtractorNet::get (const PoseProperty property) const
+
+ +
+
+ +

◆ getCandidatesCopy()

+ +
+
+ + + + + + + +
std::vector<std::vector<std::array<float,3> > > op::PoseExtractorNet::getCandidatesCopy () const
+
+ +
+
+ +

◆ getCandidatesCpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
virtual const float* op::PoseExtractorNet::getCandidatesCpuConstPtr () const
+
+pure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ getCandidatesGpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
virtual const float* op::PoseExtractorNet::getCandidatesGpuConstPtr () const
+
+pure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ getHeatMapCpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
virtual const float* op::PoseExtractorNet::getHeatMapCpuConstPtr () const
+
+pure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ getHeatMapGpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
virtual const float* op::PoseExtractorNet::getHeatMapGpuConstPtr () const
+
+pure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ getHeatMapsCopy()

+ +
+
+ + + + + + + +
Array<float> op::PoseExtractorNet::getHeatMapsCopy () const
+
+ +
+
+ +

◆ getHeatMapSize()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::vector<int> op::PoseExtractorNet::getHeatMapSize () const
+
+pure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ getPoseGpuConstPtr()

+ +
+
+ + + + + +
+ + + + + + + +
virtual const float* op::PoseExtractorNet::getPoseGpuConstPtr () const
+
+pure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ getPoseKeypoints()

+ +
+
+ + + + + + + +
Array<float> op::PoseExtractorNet::getPoseKeypoints () const
+
+ +
+
+ +

◆ getPoseScores()

+ +
+
+ + + + + + + +
Array<float> op::PoseExtractorNet::getPoseScores () const
+
+ +
+
+ +

◆ getScaleNetToOutput()

+ +
+
+ + + + + + + +
float op::PoseExtractorNet::getScaleNetToOutput () const
+
+ +
+
+ +

◆ increase()

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::PoseExtractorNet::increase (const PoseProperty property,
const double value 
)
+
+ +
+
+ +

◆ initializationOnThread()

+ +
+
+ + + + + + + +
void op::PoseExtractorNet::initializationOnThread ()
+
+ +
+
+ +

◆ netInitializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::PoseExtractorNet::netInitializationOnThread ()
+
+protectedpure virtual
+
+ +

Implemented in op::PoseExtractorCaffe.

+ +
+
+ +

◆ set()

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::PoseExtractorNet::set (const PoseProperty property,
const double value 
)
+
+ +
+
+

Member Data Documentation

+ +

◆ mNetOutputSize

+ +
+
+ + + + + +
+ + + + +
Point<int> op::PoseExtractorNet::mNetOutputSize
+
+protected
+
+ +

Definition at line 60 of file poseExtractorNet.hpp.

+ +
+
+ +

◆ mPoseKeypoints

+ +
+
+ + + + + +
+ + + + +
Array<float> op::PoseExtractorNet::mPoseKeypoints
+
+protected
+
+ +

Definition at line 61 of file poseExtractorNet.hpp.

+ +
+
+ +

◆ mPoseModel

+ +
+
+ + + + + +
+ + + + +
const PoseModel op::PoseExtractorNet::mPoseModel
+
+protected
+
+ +

Definition at line 59 of file poseExtractorNet.hpp.

+ +
+
+ +

◆ mPoseScores

+ +
+
+ + + + + +
+ + + + +
Array<float> op::PoseExtractorNet::mPoseScores
+
+protected
+
+ +

Definition at line 62 of file poseExtractorNet.hpp.

+ +
+
+ +

◆ mScaleNetToOutput

+ +
+
+ + + + + +
+ + + + +
float op::PoseExtractorNet::mScaleNetToOutput
+
+protected
+
+ +

Definition at line 63 of file poseExtractorNet.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_pose_extractor_net.js b/web/html/doc/classop_1_1_pose_extractor_net.js new file mode 100644 index 000000000..15c55900a --- /dev/null +++ b/web/html/doc/classop_1_1_pose_extractor_net.js @@ -0,0 +1,29 @@ +var classop_1_1_pose_extractor_net = +[ + [ "PoseExtractorNet", "classop_1_1_pose_extractor_net.html#a5503fceecf280b6b1ed6e3251de46e26", null ], + [ "~PoseExtractorNet", "classop_1_1_pose_extractor_net.html#a963c679df20b16d475aa3a7c0661135c", null ], + [ "checkThread", "classop_1_1_pose_extractor_net.html#a840c6fbdbf59d088d966ad26d45572a4", null ], + [ "clear", "classop_1_1_pose_extractor_net.html#a3fe7256d9860f4c624f5cf928556bc28", null ], + [ "forwardPass", "classop_1_1_pose_extractor_net.html#a95c48a9fc5368af73a54aa66e44b4bc2", null ], + [ "get", "classop_1_1_pose_extractor_net.html#aa9138224f4977da54517398ba044b7c3", null ], + [ "getCandidatesCopy", "classop_1_1_pose_extractor_net.html#a56d7dd1157e70786850169897bcf6883", null ], + [ "getCandidatesCpuConstPtr", "classop_1_1_pose_extractor_net.html#a3e73f27594e61bf451b8e9fff7695f62", null ], + [ "getCandidatesGpuConstPtr", "classop_1_1_pose_extractor_net.html#abee987adbe411ca71b6b37ab9cd89a41", null ], + [ "getHeatMapCpuConstPtr", "classop_1_1_pose_extractor_net.html#a80cb59fa161a7ecd3d6a016354ab9002", null ], + [ "getHeatMapGpuConstPtr", "classop_1_1_pose_extractor_net.html#ad1b526d42f690a8857c0ccdc88ff88ac", null ], + [ "getHeatMapsCopy", "classop_1_1_pose_extractor_net.html#ad6e1c91c60cf0041c196fd4347bbcdf5", null ], + [ "getHeatMapSize", "classop_1_1_pose_extractor_net.html#a49e1dcb9f9d049131df866b7538507cd", null ], + [ "getPoseGpuConstPtr", "classop_1_1_pose_extractor_net.html#a546f0d6e0c62c7c7e2d44de848f9a174", null ], + [ "getPoseKeypoints", "classop_1_1_pose_extractor_net.html#a3e88bd2122835db768c123d1026ce30f", null ], + [ "getPoseScores", "classop_1_1_pose_extractor_net.html#a43317a6868ffa7391586f2b8b599ecdf", null ], + [ "getScaleNetToOutput", "classop_1_1_pose_extractor_net.html#ac67c1d8fcba15ccfb284f10776e9fd89", null ], + [ "increase", "classop_1_1_pose_extractor_net.html#a4959a9c9d433d9297e5daef0e3a0eabc", null ], + [ "initializationOnThread", "classop_1_1_pose_extractor_net.html#a28923c846dc7c731d3571c72a50acd2f", null ], + [ "netInitializationOnThread", "classop_1_1_pose_extractor_net.html#aa8bf8cdfdede22410e2dfcea5d3f0cdc", null ], + [ "set", "classop_1_1_pose_extractor_net.html#a7e49f2339e21ff784689ec78c9d69b75", null ], + [ "mNetOutputSize", "classop_1_1_pose_extractor_net.html#aab49f9af9f5d7e4e64957dc0feb60ca7", null ], + [ "mPoseKeypoints", "classop_1_1_pose_extractor_net.html#aaaa4c619868bbf6306a549280002a2c6", null ], + [ "mPoseModel", "classop_1_1_pose_extractor_net.html#a8595789b244399ecd9c4b2a774f2c74b", null ], + [ "mPoseScores", "classop_1_1_pose_extractor_net.html#a528c3056546b0759fafb249a02edd1b6", null ], + [ "mScaleNetToOutput", "classop_1_1_pose_extractor_net.html#a67ea32116dfaff15cc16e5a0a2bef822", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pose_extractor_net.png b/web/html/doc/classop_1_1_pose_extractor_net.png new file mode 100644 index 000000000..39da92a91 Binary files /dev/null and b/web/html/doc/classop_1_1_pose_extractor_net.png differ diff --git a/web/html/doc/classop_1_1_pose_gpu_renderer-members.html b/web/html/doc/classop_1_1_pose_gpu_renderer-members.html new file mode 100644 index 000000000..ae1d7e402 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_gpu_renderer-members.html @@ -0,0 +1,137 @@ + + + + + + + 
op::PoseGpuRenderer Member List
+
+
+ +

This is the complete list of members for op::PoseGpuRenderer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
cpuToGpuMemoryIfNotCopiedYet(const float *const cpuMemory, const unsigned long long memoryVolume)op::GpuRendererprotected
getAlphaHeatMap() constop::Renderer
getAlphaKeypoint() constop::Renderer
getBlendOriginalFrame() constop::Renderer
getSharedParameters()op::GpuRenderer
getShowGooglyEyes() constop::Renderer
GpuRenderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::GpuRendererexplicit
gpuToCpuMemoryIfLastRenderer(float *cpuMemory, const unsigned long long memoryVolume)op::GpuRendererprotected
increaseElementToRender(const int increment)op::Renderer
initializationOnThread()op::PoseGpuRenderervirtual
mBlendOriginalFrameop::Rendererprotected
mPartIndexToNameop::PoseRendererprotected
mPoseModelop::PoseRendererprotected
mRenderThresholdop::Rendererprotected
mShowGooglyEyesop::Rendererprotected
PoseGpuRenderer(const PoseModel poseModel, const std::shared_ptr< PoseExtractorNet > &poseExtractorNet, const float renderThreshold, const bool blendOriginalFrame=true, const float alphaKeypoint=POSE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=POSE_DEFAULT_ALPHA_HEAT_MAP, const unsigned int elementToRender=0u)op::PoseGpuRenderer
PoseRenderer(const PoseModel poseModel)op::PoseRenderer
Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::Rendererexplicit
renderPose(Array< float > &outputData, const Array< float > &poseKeypoints, const float scaleInputToOutput, const float scaleNetToOutput=-1.f)op::PoseGpuRenderervirtual
setAlphaHeatMap(const float alphaHeatMap)op::Renderer
setAlphaKeypoint(const float alphaKeypoint)op::Renderer
setBlendOriginalFrame(const bool blendOriginalFrame)op::Renderer
setElementToRender(const int elementToRender)op::Renderer
setElementToRender(const ElementToRender elementToRender)op::Renderer
setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)op::GpuRenderer
setSharedParametersAndIfLast(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)op::GpuRenderer
setShowGooglyEyes(const bool showGooglyEyes)op::Renderer
spElementToRenderop::Rendererprotected
spGpuMemoryop::GpuRendererprotected
spNumberElementsToRenderop::Rendererprotected
~GpuRenderer()op::GpuRenderervirtual
~PoseGpuRenderer()op::PoseGpuRenderervirtual
~PoseRenderer()op::PoseRenderervirtual
~Renderer()op::Renderervirtual
+
diff --git a/web/html/doc/classop_1_1_pose_gpu_renderer.html b/web/html/doc/classop_1_1_pose_gpu_renderer.html
new file mode 100644
index 000000000..4ea3b8629
--- /dev/null
+++ b/web/html/doc/classop_1_1_pose_gpu_renderer.html
@@ -0,0 +1,367 @@
op::PoseGpuRenderer Class Reference
+
+
+ +

#include <poseGpuRenderer.hpp>

+
+Inheritance diagram for op::PoseGpuRenderer:
+
+
+ + +op::GpuRenderer +op::PoseRenderer +op::Renderer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 PoseGpuRenderer (const PoseModel poseModel, const std::shared_ptr< PoseExtractorNet > &poseExtractorNet, const float renderThreshold, const bool blendOriginalFrame=true, const float alphaKeypoint=POSE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=POSE_DEFAULT_ALPHA_HEAT_MAP, const unsigned int elementToRender=0u)
 
virtual ~PoseGpuRenderer ()
 
void initializationOnThread ()
 
std::pair< int, std::string > renderPose (Array< float > &outputData, const Array< float > &poseKeypoints, const float scaleInputToOutput, const float scaleNetToOutput=-1.f)
 
- Public Member Functions inherited from op::GpuRenderer
 GpuRenderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~GpuRenderer ()
 
std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int > >, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int > > getSharedParameters ()
 
void setSharedParametersAndIfLast (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)
 
void setSharedParameters (const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)
 
- Public Member Functions inherited from op::Renderer
 Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~Renderer ()
 
void increaseElementToRender (const int increment)
 
void setElementToRender (const int elementToRender)
 
void setElementToRender (const ElementToRender elementToRender)
 
bool getBlendOriginalFrame () const
 
void setBlendOriginalFrame (const bool blendOriginalFrame)
 
float getAlphaKeypoint () const
 
void setAlphaKeypoint (const float alphaKeypoint)
 
float getAlphaHeatMap () const
 
void setAlphaHeatMap (const float alphaHeatMap)
 
bool getShowGooglyEyes () const
 
void setShowGooglyEyes (const bool showGooglyEyes)
 
- Public Member Functions inherited from op::PoseRenderer
 PoseRenderer (const PoseModel poseModel)
 
virtual ~PoseRenderer ()
 
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::GpuRenderer
void cpuToGpuMemoryIfNotCopiedYet (const float *const cpuMemory, const unsigned long long memoryVolume)
 
void gpuToCpuMemoryIfLastRenderer (float *cpuMemory, const unsigned long long memoryVolume)
 
- Protected Attributes inherited from op::GpuRenderer
std::shared_ptr< float * > spGpuMemory
 
- Protected Attributes inherited from op::Renderer
const float mRenderThreshold
 
std::atomic< bool > mBlendOriginalFrame
 
std::shared_ptr< std::atomic< unsigned int > > spElementToRender
 
std::shared_ptr< const unsigned int > spNumberElementsToRender
 
std::atomic< bool > mShowGooglyEyes
 
- Protected Attributes inherited from op::PoseRenderer
const PoseModel mPoseModel
 
const std::map< unsigned int, std::string > mPartIndexToName
 
+

Detailed Description

+
+

Definition at line 14 of file poseGpuRenderer.hpp.
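As a rough illustration of the constructor and renderPose() signatures above, the sketch below wires a renderer to an existing PoseExtractorNet and draws one set of keypoints. The include path, the BODY_25 model choice and the 0.05 threshold are assumptions, not values mandated by the API.

#include <memory>
#include <utility>
#include <openpose/pose/poseGpuRenderer.hpp> // assumed include path

std::pair<int, std::string> renderOneFrame(
    const std::shared_ptr<op::PoseExtractorNet>& poseExtractorNet,
    op::Array<float>& outputData,          // frame to draw on
    const op::Array<float>& poseKeypoints, // e.g. PoseExtractorNet::getPoseKeypoints()
    const float scaleInputToOutput,
    const float scaleNetToOutput)
{
    op::PoseGpuRenderer poseGpuRenderer{op::PoseModel::BODY_25, poseExtractorNet,
                                        /*renderThreshold*/ 0.05f};
    poseGpuRenderer.initializationOnThread(); // GPU buffers are created on the rendering thread
    // Returns the rendered element index and its human-readable name.
    return poseGpuRenderer.renderPose(outputData, poseKeypoints, scaleInputToOutput, scaleNetToOutput);
}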

+

Constructor & Destructor Documentation

+ +

◆ PoseGpuRenderer()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::PoseGpuRenderer::PoseGpuRenderer (const PoseModel poseModel,
const std::shared_ptr< PoseExtractorNet > & poseExtractorNet,
const float renderThreshold,
const bool blendOriginalFrame = true,
const float alphaKeypoint = POSE_DEFAULT_ALPHA_KEYPOINT,
const float alphaHeatMap = POSE_DEFAULT_ALPHA_HEAT_MAP,
const unsigned int elementToRender = 0u 
)
+
+ +
+
+ +

◆ ~PoseGpuRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PoseGpuRenderer::~PoseGpuRenderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
void op::PoseGpuRenderer::initializationOnThread ()
+
+virtual
+
+ +

Reimplemented from op::PoseRenderer.

+ +
+
+ +

◆ renderPose()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
std::pair<int, std::string> op::PoseGpuRenderer::renderPose (Array< float > & outputData,
const Array< float > & poseKeypoints,
const float scaleInputToOutput,
const float scaleNetToOutput = -1.f 
)
+
+virtual
+
+ +

Implements op::PoseRenderer.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_pose_gpu_renderer.js b/web/html/doc/classop_1_1_pose_gpu_renderer.js new file mode 100644 index 000000000..586c6cbcb --- /dev/null +++ b/web/html/doc/classop_1_1_pose_gpu_renderer.js @@ -0,0 +1,7 @@ +var classop_1_1_pose_gpu_renderer = +[ + [ "PoseGpuRenderer", "classop_1_1_pose_gpu_renderer.html#a1582e63e33192d79f80b5879ba04d448", null ], + [ "~PoseGpuRenderer", "classop_1_1_pose_gpu_renderer.html#afe3959a08624dd71cc5797eb3938e748", null ], + [ "initializationOnThread", "classop_1_1_pose_gpu_renderer.html#a9e94ab926baf360dd6b23e14fba09836", null ], + [ "renderPose", "classop_1_1_pose_gpu_renderer.html#a4705b3c47cd9ac8174e357999960a28f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pose_gpu_renderer.png b/web/html/doc/classop_1_1_pose_gpu_renderer.png new file mode 100644 index 000000000..b377d76e9 Binary files /dev/null and b/web/html/doc/classop_1_1_pose_gpu_renderer.png differ diff --git a/web/html/doc/classop_1_1_pose_renderer-members.html b/web/html/doc/classop_1_1_pose_renderer-members.html new file mode 100644 index 000000000..645d1edc6 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_renderer-members.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::PoseRenderer Member List
+
+
+ +

This is the complete list of members for op::PoseRenderer, including all inherited members.

+ + + + + + + +
initializationOnThread()op::PoseRendererinlinevirtual
mPartIndexToNameop::PoseRendererprotected
mPoseModelop::PoseRendererprotected
PoseRenderer(const PoseModel poseModel)op::PoseRenderer
renderPose(Array< float > &outputData, const Array< float > &poseKeypoints, const float scaleInputToOutput, const float scaleNetToOutput=-1.f)=0op::PoseRendererpure virtual
~PoseRenderer()op::PoseRenderervirtual
+
diff --git a/web/html/doc/classop_1_1_pose_renderer.html b/web/html/doc/classop_1_1_pose_renderer.html
new file mode 100644
index 000000000..af4f702f9
--- /dev/null
+++ b/web/html/doc/classop_1_1_pose_renderer.html
@@ -0,0 +1,315 @@
op::PoseRenderer Class Reference (abstract)
+
+
+ +

#include <poseRenderer.hpp>

+
+Inheritance diagram for op::PoseRenderer:
+
+
+ + +op::PoseCpuRenderer +op::PoseGpuRenderer + +
+ + + + + + + + + + +

+Public Member Functions

 PoseRenderer (const PoseModel poseModel)
 
virtual ~PoseRenderer ()
 
virtual void initializationOnThread ()
 
virtual std::pair< int, std::string > renderPose (Array< float > &outputData, const Array< float > &poseKeypoints, const float scaleInputToOutput, const float scaleNetToOutput=-1.f)=0
 
+ + + + + +

+Protected Attributes

const PoseModel mPoseModel
 
const std::map< unsigned int, std::string > mPartIndexToName
 
+

Detailed Description

+
+

Definition at line 10 of file poseRenderer.hpp.
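Since op::PoseRenderer is the abstract base that the two renderers above derive from, a hypothetical custom renderer only needs the constructor call and a renderPose() override, roughly as sketched below. This is illustrative only; the include path and the Array::getSize() usage are assumptions.

#include <string>
#include <utility>
#include <openpose/pose/poseRenderer.hpp> // assumed include path

// Toy renderer: it does not draw anything, it just reports how many people were detected.
class CountingPoseRenderer : public op::PoseRenderer
{
public:
    explicit CountingPoseRenderer(const op::PoseModel poseModel) :
        op::PoseRenderer{poseModel}
    {}

    std::pair<int, std::string> renderPose(
        op::Array<float>& outputData, const op::Array<float>& poseKeypoints,
        const float scaleInputToOutput, const float scaleNetToOutput = -1.f) override
    {
        (void)outputData; (void)scaleInputToOutput; (void)scaleNetToOutput;
        const auto numberPeople = poseKeypoints.getSize(0); // first dimension = number of people
        return std::make_pair(numberPeople, std::string{"people counted"});
    }
};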

+

Constructor & Destructor Documentation

+ +

◆ PoseRenderer()

+ +
+
+ + + + + + + + +
op::PoseRenderer::PoseRenderer (const PoseModel poseModel)
+
+ +
+
+ +

◆ ~PoseRenderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PoseRenderer::~PoseRenderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::PoseRenderer::initializationOnThread ()
+
+inlinevirtual
+
+ +

Reimplemented in op::PoseGpuRenderer.

+ +

Definition at line 17 of file poseRenderer.hpp.

+ +
+
+ +

◆ renderPose()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
virtual std::pair<int, std::string> op::PoseRenderer::renderPose (Array< float > & outputData,
const Array< float > & poseKeypoints,
const float scaleInputToOutput,
const float scaleNetToOutput = -1.f 
)
+
+pure virtual
+
+ +

Implemented in op::PoseGpuRenderer, and op::PoseCpuRenderer.

+ +
+
+

Member Data Documentation

+ +

◆ mPartIndexToName

+ +
+
+ + + + + +
+ + + + +
const std::map<unsigned int, std::string> op::PoseRenderer::mPartIndexToName
+
+protected
+
+ +

Definition at line 25 of file poseRenderer.hpp.

+ +
+
+ +

◆ mPoseModel

+ +
+
+ + + + + +
+ + + + +
const PoseModel op::PoseRenderer::mPoseModel
+
+protected
+
+ +

Definition at line 24 of file poseRenderer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_pose_renderer.js b/web/html/doc/classop_1_1_pose_renderer.js new file mode 100644 index 000000000..d4d61d4bb --- /dev/null +++ b/web/html/doc/classop_1_1_pose_renderer.js @@ -0,0 +1,9 @@ +var classop_1_1_pose_renderer = +[ + [ "PoseRenderer", "classop_1_1_pose_renderer.html#a1dfd34d42fa69913a9702e0a0ebcd04e", null ], + [ "~PoseRenderer", "classop_1_1_pose_renderer.html#a8ff2470d813201e992cd5e07bab23386", null ], + [ "initializationOnThread", "classop_1_1_pose_renderer.html#af861d8213f1444b3246402061cea1b33", null ], + [ "renderPose", "classop_1_1_pose_renderer.html#ad1e96ceb47bf205b56b50c6b2792f9e3", null ], + [ "mPartIndexToName", "classop_1_1_pose_renderer.html#aecc0a9296ca880ad6ceaf38ecd8c3c53", null ], + [ "mPoseModel", "classop_1_1_pose_renderer.html#a9fea1f9ce47b4b5f1015cae13f4ddcb1", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_pose_renderer.png b/web/html/doc/classop_1_1_pose_renderer.png new file mode 100644 index 000000000..ab6816804 Binary files /dev/null and b/web/html/doc/classop_1_1_pose_renderer.png differ diff --git a/web/html/doc/classop_1_1_pose_triangulation-members.html b/web/html/doc/classop_1_1_pose_triangulation-members.html new file mode 100644 index 000000000..977eb4627 --- /dev/null +++ b/web/html/doc/classop_1_1_pose_triangulation-members.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::PoseTriangulation Member List
+
+
+ +

This is the complete list of members for op::PoseTriangulation, including all inherited members.

+ + + + + + +
initializationOnThread()op::PoseTriangulation
PoseTriangulation(const int minViews3d)op::PoseTriangulation
reconstructArray(const std::vector< Array< float >> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) constop::PoseTriangulation
reconstructArray(const std::vector< std::vector< Array< float >>> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) constop::PoseTriangulation
~PoseTriangulation()op::PoseTriangulationvirtual
+
diff --git a/web/html/doc/classop_1_1_pose_triangulation.html b/web/html/doc/classop_1_1_pose_triangulation.html
new file mode 100644
index 000000000..73dc825df
--- /dev/null
+++ b/web/html/doc/classop_1_1_pose_triangulation.html
@@ -0,0 +1,256 @@
op::PoseTriangulation Class Reference
+
+
+ +

#include <poseTriangulation.hpp>

+ + + + + + + + + + + + +

+Public Member Functions

 PoseTriangulation (const int minViews3d)
 
virtual ~PoseTriangulation ()
 
void initializationOnThread ()
 
Array< float > reconstructArray (const std::vector< Array< float >> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) const
 
std::vector< Array< float > > reconstructArray (const std::vector< std::vector< Array< float >>> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) const
 
+

Detailed Description

+
+

Definition at line 8 of file poseTriangulation.hpp.
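To make the reconstructArray() overloads above concrete, here is a hedged sketch of triangulating one set of multi-view keypoints. The include path and the choice of minViews3d = 2 are assumptions, and the camera matrices are expected to come from the calibration module.

#include <vector>
#include <openpose/3d/poseTriangulation.hpp> // assumed include path

op::Array<float> triangulateKeypoints(
    const std::vector<op::Array<float>>& keypointsPerView, // one keypoint Array per camera
    const std::vector<op::Matrix>& cameraMatrices,         // one projection matrix per camera
    const std::vector<op::Point<int>>& imageSizes)         // resolution of each view
{
    // Require a keypoint to be seen in at least 2 views before reconstructing it in 3-D.
    op::PoseTriangulation poseTriangulation{/*minViews3d*/ 2};
    poseTriangulation.initializationOnThread();
    return poseTriangulation.reconstructArray(keypointsPerView, cameraMatrices, imageSizes);
}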

+

Constructor & Destructor Documentation

+ +

◆ PoseTriangulation()

+ +
+
+ + + + + + + + +
op::PoseTriangulation::PoseTriangulation (const int minViews3d)
+
+ +
+
+ +

◆ ~PoseTriangulation()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::PoseTriangulation::~PoseTriangulation ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+ + + + + + + +
void op::PoseTriangulation::initializationOnThread ()
+
+ +
+
+ +

◆ reconstructArray() [1/2]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
Array<float> op::PoseTriangulation::reconstructArray (const std::vector< Array< float >> & keypointsVector,
const std::vector< Matrix > & cameraMatrices,
const std::vector< Point< int >> & imageSizes 
) const
+
+ +
+
+ +

◆ reconstructArray() [2/2]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
std::vector<Array<float> > op::PoseTriangulation::reconstructArray (const std::vector< std::vector< Array< float >>> & keypointsVector,
const std::vector< Matrix > & cameraMatrices,
const std::vector< Point< int >> & imageSizes 
) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_pose_triangulation.js b/web/html/doc/classop_1_1_pose_triangulation.js new file mode 100644 index 000000000..082847aef --- /dev/null +++ b/web/html/doc/classop_1_1_pose_triangulation.js @@ -0,0 +1,8 @@ +var classop_1_1_pose_triangulation = +[ + [ "PoseTriangulation", "classop_1_1_pose_triangulation.html#a7858f0c4adf7845c2be072e0985af3ee", null ], + [ "~PoseTriangulation", "classop_1_1_pose_triangulation.html#a3f4764c7063d9849b75a354a6a92f062", null ], + [ "initializationOnThread", "classop_1_1_pose_triangulation.html#a90436697faa45a3676087426763014f4", null ], + [ "reconstructArray", "classop_1_1_pose_triangulation.html#a519abdb2477c518a565803a5ef5bdc1e", null ], + [ "reconstructArray", "classop_1_1_pose_triangulation.html#adc3cf7eb81cb9e7d7f72fda0602ed89b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_priority_queue-members.html b/web/html/doc/classop_1_1_priority_queue-members.html new file mode 100644 index 000000000..a0ba60233 --- /dev/null +++ b/web/html/doc/classop_1_1_priority_queue-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::PriorityQueue< TDatums, TQueue > Member List
+
+
+ +

This is the complete list of members for op::PriorityQueue< TDatums, TQueue >, including all inherited members.

+ + + + +
front() constop::PriorityQueue< TDatums, TQueue >
PriorityQueue(const long long maxSize=256)op::PriorityQueue< TDatums, TQueue >explicit
~PriorityQueue()op::PriorityQueue< TDatums, TQueue >virtual
+
diff --git a/web/html/doc/classop_1_1_priority_queue.html b/web/html/doc/classop_1_1_priority_queue.html
new file mode 100644
index 000000000..a11be3874
--- /dev/null
+++ b/web/html/doc/classop_1_1_priority_queue.html
@@ -0,0 +1,207 @@
op::PriorityQueue< TDatums, TQueue > Class Template Reference
+
+
+ +

#include <priorityQueue.hpp>

+
+Inheritance diagram for op::PriorityQueue< TDatums, TQueue >:
+
+
+ +
+ + + + + + + + +

+Public Member Functions

 PriorityQueue (const long long maxSize=256)
 
virtual ~PriorityQueue ()
 
TDatums front () const
 
+

Detailed Description

+

template<typename TDatums, typename TQueue = std::priority_queue<TDatums, std::vector<TDatums>, std::greater<TDatums>>>
+class op::PriorityQueue< TDatums, TQueue >

+ + +

Definition at line 11 of file priorityQueue.hpp.
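The default TQueue above (std::priority_queue with std::greater) means that front() yields the smallest queued element first. The sketch below assumes that op::PriorityQueue inherits the push/pop interface of op::QueueBase documented with op::Queue later in this file; the include path and the int payload are likewise assumptions.

#include <openpose/thread/priorityQueue.hpp> // assumed include path

void priorityQueueExample()
{
    op::PriorityQueue<int> priorityQueue{/*maxSize*/ 256};
    int first = 3, second = 1, third = 2;
    priorityQueue.tryEmplace(first);   // assumed to be inherited from op::QueueBase
    priorityQueue.tryEmplace(second);
    priorityQueue.tryEmplace(third);
    const int smallest = priorityQueue.front(); // 1: std::greater orders the minimum first
    (void)smallest;
}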

+

Constructor & Destructor Documentation

+ +

◆ PriorityQueue()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + + + + + +
op::PriorityQueue< TDatums, TQueue >::PriorityQueue (const long long maxSize = 256)
+
+explicit
+
+ +

Definition at line 36 of file priorityQueue.hpp.

+ +
+
+ +

◆ ~PriorityQueue()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
op::PriorityQueue< TDatums, TQueue >::~PriorityQueue
+
+virtual
+
+ +

Definition at line 46 of file priorityQueue.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ front()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
TDatums op::PriorityQueue< TDatums, TQueue >::front
+
+ +

Definition at line 51 of file priorityQueue.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_priority_queue.js b/web/html/doc/classop_1_1_priority_queue.js new file mode 100644 index 000000000..4e4d22437 --- /dev/null +++ b/web/html/doc/classop_1_1_priority_queue.js @@ -0,0 +1,6 @@ +var classop_1_1_priority_queue = +[ + [ "PriorityQueue", "classop_1_1_priority_queue.html#acecdd3c5789942777652b66d08578d93", null ], + [ "~PriorityQueue", "classop_1_1_priority_queue.html#a469b458b035822f01b212c089d4245bc", null ], + [ "front", "classop_1_1_priority_queue.html#a8e468dfaed310e54987cbb8cb1cef909", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_priority_queue.png b/web/html/doc/classop_1_1_priority_queue.png new file mode 100644 index 000000000..09fab6cab Binary files /dev/null and b/web/html/doc/classop_1_1_priority_queue.png differ diff --git a/web/html/doc/classop_1_1_producer-members.html b/web/html/doc/classop_1_1_producer-members.html new file mode 100644 index 000000000..91652cf83 --- /dev/null +++ b/web/html/doc/classop_1_1_producer-members.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::Producer Member List
+
+
+ +

This is the complete list of members for op::Producer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + +
checkFrameIntegrity(Matrix &frame)op::Producerprotected
get(const int capProperty)=0op::Producerpure virtual
get(const ProducerProperty property)op::Producer
getCameraExtrinsics()op::Producervirtual
getCameraIntrinsics()op::Producervirtual
getCameraMatrices()op::Producervirtual
getFrame()op::Producer
getFrames()op::Producer
getNextFrameName()=0op::Producerpure virtual
getRawFrame()=0op::Producerprotectedpure virtual
getRawFrames()=0op::Producerprotectedpure virtual
getType()op::Producerinline
ifEndedResetOrRelease()op::Producerprotected
isOpened() const =0op::Producerpure virtual
keepDesiredFrameRate()op::Producerprotected
Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)op::Producerexplicit
release()=0op::Producerpure virtual
set(const int capProperty, const double value)=0op::Producerpure virtual
set(const ProducerProperty property, const double value)op::Producer
setProducerFpsMode(const ProducerFpsMode fpsMode)op::Producer
~Producer()op::Producervirtual
+
diff --git a/web/html/doc/classop_1_1_producer.html b/web/html/doc/classop_1_1_producer.html
new file mode 100644
index 000000000..db1a7d3b3
--- /dev/null
+++ b/web/html/doc/classop_1_1_producer.html
@@ -0,0 +1,789 @@
op::Producer Class Reference (abstract)
+
+
+ +

#include <producer.hpp>

+
+Inheritance diagram for op::Producer:
+
+
+ + +op::FlirReader +op::ImageDirectoryReader +op::VideoCaptureReader +op::IpCameraReader +op::VideoReader +op::WebcamReader + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 Producer (const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
 
virtual ~Producer ()
 
Matrix getFrame ()
 
std::vector< MatrixgetFrames ()
 
virtual std::vector< MatrixgetCameraMatrices ()
 
virtual std::vector< MatrixgetCameraExtrinsics ()
 
virtual std::vector< MatrixgetCameraIntrinsics ()
 
virtual std::string getNextFrameName ()=0
 
void setProducerFpsMode (const ProducerFpsMode fpsMode)
 
ProducerType getType ()
 
virtual bool isOpened () const =0
 
virtual void release ()=0
 
virtual double get (const int capProperty)=0
 
virtual void set (const int capProperty, const double value)=0
 
double get (const ProducerProperty property)
 
void set (const ProducerProperty property, const double value)
 
+ + + + + + + + + + + +

+Protected Member Functions

void checkFrameIntegrity (Matrix &frame)
 
void ifEndedResetOrRelease ()
 
void keepDesiredFrameRate ()
 
virtual Matrix getRawFrame ()=0
 
virtual std::vector< MatrixgetRawFrames ()=0
 
+

Detailed Description

+

Producer is an abstract class to extract frames from a source (image directory, video file, webcam stream, etc.). It has the basic and common functions (e.g., getFrame, release & isOpened).

+ +

Definition at line 14 of file producer.hpp.
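The interface above is enough to sketch the typical read loop over any concrete producer (op::VideoReader, op::WebcamReader, etc.). This is an illustration rather than an official sample; the include path, the ProducerFpsMode::OriginalFps enumerator and Matrix::empty() are assumptions about the wider API.

#include <string>
#include <openpose/producer/producer.hpp> // assumed include path

void readAllFrames(op::Producer& producer)
{
    // Deliver frames at the source frame rate instead of as fast as possible.
    producer.setProducerFpsMode(op::ProducerFpsMode::OriginalFps);
    while (producer.isOpened())
    {
        const std::string frameName = producer.getNextFrameName();
        const op::Matrix frame = producer.getFrame();
        if (frame.empty())
            continue; // e.g. a corrupted frame that checkFrameIntegrity() emptied
        (void)frameName; // process `frame` here
    }
    producer.release(); // no more frames can be retrieved after this
}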

+

Constructor & Destructor Documentation

+ +

◆ Producer()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::Producer::Producer (const ProducerType type,
const std::string & cameraParameterPath,
const bool undistortImage,
const int mNumberViews 
)
+
+explicit
+
+

Constructor of Producer.

+ +
+
+ +

◆ ~Producer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::Producer::~Producer ()
+
+virtual
+
+

Destructor of Producer. It is virtual so that any child class can implement its own destructor.

+ +
+
+

Member Function Documentation

+ +

◆ checkFrameIntegrity()

+ +
+
+ + + + + +
+ + + + + + + + +
void op::Producer::checkFrameIntegrity (Matrixframe)
+
+protected
+
+

Protected function which checks that the frame keeps its integrity (some OpenCV versions might return corrupted frames within a video or webcam with a size different to the standard resolution). If the frame is corrupted, it is set to an empty Mat.

Parameters
+ + +
frameMat with the frame matrix to be checked and modified.
+
+
+ +
+
+ +

◆ get() [1/2]

+ +
+
+ + + + + +
+ + + + + + + + +
virtual double op::Producer::get (const int capProperty)
+
+pure virtual
+
+

This function is a wrapper of cv::VideoCapture::get. It allows getting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + +
capPropertyint indicating the property to be modified.
+
+
+
Returns
double returning the property value.
+ +

Implemented in op::VideoCaptureReader, op::WebcamReader, op::VideoReader, op::IpCameraReader, op::ImageDirectoryReader, and op::FlirReader.

+ +
+
+ +

◆ get() [2/2]

+ +
+
+ + + + + + + + +
double op::Producer::get (const ProducerProperty property)
+
+

Extra attributes that VideoCapture::get/set do not contain.

Parameters
+ + +
propertyProducerProperty indicating the property to be modified.
+
+
+ +
+
+ +

◆ getCameraExtrinsics()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::vector<Matrix> op::Producer::getCameraExtrinsics ()
+
+virtual
+
+

It retrieves and returns the camera extrinsic parameters from the frames producer. Declared virtual because FlirReader implements its own version.

Returns
std::vector<Mat> with the camera extrinsic parameters.
+ +

Reimplemented in op::FlirReader.

+ +
+
+ +

◆ getCameraIntrinsics()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::vector<Matrix> op::Producer::getCameraIntrinsics ()
+
+virtual
+
+

It retrieves and returns the camera intrinsic parameters from the frames producer. Declared virtual because FlirReader implements its own version.

Returns
std::vector<Mat> with the camera intrinsic parameters.
+ +

Reimplemented in op::FlirReader.

+ +
+
+ +

◆ getCameraMatrices()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::vector<Matrix> op::Producer::getCameraMatrices ()
+
+virtual
+
+

It retrieves and returns the camera matrices from the frames producer. Declared virtual because FlirReader implements its own version.

Returns
std::vector<Mat> with the camera matrices.
+ +

Reimplemented in op::FlirReader.

+ +
+
+ +

◆ getFrame()

+ +
+
+ + + + + + + +
Matrix op::Producer::getFrame ()
+
+

Main function of Producer, it retrieves and returns a new frame from the frames producer.

Returns
Mat with the new frame.
+ +
+
+ +

◆ getFrames()

+ +
+
+ + + + + + + +
std::vector<Matrix> op::Producer::getFrames ()
+
+

Analogous to getFrame, but it could return > 1 frame.

Returns
std::vector<Mat> with the new frame(s).
+ +
+
+ +

◆ getNextFrameName()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::string op::Producer::getNextFrameName ()
+
+pure virtual
+
+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.
+ +

Implemented in op::VideoCaptureReader, op::WebcamReader, op::VideoReader, op::IpCameraReader, op::ImageDirectoryReader, and op::FlirReader.

+ +
+
+ +

◆ getRawFrame()

+ +
+
+ + + + + +
+ + + + + + + +
virtual Matrix op::Producer::getRawFrame ()
+
+protectedpure virtual
+
+

Function to be defined by its child classes. It retrieves and returns a new frame from the frames producer.

Returns
Mat with the new frame.
+ +

Implemented in op::VideoCaptureReader.

+ +
+
+ +

◆ getRawFrames()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::vector<Matrix> op::Producer::getRawFrames ()
+
+protectedpure virtual
+
+

Function to be defined by its child classes. It retrieves and returns a new frame from the frames producer. It is equivalent to getRawFrame when more than one image can be returned.

Returns
std::vector<Mat> with the new frames.
+ +

Implemented in op::VideoCaptureReader.

+ +
+
+ +

◆ getType()

+ +
+
+ + + + + +
+ + + + + + + +
ProducerType op::Producer::getType ()
+
+inline
+
+

This function returns the type of producer (video, webcam, ...).

Returns
ProducerType with the kind of producer.
+ +

Definition at line 80 of file producer.hpp.

+ +
+
+ +

◆ ifEndedResetOrRelease()

+ +
+
+ + + + + +
+ + + + + + + +
void op::Producer::ifEndedResetOrRelease ()
+
+protected
+
+

Protected function which checks that the frame producer has ended. If so, it resets or releases the producer according to mRepeatWhenFinished.

+ +
+
+ +

◆ isOpened()

+ +
+
+ + + + + +
+ + + + + + + +
virtual bool op::Producer::isOpened () const
+
+pure virtual
+
+

This function returns whether the Producer instance is still opened and able to retrieve more frames.

Returns
bool indicating whether the Producer is opened.
+ +

Implemented in op::WebcamReader, op::VideoReader, op::VideoCaptureReader, op::IpCameraReader, op::ImageDirectoryReader, and op::FlirReader.

+ +
+
+ +

◆ keepDesiredFrameRate()

+ +
+
+ + + + + +
+ + + + + + + +
void op::Producer::keepDesiredFrameRate ()
+
+protected
+
+

Protected function which forces the producer to get frames at the rate of get(CV_CAP_PROP_FPS).

+ +
+
+ +

◆ release()

+ +
+
+ + + + + +
+ + + + + + + +
virtual void op::Producer::release ()
+
+pure virtual
+
+

This function releases and closes the Producer. After it is called, no more frames can be retrieved from Producer::getFrames.

+ +

Implemented in op::VideoCaptureReader, op::ImageDirectoryReader, and op::FlirReader.

+ +
+
+ +

◆ set() [1/2]

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::Producer::set (const int capProperty,
const double value 
)
+
+pure virtual
+
+

This function is a wrapper of cv::VideoCapture::set. It allows setting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + + +
capPropertyint indicating the property to be modified.
valuedouble indicating the new value to be assigned.
+
+
+ +

Implemented in op::VideoCaptureReader, op::WebcamReader, op::VideoReader, op::IpCameraReader, op::ImageDirectoryReader, and op::FlirReader.

+ +
+
+ +

◆ set() [2/2]

+ +
+
+ + + + + + + + + + + + + + + + + + +
void op::Producer::set (const ProducerProperty property,
const double value 
)
+
+

Extra attributes that VideoCapture::get/set do not contain.

Parameters
+ + + +
propertyProducerProperty indicating the property to be modified.
valuedouble indicating the new value to be assigned.
+
+
+ +
+
+ +

◆ setProducerFpsMode()

+ +
+
+ + + + + + + + +
void op::Producer::setProducerFpsMode (const ProducerFpsMode fpsMode)
+
+

This function sets whether the producer must keep the original fps frame rate or extract the frames as quickly as possible.

Parameters
+ + +
fpsModeProducerFpsMode parameter specifying the new value.
+
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_producer.js b/web/html/doc/classop_1_1_producer.js new file mode 100644 index 000000000..f5dbcfba8 --- /dev/null +++ b/web/html/doc/classop_1_1_producer.js @@ -0,0 +1,24 @@ +var classop_1_1_producer = +[ + [ "Producer", "classop_1_1_producer.html#aaec98c35fe9f2695cd31be3e2d437a61", null ], + [ "~Producer", "classop_1_1_producer.html#a8b48342b2c4003a080b17ac411f3454f", null ], + [ "checkFrameIntegrity", "classop_1_1_producer.html#abbfbe53757f75e5e77266b04e9d0fea1", null ], + [ "get", "classop_1_1_producer.html#a366881a952ad34071cc719477f08b968", null ], + [ "get", "classop_1_1_producer.html#a94d561f95384dfa0cd91113882869d06", null ], + [ "getCameraExtrinsics", "classop_1_1_producer.html#a2853a47b12ab1f32138b6d944c322ebd", null ], + [ "getCameraIntrinsics", "classop_1_1_producer.html#a6c5be8c556b0a744e11a11de3f185049", null ], + [ "getCameraMatrices", "classop_1_1_producer.html#a0d711ebc149dd71159ebc2902ccd8113", null ], + [ "getFrame", "classop_1_1_producer.html#a07f416a256a3f7e906748701ad569030", null ], + [ "getFrames", "classop_1_1_producer.html#aad1f861eaea12a3590e1beb286d023b7", null ], + [ "getNextFrameName", "classop_1_1_producer.html#ab35d570dc35573433ec86e3fce25e545", null ], + [ "getRawFrame", "classop_1_1_producer.html#ab23d9eeac2c1820be9191ab9f7bb1777", null ], + [ "getRawFrames", "classop_1_1_producer.html#ad6d701ad0867491736374d8ea753c00e", null ], + [ "getType", "classop_1_1_producer.html#a9a9424027e5bc8e0fba7c65eccc460e0", null ], + [ "ifEndedResetOrRelease", "classop_1_1_producer.html#ac72a751759ae8b5a0a99552580f7fbad", null ], + [ "isOpened", "classop_1_1_producer.html#a58590e4a409d31f839184b4bf030a68b", null ], + [ "keepDesiredFrameRate", "classop_1_1_producer.html#afad3eadd16cca0de2c2be8b083c0d56d", null ], + [ "release", "classop_1_1_producer.html#a7753ffb0daa486ab0f82873b3567f95e", null ], + [ "set", "classop_1_1_producer.html#ab30c7b3e34d962e0b17458d9a0947f6b", null ], + [ "set", "classop_1_1_producer.html#af11f1bbfbd61b9534c02c3e4839e19b0", null ], + [ "setProducerFpsMode", "classop_1_1_producer.html#a024e55b4ec769cdbc40ee21613a6ef6f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_producer.png b/web/html/doc/classop_1_1_producer.png new file mode 100644 index 000000000..e6464553b Binary files /dev/null and b/web/html/doc/classop_1_1_producer.png differ diff --git a/web/html/doc/classop_1_1_profiler-members.html b/web/html/doc/classop_1_1_profiler-members.html new file mode 100644 index 000000000..07f60ddee --- /dev/null +++ b/web/html/doc/classop_1_1_profiler-members.html @@ -0,0 +1,110 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::Profiler Member List
+
+
+ +

This is the complete list of members for op::Profiler, including all inherited members.

+ + + + + + + + +
DEFAULT_Xop::Profilerstatic
printAveragedTimeMsEveryXIterations(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)op::Profilerstatic
printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)op::Profilerstatic
profileGpuMemory(const int line, const std::string &function, const std::string &file)op::Profilerstatic
setDefaultX(const unsigned long long defaultX)op::Profilerstatic
timerEnd(const std::string &key)op::Profilerstatic
timerInit(const int line, const std::string &function, const std::string &file)op::Profilerstatic
+
diff --git a/web/html/doc/classop_1_1_profiler.html b/web/html/doc/classop_1_1_profiler.html
new file mode 100644
index 000000000..6cde6f94b
--- /dev/null
+++ b/web/html/doc/classop_1_1_profiler.html
@@ -0,0 +1,404 @@
op::Profiler Class Reference

#include <profiler.hpp>

+ + + + + + + + + + + + + + +

+Static Public Member Functions

static void setDefaultX (const unsigned long long defaultX)
 
static const std::string timerInit (const int line, const std::string &function, const std::string &file)
 
static void timerEnd (const std::string &key)
 
static void printAveragedTimeMsOnIterationX (const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
 
static void printAveragedTimeMsEveryXIterations (const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
 
static void profileGpuMemory (const int line, const std::string &function, const std::string &file)
 
+ + + +

+Static Public Attributes

static unsigned long long DEFAULT_X
 
+

Detailed Description

+
+

Definition at line 76 of file profiler.hpp.
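The static members above can be combined into the usual init/end/print pattern, roughly as follows. This is an illustrative sketch: the include path is assumed, and in practice these calls are often wrapped in OpenPose's profiling macros and may be no-ops unless profiling is enabled at build time.

#include <openpose/utilities/profiler.hpp> // assumed include path

void timedWork()
{
    // Key the timer by the current source location, mirroring the documented parameters.
    const std::string profilerKey = op::Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    // ... code to be profiled goes here ...
    op::Profiler::timerEnd(profilerKey);
    // Print the averaged elapsed time every 100 iterations of this function.
    op::Profiler::printAveragedTimeMsEveryXIterations(
        profilerKey, __LINE__, __FUNCTION__, __FILE__, 100ull);
}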

+

Member Function Documentation

+ +

◆ printAveragedTimeMsEveryXIterations()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
static void op::Profiler::printAveragedTimeMsEveryXIterations (const std::string & key,
const int line,
const std::string & function,
const std::string & file,
const unsigned long long x = DEFAULT_X 
)
+
+static
+
+ +
+
+ +

◆ printAveragedTimeMsOnIterationX()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
static void op::Profiler::printAveragedTimeMsOnIterationX (const std::string & key,
const int line,
const std::string & function,
const std::string & file,
const unsigned long long x = DEFAULT_X 
)
+
+static
+
+ +
+
+ +

◆ profileGpuMemory()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
static void op::Profiler::profileGpuMemory (const int line,
const std::string & function,
const std::string & file 
)
+
+static
+
+ +
+
+ +

◆ setDefaultX()

+ +
+
+ + + + + +
+ + + + + + + + +
static void op::Profiler::setDefaultX (const unsigned long long defaultX)
+
+static
+
+ +
+
+ +

◆ timerEnd()

+ +
+
+ + + + + +
+ + + + + + + + +
static void op::Profiler::timerEnd (const std::string & key)
+
+static
+
+ +
+
+ +

◆ timerInit()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
static const std::string op::Profiler::timerInit (const int line,
const std::string & function,
const std::string & file 
)
+
+static
+
+ +
+
+

Member Data Documentation

+ +

◆ DEFAULT_X

+ +
+
+ + + + + +
+ + + + +
unsigned long long op::Profiler::DEFAULT_X
+
+static
+
+ +

Definition at line 79 of file profiler.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_queue-members.html b/web/html/doc/classop_1_1_queue-members.html new file mode 100644 index 000000000..693979ff7 --- /dev/null +++ b/web/html/doc/classop_1_1_queue-members.html @@ -0,0 +1,136 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::Queue< TDatums, TQueue > Member List
+
+
+ +

This is the complete list of members for op::Queue< TDatums, TQueue >, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
addPopper()op::QueueBase< TDatums, std::queue< TDatums > >
addPusher()op::QueueBase< TDatums, std::queue< TDatums > >
clear()op::QueueBase< TDatums, std::queue< TDatums > >
empty() constop::QueueBase< TDatums, std::queue< TDatums > >
forceEmplace(TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
forcePush(const TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
front() constop::Queue< TDatums, TQueue >virtual
getMaxSize() constop::QueueBase< TDatums, std::queue< TDatums > >protected
isFull() constop::QueueBase< TDatums, std::queue< TDatums > >
isRunning() constop::QueueBase< TDatums, std::queue< TDatums > >
mConditionVariableop::QueueBase< TDatums, std::queue< TDatums > >protected
mMaxPoppersPushersop::QueueBase< TDatums, std::queue< TDatums > >protected
mMutexop::QueueBase< TDatums, std::queue< TDatums > >mutableprotected
mPopIsStoppedop::QueueBase< TDatums, std::queue< TDatums > >protected
mPoppersop::QueueBase< TDatums, std::queue< TDatums > >protected
mPushersop::QueueBase< TDatums, std::queue< TDatums > >protected
mPushIsStoppedop::QueueBase< TDatums, std::queue< TDatums > >protected
mTQueueop::QueueBase< TDatums, std::queue< TDatums > >protected
Queue(const long long maxSize)op::Queue< TDatums, TQueue >explicit
QueueBase(const long long maxSize=-1)op::QueueBase< TDatums, std::queue< TDatums > >explicit
size() constop::QueueBase< TDatums, std::queue< TDatums > >
stop()op::QueueBase< TDatums, std::queue< TDatums > >
stopPusher()op::QueueBase< TDatums, std::queue< TDatums > >
tryEmplace(TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
tryPop(TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
tryPop()op::QueueBase< TDatums, std::queue< TDatums > >
tryPush(const TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
waitAndEmplace(TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
waitAndPop(TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
waitAndPop()op::QueueBase< TDatums, std::queue< TDatums > >
waitAndPush(const TDatums &tDatums)op::QueueBase< TDatums, std::queue< TDatums > >
~Queue()op::Queue< TDatums, TQueue >virtual
~QueueBase()op::QueueBase< TDatums, std::queue< TDatums > >virtual
+
diff --git a/web/html/doc/classop_1_1_queue.html b/web/html/doc/classop_1_1_queue.html
new file mode 100644
index 000000000..3165dcebb
--- /dev/null
+++ b/web/html/doc/classop_1_1_queue.html
@@ -0,0 +1,286 @@
op::Queue< TDatums, TQueue > Class Template Reference

#include <queue.hpp>

Inheritance diagram for op::Queue< TDatums, TQueue >: inherits from op::QueueBase< TDatums, std::queue< TDatums > >.
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 Queue (const long long maxSize)
 
virtual ~Queue ()
 
TDatums front () const
 
- Public Member Functions inherited from op::QueueBase< TDatums, std::queue< TDatums > >
 QueueBase (const long long maxSize=-1)
 
virtual ~QueueBase ()
 
bool forceEmplace (TDatums &tDatums)
 
bool tryEmplace (TDatums &tDatums)
 
bool waitAndEmplace (TDatums &tDatums)
 
bool forcePush (const TDatums &tDatums)
 
bool tryPush (const TDatums &tDatums)
 
bool waitAndPush (const TDatums &tDatums)
 
bool tryPop (TDatums &tDatums)
 
bool tryPop ()
 
bool waitAndPop (TDatums &tDatums)
 
bool waitAndPop ()
 
bool empty () const
 
void stop ()
 
void stopPusher ()
 
void addPopper ()
 
void addPusher ()
 
bool isRunning () const
 
bool isFull () const
 
size_t size () const
 
void clear ()
 
+ + + + + + + + + + + + + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::QueueBase< TDatums, std::queue< TDatums > >
unsigned long long getMaxSize () const
 
- Protected Attributes inherited from op::QueueBase< TDatums, std::queue< TDatums > >
std::mutex mMutex
 
long long mPoppers
 
long long mPushers
 
long long mMaxPoppersPushers
 
bool mPopIsStopped
 
bool mPushIsStopped
 
std::condition_variable mConditionVariable
 
std::queue< TDatums > mTQueue
 
+

Detailed Description

+

template<typename TDatums, typename TQueue = std::queue<TDatums>>
+class op::Queue< TDatums, TQueue >

+ + +

Definition at line 11 of file queue.hpp.
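As the page itself carries no usage notes, the following is a minimal, illustrative sketch of how the members listed above could be used. The element type (int), the capacity, and the umbrella header openpose/headers.hpp (used by the official examples) are choices made for this sketch, not part of this documentation.

#include <openpose/headers.hpp>

int main()
{
    // Bounded FIFO queue of ints; the capacity (8) is an arbitrary value for this sketch.
    op::Queue<int> queue{8};

    // Non-blocking push/pop: the try* variants return false instead of blocking
    // when they cannot proceed (in contrast to the waitAnd* variants).
    queue.tryPush(42);
    int element;
    if (queue.tryPop(element))
    {
        // element now holds 42
    }

    // front() returns a copy of the oldest element without removing it.
    queue.tryPush(7);
    const auto oldest = queue.front();
    (void)oldest;

    // Release any threads that might be blocked on the queue.
    queue.stop();
    return 0;
}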

+

Constructor & Destructor Documentation

+ +

◆ Queue()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + + + + + +
op::Queue< TDatums, TQueue >::Queue (const long long maxSize)
+
+explicit
+
+ +

Definition at line 36 of file queue.hpp.

+ +
+
+ +

◆ ~Queue()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
op::Queue< TDatums, TQueue >::~Queue
+
+virtual
+
+ +

Definition at line 46 of file queue.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ front()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
TDatums op::Queue< TDatums, TQueue >::front
+
+virtual
+
+ +

Implements op::QueueBase< TDatums, std::queue< TDatums > >.

+ +

Definition at line 51 of file queue.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+
op::QueueBase< TDatums, TQueue > Member List

This is the complete list of members for op::QueueBase< TDatums, TQueue >, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
addPopper()op::QueueBase< TDatums, TQueue >
addPusher()op::QueueBase< TDatums, TQueue >
clear()op::QueueBase< TDatums, TQueue >
empty() constop::QueueBase< TDatums, TQueue >
forceEmplace(TDatums &tDatums)op::QueueBase< TDatums, TQueue >
forcePush(const TDatums &tDatums)op::QueueBase< TDatums, TQueue >
front() const =0op::QueueBase< TDatums, TQueue >pure virtual
getMaxSize() constop::QueueBase< TDatums, TQueue >protected
isFull() constop::QueueBase< TDatums, TQueue >
isRunning() constop::QueueBase< TDatums, TQueue >
mConditionVariableop::QueueBase< TDatums, TQueue >protected
mMaxPoppersPushersop::QueueBase< TDatums, TQueue >protected
mMutexop::QueueBase< TDatums, TQueue >mutableprotected
mPopIsStoppedop::QueueBase< TDatums, TQueue >protected
mPoppersop::QueueBase< TDatums, TQueue >protected
mPushersop::QueueBase< TDatums, TQueue >protected
mPushIsStoppedop::QueueBase< TDatums, TQueue >protected
mTQueueop::QueueBase< TDatums, TQueue >protected
pop(TDatums &tDatums)=0op::QueueBase< TDatums, TQueue >protectedpure virtual
QueueBase(const long long maxSize=-1)op::QueueBase< TDatums, TQueue >explicit
size() constop::QueueBase< TDatums, TQueue >
stop()op::QueueBase< TDatums, TQueue >
stopPusher()op::QueueBase< TDatums, TQueue >
tryEmplace(TDatums &tDatums)op::QueueBase< TDatums, TQueue >
tryPop(TDatums &tDatums)op::QueueBase< TDatums, TQueue >
tryPop()op::QueueBase< TDatums, TQueue >
tryPush(const TDatums &tDatums)op::QueueBase< TDatums, TQueue >
waitAndEmplace(TDatums &tDatums)op::QueueBase< TDatums, TQueue >
waitAndPop(TDatums &tDatums)op::QueueBase< TDatums, TQueue >
waitAndPop()op::QueueBase< TDatums, TQueue >
waitAndPush(const TDatums &tDatums)op::QueueBase< TDatums, TQueue >
~QueueBase()op::QueueBase< TDatums, TQueue >virtual
+
diff --git a/web/html/doc/classop_1_1_queue_base.html b/web/html/doc/classop_1_1_queue_base.html
new file mode 100644
index 000000000..2397eaf48
--- /dev/null
+++ b/web/html/doc/classop_1_1_queue_base.html
@@ -0,0 +1,915 @@
OpenPose: op::QueueBase< TDatums, TQueue > Class Template Reference
op::QueueBase< TDatums, TQueue > Class Template Reference (abstract)

#include <queueBase.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 QueueBase (const long long maxSize=-1)
 
virtual ~QueueBase ()
 
bool forceEmplace (TDatums &tDatums)
 
bool tryEmplace (TDatums &tDatums)
 
bool waitAndEmplace (TDatums &tDatums)
 
bool forcePush (const TDatums &tDatums)
 
bool tryPush (const TDatums &tDatums)
 
bool waitAndPush (const TDatums &tDatums)
 
bool tryPop (TDatums &tDatums)
 
bool tryPop ()
 
bool waitAndPop (TDatums &tDatums)
 
bool waitAndPop ()
 
bool empty () const
 
void stop ()
 
void stopPusher ()
 
void addPopper ()
 
void addPusher ()
 
bool isRunning () const
 
bool isFull () const
 
size_t size () const
 
void clear ()
 
virtual TDatums front () const =0
 
+ + + + + +

+Protected Member Functions

virtual bool pop (TDatums &tDatums)=0
 
unsigned long long getMaxSize () const
 
+ + + + + + + + + + + + + + + + + +

+Protected Attributes

std::mutex mMutex
 
long long mPoppers
 
long long mPushers
 
long long mMaxPoppersPushers
 
bool mPopIsStopped
 
bool mPushIsStopped
 
std::condition_variable mConditionVariable
 
TQueue mTQueue
 
+

Detailed Description

+

template<typename TDatums, typename TQueue>
+class op::QueueBase< TDatums, TQueue >

+ + +

Definition at line 12 of file queueBase.hpp.
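The page above only lists signatures, so the following is a speculative producer/consumer sketch using the concrete op::Queue subclass (QueueBase itself is abstract). The assumption that waitAndPop() returns false once the registered pusher has called stopPusher() and the queue has drained is inferred from the shape of the API, not stated on this page.

#include <openpose/headers.hpp>
#include <thread>

int main()
{
    op::Queue<int> queue{16};   // concrete subclass of op::QueueBase (capacity is arbitrary)
    queue.addPusher();          // register the producer thread
    queue.addPopper();          // register the consumer thread

    std::thread producer{[&queue]()
    {
        for (auto i = 0; i < 100; i++)
            queue.waitAndPush(i);   // blocks while the queue is full
        queue.stopPusher();         // signal that no more elements will be pushed
    }};

    std::thread consumer{[&queue]()
    {
        int element;
        // Assumed to return false once the queue is stopped and drained (see note above).
        while (queue.waitAndPop(element))
        {
            // ... process element ...
        }
    }};

    producer.join();
    consumer.join();
    return 0;
}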

+

Constructor & Destructor Documentation

+ +

◆ QueueBase()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + + + + + +
op::QueueBase< TDatums, TQueue >::QueueBase (const long long maxSize = -1)
+
+explicit
+
+ +

Definition at line 98 of file queueBase.hpp.

+ +
+
+ +

◆ ~QueueBase()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
op::QueueBase< TDatums, TQueue >::~QueueBase
+
+virtual
+
+ +

Definition at line 109 of file queueBase.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ addPopper()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
void op::QueueBase< TDatums, TQueue >::addPopper
+
+ +

Definition at line 342 of file queueBase.hpp.

+ +
+
+ +

◆ addPusher()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
void op::QueueBase< TDatums, TQueue >::addPusher
+
+ +

Definition at line 358 of file queueBase.hpp.

+ +
+
+ +

◆ clear()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
void op::QueueBase< TDatums, TQueue >::clear
+
+ +

Definition at line 419 of file queueBase.hpp.

+ +
+
+ +

◆ empty()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
bool op::QueueBase< TDatums, TQueue >::empty
+
+ +

Definition at line 286 of file queueBase.hpp.

+ +
+
+ +

◆ forceEmplace()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::forceEmplace (TDatums & tDatums)
+
+ +

Definition at line 124 of file queueBase.hpp.

+ +
+
+ +

◆ forcePush()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::forcePush (const TDatums & tDatums)
+
+ +

Definition at line 174 of file queueBase.hpp.

+ +
+
+ +

◆ front()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + + + + +
virtual TDatums op::QueueBase< TDatums, TQueue >::front () const
+
+pure virtual
+
+ +

Implemented in op::Queue< TDatums, TQueue >.

+ +
+
+ +

◆ getMaxSize()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
unsigned long long op::QueueBase< TDatums, TQueue >::getMaxSize
+
+protected
+
+ +

Definition at line 434 of file queueBase.hpp.

+ +
+
+ +

◆ isFull()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
bool op::QueueBase< TDatums, TQueue >::isFull
+
+ +

Definition at line 389 of file queueBase.hpp.

+ +
+
+ +

◆ isRunning()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
bool op::QueueBase< TDatums, TQueue >::isRunning
+
+ +

Definition at line 374 of file queueBase.hpp.

+ +
+
+ +

◆ pop()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + + + + + +
virtual bool op::QueueBase< TDatums, TQueue >::pop (TDatums & tDatums)
+
+protectedpure virtual
+
+ +
+
+ +

◆ size()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
size_t op::QueueBase< TDatums, TQueue >::size
+
+ +

Definition at line 404 of file queueBase.hpp.

+ +
+
+ +

◆ stop()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
void op::QueueBase< TDatums, TQueue >::stop
+
+ +

Definition at line 301 of file queueBase.hpp.

+ +
+
+ +

◆ stopPusher()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
void op::QueueBase< TDatums, TQueue >::stopPusher
+
+ +

Definition at line 320 of file queueBase.hpp.

+ +
+
+ +

◆ tryEmplace()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::tryEmplace (TDatums & tDatums)
+
+ +

Definition at line 141 of file queueBase.hpp.

+ +
+
+ +

◆ tryPop() [1/2]

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
bool op::QueueBase< TDatums, TQueue >::tryPop
+
+ +

Definition at line 239 of file queueBase.hpp.

+ +
+
+ +

◆ tryPop() [2/2]

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::tryPop (TDatums & tDatums)
+
+ +

Definition at line 224 of file queueBase.hpp.

+ +
+
+ +

◆ tryPush()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::tryPush (const TDatums & tDatums)
+
+ +

Definition at line 191 of file queueBase.hpp.

+ +
+
+ +

◆ waitAndEmplace()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::waitAndEmplace (TDatums & tDatums)
+
+ +

Definition at line 158 of file queueBase.hpp.

+ +
+
+ +

◆ waitAndPop() [1/2]

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + +
bool op::QueueBase< TDatums, TQueue >::waitAndPop
+
+ +

Definition at line 270 of file queueBase.hpp.

+ +
+
+ +

◆ waitAndPop() [2/2]

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::waitAndPop (TDatums & tDatums)
+
+ +

Definition at line 254 of file queueBase.hpp.

+ +
+
+ +

◆ waitAndPush()

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + + + + +
bool op::QueueBase< TDatums, TQueue >::waitAndPush (const TDatums & tDatums)
+
+ +

Definition at line 208 of file queueBase.hpp.

+ +
+
+

Member Data Documentation

+ +

◆ mConditionVariable

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
std::condition_variable op::QueueBase< TDatums, TQueue >::mConditionVariable
+
+protected
+
+ +

Definition at line 66 of file queueBase.hpp.

+ +
+
+ +

◆ mMaxPoppersPushers

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
long long op::QueueBase< TDatums, TQueue >::mMaxPoppersPushers
+
+protected
+
+ +

Definition at line 63 of file queueBase.hpp.

+ +
+
+ +

◆ mMutex

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
std::mutex op::QueueBase< TDatums, TQueue >::mMutex
+
+mutableprotected
+
+ +

Definition at line 60 of file queueBase.hpp.

+ +
+
+ +

◆ mPopIsStopped

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
bool op::QueueBase< TDatums, TQueue >::mPopIsStopped
+
+protected
+
+ +

Definition at line 64 of file queueBase.hpp.

+ +
+
+ +

◆ mPoppers

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
long long op::QueueBase< TDatums, TQueue >::mPoppers
+
+protected
+
+ +

Definition at line 61 of file queueBase.hpp.

+ +
+
+ +

◆ mPushers

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
long long op::QueueBase< TDatums, TQueue >::mPushers
+
+protected
+
+ +

Definition at line 62 of file queueBase.hpp.

+ +
+
+ +

◆ mPushIsStopped

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
bool op::QueueBase< TDatums, TQueue >::mPushIsStopped
+
+protected
+
+ +

Definition at line 65 of file queueBase.hpp.

+ +
+
+ +

◆ mTQueue

+ +
+
+
+template<typename TDatums , typename TQueue >
+ + + + + +
+ + + + +
TQueue op::QueueBase< TDatums, TQueue >::mTQueue
+
+protected
+
+ +

Definition at line 67 of file queueBase.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_queue_base.js b/web/html/doc/classop_1_1_queue_base.js new file mode 100644 index 000000000..9059ffed0 --- /dev/null +++ b/web/html/doc/classop_1_1_queue_base.js @@ -0,0 +1,35 @@ +var classop_1_1_queue_base = +[ + [ "QueueBase", "classop_1_1_queue_base.html#aea7941746e2403a09356b9c6a208784c", null ], + [ "~QueueBase", "classop_1_1_queue_base.html#aef098201d9084083adba5ceeb45b12fa", null ], + [ "addPopper", "classop_1_1_queue_base.html#adc5df8a039d360831db06e3c610bf015", null ], + [ "addPusher", "classop_1_1_queue_base.html#a1ccdec39ea65a83edc54661acc283134", null ], + [ "clear", "classop_1_1_queue_base.html#a247f435c95709f3246d352eee4f757af", null ], + [ "empty", "classop_1_1_queue_base.html#a74d9b247804a226cf9a0758b25bd3ba9", null ], + [ "forceEmplace", "classop_1_1_queue_base.html#a8d218f599b84194909691c72ee0de8d0", null ], + [ "forcePush", "classop_1_1_queue_base.html#ad124d414b7c2680e5312ee163d18410f", null ], + [ "front", "classop_1_1_queue_base.html#aad7a6a666dcf70834d9d18ae6d92cb2c", null ], + [ "getMaxSize", "classop_1_1_queue_base.html#a7b3f810bb6e729be3afe3313c4b2f31b", null ], + [ "isFull", "classop_1_1_queue_base.html#a17a52df2e912a346c412418c62268425", null ], + [ "isRunning", "classop_1_1_queue_base.html#a9f529f94ff3b98e3ac11d796caa31239", null ], + [ "pop", "classop_1_1_queue_base.html#a5b28915cc58e040aca673bdfdf7c8be3", null ], + [ "size", "classop_1_1_queue_base.html#a8fd69ac0ffcda02d0d26102e2ebd2841", null ], + [ "stop", "classop_1_1_queue_base.html#a68b51dafaba93179fcef78731aaf1703", null ], + [ "stopPusher", "classop_1_1_queue_base.html#a32ac0e4b14a310aee62ce817e86c0356", null ], + [ "tryEmplace", "classop_1_1_queue_base.html#a7905841f953be7099847cc7b5b17ae0c", null ], + [ "tryPop", "classop_1_1_queue_base.html#a5e52b4ab7e310373e3d1f1d42cfe4549", null ], + [ "tryPop", "classop_1_1_queue_base.html#a80c6e2dda17afa82aae83aeadad1f7e0", null ], + [ "tryPush", "classop_1_1_queue_base.html#a35f0547f6020f22e49835b147b7ec52e", null ], + [ "waitAndEmplace", "classop_1_1_queue_base.html#a8a5d53c7b66fd0ef34b3e276f586e355", null ], + [ "waitAndPop", "classop_1_1_queue_base.html#a84da9e045acec02e3900153eea2bd92d", null ], + [ "waitAndPop", "classop_1_1_queue_base.html#a2c7b3d0fa6502c644c3083dd68332542", null ], + [ "waitAndPush", "classop_1_1_queue_base.html#ab28c5805dd23117c8d6d82d59617bb95", null ], + [ "mConditionVariable", "classop_1_1_queue_base.html#a8b5e59161a0b175d12955b552a90a47f", null ], + [ "mMaxPoppersPushers", "classop_1_1_queue_base.html#a1d55f40e032cd5d43d63ba02040b3117", null ], + [ "mMutex", "classop_1_1_queue_base.html#a22c5e2964e9d9c18a9f02b8d2e0f30b4", null ], + [ "mPopIsStopped", "classop_1_1_queue_base.html#a77bf3592bbb6ac586cd4c2b0aea98e62", null ], + [ "mPoppers", "classop_1_1_queue_base.html#a04f7160c199f90b8f8e91ddfd40e92fb", null ], + [ "mPushers", "classop_1_1_queue_base.html#a7c382bb98f5b769cde37b06d67cb0530", null ], + [ "mPushIsStopped", "classop_1_1_queue_base.html#af2c0f21c6b4f4639661b59aa247ae407", null ], + [ "mTQueue", "classop_1_1_queue_base.html#a49c1d6740f2ce7f26eae606f109b5738", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_renderer-members.html b/web/html/doc/classop_1_1_renderer-members.html new file mode 100644 index 000000000..c51007137 --- /dev/null +++ b/web/html/doc/classop_1_1_renderer-members.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::Renderer Member List

This is the complete list of members for op::Renderer, including all inherited members.

+ + + + + + + + + + + + + + + + + + + +
getAlphaHeatMap() constop::Renderer
getAlphaKeypoint() constop::Renderer
getBlendOriginalFrame() constop::Renderer
getShowGooglyEyes() constop::Renderer
increaseElementToRender(const int increment)op::Renderer
mBlendOriginalFrameop::Rendererprotected
mRenderThresholdop::Rendererprotected
mShowGooglyEyesop::Rendererprotected
Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)op::Rendererexplicit
setAlphaHeatMap(const float alphaHeatMap)op::Renderer
setAlphaKeypoint(const float alphaKeypoint)op::Renderer
setBlendOriginalFrame(const bool blendOriginalFrame)op::Renderer
setElementToRender(const int elementToRender)op::Renderer
setElementToRender(const ElementToRender elementToRender)op::Renderer
setShowGooglyEyes(const bool showGooglyEyes)op::Renderer
spElementToRenderop::Rendererprotected
spNumberElementsToRenderop::Rendererprotected
~Renderer()op::Renderervirtual
+
diff --git a/web/html/doc/classop_1_1_renderer.html b/web/html/doc/classop_1_1_renderer.html
new file mode 100644
index 000000000..85954ba34
--- /dev/null
+++ b/web/html/doc/classop_1_1_renderer.html
@@ -0,0 +1,573 @@
OpenPose: op::Renderer Class Reference
op::Renderer Class Reference

#include <renderer.hpp>

Inheritance diagram for op::Renderer: derived classes include op::FaceCpuRenderer, op::GpuRenderer, op::HandCpuRenderer, op::PoseCpuRenderer, op::FaceGpuRenderer, op::HandGpuRenderer, and op::PoseGpuRenderer.
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 Renderer (const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
 
virtual ~Renderer ()
 
void increaseElementToRender (const int increment)
 
void setElementToRender (const int elementToRender)
 
void setElementToRender (const ElementToRender elementToRender)
 
bool getBlendOriginalFrame () const
 
void setBlendOriginalFrame (const bool blendOriginalFrame)
 
float getAlphaKeypoint () const
 
void setAlphaKeypoint (const float alphaKeypoint)
 
float getAlphaHeatMap () const
 
void setAlphaHeatMap (const float alphaHeatMap)
 
bool getShowGooglyEyes () const
 
void setShowGooglyEyes (const bool showGooglyEyes)
 
+ + + + + + + + + + + +

+Protected Attributes

const float mRenderThreshold
 
std::atomic< bool > mBlendOriginalFrame
 
std::shared_ptr< std::atomic< unsigned int > > spElementToRender
 
std::shared_ptr< const unsigned int > spNumberElementsToRender
 
std::atomic< bool > mShowGooglyEyes
 
+

Detailed Description

+
+

Definition at line 10 of file renderer.hpp.
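As a small illustration of the getters and setters documented above (the numeric values are arbitrary and the helper function itself is not part of OpenPose):

#include <openpose/headers.hpp>

// Adjust rendering options on any op::Renderer (in practice usually one of the
// derived classes listed in the inheritance diagram, e.g. op::PoseCpuRenderer).
void configureRenderer(op::Renderer& renderer)
{
    renderer.setBlendOriginalFrame(true);   // draw results on top of the original frame
    renderer.setAlphaKeypoint(0.6f);        // keypoint opacity (arbitrary value)
    renderer.setAlphaHeatMap(0.7f);         // heat map opacity (arbitrary value)
    renderer.setShowGooglyEyes(false);
    renderer.increaseElementToRender(1);    // move to the next renderable element

    // Read the current state back.
    const auto blending = renderer.getBlendOriginalFrame();
    const auto alphaKeypoint = renderer.getAlphaKeypoint();
    const auto alphaHeatMap = renderer.getAlphaHeatMap();
    (void)blending; (void)alphaKeypoint; (void)alphaHeatMap;
}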

+

Constructor & Destructor Documentation

+ +

◆ Renderer()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::Renderer::Renderer (const float renderThreshold,
const float alphaKeypoint,
const float alphaHeatMap,
const bool blendOriginalFrame = true,
const unsigned int elementToRender = 0u,
const unsigned int numberElementsToRender = 0u 
)
+
+explicit
+
+ +
+
+ +

◆ ~Renderer()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::Renderer::~Renderer ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ getAlphaHeatMap()

+ +
+
+ + + + + + + +
float op::Renderer::getAlphaHeatMap () const
+
+ +
+
+ +

◆ getAlphaKeypoint()

+ +
+
+ + + + + + + +
float op::Renderer::getAlphaKeypoint () const
+
+ +
+
+ +

◆ getBlendOriginalFrame()

+ +
+
+ + + + + + + +
bool op::Renderer::getBlendOriginalFrame () const
+
+ +
+
+ +

◆ getShowGooglyEyes()

+ +
+
+ + + + + + + +
bool op::Renderer::getShowGooglyEyes () const
+
+ +
+
+ +

◆ increaseElementToRender()

+ +
+
+ + + + + + + + +
void op::Renderer::increaseElementToRender (const int increment)
+
+ +
+
+ +

◆ setAlphaHeatMap()

+ +
+
+ + + + + + + + +
void op::Renderer::setAlphaHeatMap (const float alphaHeatMap)
+
+ +
+
+ +

◆ setAlphaKeypoint()

+ +
+
+ + + + + + + + +
void op::Renderer::setAlphaKeypoint (const float alphaKeypoint)
+
+ +
+
+ +

◆ setBlendOriginalFrame()

+ +
+
+ + + + + + + + +
void op::Renderer::setBlendOriginalFrame (const bool blendOriginalFrame)
+
+ +
+
+ +

◆ setElementToRender() [1/2]

+ +
+
+ + + + + + + + +
void op::Renderer::setElementToRender (const ElementToRender elementToRender)
+
+ +
+
+ +

◆ setElementToRender() [2/2]

+ +
+
+ + + + + + + + +
void op::Renderer::setElementToRender (const int elementToRender)
+
+ +
+
+ +

◆ setShowGooglyEyes()

+ +
+
+ + + + + + + + +
void op::Renderer::setShowGooglyEyes (const bool showGooglyEyes)
+
+ +
+
+

Member Data Documentation

+ +

◆ mBlendOriginalFrame

+ +
+
+ + + + + +
+ + + + +
std::atomic<bool> op::Renderer::mBlendOriginalFrame
+
+protected
+
+ +

Definition at line 43 of file renderer.hpp.

+ +
+
+ +

◆ mRenderThreshold

+ +
+
+ + + + + +
+ + + + +
const float op::Renderer::mRenderThreshold
+
+protected
+
+ +

Definition at line 42 of file renderer.hpp.

+ +
+
+ +

◆ mShowGooglyEyes

+ +
+
+ + + + + +
+ + + + +
std::atomic<bool> op::Renderer::mShowGooglyEyes
+
+protected
+
+ +

Definition at line 46 of file renderer.hpp.

+ +
+
+ +

◆ spElementToRender

+ +
+
+ + + + + +
+ + + + +
std::shared_ptr<std::atomic<unsigned int> > op::Renderer::spElementToRender
+
+protected
+
+ +

Definition at line 44 of file renderer.hpp.

+ +
+
+ +

◆ spNumberElementsToRender

+ +
+
+ + + + + +
+ + + + +
std::shared_ptr<const unsigned int> op::Renderer::spNumberElementsToRender
+
+protected
+
+ +

Definition at line 45 of file renderer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_renderer.js b/web/html/doc/classop_1_1_renderer.js new file mode 100644 index 000000000..228b52f82 --- /dev/null +++ b/web/html/doc/classop_1_1_renderer.js @@ -0,0 +1,21 @@ +var classop_1_1_renderer = +[ + [ "Renderer", "classop_1_1_renderer.html#a00caf604fad781dfcf3bc311ef6a6623", null ], + [ "~Renderer", "classop_1_1_renderer.html#abd45555a9864e799309b72902b6cec30", null ], + [ "getAlphaHeatMap", "classop_1_1_renderer.html#ab776e07b5b2f3a3b0aca0ce95d67796b", null ], + [ "getAlphaKeypoint", "classop_1_1_renderer.html#a3cf2d07dc9df42db4648398367c72dbb", null ], + [ "getBlendOriginalFrame", "classop_1_1_renderer.html#ad2ac64e018f2b925d0c8d45883928b68", null ], + [ "getShowGooglyEyes", "classop_1_1_renderer.html#a44e13a965a9b0fca119ea897ad1348e0", null ], + [ "increaseElementToRender", "classop_1_1_renderer.html#a298a5a58bab80b7252db7d3386a0ed8a", null ], + [ "setAlphaHeatMap", "classop_1_1_renderer.html#abaea1725725ff775aed0c120b2ba3d1f", null ], + [ "setAlphaKeypoint", "classop_1_1_renderer.html#a039e88897ed844551cadb115ea98e9ef", null ], + [ "setBlendOriginalFrame", "classop_1_1_renderer.html#aa8339054ed113d99ca70208d0cee5aa9", null ], + [ "setElementToRender", "classop_1_1_renderer.html#a9d46c28d88225af94468c757ab1b26c1", null ], + [ "setElementToRender", "classop_1_1_renderer.html#afd48a9cb0be184303dce2969fa2f8e02", null ], + [ "setShowGooglyEyes", "classop_1_1_renderer.html#ab226d47f554735fa3e0372ce429747c3", null ], + [ "mBlendOriginalFrame", "classop_1_1_renderer.html#a88449a7c29a48e157cd6b16089825be7", null ], + [ "mRenderThreshold", "classop_1_1_renderer.html#adc4cd0a62008325c5c7df6df2f95a167", null ], + [ "mShowGooglyEyes", "classop_1_1_renderer.html#ace2490fa3c5a87443e4d1e64007cd1ff", null ], + [ "spElementToRender", "classop_1_1_renderer.html#aca8ebf0c0a50b87f0be82afa090155a0", null ], + [ "spNumberElementsToRender", "classop_1_1_renderer.html#a3e04644546dd9990a16d0b6861b60553", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_renderer.png b/web/html/doc/classop_1_1_renderer.png new file mode 100644 index 000000000..7d6b1feb0 Binary files /dev/null and b/web/html/doc/classop_1_1_renderer.png differ diff --git a/web/html/doc/classop_1_1_resize_and_merge_caffe-members.html b/web/html/doc/classop_1_1_resize_and_merge_caffe-members.html new file mode 100644 index 000000000..91139d38c --- /dev/null +++ b/web/html/doc/classop_1_1_resize_and_merge_caffe-members.html @@ -0,0 +1,115 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::ResizeAndMergeCaffe< T > Member List

This is the complete list of members for op::ResizeAndMergeCaffe< T >, including all inherited members.

+ + + + + + + + + + + + + +
Backward_cpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)op::ResizeAndMergeCaffe< T >virtual
Backward_gpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)op::ResizeAndMergeCaffe< T >virtual
Forward(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)op::ResizeAndMergeCaffe< T >virtual
Forward_cpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)op::ResizeAndMergeCaffe< T >virtual
Forward_gpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)op::ResizeAndMergeCaffe< T >virtual
Forward_ocl(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)op::ResizeAndMergeCaffe< T >virtual
LayerSetUp(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)op::ResizeAndMergeCaffe< T >virtual
Reshape(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top, const T netFactor, const T scaleFactor, const bool mergeFirstDimension=true, const int gpuID=0)op::ResizeAndMergeCaffe< T >virtual
ResizeAndMergeCaffe()op::ResizeAndMergeCaffe< T >explicit
setScaleRatios(const std::vector< T > &scaleRatios)op::ResizeAndMergeCaffe< T >
type() constop::ResizeAndMergeCaffe< T >inlinevirtual
~ResizeAndMergeCaffe()op::ResizeAndMergeCaffe< T >virtual
+
diff --git a/web/html/doc/classop_1_1_resize_and_merge_caffe.html b/web/html/doc/classop_1_1_resize_and_merge_caffe.html
new file mode 100644
index 000000000..8d2538c7e
--- /dev/null
+++ b/web/html/doc/classop_1_1_resize_and_merge_caffe.html
@@ -0,0 +1,588 @@
OpenPose: op::ResizeAndMergeCaffe< T > Class Template Reference
op::ResizeAndMergeCaffe< T > Class Template Reference

#include <resizeAndMergeCaffe.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 ResizeAndMergeCaffe ()
 
virtual ~ResizeAndMergeCaffe ()
 
virtual void LayerSetUp (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Reshape (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top, const T netFactor, const T scaleFactor, const bool mergeFirstDimension=true, const int gpuID=0)
 
virtual const char * type () const
 
void setScaleRatios (const std::vector< T > &scaleRatios)
 
virtual void Forward (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_cpu (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_gpu (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Forward_ocl (const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
 
virtual void Backward_cpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
virtual void Backward_gpu (const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
 
+

Detailed Description

+

template<typename T>
+class op::ResizeAndMergeCaffe< T >

+ + +

Definition at line 12 of file resizeAndMergeCaffe.hpp.
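The page documents a Caffe-layer-style interface but no usage. The sketch below assumes the caller already owns the bottom/top blobs (op::ArrayCpuGpu<float>*); the netFactor/scaleFactor values and the call order are illustrative guesses, not a verified recipe.

#include <openpose/headers.hpp>
#include <vector>

// Sketch only: run the resize-and-merge layer on blobs allocated elsewhere.
void resizeAndMergeSketch(const std::vector<op::ArrayCpuGpu<float>*>& bottom,
                          const std::vector<op::ArrayCpuGpu<float>*>& top,
                          const std::vector<float>& scaleRatios)
{
    op::ResizeAndMergeCaffe<float> resizeAndMergeCaffe;
    resizeAndMergeCaffe.LayerSetUp(bottom, top);
    resizeAndMergeCaffe.Reshape(bottom, top,
                                /*netFactor*/ 8.f, /*scaleFactor*/ 4.f,
                                /*mergeFirstDimension*/ true, /*gpuID*/ 0);
    resizeAndMergeCaffe.setScaleRatios(scaleRatios);
    resizeAndMergeCaffe.Forward(bottom, top);   // CPU/GPU/OpenCL-specific variants also exist
}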

+

Constructor & Destructor Documentation

+ +

◆ ResizeAndMergeCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
op::ResizeAndMergeCaffe< T >::ResizeAndMergeCaffe ()
+
+explicit
+
+ +
+
+ +

◆ ~ResizeAndMergeCaffe()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual op::ResizeAndMergeCaffe< T >::~ResizeAndMergeCaffe ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ Backward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::Backward_cpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Backward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::Backward_gpu (const std::vector< ArrayCpuGpu< T > * > & top,
const std::vector< bool > & propagate_down,
const std::vector< ArrayCpuGpu< T > * > & bottom 
)
+
+virtual
+
+ +
+
+ +

◆ Forward()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::Forward (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_cpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::Forward_cpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_gpu()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::Forward_gpu (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Forward_ocl()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::Forward_ocl (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ LayerSetUp()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::LayerSetUp (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top 
)
+
+virtual
+
+ +
+
+ +

◆ Reshape()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
virtual void op::ResizeAndMergeCaffe< T >::Reshape (const std::vector< ArrayCpuGpu< T > * > & bottom,
const std::vector< ArrayCpuGpu< T > * > & top,
const T netFactor,
const T scaleFactor,
const bool mergeFirstDimension = true,
const int gpuID = 0 
)
+
+virtual
+
+ +
+
+ +

◆ setScaleRatios()

+ +
+
+
+template<typename T >
+ + + + + + + + +
void op::ResizeAndMergeCaffe< T >::setScaleRatios (const std::vector< T > & scaleRatios)
+
+ +
+
+ +

◆ type()

+ +
+
+
+template<typename T >
+ + + + + +
+ + + + + + + +
virtual const char* op::ResizeAndMergeCaffe< T >::type () const
+
+inlinevirtual
+
+ +

Definition at line 25 of file resizeAndMergeCaffe.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_resize_and_merge_caffe.js b/web/html/doc/classop_1_1_resize_and_merge_caffe.js new file mode 100644 index 000000000..e1e3bccc9 --- /dev/null +++ b/web/html/doc/classop_1_1_resize_and_merge_caffe.js @@ -0,0 +1,15 @@ +var classop_1_1_resize_and_merge_caffe = +[ + [ "ResizeAndMergeCaffe", "classop_1_1_resize_and_merge_caffe.html#a30805a265fa887eff04b1200dbda91a5", null ], + [ "~ResizeAndMergeCaffe", "classop_1_1_resize_and_merge_caffe.html#a5dc1aa7c462bd8df8b6a8377418e19d4", null ], + [ "Backward_cpu", "classop_1_1_resize_and_merge_caffe.html#a4836b2f08273896f58c2d63a15c871e8", null ], + [ "Backward_gpu", "classop_1_1_resize_and_merge_caffe.html#acfa7742f943fd741acf0bf383c572655", null ], + [ "Forward", "classop_1_1_resize_and_merge_caffe.html#a2f3f7903827e3abc3dab35ebdad002a6", null ], + [ "Forward_cpu", "classop_1_1_resize_and_merge_caffe.html#a65e81f3ac60a58a29f302d818d5b0c8f", null ], + [ "Forward_gpu", "classop_1_1_resize_and_merge_caffe.html#a13d984c2ec4b5440a694b9a2dfa64521", null ], + [ "Forward_ocl", "classop_1_1_resize_and_merge_caffe.html#aba74db20a0aca30b797f590548de4272", null ], + [ "LayerSetUp", "classop_1_1_resize_and_merge_caffe.html#ad7441a1f8db85f6239830603fb7a6325", null ], + [ "Reshape", "classop_1_1_resize_and_merge_caffe.html#abd4c8a363c569fbb4187cd928c481334", null ], + [ "setScaleRatios", "classop_1_1_resize_and_merge_caffe.html#aa16862bbc207fef227d53d37223512b8", null ], + [ "type", "classop_1_1_resize_and_merge_caffe.html#a90e4af20eee1bfaf152937199f3ad068", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_scale_and_size_extractor-members.html b/web/html/doc/classop_1_1_scale_and_size_extractor-members.html new file mode 100644 index 000000000..8e9785922 --- /dev/null +++ b/web/html/doc/classop_1_1_scale_and_size_extractor-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::ScaleAndSizeExtractor Member List

This is the complete list of members for op::ScaleAndSizeExtractor, including all inherited members.

+ + + + +
extract(const Point< int > &inputResolution) constop::ScaleAndSizeExtractor
ScaleAndSizeExtractor(const Point< int > &netInputResolution, const float netInputResolutionDynamicBehavior, const Point< int > &outputResolution, const int scaleNumber=1, const double scaleGap=0.25)op::ScaleAndSizeExtractor
~ScaleAndSizeExtractor()op::ScaleAndSizeExtractorvirtual
+
diff --git a/web/html/doc/classop_1_1_scale_and_size_extractor.html b/web/html/doc/classop_1_1_scale_and_size_extractor.html
new file mode 100644
index 000000000..1ba11cbfa
--- /dev/null
+++ b/web/html/doc/classop_1_1_scale_and_size_extractor.html
@@ -0,0 +1,213 @@
OpenPose: op::ScaleAndSizeExtractor Class Reference
op::ScaleAndSizeExtractor Class Reference

#include <scaleAndSizeExtractor.hpp>

+ + + + + + + + +

+Public Member Functions

 ScaleAndSizeExtractor (const Point< int > &netInputResolution, const float netInputResolutionDynamicBehavior, const Point< int > &outputResolution, const int scaleNumber=1, const double scaleGap=0.25)
 
virtual ~ScaleAndSizeExtractor ()
 
std::tuple< std::vector< double >, std::vector< Point< int > >, double, Point< int > > extract (const Point< int > &inputResolution) const
 
+

Detailed Description

+
+

Definition at line 9 of file scaleAndSizeExtractor.hpp.
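A hedged sketch of the extract() call documented above. The resolutions and scale settings are arbitrary example values, and the output variable names are descriptive guesses for the four tuple elements.

#include <openpose/headers.hpp>
#include <tuple>
#include <vector>

int main()
{
    // Example values only.
    const op::Point<int> netInputResolution{656, 368};
    const op::Point<int> outputResolution{1280, 720};
    const op::ScaleAndSizeExtractor scaleAndSizeExtractor{
        netInputResolution, /*netInputResolutionDynamicBehavior*/ 1.f,
        outputResolution, /*scaleNumber*/ 3, /*scaleGap*/ 0.25};

    // For a given input image resolution, obtain the scales and sizes to feed the network.
    const op::Point<int> inputResolution{1920, 1080};
    std::vector<double> scaleInputToNetInputs;
    std::vector<op::Point<int>> netInputSizes;
    double scaleInputToOutput;
    op::Point<int> outputResolutionUsed;
    std::tie(scaleInputToNetInputs, netInputSizes, scaleInputToOutput, outputResolutionUsed)
        = scaleAndSizeExtractor.extract(inputResolution);
    return 0;
}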

+

Constructor & Destructor Documentation

+ +

◆ ScaleAndSizeExtractor()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::ScaleAndSizeExtractor::ScaleAndSizeExtractor (const Point< int > & netInputResolution,
const float netInputResolutionDynamicBehavior,
const Point< int > & outputResolution,
const int scaleNumber = 1,
const double scaleGap = 0.25 
)
+
+ +
+
+ +

◆ ~ScaleAndSizeExtractor()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::ScaleAndSizeExtractor::~ScaleAndSizeExtractor ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ extract()

+ +
+
+ + + + + + + + +
std::tuple<std::vector<double>, std::vector<Point<int> >, double, Point<int> > op::ScaleAndSizeExtractor::extract (const Point< int > & inputResolution) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_scale_and_size_extractor.js b/web/html/doc/classop_1_1_scale_and_size_extractor.js new file mode 100644 index 000000000..e3b84a045 --- /dev/null +++ b/web/html/doc/classop_1_1_scale_and_size_extractor.js @@ -0,0 +1,6 @@ +var classop_1_1_scale_and_size_extractor = +[ + [ "ScaleAndSizeExtractor", "classop_1_1_scale_and_size_extractor.html#a4618beea6f87df0c4eac6c6a204bd269", null ], + [ "~ScaleAndSizeExtractor", "classop_1_1_scale_and_size_extractor.html#a90bc64fe3c8ee45cfe5f3bd73a8bb3c9", null ], + [ "extract", "classop_1_1_scale_and_size_extractor.html#aa05b7698ff8417072787009c85a14421", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_spinnaker_wrapper-members.html b/web/html/doc/classop_1_1_spinnaker_wrapper-members.html new file mode 100644 index 000000000..17b179256 --- /dev/null +++ b/web/html/doc/classop_1_1_spinnaker_wrapper-members.html @@ -0,0 +1,112 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::SpinnakerWrapper Member List

This is the complete list of members for op::SpinnakerWrapper, including all inherited members.

+ + + + + + + + + + +
getCameraExtrinsics() constop::SpinnakerWrapper
getCameraIntrinsics() constop::SpinnakerWrapper
getCameraMatrices() constop::SpinnakerWrapper
getRawFrames()op::SpinnakerWrapper
getResolution() constop::SpinnakerWrapper
isOpened() constop::SpinnakerWrapper
release()op::SpinnakerWrapper
SpinnakerWrapper(const std::string &cameraParameterPath, const Point< int > &cameraResolution, const bool undistortImage, const int cameraIndex=-1)op::SpinnakerWrapperexplicit
~SpinnakerWrapper()op::SpinnakerWrappervirtual
+
diff --git a/web/html/doc/classop_1_1_spinnaker_wrapper.html b/web/html/doc/classop_1_1_spinnaker_wrapper.html
new file mode 100644
index 000000000..d0ab582c9
--- /dev/null
+++ b/web/html/doc/classop_1_1_spinnaker_wrapper.html
@@ -0,0 +1,331 @@
OpenPose: op::SpinnakerWrapper Class Reference
op::SpinnakerWrapper Class Reference

#include <spinnakerWrapper.hpp>

+ + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 SpinnakerWrapper (const std::string &cameraParameterPath, const Point< int > &cameraResolution, const bool undistortImage, const int cameraIndex=-1)
 
virtual ~SpinnakerWrapper ()
 
std::vector< MatrixgetRawFrames ()
 
std::vector< MatrixgetCameraMatrices () const
 
std::vector< MatrixgetCameraExtrinsics () const
 
std::vector< MatrixgetCameraIntrinsics () const
 
Point< int > getResolution () const
 
bool isOpened () const
 
void release ()
 
+

Detailed Description

+

SpinnakerWrapper is a wrapper around the FLIR Spinnaker SDK. It decouples the final interface (meant to imitate cv::VideoCapture) from the Spinnaker SDK itself.

+ +

Definition at line 12 of file spinnakerWrapper.hpp.
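A hedged usage sketch based on the methods above. The camera parameter path and resolution are placeholders, and this class is only meaningful in builds with FLIR/Spinnaker support enabled.

#include <openpose/headers.hpp>
#include <vector>

int main()
{
    // Placeholder path/resolution; cameraIndex = -1 opens all detected FLIR cameras.
    op::SpinnakerWrapper spinnakerWrapper{
        "models/cameraParameters/flir/", op::Point<int>{1280, 1024},
        /*undistortImage*/ true, /*cameraIndex*/ -1};

    if (spinnakerWrapper.isOpened())
    {
        // One op::Matrix per connected camera.
        const auto rawFrames = spinnakerWrapper.getRawFrames();
        const auto cameraMatrices = spinnakerWrapper.getCameraMatrices();
        const auto resolution = spinnakerWrapper.getResolution();
        // ... process rawFrames ...
        (void)rawFrames; (void)cameraMatrices; (void)resolution;
    }

    spinnakerWrapper.release();
    return 0;
}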

+

Constructor & Destructor Documentation

+ +

◆ SpinnakerWrapper()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::SpinnakerWrapper::SpinnakerWrapper (const std::string & cameraParameterPath,
const Point< int > & cameraResolution,
const bool undistortImage,
const int cameraIndex = -1 
)
+
+explicit
+
+

Constructor of SpinnakerWrapper. It opens the available FLIR cameras; cameraIndex = -1 means that all of them are used.

+ +
+
+ +

◆ ~SpinnakerWrapper()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::SpinnakerWrapper::~SpinnakerWrapper ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ getCameraExtrinsics()

+ +
+
+ + + + + + + +
std::vector<Matrix> op::SpinnakerWrapper::getCameraExtrinsics () const
+
+ +
+
+ +

◆ getCameraIntrinsics()

+ +
+
+ + + + + + + +
std::vector<Matrix> op::SpinnakerWrapper::getCameraIntrinsics () const
+
+ +
+
+ +

◆ getCameraMatrices()

+ +
+
+ + + + + + + +
std::vector<Matrix> op::SpinnakerWrapper::getCameraMatrices () const
+
+

Note: the camera parameters are only read if undistortImage is true. Ideally, this should instead be controlled by a dedicated bool flag in the constructor, e.g., readCameraParameters.

+ +
+
+ +

◆ getRawFrames()

+ +
+
+ + + + + + + +
std::vector<Matrix> op::SpinnakerWrapper::getRawFrames ()
+
+ +
+
+ +

◆ getResolution()

+ +
+
+ + + + + + + +
Point<int> op::SpinnakerWrapper::getResolution () const
+
+ +
+
+ +

◆ isOpened()

+ +
+
+ + + + + + + +
bool op::SpinnakerWrapper::isOpened () const
+
+ +
+
+ +

◆ release()

+ +
+
+ + + + + + + +
void op::SpinnakerWrapper::release ()
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_spinnaker_wrapper.js b/web/html/doc/classop_1_1_spinnaker_wrapper.js new file mode 100644 index 000000000..659ecc6d5 --- /dev/null +++ b/web/html/doc/classop_1_1_spinnaker_wrapper.js @@ -0,0 +1,12 @@ +var classop_1_1_spinnaker_wrapper = +[ + [ "SpinnakerWrapper", "classop_1_1_spinnaker_wrapper.html#a5d1ba90b4d1987423b330de2fdcdb702", null ], + [ "~SpinnakerWrapper", "classop_1_1_spinnaker_wrapper.html#a8ae3e45fba6f9d0943cbd9038e98b066", null ], + [ "getCameraExtrinsics", "classop_1_1_spinnaker_wrapper.html#a427bf92ca3fc9011b01c57833b078154", null ], + [ "getCameraIntrinsics", "classop_1_1_spinnaker_wrapper.html#aaf441c78eeb921886a09412d8af9ddbc", null ], + [ "getCameraMatrices", "classop_1_1_spinnaker_wrapper.html#a76849430ae48ba14cbdd0b68bca133fb", null ], + [ "getRawFrames", "classop_1_1_spinnaker_wrapper.html#a2135a9d3c9dbab4c1e0ee6be6c31b93a", null ], + [ "getResolution", "classop_1_1_spinnaker_wrapper.html#aad97f57040a953cbce0f20c6b3303202", null ], + [ "isOpened", "classop_1_1_spinnaker_wrapper.html#a51e869f56a6517bd55783ea039066d7d", null ], + [ "release", "classop_1_1_spinnaker_wrapper.html#a6e66639ee75708486b3d9aa4598607c0", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_string-members.html b/web/html/doc/classop_1_1_string-members.html new file mode 100644 index 000000000..b3ab7f6ba --- /dev/null +++ b/web/html/doc/classop_1_1_string-members.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::String Member List

This is the complete list of members for op::String, including all inherited members.

+ + + + + + +
empty() constop::String
getStdString() constop::String
String()op::String
String(const char *charPtr)op::String
String(const std::string &string)op::Stringexplicit
+
diff --git a/web/html/doc/classop_1_1_string.html b/web/html/doc/classop_1_1_string.html
new file mode 100644
index 000000000..f36315cd6
--- /dev/null
+++ b/web/html/doc/classop_1_1_string.html
@@ -0,0 +1,226 @@
OpenPose: op::String Class Reference
op::String Class Reference

#include <string.hpp>

+ + + + + + + + + + + + +

+Public Member Functions

 String ()
 
 String (const char *charPtr)
 
 String (const std::string &string)
 
const std::string & getStdString () const
 
bool empty () const
 
+

Detailed Description

+

String: basic container for std::string, used to avoid exposing std::string directly in the WrapperStructXXX classes. Otherwise, cryptic runtime DLL errors could occur when OpenPose is exported to other projects that use a different STL DLL.

+ +

Definition at line 14 of file string.hpp.
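A small sketch of the pattern recommended by the constructor documentation below (passing the raw char* so that the characters are copied):

#include <openpose/headers.hpp>
#include <string>

int main()
{
    const std::string stdString = "This is a std::string";
    // Copy the characters across the module boundary via the char* constructor.
    const op::String opString(stdString.c_str());

    const std::string& back = opString.getStdString();
    const bool isEmpty = opString.empty();   // false here
    (void)back; (void)isEmpty;
    return 0;
}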

+

Constructor & Destructor Documentation

+ +

◆ String() [1/3]

+ +
+
+ + + + + + + +
op::String::String ()
+
+ +
+
+ +

◆ String() [2/3]

+ +
+
+ + + + + + + + +
op::String::String (const char * charPtr)
+
+

It forces a copy of the characters pointed to by charPtr in order to avoid DLL runtime errors. Example usage: std::string stdString = "This is a std::string"; op::String string(stdString.c_str());

+ +
+
+ +

◆ String() [3/3]

+ +
+
+ + + + + +
+ + + + + + + + +
op::String::String (const std::string & string)
+
+explicit
+
+

It forces a copy of the given string.

+ +
+
+

Member Function Documentation

+ +

◆ empty()

+ +
+
+ + + + + + + +
bool op::String::empty () const
+
+ +
+
+ +

◆ getStdString()

+ +
+
+ + + + + + + +
const std::string& op::String::getStdString () const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_string.js b/web/html/doc/classop_1_1_string.js new file mode 100644 index 000000000..93cd9959f --- /dev/null +++ b/web/html/doc/classop_1_1_string.js @@ -0,0 +1,8 @@ +var classop_1_1_string = +[ + [ "String", "classop_1_1_string.html#a5f1c9f53adedf082ee0cad43fa6140be", null ], + [ "String", "classop_1_1_string.html#ad8384eb6141b3fc53e5dc246be77cf6c", null ], + [ "String", "classop_1_1_string.html#a5848aace0a849fafffb3a2ae78d05156", null ], + [ "empty", "classop_1_1_string.html#aeae63b12cb517a5cdaf55b836a92a49c", null ], + [ "getStdString", "classop_1_1_string.html#a82003e99b9f3e9bd0054873deac970da", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_sub_thread-members.html b/web/html/doc/classop_1_1_sub_thread-members.html new file mode 100644 index 000000000..94449be15 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread-members.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::SubThread< TDatums, TWorker > Member List

This is the complete list of members for op::SubThread< TDatums, TWorker >, including all inherited members.

+ + + + + + + +
getTWorkersSize() constop::SubThread< TDatums, TWorker >inlineprotected
initializationOnThread()op::SubThread< TDatums, TWorker >
SubThread(const std::vector< TWorker > &tWorkers)op::SubThread< TDatums, TWorker >explicit
work()=0op::SubThread< TDatums, TWorker >pure virtual
workTWorkers(TDatums &tDatums, const bool inputIsRunning)op::SubThread< TDatums, TWorker >protected
~SubThread()op::SubThread< TDatums, TWorker >virtual
+
diff --git a/web/html/doc/classop_1_1_sub_thread.html b/web/html/doc/classop_1_1_sub_thread.html
new file mode 100644
index 000000000..6ac47d70d
--- /dev/null
+++ b/web/html/doc/classop_1_1_sub_thread.html
@@ -0,0 +1,307 @@
OpenPose: op::SubThread< TDatums, TWorker > Class Template Reference
op::SubThread< TDatums, TWorker > Class Template Reference (abstract)

#include <subThread.hpp>

+ + + + + + + + + + +

+Public Member Functions

 SubThread (const std::vector< TWorker > &tWorkers)
 
virtual ~SubThread ()
 
void initializationOnThread ()
 
virtual bool work ()=0
 
+ + + + + +

+Protected Member Functions

size_t getTWorkersSize () const
 
bool workTWorkers (TDatums &tDatums, const bool inputIsRunning)
 
+

Detailed Description

+

template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>>
+class op::SubThread< TDatums, TWorker >

+ + +

Definition at line 10 of file subThread.hpp.
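SubThread is abstract (work() is pure virtual) and is normally instantiated through the concrete subclasses used by OpenPose's threading framework (e.g., op::SubThreadNoQueue and the queue-based variants), so the class below is purely an illustrative sketch; it also assumes TDatums is default-constructible.

#include <openpose/headers.hpp>
#include <memory>
#include <vector>

// Illustrative subclass only: run the wrapped workers once per work() call,
// without any input/output queue (op::SubThreadNoQueue already offers this).
template<typename TDatums, typename TWorker = std::shared_ptr<op::Worker<TDatums>>>
class SubThreadPassThrough : public op::SubThread<TDatums, TWorker>
{
public:
    explicit SubThreadPassThrough(const std::vector<TWorker>& tWorkers) :
        op::SubThread<TDatums, TWorker>{tWorkers}
    {
    }

    bool work() override
    {
        TDatums tDatums;                            // assumes a default-constructible TDatums
        return this->workTWorkers(tDatums, true);   // protected helper documented above
    }
};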

+

Constructor & Destructor Documentation

+ +

◆ SubThread()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + +
+ + + + + + + + +
op::SubThread< TDatums, TWorker >::SubThread (const std::vector< TWorker > & tWorkers)
+
+explicit
+
+ +

Definition at line 45 of file subThread.hpp.

+ +
+
+ +

◆ ~SubThread()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + +
+ + + + +
op::SubThread< TDatums, TWorker >::~SubThread
+
+virtual
+
+ +

Definition at line 51 of file subThread.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ getTWorkersSize()

+ +
+
+
+template<typename TDatums , typename TWorker = std::shared_ptr<Worker<TDatums>>>
+ + + + + +
+ + + + + + + +
size_t op::SubThread< TDatums, TWorker >::getTWorkersSize () const
+
+inlineprotected
+
+ +

Definition at line 23 of file subThread.hpp.

+ +
+
+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + +
void op::SubThread< TDatums, TWorker >::initializationOnThread
+
+ +

Definition at line 141 of file subThread.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums , typename TWorker = std::shared_ptr<Worker<TDatums>>>
+ + + + + +
+ + + + + + + +
virtual bool op::SubThread< TDatums, TWorker >::work ()
+
+pure virtual
+
+ +
+
+ +

◆ workTWorkers()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
bool op::SubThread< TDatums, TWorker >::workTWorkers (TDatums & tDatums,
const bool inputIsRunning 
)
+
+protected
+
+ +

Definition at line 56 of file subThread.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread.js b/web/html/doc/classop_1_1_sub_thread.js new file mode 100644 index 000000000..c0432e791 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread.js @@ -0,0 +1,9 @@ +var classop_1_1_sub_thread = +[ + [ "SubThread", "classop_1_1_sub_thread.html#aa551df0d8f0b30aaf9e0840ecf29d749", null ], + [ "~SubThread", "classop_1_1_sub_thread.html#a6ee67e375611e8df2d09b3234dedf36c", null ], + [ "getTWorkersSize", "classop_1_1_sub_thread.html#a3e8e044b20842d15b1caedf8a78be622", null ], + [ "initializationOnThread", "classop_1_1_sub_thread.html#a8debc3b655463847fed2c547d13326f7", null ], + [ "work", "classop_1_1_sub_thread.html#a14330cbc1117f32b6d69c1733ccdeb61", null ], + [ "workTWorkers", "classop_1_1_sub_thread.html#ad9f2d3be9e05739b102fad350e1a1364", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_sub_thread_no_queue-members.html b/web/html/doc/classop_1_1_sub_thread_no_queue-members.html new file mode 100644 index 000000000..60ae3abed --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_no_queue-members.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::SubThreadNoQueue< TDatums, TWorker > Member List

This is the complete list of members for op::SubThreadNoQueue< TDatums, TWorker >, including all inherited members.

+ + + + + +
DELETE_COPY(SubThreadNoQueue)op::SubThreadNoQueue< TDatums, TWorker >
SubThreadNoQueue(const std::vector< TWorker > &tWorkers)op::SubThreadNoQueue< TDatums, TWorker >explicit
work()op::SubThreadNoQueue< TDatums, TWorker >
~SubThreadNoQueue()op::SubThreadNoQueue< TDatums, TWorker >virtual
+
diff --git a/web/html/doc/classop_1_1_sub_thread_no_queue.html b/web/html/doc/classop_1_1_sub_thread_no_queue.html
new file mode 100644
index 000000000..b1d56ea10
--- /dev/null
+++ b/web/html/doc/classop_1_1_sub_thread_no_queue.html
@@ -0,0 +1,229 @@
OpenPose: op::SubThreadNoQueue< TDatums, TWorker > Class Template Reference
op::SubThreadNoQueue< TDatums, TWorker > Class Template Reference

#include <subThreadNoQueue.hpp>

+
+Inheritance diagram for op::SubThreadNoQueue< TDatums, TWorker >:
+
+
+ +
+ + + + + + + + + + +

+Public Member Functions

 SubThreadNoQueue (const std::vector< TWorker > &tWorkers)
 
virtual ~SubThreadNoQueue ()
 
bool work ()
 
 DELETE_COPY (SubThreadNoQueue)
 
+

Detailed Description

+

template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>>
+class op::SubThreadNoQueue< TDatums, TWorker >

+ + +

Definition at line 11 of file subThreadNoQueue.hpp.
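A hedged wiring sketch. The TDatums alias matches the one used throughout the OpenPose tutorials, createUserWorkers() is a hypothetical placeholder for code that builds op::Worker subclasses, and in real applications this wiring is normally done by op::ThreadManager / op::Wrapper rather than by hand.

#include <openpose/headers.hpp>
#include <memory>
#include <vector>

using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;
using TWorker = std::shared_ptr<op::Worker<TDatums>>;

// Hypothetical helper, not part of OpenPose: builds the user-defined workers.
std::vector<TWorker> createUserWorkers();

void runSubThreadOnce()
{
    const auto tWorkers = createUserWorkers();
    op::SubThreadNoQueue<TDatums> subThread{tWorkers};
    subThread.initializationOnThread();   // inherited from op::SubThread
    while (subThread.work())              // presumably false once the workers are done
    {
    }
}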

+

Constructor & Destructor Documentation

+ +

◆ SubThreadNoQueue()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + +
+ + + + + + + + +
op::SubThreadNoQueue< TDatums, TWorker >::SubThreadNoQueue (const std::vector< TWorker > & tWorkers)
+
+explicit
+
+ +

Definition at line 32 of file subThreadNoQueue.hpp.

+ +
+
+ +

◆ ~SubThreadNoQueue()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + +
+ + + + +
op::SubThreadNoQueue< TDatums, TWorker >::~SubThreadNoQueue
+
+virtual
+
+ +

Definition at line 38 of file subThreadNoQueue.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ DELETE_COPY()

+ +
+
+
+template<typename TDatums , typename TWorker = std::shared_ptr<Worker<TDatums>>>
+ + + + + + + + +
op::SubThreadNoQueue< TDatums, TWorker >::DELETE_COPY (SubThreadNoQueue< TDatums, TWorker > )
+
+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + +
bool op::SubThreadNoQueue< TDatums, TWorker >::work
+
+ +

Definition at line 43 of file subThreadNoQueue.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread_no_queue.js b/web/html/doc/classop_1_1_sub_thread_no_queue.js new file mode 100644 index 000000000..351515d25 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_no_queue.js @@ -0,0 +1,7 @@ +var classop_1_1_sub_thread_no_queue = +[ + [ "SubThreadNoQueue", "classop_1_1_sub_thread_no_queue.html#afbaf89d7a8fb4d19f67064fb954a31eb", null ], + [ "~SubThreadNoQueue", "classop_1_1_sub_thread_no_queue.html#ad88bbbe72f4777603d71e322b0fd20ed", null ], + [ "DELETE_COPY", "classop_1_1_sub_thread_no_queue.html#a43504502c36461305d656fb87b914749", null ], + [ "work", "classop_1_1_sub_thread_no_queue.html#acb7edd02e1724e0fd131235666009f42", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_sub_thread_no_queue.png b/web/html/doc/classop_1_1_sub_thread_no_queue.png new file mode 100644 index 000000000..2e39f835b Binary files /dev/null and b/web/html/doc/classop_1_1_sub_thread_no_queue.png differ diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in-members.html b/web/html/doc/classop_1_1_sub_thread_queue_in-members.html new file mode 100644 index 000000000..aa90a9661 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_in-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::SubThreadQueueIn< TDatums, TWorker, TQueue > Member List
+
+
+ +

This is the complete list of members for op::SubThreadQueueIn< TDatums, TWorker, TQueue >, including all inherited members.

+ + + + +
SubThreadQueueIn(const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueIn)op::SubThreadQueueIn< TDatums, TWorker, TQueue >
work()op::SubThreadQueueIn< TDatums, TWorker, TQueue >
~SubThreadQueueIn()op::SubThreadQueueIn< TDatums, TWorker, TQueue >virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in.html b/web/html/doc/classop_1_1_sub_thread_queue_in.html new file mode 100644 index 000000000..74d64b500 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_in.html @@ -0,0 +1,209 @@ + + + + + + + +OpenPose: op::SubThreadQueueIn< TDatums, TWorker, TQueue > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::SubThreadQueueIn< TDatums, TWorker, TQueue > Class Template Reference
+
+
+ +

#include <subThreadQueueIn.hpp>

+
+Inheritance diagram for op::SubThreadQueueIn< TDatums, TWorker, TQueue >:
+
+
+ +
+ + + + + + + + +

+Public Member Functions

 SubThreadQueueIn (const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueIn)
 
virtual ~SubThreadQueueIn ()
 
bool work ()
 
+

Detailed Description

+

template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
+class op::SubThreadQueueIn< TDatums, TWorker, TQueue >

+ + +

Definition at line 12 of file subThreadQueueIn.hpp.

+

Constructor & Destructor Documentation

+ +

◆ SubThreadQueueIn()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + + + + + + + + + + + +
op::SubThreadQueueIn< TDatums, TWorker, TQueue >::SubThreadQueueIn (const std::vector< TWorker > & tWorkers,
const std::shared_ptr< TQueue > & tQueueIn 
)
+
+ +

Definition at line 36 of file subThreadQueueIn.hpp.

+ +
+
+ +

◆ ~SubThreadQueueIn()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + +
+ + + + +
op::SubThreadQueueIn< TDatums, TWorker, TQueue >::~SubThreadQueueIn
+
+virtual
+
+ +

Definition at line 45 of file subThreadQueueIn.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ work()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + +
bool op::SubThreadQueueIn< TDatums, TWorker, TQueue >::work
+
+ +

Definition at line 50 of file subThreadQueueIn.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in.js b/web/html/doc/classop_1_1_sub_thread_queue_in.js new file mode 100644 index 000000000..67d9a24f5 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_in.js @@ -0,0 +1,6 @@ +var classop_1_1_sub_thread_queue_in = +[ + [ "SubThreadQueueIn", "classop_1_1_sub_thread_queue_in.html#a11aa71a818430c4eb435a1626e54f29a", null ], + [ "~SubThreadQueueIn", "classop_1_1_sub_thread_queue_in.html#a8a479c4ddc5b42f1dbf329c4a0c235c3", null ], + [ "work", "classop_1_1_sub_thread_queue_in.html#a7e9bd6ca09bb77a8de76ae8a02ee8ed4", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in.png b/web/html/doc/classop_1_1_sub_thread_queue_in.png new file mode 100644 index 000000000..6757ace9d Binary files /dev/null and b/web/html/doc/classop_1_1_sub_thread_queue_in.png differ diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in_out-members.html b/web/html/doc/classop_1_1_sub_thread_queue_in_out-members.html new file mode 100644 index 000000000..942c77b16 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_in_out-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::SubThreadQueueInOut< TDatums, TWorker, TQueue > Member List
+
+
+ +

This is the complete list of members for op::SubThreadQueueInOut< TDatums, TWorker, TQueue >, including all inherited members.

+ + + + +
SubThreadQueueInOut(const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueIn, const std::shared_ptr< TQueue > &tQueueOut)op::SubThreadQueueInOut< TDatums, TWorker, TQueue >
work()op::SubThreadQueueInOut< TDatums, TWorker, TQueue >
~SubThreadQueueInOut()op::SubThreadQueueInOut< TDatums, TWorker, TQueue >virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in_out.html b/web/html/doc/classop_1_1_sub_thread_queue_in_out.html new file mode 100644 index 000000000..47582d6b5 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_in_out.html @@ -0,0 +1,215 @@ + + + + + + + +OpenPose: op::SubThreadQueueInOut< TDatums, TWorker, TQueue > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::SubThreadQueueInOut< TDatums, TWorker, TQueue > Class Template Reference
+
+
+ +

#include <subThreadQueueInOut.hpp>

+
+Inheritance diagram for op::SubThreadQueueInOut< TDatums, TWorker, TQueue >:
+
+
+ +
+ + + + + + + + +

+Public Member Functions

 SubThreadQueueInOut (const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueIn, const std::shared_ptr< TQueue > &tQueueOut)
 
virtual ~SubThreadQueueInOut ()
 
bool work ()
 
+

Detailed Description

+

template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
+class op::SubThreadQueueInOut< TDatums, TWorker, TQueue >

+ + +

Definition at line 12 of file subThreadQueueInOut.hpp.

+

Constructor & Destructor Documentation

+ +

◆ SubThreadQueueInOut()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + + + + + + + + + + + + + + + + + +
op::SubThreadQueueInOut< TDatums, TWorker, TQueue >::SubThreadQueueInOut (const std::vector< TWorker > & tWorkers,
const std::shared_ptr< TQueue > & tQueueIn,
const std::shared_ptr< TQueue > & tQueueOut 
)
+
+ +

Definition at line 38 of file subThreadQueueInOut.hpp.

+ +
+
+ +

◆ ~SubThreadQueueInOut()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + +
+ + + + +
op::SubThreadQueueInOut< TDatums, TWorker, TQueue >::~SubThreadQueueInOut
+
+virtual
+
+ +

Definition at line 50 of file subThreadQueueInOut.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ work()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + +
bool op::SubThreadQueueInOut< TDatums, TWorker, TQueue >::work
+
+ +

Definition at line 55 of file subThreadQueueInOut.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in_out.js b/web/html/doc/classop_1_1_sub_thread_queue_in_out.js new file mode 100644 index 000000000..f6ec00543 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_in_out.js @@ -0,0 +1,6 @@ +var classop_1_1_sub_thread_queue_in_out = +[ + [ "SubThreadQueueInOut", "classop_1_1_sub_thread_queue_in_out.html#aa5b9beea615b8b968c5da74dd66a6d78", null ], + [ "~SubThreadQueueInOut", "classop_1_1_sub_thread_queue_in_out.html#a87d122e11adc7363d9b24c7f796d3d33", null ], + [ "work", "classop_1_1_sub_thread_queue_in_out.html#abb65911e9d9b6d5efe782ca0e599be3b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_sub_thread_queue_in_out.png b/web/html/doc/classop_1_1_sub_thread_queue_in_out.png new file mode 100644 index 000000000..33efa2c2d Binary files /dev/null and b/web/html/doc/classop_1_1_sub_thread_queue_in_out.png differ diff --git a/web/html/doc/classop_1_1_sub_thread_queue_out-members.html b/web/html/doc/classop_1_1_sub_thread_queue_out-members.html new file mode 100644 index 000000000..571ce2821 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_out-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::SubThreadQueueOut< TDatums, TWorker, TQueue > Member List
+
+
+ +

This is the complete list of members for op::SubThreadQueueOut< TDatums, TWorker, TQueue >, including all inherited members.

+ + + + +
SubThreadQueueOut(const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueOut)op::SubThreadQueueOut< TDatums, TWorker, TQueue >
work()op::SubThreadQueueOut< TDatums, TWorker, TQueue >
~SubThreadQueueOut()op::SubThreadQueueOut< TDatums, TWorker, TQueue >virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread_queue_out.html b/web/html/doc/classop_1_1_sub_thread_queue_out.html new file mode 100644 index 000000000..f3d1801d4 --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_out.html @@ -0,0 +1,209 @@ + + + + + + + +OpenPose: op::SubThreadQueueOut< TDatums, TWorker, TQueue > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::SubThreadQueueOut< TDatums, TWorker, TQueue > Class Template Reference
+
+
+ +

#include <subThreadQueueOut.hpp>

+
+Inheritance diagram for op::SubThreadQueueOut< TDatums, TWorker, TQueue >:
+
+
+ +
+ + + + + + + + +

+Public Member Functions

 SubThreadQueueOut (const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueOut)
 
virtual ~SubThreadQueueOut ()
 
bool work ()
 
+

Detailed Description

+

template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
+class op::SubThreadQueueOut< TDatums, TWorker, TQueue >

+ + +

Definition at line 12 of file subThreadQueueOut.hpp.

+

Constructor & Destructor Documentation

+ +

◆ SubThreadQueueOut()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + + + + + + + + + + + +
op::SubThreadQueueOut< TDatums, TWorker, TQueue >::SubThreadQueueOut (const std::vector< TWorker > & tWorkers,
const std::shared_ptr< TQueue > & tQueueOut 
)
+
+ +

Definition at line 36 of file subThreadQueueOut.hpp.

+ +
+
+ +

◆ ~SubThreadQueueOut()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + +
+ + + + +
op::SubThreadQueueOut< TDatums, TWorker, TQueue >::~SubThreadQueueOut
+
+virtual
+
+ +

Definition at line 45 of file subThreadQueueOut.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ work()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + +
bool op::SubThreadQueueOut< TDatums, TWorker, TQueue >::work
+
+ +

Definition at line 50 of file subThreadQueueOut.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_sub_thread_queue_out.js b/web/html/doc/classop_1_1_sub_thread_queue_out.js new file mode 100644 index 000000000..9c6e0363b --- /dev/null +++ b/web/html/doc/classop_1_1_sub_thread_queue_out.js @@ -0,0 +1,6 @@ +var classop_1_1_sub_thread_queue_out = +[ + [ "SubThreadQueueOut", "classop_1_1_sub_thread_queue_out.html#aa4a827932f632f1f30b5650a4fcc77ff", null ], + [ "~SubThreadQueueOut", "classop_1_1_sub_thread_queue_out.html#ab61e068d6dddd2914b25638ebeff0f3b", null ], + [ "work", "classop_1_1_sub_thread_queue_out.html#a0ff5f79e63038ffa5b4aca24cfea7e7c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_sub_thread_queue_out.png b/web/html/doc/classop_1_1_sub_thread_queue_out.png new file mode 100644 index 000000000..2920788ee Binary files /dev/null and b/web/html/doc/classop_1_1_sub_thread_queue_out.png differ diff --git a/web/html/doc/classop_1_1_thread-members.html b/web/html/doc/classop_1_1_thread-members.html new file mode 100644 index 000000000..99bf6e222 --- /dev/null +++ b/web/html/doc/classop_1_1_thread-members.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::Thread< TDatums, TWorker > Member List
+
+
+ +

This is the complete list of members for op::Thread< TDatums, TWorker >, including all inherited members.

+ + + + + + + + + + + +
add(const std::vector< std::shared_ptr< SubThread< TDatums, TWorker >>> &subThreads)op::Thread< TDatums, TWorker >
add(const std::shared_ptr< SubThread< TDatums, TWorker >> &subThread)op::Thread< TDatums, TWorker >
exec(const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr)op::Thread< TDatums, TWorker >
isRunning() constop::Thread< TDatums, TWorker >inline
operator=(Thread &&t)op::Thread< TDatums, TWorker >
startInThread()op::Thread< TDatums, TWorker >
stopAndJoin()op::Thread< TDatums, TWorker >
Thread(const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr=nullptr)op::Thread< TDatums, TWorker >explicit
Thread(Thread &&t)op::Thread< TDatums, TWorker >
~Thread()op::Thread< TDatums, TWorker >virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_thread.html b/web/html/doc/classop_1_1_thread.html new file mode 100644 index 000000000..6e6569683 --- /dev/null +++ b/web/html/doc/classop_1_1_thread.html @@ -0,0 +1,372 @@ + + + + + + + +OpenPose: op::Thread< TDatums, TWorker > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::Thread< TDatums, TWorker > Class Template Reference
+
+
+ +

#include <thread.hpp>

+ + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 Thread (const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr=nullptr)
 
 Thread (Thread &&t)
 
Thread & operator= (Thread &&t)
 
virtual ~Thread ()
 
void add (const std::vector< std::shared_ptr< SubThread< TDatums, TWorker >>> &subThreads)
 
void add (const std::shared_ptr< SubThread< TDatums, TWorker >> &subThread)
 
void exec (const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr)
 
void startInThread ()
 
void stopAndJoin ()
 
bool isRunning () const
 
+

Detailed Description

+

template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>>
+class op::Thread< TDatums, TWorker >

+ + +

Definition at line 12 of file thread.hpp.

+

Constructor & Destructor Documentation

+ +

◆ Thread() [1/2]

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + +
+ + + + + + + + +
op::Thread< TDatums, TWorker >::Thread (const std::shared_ptr< std::atomic< bool >> & isRunningSharedPtr = nullptr)
+
+explicit
+
+ +

Definition at line 66 of file thread.hpp.

+ +
+
+ +

◆ Thread() [2/2]

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + + + + +
op::Thread< TDatums, TWorker >::Thread (Thread< TDatums, TWorker > && t)
+
+ +

Definition at line 72 of file thread.hpp.

+ +
+
+ +

◆ ~Thread()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + +
+ + + + +
op::Thread< TDatums, TWorker >::~Thread
+
+virtual
+
+ +

Definition at line 89 of file thread.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ add() [1/2]

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + + + + +
void op::Thread< TDatums, TWorker >::add (const std::shared_ptr< SubThread< TDatums, TWorker >> & subThread)
+
+ +

Definition at line 111 of file thread.hpp.

+ +
+
+ +

◆ add() [2/2]

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + + + + +
void op::Thread< TDatums, TWorker >::add (const std::vector< std::shared_ptr< SubThread< TDatums, TWorker >>> & subThreads)
+
+ +

Definition at line 104 of file thread.hpp.

+ +
+
+ +

◆ exec()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + + + + +
void op::Thread< TDatums, TWorker >::exec (const std::shared_ptr< std::atomic< bool >> & isRunningSharedPtr)
+
+ +

Definition at line 117 of file thread.hpp.

+ +
+
+ +

◆ isRunning()

+ +
+
+
+template<typename TDatums , typename TWorker = std::shared_ptr<Worker<TDatums>>>
+ + + + + +
+ + + + + + + +
bool op::Thread< TDatums, TWorker >::isRunning () const
+
+inline
+
+ +

Definition at line 36 of file thread.hpp.

+ +
+
+ +

◆ operator=()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + + + + + +
Thread< TDatums, TWorker > & op::Thread< TDatums, TWorker >::operator= (Thread< TDatums, TWorker > && t)
+
+ +

Definition at line 80 of file thread.hpp.

+ +
+
+ +

◆ startInThread()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + +
void op::Thread< TDatums, TWorker >::startInThread
+
+ +

Definition at line 133 of file thread.hpp.

+ +
+
+ +

◆ stopAndJoin()

+ +
+
+
+template<typename TDatums , typename TWorker >
+ + + + +
void op::Thread< TDatums, TWorker >::stopAndJoin
+
+ +

Definition at line 149 of file thread.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_thread.js b/web/html/doc/classop_1_1_thread.js new file mode 100644 index 000000000..7bcab53ff --- /dev/null +++ b/web/html/doc/classop_1_1_thread.js @@ -0,0 +1,13 @@ +var classop_1_1_thread = +[ + [ "Thread", "classop_1_1_thread.html#a9d3408a329a475da22a8e2a0bdf5f68d", null ], + [ "Thread", "classop_1_1_thread.html#a6ae463dc996ca6941a303b0c41288063", null ], + [ "~Thread", "classop_1_1_thread.html#a151e4e647917f2351cc05a8861588e2a", null ], + [ "add", "classop_1_1_thread.html#a820b9416b96c69cb1fc6773b9a53a47a", null ], + [ "add", "classop_1_1_thread.html#a0617df4103c25bb04ee2c75f05ea2978", null ], + [ "exec", "classop_1_1_thread.html#ad6c3721793d0f65ffe755ab74534afed", null ], + [ "isRunning", "classop_1_1_thread.html#a3ed032f4c42ef1797873122aa96a055d", null ], + [ "operator=", "classop_1_1_thread.html#a16d1835e2bd7c5ae988f4bc225b3ca09", null ], + [ "startInThread", "classop_1_1_thread.html#ac898abffd6ed18456b97ef1b72935ec6", null ], + [ "stopAndJoin", "classop_1_1_thread.html#a92e5dd0f60a0485e7d0fad3e82bb74f3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_thread_manager-members.html b/web/html/doc/classop_1_1_thread_manager-members.html new file mode 100644 index 000000000..3e3ac8c99 --- /dev/null +++ b/web/html/doc/classop_1_1_thread_manager-members.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::ThreadManager< TDatums, TWorker, TQueue > Member List
+
+
+ +

This is the complete list of members for op::ThreadManager< TDatums, TWorker, TQueue >, including all inherited members.

+ + + + + + + + + + + + + + + + + + +
add(const unsigned long long threadId, const std::vector< TWorker > &tWorkers, const unsigned long long queueInId, const unsigned long long queueOutId)op::ThreadManager< TDatums, TWorker, TQueue >
add(const unsigned long long threadId, const TWorker &tWorker, const unsigned long long queueInId, const unsigned long long queueOutId)op::ThreadManager< TDatums, TWorker, TQueue >
exec()op::ThreadManager< TDatums, TWorker, TQueue >
getIsRunningSharedPtr()op::ThreadManager< TDatums, TWorker, TQueue >inline
isRunning() constop::ThreadManager< TDatums, TWorker, TQueue >inline
reset()op::ThreadManager< TDatums, TWorker, TQueue >
setDefaultMaxSizeQueues(const long long defaultMaxSizeQueues=-1)op::ThreadManager< TDatums, TWorker, TQueue >
start()op::ThreadManager< TDatums, TWorker, TQueue >
stop()op::ThreadManager< TDatums, TWorker, TQueue >
ThreadManager(const ThreadManagerMode threadManagerMode=ThreadManagerMode::Synchronous)op::ThreadManager< TDatums, TWorker, TQueue >explicit
tryEmplace(TDatums &tDatums)op::ThreadManager< TDatums, TWorker, TQueue >
tryPop(TDatums &tDatums)op::ThreadManager< TDatums, TWorker, TQueue >
tryPush(const TDatums &tDatums)op::ThreadManager< TDatums, TWorker, TQueue >
waitAndEmplace(TDatums &tDatums)op::ThreadManager< TDatums, TWorker, TQueue >
waitAndPop(TDatums &tDatums)op::ThreadManager< TDatums, TWorker, TQueue >
waitAndPush(const TDatums &tDatums)op::ThreadManager< TDatums, TWorker, TQueue >
~ThreadManager()op::ThreadManager< TDatums, TWorker, TQueue >virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_thread_manager.html b/web/html/doc/classop_1_1_thread_manager.html new file mode 100644 index 000000000..ef529587a --- /dev/null +++ b/web/html/doc/classop_1_1_thread_manager.html @@ -0,0 +1,593 @@ + + + + + + + +OpenPose: op::ThreadManager< TDatums, TWorker, TQueue > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::ThreadManager< TDatums, TWorker, TQueue > Class Template Reference
+
+
+ +

#include <threadManager.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 ThreadManager (const ThreadManagerMode threadManagerMode=ThreadManagerMode::Synchronous)
 
virtual ~ThreadManager ()
 
void setDefaultMaxSizeQueues (const long long defaultMaxSizeQueues=-1)
 
void add (const unsigned long long threadId, const std::vector< TWorker > &tWorkers, const unsigned long long queueInId, const unsigned long long queueOutId)
 
void add (const unsigned long long threadId, const TWorker &tWorker, const unsigned long long queueInId, const unsigned long long queueOutId)
 
void reset ()
 
void exec ()
 
void start ()
 
void stop ()
 
std::shared_ptr< std::atomic< bool > > getIsRunningSharedPtr ()
 
bool isRunning () const
 
bool tryEmplace (TDatums &tDatums)
 
bool waitAndEmplace (TDatums &tDatums)
 
bool tryPush (const TDatums &tDatums)
 
bool waitAndPush (const TDatums &tDatums)
 
bool tryPop (TDatums &tDatums)
 
bool waitAndPop (TDatums &tDatums)
 
+

Detailed Description

+

template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
+class op::ThreadManager< TDatums, TWorker, TQueue >

+ + +

Definition at line 16 of file threadManager.hpp.

+

Constructor & Destructor Documentation

+ +

◆ ThreadManager()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + +
+ + + + + + + + +
op::ThreadManager< TDatums, TWorker, TQueue >::ThreadManager (const ThreadManagerMode threadManagerMode = ThreadManagerMode::Synchronous)
+
+explicit
+
+ +

Definition at line 108 of file threadManager.hpp.

+ +
+
+ +

◆ ~ThreadManager()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + +
+ + + + +
op::ThreadManager< TDatums, TWorker, TQueue >::~ThreadManager
+
+virtual
+
+ +

Definition at line 116 of file threadManager.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ add() [1/2]

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::ThreadManager< TDatums, TWorker, TQueue >::add (const unsigned long long threadId,
const std::vector< TWorker > & tWorkers,
const unsigned long long queueInId,
const unsigned long long queueOutId 
)
+
+ +

Definition at line 134 of file threadManager.hpp.

+ +
+
+ +

◆ add() [2/2]

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::ThreadManager< TDatums, TWorker, TQueue >::add (const unsigned long long threadId,
const TWorker & tWorker,
const unsigned long long queueInId,
const unsigned long long queueOutId 
)
+
+ +

Definition at line 150 of file threadManager.hpp.

+ +
+
+ +

◆ exec()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + +
void op::ThreadManager< TDatums, TWorker, TQueue >::exec
+
+ +

Definition at line 181 of file threadManager.hpp.

+ +
+
+ +

◆ getIsRunningSharedPtr()

+ +
+
+
+template<typename TDatums , typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
+ + + + + +
+ + + + + + + +
std::shared_ptr<std::atomic<bool> > op::ThreadManager< TDatums, TWorker, TQueue >::getIsRunningSharedPtr ()
+
+inline
+
+ +

Definition at line 49 of file threadManager.hpp.

+ +
+
+ +

◆ isRunning()

+ +
+
+
+template<typename TDatums , typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
+ + + + + +
+ + + + + + + +
bool op::ThreadManager< TDatums, TWorker, TQueue >::isRunning () const
+
+inline
+
+ +

Definition at line 54 of file threadManager.hpp.

+ +
+
+ +

◆ reset()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + +
void op::ThreadManager< TDatums, TWorker, TQueue >::reset
+
+ +

Definition at line 166 of file threadManager.hpp.

+ +
+
+ +

◆ setDefaultMaxSizeQueues()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + +
void op::ThreadManager< TDatums, TWorker, TQueue >::setDefaultMaxSizeQueues (const long long defaultMaxSizeQueues = -1)
+
+

It sets the maximum number of elements in the queue. For maximum speed, set it to a very large number, but the trade-offs would be:
  • Latency will hugely increase.
  • The program might run out of RAM memory (so the computer might freeze).
 For minimum latency while keeping an optimal speed, set it to -1, which will automatically detect the ideal number based on how many elements are connected to that queue.
    Parameters
    defaultMaxSizeQueues: long long element with the maximum number of elements on the queue.
+ +

Definition at line 121 of file threadManager.hpp.
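A minimal usage sketch (not part of the generated reference): it wires one thread with one worker and relies on the automatic queue sizing described above. WUserWorker is a hypothetical op::Worker subclass assumed for illustration; everything else (ThreadManager, ThreadManagerMode, add, start, waitAndEmplace, waitAndPop, stop) is taken from this page.

#include <memory>
#include <vector>
#include <openpose/headers.hpp>

// Datum container type commonly used with the OpenPose thread module.
using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

int main()
{
    // Asynchronous mode: the first and last queues are fed/read by the user code.
    op::ThreadManager<TDatumsSP> threadManager{op::ThreadManagerMode::Asynchronous};
    // -1: automatically pick a queue size that balances latency and speed (see above).
    threadManager.setDefaultMaxSizeQueues(-1);
    // Thread 0 runs the worker, reading from queue 0 and writing to queue 1.
    // WUserWorker is a hypothetical op::Worker<TDatumsSP> subclass (assumption).
    threadManager.add(0, std::make_shared<WUserWorker>(), 0, 1);
    threadManager.start();
    // Push one (still empty) datum vector in, then pop the processed result out.
    auto datumsToProcess = std::make_shared<std::vector<std::shared_ptr<op::Datum>>>();
    threadManager.waitAndEmplace(datumsToProcess);
    TDatumsSP processedDatums;
    threadManager.waitAndPop(processedDatums);
    threadManager.stop();
    return 0;
}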

+ +
+
+ +

◆ start()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + +
void op::ThreadManager< TDatums, TWorker, TQueue >::start
+
+ +

Definition at line 207 of file threadManager.hpp.

+ +
+
+ +

◆ stop()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + +
void op::ThreadManager< TDatums, TWorker, TQueue >::stop
+
+ +

Definition at line 226 of file threadManager.hpp.

+ +
+
+ +

◆ tryEmplace()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + +
bool op::ThreadManager< TDatums, TWorker, TQueue >::tryEmplace (TDatums & tDatums)
+
+ +

Definition at line 248 of file threadManager.hpp.

+ +
+
+ +

◆ tryPop()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + +
bool op::ThreadManager< TDatums, TWorker, TQueue >::tryPop (TDatums & tDatums)
+
+ +

Definition at line 324 of file threadManager.hpp.

+ +
+
+ +

◆ tryPush()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + +
bool op::ThreadManager< TDatums, TWorker, TQueue >::tryPush (const TDatums & tDatums)
+
+ +

Definition at line 286 of file threadManager.hpp.

+ +
+
+ +

◆ waitAndEmplace()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + +
bool op::ThreadManager< TDatums, TWorker, TQueue >::waitAndEmplace (TDatums & tDatums)
+
+ +

Definition at line 267 of file threadManager.hpp.

+ +
+
+ +

◆ waitAndPop()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + +
bool op::ThreadManager< TDatums, TWorker, TQueue >::waitAndPop (TDatums & tDatums)
+
+ +

Definition at line 343 of file threadManager.hpp.

+ +
+
+ +

◆ waitAndPush()

+ +
+
+
+template<typename TDatums , typename TWorker , typename TQueue >
+ + + + + + + + +
bool op::ThreadManager< TDatums, TWorker, TQueue >::waitAndPush (const TDatums & tDatums)
+
+ +

Definition at line 305 of file threadManager.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_thread_manager.js b/web/html/doc/classop_1_1_thread_manager.js new file mode 100644 index 000000000..077a304cd --- /dev/null +++ b/web/html/doc/classop_1_1_thread_manager.js @@ -0,0 +1,20 @@ +var classop_1_1_thread_manager = +[ + [ "ThreadManager", "classop_1_1_thread_manager.html#a8b7d17f4a330495389e646bb21907303", null ], + [ "~ThreadManager", "classop_1_1_thread_manager.html#a03c6587dbc60b266bee04b9714647fba", null ], + [ "add", "classop_1_1_thread_manager.html#a8134abeaec65b5647ae92e34f3ad420b", null ], + [ "add", "classop_1_1_thread_manager.html#a762acc9eb60bd10857da1f416e169f3d", null ], + [ "exec", "classop_1_1_thread_manager.html#a67a2d7cecc749be414e6896a88ec268d", null ], + [ "getIsRunningSharedPtr", "classop_1_1_thread_manager.html#a48ea53b3de4d09c84db18e2c31ce1be1", null ], + [ "isRunning", "classop_1_1_thread_manager.html#a7bad63adddf7a35a436911ada2a1c519", null ], + [ "reset", "classop_1_1_thread_manager.html#a5b7c5ea46c360496e261094c5e1397a7", null ], + [ "setDefaultMaxSizeQueues", "classop_1_1_thread_manager.html#ace408d1d281193a9f3d3d6561181ef56", null ], + [ "start", "classop_1_1_thread_manager.html#a01c2d62e539896e36564457ab9cac25c", null ], + [ "stop", "classop_1_1_thread_manager.html#a472a1ebee700c3449bac4d6d2bb0c3a8", null ], + [ "tryEmplace", "classop_1_1_thread_manager.html#a8d5ffd9473557ff0f90ac1c6a1bae3ad", null ], + [ "tryPop", "classop_1_1_thread_manager.html#a59916fc3428aaf5c487e1dd373d437cd", null ], + [ "tryPush", "classop_1_1_thread_manager.html#a7a24fd902ebd4b5fd81166547a5654d9", null ], + [ "waitAndEmplace", "classop_1_1_thread_manager.html#a36bd8060a4f7f449a8aa35d9a166270d", null ], + [ "waitAndPop", "classop_1_1_thread_manager.html#a393a9f04c70a002f5ceb5e301eea5cff", null ], + [ "waitAndPush", "classop_1_1_thread_manager.html#abfa315257b3e8cd022573f439b4936ec", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_udp_sender-members.html b/web/html/doc/classop_1_1_udp_sender-members.html new file mode 100644 index 000000000..73b4c3962 --- /dev/null +++ b/web/html/doc/classop_1_1_udp_sender-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::UdpSender Member List
+
+
+ +

This is the complete list of members for op::UdpSender, including all inherited members.

+ + + + +
sendJointAngles(const double *const adamPosePtr, const int adamPoseRows, const double *const adamTranslationPtr, const double *const adamFaceCoeffsExpPtr, const int faceCoeffRows)op::UdpSender
UdpSender(const std::string &udpHost, const std::string &udpPort)op::UdpSender
~UdpSender()op::UdpSendervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_udp_sender.html b/web/html/doc/classop_1_1_udp_sender.html new file mode 100644 index 000000000..8d7d6a962 --- /dev/null +++ b/web/html/doc/classop_1_1_udp_sender.html @@ -0,0 +1,223 @@ + + + + + + + +OpenPose: op::UdpSender Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::UdpSender Class Reference
+
+
+ +

#include <udpSender.hpp>

+ + + + + + + + +

+Public Member Functions

 UdpSender (const std::string &udpHost, const std::string &udpPort)
 
virtual ~UdpSender ()
 
void sendJointAngles (const double *const adamPosePtr, const int adamPoseRows, const double *const adamTranslationPtr, const double *const adamFaceCoeffsExpPtr, const int faceCoeffRows)
 
+

Detailed Description

+
+

Definition at line 8 of file udpSender.hpp.

+

Constructor & Destructor Documentation

+ +

◆ UdpSender()

+ +
+
+ + + + + + + + + + + + + + + + + + +
op::UdpSender::UdpSender (const std::string & udpHost,
const std::string & udpPort 
)
+
+ +
+
+ +

◆ ~UdpSender()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::UdpSender::~UdpSender ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ sendJointAngles()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
void op::UdpSender::sendJointAngles (const double *const adamPosePtr,
const int adamPoseRows,
const double *const adamTranslationPtr,
const double *const adamFaceCoeffsExpPtr,
const int faceCoeffRows 
)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_udp_sender.js b/web/html/doc/classop_1_1_udp_sender.js new file mode 100644 index 000000000..b640c9afa --- /dev/null +++ b/web/html/doc/classop_1_1_udp_sender.js @@ -0,0 +1,6 @@ +var classop_1_1_udp_sender = +[ + [ "UdpSender", "classop_1_1_udp_sender.html#a80fb12e5d4357e5dbb37c8a7b660c67c", null ], + [ "~UdpSender", "classop_1_1_udp_sender.html#ac85192d475d5e84b9dcc839d5e240585", null ], + [ "sendJointAngles", "classop_1_1_udp_sender.html#a2e8b52e1fd5a3383ebc9063ce21f6f06", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_verbose_printer-members.html b/web/html/doc/classop_1_1_verbose_printer-members.html new file mode 100644 index 000000000..86176dd6a --- /dev/null +++ b/web/html/doc/classop_1_1_verbose_printer-members.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::VerbosePrinter Member List
+
+
+ +

This is the complete list of members for op::VerbosePrinter, including all inherited members.

+ + + + +
printVerbose(const unsigned long long frameNumber) constop::VerbosePrinter
VerbosePrinter(const double verbose, const unsigned long long numberFrames)op::VerbosePrinter
~VerbosePrinter()op::VerbosePrintervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_verbose_printer.html b/web/html/doc/classop_1_1_verbose_printer.html new file mode 100644 index 000000000..f2a175f77 --- /dev/null +++ b/web/html/doc/classop_1_1_verbose_printer.html @@ -0,0 +1,195 @@ + + + + + + + +OpenPose: op::VerbosePrinter Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::VerbosePrinter Class Reference
+
+
+ +

#include <verbosePrinter.hpp>

+ + + + + + + + +

+Public Member Functions

 VerbosePrinter (const double verbose, const unsigned long long numberFrames)
 
virtual ~VerbosePrinter ()
 
void printVerbose (const unsigned long long frameNumber) const
 
+

Detailed Description

+
+

Definition at line 8 of file verbosePrinter.hpp.

+

Constructor & Destructor Documentation

+ +

◆ VerbosePrinter()

+ +
+
+ + + + + + + + + + + + + + + + + + +
op::VerbosePrinter::VerbosePrinter (const double verbose,
const unsigned long long numberFrames 
)
+
+ +
+
+ +

◆ ~VerbosePrinter()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::VerbosePrinter::~VerbosePrinter ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ printVerbose()

+ +
+
+ + + + + + + + +
void op::VerbosePrinter::printVerbose (const unsigned long long frameNumber) const
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_verbose_printer.js b/web/html/doc/classop_1_1_verbose_printer.js new file mode 100644 index 000000000..e992c5c32 --- /dev/null +++ b/web/html/doc/classop_1_1_verbose_printer.js @@ -0,0 +1,6 @@ +var classop_1_1_verbose_printer = +[ + [ "VerbosePrinter", "classop_1_1_verbose_printer.html#a79d2dc59b75a0164f60d875ef78523da", null ], + [ "~VerbosePrinter", "classop_1_1_verbose_printer.html#a5c4ef10db4aba13be43b92ab4e6c4d3e", null ], + [ "printVerbose", "classop_1_1_verbose_printer.html#ab85c8d6555a52eb77042646dfe798fbf", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_video_capture_reader-members.html b/web/html/doc/classop_1_1_video_capture_reader-members.html new file mode 100644 index 000000000..091232781 --- /dev/null +++ b/web/html/doc/classop_1_1_video_capture_reader-members.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::VideoCaptureReader Member List
+
+
+ +

This is the complete list of members for op::VideoCaptureReader, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
checkFrameIntegrity(Matrix &frame)op::Producerprotected
get(const int capProperty)=0op::VideoCaptureReaderpure virtual
op::Producer::get(const ProducerProperty property)op::Producer
getCameraExtrinsics()op::Producervirtual
getCameraIntrinsics()op::Producervirtual
getCameraMatrices()op::Producervirtual
getFrame()op::Producer
getFrames()op::Producer
getNextFrameName()=0op::VideoCaptureReaderpure virtual
getRawFrame()=0op::VideoCaptureReaderprotectedpure virtual
getRawFrames()=0op::VideoCaptureReaderprotectedpure virtual
getType()op::Producerinline
ifEndedResetOrRelease()op::Producerprotected
isOpened() constop::VideoCaptureReadervirtual
keepDesiredFrameRate()op::Producerprotected
Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)op::Producerexplicit
release()op::VideoCaptureReadervirtual
resetWebcam(const int index, const bool throwExceptionIfNoOpened)op::VideoCaptureReaderprotected
set(const int capProperty, const double value)=0op::VideoCaptureReaderpure virtual
op::Producer::set(const ProducerProperty property, const double value)op::Producer
setProducerFpsMode(const ProducerFpsMode fpsMode)op::Producer
VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)op::VideoCaptureReaderexplicit
VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)op::VideoCaptureReaderexplicit
~Producer()op::Producervirtual
~VideoCaptureReader()op::VideoCaptureReadervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_video_capture_reader.html b/web/html/doc/classop_1_1_video_capture_reader.html new file mode 100644 index 000000000..bf7d0d168 --- /dev/null +++ b/web/html/doc/classop_1_1_video_capture_reader.html @@ -0,0 +1,598 @@ + + + + + + + +OpenPose: op::VideoCaptureReader Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::VideoCaptureReader Class Referenceabstract
+
+
+ +

#include <videoCaptureReader.hpp>

+
+Inheritance diagram for op::VideoCaptureReader:
+
+
+ + +op::Producer +op::IpCameraReader +op::VideoReader +op::WebcamReader + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 VideoCaptureReader (const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
 VideoCaptureReader (const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
virtual ~VideoCaptureReader ()
 
virtual std::string getNextFrameName ()=0
 
virtual bool isOpened () const
 
void release ()
 
virtual double get (const int capProperty)=0
 
virtual void set (const int capProperty, const double value)=0
 
- Public Member Functions inherited from op::Producer
 Producer (const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
 
virtual ~Producer ()
 
Matrix getFrame ()
 
std::vector< Matrix > getFrames ()
 
virtual std::vector< Matrix > getCameraMatrices ()
 
virtual std::vector< Matrix > getCameraExtrinsics ()
 
virtual std::vector< Matrix > getCameraIntrinsics ()
 
void setProducerFpsMode (const ProducerFpsMode fpsMode)
 
ProducerType getType ()
 
double get (const ProducerProperty property)
 
void set (const ProducerProperty property, const double value)
 
+ + + + + + + + + + + + + + +

+Protected Member Functions

virtual Matrix getRawFrame ()=0
 
virtual std::vector< Matrix > getRawFrames ()=0
 
void resetWebcam (const int index, const bool throwExceptionIfNoOpened)
 
- Protected Member Functions inherited from op::Producer
void checkFrameIntegrity (Matrix &frame)
 
void ifEndedResetOrRelease ()
 
void keepDesiredFrameRate ()
 
+

Detailed Description

+

VideoCaptureReader is an abstract class to extract frames from a cv::VideoCapture source (video file, webcam stream, etc.). It has the basic and common functions of the cv::VideoCapture class (e.g., get, set, etc.).

+ +

Definition at line 13 of file videoCaptureReader.hpp.
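A minimal usage sketch (not part of the generated reference): since VideoCaptureReader is abstract, it is normally handled through a pointer to one of its children (op::VideoReader, op::WebcamReader, op::IpCameraReader). The video path is a placeholder and the property ids come from OpenCV, which OpenPose already depends on.

#include <iostream>
#include <memory>
#include <openpose/headers.hpp>
#include <opencv2/videoio.hpp>  // cv::CAP_PROP_* ids

int main()
{
    // op::VideoReader is one concrete child of op::VideoCaptureReader.
    const std::shared_ptr<op::VideoCaptureReader> reader =
        std::make_shared<op::VideoReader>("examples/media/video.avi");
    // set()/get() wrap cv::VideoCapture::set/get (see their documentation below).
    reader->set(cv::CAP_PROP_POS_FRAMES, 0.);
    std::cout << reader->get(cv::CAP_PROP_FRAME_WIDTH) << "x"
              << reader->get(cv::CAP_PROP_FRAME_HEIGHT) << std::endl;
    const op::Matrix frame = reader->getFrame();  // inherited from op::Producer
    std::cout << "empty frame? " << frame.empty() << std::endl;
    reader->release();
    return 0;
}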

+

Constructor & Destructor Documentation

+ +

◆ VideoCaptureReader() [1/2]

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::VideoCaptureReader::VideoCaptureReader (const int index,
const bool throwExceptionIfNoOpened,
const std::string & cameraParameterPath,
const bool undistortImage,
const int numberViews 
)
+
+explicit
+
+

This constructor of VideoCaptureReader wraps cv::VideoCapture(const int).

Parameters
+ + +
index: const int indicating the cv::VideoCapture constructor int argument, in the range [0, 9].
+
+
+ +
+
+ +

◆ VideoCaptureReader() [2/2]

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::VideoCaptureReader::VideoCaptureReader (const std::string & path,
const ProducerType producerType,
const std::string & cameraParameterPath,
const bool undistortImage,
const int numberViews 
)
+
+explicit
+
+

This constructor of VideoCaptureReader wraps cv::VideoCapture(const std::string).

Parameters
+ + + +
path: const std::string indicating the cv::VideoCapture constructor string argument.
producerType: const ProducerType indicating whether the frame source is an IP camera or a video.
+
+
+ +
+
+ +

◆ ~VideoCaptureReader()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::VideoCaptureReader::~VideoCaptureReader ()
+
+virtual
+
+

Destructor of VideoCaptureReader. It releases the cv::VideoCapture member. It is virtual so that any children class can implement its own destructor.

+ +
+
+

Member Function Documentation

+ +

◆ get()

+ +
+
+ + + + + +
+ + + + + + + + +
virtual double op::VideoCaptureReader::get (const int capProperty)
+
+pure virtual
+
+

This function is a wrapper of cv::VideoCapture::get. It allows getting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + +
capProperty: int indicating the property to be modified.
+
+
+
Returns
double returning the property value.
+ +

Implements op::Producer.

+ +

Implemented in op::WebcamReader, op::VideoReader, and op::IpCameraReader.

+ +
+
+ +

◆ getNextFrameName()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::string op::VideoCaptureReader::getNextFrameName ()
+
+pure virtual
+
+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.
+ +

Implements op::Producer.

+ +

Implemented in op::WebcamReader, op::VideoReader, and op::IpCameraReader.

+ +
+
+ +

◆ getRawFrame()

+ +
+
+ + + + + +
+ + + + + + + +
virtual Matrix op::VideoCaptureReader::getRawFrame ()
+
+protectedpure virtual
+
+

Function to be defined by its children class. It retrieves and returns a new frame from the frames producer.

Returns
Matrix with the new frame.
+ +

Implements op::Producer.

+ +
+
+ +

◆ getRawFrames()

+ +
+
+ + + + + +
+ + + + + + + +
virtual std::vector<Matrix> op::VideoCaptureReader::getRawFrames ()
+
+protectedpure virtual
+
+

Function to be defined by its children class. It retrieves and returns a new frame from the frames producer. It is equivalent to getRawFrame when more than 1 image can be returned.

Returns
std::vector<Matrix> with the new frames.
+ +

Implements op::Producer.

+ +
+
+ +

◆ isOpened()

+ +
+
+ + + + + +
+ + + + + + + +
virtual bool op::VideoCaptureReader::isOpened () const
+
+virtual
+
+

This function returns whether the Producer instance is still opened and able to retrieve more frames.

Returns
bool indicating whether the Producer is opened.
+ +

Implements op::Producer.

+ +

Reimplemented in op::WebcamReader, op::VideoReader, and op::IpCameraReader.

+ +
+
+ +

◆ release()

+ +
+
+ + + + + +
+ + + + + + + +
void op::VideoCaptureReader::release ()
+
+virtual
+
+

This function releases and closes the Producer. After it is called, no more frames can be retrieved from Producer::getFrames.

+ +

Implements op::Producer.

+ +
+
+ +

◆ resetWebcam()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::VideoCaptureReader::resetWebcam (const int index,
const bool throwExceptionIfNoOpened 
)
+
+protected
+
+ +
+
+ +

◆ set()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
virtual void op::VideoCaptureReader::set (const int capProperty,
const double value 
)
+
+pure virtual
+
+

This function is a wrapper of cv::VideoCapture::set. It allows setting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + + +
capProperty: int indicating the property to be modified.
value: double indicating the new value to be assigned.
+
+
+ +

Implements op::Producer.

+ +

Implemented in op::WebcamReader, op::VideoReader, and op::IpCameraReader.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_video_capture_reader.js b/web/html/doc/classop_1_1_video_capture_reader.js new file mode 100644 index 000000000..f5e5de2c6 --- /dev/null +++ b/web/html/doc/classop_1_1_video_capture_reader.js @@ -0,0 +1,14 @@ +var classop_1_1_video_capture_reader = +[ + [ "VideoCaptureReader", "classop_1_1_video_capture_reader.html#ae07295c083ce99b032ce219ea15405d9", null ], + [ "VideoCaptureReader", "classop_1_1_video_capture_reader.html#a3fe940326900ac6a2289de85664b14be", null ], + [ "~VideoCaptureReader", "classop_1_1_video_capture_reader.html#a7ea52eabf5133a1a01d38f95b1a4b601", null ], + [ "get", "classop_1_1_video_capture_reader.html#a64e5cbfb1c556d64cabcebc6eb94eaf1", null ], + [ "getNextFrameName", "classop_1_1_video_capture_reader.html#a06348fd9a290fc2ece2f3c2e4dc9bc70", null ], + [ "getRawFrame", "classop_1_1_video_capture_reader.html#a33aabaf7c82773f117f6842ff900fa18", null ], + [ "getRawFrames", "classop_1_1_video_capture_reader.html#a9f58d9280a26d94ff4ba6cd93f4928a0", null ], + [ "isOpened", "classop_1_1_video_capture_reader.html#ab0c6519396faae82ec1b49262ed454a2", null ], + [ "release", "classop_1_1_video_capture_reader.html#a2f73e10efe7f9b24a6cb75af2167de58", null ], + [ "resetWebcam", "classop_1_1_video_capture_reader.html#ab85b68c93854dd7c2ad437477e819506", null ], + [ "set", "classop_1_1_video_capture_reader.html#ab2929b7d2d002b58ebaf7b9b56999cca", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_video_capture_reader.png b/web/html/doc/classop_1_1_video_capture_reader.png new file mode 100644 index 000000000..e4a6b30b2 Binary files /dev/null and b/web/html/doc/classop_1_1_video_capture_reader.png differ diff --git a/web/html/doc/classop_1_1_video_reader-members.html b/web/html/doc/classop_1_1_video_reader-members.html new file mode 100644 index 000000000..46a6e9725 --- /dev/null +++ b/web/html/doc/classop_1_1_video_reader-members.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::VideoReader Member List
+
+
+ +

This is the complete list of members for op::VideoReader, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
checkFrameIntegrity(Matrix &frame)op::Producerprotected
get(const int capProperty)op::VideoReadervirtual
op::Producer::get(const ProducerProperty property)op::Producer
getCameraExtrinsics()op::Producervirtual
getCameraIntrinsics()op::Producervirtual
getCameraMatrices()op::Producervirtual
getFrame()op::Producer
getFrames()op::Producer
getNextFrameName()op::VideoReadervirtual
getType()op::Producerinline
ifEndedResetOrRelease()op::Producerprotected
isOpened() constop::VideoReaderinlinevirtual
keepDesiredFrameRate()op::Producerprotected
Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)op::Producerexplicit
release()op::VideoCaptureReadervirtual
resetWebcam(const int index, const bool throwExceptionIfNoOpened)op::VideoCaptureReaderprotected
set(const int capProperty, const double value)op::VideoReadervirtual
op::Producer::set(const ProducerProperty property, const double value)op::Producer
setProducerFpsMode(const ProducerFpsMode fpsMode)op::Producer
VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)op::VideoCaptureReaderexplicit
VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)op::VideoCaptureReaderexplicit
VideoReader(const std::string &videoPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1)op::VideoReaderexplicit
~Producer()op::Producervirtual
~VideoCaptureReader()op::VideoCaptureReadervirtual
~VideoReader()op::VideoReadervirtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_video_reader.html b/web/html/doc/classop_1_1_video_reader.html new file mode 100644 index 000000000..c5d0ada8d --- /dev/null +++ b/web/html/doc/classop_1_1_video_reader.html @@ -0,0 +1,405 @@ + + + + + + + +OpenPose: op::VideoReader Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::VideoReader Class Reference
+
+
+ +

#include <videoReader.hpp>

+
+Inheritance diagram for op::VideoReader:
+
+
+ + +op::VideoCaptureReader +op::Producer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 VideoReader (const std::string &videoPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1)
 
virtual ~VideoReader ()
 
std::string getNextFrameName ()
 
bool isOpened () const
 
double get (const int capProperty)
 
void set (const int capProperty, const double value)
 
- Public Member Functions inherited from op::VideoCaptureReader
 VideoCaptureReader (const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
 VideoCaptureReader (const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
virtual ~VideoCaptureReader ()
 
void release ()
 
- Public Member Functions inherited from op::Producer
 Producer (const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
 
virtual ~Producer ()
 
Matrix getFrame ()
 
std::vector< Matrix > getFrames ()
 
virtual std::vector< Matrix > getCameraMatrices ()
 
virtual std::vector< Matrix > getCameraExtrinsics ()
 
virtual std::vector< Matrix > getCameraIntrinsics ()
 
void setProducerFpsMode (const ProducerFpsMode fpsMode)
 
ProducerType getType ()
 
double get (const ProducerProperty property)
 
void set (const ProducerProperty property, const double value)
 
+ + + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::VideoCaptureReader
void resetWebcam (const int index, const bool throwExceptionIfNoOpened)
 
- Protected Member Functions inherited from op::Producer
void checkFrameIntegrity (Matrix &frame)
 
void ifEndedResetOrRelease ()
 
void keepDesiredFrameRate ()
 
+

Detailed Description

+

VideoReader is a wrapper of the cv::VideoCapture class for video files. It allows controlling a video (e.g., extracting frames, setting resolution and fps, etc.).

+ +

Definition at line 13 of file videoReader.hpp.
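A minimal usage sketch (not part of the generated reference): it reads a video through this wrapper using only members documented on this page plus getFrame()/isOpened() inherited from op::Producer. The video path is a placeholder, and the property id comes from OpenCV (a dependency of OpenPose).

#include <iostream>
#include <openpose/headers.hpp>
#include <opencv2/videoio.hpp>  // cv::CAP_PROP_FPS

int main()
{
    // Placeholder path: replace with an existing video file.
    op::VideoReader videoReader{"examples/media/video.avi"};
    if (!videoReader.isOpened())
        return -1;
    // get() wraps cv::VideoCapture::get, so OpenCV property ids are used.
    std::cout << "fps: " << videoReader.get(cv::CAP_PROP_FPS) << std::endl;
    while (videoReader.isOpened())
    {
        const op::Matrix frame = videoReader.getFrame();
        if (frame.empty())
            break;
        // ... process `frame`, e.g., hand it to an OpenPose Wrapper ...
    }
    return 0;
}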

+

Constructor & Destructor Documentation

+ +

◆ VideoReader()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::VideoReader::VideoReader (const std::string & videoPath,
const std::string & cameraParameterPath = "",
const bool undistortImage = false,
const int numberViews = -1 
)
+
+explicit
+
+

Constructor of VideoReader. It opens the video as a wrapper of cv::VideoCapture. It includes a flag to indicate whether the video should be repeated once it is completely read.

Parameters
+ + + + +
videoPath: const std::string parameter with the full video path location.
cameraParameterPath: const std::string parameter with the folder path containing the camera parameters (only required if numberViews > 1).
numberViews: const int parameter with the number of images per iteration (>1 would represent stereo processing).
+
+
+ +
+
+ +

◆ ~VideoReader()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::VideoReader::~VideoReader ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ get()

+ +
+
+ + + + + +
+ + + + + + + + +
double op::VideoReader::get (const int capProperty)
+
+virtual
+
+

This function is a wrapper of cv::VideoCapture::get. It allows getting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
capProperty: int indicating the property to be retrieved.
+
+
Returns
double returning the property value.
+ +

Implements op::VideoCaptureReader.
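Because this is a thin forward to cv::VideoCapture::get, the standard OpenCV property identifiers should be usable as the capProperty argument. A hedged sketch, assuming 1:1 property forwarding:

// Hedged sketch: query basic properties through the cv::VideoCapture wrapper.
#include <cstdio>
#include <opencv2/videoio.hpp>
#include <openpose/producer/videoReader.hpp>

void printVideoInfo(op::VideoReader& videoReader)
{
    const double fps    = videoReader.get(cv::CAP_PROP_FPS);
    const double width  = videoReader.get(cv::CAP_PROP_FRAME_WIDTH);
    const double height = videoReader.get(cv::CAP_PROP_FRAME_HEIGHT);
    std::printf("%.0fx%.0f @ %.2f fps\n", width, height, fps);
}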

+ +
+
+ +

◆ getNextFrameName()

+ +
+
+ + + + + +
+ + + + + + + +
std::string op::VideoReader::getNextFrameName ()
+
+virtual
+
+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.
+ +

Implements op::VideoCaptureReader.

+ +
+
+ +

◆ isOpened()

+ +
+
+ + + + + +
+ + + + + + + +
bool op::VideoReader::isOpened () const
+
+inlinevirtual
+
+

This function returns whether the Producer instance is still opened and able to retrieve more frames.

Returns
bool indicating whether the Producer is opened.
+ +

Reimplemented from op::VideoCaptureReader.

+ +

Definition at line 33 of file videoReader.hpp.

+ +
+
+ +

◆ set()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::VideoReader::set (const int capProperty,
const double value 
)
+
+virtual
+
+

This function is a wrapper of cv::VideoCapture::set. It allows setting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
capProperty: int indicating the property to be modified.
value: double indicating the new value to be assigned.
+
+ +

Implements op::VideoCaptureReader.
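As with get(), the OpenCV property identifiers should apply here. For example, a hedged sketch that rewinds the video to its first frame (assuming 1:1 property forwarding):

// Hedged sketch: seek back to frame 0 via the wrapped cv::VideoCapture::set.
#include <opencv2/videoio.hpp>
#include <openpose/producer/videoReader.hpp>

void rewindVideo(op::VideoReader& videoReader)
{
    videoReader.set(cv::CAP_PROP_POS_FRAMES, 0.);
}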

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_video_reader.js b/web/html/doc/classop_1_1_video_reader.js new file mode 100644 index 000000000..481a54cbe --- /dev/null +++ b/web/html/doc/classop_1_1_video_reader.js @@ -0,0 +1,9 @@ +var classop_1_1_video_reader = +[ + [ "VideoReader", "classop_1_1_video_reader.html#a219e3901e489a293e85fe9a872e7fb78", null ], + [ "~VideoReader", "classop_1_1_video_reader.html#a26cee6225a62c4e120ae9ea2e4a9a41c", null ], + [ "get", "classop_1_1_video_reader.html#a057a7d0c498c48639b38c10ac7efc183", null ], + [ "getNextFrameName", "classop_1_1_video_reader.html#a508eed918fbe3bfe3eff4c1ebacb3463", null ], + [ "isOpened", "classop_1_1_video_reader.html#a503e70039e2cfecfe2d31771df509733", null ], + [ "set", "classop_1_1_video_reader.html#a0dd53334327642368d41ec860e64e756", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_video_reader.png b/web/html/doc/classop_1_1_video_reader.png new file mode 100644 index 000000000..52fbeb23b Binary files /dev/null and b/web/html/doc/classop_1_1_video_reader.png differ diff --git a/web/html/doc/classop_1_1_video_saver-members.html b/web/html/doc/classop_1_1_video_saver-members.html new file mode 100644 index 000000000..c5c7e5ce0 --- /dev/null +++ b/web/html/doc/classop_1_1_video_saver-members.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::VideoSaver Member List
+
+
+ +

This is the complete list of members for op::VideoSaver, including all inherited members.

isOpened() - op::VideoSaver
VideoSaver(const std::string &videoSaverPath, const int cvFourcc, const double fps, const std::string &addAudioFromThisVideo="") - op::VideoSaver
write(const Matrix &matToSave) - op::VideoSaver
write(const std::vector< Matrix > &matsToSave) - op::VideoSaver
~VideoSaver() - op::VideoSaver [virtual]
+
+ + + + diff --git a/web/html/doc/classop_1_1_video_saver.html b/web/html/doc/classop_1_1_video_saver.html new file mode 100644 index 000000000..c3b68501c --- /dev/null +++ b/web/html/doc/classop_1_1_video_saver.html @@ -0,0 +1,246 @@ + + + + + + + +OpenPose: op::VideoSaver Class Reference + + + + + + + + + + + + + +
op::VideoSaver Class Reference
+
+
+ +

#include <videoSaver.hpp>

+ + + + + + + + + + + + +

+Public Member Functions

 VideoSaver (const std::string &videoSaverPath, const int cvFourcc, const double fps, const std::string &addAudioFromThisVideo="")
 
virtual ~VideoSaver ()
 
bool isOpened ()
 
void write (const Matrix &matToSave)
 
void write (const std::vector< Matrix > &matsToSave)
 
+

Detailed Description

+
+

Definition at line 8 of file videoSaver.hpp.
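The page carries no detailed description, but the member functions listed above suggest the usage below. This is an assumption based purely on those signatures; the output path, FOURCC, fps value, and include path are placeholders.

// Hedged sketch: write a set of frames (e.g., produced by op::VideoReader)
// into an output video. All literal values are placeholders.
#include <vector>
#include <opencv2/videoio.hpp>
#include <openpose/filestream/videoSaver.hpp>

void saveClip(const std::vector<op::Matrix>& frames)
{
    const int fourcc = cv::VideoWriter::fourcc('M', 'J', 'P', 'G');
    op::VideoSaver videoSaver{"output.avi", fourcc, 30.0};
    if (videoSaver.isOpened())
        for (const auto& frame : frames)
            videoSaver.write(frame);
}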

+

Constructor & Destructor Documentation

+ +

◆ VideoSaver()

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::VideoSaver::VideoSaver (const std::string & videoSaverPath,
const int cvFourcc,
const double fps,
const std::string & addAudioFromThisVideo = "" 
)
+
+ +
+
+ +

◆ ~VideoSaver()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::VideoSaver::~VideoSaver ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ isOpened()

+ +
+
+ + + + + + + +
bool op::VideoSaver::isOpened ()
+
+ +
+
+ +

◆ write() [1/2]

+ +
+
+ + + + + + + + +
void op::VideoSaver::write (const MatrixmatToSave)
+
+ +
+
+ +

◆ write() [2/2]

+ +
+
+ + + + + + + + +
void op::VideoSaver::write (const std::vector< Matrix > & matsToSave)
+
+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_video_saver.js b/web/html/doc/classop_1_1_video_saver.js new file mode 100644 index 000000000..c10fb5fcb --- /dev/null +++ b/web/html/doc/classop_1_1_video_saver.js @@ -0,0 +1,8 @@ +var classop_1_1_video_saver = +[ + [ "VideoSaver", "classop_1_1_video_saver.html#a413aba00e90b40f6cd62144c98d7723c", null ], + [ "~VideoSaver", "classop_1_1_video_saver.html#acfb839eb14ac032055930932db966e84", null ], + [ "isOpened", "classop_1_1_video_saver.html#a0c5dadfa4f687283c370e7890ae5037f", null ], + [ "write", "classop_1_1_video_saver.html#a6f6914d16434cebc9a6c596472b212aa", null ], + [ "write", "classop_1_1_video_saver.html#a4ecf895fc5cd7508ac139a7b69fc25e7", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_coco_json_saver-members.html b/web/html/doc/classop_1_1_w_coco_json_saver-members.html new file mode 100644 index 000000000..4528fb156 --- /dev/null +++ b/web/html/doc/classop_1_1_w_coco_json_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WCocoJsonSaver< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_coco_json_saver.html b/web/html/doc/classop_1_1_w_coco_json_saver.html new file mode 100644 index 000000000..8a1209af3 --- /dev/null +++ b/web/html/doc/classop_1_1_w_coco_json_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WCocoJsonSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WCocoJsonSaver< TDatums > Class Template Reference
+
+
+ +

#include <wCocoJsonSaver.hpp>

+Inheritance diagram for op::WCocoJsonSaver< TDatums >: op::WCocoJsonSaver< TDatums > → op::WorkerConsumer< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WCocoJsonSaver (const std::shared_ptr< CocoJsonSaver > &cocoJsonSaver)
 
virtual ~WCocoJsonSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WCocoJsonSaver< TDatums >

+ + +

Definition at line 11 of file wCocoJsonSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WCocoJsonSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WCocoJsonSaver< TDatums >::WCocoJsonSaver (const std::shared_ptr< CocoJsonSaver > & cocoJsonSaver)
+
+explicit
+
+ +

Definition at line 38 of file wCocoJsonSaver.hpp.

+ +
+
+ +

◆ ~WCocoJsonSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WCocoJsonSaver< TDatums >::~WCocoJsonSaver
+
+virtual
+
+ +

Definition at line 44 of file wCocoJsonSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WCocoJsonSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wCocoJsonSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WCocoJsonSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 54 of file wCocoJsonSaver.hpp.

+ +
+
+
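WCocoJsonSaver is a thin adapter that exposes a CocoJsonSaver through the worker interface inherited from op::WorkerConsumer. A hedged sketch of that same pattern for a user-defined consumer follows; it is illustrative only, not repository code, and the class name is hypothetical.

// Hedged sketch of the WorkerConsumer pattern: initializationOnThread() runs
// once inside the worker thread; workConsumer() receives each batch of datums
// at the consumer end of the pipeline.
#include <string>
#include <openpose/headers.hpp>

template<typename TDatums>
class WMyLogger : public op::WorkerConsumer<TDatums>
{
public:
    void initializationOnThread() override {}

    void workConsumer(const TDatums& tDatums) override
    {
        // TDatums is typically std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>.
        if (tDatums != nullptr && !tDatums->empty())
            op::opLog("Processed " + std::to_string(tDatums->size()) + " datum(s).",
                      op::Priority::High);
    }
};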
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_coco_json_saver.js b/web/html/doc/classop_1_1_w_coco_json_saver.js new file mode 100644 index 000000000..8c7b62ed3 --- /dev/null +++ b/web/html/doc/classop_1_1_w_coco_json_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_coco_json_saver = +[ + [ "WCocoJsonSaver", "classop_1_1_w_coco_json_saver.html#a508c1105406b3cc55dc6bd1b299f6ed3", null ], + [ "~WCocoJsonSaver", "classop_1_1_w_coco_json_saver.html#a49ba32973e43c176c88d17aa805f1ab5", null ], + [ "initializationOnThread", "classop_1_1_w_coco_json_saver.html#a5cca095ff23c3134ab0addc9a4feabaf", null ], + [ "workConsumer", "classop_1_1_w_coco_json_saver.html#af152a61abc9ab46da651c9d87e6775f0", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_coco_json_saver.png b/web/html/doc/classop_1_1_w_coco_json_saver.png new file mode 100644 index 000000000..5af0a8b3c Binary files /dev/null and b/web/html/doc/classop_1_1_w_coco_json_saver.png differ diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_input-members.html b/web/html/doc/classop_1_1_w_cv_mat_to_op_input-members.html new file mode 100644 index 000000000..03e13792c --- /dev/null +++ b/web/html/doc/classop_1_1_w_cv_mat_to_op_input-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WCvMatToOpInput< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_input.html b/web/html/doc/classop_1_1_w_cv_mat_to_op_input.html new file mode 100644 index 000000000..2d482ac0b --- /dev/null +++ b/web/html/doc/classop_1_1_w_cv_mat_to_op_input.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WCvMatToOpInput< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WCvMatToOpInput< TDatums > Class Template Reference
+
+
+ +

#include <wCvMatToOpInput.hpp>

+Inheritance diagram for op::WCvMatToOpInput< TDatums >: op::WCvMatToOpInput< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WCvMatToOpInput (const std::shared_ptr< CvMatToOpInput > &cvMatToOpInput)
 
virtual ~WCvMatToOpInput ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WCvMatToOpInput< TDatums >

+ + +

Definition at line 11 of file wCvMatToOpInput.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WCvMatToOpInput()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WCvMatToOpInput< TDatums >::WCvMatToOpInput (const std::shared_ptr< CvMatToOpInput > & cvMatToOpInput)
+
+explicit
+
+ +

Definition at line 38 of file wCvMatToOpInput.hpp.

+ +
+
+ +

◆ ~WCvMatToOpInput()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WCvMatToOpInput< TDatums >::~WCvMatToOpInput
+
+virtual
+
+ +

Definition at line 44 of file wCvMatToOpInput.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WCvMatToOpInput< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wCvMatToOpInput.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WCvMatToOpInput< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wCvMatToOpInput.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_input.js b/web/html/doc/classop_1_1_w_cv_mat_to_op_input.js new file mode 100644 index 000000000..11c481794 --- /dev/null +++ b/web/html/doc/classop_1_1_w_cv_mat_to_op_input.js @@ -0,0 +1,7 @@ +var classop_1_1_w_cv_mat_to_op_input = +[ + [ "WCvMatToOpInput", "classop_1_1_w_cv_mat_to_op_input.html#a82c13641d071fdb5db50afdee7cfa849", null ], + [ "~WCvMatToOpInput", "classop_1_1_w_cv_mat_to_op_input.html#a8ae2eb423f1fe70f4154716b38b62719", null ], + [ "initializationOnThread", "classop_1_1_w_cv_mat_to_op_input.html#ac03534bbe3b6c3c45efb61b5d78402da", null ], + [ "work", "classop_1_1_w_cv_mat_to_op_input.html#aa7faa9e2671a85d36aad3366a7958f58", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_input.png b/web/html/doc/classop_1_1_w_cv_mat_to_op_input.png new file mode 100644 index 000000000..c1ab5cb7a Binary files /dev/null and b/web/html/doc/classop_1_1_w_cv_mat_to_op_input.png differ diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_output-members.html b/web/html/doc/classop_1_1_w_cv_mat_to_op_output-members.html new file mode 100644 index 000000000..8f8a42e70 --- /dev/null +++ b/web/html/doc/classop_1_1_w_cv_mat_to_op_output-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WCvMatToOpOutput< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_output.html b/web/html/doc/classop_1_1_w_cv_mat_to_op_output.html new file mode 100644 index 000000000..cd054babc --- /dev/null +++ b/web/html/doc/classop_1_1_w_cv_mat_to_op_output.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WCvMatToOpOutput< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WCvMatToOpOutput< TDatums > Class Template Reference
+
+
+ +

#include <wCvMatToOpOutput.hpp>

+Inheritance diagram for op::WCvMatToOpOutput< TDatums >: op::WCvMatToOpOutput< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WCvMatToOpOutput (const std::shared_ptr< CvMatToOpOutput > &cvMatToOpOutput)
 
virtual ~WCvMatToOpOutput ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WCvMatToOpOutput< TDatums >

+ + +

Definition at line 11 of file wCvMatToOpOutput.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WCvMatToOpOutput()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WCvMatToOpOutput< TDatums >::WCvMatToOpOutput (const std::shared_ptr< CvMatToOpOutput > & cvMatToOpOutput)
+
+explicit
+
+ +

Definition at line 39 of file wCvMatToOpOutput.hpp.

+ +
+
+ +

◆ ~WCvMatToOpOutput()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WCvMatToOpOutput< TDatums >::~WCvMatToOpOutput
+
+virtual
+
+ +

Definition at line 45 of file wCvMatToOpOutput.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WCvMatToOpOutput< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wCvMatToOpOutput.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WCvMatToOpOutput< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wCvMatToOpOutput.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_output.js b/web/html/doc/classop_1_1_w_cv_mat_to_op_output.js new file mode 100644 index 000000000..f7e9f2459 --- /dev/null +++ b/web/html/doc/classop_1_1_w_cv_mat_to_op_output.js @@ -0,0 +1,7 @@ +var classop_1_1_w_cv_mat_to_op_output = +[ + [ "WCvMatToOpOutput", "classop_1_1_w_cv_mat_to_op_output.html#a04cd3d8e91d731a36e3c7830631e47af", null ], + [ "~WCvMatToOpOutput", "classop_1_1_w_cv_mat_to_op_output.html#add97e472ab242fe72221cf0591801f81", null ], + [ "initializationOnThread", "classop_1_1_w_cv_mat_to_op_output.html#ad4c957d391e371b7ee56cdb5be6b1452", null ], + [ "work", "classop_1_1_w_cv_mat_to_op_output.html#a0bf2e43d2586c83fdd5cb0b1b54aefca", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_cv_mat_to_op_output.png b/web/html/doc/classop_1_1_w_cv_mat_to_op_output.png new file mode 100644 index 000000000..55c5a32bf Binary files /dev/null and b/web/html/doc/classop_1_1_w_cv_mat_to_op_output.png differ diff --git a/web/html/doc/classop_1_1_w_datum_producer-members.html b/web/html/doc/classop_1_1_w_datum_producer-members.html new file mode 100644 index 000000000..9a1298ef8 --- /dev/null +++ b/web/html/doc/classop_1_1_w_datum_producer-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WDatumProducer< TDatum > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_datum_producer.html b/web/html/doc/classop_1_1_w_datum_producer.html new file mode 100644 index 000000000..de36fb7f6 --- /dev/null +++ b/web/html/doc/classop_1_1_w_datum_producer.html @@ -0,0 +1,274 @@ + + + + + + + +OpenPose: op::WDatumProducer< TDatum > Class Template Reference + + + + + + + + + + + + + +
op::WDatumProducer< TDatum > Class Template Reference
+
+
+ +

#include <wDatumProducer.hpp>

+Inheritance diagram for op::WDatumProducer< TDatum >: op::WDatumProducer< TDatum > → op::WorkerProducer< std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > > → op::Worker< TDatums >

+Public Member Functions

 WDatumProducer (const std::shared_ptr< DatumProducer< TDatum >> &datumProducer)
 
virtual ~WDatumProducer ()
 
void initializationOnThread ()
 
std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > workProducer ()
 
- Public Member Functions inherited from op::WorkerProducer< std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > >
virtual ~WorkerProducer ()
 
void work (std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatum>
+class op::WDatumProducer< TDatum >

+ + +

Definition at line 13 of file wDatumProducer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WDatumProducer()

+ +
+
+
+template<typename TDatum >
+ + + + + +
+ + + + + + + + +
op::WDatumProducer< TDatum >::WDatumProducer (const std::shared_ptr< DatumProducer< TDatum >> & datumProducer)
+
+explicit
+
+ +

Definition at line 41 of file wDatumProducer.hpp.

+ +
+
+ +

◆ ~WDatumProducer()

+ +
+
+
+template<typename TDatum >
+ + + + + +
+ + + + +
op::WDatumProducer< TDatum >::~WDatumProducer
+
+virtual
+
+ +

Definition at line 48 of file wDatumProducer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatum >
+ + + + + +
+ + + + +
void op::WDatumProducer< TDatum >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wDatumProducer.hpp.

+ +
+
+ +

◆ workProducer()

+ +
+
+
+template<typename TDatum >
+ + + + + +
+ + + + +
std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > op::WDatumProducer< TDatum >::workProducer
+
+virtual
+
+
+
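workProducer() is the producer-side counterpart of workConsumer(): the threading framework calls it repeatedly to obtain the next batch of datums. A hedged sketch of the op::WorkerProducer pattern for a custom source follows; it is illustrative only, not repository code, and the class name is hypothetical.

// Hedged sketch of the WorkerProducer pattern used by WDatumProducer:
// workProducer() returns the next batch of datums; call stop() (inherited
// from op::Worker) once the source is exhausted.
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

class WMySource : public op::WorkerProducer<TDatumsSP>
{
public:
    void initializationOnThread() override {}

    TDatumsSP workProducer() override
    {
        auto datums = std::make_shared<std::vector<std::shared_ptr<op::Datum>>>();
        datums->emplace_back(std::make_shared<op::Datum>());
        // Fill datums->back()->cvInputData with an op::Matrix image here.
        return datums;
    }
};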
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_datum_producer.js b/web/html/doc/classop_1_1_w_datum_producer.js new file mode 100644 index 000000000..3b7464eed --- /dev/null +++ b/web/html/doc/classop_1_1_w_datum_producer.js @@ -0,0 +1,7 @@ +var classop_1_1_w_datum_producer = +[ + [ "WDatumProducer", "classop_1_1_w_datum_producer.html#a728efd416b307b5ffa25c44b0fbf7760", null ], + [ "~WDatumProducer", "classop_1_1_w_datum_producer.html#a858e64351ef6d3942bc7d53678badcc7", null ], + [ "initializationOnThread", "classop_1_1_w_datum_producer.html#a4381eaec4625824ebaa2d23f0cf1be48", null ], + [ "workProducer", "classop_1_1_w_datum_producer.html#aac2674f961492fa299da18d716a617b4", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_datum_producer.png b/web/html/doc/classop_1_1_w_datum_producer.png new file mode 100644 index 000000000..f18fd66a1 Binary files /dev/null and b/web/html/doc/classop_1_1_w_datum_producer.png differ diff --git a/web/html/doc/classop_1_1_w_face_detector-members.html b/web/html/doc/classop_1_1_w_face_detector-members.html new file mode 100644 index 000000000..c2ff0996c --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_detector-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WFaceDetector< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_detector.html b/web/html/doc/classop_1_1_w_face_detector.html new file mode 100644 index 000000000..0188dbc8f --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_detector.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WFaceDetector< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WFaceDetector< TDatums > Class Template Reference
+
+
+ +

#include <wFaceDetector.hpp>

+Inheritance diagram for op::WFaceDetector< TDatums >: op::WFaceDetector< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WFaceDetector (const std::shared_ptr< FaceDetector > &faceDetector)
 
virtual ~WFaceDetector ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WFaceDetector< TDatums >

+ + +

Definition at line 11 of file wFaceDetector.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WFaceDetector()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WFaceDetector< TDatums >::WFaceDetector (const std::shared_ptr< FaceDetector > & faceDetector)
+
+explicit
+
+ +

Definition at line 38 of file wFaceDetector.hpp.

+ +
+
+ +

◆ ~WFaceDetector()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WFaceDetector< TDatums >::~WFaceDetector
+
+virtual
+
+ +

Definition at line 44 of file wFaceDetector.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WFaceDetector< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wFaceDetector.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WFaceDetector< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wFaceDetector.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_detector.js b/web/html/doc/classop_1_1_w_face_detector.js new file mode 100644 index 000000000..ab2ed7d0a --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_detector.js @@ -0,0 +1,7 @@ +var classop_1_1_w_face_detector = +[ + [ "WFaceDetector", "classop_1_1_w_face_detector.html#a77355426bc59b212a8eb1730ff6289f3", null ], + [ "~WFaceDetector", "classop_1_1_w_face_detector.html#ac0aa45b289e6800bb76bfbfc8a216035", null ], + [ "initializationOnThread", "classop_1_1_w_face_detector.html#afaca53a669f0cd43103f7317aded75d3", null ], + [ "work", "classop_1_1_w_face_detector.html#a721ced99378516c04cb3cff296cc274a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_face_detector.png b/web/html/doc/classop_1_1_w_face_detector.png new file mode 100644 index 000000000..7ae5007a6 Binary files /dev/null and b/web/html/doc/classop_1_1_w_face_detector.png differ diff --git a/web/html/doc/classop_1_1_w_face_detector_open_c_v-members.html b/web/html/doc/classop_1_1_w_face_detector_open_c_v-members.html new file mode 100644 index 000000000..5b4b8473d --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_detector_open_c_v-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WFaceDetectorOpenCV< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_detector_open_c_v.html b/web/html/doc/classop_1_1_w_face_detector_open_c_v.html new file mode 100644 index 000000000..49a13bc6e --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_detector_open_c_v.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WFaceDetectorOpenCV< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WFaceDetectorOpenCV< TDatums > Class Template Reference
+
+
+ +

#include <wFaceDetectorOpenCV.hpp>

+Inheritance diagram for op::WFaceDetectorOpenCV< TDatums >: op::WFaceDetectorOpenCV< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WFaceDetectorOpenCV (const std::shared_ptr< FaceDetectorOpenCV > &faceDetectorOpenCV)
 
virtual ~WFaceDetectorOpenCV ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WFaceDetectorOpenCV< TDatums >

+ + +

Definition at line 11 of file wFaceDetectorOpenCV.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WFaceDetectorOpenCV()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WFaceDetectorOpenCV< TDatums >::WFaceDetectorOpenCV (const std::shared_ptr< FaceDetectorOpenCV > & faceDetectorOpenCV)
+
+explicit
+
+ +

Definition at line 38 of file wFaceDetectorOpenCV.hpp.

+ +
+
+ +

◆ ~WFaceDetectorOpenCV()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WFaceDetectorOpenCV< TDatums >::~WFaceDetectorOpenCV
+
+virtual
+
+ +

Definition at line 44 of file wFaceDetectorOpenCV.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WFaceDetectorOpenCV< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wFaceDetectorOpenCV.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WFaceDetectorOpenCV< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wFaceDetectorOpenCV.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_detector_open_c_v.js b/web/html/doc/classop_1_1_w_face_detector_open_c_v.js new file mode 100644 index 000000000..fb9a09fa0 --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_detector_open_c_v.js @@ -0,0 +1,7 @@ +var classop_1_1_w_face_detector_open_c_v = +[ + [ "WFaceDetectorOpenCV", "classop_1_1_w_face_detector_open_c_v.html#a8c765201f0cc9440f8d172c8d8c76a62", null ], + [ "~WFaceDetectorOpenCV", "classop_1_1_w_face_detector_open_c_v.html#a2942e145f9c4c720aad7c810a3d0f3f3", null ], + [ "initializationOnThread", "classop_1_1_w_face_detector_open_c_v.html#ad7dce5824ba32bc07d2474c20b23e62d", null ], + [ "work", "classop_1_1_w_face_detector_open_c_v.html#a4d3a4a29bcb7b8c141ae1917634ca4c9", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_face_detector_open_c_v.png b/web/html/doc/classop_1_1_w_face_detector_open_c_v.png new file mode 100644 index 000000000..e0914b4ee Binary files /dev/null and b/web/html/doc/classop_1_1_w_face_detector_open_c_v.png differ diff --git a/web/html/doc/classop_1_1_w_face_extractor_net-members.html b/web/html/doc/classop_1_1_w_face_extractor_net-members.html new file mode 100644 index 000000000..306238c99 --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_extractor_net-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WFaceExtractorNet< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_extractor_net.html b/web/html/doc/classop_1_1_w_face_extractor_net.html new file mode 100644 index 000000000..07ec6ccc3 --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_extractor_net.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WFaceExtractorNet< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WFaceExtractorNet< TDatums > Class Template Reference
+
+
+ +

#include <wFaceExtractorNet.hpp>

+Inheritance diagram for op::WFaceExtractorNet< TDatums >: op::WFaceExtractorNet< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WFaceExtractorNet (const std::shared_ptr< FaceExtractorNet > &faceExtractorNet)
 
virtual ~WFaceExtractorNet ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WFaceExtractorNet< TDatums >

+ + +

Definition at line 11 of file wFaceExtractorNet.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WFaceExtractorNet()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WFaceExtractorNet< TDatums >::WFaceExtractorNet (const std::shared_ptr< FaceExtractorNet > & faceExtractorNet)
+
+explicit
+
+ +

Definition at line 38 of file wFaceExtractorNet.hpp.

+ +
+
+ +

◆ ~WFaceExtractorNet()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WFaceExtractorNet< TDatums >::~WFaceExtractorNet
+
+virtual
+
+ +

Definition at line 44 of file wFaceExtractorNet.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WFaceExtractorNet< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wFaceExtractorNet.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WFaceExtractorNet< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wFaceExtractorNet.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_extractor_net.js b/web/html/doc/classop_1_1_w_face_extractor_net.js new file mode 100644 index 000000000..99da190e1 --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_extractor_net.js @@ -0,0 +1,7 @@ +var classop_1_1_w_face_extractor_net = +[ + [ "WFaceExtractorNet", "classop_1_1_w_face_extractor_net.html#a31bd32d4b9922ea456c97343c94501ac", null ], + [ "~WFaceExtractorNet", "classop_1_1_w_face_extractor_net.html#ae781bd1a7d450983a9aa168860d4e96d", null ], + [ "initializationOnThread", "classop_1_1_w_face_extractor_net.html#ac04b0bec061a6cbc6a6afacb3f8d15c7", null ], + [ "work", "classop_1_1_w_face_extractor_net.html#aa47940fb2ed940a53c7a305ce45817a3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_face_extractor_net.png b/web/html/doc/classop_1_1_w_face_extractor_net.png new file mode 100644 index 000000000..8117c0679 Binary files /dev/null and b/web/html/doc/classop_1_1_w_face_extractor_net.png differ diff --git a/web/html/doc/classop_1_1_w_face_renderer-members.html b/web/html/doc/classop_1_1_w_face_renderer-members.html new file mode 100644 index 000000000..37ad4b090 --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_renderer-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WFaceRenderer< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_renderer.html b/web/html/doc/classop_1_1_w_face_renderer.html new file mode 100644 index 000000000..76f37b3be --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_renderer.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WFaceRenderer< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WFaceRenderer< TDatums > Class Template Reference
+
+
+ +

#include <wFaceRenderer.hpp>

+Inheritance diagram for op::WFaceRenderer< TDatums >: op::WFaceRenderer< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WFaceRenderer (const std::shared_ptr< FaceRenderer > &faceRenderer)
 
virtual ~WFaceRenderer ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WFaceRenderer< TDatums >

+ + +

Definition at line 11 of file wFaceRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WFaceRenderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WFaceRenderer< TDatums >::WFaceRenderer (const std::shared_ptr< FaceRenderer > & faceRenderer)
+
+explicit
+
+ +

Definition at line 38 of file wFaceRenderer.hpp.

+ +
+
+ +

◆ ~WFaceRenderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WFaceRenderer< TDatums >::~WFaceRenderer
+
+virtual
+
+ +

Definition at line 44 of file wFaceRenderer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WFaceRenderer< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wFaceRenderer.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WFaceRenderer< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wFaceRenderer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_renderer.js b/web/html/doc/classop_1_1_w_face_renderer.js new file mode 100644 index 000000000..5c2f2f4c9 --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_renderer.js @@ -0,0 +1,7 @@ +var classop_1_1_w_face_renderer = +[ + [ "WFaceRenderer", "classop_1_1_w_face_renderer.html#a51e2a661867adee200f5c4029a585e5d", null ], + [ "~WFaceRenderer", "classop_1_1_w_face_renderer.html#a2f06bfea6521c7528fc7b07b9b067351", null ], + [ "initializationOnThread", "classop_1_1_w_face_renderer.html#a7b72c70dc02c9209d84992caad6ad7d0", null ], + [ "work", "classop_1_1_w_face_renderer.html#aa52166ea2d5e0f201c94d5c4fe74216e", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_face_renderer.png b/web/html/doc/classop_1_1_w_face_renderer.png new file mode 100644 index 000000000..25a0906f7 Binary files /dev/null and b/web/html/doc/classop_1_1_w_face_renderer.png differ diff --git a/web/html/doc/classop_1_1_w_face_saver-members.html b/web/html/doc/classop_1_1_w_face_saver-members.html new file mode 100644 index 000000000..37b1ac48c --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WFaceSaver< TDatums > Member List
+
+
+ +

This is the complete list of members for op::WFaceSaver< TDatums >, including all inherited members.

checkAndWork(TDatums &tDatums) - op::Worker< TDatums >
initializationOnThread() - op::WFaceSaver< TDatums > [virtual]
initializationOnThreadNoException() - op::Worker< TDatums >
isRunning() const - op::Worker< TDatums > [inline]
stop() - op::Worker< TDatums > [inline]
tryStop() - op::Worker< TDatums > [inline, virtual]
WFaceSaver(const std::shared_ptr< KeypointSaver > &keypointSaver) - op::WFaceSaver< TDatums > [explicit]
work(TDatums &tDatums) - op::WorkerConsumer< TDatums > [virtual]
workConsumer(const TDatums &tDatums) - op::WFaceSaver< TDatums > [virtual]
Worker() - op::Worker< TDatums >
~WFaceSaver() - op::WFaceSaver< TDatums > [virtual]
~Worker() - op::Worker< TDatums > [virtual]
~WorkerConsumer() - op::WorkerConsumer< TDatums > [virtual]
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_saver.html b/web/html/doc/classop_1_1_w_face_saver.html new file mode 100644 index 000000000..9f2b49597 --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WFaceSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WFaceSaver< TDatums > Class Template Reference
+
+
+ +

#include <wFaceSaver.hpp>

+Inheritance diagram for op::WFaceSaver< TDatums >: op::WFaceSaver< TDatums > → op::WorkerConsumer< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WFaceSaver (const std::shared_ptr< KeypointSaver > &keypointSaver)
 
virtual ~WFaceSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WFaceSaver< TDatums >

+ + +

Definition at line 12 of file wFaceSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WFaceSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WFaceSaver< TDatums >::WFaceSaver (const std::shared_ptr< KeypointSaver > & keypointSaver)
+
+explicit
+
+ +

Definition at line 39 of file wFaceSaver.hpp.

+ +
+
+ +

◆ ~WFaceSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WFaceSaver< TDatums >::~WFaceSaver
+
+virtual
+
+ +

Definition at line 45 of file wFaceSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WFaceSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wFaceSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WFaceSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 55 of file wFaceSaver.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_face_saver.js b/web/html/doc/classop_1_1_w_face_saver.js new file mode 100644 index 000000000..96e71a3ff --- /dev/null +++ b/web/html/doc/classop_1_1_w_face_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_face_saver = +[ + [ "WFaceSaver", "classop_1_1_w_face_saver.html#a5dc60ede4b88594d59ece4ce3e4683d6", null ], + [ "~WFaceSaver", "classop_1_1_w_face_saver.html#ae27f54e5aead73b6eb604d0a0a06e18f", null ], + [ "initializationOnThread", "classop_1_1_w_face_saver.html#ae8401789881462eb8438c65e9d2d3fb2", null ], + [ "workConsumer", "classop_1_1_w_face_saver.html#a026bfad8cd9e0d1289a1db473cef34a0", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_face_saver.png b/web/html/doc/classop_1_1_w_face_saver.png new file mode 100644 index 000000000..a0a1cf9c3 Binary files /dev/null and b/web/html/doc/classop_1_1_w_face_saver.png differ diff --git a/web/html/doc/classop_1_1_w_fps_max-members.html b/web/html/doc/classop_1_1_w_fps_max-members.html new file mode 100644 index 000000000..48b49d32d --- /dev/null +++ b/web/html/doc/classop_1_1_w_fps_max-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WFpsMax< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_fps_max.html b/web/html/doc/classop_1_1_w_fps_max.html new file mode 100644 index 000000000..2cf695246 --- /dev/null +++ b/web/html/doc/classop_1_1_w_fps_max.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WFpsMax< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WFpsMax< TDatums > Class Template Reference
+
+
+ +

#include <wFpsMax.hpp>

+Inheritance diagram for op::WFpsMax< TDatums >: op::WFpsMax< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WFpsMax (const double fpsMax)
 
virtual ~WFpsMax ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WFpsMax< TDatums >

+ + +

Definition at line 12 of file wFpsMax.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WFpsMax()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WFpsMax< TDatums >::WFpsMax (const double fpsMax)
+
+explicit
+
+ +

Definition at line 38 of file wFpsMax.hpp.

+ +
+
+ +

◆ ~WFpsMax()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WFpsMax< TDatums >::~WFpsMax
+
+virtual
+
+ +

Definition at line 44 of file wFpsMax.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WFpsMax< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wFpsMax.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WFpsMax< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wFpsMax.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_fps_max.js b/web/html/doc/classop_1_1_w_fps_max.js new file mode 100644 index 000000000..0947b0c19 --- /dev/null +++ b/web/html/doc/classop_1_1_w_fps_max.js @@ -0,0 +1,7 @@ +var classop_1_1_w_fps_max = +[ + [ "WFpsMax", "classop_1_1_w_fps_max.html#afc487c8404a9d4794bcccdd43f0368f6", null ], + [ "~WFpsMax", "classop_1_1_w_fps_max.html#af48214bbb4ed5c84efe1adf845aa9318", null ], + [ "initializationOnThread", "classop_1_1_w_fps_max.html#af8c5f74f0271d227b2c70b4415366332", null ], + [ "work", "classop_1_1_w_fps_max.html#a8b9f49fb22b18dbee786922af15ba939", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_fps_max.png b/web/html/doc/classop_1_1_w_fps_max.png new file mode 100644 index 000000000..ba7d3e5c6 Binary files /dev/null and b/web/html/doc/classop_1_1_w_fps_max.png differ diff --git a/web/html/doc/classop_1_1_w_gui-members.html b/web/html/doc/classop_1_1_w_gui-members.html new file mode 100644 index 000000000..a18c14a1a --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WGui< TDatums > Member List
+
+
+ +

This is the complete list of members for op::WGui< TDatums >, including all inherited members.

checkAndWork(TDatums &tDatums) - op::Worker< TDatums >
initializationOnThread() - op::WGui< TDatums > [virtual]
initializationOnThreadNoException() - op::Worker< TDatums >
isRunning() const - op::Worker< TDatums > [inline]
stop() - op::Worker< TDatums > [inline]
tryStop() - op::Worker< TDatums > [inline, virtual]
WGui(const std::shared_ptr< Gui > &gui) - op::WGui< TDatums > [explicit]
work(TDatums &tDatums) - op::WorkerConsumer< TDatums > [virtual]
workConsumer(const TDatums &tDatums) - op::WGui< TDatums > [virtual]
Worker() - op::Worker< TDatums >
~WGui() - op::WGui< TDatums > [virtual]
~Worker() - op::Worker< TDatums > [virtual]
~WorkerConsumer() - op::WorkerConsumer< TDatums > [virtual]
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_gui.html b/web/html/doc/classop_1_1_w_gui.html new file mode 100644 index 000000000..2a572e0eb --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WGui< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WGui< TDatums > Class Template Reference
+
+
+ +

#include <wGui.hpp>

+Inheritance diagram for op::WGui< TDatums >: op::WGui< TDatums > → op::WorkerConsumer< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WGui (const std::shared_ptr< Gui > &gui)
 
virtual ~WGui ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WGui< TDatums >

+ + +

Definition at line 11 of file wGui.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WGui()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WGui< TDatums >::WGui (const std::shared_ptr< Gui > & gui)
+
+explicit
+
+ +

Definition at line 38 of file wGui.hpp.

+ +
+
+ +

◆ ~WGui()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WGui< TDatums >::~WGui
+
+virtual
+
+ +

Definition at line 44 of file wGui.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WGui< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wGui.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WGui< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 62 of file wGui.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_gui.js b/web/html/doc/classop_1_1_w_gui.js new file mode 100644 index 000000000..bfb96681a --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui.js @@ -0,0 +1,7 @@ +var classop_1_1_w_gui = +[ + [ "WGui", "classop_1_1_w_gui.html#a4e7d3f5b3ddaf02109738b4348250611", null ], + [ "~WGui", "classop_1_1_w_gui.html#a3c55ca3290f64181201890fae10e4002", null ], + [ "initializationOnThread", "classop_1_1_w_gui.html#a4e4db210b87f78cc1238dd3ab2bedaa4", null ], + [ "workConsumer", "classop_1_1_w_gui.html#a664e1f76211510e38b8d5f5bed37ffcb", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_gui.png b/web/html/doc/classop_1_1_w_gui.png new file mode 100644 index 000000000..221f05a3b Binary files /dev/null and b/web/html/doc/classop_1_1_w_gui.png differ diff --git a/web/html/doc/classop_1_1_w_gui3_d-members.html b/web/html/doc/classop_1_1_w_gui3_d-members.html new file mode 100644 index 000000000..42f7b618b --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui3_d-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WGui3D< TDatums > Member List
+
+
+ +

This is the complete list of members for op::WGui3D< TDatums >, including all inherited members.

checkAndWork(TDatums &tDatums) - op::Worker< TDatums >
initializationOnThread() - op::WGui3D< TDatums > [virtual]
initializationOnThreadNoException() - op::Worker< TDatums >
isRunning() const - op::Worker< TDatums > [inline]
stop() - op::Worker< TDatums > [inline]
tryStop() - op::Worker< TDatums > [inline, virtual]
WGui3D(const std::shared_ptr< Gui3D > &gui3D) - op::WGui3D< TDatums > [explicit]
work(TDatums &tDatums) - op::WorkerConsumer< TDatums > [virtual]
workConsumer(const TDatums &tDatums) - op::WGui3D< TDatums > [virtual]
Worker() - op::Worker< TDatums >
~WGui3D() - op::WGui3D< TDatums > [virtual]
~Worker() - op::Worker< TDatums > [virtual]
~WorkerConsumer() - op::WorkerConsumer< TDatums > [virtual]
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_gui3_d.html b/web/html/doc/classop_1_1_w_gui3_d.html new file mode 100644 index 000000000..eeea87a38 --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui3_d.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WGui3D< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WGui3D< TDatums > Class Template Reference
+
+
+ +

#include <wGui3D.hpp>

+Inheritance diagram for op::WGui3D< TDatums >: op::WGui3D< TDatums > → op::WorkerConsumer< TDatums > → op::Worker< TDatums >

+Public Member Functions

 WGui3D (const std::shared_ptr< Gui3D > &gui3D)
 
virtual ~WGui3D ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WGui3D< TDatums >

+ + +

Definition at line 12 of file wGui3D.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WGui3D()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WGui3D< TDatums >::WGui3D (const std::shared_ptr< Gui3D > & gui3D)
+
+explicit
+
+ +

Definition at line 39 of file wGui3D.hpp.

+ +
+
+ +

◆ ~WGui3D()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WGui3D< TDatums >::~WGui3D
+
+virtual
+
+ +

Definition at line 45 of file wGui3D.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WGui3D< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wGui3D.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WGui3D< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 63 of file wGui3D.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
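Editorial note (not part of the generated documentation): this page, like the other W*-prefixed pages in this commit, documents the consumer-side worker interface. A WorkerConsumer<TDatums> subclass only has to provide initializationOnThread() and workConsumer(); work(), checkAndWork(), isRunning(), stop() and the rest come from the bases listed above. The sketch below is a hypothetical consumer written against that documented interface; the class name, the TDatums alias, and the umbrella include <openpose/headers.hpp> are assumptions for illustration only.

    // Hypothetical example, not code from this repository.
    // Assumes the OpenPose umbrella header and the TDatums instantiation
    // commonly used in the OpenPose examples.
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    // A consumer worker mirroring the interface documented for op::WGui3D:
    // it only overrides initializationOnThread() and workConsumer(); the
    // thread-control methods are inherited from op::WorkerConsumer/op::Worker.
    class WFrameCounter : public op::WorkerConsumer<TDatums>
    {
    public:
        void initializationOnThread() override
        {
            mCounter = 0ull; // per-thread setup, analogous to WGui3D::initializationOnThread()
        }

        void workConsumer(const TDatums& tDatums) override
        {
            // Consumers only read the datums; they do not modify the pipeline data.
            if (tDatums != nullptr && !tDatums->empty())
                ++mCounter;
        }

    private:
        unsigned long long mCounter = 0ull;
    };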
+ + + + diff --git a/web/html/doc/classop_1_1_w_gui3_d.js b/web/html/doc/classop_1_1_w_gui3_d.js new file mode 100644 index 000000000..30a98c29d --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui3_d.js @@ -0,0 +1,7 @@ +var classop_1_1_w_gui3_d = +[ + [ "WGui3D", "classop_1_1_w_gui3_d.html#ab61a31574460ff87efa99ed7362474ed", null ], + [ "~WGui3D", "classop_1_1_w_gui3_d.html#a62b93d2704634170339827ee1f93fa97", null ], + [ "initializationOnThread", "classop_1_1_w_gui3_d.html#a7da4f85892e0d7d9e105c6d471a706a3", null ], + [ "workConsumer", "classop_1_1_w_gui3_d.html#afe019cff8fd5ed2f59f59d886de7473a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_gui3_d.png b/web/html/doc/classop_1_1_w_gui3_d.png new file mode 100644 index 000000000..7cee03d22 Binary files /dev/null and b/web/html/doc/classop_1_1_w_gui3_d.png differ diff --git a/web/html/doc/classop_1_1_w_gui_info_adder-members.html b/web/html/doc/classop_1_1_w_gui_info_adder-members.html new file mode 100644 index 000000000..bb52f1cca --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui_info_adder-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WGuiInfoAdder< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_gui_info_adder.html b/web/html/doc/classop_1_1_w_gui_info_adder.html new file mode 100644 index 000000000..5aa98647c --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui_info_adder.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WGuiInfoAdder< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WGuiInfoAdder< TDatums > Class Template Reference
+
+
+ +

#include <wGuiInfoAdder.hpp>

+
+Inheritance diagram for op::WGuiInfoAdder< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WGuiInfoAdder (const std::shared_ptr< GuiInfoAdder > &guiInfoAdder)
 
virtual ~WGuiInfoAdder ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WGuiInfoAdder< TDatums >

+ + +

Definition at line 11 of file wGuiInfoAdder.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WGuiInfoAdder()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WGuiInfoAdder< TDatums >::WGuiInfoAdder (const std::shared_ptr< GuiInfoAdder > & guiInfoAdder)
+
+explicit
+
+ +

Definition at line 38 of file wGuiInfoAdder.hpp.

+ +
+
+ +

◆ ~WGuiInfoAdder()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WGuiInfoAdder< TDatums >::~WGuiInfoAdder
+
+virtual
+
+ +

Definition at line 44 of file wGuiInfoAdder.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WGuiInfoAdder< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wGuiInfoAdder.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WGuiInfoAdder< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wGuiInfoAdder.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_gui_info_adder.js b/web/html/doc/classop_1_1_w_gui_info_adder.js new file mode 100644 index 000000000..b7fabd527 --- /dev/null +++ b/web/html/doc/classop_1_1_w_gui_info_adder.js @@ -0,0 +1,7 @@ +var classop_1_1_w_gui_info_adder = +[ + [ "WGuiInfoAdder", "classop_1_1_w_gui_info_adder.html#a0b2c539b72ef09106ab0306dc88c5ac5", null ], + [ "~WGuiInfoAdder", "classop_1_1_w_gui_info_adder.html#ab369f542339af87ff652fc6e8e5408dd", null ], + [ "initializationOnThread", "classop_1_1_w_gui_info_adder.html#ae620275d6570fd5c74f33728cd340217", null ], + [ "work", "classop_1_1_w_gui_info_adder.html#ae90a68c6ef7b4f45595a020efd232612", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_gui_info_adder.png b/web/html/doc/classop_1_1_w_gui_info_adder.png new file mode 100644 index 000000000..2545731aa Binary files /dev/null and b/web/html/doc/classop_1_1_w_gui_info_adder.png differ diff --git a/web/html/doc/classop_1_1_w_hand_detector-members.html b/web/html/doc/classop_1_1_w_hand_detector-members.html new file mode 100644 index 000000000..822dbee97 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHandDetector< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector.html b/web/html/doc/classop_1_1_w_hand_detector.html new file mode 100644 index 000000000..1fd2a8699 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WHandDetector< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHandDetector< TDatums > Class Template Reference
+
+
+ +

#include <wHandDetector.hpp>

+
+Inheritance diagram for op::WHandDetector< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHandDetector (const std::shared_ptr< HandDetector > &handDetector)
 
virtual ~WHandDetector ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHandDetector< TDatums >

+ + +

Definition at line 11 of file wHandDetector.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHandDetector()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHandDetector< TDatums >::WHandDetector (const std::shared_ptr< HandDetector > & handDetector)
+
+explicit
+
+ +

Definition at line 38 of file wHandDetector.hpp.

+ +
+
+ +

◆ ~WHandDetector()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHandDetector< TDatums >::~WHandDetector
+
+virtual
+
+ +

Definition at line 44 of file wHandDetector.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHandDetector< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wHandDetector.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHandDetector< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wHandDetector.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
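Editorial note (not part of the generated documentation): the page above shows the pattern shared by all of the processing workers in this commit: the worker is a thin op::Worker<TDatums> adapter that holds a std::shared_ptr to the actual algorithm object (here HandDetector) and forwards each datum batch to it from work(). The sketch below follows that pattern with an invented SmoothingFilter class; neither the class nor the TDatums alias comes from the repository.

    // Hypothetical adapter worker illustrating the pattern documented above
    // (explicit constructor taking a shared_ptr, work() forwarding to it).
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    // Invented algorithm class standing in for op::HandDetector and friends.
    class SmoothingFilter
    {
    public:
        void smooth(op::Datum& datum) { (void)datum; /* e.g., temporally filter keypoints */ }
    };

    class WSmoothingFilter : public op::Worker<TDatums>
    {
    public:
        explicit WSmoothingFilter(const std::shared_ptr<SmoothingFilter>& smoothingFilter) :
            spSmoothingFilter{smoothingFilter}
        {
        }

        void initializationOnThread() override {}

        void work(TDatums& tDatums) override
        {
            // Mutating workers receive the batch by non-const reference and edit it in place.
            if (tDatums != nullptr)
                for (auto& datumPtr : *tDatums)
                    spSmoothingFilter->smooth(*datumPtr);
        }

    private:
        const std::shared_ptr<SmoothingFilter> spSmoothingFilter;
    };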
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector.js b/web/html/doc/classop_1_1_w_hand_detector.js new file mode 100644 index 000000000..ebc13f87b --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector.js @@ -0,0 +1,7 @@ +var classop_1_1_w_hand_detector = +[ + [ "WHandDetector", "classop_1_1_w_hand_detector.html#ac44b474c7d8bd4876e32ceb9c9a322fe", null ], + [ "~WHandDetector", "classop_1_1_w_hand_detector.html#a7a740a7f9275b7016013728dbed001d0", null ], + [ "initializationOnThread", "classop_1_1_w_hand_detector.html#a5c29c944205ee0727f76c282ef55ae52", null ], + [ "work", "classop_1_1_w_hand_detector.html#aa82ef40fad1d343b5856b41ec4dbcd5c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_hand_detector.png b/web/html/doc/classop_1_1_w_hand_detector.png new file mode 100644 index 000000000..f2142c6c4 Binary files /dev/null and b/web/html/doc/classop_1_1_w_hand_detector.png differ diff --git a/web/html/doc/classop_1_1_w_hand_detector_from_txt-members.html b/web/html/doc/classop_1_1_w_hand_detector_from_txt-members.html new file mode 100644 index 000000000..e6a87a6ed --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_from_txt-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHandDetectorFromTxt< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector_from_txt.html b/web/html/doc/classop_1_1_w_hand_detector_from_txt.html new file mode 100644 index 000000000..5b2603bed --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_from_txt.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WHandDetectorFromTxt< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHandDetectorFromTxt< TDatums > Class Template Reference
+
+
+ +

#include <wHandDetectorFromTxt.hpp>

+
+Inheritance diagram for op::WHandDetectorFromTxt< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHandDetectorFromTxt (const std::shared_ptr< HandDetectorFromTxt > &handDetectorFromTxt)
 
virtual ~WHandDetectorFromTxt ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHandDetectorFromTxt< TDatums >

+ + +

Definition at line 11 of file wHandDetectorFromTxt.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHandDetectorFromTxt()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHandDetectorFromTxt< TDatums >::WHandDetectorFromTxt (const std::shared_ptr< HandDetectorFromTxt > & handDetectorFromTxt)
+
+explicit
+
+ +

Definition at line 38 of file wHandDetectorFromTxt.hpp.

+ +
+
+ +

◆ ~WHandDetectorFromTxt()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHandDetectorFromTxt< TDatums >::~WHandDetectorFromTxt
+
+virtual
+
+ +

Definition at line 44 of file wHandDetectorFromTxt.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHandDetectorFromTxt< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wHandDetectorFromTxt.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHandDetectorFromTxt< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wHandDetectorFromTxt.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector_from_txt.js b/web/html/doc/classop_1_1_w_hand_detector_from_txt.js new file mode 100644 index 000000000..af473b787 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_from_txt.js @@ -0,0 +1,7 @@ +var classop_1_1_w_hand_detector_from_txt = +[ + [ "WHandDetectorFromTxt", "classop_1_1_w_hand_detector_from_txt.html#a01a5f73b0a8a1b8998937e7ba3d747a3", null ], + [ "~WHandDetectorFromTxt", "classop_1_1_w_hand_detector_from_txt.html#ae51bcc36e790b298d3cd0c231d4b3640", null ], + [ "initializationOnThread", "classop_1_1_w_hand_detector_from_txt.html#acd7d37555c09a58dc660811724930276", null ], + [ "work", "classop_1_1_w_hand_detector_from_txt.html#a51ebff94734350463fcf507a84eeefdc", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_hand_detector_from_txt.png b/web/html/doc/classop_1_1_w_hand_detector_from_txt.png new file mode 100644 index 000000000..fc1293671 Binary files /dev/null and b/web/html/doc/classop_1_1_w_hand_detector_from_txt.png differ diff --git a/web/html/doc/classop_1_1_w_hand_detector_tracking-members.html b/web/html/doc/classop_1_1_w_hand_detector_tracking-members.html new file mode 100644 index 000000000..fbfa3ded6 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_tracking-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHandDetectorTracking< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector_tracking.html b/web/html/doc/classop_1_1_w_hand_detector_tracking.html new file mode 100644 index 000000000..d85b55b2d --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_tracking.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WHandDetectorTracking< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHandDetectorTracking< TDatums > Class Template Reference
+
+
+ +

#include <wHandDetectorTracking.hpp>

+
+Inheritance diagram for op::WHandDetectorTracking< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHandDetectorTracking (const std::shared_ptr< HandDetector > &handDetector)
 
virtual ~WHandDetectorTracking ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHandDetectorTracking< TDatums >

+ + +

Definition at line 11 of file wHandDetectorTracking.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHandDetectorTracking()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHandDetectorTracking< TDatums >::WHandDetectorTracking (const std::shared_ptr< HandDetector > & handDetector)
+
+explicit
+
+ +

Definition at line 38 of file wHandDetectorTracking.hpp.

+ +
+
+ +

◆ ~WHandDetectorTracking()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHandDetectorTracking< TDatums >::~WHandDetectorTracking
+
+virtual
+
+ +

Definition at line 44 of file wHandDetectorTracking.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHandDetectorTracking< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wHandDetectorTracking.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHandDetectorTracking< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wHandDetectorTracking.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector_tracking.js b/web/html/doc/classop_1_1_w_hand_detector_tracking.js new file mode 100644 index 000000000..31c0c7e00 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_tracking.js @@ -0,0 +1,7 @@ +var classop_1_1_w_hand_detector_tracking = +[ + [ "WHandDetectorTracking", "classop_1_1_w_hand_detector_tracking.html#ad2a5ac720f4ed651f4cf5e42d21c05dd", null ], + [ "~WHandDetectorTracking", "classop_1_1_w_hand_detector_tracking.html#a7d884dfd00822de27742a2392fb210bb", null ], + [ "initializationOnThread", "classop_1_1_w_hand_detector_tracking.html#a20ef6206194a873c2cfa7fe13d905d92", null ], + [ "work", "classop_1_1_w_hand_detector_tracking.html#a7c849c5a423ffc150c6a4aee9055d34e", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_hand_detector_tracking.png b/web/html/doc/classop_1_1_w_hand_detector_tracking.png new file mode 100644 index 000000000..ece67103d Binary files /dev/null and b/web/html/doc/classop_1_1_w_hand_detector_tracking.png differ diff --git a/web/html/doc/classop_1_1_w_hand_detector_update-members.html b/web/html/doc/classop_1_1_w_hand_detector_update-members.html new file mode 100644 index 000000000..ccf1f42b2 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_update-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHandDetectorUpdate< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector_update.html b/web/html/doc/classop_1_1_w_hand_detector_update.html new file mode 100644 index 000000000..68fa62bea --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_update.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WHandDetectorUpdate< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHandDetectorUpdate< TDatums > Class Template Reference
+
+
+ +

#include <wHandDetectorUpdate.hpp>

+
+Inheritance diagram for op::WHandDetectorUpdate< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHandDetectorUpdate (const std::shared_ptr< HandDetector > &handDetector)
 
virtual ~WHandDetectorUpdate ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHandDetectorUpdate< TDatums >

+ + +

Definition at line 11 of file wHandDetectorUpdate.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHandDetectorUpdate()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHandDetectorUpdate< TDatums >::WHandDetectorUpdate (const std::shared_ptr< HandDetector > & handDetector)
+
+explicit
+
+ +

Definition at line 38 of file wHandDetectorUpdate.hpp.

+ +
+
+ +

◆ ~WHandDetectorUpdate()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHandDetectorUpdate< TDatums >::~WHandDetectorUpdate
+
+virtual
+
+ +

Definition at line 44 of file wHandDetectorUpdate.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHandDetectorUpdate< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wHandDetectorUpdate.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHandDetectorUpdate< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wHandDetectorUpdate.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_detector_update.js b/web/html/doc/classop_1_1_w_hand_detector_update.js new file mode 100644 index 000000000..bac4d91c0 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_detector_update.js @@ -0,0 +1,7 @@ +var classop_1_1_w_hand_detector_update = +[ + [ "WHandDetectorUpdate", "classop_1_1_w_hand_detector_update.html#abd8b56fbfbd2a619a4f37d148592f61b", null ], + [ "~WHandDetectorUpdate", "classop_1_1_w_hand_detector_update.html#a29d71b3c1ee52f04bd52b932db350b59", null ], + [ "initializationOnThread", "classop_1_1_w_hand_detector_update.html#a729aaa628e4f4c24e7cb9afca1cdc761", null ], + [ "work", "classop_1_1_w_hand_detector_update.html#af9287dc0a3c67abd35974c1c74614f3c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_hand_detector_update.png b/web/html/doc/classop_1_1_w_hand_detector_update.png new file mode 100644 index 000000000..b13116f12 Binary files /dev/null and b/web/html/doc/classop_1_1_w_hand_detector_update.png differ diff --git a/web/html/doc/classop_1_1_w_hand_extractor_net-members.html b/web/html/doc/classop_1_1_w_hand_extractor_net-members.html new file mode 100644 index 000000000..9b9b134fc --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_extractor_net-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHandExtractorNet< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_extractor_net.html b/web/html/doc/classop_1_1_w_hand_extractor_net.html new file mode 100644 index 000000000..aa2114eca --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_extractor_net.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WHandExtractorNet< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHandExtractorNet< TDatums > Class Template Reference
+
+
+ +

#include <wHandExtractorNet.hpp>

+
+Inheritance diagram for op::WHandExtractorNet< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHandExtractorNet (const std::shared_ptr< HandExtractorNet > &handExtractorNet)
 
virtual ~WHandExtractorNet ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHandExtractorNet< TDatums >

+ + +

Definition at line 11 of file wHandExtractorNet.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHandExtractorNet()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHandExtractorNet< TDatums >::WHandExtractorNet (const std::shared_ptr< HandExtractorNet > & handExtractorNet)
+
+explicit
+
+ +

Definition at line 38 of file wHandExtractorNet.hpp.

+ +
+
+ +

◆ ~WHandExtractorNet()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHandExtractorNet< TDatums >::~WHandExtractorNet
+
+virtual
+
+ +

Definition at line 44 of file wHandExtractorNet.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHandExtractorNet< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wHandExtractorNet.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHandExtractorNet< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wHandExtractorNet.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_extractor_net.js b/web/html/doc/classop_1_1_w_hand_extractor_net.js new file mode 100644 index 000000000..db0f5d067 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_extractor_net.js @@ -0,0 +1,7 @@ +var classop_1_1_w_hand_extractor_net = +[ + [ "WHandExtractorNet", "classop_1_1_w_hand_extractor_net.html#a464a629c6ecd9727da53453af8266e1d", null ], + [ "~WHandExtractorNet", "classop_1_1_w_hand_extractor_net.html#ab46b680c14fb2a0cb171b040da484eda", null ], + [ "initializationOnThread", "classop_1_1_w_hand_extractor_net.html#a7904f62b91d658a06ed89f0bfd307642", null ], + [ "work", "classop_1_1_w_hand_extractor_net.html#a21ffee48567b1c7c8994e4effef6cffe", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_hand_extractor_net.png b/web/html/doc/classop_1_1_w_hand_extractor_net.png new file mode 100644 index 000000000..820f55051 Binary files /dev/null and b/web/html/doc/classop_1_1_w_hand_extractor_net.png differ diff --git a/web/html/doc/classop_1_1_w_hand_renderer-members.html b/web/html/doc/classop_1_1_w_hand_renderer-members.html new file mode 100644 index 000000000..4026a68bf --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_renderer-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHandRenderer< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_renderer.html b/web/html/doc/classop_1_1_w_hand_renderer.html new file mode 100644 index 000000000..99b4644d4 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_renderer.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WHandRenderer< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHandRenderer< TDatums > Class Template Reference
+
+
+ +

#include <wHandRenderer.hpp>

+
+Inheritance diagram for op::WHandRenderer< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHandRenderer (const std::shared_ptr< HandRenderer > &handRenderer)
 
virtual ~WHandRenderer ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHandRenderer< TDatums >

+ + +

Definition at line 11 of file wHandRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHandRenderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHandRenderer< TDatums >::WHandRenderer (const std::shared_ptr< HandRenderer > & handRenderer)
+
+explicit
+
+ +

Definition at line 38 of file wHandRenderer.hpp.

+ +
+
+ +

◆ ~WHandRenderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHandRenderer< TDatums >::~WHandRenderer
+
+virtual
+
+ +

Definition at line 44 of file wHandRenderer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHandRenderer< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wHandRenderer.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHandRenderer< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wHandRenderer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_renderer.js b/web/html/doc/classop_1_1_w_hand_renderer.js new file mode 100644 index 000000000..ad595ca84 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_renderer.js @@ -0,0 +1,7 @@ +var classop_1_1_w_hand_renderer = +[ + [ "WHandRenderer", "classop_1_1_w_hand_renderer.html#a30121b55c601aed3644996d010b6bf8c", null ], + [ "~WHandRenderer", "classop_1_1_w_hand_renderer.html#ab18c8602c8bf65e3e762b2ff06def220", null ], + [ "initializationOnThread", "classop_1_1_w_hand_renderer.html#a2ee88145b38fea1a6a2bb7987a33bd40", null ], + [ "work", "classop_1_1_w_hand_renderer.html#ad178e8d413b3b15edc53625e1f5119d7", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_hand_renderer.png b/web/html/doc/classop_1_1_w_hand_renderer.png new file mode 100644 index 000000000..876914e8f Binary files /dev/null and b/web/html/doc/classop_1_1_w_hand_renderer.png differ diff --git a/web/html/doc/classop_1_1_w_hand_saver-members.html b/web/html/doc/classop_1_1_w_hand_saver-members.html new file mode 100644 index 000000000..f12a0dcc8 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHandSaver< TDatums > Member List
+
+
+ +

This is the complete list of members for op::WHandSaver< TDatums >, including all inherited members.

+ + + + + + + + + + + + + + +
checkAndWork(TDatums &tDatums)op::Worker< TDatums >
initializationOnThread()op::WHandSaver< TDatums >virtual
initializationOnThreadNoException()op::Worker< TDatums >
isRunning() constop::Worker< TDatums >inline
stop()op::Worker< TDatums >inline
tryStop()op::Worker< TDatums >inlinevirtual
WHandSaver(const std::shared_ptr< KeypointSaver > &keypointSaver)op::WHandSaver< TDatums >explicit
work(TDatums &tDatums)op::WorkerConsumer< TDatums >virtual
workConsumer(const TDatums &tDatums)op::WHandSaver< TDatums >virtual
Worker()op::Worker< TDatums >
~WHandSaver()op::WHandSaver< TDatums >virtual
~Worker()op::Worker< TDatums >virtual
~WorkerConsumer()op::WorkerConsumer< TDatums >virtual
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_saver.html b/web/html/doc/classop_1_1_w_hand_saver.html new file mode 100644 index 000000000..2383384d9 --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WHandSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHandSaver< TDatums > Class Template Reference
+
+
+ +

#include <wHandSaver.hpp>

+
+Inheritance diagram for op::WHandSaver< TDatums >:
+
+
+ + +op::WorkerConsumer< TDatums > +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHandSaver (const std::shared_ptr< KeypointSaver > &keypointSaver)
 
virtual ~WHandSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHandSaver< TDatums >

+ + +

Definition at line 12 of file wHandSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHandSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHandSaver< TDatums >::WHandSaver (const std::shared_ptr< KeypointSaver > & keypointSaver)
+
+explicit
+
+ +

Definition at line 39 of file wHandSaver.hpp.

+ +
+
+ +

◆ ~WHandSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHandSaver< TDatums >::~WHandSaver
+
+virtual
+
+ +

Definition at line 45 of file wHandSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHandSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wHandSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHandSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 55 of file wHandSaver.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
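Editorial note (not part of the generated documentation): the explicit constructor documented above takes an already-built module object by std::shared_ptr, which is how all of these W* workers are instantiated. A minimal, hypothetical instantiation following that signature is sketched below; the KeypointSaver constructor arguments are not shown on this page, so the object is assumed to be built elsewhere rather than guessed at, and the TDatums alias is again the instantiation commonly used in the OpenPose examples.

    // Hypothetical wiring snippet: wrap a module object in its documented worker.
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    std::shared_ptr<op::WHandSaver<TDatums>> makeHandSaverWorker(
        const std::shared_ptr<op::KeypointSaver>& keypointSaver) // built elsewhere by the caller
    {
        // Matches the explicit constructor documented above:
        // WHandSaver(const std::shared_ptr<KeypointSaver>& keypointSaver)
        return std::make_shared<op::WHandSaver<TDatums>>(keypointSaver);
    }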
+ + + + diff --git a/web/html/doc/classop_1_1_w_hand_saver.js b/web/html/doc/classop_1_1_w_hand_saver.js new file mode 100644 index 000000000..143820dbd --- /dev/null +++ b/web/html/doc/classop_1_1_w_hand_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_hand_saver = +[ + [ "WHandSaver", "classop_1_1_w_hand_saver.html#ab41ecc429abfe0a1424facd6ee4acd1f", null ], + [ "~WHandSaver", "classop_1_1_w_hand_saver.html#abf4a45c6ebe82fca1e0f0db1d3e2af79", null ], + [ "initializationOnThread", "classop_1_1_w_hand_saver.html#aa234a68d1cc7ec97fefbf30239149baa", null ], + [ "workConsumer", "classop_1_1_w_hand_saver.html#afc3976b394070927b9396163137317e5", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_hand_saver.png b/web/html/doc/classop_1_1_w_hand_saver.png new file mode 100644 index 000000000..506b603be Binary files /dev/null and b/web/html/doc/classop_1_1_w_hand_saver.png differ diff --git a/web/html/doc/classop_1_1_w_heat_map_saver-members.html b/web/html/doc/classop_1_1_w_heat_map_saver-members.html new file mode 100644 index 000000000..4aada9322 --- /dev/null +++ b/web/html/doc/classop_1_1_w_heat_map_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WHeatMapSaver< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_heat_map_saver.html b/web/html/doc/classop_1_1_w_heat_map_saver.html new file mode 100644 index 000000000..2764d9590 --- /dev/null +++ b/web/html/doc/classop_1_1_w_heat_map_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WHeatMapSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WHeatMapSaver< TDatums > Class Template Reference
+
+
+ +

#include <wHeatMapSaver.hpp>

+
+Inheritance diagram for op::WHeatMapSaver< TDatums >:
+
+
+ + +op::WorkerConsumer< TDatums > +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WHeatMapSaver (const std::shared_ptr< HeatMapSaver > &heatMapSaver)
 
virtual ~WHeatMapSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WHeatMapSaver< TDatums >

+ + +

Definition at line 11 of file wHeatMapSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WHeatMapSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WHeatMapSaver< TDatums >::WHeatMapSaver (const std::shared_ptr< HeatMapSaver > & heatMapSaver)
+
+explicit
+
+ +

Definition at line 38 of file wHeatMapSaver.hpp.

+ +
+
+ +

◆ ~WHeatMapSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WHeatMapSaver< TDatums >::~WHeatMapSaver
+
+virtual
+
+ +

Definition at line 44 of file wHeatMapSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WHeatMapSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wHeatMapSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WHeatMapSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 54 of file wHeatMapSaver.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_heat_map_saver.js b/web/html/doc/classop_1_1_w_heat_map_saver.js new file mode 100644 index 000000000..c8d667505 --- /dev/null +++ b/web/html/doc/classop_1_1_w_heat_map_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_heat_map_saver = +[ + [ "WHeatMapSaver", "classop_1_1_w_heat_map_saver.html#a5b72d5f3bcbdacb26ba440b80eef0109", null ], + [ "~WHeatMapSaver", "classop_1_1_w_heat_map_saver.html#aa651ec613c81cf9a19222428bd59feed", null ], + [ "initializationOnThread", "classop_1_1_w_heat_map_saver.html#a20e82b121a580c578f69cbb0401c4cb0", null ], + [ "workConsumer", "classop_1_1_w_heat_map_saver.html#a5fd729a47f0cdbe94001219f971f8f51", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_heat_map_saver.png b/web/html/doc/classop_1_1_w_heat_map_saver.png new file mode 100644 index 000000000..cf97cfecf Binary files /dev/null and b/web/html/doc/classop_1_1_w_heat_map_saver.png differ diff --git a/web/html/doc/classop_1_1_w_id_generator-members.html b/web/html/doc/classop_1_1_w_id_generator-members.html new file mode 100644 index 000000000..fb6269ece --- /dev/null +++ b/web/html/doc/classop_1_1_w_id_generator-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WIdGenerator< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_id_generator.html b/web/html/doc/classop_1_1_w_id_generator.html new file mode 100644 index 000000000..9e3638308 --- /dev/null +++ b/web/html/doc/classop_1_1_w_id_generator.html @@ -0,0 +1,268 @@ + + + + + + + +OpenPose: op::WIdGenerator< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WIdGenerator< TDatums > Class Template Reference
+
+
+ +

#include <wIdGenerator.hpp>

+
+Inheritance diagram for op::WIdGenerator< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WIdGenerator ()
 
virtual ~WIdGenerator ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WIdGenerator< TDatums >

+ + +

Definition at line 12 of file wIdGenerator.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WIdGenerator()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WIdGenerator< TDatums >::WIdGenerator
+
+explicit
+
+ +

Definition at line 39 of file wIdGenerator.hpp.

+ +
+
+ +

◆ ~WIdGenerator()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WIdGenerator< TDatums >::~WIdGenerator
+
+virtual
+
+ +

Definition at line 45 of file wIdGenerator.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WIdGenerator< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wIdGenerator.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WIdGenerator< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wIdGenerator.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
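Editorial note (not part of the generated documentation): every member list in this commit repeats the same inherited op::Worker<TDatums> surface (checkAndWork(), isRunning(), stop(), tryStop(), initializationOnThreadNoException()), which is what OpenPose's threading layer calls; user code normally hands workers to op::ThreadManager or op::Wrapper rather than driving them directly. Purely to illustrate that documented surface, a hand-rolled drive loop could look like the hypothetical sketch below; the exact meaning of checkAndWork()'s return value is not spelled out on these pages, so the loop relies only on isRunning().

    // Hypothetical single-threaded drive loop over the documented op::Worker interface.
    // OpenPose normally does this for you via its ThreadManager / Wrapper classes.
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    void driveWorker(op::Worker<TDatums>& worker, std::vector<TDatums>& batches)
    {
        worker.initializationOnThreadNoException(); // per-thread setup without throwing
        for (auto& tDatums : batches)
        {
            if (!worker.isRunning())                // the worker may have stopped itself
                break;
            worker.checkAndWork(tDatums);           // forwards to work()/workConsumer() while running
        }
        worker.stop();
    }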
+ + + + diff --git a/web/html/doc/classop_1_1_w_id_generator.js b/web/html/doc/classop_1_1_w_id_generator.js new file mode 100644 index 000000000..820309456 --- /dev/null +++ b/web/html/doc/classop_1_1_w_id_generator.js @@ -0,0 +1,7 @@ +var classop_1_1_w_id_generator = +[ + [ "WIdGenerator", "classop_1_1_w_id_generator.html#a6112733ee0b537d4d91191f93f0a84f8", null ], + [ "~WIdGenerator", "classop_1_1_w_id_generator.html#ad9e160c5120aa850fbe2285f78e062e2", null ], + [ "initializationOnThread", "classop_1_1_w_id_generator.html#a50a1b7929810daae87ee6443c659edad", null ], + [ "work", "classop_1_1_w_id_generator.html#a03bd005cf88749702fb8a29c20d4cb91", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_id_generator.png b/web/html/doc/classop_1_1_w_id_generator.png new file mode 100644 index 000000000..623d430a2 Binary files /dev/null and b/web/html/doc/classop_1_1_w_id_generator.png differ diff --git a/web/html/doc/classop_1_1_w_image_saver-members.html b/web/html/doc/classop_1_1_w_image_saver-members.html new file mode 100644 index 000000000..a7c1f34a3 --- /dev/null +++ b/web/html/doc/classop_1_1_w_image_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WImageSaver< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_image_saver.html b/web/html/doc/classop_1_1_w_image_saver.html new file mode 100644 index 000000000..d90cb536a --- /dev/null +++ b/web/html/doc/classop_1_1_w_image_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WImageSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WImageSaver< TDatums > Class Template Reference
+
+
+ +

#include <wImageSaver.hpp>

+
+Inheritance diagram for op::WImageSaver< TDatums >:
+
+
+ + +op::WorkerConsumer< TDatums > +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WImageSaver (const std::shared_ptr< ImageSaver > &imageSaver)
 
virtual ~WImageSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WImageSaver< TDatums >

+ + +

Definition at line 11 of file wImageSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WImageSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WImageSaver< TDatums >::WImageSaver (const std::shared_ptr< ImageSaver > & imageSaver)
+
+explicit
+
+ +

Definition at line 38 of file wImageSaver.hpp.

+ +
+
+ +

◆ ~WImageSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WImageSaver< TDatums >::~WImageSaver
+
+virtual
+
+ +

Definition at line 44 of file wImageSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WImageSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wImageSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WImageSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 54 of file wImageSaver.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_image_saver.js b/web/html/doc/classop_1_1_w_image_saver.js new file mode 100644 index 000000000..3690c8204 --- /dev/null +++ b/web/html/doc/classop_1_1_w_image_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_image_saver = +[ + [ "WImageSaver", "classop_1_1_w_image_saver.html#a11add012ee88b64a4f36d3f63cb65ee0", null ], + [ "~WImageSaver", "classop_1_1_w_image_saver.html#ab8371a260e35cdea5010327240c9a53d", null ], + [ "initializationOnThread", "classop_1_1_w_image_saver.html#a78655ea3d4dac28bdf7e2e4a80b5a337", null ], + [ "workConsumer", "classop_1_1_w_image_saver.html#a198bbfcf625354ddda419e0121d0cb33", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_image_saver.png b/web/html/doc/classop_1_1_w_image_saver.png new file mode 100644 index 000000000..c23f5311f Binary files /dev/null and b/web/html/doc/classop_1_1_w_image_saver.png differ diff --git a/web/html/doc/classop_1_1_w_keep_top_n_people-members.html b/web/html/doc/classop_1_1_w_keep_top_n_people-members.html new file mode 100644 index 000000000..dd880137a --- /dev/null +++ b/web/html/doc/classop_1_1_w_keep_top_n_people-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WKeepTopNPeople< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_keep_top_n_people.html b/web/html/doc/classop_1_1_w_keep_top_n_people.html new file mode 100644 index 000000000..bada6b4ef --- /dev/null +++ b/web/html/doc/classop_1_1_w_keep_top_n_people.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WKeepTopNPeople< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WKeepTopNPeople< TDatums > Class Template Reference
+
+
+ +

#include <wKeepTopNPeople.hpp>

+
+Inheritance diagram for op::WKeepTopNPeople< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WKeepTopNPeople (const std::shared_ptr< KeepTopNPeople > &keepTopNPeople)
 
virtual ~WKeepTopNPeople ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WKeepTopNPeople< TDatums >

+ + +

Definition at line 11 of file wKeepTopNPeople.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WKeepTopNPeople()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WKeepTopNPeople< TDatums >::WKeepTopNPeople (const std::shared_ptr< KeepTopNPeople > & keepTopNPeople)
+
+explicit
+
+ +

Definition at line 36 of file wKeepTopNPeople.hpp.

+ +
+
+ +

◆ ~WKeepTopNPeople()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WKeepTopNPeople< TDatums >::~WKeepTopNPeople
+
+virtual
+
+ +

Definition at line 42 of file wKeepTopNPeople.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WKeepTopNPeople< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 47 of file wKeepTopNPeople.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WKeepTopNPeople< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 52 of file wKeepTopNPeople.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_keep_top_n_people.js b/web/html/doc/classop_1_1_w_keep_top_n_people.js new file mode 100644 index 000000000..2b1d6df5a --- /dev/null +++ b/web/html/doc/classop_1_1_w_keep_top_n_people.js @@ -0,0 +1,7 @@ +var classop_1_1_w_keep_top_n_people = +[ + [ "WKeepTopNPeople", "classop_1_1_w_keep_top_n_people.html#aebe939c354cfb62cb6d950f73d14731b", null ], + [ "~WKeepTopNPeople", "classop_1_1_w_keep_top_n_people.html#ad23785b42b85c166e5080f47591cccaa", null ], + [ "initializationOnThread", "classop_1_1_w_keep_top_n_people.html#a56371016b6fe1fbacdba8d558685719b", null ], + [ "work", "classop_1_1_w_keep_top_n_people.html#a5928a091e0990706ab2ea5e5e07629dd", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_keep_top_n_people.png b/web/html/doc/classop_1_1_w_keep_top_n_people.png new file mode 100644 index 000000000..b152afeab Binary files /dev/null and b/web/html/doc/classop_1_1_w_keep_top_n_people.png differ diff --git a/web/html/doc/classop_1_1_w_keypoint_scaler-members.html b/web/html/doc/classop_1_1_w_keypoint_scaler-members.html new file mode 100644 index 000000000..aa6ed453e --- /dev/null +++ b/web/html/doc/classop_1_1_w_keypoint_scaler-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WKeypointScaler< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_keypoint_scaler.html b/web/html/doc/classop_1_1_w_keypoint_scaler.html new file mode 100644 index 000000000..013cf3e05 --- /dev/null +++ b/web/html/doc/classop_1_1_w_keypoint_scaler.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WKeypointScaler< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WKeypointScaler< TDatums > Class Template Reference
+
+
+ +

#include <wKeypointScaler.hpp>

+
+Inheritance diagram for op::WKeypointScaler< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WKeypointScaler (const std::shared_ptr< KeypointScaler > &keypointScaler)
 
virtual ~WKeypointScaler ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WKeypointScaler< TDatums >

+ + +

Definition at line 11 of file wKeypointScaler.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WKeypointScaler()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WKeypointScaler< TDatums >::WKeypointScaler (const std::shared_ptr< KeypointScaler > & keypointScaler)
+
+explicit
+
+ +

Definition at line 36 of file wKeypointScaler.hpp.

+ +
+
+ +

◆ ~WKeypointScaler()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WKeypointScaler< TDatums >::~WKeypointScaler
+
+virtual
+
+ +

Definition at line 42 of file wKeypointScaler.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WKeypointScaler< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 47 of file wKeypointScaler.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WKeypointScaler< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 52 of file wKeypointScaler.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_keypoint_scaler.js b/web/html/doc/classop_1_1_w_keypoint_scaler.js new file mode 100644 index 000000000..facd3c85a --- /dev/null +++ b/web/html/doc/classop_1_1_w_keypoint_scaler.js @@ -0,0 +1,7 @@ +var classop_1_1_w_keypoint_scaler = +[ + [ "WKeypointScaler", "classop_1_1_w_keypoint_scaler.html#a31624e262988b0840a8ddbf098e56e9b", null ], + [ "~WKeypointScaler", "classop_1_1_w_keypoint_scaler.html#af4e30e78dba64f2784a1757bc2eb9f8b", null ], + [ "initializationOnThread", "classop_1_1_w_keypoint_scaler.html#aba4fb004818f3adc22959e382a90cd2c", null ], + [ "work", "classop_1_1_w_keypoint_scaler.html#aacad5116921e2ff746fbdf9f6c0cbb25", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_keypoint_scaler.png b/web/html/doc/classop_1_1_w_keypoint_scaler.png new file mode 100644 index 000000000..878e1b7ca Binary files /dev/null and b/web/html/doc/classop_1_1_w_keypoint_scaler.png differ diff --git a/web/html/doc/classop_1_1_w_op_output_to_cv_mat-members.html b/web/html/doc/classop_1_1_w_op_output_to_cv_mat-members.html new file mode 100644 index 000000000..db5c1550b --- /dev/null +++ b/web/html/doc/classop_1_1_w_op_output_to_cv_mat-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WOpOutputToCvMat< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_op_output_to_cv_mat.html b/web/html/doc/classop_1_1_w_op_output_to_cv_mat.html new file mode 100644 index 000000000..83498adda --- /dev/null +++ b/web/html/doc/classop_1_1_w_op_output_to_cv_mat.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WOpOutputToCvMat< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WOpOutputToCvMat< TDatums > Class Template Reference

#include <wOpOutputToCvMat.hpp>

Inheritance diagram for op::WOpOutputToCvMat< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WOpOutputToCvMat (const std::shared_ptr< OpOutputToCvMat > &opOutputToCvMat)
 
virtual ~WOpOutputToCvMat ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WOpOutputToCvMat< TDatums >

+ + +

Definition at line 11 of file wOpOutputToCvMat.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WOpOutputToCvMat()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WOpOutputToCvMat< TDatums >::WOpOutputToCvMat (const std::shared_ptr< OpOutputToCvMat > & opOutputToCvMat)
+
+explicit
+
+ +

Definition at line 38 of file wOpOutputToCvMat.hpp.

+ +
+
+ +

◆ ~WOpOutputToCvMat()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WOpOutputToCvMat< TDatums >::~WOpOutputToCvMat
+
+virtual
+
+ +

Definition at line 44 of file wOpOutputToCvMat.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WOpOutputToCvMat< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wOpOutputToCvMat.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WOpOutputToCvMat< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wOpOutputToCvMat.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_op_output_to_cv_mat.js b/web/html/doc/classop_1_1_w_op_output_to_cv_mat.js new file mode 100644 index 000000000..7b15465e3 --- /dev/null +++ b/web/html/doc/classop_1_1_w_op_output_to_cv_mat.js @@ -0,0 +1,7 @@ +var classop_1_1_w_op_output_to_cv_mat = +[ + [ "WOpOutputToCvMat", "classop_1_1_w_op_output_to_cv_mat.html#a6f632a83de4cdc731c3f52d1541060f3", null ], + [ "~WOpOutputToCvMat", "classop_1_1_w_op_output_to_cv_mat.html#a5a4e433aa0c7cc62a5f97cc63a67c3fa", null ], + [ "initializationOnThread", "classop_1_1_w_op_output_to_cv_mat.html#adea2e8b1d33e6c091640c7d904dac7cd", null ], + [ "work", "classop_1_1_w_op_output_to_cv_mat.html#ae3fc21569d56a648c606b23fcc016349", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_op_output_to_cv_mat.png b/web/html/doc/classop_1_1_w_op_output_to_cv_mat.png new file mode 100644 index 000000000..019eeb7e9 Binary files /dev/null and b/web/html/doc/classop_1_1_w_op_output_to_cv_mat.png differ diff --git a/web/html/doc/classop_1_1_w_people_json_saver-members.html b/web/html/doc/classop_1_1_w_people_json_saver-members.html new file mode 100644 index 000000000..1a88d69b1 --- /dev/null +++ b/web/html/doc/classop_1_1_w_people_json_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WPeopleJsonSaver< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_people_json_saver.html b/web/html/doc/classop_1_1_w_people_json_saver.html new file mode 100644 index 000000000..1f13c2968 --- /dev/null +++ b/web/html/doc/classop_1_1_w_people_json_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WPeopleJsonSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WPeopleJsonSaver< TDatums > Class Template Reference

#include <wPeopleJsonSaver.hpp>

Inheritance diagram for op::WPeopleJsonSaver< TDatums >: derives from op::WorkerConsumer< TDatums >, which in turn derives from op::Worker< TDatums >.

+Public Member Functions

 WPeopleJsonSaver (const std::shared_ptr< PeopleJsonSaver > &peopleJsonSaver)
 
virtual ~WPeopleJsonSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WPeopleJsonSaver< TDatums >

+ + +

Definition at line 11 of file wPeopleJsonSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WPeopleJsonSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WPeopleJsonSaver< TDatums >::WPeopleJsonSaver (const std::shared_ptr< PeopleJsonSaver > & peopleJsonSaver)
+
+explicit
+
+ +

Definition at line 38 of file wPeopleJsonSaver.hpp.

+ +
+
+ +

◆ ~WPeopleJsonSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WPeopleJsonSaver< TDatums >::~WPeopleJsonSaver
+
+virtual
+
+ +

Definition at line 44 of file wPeopleJsonSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WPeopleJsonSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wPeopleJsonSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WPeopleJsonSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 54 of file wPeopleJsonSaver.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
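Unlike the plain op::Worker subclasses, WPeopleJsonSaver is a consumer: it derives from op::WorkerConsumer< TDatums >, so the datums reach it through workConsumer(const TDatums &) while the inherited work() remains the public entry point. A hedged sketch, with the datum alias and the ready-made PeopleJsonSaver treated as assumptions rather than part of this page:

```cpp
#include <memory>
#include <vector>
#include <openpose/headers.hpp> // assumed umbrella header

using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>; // assumed datum container

void savePeopleJson(const std::shared_ptr<op::PeopleJsonSaver>& peopleJsonSaver, TDatums& tDatums)
{
    op::WPeopleJsonSaver<TDatums> wPeopleJsonSaver{peopleJsonSaver};
    wPeopleJsonSaver.initializationOnThread();

    // work() is inherited from op::WorkerConsumer and dispatches to
    // workConsumer(const TDatums &); a consumer only reads the datums,
    // it does not put new ones back into the pipeline.
    wPeopleJsonSaver.work(tDatums);
}
```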
+ + + + diff --git a/web/html/doc/classop_1_1_w_people_json_saver.js b/web/html/doc/classop_1_1_w_people_json_saver.js new file mode 100644 index 000000000..69e5974c4 --- /dev/null +++ b/web/html/doc/classop_1_1_w_people_json_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_people_json_saver = +[ + [ "WPeopleJsonSaver", "classop_1_1_w_people_json_saver.html#ac12dfe8c1414ec36ace474ecbf148f67", null ], + [ "~WPeopleJsonSaver", "classop_1_1_w_people_json_saver.html#a386b5b64f2eee08cb344b242f5adb122", null ], + [ "initializationOnThread", "classop_1_1_w_people_json_saver.html#a5d4239596a996723a20a1031d32c7446", null ], + [ "workConsumer", "classop_1_1_w_people_json_saver.html#af874a16a06a9a3452a0e3792ac15647e", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_people_json_saver.png b/web/html/doc/classop_1_1_w_people_json_saver.png new file mode 100644 index 000000000..f238ba067 Binary files /dev/null and b/web/html/doc/classop_1_1_w_people_json_saver.png differ diff --git a/web/html/doc/classop_1_1_w_person_id_extractor-members.html b/web/html/doc/classop_1_1_w_person_id_extractor-members.html new file mode 100644 index 000000000..f125388bf --- /dev/null +++ b/web/html/doc/classop_1_1_w_person_id_extractor-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WPersonIdExtractor< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_person_id_extractor.html b/web/html/doc/classop_1_1_w_person_id_extractor.html new file mode 100644 index 000000000..df822e150 --- /dev/null +++ b/web/html/doc/classop_1_1_w_person_id_extractor.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WPersonIdExtractor< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WPersonIdExtractor< TDatums > Class Template Reference

#include <wPersonIdExtractor.hpp>

Inheritance diagram for op::WPersonIdExtractor< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WPersonIdExtractor (const std::shared_ptr< PersonIdExtractor > &personIdExtractor)
 
virtual ~WPersonIdExtractor ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WPersonIdExtractor< TDatums >

+ + +

Definition at line 11 of file wPersonIdExtractor.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WPersonIdExtractor()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WPersonIdExtractor< TDatums >::WPersonIdExtractor (const std::shared_ptr< PersonIdExtractor > & personIdExtractor)
+
+explicit
+
+ +

Definition at line 38 of file wPersonIdExtractor.hpp.

+ +
+
+ +

◆ ~WPersonIdExtractor()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WPersonIdExtractor< TDatums >::~WPersonIdExtractor
+
+virtual
+
+ +

Definition at line 44 of file wPersonIdExtractor.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WPersonIdExtractor< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wPersonIdExtractor.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WPersonIdExtractor< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 54 of file wPersonIdExtractor.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_person_id_extractor.js b/web/html/doc/classop_1_1_w_person_id_extractor.js new file mode 100644 index 000000000..dae837b9a --- /dev/null +++ b/web/html/doc/classop_1_1_w_person_id_extractor.js @@ -0,0 +1,7 @@ +var classop_1_1_w_person_id_extractor = +[ + [ "WPersonIdExtractor", "classop_1_1_w_person_id_extractor.html#a14a6cc9c6c70acd4847482fd71e4972b", null ], + [ "~WPersonIdExtractor", "classop_1_1_w_person_id_extractor.html#a15f33c528ac92d30be226e784256be01", null ], + [ "initializationOnThread", "classop_1_1_w_person_id_extractor.html#a72b888875be18eb3fc8d0a8c267630de", null ], + [ "work", "classop_1_1_w_person_id_extractor.html#a4066bf1c8cad753c74de1ceabdd76505", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_person_id_extractor.png b/web/html/doc/classop_1_1_w_person_id_extractor.png new file mode 100644 index 000000000..2bfab495f Binary files /dev/null and b/web/html/doc/classop_1_1_w_person_id_extractor.png differ diff --git a/web/html/doc/classop_1_1_w_pose_extractor-members.html b/web/html/doc/classop_1_1_w_pose_extractor-members.html new file mode 100644 index 000000000..80af1e6ce --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_extractor-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WPoseExtractor< TDatums > Member List

This is the complete list of members for op::WPoseExtractor< TDatums >, including all inherited members.

 checkAndWork(TDatums &tDatums) : op::Worker< TDatums >
 initializationOnThread() : op::WPoseExtractor< TDatums > [virtual]
 initializationOnThreadNoException() : op::Worker< TDatums >
 isRunning() const : op::Worker< TDatums > [inline]
 stop() : op::Worker< TDatums > [inline]
 tryStop() : op::Worker< TDatums > [inline, virtual]
 work(TDatums &tDatums) : op::WPoseExtractor< TDatums > [virtual]
 Worker() : op::Worker< TDatums >
 WPoseExtractor(const std::shared_ptr< PoseExtractor > &poseExtractorSharedPtr) : op::WPoseExtractor< TDatums > [explicit]
 ~Worker() : op::Worker< TDatums > [virtual]
 ~WPoseExtractor() : op::WPoseExtractor< TDatums > [virtual]
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_extractor.html b/web/html/doc/classop_1_1_w_pose_extractor.html new file mode 100644 index 000000000..310638f1d --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_extractor.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WPoseExtractor< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WPoseExtractor< TDatums > Class Template Reference

#include <wPoseExtractor.hpp>

Inheritance diagram for op::WPoseExtractor< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WPoseExtractor (const std::shared_ptr< PoseExtractor > &poseExtractorSharedPtr)
 
virtual ~WPoseExtractor ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WPoseExtractor< TDatums >

+ + +

Definition at line 11 of file wPoseExtractor.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WPoseExtractor()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WPoseExtractor< TDatums >::WPoseExtractor (const std::shared_ptr< PoseExtractor > & poseExtractorSharedPtr)
+
+explicit
+
+ +

Definition at line 38 of file wPoseExtractor.hpp.

+ +
+
+ +

◆ ~WPoseExtractor()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WPoseExtractor< TDatums >::~WPoseExtractor
+
+virtual
+
+ +

Definition at line 44 of file wPoseExtractor.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WPoseExtractor< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wPoseExtractor.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WPoseExtractor< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 62 of file wPoseExtractor.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
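WPoseExtractor is the worker most pipelines are built around: its work() runs the wrapped PoseExtractor on every datum in the batch. Below is a sketch of calling it directly and reading the result back; the poseKeypoints field and the datum alias reflect the usual op::Datum layout and are assumptions rather than something this page documents.

```cpp
#include <iostream>
#include <memory>
#include <vector>
#include <openpose/headers.hpp> // assumed umbrella header

using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>; // assumed datum container

void extractPose(const std::shared_ptr<op::PoseExtractor>& poseExtractor, TDatums& tDatums)
{
    op::WPoseExtractor<TDatums> wPoseExtractor{poseExtractor};
    wPoseExtractor.initializationOnThread(); // loads the wrapped extractor on this thread
    wPoseExtractor.work(tDatums);            // fills in the pose keypoints of each datum

    // Hypothetical read-back of the first datum's keypoints (an op::Array<float>).
    if (tDatums != nullptr && !tDatums->empty())
        std::cout << "people detected: " << (*tDatums)[0]->poseKeypoints.getSize(0) << std::endl;
}
```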
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_extractor.js b/web/html/doc/classop_1_1_w_pose_extractor.js new file mode 100644 index 000000000..413cb08cd --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_extractor.js @@ -0,0 +1,7 @@ +var classop_1_1_w_pose_extractor = +[ + [ "WPoseExtractor", "classop_1_1_w_pose_extractor.html#ae85b1ec41bf47dcf1aed7bdae1d91915", null ], + [ "~WPoseExtractor", "classop_1_1_w_pose_extractor.html#aedf9cc53f7dfdb1ec2aa77651ca37eef", null ], + [ "initializationOnThread", "classop_1_1_w_pose_extractor.html#a9b621ed9915da9bf3cce49db547de9e6", null ], + [ "work", "classop_1_1_w_pose_extractor.html#ae0f02aaefccab05bbbd919dd7a9e0f61", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_pose_extractor.png b/web/html/doc/classop_1_1_w_pose_extractor.png new file mode 100644 index 000000000..d18a408a6 Binary files /dev/null and b/web/html/doc/classop_1_1_w_pose_extractor.png differ diff --git a/web/html/doc/classop_1_1_w_pose_extractor_net-members.html b/web/html/doc/classop_1_1_w_pose_extractor_net-members.html new file mode 100644 index 000000000..290741f90 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_extractor_net-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WPoseExtractorNet< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_extractor_net.html b/web/html/doc/classop_1_1_w_pose_extractor_net.html new file mode 100644 index 000000000..590566108 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_extractor_net.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WPoseExtractorNet< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WPoseExtractorNet< TDatums > Class Template Reference

#include <wPoseExtractorNet.hpp>

Inheritance diagram for op::WPoseExtractorNet< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WPoseExtractorNet (const std::shared_ptr< PoseExtractorNet > &poseExtractorSharedPtr)
 
virtual ~WPoseExtractorNet ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WPoseExtractorNet< TDatums >

+ + +

Definition at line 11 of file wPoseExtractorNet.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WPoseExtractorNet()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WPoseExtractorNet< TDatums >::WPoseExtractorNet (const std::shared_ptr< PoseExtractorNet > & poseExtractorSharedPtr)
+
+explicit
+
+ +

Definition at line 38 of file wPoseExtractorNet.hpp.

+ +
+
+ +

◆ ~WPoseExtractorNet()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WPoseExtractorNet< TDatums >::~WPoseExtractorNet
+
+virtual
+
+ +

Definition at line 44 of file wPoseExtractorNet.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WPoseExtractorNet< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wPoseExtractorNet.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WPoseExtractorNet< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 62 of file wPoseExtractorNet.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_extractor_net.js b/web/html/doc/classop_1_1_w_pose_extractor_net.js new file mode 100644 index 000000000..9d42c4fa8 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_extractor_net.js @@ -0,0 +1,7 @@ +var classop_1_1_w_pose_extractor_net = +[ + [ "WPoseExtractorNet", "classop_1_1_w_pose_extractor_net.html#aa0f6b7ec6f36fe2a27649ac2c7490c09", null ], + [ "~WPoseExtractorNet", "classop_1_1_w_pose_extractor_net.html#aa085377f965ffc8385d34d77a2e65e5a", null ], + [ "initializationOnThread", "classop_1_1_w_pose_extractor_net.html#a18d4a120314ec44d1722cc164aaba7a8", null ], + [ "work", "classop_1_1_w_pose_extractor_net.html#a3d691e30c419c70e23a4d7b3c92adb4b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_pose_extractor_net.png b/web/html/doc/classop_1_1_w_pose_extractor_net.png new file mode 100644 index 000000000..8f77a0a80 Binary files /dev/null and b/web/html/doc/classop_1_1_w_pose_extractor_net.png differ diff --git a/web/html/doc/classop_1_1_w_pose_renderer-members.html b/web/html/doc/classop_1_1_w_pose_renderer-members.html new file mode 100644 index 000000000..c65d59105 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_renderer-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WPoseRenderer< TDatums > Member List

This is the complete list of members for op::WPoseRenderer< TDatums >, including all inherited members.

 checkAndWork(TDatums &tDatums) : op::Worker< TDatums >
 initializationOnThread() : op::WPoseRenderer< TDatums > [virtual]
 initializationOnThreadNoException() : op::Worker< TDatums >
 isRunning() const : op::Worker< TDatums > [inline]
 stop() : op::Worker< TDatums > [inline]
 tryStop() : op::Worker< TDatums > [inline, virtual]
 work(TDatums &tDatums) : op::WPoseRenderer< TDatums > [virtual]
 Worker() : op::Worker< TDatums >
 WPoseRenderer(const std::shared_ptr< PoseRenderer > &poseRendererSharedPtr) : op::WPoseRenderer< TDatums > [explicit]
 ~Worker() : op::Worker< TDatums > [virtual]
 ~WPoseRenderer() : op::WPoseRenderer< TDatums > [virtual]
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_renderer.html b/web/html/doc/classop_1_1_w_pose_renderer.html new file mode 100644 index 000000000..97cf561fc --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_renderer.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WPoseRenderer< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WPoseRenderer< TDatums > Class Template Reference

#include <wPoseRenderer.hpp>

Inheritance diagram for op::WPoseRenderer< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WPoseRenderer (const std::shared_ptr< PoseRenderer > &poseRendererSharedPtr)
 
virtual ~WPoseRenderer ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WPoseRenderer< TDatums >

+ + +

Definition at line 11 of file wPoseRenderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WPoseRenderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WPoseRenderer< TDatums >::WPoseRenderer (const std::shared_ptr< PoseRenderer > & poseRendererSharedPtr)
+
+explicit
+
+ +

Definition at line 38 of file wPoseRenderer.hpp.

+ +
+
+ +

◆ ~WPoseRenderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WPoseRenderer< TDatums >::~WPoseRenderer
+
+virtual
+
+ +

Definition at line 44 of file wPoseRenderer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WPoseRenderer< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wPoseRenderer.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WPoseRenderer< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 62 of file wPoseRenderer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_renderer.js b/web/html/doc/classop_1_1_w_pose_renderer.js new file mode 100644 index 000000000..2e7d3098d --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_renderer.js @@ -0,0 +1,7 @@ +var classop_1_1_w_pose_renderer = +[ + [ "WPoseRenderer", "classop_1_1_w_pose_renderer.html#ae74189143175b89ccd36662cec4de72e", null ], + [ "~WPoseRenderer", "classop_1_1_w_pose_renderer.html#ae748fc721246c2a3ad8ffd32adf5e9e7", null ], + [ "initializationOnThread", "classop_1_1_w_pose_renderer.html#aba989a73cef9a807879ad2196725c61c", null ], + [ "work", "classop_1_1_w_pose_renderer.html#a10b1631d78d8270ed2a16e538b30eb76", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_pose_renderer.png b/web/html/doc/classop_1_1_w_pose_renderer.png new file mode 100644 index 000000000..cfef4a2df Binary files /dev/null and b/web/html/doc/classop_1_1_w_pose_renderer.png differ diff --git a/web/html/doc/classop_1_1_w_pose_saver-members.html b/web/html/doc/classop_1_1_w_pose_saver-members.html new file mode 100644 index 000000000..80b503fb2 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WPoseSaver< TDatums > Member List

This is the complete list of members for op::WPoseSaver< TDatums >, including all inherited members.

 checkAndWork(TDatums &tDatums) : op::Worker< TDatums >
 initializationOnThread() : op::WPoseSaver< TDatums > [virtual]
 initializationOnThreadNoException() : op::Worker< TDatums >
 isRunning() const : op::Worker< TDatums > [inline]
 stop() : op::Worker< TDatums > [inline]
 tryStop() : op::Worker< TDatums > [inline, virtual]
 work(TDatums &tDatums) : op::WorkerConsumer< TDatums > [virtual]
 workConsumer(const TDatums &tDatums) : op::WPoseSaver< TDatums > [virtual]
 Worker() : op::Worker< TDatums >
 WPoseSaver(const std::shared_ptr< KeypointSaver > &keypointSaver) : op::WPoseSaver< TDatums > [explicit]
 ~Worker() : op::Worker< TDatums > [virtual]
 ~WorkerConsumer() : op::WorkerConsumer< TDatums > [virtual]
 ~WPoseSaver() : op::WPoseSaver< TDatums > [virtual]
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_saver.html b/web/html/doc/classop_1_1_w_pose_saver.html new file mode 100644 index 000000000..0fce67040 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WPoseSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WPoseSaver< TDatums > Class Template Reference

#include <wPoseSaver.hpp>

Inheritance diagram for op::WPoseSaver< TDatums >: derives from op::WorkerConsumer< TDatums >, which in turn derives from op::Worker< TDatums >.

+Public Member Functions

 WPoseSaver (const std::shared_ptr< KeypointSaver > &keypointSaver)
 
virtual ~WPoseSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WPoseSaver< TDatums >

+ + +

Definition at line 12 of file wPoseSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WPoseSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WPoseSaver< TDatums >::WPoseSaver (const std::shared_ptr< KeypointSaver > & keypointSaver)
+
+explicit
+
+ +

Definition at line 39 of file wPoseSaver.hpp.

+ +
+
+ +

◆ ~WPoseSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WPoseSaver< TDatums >::~WPoseSaver
+
+virtual
+
+ +

Definition at line 45 of file wPoseSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WPoseSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wPoseSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WPoseSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 55 of file wPoseSaver.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_saver.js b/web/html/doc/classop_1_1_w_pose_saver.js new file mode 100644 index 000000000..bc703a1d0 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_pose_saver = +[ + [ "WPoseSaver", "classop_1_1_w_pose_saver.html#aa9dd0f4649c9e8efef10201caf9e4cfd", null ], + [ "~WPoseSaver", "classop_1_1_w_pose_saver.html#a62394c885abe4d95bece4469ac3657e9", null ], + [ "initializationOnThread", "classop_1_1_w_pose_saver.html#a4f0774832e12389593361186f1b83128", null ], + [ "workConsumer", "classop_1_1_w_pose_saver.html#a039027281498168b57df8dfeefd82cd8", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_pose_saver.png b/web/html/doc/classop_1_1_w_pose_saver.png new file mode 100644 index 000000000..d49d5b430 Binary files /dev/null and b/web/html/doc/classop_1_1_w_pose_saver.png differ diff --git a/web/html/doc/classop_1_1_w_pose_triangulation-members.html b/web/html/doc/classop_1_1_w_pose_triangulation-members.html new file mode 100644 index 000000000..32de18d11 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_triangulation-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WPoseTriangulation< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_triangulation.html b/web/html/doc/classop_1_1_w_pose_triangulation.html new file mode 100644 index 000000000..f6ed7cd20 --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_triangulation.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WPoseTriangulation< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WPoseTriangulation< TDatums > Class Template Reference

#include <wPoseTriangulation.hpp>

Inheritance diagram for op::WPoseTriangulation< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WPoseTriangulation (const std::shared_ptr< PoseTriangulation > &poseTriangulation)
 
virtual ~WPoseTriangulation ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WPoseTriangulation< TDatums >

+ + +

Definition at line 11 of file wPoseTriangulation.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WPoseTriangulation()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WPoseTriangulation< TDatums >::WPoseTriangulation (const std::shared_ptr< PoseTriangulation > & poseTriangulation)
+
+explicit
+
+ +

Definition at line 38 of file wPoseTriangulation.hpp.

+ +
+
+ +

◆ ~WPoseTriangulation()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WPoseTriangulation< TDatums >::~WPoseTriangulation
+
+virtual
+
+ +

Definition at line 44 of file wPoseTriangulation.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WPoseTriangulation< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wPoseTriangulation.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WPoseTriangulation< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 62 of file wPoseTriangulation.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_pose_triangulation.js b/web/html/doc/classop_1_1_w_pose_triangulation.js new file mode 100644 index 000000000..0417e4f9d --- /dev/null +++ b/web/html/doc/classop_1_1_w_pose_triangulation.js @@ -0,0 +1,7 @@ +var classop_1_1_w_pose_triangulation = +[ + [ "WPoseTriangulation", "classop_1_1_w_pose_triangulation.html#a439c75d19eae34fdd20f2f1c4ee18e48", null ], + [ "~WPoseTriangulation", "classop_1_1_w_pose_triangulation.html#ae88fe6766fbcca1a682306af99684fa3", null ], + [ "initializationOnThread", "classop_1_1_w_pose_triangulation.html#a5711329db1768eb77d2d96575c9fb668", null ], + [ "work", "classop_1_1_w_pose_triangulation.html#a495b29e03933d750827acc0531c72c78", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_pose_triangulation.png b/web/html/doc/classop_1_1_w_pose_triangulation.png new file mode 100644 index 000000000..8c29db1d3 Binary files /dev/null and b/web/html/doc/classop_1_1_w_pose_triangulation.png differ diff --git a/web/html/doc/classop_1_1_w_queue_assembler-members.html b/web/html/doc/classop_1_1_w_queue_assembler-members.html new file mode 100644 index 000000000..e285bd5c4 --- /dev/null +++ b/web/html/doc/classop_1_1_w_queue_assembler-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WQueueAssembler< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_queue_assembler.html b/web/html/doc/classop_1_1_w_queue_assembler.html new file mode 100644 index 000000000..305695412 --- /dev/null +++ b/web/html/doc/classop_1_1_w_queue_assembler.html @@ -0,0 +1,268 @@ + + + + + + + +OpenPose: op::WQueueAssembler< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WQueueAssembler< TDatums > Class Template Reference

#include <wQueueAssembler.hpp>

Inheritance diagram for op::WQueueAssembler< TDatums >: derives from op::Worker< std::shared_ptr< TDatums > >.

+Public Member Functions

 WQueueAssembler ()
 
virtual ~WQueueAssembler ()
 
void initializationOnThread ()
 
void work (std::shared_ptr< TDatums > &tDatums)
 
- Public Member Functions inherited from op::Worker< std::shared_ptr< TDatums > >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (std::shared_ptr< TDatums > &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WQueueAssembler< TDatums >

+ + +

Definition at line 17 of file wQueueAssembler.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WQueueAssembler()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WQueueAssembler< TDatums >::WQueueAssembler
+
+explicit
+
+ +

Definition at line 43 of file wQueueAssembler.hpp.

+ +
+
+ +

◆ ~WQueueAssembler()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WQueueAssembler< TDatums >::~WQueueAssembler
+
+virtual
+
+ +

Definition at line 48 of file wQueueAssembler.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WQueueAssembler< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< std::shared_ptr< TDatums > >.

+ +

Definition at line 53 of file wQueueAssembler.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WQueueAssembler< TDatums >::work (std::shared_ptr< TDatums > & tDatums)
+
+virtual
+
+ +

Implements op::Worker< std::shared_ptr< TDatums > >.

+ +

Definition at line 58 of file wQueueAssembler.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
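Note the template argument here: WQueueAssembler derives from op::Worker< std::shared_ptr< TDatums > >, so TDatums is the element type itself, work() receives a std::shared_ptr to it, and the constructor takes no arguments. A small sketch under those documented signatures (the concrete element type is an assumption):

```cpp
#include <memory>
#include <vector>
#include <openpose/headers.hpp> // assumed umbrella header

// Assumed element type: for this worker the template argument is the datum
// vector itself, and the worker operates on a shared_ptr to it.
using TDatumsNoPtr = std::vector<std::shared_ptr<op::Datum>>;

void assembleQueue(std::shared_ptr<TDatumsNoPtr>& tDatums)
{
    op::WQueueAssembler<TDatumsNoPtr> wQueueAssembler; // default-constructed, as documented above
    wQueueAssembler.initializationOnThread();
    wQueueAssembler.work(tDatums); // signature: work(std::shared_ptr< TDatums > &)
}
```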
+ + + + diff --git a/web/html/doc/classop_1_1_w_queue_assembler.js b/web/html/doc/classop_1_1_w_queue_assembler.js new file mode 100644 index 000000000..974fa346b --- /dev/null +++ b/web/html/doc/classop_1_1_w_queue_assembler.js @@ -0,0 +1,7 @@ +var classop_1_1_w_queue_assembler = +[ + [ "WQueueAssembler", "classop_1_1_w_queue_assembler.html#ad4a4ec3e060ad6483331156a5a62af25", null ], + [ "~WQueueAssembler", "classop_1_1_w_queue_assembler.html#abe8d97c0749cd8d968c8df2727b643e6", null ], + [ "initializationOnThread", "classop_1_1_w_queue_assembler.html#a02bb2d4e47689903434c05a911a5ba15", null ], + [ "work", "classop_1_1_w_queue_assembler.html#ad3b1ca56d18e1e234773ba15efea7158", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_queue_assembler.png b/web/html/doc/classop_1_1_w_queue_assembler.png new file mode 100644 index 000000000..ac423ef95 Binary files /dev/null and b/web/html/doc/classop_1_1_w_queue_assembler.png differ diff --git a/web/html/doc/classop_1_1_w_queue_orderer-members.html b/web/html/doc/classop_1_1_w_queue_orderer-members.html new file mode 100644 index 000000000..7470dc4d9 --- /dev/null +++ b/web/html/doc/classop_1_1_w_queue_orderer-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WQueueOrderer< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_queue_orderer.html b/web/html/doc/classop_1_1_w_queue_orderer.html new file mode 100644 index 000000000..913430cab --- /dev/null +++ b/web/html/doc/classop_1_1_w_queue_orderer.html @@ -0,0 +1,300 @@ + + + + + + + +OpenPose: op::WQueueOrderer< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WQueueOrderer< TDatums > Class Template Reference

#include <wQueueOrderer.hpp>

Inheritance diagram for op::WQueueOrderer< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WQueueOrderer (const unsigned int maxBufferSize=64u)
 
virtual ~WQueueOrderer ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
void tryStop ()
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WQueueOrderer< TDatums >

+ + +

Definition at line 12 of file wQueueOrderer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WQueueOrderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WQueueOrderer< TDatums >::WQueueOrderer (const unsigned int maxBufferSize = 64u)
+
+explicit
+
+ +

Definition at line 44 of file wQueueOrderer.hpp.

+ +
+
+ +

◆ ~WQueueOrderer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WQueueOrderer< TDatums >::~WQueueOrderer
+
+virtual
+
+ +

Definition at line 53 of file wQueueOrderer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WQueueOrderer< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 58 of file wQueueOrderer.hpp.

+ +
+
+ +

◆ tryStop()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WQueueOrderer< TDatums >::tryStop
+
+virtual
+
+ +

Reimplemented from op::Worker< TDatums >.

+ +

Definition at line 159 of file wQueueOrderer.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WQueueOrderer< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 63 of file wQueueOrderer.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
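WQueueOrderer differs from the other workers in two documented ways: its constructor takes a maxBufferSize (default 64u) for the reordering buffer, and it reimplements tryStop() instead of inheriting it. A sketch exercising both, with the datum alias as an assumption:

```cpp
#include <memory>
#include <vector>
#include <openpose/headers.hpp> // assumed umbrella header

using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>; // assumed datum container

void orderQueue(TDatums& tDatums)
{
    // Allow up to 128 buffered elements instead of the documented default of 64u.
    op::WQueueOrderer<TDatums> wQueueOrderer{128u};
    wQueueOrderer.initializationOnThread();

    if (wQueueOrderer.isRunning())
        wQueueOrderer.checkAndWork(tDatums);

    // tryStop() is reimplemented here (see above), presumably so that buffered
    // elements can still be flushed before the worker finally stops.
    wQueueOrderer.tryStop();
}
```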
+ + + + diff --git a/web/html/doc/classop_1_1_w_queue_orderer.js b/web/html/doc/classop_1_1_w_queue_orderer.js new file mode 100644 index 000000000..20795721f --- /dev/null +++ b/web/html/doc/classop_1_1_w_queue_orderer.js @@ -0,0 +1,8 @@ +var classop_1_1_w_queue_orderer = +[ + [ "WQueueOrderer", "classop_1_1_w_queue_orderer.html#a3303add5fa8cc36593d3d859ffdd8ae0", null ], + [ "~WQueueOrderer", "classop_1_1_w_queue_orderer.html#a720b2cd490e2267258bf5d5776f00095", null ], + [ "initializationOnThread", "classop_1_1_w_queue_orderer.html#a85598f83f6f3a30b7ddce9bc7beddf33", null ], + [ "tryStop", "classop_1_1_w_queue_orderer.html#a01bc7495ec992cc9c54a040534cb3634", null ], + [ "work", "classop_1_1_w_queue_orderer.html#a1ea314eeaa8d99fbf33885d9a4c6d044", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_queue_orderer.png b/web/html/doc/classop_1_1_w_queue_orderer.png new file mode 100644 index 000000000..5a94082c0 Binary files /dev/null and b/web/html/doc/classop_1_1_w_queue_orderer.png differ diff --git a/web/html/doc/classop_1_1_w_scale_and_size_extractor-members.html b/web/html/doc/classop_1_1_w_scale_and_size_extractor-members.html new file mode 100644 index 000000000..7dd7782b9 --- /dev/null +++ b/web/html/doc/classop_1_1_w_scale_and_size_extractor-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WScaleAndSizeExtractor< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_scale_and_size_extractor.html b/web/html/doc/classop_1_1_w_scale_and_size_extractor.html new file mode 100644 index 000000000..086130e13 --- /dev/null +++ b/web/html/doc/classop_1_1_w_scale_and_size_extractor.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WScaleAndSizeExtractor< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WScaleAndSizeExtractor< TDatums > Class Template Reference

#include <wScaleAndSizeExtractor.hpp>

Inheritance diagram for op::WScaleAndSizeExtractor< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WScaleAndSizeExtractor (const std::shared_ptr< ScaleAndSizeExtractor > &scaleAndSizeExtractor)
 
virtual ~WScaleAndSizeExtractor ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WScaleAndSizeExtractor< TDatums >

+ + +

Definition at line 11 of file wScaleAndSizeExtractor.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WScaleAndSizeExtractor()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WScaleAndSizeExtractor< TDatums >::WScaleAndSizeExtractor (const std::shared_ptr< ScaleAndSizeExtractor > & scaleAndSizeExtractor)
+
+explicit
+
+ +

Definition at line 38 of file wScaleAndSizeExtractor.hpp.

+ +
+
+ +

◆ ~WScaleAndSizeExtractor()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WScaleAndSizeExtractor< TDatums >::~WScaleAndSizeExtractor
+
+virtual
+
+ +

Definition at line 45 of file wScaleAndSizeExtractor.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WScaleAndSizeExtractor< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wScaleAndSizeExtractor.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WScaleAndSizeExtractor< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wScaleAndSizeExtractor.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
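Taken together, these workers are meant to be chained: WScaleAndSizeExtractor typically runs before the pose stages so that later workers find the scales and sizes already filled in. The sketch below runs a few of the workers documented in this set in sequence on the same datums, driving them only through the public op::Worker members listed above; the wrapped processing objects and the datum alias are assumed to be provided elsewhere.

```cpp
#include <memory>
#include <vector>
#include <openpose/headers.hpp> // assumed umbrella header

using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>; // assumed datum container

void runMiniPipeline(const std::shared_ptr<op::ScaleAndSizeExtractor>& scaleAndSizeExtractor,
                     const std::shared_ptr<op::PoseExtractor>& poseExtractor,
                     const std::shared_ptr<op::PoseRenderer>& poseRenderer,
                     TDatums& tDatums)
{
    op::WScaleAndSizeExtractor<TDatums> wScale{scaleAndSizeExtractor};
    op::WPoseExtractor<TDatums> wPose{poseExtractor};
    op::WPoseRenderer<TDatums> wRender{poseRenderer};

    // Only members documented as public in op::Worker are used through the base class.
    const std::vector<op::Worker<TDatums>*> workers{&wScale, &wPose, &wRender};
    for (auto* worker : workers)
        worker->initializationOnThreadNoException(); // exception-safe per-thread setup
    for (auto* worker : workers)
        if (worker->isRunning())
            worker->checkAndWork(tDatums); // each stage updates the datums in place
}
```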
+ + + + diff --git a/web/html/doc/classop_1_1_w_scale_and_size_extractor.js b/web/html/doc/classop_1_1_w_scale_and_size_extractor.js new file mode 100644 index 000000000..7af8aef04 --- /dev/null +++ b/web/html/doc/classop_1_1_w_scale_and_size_extractor.js @@ -0,0 +1,7 @@ +var classop_1_1_w_scale_and_size_extractor = +[ + [ "WScaleAndSizeExtractor", "classop_1_1_w_scale_and_size_extractor.html#a8e6ef291bd809987f06fbb1cc2173b0f", null ], + [ "~WScaleAndSizeExtractor", "classop_1_1_w_scale_and_size_extractor.html#afe30e073c4410146e2c8ba8f2752737f", null ], + [ "initializationOnThread", "classop_1_1_w_scale_and_size_extractor.html#ac1203ef395a836b13f5586432f284c41", null ], + [ "work", "classop_1_1_w_scale_and_size_extractor.html#afddf54d061dc5325e78252a3bba482b9", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_scale_and_size_extractor.png b/web/html/doc/classop_1_1_w_scale_and_size_extractor.png new file mode 100644 index 000000000..006feb13f Binary files /dev/null and b/web/html/doc/classop_1_1_w_scale_and_size_extractor.png differ diff --git a/web/html/doc/classop_1_1_w_udp_sender-members.html b/web/html/doc/classop_1_1_w_udp_sender-members.html new file mode 100644 index 000000000..5b7c9e315 --- /dev/null +++ b/web/html/doc/classop_1_1_w_udp_sender-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WUdpSender< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_udp_sender.html b/web/html/doc/classop_1_1_w_udp_sender.html new file mode 100644 index 000000000..c3aceef30 --- /dev/null +++ b/web/html/doc/classop_1_1_w_udp_sender.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WUdpSender< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WUdpSender< TDatums > Class Template Reference

#include <wUdpSender.hpp>

Inheritance diagram for op::WUdpSender< TDatums >: derives from op::WorkerConsumer< TDatums >, which in turn derives from op::Worker< TDatums >.

+Public Member Functions

 WUdpSender (const std::shared_ptr< UdpSender > &udpSender)
 
virtual ~WUdpSender ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WUdpSender< TDatums >

+ + +

Definition at line 11 of file wUdpSender.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WUdpSender()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WUdpSender< TDatums >::WUdpSender (const std::shared_ptr< UdpSender > & udpSender)
+
+explicit
+
+ +

Definition at line 38 of file wUdpSender.hpp.

+ +
+
+ +

◆ ~WUdpSender()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WUdpSender< TDatums >::~WUdpSender
+
+virtual
+
+ +

Definition at line 44 of file wUdpSender.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WUdpSender< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wUdpSender.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WUdpSender< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 54 of file wUdpSender.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_udp_sender.js b/web/html/doc/classop_1_1_w_udp_sender.js new file mode 100644 index 000000000..7d7bbe2ab --- /dev/null +++ b/web/html/doc/classop_1_1_w_udp_sender.js @@ -0,0 +1,7 @@ +var classop_1_1_w_udp_sender = +[ + [ "WUdpSender", "classop_1_1_w_udp_sender.html#a22a5ec90fe83ed654bd0aef112fac98b", null ], + [ "~WUdpSender", "classop_1_1_w_udp_sender.html#a684854618fbd74bce420ed44d867f8cd", null ], + [ "initializationOnThread", "classop_1_1_w_udp_sender.html#a567d9fe2adc85ae086379696573112e3", null ], + [ "workConsumer", "classop_1_1_w_udp_sender.html#a615fc6a537ca9f624022698391c11a54", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_udp_sender.png b/web/html/doc/classop_1_1_w_udp_sender.png new file mode 100644 index 000000000..377eb1164 Binary files /dev/null and b/web/html/doc/classop_1_1_w_udp_sender.png differ diff --git a/web/html/doc/classop_1_1_w_verbose_printer-members.html b/web/html/doc/classop_1_1_w_verbose_printer-members.html new file mode 100644 index 000000000..eeafd48ac --- /dev/null +++ b/web/html/doc/classop_1_1_w_verbose_printer-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
op::WVerbosePrinter< TDatums > Member List
+ + + + diff --git a/web/html/doc/classop_1_1_w_verbose_printer.html b/web/html/doc/classop_1_1_w_verbose_printer.html new file mode 100644 index 000000000..884a51e46 --- /dev/null +++ b/web/html/doc/classop_1_1_w_verbose_printer.html @@ -0,0 +1,272 @@ + + + + + + + +OpenPose: op::WVerbosePrinter< TDatums > Class Template Reference + + + + + + + + + + + + + +
op::WVerbosePrinter< TDatums > Class Template Reference

#include <wVerbosePrinter.hpp>

Inheritance diagram for op::WVerbosePrinter< TDatums >: derives from op::Worker< TDatums >.

+Public Member Functions

 WVerbosePrinter (const std::shared_ptr< VerbosePrinter > &verbosePrinter)
 
virtual ~WVerbosePrinter ()
 
void initializationOnThread ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WVerbosePrinter< TDatums >

+ + +

Definition at line 11 of file wVerbosePrinter.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WVerbosePrinter()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WVerbosePrinter< TDatums >::WVerbosePrinter (const std::shared_ptr< VerbosePrinter > & verbosePrinter)
+
+explicit
+
+ +

Definition at line 38 of file wVerbosePrinter.hpp.

+ +
+
+ +

◆ ~WVerbosePrinter()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WVerbosePrinter< TDatums >::~WVerbosePrinter
+
+virtual
+
+ +

Definition at line 45 of file wVerbosePrinter.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WVerbosePrinter< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 50 of file wVerbosePrinter.hpp.

+ +
+
+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WVerbosePrinter< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 55 of file wVerbosePrinter.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_verbose_printer.js b/web/html/doc/classop_1_1_w_verbose_printer.js new file mode 100644 index 000000000..689134951 --- /dev/null +++ b/web/html/doc/classop_1_1_w_verbose_printer.js @@ -0,0 +1,7 @@ +var classop_1_1_w_verbose_printer = +[ + [ "WVerbosePrinter", "classop_1_1_w_verbose_printer.html#a6ece5acbf5f8a7a3e900c5029a56271d", null ], + [ "~WVerbosePrinter", "classop_1_1_w_verbose_printer.html#a32ea8ffef9a255ee33d6d56a550706f8", null ], + [ "initializationOnThread", "classop_1_1_w_verbose_printer.html#a9d21f5db0e70ba4cad73cf2bdf6c9fe2", null ], + [ "work", "classop_1_1_w_verbose_printer.html#af98586e3da7cedd902f70e6521c0ffc4", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_verbose_printer.png b/web/html/doc/classop_1_1_w_verbose_printer.png new file mode 100644 index 000000000..be247f09b Binary files /dev/null and b/web/html/doc/classop_1_1_w_verbose_printer.png differ diff --git a/web/html/doc/classop_1_1_w_video_saver-members.html b/web/html/doc/classop_1_1_w_video_saver-members.html new file mode 100644 index 000000000..57391d370 --- /dev/null +++ b/web/html/doc/classop_1_1_w_video_saver-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::WVideoSaver< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_video_saver.html b/web/html/doc/classop_1_1_w_video_saver.html new file mode 100644 index 000000000..c88558923 --- /dev/null +++ b/web/html/doc/classop_1_1_w_video_saver.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WVideoSaver< TDatums > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::WVideoSaver< TDatums > Class Template Reference
+
+
+ +

#include <wVideoSaver.hpp>

+
+Inheritance diagram for op::WVideoSaver< TDatums >:
+
+
+ + +op::WorkerConsumer< TDatums > +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WVideoSaver (const std::shared_ptr< VideoSaver > &videoSaver)
 
virtual ~WVideoSaver ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WVideoSaver< TDatums >

+ + +

Definition at line 11 of file wVideoSaver.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WVideoSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WVideoSaver< TDatums >::WVideoSaver (const std::shared_ptr< VideoSaver > & videoSaver)
+
+explicit
+
+ +

Definition at line 38 of file wVideoSaver.hpp.

+ +
+
+ +

◆ ~WVideoSaver()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WVideoSaver< TDatums >::~WVideoSaver
+
+virtual
+
+ +

Definition at line 44 of file wVideoSaver.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WVideoSaver< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wVideoSaver.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WVideoSaver< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 54 of file wVideoSaver.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_video_saver.js b/web/html/doc/classop_1_1_w_video_saver.js new file mode 100644 index 000000000..2868378a1 --- /dev/null +++ b/web/html/doc/classop_1_1_w_video_saver.js @@ -0,0 +1,7 @@ +var classop_1_1_w_video_saver = +[ + [ "WVideoSaver", "classop_1_1_w_video_saver.html#a04dc4e6f039d047a0da6f94283c145d9", null ], + [ "~WVideoSaver", "classop_1_1_w_video_saver.html#ac0057c1bbfb3e193c891f167d56fcbab", null ], + [ "initializationOnThread", "classop_1_1_w_video_saver.html#ada90f76b28e4bafe9c8ecbb9bcbb2d14", null ], + [ "workConsumer", "classop_1_1_w_video_saver.html#a40bcb8ccf137c6cbee3ca31e6cc3bfbf", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_video_saver.png b/web/html/doc/classop_1_1_w_video_saver.png new file mode 100644 index 000000000..beeca4d60 Binary files /dev/null and b/web/html/doc/classop_1_1_w_video_saver.png differ diff --git a/web/html/doc/classop_1_1_w_video_saver3_d-members.html b/web/html/doc/classop_1_1_w_video_saver3_d-members.html new file mode 100644 index 000000000..3020be2e8 --- /dev/null +++ b/web/html/doc/classop_1_1_w_video_saver3_d-members.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::WVideoSaver3D< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_w_video_saver3_d.html b/web/html/doc/classop_1_1_w_video_saver3_d.html new file mode 100644 index 000000000..66de6a6e4 --- /dev/null +++ b/web/html/doc/classop_1_1_w_video_saver3_d.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: op::WVideoSaver3D< TDatums > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::WVideoSaver3D< TDatums > Class Template Reference
+
+
+ +

#include <wVideoSaver3D.hpp>

+
+Inheritance diagram for op::WVideoSaver3D< TDatums >:
+
+
+ + +op::WorkerConsumer< TDatums > +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WVideoSaver3D (const std::shared_ptr< VideoSaver > &videoSaver)
 
virtual ~WVideoSaver3D ()
 
void initializationOnThread ()
 
void workConsumer (const TDatums &tDatums)
 
- Public Member Functions inherited from op::WorkerConsumer< TDatums >
virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ +

+Additional Inherited Members

+

Detailed Description

+

template<typename TDatums>
+class op::WVideoSaver3D< TDatums >

+ + +

Definition at line 11 of file wVideoSaver3D.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WVideoSaver3D()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
op::WVideoSaver3D< TDatums >::WVideoSaver3D (const std::shared_ptr< VideoSaver > & videoSaver)
+
+explicit
+
+ +

Definition at line 38 of file wVideoSaver3D.hpp.

+ +
+
+ +

◆ ~WVideoSaver3D()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WVideoSaver3D< TDatums >::~WVideoSaver3D
+
+virtual
+
+ +

Definition at line 44 of file wVideoSaver3D.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ initializationOnThread()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
void op::WVideoSaver3D< TDatums >::initializationOnThread
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 49 of file wVideoSaver3D.hpp.

+ +
+
+ +

◆ workConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WVideoSaver3D< TDatums >::workConsumer (const TDatums & tDatums)
+
+virtual
+
+ +

Implements op::WorkerConsumer< TDatums >.

+ +

Definition at line 54 of file wVideoSaver3D.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_w_video_saver3_d.js b/web/html/doc/classop_1_1_w_video_saver3_d.js new file mode 100644 index 000000000..99f8c302e --- /dev/null +++ b/web/html/doc/classop_1_1_w_video_saver3_d.js @@ -0,0 +1,7 @@ +var classop_1_1_w_video_saver3_d = +[ + [ "WVideoSaver3D", "classop_1_1_w_video_saver3_d.html#a570d2b868a6c3d3932671d56b0dbb531", null ], + [ "~WVideoSaver3D", "classop_1_1_w_video_saver3_d.html#a39482b591eafa150fee3db7027ae093f", null ], + [ "initializationOnThread", "classop_1_1_w_video_saver3_d.html#ad5a050f5646af36bf8d91909e8f47b2f", null ], + [ "workConsumer", "classop_1_1_w_video_saver3_d.html#adef743533fbab522d55c43768d28469e", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_w_video_saver3_d.png b/web/html/doc/classop_1_1_w_video_saver3_d.png new file mode 100644 index 000000000..b7aa18915 Binary files /dev/null and b/web/html/doc/classop_1_1_w_video_saver3_d.png differ diff --git a/web/html/doc/classop_1_1_webcam_reader-members.html b/web/html/doc/classop_1_1_webcam_reader-members.html new file mode 100644 index 000000000..f91ddc4b5 --- /dev/null +++ b/web/html/doc/classop_1_1_webcam_reader-members.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::WebcamReader Member List
+
+
+ +

This is the complete list of members for op::WebcamReader, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
checkFrameIntegrity(Matrix &frame)  (op::Producer, protected)
get(const int capProperty)  (op::WebcamReader, virtual)
op::Producer::get(const ProducerProperty property)  (op::Producer)
getCameraExtrinsics()  (op::Producer, virtual)
getCameraIntrinsics()  (op::Producer, virtual)
getCameraMatrices()  (op::Producer, virtual)
getFrame()  (op::Producer)
getFrames()  (op::Producer)
getNextFrameName()  (op::WebcamReader, virtual)
getType()  (op::Producer, inline)
ifEndedResetOrRelease()  (op::Producer, protected)
isOpened() const  (op::WebcamReader, virtual)
keepDesiredFrameRate()  (op::Producer, protected)
Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)  (op::Producer, explicit)
release()  (op::VideoCaptureReader, virtual)
resetWebcam(const int index, const bool throwExceptionIfNoOpened)  (op::VideoCaptureReader, protected)
set(const int capProperty, const double value)  (op::WebcamReader, virtual)
op::Producer::set(const ProducerProperty property, const double value)  (op::Producer)
setProducerFpsMode(const ProducerFpsMode fpsMode)  (op::Producer)
VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)  (op::VideoCaptureReader, explicit)
VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)  (op::VideoCaptureReader, explicit)
WebcamReader(const int webcamIndex=0, const Point< int > &webcamResolution=Point< int >{}, const bool throwExceptionIfNoOpened=true, const std::string &cameraParameterPath="", const bool undistortImage=false)  (op::WebcamReader, explicit)
~Producer()  (op::Producer, virtual)
~VideoCaptureReader()  (op::VideoCaptureReader, virtual)
~WebcamReader()  (op::WebcamReader, virtual)
+
+ + + + diff --git a/web/html/doc/classop_1_1_webcam_reader.html b/web/html/doc/classop_1_1_webcam_reader.html new file mode 100644 index 000000000..843f40ed0 --- /dev/null +++ b/web/html/doc/classop_1_1_webcam_reader.html @@ -0,0 +1,409 @@ + + + + + + + +OpenPose: op::WebcamReader Class Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::WebcamReader Class Reference
+
+
+ +

#include <webcamReader.hpp>

+
+Inheritance diagram for op::WebcamReader:
+
+
+ + +op::VideoCaptureReader +op::Producer + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WebcamReader (const int webcamIndex=0, const Point< int > &webcamResolution=Point< int >{}, const bool throwExceptionIfNoOpened=true, const std::string &cameraParameterPath="", const bool undistortImage=false)
 
virtual ~WebcamReader ()
 
std::string getNextFrameName ()
 
bool isOpened () const
 
double get (const int capProperty)
 
void set (const int capProperty, const double value)
 
- Public Member Functions inherited from op::VideoCaptureReader
 VideoCaptureReader (const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
 VideoCaptureReader (const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
 
virtual ~VideoCaptureReader ()
 
void release ()
 
- Public Member Functions inherited from op::Producer
 Producer (const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
 
virtual ~Producer ()
 
Matrix getFrame ()
 
std::vector< MatrixgetFrames ()
 
virtual std::vector< MatrixgetCameraMatrices ()
 
virtual std::vector< MatrixgetCameraExtrinsics ()
 
virtual std::vector< MatrixgetCameraIntrinsics ()
 
void setProducerFpsMode (const ProducerFpsMode fpsMode)
 
ProducerType getType ()
 
double get (const ProducerProperty property)
 
void set (const ProducerProperty property, const double value)
 
+ + + + + + + + + + + +

+Additional Inherited Members

- Protected Member Functions inherited from op::VideoCaptureReader
void resetWebcam (const int index, const bool throwExceptionIfNoOpened)
 
- Protected Member Functions inherited from op::Producer
void checkFrameIntegrity (Matrix &frame)
 
void ifEndedResetOrRelease ()
 
void keepDesiredFrameRate ()
 
+

Detailed Description

+

WebcamReader is a wrapper of the cv::VideoCapture class for webcams. It allows controlling the video stream (extracting frames, setting resolution & fps, etc.).

+ +

Definition at line 15 of file webcamReader.hpp.

+

Constructor & Destructor Documentation

+ +

◆ WebcamReader()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
op::WebcamReader::WebcamReader (const int webcamIndex = 0,
const Point< int > & webcamResolution = Point< int >{},
const bool throwExceptionIfNoOpened = true,
const std::string & cameraParameterPath = "",
const bool undistortImage = false 
)
+
+explicit
+
+

Constructor of WebcamReader. It opens the webcam as a wrapper of cv::VideoCapture. It includes an argument to indicate the desired resolution.

Parameters
+ + + + +
webcamIndex: const int indicating the camera source (see the OpenCV documentation about cv::VideoCapture for more details), in the range [0, 9].
webcamResolution: const Point<int> parameter which specifies the desired camera resolution.
throwExceptionIfNoOpened: Bool parameter which specifies whether to throw an exception if the camera cannot be opened.
+
+
+ +
+
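As an illustration of this constructor (the device index and the 1280x720 resolution are arbitrary, and <openpose/headers.hpp> is assumed here as the umbrella OpenPose include), a minimal sketch could be:

```cpp
#include <openpose/headers.hpp> // assumed umbrella header providing WebcamReader, Point and Matrix

int main()
{
    // Open webcam 0 and request a 1280x720 capture resolution
    // (the driver may fall back to the closest supported mode).
    op::WebcamReader webcamReader{0, op::Point<int>{1280, 720}};

    if (webcamReader.isOpened())
    {
        // getFrame() is inherited from op::Producer and returns the next captured image.
        const op::Matrix frame = webcamReader.getFrame();
        if (!frame.empty())
        {
            // The frame is ready to be processed or displayed here.
        }
    }
    return 0;
}
```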
+ +

◆ ~WebcamReader()

+ +
+
+ + + + + +
+ + + + + + + +
virtual op::WebcamReader::~WebcamReader ()
+
+virtual
+
+ +
+
+

Member Function Documentation

+ +

◆ get()

+ +
+
+ + + + + +
+ + + + + + + + +
double op::WebcamReader::get (const int capProperty)
+
+virtual
+
+

This function is a wrapper of cv::VideoCapture::get. It allows getting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + +
capProperty: int indicating the property to be retrieved.
+
+
+
Returns
double returning the property value.
+ +

Implements op::VideoCaptureReader.

+ +
+
+ +

◆ getNextFrameName()

+ +
+
+ + + + + +
+ + + + + + + +
std::string op::WebcamReader::getNextFrameName ()
+
+virtual
+
+

This function returns a unique frame name (e.g., the frame number for video, the frame counter for webcam, the image name for image directory reader, etc.).

Returns
std::string with a unique frame name.
+ +

Implements op::VideoCaptureReader.

+ +
+
+ +

◆ isOpened()

+ +
+
+ + + + + +
+ + + + + + + +
bool op::WebcamReader::isOpened () const
+
+virtual
+
+

This function returns whether the Producer instance is still open and able to retrieve more frames.

Returns
bool indicating whether the Producer is opened.
+ +

Reimplemented from op::VideoCaptureReader.

+ +
+
+ +

◆ set()

+ +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + +
void op::WebcamReader::set (const int capProperty,
const double value 
)
+
+virtual
+
+

This function is a wrapper of cv::VideoCapture::set. It allows setting different properties of the Producer (fps, width, height, etc.). See the OpenCV documentation for all the available properties.

Parameters
+ + + +
capProperty: int indicating the property to be modified.
value: double indicating the new value to be assigned.
+
+
+ +

Implements op::VideoCaptureReader.

+ +
+
+
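Because get() and set() are thin wrappers of the corresponding cv::VideoCapture calls, the usual OpenCV property identifiers can be passed directly. A short sketch (the helper function and the 30 fps value are illustrative only):

```cpp
#include <cstdio>
#include <opencv2/videoio.hpp>   // cv::CAP_PROP_* property identifiers
#include <openpose/headers.hpp>  // assumed umbrella header providing op::WebcamReader

// Illustrative helper (not part of OpenPose): query and tune capture properties.
void printAndTuneProperties(op::WebcamReader& webcamReader)
{
    // Wrappers of cv::VideoCapture::get.
    const auto width  = webcamReader.get(cv::CAP_PROP_FRAME_WIDTH);
    const auto height = webcamReader.get(cv::CAP_PROP_FRAME_HEIGHT);
    const auto fps    = webcamReader.get(cv::CAP_PROP_FPS);
    std::printf("Capturing at %.0fx%.0f, %.1f fps\n", width, height, fps);

    // Wrapper of cv::VideoCapture::set; unsupported values may be silently ignored by the driver.
    webcamReader.set(cv::CAP_PROP_FPS, 30.0);
}
```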
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_webcam_reader.js b/web/html/doc/classop_1_1_webcam_reader.js new file mode 100644 index 000000000..63d4dae1e --- /dev/null +++ b/web/html/doc/classop_1_1_webcam_reader.js @@ -0,0 +1,9 @@ +var classop_1_1_webcam_reader = +[ + [ "WebcamReader", "classop_1_1_webcam_reader.html#a15fa1b26adfb75c0f072dcdb44c8fc44", null ], + [ "~WebcamReader", "classop_1_1_webcam_reader.html#aea29bfce4df5493d662ed3a892f364d2", null ], + [ "get", "classop_1_1_webcam_reader.html#a38ccbdf61f21fba0694362077cb6bdb1", null ], + [ "getNextFrameName", "classop_1_1_webcam_reader.html#a58c315e577c12486e5ab1b941d4cce04", null ], + [ "isOpened", "classop_1_1_webcam_reader.html#a6a065fcf3d6dca624741adc0f77da11d", null ], + [ "set", "classop_1_1_webcam_reader.html#ae66b26829cc2d6e3f02109d4431a7bc2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_webcam_reader.png b/web/html/doc/classop_1_1_webcam_reader.png new file mode 100644 index 000000000..56b9e2b4e Binary files /dev/null and b/web/html/doc/classop_1_1_webcam_reader.png differ diff --git a/web/html/doc/classop_1_1_worker-members.html b/web/html/doc/classop_1_1_worker-members.html new file mode 100644 index 000000000..8dda7b88d --- /dev/null +++ b/web/html/doc/classop_1_1_worker-members.html @@ -0,0 +1,112 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::Worker< TDatums > Member List
+
+
+ +

This is the complete list of members for op::Worker< TDatums >, including all inherited members.

+ + + + + + + + + + +
checkAndWork(TDatums &tDatums)  (op::Worker< TDatums >)
initializationOnThread()=0  (op::Worker< TDatums >, protected, pure virtual)
initializationOnThreadNoException()  (op::Worker< TDatums >)
isRunning() const  (op::Worker< TDatums >, inline)
stop()  (op::Worker< TDatums >, inline)
tryStop()  (op::Worker< TDatums >, inline, virtual)
work(TDatums &tDatums)=0  (op::Worker< TDatums >, protected, pure virtual)
Worker()  (op::Worker< TDatums >)
~Worker()  (op::Worker< TDatums >, virtual)
+
+ + + + diff --git a/web/html/doc/classop_1_1_worker.html b/web/html/doc/classop_1_1_worker.html new file mode 100644 index 000000000..1bb7a1c7d --- /dev/null +++ b/web/html/doc/classop_1_1_worker.html @@ -0,0 +1,412 @@ + + + + + + + +OpenPose: op::Worker< TDatums > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::Worker< TDatums > Class Template Referenceabstract
+
+
+ +

#include <worker.hpp>

+
+Inheritance diagram for op::Worker< TDatums >:
+
+
+ + +op::WorkerProducer< std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > > +op::WCvMatToOpInput< TDatums > +op::WCvMatToOpOutput< TDatums > +op::WFaceDetector< TDatums > +op::WFaceDetectorOpenCV< TDatums > +op::WFaceExtractorNet< TDatums > +op::WFaceRenderer< TDatums > +op::WFpsMax< TDatums > +op::WGuiInfoAdder< TDatums > +op::WHandDetector< TDatums > +op::WHandDetectorFromTxt< TDatums > +op::WHandDetectorTracking< TDatums > +op::WHandDetectorUpdate< TDatums > +op::WHandExtractorNet< TDatums > +op::WHandRenderer< TDatums > +op::WIdGenerator< TDatums > +op::WKeepTopNPeople< TDatums > +op::WKeypointScaler< TDatums > +op::WOpOutputToCvMat< TDatums > +op::WPersonIdExtractor< TDatums > +op::WPoseExtractor< TDatums > +op::WPoseExtractorNet< TDatums > +op::WPoseRenderer< TDatums > +op::WPoseTriangulation< TDatums > +op::WQueueOrderer< TDatums > +op::WScaleAndSizeExtractor< TDatums > +op::WVerbosePrinter< TDatums > +op::WorkerConsumer< TDatums > +op::WorkerProducer< TDatums > + +
+ + + + + + + + + + + + + + + + +

+Public Member Functions

 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ + + + + +

+Protected Member Functions

virtual void initializationOnThread ()=0
 
virtual void work (TDatums &tDatums)=0
 
+

Detailed Description

+

template<typename TDatums>
+class op::Worker< TDatums >

+ + +

Definition at line 9 of file worker.hpp.

+
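Since subclasses only need to implement the two protected pure virtual methods listed above, a custom worker can be sketched as follows. The class name, the frame counter and the error-handling pattern are illustrative; only the overridden signatures come from this page.

```cpp
#include <openpose/headers.hpp>

// Hypothetical example worker (not part of OpenPose): counts the datum sets it receives.
class WExampleCounter : public op::Worker<std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>>
{
public:
    void initializationOnThread() override
    {
        // Per-thread initialization would go here (nothing needed for this sketch).
    }

    void work(std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>& tDatums) override
    {
        try
        {
            if (tDatums != nullptr && !tDatums->empty())
                mProcessedFrames++; // placeholder for the actual per-frame processing
        }
        catch (const std::exception& e)
        {
            this->stop(); // stop the pipeline on failure, mirroring the built-in workers
            op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

private:
    unsigned long long mProcessedFrames = 0u;
};
```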

Constructor & Destructor Documentation

+ +

◆ Worker()

+ +
+
+
+template<typename TDatums >
+ + + + +
op::Worker< TDatums >::Worker
+
+ +

Definition at line 57 of file worker.hpp.

+ +
+
+ +

◆ ~Worker()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::Worker< TDatums >::~Worker
+
+virtual
+
+ +

Definition at line 63 of file worker.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ checkAndWork()

+ +
+
+
+template<typename TDatums >
+ + + + + + + + +
bool op::Worker< TDatums >::checkAndWork (TDatums & tDatums)
+
+ +

Definition at line 82 of file worker.hpp.

+ +
+
+ +

◆ initializationOnThread()

+ + + +

◆ initializationOnThreadNoException()

+ +
+
+
+template<typename TDatums >
+ + + + +
void op::Worker< TDatums >::initializationOnThreadNoException
+
+ +

Definition at line 68 of file worker.hpp.

+ +
+
+ +

◆ isRunning()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + +
bool op::Worker< TDatums >::isRunning () const
+
+inline
+
+ +

Definition at line 20 of file worker.hpp.

+ +
+
+ +

◆ stop()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + +
void op::Worker< TDatums >::stop ()
+
+inline
+
+ +

Definition at line 25 of file worker.hpp.

+ +
+
+ +

◆ tryStop()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + +
virtual void op::Worker< TDatums >::tryStop ()
+
+inlinevirtual
+
+ +

Reimplemented in op::WQueueOrderer< TDatums >.

+ +

Definition at line 32 of file worker.hpp.

+ +
+
+ +

◆ work()

+ + +
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_worker.js b/web/html/doc/classop_1_1_worker.js new file mode 100644 index 000000000..e22a46b95 --- /dev/null +++ b/web/html/doc/classop_1_1_worker.js @@ -0,0 +1,12 @@ +var classop_1_1_worker = +[ + [ "Worker", "classop_1_1_worker.html#a5008fc4ef4e41366ba0022f5cd79edba", null ], + [ "~Worker", "classop_1_1_worker.html#a6ebe180098e00ac062a1bb31d462df60", null ], + [ "checkAndWork", "classop_1_1_worker.html#a6e4e84bd2052919bc48df1ec4b913ecf", null ], + [ "initializationOnThread", "classop_1_1_worker.html#aa5be4df9d4d8302728c653870e7d2a23", null ], + [ "initializationOnThreadNoException", "classop_1_1_worker.html#a5df10dd8a245df1a6d8df18978490899", null ], + [ "isRunning", "classop_1_1_worker.html#a567902b58e492421a6ad771e730ddf53", null ], + [ "stop", "classop_1_1_worker.html#ae45ac828f6e8f6055203c224e50f145b", null ], + [ "tryStop", "classop_1_1_worker.html#ad689b232d68f3b3e0b41f9e219b01134", null ], + [ "work", "classop_1_1_worker.html#a9acadd6df7af03b31b9e354ae815f781", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_worker.png b/web/html/doc/classop_1_1_worker.png new file mode 100644 index 000000000..616ebd3a6 Binary files /dev/null and b/web/html/doc/classop_1_1_worker.png differ diff --git a/web/html/doc/classop_1_1_worker_consumer-members.html b/web/html/doc/classop_1_1_worker_consumer-members.html new file mode 100644 index 000000000..65899acaa --- /dev/null +++ b/web/html/doc/classop_1_1_worker_consumer-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::WorkerConsumer< TDatums > Member List
+
+
+ +

This is the complete list of members for op::WorkerConsumer< TDatums >, including all inherited members.

+ + + + + + + + + + + + +
checkAndWork(TDatums &tDatums)  (op::Worker< TDatums >)
initializationOnThread()=0  (op::Worker< TDatums >, protected, pure virtual)
initializationOnThreadNoException()  (op::Worker< TDatums >)
isRunning() const  (op::Worker< TDatums >, inline)
stop()  (op::Worker< TDatums >, inline)
tryStop()  (op::Worker< TDatums >, inline, virtual)
work(TDatums &tDatums)  (op::WorkerConsumer< TDatums >, virtual)
workConsumer(const TDatums &tDatums)=0  (op::WorkerConsumer< TDatums >, protected, pure virtual)
Worker()  (op::Worker< TDatums >)
~Worker()  (op::Worker< TDatums >, virtual)
~WorkerConsumer()  (op::WorkerConsumer< TDatums >, virtual)
+
+ + + + diff --git a/web/html/doc/classop_1_1_worker_consumer.html b/web/html/doc/classop_1_1_worker_consumer.html new file mode 100644 index 000000000..cfb07ce9b --- /dev/null +++ b/web/html/doc/classop_1_1_worker_consumer.html @@ -0,0 +1,258 @@ + + + + + + + +OpenPose: op::WorkerConsumer< TDatums > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::WorkerConsumer< TDatums > Class Template Referenceabstract
+
+
+ +

#include <workerConsumer.hpp>

+
+Inheritance diagram for op::WorkerConsumer< TDatums >:
+
+
+ + +op::Worker< TDatums > +op::WCocoJsonSaver< TDatums > +op::WFaceSaver< TDatums > +op::WGui< TDatums > +op::WGui3D< TDatums > +op::WHandSaver< TDatums > +op::WHeatMapSaver< TDatums > +op::WImageSaver< TDatums > +op::WPeopleJsonSaver< TDatums > +op::WPoseSaver< TDatums > +op::WUdpSender< TDatums > +op::WVideoSaver< TDatums > +op::WVideoSaver3D< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

virtual ~WorkerConsumer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ + + + + + +

+Protected Member Functions

virtual void workConsumer (const TDatums &tDatums)=0
 
- Protected Member Functions inherited from op::Worker< TDatums >
virtual void initializationOnThread ()=0
 
+

Detailed Description

+

template<typename TDatums>
+class op::WorkerConsumer< TDatums >

+ + +

Definition at line 10 of file workerConsumer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ ~WorkerConsumer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WorkerConsumer< TDatums >::~WorkerConsumer
+
+virtual
+
+ +

Definition at line 30 of file workerConsumer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WorkerConsumer< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 35 of file workerConsumer.hpp.

+ +
+
+ +

◆ workConsumer()

+ + +
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_worker_consumer.js b/web/html/doc/classop_1_1_worker_consumer.js new file mode 100644 index 000000000..531842bc3 --- /dev/null +++ b/web/html/doc/classop_1_1_worker_consumer.js @@ -0,0 +1,6 @@ +var classop_1_1_worker_consumer = +[ + [ "~WorkerConsumer", "classop_1_1_worker_consumer.html#a9aaa75f194df6b3ed4994c8e95aa0ab5", null ], + [ "work", "classop_1_1_worker_consumer.html#a7383747b3bdc6ac79e6f9afbf2c28d27", null ], + [ "workConsumer", "classop_1_1_worker_consumer.html#a26cf5c40df363d94d603fce92a5b69eb", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_worker_consumer.png b/web/html/doc/classop_1_1_worker_consumer.png new file mode 100644 index 000000000..fdaa74df4 Binary files /dev/null and b/web/html/doc/classop_1_1_worker_consumer.png differ diff --git a/web/html/doc/classop_1_1_worker_producer-members.html b/web/html/doc/classop_1_1_worker_producer-members.html new file mode 100644 index 000000000..0a9e848ee --- /dev/null +++ b/web/html/doc/classop_1_1_worker_producer-members.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::WorkerProducer< TDatums > Member List
+
+ +
+ + + + diff --git a/web/html/doc/classop_1_1_worker_producer.html b/web/html/doc/classop_1_1_worker_producer.html new file mode 100644 index 000000000..5c250fe9b --- /dev/null +++ b/web/html/doc/classop_1_1_worker_producer.html @@ -0,0 +1,245 @@ + + + + + + + +OpenPose: op::WorkerProducer< TDatums > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::WorkerProducer< TDatums > Class Template Referenceabstract
+
+
+ +

#include <workerProducer.hpp>

+
+Inheritance diagram for op::WorkerProducer< TDatums >:
+
+
+ + +op::Worker< TDatums > + +
+ + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

virtual ~WorkerProducer ()
 
void work (TDatums &tDatums)
 
- Public Member Functions inherited from op::Worker< TDatums >
 Worker ()
 
virtual ~Worker ()
 
void initializationOnThreadNoException ()
 
bool checkAndWork (TDatums &tDatums)
 
bool isRunning () const
 
void stop ()
 
virtual void tryStop ()
 
+ + + + + + +

+Protected Member Functions

virtual TDatums workProducer ()=0
 
- Protected Member Functions inherited from op::Worker< TDatums >
virtual void initializationOnThread ()=0
 
+

Detailed Description

+

template<typename TDatums>
+class op::WorkerProducer< TDatums >

+ + +

Definition at line 10 of file workerProducer.hpp.

+

Constructor & Destructor Documentation

+ +

◆ ~WorkerProducer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + +
op::WorkerProducer< TDatums >::~WorkerProducer
+
+virtual
+
+ +

Definition at line 30 of file workerProducer.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ work()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + + +
void op::WorkerProducer< TDatums >::work (TDatums & tDatums)
+
+virtual
+
+ +

Implements op::Worker< TDatums >.

+ +

Definition at line 35 of file workerProducer.hpp.

+ +
+
+ +

◆ workProducer()

+ +
+
+
+template<typename TDatums >
+ + + + + +
+ + + + + + + +
virtual TDatums op::WorkerProducer< TDatums >::workProducer ()
+
+protectedpure virtual
+
+ +

Implemented in op::WDatumProducer< TDatum >.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_worker_producer.js b/web/html/doc/classop_1_1_worker_producer.js new file mode 100644 index 000000000..d9c130b5d --- /dev/null +++ b/web/html/doc/classop_1_1_worker_producer.js @@ -0,0 +1,6 @@ +var classop_1_1_worker_producer = +[ + [ "~WorkerProducer", "classop_1_1_worker_producer.html#a8f2592f70d723de4b818c97b25c5a476", null ], + [ "work", "classop_1_1_worker_producer.html#a0259f0b387e2b868388ba0a6769f4691", null ], + [ "workProducer", "classop_1_1_worker_producer.html#a364992ef862fe84a78416e2b556daae7", null ] +]; \ No newline at end of file diff --git a/web/html/doc/classop_1_1_worker_producer.png b/web/html/doc/classop_1_1_worker_producer.png new file mode 100644 index 000000000..0243571ec Binary files /dev/null and b/web/html/doc/classop_1_1_worker_producer.png differ diff --git a/web/html/doc/classop_1_1_wrapper_t-members.html b/web/html/doc/classop_1_1_wrapper_t-members.html new file mode 100644 index 000000000..9e5bacbc6 --- /dev/null +++ b/web/html/doc/classop_1_1_wrapper_t-members.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker > Member List
+
+
+ +

This is the complete list of members for op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >, including all inherited members.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
All of the following members are defined in op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >:
  configure(const WrapperStructPose &wrapperStructPose)
  configure(const WrapperStructFace &wrapperStructFace)
  configure(const WrapperStructHand &wrapperStructHand)
  configure(const WrapperStructExtra &wrapperStructExtra)
  configure(const WrapperStructInput &wrapperStructInput)
  configure(const WrapperStructOutput &wrapperStructOutput)
  configure(const WrapperStructGui &wrapperStructGui)
  disableMultiThreading()
  emplaceAndPop(TDatumsSP &tDatums)
  emplaceAndPop(const Matrix &matrix)
  exec()
  isRunning() const
  setDefaultMaxSizeQueues(const long long defaultMaxSizeQueues=-1)
  setWorker(const WorkerType workerType, const TWorker &worker, const bool workerOnNewThread=true)
  start()
  stop()
  tryEmplace(TDatumsSP &tDatums)
  tryPop(TDatumsSP &tDatums)
  tryPush(const TDatumsSP &tDatums)
  waitAndEmplace(TDatumsSP &tDatums)
  waitAndEmplace(Matrix &matrix)
  waitAndPop(TDatumsSP &tDatums)
  waitAndPush(const TDatumsSP &tDatums)
  waitAndPush(const Matrix &matrix)
  WrapperT(const ThreadManagerMode threadManagerMode=ThreadManagerMode::Synchronous)  (explicit)
  ~WrapperT()  (virtual)
+
+ + + + diff --git a/web/html/doc/classop_1_1_wrapper_t.html b/web/html/doc/classop_1_1_wrapper_t.html new file mode 100644 index 000000000..2f4c1d460 --- /dev/null +++ b/web/html/doc/classop_1_1_wrapper_t.html @@ -0,0 +1,870 @@ + + + + + + + +OpenPose: op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker > Class Template Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker > Class Template Reference
+
+
+ +

#include <wrapper.hpp>

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Public Member Functions

 WrapperT (const ThreadManagerMode threadManagerMode=ThreadManagerMode::Synchronous)
 
virtual ~WrapperT ()
 
void disableMultiThreading ()
 
void setWorker (const WorkerType workerType, const TWorker &worker, const bool workerOnNewThread=true)
 
void configure (const WrapperStructPose &wrapperStructPose)
 
void configure (const WrapperStructFace &wrapperStructFace)
 
void configure (const WrapperStructHand &wrapperStructHand)
 
void configure (const WrapperStructExtra &wrapperStructExtra)
 
void configure (const WrapperStructInput &wrapperStructInput)
 
void configure (const WrapperStructOutput &wrapperStructOutput)
 
void configure (const WrapperStructGui &wrapperStructGui)
 
void exec ()
 
void start ()
 
void stop ()
 
bool isRunning () const
 
void setDefaultMaxSizeQueues (const long long defaultMaxSizeQueues=-1)
 
bool tryEmplace (TDatumsSP &tDatums)
 
bool waitAndEmplace (TDatumsSP &tDatums)
 
bool waitAndEmplace (Matrix &matrix)
 
bool tryPush (const TDatumsSP &tDatums)
 
bool waitAndPush (const TDatumsSP &tDatums)
 
bool waitAndPush (const Matrix &matrix)
 
bool tryPop (TDatumsSP &tDatums)
 
bool waitAndPop (TDatumsSP &tDatums)
 
bool emplaceAndPop (TDatumsSP &tDatums)
 
TDatumsSP emplaceAndPop (const Matrix &matrix)
 
+

Detailed Description

+

template<typename TDatum = BASE_DATUM, typename TDatums = std::vector<std::shared_ptr<TDatum>>, typename TDatumsSP = std::shared_ptr<TDatums>, typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
+class op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >

+ +

WrapperT: OpenPose all-in-one wrapper template class, simplified into Wrapper for WrapperT<std::vector<Datum>>. WrapperT allows the user to set up the input (video, webcam, custom input, etc.), pose, face and/or hand estimation and rendering, and the output (integrated small GUI, custom output, etc.).

+

The class can be used in 2 ways (see the asynchronous sketch below):
  • Synchronous mode: call the full constructor with your desired input and output workers.
  • Asynchronous mode: call the empty constructor WrapperT() + use the emplace and pop functions to push the original frames and retrieve the processed ones.
  • Mix of them:
    • Synchronous input + asynchronous output: call the constructor WrapperT(ThreadManagerMode::Synchronous, workersInput, {}, true)
    • Asynchronous input + synchronous output: call the constructor WrapperT(ThreadManagerMode::Synchronous, nullptr, workersOutput, irrelevantBoolean, true)

Definition at line 36 of file wrapper.hpp.
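To make the asynchronous mode above concrete, here is a minimal sketch in the spirit of the OpenPose tutorial examples. It assumes the default op::Datum datum type, the OP_CV2OPCONSTMAT conversion helper and op::opLog from the OpenPose 1.7 API, and uses a placeholder image path; it is an illustration, not the canonical example.

```cpp
#include <opencv2/imgcodecs.hpp> // cv::imread
#include <openpose/headers.hpp>

int main()
{
    // Asynchronous mode: frames are pushed and popped manually through the emplace/pop members.
    op::WrapperT<op::Datum> opWrapperT{op::ThreadManagerMode::Asynchronous};
    opWrapperT.configure(op::WrapperStructPose{}); // default body-pose settings
    opWrapperT.start();                            // non-blocking, see start() below

    // Read an image (path is a placeholder) and push it through the pipeline.
    const cv::Mat cvImage = cv::imread("examples/media/COCO_val2014_000000000192.jpg");
    const op::Matrix opImage = OP_CV2OPCONSTMAT(cvImage); // assumed cv::Mat -> op::Matrix helper macro
    const auto datumsPtr = opWrapperT.emplaceAndPop(opImage);
    if (datumsPtr != nullptr && !datumsPtr->empty())
        op::opLog("Body keypoints: " + datumsPtr->at(0)->poseKeypoints.toString());

    opWrapperT.stop();
    return 0;
}
```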

+

Constructor & Destructor Documentation

+ +

◆ WrapperT()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + +
+ + + + + + + + +
op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::WrapperT (const ThreadManagerMode threadManagerMode = ThreadManagerMode::Synchronous)
+
+explicit
+
+

Constructor.

Parameters
+ + +
threadManagerMode: Thread synchronization mode. If set to ThreadManagerMode::Synchronous, everything will run inside the WrapperT. If set to ThreadManagerMode::Asynchronous(In/Out), then the input (frames producer) and/or output (GUI, writing results, etc.) will be controlled outside the WrapperT class by the user. See ThreadManagerMode for a detailed explanation of when to use each one.
+
+
+ +

Definition at line 261 of file wrapper.hpp.

+ +
+
+ +

◆ ~WrapperT()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + +
+ + + + +
op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::~WrapperT
+
+virtual
+
+

Destructor. It automatically frees resources.

+ +

Definition at line 269 of file wrapper.hpp.

+ +
+
+

Member Function Documentation

+ +

◆ configure() [1/7]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::configure (const WrapperStructExtrawrapperStructExtra)
+
+

Analogous to configure() but applied to the extra options (WrapperStructExtra)

+ +

Definition at line 359 of file wrapper.hpp.

+ +
+
+ +

◆ configure() [2/7]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::configure (const WrapperStructFacewrapperStructFace)
+
+

Analogous to configure(WrapperStructPose) but applied to face (WrapperStructFace)

+ +

Definition at line 333 of file wrapper.hpp.

+ +
+
+ +

◆ configure() [3/7]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::configure (const WrapperStructGuiwrapperStructGui)
+
+

Analogous to configure() but applied to the GUI (WrapperStructGui)

+ +

Definition at line 398 of file wrapper.hpp.

+ +
+
+ +

◆ configure() [4/7]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::configure (const WrapperStructHandwrapperStructHand)
+
+

Analogous to configure() but applied to hand (WrapperStructHand)

+ +

Definition at line 346 of file wrapper.hpp.

+ +
+
+ +

◆ configure() [5/7]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::configure (const WrapperStructInputwrapperStructInput)
+
+

Analogous to configure() but applied to the input (WrapperStructInput)

+ +

Definition at line 372 of file wrapper.hpp.

+ +
+
+ +

◆ configure() [6/7]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::configure (const WrapperStructOutputwrapperStructOutput)
+
+

Analogous to configure() but applied to the output (WrapperStructOutput)

+ +

Definition at line 385 of file wrapper.hpp.

+ +
+
+ +

◆ configure() [7/7]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::configure (const WrapperStructPosewrapperStructPose)
+
+

It configures the pose parameters. There is no need to call it if the default values are desired.

+ +

Definition at line 320 of file wrapper.hpp.

+ +
+
+ +

◆ disableMultiThreading()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::disableMultiThreading
+
+

Disable multi-threading. Useful for debugging and logging, all the Workers will run in the same thread. Note that workerOnNewThread (argument for setWorker function) will not make any effect.

+ +

Definition at line 287 of file wrapper.hpp.

+ +
+
+ +

◆ emplaceAndPop() [1/2]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
TDatumsSP op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::emplaceAndPop (const Matrixmatrix)
+
+

Similar to emplaceAndPop(TDatumsSP& tDatums), but it takes a Matrix as input.

Parameters
+ + +
matrix: Matrix with the image to be processed.
+
+
+
Returns
TDatumsSP element where the processed information will be placed.
+ +

Definition at line 690 of file wrapper.hpp.

+ +
+
+ +

◆ emplaceAndPop() [2/2]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::emplaceAndPop (TDatumsSP & tDatums)
+
+

Runs both waitAndEmplace and waitAndPop.

Parameters
+ + +
tDatums: TDatumsSP element where the retrieved element will be placed.
+
+
+
Returns
Boolean specifying whether the tDatums could be retrieved.
+ +

Definition at line 673 of file wrapper.hpp.

+ +
+
+ +

◆ exec()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::exec
+
+

Function to start multi-threading. Similar to start(), but exec() blocks the thread that calls the function (it saves 1 thread). Use exec() instead of start() if the calling thread will otherwise be waiting for the WrapperT to end.

+ +

Definition at line 411 of file wrapper.hpp.

+ +
+
+ +
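For comparison with start(), a minimal synchronous sketch (the configure() calls shown use default-constructed structs; a real program would at least point WrapperStructInput at an actual webcam, video or image folder):

```cpp
#include <openpose/headers.hpp>

int main()
{
    op::WrapperT<op::Datum> opWrapperT{op::ThreadManagerMode::Synchronous};
    opWrapperT.configure(op::WrapperStructPose{});  // default pose settings
    opWrapperT.configure(op::WrapperStructInput{}); // choose the producer here in a real program
    opWrapperT.configure(op::WrapperStructGui{});   // default GUI settings
    opWrapperT.exec(); // blocks the calling thread until the producer finishes or stop() is called
    return 0;
}
```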

◆ isRunning()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::isRunning
+
+

Whether the WrapperT is running. It will return true after exec() or start() and before stop(), and false otherwise.

Returns
Boolean specifying whether the WrapperT is running.
+ +

Definition at line 460 of file wrapper.hpp.

+ +
+
+ +

◆ setDefaultMaxSizeQueues()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::setDefaultMaxSizeQueues (const long long defaultMaxSizeQueues = -1)
+
+

It sets the maximum number of elements in the queue. For maximum speed, set it to a very large number, but the trade-off would be:
  • Latency will hugely increase.
  • The program might run out of RAM (so the computer might freeze).
For minimum latency while keeping an optimal speed, set it to -1; the ideal number will then be detected automatically based on how many elements are connected to that queue.

Parameters
    defaultMaxSizeQueues: long long element with the maximum number of elements on the queue.

Definition at line 474 of file wrapper.hpp.

+ +
+
+ +

◆ setWorker()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + + + + + + + + + + + + + + + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::setWorker (const WorkerType workerType,
const TWorker & worker,
const bool workerOnNewThread = true 
)
+
+

Add a user-defined extra Worker for a desired task (input, output, ...).

Parameters
+ + + + +
workerType: WorkerType to configure (e.g., Input, PreProcessing, PostProcessing, Output).
worker: TWorker to be added.
workerOnNewThread: Whether to add this TWorker on a new thread (if it is computationally demanding) or simply reuse existing threads (for light functions). Set to true if the performance time is unknown.
+
+
+ +

Definition at line 300 of file wrapper.hpp.

+ +
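A hedged sketch of registering such a worker, continuing the WrapperT examples above (WUserOutput is a hypothetical user-defined class; only setWorker() and WorkerType::Output come from this page):

```cpp
// Sketch only: WUserOutput is assumed to derive from
// op::WorkerConsumer<std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>>
// and to implement initializationOnThread() and workConsumer().
const auto wUserOutput = std::make_shared<WUserOutput>();
// Register it as the Output stage; run it on its own thread because its cost is unknown.
opWrapperT.setWorker(op::WorkerType::Output, wUserOutput, true);
```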
+
+ +

◆ start()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::start
+
+

Function to start multi-threading. Similar to exec(), but start() does not block the thread that calls the function. It just opens new threads, so it lets the user perform other tasks meanwhile on the calling thread. VERY IMPORTANT NOTE: if the GUI is selected and OpenCV is compiled with Qt support, this option will not work. Qt needs the main thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to: QMetaMethod::invoke: Unable to invoke methods with return values in queued connections. Use exec() in that case.

+ +

Definition at line 429 of file wrapper.hpp.

+ +
+
+ +

◆ stop()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + +
void op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::stop
+
+

Function to stop multi-threading. It can be called internally or externally.

+ +

Definition at line 447 of file wrapper.hpp.

+ +
+
+ +

◆ tryEmplace()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::tryEmplace (TDatumsSP & tDatums)
+
+

Emplace (move) an element on the first (input) queue. Only valid if ThreadManagerMode::Asynchronous or ThreadManagerMode::AsynchronousIn. If the input queue is full or the WrapperT was stopped, it will return false and not emplace it.

Parameters
+ + +
tDatums: TDatumsSP element to be emplaced.
+
+
+
Returns
Boolean specifying whether the tDatums could be emplaced.
+ +

Definition at line 487 of file wrapper.hpp.

+ +
+
+ +

◆ tryPop()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::tryPop (TDatumsSP & tDatums)
+
+

Pop (retrieve) an element from the last (output) queue. Only valid if ThreadManagerMode::Asynchronous or ThreadManagerMode::AsynchronousOut. If the output queue is empty or the WrapperT was stopped, it will return false and not retrieve it.

Parameters
+ + +
tDatums: TDatumsSP element where the retrieved element will be placed.
+
+
+
Returns
Boolean specifying whether the tDatums could be retrieved.
+ +

Definition at line 639 of file wrapper.hpp.

+ +
+
+ +

◆ tryPush()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::tryPush (const TDatumsSP & tDatums)
+
+

Push (copy) an element on the first (input) queue. Same as tryEmplace, but it copies the data instead of moving it.

Parameters
+ + +
tDatums: TDatumsSP element to be pushed.
+
+
+
Returns
Boolean specifying whether the tDatums could be pushed.
+ +

Definition at line 583 of file wrapper.hpp.

+ +
+
+ +

◆ waitAndEmplace() [1/2]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::waitAndEmplace (Matrixmatrix)
+
+

Similar to waitAndEmplace(const TDatumsSP& tDatums), but it takes a Matrix as input.

Parameters
+ + +
matrix: Matrix with the image to be processed.
+
+
+
Returns
Boolean specifying whether the tDatums could be emplaced.
+ +

Definition at line 561 of file wrapper.hpp.

+ +
+
+ +

◆ waitAndEmplace() [2/2]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::waitAndEmplace (TDatumsSP & tDatums)
+
+

Emplace (move) an element on the first (input) queue. Similar to tryEmplace. However, if the input queue is full, it will wait until it can emplace it. If the WrapperT class is stopped before adding the element, it will return false and not emplace it.

Parameters
+ + +
tDatums: TDatumsSP element to be emplaced.
+
+
+
Returns
Boolean specifying whether the tDatums could be emplaced.
+ +

Definition at line 523 of file wrapper.hpp.

+ +
+
+ +

◆ waitAndPop()

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::waitAndPop (TDatumsSP & tDatums)
+
+

Pop (retrieve) an element from the last (output) queue. Similar to tryPop. However, if the output queue is empty, it will wait until it can pop an element. If the WrapperT class is stopped before popping the element, it will return false and not retrieve it.

Parameters
+ + +
tDatums: TDatumsSP element where the retrieved element will be placed.
+
+
+
Returns
Boolean specifying whether the tDatums could be retrieved.
+ +

Definition at line 656 of file wrapper.hpp.

+ +
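Building only on the members documented on this page, a typical output-polling loop in ThreadManagerMode::AsynchronousOut might look like this sketch (opWrapperT and processDatums are assumed to be defined elsewhere in the program):

```cpp
// Consumer loop: pop processed elements until the wrapper stops.
std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>> datumsPtr;
while (opWrapperT.isRunning())
{
    // Blocks until a processed element is available or the wrapper is stopped.
    if (opWrapperT.waitAndPop(datumsPtr))
        processDatums(datumsPtr); // hypothetical user function
}
```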
+
+ +

◆ waitAndPush() [1/2]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::waitAndPush (const Matrixmatrix)
+
+

Similar to waitAndPush(const TDatumsSP& tDatums), but it takes a Matrix as input.

Parameters
+ + +
matrix: Matrix with the image to be processed.
+
+
+
Returns
Boolean specifying whether the tDatums could be pushed.
+ +

Definition at line 617 of file wrapper.hpp.

+ +
+
+ +

◆ waitAndPush() [2/2]

+ +
+
+
+template<typename TDatum , typename TDatums , typename TDatumsSP , typename TWorker >
+ + + + + + + + +
bool op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >::waitAndPush (const TDatumsSP & tDatums)
+
+

Push (copy) an element on the first (input) queue. Same as waitAndEmplace, but it copies the data instead of moving it.

Parameters
+ + +
tDatums: TDatumsSP element to be pushed.
+
+
+
Returns
Boolean specifying whether the tDatums could be pushed.
+ +

Definition at line 600 of file wrapper.hpp.

+ +
+
+
The documentation for this class was generated from the following file: +
+
+ + + + diff --git a/web/html/doc/classop_1_1_wrapper_t.js b/web/html/doc/classop_1_1_wrapper_t.js new file mode 100644 index 000000000..c9a326456 --- /dev/null +++ b/web/html/doc/classop_1_1_wrapper_t.js @@ -0,0 +1,29 @@ +var classop_1_1_wrapper_t = +[ + [ "WrapperT", "classop_1_1_wrapper_t.html#a94151754dddc2a37044aea26b9dac6c7", null ], + [ "~WrapperT", "classop_1_1_wrapper_t.html#a65e310384f3b898c4c3621e0e1ee6883", null ], + [ "configure", "classop_1_1_wrapper_t.html#ad9d83f0332c27aa64cde22c66755deec", null ], + [ "configure", "classop_1_1_wrapper_t.html#a3ce073fb177c316aaeab406c1f4808db", null ], + [ "configure", "classop_1_1_wrapper_t.html#a7a37b4a945171fd42d1ab16b0b7e8205", null ], + [ "configure", "classop_1_1_wrapper_t.html#aaa18264f99da260efb8fa12dd293ee75", null ], + [ "configure", "classop_1_1_wrapper_t.html#af3d5d56e63b8c6faee0d7954db95c69d", null ], + [ "configure", "classop_1_1_wrapper_t.html#a98a7310bc4062fb72f5d26e37d6d7c70", null ], + [ "configure", "classop_1_1_wrapper_t.html#a7508886116ccfbbb8567a1921591751e", null ], + [ "disableMultiThreading", "classop_1_1_wrapper_t.html#a6ba81304df06fbec71103973ce0041c5", null ], + [ "emplaceAndPop", "classop_1_1_wrapper_t.html#aaca8a5dc6f342470c8241fda5cd6cdb9", null ], + [ "emplaceAndPop", "classop_1_1_wrapper_t.html#a3818c026e33cc573ba8b5722daa003a7", null ], + [ "exec", "classop_1_1_wrapper_t.html#a478b8bd7deb43322f220593552fe683d", null ], + [ "isRunning", "classop_1_1_wrapper_t.html#a0577721c5e714861b27ad4ff356980bc", null ], + [ "setDefaultMaxSizeQueues", "classop_1_1_wrapper_t.html#aa89055f5cf4e762071479f5fec8d2faf", null ], + [ "setWorker", "classop_1_1_wrapper_t.html#a0b502ef38ee46749733ae3dda7e5fd31", null ], + [ "start", "classop_1_1_wrapper_t.html#a8111d8cdb984e996410ace159a896992", null ], + [ "stop", "classop_1_1_wrapper_t.html#a061ea09aac902a8a44438feffd18998f", null ], + [ "tryEmplace", "classop_1_1_wrapper_t.html#a79fa1a518495e1e3684f05943d1c04f8", null ], + [ "tryPop", "classop_1_1_wrapper_t.html#a55af0ab1f0ea4329f2c0bb3feb92b835", null ], + [ "tryPush", "classop_1_1_wrapper_t.html#a4d9396d4490b90f32a45d4a80d2cd5c7", null ], + [ "waitAndEmplace", "classop_1_1_wrapper_t.html#ae2c6cf519701c320ae53c597ae54a7aa", null ], + [ "waitAndEmplace", "classop_1_1_wrapper_t.html#a442ff1e4fec93ec28457f7c7c4b4bfbb", null ], + [ "waitAndPop", "classop_1_1_wrapper_t.html#a3c3b605e0787b55ffd00725c09a1cd53", null ], + [ "waitAndPush", "classop_1_1_wrapper_t.html#abcb907a2718260a14c0472279254df84", null ], + [ "waitAndPush", "classop_1_1_wrapper_t.html#a0e0aea3f8bf81458c0662c46f4d345d5", null ] +]; \ No newline at end of file diff --git a/web/html/doc/closed.png b/web/html/doc/closed.png new file mode 100644 index 000000000..98cc2c909 Binary files /dev/null and b/web/html/doc/closed.png differ diff --git a/web/html/doc/coco_json_saver_8hpp.html b/web/html/doc/coco_json_saver_8hpp.html new file mode 100644 index 000000000..cb0e697ac --- /dev/null +++ b/web/html/doc/coco_json_saver_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/filestream/cocoJsonSaver.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
cocoJsonSaver.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/coco_json_saver_8hpp_source.html b/web/html/doc/coco_json_saver_8hpp_source.html new file mode 100644 index 000000000..7413c6408 --- /dev/null +++ b/web/html/doc/coco_json_saver_8hpp_source.html @@ -0,0 +1,150 @@ + + + + + + + +OpenPose: include/openpose/filestream/cocoJsonSaver.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
cocoJsonSaver.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FILESTREAM_POSE_JSON_COCO_SAVER_HPP
+
2 #define OPENPOSE_FILESTREAM_POSE_JSON_COCO_SAVER_HPP
+
3 
+ + + + +
8 
+
9 namespace op
+
10 {
+ +
16  {
+
17  public:
+
24  explicit CocoJsonSaver(
+
25  const std::string& filePathToSave, const PoseModel poseModel, const bool humanReadable = true,
+
26  const int cocoJsonVariants = 1, const CocoJsonFormat cocoJsonFormat = CocoJsonFormat::Body,
+
27  const int cocoJsonVariant = 0);
+
28 
+
29  virtual ~CocoJsonSaver();
+
30 
+
31  void record(
+
32  const Array<float>& poseKeypoints, const Array<float>& poseScores, const std::string& imageName,
+
33  const unsigned long long frameNumber);
+
34 
+
35  private:
+
36  const PoseModel mPoseModel;
+
37  const int mCocoJsonVariant;
+
38  std::vector<std::tuple<JsonOfstream, CocoJsonFormat, bool>> mJsonOfstreams;
+
39 
+ +
41  };
+
42 }
+
43 
+
44 #endif // OPENPOSE_FILESTREAM_POSE_JSON_COCO_SAVER_HPP
+ + +
void record(const Array< float > &poseKeypoints, const Array< float > &poseScores, const std::string &imageName, const unsigned long long frameNumber)
+
CocoJsonSaver(const std::string &filePathToSave, const PoseModel poseModel, const bool humanReadable=true, const int cocoJsonVariants=1, const CocoJsonFormat cocoJsonFormat=CocoJsonFormat::Body, const int cocoJsonVariant=0)
+
virtual ~CocoJsonSaver()
+ + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
CocoJsonFormat
Definition: enumClasses.hpp:15
+ +
PoseModel
Definition: enumClasses.hpp:10
+ +
+
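For orientation, a minimal usage sketch of the class declared above follows. It is not part of the generated page: the output path, the BODY_25 model choice, and the wrapper function saveCocoFrame() are assumptions for illustration only.

// Hypothetical usage of op::CocoJsonSaver (illustration, not repository code).
#include <string>
#include <openpose/filestream/cocoJsonSaver.hpp>

void saveCocoFrame(
    const op::Array<float>& poseKeypoints, const op::Array<float>& poseScores,
    const unsigned long long frameNumber)
{
    // One saver per output file; copying is disabled via DELETE_COPY in the header.
    static op::CocoJsonSaver cocoJsonSaver{
        "output/coco_results.json", op::PoseModel::BODY_25, /*humanReadable*/ true};
    // record() appends the keypoints and scores of the current frame.
    cocoJsonSaver.record(poseKeypoints, poseScores, std::to_string(frameNumber), frameNumber);
}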
+ + + + diff --git a/web/html/doc/common_8hpp.html b/web/html/doc/common_8hpp.html new file mode 100644 index 000000000..7fe13b584 --- /dev/null +++ b/web/html/doc/common_8hpp.html @@ -0,0 +1,117 @@ + + + + + + + +OpenPose: include/openpose/core/common.hpp File Reference + + + + + + + + + + + + + +
common.hpp File Reference
+
+
+
#include <array>
+#include <memory>
+#include <string>
+#include <vector>
+#include <openpose/core/array.hpp>
+#include <openpose/core/arrayCpuGpu.hpp>
+#include <openpose/core/macros.hpp>
+#include <openpose/core/matrix.hpp>
+#include <openpose/core/point.hpp>
+#include <openpose/core/rectangle.hpp>
+#include <openpose/core/string.hpp>
+#include <openpose/utilities/errorAndLog.hpp>
+#include <openpose/utilities/profiler.hpp>
+#include <openpose/core/datum.hpp>
+
+

Go to the source code of this file.

+
+
+ + + + diff --git a/web/html/doc/common_8hpp_source.html b/web/html/doc/common_8hpp_source.html new file mode 100644 index 000000000..ef1d3bed1 --- /dev/null +++ b/web/html/doc/common_8hpp_source.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/core/common.hpp Source File + + + + + + + + + + + + + +
common.hpp
+
+
#ifndef OPENPOSE_CORE_COMMON_HPP
#define OPENPOSE_CORE_COMMON_HPP

// Std library most used classes
#include <array>
#include <memory> // std::shared_ptr, std::unique_ptr
#include <string>
#include <vector>
// OpenPose most used classes
#include <openpose/core/array.hpp>
#include <openpose/core/arrayCpuGpu.hpp>
#include <openpose/core/macros.hpp>
#include <openpose/core/matrix.hpp>
#include <openpose/core/point.hpp>
#include <openpose/core/rectangle.hpp>
#include <openpose/core/string.hpp>
#include <openpose/utilities/errorAndLog.hpp>
#include <openpose/utilities/profiler.hpp>
// Datum at the end, otherwise circular dependency with array, point & rectangle
#include <openpose/core/datum.hpp>

#endif // OPENPOSE_CORE_COMMON_HPP
+
+ + + + diff --git a/web/html/doc/core_2enum_classes_8hpp.html b/web/html/doc/core_2enum_classes_8hpp.html new file mode 100644 index 000000000..196247124 --- /dev/null +++ b/web/html/doc/core_2enum_classes_8hpp.html @@ -0,0 +1,147 @@ + + + + + + + +OpenPose: include/openpose/core/enumClasses.hpp File Reference + + + + + + + + + + + + + +
enumClasses.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + +

+Enumerations

enum class  op::ScaleMode : unsigned char {
+  op::InputResolution +, op::NetOutputResolution +, op::OutputResolution +, op::ZeroToOne +,
+  op::ZeroToOneFixedAspect +, op::PlusMinusOne +, op::PlusMinusOneFixedAspect +, op::UnsignedChar +,
+  op::NoScale +
+ }
 
enum class  op::HeatMapType : unsigned char { op::Parts +, op::Background +, op::PAFs + }
 
enum class  op::RenderMode : unsigned char { op::None +, op::Auto +, op::Cpu +, op::Gpu + }
 
enum class  op::ElementToRender : unsigned char { op::Skeleton +, op::Background +, op::AddKeypoints +, op::AddPAFs + }
 
+
+
+ + + + diff --git a/web/html/doc/core_2enum_classes_8hpp.js b/web/html/doc/core_2enum_classes_8hpp.js new file mode 100644 index 000000000..1850388a3 --- /dev/null +++ b/web/html/doc/core_2enum_classes_8hpp.js @@ -0,0 +1,31 @@ +var core_2enum_classes_8hpp = +[ + [ "ElementToRender", "core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993e", [ + [ "Skeleton", "core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993ea6ab48f7ed56efc362f41853c5616bf75", null ], + [ "Background", "core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993eaa9ded1e5ce5d75814730bb4caaf49419", null ], + [ "AddKeypoints", "core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993ea5f4badd072493724e560fa43d0cf2c71", null ], + [ "AddPAFs", "core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993eaca9f686d0a3d6b8bfe5865b59b2fc84f", null ] + ] ], + [ "HeatMapType", "core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226b", [ + [ "Parts", "core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226ba9ce2d07469b39a72159ed8b0e0e597ca", null ], + [ "Background", "core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226baa9ded1e5ce5d75814730bb4caaf49419", null ], + [ "PAFs", "core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226ba21c5c3f60f4881b8d5477f5628db74f1", null ] + ] ], + [ "RenderMode", "core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033", [ + [ "None", "core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a6adf97f83acf6453d4a6a4b1070f3754", null ], + [ "Auto", "core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a06b9281e396db002010bde1de57262eb", null ], + [ "Cpu", "core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a54c82ef76ecbbd4c2293e09bae01b54e", null ], + [ "Gpu", "core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a3432ca64f06615abf07ab44c10cada38", null ] + ] ], + [ "ScaleMode", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bda", [ + [ "InputResolution", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa46f9a0da0a5d448fd0cc8b3aa0a9b228", null ], + [ "NetOutputResolution", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa668a2bc599fd07445eae0730d043c96d", null ], + [ "OutputResolution", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa73c42013aac51c335d50d103f30fcb99", null ], + [ "ZeroToOne", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa4b942544cb3e764bbb8d33f8a8744855", null ], + [ "ZeroToOneFixedAspect", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaafa90ddb034be42f1cdf13a6829eed2ad", null ], + [ "PlusMinusOne", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaab7e7b2beae3435e73021d6d9a6a3fd8a", null ], + [ "PlusMinusOneFixedAspect", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaaee080e43c505aa85cdda0e480b0abc06", null ], + [ "UnsignedChar", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaaa93f121640d609f8772397a0f40f40d6", null ], + [ "NoScale", "core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa6089ccf7c3fe93a62745e51200419c60", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/core_2enum_classes_8hpp_source.html b/web/html/doc/core_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..f76fa2249 --- /dev/null +++ b/web/html/doc/core_2enum_classes_8hpp_source.html @@ -0,0 +1,169 @@ + + + + + + + +OpenPose: include/openpose/core/enumClasses.hpp Source File + + + + + + + + + + + + + +
enumClasses.hpp
+
+
#ifndef OPENPOSE_CORE_ENUM_CLASSES_HPP
#define OPENPOSE_CORE_ENUM_CLASSES_HPP

namespace op
{
    enum class ScaleMode : unsigned char
    {
        InputResolution,
        NetOutputResolution,
        OutputResolution,
        ZeroToOne, // [0, 1]
        ZeroToOneFixedAspect, // [0, 1]
        PlusMinusOne, // [-1, 1]
        PlusMinusOneFixedAspect, // [-1, 1]
        UnsignedChar, // [0, 255]
        NoScale,
    };

    enum class HeatMapType : unsigned char
    {
        Parts,
        Background,
        PAFs,
    };

    enum class RenderMode : unsigned char
    {
        None,
        Auto, // It will select Gpu if CUDA version, or Cpu otherwise
        Cpu,
        Gpu,
    };

    enum class ElementToRender : unsigned char
    {
        Skeleton,
        Background,
        AddKeypoints,
        AddPAFs,
    };
}

#endif // OPENPOSE_CORE_ENUM_CLASSES_HPP
+
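As a small, hedged illustration of how these enumerators are consumed, the helper below is made up; only the enum types and values come from enumClasses.hpp.

// Illustration only: picking default render and scale modes at configuration time.
#include <utility>
#include <openpose/core/enumClasses.hpp>

std::pair<op::RenderMode, op::ScaleMode> chooseDefaults(const bool cudaAvailable)
{
    // RenderMode::Auto resolves to Gpu on CUDA builds and Cpu otherwise (see comment above),
    // so an explicit choice is only needed when that heuristic is not wanted.
    const auto renderMode = cudaAvailable ? op::RenderMode::Gpu : op::RenderMode::Cpu;
    // ScaleMode::ZeroToOne keeps keypoint coordinates normalized to [0, 1].
    return {renderMode, op::ScaleMode::ZeroToOne};
}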
+ + + + diff --git a/web/html/doc/core_2headers_8hpp.html b/web/html/doc/core_2headers_8hpp.html new file mode 100644 index 000000000..ca73966e9 --- /dev/null +++ b/web/html/doc/core_2headers_8hpp.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: include/openpose/core/headers.hpp File Reference + + + + + + + + + + + + + +
headers.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/core_2headers_8hpp_source.html b/web/html/doc/core_2headers_8hpp_source.html new file mode 100644 index 000000000..11f6b6e02 --- /dev/null +++ b/web/html/doc/core_2headers_8hpp_source.html @@ -0,0 +1,159 @@ + + + + + + + +OpenPose: include/openpose/core/headers.hpp Source File + + + + + + + + + + + + + +
headers.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_HEADERS_HPP
+
2 #define OPENPOSE_CORE_HEADERS_HPP
+
3 
+
4 // core module
+ + + + + +
10 #include <openpose/core/datum.hpp>
+ + + + +
15 #include <openpose/core/macros.hpp>
+
16 #include <openpose/core/matrix.hpp>
+ +
18 #include <openpose/core/point.hpp>
+ + + +
22 #include <openpose/core/string.hpp>
+ + + + + + + + +
31 
+
32 #endif // OPENPOSE_CORE_HEADERS_HPP
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + + diff --git a/web/html/doc/core_2string_8hpp.html b/web/html/doc/core_2string_8hpp.html new file mode 100644 index 000000000..a610dd25c --- /dev/null +++ b/web/html/doc/core_2string_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/core/string.hpp File Reference + + + + + + + + + + + + + +
string.hpp File Reference
+
+
+
#include <memory>
+#include <string>
+#include <openpose/core/macros.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Classes

class  op::String
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/core_2string_8hpp_source.html b/web/html/doc/core_2string_8hpp_source.html new file mode 100644 index 000000000..c0c7dfaf7 --- /dev/null +++ b/web/html/doc/core_2string_8hpp_source.html @@ -0,0 +1,141 @@ + + + + + + + +OpenPose: include/openpose/core/string.hpp Source File + + + + + + + + + + + + + +
string.hpp
+
+
#ifndef OPENPOSE_CORE_STRING_HPP
#define OPENPOSE_CORE_STRING_HPP

#include <memory> // std::shared_ptr
#include <string>
#include <openpose/core/macros.hpp>

namespace op
{
    class OP_API String
    {
    public:
        String();

        String(const char* charPtr);

        explicit String(const std::string& string);

        const std::string& getStdString() const;

        bool empty() const;

    private:
        // PIMPL idiom
        // http://www.cppsamples.com/common-tasks/pimpl.html
        struct ImplString;
        std::shared_ptr<ImplString> spImpl;
    };
}

#endif // OPENPOSE_CORE_STRING_HPP
+
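A minimal sketch of the std::string round trip through op::String, using only the constructors and getStdString() declared above; the function name is illustrative.

// Illustrative round trip through the PIMPL-based op::String wrapper.
#include <string>
#include <openpose/core/string.hpp>

std::string roundTrip(const std::string& text)
{
    const op::String opString{text};    // explicit std::string constructor
    return opString.getStdString();     // back to std::string
}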
+ + + + diff --git a/web/html/doc/cuda_8hpp.html b/web/html/doc/cuda_8hpp.html new file mode 100644 index 000000000..8b9fd425e --- /dev/null +++ b/web/html/doc/cuda_8hpp.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: include/openpose/gpu/cuda.hpp File Reference + + + + + + + + + + + + + +
cuda.hpp File Reference
+
+
+
#include <utility>
+#include <openpose/core/common.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + + + +

+Functions

OP_API void op::cudaCheck (const int line=-1, const std::string &function="", const std::string &file="")
 
OP_API int op::getCudaGpuNumber ()
 
unsigned int op::getNumberCudaBlocks (const unsigned int totalRequired, const unsigned int numberCudaThreads=CUDA_NUM_THREADS)
 
OP_API void op::getNumberCudaThreadsAndBlocks (dim3 &numberCudaThreads, dim3 &numberCudaBlocks, const Point< unsigned int > &frameSize)
 
template<typename T >
void op::reorderAndNormalize (T *targetPtr, const unsigned char *const srcPtr, const int width, const int height, const int channels)
 
template<typename T >
void op::uCharImageCast (unsigned char *targetPtr, const T *const srcPtr, const int volume)
 
+ + + +

+Variables

const auto op::CUDA_NUM_THREADS = 512u
 
+
+
+ + + + diff --git a/web/html/doc/cuda_8hpp.js b/web/html/doc/cuda_8hpp.js new file mode 100644 index 000000000..f7e101d80 --- /dev/null +++ b/web/html/doc/cuda_8hpp.js @@ -0,0 +1,10 @@ +var cuda_8hpp = +[ + [ "cudaCheck", "cuda_8hpp.html#a2af8422ada0de882cc222920ca15c6d2", null ], + [ "getCudaGpuNumber", "cuda_8hpp.html#ad9b7765a4396ee4470585ded07285563", null ], + [ "getNumberCudaBlocks", "cuda_8hpp.html#a4ba080c11cc9758051db97ce2a11c023", null ], + [ "getNumberCudaThreadsAndBlocks", "cuda_8hpp.html#a17da233ea322ae172ff5bda7caaf2124", null ], + [ "reorderAndNormalize", "cuda_8hpp.html#a8587bab6b02056384b7c424555cd50d8", null ], + [ "uCharImageCast", "cuda_8hpp.html#a6aeab543a61ef23ed58a6e29401424ae", null ], + [ "CUDA_NUM_THREADS", "cuda_8hpp.html#ac7bbf63b37bf6762c47557ad227e036d", null ] +]; \ No newline at end of file diff --git a/web/html/doc/cuda_8hpp_source.html b/web/html/doc/cuda_8hpp_source.html new file mode 100644 index 000000000..a198053f1 --- /dev/null +++ b/web/html/doc/cuda_8hpp_source.html @@ -0,0 +1,144 @@ + + + + + + + +OpenPose: include/openpose/gpu/cuda.hpp Source File + + + + + + + + + + + + + +
cuda.hpp
+
+
#ifndef OPENPOSE_GPU_CUDA_HPP
#define OPENPOSE_GPU_CUDA_HPP

#include <utility> // std::pair
#include <openpose/core/common.hpp>

namespace op
{
    const auto CUDA_NUM_THREADS = 512u;

    OP_API void cudaCheck(const int line = -1, const std::string& function = "", const std::string& file = "");

    OP_API int getCudaGpuNumber();

    inline unsigned int getNumberCudaBlocks(
        const unsigned int totalRequired, const unsigned int numberCudaThreads = CUDA_NUM_THREADS)
    {
        return (totalRequired + numberCudaThreads - 1) / numberCudaThreads;
    }

    OP_API void getNumberCudaThreadsAndBlocks(
        dim3& numberCudaThreads, dim3& numberCudaBlocks, const Point<unsigned int>& frameSize);

    template <typename T>
    void reorderAndNormalize(
        T* targetPtr, const unsigned char* const srcPtr, const int width, const int height, const int channels);

    template <typename T>
    void uCharImageCast(unsigned char* targetPtr, const T* const srcPtr, const int volume);
}

#endif // OPENPOSE_GPU_CUDA_HPP
+
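To show how the helpers above are meant to be combined, a hedged kernel-launch sketch follows; the kernel and wrapper function are made up, and only CUDA_NUM_THREADS, getNumberCudaBlocks() and cudaCheck() come from cuda.hpp.

// Hypothetical kernel launch sized with the cuda.hpp helpers (CUDA C++).
#include <openpose/gpu/cuda.hpp>

__global__ void halveKernel(float* data, const int volume)
{
    const int index = blockIdx.x * blockDim.x + threadIdx.x;
    if (index < volume)
        data[index] *= 0.5f; // arbitrary per-element work
}

void halveOnGpu(float* dataGpu, const int volume)
{
    const dim3 threads{op::CUDA_NUM_THREADS};
    const dim3 blocks{op::getNumberCudaBlocks(volume)}; // ceil(volume / CUDA_NUM_THREADS)
    halveKernel<<<blocks, threads>>>(dataGpu, volume);
    op::cudaCheck(__LINE__, __FUNCTION__, __FILE__);    // surface any launch error
}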
+ + + + diff --git a/web/html/doc/cv_mat_to_op_input_8hpp.html b/web/html/doc/cv_mat_to_op_input_8hpp.html new file mode 100644 index 000000000..b2fec3837 --- /dev/null +++ b/web/html/doc/cv_mat_to_op_input_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/core/cvMatToOpInput.hpp File Reference + + + + + + + + + + + + + +
cvMatToOpInput.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::CvMatToOpInput
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/cv_mat_to_op_input_8hpp_source.html b/web/html/doc/cv_mat_to_op_input_8hpp_source.html new file mode 100644 index 000000000..f47a82c17 --- /dev/null +++ b/web/html/doc/cv_mat_to_op_input_8hpp_source.html @@ -0,0 +1,144 @@ + + + + + + + +OpenPose: include/openpose/core/cvMatToOpInput.hpp Source File + + + + + + + + + + + + + +
cvMatToOpInput.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_CV_MAT_TO_OP_INPUT_HPP
+
2 #define OPENPOSE_CORE_CV_MAT_TO_OP_INPUT_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
10  {
+
11  public:
+
12  CvMatToOpInput(const PoseModel poseModel = PoseModel::BODY_25, const bool gpuResize = false);
+
13 
+
14  virtual ~CvMatToOpInput();
+
15 
+
16  std::vector<Array<float>> createArray(
+
17  const Matrix& inputData, const std::vector<double>& scaleInputToNetInputs,
+
18  const std::vector<Point<int>>& netInputSizes);
+
19 
+
20  private:
+
21  const PoseModel mPoseModel;
+
22  const bool mGpuResize;
+
23  unsigned char* pInputImageCuda;
+
24  float* pInputImageReorderedCuda;
+
25  float* pOutputImageCuda;
+
26  unsigned long long pInputMaxSize;
+
27  unsigned long long pOutputMaxSize;
+
28  };
+
29 }
+
30 
+
31 #endif // OPENPOSE_CORE_CV_MAT_TO_OP_INPUT_HPP
+ +
CvMatToOpInput(const PoseModel poseModel=PoseModel::BODY_25, const bool gpuResize=false)
+
std::vector< Array< float > > createArray(const Matrix &inputData, const std::vector< double > &scaleInputToNetInputs, const std::vector< Point< int >> &netInputSizes)
+
virtual ~CvMatToOpInput()
+ + +
#define OP_API
Definition: macros.hpp:18
+ +
PoseModel
Definition: enumClasses.hpp:10
+ + + +
+
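A hedged sketch of running one frame through the class declared above; the scale factor and net input size below are placeholder values chosen for illustration, not OpenPose defaults.

// Illustration only: converting an op::Matrix frame into net-input arrays.
#include <vector>
#include <openpose/core/cvMatToOpInput.hpp>

std::vector<op::Array<float>> toNetInput(const op::Matrix& frame)
{
    op::CvMatToOpInput cvMatToOpInput{op::PoseModel::BODY_25, /*gpuResize*/ false};
    const std::vector<double> scaleInputToNetInputs{1.0};                      // placeholder scale
    const std::vector<op::Point<int>> netInputSizes{op::Point<int>{656, 368}}; // placeholder size
    return cvMatToOpInput.createArray(frame, scaleInputToNetInputs, netInputSizes);
}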
+ + + + diff --git a/web/html/doc/cv_mat_to_op_output_8hpp.html b/web/html/doc/cv_mat_to_op_output_8hpp.html new file mode 100644 index 000000000..5223238ee --- /dev/null +++ b/web/html/doc/cv_mat_to_op_output_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/core/cvMatToOpOutput.hpp File Reference + + + + + + + + + + + + + +
cvMatToOpOutput.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::CvMatToOpOutput
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/cv_mat_to_op_output_8hpp_source.html b/web/html/doc/cv_mat_to_op_output_8hpp_source.html new file mode 100644 index 000000000..b69837625 --- /dev/null +++ b/web/html/doc/cv_mat_to_op_output_8hpp_source.html @@ -0,0 +1,143 @@ + + + + + + + +OpenPose: include/openpose/core/cvMatToOpOutput.hpp Source File + + + + + + + + + + + + + +
cvMatToOpOutput.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_CV_MAT_TO_OP_OUTPUT_HPP
+
2 #define OPENPOSE_CORE_CV_MAT_TO_OP_OUTPUT_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+ +
9  {
+
10  public:
+
11  CvMatToOpOutput(const bool gpuResize = false);
+
12 
+
13  virtual ~CvMatToOpOutput();
+
14 
+
15  std::tuple<std::shared_ptr<float*>, std::shared_ptr<bool>, std::shared_ptr<unsigned long long>>
+ +
17 
+ +
19  const Matrix& inputData, const double scaleInputToOutput, const Point<int>& outputResolution);
+
20 
+
21  private:
+
22  const bool mGpuResize;
+
23  unsigned char* pInputImageCuda;
+
24  std::shared_ptr<float*> spOutputImageCuda;
+
25  unsigned long long pInputMaxSize;
+
26  std::shared_ptr<unsigned long long> spOutputMaxSize;
+
27  std::shared_ptr<bool> spGpuMemoryAllocated;
+
28  };
+
29 }
+
30 
+
31 #endif // OPENPOSE_CORE_CV_MAT_TO_OP_OUTPUT_HPP
+ + +
CvMatToOpOutput(const bool gpuResize=false)
+
virtual ~CvMatToOpOutput()
+
std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long > > getSharedParameters()
+
Array< float > createArray(const Matrix &inputData, const double scaleInputToOutput, const Point< int > &outputResolution)
+ + +
#define OP_API
Definition: macros.hpp:18
+ + +
+
+ + + + diff --git a/web/html/doc/datum_8hpp.html b/web/html/doc/datum_8hpp.html new file mode 100644 index 000000000..21e0e356a --- /dev/null +++ b/web/html/doc/datum_8hpp.html @@ -0,0 +1,221 @@ + + + + + + + +OpenPose: include/openpose/core/datum.hpp File Reference + + + + + + + + + + + + + +
datum.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

struct  op::Datum
 
+ + + +

+Namespaces

 op
 
+ + + + + + + + + + + +

+Macros

#define BASE_DATUM   Datum
 
#define BASE_DATUMS   std::vector<std::shared_ptr<BASE_DATUM>>
 
#define BASE_DATUMS_SH   std::shared_ptr<BASE_DATUMS>
 
#define DEFINE_TEMPLATE_DATUM(templateName)   template class OP_API templateName<BASE_DATUMS_SH>
 
#define COMPILE_TEMPLATE_DATUM(templateName)   extern template class templateName<BASE_DATUMS_SH>
 
+

Macro Definition Documentation

+ +

◆ BASE_DATUM

+ +
+
+ + + + +
#define BASE_DATUM   Datum
+
+ +

Definition at line 403 of file datum.hpp.

+ +
+
+ +

◆ BASE_DATUMS

+ +
+
+ + + + +
#define BASE_DATUMS   std::vector<std::shared_ptr<BASE_DATUM>>
+
+ +

Definition at line 404 of file datum.hpp.

+ +
+
+ +

◆ BASE_DATUMS_SH

+ +
+
+ + + + +
#define BASE_DATUMS_SH   std::shared_ptr<BASE_DATUMS>
+
+ +

Definition at line 405 of file datum.hpp.

+ +
+
+ +

◆ COMPILE_TEMPLATE_DATUM

+ +
+
+ + + + + + + + +
#define COMPILE_TEMPLATE_DATUM( templateName)   extern template class templateName<BASE_DATUMS_SH>
+
+ +

Definition at line 407 of file datum.hpp.

+ +
+
+ +

◆ DEFINE_TEMPLATE_DATUM

+ +
+
+ + + + + + + + +
#define DEFINE_TEMPLATE_DATUM( templateName)   template class OP_API templateName<BASE_DATUMS_SH>
+
+ +

Definition at line 406 of file datum.hpp.

+ +
+
+
+
+ + + + diff --git a/web/html/doc/datum_8hpp.js b/web/html/doc/datum_8hpp.js new file mode 100644 index 000000000..f152e66b1 --- /dev/null +++ b/web/html/doc/datum_8hpp.js @@ -0,0 +1,9 @@ +var datum_8hpp = +[ + [ "Datum", "structop_1_1_datum.html", "structop_1_1_datum" ], + [ "BASE_DATUM", "datum_8hpp.html#a03de732ffb0edab021fb745b21a05fdd", null ], + [ "BASE_DATUMS", "datum_8hpp.html#aa0a67922cf9df1e30dad2c32785b147e", null ], + [ "BASE_DATUMS_SH", "datum_8hpp.html#ae2331967a21fec02341dec3ca39d3809", null ], + [ "COMPILE_TEMPLATE_DATUM", "datum_8hpp.html#af87cd873cebb915837ae27248f67e822", null ], + [ "DEFINE_TEMPLATE_DATUM", "datum_8hpp.html#ad11d52b69bc54e48ceb2f5787f700431", null ] +]; \ No newline at end of file diff --git a/web/html/doc/datum_8hpp_source.html b/web/html/doc/datum_8hpp_source.html new file mode 100644 index 000000000..76b67cce9 --- /dev/null +++ b/web/html/doc/datum_8hpp_source.html @@ -0,0 +1,327 @@ + + + + + + + +OpenPose: include/openpose/core/datum.hpp Source File + + + + + + + + + + + + + +
datum.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_DATUM_HPP
+
2 #define OPENPOSE_CORE_DATUM_HPP
+
3 
+
4 #ifdef USE_3D_ADAM_MODEL
+
5  #ifdef USE_EIGEN
+
6  #include <Eigen/Core>
+
7  #endif
+
8 #endif
+ +
10 
+
11 namespace op
+
12 {
+
19  struct OP_API Datum
+
20  {
+
21  // ---------------------------------------- ID parameters ---------------------------------------- //
+
22  unsigned long long id;
+
24  unsigned long long subId;
+
26  unsigned long long subIdMax;
+
31  std::string name;
+
32 
+
38  unsigned long long frameNumber;
+
39 
+
40  // ------------------------------ Input image and rendered version parameters ------------------------------ //
+ +
46 
+
56  std::vector<Array<float>> inputNetData;
+
57 
+ +
65 
+ +
73 
+ +
78 
+
79  // ------------------------------ Resulting Array<float> data parameters ------------------------------ //
+ +
86 
+ +
95 
+ +
105 
+ +
119 
+
129  std::vector<std::vector<std::array<float,3>>> poseCandidates;
+
130 
+
136  std::vector<Rectangle<float>> faceRectangles;
+
137 
+ +
144 
+ +
151 
+
157  std::vector<std::array<Rectangle<float>, 2>> handRectangles;
+
158 
+
165  std::array<Array<float>, 2> handKeypoints;
+
166 
+
172  std::array<Array<float>, 2> handHeatMaps;
+
173 
+
174  // ---------------------------------------- 3-D Reconstruction parameters ---------------------------------------- //
+ +
180 
+ +
187 
+
194  std::array<Array<float>, 2> handKeypoints3D;
+
195 
+ +
200 
+ +
205 
+ +
210 
+ +
218 
+
219  // ---------------------------------------- Other (internal) parameters ---------------------------------------- //
+
223  std::vector<double> scaleInputToNetInputs;
+
224 
+
229  std::vector<Point<int>> netInputSizes;
+
230 
+ +
235 
+ +
240 
+ +
245 
+
250  std::pair<int, std::string> elementRendered;
+
251 
+
252  // 3D/Adam parameters (experimental code not meant to be publicly used)
+
253  #ifdef USE_3D_ADAM_MODEL
+
254  // Adam/Unity params
+
255  std::vector<double> adamPosePtr;
+
256  int adamPoseRows;
+
257  std::vector<double> adamTranslationPtr;
+
258  std::vector<double> vtVecPtr;
+
259  int vtVecRows;
+
260  std::vector<double> j0VecPtr;
+
261  int j0VecRows;
+
262  std::vector<double> adamFaceCoeffsExpPtr;
+
263  int adamFaceCoeffsExpRows;
+
264  #ifdef USE_EIGEN
+
265  // Adam/Unity params
+
266  Eigen::Matrix<double, 62, 3, Eigen::RowMajor> adamPose;
+
267  Eigen::Vector3d adamTranslation;
+
268  // Adam params (Jacobians)
+
269  Eigen::Matrix<double, Eigen::Dynamic, 1> vtVec;
+
270  Eigen::Matrix<double, Eigen::Dynamic, 1> j0Vec;
+
271  Eigen::VectorXd adamFaceCoeffsExp;
+
272  #endif
+
273  #endif
+
274 
+
275 
+
276 
+
277 
+
278 
+
279  // ---------------------------------------- Functions ---------------------------------------- //
+
285  explicit Datum();
+
286 
+
295  Datum(const Datum& datum);
+
296 
+
303  Datum& operator=(const Datum& datum);
+
304 
+
310  Datum(Datum&& datum);
+
311 
+
318  Datum& operator=(Datum&& datum);
+
319 
+
324  virtual ~Datum();
+
325 
+
333  Datum clone() const;
+
334 
+
335 
+
336 
+
337 
+
338 
+
339  // ---------------------------------------- Comparison operators ---------------------------------------- //
+
345  inline bool operator<(const Datum& datum) const
+
346  {
+
347  // return id < datum.id;
+
348  return id < datum.id || (id == datum.id && subId < datum.subId);
+
349  }
+
355  inline bool operator>(const Datum& datum) const
+
356  {
+
357  // return id > datum.id;
+
358  return id > datum.id || (id == datum.id && subId > datum.subId);
+
359  }
+
365  inline bool operator<=(const Datum& datum) const
+
366  {
+
367  // return id <= datum.id;
+
368  return id < datum.id || (id == datum.id && subId <= datum.subId);
+
369  }
+
375  inline bool operator>=(const Datum& datum) const
+
376  {
+
377  // return id >= datum.id;
+
378  return id > datum.id || (id == datum.id && subId >= datum.subId);
+
379  }
+
385  inline bool operator==(const Datum& datum) const
+
386  {
+
387  // return id == datum.id;
+
388  return id == datum.id && subId == datum.subId;
+
389  }
+
395  inline bool operator!=(const Datum& datum) const
+
396  {
+
397  // return id != datum.id;
+
398  return id != datum.id || subId != datum.subId;
+
399  }
+
400  };
+
401 
+
402  // Defines for Datum. Added here rather than in `macros.hpp` to avoid circular dependencies
+
403  #define BASE_DATUM Datum
+
404  #define BASE_DATUMS std::vector<std::shared_ptr<BASE_DATUM>>
+
405  #define BASE_DATUMS_SH std::shared_ptr<BASE_DATUMS>
+
406  #define DEFINE_TEMPLATE_DATUM(templateName) template class OP_API templateName<BASE_DATUMS_SH>
+
407  #define COMPILE_TEMPLATE_DATUM(templateName) extern template class templateName<BASE_DATUMS_SH>
+
408 }
+
409 
+
410 #endif // OPENPOSE_CORE_DATUM_HPP
+ + + +
#define OP_API
Definition: macros.hpp:18
+ + +
Matrix cvOutputData3D
Definition: datum.hpp:77
+
std::vector< Rectangle< float > > faceRectangles
Definition: datum.hpp:136
+
std::vector< double > scaleInputToNetInputs
Definition: datum.hpp:223
+
virtual ~Datum()
+
Matrix cvInputData
Definition: datum.hpp:45
+
Datum & operator=(Datum &&datum)
+
std::array< Array< float >, 2 > handKeypoints3D
Definition: datum.hpp:194
+
Datum(Datum &&datum)
+
bool operator<=(const Datum &datum) const
Definition: datum.hpp:365
+
std::vector< Point< int > > netInputSizes
Definition: datum.hpp:229
+
std::pair< int, std::string > elementRendered
Definition: datum.hpp:250
+
Array< float > outputData
Definition: datum.hpp:64
+
Datum(const Datum &datum)
+
double scaleNetToOutput
Definition: datum.hpp:244
+
std::vector< Array< float > > inputNetData
Definition: datum.hpp:56
+
std::vector< std::array< Rectangle< float >, 2 > > handRectangles
Definition: datum.hpp:157
+
Array< float > poseHeatMaps
Definition: datum.hpp:118
+
std::vector< std::vector< std::array< float, 3 > > > poseCandidates
Definition: datum.hpp:129
+
std::array< Array< float >, 2 > handKeypoints
Definition: datum.hpp:165
+
Array< float > poseKeypoints3D
Definition: datum.hpp:179
+
unsigned long long id
Definition: datum.hpp:22
+
double scaleInputToOutput
Definition: datum.hpp:234
+
Array< float > poseKeypoints
Definition: datum.hpp:85
+ +
Datum & operator=(const Datum &datum)
+
bool operator>(const Datum &datum) const
Definition: datum.hpp:355
+
bool operator!=(const Datum &datum) const
Definition: datum.hpp:395
+
unsigned long long frameNumber
Definition: datum.hpp:38
+
Array< float > poseNetOutput
Definition: datum.hpp:217
+
Array< float > faceKeypoints3D
Definition: datum.hpp:186
+
bool operator<(const Datum &datum) const
Definition: datum.hpp:345
+
Matrix cameraMatrix
Definition: datum.hpp:199
+
Matrix cameraExtrinsics
Definition: datum.hpp:204
+
unsigned long long subIdMax
Definition: datum.hpp:26
+
bool operator>=(const Datum &datum) const
Definition: datum.hpp:375
+
Array< long long > poseIds
Definition: datum.hpp:94
+
Point< int > netOutputSize
Definition: datum.hpp:239
+
Datum clone() const
+
Matrix cvOutputData
Definition: datum.hpp:72
+
Array< float > faceHeatMaps
Definition: datum.hpp:150
+
Matrix cameraIntrinsics
Definition: datum.hpp:209
+
std::string name
Definition: datum.hpp:31
+
bool operator==(const Datum &datum) const
Definition: datum.hpp:385
+
unsigned long long subId
Definition: datum.hpp:24
+
Array< float > faceKeypoints
Definition: datum.hpp:143
+
std::array< Array< float >, 2 > handHeatMaps
Definition: datum.hpp:172
+
Array< float > poseScores
Definition: datum.hpp:104
+ +
+
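Because the comparison operators above order datums by id first and subId second, sorting a batch restores frame/sub-frame order; a minimal sketch (the function name is illustrative):

// Minimal sketch: std::sort uses Datum::operator< (id, then subId) as defined above.
#include <algorithm>
#include <vector>
#include <openpose/core/datum.hpp>

void sortByFrameOrder(std::vector<op::Datum>& datums)
{
    std::sort(datums.begin(), datums.end());
}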
+ + + + diff --git a/web/html/doc/datum_producer_8hpp.html b/web/html/doc/datum_producer_8hpp.html new file mode 100644 index 000000000..c86803311 --- /dev/null +++ b/web/html/doc/datum_producer_8hpp.html @@ -0,0 +1,141 @@ + + + + + + + +OpenPose: include/openpose/producer/datumProducer.hpp File Reference + + + + + + + + + + + + + +
datumProducer.hpp File Reference
+
+
+
#include <atomic>
+#include <limits>
+#include <openpose/core/common.hpp>
+#include <openpose/core/datum.hpp>
+#include <openpose/utilities/fastMath.hpp>
+#include <openpose/producer/producer.hpp>
+#include <openpose/producer/datumProducer.hpp>
+#include <openpose/utilities/openCv.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Classes

class  op::DatumProducer< TDatum >
 
+ + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + +

+Functions

OP_API void op::datumProducerConstructor (const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long frameFirst, const unsigned long long frameStep, const unsigned long long frameLast)
 
OP_API void op::datumProducerConstructorTooManyConsecutiveEmptyFrames (unsigned int &numberConsecutiveEmptyFrames, const bool emptyFrame)
 
OP_API bool op::datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning (const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long numberFramesToProcess, const unsigned long long globalCounter)
 
OP_API void op::datumProducerConstructorRunningAndGetDatumApplyPlayerControls (const std::shared_ptr< Producer > &producerSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr)
 
OP_API unsigned long long op::datumProducerConstructorRunningAndGetNextFrameNumber (const std::shared_ptr< Producer > &producerSharedPtr)
 
OP_API void op::datumProducerConstructorRunningAndGetDatumFrameIntegrity (Matrix &matrix)
 
+
+
+ + + + diff --git a/web/html/doc/datum_producer_8hpp.js b/web/html/doc/datum_producer_8hpp.js new file mode 100644 index 000000000..c2ed33171 --- /dev/null +++ b/web/html/doc/datum_producer_8hpp.js @@ -0,0 +1,10 @@ +var datum_producer_8hpp = +[ + [ "DatumProducer", "classop_1_1_datum_producer.html", "classop_1_1_datum_producer" ], + [ "datumProducerConstructor", "datum_producer_8hpp.html#ad72abbc7b2600f543e4ee8e28392711e", null ], + [ "datumProducerConstructorRunningAndGetDatumApplyPlayerControls", "datum_producer_8hpp.html#a177ffd3101c7a1f5cf32e100474a1234", null ], + [ "datumProducerConstructorRunningAndGetDatumFrameIntegrity", "datum_producer_8hpp.html#a427c6244ee27171037bc201f401de16a", null ], + [ "datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning", "datum_producer_8hpp.html#a71c68de51a3608e782854c298b91cd62", null ], + [ "datumProducerConstructorRunningAndGetNextFrameNumber", "datum_producer_8hpp.html#a71cdc487bbec12ddbe4bac9123745494", null ], + [ "datumProducerConstructorTooManyConsecutiveEmptyFrames", "datum_producer_8hpp.html#a5001474237d31d72c9145a84ec5143da", null ] +]; \ No newline at end of file diff --git a/web/html/doc/datum_producer_8hpp_source.html b/web/html/doc/datum_producer_8hpp_source.html new file mode 100644 index 000000000..038bb633a --- /dev/null +++ b/web/html/doc/datum_producer_8hpp_source.html @@ -0,0 +1,314 @@ + + + + + + + +OpenPose: include/openpose/producer/datumProducer.hpp Source File + + + + + + + + + + + + + +
datumProducer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_PRODUCER_DATUM_PRODUCER_HPP
+
2 #define OPENPOSE_PRODUCER_DATUM_PRODUCER_HPP
+
3 
+
4 #include <atomic>
+
5 #include <limits> // std::numeric_limits
+ + + + +
10 
+
11 namespace op
+
12 {
+
13  template<typename TDatum>
+ +
15  {
+
16  public:
+
17  explicit DatumProducer(
+
18  const std::shared_ptr<Producer>& producerSharedPtr,
+
19  const unsigned long long frameFirst = 0, const unsigned long long frameStep = 1,
+
20  const unsigned long long frameLast = std::numeric_limits<unsigned long long>::max(),
+
21  const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr = nullptr);
+
22 
+
23  virtual ~DatumProducer();
+
24 
+
25  std::pair<bool, std::shared_ptr<std::vector<std::shared_ptr<TDatum>>>> checkIfRunningAndGetDatum();
+
26 
+
27  private:
+
28  const unsigned long long mNumberFramesToProcess;
+
29  std::shared_ptr<Producer> spProducer;
+
30  unsigned long long mGlobalCounter;
+
31  unsigned long long mFrameStep;
+
32  unsigned int mNumberConsecutiveEmptyFrames;
+
33  std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>> spVideoSeek;
+
34 
+
35  void checkIfTooManyConsecutiveEmptyFrames(
+
36  unsigned int& numberConsecutiveEmptyFrames, const bool emptyFrame) const;
+
37 
+
38  DELETE_COPY(DatumProducer);
+
39  };
+
40 }
+
41 
+
42 
+
43 
+
44 
+
45 
+
46 // Implementation
+ + +
49 namespace op
+
50 {
+
51  // Auxiliary functions for DatumProducer in order to 1) Reduce compiling time and 2) Remove OpenCV deps.
+ +
53  const std::shared_ptr<Producer>& producerSharedPtr, const unsigned long long frameFirst,
+
54  const unsigned long long frameStep, const unsigned long long frameLast);
+ +
56  unsigned int& numberConsecutiveEmptyFrames, const bool emptyFrame);
+ +
58  const std::shared_ptr<Producer>& producerSharedPtr, const unsigned long long numberFramesToProcess,
+
59  const unsigned long long globalCounter);
+ +
61  const std::shared_ptr<Producer>& producerSharedPtr,
+
62  const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr);
+ +
64  const std::shared_ptr<Producer>& producerSharedPtr);
+ +
66 
+
67  template<typename TDatum>
+ +
69  const std::shared_ptr<Producer>& producerSharedPtr,
+
70  const unsigned long long frameFirst, const unsigned long long frameStep,
+
71  const unsigned long long frameLast,
+
72  const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr) :
+
73  mNumberFramesToProcess{(frameLast != std::numeric_limits<unsigned long long>::max()
+
74  ? frameLast - frameFirst : frameLast)},
+
75  spProducer{producerSharedPtr},
+
76  mGlobalCounter{0ll},
+
77  mFrameStep{frameStep},
+
78  mNumberConsecutiveEmptyFrames{0u},
+
79  spVideoSeek{videoSeekSharedPtr}
+
80  {
+
81  try
+
82  {
+
83  datumProducerConstructor(producerSharedPtr, frameFirst, frameStep, frameLast);
+
84  }
+
85  catch (const std::exception& e)
+
86  {
+
87  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
+
88  }
+
89  }
+
90 
+
91  template<typename TDatum>
+ +
93  {
+
94  }
+
95 
+
96  template<typename TDatum>
+
97  std::pair<bool, std::shared_ptr<std::vector<std::shared_ptr<TDatum>>>> DatumProducer<TDatum>::checkIfRunningAndGetDatum()
+
98  {
+
99  try
+
100  {
+
101  // If producer released -> it sends an empty Matrix + a datumProducerRunning signal
+ +
103  spProducer, mNumberFramesToProcess, mGlobalCounter);
+
104  // If device is open
+
105  auto datums = std::make_shared<std::vector<std::shared_ptr<TDatum>>>();
+
106  if (datumProducerRunning)
+
107  {
+
108  // Fast forward/backward - Seek to specific frame index desired
+ +
110  // Get Matrix vector
+
111  std::string nextFrameName = spProducer->getNextFrameName();
+
112  const unsigned long long nextFrameNumber = datumProducerConstructorRunningAndGetNextFrameNumber(
+
113  spProducer);
+
114  const std::vector<Matrix> matrices = spProducer->getFrames();
+
115  // Check frames are not empty
+
116  checkIfTooManyConsecutiveEmptyFrames(
+
117  mNumberConsecutiveEmptyFrames, matrices.empty() || matrices[0].empty());
+
118  if (!matrices.empty())
+
119  {
+
120  // Get camera parameters
+
121  const std::vector<Matrix> cameraMatrices = spProducer->getCameraMatrices();
+
122  const std::vector<Matrix> cameraExtrinsics = spProducer->getCameraExtrinsics();
+
123  const std::vector<Matrix> cameraIntrinsics = spProducer->getCameraIntrinsics();
+
124  // Resize datum
+
125  datums->resize(matrices.size());
+
126  // Datum cannot be assigned before resize()
+
127  auto& datumPtr = (*datums)[0];
+
128  datumPtr = std::make_shared<TDatum>();
+
129  // Filling first element
+
130  std::swap(datumPtr->name, nextFrameName);
+
131  datumPtr->frameNumber = nextFrameNumber;
+
132  datumPtr->cvInputData = matrices[0];
+ +
134  if (!cameraMatrices.empty())
+
135  {
+
136  datumPtr->cameraMatrix = cameraMatrices[0];
+
137  datumPtr->cameraExtrinsics = cameraExtrinsics[0];
+
138  datumPtr->cameraIntrinsics = cameraIntrinsics[0];
+
139  }
+
140  // Initially, cvOutputData = cvInputData. No performance hit (both cv::Mat share raw memory)
+
141  datumPtr->cvOutputData = datumPtr->cvInputData;
+
142  // Resize if it's stereo-system
+
143  if (datums->size() > 1)
+
144  {
+
145  // Stereo-system: Assign all Matrices
+
146  for (auto i = 1u ; i < datums->size() ; i++)
+
147  {
+
148  auto& datumIPtr = (*datums)[i];
+
149  datumIPtr = std::make_shared<TDatum>();
+
150  datumIPtr->name = datumPtr->name;
+
151  datumIPtr->frameNumber = datumPtr->frameNumber;
+
152  datumIPtr->cvInputData = matrices[i];
+ +
154  datumIPtr->cvOutputData = datumIPtr->cvInputData;
+
155  if (cameraMatrices.size() > i)
+
156  {
+
157  datumIPtr->cameraMatrix = cameraMatrices[i];
+
158  datumIPtr->cameraExtrinsics = cameraExtrinsics[i];
+
159  datumIPtr->cameraIntrinsics = cameraIntrinsics[i];
+
160  }
+
161  }
+
162  }
+
163  // Check producer is running
+
164  if ((*datums)[0]->cvInputData.empty())
+
165  datums = nullptr;
+
166  // Increase counter if successful image
+
167  if (datums != nullptr)
+
168  mGlobalCounter += mFrameStep;
+
169  }
+
170  }
+
171  // Return result
+
172  return std::make_pair(datumProducerRunning, datums);
+
173  }
+
174  catch (const std::exception& e)
+
175  {
+
176  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
+
177  return std::make_pair(false, std::make_shared<std::vector<std::shared_ptr<TDatum>>>());
+
178  }
+
179  }
+
180 
+
181  template<typename TDatum>
+ +
183  unsigned int& numberConsecutiveEmptyFrames, const bool emptyFrame) const
+
184  {
+ +
186  numberConsecutiveEmptyFrames, emptyFrame);
+
187  }
+
188 
+
189  extern template class DatumProducer<BASE_DATUM>;
+
190 }
+
191 
+
192 
+
193 #endif // OPENPOSE_PRODUCER_DATUM_PRODUCER_HPP
+ +
std::pair< bool, std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > > checkIfRunningAndGetDatum()
+
DatumProducer(const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long frameFirst=0, const unsigned long long frameStep=1, const unsigned long long frameLast=std::numeric_limits< unsigned long long >::max(), const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr)
+
virtual ~DatumProducer()
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+ +
OP_API void datumProducerConstructorRunningAndGetDatumApplyPlayerControls(const std::shared_ptr< Producer > &producerSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr)
+
OP_API void datumProducerConstructorRunningAndGetDatumFrameIntegrity(Matrix &matrix)
+
OP_API void datumProducerConstructorTooManyConsecutiveEmptyFrames(unsigned int &numberConsecutiveEmptyFrames, const bool emptyFrame)
+
OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
+
OP_API bool datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning(const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long numberFramesToProcess, const unsigned long long globalCounter)
+
OP_API unsigned long long datumProducerConstructorRunningAndGetNextFrameNumber(const std::shared_ptr< Producer > &producerSharedPtr)
+
OP_API void datumProducerConstructor(const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long frameFirst, const unsigned long long frameStep, const unsigned long long frameLast)
+ + +
+
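To make the control flow of checkIfRunningAndGetDatum() concrete, a hedged polling-loop sketch follows; the loop structure and variable names are assumptions for illustration, not code from the repository.

// Illustrative polling loop around DatumProducer<op::Datum>.
#include <memory>
#include <openpose/producer/datumProducer.hpp>

void pollAllFrames(const std::shared_ptr<op::Producer>& producerSharedPtr)
{
    op::DatumProducer<op::Datum> datumProducer{producerSharedPtr};
    while (true)
    {
        // first: producer still running; second: shared_ptr to the datums (may be nullptr)
        const auto result = datumProducer.checkIfRunningAndGetDatum();
        if (!result.first)
            break; // producer released all frames or the frame limit was reached
        if (result.second != nullptr && !result.second->empty())
        {
            const auto& datumPtr = result.second->at(0);
            // ... consume datumPtr->cvInputData, datumPtr->frameNumber, etc. here ...
            (void)datumPtr;
        }
    }
}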
+ + + + diff --git a/web/html/doc/demo__advanced_8md.html b/web/html/doc/demo__advanced_8md.html new file mode 100644 index 000000000..055e60b0a --- /dev/null +++ b/web/html/doc/demo__advanced_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/advanced/demo_advanced.md File Reference + + + + + + + + + + + + + +
doc/advanced/demo_advanced.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/deployment_8md.html b/web/html/doc/deployment_8md.html new file mode 100644 index 000000000..c80c3a033 --- /dev/null +++ b/web/html/doc/deployment_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/advanced/deployment.md File Reference + + + + + + + + + + + + + +
doc/advanced/deployment.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/dir_2012eae92ff1c095841b0424b32ded73.html b/web/html/doc/dir_2012eae92ff1c095841b0424b32ded73.html new file mode 100644 index 000000000..7b9a7836f --- /dev/null +++ b/web/html/doc/dir_2012eae92ff1c095841b0424b32ded73.html @@ -0,0 +1,117 @@ + + + + + + + +OpenPose: include/openpose/3d Directory Reference + + + + + + + + + + + + + +
3d Directory Reference
+
+ +
+ + + + diff --git a/web/html/doc/dir_2012eae92ff1c095841b0424b32ded73.js b/web/html/doc/dir_2012eae92ff1c095841b0424b32ded73.js new file mode 100644 index 000000000..13d977806 --- /dev/null +++ b/web/html/doc/dir_2012eae92ff1c095841b0424b32ded73.js @@ -0,0 +1,13 @@ +var dir_2012eae92ff1c095841b0424b32ded73 = +[ + [ "cameraParameterReader.hpp", "camera_parameter_reader_8hpp.html", [ + [ "CameraParameterReader", "classop_1_1_camera_parameter_reader.html", "classop_1_1_camera_parameter_reader" ] + ] ], + [ "headers.hpp", "3d_2headers_8hpp.html", null ], + [ "jointAngleEstimation.hpp", "joint_angle_estimation_8hpp.html", null ], + [ "poseTriangulation.hpp", "pose_triangulation_8hpp.html", [ + [ "PoseTriangulation", "classop_1_1_pose_triangulation.html", "classop_1_1_pose_triangulation" ] + ] ], + [ "wJointAngleEstimation.hpp", "w_joint_angle_estimation_8hpp.html", null ], + [ "wPoseTriangulation.hpp", "w_pose_triangulation_8hpp.html", "w_pose_triangulation_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_2fe3b209ba91c67403a5b318dad0feb6.html b/web/html/doc/dir_2fe3b209ba91c67403a5b318dad0feb6.html new file mode 100644 index 000000000..b08c6ab81 --- /dev/null +++ b/web/html/doc/dir_2fe3b209ba91c67403a5b318dad0feb6.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: doc/installation Directory Reference + + + + + + + + + + + + + +
installation Directory Reference
+
+
+ + + + + + +

+Directories

directory  deprecated
 
directory  jetson_tx
 
+
+
+ + + + diff --git a/web/html/doc/dir_2fe3b209ba91c67403a5b318dad0feb6.js b/web/html/doc/dir_2fe3b209ba91c67403a5b318dad0feb6.js new file mode 100644 index 000000000..dcf3910a0 --- /dev/null +++ b/web/html/doc/dir_2fe3b209ba91c67403a5b318dad0feb6.js @@ -0,0 +1,5 @@ +var dir_2fe3b209ba91c67403a5b318dad0feb6 = +[ + [ "deprecated", "dir_860fec895be5700f57450c90b9398659.html", null ], + [ "jetson_tx", "dir_d30173e4a8fdb18630e5d2bb2948a3be.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_30b75edf6ab089fde7a8426886bd6b03.html b/web/html/doc/dir_30b75edf6ab089fde7a8426886bd6b03.html new file mode 100644 index 000000000..a9b70a0d2 --- /dev/null +++ b/web/html/doc/dir_30b75edf6ab089fde7a8426886bd6b03.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: include/openpose/unity Directory Reference + + + + + + + + + + + + + +
unity Directory Reference
+
+
+ + + + + + +

+Files

file  headers.hpp [code]
 
file  unityBinding.hpp [code]
 
+
+
+ + + + diff --git a/web/html/doc/dir_30b75edf6ab089fde7a8426886bd6b03.js b/web/html/doc/dir_30b75edf6ab089fde7a8426886bd6b03.js new file mode 100644 index 000000000..3ce30c748 --- /dev/null +++ b/web/html/doc/dir_30b75edf6ab089fde7a8426886bd6b03.js @@ -0,0 +1,5 @@ +var dir_30b75edf6ab089fde7a8426886bd6b03 = +[ + [ "headers.hpp", "unity_2headers_8hpp.html", null ], + [ "unityBinding.hpp", "unity_binding_8hpp.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_3521bb8cf95d1c22170a875d1710b83f.html b/web/html/doc/dir_3521bb8cf95d1c22170a875d1710b83f.html new file mode 100644 index 000000000..96c8998b6 --- /dev/null +++ b/web/html/doc/dir_3521bb8cf95d1c22170a875d1710b83f.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: doc/very_advanced Directory Reference + + + + + + + + + + + + + +
very_advanced Directory Reference
+
+
+ + + + +

+Directories

directory  library_structure
 
+
+
+ + + + diff --git a/web/html/doc/dir_3521bb8cf95d1c22170a875d1710b83f.js b/web/html/doc/dir_3521bb8cf95d1c22170a875d1710b83f.js new file mode 100644 index 000000000..5118af210 --- /dev/null +++ b/web/html/doc/dir_3521bb8cf95d1c22170a875d1710b83f.js @@ -0,0 +1,4 @@ +var dir_3521bb8cf95d1c22170a875d1710b83f = +[ + [ "library_structure", "dir_bb6374c98f96ead39f6032c111104f04.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_3c425d4d4c1956c7a8db48efb69a2718.html b/web/html/doc/dir_3c425d4d4c1956c7a8db48efb69a2718.html new file mode 100644 index 000000000..8d2f4d8f9 --- /dev/null +++ b/web/html/doc/dir_3c425d4d4c1956c7a8db48efb69a2718.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/wrapper Directory Reference + + + + + + + + + + + + + +
wrapper Directory Reference
+
+ +
+ + + + diff --git a/web/html/doc/dir_3c425d4d4c1956c7a8db48efb69a2718.js b/web/html/doc/dir_3c425d4d4c1956c7a8db48efb69a2718.js new file mode 100644 index 000000000..4d205ea71 --- /dev/null +++ b/web/html/doc/dir_3c425d4d4c1956c7a8db48efb69a2718.js @@ -0,0 +1,28 @@ +var dir_3c425d4d4c1956c7a8db48efb69a2718 = +[ + [ "enumClasses.hpp", "wrapper_2enum_classes_8hpp.html", "wrapper_2enum_classes_8hpp" ], + [ "headers.hpp", "wrapper_2headers_8hpp.html", null ], + [ "wrapper.hpp", "wrapper_8hpp.html", "wrapper_8hpp" ], + [ "wrapperAuxiliary.hpp", "wrapper_auxiliary_8hpp.html", "wrapper_auxiliary_8hpp" ], + [ "wrapperStructExtra.hpp", "wrapper_struct_extra_8hpp.html", [ + [ "WrapperStructExtra", "structop_1_1_wrapper_struct_extra.html", "structop_1_1_wrapper_struct_extra" ] + ] ], + [ "wrapperStructFace.hpp", "wrapper_struct_face_8hpp.html", [ + [ "WrapperStructFace", "structop_1_1_wrapper_struct_face.html", "structop_1_1_wrapper_struct_face" ] + ] ], + [ "wrapperStructGui.hpp", "wrapper_struct_gui_8hpp.html", [ + [ "WrapperStructGui", "structop_1_1_wrapper_struct_gui.html", "structop_1_1_wrapper_struct_gui" ] + ] ], + [ "wrapperStructHand.hpp", "wrapper_struct_hand_8hpp.html", [ + [ "WrapperStructHand", "structop_1_1_wrapper_struct_hand.html", "structop_1_1_wrapper_struct_hand" ] + ] ], + [ "wrapperStructInput.hpp", "wrapper_struct_input_8hpp.html", [ + [ "WrapperStructInput", "structop_1_1_wrapper_struct_input.html", "structop_1_1_wrapper_struct_input" ] + ] ], + [ "wrapperStructOutput.hpp", "wrapper_struct_output_8hpp.html", [ + [ "WrapperStructOutput", "structop_1_1_wrapper_struct_output.html", "structop_1_1_wrapper_struct_output" ] + ] ], + [ "wrapperStructPose.hpp", "wrapper_struct_pose_8hpp.html", [ + [ "WrapperStructPose", "structop_1_1_wrapper_struct_pose.html", "structop_1_1_wrapper_struct_pose" ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_3dc351f869bb7a8f0afe68ebb7e681e8.html b/web/html/doc/dir_3dc351f869bb7a8f0afe68ebb7e681e8.html new file mode 100644 index 000000000..24a8d9f01 --- /dev/null +++ b/web/html/doc/dir_3dc351f869bb7a8f0afe68ebb7e681e8.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: include/openpose/producer Directory Reference + + + + + + + + + + + + + +
producer Directory Reference
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +

+Files

file  datumProducer.hpp [code]
 
file  enumClasses.hpp [code]
 
file  flirReader.hpp [code]
 
file  headers.hpp [code]
 
file  imageDirectoryReader.hpp [code]
 
file  ipCameraReader.hpp [code]
 
file  producer.hpp [code]
 
file  spinnakerWrapper.hpp [code]
 
file  videoCaptureReader.hpp [code]
 
file  videoReader.hpp [code]
 
file  wDatumProducer.hpp [code]
 
file  webcamReader.hpp [code]
 
+
+
+ + + + diff --git a/web/html/doc/dir_3dc351f869bb7a8f0afe68ebb7e681e8.js b/web/html/doc/dir_3dc351f869bb7a8f0afe68ebb7e681e8.js new file mode 100644 index 000000000..0e32dbe77 --- /dev/null +++ b/web/html/doc/dir_3dc351f869bb7a8f0afe68ebb7e681e8.js @@ -0,0 +1,31 @@ +var dir_3dc351f869bb7a8f0afe68ebb7e681e8 = +[ + [ "datumProducer.hpp", "datum_producer_8hpp.html", "datum_producer_8hpp" ], + [ "enumClasses.hpp", "producer_2enum_classes_8hpp.html", "producer_2enum_classes_8hpp" ], + [ "flirReader.hpp", "flir_reader_8hpp.html", [ + [ "FlirReader", "classop_1_1_flir_reader.html", "classop_1_1_flir_reader" ] + ] ], + [ "headers.hpp", "producer_2headers_8hpp.html", null ], + [ "imageDirectoryReader.hpp", "image_directory_reader_8hpp.html", [ + [ "ImageDirectoryReader", "classop_1_1_image_directory_reader.html", "classop_1_1_image_directory_reader" ] + ] ], + [ "ipCameraReader.hpp", "ip_camera_reader_8hpp.html", [ + [ "IpCameraReader", "classop_1_1_ip_camera_reader.html", "classop_1_1_ip_camera_reader" ] + ] ], + [ "producer.hpp", "producer_8hpp.html", "producer_8hpp" ], + [ "spinnakerWrapper.hpp", "spinnaker_wrapper_8hpp.html", [ + [ "SpinnakerWrapper", "classop_1_1_spinnaker_wrapper.html", "classop_1_1_spinnaker_wrapper" ] + ] ], + [ "videoCaptureReader.hpp", "video_capture_reader_8hpp.html", [ + [ "VideoCaptureReader", "classop_1_1_video_capture_reader.html", "classop_1_1_video_capture_reader" ] + ] ], + [ "videoReader.hpp", "video_reader_8hpp.html", [ + [ "VideoReader", "classop_1_1_video_reader.html", "classop_1_1_video_reader" ] + ] ], + [ "wDatumProducer.hpp", "w_datum_producer_8hpp.html", [ + [ "WDatumProducer", "classop_1_1_w_datum_producer.html", "classop_1_1_w_datum_producer" ] + ] ], + [ "webcamReader.hpp", "webcam_reader_8hpp.html", [ + [ "WebcamReader", "classop_1_1_webcam_reader.html", "classop_1_1_webcam_reader" ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_40f12b33899adef613f503ab305e6d57.html b/web/html/doc/dir_40f12b33899adef613f503ab305e6d57.html new file mode 100644 index 000000000..456664e37 --- /dev/null +++ b/web/html/doc/dir_40f12b33899adef613f503ab305e6d57.html @@ -0,0 +1,143 @@ + + + + + + + +OpenPose: include/openpose/thread Directory Reference + + + + + + + + + + + + + +
thread Directory Reference

+Files

file  enumClasses.hpp [code]
 
file  headers.hpp [code]
 
file  priorityQueue.hpp [code]
 
file  queue.hpp [code]
 
file  queueBase.hpp [code]
 
file  subThread.hpp [code]
 
file  subThreadNoQueue.hpp [code]
 
file  subThreadQueueIn.hpp [code]
 
file  subThreadQueueInOut.hpp [code]
 
file  subThreadQueueOut.hpp [code]
 
file  thread.hpp [code]
 
file  threadManager.hpp [code]
 
file  wFpsMax.hpp [code]
 
file  wIdGenerator.hpp [code]
 
file  worker.hpp [code]
 
file  workerConsumer.hpp [code]
 
file  workerProducer.hpp [code]
 
file  wQueueAssembler.hpp [code]
 
file  wQueueOrderer.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_40f12b33899adef613f503ab305e6d57.js b/web/html/doc/dir_40f12b33899adef613f503ab305e6d57.js new file mode 100644 index 000000000..eed9db8e7 --- /dev/null +++ b/web/html/doc/dir_40f12b33899adef613f503ab305e6d57.js @@ -0,0 +1,26 @@ +var dir_40f12b33899adef613f503ab305e6d57 = +[ + [ "enumClasses.hpp", "thread_2enum_classes_8hpp.html", "thread_2enum_classes_8hpp" ], + [ "headers.hpp", "thread_2headers_8hpp.html", null ], + [ "priorityQueue.hpp", "priority_queue_8hpp.html", "priority_queue_8hpp" ], + [ "queue.hpp", "queue_8hpp.html", "queue_8hpp" ], + [ "queueBase.hpp", "queue_base_8hpp.html", [ + [ "QueueBase", "classop_1_1_queue_base.html", "classop_1_1_queue_base" ] + ] ], + [ "subThread.hpp", "sub_thread_8hpp.html", "sub_thread_8hpp" ], + [ "subThreadNoQueue.hpp", "sub_thread_no_queue_8hpp.html", "sub_thread_no_queue_8hpp" ], + [ "subThreadQueueIn.hpp", "sub_thread_queue_in_8hpp.html", "sub_thread_queue_in_8hpp" ], + [ "subThreadQueueInOut.hpp", "sub_thread_queue_in_out_8hpp.html", "sub_thread_queue_in_out_8hpp" ], + [ "subThreadQueueOut.hpp", "sub_thread_queue_out_8hpp.html", "sub_thread_queue_out_8hpp" ], + [ "thread.hpp", "thread_8hpp.html", "thread_8hpp" ], + [ "threadManager.hpp", "thread_manager_8hpp.html", "thread_manager_8hpp" ], + [ "wFpsMax.hpp", "w_fps_max_8hpp.html", "w_fps_max_8hpp" ], + [ "wIdGenerator.hpp", "w_id_generator_8hpp.html", "w_id_generator_8hpp" ], + [ "worker.hpp", "worker_8hpp.html", "worker_8hpp" ], + [ "workerConsumer.hpp", "worker_consumer_8hpp.html", "worker_consumer_8hpp" ], + [ "workerProducer.hpp", "worker_producer_8hpp.html", "worker_producer_8hpp" ], + [ "wQueueAssembler.hpp", "w_queue_assembler_8hpp.html", [ + [ "WQueueAssembler", "classop_1_1_w_queue_assembler.html", "classop_1_1_w_queue_assembler" ] + ] ], + [ "wQueueOrderer.hpp", "w_queue_orderer_8hpp.html", "w_queue_orderer_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.html b/web/html/doc/dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.html new file mode 100644 index 000000000..9d8f9f670 --- /dev/null +++ b/web/html/doc/dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: include/openpose/gpu Directory Reference + + + + + + + + + + + + + +
gpu Directory Reference

+Files

file  cuda.hpp [code]
 
file  enumClasses.hpp [code]
 
file  gpu.hpp [code]
 
file  headers.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.js b/web/html/doc/dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.js new file mode 100644 index 000000000..47a8849e2 --- /dev/null +++ b/web/html/doc/dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.js @@ -0,0 +1,7 @@ +var dir_50b707fa6cdf56bfdc2ad79b44ee2e7d = +[ + [ "cuda.hpp", "cuda_8hpp.html", "cuda_8hpp" ], + [ "enumClasses.hpp", "gpu_2enum_classes_8hpp.html", "gpu_2enum_classes_8hpp" ], + [ "gpu.hpp", "gpu_8hpp.html", "gpu_8hpp" ], + [ "headers.hpp", "gpu_2headers_8hpp.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_6b0b8e919b15d8dea67b0fa2c092513b.html b/web/html/doc/dir_6b0b8e919b15d8dea67b0fa2c092513b.html new file mode 100644 index 000000000..f07a55d56 --- /dev/null +++ b/web/html/doc/dir_6b0b8e919b15d8dea67b0fa2c092513b.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: include/openpose/net Directory Reference + + + + + + + + + + + + + +
net Directory Reference

+Files

file  bodyPartConnectorBase.hpp [code]
 
file  bodyPartConnectorCaffe.hpp [code]
 
file  headers.hpp [code]
 
file  maximumBase.hpp [code]
 
file  maximumCaffe.hpp [code]
 
file  net.hpp [code]
 
file  netCaffe.hpp [code]
 
file  netOpenCv.hpp [code]
 
file  nmsBase.hpp [code]
 
file  nmsCaffe.hpp [code]
 
file  resizeAndMergeBase.hpp [code]
 
file  resizeAndMergeCaffe.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_6b0b8e919b15d8dea67b0fa2c092513b.js b/web/html/doc/dir_6b0b8e919b15d8dea67b0fa2c092513b.js new file mode 100644 index 000000000..e71d8da92 --- /dev/null +++ b/web/html/doc/dir_6b0b8e919b15d8dea67b0fa2c092513b.js @@ -0,0 +1,29 @@ +var dir_6b0b8e919b15d8dea67b0fa2c092513b = +[ + [ "bodyPartConnectorBase.hpp", "body_part_connector_base_8hpp.html", "body_part_connector_base_8hpp" ], + [ "bodyPartConnectorCaffe.hpp", "body_part_connector_caffe_8hpp.html", [ + [ "BodyPartConnectorCaffe", "classop_1_1_body_part_connector_caffe.html", "classop_1_1_body_part_connector_caffe" ] + ] ], + [ "headers.hpp", "net_2headers_8hpp.html", null ], + [ "maximumBase.hpp", "maximum_base_8hpp.html", "maximum_base_8hpp" ], + [ "maximumCaffe.hpp", "maximum_caffe_8hpp.html", [ + [ "MaximumCaffe", "classop_1_1_maximum_caffe.html", "classop_1_1_maximum_caffe" ] + ] ], + [ "net.hpp", "net_8hpp.html", [ + [ "Net", "classop_1_1_net.html", "classop_1_1_net" ] + ] ], + [ "netCaffe.hpp", "net_caffe_8hpp.html", [ + [ "NetCaffe", "classop_1_1_net_caffe.html", "classop_1_1_net_caffe" ] + ] ], + [ "netOpenCv.hpp", "net_open_cv_8hpp.html", [ + [ "NetOpenCv", "classop_1_1_net_open_cv.html", "classop_1_1_net_open_cv" ] + ] ], + [ "nmsBase.hpp", "nms_base_8hpp.html", "nms_base_8hpp" ], + [ "nmsCaffe.hpp", "nms_caffe_8hpp.html", [ + [ "NmsCaffe", "classop_1_1_nms_caffe.html", "classop_1_1_nms_caffe" ] + ] ], + [ "resizeAndMergeBase.hpp", "resize_and_merge_base_8hpp.html", "resize_and_merge_base_8hpp" ], + [ "resizeAndMergeCaffe.hpp", "resize_and_merge_caffe_8hpp.html", [ + [ "ResizeAndMergeCaffe", "classop_1_1_resize_and_merge_caffe.html", "classop_1_1_resize_and_merge_caffe" ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_6beae53d0373f2b1f5bd56e91c6780e1.html b/web/html/doc/dir_6beae53d0373f2b1f5bd56e91c6780e1.html new file mode 100644 index 000000000..f4ebd27e8 --- /dev/null +++ b/web/html/doc/dir_6beae53d0373f2b1f5bd56e91c6780e1.html @@ -0,0 +1,131 @@ + + + + + + + +OpenPose: include/openpose/utilities Directory Reference + + + + + + + + + + + + + +
utilities Directory Reference

+Files

file  check.hpp [code]
 
file  enumClasses.hpp [code]
 
file  errorAndLog.hpp [code]
 
file  fastMath.hpp [code]
 
file  fileSystem.hpp [code]
 
file  flagsToOpenPose.hpp [code]
 
file  headers.hpp [code]
 
file  keypoint.hpp [code]
 
file  openCv.hpp [code]
 
file  pointerContainer.hpp [code]
 
file  profiler.hpp [code]
 
file  standard.hpp [code]
 
file  string.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_6beae53d0373f2b1f5bd56e91c6780e1.js b/web/html/doc/dir_6beae53d0373f2b1f5bd56e91c6780e1.js new file mode 100644 index 000000000..ad6ac56a2 --- /dev/null +++ b/web/html/doc/dir_6beae53d0373f2b1f5bd56e91c6780e1.js @@ -0,0 +1,16 @@ +var dir_6beae53d0373f2b1f5bd56e91c6780e1 = +[ + [ "check.hpp", "check_8hpp.html", "check_8hpp" ], + [ "enumClasses.hpp", "utilities_2enum_classes_8hpp.html", "utilities_2enum_classes_8hpp" ], + [ "errorAndLog.hpp", "error_and_log_8hpp.html", "error_and_log_8hpp" ], + [ "fastMath.hpp", "fast_math_8hpp.html", "fast_math_8hpp" ], + [ "fileSystem.hpp", "file_system_8hpp.html", "file_system_8hpp" ], + [ "flagsToOpenPose.hpp", "flags_to_open_pose_8hpp.html", "flags_to_open_pose_8hpp" ], + [ "headers.hpp", "utilities_2headers_8hpp.html", null ], + [ "keypoint.hpp", "keypoint_8hpp.html", "keypoint_8hpp" ], + [ "openCv.hpp", "open_cv_8hpp.html", "open_cv_8hpp" ], + [ "pointerContainer.hpp", "pointer_container_8hpp.html", "pointer_container_8hpp" ], + [ "profiler.hpp", "profiler_8hpp.html", "profiler_8hpp" ], + [ "standard.hpp", "standard_8hpp.html", "standard_8hpp" ], + [ "string.hpp", "utilities_2string_8hpp.html", "utilities_2string_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_76d1d371018acee6886a69c6ed6f0be5.html b/web/html/doc/dir_76d1d371018acee6886a69c6ed6f0be5.html new file mode 100644 index 000000000..0a833b777 --- /dev/null +++ b/web/html/doc/dir_76d1d371018acee6886a69c6ed6f0be5.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: include/openpose/hand Directory Reference + + + + + + + + + + + + + +
hand Directory Reference
+ + + + diff --git a/web/html/doc/dir_76d1d371018acee6886a69c6ed6f0be5.js b/web/html/doc/dir_76d1d371018acee6886a69c6ed6f0be5.js new file mode 100644 index 000000000..d76053cce --- /dev/null +++ b/web/html/doc/dir_76d1d371018acee6886a69c6ed6f0be5.js @@ -0,0 +1,33 @@ +var dir_76d1d371018acee6886a69c6ed6f0be5 = +[ + [ "handCpuRenderer.hpp", "hand_cpu_renderer_8hpp.html", [ + [ "HandCpuRenderer", "classop_1_1_hand_cpu_renderer.html", "classop_1_1_hand_cpu_renderer" ] + ] ], + [ "handDetector.hpp", "hand_detector_8hpp.html", [ + [ "HandDetector", "classop_1_1_hand_detector.html", "classop_1_1_hand_detector" ] + ] ], + [ "handDetectorFromTxt.hpp", "hand_detector_from_txt_8hpp.html", [ + [ "HandDetectorFromTxt", "classop_1_1_hand_detector_from_txt.html", "classop_1_1_hand_detector_from_txt" ] + ] ], + [ "handExtractorCaffe.hpp", "hand_extractor_caffe_8hpp.html", [ + [ "HandExtractorCaffe", "classop_1_1_hand_extractor_caffe.html", "classop_1_1_hand_extractor_caffe" ] + ] ], + [ "handExtractorNet.hpp", "hand_extractor_net_8hpp.html", [ + [ "HandExtractorNet", "classop_1_1_hand_extractor_net.html", "classop_1_1_hand_extractor_net" ] + ] ], + [ "handGpuRenderer.hpp", "hand_gpu_renderer_8hpp.html", [ + [ "HandGpuRenderer", "classop_1_1_hand_gpu_renderer.html", "classop_1_1_hand_gpu_renderer" ] + ] ], + [ "handParameters.hpp", "hand_parameters_8hpp.html", "hand_parameters_8hpp" ], + [ "handRenderer.hpp", "hand_renderer_8hpp.html", [ + [ "HandRenderer", "classop_1_1_hand_renderer.html", "classop_1_1_hand_renderer" ] + ] ], + [ "headers.hpp", "hand_2headers_8hpp.html", null ], + [ "renderHand.hpp", "render_hand_8hpp.html", "render_hand_8hpp" ], + [ "wHandDetector.hpp", "w_hand_detector_8hpp.html", "w_hand_detector_8hpp" ], + [ "wHandDetectorFromTxt.hpp", "w_hand_detector_from_txt_8hpp.html", "w_hand_detector_from_txt_8hpp" ], + [ "wHandDetectorTracking.hpp", "w_hand_detector_tracking_8hpp.html", "w_hand_detector_tracking_8hpp" ], + [ "wHandDetectorUpdate.hpp", "w_hand_detector_update_8hpp.html", "w_hand_detector_update_8hpp" ], + [ "wHandExtractorNet.hpp", "w_hand_extractor_net_8hpp.html", "w_hand_extractor_net_8hpp" ], + [ "wHandRenderer.hpp", "w_hand_renderer_8hpp.html", "w_hand_renderer_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_7de49b63a676cf8462ac8f05c3f89409.html b/web/html/doc/dir_7de49b63a676cf8462ac8f05c3f89409.html new file mode 100644 index 000000000..3ca22d6f8 --- /dev/null +++ b/web/html/doc/dir_7de49b63a676cf8462ac8f05c3f89409.html @@ -0,0 +1,159 @@ + + + + + + + +OpenPose: include/openpose/core Directory Reference + + + + + + + + + + + + + +
core Directory Reference

+Files

file  array.hpp [code]
 
file  arrayCpuGpu.hpp [code]
 
file  common.hpp [code]
 
file  cvMatToOpInput.hpp [code]
 
file  cvMatToOpOutput.hpp [code]
 
file  datum.hpp [code]
 
file  enumClasses.hpp [code]
 
file  gpuRenderer.hpp [code]
 
file  headers.hpp [code]
 
file  keepTopNPeople.hpp [code]
 
file  keypointScaler.hpp [code]
 
file  macros.hpp [code]
 
file  matrix.hpp [code]
 
file  opOutputToCvMat.hpp [code]
 
file  point.hpp [code]
 
file  rectangle.hpp [code]
 
file  renderer.hpp [code]
 
file  scaleAndSizeExtractor.hpp [code]
 
file  string.hpp [code]
 
file  verbosePrinter.hpp [code]
 
file  wCvMatToOpInput.hpp [code]
 
file  wCvMatToOpOutput.hpp [code]
 
file  wKeepTopNPeople.hpp [code]
 
file  wKeypointScaler.hpp [code]
 
file  wOpOutputToCvMat.hpp [code]
 
file  wScaleAndSizeExtractor.hpp [code]
 
file  wVerbosePrinter.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_7de49b63a676cf8462ac8f05c3f89409.js b/web/html/doc/dir_7de49b63a676cf8462ac8f05c3f89409.js new file mode 100644 index 000000000..9ed21e8d8 --- /dev/null +++ b/web/html/doc/dir_7de49b63a676cf8462ac8f05c3f89409.js @@ -0,0 +1,56 @@ +var dir_7de49b63a676cf8462ac8f05c3f89409 = +[ + [ "array.hpp", "array_8hpp.html", [ + [ "Array", "classop_1_1_array.html", "classop_1_1_array" ] + ] ], + [ "arrayCpuGpu.hpp", "array_cpu_gpu_8hpp.html", [ + [ "ArrayCpuGpu", "classop_1_1_array_cpu_gpu.html", "classop_1_1_array_cpu_gpu" ] + ] ], + [ "common.hpp", "common_8hpp.html", null ], + [ "cvMatToOpInput.hpp", "cv_mat_to_op_input_8hpp.html", [ + [ "CvMatToOpInput", "classop_1_1_cv_mat_to_op_input.html", "classop_1_1_cv_mat_to_op_input" ] + ] ], + [ "cvMatToOpOutput.hpp", "cv_mat_to_op_output_8hpp.html", [ + [ "CvMatToOpOutput", "classop_1_1_cv_mat_to_op_output.html", "classop_1_1_cv_mat_to_op_output" ] + ] ], + [ "datum.hpp", "datum_8hpp.html", "datum_8hpp" ], + [ "enumClasses.hpp", "core_2enum_classes_8hpp.html", "core_2enum_classes_8hpp" ], + [ "gpuRenderer.hpp", "gpu_renderer_8hpp.html", [ + [ "GpuRenderer", "classop_1_1_gpu_renderer.html", "classop_1_1_gpu_renderer" ] + ] ], + [ "headers.hpp", "core_2headers_8hpp.html", null ], + [ "keepTopNPeople.hpp", "keep_top_n_people_8hpp.html", [ + [ "KeepTopNPeople", "classop_1_1_keep_top_n_people.html", "classop_1_1_keep_top_n_people" ] + ] ], + [ "keypointScaler.hpp", "keypoint_scaler_8hpp.html", [ + [ "KeypointScaler", "classop_1_1_keypoint_scaler.html", "classop_1_1_keypoint_scaler" ] + ] ], + [ "macros.hpp", "macros_8hpp.html", "macros_8hpp" ], + [ "matrix.hpp", "matrix_8hpp.html", "matrix_8hpp" ], + [ "opOutputToCvMat.hpp", "op_output_to_cv_mat_8hpp.html", [ + [ "OpOutputToCvMat", "classop_1_1_op_output_to_cv_mat.html", "classop_1_1_op_output_to_cv_mat" ] + ] ], + [ "point.hpp", "point_8hpp.html", [ + [ "Point", "structop_1_1_point.html", "structop_1_1_point" ] + ] ], + [ "rectangle.hpp", "rectangle_8hpp.html", "rectangle_8hpp" ], + [ "renderer.hpp", "renderer_8hpp.html", [ + [ "Renderer", "classop_1_1_renderer.html", "classop_1_1_renderer" ] + ] ], + [ "scaleAndSizeExtractor.hpp", "scale_and_size_extractor_8hpp.html", [ + [ "ScaleAndSizeExtractor", "classop_1_1_scale_and_size_extractor.html", "classop_1_1_scale_and_size_extractor" ] + ] ], + [ "string.hpp", "core_2string_8hpp.html", [ + [ "String", "classop_1_1_string.html", "classop_1_1_string" ] + ] ], + [ "verbosePrinter.hpp", "verbose_printer_8hpp.html", [ + [ "VerbosePrinter", "classop_1_1_verbose_printer.html", "classop_1_1_verbose_printer" ] + ] ], + [ "wCvMatToOpInput.hpp", "w_cv_mat_to_op_input_8hpp.html", "w_cv_mat_to_op_input_8hpp" ], + [ "wCvMatToOpOutput.hpp", "w_cv_mat_to_op_output_8hpp.html", "w_cv_mat_to_op_output_8hpp" ], + [ "wKeepTopNPeople.hpp", "w_keep_top_n_people_8hpp.html", "w_keep_top_n_people_8hpp" ], + [ "wKeypointScaler.hpp", "w_keypoint_scaler_8hpp.html", "w_keypoint_scaler_8hpp" ], + [ "wOpOutputToCvMat.hpp", "w_op_output_to_cv_mat_8hpp.html", "w_op_output_to_cv_mat_8hpp" ], + [ "wScaleAndSizeExtractor.hpp", "w_scale_and_size_extractor_8hpp.html", "w_scale_and_size_extractor_8hpp" ], + [ "wVerbosePrinter.hpp", "w_verbose_printer_8hpp.html", "w_verbose_printer_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_85db440c4601460769d5050789bfa56d.html b/web/html/doc/dir_85db440c4601460769d5050789bfa56d.html new file mode 100644 index 000000000..71b903740 --- /dev/null +++ b/web/html/doc/dir_85db440c4601460769d5050789bfa56d.html 
@@ -0,0 +1,109 @@ + + + + + + + +OpenPose: include/openpose/calibration Directory Reference + + + + + + + + + + + + + +
calibration Directory Reference

+Files

file  cameraParameterEstimation.hpp [code]
 
file  headers.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_85db440c4601460769d5050789bfa56d.js b/web/html/doc/dir_85db440c4601460769d5050789bfa56d.js new file mode 100644 index 000000000..d6b10dc1e --- /dev/null +++ b/web/html/doc/dir_85db440c4601460769d5050789bfa56d.js @@ -0,0 +1,5 @@ +var dir_85db440c4601460769d5050789bfa56d = +[ + [ "cameraParameterEstimation.hpp", "camera_parameter_estimation_8hpp.html", "camera_parameter_estimation_8hpp" ], + [ "headers.hpp", "calibration_2headers_8hpp.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_860fec895be5700f57450c90b9398659.html b/web/html/doc/dir_860fec895be5700f57450c90b9398659.html new file mode 100644 index 000000000..cf545c39a --- /dev/null +++ b/web/html/doc/dir_860fec895be5700f57450c90b9398659.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/deprecated Directory Reference + + + + + + + + + + + + + +
deprecated Directory Reference
+ + + + diff --git a/web/html/doc/dir_8f5ad9f4aef96c85fa295a06f65e3f88.html b/web/html/doc/dir_8f5ad9f4aef96c85fa295a06f65e3f88.html new file mode 100644 index 000000000..e2f8e64ab --- /dev/null +++ b/web/html/doc/dir_8f5ad9f4aef96c85fa295a06f65e3f88.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: include/openpose/tracking Directory Reference + + + + + + + + + + + + + +
tracking Directory Reference

+Files

file  headers.hpp [code]
 
file  personIdExtractor.hpp [code]
 
file  personTracker.hpp [code]
 
file  wPersonIdExtractor.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_8f5ad9f4aef96c85fa295a06f65e3f88.js b/web/html/doc/dir_8f5ad9f4aef96c85fa295a06f65e3f88.js new file mode 100644 index 000000000..34024f871 --- /dev/null +++ b/web/html/doc/dir_8f5ad9f4aef96c85fa295a06f65e3f88.js @@ -0,0 +1,11 @@ +var dir_8f5ad9f4aef96c85fa295a06f65e3f88 = +[ + [ "headers.hpp", "tracking_2headers_8hpp.html", null ], + [ "personIdExtractor.hpp", "person_id_extractor_8hpp.html", [ + [ "PersonIdExtractor", "classop_1_1_person_id_extractor.html", "classop_1_1_person_id_extractor" ] + ] ], + [ "personTracker.hpp", "person_tracker_8hpp.html", [ + [ "PersonTracker", "classop_1_1_person_tracker.html", "classop_1_1_person_tracker" ] + ] ], + [ "wPersonIdExtractor.hpp", "w_person_id_extractor_8hpp.html", "w_person_id_extractor_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_a43890ebc8412b28a6c3727ba389c186.html b/web/html/doc/dir_a43890ebc8412b28a6c3727ba389c186.html new file mode 100644 index 000000000..1f0b3c54b --- /dev/null +++ b/web/html/doc/dir_a43890ebc8412b28a6c3727ba389c186.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/advanced Directory Reference + + + + + + + + + + + + + +
advanced Directory Reference
+ + + + diff --git a/web/html/doc/dir_bb0b1a6a2f2ff4919a87df39c7181fc7.html b/web/html/doc/dir_bb0b1a6a2f2ff4919a87df39c7181fc7.html new file mode 100644 index 000000000..084a43dd1 --- /dev/null +++ b/web/html/doc/dir_bb0b1a6a2f2ff4919a87df39c7181fc7.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/pose Directory Reference + + + + + + + + + + + + + +
pose Directory Reference
+ + + + diff --git a/web/html/doc/dir_bb0b1a6a2f2ff4919a87df39c7181fc7.js b/web/html/doc/dir_bb0b1a6a2f2ff4919a87df39c7181fc7.js new file mode 100644 index 000000000..221c7e2b5 --- /dev/null +++ b/web/html/doc/dir_bb0b1a6a2f2ff4919a87df39c7181fc7.js @@ -0,0 +1,29 @@ +var dir_bb0b1a6a2f2ff4919a87df39c7181fc7 = +[ + [ "enumClasses.hpp", "pose_2enum_classes_8hpp.html", "pose_2enum_classes_8hpp" ], + [ "headers.hpp", "pose_2headers_8hpp.html", null ], + [ "poseCpuRenderer.hpp", "pose_cpu_renderer_8hpp.html", [ + [ "PoseCpuRenderer", "classop_1_1_pose_cpu_renderer.html", "classop_1_1_pose_cpu_renderer" ] + ] ], + [ "poseExtractor.hpp", "pose_extractor_8hpp.html", [ + [ "PoseExtractor", "classop_1_1_pose_extractor.html", "classop_1_1_pose_extractor" ] + ] ], + [ "poseExtractorCaffe.hpp", "pose_extractor_caffe_8hpp.html", [ + [ "PoseExtractorCaffe", "classop_1_1_pose_extractor_caffe.html", "classop_1_1_pose_extractor_caffe" ] + ] ], + [ "poseExtractorNet.hpp", "pose_extractor_net_8hpp.html", [ + [ "PoseExtractorNet", "classop_1_1_pose_extractor_net.html", "classop_1_1_pose_extractor_net" ] + ] ], + [ "poseGpuRenderer.hpp", "pose_gpu_renderer_8hpp.html", [ + [ "PoseGpuRenderer", "classop_1_1_pose_gpu_renderer.html", "classop_1_1_pose_gpu_renderer" ] + ] ], + [ "poseParameters.hpp", "pose_parameters_8hpp.html", "pose_parameters_8hpp" ], + [ "poseParametersRender.hpp", "pose_parameters_render_8hpp.html", "pose_parameters_render_8hpp" ], + [ "poseRenderer.hpp", "pose_renderer_8hpp.html", [ + [ "PoseRenderer", "classop_1_1_pose_renderer.html", "classop_1_1_pose_renderer" ] + ] ], + [ "renderPose.hpp", "render_pose_8hpp.html", "render_pose_8hpp" ], + [ "wPoseExtractor.hpp", "w_pose_extractor_8hpp.html", "w_pose_extractor_8hpp" ], + [ "wPoseExtractorNet.hpp", "w_pose_extractor_net_8hpp.html", "w_pose_extractor_net_8hpp" ], + [ "wPoseRenderer.hpp", "w_pose_renderer_8hpp.html", "w_pose_renderer_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_bb6374c98f96ead39f6032c111104f04.html b/web/html/doc/dir_bb6374c98f96ead39f6032c111104f04.html new file mode 100644 index 000000000..566568485 --- /dev/null +++ b/web/html/doc/dir_bb6374c98f96ead39f6032c111104f04.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/very_advanced/library_structure Directory Reference + + + + + + + + + + + + + +
library_structure Directory Reference
+ + + + diff --git a/web/html/doc/dir_c7344abab9be755d8deee77d51a9abb7.html b/web/html/doc/dir_c7344abab9be755d8deee77d51a9abb7.html new file mode 100644 index 000000000..fe1db1b0c --- /dev/null +++ b/web/html/doc/dir_c7344abab9be755d8deee77d51a9abb7.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/face Directory Reference + + + + + + + + + + + + + +
face Directory Reference
+ + + + diff --git a/web/html/doc/dir_c7344abab9be755d8deee77d51a9abb7.js b/web/html/doc/dir_c7344abab9be755d8deee77d51a9abb7.js new file mode 100644 index 000000000..bcc671c35 --- /dev/null +++ b/web/html/doc/dir_c7344abab9be755d8deee77d51a9abb7.js @@ -0,0 +1,31 @@ +var dir_c7344abab9be755d8deee77d51a9abb7 = +[ + [ "faceCpuRenderer.hpp", "face_cpu_renderer_8hpp.html", [ + [ "FaceCpuRenderer", "classop_1_1_face_cpu_renderer.html", "classop_1_1_face_cpu_renderer" ] + ] ], + [ "faceDetector.hpp", "face_detector_8hpp.html", [ + [ "FaceDetector", "classop_1_1_face_detector.html", "classop_1_1_face_detector" ] + ] ], + [ "faceDetectorOpenCV.hpp", "face_detector_open_c_v_8hpp.html", [ + [ "FaceDetectorOpenCV", "classop_1_1_face_detector_open_c_v.html", "classop_1_1_face_detector_open_c_v" ] + ] ], + [ "faceExtractorCaffe.hpp", "face_extractor_caffe_8hpp.html", [ + [ "FaceExtractorCaffe", "classop_1_1_face_extractor_caffe.html", "classop_1_1_face_extractor_caffe" ] + ] ], + [ "faceExtractorNet.hpp", "face_extractor_net_8hpp.html", [ + [ "FaceExtractorNet", "classop_1_1_face_extractor_net.html", "classop_1_1_face_extractor_net" ] + ] ], + [ "faceGpuRenderer.hpp", "face_gpu_renderer_8hpp.html", [ + [ "FaceGpuRenderer", "classop_1_1_face_gpu_renderer.html", "classop_1_1_face_gpu_renderer" ] + ] ], + [ "faceParameters.hpp", "face_parameters_8hpp.html", "face_parameters_8hpp" ], + [ "faceRenderer.hpp", "face_renderer_8hpp.html", [ + [ "FaceRenderer", "classop_1_1_face_renderer.html", "classop_1_1_face_renderer" ] + ] ], + [ "headers.hpp", "face_2headers_8hpp.html", null ], + [ "renderFace.hpp", "render_face_8hpp.html", "render_face_8hpp" ], + [ "wFaceDetector.hpp", "w_face_detector_8hpp.html", "w_face_detector_8hpp" ], + [ "wFaceDetectorOpenCV.hpp", "w_face_detector_open_c_v_8hpp.html", "w_face_detector_open_c_v_8hpp" ], + [ "wFaceExtractorNet.hpp", "w_face_extractor_net_8hpp.html", "w_face_extractor_net_8hpp" ], + [ "wFaceRenderer.hpp", "w_face_renderer_8hpp.html", "w_face_renderer_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_d30173e4a8fdb18630e5d2bb2948a3be.html b/web/html/doc/dir_d30173e4a8fdb18630e5d2bb2948a3be.html new file mode 100644 index 000000000..fe1728815 --- /dev/null +++ b/web/html/doc/dir_d30173e4a8fdb18630e5d2bb2948a3be.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/jetson_tx Directory Reference + + + + + + + + + + + + + +
jetson_tx Directory Reference
+ + + + diff --git a/web/html/doc/dir_d44c64559bbebec7f509842c48db8b23.html b/web/html/doc/dir_d44c64559bbebec7f509842c48db8b23.html new file mode 100644 index 000000000..d9ecd979c --- /dev/null +++ b/web/html/doc/dir_d44c64559bbebec7f509842c48db8b23.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: include Directory Reference + + + + + + + + + + + + + +
include Directory Reference

+Directories

directory  openpose
 
+ + + + diff --git a/web/html/doc/dir_d44c64559bbebec7f509842c48db8b23.js b/web/html/doc/dir_d44c64559bbebec7f509842c48db8b23.js new file mode 100644 index 000000000..092345490 --- /dev/null +++ b/web/html/doc/dir_d44c64559bbebec7f509842c48db8b23.js @@ -0,0 +1,4 @@ +var dir_d44c64559bbebec7f509842c48db8b23 = +[ + [ "openpose", "dir_fdee266290491355a8cca55fd72320d8.html", "dir_fdee266290491355a8cca55fd72320d8" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_e68e8157741866f444e17edd764ebbae.html b/web/html/doc/dir_e68e8157741866f444e17edd764ebbae.html new file mode 100644 index 000000000..ab0623d7d --- /dev/null +++ b/web/html/doc/dir_e68e8157741866f444e17edd764ebbae.html @@ -0,0 +1,111 @@ + + + + + + + +OpenPose: doc Directory Reference + + + + + + + + + + + + + +
doc Directory Reference

+Directories

directory  advanced
 
directory  installation
 
directory  very_advanced
 
+ + + + diff --git a/web/html/doc/dir_e68e8157741866f444e17edd764ebbae.js b/web/html/doc/dir_e68e8157741866f444e17edd764ebbae.js new file mode 100644 index 000000000..28322cfbb --- /dev/null +++ b/web/html/doc/dir_e68e8157741866f444e17edd764ebbae.js @@ -0,0 +1,6 @@ +var dir_e68e8157741866f444e17edd764ebbae = +[ + [ "advanced", "dir_a43890ebc8412b28a6c3727ba389c186.html", null ], + [ "installation", "dir_2fe3b209ba91c67403a5b318dad0feb6.html", "dir_2fe3b209ba91c67403a5b318dad0feb6" ], + [ "very_advanced", "dir_3521bb8cf95d1c22170a875d1710b83f.html", "dir_3521bb8cf95d1c22170a875d1710b83f" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.html b/web/html/doc/dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.html new file mode 100644 index 000000000..c71d9e613 --- /dev/null +++ b/web/html/doc/dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/gui Directory Reference + + + + + + + + + + + + + +
gui Directory Reference

+Files

file  enumClasses.hpp [code]
 
file  frameDisplayer.hpp [code]
 
file  gui.hpp [code]
 
file  gui3D.hpp [code]
 
file  guiAdam.hpp [code]
 
file  guiInfoAdder.hpp [code]
 
file  headers.hpp [code]
 
file  wGui.hpp [code]
 
file  wGui3D.hpp [code]
 
file  wGuiAdam.hpp [code]
 
file  wGuiInfoAdder.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.js b/web/html/doc/dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.js new file mode 100644 index 000000000..c12768f75 --- /dev/null +++ b/web/html/doc/dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.js @@ -0,0 +1,22 @@ +var dir_fbdcb2ba3f7af3b3a55342a5bedf43b8 = +[ + [ "enumClasses.hpp", "gui_2enum_classes_8hpp.html", "gui_2enum_classes_8hpp" ], + [ "frameDisplayer.hpp", "frame_displayer_8hpp.html", [ + [ "FrameDisplayer", "classop_1_1_frame_displayer.html", "classop_1_1_frame_displayer" ] + ] ], + [ "gui.hpp", "gui_8hpp.html", [ + [ "Gui", "classop_1_1_gui.html", "classop_1_1_gui" ] + ] ], + [ "gui3D.hpp", "gui3_d_8hpp.html", [ + [ "Gui3D", "classop_1_1_gui3_d.html", "classop_1_1_gui3_d" ] + ] ], + [ "guiAdam.hpp", "gui_adam_8hpp.html", null ], + [ "guiInfoAdder.hpp", "gui_info_adder_8hpp.html", [ + [ "GuiInfoAdder", "classop_1_1_gui_info_adder.html", "classop_1_1_gui_info_adder" ] + ] ], + [ "headers.hpp", "gui_2headers_8hpp.html", null ], + [ "wGui.hpp", "w_gui_8hpp.html", "w_gui_8hpp" ], + [ "wGui3D.hpp", "w_gui3_d_8hpp.html", "w_gui3_d_8hpp" ], + [ "wGuiAdam.hpp", "w_gui_adam_8hpp.html", null ], + [ "wGuiInfoAdder.hpp", "w_gui_info_adder_8hpp.html", "w_gui_info_adder_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_fdee266290491355a8cca55fd72320d8.html b/web/html/doc/dir_fdee266290491355a8cca55fd72320d8.html new file mode 100644 index 000000000..860fe82a9 --- /dev/null +++ b/web/html/doc/dir_fdee266290491355a8cca55fd72320d8.html @@ -0,0 +1,144 @@ + + + + + + + +OpenPose: include/openpose Directory Reference + + + + + + + + + + + + + +
openpose Directory Reference

+Directories

directory  3d
 
directory  calibration
 
directory  core
 
directory  face
 
directory  filestream
 
directory  gpu
 
directory  gui
 
directory  hand
 
directory  net
 
directory  pose
 
directory  producer
 
directory  thread
 
directory  tracking
 
directory  unity
 
directory  utilities
 
directory  wrapper
 

+Files

file  flags.hpp [code]
 
file  headers.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_fdee266290491355a8cca55fd72320d8.js b/web/html/doc/dir_fdee266290491355a8cca55fd72320d8.js new file mode 100644 index 000000000..6e160ceaa --- /dev/null +++ b/web/html/doc/dir_fdee266290491355a8cca55fd72320d8.js @@ -0,0 +1,21 @@ +var dir_fdee266290491355a8cca55fd72320d8 = +[ + [ "3d", "dir_2012eae92ff1c095841b0424b32ded73.html", "dir_2012eae92ff1c095841b0424b32ded73" ], + [ "calibration", "dir_85db440c4601460769d5050789bfa56d.html", "dir_85db440c4601460769d5050789bfa56d" ], + [ "core", "dir_7de49b63a676cf8462ac8f05c3f89409.html", "dir_7de49b63a676cf8462ac8f05c3f89409" ], + [ "face", "dir_c7344abab9be755d8deee77d51a9abb7.html", "dir_c7344abab9be755d8deee77d51a9abb7" ], + [ "filestream", "dir_fec53438e526959e34f7f645cf03dfd4.html", "dir_fec53438e526959e34f7f645cf03dfd4" ], + [ "gpu", "dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.html", "dir_50b707fa6cdf56bfdc2ad79b44ee2e7d" ], + [ "gui", "dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.html", "dir_fbdcb2ba3f7af3b3a55342a5bedf43b8" ], + [ "hand", "dir_76d1d371018acee6886a69c6ed6f0be5.html", "dir_76d1d371018acee6886a69c6ed6f0be5" ], + [ "net", "dir_6b0b8e919b15d8dea67b0fa2c092513b.html", "dir_6b0b8e919b15d8dea67b0fa2c092513b" ], + [ "pose", "dir_bb0b1a6a2f2ff4919a87df39c7181fc7.html", "dir_bb0b1a6a2f2ff4919a87df39c7181fc7" ], + [ "producer", "dir_3dc351f869bb7a8f0afe68ebb7e681e8.html", "dir_3dc351f869bb7a8f0afe68ebb7e681e8" ], + [ "thread", "dir_40f12b33899adef613f503ab305e6d57.html", "dir_40f12b33899adef613f503ab305e6d57" ], + [ "tracking", "dir_8f5ad9f4aef96c85fa295a06f65e3f88.html", "dir_8f5ad9f4aef96c85fa295a06f65e3f88" ], + [ "unity", "dir_30b75edf6ab089fde7a8426886bd6b03.html", "dir_30b75edf6ab089fde7a8426886bd6b03" ], + [ "utilities", "dir_6beae53d0373f2b1f5bd56e91c6780e1.html", "dir_6beae53d0373f2b1f5bd56e91c6780e1" ], + [ "wrapper", "dir_3c425d4d4c1956c7a8db48efb69a2718.html", "dir_3c425d4d4c1956c7a8db48efb69a2718" ], + [ "flags.hpp", "flags_8hpp.html", "flags_8hpp" ], + [ "headers.hpp", "headers_8hpp.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/dir_fec53438e526959e34f7f645cf03dfd4.html b/web/html/doc/dir_fec53438e526959e34f7f645cf03dfd4.html new file mode 100644 index 000000000..1bcab646f --- /dev/null +++ b/web/html/doc/dir_fec53438e526959e34f7f645cf03dfd4.html @@ -0,0 +1,153 @@ + + + + + + + +OpenPose: include/openpose/filestream Directory Reference + + + + + + + + + + + + + +
filestream Directory Reference

+Files

file  bvhSaver.hpp [code]
 
file  cocoJsonSaver.hpp [code]
 
file  enumClasses.hpp [code]
 
file  fileSaver.hpp [code]
 
file  fileStream.hpp [code]
 
file  headers.hpp [code]
 
file  heatMapSaver.hpp [code]
 
file  imageSaver.hpp [code]
 
file  jsonOfstream.hpp [code]
 
file  keypointSaver.hpp [code]
 
file  peopleJsonSaver.hpp [code]
 
file  udpSender.hpp [code]
 
file  videoSaver.hpp [code]
 
file  wBvhSaver.hpp [code]
 
file  wCocoJsonSaver.hpp [code]
 
file  wFaceSaver.hpp [code]
 
file  wHandSaver.hpp [code]
 
file  wHeatMapSaver.hpp [code]
 
file  wImageSaver.hpp [code]
 
file  wPeopleJsonSaver.hpp [code]
 
file  wPoseSaver.hpp [code]
 
file  wUdpSender.hpp [code]
 
file  wVideoSaver.hpp [code]
 
file  wVideoSaver3D.hpp [code]
 
+ + + + diff --git a/web/html/doc/dir_fec53438e526959e34f7f645cf03dfd4.js b/web/html/doc/dir_fec53438e526959e34f7f645cf03dfd4.js new file mode 100644 index 000000000..f73ee6b43 --- /dev/null +++ b/web/html/doc/dir_fec53438e526959e34f7f645cf03dfd4.js @@ -0,0 +1,45 @@ +var dir_fec53438e526959e34f7f645cf03dfd4 = +[ + [ "bvhSaver.hpp", "bvh_saver_8hpp.html", null ], + [ "cocoJsonSaver.hpp", "coco_json_saver_8hpp.html", [ + [ "CocoJsonSaver", "classop_1_1_coco_json_saver.html", "classop_1_1_coco_json_saver" ] + ] ], + [ "enumClasses.hpp", "filestream_2enum_classes_8hpp.html", "filestream_2enum_classes_8hpp" ], + [ "fileSaver.hpp", "file_saver_8hpp.html", [ + [ "FileSaver", "classop_1_1_file_saver.html", "classop_1_1_file_saver" ] + ] ], + [ "fileStream.hpp", "file_stream_8hpp.html", "file_stream_8hpp" ], + [ "headers.hpp", "filestream_2headers_8hpp.html", null ], + [ "heatMapSaver.hpp", "heat_map_saver_8hpp.html", [ + [ "HeatMapSaver", "classop_1_1_heat_map_saver.html", "classop_1_1_heat_map_saver" ] + ] ], + [ "imageSaver.hpp", "image_saver_8hpp.html", [ + [ "ImageSaver", "classop_1_1_image_saver.html", "classop_1_1_image_saver" ] + ] ], + [ "jsonOfstream.hpp", "json_ofstream_8hpp.html", [ + [ "JsonOfstream", "classop_1_1_json_ofstream.html", "classop_1_1_json_ofstream" ] + ] ], + [ "keypointSaver.hpp", "keypoint_saver_8hpp.html", [ + [ "KeypointSaver", "classop_1_1_keypoint_saver.html", "classop_1_1_keypoint_saver" ] + ] ], + [ "peopleJsonSaver.hpp", "people_json_saver_8hpp.html", [ + [ "PeopleJsonSaver", "classop_1_1_people_json_saver.html", "classop_1_1_people_json_saver" ] + ] ], + [ "udpSender.hpp", "udp_sender_8hpp.html", [ + [ "UdpSender", "classop_1_1_udp_sender.html", "classop_1_1_udp_sender" ] + ] ], + [ "videoSaver.hpp", "video_saver_8hpp.html", [ + [ "VideoSaver", "classop_1_1_video_saver.html", "classop_1_1_video_saver" ] + ] ], + [ "wBvhSaver.hpp", "w_bvh_saver_8hpp.html", null ], + [ "wCocoJsonSaver.hpp", "w_coco_json_saver_8hpp.html", "w_coco_json_saver_8hpp" ], + [ "wFaceSaver.hpp", "w_face_saver_8hpp.html", "w_face_saver_8hpp" ], + [ "wHandSaver.hpp", "w_hand_saver_8hpp.html", "w_hand_saver_8hpp" ], + [ "wHeatMapSaver.hpp", "w_heat_map_saver_8hpp.html", "w_heat_map_saver_8hpp" ], + [ "wImageSaver.hpp", "w_image_saver_8hpp.html", "w_image_saver_8hpp" ], + [ "wPeopleJsonSaver.hpp", "w_people_json_saver_8hpp.html", "w_people_json_saver_8hpp" ], + [ "wPoseSaver.hpp", "w_pose_saver_8hpp.html", "w_pose_saver_8hpp" ], + [ "wUdpSender.hpp", "w_udp_sender_8hpp.html", "w_udp_sender_8hpp" ], + [ "wVideoSaver.hpp", "w_video_saver_8hpp.html", "w_video_saver_8hpp" ], + [ "wVideoSaver3D.hpp", "w_video_saver3_d_8hpp.html", "w_video_saver3_d_8hpp" ] +]; \ No newline at end of file diff --git a/web/html/doc/doc.png b/web/html/doc/doc.png new file mode 100644 index 000000000..17edabff9 Binary files /dev/null and b/web/html/doc/doc.png differ diff --git a/web/html/doc/doc_2_r_e_a_d_m_e_8md.html b/web/html/doc/doc_2_r_e_a_d_m_e_8md.html new file mode 100644 index 000000000..18470d129 --- /dev/null +++ b/web/html/doc/doc_2_r_e_a_d_m_e_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/README.md File Reference + + + + + + + + + + + + + +
doc/README.md File Reference
+ + + + diff --git a/web/html/doc/doc_2installation_2_r_e_a_d_m_e_8md.html b/web/html/doc/doc_2installation_2_r_e_a_d_m_e_8md.html new file mode 100644 index 000000000..a0ab3b6fe --- /dev/null +++ b/web/html/doc/doc_2installation_2_r_e_a_d_m_e_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/README.md File Reference + + + + + + + + + + + + + +
doc/installation/README.md File Reference
+ + + + diff --git a/web/html/doc/doxygen.css b/web/html/doc/doxygen.css new file mode 100644 index 000000000..ffbff0224 --- /dev/null +++ b/web/html/doc/doxygen.css @@ -0,0 +1,1793 @@ +/* The standard CSS for doxygen 1.9.1 */ + +body, table, div, p, dl { + font: 400 14px/22px Roboto,sans-serif; +} + +p.reference, p.definition { + font: 400 14px/22px Roboto,sans-serif; +} + +/* @group Heading Levels */ + +h1.groupheader { + font-size: 150%; +} + +.title { + font: 400 14px/28px Roboto,sans-serif; + font-size: 150%; + font-weight: bold; + margin: 10px 2px; +} + +h2.groupheader { + border-bottom: 1px solid #879ECB; + color: #354C7B; + font-size: 150%; + font-weight: normal; + margin-top: 1.75em; + padding-top: 8px; + padding-bottom: 4px; + width: 100%; +} + +h3.groupheader { + font-size: 100%; +} + +h1, h2, h3, h4, h5, h6 { + -webkit-transition: text-shadow 0.5s linear; + -moz-transition: text-shadow 0.5s linear; + -ms-transition: text-shadow 0.5s linear; + -o-transition: text-shadow 0.5s linear; + transition: text-shadow 0.5s linear; + margin-right: 15px; +} + +h1.glow, h2.glow, h3.glow, h4.glow, h5.glow, h6.glow { + text-shadow: 0 0 15px cyan; +} + +dt { + font-weight: bold; +} + +ul.multicol { + -moz-column-gap: 1em; + -webkit-column-gap: 1em; + column-gap: 1em; + -moz-column-count: 3; + -webkit-column-count: 3; + column-count: 3; +} + +p.startli, p.startdd { + margin-top: 2px; +} + +th p.starttd, th p.intertd, th p.endtd { + font-size: 100%; + font-weight: 700; +} + +p.starttd { + margin-top: 0px; +} + +p.endli { + margin-bottom: 0px; +} + +p.enddd { + margin-bottom: 4px; +} + +p.endtd { + margin-bottom: 2px; +} + +p.interli { +} + +p.interdd { +} + +p.intertd { +} + +/* @end */ + +caption { + font-weight: bold; +} + +span.legend { + font-size: 70%; + text-align: center; +} + +h3.version { + font-size: 90%; + text-align: center; +} + +div.navtab { + border-right: 1px solid #A3B4D7; + padding-right: 15px; + text-align: right; + line-height: 110%; +} + +div.navtab table { + border-spacing: 0; +} + +td.navtab { + padding-right: 6px; + padding-left: 6px; +} +td.navtabHL { + background-image: url('tab_a.png'); + background-repeat:repeat-x; + padding-right: 6px; + padding-left: 6px; +} + +td.navtabHL a, td.navtabHL a:visited { + color: #fff; + text-shadow: 0px 1px 1px rgba(0, 0, 0, 1.0); +} + +a.navtab { + font-weight: bold; +} + +div.qindex{ + text-align: center; + width: 100%; + line-height: 140%; + font-size: 130%; + color: #A0A0A0; +} + +dt.alphachar{ + font-size: 180%; + font-weight: bold; +} + +.alphachar a{ + color: black; +} + +.alphachar a:hover, .alphachar a:visited{ + text-decoration: none; +} + +.classindex dl { + padding: 25px; + column-count:1 +} + +.classindex dd { + display:inline-block; + margin-left: 50px; + width: 90%; + line-height: 1.15em; +} + +.classindex dl.odd { + background-color: #F8F9FC; +} + +@media(min-width: 1120px) { + .classindex dl { + column-count:2 + } +} + +@media(min-width: 1320px) { + .classindex dl { + column-count:3 + } +} + + +/* @group Link Styling */ + +a { + color: #3D578C; + font-weight: normal; + text-decoration: none; +} + +.contents a:visited { + color: #4665A2; +} + +a:hover { + text-decoration: underline; +} + +.contents a.qindexHL:visited { + color: #FFFFFF; +} + +a.el { + font-weight: bold; +} + +a.elRef { +} + +a.code, a.code:visited, a.line, a.line:visited { + color: #4665A2; +} + +a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited { + color: #4665A2; +} + +/* @end */ + +dl.el { + margin-left: -1cm; +} + +ul { + overflow: hidden; 
/*Fixed: list item bullets overlap floating elements*/ +} + +#side-nav ul { + overflow: visible; /* reset ul rule for scroll bar in GENERATE_TREEVIEW window */ +} + +#main-nav ul { + overflow: visible; /* reset ul rule for the navigation bar drop down lists */ +} + +.fragment { + text-align: left; + direction: ltr; + overflow-x: auto; /*Fixed: fragment lines overlap floating elements*/ + overflow-y: hidden; +} + +pre.fragment { + border: 1px solid #C4CFE5; + background-color: #FBFCFD; + padding: 4px 6px; + margin: 4px 8px 4px 2px; + overflow: auto; + word-wrap: break-word; + font-size: 9pt; + line-height: 125%; + font-family: monospace, fixed; + font-size: 105%; +} + +div.fragment { + padding: 0 0 1px 0; /*Fixed: last line underline overlap border*/ + margin: 4px 8px 4px 2px; + background-color: #FBFCFD; + border: 1px solid #C4CFE5; +} + +div.line { + font-family: monospace, fixed; + font-size: 13px; + min-height: 13px; + line-height: 1.0; + text-wrap: unrestricted; + white-space: -moz-pre-wrap; /* Moz */ + white-space: -pre-wrap; /* Opera 4-6 */ + white-space: -o-pre-wrap; /* Opera 7 */ + white-space: pre-wrap; /* CSS3 */ + word-wrap: break-word; /* IE 5.5+ */ + text-indent: -53px; + padding-left: 53px; + padding-bottom: 0px; + margin: 0px; + -webkit-transition-property: background-color, box-shadow; + -webkit-transition-duration: 0.5s; + -moz-transition-property: background-color, box-shadow; + -moz-transition-duration: 0.5s; + -ms-transition-property: background-color, box-shadow; + -ms-transition-duration: 0.5s; + -o-transition-property: background-color, box-shadow; + -o-transition-duration: 0.5s; + transition-property: background-color, box-shadow; + transition-duration: 0.5s; +} + +div.line:after { + content:"\000A"; + white-space: pre; +} + +div.line.glow { + background-color: cyan; + box-shadow: 0 0 10px cyan; +} + + +span.lineno { + padding-right: 4px; + text-align: right; + border-right: 2px solid #0F0; + background-color: #E8E8E8; + white-space: pre; +} +span.lineno a { + background-color: #D8D8D8; +} + +span.lineno a:hover { + background-color: #C8C8C8; +} + +.lineno { + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +div.ah, span.ah { + background-color: black; + font-weight: bold; + color: #FFFFFF; + margin-bottom: 3px; + margin-top: 3px; + padding: 0.2em; + border: solid thin #333; + border-radius: 0.5em; + -webkit-border-radius: .5em; + -moz-border-radius: .5em; + box-shadow: 2px 2px 3px #999; + -webkit-box-shadow: 2px 2px 3px #999; + -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px; + background-image: -webkit-gradient(linear, left top, left bottom, from(#eee), to(#000),color-stop(0.3, #444)); + background-image: -moz-linear-gradient(center top, #eee 0%, #444 40%, #000 110%); +} + +div.classindex ul { + list-style: none; + padding-left: 0; +} + +div.classindex span.ai { + display: inline-block; +} + +div.groupHeader { + margin-left: 16px; + margin-top: 12px; + font-weight: bold; +} + +div.groupText { + margin-left: 16px; + font-style: italic; +} + +body { + background-color: white; + color: black; + margin: 0; +} + +div.contents { + margin-top: 10px; + margin-left: 12px; + margin-right: 8px; +} + +td.indexkey { + background-color: #EBEFF6; + font-weight: bold; + border: 1px solid #C4CFE5; + margin: 2px 0px 2px 0; + padding: 2px 10px; + white-space: nowrap; + vertical-align: top; +} + +td.indexvalue { + background-color: #EBEFF6; + border: 1px solid #C4CFE5; 
+ padding: 2px 10px; + margin: 2px 0px; +} + +tr.memlist { + background-color: #EEF1F7; +} + +p.formulaDsp { + text-align: center; +} + +img.formulaDsp { + +} + +img.formulaInl, img.inline { + vertical-align: middle; +} + +div.center { + text-align: center; + margin-top: 0px; + margin-bottom: 0px; + padding: 0px; +} + +div.center img { + border: 0px; +} + +address.footer { + text-align: right; + padding-right: 12px; +} + +img.footer { + border: 0px; + vertical-align: middle; +} + +/* @group Code Colorization */ + +span.keyword { + color: #008000 +} + +span.keywordtype { + color: #604020 +} + +span.keywordflow { + color: #e08000 +} + +span.comment { + color: #800000 +} + +span.preprocessor { + color: #806020 +} + +span.stringliteral { + color: #002080 +} + +span.charliteral { + color: #008080 +} + +span.vhdldigit { + color: #ff00ff +} + +span.vhdlchar { + color: #000000 +} + +span.vhdlkeyword { + color: #700070 +} + +span.vhdllogic { + color: #ff0000 +} + +blockquote { + background-color: #F7F8FB; + border-left: 2px solid #9CAFD4; + margin: 0 24px 0 4px; + padding: 0 12px 0 16px; +} + +blockquote.DocNodeRTL { + border-left: 0; + border-right: 2px solid #9CAFD4; + margin: 0 4px 0 24px; + padding: 0 16px 0 12px; +} + +/* @end */ + +/* +.search { + color: #003399; + font-weight: bold; +} + +form.search { + margin-bottom: 0px; + margin-top: 0px; +} + +input.search { + font-size: 75%; + color: #000080; + font-weight: normal; + background-color: #e8eef2; +} +*/ + +td.tiny { + font-size: 75%; +} + +.dirtab { + padding: 4px; + border-collapse: collapse; + border: 1px solid #A3B4D7; +} + +th.dirtab { + background: #EBEFF6; + font-weight: bold; +} + +hr { + height: 0px; + border: none; + border-top: 1px solid #4A6AAA; +} + +hr.footer { + height: 1px; +} + +/* @group Member Descriptions */ + +table.memberdecls { + border-spacing: 0px; + padding: 0px; +} + +.memberdecls td, .fieldtable tr { + -webkit-transition-property: background-color, box-shadow; + -webkit-transition-duration: 0.5s; + -moz-transition-property: background-color, box-shadow; + -moz-transition-duration: 0.5s; + -ms-transition-property: background-color, box-shadow; + -ms-transition-duration: 0.5s; + -o-transition-property: background-color, box-shadow; + -o-transition-duration: 0.5s; + transition-property: background-color, box-shadow; + transition-duration: 0.5s; +} + +.memberdecls td.glow, .fieldtable tr.glow { + background-color: cyan; + box-shadow: 0 0 15px cyan; +} + +.mdescLeft, .mdescRight, +.memItemLeft, .memItemRight, +.memTemplItemLeft, .memTemplItemRight, .memTemplParams { + background-color: #F9FAFC; + border: none; + margin: 4px; + padding: 1px 0 0 8px; +} + +.mdescLeft, .mdescRight { + padding: 0px 8px 4px 8px; + color: #555; +} + +.memSeparator { + border-bottom: 1px solid #DEE4F0; + line-height: 1px; + margin: 0px; + padding: 0px; +} + +.memItemLeft, .memTemplItemLeft { + white-space: nowrap; +} + +.memItemRight, .memTemplItemRight { + width: 100%; +} + +.memTemplParams { + color: #4665A2; + white-space: nowrap; + font-size: 80%; +} + +/* @end */ + +/* @group Member Details */ + +/* Styles for detailed member documentation */ + +.memtitle { + padding: 8px; + border-top: 1px solid #A8B8D9; + border-left: 1px solid #A8B8D9; + border-right: 1px solid #A8B8D9; + border-top-right-radius: 4px; + border-top-left-radius: 4px; + margin-bottom: -1px; + background-image: url('nav_f.png'); + background-repeat: repeat-x; + background-color: #E2E8F2; + line-height: 1.25; + font-weight: 300; + float:left; +} + +.permalink +{ + 
font-size: 65%; + display: inline-block; + vertical-align: middle; +} + +.memtemplate { + font-size: 80%; + color: #4665A2; + font-weight: normal; + margin-left: 9px; +} + +.memnav { + background-color: #EBEFF6; + border: 1px solid #A3B4D7; + text-align: center; + margin: 2px; + margin-right: 15px; + padding: 2px; +} + +.mempage { + width: 100%; +} + +.memitem { + padding: 0; + margin-bottom: 10px; + margin-right: 5px; + -webkit-transition: box-shadow 0.5s linear; + -moz-transition: box-shadow 0.5s linear; + -ms-transition: box-shadow 0.5s linear; + -o-transition: box-shadow 0.5s linear; + transition: box-shadow 0.5s linear; + display: table !important; + width: 100%; +} + +.memitem.glow { + box-shadow: 0 0 15px cyan; +} + +.memname { + font-weight: 400; + margin-left: 6px; +} + +.memname td { + vertical-align: bottom; +} + +.memproto, dl.reflist dt { + border-top: 1px solid #A8B8D9; + border-left: 1px solid #A8B8D9; + border-right: 1px solid #A8B8D9; + padding: 6px 0px 6px 0px; + color: #253555; + font-weight: bold; + text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9); + background-color: #DFE5F1; + /* opera specific markup */ + box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); + border-top-right-radius: 4px; + /* firefox specific markup */ + -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px; + -moz-border-radius-topright: 4px; + /* webkit specific markup */ + -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); + -webkit-border-top-right-radius: 4px; + +} + +.overload { + font-family: "courier new",courier,monospace; + font-size: 65%; +} + +.memdoc, dl.reflist dd { + border-bottom: 1px solid #A8B8D9; + border-left: 1px solid #A8B8D9; + border-right: 1px solid #A8B8D9; + padding: 6px 10px 2px 10px; + background-color: #FBFCFD; + border-top-width: 0; + background-image:url('nav_g.png'); + background-repeat:repeat-x; + background-color: #FFFFFF; + /* opera specific markup */ + border-bottom-left-radius: 4px; + border-bottom-right-radius: 4px; + box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); + /* firefox specific markup */ + -moz-border-radius-bottomleft: 4px; + -moz-border-radius-bottomright: 4px; + -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px; + /* webkit specific markup */ + -webkit-border-bottom-left-radius: 4px; + -webkit-border-bottom-right-radius: 4px; + -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); +} + +dl.reflist dt { + padding: 5px; +} + +dl.reflist dd { + margin: 0px 0px 10px 0px; + padding: 5px; +} + +.paramkey { + text-align: right; +} + +.paramtype { + white-space: nowrap; +} + +.paramname { + color: #602020; + white-space: nowrap; +} +.paramname em { + font-style: normal; +} +.paramname code { + line-height: 14px; +} + +.params, .retval, .exception, .tparams { + margin-left: 0px; + padding-left: 0px; +} + +.params .paramname, .retval .paramname, .tparams .paramname, .exception .paramname { + font-weight: bold; + vertical-align: top; +} + +.params .paramtype, .tparams .paramtype { + font-style: italic; + vertical-align: top; +} + +.params .paramdir, .tparams .paramdir { + font-family: "courier new",courier,monospace; + vertical-align: top; +} + +table.mlabels { + border-spacing: 0px; +} + +td.mlabels-left { + width: 100%; + padding: 0px; +} + +td.mlabels-right { + vertical-align: bottom; + padding: 0px; + white-space: nowrap; +} + +span.mlabels { + margin-left: 8px; +} + +span.mlabel { + background-color: #728DC1; + border-top:1px solid #5373B4; + border-left:1px solid #5373B4; + border-right:1px solid #C4CFE5; + border-bottom:1px solid #C4CFE5; + text-shadow: none; + color: 
white; + margin-right: 4px; + padding: 2px 3px; + border-radius: 3px; + font-size: 7pt; + white-space: nowrap; + vertical-align: middle; +} + + + +/* @end */ + +/* these are for tree view inside a (index) page */ + +div.directory { + margin: 10px 0px; + border-top: 1px solid #9CAFD4; + border-bottom: 1px solid #9CAFD4; + width: 100%; +} + +.directory table { + border-collapse:collapse; +} + +.directory td { + margin: 0px; + padding: 0px; + vertical-align: top; +} + +.directory td.entry { + white-space: nowrap; + padding-right: 6px; + padding-top: 3px; +} + +.directory td.entry a { + outline:none; +} + +.directory td.entry a img { + border: none; +} + +.directory td.desc { + width: 100%; + padding-left: 6px; + padding-right: 6px; + padding-top: 3px; + border-left: 1px solid rgba(0,0,0,0.05); +} + +.directory tr.even { + padding-left: 6px; + background-color: #F7F8FB; +} + +.directory img { + vertical-align: -30%; +} + +.directory .levels { + white-space: nowrap; + width: 100%; + text-align: right; + font-size: 9pt; +} + +.directory .levels span { + cursor: pointer; + padding-left: 2px; + padding-right: 2px; + color: #3D578C; +} + +.arrow { + color: #9CAFD4; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + cursor: pointer; + font-size: 80%; + display: inline-block; + width: 16px; + height: 22px; +} + +.icon { + font-family: Arial, Helvetica; + font-weight: bold; + font-size: 12px; + height: 14px; + width: 16px; + display: inline-block; + background-color: #728DC1; + color: white; + text-align: center; + border-radius: 4px; + margin-left: 2px; + margin-right: 2px; +} + +.icona { + width: 24px; + height: 22px; + display: inline-block; +} + +.iconfopen { + width: 24px; + height: 18px; + margin-bottom: 4px; + background-image:url('folderopen.png'); + background-position: 0px -4px; + background-repeat: repeat-y; + vertical-align:top; + display: inline-block; +} + +.iconfclosed { + width: 24px; + height: 18px; + margin-bottom: 4px; + background-image:url('folderclosed.png'); + background-position: 0px -4px; + background-repeat: repeat-y; + vertical-align:top; + display: inline-block; +} + +.icondoc { + width: 24px; + height: 18px; + margin-bottom: 4px; + background-image:url('doc.png'); + background-position: 0px -4px; + background-repeat: repeat-y; + vertical-align:top; + display: inline-block; +} + +table.directory { + font: 400 14px Roboto,sans-serif; +} + +/* @end */ + +div.dynheader { + margin-top: 8px; + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +address { + font-style: normal; + color: #2A3D61; +} + +table.doxtable caption { + caption-side: top; +} + +table.doxtable { + border-collapse:collapse; + margin-top: 4px; + margin-bottom: 4px; +} + +table.doxtable td, table.doxtable th { + border: 1px solid #2D4068; + padding: 3px 7px 2px; +} + +table.doxtable th { + background-color: #374F7F; + color: #FFFFFF; + font-size: 110%; + padding-bottom: 4px; + padding-top: 5px; +} + +table.fieldtable { + /*width: 100%;*/ + margin-bottom: 10px; + border: 1px solid #A8B8D9; + border-spacing: 0px; + -moz-border-radius: 4px; + -webkit-border-radius: 4px; + border-radius: 4px; + -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px; + -webkit-box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15); + box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15); +} + +.fieldtable td, .fieldtable th { + padding: 3px 7px 2px; +} + +.fieldtable 
td.fieldtype, .fieldtable td.fieldname { + white-space: nowrap; + border-right: 1px solid #A8B8D9; + border-bottom: 1px solid #A8B8D9; + vertical-align: top; +} + +.fieldtable td.fieldname { + padding-top: 3px; +} + +.fieldtable td.fielddoc { + border-bottom: 1px solid #A8B8D9; + /*width: 100%;*/ +} + +.fieldtable td.fielddoc p:first-child { + margin-top: 0px; +} + +.fieldtable td.fielddoc p:last-child { + margin-bottom: 2px; +} + +.fieldtable tr:last-child td { + border-bottom: none; +} + +.fieldtable th { + background-image:url('nav_f.png'); + background-repeat:repeat-x; + background-color: #E2E8F2; + font-size: 90%; + color: #253555; + padding-bottom: 4px; + padding-top: 5px; + text-align:left; + font-weight: 400; + -moz-border-radius-topleft: 4px; + -moz-border-radius-topright: 4px; + -webkit-border-top-left-radius: 4px; + -webkit-border-top-right-radius: 4px; + border-top-left-radius: 4px; + border-top-right-radius: 4px; + border-bottom: 1px solid #A8B8D9; +} + + +.tabsearch { + top: 0px; + left: 10px; + height: 36px; + background-image: url('tab_b.png'); + z-index: 101; + overflow: hidden; + font-size: 13px; +} + +.navpath ul +{ + font-size: 11px; + background-image:url('tab_b.png'); + background-repeat:repeat-x; + background-position: 0 -5px; + height:30px; + line-height:30px; + color:#8AA0CC; + border:solid 1px #C2CDE4; + overflow:hidden; + margin:0px; + padding:0px; +} + +.navpath li +{ + list-style-type:none; + float:left; + padding-left:10px; + padding-right:15px; + background-image:url('bc_s.png'); + background-repeat:no-repeat; + background-position:right; + color:#364D7C; +} + +.navpath li.navelem a +{ + height:32px; + display:block; + text-decoration: none; + outline: none; + color: #283A5D; + font-family: 'Lucida Grande',Geneva,Helvetica,Arial,sans-serif; + text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9); + text-decoration: none; +} + +.navpath li.navelem a:hover +{ + color:#6884BD; +} + +.navpath li.footer +{ + list-style-type:none; + float:right; + padding-left:10px; + padding-right:15px; + background-image:none; + background-repeat:no-repeat; + background-position:right; + color:#364D7C; + font-size: 8pt; +} + + +div.summary +{ + float: right; + font-size: 8pt; + padding-right: 5px; + width: 50%; + text-align: right; +} + +div.summary a +{ + white-space: nowrap; +} + +table.classindex +{ + margin: 10px; + white-space: nowrap; + margin-left: 3%; + margin-right: 3%; + width: 94%; + border: 0; + border-spacing: 0; + padding: 0; +} + +div.ingroups +{ + font-size: 8pt; + width: 50%; + text-align: left; +} + +div.ingroups a +{ + white-space: nowrap; +} + +div.header +{ + background-image:url('nav_h.png'); + background-repeat:repeat-x; + background-color: #F9FAFC; + margin: 0px; + border-bottom: 1px solid #C4CFE5; +} + +div.headertitle +{ + padding: 5px 5px 5px 10px; +} + +.PageDocRTL-title div.headertitle { + text-align: right; + direction: rtl; +} + +dl { + padding: 0 0 0 0; +} + +/* dl.note, dl.warning, dl.attention, dl.pre, dl.post, dl.invariant, dl.deprecated, dl.todo, dl.test, dl.bug, dl.examples */ +dl.section { + margin-left: 0px; + padding-left: 0px; +} + +dl.section.DocNodeRTL { + margin-right: 0px; + padding-right: 0px; +} + +dl.note { + margin-left: -7px; + padding-left: 3px; + border-left: 4px solid; + border-color: #D0C000; +} + +dl.note.DocNodeRTL { + margin-left: 0; + padding-left: 0; + border-left: 0; + margin-right: -7px; + padding-right: 3px; + border-right: 4px solid; + border-color: #D0C000; +} + +dl.warning, dl.attention { + margin-left: -7px; + 
padding-left: 3px; + border-left: 4px solid; + border-color: #FF0000; +} + +dl.warning.DocNodeRTL, dl.attention.DocNodeRTL { + margin-left: 0; + padding-left: 0; + border-left: 0; + margin-right: -7px; + padding-right: 3px; + border-right: 4px solid; + border-color: #FF0000; +} + +dl.pre, dl.post, dl.invariant { + margin-left: -7px; + padding-left: 3px; + border-left: 4px solid; + border-color: #00D000; +} + +dl.pre.DocNodeRTL, dl.post.DocNodeRTL, dl.invariant.DocNodeRTL { + margin-left: 0; + padding-left: 0; + border-left: 0; + margin-right: -7px; + padding-right: 3px; + border-right: 4px solid; + border-color: #00D000; +} + +dl.deprecated { + margin-left: -7px; + padding-left: 3px; + border-left: 4px solid; + border-color: #505050; +} + +dl.deprecated.DocNodeRTL { + margin-left: 0; + padding-left: 0; + border-left: 0; + margin-right: -7px; + padding-right: 3px; + border-right: 4px solid; + border-color: #505050; +} + +dl.todo { + margin-left: -7px; + padding-left: 3px; + border-left: 4px solid; + border-color: #00C0E0; +} + +dl.todo.DocNodeRTL { + margin-left: 0; + padding-left: 0; + border-left: 0; + margin-right: -7px; + padding-right: 3px; + border-right: 4px solid; + border-color: #00C0E0; +} + +dl.test { + margin-left: -7px; + padding-left: 3px; + border-left: 4px solid; + border-color: #3030E0; +} + +dl.test.DocNodeRTL { + margin-left: 0; + padding-left: 0; + border-left: 0; + margin-right: -7px; + padding-right: 3px; + border-right: 4px solid; + border-color: #3030E0; +} + +dl.bug { + margin-left: -7px; + padding-left: 3px; + border-left: 4px solid; + border-color: #C08050; +} + +dl.bug.DocNodeRTL { + margin-left: 0; + padding-left: 0; + border-left: 0; + margin-right: -7px; + padding-right: 3px; + border-right: 4px solid; + border-color: #C08050; +} + +dl.section dd { + margin-bottom: 6px; +} + + +#projectlogo +{ + text-align: center; + vertical-align: bottom; + border-collapse: separate; +} + +#projectlogo img +{ + border: 0px none; +} + +#projectalign +{ + vertical-align: middle; +} + +#projectname +{ + font: 300% Tahoma, Arial,sans-serif; + margin: 0px; + padding: 2px 0px; +} + +#projectbrief +{ + font: 120% Tahoma, Arial,sans-serif; + margin: 0px; + padding: 0px; +} + +#projectnumber +{ + font: 50% Tahoma, Arial,sans-serif; + margin: 0px; + padding: 0px; +} + +#titlearea +{ + padding: 0px; + margin: 0px; + width: 100%; + border-bottom: 1px solid #5373B4; +} + +.image +{ + text-align: center; +} + +.dotgraph +{ + text-align: center; +} + +.mscgraph +{ + text-align: center; +} + +.plantumlgraph +{ + text-align: center; +} + +.diagraph +{ + text-align: center; +} + +.caption +{ + font-weight: bold; +} + +div.zoom +{ + border: 1px solid #90A5CE; +} + +dl.citelist { + margin-bottom:50px; +} + +dl.citelist dt { + color:#334975; + float:left; + font-weight:bold; + margin-right:10px; + padding:5px; + text-align:right; + width:52px; +} + +dl.citelist dd { + margin:2px 0 2px 72px; + padding:5px 0; +} + +div.toc { + padding: 14px 25px; + background-color: #F4F6FA; + border: 1px solid #D8DFEE; + border-radius: 7px 7px 7px 7px; + float: right; + height: auto; + margin: 0 8px 10px 10px; + width: 200px; +} + +.PageDocRTL-title div.toc { + float: left !important; + text-align: right; +} + +div.toc li { + background: url("bdwn.png") no-repeat scroll 0 5px transparent; + font: 10px/1.2 Verdana,DejaVu Sans,Geneva,sans-serif; + margin-top: 5px; + padding-left: 10px; + padding-top: 2px; +} + +.PageDocRTL-title div.toc li { + background-position-x: right !important; + padding-left: 0 !important; + 
padding-right: 10px; +} + +div.toc h3 { + font: bold 12px/1.2 Arial,FreeSans,sans-serif; + color: #4665A2; + border-bottom: 0 none; + margin: 0; +} + +div.toc ul { + list-style: none outside none; + border: medium none; + padding: 0px; +} + +div.toc li.level1 { + margin-left: 0px; +} + +div.toc li.level2 { + margin-left: 15px; +} + +div.toc li.level3 { + margin-left: 30px; +} + +div.toc li.level4 { + margin-left: 45px; +} + +span.emoji { + /* font family used at the site: https://unicode.org/emoji/charts/full-emoji-list.html + * font-family: "Noto Color Emoji", "Apple Color Emoji", "Segoe UI Emoji", Times, Symbola, Aegyptus, Code2000, Code2001, Code2002, Musica, serif, LastResort; + */ +} + +.PageDocRTL-title div.toc li.level1 { + margin-left: 0 !important; + margin-right: 0; +} + +.PageDocRTL-title div.toc li.level2 { + margin-left: 0 !important; + margin-right: 15px; +} + +.PageDocRTL-title div.toc li.level3 { + margin-left: 0 !important; + margin-right: 30px; +} + +.PageDocRTL-title div.toc li.level4 { + margin-left: 0 !important; + margin-right: 45px; +} + +.inherit_header { + font-weight: bold; + color: gray; + cursor: pointer; + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +.inherit_header td { + padding: 6px 0px 2px 5px; +} + +.inherit { + display: none; +} + +tr.heading h2 { + margin-top: 12px; + margin-bottom: 4px; +} + +/* tooltip related style info */ + +.ttc { + position: absolute; + display: none; +} + +#powerTip { + cursor: default; + white-space: nowrap; + background-color: white; + border: 1px solid gray; + border-radius: 4px 4px 4px 4px; + box-shadow: 1px 1px 7px gray; + display: none; + font-size: smaller; + max-width: 80%; + opacity: 0.9; + padding: 1ex 1em 1em; + position: absolute; + z-index: 2147483647; +} + +#powerTip div.ttdoc { + color: grey; + font-style: italic; +} + +#powerTip div.ttname a { + font-weight: bold; +} + +#powerTip div.ttname { + font-weight: bold; +} + +#powerTip div.ttdeci { + color: #006318; +} + +#powerTip div { + margin: 0px; + padding: 0px; + font: 12px/16px Roboto,sans-serif; +} + +#powerTip:before, #powerTip:after { + content: ""; + position: absolute; + margin: 0px; +} + +#powerTip.n:after, #powerTip.n:before, +#powerTip.s:after, #powerTip.s:before, +#powerTip.w:after, #powerTip.w:before, +#powerTip.e:after, #powerTip.e:before, +#powerTip.ne:after, #powerTip.ne:before, +#powerTip.se:after, #powerTip.se:before, +#powerTip.nw:after, #powerTip.nw:before, +#powerTip.sw:after, #powerTip.sw:before { + border: solid transparent; + content: " "; + height: 0; + width: 0; + position: absolute; +} + +#powerTip.n:after, #powerTip.s:after, +#powerTip.w:after, #powerTip.e:after, +#powerTip.nw:after, #powerTip.ne:after, +#powerTip.sw:after, #powerTip.se:after { + border-color: rgba(255, 255, 255, 0); +} + +#powerTip.n:before, #powerTip.s:before, +#powerTip.w:before, #powerTip.e:before, +#powerTip.nw:before, #powerTip.ne:before, +#powerTip.sw:before, #powerTip.se:before { + border-color: rgba(128, 128, 128, 0); +} + +#powerTip.n:after, #powerTip.n:before, +#powerTip.ne:after, #powerTip.ne:before, +#powerTip.nw:after, #powerTip.nw:before { + top: 100%; +} + +#powerTip.n:after, #powerTip.ne:after, #powerTip.nw:after { + border-top-color: #FFFFFF; + border-width: 10px; + margin: 0px -10px; +} +#powerTip.n:before { + border-top-color: #808080; + border-width: 11px; + margin: 0px -11px; +} +#powerTip.n:after, #powerTip.n:before { + left: 50%; +} 
+ +#powerTip.nw:after, #powerTip.nw:before { + right: 14px; +} + +#powerTip.ne:after, #powerTip.ne:before { + left: 14px; +} + +#powerTip.s:after, #powerTip.s:before, +#powerTip.se:after, #powerTip.se:before, +#powerTip.sw:after, #powerTip.sw:before { + bottom: 100%; +} + +#powerTip.s:after, #powerTip.se:after, #powerTip.sw:after { + border-bottom-color: #FFFFFF; + border-width: 10px; + margin: 0px -10px; +} + +#powerTip.s:before, #powerTip.se:before, #powerTip.sw:before { + border-bottom-color: #808080; + border-width: 11px; + margin: 0px -11px; +} + +#powerTip.s:after, #powerTip.s:before { + left: 50%; +} + +#powerTip.sw:after, #powerTip.sw:before { + right: 14px; +} + +#powerTip.se:after, #powerTip.se:before { + left: 14px; +} + +#powerTip.e:after, #powerTip.e:before { + left: 100%; +} +#powerTip.e:after { + border-left-color: #FFFFFF; + border-width: 10px; + top: 50%; + margin-top: -10px; +} +#powerTip.e:before { + border-left-color: #808080; + border-width: 11px; + top: 50%; + margin-top: -11px; +} + +#powerTip.w:after, #powerTip.w:before { + right: 100%; +} +#powerTip.w:after { + border-right-color: #FFFFFF; + border-width: 10px; + top: 50%; + margin-top: -10px; +} +#powerTip.w:before { + border-right-color: #808080; + border-width: 11px; + top: 50%; + margin-top: -11px; +} + +@media print +{ + #top { display: none; } + #side-nav { display: none; } + #nav-path { display: none; } + body { overflow:visible; } + h1, h2, h3, h4, h5, h6 { page-break-after: avoid; } + .summary { display: none; } + .memitem { page-break-inside: avoid; } + #doc-content + { + margin-left:0 !important; + height:auto !important; + width:auto !important; + overflow:inherit; + display:inline; + } +} + +/* @group Markdown */ + +table.markdownTable { + border-collapse:collapse; + margin-top: 4px; + margin-bottom: 4px; +} + +table.markdownTable td, table.markdownTable th { + border: 1px solid #2D4068; + padding: 3px 7px 2px; +} + +table.markdownTable tr { +} + +th.markdownTableHeadLeft, th.markdownTableHeadRight, th.markdownTableHeadCenter, th.markdownTableHeadNone { + background-color: #374F7F; + color: #FFFFFF; + font-size: 110%; + padding-bottom: 4px; + padding-top: 5px; +} + +th.markdownTableHeadLeft, td.markdownTableBodyLeft { + text-align: left +} + +th.markdownTableHeadRight, td.markdownTableBodyRight { + text-align: right +} + +th.markdownTableHeadCenter, td.markdownTableBodyCenter { + text-align: center +} + +.DocNodeRTL { + text-align: right; + direction: rtl; +} + +.DocNodeLTR { + text-align: left; + direction: ltr; +} + +table.DocNodeRTL { + width: auto; + margin-right: 0; + margin-left: auto; +} + +table.DocNodeLTR { + width: auto; + margin-right: auto; + margin-left: 0; +} + +tt, code, kbd, samp +{ + display: inline-block; + direction:ltr; +} +/* @end */ + +u { + text-decoration: underline; +} + diff --git a/web/html/doc/doxygen.svg b/web/html/doc/doxygen.svg new file mode 100644 index 000000000..d42dad52d --- /dev/null +++ b/web/html/doc/doxygen.svg @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/web/html/doc/dynsections.js b/web/html/doc/dynsections.js new file mode 100644 index 000000000..88f2c27e6 --- /dev/null +++ b/web/html/doc/dynsections.js @@ -0,0 +1,128 @@ +/* + @licstart The following is the entire license notice for the JavaScript code in this file. 
+ + The MIT License (MIT) + + Copyright (C) 1997-2020 by Dimitri van Heesch + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software + and associated documentation files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, publish, distribute, + sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or + substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + @licend The above is the entire license notice for the JavaScript code in this file + */ +function toggleVisibility(linkObj) +{ + var base = $(linkObj).attr('id'); + var summary = $('#'+base+'-summary'); + var content = $('#'+base+'-content'); + var trigger = $('#'+base+'-trigger'); + var src=$(trigger).attr('src'); + if (content.is(':visible')===true) { + content.hide(); + summary.show(); + $(linkObj).addClass('closed').removeClass('opened'); + $(trigger).attr('src',src.substring(0,src.length-8)+'closed.png'); + } else { + content.show(); + summary.hide(); + $(linkObj).removeClass('closed').addClass('opened'); + $(trigger).attr('src',src.substring(0,src.length-10)+'open.png'); + } + return false; +} + +function updateStripes() +{ + $('table.directory tr'). + removeClass('even').filter(':visible:even').addClass('even'); +} + +function toggleLevel(level) +{ + $('table.directory tr').each(function() { + var l = this.id.split('_').length-1; + var i = $('#img'+this.id.substring(3)); + var a = $('#arr'+this.id.substring(3)); + if (l + + + + + + +OpenPose: include/openpose/utilities/errorAndLog.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
errorAndLog.hpp File Reference
+
+
+
#include <sstream>
+#include <string>
+#include <vector>
+#include <openpose/core/macros.hpp>
+#include <openpose/utilities/enumClasses.hpp>
+
+

Go to the source code of this file.

+ + + + + + + + +

+Namespaces

 op
 
 op::ConfigureError
 
 op::ConfigureLog
 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Functions

OP_API void op::setMainThread ()
 
OP_API std::string op::getThreadId ()
 
OP_API bool op::getIfInMainThreadOrEmpty ()
 
OP_API bool op::getIfNotInMainThreadOrEmpty ()
 
template<typename T >
std::string op::tToString (const T &message)
 
OP_API void op::error (const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T >
void op::error (const T &message, const int line=-1, const std::string &function="", const std::string &file="")
 
OP_API void op::checkWorkerErrors ()
 
OP_API void op::errorWorker (const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T >
void op::errorWorker (const T &message, const int line=-1, const std::string &function="", const std::string &file="")
 
OP_API void op::errorDestructor (const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T >
void op::errorDestructor (const T &message, const int line=-1, const std::string &function="", const std::string &file="")
 
OP_API void op::opLog (const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T >
void op::opLog (const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
 
template<typename T >
void op::opLogIfDebug (const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
 
OP_API std::vector< ErrorMode > op::ConfigureError::getErrorModes ()
 
OP_API void op::ConfigureError::setErrorModes (const std::vector< ErrorMode > &errorModes)
 
OP_API Priority op::ConfigureLog::getPriorityThreshold ()
 
OP_API const std::vector< LogMode > & op::ConfigureLog::getLogModes ()
 
OP_API void op::ConfigureLog::setPriorityThreshold (const Priority priorityThreshold)
 
OP_API void op::ConfigureLog::setLogModes (const std::vector< LogMode > &loggingModes)
 
+
+
+ + + + diff --git a/web/html/doc/error_and_log_8hpp.js b/web/html/doc/error_and_log_8hpp.js new file mode 100644 index 000000000..96ec77465 --- /dev/null +++ b/web/html/doc/error_and_log_8hpp.js @@ -0,0 +1,24 @@ +var error_and_log_8hpp = +[ + [ "checkWorkerErrors", "error_and_log_8hpp.html#a865a4cd0ba3b596667dc7242756837bd", null ], + [ "error", "error_and_log_8hpp.html#a5f092bd36c716a894cb035e1ead2aca3", null ], + [ "error", "error_and_log_8hpp.html#a42d364d9fbd1a719341bd7187d97cf18", null ], + [ "errorDestructor", "error_and_log_8hpp.html#a825f15fdf9dc9cb7473c20f970f15b60", null ], + [ "errorDestructor", "error_and_log_8hpp.html#a758b08be140e27dd2642d286a383be54", null ], + [ "errorWorker", "error_and_log_8hpp.html#a61af88aac41ef77ab4e8816023fe32f0", null ], + [ "errorWorker", "error_and_log_8hpp.html#a96d1720ea5d160cfd4c8404060a9bebd", null ], + [ "getErrorModes", "error_and_log_8hpp.html#ae8dbbccc9a2ca8a4670716ac5fdd8d53", null ], + [ "getIfInMainThreadOrEmpty", "error_and_log_8hpp.html#ad5e1c975a1b7dce9b02bc8cdf3d45a01", null ], + [ "getIfNotInMainThreadOrEmpty", "error_and_log_8hpp.html#abdedc8f1fd2f723dae5bb8ff20b93a93", null ], + [ "getLogModes", "error_and_log_8hpp.html#a5ab07ae8c026e4f7782a113778d9082d", null ], + [ "getPriorityThreshold", "error_and_log_8hpp.html#a0e5c3fad2ace3eb129dd1d97afd59558", null ], + [ "getThreadId", "error_and_log_8hpp.html#a5a3db1a0d272d8fb5ea723845beee150", null ], + [ "opLog", "error_and_log_8hpp.html#aa72861fea0671209aca1ea5fa385891a", null ], + [ "opLog", "error_and_log_8hpp.html#a838b69fead43c8a848d059b5f9d63baf", null ], + [ "opLogIfDebug", "error_and_log_8hpp.html#a91dd00cbb8fb646e6612455eb0f1b3e9", null ], + [ "setErrorModes", "error_and_log_8hpp.html#a96e56b0ddbe2cb17443b93aaba05d672", null ], + [ "setLogModes", "error_and_log_8hpp.html#a2f41e9a74bbda434ef16189c32a13aba", null ], + [ "setMainThread", "error_and_log_8hpp.html#a7eb0121791185c13a6c3dd88994e0eab", null ], + [ "setPriorityThreshold", "error_and_log_8hpp.html#a149393c3c87c82a5cf14417c6b430d30", null ], + [ "tToString", "error_and_log_8hpp.html#af548fe1a2ad2b392a25afe9b0b87b8dd", null ] +]; \ No newline at end of file diff --git a/web/html/doc/error_and_log_8hpp_source.html b/web/html/doc/error_and_log_8hpp_source.html new file mode 100644 index 000000000..39d2763b0 --- /dev/null +++ b/web/html/doc/error_and_log_8hpp_source.html @@ -0,0 +1,251 @@ + + + + + + + +OpenPose: include/openpose/utilities/errorAndLog.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
errorAndLog.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_UTILITIES_ERROR_AND_LOG_HPP
+
2 #define OPENPOSE_UTILITIES_ERROR_AND_LOG_HPP
+
3 
+
4 #include <sstream> // std::stringstream
+
5 #include <string>
+
6 #include <vector>
+ + +
9 
+
10 namespace op
+
11 {
+ +
13 
+
14  OP_API std::string getThreadId();
+
15 
+ +
17 
+ +
19 
+
20  template<typename T>
+
21  std::string tToString(const T& message)
+
22  {
+
23  // Message -> ostringstream
+
24  std::ostringstream oss;
+
25  oss << message;
+
26  // ostringstream -> std::string
+
27  return oss.str();
+
28  }
+
29 
+
39  // Error management - How to use:
+
40  // error(message, __LINE__, __FUNCTION__, __FILE__);
+
41  OP_API void error(
+
42  const std::string& message, const int line = -1, const std::string& function = "",
+
43  const std::string& file = "");
+
44 
+
45  template<typename T>
+
46  inline void error(
+
47  const T& message, const int line = -1, const std::string& function = "", const std::string& file = "")
+
48  {
+
49  error(tToString(message), line, function, file);
+
50  }
+
51 
+
52  // Worker error management
+ +
54 
+ +
56  const std::string& message, const int line = -1, const std::string& function = "",
+
57  const std::string& file = "");
+
58 
+
59  template<typename T>
+
60  inline void errorWorker(
+
61  const T& message, const int line = -1, const std::string& function = "", const std::string& file = "")
+
62  {
+
63  errorWorker(tToString(message), line, function, file);
+
64  }
+
65 
+
66  // Destructor error management
+ +
68  const std::string& message, const int line = -1, const std::string& function = "",
+
69  const std::string& file = "");
+
70 
+
71  template<typename T>
+
72  inline void errorDestructor(
+
73  const T& message, const int line = -1, const std::string& function = "", const std::string& file = "")
+
74  {
+
75  errorDestructor(tToString(message), line, function, file);
+
76  }
+
77 
+
78  // Printing info - How to use:
+
79  // It will print info if desiredPriority >= sPriorityThreshold
+
80  // opLog(message, desiredPriority, __LINE__, __FUNCTION__, __FILE__);
+
81  OP_API void opLog(
+
82  const std::string& message, const Priority priority = Priority::Max, const int line = -1,
+
83  const std::string& function = "", const std::string& file = "");
+
84 
+
85  template<typename T>
+
86  inline void opLog(
+
87  const T& message, const Priority priority = Priority::Max, const int line = -1,
+
88  const std::string& function = "", const std::string& file = "")
+
89  {
+
90  opLog(tToString(message), priority, line, function, file);
+
91  }
+
92 
+
93  // If only desired on debug mode (no computational cost at all on release mode):
+
94  // It will print info if desiredPriority >= sPriorityThreshold
+
95  // opLogIfDebug(message, desiredPriority, __LINE__, __FUNCTION__, __FILE__);
+
96  template<typename T>
+
97  inline void opLogIfDebug(
+
98  const T& message, const Priority priority = Priority::Max, const int line = -1,
+
99  const std::string& function = "", const std::string& file = "")
+
100  {
+
101  #ifndef NDEBUG
+
102  opLog(message, priority, line, function, file);
+
103  #else
+
104  UNUSED(message);
+
105  UNUSED(priority);
+
106  UNUSED(line);
+
107  UNUSED(function);
+
108  UNUSED(file);
+
109  #endif
+
110  }
+
111 
+
112  // This namespace is thread-safe
+
113  namespace ConfigureError
+
114  {
+
115  OP_API std::vector<ErrorMode> getErrorModes();
+
116 
+
117  OP_API void setErrorModes(const std::vector<ErrorMode>& errorModes);
+
118  }
+
119 
+
120  // This namespace is not fully thread-safe
+
121  namespace ConfigureLog
+
122  {
+ +
124 
+
125  OP_API const std::vector<LogMode>& getLogModes();
+
126 
+
127  // This function is not thread-safe. It must be run at the beginning
+
128  OP_API void setPriorityThreshold(const Priority priorityThreshold);
+
129 
+
130  // This function is not thread-safe. It must be run at the beginning
+
131  OP_API void setLogModes(const std::vector<LogMode>& loggingModes);
+
132  }
+
133 }
+
134 
+
135 #endif // OPENPOSE_UTILITIES_ERROR_AND_LOG_HPP
+ +
#define OP_API
Definition: macros.hpp:18
+
#define UNUSED(unusedVariable)
Definition: macros.hpp:30
+
OP_API void setErrorModes(const std::vector< ErrorMode > &errorModes)
+
OP_API std::vector< ErrorMode > getErrorModes()
+
OP_API Priority getPriorityThreshold()
+
OP_API void setPriorityThreshold(const Priority priorityThreshold)
+
OP_API void setLogModes(const std::vector< LogMode > &loggingModes)
+
OP_API const std::vector< LogMode > & getLogModes()
+ +
OP_API std::string getThreadId()
+
OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
+
OP_API void errorWorker(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
+
OP_API void setMainThread()
+
OP_API void errorDestructor(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
+
OP_API void checkWorkerErrors()
+
void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
Definition: errorAndLog.hpp:97
+
OP_API void opLog(const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
+
OP_API bool getIfNotInMainThreadOrEmpty()
+
OP_API bool getIfInMainThreadOrEmpty()
+
Priority
Definition: enumClasses.hpp:22
+ +
std::string tToString(const T &message)
Definition: errorAndLog.hpp:21
+ +
+
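The errorAndLog.hpp pages above only hint at usage through the inline "How to use" comments. As a minimal, hedged sketch (not part of the generated pages), the helpers could be exercised as below; it assumes the default error mode throws a std::exception-derived type, which this header does not state explicitly.

#include <stdexcept>
#include <string>
#include <openpose/utilities/errorAndLog.hpp>

int main()
{
    // Only log messages at Priority::Max; the header notes this setter is not
    // thread-safe and must be run at the beginning.
    op::ConfigureLog::setPriorityThreshold(op::Priority::Max);

    // opLog accepts std::string or any streamable type (routed through tToString).
    op::opLog("Demo started", op::Priority::Max, __LINE__, __FUNCTION__, __FILE__);
    op::opLog(42, op::Priority::Max, __LINE__, __FUNCTION__, __FILE__);

    try
    {
        // error() reports the message with its source location; whether it throws
        // depends on the configured ErrorMode (assumed to throw here).
        op::error("Something went wrong", __LINE__, __FUNCTION__, __FILE__);
    }
    catch (const std::exception& e)
    {
        op::opLog(std::string{"Recovered: "} + e.what(), op::Priority::Max,
                  __LINE__, __FUNCTION__, __FILE__);
    }
    return 0;
}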
+ + + + diff --git a/web/html/doc/face_2headers_8hpp.html b/web/html/doc/face_2headers_8hpp.html new file mode 100644 index 000000000..b083056fd --- /dev/null +++ b/web/html/doc/face_2headers_8hpp.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: include/openpose/face/headers.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/face_2headers_8hpp_source.html b/web/html/doc/face_2headers_8hpp_source.html new file mode 100644 index 000000000..6418515ba --- /dev/null +++ b/web/html/doc/face_2headers_8hpp_source.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/face/headers.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/face_cpu_renderer_8hpp.html b/web/html/doc/face_cpu_renderer_8hpp.html new file mode 100644 index 000000000..dadaefb1e --- /dev/null +++ b/web/html/doc/face_cpu_renderer_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/face/faceCpuRenderer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceCpuRenderer.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FaceCpuRenderer
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/face_cpu_renderer_8hpp_source.html b/web/html/doc/face_cpu_renderer_8hpp_source.html new file mode 100644 index 000000000..cb6f2a7cc --- /dev/null +++ b/web/html/doc/face_cpu_renderer_8hpp_source.html @@ -0,0 +1,142 @@ + + + + + + + +OpenPose: include/openpose/face/faceCpuRenderer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceCpuRenderer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_CPU_RENDERER_HPP
+
2 #define OPENPOSE_FACE_FACE_CPU_RENDERER_HPP
+
3 
+ + + + +
8 
+
9 namespace op
+
10 {
+ +
12  {
+
13  public:
+
14  FaceCpuRenderer(const float renderThreshold, const float alphaKeypoint = FACE_DEFAULT_ALPHA_KEYPOINT,
+
15  const float alphaHeatMap = FACE_DEFAULT_ALPHA_HEAT_MAP);
+
16 
+
17  virtual ~FaceCpuRenderer();
+
18 
+
19  void renderFaceInherited(Array<float>& outputData, const Array<float>& faceKeypoints);
+
20 
+ +
22  };
+
23 }
+
24 
+
25 #endif // OPENPOSE_FACE_FACE_CPU_RENDERER_HPP
+ + +
DELETE_COPY(FaceCpuRenderer)
+
virtual ~FaceCpuRenderer()
+
void renderFaceInherited(Array< float > &outputData, const Array< float > &faceKeypoints)
+
FaceCpuRenderer(const float renderThreshold, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP)
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+ +
const auto FACE_DEFAULT_ALPHA_KEYPOINT
+
const auto FACE_DEFAULT_ALPHA_HEAT_MAP
+ +
+
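As a rough usage sketch for the class above (assuming the output image buffer and the face keypoints are produced elsewhere in the pipeline, e.g. by a FaceExtractorNet), a CPU render call could look like this; the 0.4 threshold is an arbitrary illustrative value and the helper name is hypothetical.

#include <openpose/face/faceCpuRenderer.hpp>

// Hypothetical helper: blend face keypoints into an already-allocated output buffer.
void renderFacesOnOutput(op::Array<float>& outputData,          // rendered image buffer from the pipeline
                         const op::Array<float>& faceKeypoints, // e.g. FaceExtractorNet::getFaceKeypoints()
                         const float scaleInputToOutput)
{
    op::FaceCpuRenderer faceRenderer{0.4f};  // render threshold (illustrative)
    faceRenderer.initializationOnThread();
    // renderFace() is inherited from the FaceRenderer base (documented further below in this patch).
    faceRenderer.renderFace(outputData, faceKeypoints, scaleInputToOutput);
}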
+ + + + diff --git a/web/html/doc/face_detector_8hpp.html b/web/html/doc/face_detector_8hpp.html new file mode 100644 index 000000000..5ca05ad7a --- /dev/null +++ b/web/html/doc/face_detector_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/face/faceDetector.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceDetector.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FaceDetector
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/face_detector_8hpp_source.html b/web/html/doc/face_detector_8hpp_source.html new file mode 100644 index 000000000..59352afb7 --- /dev/null +++ b/web/html/doc/face_detector_8hpp_source.html @@ -0,0 +1,142 @@ + + + + + + + +OpenPose: include/openpose/face/faceDetector.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceDetector.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_DETECTOR_HPP
+
2 #define OPENPOSE_FACE_FACE_DETECTOR_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
10  {
+
11  public:
+
12  explicit FaceDetector(const PoseModel poseModel);
+
13 
+
14  virtual ~FaceDetector();
+
15 
+
16  std::vector<Rectangle<float>> detectFaces(const Array<float>& poseKeypoints) const;
+
17 
+
18  private:
+
19  const unsigned int mNeck;
+
20  const unsigned int mNose;
+
21  const unsigned int mLEar;
+
22  const unsigned int mREar;
+
23  const unsigned int mLEye;
+
24  const unsigned int mREye;
+
25 
+ +
27  };
+
28 }
+
29 
+
30 #endif // OPENPOSE_FACE_FACE_DETECTOR_HPP
+ + +
virtual ~FaceDetector()
+
std::vector< Rectangle< float > > detectFaces(const Array< float > &poseKeypoints) const
+
FaceDetector(const PoseModel poseModel)
+ +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
PoseModel
Definition: enumClasses.hpp:10
+ +
+
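A hedged sketch of the pose-driven detector above: given body keypoints already computed by the pose estimator, detectFaces() returns one face rectangle per person. BODY_25 is assumed as the pose model here; the detector relies on the neck/nose/ear/eye indices of whichever model is passed in.

#include <vector>
#include <openpose/face/faceDetector.hpp>

std::vector<op::Rectangle<float>> facesFromPose(const op::Array<float>& poseKeypoints)
{
    // poseKeypoints is the {people x parts x 3} array produced by the body pose extractor.
    const op::FaceDetector faceDetector{op::PoseModel::BODY_25};  // BODY_25 assumed
    return faceDetector.detectFaces(poseKeypoints);
}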
+ + + + diff --git a/web/html/doc/face_detector_open_c_v_8hpp.html b/web/html/doc/face_detector_open_c_v_8hpp.html new file mode 100644 index 000000000..e274e8450 --- /dev/null +++ b/web/html/doc/face_detector_open_c_v_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/face/faceDetectorOpenCV.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceDetectorOpenCV.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FaceDetectorOpenCV
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/face_detector_open_c_v_8hpp_source.html b/web/html/doc/face_detector_open_c_v_8hpp_source.html new file mode 100644 index 000000000..0975f855c --- /dev/null +++ b/web/html/doc/face_detector_open_c_v_8hpp_source.html @@ -0,0 +1,138 @@ + + + + + + + +OpenPose: include/openpose/face/faceDetectorOpenCV.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceDetectorOpenCV.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_DETECTOR_OPENCV_HPP
+
2 #define OPENPOSE_FACE_FACE_DETECTOR_OPENCV_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+ +
9  {
+
10  public:
+
11  explicit FaceDetectorOpenCV(const std::string& modelFolder);
+
12 
+ +
14 
+
15  // Not thread-safe
+
16  std::vector<Rectangle<float>> detectFaces(const Matrix& inputData);
+
17 
+
18  private:
+
19  // PIMPL idiom
+
20  // http://www.cppsamples.com/common-tasks/pimpl.html
+
21  struct ImplFaceDetectorOpenCV;
+
22  std::unique_ptr<ImplFaceDetectorOpenCV> upImpl;
+
23 
+ +
25  };
+
26 }
+
27 
+
28 #endif // OPENPOSE_FACE_FACE_DETECTOR_OPENCV_HPP
+ +
virtual ~FaceDetectorOpenCV()
+
FaceDetectorOpenCV(const std::string &modelFolder)
+
std::vector< Rectangle< float > > detectFaces(const Matrix &inputData)
+ + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
+
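Unlike the pose-driven FaceDetector earlier, the class above locates faces directly in the input image, and its header marks it as not thread-safe. A minimal sketch, assuming modelFolder points at the usual OpenPose models directory:

#include <string>
#include <vector>
#include <openpose/face/faceDetectorOpenCV.hpp>

std::vector<op::Rectangle<float>> facesFromImage(const op::Matrix& inputImage,
                                                 const std::string& modelFolder)  // e.g. "models/" (assumed)
{
    // Not thread-safe (per the header comment): keep one instance per thread.
    op::FaceDetectorOpenCV faceDetectorOpenCV{modelFolder};
    return faceDetectorOpenCV.detectFaces(inputImage);
}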
+ + + + diff --git a/web/html/doc/face_extractor_caffe_8hpp.html b/web/html/doc/face_extractor_caffe_8hpp.html new file mode 100644 index 000000000..f5188e60d --- /dev/null +++ b/web/html/doc/face_extractor_caffe_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/face/faceExtractorCaffe.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceExtractorCaffe.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FaceExtractorCaffe
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/face_extractor_caffe_8hpp_source.html b/web/html/doc/face_extractor_caffe_8hpp_source.html new file mode 100644 index 000000000..abb31fa81 --- /dev/null +++ b/web/html/doc/face_extractor_caffe_8hpp_source.html @@ -0,0 +1,155 @@ + + + + + + + +OpenPose: include/openpose/face/faceExtractorCaffe.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceExtractorCaffe.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_EXTRACTOR_CAFFE_HPP
+
2 #define OPENPOSE_FACE_FACE_EXTRACTOR_CAFFE_HPP
+
3 
+ + + +
7 
+
8 namespace op
+
9 {
+ +
14  {
+
15  public:
+
21  FaceExtractorCaffe(const Point<int>& netInputSize, const Point<int>& netOutputSize,
+
22  const std::string& modelFolder, const int gpuId,
+
23  const std::vector<HeatMapType>& heatMapTypes = {},
+
24  const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect,
+
25  const bool enableGoogleLogging = true);
+
26 
+ +
28 
+ +
34 
+
43  void forwardPass(const std::vector<Rectangle<float>>& faceRectangles, const Matrix& inputData);
+
44 
+
45  private:
+
46  // PIMPL idiom
+
47  // http://www.cppsamples.com/common-tasks/pimpl.html
+
48  struct ImplFaceExtractorCaffe;
+
49  std::unique_ptr<ImplFaceExtractorCaffe> upImpl;
+
50 
+
51  // PIMPL requires DELETE_COPY & destructor, or extra code
+
52  // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+ +
54  };
+
55 }
+
56 
+
57 #endif // OPENPOSE_FACE_FACE_EXTRACTOR_CAFFE_HPP
+ +
virtual ~FaceExtractorCaffe()
+
void forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)
+ +
FaceExtractorCaffe(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true)
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
ScaleMode
Definition: enumClasses.hpp:7
+ + + +
+
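Tying the detector and the Caffe extractor above together, a single-threaded sketch might look as follows; the 368x368 net resolution, the "models/" folder and the BODY_25 model are assumptions rather than values taken from this header.

#include <string>
#include <openpose/face/faceDetector.hpp>
#include <openpose/face/faceExtractorCaffe.hpp>

op::Array<float> extractFaceKeypoints(const op::Array<float>& poseKeypoints,
                                      const op::Matrix& inputImage,
                                      const std::string& modelFolder)  // e.g. "models/" (assumed)
{
    const op::Point<int> faceNetSize{368, 368};  // assumed/typical face network resolution
    op::FaceExtractorCaffe faceExtractor{faceNetSize, faceNetSize, modelFolder, /*gpuId*/ 0};
    faceExtractor.initializationOnThread();      // run on the same thread that calls forwardPass

    const op::FaceDetector faceDetector{op::PoseModel::BODY_25};  // BODY_25 assumed
    const auto faceRectangles = faceDetector.detectFaces(poseKeypoints);

    faceExtractor.forwardPass(faceRectangles, inputImage);
    // One row per face, FACE_NUMBER_PARTS (70) keypoints, 3 values each (x, y, score).
    return faceExtractor.getFaceKeypoints();
}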
+ + + + diff --git a/web/html/doc/face_extractor_net_8hpp.html b/web/html/doc/face_extractor_net_8hpp.html new file mode 100644 index 000000000..637d6e1e9 --- /dev/null +++ b/web/html/doc/face_extractor_net_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/face/faceExtractorNet.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceExtractorNet.hpp File Reference
+
+
+
#include <atomic>
+#include <openpose/core/common.hpp>
+#include <openpose/core/enumClasses.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Classes

class  op::FaceExtractorNet
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/face_extractor_net_8hpp_source.html b/web/html/doc/face_extractor_net_8hpp_source.html new file mode 100644 index 000000000..562640f22 --- /dev/null +++ b/web/html/doc/face_extractor_net_8hpp_source.html @@ -0,0 +1,183 @@ + + + + + + + +OpenPose: include/openpose/face/faceExtractorNet.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceExtractorNet.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_EXTRACTOR_HPP
+
2 #define OPENPOSE_FACE_FACE_EXTRACTOR_HPP
+
3 
+
4 #include <atomic>
+ + +
7 
+
8 namespace op
+
9 {
+ +
14  {
+
15  public:
+
21  explicit FaceExtractorNet(const Point<int>& netInputSize, const Point<int>& netOutputSize,
+
22  const std::vector<HeatMapType>& heatMapTypes = {},
+
23  const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect);
+
24 
+
29  virtual ~FaceExtractorNet();
+
30 
+ +
36 
+
45  virtual void forwardPass(const std::vector<Rectangle<float>>& faceRectangles, const Matrix& inputData) = 0;
+
46 
+ +
48 
+ +
57 
+
58  bool getEnabled() const;
+
59 
+
60  void setEnabled(const bool enabled);
+
61 
+
62  protected:
+ + + +
66  // HeatMaps parameters
+ + +
69  const std::vector<HeatMapType> mHeatMapTypes;
+
70  // Temporarily disable it
+
71  std::atomic<bool> mEnabled;
+
72 
+
73  virtual void netInitializationOnThread() = 0;
+
74 
+
75  private:
+
76  // Init with thread
+
77  std::thread::id mThreadId;
+
78 
+
79  void checkThread() const;
+
80 
+ +
82  };
+
83 }
+
84 
+
85 #endif // OPENPOSE_FACE_FACE_EXTRACTOR_HPP
+ + +
FaceExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)
+
bool getEnabled() const
+
Array< float > getHeatMaps() const
+
const std::vector< HeatMapType > mHeatMapTypes
+
Array< float > mHeatMaps
+
virtual ~FaceExtractorNet()
+
Array< float > mFaceKeypoints
+
std::atomic< bool > mEnabled
+
virtual void netInitializationOnThread()=0
+
void setEnabled(const bool enabled)
+
virtual void forwardPass(const std::vector< Rectangle< float >> &faceRectangles, const Matrix &inputData)=0
+ +
const ScaleMode mHeatMapScaleMode
+
const Point< int > mNetOutputSize
+
Array< float > mFaceImageCrop
+
Array< float > getFaceKeypoints() const
+ + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
ScaleMode
Definition: enumClasses.hpp:7
+ + + +
+
+ + + + diff --git a/web/html/doc/face_gpu_renderer_8hpp.html b/web/html/doc/face_gpu_renderer_8hpp.html new file mode 100644 index 000000000..f31a37de5 --- /dev/null +++ b/web/html/doc/face_gpu_renderer_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/face/faceGpuRenderer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceGpuRenderer.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/face_gpu_renderer_8hpp_source.html b/web/html/doc/face_gpu_renderer_8hpp_source.html new file mode 100644 index 000000000..cb5ea3138 --- /dev/null +++ b/web/html/doc/face_gpu_renderer_8hpp_source.html @@ -0,0 +1,152 @@ + + + + + + + +OpenPose: include/openpose/face/faceGpuRenderer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceGpuRenderer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_GPU_RENDERER_HPP
+
2 #define OPENPOSE_FACE_FACE_GPU_RENDERER_HPP
+
3 
+ + + + +
8 
+
9 namespace op
+
10 {
+ +
12  {
+
13  public:
+
14  FaceGpuRenderer(const float renderThreshold,
+
15  const float alphaKeypoint = FACE_DEFAULT_ALPHA_KEYPOINT,
+
16  const float alphaHeatMap = FACE_DEFAULT_ALPHA_HEAT_MAP);
+
17 
+
18  virtual ~FaceGpuRenderer();
+
19 
+ +
21 
+
22  void renderFaceInherited(Array<float>& outputData, const Array<float>& faceKeypoints);
+
23 
+
24  private:
+
25  float* pGpuFace; // GPU aux memory
+
26  float* pMaxPtr; // GPU aux memory
+
27  float* pMinPtr; // GPU aux memory
+
28  float* pScalePtr; // GPU aux memory
+
29 
+ +
31  };
+
32 }
+
33 
+
34 #endif // OPENPOSE_FACE_FACE_GPU_RENDERER_HPP
+ + +
FaceGpuRenderer(const float renderThreshold, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP)
+
void initializationOnThread()
+
virtual ~FaceGpuRenderer()
+
void renderFaceInherited(Array< float > &outputData, const Array< float > &faceKeypoints)
+ + + + + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
const auto FACE_DEFAULT_ALPHA_KEYPOINT
+
const auto FACE_DEFAULT_ALPHA_HEAT_MAP
+
+
+ + + + diff --git a/web/html/doc/face_parameters_8hpp.html b/web/html/doc/face_parameters_8hpp.html new file mode 100644 index 000000000..f12228060 --- /dev/null +++ b/web/html/doc/face_parameters_8hpp.html @@ -0,0 +1,200 @@ + + + + + + + +OpenPose: include/openpose/face/faceParameters.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceParameters.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + +

+Macros

#define FACE_PAIRS_RENDER_GPU
 
#define FACE_SCALES_RENDER_GPU   1
 
#define FACE_COLORS_RENDER_GPU   255.f, 255.f, 255.f
 
+ + + + + + + + + + + + + + + + + + + + + +

+Variables

const auto op::FACE_MAX_FACES = POSE_MAX_PEOPLE
 
const auto op::FACE_NUMBER_PARTS = 70u
 
const std::vector< unsigned int > op::FACE_PAIRS_RENDER {FACE_PAIRS_RENDER_GPU}
 
const std::vector< float > op::FACE_COLORS_RENDER {FACE_COLORS_RENDER_GPU}
 
const std::vector< float > op::FACE_SCALES_RENDER {FACE_SCALES_RENDER_GPU}
 
const auto op::FACE_CCN_DECREASE_FACTOR = 8.f
 
const std::string op::FACE_PROTOTXT {"face/pose_deploy.prototxt"}
 
const std::string op::FACE_TRAINED_MODEL {"face/pose_iter_116000.caffemodel"}
 
const auto op::FACE_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT
 
const auto op::FACE_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP
 
+

Macro Definition Documentation

+ +

◆ FACE_COLORS_RENDER_GPU

+ +
+
+ + + + +
#define FACE_COLORS_RENDER_GPU   255.f, 255.f, 255.f
+
+ +

Definition at line 19 of file faceParameters.hpp.

+ +
+
+ +

◆ FACE_PAIRS_RENDER_GPU

+ +
+
+ + + + +
#define FACE_PAIRS_RENDER_GPU
+
+Value:
0,1, 1,2, 2,3, 3,4, 4,5, 5,6, 6,7, 7,8, 8,9, 9,10, 10,11, 11,12, 12,13, 13,14, 14,15, 15,16, 17,18, 18,19, 19,20, \
+
20,21, 22,23, 23,24, 24,25, 25,26, 27,28, 28,29, 29,30, 31,32, 32,33, 33,34, 34,35, 36,37, 37,38, 38,39, 39,40, 40,41, \
+
41,36, 42,43, 43,44, 44,45, 45,46, 46,47, 47,42, 48,49, 49,50, 50,51, 51,52, 52,53, 53,54, 54,55, 55,56, 56,57, 57,58, \
+
58,59, 59,48, 60,61, 61,62, 62,63, 63,64, 64,65, 65,66, 66,67, 67,60
+
+

Definition at line 12 of file faceParameters.hpp.

+ +
+
+ +

◆ FACE_SCALES_RENDER_GPU

+ +
+
+ + + + +
#define FACE_SCALES_RENDER_GPU   1
+
+ +

Definition at line 17 of file faceParameters.hpp.

+ +
+
+
+
+ + + + diff --git a/web/html/doc/face_parameters_8hpp.js b/web/html/doc/face_parameters_8hpp.js new file mode 100644 index 000000000..5c5ed1cb2 --- /dev/null +++ b/web/html/doc/face_parameters_8hpp.js @@ -0,0 +1,16 @@ +var face_parameters_8hpp = +[ + [ "FACE_COLORS_RENDER_GPU", "face_parameters_8hpp.html#a740a6228babfde5f18fba6fc033ef0ed", null ], + [ "FACE_PAIRS_RENDER_GPU", "face_parameters_8hpp.html#a7e2f64c1349d6a881c6ceb49757e099a", null ], + [ "FACE_SCALES_RENDER_GPU", "face_parameters_8hpp.html#a1a7ddb1a137c44091a1b4161725adfa0", null ], + [ "FACE_CCN_DECREASE_FACTOR", "face_parameters_8hpp.html#aa6701cc08e1a8651798ef3bf8437375b", null ], + [ "FACE_COLORS_RENDER", "face_parameters_8hpp.html#a3fbae1778780ae5bf4ffcc84cdef1870", null ], + [ "FACE_DEFAULT_ALPHA_HEAT_MAP", "face_parameters_8hpp.html#a4f191afed46fea5e3ce5b2a8756e1ddd", null ], + [ "FACE_DEFAULT_ALPHA_KEYPOINT", "face_parameters_8hpp.html#a15f6c39797cee87f6aa941d93f22b78b", null ], + [ "FACE_MAX_FACES", "face_parameters_8hpp.html#a9b4b92c621cc5962a72898899d2f2534", null ], + [ "FACE_NUMBER_PARTS", "face_parameters_8hpp.html#a8a05bdc38612c38e28b96bba5b4679b8", null ], + [ "FACE_PAIRS_RENDER", "face_parameters_8hpp.html#a1245f62cf98c4ee7591dfc8807ef355d", null ], + [ "FACE_PROTOTXT", "face_parameters_8hpp.html#a4d07868d77fb11253b413ed579e04c22", null ], + [ "FACE_SCALES_RENDER", "face_parameters_8hpp.html#a00c56c20997f734b2bd44d6f85b86cf0", null ], + [ "FACE_TRAINED_MODEL", "face_parameters_8hpp.html#abd0ef2306478c3295283e7f1b59e3aff", null ] +]; \ No newline at end of file diff --git a/web/html/doc/face_parameters_8hpp_source.html b/web/html/doc/face_parameters_8hpp_source.html new file mode 100644 index 000000000..031276476 --- /dev/null +++ b/web/html/doc/face_parameters_8hpp_source.html @@ -0,0 +1,153 @@ + + + + + + + +OpenPose: include/openpose/face/faceParameters.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceParameters.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_PARAMETERS_HPP
+
2 #define OPENPOSE_FACE_FACE_PARAMETERS_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
10 
+
11  const auto FACE_NUMBER_PARTS = 70u;
+
12  #define FACE_PAIRS_RENDER_GPU \
+
13  0,1, 1,2, 2,3, 3,4, 4,5, 5,6, 6,7, 7,8, 8,9, 9,10, 10,11, 11,12, 12,13, 13,14, 14,15, 15,16, 17,18, 18,19, 19,20, \
+
14  20,21, 22,23, 23,24, 24,25, 25,26, 27,28, 28,29, 29,30, 31,32, 32,33, 33,34, 34,35, 36,37, 37,38, 38,39, 39,40, 40,41, \
+
15  41,36, 42,43, 43,44, 44,45, 45,46, 46,47, 47,42, 48,49, 49,50, 50,51, 51,52, 52,53, 53,54, 54,55, 55,56, 56,57, 57,58, \
+
16  58,59, 59,48, 60,61, 61,62, 62,63, 63,64, 64,65, 65,66, 66,67, 67,60
+
17  #define FACE_SCALES_RENDER_GPU 1
+
18  const std::vector<unsigned int> FACE_PAIRS_RENDER {FACE_PAIRS_RENDER_GPU};
+
19  #define FACE_COLORS_RENDER_GPU 255.f, 255.f, 255.f
+
20  const std::vector<float> FACE_COLORS_RENDER{FACE_COLORS_RENDER_GPU};
+
21  const std::vector<float> FACE_SCALES_RENDER{FACE_SCALES_RENDER_GPU};
+
22 
+
23  // Constant parameters
+
24  const auto FACE_CCN_DECREASE_FACTOR = 8.f;
+
25  const std::string FACE_PROTOTXT{"face/pose_deploy.prototxt"};
+
26  const std::string FACE_TRAINED_MODEL{"face/pose_iter_116000.caffemodel"};
+
27 
+
28  // Rendering parameters
+ + +
31 }
+
32 
+
33 #endif // OPENPOSE_FACE_FACE_PARAMETERS_HPP
+
#define FACE_SCALES_RENDER_GPU
+
#define FACE_COLORS_RENDER_GPU
+
#define FACE_PAIRS_RENDER_GPU
+ +
const std::vector< float > FACE_SCALES_RENDER
+
const std::vector< unsigned int > FACE_PAIRS_RENDER
+
const auto FACE_DEFAULT_ALPHA_KEYPOINT
+
const auto POSE_DEFAULT_ALPHA_KEYPOINT
+
const std::vector< float > FACE_COLORS_RENDER
+
const std::string FACE_PROTOTXT
+
const auto FACE_DEFAULT_ALPHA_HEAT_MAP
+
const auto POSE_MAX_PEOPLE
+
const auto FACE_NUMBER_PARTS
+
const auto FACE_MAX_FACES
+
const auto FACE_CCN_DECREASE_FACTOR
+
const std::string FACE_TRAINED_MODEL
+
const auto POSE_DEFAULT_ALPHA_HEAT_MAP
+ + +
+
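The render constants above are flat index lists: each consecutive pair of entries in FACE_PAIRS_RENDER names two of the 70 face keypoints that are connected when drawing. A small sketch (plain stdout, no rendering) that walks those constants:

#include <cstdio>
#include <openpose/face/faceParameters.hpp>

int main()
{
    std::printf("Face model: %u keypoints, %zu render segments\n",
                op::FACE_NUMBER_PARTS, op::FACE_PAIRS_RENDER.size() / 2);
    for (auto i = 0u; i + 1u < op::FACE_PAIRS_RENDER.size(); i += 2u)
        std::printf("segment: %u -> %u\n", op::FACE_PAIRS_RENDER[i], op::FACE_PAIRS_RENDER[i + 1u]);
    return 0;
}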
+ + + + diff --git a/web/html/doc/face_renderer_8hpp.html b/web/html/doc/face_renderer_8hpp.html new file mode 100644 index 000000000..46a189de2 --- /dev/null +++ b/web/html/doc/face_renderer_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/face/faceRenderer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
faceRenderer.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FaceRenderer
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/face_renderer_8hpp_source.html b/web/html/doc/face_renderer_8hpp_source.html new file mode 100644 index 000000000..c9f2cf18f --- /dev/null +++ b/web/html/doc/face_renderer_8hpp_source.html @@ -0,0 +1,132 @@ + + + + + + + +OpenPose: include/openpose/face/faceRenderer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
faceRenderer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FACE_FACE_RENDERER_HPP
+
2 #define OPENPOSE_FACE_FACE_RENDERER_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+ +
9  {
+
10  public:
+
11  virtual ~FaceRenderer(){};
+
12 
+
13  virtual void initializationOnThread(){};
+
14 
+
15  void renderFace(Array<float>& outputData, const Array<float>& faceKeypoints,
+
16  const float scaleInputToOutput);
+
17 
+
18  private:
+
19  virtual void renderFaceInherited(Array<float>& outputData, const Array<float>& faceKeypoints) = 0;
+
20  };
+
21 }
+
22 
+
23 #endif // OPENPOSE_FACE_FACE_RENDERER_HPP
+ + +
virtual ~FaceRenderer()
+
virtual void initializationOnThread()
+
void renderFace(Array< float > &outputData, const Array< float > &faceKeypoints, const float scaleInputToOutput)
+ +
#define OP_API
Definition: macros.hpp:18
+ +
+
+ + + + diff --git a/web/html/doc/fast_math_8hpp.html b/web/html/doc/fast_math_8hpp.html new file mode 100644 index 000000000..970ac42b8 --- /dev/null +++ b/web/html/doc/fast_math_8hpp.html @@ -0,0 +1,151 @@ + + + + + + + +OpenPose: include/openpose/utilities/fastMath.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
fastMath.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Functions

template<typename T >
char op::positiveCharRound (const T a)
 
template<typename T >
signed char op::positiveSCharRound (const T a)
 
template<typename T >
int op::positiveIntRound (const T a)
 
template<typename T >
long op::positiveLongRound (const T a)
 
template<typename T >
long long op::positiveLongLongRound (const T a)
 
template<typename T >
unsigned char op::uCharRound (const T a)
 
template<typename T >
unsigned int op::uIntRound (const T a)
 
template<typename T >
unsigned long op::ulongRound (const T a)
 
template<typename T >
unsigned long long op::uLongLongRound (const T a)
 
template<typename T >
op::fastMax (const T a, const T b)
 
template<typename T >
op::fastMin (const T a, const T b)
 
template<class T >
op::fastTruncate (T value, T min=0, T max=1)
 
+
+
+ + + + diff --git a/web/html/doc/fast_math_8hpp.js b/web/html/doc/fast_math_8hpp.js new file mode 100644 index 000000000..1171577fe --- /dev/null +++ b/web/html/doc/fast_math_8hpp.js @@ -0,0 +1,15 @@ +var fast_math_8hpp = +[ + [ "fastMax", "fast_math_8hpp.html#a9f4b99449c0c73e2c89ee1a1eff007c7", null ], + [ "fastMin", "fast_math_8hpp.html#a6e1d1f90ef06cc7af576fdaad4b4e320", null ], + [ "fastTruncate", "fast_math_8hpp.html#a2dafd3db8f922405b38240345dd1dce5", null ], + [ "positiveCharRound", "fast_math_8hpp.html#ab5eb10c958f3f37fb82d29361ad81467", null ], + [ "positiveIntRound", "fast_math_8hpp.html#a699ef17b0f27b8bc2c4d4a03e46e6be1", null ], + [ "positiveLongLongRound", "fast_math_8hpp.html#a1b479fea39a56c041a8a51aecf024bed", null ], + [ "positiveLongRound", "fast_math_8hpp.html#a57eee48e4cefd583a81cfc907586c035", null ], + [ "positiveSCharRound", "fast_math_8hpp.html#ab71596bc88b87ea5920f19f978d6d6ac", null ], + [ "uCharRound", "fast_math_8hpp.html#a61240e5fbd4ea84a2cfdc89407bcb1ae", null ], + [ "uIntRound", "fast_math_8hpp.html#a8525e440d6ac1b558e72637dc4f4e3c4", null ], + [ "uLongLongRound", "fast_math_8hpp.html#a757a5cc88734e7be9e910e7d8213c282", null ], + [ "ulongRound", "fast_math_8hpp.html#aaafe2e235a1a3a146bb026b71c521c7b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/fast_math_8hpp_source.html b/web/html/doc/fast_math_8hpp_source.html new file mode 100644 index 000000000..3447d5e5e --- /dev/null +++ b/web/html/doc/fast_math_8hpp_source.html @@ -0,0 +1,205 @@ + + + + + + + +OpenPose: include/openpose/utilities/fastMath.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
fastMath.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_UTILITIES_MATH_HPP
+
2 #define OPENPOSE_UTILITIES_MATH_HPP
+
3 
+
4 namespace op
+
5 {
+
6  // Use op::round/max/min for basic types (int, char, long, float, double, etc). Never with classes!
+
7  // `std::` alternatives use 'const T&' instead of 'const T' as argument.
+
8  // E.g., std::round is really slow (~300 ms vs ~10 ms when I individually apply it to each element of a whole
+
9  // image array).
+
10 
+
11  // VERY IMPORTANT: These fast functions do NOT work for negative integer numbers.
+
12  // E.g., positiveIntRound(-180.f) = -179.
+
13 
+
14  // Round functions
+
15  // Signed
+
16  template<typename T>
+
17  inline char positiveCharRound(const T a)
+
18  {
+
19  return char(a+0.5f);
+
20  }
+
21 
+
22  template<typename T>
+
23  inline signed char positiveSCharRound(const T a)
+
24  {
+
25  return (signed char)(a+0.5f);
+
26  }
+
27 
+
28  template<typename T>
+
29  inline int positiveIntRound(const T a)
+
30  {
+
31  return int(a+0.5f);
+
32  }
+
33 
+
34  template<typename T>
+
35  inline long positiveLongRound(const T a)
+
36  {
+
37  return long(a+0.5f);
+
38  }
+
39 
+
40  template<typename T>
+
41  inline long long positiveLongLongRound(const T a)
+
42  {
+
43  return (long long)(a+0.5f);
+
44  }
+
45 
+
46  // Unsigned
+
47  template<typename T>
+
48  inline unsigned char uCharRound(const T a)
+
49  {
+
50  return (unsigned char)(a+0.5f);
+
51  }
+
52 
+
53  template<typename T>
+
54  inline unsigned int uIntRound(const T a)
+
55  {
+
56  return (unsigned int)(a+0.5f);
+
57  }
+
58 
+
59  template<typename T>
+
60  inline unsigned long ulongRound(const T a)
+
61  {
+
62  return (unsigned long)(a+0.5f);
+
63  }
+
64 
+
65  template<typename T>
+
66  inline unsigned long long uLongLongRound(const T a)
+
67  {
+
68  return (unsigned long long)(a+0.5f);
+
69  }
+
70 
+
71  // Max/min functions
+
72  template<typename T>
+
73  inline T fastMax(const T a, const T b)
+
74  {
+
75  return (a > b ? a : b);
+
76  }
+
77 
+
78  template<typename T>
+
79  inline T fastMin(const T a, const T b)
+
80  {
+
81  return (a < b ? a : b);
+
82  }
+
83 
+
84  template<class T>
+
85  inline T fastTruncate(T value, T min = 0, T max = 1)
+
86  {
+
87  return fastMin(max, fastMax(min, value));
+
88  }
+
89 }
+
90 
+
91 #endif // OPENPOSE_UTILITIES_MATH_HPP
+ +
long long positiveLongLongRound(const T a)
Definition: fastMath.hpp:41
+
T fastTruncate(T value, T min=0, T max=1)
Definition: fastMath.hpp:85
+
long positiveLongRound(const T a)
Definition: fastMath.hpp:35
+
unsigned char uCharRound(const T a)
Definition: fastMath.hpp:48
+
int positiveIntRound(const T a)
Definition: fastMath.hpp:29
+
T fastMin(const T a, const T b)
Definition: fastMath.hpp:79
+
unsigned long long uLongLongRound(const T a)
Definition: fastMath.hpp:66
+
unsigned int uIntRound(const T a)
Definition: fastMath.hpp:54
+
T fastMax(const T a, const T b)
Definition: fastMath.hpp:73
+
unsigned long ulongRound(const T a)
Definition: fastMath.hpp:60
+
char positiveCharRound(const T a)
Definition: fastMath.hpp:17
+
signed char positiveSCharRound(const T a)
Definition: fastMath.hpp:23
+
+
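The header comments above document the trick behind these helpers: add 0.5f and cast, which is far cheaper than std::round but only valid for non-negative inputs. The following is a minimal sketch (not part of OpenPose) that exercises op::positiveIntRound and op::fastTruncate exactly as declared above; it assumes the OpenPose include directory is on the compiler's include path, and the -179 result is the pitfall called out in the comment at line 12 of the header.

    // Minimal sketch exercising the fastMath.hpp helpers shown above.
    // Assumes the OpenPose headers are reachable on the include path.
    #include <openpose/utilities/fastMath.hpp>
    #include <iostream>

    int main()
    {
        // Non-negative input: int(2.7f + 0.5f) == 3, as intended.
        std::cout << op::positiveIntRound(2.7f) << "\n";    // 3
        // Negative input: int(-180.f + 0.5f) truncates toward zero to -179,
        // matching the warning in the header comment.
        std::cout << op::positiveIntRound(-180.f) << "\n";  // -179
        // fastTruncate clamps into [min, max] via fastMin/fastMax.
        std::cout << op::fastTruncate(1.7f, 0.f, 1.f) << "\n"; // 1
        return 0;
    }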
+ + + + diff --git a/web/html/doc/file_saver_8hpp.html b/web/html/doc/file_saver_8hpp.html new file mode 100644 index 000000000..6a290ac44 --- /dev/null +++ b/web/html/doc/file_saver_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/filestream/fileSaver.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
fileSaver.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FileSaver
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/file_saver_8hpp_source.html b/web/html/doc/file_saver_8hpp_source.html new file mode 100644 index 000000000..86ef81bfd --- /dev/null +++ b/web/html/doc/file_saver_8hpp_source.html @@ -0,0 +1,135 @@ + + + + + + + +OpenPose: include/openpose/filestream/fileSaver.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
fileSaver.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FILESTREAM_DATA_SAVER_HPP
+
2 #define OPENPOSE_FILESTREAM_DATA_SAVER_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
10  {
+
11  protected:
+
12  explicit FileSaver(const std::string& directoryPath);
+
13 
+
14  virtual ~FileSaver();
+
15 
+
16  std::string getNextFileName(const unsigned long long index) const;
+
17 
+
18  std::string getNextFileName(const std::string& fileNameNoExtension) const;
+
19 
+
20  private:
+
21  const std::string mDirectoryPath;
+
22  };
+
23 }
+
24 
+
25 #endif // OPENPOSE_FILESTREAM_DATA_SAVER_HPP
+ +
virtual ~FileSaver()
+
std::string getNextFileName(const unsigned long long index) const
+
std::string getNextFileName(const std::string &fileNameNoExtension) const
+
FileSaver(const std::string &directoryPath)
+ +
#define OP_API
Definition: macros.hpp:18
+ + +
+
+ + + + diff --git a/web/html/doc/file_stream_8hpp.html b/web/html/doc/file_stream_8hpp.html new file mode 100644 index 000000000..136b6407e --- /dev/null +++ b/web/html/doc/file_stream_8hpp.html @@ -0,0 +1,142 @@ + + + + + + + +OpenPose: include/openpose/filestream/fileStream.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
fileStream.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + + + + + + + + + + + + + +

+Functions

OP_API std::string op::dataFormatToString (const DataFormat dataFormat)
 
OP_API DataFormat op::stringToDataFormat (const std::string &dataFormat)
 
OP_API void op::saveFloatArray (const Array< float > &array, const std::string &fullFilePath)
 
OP_API void op::saveData (const std::vector< Matrix > &opMats, const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)
 
OP_API void op::saveData (const Matrix &opMat, const std::string cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)
 
OP_API std::vector< Matrix > op::loadData (const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)
 
OP_API Matrix op::loadData (const std::string &cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)
 
OP_API void op::savePeopleJson (const Array< float > &keypoints, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &keypointName, const std::string &fileName, const bool humanReadable)
 
OP_API void op::savePeopleJson (const std::vector< std::pair< Array< float >, std::string >> &keypointVector, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &fileName, const bool humanReadable)
 
OP_API void op::saveImage (const Matrix &matrix, const std::string &fullFilePath, const std::vector< int > &openCvCompressionParams={getCvImwriteJpegQuality(), 100, getCvImwritePngCompression(), 9})
 
OP_API Matrix op::loadImage (const std::string &fullFilePath, const int openCvFlags=getCvLoadImageAnydepth())
 
OP_API std::vector< std::array< Rectangle< float >, 2 > > op::loadHandDetectorTxt (const std::string &txtFilePath)
 
+
+
+ + + + diff --git a/web/html/doc/file_stream_8hpp.js b/web/html/doc/file_stream_8hpp.js new file mode 100644 index 000000000..782d453e2 --- /dev/null +++ b/web/html/doc/file_stream_8hpp.js @@ -0,0 +1,15 @@ +var file_stream_8hpp = +[ + [ "dataFormatToString", "file_stream_8hpp.html#a9d121f33179e41075f4602eb6527e658", null ], + [ "loadData", "file_stream_8hpp.html#a1c2921f841ab87033b535b5ae8a4d526", null ], + [ "loadData", "file_stream_8hpp.html#a9f14054fbf4e63fc85d10c83f2f9ecb7", null ], + [ "loadHandDetectorTxt", "file_stream_8hpp.html#a0ce96f84c6e380b261802c7e2639dc7d", null ], + [ "loadImage", "file_stream_8hpp.html#a871a61f08021460e0f24f51583546a75", null ], + [ "saveData", "file_stream_8hpp.html#a7b9bcb57dd8488ade8ea288342eaed08", null ], + [ "saveData", "file_stream_8hpp.html#aafac1158605748694e3c3ed4eb34b3b7", null ], + [ "saveFloatArray", "file_stream_8hpp.html#ac1080e627185a65b88ec788184a95552", null ], + [ "saveImage", "file_stream_8hpp.html#a8c9d3469086a12607b097731848b6dea", null ], + [ "savePeopleJson", "file_stream_8hpp.html#af9c189f7c80092570699c8b9d5686fea", null ], + [ "savePeopleJson", "file_stream_8hpp.html#a1e986a510a29bfd8c682f65a8b399551", null ], + [ "stringToDataFormat", "file_stream_8hpp.html#a46e815df32db67d78a94367b7f97df25", null ] +]; \ No newline at end of file diff --git a/web/html/doc/file_stream_8hpp_source.html b/web/html/doc/file_stream_8hpp_source.html new file mode 100644 index 000000000..9b0d27cc9 --- /dev/null +++ b/web/html/doc/file_stream_8hpp_source.html @@ -0,0 +1,185 @@ + + + + + + + +OpenPose: include/openpose/filestream/fileStream.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
fileStream.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FILESTREAM_FILE_STREAM_HPP
+
2 #define OPENPOSE_FILESTREAM_FILE_STREAM_HPP
+
3 
+ + + +
7 
+
8 namespace op
+
9 {
+
10  OP_API std::string dataFormatToString(const DataFormat dataFormat);
+
11 
+
12  OP_API DataFormat stringToDataFormat(const std::string& dataFormat);
+
13 
+
14  // Save custom float format
+
15  // Example to read it in Python, assuming a (18 x 300 x 500) size Array
+
16  // x = np.fromfile(heatMapFullPath, dtype=np.float32)
+
17  // assert x[0] == 3 # First parameter saves the number of dimensions (18x300x500 = 3 dimensions)
+
18  // shape_x = x[1:1+int(x[0])]
+
19  // assert len(shape_x[0]) == 3 # Number of dimensions
+
20  // assert shape_x[0] == 18 # Size of the first dimension
+
21  // assert shape_x[1] == 300 # Size of the second dimension
+
22  // assert shape_x[2] == 500 # Size of the third dimension
+
23  // arrayData = x[1+int(round(x[0])):]
+
24  OP_API void saveFloatArray(const Array<float>& array, const std::string& fullFilePath);
+
25 
+
26  // Save/load json, xml, yaml, yml
+ +
28  const std::vector<Matrix>& opMats, const std::vector<std::string>& cvMatNames,
+
29  const std::string& fileNameNoExtension, const DataFormat dataFormat);
+
30 
+ +
32  const Matrix& opMat, const std::string cvMatName, const std::string& fileNameNoExtension,
+
33  const DataFormat dataFormat);
+
34 
+
35  OP_API std::vector<Matrix> loadData(
+
36  const std::vector<std::string>& cvMatNames, const std::string& fileNameNoExtension,
+
37  const DataFormat dataFormat);
+
38 
+ +
40  const std::string& cvMatName, const std::string& fileNameNoExtension, const DataFormat dataFormat);
+
41 
+
42  // Json - Saving as *.json not available in OpenCV versions < 3.0, this function is a quick fix
+ +
44  const Array<float>& keypoints, const std::vector<std::vector<std::array<float,3>>>& candidates,
+
45  const std::string& keypointName, const std::string& fileName, const bool humanReadable);
+
46 
+
47  // It will save a bunch of Array<float> elements
+ +
49  const std::vector<std::pair<Array<float>, std::string>>& keypointVector,
+
50  const std::vector<std::vector<std::array<float,3>>>& candidates, const std::string& fileName,
+
51  const bool humanReadable);
+
52 
+
53  // Save/load image
+ +
55  const Matrix& matrix, const std::string& fullFilePath,
+
56  const std::vector<int>& openCvCompressionParams
+ +
58 
+
59  OP_API Matrix loadImage(const std::string& fullFilePath, const int openCvFlags = getCvLoadImageAnydepth());
+
60 
+
61  OP_API std::vector<std::array<Rectangle<float>, 2>> loadHandDetectorTxt(const std::string& txtFilePath);
+
62 }
+
63 
+
64 #endif // OPENPOSE_FILESTREAM_FILE_STREAM_HPP
+ + + + +
#define OP_API
Definition: macros.hpp:18
+ +
OP_API std::vector< std::array< Rectangle< float >, 2 > > loadHandDetectorTxt(const std::string &txtFilePath)
+
OP_API int getCvImwritePngCompression()
+
OP_API DataFormat stringToDataFormat(const std::string &dataFormat)
+
OP_API int getCvLoadImageAnydepth()
+
OP_API Matrix loadImage(const std::string &fullFilePath, const int openCvFlags=getCvLoadImageAnydepth())
+
OP_API void saveImage(const Matrix &matrix, const std::string &fullFilePath, const std::vector< int > &openCvCompressionParams={getCvImwriteJpegQuality(), 100, getCvImwritePngCompression(), 9})
+
OP_API std::string dataFormatToString(const DataFormat dataFormat)
+
OP_API std::vector< Matrix > loadData(const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)
+
OP_API void saveData(const std::vector< Matrix > &opMats, const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)
+
OP_API void saveFloatArray(const Array< float > &array, const std::string &fullFilePath)
+
OP_API int getCvImwriteJpegQuality()
+
DataFormat
Definition: enumClasses.hpp:7
+
OP_API void savePeopleJson(const Array< float > &keypoints, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &keypointName, const std::string &fileName, const bool humanReadable)
+ +
+
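The comment above saveFloatArray spells out the on-disk layout of the custom float format (one float holding the number of dimensions, then one float per dimension size, then the raw values) and sketches a Python reader. As a complement, here is a minimal C++ reader written only against that description; it is a sketch rather than an official API, and "heatmap.float" is a hypothetical file previously produced by op::saveFloatArray.

    // Minimal reader for the documented layout:
    // [numDimensions][size_0 ... size_{n-1}][values...], all 32-bit floats.
    #include <cstddef>
    #include <fstream>
    #include <iostream>
    #include <vector>

    int main()
    {
        std::ifstream file{"heatmap.float", std::ios::binary}; // hypothetical path
        float numberDimensions = 0.f;
        file.read(reinterpret_cast<char*>(&numberDimensions), sizeof(float));
        std::vector<float> sizes(static_cast<std::size_t>(numberDimensions));
        file.read(reinterpret_cast<char*>(sizes.data()), sizes.size() * sizeof(float));
        std::size_t volume = sizes.empty() ? 0 : 1;
        for (const auto size : sizes)
            volume *= static_cast<std::size_t>(size);
        std::vector<float> values(volume);
        file.read(reinterpret_cast<char*>(values.data()), values.size() * sizeof(float));
        std::cout << sizes.size() << " dimensions, " << values.size() << " values\n";
        return 0;
    }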
+ + + + diff --git a/web/html/doc/file_system_8hpp.html b/web/html/doc/file_system_8hpp.html new file mode 100644 index 000000000..3ad39132f --- /dev/null +++ b/web/html/doc/file_system_8hpp.html @@ -0,0 +1,146 @@ + + + + + + + +OpenPose: include/openpose/utilities/fileSystem.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
fileSystem.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Functions

OP_API void op::makeDirectory (const std::string &directoryPath)
 
OP_API bool op::existDirectory (const std::string &directoryPath)
 
OP_API bool op::existFile (const std::string &filePath)
 
OP_API std::string op::formatAsDirectory (const std::string &directoryPathString)
 
OP_API std::string op::getFileNameAndExtension (const std::string &fullPath)
 
OP_API std::string op::getFileNameNoExtension (const std::string &fullPath)
 
OP_API std::string op::getFileExtension (const std::string &fullPath)
 
OP_API std::string op::getFullFilePathNoExtension (const std::string &fullPath)
 
OP_API std::string op::getFileParentFolderPath (const std::string &fullPath)
 
OP_API std::vector< std::string > op::getFilesOnDirectory (const std::string &directoryPath, const std::vector< std::string > &extensions={})
 
OP_API std::vector< std::string > op::getFilesOnDirectory (const std::string &directoryPath, const std::string &extension)
 
OP_API std::vector< std::string > op::getFilesOnDirectory (const std::string &directoryPath, const Extensions extensions)
 
OP_API std::string op::removeSpecialsCharacters (const std::string &stringToVariate)
 
OP_API void op::removeAllOcurrencesOfSubString (std::string &stringToModify, const std::string &substring)
 
OP_API void op::replaceAll (std::string &stringText, const char charToChange, const char charToAdd)
 
+
+
+ + + + diff --git a/web/html/doc/file_system_8hpp.js b/web/html/doc/file_system_8hpp.js new file mode 100644 index 000000000..08e8b5623 --- /dev/null +++ b/web/html/doc/file_system_8hpp.js @@ -0,0 +1,18 @@ +var file_system_8hpp = +[ + [ "existDirectory", "file_system_8hpp.html#a6fc2ee2d2c256695fb7b2b953ee7f762", null ], + [ "existFile", "file_system_8hpp.html#ac1f4b95440d2fb57fc715558d039b947", null ], + [ "formatAsDirectory", "file_system_8hpp.html#ab38ea91ef7b7dad700d8e4a4654d48f5", null ], + [ "getFileExtension", "file_system_8hpp.html#a515273b013402d8c75780330588421bc", null ], + [ "getFileNameAndExtension", "file_system_8hpp.html#a573544858d0a9c29c9707eeda3a21c98", null ], + [ "getFileNameNoExtension", "file_system_8hpp.html#a6f37638480139a4076eef4d0c7dc6cd1", null ], + [ "getFileParentFolderPath", "file_system_8hpp.html#a2e35510c95e5525aae7a398b03b32488", null ], + [ "getFilesOnDirectory", "file_system_8hpp.html#a858f70fa9d84ad85c60f19a2229ebbde", null ], + [ "getFilesOnDirectory", "file_system_8hpp.html#adb26da2c52486e926d98471b5387c7e1", null ], + [ "getFilesOnDirectory", "file_system_8hpp.html#a3ff74a37eb4bf12e31bc5aa95b69f9e3", null ], + [ "getFullFilePathNoExtension", "file_system_8hpp.html#ac1737c19228b83a5e93ae51e5d9556eb", null ], + [ "makeDirectory", "file_system_8hpp.html#acc650faa23df88ca16a09a2d2a522960", null ], + [ "removeAllOcurrencesOfSubString", "file_system_8hpp.html#a82471a2af285bada830bac3c95a8440b", null ], + [ "removeSpecialsCharacters", "file_system_8hpp.html#a8664658afa7be03e173cec9eff2873ad", null ], + [ "replaceAll", "file_system_8hpp.html#a5fe477200af87dadb07c8d6a75b4414b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/file_system_8hpp_source.html b/web/html/doc/file_system_8hpp_source.html new file mode 100644 index 000000000..487fd1330 --- /dev/null +++ b/web/html/doc/file_system_8hpp_source.html @@ -0,0 +1,160 @@ + + + + + + + +OpenPose: include/openpose/utilities/fileSystem.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
fileSystem.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_UTILITIES_FILE_SYSTEM_HPP
+
2 #define OPENPOSE_UTILITIES_FILE_SYSTEM_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+
8  OP_API void makeDirectory(const std::string& directoryPath);
+
9 
+
10  OP_API bool existDirectory(const std::string& directoryPath);
+
11 
+
12  OP_API bool existFile(const std::string& filePath);
+
13 
+
20  OP_API std::string formatAsDirectory(const std::string& directoryPathString);
+
21 
+
27  OP_API std::string getFileNameAndExtension(const std::string& fullPath);
+
28 
+
34  OP_API std::string getFileNameNoExtension(const std::string& fullPath);
+
35 
+
42  OP_API std::string getFileExtension(const std::string& fullPath);
+
43 
+
49  OP_API std::string getFullFilePathNoExtension(const std::string& fullPath);
+
50 
+
56  OP_API std::string getFileParentFolderPath(const std::string& fullPath);
+
57 
+
65  OP_API std::vector<std::string> getFilesOnDirectory(
+
66  const std::string& directoryPath, const std::vector<std::string>& extensions = {});
+
67 
+
75  OP_API std::vector<std::string> getFilesOnDirectory(
+
76  const std::string& directoryPath, const std::string& extension);
+
77 
+
85  OP_API std::vector<std::string> getFilesOnDirectory(
+
86  const std::string& directoryPath, const Extensions extensions);
+
87 
+
88  OP_API std::string removeSpecialsCharacters(const std::string& stringToVariate);
+
89 
+
90  OP_API void removeAllOcurrencesOfSubString(std::string& stringToModify, const std::string& substring);
+
91 
+
92  OP_API void replaceAll(std::string& stringText, const char charToChange, const char charToAdd);
+
93 }
+
94 
+
95 #endif // OPENPOSE_UTILITIES_FILE_SYSTEM_HPP
+ +
#define OP_API
Definition: macros.hpp:18
+ +
OP_API std::string getFileParentFolderPath(const std::string &fullPath)
+
OP_API std::vector< std::string > getFilesOnDirectory(const std::string &directoryPath, const std::vector< std::string > &extensions={})
+
OP_API std::string getFileExtension(const std::string &fullPath)
+
Extensions
Definition: enumClasses.hpp:32
+
OP_API std::string getFileNameAndExtension(const std::string &fullPath)
+
OP_API void replaceAll(std::string &stringText, const char charToChange, const char charToAdd)
+
OP_API std::string getFileNameNoExtension(const std::string &fullPath)
+
OP_API bool existDirectory(const std::string &directoryPath)
+
OP_API void removeAllOcurrencesOfSubString(std::string &stringToModify, const std::string &substring)
+
OP_API std::string removeSpecialsCharacters(const std::string &stringToVariate)
+
OP_API std::string formatAsDirectory(const std::string &directoryPathString)
+
OP_API std::string getFullFilePathNoExtension(const std::string &fullPath)
+
OP_API bool existFile(const std::string &filePath)
+
OP_API void makeDirectory(const std::string &directoryPath)
+
+
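Since this header is a small collection of free functions, a short usage sketch may help; it assumes the OpenPose headers and library are available, and the exact form the extension filter expects (with or without a leading dot) is an assumption to verify against the implementation, not something stated on this page.

    // Minimal sketch using the fileSystem.hpp helpers declared above.
    // "examples/media/" is only an illustrative directory.
    #include <openpose/utilities/fileSystem.hpp>
    #include <iostream>
    #include <string>
    #include <vector>

    int main()
    {
        const std::string directory{"examples/media/"};
        if (op::existDirectory(directory))
        {
            // Assumption: extensions are given without a leading dot (e.g., "jpg").
            const auto paths = op::getFilesOnDirectory(
                directory, std::vector<std::string>{"jpg", "png"});
            for (const auto& path : paths)
                std::cout << op::getFileNameNoExtension(path) << "\n";
        }
        return 0;
    }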
+ + + + diff --git a/web/html/doc/files.html b/web/html/doc/files.html new file mode 100644 index 000000000..23d9a2c68 --- /dev/null +++ b/web/html/doc/files.html @@ -0,0 +1,322 @@ + + + + + + + +OpenPose: File List + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
File List
+
+
+
Here is a list of all files with brief descriptions:
+
[detail level 1234]
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
  doc
 advanced
  installation
 deprecated
 jetson_tx
  very_advanced
 library_structure
  include
  openpose
  3d
  calibration
  core
  face
  filestream
  gpu
  gui
  hand
  net
  pose
  producer
  thread
  tracking
  unity
  utilities
  wrapper
 flags.hpp
 headers.hpp
+
+
+
+ + + + diff --git a/web/html/doc/files_dup.js b/web/html/doc/files_dup.js new file mode 100644 index 000000000..09d1d5d38 --- /dev/null +++ b/web/html/doc/files_dup.js @@ -0,0 +1,5 @@ +var files_dup = +[ + [ "doc", "dir_e68e8157741866f444e17edd764ebbae.html", "dir_e68e8157741866f444e17edd764ebbae" ], + [ "include", "dir_d44c64559bbebec7f509842c48db8b23.html", "dir_d44c64559bbebec7f509842c48db8b23" ] +]; \ No newline at end of file diff --git a/web/html/doc/filestream_2enum_classes_8hpp.html b/web/html/doc/filestream_2enum_classes_8hpp.html new file mode 100644 index 000000000..805e4b940 --- /dev/null +++ b/web/html/doc/filestream_2enum_classes_8hpp.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/filestream/enumClasses.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
enumClasses.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + +

+Enumerations

enum class  op::DataFormat : unsigned char { op::Json +, op::Xml +, op::Yaml +, op::Yml + }
 
enum class  op::CocoJsonFormat : unsigned char {
+  op::Body +, op::Hand21 +, op::Hand42 +, op::Face +,
+  op::Foot +, op::Car +, op::Size +
+ }
 
+
+
+ + + + diff --git a/web/html/doc/filestream_2enum_classes_8hpp.js b/web/html/doc/filestream_2enum_classes_8hpp.js new file mode 100644 index 000000000..af026ff36 --- /dev/null +++ b/web/html/doc/filestream_2enum_classes_8hpp.js @@ -0,0 +1,18 @@ +var filestream_2enum_classes_8hpp = +[ + [ "CocoJsonFormat", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715ac", [ + [ "Body", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715acaac101b32dda4448cf13a93fe283dddd8", null ], + [ "Hand21", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca9909f7cecc318ee0049ad0f3b409b3b3", null ], + [ "Hand42", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca1d9502bb9f6efc989b3578dcfde7901e", null ], + [ "Face", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca8af5861002f3c157f9ba842bba10aa3f", null ], + [ "Foot", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca129e74dde7b475c8848310e16754c965", null ], + [ "Car", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715acae9989db5dabeea617f40c8dbfd07f5fb", null ], + [ "Size", "filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "DataFormat", "filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7e", [ + [ "Json", "filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7eaeed8d85b888a6c015834240885ee6333", null ], + [ "Xml", "filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7ea9ec8e4e3ab4c7eeba097f27d7364d743", null ], + [ "Yaml", "filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7ea65f6036bfc9798ce230c5d8567551315", null ], + [ "Yml", "filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7ea55eeca17b45365c188d0edbd35f6e0c3", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/filestream_2enum_classes_8hpp_source.html b/web/html/doc/filestream_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..6eea863f7 --- /dev/null +++ b/web/html/doc/filestream_2enum_classes_8hpp_source.html @@ -0,0 +1,141 @@ + + + + + + + +OpenPose: include/openpose/filestream/enumClasses.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
enumClasses.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FILESTREAM_ENUM_CLASSES_HPP
+
2 #define OPENPOSE_FILESTREAM_ENUM_CLASSES_HPP
+
3 
+
4 namespace op
+
5 {
+
6  enum class DataFormat : unsigned char
+
7  {
+
8  Json,
+
9  Xml,
+
10  Yaml,
+
11  Yml,
+
12  };
+
13 
+
14  enum class CocoJsonFormat : unsigned char
+
15  {
+
16  Body,
+
17  Hand21,
+
18  Hand42,
+
19  Face,
+
20  Foot,
+
21  Car,
+
22  Size,
+
23  };
+
24 }
+
25 
+
26 #endif // OPENPOSE_FILESTREAM_ENUM_CLASSES_HPP
+ +
CocoJsonFormat
Definition: enumClasses.hpp:15
+ + + + + + + +
DataFormat
Definition: enumClasses.hpp:7
+ + + + +
+
+ + + + diff --git a/web/html/doc/filestream_2headers_8hpp.html b/web/html/doc/filestream_2headers_8hpp.html new file mode 100644 index 000000000..957dd12e3 --- /dev/null +++ b/web/html/doc/filestream_2headers_8hpp.html @@ -0,0 +1,126 @@ + + + + + + + +OpenPose: include/openpose/filestream/headers.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/filestream_2headers_8hpp_source.html b/web/html/doc/filestream_2headers_8hpp_source.html new file mode 100644 index 000000000..5e6cac329 --- /dev/null +++ b/web/html/doc/filestream_2headers_8hpp_source.html @@ -0,0 +1,153 @@ + + + + + + + +OpenPose: include/openpose/filestream/headers.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
headers.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FILESTREAM_HEADERS_HPP
+
2 #define OPENPOSE_FILESTREAM_HEADERS_HPP
+
3 
+
4 // fileStream module
+ + + + + + + + + + + + + + + + + + + + + + + +
28 
+
29 #endif // OPENPOSE_FILESTREAM_HEADERS_HPP
+ + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + + diff --git a/web/html/doc/flags_8hpp.html b/web/html/doc/flags_8hpp.html new file mode 100644 index 000000000..64cb90391 --- /dev/null +++ b/web/html/doc/flags_8hpp.html @@ -0,0 +1,4257 @@ + + + + + + + +OpenPose: include/openpose/flags.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
flags.hpp File Reference
+
+
+
#include <gflags/gflags.h>
+
+

Go to the source code of this file.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+Functions

 DEFINE_int32 (logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any opLog() message," " while 255 will not output any. Current OpenPose library messages are in the range 0-4:" " 1 for low priority messages and 4 for important ones.")
 
 DEFINE_bool (disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" " for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with" " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" " error.")
 
 DEFINE_int32 (profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" " runtime statistics at this frame number.")
 
 DEFINE_int32 (camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" " number (by default), to auto-detect and open the first available camera.")
 
 DEFINE_string (camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" " `--flir_camera`")
 
 DEFINE_string (video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" " example video.")
 
 DEFINE_string (image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" " images. Read all standard formats (jpg, png, bmp, etc.).")
 
 DEFINE_bool (flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera.")
 
 DEFINE_int32 (flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" " camera index to run, where 0 corresponds to the detected flir camera with the lowest" " serial number, and `n` to the `n`-th lowest serial number camera.")
 
 DEFINE_string (ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP.")
 
 DEFINE_uint64 (frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e., the first frame has index 0.")
 
 DEFINE_uint64 (frame_step, 1, "Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames" " 0, 5, 10, etc..")
 
 DEFINE_uint64 (frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to" " 10, it will process 11 frames (0-10).")
 
 DEFINE_bool (frame_flip, false, "Flip/mirror each frame (e.g., for real time webcam demonstrations).")
 
 DEFINE_int32 (frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270.")
 
 DEFINE_bool (frames_repeat, false, "Repeat frames when finished.")
 
 DEFINE_bool (process_real_time, false, "Enable to keep the original source frame rate (e.g., for video). If the processing time is" " too long, it will skip frames. If it is too fast, it will slow it down.")
 
 DEFINE_string (camera_parameter_path, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located. If there" " is only 1 XML file (for single video, webcam, or images from the same camera), you must" " specify the whole XML file path (ending in .xml).")
 
 DEFINE_bool (frame_undistort, false, "If false (default), it will not undistort the image; if true, it will undistort them" " based on the camera parameters found in `camera_parameter_path`")
 
 DEFINE_string (model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located.")
 
 DEFINE_string (prototxt_path, "", "The combination `--model_folder` + `--prototxt_path` represents the whole path to the" " prototxt file. If empty, it will use the default OpenPose ProtoTxt file.")
 
 DEFINE_string (caffemodel_path, "", "The combination `--model_folder` + `--caffemodel_path` represents the whole path to the" " caffemodel file. If empty, it will use the default OpenPose CaffeModel file.")
 
 DEFINE_string (output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" " input image resolution.")
 
 DEFINE_int32 (num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" " machine.")
 
 DEFINE_int32 (num_gpu_start, 0, "GPU device start number.")
 
 DEFINE_int32 (keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e., the scale of the (x,y)" " coordinates that will be saved with the `write_json` & `write_keypoint` flags." " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" " size (set with `net_resolution`); `2` to scale it to the final output size (set with" " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" " related with `scale_number` and `scale_gap`.")
 
 DEFINE_int32 (number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" " top scores. The score is based on person area over the image, body part score, as well as" " joint score (between each pair of connected body parts). Useful if you know the exact" " number of people in the scene, so it can remove false positives (if all the people have" " been detected). However, it might also include false negatives by removing very small or" " highly occluded people. -1 will keep them all.")
 
 DEFINE_bool (maximize_positives, false, "It reduces the thresholds to accept a person candidate. It highly increases both false and" " true positives. I.e., it maximizes average recall but could harm average precision.")
 
 DEFINE_double (fps_max, -1., "Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as" " possible. Example usage: If OpenPose is displaying images too quickly, this can reduce" " the speed so the user can analyze better each frame from the GUI.")
 
 DEFINE_int32 (body, 1, "Select 0 to disable body keypoint detection (e.g., for faster but less accurate face" " keypoint detection, custom hand detector, etc.), 1 (default) for body keypoint" " estimation, and 2 to disable its internal body pose estimation network but still" " run the greedy association parsing algorithm")
 
 DEFINE_string (model_pose, "BODY_25", "Model to be used. E.g., `BODY_25` (fastest for CUDA version, most accurate, and includes" " foot keypoints), `COCO` (18 keypoints), `MPI` (15 keypoints, least accurate model but" " fastest on CPU), `MPI_4_layers` (15 keypoints, even faster but less accurate).")
 
 DEFINE_string (net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" " input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," " e.g., full HD (1920x1080) and HD (1280x720) resolutions.")
 
 DEFINE_double (net_resolution_dynamic, 1., "This flag only applies to images or custom inputs (not to video or webcam). If it is zero" " or a negative value, it means that using `-1` in `net_resolution` will behave as explained" " in its description. Otherwise, and to avoid out of memory errors, the `-1` in" " `net_resolution` will clip to this value times the default 16/9 aspect ratio value (which" " is 656 width for a 368 height). E.g., `net_resolution_dynamic 10 net_resolution -1x368`" " will clip to 6560x368 (10 x 656). Recommended 1 for small GPUs (to avoid out of memory" " errors but maximize speed) and 0 for big GPUs (for maximum accuracy and speed).")
 
 DEFINE_int32 (scale_number, 1, "Number of scales to average.")
 
 DEFINE_double (scale_gap, 0.25, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." " If you want to change the initial scale, you actually want to multiply the" " `net_resolution` by your desired initial scale.")
 
 DEFINE_bool (heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." " If more than one `add_heatmaps_X` flag is enabled, it will place them in sequential" " memory order: body parts + bkg + PAFs. It will follow the order on" " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" " considerably decrease. Not required for OpenPose, enable it only if you intend to" " explicitly use this information later.")
 
 DEFINE_bool (heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" " background.")
 
 DEFINE_bool (heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs.")
 
 DEFINE_int32 (heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" " rounded [0,255]; and 3 for no scaling.")
 
 DEFINE_bool (part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" " the detected body parts, before being assembled into people. Note that the number of" " candidates is equal or higher than the number of final body parts (i.e., after being" " assembled into people). The empty body parts are filled with 0s. Program speed will" " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" " use this information.")
 
 DEFINE_double (upsampling_ratio, 0., "Upsampling ratio between the `net_resolution` and the output net results. A value less" " or equal than 0 (default) will use the network default value (recommended).")
 
 DEFINE_bool (face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." " `model_folder`. Note that this will considerably slow down the performance and increase" " the required GPU memory. In addition, the greater number of people on the image, the" " slower OpenPose will be.")
 
 DEFINE_int32 (face_detector, 0, "Kind of face rectangle detector. Select 0 (default) to select OpenPose body detector (most" " accurate one and fastest one if body is enabled), 1 to select OpenCV face detector (not" " implemented for hands), 2 to indicate that it will be provided by the user, or 3 to" " also apply hand tracking (only for hand). Hand tracking might improve hand keypoint" " detection for webcam (if the frame rate is high enough, i.e., >7 FPS per GPU) and video." " This is not person ID tracking, it simply looks for hands in positions at which hands were" " located in previous frames, but it does not guarantee the same person ID among frames.")
 
 DEFINE_string (face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" " detector. 320x320 usually works fine while giving a substantial speed up when multiple" " faces on the image.")
 
 DEFINE_bool (hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g." " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" " the required GPU memory and its speed depends on the number of people.")
 
 DEFINE_int32 (hand_detector, 0, "Kind of hand rectangle detector. Analogous to `--face_detector`.")
 
 DEFINE_string (hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" " detector.")
 
 DEFINE_int32 (hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4.")
 
 DEFINE_double (hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" " between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if" " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.")
 
 DEFINE_bool (3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" " results. Note that it will only display 1 person. If multiple people are present, it will" " fail.")
 
 DEFINE_int32 (3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" " require max(2, min(4, #cameras-1)) cameras to see the keypoint in order to reconstruct" " it.")
 
 DEFINE_int32 (3d_views, -1, "Complementary option for `--image_dir` or `--video`. OpenPose will read as many images per" " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" " `--camera_parameter_path` must be set. OpenPose must find as many `xml` files in the" " parameter folder as this number indicates.")
 
 DEFINE_bool (identification, false, "Experimental, not available yet. Whether to enable people identification across frames.")
 
 DEFINE_int32 (tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" " value indicates the number of frames where tracking is run between each OpenPose keypoint" " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" " detector and tracking for potentially higher accuracy than only OpenPose.")
 
 DEFINE_int32 (ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" " the number of threads will increase the speed but also the global system latency.")
 
 DEFINE_int32 (part_to_show, 0, "Prediction channel to visualize: 0 (default) for all the body parts, 1 for the background" " heat map, 2 for the superposition of heatmaps, 3 for the superposition of PAFs," " 4-(4+#keypoints) for each body part heat map, the following ones for each body part pair" " PAF.")
 
 DEFINE_bool (disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" " background, instead of being rendered into the original image. Related: `part_to_show`," " `alpha_pose`, and `alpha_pose`.")
 
 DEFINE_double (render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" " rendered. Note: Rendered refers only to visual display in the OpenPose basic GUI, not in" " the saved results. Generally, a high threshold (> 0.5) will only render very clear body" " parts; while small thresholds (~0.1) will also output guessed and occluded keypoints," " but also more false positives (i.e., wrong detections).")
 
 DEFINE_int32 (render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" " (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if" " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" " both `outputData` and `cvOutputData` with the original image and desired body part to be" " shown (i.e., keypoints, heat maps or PAFs).")
 
 DEFINE_double (alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" " hide it. Only valid for GPU rendering.")
 
 DEFINE_double (alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" " heatmap, 0 will only show the frame. Only valid for GPU rendering.")
 
 DEFINE_double (face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints.")
 
 DEFINE_int32 (face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" " configuration that `render_pose` is using.")
 
 DEFINE_double (face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face.")
 
 DEFINE_double (face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face.")
 
 DEFINE_double (hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints.")
 
 DEFINE_int32 (hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" " configuration that `render_pose` is using.")
 
 DEFINE_double (hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand.")
 
 DEFINE_double (hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand.")
 
 DEFINE_bool (fullscreen, false, "Run in full-screen mode (press f during runtime to toggle).")
 
 DEFINE_bool (no_gui_verbose, false, "Do not write text on output images on GUI (e.g., number of current frame and people). It" " does not affect the pose rendering.")
 
 DEFINE_int32 (display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display.")
 
 DEFINE_double (cli_verbose, -1.f, "If -1, it will be disabled (default). If it is a positive integer number, it will print on" " the command line every `verbose` frames. If number in the range (0,1), it will print the" " progress every `verbose` times the total of frames.")
 
 DEFINE_string (write_images, "", "Directory to write rendered frames in `write_images_format` image format.")
 
 DEFINE_string (write_images_format, "png", "File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV" " function cv::imwrite for all compatible extensions.")
 
 DEFINE_string (write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" " `write_video_fps` controls FPS. Alternatively, the video extension can be `.mp4`," " resulting in a file with a much smaller size and allowing `--write_video_with_audio`." " However, that would require: 1) Ubuntu or Mac system, 2) FFmpeg library installed" " (`sudo apt-get install ffmpeg`), 3) the temporary creation of a folder with the same" " file path as the final video (without the extension) to store the intermediate frames" " that will later be used to generate the final MP4 video.")
 
 DEFINE_double (write_video_fps, -1., "Frame rate for the recorded video. By default, it will try to get the input frames producer" " frame rate (e.g., input video or webcam frame rate). If the input frames producer does not" " have a set FPS (e.g., image_dir or webcam if OpenCV not compiled with its support), set" " this value accordingly (e.g., to the frame rate displayed by the OpenPose GUI).")
 
 DEFINE_bool (write_video_with_audio, false, "If the input is video and the output is so too, it will save the video with audio. It" " requires the output video file path finishing in `.mp4` format (see `write_video` for" " details).")
 
 DEFINE_string (write_video_3d, "", "Analogous to `--write_video`, but applied to the 3D output.")
 
 DEFINE_string (write_video_adam, "", "Experimental, not available yet. Analogous to `--write_video`, but applied to Adam model.")
 
 DEFINE_string (write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose" " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled).")
 
 DEFINE_string (write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format. If foot, face," " hands, etc. JSON is also desired (`--write_coco_json_variants`), they are saved with" " different file name suffix.")
 
 DEFINE_int32 (write_coco_json_variants, 1, "Add 1 for body, add 2 for foot, 4 for face, and/or 8 for hands. Use 0 to use all the" " possible candidates. E.g., 7 would mean body+foot+face COCO JSON.")
 
 DEFINE_int32 (write_coco_json_variant, 0, "Currently, this option is experimental and only makes effect on car JSON generation. It" " selects the COCO variant for cocoJsonSaver.")
 
 DEFINE_string (write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" " must be enabled.")
 
 DEFINE_string (write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" " floating values. See `doc/02_output.md` for more details.")
 
 DEFINE_string (write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" " with `write_keypoint_format`.")
 
 DEFINE_string (write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead.")
 
 DEFINE_string (write_bvh, "", "Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`.")
 
 DEFINE_string (udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`.")
 
 DEFINE_string (udp_port, "8051", "Experimental, not available yet. Port number for UDP communication.")
 
+
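These DEFINE_* macros come from gflags (included at the top of flags.hpp), so each flag above becomes a FLAGS_<name> global once the command line has been parsed. The snippet below is a minimal sketch of that wiring in a user binary, not an OpenPose example: it assumes gflags and the OpenPose headers are installed and only reads a few of the flags listed above.

    // Minimal sketch: parsing the flags declared in openpose/flags.hpp via gflags.
    #include <openpose/flags.hpp> // pulls in <gflags/gflags.h> and the DEFINE_* calls above
    #include <iostream>

    int main(int argc, char* argv[])
    {
        // Fills the FLAGS_* globals, e.g. `./binary --camera=0 --net_resolution=320x176`.
        gflags::ParseCommandLineFlags(&argc, &argv, true);
        std::cout << "camera:         " << FLAGS_camera << "\n";
        std::cout << "net_resolution: " << FLAGS_net_resolution << "\n";
        std::cout << "render_pose:    " << FLAGS_render_pose << "\n";
        return 0;
    }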

Function Documentation

+ +

◆ DEFINE_bool() [1/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (3d ,
false ,
"Running OpenPose 3-D reconstruction demo: 1 
) const
+
+ +
+
+ +

◆ DEFINE_bool() [2/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (disable_blending ,
false ,
"If enabled,
it will render the results(keypoint skeletons or heatmaps) on a black" " background,
instead of being rendered into the original image. Related:`part_to_show` ,
" " `alpha_pose` ,
and `alpha_pose`."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [3/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (disable_multi_thread ,
false ,
"It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" " for 1 
)
+
+ +
+
+ +

◆ DEFINE_bool() [4/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (face ,
false ,
"Enables face keypoint detection. It will share some parameters from the body pose,
e.g." " `model_folder`. Note that this will considerable slow down the performance and increase" " the required GPU memory. In addition,
the greater number of people on the image,
the" " slower OpenPose will be."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [5/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (flir_camera ,
false ,
"Whether to use FLIR (Point-Grey) stereo camera."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [6/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (frame_flip ,
false ,
"Flip/mirror each frame (e.g., for real time webcam demonstrations)."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [7/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (frame_undistort ,
false ,
"If false  default,
it will not undistort the image,
if true,
it will undistort them" " based on the camera parameters found in `camera_parameter_path`"  
)
+
+ +
+
+ +

◆ DEFINE_bool() [8/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (frames_repeat ,
false ,
"Repeat frames when finished."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [9/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (fullscreen ,
false ,
"Run in full-screen mode (press f during runtime to toggle)."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [10/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (hand ,
false ,
"Enables hand keypoint detection. It will share some parameters from the body pose,
e.g." " `model_folder`. Analogously to `--face` ,
it will also slow down the performance,
increase" " the required GPU memory and its speed depends on the number of people."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [11/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (heatmaps_add_bkg ,
false ,
"Same functionality as `add_heatmaps_parts` ,
but adding the heatmap corresponding to" " background."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [12/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (heatmaps_add_PAFs ,
false ,
"Same functionality as `add_heatmaps_parts` ,
but adding the PAFs."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [13/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (heatmaps_add_parts ,
false ,
"If true,
it will fill op::Datum::poseHeatMaps array with the body part heatmaps,
and" " analogously face &hand heatmaps to op::Datum::faceHeatMaps &op::Datum::handHeatMaps." " If more than one `add_heatmaps_X` flag is enabled,
it will place them in sequential" " memory order:body parts+bkg+PAFs. It will follow the order on" " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" " considerably decrease. Not required for OpenPose,
enable it only if you intend to" " explicitly use this information later."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [14/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (identification ,
false ,
Experimental,
not available yet. Whether to enable people identification across frames."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [15/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (maximize_positives ,
false ,
"It reduces the thresholds to accept a person candidate. It highly increases both false and" " true positives. I. e.,
it maximizes average recall but could harm average precision."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [16/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (no_gui_verbose ,
false ,
"Do not write text on output images on GUI (e.g., number of current frame and people). It" " does not affect the pose rendering."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [17/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (part_candidates ,
false ,
"Also enable `write_json` in order to save this information. If true,
it will fill the" " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" " the detected body parts,
before being assembled into people. Note that the number of" " candidates is equal or higher than the number of final body parts(i.e., after being" " assembled into people). The empty body parts are filled with 0s. Program speed will" " slightly decrease. Not required for OpenPose,
enable it only if you intend to explicitly" " use this information."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [18/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (process_real_time ,
false ,
"Enable to keep the original source frame rate (e.g., for video). If the processing time is" " too long ,
it will skip frames. If it is too fast,
it will slow it down."  
)
+
+ +
+
+ +

◆ DEFINE_bool() [19/19]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_bool (write_video_with_audio ,
false ,
"If the input is video and the output is so too,
it will save the video with audio. It" " requires the output video file path finishing in `.mp4` format(see `write_video` for" " details)."  
)
+
+ +
+
+ +

◆ DEFINE_double() [1/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (alpha_heatmap ,
0. 7,
"Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" " heatmap,
0 will only show the frame. Only valid for GPU rendering."  
)
+
+ +
+
+ +

◆ DEFINE_double() [2/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (alpha_pose ,
0. 6,
"Blending factor (range 0-1) for the body part rendering. 1 will show it completely,
0 will" " hide it. Only valid for GPU rendering."  
)
+
+ +
+
+ +

◆ DEFINE_double() [3/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (cli_verbose,
-1.f,
"If -1, it will be disabled (default). If it is a positive integer number, it will print on the command line every `verbose` frames. If number in the range (0,1), it will print the progress every `verbose` times the total of frames."
)
+
+ +
+
+ +

◆ DEFINE_double() [4/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (face_alpha_heatmap ,
0.7,
"Analogous to `alpha_heatmap` but applied to face."  
)
+
+ +
+
+ +

◆ DEFINE_double() [5/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (face_alpha_pose ,
0.6,
"Analogous to `alpha_pose` but applied to face."  
)
+
+ +
+
+ +

◆ DEFINE_double() [6/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (face_render_threshold ,
0.4,
"Analogous to `render_threshold`, but applied to the face keypoints."
)
+
+ +
+
+ +

◆ DEFINE_double() [7/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (fps_max,
-1.,
"Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as possible. Example usage: If OpenPose is displaying images too quickly, this can reduce the speed so the user can analyze better each frame from the GUI."
)
+
+ +
+
+ +

◆ DEFINE_double() [8/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (hand_alpha_heatmap ,
0.7,
"Analogous to `alpha_heatmap` but applied to hand."  
)
+
+ +
+
+ +

◆ DEFINE_double() [9/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (hand_alpha_pose ,
0.6,
"Analogous to `alpha_pose` but applied to hand."  
)
+
+ +
+
+ +

◆ DEFINE_double() [10/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (hand_render_threshold ,
0.2,
"Analogous to `render_threshold`, but applied to the hand keypoints."
)
+
+ +
+
+ +

◆ DEFINE_double() [11/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (hand_scale_range,
0.4,
"Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."
)
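To make the scale arithmetic above concrete, here is a minimal C++ sketch (illustrative only, not OpenPose code; the helper name is invented) that reproduces the documented example of scales centered at ratio 1:

#include <vector>
// Illustrative helper: spread `scalesNumber` scales evenly over `scaleRange`, centered at 1.0.
std::vector<double> centeredHandScales(const int scalesNumber, const double scaleRange)
{
    if (scalesNumber < 2)
        return {1.0};
    std::vector<double> scales;
    const double step = scaleRange / (scalesNumber - 1);
    for (int i = 0; i < scalesNumber; ++i)
        scales.push_back(1.0 - scaleRange / 2.0 + i * step); // (2, 0.4) -> 0.8 and 1.2
    return scales;
}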
+
+ +
+
+ +

◆ DEFINE_double() [12/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (net_resolution_dynamic,
1.,
"This flag only applies to images or custom inputs (not to video or webcam). If it is zero or a negative value, it means that using `-1` in `net_resolution` will behave as explained in its description. Otherwise, and to avoid out of memory errors, the `-1` in `net_resolution` will clip to this value times the default 16/9 aspect ratio value (which is 656 width for a 368 height). E.g., `net_resolution_dynamic 10 net_resolution -1x368` will clip to 6560x368 (10 x 656). Recommended 1 for small GPUs (to avoid out of memory errors but maximize speed) and 0 for big GPUs (for maximum accuracy and speed)."
)
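As a rough illustration of the clipping rule described above (behavior inferred from the flag description only, not taken from the OpenPose internals; the function name is hypothetical):

// Width limit used when `net_resolution` is `-1x368` and `net_resolution_dynamic` is positive.
// E.g., net_resolution_dynamic = 10 -> 10 * 656 = 6560, i.e., clip to 6560x368.
int maxDynamicNetWidth(const double netResolutionDynamic)
{
    const int defaultWidthFor368Height = 656; // 16/9 aspect ratio at 368 height
    if (netResolutionDynamic <= 0.)
        return -1; // disabled: `-1` in `net_resolution` behaves as normally documented
    return (int)(netResolutionDynamic * defaultWidthFor368Height);
}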
+
+ +
+
+ +

◆ DEFINE_double() [13/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (render_threshold,
0.05,
"Only estimated keypoints whose score confidences are higher than this threshold will be rendered. Note: Rendered refers only to visual display in the OpenPose basic GUI, not in the saved results. Generally, a high threshold (> 0.5) will only render very clear body parts; while small thresholds (~0.1) will also output guessed and occluded keypoints, but also more false positives (i.e., wrong detections)."
)
+
+ +
+
+ +

◆ DEFINE_double() [14/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (scale_gap,
0.25,
"Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1. If you want to change the initial scale, you actually want to multiply the `net_resolution` by your desired initial scale."
)
+
+ +
+
+ +

◆ DEFINE_double() [15/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (upsampling_ratio,
0.,
"Upsampling ratio between the `net_resolution` and the output net results. A value less" " or equal than 0 (default) will use the network default value (recommended)."  
)
+
+ +
+
+ +

◆ DEFINE_double() [16/16]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_double (write_video_fps,
-1.,
"Frame rate for the recorded video. By default, it will try to get the input frames producer frame rate (e.g., input video or webcam frame rate). If the input frames producer does not have a set FPS (e.g., image_dir or webcam if OpenCV not compiled with its support), set this value accordingly (e.g., to the frame rate displayed by the OpenPose GUI)."
)
+
+ +
+
+ +

◆ DEFINE_int32() [1/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (3d_min_views,
-1,
"Minimum number of views required to reconstruct each keypoint. By default (-1), it will require max(2, min(4, #cameras-1)) cameras to see the keypoint in order to reconstruct it."
)
+
+ +
+
+ +

◆ DEFINE_int32() [2/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (3d_views,
-1,
"Complementary option for `--image_dir` or `--video`. OpenPose will read as many images per iteration, allowing tasks such as stereo camera processing (`--3d`). Note that `--camera_parameter_path` must be set. OpenPose must find as many `xml` files in the parameter folder as this number indicates."
)
+
+ +
+
+ +

◆ DEFINE_int32() [3/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (body,
1,
"Select 0 to disable body keypoint detection (e.g., for faster but less accurate face keypoint detection, custom hand detector, etc.), 1 (default) for body keypoint estimation, and 2 to disable its internal body pose estimation network but still run the greedy association parsing algorithm."
)
+
+ +
+
+ +

◆ DEFINE_int32() [4/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (camera,
-1,
"The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative number (by default), to auto-detect and open the first available camera."
)
+
+ +
+
+ +

◆ DEFINE_int32() [5/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (display ,
-1,
"Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."  
)
+
+ +
+
+ +

◆ DEFINE_int32() [6/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (face_detector,
0,
"Kind of face rectangle detector. Select 0 (default) to select OpenPose body detector (most accurate one and fastest one if body is enabled), 1 to select OpenCV face detector (not implemented for hands), 2 to indicate that it will be provided by the user, or 3 to also apply hand tracking (only for hand). Hand tracking might improve hand keypoint detection for webcam (if the frame rate is high enough, i.e., >7 FPS per GPU) and video. This is not person ID tracking, it simply looks for hands in positions at which hands were located in previous frames, but it does not guarantee the same person ID among frames."
)
+
+ +
+
+ +

◆ DEFINE_int32() [7/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (face_render ,
-1,
"Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" " configuration that `render_pose` is using."  
)
+
+ +
+
+ +

◆ DEFINE_int32() [8/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (flir_camera_index,
-1,
"Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir camera index to run, where 0 corresponds to the detected flir camera with the lowest serial number, and `n` to the `n`-th lowest serial number camera."
)
+
+ +
+
+ +

◆ DEFINE_int32() [9/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (frame_rotate,
0,
"Rotate each frame, 4 possible values: 0, 90, 180, 270."
)
+
+ +
+
+ +

◆ DEFINE_int32() [10/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (hand_detector,
0,
"Kind of hand rectangle detector. Analogous to `--face_detector`."
)
+
+ +
+
+ +

◆ DEFINE_int32() [11/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (hand_render ,
-1,
"Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" " configuration that `render_pose` is using."  
)
+
+ +
+
+ +

◆ DEFINE_int32() [12/26]

+ +
+
+ + + + + + + + + + + + + + + + + + +
DEFINE_int32 (hand_scale_number,
1,
"Analogous to `scale_number` but applied to the hand keypoint detector. Our best results were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."
)
+
+ +
+
+ +

◆ DEFINE_int32() [13/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (heatmaps_scale,
2,
"Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer rounded [0,255]; and 3 for no scaling."
)
+
+ +
+
+ +

◆ DEFINE_int32() [14/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (ik_threads,
0,
"Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing the number of threads will increase the speed but also the global system latency."
)
+
+ +
+
+ +

◆ DEFINE_int32() [15/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (keypoint_scale,
0,
"Scaling of the (x,y) coordinates of the final pose data array, i.e., the scale of the (x,y) coordinates that will be saved with the `write_json` & `write_keypoint` flags. Select `0` to scale it to the original source resolution; `1` to scale it to the net output size (set with `net_resolution`); `2` to scale it to the final output size (set with `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non related with `scale_number` and `scale_gap`."
)
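As a small illustration of what modes 0 and 3 mean for a single keypoint (hypothetical helper, shown only to clarify the arithmetic; the remaining modes follow the same pattern):

#include <utility>
// keypoint_scale 3: map pixel coordinates to [0,1], with (0,0) the top-left corner of the image.
// keypoint_scale 0: keep the original source resolution (identity).
std::pair<float, float> scaleKeypoint(const float x, const float y,
                                      const int imageWidth, const int imageHeight,
                                      const int keypointScaleMode)
{
    if (keypointScaleMode == 3)
        return {x / imageWidth, y / imageHeight};
    return {x, y}; // mode 0; other modes omitted in this sketch
}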
+
+ +
+
+ +

◆ DEFINE_int32() [16/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (logging_level,
3,
"The logging level. Integer in the range [0, 255]. 0 will output any opLog() message, while 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for low priority messages and 4 for important ones."
)
+
+ +
+
+ +

◆ DEFINE_int32() [17/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (num_gpu ,
-1,
"The number of GPU devices to use. If negative,
it will use all the available GPUs in your" " machine."  
)
+
+ +
+
+ +

◆ DEFINE_int32() [18/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (num_gpu_start ,
0,
"GPU device start number."  
)
+
+ +
+
+ +

◆ DEFINE_int32() [19/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (number_people_max,
-1,
"This parameter will limit the maximum number of people detected, by keeping the people with top scores. The score is based in person area over the image, body part score, as well as joint score (between each pair of connected body parts). Useful if you know the exact number of people in the scene, so it can remove false positives (if all the people have been detected). However, it might also include false negatives by removing very small or highly occluded people. -1 will keep them all."
)
+
+ +
+
+ +

◆ DEFINE_int32() [20/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (part_to_show,
0,
"Prediction channel to visualize: 0 (default) for all the body parts, 1 for the background heat map, 2 for the superposition of heatmaps, 3 for the superposition of PAFs, 4-(4+#keypoints) for each body part heat map, the following ones for each body part pair PAF."
)
+
+ +
+
+ +

◆ DEFINE_int32() [21/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (profile_speed,
1000,
"If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some runtime statistics at this frame number."
)
+
+ +
+
+ +

◆ DEFINE_int32() [22/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (render_pose,
-1,
"Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render both `outputData` and `cvOutputData` with the original image and desired body part to be shown (i.e., keypoints, heat maps or PAFs)."
)
+
+ +
+
+ +

◆ DEFINE_int32() [23/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (scale_number,
1,
"Number of scales to average."
)
+
+ +
+
+ +

◆ DEFINE_int32() [24/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (tracking,
-1,
"Experimental, not available yet. Whether to enable people tracking across frames. The value indicates the number of frames where tracking is run between each OpenPose keypoint detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint detector and tracking for potentially higher accuracy than only OpenPose."
)
+
+ +
+
+ +

◆ DEFINE_int32() [25/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (write_coco_json_variant,
0,
"Currently, this option is experimental and only makes effect on car JSON generation. It selects the COCO variant for cocoJsonSaver."
)
+
+ +
+
+ +

◆ DEFINE_int32() [26/26]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_int32 (write_coco_json_variants,
1,
"Add 1 for body, add 2 for foot, 4 for face, and/or 8 for hands. Use 0 to use all the possible candidates. E.g., 7 would mean body+foot+face COCO JSON."
)
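The value acts as a bit mask; a short sketch of the documented encoding (hypothetical helpers, shown only to illustrate the arithmetic):

// 1 = body, 2 = foot, 4 = face, 8 = hands; e.g., 7 = 1+2+4 = body+foot+face.
bool savesBody (const int variants) { return (variants & 1) != 0; }
bool savesFoot (const int variants) { return (variants & 2) != 0; }
bool savesFace (const int variants) { return (variants & 4) != 0; }
bool savesHands(const int variants) { return (variants & 8) != 0; }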
+
+ +
+
+ +

◆ DEFINE_string() [1/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (caffemodel_path,
"",
"The combination `--model_folder` + `--caffemodel_path` represents the whole path to the caffemodel file. If empty, it will use the default OpenPose CaffeModel file."
)
+
+ +
+
+ +

◆ DEFINE_string() [2/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (camera_parameter_path,
"models/cameraParameters/flir/",
"String with the folder where the camera parameters are located. If there is only 1 XML file (for single video, webcam, or images from the same camera), you must specify the whole XML file path (ending in .xml)."
)
+
+ +
+
+ +

◆ DEFINE_string() [3/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (camera_resolution,
"-1x-1",
"Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the default 1280x720 for `--camera`, or the maximum flir camera resolution available for `--flir_camera`"
)
+
+ +
+
+ +

◆ DEFINE_string() [4/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (face_net_resolution ,
"368x368" ,
"Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" " detector. 320x320 usually works fine while giving a substantial speed up when multiple" " faces on the image."  
)
+
+ +
+
+ +

◆ DEFINE_string() [5/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (hand_net_resolution ,
"368x368" ,
"Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" " detector."  
)
+
+ +
+
+ +

◆ DEFINE_string() [6/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (image_dir ,
"" ,
"Process a directory of images. Use `examples/media/` for our default example folder with 20" " images. Read all standard formats (jpg, png, bmp, etc.)."  
)
+
+ +
+
+ +

◆ DEFINE_string() [7/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (ip_camera ,
"" ,
"String with the IP camera URL. It supports protocols like RTSP and HTTP."  
)
+
+ +
+
+ +

◆ DEFINE_string() [8/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (model_folder ,
"models/" ,
"Folder path (absolute or relative) where the models (pose, face, ...) are located."  
)
+
+ +
+
+ +

◆ DEFINE_string() [9/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (model_pose,
"BODY_25",
"Model to be used. E.g., `BODY_25` (fastest for CUDA version, most accurate, and includes foot keypoints), `COCO` (18 keypoints), `MPI` (15 keypoints, least accurate model but fastest on CPU), `MPI_4_layers` (15 keypoints, even faster but less accurate)."
)
+
+ +
+
+ +

◆ DEFINE_string() [10/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (net_resolution,
"-1x368",
"Multiples of 16. If it is increased, the accuracy potentially increases. If it is decreased, the speed increases. For maximum speed-accuracy balance, it should keep the closest aspect ratio possible to the images or videos to be processed. Using `-1` in any of the dimensions, OP will choose the optimal aspect ratio depending on the user's input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions, e.g., full HD (1980x1080) and HD (1280x720) resolutions."
)
+
+ +
+
+ +

◆ DEFINE_string() [11/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (output_resolution ,
"-1x-1" ,
"The image resolution (display and output). Use \"-1x-1\" to force the program to use the" " input image resolution."  
)
+
+ +
+
+ +

◆ DEFINE_string() [12/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (prototxt_path,
"",
"The combination `--model_folder` + `--prototxt_path` represents the whole path to the prototxt file. If empty, it will use the default OpenPose ProtoTxt file."
)
+
+ +
+
+ +

◆ DEFINE_string() [13/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (udp_host,
"",
"Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."
)
+
+ +
+
+ +

◆ DEFINE_string() [14/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (udp_port,
"8051",
"Experimental, not available yet. Port number for UDP communication."
)
+
+ +
+
+ +

◆ DEFINE_string() [15/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (video ,
"" ,
"Use a video file instead of the camera. Use `examples/media/video.avi` for our default" " example video."  
)
+
+ +
+
+ +

◆ DEFINE_string() [16/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_bvh,
"",
"Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`."
)
+
+ +
+
+ +

◆ DEFINE_string() [17/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_coco_json,
"",
"Full file path to write people pose data with JSON COCO validation format. If foot, face, hands, etc. JSON is also desired (`--write_coco_json_variants`), they are saved with different file name suffix."
)
+
+ +
+
+ +

◆ DEFINE_string() [18/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_heatmaps ,
"" ,
"Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" " must be enabled."  
)
+
+ +
+
+ +

◆ DEFINE_string() [19/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_heatmaps_format,
"png",
"File extension and format for `write_heatmaps`, analogous to `write_images_format`. For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for floating values. See `doc/02_output.md` for more details."
)
+
+ +
+
+ +

◆ DEFINE_string() [20/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_images ,
"" ,
"Directory to write rendered frames in `write_images_format` image format."  
)
+
+ +
+
+ +

◆ DEFINE_string() [21/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_images_format,
"png",
"File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV function cv::imwrite for all compatible extensions."
)
+
+ +
+
+ +

◆ DEFINE_string() [22/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_json,
"",
"Directory to write OpenPose output in JSON format. It includes body, hand, and face pose keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."
)
+
+ +
+
+ +

◆ DEFINE_string() [23/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_keypoint ,
"" ,
"(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" " with `write_keypoint_format`."  
)
+
+ +
+
+ +

◆ DEFINE_string() [24/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_keypoint_format,
"yml",
"(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml, yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."
)
+
+ +
+
+ +

◆ DEFINE_string() [25/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_video,
"",
"Full file path to write rendered frames in motion JPEG video format. It might fail if the final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag `write_video_fps` controls FPS. Alternatively, the video extension can be `.mp4`, resulting in a file with a much smaller size and allowing `--write_video_with_audio`. However, that would require: 1) Ubuntu or Mac system, 2) FFmpeg library installed (`sudo apt-get install ffmpeg`), 3) the temporary creation of a folder with the same file path as the final video (without the extension) to store the intermediate frames that will later be used to generate the final MP4 video."
)
+
+ +
+
+ +

◆ DEFINE_string() [26/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_video_3d,
"",
"Analogous to `--write_video`, but applied to the 3D output."
)
+
+ +
+
+ +

◆ DEFINE_string() [27/27]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_string (write_video_adam,
"",
"Experimental, not available yet. Analogous to `--write_video`, but applied to Adam model."
)
+
+ +
+
+ +

◆ DEFINE_uint64() [1/3]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_uint64 (frame_first,
0,
"Start on desired frame number. Indexes are 0-based, i.e., the first frame has index 0."
)
+
+ +
+
+ +

◆ DEFINE_uint64() [2/3]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_uint64 (frame_last,
-1,
"Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to 10, it will process 11 frames (0-10)."
)
+
+ +
+
+ +

◆ DEFINE_uint64() [3/3]

+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
DEFINE_uint64 (frame_step,
1,
"Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames 0, 5, 10, etc."
)
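A sketch of how `frame_first`, `frame_step`, and `frame_last` interact when selecting frames (illustrative loop only, not the actual producer code; `processFrame` is hypothetical):

// With --frame_first 0 --frame_step 5 --frame_last -1 this visits frames 0, 5, 10, ...
for (long long frame = frameFirst; frameLast < 0 || frame <= frameLast; frame += frameStep)
    processFrame(frame);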
+
+ +
+
+
+
+ + + + diff --git a/web/html/doc/flags_8hpp.js b/web/html/doc/flags_8hpp.js new file mode 100644 index 000000000..06e1c8f93 --- /dev/null +++ b/web/html/doc/flags_8hpp.js @@ -0,0 +1,94 @@ +var flags_8hpp = +[ + [ "DEFINE_bool", "flags_8hpp.html#af12ef9f66fbf74e05c08b69caf54821e", null ], + [ "DEFINE_bool", "flags_8hpp.html#ad677c3dfae13a1ec9a3ee2fabe9e37a0", null ], + [ "DEFINE_bool", "flags_8hpp.html#af4ef631dab577c13f59e10b626c580f6", null ], + [ "DEFINE_bool", "flags_8hpp.html#a9b1025da93c8ab21c0fdfe4941f26ad4", null ], + [ "DEFINE_bool", "flags_8hpp.html#a245491f4f0bb36e5e8655877402f50eb", null ], + [ "DEFINE_bool", "flags_8hpp.html#ae9ea69ae8e64ee99bb4516199f847980", null ], + [ "DEFINE_bool", "flags_8hpp.html#abada704ec59515bb12563262a3f21aeb", null ], + [ "DEFINE_bool", "flags_8hpp.html#ac0db13ec99b09bf9bd38582da33cff1d", null ], + [ "DEFINE_bool", "flags_8hpp.html#abd8fa8293ee3a05d4c5a2b6082460ab0", null ], + [ "DEFINE_bool", "flags_8hpp.html#a064e35f07a2835d7b4b5d31f0f625865", null ], + [ "DEFINE_bool", "flags_8hpp.html#a2c805a3cd1797a9f67783ed5c9b7c5b1", null ], + [ "DEFINE_bool", "flags_8hpp.html#ad9114bc8e1fc8f306e5296eaae5d542f", null ], + [ "DEFINE_bool", "flags_8hpp.html#a4670eb3cdedb3f3bac3886e2c21a7750", null ], + [ "DEFINE_bool", "flags_8hpp.html#aefe90773deaaa525b3b115d37b46e281", null ], + [ "DEFINE_bool", "flags_8hpp.html#a4d830b4bcbd7998d8c6de665c0531ce9", null ], + [ "DEFINE_bool", "flags_8hpp.html#ab8ac5e6be119dc06f351810053ea8bcf", null ], + [ "DEFINE_bool", "flags_8hpp.html#a9973307b6bd2af114083ba1badf4c297", null ], + [ "DEFINE_bool", "flags_8hpp.html#adf7a6ca551fbd934deb6784bfe37f897", null ], + [ "DEFINE_bool", "flags_8hpp.html#af50c82bf332c1a699f0615e7eb734c51", null ], + [ "DEFINE_double", "flags_8hpp.html#af9d388afd71b21640a573e6e8cad4c1a", null ], + [ "DEFINE_double", "flags_8hpp.html#a33562cf43d115a4d26f9958aa04c15ff", null ], + [ "DEFINE_double", "flags_8hpp.html#a87455dc2555757a087e99d8b52138835", null ], + [ "DEFINE_double", "flags_8hpp.html#a9b80aae5395b7d99c980198374bde9f2", null ], + [ "DEFINE_double", "flags_8hpp.html#a83309bdfd7daadfb89be65edf399ac9a", null ], + [ "DEFINE_double", "flags_8hpp.html#a8dfdbe0ff3a68f46e440f379621f8f0a", null ], + [ "DEFINE_double", "flags_8hpp.html#ace91dac10649fcbe836a71459b2f2584", null ], + [ "DEFINE_double", "flags_8hpp.html#aea6dc9d3cb9ea69426d012d1f41fadf0", null ], + [ "DEFINE_double", "flags_8hpp.html#ac2cc95296b63a048521a2c41dce82b45", null ], + [ "DEFINE_double", "flags_8hpp.html#a0fe12ed9bee07b6120d595b3a1b85b15", null ], + [ "DEFINE_double", "flags_8hpp.html#a349e235659cc7b31dcf5db0d3d468fce", null ], + [ "DEFINE_double", "flags_8hpp.html#ad90e61b31f6bd48c3514195da36ff31c", null ], + [ "DEFINE_double", "flags_8hpp.html#a166e98128271506645ce14000faace73", null ], + [ "DEFINE_double", "flags_8hpp.html#abecb5274ddd75ca51fb40064478b1ed3", null ], + [ "DEFINE_double", "flags_8hpp.html#af437a0d8f293cd02b992a94b268571a4", null ], + [ "DEFINE_double", "flags_8hpp.html#af4490397ad693c3d51835699a5dcddf3", null ], + [ "DEFINE_int32", "flags_8hpp.html#ac9b1dcda85ac079222769931cad6bebc", null ], + [ "DEFINE_int32", "flags_8hpp.html#a13dcbbdf12e9e72eb29ccf25d7a7cd42", null ], + [ "DEFINE_int32", "flags_8hpp.html#aa2bc11c618a37698d88f7ae100e1729f", null ], + [ "DEFINE_int32", "flags_8hpp.html#a807c2eb873d9e727f14d6c7ee6e02e11", null ], + [ "DEFINE_int32", "flags_8hpp.html#ab598c69ed7164089afcdd4149c24a5eb", null ], + [ "DEFINE_int32", "flags_8hpp.html#a71a0fc42dd98d1739571e4f7fed4873c", null ], + [ "DEFINE_int32", 
"flags_8hpp.html#a796a3b14805d5e0b01b21b9bab844382", null ], + [ "DEFINE_int32", "flags_8hpp.html#a513eca9e40da3a149e02c0c1fb6d10d3", null ], + [ "DEFINE_int32", "flags_8hpp.html#adbd3e76b28ecc7415ccb782c3419a9de", null ], + [ "DEFINE_int32", "flags_8hpp.html#ae7ff5e3adea9c5f572455ec30dd3fbff", null ], + [ "DEFINE_int32", "flags_8hpp.html#a46b9edf947872e29ea5cbd7a95bee719", null ], + [ "DEFINE_int32", "flags_8hpp.html#ad696d262dc852c2f872470b90c25fafe", null ], + [ "DEFINE_int32", "flags_8hpp.html#aa3c62563ce9d99c25d4a2977f253c6c7", null ], + [ "DEFINE_int32", "flags_8hpp.html#a2c213e3a0c01a36f52667d1707b49062", null ], + [ "DEFINE_int32", "flags_8hpp.html#a547efed657b6e562d8d5f071124fcf17", null ], + [ "DEFINE_int32", "flags_8hpp.html#aae4cfb31c1a5934475d8cbd6b2f8268e", null ], + [ "DEFINE_int32", "flags_8hpp.html#a8bd040787ac075ae4cf483be01fe2c5f", null ], + [ "DEFINE_int32", "flags_8hpp.html#a844330d264f5648ae7d99b76f72f391a", null ], + [ "DEFINE_int32", "flags_8hpp.html#ac5e8f82d85a3eb0ee72a64569395497c", null ], + [ "DEFINE_int32", "flags_8hpp.html#add0ca9baf682a84f3236e7f5c001db06", null ], + [ "DEFINE_int32", "flags_8hpp.html#a2d8a3ae1f10dd657619e2a5d2dcb4b61", null ], + [ "DEFINE_int32", "flags_8hpp.html#a61f245285b5a4b77b1d923276fe6f995", null ], + [ "DEFINE_int32", "flags_8hpp.html#a6561fc0841b80f5c19a1c4bc549175e9", null ], + [ "DEFINE_int32", "flags_8hpp.html#a6d2331153c7051c742d11dcb0a4220ec", null ], + [ "DEFINE_int32", "flags_8hpp.html#a17550ab833803b7862beaab957642af6", null ], + [ "DEFINE_int32", "flags_8hpp.html#a80cdeb8d094d26ae5840a74ccff8613c", null ], + [ "DEFINE_string", "flags_8hpp.html#acd0c383a2043852c83e284b669a5cf7e", null ], + [ "DEFINE_string", "flags_8hpp.html#abcc67acb9ca2d225394445eb6017bc4d", null ], + [ "DEFINE_string", "flags_8hpp.html#ab1d4b66fac361d1f3f450cd6bc5311d4", null ], + [ "DEFINE_string", "flags_8hpp.html#a1da3324efb2a917d0714100dcdb13aba", null ], + [ "DEFINE_string", "flags_8hpp.html#a81e3bebeb0cec269b90097fb5856c96f", null ], + [ "DEFINE_string", "flags_8hpp.html#add5d5807feef88090f8c9d11bf904ba8", null ], + [ "DEFINE_string", "flags_8hpp.html#a02962b73af4084b90494b777ff1826c1", null ], + [ "DEFINE_string", "flags_8hpp.html#a456ac1650914494fbdacd53b55420e2b", null ], + [ "DEFINE_string", "flags_8hpp.html#a8e45f79c948490e55be06e3541b3681f", null ], + [ "DEFINE_string", "flags_8hpp.html#aa0cc9af40fd8fdee2d8c61da206913dc", null ], + [ "DEFINE_string", "flags_8hpp.html#a4fff2a82464bb9e180e04f70f0d5cbad", null ], + [ "DEFINE_string", "flags_8hpp.html#a81edc2bb181cd79c98bfae1520f8ab71", null ], + [ "DEFINE_string", "flags_8hpp.html#abd20da73260490fba6e09a17c235fc4a", null ], + [ "DEFINE_string", "flags_8hpp.html#a7ffa026d9b667e5551909aba895f0dfb", null ], + [ "DEFINE_string", "flags_8hpp.html#a5690d1f0bce6904d9ccea011b0a0262f", null ], + [ "DEFINE_string", "flags_8hpp.html#a1f0ce14d63633af19e375d6fbcccc463", null ], + [ "DEFINE_string", "flags_8hpp.html#a4ebc35e01d48db77575a1cdd53ac0815", null ], + [ "DEFINE_string", "flags_8hpp.html#af1f0085881667603ed4e0404d7140bdc", null ], + [ "DEFINE_string", "flags_8hpp.html#aac91c51c83200f18076e7354067ccbb0", null ], + [ "DEFINE_string", "flags_8hpp.html#a4a4d36b7f90a4d53a0fa29f86bbbb9aa", null ], + [ "DEFINE_string", "flags_8hpp.html#a91ba99b997951b09eab545a40c019f85", null ], + [ "DEFINE_string", "flags_8hpp.html#a66ec3a67de281684d9ff60c7b80c9430", null ], + [ "DEFINE_string", "flags_8hpp.html#a8763644943c3413220cfe6bf8f385d44", null ], + [ "DEFINE_string", 
"flags_8hpp.html#a488d04acd61a19fe00cd2e56844dd8c5", null ], + [ "DEFINE_string", "flags_8hpp.html#abd5499ff7014225c02e6149bde93e3a3", null ], + [ "DEFINE_string", "flags_8hpp.html#a85ed9ce7f145fad05a50344a6fdbee37", null ], + [ "DEFINE_string", "flags_8hpp.html#a08c988c91c179c16944f9f703c24324b", null ], + [ "DEFINE_uint64", "flags_8hpp.html#a99f9d7e0dcbf9f6ceddf589dc482d17a", null ], + [ "DEFINE_uint64", "flags_8hpp.html#a9fbfea8bf51a80ff2254f329366a19b8", null ], + [ "DEFINE_uint64", "flags_8hpp.html#ab7c61c5b25d4b3db1284761933c66aed", null ] +]; \ No newline at end of file diff --git a/web/html/doc/flags_8hpp_source.html b/web/html/doc/flags_8hpp_source.html new file mode 100644 index 000000000..b263d0464 --- /dev/null +++ b/web/html/doc/flags_8hpp_source.html @@ -0,0 +1,386 @@ + + + + + + + +OpenPose: include/openpose/flags.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
flags.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FLAGS_HPP
+
2 #define OPENPOSE_FLAGS_HPP
+
3 
+
4 // Note: This class is not included within the basic OpenPose `headers.hpp` and must be explicitly included. In
+
5 // addition, Google Flags library must also be linked to the resulting binary or library. OpenPose library does
+
6 // not force to use Google Flags, but the OpenPose examples do so.
+
7 
+
8 // GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string
+
9 #include <gflags/gflags.h>
+
10 // Allow Google Flags in Ubuntu 14
+
11 #ifndef GFLAGS_GFLAGS_H_
+
12  namespace gflags = google;
+
13 #endif
+
14 
+
15 // See all the available parameter options with the `--help` flag. E.g., `build/examples/openpose/openpose.bin --help`
+
16 // Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
+
17 // executable. E.g., for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
+
18 // Debugging/Other
+
19 DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any opLog() message,"
+
20  " while 255 will not output any. Current OpenPose library messages are in the range 0-4:"
+
21  " 1 for low priority messages and 4 for important ones.");
+
22 DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful"
+
23  " for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with"
+
24  " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the"
+
25  " error.");
+
26 DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"
+
27  " runtime statistics at this frame number.");
+
28 #ifndef OPENPOSE_FLAGS_DISABLE_POSE
+
29 #ifndef OPENPOSE_FLAGS_DISABLE_PRODUCER
+
30 // Producer
+
31 DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative"
+
32  " number (by default), to auto-detect and open the first available camera.");
+
33 DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the"
+
34  " default 1280x720 for `--camera`, or the maximum flir camera resolution available for"
+
35  " `--flir_camera`");
+
36 DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default"
+
37  " example video.");
+
38 DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20"
+
39  " images. Read all standard formats (jpg, png, bmp, etc.).");
+
40 DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera.");
+
41 DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir"
+
42  " camera index to run, where 0 corresponds to the detected flir camera with the lowest"
+
43  " serial number, and `n` to the `n`-th lowest serial number camera.");
+
44 DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP.");
+
45 DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e., the first frame has index 0.");
+
46 DEFINE_uint64(frame_step, 1, "Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames"
+
47  " 0, 5, 10, etc..");
+
48 DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to"
+
49  " 10, it will process 11 frames (0-10).");
+
50 DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g., for real time webcam demonstrations).");
+
51 DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270.");
+
52 DEFINE_bool(frames_repeat, false, "Repeat frames when finished.");
+
53 DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g., for video). If the processing time is"
+
54  " too long, it will skip frames. If it is too fast, it will slow it down.");
+
55 DEFINE_string(camera_parameter_path, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located. If there"
+
56  " is only 1 XML file (for single video, webcam, or images from the same camera), you must"
+
57  " specify the whole XML file path (ending in .xml).");
+
58 DEFINE_bool(frame_undistort, false, "If false (default), it will not undistort the image, if true, it will undistortionate them"
+
59  " based on the camera parameters found in `camera_parameter_path`");
+
60 #endif // OPENPOSE_FLAGS_DISABLE_PRODUCER
+
61 // OpenPose
+
62 DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located.");
+
63 DEFINE_string(prototxt_path, "", "The combination `--model_folder` + `--prototxt_path` represents the whole path to the"
+
64  " prototxt file. If empty, it will use the default OpenPose ProtoTxt file.");
+
65 DEFINE_string(caffemodel_path, "", "The combination `--model_folder` + `--caffemodel_path` represents the whole path to the"
+
66  " caffemodel file. If empty, it will use the default OpenPose CaffeModel file.");
+
67 DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the"
+
68  " input image resolution.");
+
69 DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your"
+
70  " machine.");
+
71 DEFINE_int32(num_gpu_start, 0, "GPU device start number.");
+
72 DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e., the scale of the (x,y)"
+
73  " coordinates that will be saved with the `write_json` & `write_keypoint` flags."
+
74  " Select `0` to scale it to the original source resolution; `1`to scale it to the net output"
+
75  " size (set with `net_resolution`); `2` to scale it to the final output size (set with"
+
76  " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left"
+
77  " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where"
+
78  " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non"
+
79  " related with `scale_number` and `scale_gap`.");
+
80 DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with"
+
81  " top scores. The score is based in person area over the image, body part score, as well as"
+
82  " joint score (between each pair of connected body parts). Useful if you know the exact"
+
83  " number of people in the scene, so it can remove false positives (if all the people have"
+
84  " been detected. However, it might also include false negatives by removing very small or"
+
85  " highly occluded people. -1 will keep them all.");
+
86 DEFINE_bool(maximize_positives, false, "It reduces the thresholds to accept a person candidate. It highly increases both false and"
+
87  " true positives. I.e., it maximizes average recall but could harm average precision.");
+
88 DEFINE_double(fps_max, -1., "Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as"
+
89  " possible. Example usage: If OpenPose is displaying images too quickly, this can reduce"
+
90  " the speed so the user can analyze better each frame from the GUI.");
+
91 // OpenPose Body Pose
+
92 DEFINE_int32(body, 1, "Select 0 to disable body keypoint detection (e.g., for faster but less accurate face"
+
93  " keypoint detection, custom hand detector, etc.), 1 (default) for body keypoint"
+
94  " estimation, and 2 to disable its internal body pose estimation network but still"
+
95  " still run the greedy association parsing algorithm");
+
96 DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g., `BODY_25` (fastest for CUDA version, most accurate, and includes"
+
97  " foot keypoints), `COCO` (18 keypoints), `MPI` (15 keypoints, least accurate model but"
+
98  " fastest on CPU), `MPI_4_layers` (15 keypoints, even faster but less accurate).");
+
99 DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is"
+
100  " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"
+
101  " closest aspect ratio possible to the images or videos to be processed. Using `-1` in"
+
102  " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"
+
103  " input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"
+
104  " e.g., full HD (1980x1080) and HD (1280x720) resolutions.");
+
105 DEFINE_double(net_resolution_dynamic, 1., "This flag only applies to images or custom inputs (not to video or webcam). If it is zero"
+
106  " or a negative value, it means that using `-1` in `net_resolution` will behave as explained"
+
107  " in its description. Otherwise, and to avoid out of memory errors, the `-1` in"
+
108  " `net_resolution` will clip to this value times the default 16/9 aspect ratio value (which"
+
109  " is 656 width for a 368 height). E.g., `net_resolution_dynamic 10 net_resolution -1x368`"
+
110  " will clip to 6560x368 (10 x 656). Recommended 1 for small GPUs (to avoid out of memory"
+
111  " errors but maximize speed) and 0 for big GPUs (for maximum accuracy and speed).");
+
112 DEFINE_int32(scale_number, 1, "Number of scales to average.");
+
113 DEFINE_double(scale_gap, 0.25, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."
+
114  " If you want to change the initial scale, you actually want to multiply the"
+
115  " `net_resolution` by your desired initial scale.");
+
116 // OpenPose Body Pose Heatmaps and Part Candidates
+
117 DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"
+
118  " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps."
+
119  " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential"
+
120  " memory order: body parts + bkg + PAFs. It will follow the order on"
+
121  " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will"
+
122  " considerably decrease. Not required for OpenPose, enable it only if you intend to"
+
123  " explicitly use this information later.");
+
124 DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to"
+
125  " background.");
+
126 DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs.");
+
127 DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer"
+
128  " rounded [0,255]; and 3 for no scaling.");
+
129 DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the"
+
130  " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all"
+
131  " the detected body parts, before being assembled into people. Note that the number of"
+
132  " candidates is equal or higher than the number of final body parts (i.e., after being"
+
133  " assembled into people). The empty body parts are filled with 0s. Program speed will"
+
134  " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly"
+
135  " use this information.");
+
136 DEFINE_double(upsampling_ratio, 0., "Upsampling ratio between the `net_resolution` and the output net results. A value less"
+
137  " or equal than 0 (default) will use the network default value (recommended).");
+
138 // OpenPose Face
+
139 DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g."
+
140  " `model_folder`. Note that this will considerable slow down the performance and increase"
+
141  " the required GPU memory. In addition, the greater number of people on the image, the"
+
142  " slower OpenPose will be.");
+
143 DEFINE_int32(face_detector, 0, "Kind of face rectangle detector. Select 0 (default) to select OpenPose body detector (most"
+
144  " accurate one and fastest one if body is enabled), 1 to select OpenCV face detector (not"
+
145  " implemented for hands), 2 to indicate that it will be provided by the user, or 3 to"
+
146  " also apply hand tracking (only for hand). Hand tracking might improve hand keypoint"
+
147  " detection for webcam (if the frame rate is high enough, i.e., >7 FPS per GPU) and video."
+
148  " This is not person ID tracking, it simply looks for hands in positions at which hands were"
+
149  " located in previous frames, but it does not guarantee the same person ID among frames.");
+
150 DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint"
+
151  " detector. 320x320 usually works fine while giving a substantial speed up when multiple"
+
152  " faces on the image.");
+
153 // OpenPose Hand
+
154 DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g."
+
155  " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase"
+
156  " the required GPU memory and its speed depends on the number of people.");
+
157 DEFINE_int32(hand_detector, 0, "Kind of hand rectangle detector. Analogous to `--face_detector`.");
+
158 DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint"
+
159  " detector.");
+
160 DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"
+
161  " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4.");
+
162 DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range"
+
163  " between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if"
+
164  " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.");
+
165 // OpenPose 3-D Reconstruction
+
166 DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."
+
167  " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"
+
168  " results. Note that it will only display 1 person. If multiple people is present, it will"
+
169  " fail.");
+
170 DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will"
+
171  " require max(2, min(4, #cameras-1)) cameras to see the keypoint in order to reconstruct"
+
172  " it.");
+
173 DEFINE_int32(3d_views, -1, "Complementary option for `--image_dir` or `--video`. OpenPose will read as many images per"
+
174  " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that"
+
175  " `--camera_parameter_path` must be set. OpenPose must find as many `xml` files in the"
+
176  " parameter folder as this number indicates.");
+
177 // Extra algorithms
+
178 DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames.");
+
179 DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The"
+
180  " value indicates the number of frames where tracking is run between each OpenPose keypoint"
+
181  " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint"
+
182  " detector and tracking for potentially higher accuracy than only OpenPose.");
+
183 DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D"
+
184  " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing"
+
185  " the number of threads will increase the speed but also the global system latency.");
+
186 // OpenPose Rendering
+
187 DEFINE_int32(part_to_show, 0, "Prediction channel to visualize: 0 (default) for all the body parts, 1 for the background"
+
188  " heat map, 2 for the superposition of heatmaps, 3 for the superposition of PAFs,"
+
189  " 4-(4+#keypoints) for each body part heat map, the following ones for each body part pair"
+
190  " PAF.");
+
191 DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"
+
192  " background, instead of being rendered into the original image. Related: `part_to_show`,"
+
193  " `alpha_pose`, and `alpha_pose`.");
+
194 // OpenPose Rendering Pose
+
195 DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be"
+
196  " rendered. Note: Rendered refers only to visual display in the OpenPose basic GUI, not in"
+
197  " the saved results. Generally, a high threshold (> 0.5) will only render very clear body"
+
198  " parts; while small thresholds (~0.1) will also output guessed and occluded keypoints,"
+
199  " but also more false positives (i.e., wrong detections).");
+
200 DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"
+
201  " (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if"
+
202  " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render"
+
203  " both `outputData` and `cvOutputData` with the original image and desired body part to be"
+
204  " shown (i.e., keypoints, heat maps or PAFs).");
+
205 DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will"
+
206  " hide it. Only valid for GPU rendering.");
+
207 DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the"
+
208  " heatmap, 0 will only show the frame. Only valid for GPU rendering.");
+
209 // OpenPose Rendering Face
+
210 DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints.");
+
211 DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same"
+
212  " configuration that `render_pose` is using.");
+
213 DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face.");
+
214 DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face.");
+
215 // OpenPose Rendering Hand
+
216 DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints.");
+
217 DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same"
+
218  " configuration that `render_pose` is using.");
+
219 DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand.");
+
220 DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand.");
+
221 #ifndef OPENPOSE_FLAGS_DISABLE_DISPLAY
+
222 // Display
+
223 DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle).");
+
224 DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g., number of current frame and people). It"
+
225  " does not affect the pose rendering.");
+
226 DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server"
+
227  " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D"
+
228  " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display.");
+
229 #endif // OPENPOSE_FLAGS_DISABLE_DISPLAY
+
230 // Command Line Interface Verbose
+
231 DEFINE_double(cli_verbose, -1.f, "If -1, it will be disabled (default). If it is a positive integer number, it will print on"
+
232  " the command line every `verbose` frames. If number in the range (0,1), it will print the"
+
233  " progress every `verbose` times the total of frames.");
+
234 // Result Saving
+
235 DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format.");
+
236 DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV"
+
237  " function cv::imwrite for all compatible extensions.");
+
238 DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the"
+
239  " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"
+
240  " `write_video_fps` controls FPS. Alternatively, the video extension can be `.mp4`,"
+
241  " resulting in a file with a much smaller size and allowing `--write_video_with_audio`."
+
242  " However, that would require: 1) Ubuntu or Mac system, 2) FFmpeg library installed"
+
243  " (`sudo apt-get install ffmpeg`), 3) the creation temporarily of a folder with the same"
+
244  " file path than the final video (without the extension) to storage the intermediate frames"
+
245  " that will later be used to generate the final MP4 video.");
+
246 DEFINE_double(write_video_fps, -1., "Frame rate for the recorded video. By default, it will try to get the input frames producer"
+
247  " frame rate (e.g., input video or webcam frame rate). If the input frames producer does not"
+
248  " have a set FPS (e.g., image_dir or webcam if OpenCV not compiled with its support), set"
+
249  " this value accordingly (e.g., to the frame rate displayed by the OpenPose GUI).");
+
250 DEFINE_bool(write_video_with_audio, false, "If the input is video and the output is so too, it will save the video with audio. It"
+
251  " requires the output video file path finishing in `.mp4` format (see `write_video` for"
+
252  " details).");
+
253 DEFINE_string(write_video_3d, "", "Analogous to `--write_video`, but applied to the 3D output.");
+
254 DEFINE_string(write_video_adam, "", "Experimental, not available yet. Analogous to `--write_video`, but applied to Adam model.");
+
255 DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose"
+
256  " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled).");
+
257 DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format. If foot, face,"
+
258  " hands, etc. JSON is also desired (`--write_coco_json_variants`), they are saved with"
+
259  " different file name suffix.");
+
260 DEFINE_int32(write_coco_json_variants, 1, "Add 1 for body, add 2 for foot, 4 for face, and/or 8 for hands. Use 0 to use all the"
+
261  " possible candidates. E.g., 7 would mean body+foot+face COCO JSON.");
+
262 DEFINE_int32(write_coco_json_variant, 0, "Currently, this option is experimental and only makes effect on car JSON generation. It"
+
263  " selects the COCO variant for cocoJsonSaver.");
+
264 DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"
+
265  " must be enabled.");
+
266 DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`."
+
267  " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"
+
268  " floating values. See `doc/02_output.md` for more details.");
+
269 DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"
+
270  " with `write_keypoint_format`.");
+
271 DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"
+
272  " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead.");
+
273 // Result Saving - Extra Algorithms
+
274 DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`.");
+
275 // UDP Communication
+
276 DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`.");
+
277 DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication.");
+
278 #endif // OPENPOSE_FLAGS_DISABLE_POSE
+
279 
+
280 #endif // OPENPOSE_FLAGS_HPP
+
DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e., the first frame has index 0.")
+
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any opLog() message," " while 255 will not output any. Current OpenPose library messages are in the range 0-4:" " 1 for low priority messages and 4 for important ones.")
+
DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" " `--flir_camera`")
+
DEFINE_double(fps_max, -1., "Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as" " possible. Example usage: if OpenPose is displaying images too quickly, this can reduce" " the speed so the user can better analyze each frame in the GUI.")
+
DEFINE_bool(disable_multi_thread, false, "It slightly reduces the frame rate in order to greatly reduce the lag. Mainly useful" " for 1) cases where low latency is needed (e.g., webcam in real-time scenarios with" " low-range GPU devices); and 2) debugging OpenPose when it is crashing, in order to locate" " the error.")
+
+
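The flags listed above are plain gflags definitions (the OPENPOSE_FLAGS_HPP include guard indicates this is the tail of OpenPose's flags.hpp), so a demo binary parses them once at startup before configuring the rest of the pipeline. A minimal, illustrative sketch follows; this `main` is not part of OpenPose and assumes the header is included from a single translation unit, since the DEFINE_* macros define the flag variables:

    #include <gflags/gflags.h>
    #include <openpose/flags.hpp> // the DEFINE_* flags documented above
    #include <iostream>

    int main(int argc, char* argv[])
    {
        // Parse --write_json, --fps_max, etc. into the gflags-generated FLAGS_* globals.
        gflags::ParseCommandLineFlags(&argc, &argv, /*remove_flags*/ true);
        std::cout << "JSON output directory: " << FLAGS_write_json << std::endl;
        std::cout << "Maximum processing frame rate: " << FLAGS_fps_max << std::endl;
        return 0;
    }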
+ + + + diff --git a/web/html/doc/flags_to_open_pose_8hpp.html b/web/html/doc/flags_to_open_pose_8hpp.html new file mode 100644 index 000000000..091509d0a --- /dev/null +++ b/web/html/doc/flags_to_open_pose_8hpp.html @@ -0,0 +1,143 @@ + + + + + + + +OpenPose: include/openpose/utilities/flagsToOpenPose.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
flagsToOpenPose.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + + + + + + + + + + + + + + + + + +

+Functions

OP_API PoseMode op::flagsToPoseMode (const int poseModeInt)
 
OP_API PoseModel op::flagsToPoseModel (const String &poseModeString)
 
OP_API ScaleMode op::flagsToScaleMode (const int keypointScaleMode)
 
OP_API ScaleMode op::flagsToHeatMapScaleMode (const int heatMapScaleMode)
 
OP_API Detector op::flagsToDetector (const int detector)
 
OP_API ProducerType op::flagsToProducerType (const String &imageDirectory, const String &videoPath, const String &ipCameraPath, const int webcamIndex, const bool flirCamera)
 
OP_API std::pair< ProducerType, String > op::flagsToProducer (const String &imageDirectory, const String &videoPath, const String &ipCameraPath=String(""), const int webcamIndex=-1, const bool flirCamera=false, const int flirCameraIndex=-1)
 
OP_API std::vector< HeatMapType > op::flagsToHeatMaps (const bool heatMapsAddParts=false, const bool heatMapsAddBkg=false, const bool heatMapsAddPAFs=false)
 
OP_API RenderMode op::flagsToRenderMode (const int renderFlag, const bool gpuBuggy=false, const int renderPoseFlag=-2)
 
OP_API DisplayMode op::flagsToDisplayMode (const int display, const bool enabled3d)
 
OP_API Point< int > op::flagsToPoint (const String &pointString, const String &pointExample)
 
+
+
+ + + + diff --git a/web/html/doc/flags_to_open_pose_8hpp.js b/web/html/doc/flags_to_open_pose_8hpp.js new file mode 100644 index 000000000..d727fde27 --- /dev/null +++ b/web/html/doc/flags_to_open_pose_8hpp.js @@ -0,0 +1,14 @@ +var flags_to_open_pose_8hpp = +[ + [ "flagsToDetector", "flags_to_open_pose_8hpp.html#a9f585930a5246e4a9a70145fa8763447", null ], + [ "flagsToDisplayMode", "flags_to_open_pose_8hpp.html#afdf2dd76cbae54789a139d9415790f82", null ], + [ "flagsToHeatMaps", "flags_to_open_pose_8hpp.html#ad3b02ca66d11f4129372f4a9f98c6437", null ], + [ "flagsToHeatMapScaleMode", "flags_to_open_pose_8hpp.html#aed9ab5282e3e60f22dc11c301af897e6", null ], + [ "flagsToPoint", "flags_to_open_pose_8hpp.html#a0e1275fd8690a55200fcd193c94dcf08", null ], + [ "flagsToPoseMode", "flags_to_open_pose_8hpp.html#af5ec8b7e6271798cbd09475766c64d2f", null ], + [ "flagsToPoseModel", "flags_to_open_pose_8hpp.html#a60ab295fba5d41b31d6ba5a4942889a9", null ], + [ "flagsToProducer", "flags_to_open_pose_8hpp.html#a8264a6feec695adef80d40940863d511", null ], + [ "flagsToProducerType", "flags_to_open_pose_8hpp.html#a1ca09f1d0e1f01d95842e99ebeef0631", null ], + [ "flagsToRenderMode", "flags_to_open_pose_8hpp.html#a70f65da8f70ebd07b093932927187c90", null ], + [ "flagsToScaleMode", "flags_to_open_pose_8hpp.html#abe3f4d783191416b8e62e54c953fe36b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/flags_to_open_pose_8hpp_source.html b/web/html/doc/flags_to_open_pose_8hpp_source.html new file mode 100644 index 000000000..896af8052 --- /dev/null +++ b/web/html/doc/flags_to_open_pose_8hpp_source.html @@ -0,0 +1,173 @@ + + + + + + + +OpenPose: include/openpose/utilities/flagsToOpenPose.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
flagsToOpenPose.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_UTILITIES_FLAGS_TO_OPEN_POSE_HPP
+
2 #define OPENPOSE_UTILITIES_FLAGS_TO_OPEN_POSE_HPP
+
3 
+ + + + + + +
10 
+
11 namespace op
+
12 {
+
13  OP_API PoseMode flagsToPoseMode(const int poseModeInt);
+
14 
+
15  OP_API PoseModel flagsToPoseModel(const String& poseModeString);
+
16 
+
17  OP_API ScaleMode flagsToScaleMode(const int keypointScaleMode);
+
18 
+
19  OP_API ScaleMode flagsToHeatMapScaleMode(const int heatMapScaleMode);
+
20 
+
21  OP_API Detector flagsToDetector(const int detector);
+
22 
+
23  // Determine type of frame source
+ +
25  const String& imageDirectory, const String& videoPath, const String& ipCameraPath,
+
26  const int webcamIndex, const bool flirCamera);
+
27 
+
28  OP_API std::pair<ProducerType, String> flagsToProducer(
+
29  const String& imageDirectory, const String& videoPath, const String& ipCameraPath = String(""),
+
30  const int webcamIndex = -1, const bool flirCamera = false, const int flirCameraIndex = -1);
+
31 
+
32  OP_API std::vector<HeatMapType> flagsToHeatMaps(
+
33  const bool heatMapsAddParts = false, const bool heatMapsAddBkg = false,
+
34  const bool heatMapsAddPAFs = false);
+
35 
+ +
37  const int renderFlag, const bool gpuBuggy = false, const int renderPoseFlag = -2);
+
38 
+
39  OP_API DisplayMode flagsToDisplayMode(const int display, const bool enabled3d);
+
40 
+
45  OP_API Point<int> flagsToPoint(const String& pointString, const String& pointExample);
+
46 }
+
47 
+
48 #endif // OPENPOSE_UTILITIES_FLAGS_TO_OPEN_POSE_HPP
+ + + + +
#define OP_API
Definition: macros.hpp:18
+ +
OP_API Point< int > flagsToPoint(const String &pointString, const String &pointExample)
+
Detector
Definition: enumClasses.hpp:15
+
OP_API ProducerType flagsToProducerType(const String &imageDirectory, const String &videoPath, const String &ipCameraPath, const int webcamIndex, const bool flirCamera)
+
PoseMode
Definition: enumClasses.hpp:7
+
ProducerType
Definition: enumClasses.hpp:30
+
OP_API PoseModel flagsToPoseModel(const String &poseModeString)
+
OP_API RenderMode flagsToRenderMode(const int renderFlag, const bool gpuBuggy=false, const int renderPoseFlag=-2)
+
OP_API std::pair< ProducerType, String > flagsToProducer(const String &imageDirectory, const String &videoPath, const String &ipCameraPath=String(""), const int webcamIndex=-1, const bool flirCamera=false, const int flirCameraIndex=-1)
+
OP_API Detector flagsToDetector(const int detector)
+
OP_API ScaleMode flagsToScaleMode(const int keypointScaleMode)
+
OP_API std::vector< HeatMapType > flagsToHeatMaps(const bool heatMapsAddParts=false, const bool heatMapsAddBkg=false, const bool heatMapsAddPAFs=false)
+
DisplayMode
Definition: enumClasses.hpp:11
+
OP_API ScaleMode flagsToHeatMapScaleMode(const int heatMapScaleMode)
+
PoseModel
Definition: enumClasses.hpp:10
+
OP_API PoseMode flagsToPoseMode(const int poseModeInt)
+
ScaleMode
Definition: enumClasses.hpp:7
+
RenderMode
Definition: enumClasses.hpp:27
+
OP_API DisplayMode flagsToDisplayMode(const int display, const bool enabled3d)
+ + + + +
+
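The helpers above translate raw command-line flag values into OpenPose enums and structs. A short, illustrative sketch of how they are typically chained; the literal values below stand in for the gflags FLAGS_* globals used by the real demos:

    #include <openpose/utilities/flagsToOpenPose.hpp>

    void configureFromFlags()
    {
        // Parse a "WIDTHxHEIGHT" flag string into an op::Point<int>; the second argument is a
        // sample/default format string (`pointExample` in the signature above).
        const op::Point<int> netInputSize = op::flagsToPoint(op::String("656x368"), op::String("-1x368"));
        // Map the textual model name and the integer scale flag to their enum equivalents.
        const op::PoseModel poseModel = op::flagsToPoseModel(op::String("BODY_25"));
        const op::ScaleMode keypointScaleMode = op::flagsToScaleMode(0);
        // ... these values would then be passed to the wrapper configuration structs ...
        (void)netInputSize; (void)poseModel; (void)keypointScaleMode;
    }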
+ + + + diff --git a/web/html/doc/flir_reader_8hpp.html b/web/html/doc/flir_reader_8hpp.html new file mode 100644 index 000000000..0105b631a --- /dev/null +++ b/web/html/doc/flir_reader_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/producer/flirReader.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
flirReader.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FlirReader
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/flir_reader_8hpp_source.html b/web/html/doc/flir_reader_8hpp_source.html new file mode 100644 index 000000000..41ebf372f --- /dev/null +++ b/web/html/doc/flir_reader_8hpp_source.html @@ -0,0 +1,169 @@ + + + + + + + +OpenPose: include/openpose/producer/flirReader.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
flirReader.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_PRODUCER_FLIR_READER_HPP
+
2 #define OPENPOSE_PRODUCER_FLIR_READER_HPP
+
3 
+ + + +
7 
+
8 namespace op
+
9 {
+
15  class OP_API FlirReader : public Producer
+
16  {
+
17  public:
+
21  explicit FlirReader(const std::string& cameraParametersPath, const Point<int>& cameraResolution,
+
22  const bool undistortImage = true, const int cameraIndex = -1);
+
23 
+
24  virtual ~FlirReader();
+
25 
+
26  std::vector<Matrix> getCameraMatrices();
+
27 
+
28  std::vector<Matrix> getCameraExtrinsics();
+
29 
+
30  std::vector<Matrix> getCameraIntrinsics();
+
31 
+
32  std::string getNextFrameName();
+
33 
+
34  bool isOpened() const;
+
35 
+
36  void release();
+
37 
+
38  double get(const int capProperty);
+
39 
+
40  void set(const int capProperty, const double value);
+
41 
+
42  private:
+
43  SpinnakerWrapper mSpinnakerWrapper;
+
44  Point<int> mResolution;
+
45  unsigned long long mFrameNameCounter;
+
46 
+
47  Matrix getRawFrame();
+
48 
+
49  std::vector<Matrix> getRawFrames();
+
50 
+ +
52  };
+
53 }
+
54 
+
55 #endif // OPENPOSE_PRODUCER_FLIR_READER_HPP
+ +
bool isOpened() const
+
double get(const int capProperty)
+
virtual ~FlirReader()
+
std::string getNextFrameName()
+
std::vector< Matrix > getCameraMatrices()
+
FlirReader(const std::string &cameraParametersPath, const Point< int > &cameraResolution, const bool undistortImage=true, const int cameraIndex=-1)
+ +
std::vector< Matrix > getCameraIntrinsics()
+
std::vector< Matrix > getCameraExtrinsics()
+
void set(const int capProperty, const double value)
+ + + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ + + + +
+
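A hedged usage sketch for the producer above; it assumes OpenPose was built with FLIR/Spinnaker support, and the camera parameter path below is hypothetical:

    #include <openpose/producer/flirReader.hpp>

    void readFlirCalibration()
    {
        // {-1, -1} requests the default/maximum camera resolution (mirroring the `--camera_resolution` flag).
        op::FlirReader flirReader{"models/cameraParameters/flir/", op::Point<int>{-1, -1}};
        if (flirReader.isOpened())
        {
            const auto intrinsics = flirReader.getCameraIntrinsics();
            const auto extrinsics = flirReader.getCameraExtrinsics();
            // ... use the matrices, e.g., for 3-D triangulation ...
            (void)intrinsics; (void)extrinsics;
            flirReader.release();
        }
    }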
+ + + + diff --git a/web/html/doc/folderclosed.png b/web/html/doc/folderclosed.png new file mode 100644 index 000000000..bb8ab35ed Binary files /dev/null and b/web/html/doc/folderclosed.png differ diff --git a/web/html/doc/folderopen.png b/web/html/doc/folderopen.png new file mode 100644 index 000000000..d6c7f676a Binary files /dev/null and b/web/html/doc/folderopen.png differ diff --git a/web/html/doc/frame_displayer_8hpp.html b/web/html/doc/frame_displayer_8hpp.html new file mode 100644 index 000000000..b43295b82 --- /dev/null +++ b/web/html/doc/frame_displayer_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/gui/frameDisplayer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
frameDisplayer.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::FrameDisplayer
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/frame_displayer_8hpp_source.html b/web/html/doc/frame_displayer_8hpp_source.html new file mode 100644 index 000000000..a3f3678f0 --- /dev/null +++ b/web/html/doc/frame_displayer_8hpp_source.html @@ -0,0 +1,152 @@ + + + + + + + +OpenPose: include/openpose/gui/frameDisplayer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
frameDisplayer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GUI_FRAMES_DISPLAY_HPP
+
2 #define OPENPOSE_GUI_FRAMES_DISPLAY_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
13  {
+
14  public:
+
23  FrameDisplayer(const std::string& windowedName = OPEN_POSE_NAME_AND_VERSION,
+
24  const Point<int>& initialWindowedSize = Point<int>{}, const bool fullScreen = false);
+
25 
+
26  virtual ~FrameDisplayer();
+
27 
+
28  // Due to OpenCV visualization issues (all visualization functions must be in the same thread)
+ +
30 
+
35  void setFullScreenMode(const FullScreenMode fullScreenMode);
+
36 
+ +
42 
+
51  void displayFrame(const Matrix& frame, const int waitKeyValue = -1);
+
52 
+
56  void displayFrame(const std::vector<Matrix>& frames, const int waitKeyValue = -1);
+
57 
+
58  private:
+
59  const std::string mWindowName;
+
60  Point<int> mWindowedSize;
+
61  FullScreenMode mFullScreenMode;
+
62  };
+
63 }
+
64 
+
65 #endif // OPENPOSE_GUI_FRAMES_DISPLAY_HPP
+ +
FrameDisplayer(const std::string &windowedName=OPEN_POSE_NAME_AND_VERSION, const Point< int > &initialWindowedSize=Point< int >{}, const bool fullScreen=false)
+
void displayFrame(const std::vector< Matrix > &frames, const int waitKeyValue=-1)
+
void setFullScreenMode(const FullScreenMode fullScreenMode)
+
void displayFrame(const Matrix &frame, const int waitKeyValue=-1)
+
virtual ~FrameDisplayer()
+
void switchFullScreenMode()
+
void initializationOnThread()
+ + + +
#define OP_API
Definition: macros.hpp:18
+
const std::string OPEN_POSE_NAME_AND_VERSION
Definition: macros.hpp:14
+ +
FullScreenMode
Definition: enumClasses.hpp:24
+ +
+
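A minimal sketch of the display API above; `renderedFrame` would normally be the rendered output of the OpenPose pipeline, and the window name and size are arbitrary:

    #include <openpose/gui/frameDisplayer.hpp>

    void showRenderedFrame(const op::Matrix& renderedFrame)
    {
        op::FrameDisplayer frameDisplayer{"OpenPose demo", op::Point<int>{1280, 720}};
        // Per the header comment, all OpenCV visualization calls must stay on the same thread.
        frameDisplayer.initializationOnThread();
        frameDisplayer.displayFrame(renderedFrame, /*waitKeyValue*/ 1);
    }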
+ + + + diff --git a/web/html/doc/functions.html b/web/html/doc/functions.html new file mode 100644 index 000000000..17e3f4988 --- /dev/null +++ b/web/html/doc/functions.html @@ -0,0 +1,151 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- a -

+
+
+ + + + diff --git a/web/html/doc/functions_b.html b/web/html/doc/functions_b.html new file mode 100644 index 000000000..811914c55 --- /dev/null +++ b/web/html/doc/functions_b.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- b -

+
+
+ + + + diff --git a/web/html/doc/functions_c.html b/web/html/doc/functions_c.html new file mode 100644 index 000000000..2fbfe5042 --- /dev/null +++ b/web/html/doc/functions_c.html @@ -0,0 +1,201 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- c -

+
+
+ + + + diff --git a/web/html/doc/functions_d.html b/web/html/doc/functions_d.html new file mode 100644 index 000000000..6f3886e2b --- /dev/null +++ b/web/html/doc/functions_d.html @@ -0,0 +1,159 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- d -

+
+
+ + + + diff --git a/web/html/doc/functions_dup.js b/web/html/doc/functions_dup.js new file mode 100644 index 000000000..f67586e5b --- /dev/null +++ b/web/html/doc/functions_dup.js @@ -0,0 +1,29 @@ +var functions_dup = +[ + [ "a", "functions.html", null ], + [ "b", "functions_b.html", null ], + [ "c", "functions_c.html", null ], + [ "d", "functions_d.html", null ], + [ "e", "functions_e.html", null ], + [ "f", "functions_f.html", null ], + [ "g", "functions_g.html", null ], + [ "h", "functions_h.html", null ], + [ "i", "functions_i.html", null ], + [ "j", "functions_j.html", null ], + [ "k", "functions_k.html", null ], + [ "l", "functions_l.html", null ], + [ "m", "functions_m.html", null ], + [ "n", "functions_n.html", null ], + [ "o", "functions_o.html", null ], + [ "p", "functions_p.html", null ], + [ "q", "functions_q.html", null ], + [ "r", "functions_r.html", null ], + [ "s", "functions_s.html", null ], + [ "t", "functions_t.html", null ], + [ "u", "functions_u.html", null ], + [ "v", "functions_v.html", null ], + [ "w", "functions_w.html", null ], + [ "x", "functions_x.html", null ], + [ "y", "functions_y.html", null ], + [ "~", "functions_~.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/functions_e.html b/web/html/doc/functions_e.html new file mode 100644 index 000000000..55be2bf6b --- /dev/null +++ b/web/html/doc/functions_e.html @@ -0,0 +1,147 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- e -

+
+
+ + + + diff --git a/web/html/doc/functions_f.html b/web/html/doc/functions_f.html new file mode 100644 index 000000000..7e3db583c --- /dev/null +++ b/web/html/doc/functions_f.html @@ -0,0 +1,215 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- f -

+
+
+ + + + diff --git a/web/html/doc/functions_func.html b/web/html/doc/functions_func.html new file mode 100644 index 000000000..1a877416d --- /dev/null +++ b/web/html/doc/functions_func.html @@ -0,0 +1,138 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- a -

+
+
+ + + + diff --git a/web/html/doc/functions_func.js b/web/html/doc/functions_func.js new file mode 100644 index 000000000..c44c4784c --- /dev/null +++ b/web/html/doc/functions_func.js @@ -0,0 +1,27 @@ +var functions_func = +[ + [ "a", "functions_func.html", null ], + [ "b", "functions_func_b.html", null ], + [ "c", "functions_func_c.html", null ], + [ "d", "functions_func_d.html", null ], + [ "e", "functions_func_e.html", null ], + [ "f", "functions_func_f.html", null ], + [ "g", "functions_func_g.html", null ], + [ "h", "functions_func_h.html", null ], + [ "i", "functions_func_i.html", null ], + [ "j", "functions_func_j.html", null ], + [ "k", "functions_func_k.html", null ], + [ "l", "functions_func_l.html", null ], + [ "m", "functions_func_m.html", null ], + [ "n", "functions_func_n.html", null ], + [ "o", "functions_func_o.html", null ], + [ "p", "functions_func_p.html", null ], + [ "q", "functions_func_q.html", null ], + [ "r", "functions_func_r.html", null ], + [ "s", "functions_func_s.html", null ], + [ "t", "functions_func_t.html", null ], + [ "u", "functions_func_u.html", null ], + [ "v", "functions_func_v.html", null ], + [ "w", "functions_func_w.html", null ], + [ "~", "functions_func_~.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/functions_func_b.html b/web/html/doc/functions_func_b.html new file mode 100644 index 000000000..847b0a8ff --- /dev/null +++ b/web/html/doc/functions_func_b.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ + +
+ + + + diff --git a/web/html/doc/functions_func_c.html b/web/html/doc/functions_func_c.html new file mode 100644 index 000000000..1ed415f5c --- /dev/null +++ b/web/html/doc/functions_func_c.html @@ -0,0 +1,174 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- c -

+
+
+ + + + diff --git a/web/html/doc/functions_func_d.html b/web/html/doc/functions_func_d.html new file mode 100644 index 000000000..188a66a0b --- /dev/null +++ b/web/html/doc/functions_func_d.html @@ -0,0 +1,146 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- d -

+
+
+ + + + diff --git a/web/html/doc/functions_func_e.html b/web/html/doc/functions_func_e.html new file mode 100644 index 000000000..3bbec6209 --- /dev/null +++ b/web/html/doc/functions_func_e.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/functions_func_f.html b/web/html/doc/functions_func_f.html new file mode 100644 index 000000000..9cb7db224 --- /dev/null +++ b/web/html/doc/functions_func_f.html @@ -0,0 +1,176 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/functions_func_g.html b/web/html/doc/functions_func_g.html new file mode 100644 index 000000000..8cbb7c8a1 --- /dev/null +++ b/web/html/doc/functions_func_g.html @@ -0,0 +1,323 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- g -

+
+
+ + + + diff --git a/web/html/doc/functions_func_h.html b/web/html/doc/functions_func_h.html new file mode 100644 index 000000000..b93bff556 --- /dev/null +++ b/web/html/doc/functions_func_h.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- h -

+
+
+ + + + diff --git a/web/html/doc/functions_func_i.html b/web/html/doc/functions_func_i.html new file mode 100644 index 000000000..8bca7540f --- /dev/null +++ b/web/html/doc/functions_func_i.html @@ -0,0 +1,209 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- i -

+
+
+ + + + diff --git a/web/html/doc/functions_func_j.html b/web/html/doc/functions_func_j.html new file mode 100644 index 000000000..fe980a446 --- /dev/null +++ b/web/html/doc/functions_func_j.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- j -

+
+
+ + + + diff --git a/web/html/doc/functions_func_k.html b/web/html/doc/functions_func_k.html new file mode 100644 index 000000000..39a75bfcb --- /dev/null +++ b/web/html/doc/functions_func_k.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- k -

+
+
+ + + + diff --git a/web/html/doc/functions_func_l.html b/web/html/doc/functions_func_l.html new file mode 100644 index 000000000..4168678d0 --- /dev/null +++ b/web/html/doc/functions_func_l.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- l -

+
+
+ + + + diff --git a/web/html/doc/functions_func_m.html b/web/html/doc/functions_func_m.html new file mode 100644 index 000000000..6c727f474 --- /dev/null +++ b/web/html/doc/functions_func_m.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- m -

+
+
+ + + + diff --git a/web/html/doc/functions_func_n.html b/web/html/doc/functions_func_n.html new file mode 100644 index 000000000..2afadb152 --- /dev/null +++ b/web/html/doc/functions_func_n.html @@ -0,0 +1,123 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- n -

+
+
+ + + + diff --git a/web/html/doc/functions_func_o.html b/web/html/doc/functions_func_o.html new file mode 100644 index 000000000..ec9592269 --- /dev/null +++ b/web/html/doc/functions_func_o.html @@ -0,0 +1,179 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- o -

+
+
+ + + + diff --git a/web/html/doc/functions_func_p.html b/web/html/doc/functions_func_p.html new file mode 100644 index 000000000..92b3b48c2 --- /dev/null +++ b/web/html/doc/functions_func_p.html @@ -0,0 +1,160 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- p -

+
+
+ + + + diff --git a/web/html/doc/functions_func_q.html b/web/html/doc/functions_func_q.html new file mode 100644 index 000000000..2dd53faf3 --- /dev/null +++ b/web/html/doc/functions_func_q.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- q -

+
+
+ + + + diff --git a/web/html/doc/functions_func_r.html b/web/html/doc/functions_func_r.html new file mode 100644 index 000000000..0af79f809 --- /dev/null +++ b/web/html/doc/functions_func_r.html @@ -0,0 +1,167 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- r -

+
+
+ + + + diff --git a/web/html/doc/functions_func_s.html b/web/html/doc/functions_func_s.html new file mode 100644 index 000000000..9dc9f6450 --- /dev/null +++ b/web/html/doc/functions_func_s.html @@ -0,0 +1,299 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- s -

+
+
+ + + + diff --git a/web/html/doc/functions_func_t.html b/web/html/doc/functions_func_t.html new file mode 100644 index 000000000..34bcfb0da --- /dev/null +++ b/web/html/doc/functions_func_t.html @@ -0,0 +1,160 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/functions_func_u.html b/web/html/doc/functions_func_u.html new file mode 100644 index 000000000..f3b0bd16c --- /dev/null +++ b/web/html/doc/functions_func_u.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- u -

+
+
+ + + + diff --git a/web/html/doc/functions_func_v.html b/web/html/doc/functions_func_v.html new file mode 100644 index 000000000..1fc7c747e --- /dev/null +++ b/web/html/doc/functions_func_v.html @@ -0,0 +1,115 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- v -

+
+
+ + + + diff --git a/web/html/doc/functions_func_w.html b/web/html/doc/functions_func_w.html new file mode 100644 index 000000000..291ada5e8 --- /dev/null +++ b/web/html/doc/functions_func_w.html @@ -0,0 +1,333 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- w -

+
+
+ + + + diff --git a/web/html/doc/functions_func_~.html b/web/html/doc/functions_func_~.html new file mode 100644 index 000000000..c8ca778c4 --- /dev/null +++ b/web/html/doc/functions_func_~.html @@ -0,0 +1,451 @@ + + + + + + + +OpenPose: Class Members - Functions + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- ~ -

+
+
+ + + + diff --git a/web/html/doc/functions_g.html b/web/html/doc/functions_g.html new file mode 100644 index 000000000..004171bde --- /dev/null +++ b/web/html/doc/functions_g.html @@ -0,0 +1,332 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- g -

+
+
+ + + + diff --git a/web/html/doc/functions_h.html b/web/html/doc/functions_h.html new file mode 100644 index 000000000..a5b31e949 --- /dev/null +++ b/web/html/doc/functions_h.html @@ -0,0 +1,143 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- h -

+
+
+ + + + diff --git a/web/html/doc/functions_i.html b/web/html/doc/functions_i.html new file mode 100644 index 000000000..c1b78e2db --- /dev/null +++ b/web/html/doc/functions_i.html @@ -0,0 +1,221 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- i -

+
+
+ + + + diff --git a/web/html/doc/functions_j.html b/web/html/doc/functions_j.html new file mode 100644 index 000000000..6436b5acb --- /dev/null +++ b/web/html/doc/functions_j.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- j -

+
+
+ + + + diff --git a/web/html/doc/functions_k.html b/web/html/doc/functions_k.html new file mode 100644 index 000000000..c720692cb --- /dev/null +++ b/web/html/doc/functions_k.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- k -

+
+
+ + + + diff --git a/web/html/doc/functions_l.html b/web/html/doc/functions_l.html new file mode 100644 index 000000000..0b934336d --- /dev/null +++ b/web/html/doc/functions_l.html @@ -0,0 +1,108 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- l -

+
+
+ + + + diff --git a/web/html/doc/functions_m.html b/web/html/doc/functions_m.html new file mode 100644 index 000000000..6bddca063 --- /dev/null +++ b/web/html/doc/functions_m.html @@ -0,0 +1,218 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- m -

+
+
+ + + + diff --git a/web/html/doc/functions_n.html b/web/html/doc/functions_n.html new file mode 100644 index 000000000..76583f299 --- /dev/null +++ b/web/html/doc/functions_n.html @@ -0,0 +1,146 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- n -

+
+
+ + + + diff --git a/web/html/doc/functions_o.html b/web/html/doc/functions_o.html new file mode 100644 index 000000000..71fff35ac --- /dev/null +++ b/web/html/doc/functions_o.html @@ -0,0 +1,185 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- o -

+
+
+ + + + diff --git a/web/html/doc/functions_p.html b/web/html/doc/functions_p.html new file mode 100644 index 000000000..84ef7d179 --- /dev/null +++ b/web/html/doc/functions_p.html @@ -0,0 +1,196 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- p -

+
+
+ + + + diff --git a/web/html/doc/functions_q.html b/web/html/doc/functions_q.html new file mode 100644 index 000000000..761d30fbf --- /dev/null +++ b/web/html/doc/functions_q.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- q -

+
+
+ + + + diff --git a/web/html/doc/functions_r.html b/web/html/doc/functions_r.html new file mode 100644 index 000000000..5f3f69f87 --- /dev/null +++ b/web/html/doc/functions_r.html @@ -0,0 +1,183 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- r -

+
+
+ + + + diff --git a/web/html/doc/functions_s.html b/web/html/doc/functions_s.html new file mode 100644 index 000000000..b61e18c37 --- /dev/null +++ b/web/html/doc/functions_s.html @@ -0,0 +1,336 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- s -

+
+
+ + + + diff --git a/web/html/doc/functions_t.html b/web/html/doc/functions_t.html new file mode 100644 index 000000000..9a15597c1 --- /dev/null +++ b/web/html/doc/functions_t.html @@ -0,0 +1,163 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/functions_u.html b/web/html/doc/functions_u.html new file mode 100644 index 000000000..d7c35a4f4 --- /dev/null +++ b/web/html/doc/functions_u.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- u -

+
+
+ + + + diff --git a/web/html/doc/functions_v.html b/web/html/doc/functions_v.html new file mode 100644 index 000000000..9f1b99e37 --- /dev/null +++ b/web/html/doc/functions_v.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- v -

+
+
+ + + + diff --git a/web/html/doc/functions_vars.html b/web/html/doc/functions_vars.html new file mode 100644 index 000000000..95939cf3d --- /dev/null +++ b/web/html/doc/functions_vars.html @@ -0,0 +1,629 @@ + + + + + + + +OpenPose: Class Members - Variables + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- a -

+ + +

- b -

+ + +

- c -

+ + +

- d -

+ + +

- e -

+ + +

- f -

+ + +

- g -

+ + +

- h -

+ + +

- i -

+ + +

- k -

+ + +

- m -

+ + +

- n -

+ + +

- o -

+ + +

- p -

+ + +

- r -

+ + +

- s -

+ + +

- t -

+ + +

- u -

+ + +

- v -

+ + +

- w -

+ + +

- x -

+ + +

- y -

+
+
+ + + + diff --git a/web/html/doc/functions_w.html b/web/html/doc/functions_w.html new file mode 100644 index 000000000..384e1a027 --- /dev/null +++ b/web/html/doc/functions_w.html @@ -0,0 +1,382 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- w -

+
+
+ + + + diff --git a/web/html/doc/functions_x.html b/web/html/doc/functions_x.html new file mode 100644 index 000000000..bf0e9b3ff --- /dev/null +++ b/web/html/doc/functions_x.html @@ -0,0 +1,104 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- x -

+
+
+ + + + diff --git a/web/html/doc/functions_y.html b/web/html/doc/functions_y.html new file mode 100644 index 000000000..f12b47c9c --- /dev/null +++ b/web/html/doc/functions_y.html @@ -0,0 +1,104 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- y -

+
+
+ + + + diff --git a/web/html/doc/functions_~.html b/web/html/doc/functions_~.html new file mode 100644 index 000000000..6be5187be --- /dev/null +++ b/web/html/doc/functions_~.html @@ -0,0 +1,451 @@ + + + + + + + +OpenPose: Class Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all class members with links to the classes they belong to:
+ +

- ~ -

+
+
+ + + + diff --git a/web/html/doc/globals.html b/web/html/doc/globals.html new file mode 100644 index 000000000..35a4aedd6 --- /dev/null +++ b/web/html/doc/globals.html @@ -0,0 +1,347 @@ + + + + + + + +OpenPose: File Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
Here is a list of all file members with links to the files they belong to:
+ +

- b -

+ + +

- c -

    +
  • COMPILE_TEMPLATE_BASIC_TYPES +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_BASIC_TYPES_CLASS +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_BASIC_TYPES_STRUCT +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_DATUM +: datum.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_INT_TYPES +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_TYPES +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_TYPES_CLASS +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT +: macros.hpp +
  • +
+ + +

- d -

+ + +

- f -

+ + +

- h -

+ + +

- o -

+ + +

- p -

+ + +

- u -

+
+
+ + + + diff --git a/web/html/doc/globals_defs.html b/web/html/doc/globals_defs.html new file mode 100644 index 000000000..6eae3d5d2 --- /dev/null +++ b/web/html/doc/globals_defs.html @@ -0,0 +1,323 @@ + + + + + + + +OpenPose: File Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- b -

+ + +

- c -

    +
  • COMPILE_TEMPLATE_BASIC_TYPES +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_BASIC_TYPES_CLASS +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_BASIC_TYPES_STRUCT +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_DATUM +: datum.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_INT_TYPES +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_TYPES +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_TYPES_CLASS +: macros.hpp +
  • +
  • COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT +: macros.hpp +
  • +
+ + +

- d -

+ + +

- f -

+ + +

- h -

+ + +

- o -

+ + +

- p -

+ + +

- u -

+
+
+ + + + diff --git a/web/html/doc/globals_func.html b/web/html/doc/globals_func.html new file mode 100644 index 000000000..64c3edbaf --- /dev/null +++ b/web/html/doc/globals_func.html @@ -0,0 +1,115 @@ + + + + + + + +OpenPose: File Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+  + +

- d -

+
+
+ + + + diff --git a/web/html/doc/globals_vars.html b/web/html/doc/globals_vars.html new file mode 100644 index 000000000..16a368208 --- /dev/null +++ b/web/html/doc/globals_vars.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: File Members + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
+ + + + diff --git a/web/html/doc/gpu_2enum_classes_8hpp.html b/web/html/doc/gpu_2enum_classes_8hpp.html new file mode 100644 index 000000000..621681615 --- /dev/null +++ b/web/html/doc/gpu_2enum_classes_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/gpu/enumClasses.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
enumClasses.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + +

+Enumerations

enum class  op::GpuMode : unsigned char { op::Cuda = 0 +, op::OpenCL = 1 +, op::NoGpu = 2 +, op::Size + }
 
+
+
+ + + + diff --git a/web/html/doc/gpu_2enum_classes_8hpp.js b/web/html/doc/gpu_2enum_classes_8hpp.js new file mode 100644 index 000000000..4584db2c9 --- /dev/null +++ b/web/html/doc/gpu_2enum_classes_8hpp.js @@ -0,0 +1,9 @@ +var gpu_2enum_classes_8hpp = +[ + [ "GpuMode", "gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7f", [ + [ "Cuda", "gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa8b95dcff7397d0693c03e394af5552aa", null ], + [ "OpenCL", "gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa7982b09a852b37f2afb1227eaf552e47", null ], + [ "NoGpu", "gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa3c1472839b807c90abff3c7c36dff458", null ], + [ "Size", "gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa6f6cb72d544962fa333e2e34ce64f719", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/gpu_2enum_classes_8hpp_source.html b/web/html/doc/gpu_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..3b6c37305 --- /dev/null +++ b/web/html/doc/gpu_2enum_classes_8hpp_source.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/gpu/enumClasses.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
enumClasses.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GPU_ENUM_CLASSES_HPP
+
2 #define OPENPOSE_GPU_ENUM_CLASSES_HPP
+
3 
+
4 namespace op
+
5 {
+
6  enum class GpuMode : unsigned char
+
7  {
+
8  Cuda = 0,
+
9  OpenCL = 1,
+
10  NoGpu = 2,
+
11  Size,
+
12  };
+
13 }
+
14 
+
15 #endif // OPENPOSE_GPU_ENUM_CLASSES_HPP
+ +
GpuMode
Definition: enumClasses.hpp:7
+ + + + +
+
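A small illustrative helper showing how the enum above is typically consumed (the string names are arbitrary):

    #include <openpose/gpu/enumClasses.hpp>
    #include <string>

    std::string gpuModeToString(const op::GpuMode gpuMode)
    {
        switch (gpuMode)
        {
            case op::GpuMode::Cuda:   return "CUDA";
            case op::GpuMode::OpenCL: return "OpenCL";
            case op::GpuMode::NoGpu:  return "CPU-only";
            default:                  return "Unknown";
        }
    }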
+ + + + diff --git a/web/html/doc/gpu_2headers_8hpp.html b/web/html/doc/gpu_2headers_8hpp.html new file mode 100644 index 000000000..c5f7500c4 --- /dev/null +++ b/web/html/doc/gpu_2headers_8hpp.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: include/openpose/gpu/headers.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
headers.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/gpu_2headers_8hpp_source.html b/web/html/doc/gpu_2headers_8hpp_source.html new file mode 100644 index 000000000..de460765e --- /dev/null +++ b/web/html/doc/gpu_2headers_8hpp_source.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: include/openpose/gpu/headers.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
headers.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GPU_HEADERS_HPP
+
2 #define OPENPOSE_GPU_HEADERS_HPP
+
3 
+
4 // gpu module
+
5 #include <openpose/gpu/cuda.hpp>
+ +
7 #include <openpose/gpu/gpu.hpp>
+
8 
+
9 #endif // OPENPOSE_GPU_HEADERS_HPP
+ + + +
+
+ + + + diff --git a/web/html/doc/gpu_8hpp.html b/web/html/doc/gpu_8hpp.html new file mode 100644 index 000000000..1df2d12a2 --- /dev/null +++ b/web/html/doc/gpu_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/gpu/gpu.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
gpu.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + +

+Functions

OP_API int op::getGpuNumber ()
 
OP_API GpuMode op::getGpuMode ()
 
+
+
+ + + + diff --git a/web/html/doc/gpu_8hpp.js b/web/html/doc/gpu_8hpp.js new file mode 100644 index 000000000..382c51127 --- /dev/null +++ b/web/html/doc/gpu_8hpp.js @@ -0,0 +1,5 @@ +var gpu_8hpp = +[ + [ "getGpuMode", "gpu_8hpp.html#a971a7caa96be5b715b5c22f6e5dc6ad1", null ], + [ "getGpuNumber", "gpu_8hpp.html#aaad222b087dd041c35de2f3414c1a01f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/gpu_8hpp_source.html b/web/html/doc/gpu_8hpp_source.html new file mode 100644 index 000000000..61a346d03 --- /dev/null +++ b/web/html/doc/gpu_8hpp_source.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/gpu/gpu.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
gpu.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GPU_GPU_HPP
+
2 #define OPENPOSE_GPU_GPU_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
10 
+ +
12 }
+
13 
+
14 #endif // OPENPOSE_GPU_GPU_HPP
+ + +
#define OP_API
Definition: macros.hpp:18
+ +
OP_API GpuMode getGpuMode()
+
OP_API int getGpuNumber()
+
GpuMode
Definition: enumClasses.hpp:7
+
+
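A minimal sketch of querying the GPU back end at runtime with the two functions above:

    #include <openpose/gpu/gpu.hpp>
    #include <iostream>

    int main()
    {
        // Which back end was OpenPose built with, and how many devices can it see?
        const auto gpuMode = op::getGpuMode();
        const auto gpuNumber = op::getGpuNumber();
        if (gpuMode == op::GpuMode::NoGpu)
            std::cout << "CPU-only build detected." << std::endl;
        else
            std::cout << gpuNumber << " GPU device(s) available." << std::endl;
        return 0;
    }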
+ + + + diff --git a/web/html/doc/gpu_renderer_8hpp.html b/web/html/doc/gpu_renderer_8hpp.html new file mode 100644 index 000000000..9f13671c8 --- /dev/null +++ b/web/html/doc/gpu_renderer_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/core/gpuRenderer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
gpuRenderer.hpp File Reference
+
+
+
#include <atomic>
+#include <tuple>
+#include <openpose/core/common.hpp>
+#include <openpose/core/renderer.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Classes

class  op::GpuRenderer
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/gpu_renderer_8hpp_source.html b/web/html/doc/gpu_renderer_8hpp_source.html new file mode 100644 index 000000000..b496b4619 --- /dev/null +++ b/web/html/doc/gpu_renderer_8hpp_source.html @@ -0,0 +1,167 @@ + + + + + + + +OpenPose: include/openpose/core/gpuRenderer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
gpuRenderer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_GPU_RENDERER_HPP
+
2 #define OPENPOSE_CORE_GPU_RENDERER_HPP
+
3 
+
4 #include <atomic>
+
5 #include <tuple>
+ + +
8 
+
9 namespace op
+
10 {
+
11  class OP_API GpuRenderer : public Renderer
+
12  {
+
13  public:
+
14  explicit GpuRenderer(
+
15  const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap,
+
16  const bool blendOriginalFrame = true, const unsigned int elementToRender = 0u,
+
17  const unsigned int numberElementsToRender = 0u);
+
18 
+
19  virtual ~GpuRenderer();
+
20 
+
21  std::tuple<std::shared_ptr<float*>, std::shared_ptr<bool>, std::shared_ptr<std::atomic<unsigned int>>,
+
22  std::shared_ptr<unsigned long long>, std::shared_ptr<const unsigned int>>
+ +
24 
+ +
26  const std::tuple<std::shared_ptr<float*>, std::shared_ptr<bool>, std::shared_ptr<std::atomic<unsigned int>>,
+
27  std::shared_ptr<unsigned long long>, std::shared_ptr<const unsigned int>>& tuple,
+
28  const bool isLast);
+
29 
+ +
31  const std::tuple<std::shared_ptr<float*>, std::shared_ptr<bool>,
+
32  std::shared_ptr<unsigned long long>>& tuple);
+
33 
+
34  protected:
+
35  std::shared_ptr<float*> spGpuMemory;
+
36 
+
37  void cpuToGpuMemoryIfNotCopiedYet(const float* const cpuMemory, const unsigned long long memoryVolume);
+
38 
+
39  void gpuToCpuMemoryIfLastRenderer(float* cpuMemory, const unsigned long long memoryVolume);
+
40 
+
41  private:
+
42  std::shared_ptr<unsigned long long> spVolume;
+
43  bool mIsFirstRenderer;
+
44  bool mIsLastRenderer;
+
45  std::shared_ptr<bool> spGpuMemoryAllocated;
+
46 
+ +
48  };
+
49 }
+
50 
+
51 #endif // OPENPOSE_CORE_GPU_RENDERER_HPP
+ +
virtual ~GpuRenderer()
+
std::shared_ptr< float * > spGpuMemory
Definition: gpuRenderer.hpp:35
+
void gpuToCpuMemoryIfLastRenderer(float *cpuMemory, const unsigned long long memoryVolume)
+
std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int > >, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int > > getSharedParameters()
+
GpuRenderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
+
void cpuToGpuMemoryIfNotCopiedYet(const float *const cpuMemory, const unsigned long long memoryVolume)
+
void setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)
+
void setSharedParametersAndIfLast(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< std::atomic< unsigned int >>, std::shared_ptr< unsigned long long >, std::shared_ptr< const unsigned int >> &tuple, const bool isLast)
+ + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ + +
+
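A minimal sketch of how the shared-parameter API declared above could be wired between several renderers. It assumes an installed OpenPose build; the helper name shareGpuMemoryAcrossRenderers is hypothetical, and only members declared in gpuRenderer.hpp are called.

    #include <memory>
    #include <vector>
    #include <openpose/core/gpuRenderer.hpp>

    // Hypothetical helper: let several GpuRenderer instances reuse the GPU buffer
    // owned by the first one, and mark the last renderer so it copies GPU -> CPU.
    void shareGpuMemoryAcrossRenderers(
        const std::vector<std::shared_ptr<op::GpuRenderer>>& gpuRenderers)
    {
        if (gpuRenderers.size() < 2)
            return;
        // The first renderer owns the shared state (GPU pointer, allocation flag, volume, ...).
        const auto sharedParameters = gpuRenderers.front()->getSharedParameters();
        for (auto i = 1u; i < gpuRenderers.size(); i++)
            gpuRenderers[i]->setSharedParametersAndIfLast(
                sharedParameters, i + 1 == gpuRenderers.size());
    }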
+ + + + diff --git a/web/html/doc/gui3_d_8hpp.html b/web/html/doc/gui3_d_8hpp.html new file mode 100644 index 000000000..59722e2c6 --- /dev/null +++ b/web/html/doc/gui3_d_8hpp.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/gui/gui3D.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
gui3D.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::Gui3D
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/gui3_d_8hpp_source.html b/web/html/doc/gui3_d_8hpp_source.html new file mode 100644 index 000000000..52b8e3048 --- /dev/null +++ b/web/html/doc/gui3_d_8hpp_source.html @@ -0,0 +1,165 @@ + + + + + + + +OpenPose: include/openpose/gui/gui3D.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
gui3D.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GUI_GUI_3D_HPP
+
2 #define OPENPOSE_GUI_GUI_3D_HPP
+
3 
+ + +
6 #include <openpose/gui/gui.hpp>
+ + +
9 
+
10 namespace op
+
11 {
+
12  class OP_API Gui3D : public Gui
+
13  {
+
14  public:
+
15  Gui3D(const Point<int>& outputSize, const bool fullScreen,
+
16  const std::shared_ptr<std::atomic<bool>>& isRunningSharedPtr,
+
17  const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr = nullptr,
+
18  const std::vector<std::shared_ptr<PoseExtractorNet>>& poseExtractorNets = {},
+
19  const std::vector<std::shared_ptr<FaceExtractorNet>>& faceExtractorNets = {},
+
20  const std::vector<std::shared_ptr<HandExtractorNet>>& handExtractorNets = {},
+
21  const std::vector<std::shared_ptr<Renderer>>& renderers = {},
+
22  const PoseModel poseModel = PoseModel::BODY_25,
+
23  const DisplayMode displayMode = DisplayMode::DisplayAll,
+
24  const bool copyGlToCvMat = false);
+
25 
+
26  virtual ~Gui3D();
+
27 
+
28  virtual void initializationOnThread();
+
29 
+
30  void setKeypoints(const Array<float>& poseKeypoints3D, const Array<float>& faceKeypoints3D,
+
31  const Array<float>& leftHandKeypoints3D, const Array<float>& rightHandKeypoints3D);
+
32 
+
33  virtual void update();
+
34 
+
35  virtual Matrix readCvMat();
+
36 
+
37  private:
+
38  const bool mCopyGlToCvMat;
+
39  };
+
40 }
+
41 
+
42 #endif // OPENPOSE_GUI_GUI_3D_HPP
+ + +
virtual Matrix readCvMat()
+
Gui3D(const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const PoseModel poseModel=PoseModel::BODY_25, const DisplayMode displayMode=DisplayMode::DisplayAll, const bool copyGlToCvMat=false)
+
virtual ~Gui3D()
+
virtual void initializationOnThread()
+
void setKeypoints(const Array< float > &poseKeypoints3D, const Array< float > &faceKeypoints3D, const Array< float > &leftHandKeypoints3D, const Array< float > &rightHandKeypoints3D)
+
virtual void update()
+
Definition: gui.hpp:15
+ + + + +
#define OP_API
Definition: macros.hpp:18
+ +
DisplayMode
Definition: enumClasses.hpp:11
+ +
PoseModel
Definition: enumClasses.hpp:10
+ + + + +
+
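A rough usage sketch for the Gui3D interface above (not OpenPose's internal worker code): it assumes the 3-D keypoints have already been triangulated elsewhere and that a Gui3D instance has been constructed and initialized on the GUI thread; only methods declared in gui3D.hpp are called.

    #include <openpose/gui/gui3D.hpp>

    void render3dFrame(op::Gui3D& gui3D,
                       const op::Array<float>& poseKeypoints3D,
                       const op::Array<float>& faceKeypoints3D,
                       const op::Array<float>& leftHandKeypoints3D,
                       const op::Array<float>& rightHandKeypoints3D)
    {
        // Push the triangulated keypoints and refresh the 3-D view.
        gui3D.setKeypoints(poseKeypoints3D, faceKeypoints3D,
                           leftHandKeypoints3D, rightHandKeypoints3D);
        gui3D.update();
    }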
+ + + + diff --git a/web/html/doc/gui_2enum_classes_8hpp.html b/web/html/doc/gui_2enum_classes_8hpp.html new file mode 100644 index 000000000..08d5fce4c --- /dev/null +++ b/web/html/doc/gui_2enum_classes_8hpp.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: include/openpose/gui/enumClasses.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
enumClasses.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + +

+Enumerations

enum class  op::DisplayMode : unsigned short {
+  op::NoDisplay +, op::DisplayAll +, op::Display2D +, op::Display3D +,
+  op::DisplayAdam +
+ }
 
enum class  op::FullScreenMode : bool { op::FullScreen +, op::Windowed + }
 
+
+
+ + + + diff --git a/web/html/doc/gui_2enum_classes_8hpp.js b/web/html/doc/gui_2enum_classes_8hpp.js new file mode 100644 index 000000000..888b67d3f --- /dev/null +++ b/web/html/doc/gui_2enum_classes_8hpp.js @@ -0,0 +1,14 @@ +var gui_2enum_classes_8hpp = +[ + [ "DisplayMode", "gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6", [ + [ "NoDisplay", "gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a28b652e57d2da6b7c939166be21efd9a", null ], + [ "DisplayAll", "gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a105036ef087117869f656cd72bfd8dd6", null ], + [ "Display2D", "gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a3bd9369403112127ae7db2f866002be2", null ], + [ "Display3D", "gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6ae18221460ca8434295f980225fd6a91b", null ], + [ "DisplayAdam", "gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a442304e26339521bc296bdc47ff5fddf", null ] + ] ], + [ "FullScreenMode", "gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bf", [ + [ "FullScreen", "gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bfae7ec409749889353b8f83a6b04159420", null ], + [ "Windowed", "gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/gui_2enum_classes_8hpp_source.html b/web/html/doc/gui_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..a9b5f52c0 --- /dev/null +++ b/web/html/doc/gui_2enum_classes_8hpp_source.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/gui/enumClasses.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
enumClasses.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GUI_ENUM_CLASSES_HPP
+
2 #define OPENPOSE_GUI_ENUM_CLASSES_HPP
+
3 
+
4 namespace op
+
5 {
+
10  enum class DisplayMode : unsigned short
+
11  {
+
12  NoDisplay,
+
13  DisplayAll,
+
14  Display2D,
+
15  Display3D,
+ +
17  };
+
18 
+
23  enum class FullScreenMode : bool
+
24  {
+
25  FullScreen,
+
26  Windowed,
+
27  };
+
28 }
+
29 
+
30 #endif // OPENPOSE_GUI_ENUM_CLASSES_HPP
+ +
FullScreenMode
Definition: enumClasses.hpp:24
+ + +
DisplayMode
Definition: enumClasses.hpp:11
+ + + + + +
+
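A small sketch of how these enums are typically consumed by caller code. The mapping below is illustrative only (it is not taken from OpenPose internals); it uses just the enumerators declared above.

    #include <openpose/gui/enumClasses.hpp>

    // Illustrative: decide whether a 2-D window must be drawn for a given mode.
    bool requires2dWindow(const op::DisplayMode displayMode)
    {
        return displayMode == op::DisplayMode::DisplayAll
            || displayMode == op::DisplayMode::Display2D;
    }

    // FullScreenMode simply toggles between a windowed and a full-screen display.
    const auto fullScreenMode = op::FullScreenMode::Windowed;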
+ + + + diff --git a/web/html/doc/gui_2headers_8hpp.html b/web/html/doc/gui_2headers_8hpp.html new file mode 100644 index 000000000..026413dbf --- /dev/null +++ b/web/html/doc/gui_2headers_8hpp.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: include/openpose/gui/headers.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
headers.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/gui_2headers_8hpp_source.html b/web/html/doc/gui_2headers_8hpp_source.html new file mode 100644 index 000000000..299123a6c --- /dev/null +++ b/web/html/doc/gui_2headers_8hpp_source.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/gui/headers.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
headers.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GUI_HEADERS_HPP
+
2 #define OPENPOSE_GUI_HEADERS_HPP
+
3 
+
4 // gui module
+ + +
7 #include <openpose/gui/gui.hpp>
+ +
9 #include <openpose/gui/gui3D.hpp>
+ +
11 #include <openpose/gui/wGui.hpp>
+ +
13 #include <openpose/gui/wGui3D.hpp>
+ +
15 
+
16 #endif // OPENPOSE_GUI_HEADERS_HPP
+ + + + + + + + + + +
+
+ + + + diff --git a/web/html/doc/gui_8hpp.html b/web/html/doc/gui_8hpp.html new file mode 100644 index 000000000..fc2774f24 --- /dev/null +++ b/web/html/doc/gui_8hpp.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: include/openpose/gui/gui.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
gui.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::Gui
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/gui_8hpp_source.html b/web/html/doc/gui_8hpp_source.html new file mode 100644 index 000000000..d20c64665 --- /dev/null +++ b/web/html/doc/gui_8hpp_source.html @@ -0,0 +1,177 @@ + + + + + + + +OpenPose: include/openpose/gui/gui.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
gui.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GUI_GUI_HPP
+
2 #define OPENPOSE_GUI_GUI_HPP
+
3 
+
4 #include <atomic>
+ + + + + + +
11 
+
12 namespace op
+
13 {
+
14  class OP_API Gui
+
15  {
+
16  public:
+
17  Gui(const Point<int>& outputSize, const bool fullScreen,
+
18  const std::shared_ptr<std::atomic<bool>>& isRunningSharedPtr,
+
19  const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr = nullptr,
+
20  const std::vector<std::shared_ptr<PoseExtractorNet>>& poseExtractorNets = {},
+
21  const std::vector<std::shared_ptr<FaceExtractorNet>>& faceExtractorNets = {},
+
22  const std::vector<std::shared_ptr<HandExtractorNet>>& handExtractorNets = {},
+
23  const std::vector<std::shared_ptr<Renderer>>& renderers = {},
+
24  const DisplayMode displayMode = DisplayMode::Display2D);
+
25 
+
26  virtual ~Gui();
+
27 
+
28  virtual void initializationOnThread();
+
29 
+
30  void setImage(const Matrix& cvMatOutput);
+
31 
+
32  void setImage(const std::vector<Matrix>& cvMatOutputs);
+
33 
+
34  virtual void update();
+
35 
+
36  protected:
+
37  std::shared_ptr<std::atomic<bool>> spIsRunning;
+ + +
40 
+
41  private:
+
42  // Frames display
+
43  FrameDisplayer mFrameDisplayer;
+
44  // Other variables
+
45  std::vector<std::shared_ptr<PoseExtractorNet>> mPoseExtractorNets;
+
46  std::vector<std::shared_ptr<FaceExtractorNet>> mFaceExtractorNets;
+
47  std::vector<std::shared_ptr<HandExtractorNet>> mHandExtractorNets;
+
48  std::vector<std::shared_ptr<Renderer>> mRenderers;
+
49  std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>> spVideoSeek;
+
50  };
+
51 }
+
52 
+
53 #endif // OPENPOSE_GUI_GUI_HPP
+ +
Definition: gui.hpp:15
+
virtual void initializationOnThread()
+
std::shared_ptr< std::atomic< bool > > spIsRunning
Definition: gui.hpp:37
+
Gui(const Point< int > &outputSize, const bool fullScreen, const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr=nullptr, const std::vector< std::shared_ptr< PoseExtractorNet >> &poseExtractorNets={}, const std::vector< std::shared_ptr< FaceExtractorNet >> &faceExtractorNets={}, const std::vector< std::shared_ptr< HandExtractorNet >> &handExtractorNets={}, const std::vector< std::shared_ptr< Renderer >> &renderers={}, const DisplayMode displayMode=DisplayMode::Display2D)
+
DisplayMode mDisplayMode
Definition: gui.hpp:38
+
virtual ~Gui()
+
virtual void update()
+
void setImage(const Matrix &cvMatOutput)
+
DisplayMode mDisplayModeOriginal
Definition: gui.hpp:39
+
void setImage(const std::vector< Matrix > &cvMatOutputs)
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+ +
DisplayMode
Definition: enumClasses.hpp:11
+ + + + +
+
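A minimal display-loop sketch using only the members declared above. It assumes OpenPose is installed and linked, that the rendered frames come from elsewhere, and that op::Point's two-argument constructor is available (it is not shown in this listing); the window size is illustrative.

    #include <atomic>
    #include <memory>
    #include <vector>
    #include <openpose/gui/gui.hpp>

    void showFrames(const std::vector<op::Matrix>& outputFrames)
    {
        auto isRunningPtr = std::make_shared<std::atomic<bool>>(true);
        op::Gui gui{op::Point<int>{1280, 720}, /*fullScreen*/ false, isRunningPtr};
        gui.initializationOnThread();        // must be called from the GUI thread
        for (const auto& frame : outputFrames)
        {
            gui.setImage(frame);             // hand the rendered frame to the window
            gui.update();                    // refresh the display and process key events
        }
    }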
+ + + + diff --git a/web/html/doc/gui_adam_8hpp.html b/web/html/doc/gui_adam_8hpp.html new file mode 100644 index 000000000..626835716 --- /dev/null +++ b/web/html/doc/gui_adam_8hpp.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: include/openpose/gui/guiAdam.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
guiAdam.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/gui_adam_8hpp_source.html b/web/html/doc/gui_adam_8hpp_source.html new file mode 100644 index 000000000..83866f460 --- /dev/null +++ b/web/html/doc/gui_adam_8hpp_source.html @@ -0,0 +1,165 @@ + + + + + + + +OpenPose: include/openpose/gui/guiAdam.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
guiAdam.hpp
+
+
+Go to the documentation of this file.
1 #ifdef USE_3D_ADAM_MODEL
+
2 #ifndef OPENPOSE_GUI_GUI_ADAM_HPP
+
3 #define OPENPOSE_GUI_GUI_ADAM_HPP
+
4 
+
5 #ifdef USE_3D_ADAM_MODEL
+
6  #include <adam/totalmodel.h>
+
7 #endif
+ + +
10 #include <openpose/gui/gui.hpp>
+
11 
+
12 namespace op
+
13 {
+
14  // This worker will do 3-D rendering
+
15  class OP_API GuiAdam : public Gui
+
16  {
+
17  public:
+
18  GuiAdam(const Point<int>& outputSize, const bool fullScreen,
+
19  const std::shared_ptr<std::atomic<bool>>& isRunningSharedPtr,
+
20  const std::shared_ptr<std::pair<std::atomic<bool>, std::atomic<int>>>& videoSeekSharedPtr = nullptr,
+
21  const std::vector<std::shared_ptr<PoseExtractorNet>>& poseExtractorNets = {},
+
22  const std::vector<std::shared_ptr<FaceExtractorNet>>& faceExtractorNets = {},
+
23  const std::vector<std::shared_ptr<HandExtractorNet>>& handExtractorNets = {},
+
24  const std::vector<std::shared_ptr<Renderer>>& renderers = {},
+
25  const DisplayMode displayMode = DisplayMode::DisplayAll,
+
26  const std::shared_ptr<const TotalModel>& totalModel = nullptr,
+
27  const std::string& adamRenderedVideoPath = "");
+
28 
+
29  virtual ~GuiAdam();
+
30 
+
31  virtual void initializationOnThread();
+
32 
+
33  void generateMesh(const Array<float>& poseKeypoints3D, const Array<float>& faceKeypoints3D,
+
34  const std::array<Array<float>, 2>& handKeypoints3D,
+
35  const double* const adamPosePtr,
+
36  const double* const adamTranslationPtr,
+
37  const double* const vtVecPtr, const int vtVecRows,
+
38  const double* const j0VecPtr, const int j0VecRows,
+
39  const double* const adamFaceCoeffsExpPtr);
+
40 
+
41  virtual void update();
+
42 
+
43  private:
+
44  // PIMPL idiom
+
45  // http://www.cppsamples.com/common-tasks/pimpl.html
+
46  struct ImplGuiAdam;
+
47  std::shared_ptr<ImplGuiAdam> spImpl;
+
48 
+
49  // PIMPL requires DELETE_COPY & destructor, or extra code
+
50  // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+
51  DELETE_COPY(GuiAdam);
+
52  };
+
53 }
+
54 
+
55 #endif // OPENPOSE_GUI_GUI_ADAM_HPP
+
56 #endif
+ + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
DisplayMode
Definition: enumClasses.hpp:11
+ +
+
+ + + + diff --git a/web/html/doc/gui_info_adder_8hpp.html b/web/html/doc/gui_info_adder_8hpp.html new file mode 100644 index 000000000..dcdc74875 --- /dev/null +++ b/web/html/doc/gui_info_adder_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/gui/guiInfoAdder.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
guiInfoAdder.hpp File Reference
+
+
+
#include <queue>
+#include <openpose/core/common.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Classes

class  op::GuiInfoAdder
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/gui_info_adder_8hpp_source.html b/web/html/doc/gui_info_adder_8hpp_source.html new file mode 100644 index 000000000..07c38448b --- /dev/null +++ b/web/html/doc/gui_info_adder_8hpp_source.html @@ -0,0 +1,145 @@ + + + + + + + +OpenPose: include/openpose/gui/guiInfoAdder.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
guiInfoAdder.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_GUI_ADD_GUI_INFO_HPP
+
2 #define OPENPOSE_GUI_ADD_GUI_INFO_HPP
+
3 
+
4 #include <queue>
+ +
6 
+
7 namespace op
+
8 {
+ +
10  {
+
11  public:
+
12  GuiInfoAdder(const int numberGpus, const bool guiEnabled = false);
+
13 
+
14  virtual ~GuiInfoAdder();
+
15 
+
16  void addInfo(Matrix& outputData, const int numberPeople, const unsigned long long id,
+
17  const std::string& elementRenderedName, const unsigned long long frameNumber,
+
18  const Array<long long>& poseIds = Array<long long>{},
+
19  const Array<float>& poseKeypoints = Array<float>{});
+
20 
+
21  private:
+
22  // Const variables
+
23  const int mNumberGpus;
+
24  const bool mGuiEnabled;
+
25  // Other variables
+
26  std::queue<std::chrono::high_resolution_clock::time_point> mFpsQueue;
+
27  double mFps;
+
28  unsigned int mFpsCounter;
+
29  std::string mLastElementRenderedName;
+
30  int mLastElementRenderedCounter;
+
31  unsigned long long mLastId;
+
32  };
+
33 }
+
34 
+
35 #endif // OPENPOSE_GUI_ADD_GUI_INFO_HPP
+ + +
void addInfo(Matrix &outputData, const int numberPeople, const unsigned long long id, const std::string &elementRenderedName, const unsigned long long frameNumber, const Array< long long > &poseIds=Array< long long >{}, const Array< float > &poseKeypoints=Array< float >{})
+
virtual ~GuiInfoAdder()
+
GuiInfoAdder(const int numberGpus, const bool guiEnabled=false)
+ + +
#define OP_API
Definition: macros.hpp:18
+ +
+
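GuiInfoAdder only post-processes the rendered frame (FPS counter, number of people, element name). A hedged sketch of a call site follows; it assumes poseKeypoints is the usual people x parts x 3 array, so its first dimension is the number of people, and it reuses the frame number as the id for simplicity.

    #include <openpose/gui/guiInfoAdder.hpp>

    void annotateFrame(op::GuiInfoAdder& guiInfoAdder, op::Matrix& outputFrame,
                       const op::Array<float>& poseKeypoints,
                       const unsigned long long frameNumber)
    {
        const auto numberPeople = poseKeypoints.getSize(0); // people = first dimension
        guiInfoAdder.addInfo(outputFrame, numberPeople, /*id*/ frameNumber,
                             /*elementRenderedName*/ "", frameNumber);
    }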
+ + + + diff --git a/web/html/doc/hand_2headers_8hpp.html b/web/html/doc/hand_2headers_8hpp.html new file mode 100644 index 000000000..aa55623b3 --- /dev/null +++ b/web/html/doc/hand_2headers_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/hand/headers.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/hand_2headers_8hpp_source.html b/web/html/doc/hand_2headers_8hpp_source.html new file mode 100644 index 000000000..594cb1fa2 --- /dev/null +++ b/web/html/doc/hand_2headers_8hpp_source.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: include/openpose/hand/headers.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/hand_cpu_renderer_8hpp.html b/web/html/doc/hand_cpu_renderer_8hpp.html new file mode 100644 index 000000000..21aad2848 --- /dev/null +++ b/web/html/doc/hand_cpu_renderer_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/hand/handCpuRenderer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handCpuRenderer.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::HandCpuRenderer
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/hand_cpu_renderer_8hpp_source.html b/web/html/doc/hand_cpu_renderer_8hpp_source.html new file mode 100644 index 000000000..91ba2fd8a --- /dev/null +++ b/web/html/doc/hand_cpu_renderer_8hpp_source.html @@ -0,0 +1,142 @@ + + + + + + + +OpenPose: include/openpose/hand/handCpuRenderer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handCpuRenderer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_CPU_RENDERER_HPP
+
2 #define OPENPOSE_HAND_HAND_CPU_RENDERER_HPP
+
3 
+ + + + +
8 
+
9 namespace op
+
10 {
+ +
12  {
+
13  public:
+
14  HandCpuRenderer(const float renderThreshold, const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT,
+
15  const float alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP);
+
16 
+
17  virtual ~HandCpuRenderer();
+
18 
+
19  void renderHandInherited(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints);
+
20 
+ +
22  };
+
23 }
+
24 
+
25 #endif // OPENPOSE_HAND_HAND_CPU_RENDERER_HPP
+ + +
HandCpuRenderer(const float renderThreshold, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP)
+
DELETE_COPY(HandCpuRenderer)
+
virtual ~HandCpuRenderer()
+
void renderHandInherited(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints)
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+ +
const auto HAND_DEFAULT_ALPHA_HEAT_MAP
+
const auto HAND_DEFAULT_ALPHA_KEYPOINT
+ +
+
+ + + + diff --git a/web/html/doc/hand_detector_8hpp.html b/web/html/doc/hand_detector_8hpp.html new file mode 100644 index 000000000..97547197c --- /dev/null +++ b/web/html/doc/hand_detector_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/hand/handDetector.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handDetector.hpp File Reference
+
+
+
#include <mutex>
+#include <openpose/core/common.hpp>
+#include <openpose/pose/enumClasses.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Classes

class  op::HandDetector
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/hand_detector_8hpp_source.html b/web/html/doc/hand_detector_8hpp_source.html new file mode 100644 index 000000000..07ba5f63d --- /dev/null +++ b/web/html/doc/hand_detector_8hpp_source.html @@ -0,0 +1,166 @@ + + + + + + + +OpenPose: include/openpose/hand/handDetector.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handDetector.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_DETECTOR_HPP
+
2 #define OPENPOSE_HAND_HAND_DETECTOR_HPP
+
3 
+
4 #include <mutex>
+ + +
7 
+
8 namespace op
+
9 {
+
10  // Note: This class is thread-safe, so several GPUs can run hand detection and call `updateTracker` concurrently; updateTracker will keep the latest known
+
11  // tracking
+ +
13  {
+
14  public:
+
15  explicit HandDetector(const PoseModel poseModel);
+
16 
+
17  virtual ~HandDetector();
+
18 
+
19  std::vector<std::array<Rectangle<float>, 2>> detectHands(const Array<float>& poseKeypoints) const;
+
20 
+
21  std::vector<std::array<Rectangle<float>, 2>> trackHands(const Array<float>& poseKeypoints);
+
22 
+
23  void updateTracker(const std::array<Array<float>, 2>& handKeypoints, const unsigned long long id);
+
24 
+
25  private:
+
26  enum class PosePart : unsigned int
+
27  {
+
28  LWrist = 0,
+
29  LElbow,
+
30  LShoulder,
+
31  RWrist,
+
32  RElbow,
+
33  RShoulder,
+
34  Size,
+
35  };
+
36 
+
37  const std::array<unsigned int, (int)PosePart::Size> mPoseIndexes;
+
38  std::vector<std::array<Point<float>, (int)PosePart::Size>> mPoseTrack;
+
39  std::vector<Rectangle<float>> mHandLeftPrevious;
+
40  std::vector<Rectangle<float>> mHandRightPrevious;
+
41  unsigned long long mCurrentId;
+
42  std::mutex mMutex;
+
43 
+
44  std::array<unsigned int, (int)PosePart::Size> getPoseKeypoints(const PoseModel poseModel,
+
45  const std::array<std::string, (int)PosePart::Size>& poseStrings) const;
+
46 
+ +
48  };
+
49 }
+
50 
+
51 #endif // OPENPOSE_HAND_HAND_DETECTOR_HPP
+ + +
HandDetector(const PoseModel poseModel)
+
void updateTracker(const std::array< Array< float >, 2 > &handKeypoints, const unsigned long long id)
+
std::vector< std::array< Rectangle< float >, 2 > > detectHands(const Array< float > &poseKeypoints) const
+
std::vector< std::array< Rectangle< float >, 2 > > trackHands(const Array< float > &poseKeypoints)
+
virtual ~HandDetector()
+ +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ + +
PoseModel
Definition: enumClasses.hpp:10
+ +
+
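A hedged sketch of the detect-then-track flow suggested by the interface above. The per-frame body keypoints are assumed to come from a pose extractor, and the choice between fresh detection and tracking is left to the caller; updateTracker() would be fed by a later worker once hand keypoints are available.

    #include <array>
    #include <vector>
    #include <openpose/hand/handDetector.hpp>

    std::vector<std::array<op::Rectangle<float>, 2>> handRectanglesForFrame(
        op::HandDetector& handDetector, const op::Array<float>& poseKeypoints,
        const bool useTracking)
    {
        // Either estimate hand rectangles from scratch, or reuse the internal
        // tracker that updateTracker() keeps up to date across frames.
        return useTracking ? handDetector.trackHands(poseKeypoints)
                           : handDetector.detectHands(poseKeypoints);
    }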
+ + + + diff --git a/web/html/doc/hand_detector_from_txt_8hpp.html b/web/html/doc/hand_detector_from_txt_8hpp.html new file mode 100644 index 000000000..03b2bf157 --- /dev/null +++ b/web/html/doc/hand_detector_from_txt_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/hand/handDetectorFromTxt.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handDetectorFromTxt.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::HandDetectorFromTxt
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/hand_detector_from_txt_8hpp_source.html b/web/html/doc/hand_detector_from_txt_8hpp_source.html new file mode 100644 index 000000000..3bc49549a --- /dev/null +++ b/web/html/doc/hand_detector_from_txt_8hpp_source.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: include/openpose/hand/handDetectorFromTxt.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handDetectorFromTxt.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_DETECTOR_FROM_TXT_HPP
+
2 #define OPENPOSE_HAND_HAND_DETECTOR_FROM_TXT_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
10  {
+
11  public:
+
12  explicit HandDetectorFromTxt(const std::string& txtDirectoryPath);
+
13 
+ +
15 
+
16  std::vector<std::array<Rectangle<float>, 2>> detectHands();
+
17 
+
18  private:
+
19  const std::string mTxtDirectoryPath;
+
20  const std::vector<std::string> mFilePaths;
+
21  long long mFrameNameCounter;
+
22 
+ +
24  };
+
25 }
+
26 
+
27 #endif // OPENPOSE_HAND_HAND_DETECTOR_FROM_TXT_HPP
+ +
std::vector< std::array< Rectangle< float >, 2 > > detectHands()
+
virtual ~HandDetectorFromTxt()
+
HandDetectorFromTxt(const std::string &txtDirectoryPath)
+ +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ + +
+
+ + + + diff --git a/web/html/doc/hand_extractor_caffe_8hpp.html b/web/html/doc/hand_extractor_caffe_8hpp.html new file mode 100644 index 000000000..d18703d9b --- /dev/null +++ b/web/html/doc/hand_extractor_caffe_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/hand/handExtractorCaffe.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handExtractorCaffe.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::HandExtractorCaffe
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/hand_extractor_caffe_8hpp_source.html b/web/html/doc/hand_extractor_caffe_8hpp_source.html new file mode 100644 index 000000000..a2004389a --- /dev/null +++ b/web/html/doc/hand_extractor_caffe_8hpp_source.html @@ -0,0 +1,159 @@ + + + + + + + +OpenPose: include/openpose/hand/handExtractorCaffe.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handExtractorCaffe.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_EXTRACTOR_CAFFE_HPP
+
2 #define OPENPOSE_HAND_HAND_EXTRACTOR_CAFFE_HPP
+
3 
+ + + +
7 
+
8 namespace op
+
9 {
+ +
14  {
+
15  public:
+
26  HandExtractorCaffe(const Point<int>& netInputSize, const Point<int>& netOutputSize,
+
27  const std::string& modelFolder, const int gpuId,
+
28  const int numberScales = 1, const float rangeScales = 0.4f,
+
29  const std::vector<HeatMapType>& heatMapTypes = {},
+
30  const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect,
+
31  const bool enableGoogleLogging = true);
+
32 
+ +
38 
+ +
44 
+
54  void forwardPass(const std::vector<std::array<Rectangle<float>, 2>> handRectangles, const Matrix& inputData);
+
55 
+
56  private:
+
57  // PIMPL idiom
+
58  // http://www.cppsamples.com/common-tasks/pimpl.html
+
59  struct ImplHandExtractorCaffe;
+
60  std::unique_ptr<ImplHandExtractorCaffe> upImpl;
+
61 
+
62  Array<float> getHeatMapsFromLastPass() const;
+
63 
+
64  // PIMPL requires DELETE_COPY & destructor, or extra code
+
65  // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+ +
67  };
+
68 }
+
69 
+
70 #endif // OPENPOSE_HAND_HAND_EXTRACTOR_CAFFE_HPP
+ + +
void forwardPass(const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix &inputData)
+
HandExtractorCaffe(const Point< int > &netInputSize, const Point< int > &netOutputSize, const std::string &modelFolder, const int gpuId, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect, const bool enableGoogleLogging=true)
+ +
virtual ~HandExtractorCaffe()
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
ScaleMode
Definition: enumClasses.hpp:7
+ + + +
+
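A hedged end-to-end sketch: construct the Caffe-based extractor, initialize it on its worker thread, run a forward pass over the hand rectangles, and read back the keypoints. The 368x368 net sizes are illustrative defaults, getHandKeypoints() is inherited from HandExtractorNet, and initializationOnThread() is assumed to be the base-class member elided from the listing above.

    #include <array>
    #include <string>
    #include <vector>
    #include <openpose/hand/handExtractorCaffe.hpp>

    std::array<op::Array<float>, 2> extractHands(
        const std::vector<std::array<op::Rectangle<float>, 2>>& handRectangles,
        const op::Matrix& inputImage, const std::string& modelFolder)
    {
        op::HandExtractorCaffe handExtractor{
            op::Point<int>{368, 368}, op::Point<int>{368, 368}, modelFolder, /*gpuId*/ 0};
        handExtractor.initializationOnThread();   // loads the Caffe hand net (assumed base member)
        handExtractor.forwardPass(handRectangles, inputImage);
        return handExtractor.getHandKeypoints();  // {left, right} hand keypoints
    }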
+ + + + diff --git a/web/html/doc/hand_extractor_net_8hpp.html b/web/html/doc/hand_extractor_net_8hpp.html new file mode 100644 index 000000000..0445c9ae8 --- /dev/null +++ b/web/html/doc/hand_extractor_net_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/hand/handExtractorNet.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handExtractorNet.hpp File Reference
+
+
+
#include <atomic>
+#include <openpose/core/common.hpp>
+#include <openpose/core/enumClasses.hpp>
+
+

Go to the source code of this file.

+ + + + +

+Classes

class  op::HandExtractorNet
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/hand_extractor_net_8hpp_source.html b/web/html/doc/hand_extractor_net_8hpp_source.html new file mode 100644 index 000000000..61a679427 --- /dev/null +++ b/web/html/doc/hand_extractor_net_8hpp_source.html @@ -0,0 +1,187 @@ + + + + + + + +OpenPose: include/openpose/hand/handExtractorNet.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handExtractorNet.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_EXTRACTOR_HPP
+
2 #define OPENPOSE_HAND_HAND_EXTRACTOR_HPP
+
3 
+
4 #include <atomic>
+ + +
7 
+
8 namespace op
+
9 {
+ +
14  {
+
15  public:
+
24  explicit HandExtractorNet(const Point<int>& netInputSize, const Point<int>& netOutputSize,
+
25  const int numberScales = 1, const float rangeScales = 0.4f,
+
26  const std::vector<HeatMapType>& heatMapTypes = {},
+
27  const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect);
+
28 
+
33  virtual ~HandExtractorNet();
+
34 
+ +
40 
+
50  virtual void forwardPass(const std::vector<std::array<Rectangle<float>, 2>> handRectangles,
+
51  const Matrix& cvInputData) = 0;
+
52 
+
53  std::array<Array<float>, 2> getHeatMaps() const;
+
54 
+
62  std::array<Array<float>, 2> getHandKeypoints() const;
+
63 
+
64  bool getEnabled() const;
+
65 
+
66  void setEnabled(const bool enabled);
+
67 
+
68  protected:
+
69  const std::pair<int, float> mMultiScaleNumberAndRange;
+ + +
72  std::array<Array<float>, 2> mHandKeypoints;
+
73  // HeatMaps parameters
+ +
75  const std::vector<HeatMapType> mHeatMapTypes;
+
76  std::array<Array<float>, 2> mHeatMaps;
+
77  // Temporarily disable it
+
78  std::atomic<bool> mEnabled;
+
79 
+
80  virtual void netInitializationOnThread() = 0;
+
81 
+
82  private:
+
83  // Init with thread
+
84  std::thread::id mThreadId;
+
85 
+
86  void checkThread() const;
+
87 
+ +
89  };
+
90 }
+
91 
+
92 #endif // OPENPOSE_HAND_HAND_EXTRACTOR_HPP
+ + +
Array< float > mHandImageCrop
+
virtual void forwardPass(const std::vector< std::array< Rectangle< float >, 2 >> handRectangles, const Matrix &cvInputData)=0
+
const std::pair< int, float > mMultiScaleNumberAndRange
+
std::atomic< bool > mEnabled
+
virtual ~HandExtractorNet()
+ +
std::array< Array< float >, 2 > mHeatMaps
+
std::array< Array< float >, 2 > mHandKeypoints
+
std::array< Array< float >, 2 > getHeatMaps() const
+
HandExtractorNet(const Point< int > &netInputSize, const Point< int > &netOutputSize, const int numberScales=1, const float rangeScales=0.4f, const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::ZeroToOneFixedAspect)
+
virtual void netInitializationOnThread()=0
+
const std::vector< HeatMapType > mHeatMapTypes
+
void setEnabled(const bool enabled)
+
const Point< int > mNetOutputSize
+
std::array< Array< float >, 2 > getHandKeypoints() const
+
const ScaleMode mHeatMapScaleMode
+
bool getEnabled() const
+ + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
ScaleMode
Definition: enumClasses.hpp:7
+ + + +
+
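Because the concrete backends derive from this class, callers can hold the base pointer and use the enable flag declared above without knowing which extractor is in use. An illustrative sketch (the pause scenario is hypothetical):

    #include <memory>
    #include <openpose/hand/handExtractorNet.hpp>

    // Illustrative: temporarily disable hand extraction (e.g. while seeking in a
    // video) through the base interface, regardless of the concrete backend.
    void pauseHandExtraction(const std::shared_ptr<op::HandExtractorNet>& handExtractorNet)
    {
        if (handExtractorNet != nullptr && handExtractorNet->getEnabled())
            handExtractorNet->setEnabled(false);
    }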
+ + + + diff --git a/web/html/doc/hand_gpu_renderer_8hpp.html b/web/html/doc/hand_gpu_renderer_8hpp.html new file mode 100644 index 000000000..225a623e3 --- /dev/null +++ b/web/html/doc/hand_gpu_renderer_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/hand/handGpuRenderer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handGpuRenderer.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/hand_gpu_renderer_8hpp_source.html b/web/html/doc/hand_gpu_renderer_8hpp_source.html new file mode 100644 index 000000000..08244fe54 --- /dev/null +++ b/web/html/doc/hand_gpu_renderer_8hpp_source.html @@ -0,0 +1,152 @@ + + + + + + + +OpenPose: include/openpose/hand/handGpuRenderer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handGpuRenderer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_GPU_RENDERER_HPP
+
2 #define OPENPOSE_HAND_HAND_GPU_RENDERER_HPP
+
3 
+ + + + +
8 
+
9 namespace op
+
10 {
+ +
12  {
+
13  public:
+
14  HandGpuRenderer(const float renderThreshold,
+
15  const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT,
+
16  const float alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP);
+
17 
+
18  virtual ~HandGpuRenderer();
+
19 
+ +
21 
+
22  void renderHandInherited(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints);
+
23 
+
24  private:
+
25  float* pGpuHand; // GPU aux memory
+
26  float* pMaxPtr; // GPU aux memory
+
27  float* pMinPtr; // GPU aux memory
+
28  float* pScalePtr; // GPU aux memory
+
29 
+ +
31  };
+
32 }
+
33 
+
34 #endif // OPENPOSE_HAND_HAND_GPU_RENDERER_HPP
+ + + +
void initializationOnThread()
+
HandGpuRenderer(const float renderThreshold, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP)
+
void renderHandInherited(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints)
+
virtual ~HandGpuRenderer()
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
const auto HAND_DEFAULT_ALPHA_HEAT_MAP
+
const auto HAND_DEFAULT_ALPHA_KEYPOINT
+
+
+ + + + diff --git a/web/html/doc/hand_parameters_8hpp.html b/web/html/doc/hand_parameters_8hpp.html new file mode 100644 index 000000000..494beba6f --- /dev/null +++ b/web/html/doc/hand_parameters_8hpp.html @@ -0,0 +1,217 @@ + + + + + + + +OpenPose: include/openpose/hand/handParameters.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handParameters.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Namespaces

 op
 
+ + + + + + + +

+Macros

#define HAND_PAIRS_RENDER_GPU    0,1, 1,2, 2,3, 3,4, 0,5, 5,6, 6,7, 7,8, 0,9, 9,10, 10,11, 11,12, 0,13, 13,14, 14,15, 15,16, 0,17, 17,18, 18,19, 19,20
 
#define HAND_SCALES_RENDER_GPU   1
 
#define HAND_COLORS_RENDER_GPU
 
+ + + + + + + + + + + + + + + + + + + + + +

+Variables

const auto op::HAND_MAX_HANDS = 2*POSE_MAX_PEOPLE
 
const auto op::HAND_NUMBER_PARTS = 21u
 
const std::vector< unsigned int > op::HAND_PAIRS_RENDER {HAND_PAIRS_RENDER_GPU}
 
const std::vector< float > op::HAND_COLORS_RENDER {HAND_COLORS_RENDER_GPU}
 
const std::vector< float > op::HAND_SCALES_RENDER {HAND_SCALES_RENDER_GPU}
 
const auto op::HAND_CCN_DECREASE_FACTOR = 8.f
 
const std::string op::HAND_PROTOTXT {"hand/pose_deploy.prototxt"}
 
const std::string op::HAND_TRAINED_MODEL {"hand/pose_iter_102000.caffemodel"}
 
const auto op::HAND_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT
 
const auto op::HAND_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP
 
+

Macro Definition Documentation

+ +

◆ HAND_COLORS_RENDER_GPU

+ +
+
+ + + + +
#define HAND_COLORS_RENDER_GPU
+
+Value:
100.f, 100.f, 100.f, \
+
100.f, 0.f, 0.f, \
+
150.f, 0.f, 0.f, \
+
200.f, 0.f, 0.f, \
+
255.f, 0.f, 0.f, \
+
100.f, 100.f, 0.f, \
+
150.f, 150.f, 0.f, \
+
200.f, 200.f, 0.f, \
+
255.f, 255.f, 0.f, \
+
0.f, 100.f, 50.f, \
+
0.f, 150.f, 75.f, \
+
0.f, 200.f, 100.f, \
+
0.f, 255.f, 125.f, \
+
0.f, 50.f, 100.f, \
+
0.f, 75.f, 150.f, \
+
0.f, 100.f, 200.f, \
+
0.f, 125.f, 255.f, \
+
100.f, 0.f, 100.f, \
+
150.f, 0.f, 150.f, \
+
200.f, 0.f, 200.f, \
+
255.f, 0.f, 255.f
+
+

Definition at line 16 of file handParameters.hpp.

+ +
+
+ +

◆ HAND_PAIRS_RENDER_GPU

+ +
+
+ + + + +
#define HAND_PAIRS_RENDER_GPU    0,1, 1,2, 2,3, 3,4, 0,5, 5,6, 6,7, 7,8, 0,9, 9,10, 10,11, 11,12, 0,13, 13,14, 14,15, 15,16, 0,17, 17,18, 18,19, 19,20
+
+ +

Definition at line 12 of file handParameters.hpp.

+ +
+
+ +

◆ HAND_SCALES_RENDER_GPU

+ +
+
+ + + + +
#define HAND_SCALES_RENDER_GPU   1
+
+ +

Definition at line 14 of file handParameters.hpp.

+ +
+
+
+
+ + + + diff --git a/web/html/doc/hand_parameters_8hpp.js b/web/html/doc/hand_parameters_8hpp.js new file mode 100644 index 000000000..56af539ae --- /dev/null +++ b/web/html/doc/hand_parameters_8hpp.js @@ -0,0 +1,16 @@ +var hand_parameters_8hpp = +[ + [ "HAND_COLORS_RENDER_GPU", "hand_parameters_8hpp.html#a5ca06d0202756b9a3b8825ccbafc0558", null ], + [ "HAND_PAIRS_RENDER_GPU", "hand_parameters_8hpp.html#a5ec40add22e28bc75596c75a7be8a692", null ], + [ "HAND_SCALES_RENDER_GPU", "hand_parameters_8hpp.html#a799d629d7fddd7f0daf40ccdae0293b9", null ], + [ "HAND_CCN_DECREASE_FACTOR", "hand_parameters_8hpp.html#aed0d108f5ada623eeb0ed41f896f8e97", null ], + [ "HAND_COLORS_RENDER", "hand_parameters_8hpp.html#a450bb646e7573322d8f622bfdbab4833", null ], + [ "HAND_DEFAULT_ALPHA_HEAT_MAP", "hand_parameters_8hpp.html#a76c1f1ea90b73e13e93f72413b3cab0e", null ], + [ "HAND_DEFAULT_ALPHA_KEYPOINT", "hand_parameters_8hpp.html#aa8cc53d2fe5353f9d87d50c32a8c1a95", null ], + [ "HAND_MAX_HANDS", "hand_parameters_8hpp.html#a182585e2e944cdb62f3dededdd85d1fc", null ], + [ "HAND_NUMBER_PARTS", "hand_parameters_8hpp.html#a41b6fb82924c5532cf10151e6ce497f2", null ], + [ "HAND_PAIRS_RENDER", "hand_parameters_8hpp.html#a335d707e98d311d39d9a9dab0e325391", null ], + [ "HAND_PROTOTXT", "hand_parameters_8hpp.html#a3fe70bd1eacdd78aef3344c83533ffc7", null ], + [ "HAND_SCALES_RENDER", "hand_parameters_8hpp.html#a4e9bbc2167923763c5982d6d1f41f560", null ], + [ "HAND_TRAINED_MODEL", "hand_parameters_8hpp.html#ac13af59538bcb8a1709f20010681d1c7", null ] +]; \ No newline at end of file diff --git a/web/html/doc/hand_parameters_8hpp_source.html b/web/html/doc/hand_parameters_8hpp_source.html new file mode 100644 index 000000000..b677673e7 --- /dev/null +++ b/web/html/doc/hand_parameters_8hpp_source.html @@ -0,0 +1,172 @@ + + + + + + + +OpenPose: include/openpose/hand/handParameters.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handParameters.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_PARAMETERS_HPP
+
2 #define OPENPOSE_HAND_HAND_PARAMETERS_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
10 
+
11  const auto HAND_NUMBER_PARTS = 21u;
+
12  #define HAND_PAIRS_RENDER_GPU \
+
13  0,1, 1,2, 2,3, 3,4, 0,5, 5,6, 6,7, 7,8, 0,9, 9,10, 10,11, 11,12, 0,13, 13,14, 14,15, 15,16, 0,17, 17,18, 18,19, 19,20
+
14  #define HAND_SCALES_RENDER_GPU 1
+
15  const std::vector<unsigned int> HAND_PAIRS_RENDER {HAND_PAIRS_RENDER_GPU};
+
16  #define HAND_COLORS_RENDER_GPU \
+
17  100.f, 100.f, 100.f, \
+
18  100.f, 0.f, 0.f, \
+
19  150.f, 0.f, 0.f, \
+
20  200.f, 0.f, 0.f, \
+
21  255.f, 0.f, 0.f, \
+
22  100.f, 100.f, 0.f, \
+
23  150.f, 150.f, 0.f, \
+
24  200.f, 200.f, 0.f, \
+
25  255.f, 255.f, 0.f, \
+
26  0.f, 100.f, 50.f, \
+
27  0.f, 150.f, 75.f, \
+
28  0.f, 200.f, 100.f, \
+
29  0.f, 255.f, 125.f, \
+
30  0.f, 50.f, 100.f, \
+
31  0.f, 75.f, 150.f, \
+
32  0.f, 100.f, 200.f, \
+
33  0.f, 125.f, 255.f, \
+
34  100.f, 0.f, 100.f, \
+
35  150.f, 0.f, 150.f, \
+
36  200.f, 0.f, 200.f, \
+
37  255.f, 0.f, 255.f
+
38  const std::vector<float> HAND_COLORS_RENDER{HAND_COLORS_RENDER_GPU};
+
39  const std::vector<float> HAND_SCALES_RENDER{HAND_SCALES_RENDER_GPU};
+
40 
+
41 
+
42  // Constant parameters
+
43  const auto HAND_CCN_DECREASE_FACTOR = 8.f;
+
44  const std::string HAND_PROTOTXT{"hand/pose_deploy.prototxt"};
+
45  const std::string HAND_TRAINED_MODEL{"hand/pose_iter_102000.caffemodel"};
+
46 
+
47  // Rendering parameters
+ + +
50 }
+
51 
+
52 #endif // OPENPOSE_HAND_HAND_PARAMETERS_HPP
+
#define HAND_COLORS_RENDER_GPU
+
#define HAND_PAIRS_RENDER_GPU
+
#define HAND_SCALES_RENDER_GPU
+ +
const auto HAND_MAX_HANDS
+
const auto POSE_DEFAULT_ALPHA_KEYPOINT
+
const std::vector< unsigned int > HAND_PAIRS_RENDER
+
const std::string HAND_PROTOTXT
+
const auto HAND_NUMBER_PARTS
+
const std::vector< float > HAND_COLORS_RENDER
+
const std::vector< float > HAND_SCALES_RENDER
+
const auto POSE_MAX_PEOPLE
+
const auto HAND_DEFAULT_ALPHA_HEAT_MAP
+
const auto HAND_DEFAULT_ALPHA_KEYPOINT
+
const std::string HAND_TRAINED_MODEL
+
const auto HAND_CCN_DECREASE_FACTOR
+
const auto POSE_DEFAULT_ALPHA_HEAT_MAP
+ + +
+
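The render constants above are consumed in pairs (limb endpoints) and RGB triplets. A hedged sketch of iterating them follows; the limb-to-colour mapping shown is purely illustrative and is not OpenPose's own rendering code.

    #include <cstdio>
    #include <openpose/hand/handParameters.hpp>

    // Illustrative: print each hand limb pair with a colour picked from the palette.
    void printHandSkeleton()
    {
        const auto numberColors = op::HAND_COLORS_RENDER.size() / 3; // RGB triplets
        for (auto pair = 0u; pair < op::HAND_PAIRS_RENDER.size() / 2; pair++)
        {
            const auto partA = op::HAND_PAIRS_RENDER[2 * pair];
            const auto partB = op::HAND_PAIRS_RENDER[2 * pair + 1];
            const auto colorIndex = 3 * (partB % numberColors);   // assumed mapping
            std::printf("limb %u-%u -> RGB(%.0f, %.0f, %.0f)\n", partA, partB,
                        op::HAND_COLORS_RENDER[colorIndex],
                        op::HAND_COLORS_RENDER[colorIndex + 1],
                        op::HAND_COLORS_RENDER[colorIndex + 2]);
        }
    }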
+ + + + diff --git a/web/html/doc/hand_renderer_8hpp.html b/web/html/doc/hand_renderer_8hpp.html new file mode 100644 index 000000000..31fde1885 --- /dev/null +++ b/web/html/doc/hand_renderer_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/hand/handRenderer.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
handRenderer.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::HandRenderer
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/hand_renderer_8hpp_source.html b/web/html/doc/hand_renderer_8hpp_source.html new file mode 100644 index 000000000..4c3ed8435 --- /dev/null +++ b/web/html/doc/hand_renderer_8hpp_source.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/hand/handRenderer.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
handRenderer.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HAND_HAND_RENDERER_HPP
+
2 #define OPENPOSE_HAND_HAND_RENDERER_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+ +
9  {
+
10  public:
+
11  virtual ~HandRenderer(){};
+
12 
+
13  virtual void initializationOnThread(){};
+
14 
+
15  void renderHand(Array<float>& outputData, const std::array<Array<float>, 2>& handKeypoints,
+
16  const float scaleInputToOutput);
+
17 
+
18  private:
+
19  virtual void renderHandInherited(Array<float>& outputData,
+
20  const std::array<Array<float>, 2>& handKeypoints) = 0;
+
21  };
+
22 }
+
23 
+
24 #endif // OPENPOSE_HAND_HAND_RENDERER_HPP
+ + +
void renderHand(Array< float > &outputData, const std::array< Array< float >, 2 > &handKeypoints, const float scaleInputToOutput)
+
virtual ~HandRenderer()
+
virtual void initializationOnThread()
+ +
#define OP_API
Definition: macros.hpp:18
+ +
+
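The public entry point is renderHand(); the CPU/GPU subclasses only supply renderHandInherited(). A hedged call-site sketch, assuming the renderer construction and the input-to-output scale factor are handled by the caller:

    #include <array>
    #include <openpose/hand/handRenderer.hpp>

    void drawHands(op::HandRenderer& handRenderer, op::Array<float>& outputData,
                   const std::array<op::Array<float>, 2>& handKeypoints,
                   const float scaleInputToOutput)
    {
        // Dispatches internally to HandCpuRenderer or HandGpuRenderer::renderHandInherited.
        handRenderer.renderHand(outputData, handKeypoints, scaleInputToOutput);
    }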
+ + + + diff --git a/web/html/doc/headers_8hpp.html b/web/html/doc/headers_8hpp.html new file mode 100644 index 000000000..4b94ee0ef --- /dev/null +++ b/web/html/doc/headers_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/headers.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ + + + + + diff --git a/web/html/doc/headers_8hpp_source.html b/web/html/doc/headers_8hpp_source.html new file mode 100644 index 000000000..55c5a546c --- /dev/null +++ b/web/html/doc/headers_8hpp_source.html @@ -0,0 +1,165 @@ + + + + + + + +OpenPose: include/openpose/headers.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
headers.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_HEADERS_HPP
+
2 #define OPENPOSE_HEADERS_HPP
+
3 
+
4 // 3d module
+ +
6 
+
7 // calibration module
+ +
9 
+
10 // core module
+ +
12 
+
13 // face module
+ +
15 
+
16 // filestream module
+ +
18 
+
19 // gui module
+
20 #include <openpose/gui/headers.hpp>
+
21 
+
22 // hand module
+ +
24 
+
25 // net module
+
26 #include <openpose/net/headers.hpp>
+
27 
+
28 // pose module
+ +
30 
+
31 // producer module
+ +
33 
+
34 // threading module
+ +
36 
+
37 // tracking module
+ +
39 
+
40 // unity module
+ +
42 
+
43 // utilities module
+ +
45 
+
46 // wrapper module
+ +
48 
+
49 #endif // OPENPOSE_HEADERS_HPP
+ + + + + + + + + + + + + + + +
+
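headers.hpp is purely an aggregation header: including it is equivalent to including every module header listed above. A trivial compile check (the net input size value is illustrative):

    // Pulls in all OpenPose modules (core, pose, hand, gui, wrapper, ...).
    #include <openpose/headers.hpp>

    int main()
    {
        // Any symbol from the aggregated modules is now visible, e.g. op::Point.
        const op::Point<int> netInputSize{656, 368};
        return (netInputSize.x > 0 && netInputSize.y > 0) ? 0 : 1;
    }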
+ + + + diff --git a/web/html/doc/heat_map_saver_8hpp.html b/web/html/doc/heat_map_saver_8hpp.html new file mode 100644 index 000000000..e9c24416c --- /dev/null +++ b/web/html/doc/heat_map_saver_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/filestream/heatMapSaver.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
heatMapSaver.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::HeatMapSaver
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/heat_map_saver_8hpp_source.html b/web/html/doc/heat_map_saver_8hpp_source.html new file mode 100644 index 000000000..76ee1ae51 --- /dev/null +++ b/web/html/doc/heat_map_saver_8hpp_source.html @@ -0,0 +1,134 @@ + + + + + + + +OpenPose: include/openpose/filestream/heatMapSaver.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
heatMapSaver.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FILESTREAM_HEAT_MAP_SAVER_HPP
+
2 #define OPENPOSE_FILESTREAM_HEAT_MAP_SAVER_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+
9  class OP_API HeatMapSaver : public FileSaver
+
10  {
+
11  public:
+
12  HeatMapSaver(const std::string& directoryPath, const std::string& imageFormat);
+
13 
+
14  virtual ~HeatMapSaver();
+
15 
+
16  void saveHeatMaps(const std::vector<Array<float>>& heatMaps, const std::string& fileName) const;
+
17 
+
18  private:
+
19  const std::string mImageFormat;
+
20  };
+
21 }
+
22 
+
23 #endif // OPENPOSE_FILESTREAM_HEAT_MAP_SAVER_HPP
+ + + +
virtual ~HeatMapSaver()
+
void saveHeatMaps(const std::vector< Array< float >> &heatMaps, const std::string &fileName) const
+
HeatMapSaver(const std::string &directoryPath, const std::string &imageFormat)
+ + +
#define OP_API
Definition: macros.hpp:18
+ +
+
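A hedged sketch of saving the heat maps produced for one frame. The output directory and image format strings are illustrative, and the heat maps are assumed to come from the pose/face/hand extractors (e.g. Datum::poseHeatMaps); only the constructor and saveHeatMaps() declared above are used.

    #include <string>
    #include <vector>
    #include <openpose/filestream/heatMapSaver.hpp>

    void saveFrameHeatMaps(const std::vector<op::Array<float>>& heatMaps,
                           const unsigned long long frameNumber)
    {
        // "png" is illustrative; any format accepted by the saver could be used.
        const op::HeatMapSaver heatMapSaver{"output_heatmaps/", "png"};
        heatMapSaver.saveHeatMaps(heatMaps, std::to_string(frameNumber));
    }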
+ + + + diff --git a/web/html/doc/heatmap__output_8md.html b/web/html/doc/heatmap__output_8md.html new file mode 100644 index 000000000..ae3b82c9c --- /dev/null +++ b/web/html/doc/heatmap__output_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/advanced/heatmap_output.md File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
doc/advanced/heatmap_output.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/hierarchy.html b/web/html/doc/hierarchy.html new file mode 100644 index 000000000..8302ad15a --- /dev/null +++ b/web/html/doc/hierarchy.html @@ -0,0 +1,251 @@ + + + + + + + +OpenPose: Class Hierarchy + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
Class Hierarchy
+
+
+
This inheritance list is sorted roughly, but not completely, alphabetically:
+
[detail level 123]
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
 Cop::Array< T >
 Cop::Array< float >
 Cop::Array< long long >
 Cop::ArrayCpuGpu< T >
 Cop::BodyPartConnectorCaffe< T >
 Cop::CameraParameterReader
 Cop::CocoJsonSaver
 Cop::CvMatToOpInput
 Cop::CvMatToOpOutput
 Cop::Datum
 Cop::DatumProducer< TDatum >
 Cop::FaceDetector
 Cop::FaceDetectorOpenCV
 Cop::FaceExtractorNet
 Cop::FaceRenderer
 Cop::FileSaver
 Cop::FrameDisplayer
 Cop::Gui
 Cop::GuiInfoAdder
 Cop::HandDetector
 Cop::HandDetectorFromTxt
 Cop::HandExtractorNet
 Cop::HandRenderer
 Cop::JsonOfstream
 Cop::KeepTopNPeople
 Cop::KeypointScaler
 Cop::Matrix
 Cop::MaximumCaffe< T >
 Cop::Net
 Cop::NmsCaffe< T >
 Cop::OpOutputToCvMat
 Cop::PersonIdExtractor
 Cop::PersonTracker
 Cop::Point< T >
 Cop::Point< int >
 Cop::PointerContainerGreater< TDatumsSP >
 Cop::PointerContainerLess< TDatumsSP >
 Cop::PoseExtractor
 Cop::PoseExtractorNet
 Cop::PoseRenderer
 Cop::PoseTriangulation
 Cop::Producer
 Cop::Profiler
 Cop::QueueBase< TDatums, TQueue >
 CQueueBase< TDatums, std::priority_queue< TDatums, std::vector< TDatums >, std::greater< TDatums >> >
 Cop::QueueBase< TDatums, std::queue< TDatums > >
 Cop::Rectangle< T >
 Cop::Renderer
 Cop::ResizeAndMergeCaffe< T >
 Cop::ScaleAndSizeExtractor
 Cop::SpinnakerWrapper
 Cop::String
 Cop::SubThread< TDatums, TWorker >
 CSubThread< TDatums, std::shared_ptr< Worker< TDatums >> >
 Cop::Thread< TDatums, TWorker >
 Cop::ThreadManager< TDatums, TWorker, TQueue >
 Cop::UdpSender
 Cop::VerbosePrinter
 Cop::VideoSaver
 Cop::Worker< TDatums >
 Cop::Worker< std::shared_ptr< TDatums > >
 Cop::WrapperStructExtra
 Cop::WrapperStructFace
 Cop::WrapperStructGui
 Cop::WrapperStructHand
 Cop::WrapperStructInput
 Cop::WrapperStructOutput
 Cop::WrapperStructPose
 Cop::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >
+
+
+
+ + + + diff --git a/web/html/doc/hierarchy.js b/web/html/doc/hierarchy.js new file mode 100644 index 000000000..308fb8057 --- /dev/null +++ b/web/html/doc/hierarchy.js @@ -0,0 +1,170 @@ +var hierarchy = +[ + [ "op::Array< T >", "classop_1_1_array.html", null ], + [ "op::Array< float >", "classop_1_1_array.html", null ], + [ "op::Array< long long >", "classop_1_1_array.html", null ], + [ "op::ArrayCpuGpu< T >", "classop_1_1_array_cpu_gpu.html", null ], + [ "op::BodyPartConnectorCaffe< T >", "classop_1_1_body_part_connector_caffe.html", null ], + [ "op::CameraParameterReader", "classop_1_1_camera_parameter_reader.html", null ], + [ "op::CocoJsonSaver", "classop_1_1_coco_json_saver.html", null ], + [ "op::CvMatToOpInput", "classop_1_1_cv_mat_to_op_input.html", null ], + [ "op::CvMatToOpOutput", "classop_1_1_cv_mat_to_op_output.html", null ], + [ "op::Datum", "structop_1_1_datum.html", null ], + [ "op::DatumProducer< TDatum >", "classop_1_1_datum_producer.html", null ], + [ "op::FaceDetector", "classop_1_1_face_detector.html", null ], + [ "op::FaceDetectorOpenCV", "classop_1_1_face_detector_open_c_v.html", null ], + [ "op::FaceExtractorNet", "classop_1_1_face_extractor_net.html", [ + [ "op::FaceExtractorCaffe", "classop_1_1_face_extractor_caffe.html", null ] + ] ], + [ "op::FaceRenderer", "classop_1_1_face_renderer.html", [ + [ "op::FaceCpuRenderer", "classop_1_1_face_cpu_renderer.html", null ], + [ "op::FaceGpuRenderer", "classop_1_1_face_gpu_renderer.html", null ] + ] ], + [ "op::FileSaver", "classop_1_1_file_saver.html", [ + [ "op::HeatMapSaver", "classop_1_1_heat_map_saver.html", null ], + [ "op::ImageSaver", "classop_1_1_image_saver.html", null ], + [ "op::KeypointSaver", "classop_1_1_keypoint_saver.html", null ], + [ "op::PeopleJsonSaver", "classop_1_1_people_json_saver.html", null ] + ] ], + [ "op::FrameDisplayer", "classop_1_1_frame_displayer.html", null ], + [ "op::Gui", "classop_1_1_gui.html", [ + [ "op::Gui3D", "classop_1_1_gui3_d.html", null ] + ] ], + [ "op::GuiInfoAdder", "classop_1_1_gui_info_adder.html", null ], + [ "op::HandDetector", "classop_1_1_hand_detector.html", null ], + [ "op::HandDetectorFromTxt", "classop_1_1_hand_detector_from_txt.html", null ], + [ "op::HandExtractorNet", "classop_1_1_hand_extractor_net.html", [ + [ "op::HandExtractorCaffe", "classop_1_1_hand_extractor_caffe.html", null ] + ] ], + [ "op::HandRenderer", "classop_1_1_hand_renderer.html", [ + [ "op::HandCpuRenderer", "classop_1_1_hand_cpu_renderer.html", null ], + [ "op::HandGpuRenderer", "classop_1_1_hand_gpu_renderer.html", null ] + ] ], + [ "op::JsonOfstream", "classop_1_1_json_ofstream.html", null ], + [ "op::KeepTopNPeople", "classop_1_1_keep_top_n_people.html", null ], + [ "op::KeypointScaler", "classop_1_1_keypoint_scaler.html", null ], + [ "op::Matrix", "classop_1_1_matrix.html", null ], + [ "op::MaximumCaffe< T >", "classop_1_1_maximum_caffe.html", null ], + [ "op::Net", "classop_1_1_net.html", [ + [ "op::NetCaffe", "classop_1_1_net_caffe.html", null ], + [ "op::NetOpenCv", "classop_1_1_net_open_cv.html", null ] + ] ], + [ "op::NmsCaffe< T >", "classop_1_1_nms_caffe.html", null ], + [ "op::OpOutputToCvMat", "classop_1_1_op_output_to_cv_mat.html", null ], + [ "op::PersonIdExtractor", "classop_1_1_person_id_extractor.html", null ], + [ "op::PersonTracker", "classop_1_1_person_tracker.html", null ], + [ "op::Point< T >", "structop_1_1_point.html", null ], + [ "op::Point< int >", "structop_1_1_point.html", null ], + [ "op::PointerContainerGreater< TDatumsSP >", 
"classop_1_1_pointer_container_greater.html", null ], + [ "op::PointerContainerLess< TDatumsSP >", "classop_1_1_pointer_container_less.html", null ], + [ "op::PoseExtractor", "classop_1_1_pose_extractor.html", null ], + [ "op::PoseExtractorNet", "classop_1_1_pose_extractor_net.html", [ + [ "op::PoseExtractorCaffe", "classop_1_1_pose_extractor_caffe.html", null ] + ] ], + [ "op::PoseRenderer", "classop_1_1_pose_renderer.html", [ + [ "op::PoseCpuRenderer", "classop_1_1_pose_cpu_renderer.html", null ], + [ "op::PoseGpuRenderer", "classop_1_1_pose_gpu_renderer.html", null ] + ] ], + [ "op::PoseTriangulation", "classop_1_1_pose_triangulation.html", null ], + [ "op::Producer", "classop_1_1_producer.html", [ + [ "op::FlirReader", "classop_1_1_flir_reader.html", null ], + [ "op::ImageDirectoryReader", "classop_1_1_image_directory_reader.html", null ], + [ "op::VideoCaptureReader", "classop_1_1_video_capture_reader.html", [ + [ "op::IpCameraReader", "classop_1_1_ip_camera_reader.html", null ], + [ "op::VideoReader", "classop_1_1_video_reader.html", null ], + [ "op::WebcamReader", "classop_1_1_webcam_reader.html", null ] + ] ] + ] ], + [ "op::Profiler", "classop_1_1_profiler.html", null ], + [ "op::QueueBase< TDatums, TQueue >", "classop_1_1_queue_base.html", null ], + [ "QueueBase< TDatums, std::priority_queue< TDatums, std::vector< TDatums >, std::greater< TDatums >> >", null, [ + [ "op::PriorityQueue< TDatums, TQueue >", "classop_1_1_priority_queue.html", null ] + ] ], + [ "op::QueueBase< TDatums, std::queue< TDatums > >", "classop_1_1_queue_base.html", [ + [ "op::Queue< TDatums, TQueue >", "classop_1_1_queue.html", null ] + ] ], + [ "op::Rectangle< T >", "structop_1_1_rectangle.html", null ], + [ "op::Renderer", "classop_1_1_renderer.html", [ + [ "op::FaceCpuRenderer", "classop_1_1_face_cpu_renderer.html", null ], + [ "op::GpuRenderer", "classop_1_1_gpu_renderer.html", [ + [ "op::FaceGpuRenderer", "classop_1_1_face_gpu_renderer.html", null ], + [ "op::HandGpuRenderer", "classop_1_1_hand_gpu_renderer.html", null ], + [ "op::PoseGpuRenderer", "classop_1_1_pose_gpu_renderer.html", null ] + ] ], + [ "op::HandCpuRenderer", "classop_1_1_hand_cpu_renderer.html", null ], + [ "op::PoseCpuRenderer", "classop_1_1_pose_cpu_renderer.html", null ] + ] ], + [ "op::ResizeAndMergeCaffe< T >", "classop_1_1_resize_and_merge_caffe.html", null ], + [ "op::ScaleAndSizeExtractor", "classop_1_1_scale_and_size_extractor.html", null ], + [ "op::SpinnakerWrapper", "classop_1_1_spinnaker_wrapper.html", null ], + [ "op::String", "classop_1_1_string.html", null ], + [ "op::SubThread< TDatums, TWorker >", "classop_1_1_sub_thread.html", null ], + [ "SubThread< TDatums, std::shared_ptr< Worker< TDatums >> >", null, [ + [ "op::SubThreadNoQueue< TDatums, TWorker >", "classop_1_1_sub_thread_no_queue.html", null ], + [ "op::SubThreadQueueIn< TDatums, TWorker, TQueue >", "classop_1_1_sub_thread_queue_in.html", null ], + [ "op::SubThreadQueueInOut< TDatums, TWorker, TQueue >", "classop_1_1_sub_thread_queue_in_out.html", null ], + [ "op::SubThreadQueueOut< TDatums, TWorker, TQueue >", "classop_1_1_sub_thread_queue_out.html", null ] + ] ], + [ "op::Thread< TDatums, TWorker >", "classop_1_1_thread.html", null ], + [ "op::ThreadManager< TDatums, TWorker, TQueue >", "classop_1_1_thread_manager.html", null ], + [ "op::UdpSender", "classop_1_1_udp_sender.html", null ], + [ "op::VerbosePrinter", "classop_1_1_verbose_printer.html", null ], + [ "op::VideoSaver", "classop_1_1_video_saver.html", null ], + [ "op::Worker< TDatums >", 
"classop_1_1_worker.html", [ + [ "op::WorkerProducer< std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > >", "classop_1_1_worker_producer.html", [ + [ "op::WDatumProducer< TDatum >", "classop_1_1_w_datum_producer.html", null ] + ] ], + [ "op::WCvMatToOpInput< TDatums >", "classop_1_1_w_cv_mat_to_op_input.html", null ], + [ "op::WCvMatToOpOutput< TDatums >", "classop_1_1_w_cv_mat_to_op_output.html", null ], + [ "op::WFaceDetector< TDatums >", "classop_1_1_w_face_detector.html", null ], + [ "op::WFaceDetectorOpenCV< TDatums >", "classop_1_1_w_face_detector_open_c_v.html", null ], + [ "op::WFaceExtractorNet< TDatums >", "classop_1_1_w_face_extractor_net.html", null ], + [ "op::WFaceRenderer< TDatums >", "classop_1_1_w_face_renderer.html", null ], + [ "op::WFpsMax< TDatums >", "classop_1_1_w_fps_max.html", null ], + [ "op::WGuiInfoAdder< TDatums >", "classop_1_1_w_gui_info_adder.html", null ], + [ "op::WHandDetector< TDatums >", "classop_1_1_w_hand_detector.html", null ], + [ "op::WHandDetectorFromTxt< TDatums >", "classop_1_1_w_hand_detector_from_txt.html", null ], + [ "op::WHandDetectorTracking< TDatums >", "classop_1_1_w_hand_detector_tracking.html", null ], + [ "op::WHandDetectorUpdate< TDatums >", "classop_1_1_w_hand_detector_update.html", null ], + [ "op::WHandExtractorNet< TDatums >", "classop_1_1_w_hand_extractor_net.html", null ], + [ "op::WHandRenderer< TDatums >", "classop_1_1_w_hand_renderer.html", null ], + [ "op::WIdGenerator< TDatums >", "classop_1_1_w_id_generator.html", null ], + [ "op::WKeepTopNPeople< TDatums >", "classop_1_1_w_keep_top_n_people.html", null ], + [ "op::WKeypointScaler< TDatums >", "classop_1_1_w_keypoint_scaler.html", null ], + [ "op::WOpOutputToCvMat< TDatums >", "classop_1_1_w_op_output_to_cv_mat.html", null ], + [ "op::WPersonIdExtractor< TDatums >", "classop_1_1_w_person_id_extractor.html", null ], + [ "op::WPoseExtractor< TDatums >", "classop_1_1_w_pose_extractor.html", null ], + [ "op::WPoseExtractorNet< TDatums >", "classop_1_1_w_pose_extractor_net.html", null ], + [ "op::WPoseRenderer< TDatums >", "classop_1_1_w_pose_renderer.html", null ], + [ "op::WPoseTriangulation< TDatums >", "classop_1_1_w_pose_triangulation.html", null ], + [ "op::WQueueOrderer< TDatums >", "classop_1_1_w_queue_orderer.html", null ], + [ "op::WScaleAndSizeExtractor< TDatums >", "classop_1_1_w_scale_and_size_extractor.html", null ], + [ "op::WVerbosePrinter< TDatums >", "classop_1_1_w_verbose_printer.html", null ], + [ "op::WorkerConsumer< TDatums >", "classop_1_1_worker_consumer.html", [ + [ "op::WCocoJsonSaver< TDatums >", "classop_1_1_w_coco_json_saver.html", null ], + [ "op::WFaceSaver< TDatums >", "classop_1_1_w_face_saver.html", null ], + [ "op::WGui< TDatums >", "classop_1_1_w_gui.html", null ], + [ "op::WGui3D< TDatums >", "classop_1_1_w_gui3_d.html", null ], + [ "op::WHandSaver< TDatums >", "classop_1_1_w_hand_saver.html", null ], + [ "op::WHeatMapSaver< TDatums >", "classop_1_1_w_heat_map_saver.html", null ], + [ "op::WImageSaver< TDatums >", "classop_1_1_w_image_saver.html", null ], + [ "op::WPeopleJsonSaver< TDatums >", "classop_1_1_w_people_json_saver.html", null ], + [ "op::WPoseSaver< TDatums >", "classop_1_1_w_pose_saver.html", null ], + [ "op::WUdpSender< TDatums >", "classop_1_1_w_udp_sender.html", null ], + [ "op::WVideoSaver< TDatums >", "classop_1_1_w_video_saver.html", null ], + [ "op::WVideoSaver3D< TDatums >", "classop_1_1_w_video_saver3_d.html", null ] + ] ], + [ "op::WorkerProducer< TDatums >", "classop_1_1_worker_producer.html", null ] + ] ], 
+ [ "op::Worker< std::shared_ptr< TDatums > >", "classop_1_1_worker.html", [ + [ "op::WQueueAssembler< TDatums >", "classop_1_1_w_queue_assembler.html", null ] + ] ], + [ "op::WrapperStructExtra", "structop_1_1_wrapper_struct_extra.html", null ], + [ "op::WrapperStructFace", "structop_1_1_wrapper_struct_face.html", null ], + [ "op::WrapperStructGui", "structop_1_1_wrapper_struct_gui.html", null ], + [ "op::WrapperStructHand", "structop_1_1_wrapper_struct_hand.html", null ], + [ "op::WrapperStructInput", "structop_1_1_wrapper_struct_input.html", null ], + [ "op::WrapperStructOutput", "structop_1_1_wrapper_struct_output.html", null ], + [ "op::WrapperStructPose", "structop_1_1_wrapper_struct_pose.html", null ], + [ "op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >", "classop_1_1_wrapper_t.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/image_directory_reader_8hpp.html b/web/html/doc/image_directory_reader_8hpp.html new file mode 100644 index 000000000..401856d39 --- /dev/null +++ b/web/html/doc/image_directory_reader_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/producer/imageDirectoryReader.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
imageDirectoryReader.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::ImageDirectoryReader
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/image_directory_reader_8hpp_source.html b/web/html/doc/image_directory_reader_8hpp_source.html new file mode 100644 index 000000000..2e3f191a8 --- /dev/null +++ b/web/html/doc/image_directory_reader_8hpp_source.html @@ -0,0 +1,165 @@ + + + + + + + +OpenPose: include/openpose/producer/imageDirectoryReader.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
imageDirectoryReader.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_PRODUCER_IMAGE_DIRECTORY_READER_HPP
+
2 #define OPENPOSE_PRODUCER_IMAGE_DIRECTORY_READER_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
15  {
+
16  public:
+ +
27  const std::string& imageDirectoryPath, const std::string& cameraParameterPath = "",
+
28  const bool undistortImage = false, const int numberViews = -1);
+
29 
+ +
31 
+
32  std::string getNextFrameName();
+
33 
+
34  inline bool isOpened() const
+
35  {
+
36  return (mFrameNameCounter >= 0);
+
37  }
+
38 
+
39  inline void release()
+
40  {
+
41  mFrameNameCounter = {-1ll};
+
42  }
+
43 
+
44  double get(const int capProperty);
+
45 
+
46  void set(const int capProperty, const double value);
+
47 
+
48  private:
+
49  const std::string mImageDirectoryPath;
+
50  const std::vector<std::string> mFilePaths;
+
51  Point<int> mResolution;
+
52  long long mFrameNameCounter;
+
53 
+
54  Matrix getRawFrame();
+
55 
+
56  std::vector<Matrix> getRawFrames();
+
57 
+ +
59  };
+
60 }
+
61 
+
62 #endif // OPENPOSE_PRODUCER_IMAGE_DIRECTORY_READER_HPP
+ +
ImageDirectoryReader(const std::string &imageDirectoryPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1)
+
void set(const int capProperty, const double value)
+
std::string getNextFrameName()
+ +
double get(const int capProperty)
+ + + + + +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ + + +
+
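For orientation, a minimal sketch of how this producer could be driven through the members shown above; the directory path is a placeholder, and actual frames are served through the op::Producer base class that ImageDirectoryReader derives from:
#include <openpose/producer/imageDirectoryReader.hpp>
#include <iostream>

int main()
{
    // Placeholder folder with input images; camera parameters, undistortion
    // and number of views keep the defaults declared in the header above.
    op::ImageDirectoryReader reader{"examples/media/"};
    if (reader.isOpened())
        // Name (without extension) of the frame that would be produced next.
        std::cout << "Next frame: " << reader.getNextFrameName() << std::endl;
    // Frames themselves are read through the op::Producer interface;
    // release() simply marks the reader as closed.
    reader.release();
    return 0;
}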
+ + + + diff --git a/web/html/doc/image_saver_8hpp.html b/web/html/doc/image_saver_8hpp.html new file mode 100644 index 000000000..d7e19f53f --- /dev/null +++ b/web/html/doc/image_saver_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/filestream/imageSaver.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
imageSaver.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::ImageSaver
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/image_saver_8hpp_source.html b/web/html/doc/image_saver_8hpp_source.html new file mode 100644 index 000000000..202f8437b --- /dev/null +++ b/web/html/doc/image_saver_8hpp_source.html @@ -0,0 +1,137 @@ + + + + + + + +OpenPose: include/openpose/filestream/imageSaver.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
imageSaver.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_FILESTREAM_IMAGE_SAVER_HPP
+
2 #define OPENPOSE_FILESTREAM_IMAGE_SAVER_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+
9  class OP_API ImageSaver : public FileSaver
+
10  {
+
11  public:
+
12  ImageSaver(const std::string& directoryPath, const std::string& imageFormat);
+
13 
+
14  virtual ~ImageSaver();
+
15 
+
16  void saveImages(const Matrix& cvOutputData, const std::string& fileName) const;
+
17 
+
18  void saveImages(const std::vector<Matrix>& matOutputDatas, const std::string& fileName) const;
+
19 
+
20  private:
+
21  const std::string mImageFormat;
+
22  };
+
23 }
+
24 
+
25 #endif // OPENPOSE_FILESTREAM_IMAGE_SAVER_HPP
+ + +
void saveImages(const Matrix &cvOutputData, const std::string &fileName) const
+
ImageSaver(const std::string &directoryPath, const std::string &imageFormat)
+
void saveImages(const std::vector< Matrix > &matOutputDatas, const std::string &fileName) const
+
virtual ~ImageSaver()
+ + + +
#define OP_API
Definition: macros.hpp:18
+ +
+
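As a rough usage sketch of the class above (output directory, image format, and file name are placeholders; the op::Matrix is assumed to hold an already-rendered frame, e.g. the pipeline's output image):
#include <openpose/filestream/imageSaver.hpp>
#include <string>

// Saves one rendered frame to disk using the ImageSaver declared above.
void saveRenderedFrame(const op::Matrix& renderedImage)
{
    // Placeholder output folder and image format.
    const op::ImageSaver imageSaver{"output_images/", "png"};
    // File name without extension; the configured image format is assumed
    // to be appended by the saver when writing to disk.
    imageSaver.saveImages(renderedImage, "frame_000000000000");
}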
+ + + + diff --git a/web/html/doc/index.html b/web/html/doc/index.html new file mode 100644 index 000000000..9ddd812ad --- /dev/null +++ b/web/html/doc/index.html @@ -0,0 +1,241 @@ + + + + + + + +OpenPose: Main Page + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Documentation
+
+
+


+ + + + + +
Build status badges for Linux, MacOS, and Windows.
+

OpenPose is the first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints (135 keypoints in total) on single images.

+

It is authored by Ginés Hidalgo, Zhe Cao, Tomas Simon, Shih-En Wei, Yaadhav Raaj, Hanbyul Joo, and Yaser Sheikh. It is maintained by Ginés Hidalgo and Yaadhav Raaj. OpenPose would not be possible without the CMU Panoptic Studio dataset. We would also like to thank all the people who have helped OpenPose in any way.

+

+


+ Authors Ginés Hidalgo (left) and Hanbyul Joo (right) in front of the CMU Panoptic Studio

+

+Contents

+
    +
  1. Results
  2. Features
  3. Related Work
  4. Installation
  5. Quick Start Overview
  6. Send Us Feedback!
  7. Citation
  8. License
+

+Results

+

+Whole-body (Body, Foot, Face, and Hands) 2D Pose Estimation

+


+ Testing OpenPose: (Left) Crazy Uptown Funk flashmob in Sydney video sequence. (Center and right) Authors Ginés Hidalgo and Tomas Simon testing face and hands

+

+Whole-body 3D Pose Reconstruction and Estimation

+


+ Tianyi Zhao testing the OpenPose 3D Module

+

+Unity Plugin

+


+ Tianyi Zhao and Ginés Hidalgo testing the OpenPose Unity Plugin

+

+Runtime Analysis

+

We show an inference time comparison between the 3 available pose estimation libraries (same hardware and conditions): OpenPose, Alpha-Pose (fast PyTorch version), and Mask R-CNN. The OpenPose runtime is constant, while the runtimes of Alpha-Pose and Mask R-CNN grow linearly with the number of people. More details here.

+

+

+Features

+

Main Functionality:

  • 2D real-time multi-person keypoint detection:
      • 15, 18 or 25-keypoint body/foot keypoint estimation, including 6 foot keypoints. Runtime invariant to number of detected people.
      • 2x21-keypoint hand keypoint estimation. Runtime depends on number of detected people. See OpenPose Training for a runtime invariant alternative.
      • 70-keypoint face keypoint estimation. Runtime depends on number of detected people. See OpenPose Training for a runtime invariant alternative.
  • 3D real-time single-person keypoint detection:
      • 3D triangulation from multiple single views.
      • Synchronization of Flir cameras handled.
      • Compatible with Flir/Point Grey cameras.
  • Calibration toolbox: Estimation of distortion, intrinsic, and extrinsic camera parameters.
  • Single-person tracking for further speedup or visual smoothing.
+

Input: Image, video, webcam, Flir/Point Grey, IP camera, and support to add your own custom input source (e.g., depth camera).

+

Output: Basic image + keypoint display/saving (PNG, JPG, AVI, ...), keypoint saving (JSON, XML, YML, ...), keypoints as array class, and support to add your own custom output code (e.g., some fancy UI).

+

OS: Ubuntu (20, 18, 16, 14), Windows (10, 8), Mac OSX, Nvidia TX2.

+

Hardware compatibility: CUDA (Nvidia GPU), OpenCL (AMD GPU), and non-GPU (CPU-only) versions.

+

Usage Alternatives:

+

For further details, check the major released features and release notes docs.

+

+Related Work

+ +

+Installation

+

If you want to use OpenPose without installing or writing any code, simply download and use the latest Windows portable version of OpenPose!

+

Otherwise, you could build OpenPose from source. See the installation doc for all the alternatives.

+

+Quick Start Overview

+

Simply use the OpenPose Demo from your favorite command-line tool (e.g., Windows PowerShell or Ubuntu Terminal). E.g., the following commands run OpenPose on your webcam (Ubuntu) or on a sample video (Windows portable demo) and display the body keypoints:

# Ubuntu
+
./build/examples/openpose/openpose.bin
+
:: Windows - Portable Demo
+
bin\OpenPoseDemo.exe --video examples\media\video.avi
+

You can also add any of the available flags in any order. E.g., the following example runs on a video (--video {PATH}), enables face (--face) and hands (--hand), and saves the output keypoints as JSON files on disk (--write_json {PATH}).

# Ubuntu
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --face --hand --write_json output_json_folder/
+
:: Windows - Portable Demo
+
bin\OpenPoseDemo.exe --video examples\media\video.avi --face --hand --write_json output_json_folder/
+

Optionally, you can also extend OpenPose's functionality from its Python and C++ APIs. After installing OpenPose, check its official doc for a quick overview of all the alternatives and tutorials.

+
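For instance, a minimal C++ sketch in the spirit of the tutorial_api_cpp examples; the wrapper uses its default configuration, the image path is one of the sample images shipped with the repository, and the OP_CV2OPCONSTMAT conversion macro and the emplaceAndPop(op::Matrix) overload are assumed from those tutorials:
#include <opencv2/opencv.hpp>
#include <openpose/headers.hpp>

int main()
{
    // Start OpenPose with its default configuration, in asynchronous mode.
    op::Wrapper opWrapper{op::ThreadManagerMode::Asynchronous};
    opWrapper.start();

    // Read an image with OpenCV and wrap it into an op::Matrix.
    const cv::Mat cvImage = cv::imread("examples/media/COCO_val2014_000000000192.jpg");
    const op::Matrix opImage = OP_CV2OPCONSTMAT(cvImage);

    // Process the image and print the body keypoints (an op::Array<float>).
    const auto datumsPtr = opWrapper.emplaceAndPop(opImage);
    if (datumsPtr != nullptr && !datumsPtr->empty())
        op::opLog("Body keypoints: " + datumsPtr->at(0)->poseKeypoints.toString());
    return 0;
}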

+Send Us Feedback!

+

Our library is open source for research purposes, and we want to improve it! So let us know (create a new GitHub issue or pull request, email us, etc.) if you...

    +
  1. Find/fix any bug (in functionality or speed) or know how to speed up or improve any part of OpenPose.
  2. Want to add/show some cool functionality/demo/project made on top of OpenPose. We can add your project link to our Community-based Projects section or even integrate it with OpenPose!
+

+Citation

+

Please cite these papers in your publications if OpenPose helps your research. All of OpenPose is based on OpenPose: Realtime Multi-Person 2D Pose Estimation using Part Affinity Fields, while the hand and face detectors also use Hand Keypoint Detection in Single Images using Multiview Bootstrapping (the face detector was trained using the same procedure as the hand detector).

@article{8765346,
+  author = {Z. {Cao} and G. {Hidalgo Martinez} and T. {Simon} and S. {Wei} and Y. A. {Sheikh}},
+  journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
+  title = {OpenPose: Realtime Multi-Person 2D Pose Estimation using Part Affinity Fields},
+  year = {2019}
+}
+
+@inproceedings{simon2017hand,
+  author = {Tomas Simon and Hanbyul Joo and Iain Matthews and Yaser Sheikh},
+  booktitle = {CVPR},
+  title = {Hand Keypoint Detection in Single Images using Multiview Bootstrapping},
+  year = {2017}
+}
+
+@inproceedings{cao2017realtime,
+  author = {Zhe Cao and Tomas Simon and Shih-En Wei and Yaser Sheikh},
+  booktitle = {CVPR},
+  title = {Realtime Multi-Person 2D Pose Estimation using Part Affinity Fields},
+  year = {2017}
+}
+
+@inproceedings{wei2016cpm,
+  author = {Shih-En Wei and Varun Ramakrishna and Takeo Kanade and Yaser Sheikh},
+  booktitle = {CVPR},
+  title = {Convolutional pose machines},
+  year = {2016}
+}
+

Paper links:

+

+License

+

OpenPose is freely available for non-commercial use, and may be redistributed under these conditions. Please see the license for further details. Interested in a commercial license? Check this FlintBox link. For commercial queries, use the Contact section from the FlintBox link and also send a copy of that message to Yaser Sheikh.

+
+
+
+ + + + diff --git a/web/html/doc/installation_20__index_8md.html b/web/html/doc/installation_20__index_8md.html new file mode 100644 index 000000000..596ca61ed --- /dev/null +++ b/web/html/doc/installation_20__index_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/0_index.md File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
doc/installation/0_index.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/installation__deprecated_8md.html b/web/html/doc/installation__deprecated_8md.html new file mode 100644 index 000000000..5f636c263 --- /dev/null +++ b/web/html/doc/installation__deprecated_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/deprecated/installation_deprecated.md File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
doc/installation/deprecated/installation_deprecated.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/installation__jetson__tx1_8md.html b/web/html/doc/installation__jetson__tx1_8md.html new file mode 100644 index 000000000..a997c182a --- /dev/null +++ b/web/html/doc/installation__jetson__tx1_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/jetson_tx/installation_jetson_tx1.md File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
doc/installation/jetson_tx/installation_jetson_tx1.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/installation__jetson__tx2__jetpack3_81_8md.html b/web/html/doc/installation__jetson__tx2__jetpack3_81_8md.html new file mode 100644 index 000000000..18b245bb3 --- /dev/null +++ b/web/html/doc/installation__jetson__tx2__jetpack3_81_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.1.md File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.1.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/installation__jetson__tx2__jetpack3_83_8md.html b/web/html/doc/installation__jetson__tx2__jetpack3_83_8md.html new file mode 100644 index 000000000..3e1069f9b --- /dev/null +++ b/web/html/doc/installation__jetson__tx2__jetpack3_83_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.3.md File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
doc/installation/jetson_tx/installation_jetson_tx2_jetpack3.3.md File Reference
+
+
+
+
+ + + + diff --git a/web/html/doc/ip_camera_reader_8hpp.html b/web/html/doc/ip_camera_reader_8hpp.html new file mode 100644 index 000000000..36b2aac5d --- /dev/null +++ b/web/html/doc/ip_camera_reader_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/producer/ipCameraReader.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+ +
+
ipCameraReader.hpp File Reference
+
+
+ +

Go to the source code of this file.

+ + + + +

+Classes

class  op::IpCameraReader
 
+ + + +

+Namespaces

 op
 
+
+
+ + + + diff --git a/web/html/doc/ip_camera_reader_8hpp_source.html b/web/html/doc/ip_camera_reader_8hpp_source.html new file mode 100644 index 000000000..e9eb408db --- /dev/null +++ b/web/html/doc/ip_camera_reader_8hpp_source.html @@ -0,0 +1,163 @@ + + + + + + + +OpenPose: include/openpose/producer/ipCameraReader.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
ipCameraReader.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_PRODUCER_IP_CAMERA_READER_HPP
+
2 #define OPENPOSE_PRODUCER_IP_CAMERA_READER_HPP
+
3 
+ + +
6 
+
7 namespace op
+
8 {
+ +
13  {
+
14  public:
+
19  explicit IpCameraReader(const std::string& cameraPath, const std::string& cameraParameterPath = "",
+
20  const bool undistortImage = false);
+
21 
+
22  virtual ~IpCameraReader();
+
23 
+
24  std::string getNextFrameName();
+
25 
+
26  inline bool isOpened() const
+
27  {
+ +
29  }
+
30 
+
31  inline double get(const int capProperty)
+
32  {
+
33  return VideoCaptureReader::get(capProperty);
+
34  }
+
35 
+
36  inline void set(const int capProperty, const double value)
+
37  {
+
38  VideoCaptureReader::set(capProperty, value);
+
39  }
+
40 
+
41  private:
+
42  const std::string mPathName;
+
43 
+
44  Matrix getRawFrame();
+
45 
+
46  std::vector<Matrix> getRawFrames();
+
47 
+ +
49  };
+
50 }
+
51 
+
52 #endif // OPENPOSE_PRODUCER_IP_CAMERA_READER_HPP
+ +
std::string getNextFrameName()
+
double get(const int capProperty)
+
void set(const int capProperty, const double value)
+
bool isOpened() const
+
virtual ~IpCameraReader()
+
IpCameraReader(const std::string &cameraPath, const std::string &cameraParameterPath="", const bool undistortImage=false)
+ + +
virtual double get(const int capProperty)=0
+
virtual bool isOpened() const
+
virtual void set(const int capProperty, const double value)=0
+ +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ + +
+
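A small sketch of how this reader could be opened on a stream; the RTSP URL is a placeholder, and, as with the other producers, frames are served through the op::VideoCaptureReader / op::Producer base classes:
#include <openpose/producer/ipCameraReader.hpp>
#include <iostream>

int main()
{
    // Placeholder stream URL; camera parameters and undistortion keep the
    // defaults declared in the header above.
    op::IpCameraReader ipCamera{"rtsp://user:password@192.168.0.10:554/stream"};
    if (ipCamera.isOpened())
        std::cout << "Connected, next frame: " << ipCamera.getNextFrameName() << std::endl;
    else
        std::cout << "Could not open the IP camera stream." << std::endl;
    return 0;
}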
+ + + + diff --git a/web/html/doc/joint_angle_estimation_8hpp.html b/web/html/doc/joint_angle_estimation_8hpp.html new file mode 100644 index 000000000..75aa60681 --- /dev/null +++ b/web/html/doc/joint_angle_estimation_8hpp.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: include/openpose/3d/jointAngleEstimation.hpp File Reference + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
jointAngleEstimation.hpp File Reference
+
+ +
+ + + + diff --git a/web/html/doc/joint_angle_estimation_8hpp_source.html b/web/html/doc/joint_angle_estimation_8hpp_source.html new file mode 100644 index 000000000..05fa25be7 --- /dev/null +++ b/web/html/doc/joint_angle_estimation_8hpp_source.html @@ -0,0 +1,155 @@ + + + + + + + +OpenPose: include/openpose/3d/jointAngleEstimation.hpp Source File + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
jointAngleEstimation.hpp
+
+
+Go to the documentation of this file.
1 #ifdef USE_3D_ADAM_MODEL
+
2 #ifndef OPENPOSE_3D_JOINT_ANGLE_ESTIMATION_HPP
+
3 #define OPENPOSE_3D_JOINT_ANGLE_ESTIMATION_HPP
+
4 
+
5 #ifdef USE_EIGEN
+
6  #include <Eigen/Core>
+
7 #endif
+
8 #ifdef USE_3D_ADAM_MODEL
+
9  #include <adam/totalmodel.h>
+
10 #endif
+
11 #include <openpose/core/common.hpp>
+
12 
+
13 namespace op
+
14 {
+
15  OP_API int mapOPToAdam(const int oPPart);
+
16 
+
17  class OP_API JointAngleEstimation
+
18  {
+
19  public:
+
20  static const std::shared_ptr<const TotalModel> getTotalModel();
+
21 
+
22  JointAngleEstimation(const bool returnJacobian);
+
23 
+
24  virtual ~JointAngleEstimation();
+
25 
+
26  void initializationOnThread();
+
27 
+
28  void adamFastFit(Eigen::Matrix<double, 62, 3, Eigen::RowMajor>& adamPose,
+
29  Eigen::Vector3d& adamTranslation,
+
30  Eigen::Matrix<double, Eigen::Dynamic, 1>& vtVec,
+
31  Eigen::Matrix<double, Eigen::Dynamic, 1>& j0Vec,
+
32  Eigen::VectorXd& adamFacecoeffsExp,
+
33  const Array<float>& poseKeypoints3D,
+
34  const Array<float>& faceKeypoints3D,
+
35  const std::array<Array<float>, 2>& handKeypoints3D);
+
36 
+
37  private:
+
38  // PIMPL idiom
+
39  // http://www.cppsamples.com/common-tasks/pimpl.html
+
40  struct ImplJointAngleEstimation;
+
41  std::shared_ptr<ImplJointAngleEstimation> spImpl;
+
42 
+
43  // PIMPL requires DELETE_COPY & destructor, or extra code
+
44  // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+
45  DELETE_COPY(JointAngleEstimation);
+
46  };
+
47 }
+
48 
+
49 #endif // OPENPOSE_3D_JOINT_ANGLE_ESTIMATION_HPP
+
50 #endif
+ +
#define OP_API
Definition: macros.hpp:18
+
#define DELETE_COPY(className)
Definition: macros.hpp:32
+ +
+
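Since this header is only compiled when USE_3D_ADAM_MODEL is defined, a usage sketch would sit behind the same guard; the keypoint arrays are assumed to come from the 3D reconstruction stage, and only the members declared above are exercised:
#ifdef USE_3D_ADAM_MODEL
#include <array>
#include <Eigen/Core>
#include <openpose/3d/jointAngleEstimation.hpp>

// Fits the Adam body model to previously triangulated 3D keypoints.
void fitAdamModel(
    const op::Array<float>& poseKeypoints3D, const op::Array<float>& faceKeypoints3D,
    const std::array<op::Array<float>, 2>& handKeypoints3D)
{
    op::JointAngleEstimation jointAngleEstimation{/*returnJacobian*/ false};
    jointAngleEstimation.initializationOnThread();

    // Output buffers written by adamFastFit.
    Eigen::Matrix<double, 62, 3, Eigen::RowMajor> adamPose;
    Eigen::Vector3d adamTranslation;
    Eigen::Matrix<double, Eigen::Dynamic, 1> vtVec;
    Eigen::Matrix<double, Eigen::Dynamic, 1> j0Vec;
    Eigen::VectorXd adamFacecoeffsExp;

    jointAngleEstimation.adamFastFit(
        adamPose, adamTranslation, vtVec, j0Vec, adamFacecoeffsExp,
        poseKeypoints3D, faceKeypoints3D, handKeypoints3D);
}
#endif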
+ + + + diff --git a/web/html/doc/jquery.js b/web/html/doc/jquery.js new file mode 100644 index 000000000..103c32d79 --- /dev/null +++ b/web/html/doc/jquery.js @@ -0,0 +1,35 @@ +/*! jQuery v3.4.1 | (c) JS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],E=C.document,r=Object.getPrototypeOf,s=t.slice,g=t.concat,u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.4.1",k=function(e,t){return new k.fn.init(e,t)},p=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;function d(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp($),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+$),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ne=function(e,t,n){var r="0x"+t-65536;return r!=r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(m.childNodes),m.childNodes),t[m.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&((e?e.ownerDocument||e:m)!==C&&T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!A[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&U.test(t)){(s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=k),o=(l=h(t)).length;while(o--)l[o]="#"+s+" 
"+xe(l[o]);c=l.join(","),f=ee.test(t)&&ye(e.parentNode)||e}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){A(t,!0)}finally{s===k&&e.removeAttribute("id")}}}return g(t.replace(B,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[k]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:m;return r!==C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),m!==C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=k,!C.getElementsByName||!C.getElementsByName(k).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return 
t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+k+"-]").length||v.push("~="),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+k+"+*").length||v.push(".#.+[+~]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",$)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e===C||e.ownerDocument===m&&y(m,e)?-1:t===C||t.ownerDocument===m&&y(m,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===C?-1:t===C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]===m?-1:s[r]===m?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if((e.ownerDocument||e)!==C&&T(e),d.matchesSelector&&E&&!A[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){A(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=p[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&p(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof 
e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?k.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?k.grep(e,function(e){return e===n!==r}):"string"!=typeof n?k.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(k.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof k?t[0]:t,k.merge(this,k.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),D.test(r[1])&&k.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(k):k.makeArray(e,this)}).prototype=k.fn,q=k(E);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}k.fn.extend({has:function(e){var t=k(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,""],thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?k.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;nx",y.noCloneChecked=!!me.cloneNode(!0).lastChild.defaultValue;var Te=/^key/,Ce=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ee=/^([^.]*)(?:\.(.+)|)/;function ke(){return!0}function Se(){return!1}function Ne(e,t){return e===function(){try{return E.activeElement}catch(e){}}()==("focus"===t)}function Ae(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Ae(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=Se;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return k().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=k.guid++)),e.each(function(){k.event.add(this,t,i,r,n)})}function De(e,i,o){o?(Q.set(e,i,!1),k.event.add(e,i,{namespace:!1,handler:function(e){var t,n,r=Q.get(this,i);if(1&e.isTrigger&&this[i]){if(r.length)(k.event.special[i]||{}).delegateType&&e.stopPropagation();else if(r=s.call(arguments),Q.set(this,i,r),t=o(this,i),this[i](),r!==(n=Q.get(this,i))||t?Q.set(this,i,!1):n={},r!==n)return e.stopImmediatePropagation(),e.preventDefault(),n.value}else r.length&&(Q.set(this,i,{value:k.event.trigger(k.extend(r[0],k.Event.prototype),r.slice(1),this)}),e.stopImmediatePropagation())}})):void 0===Q.get(e,i)&&k.event.add(e,i,ke)}k.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.get(t);if(v){n.handler&&(n=(o=n).handler,i=o.selector),i&&k.find.matchesSelector(ie,i),n.guid||(n.guid=k.guid++),(u=v.events)||(u=v.events={}),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof k&&k.event.triggered!==e.type?k.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(R)||[""]).length;while(l--)d=g=(s=Ee.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=k.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=k.event.special[d]||{},c=k.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&k.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),k.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.hasData(e)&&Q.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(R)||[""]).length;while(l--)if(d=g=(s=Ee.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=k.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||k.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)k.event.remove(e,d+t[l],n,r,!0);k.isEmptyObject(u)&&Q.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=k.event.fix(e),u=new Array(arguments.length),l=(Q.get(this,"events")||{})[s.type]||[],c=k.event.special[s.type]||{};for(u[0]=s,t=1;t\x20\t\r\n\f]*)[^>]*)\/>/gi,qe=/\s*$/g;function Oe(e,t){return 
A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&k(e).children("tbody")[0]||e}function Pe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Re(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Me(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(Q.hasData(e)&&(o=Q.access(e),a=Q.set(t,o),l=o.events))for(i in delete a.handle,a.events={},l)for(n=0,r=l[i].length;n")},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=oe(e);if(!(y.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||k.isXMLDoc(e)))for(a=ve(c),r=0,i=(o=ve(e)).length;r").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Vt,Gt=[],Yt=/(=)\?(?=&|$)|\?\?/;k.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Gt.pop()||k.expando+"_"+kt++;return this[e]=!0,e}}),k.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Yt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Yt,"$1"+r):!1!==e.jsonp&&(e.url+=(St.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||k.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?k(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Gt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Vt=E.implementation.createHTMLDocument("").body).innerHTML="
",2===Vt.childNodes.length),k.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=D.exec(e))?[t.createElement(i[1])]:(i=we([e],t,o),o&&o.length&&k(o).remove(),k.merge([],i.childNodes)));var r,i,o},k.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(k.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},k.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){k.fn[t]=function(e){return this.on(t,e)}}),k.expr.pseudos.animated=function(t){return k.grep(k.timers,function(e){return t===e.elem}).length},k.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=k.css(e,"position"),c=k(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=k.css(e,"top"),u=k.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,k.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},k.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){k.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===k.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===k.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=k(e).offset()).top+=k.css(e,"borderTopWidth",!0),i.left+=k.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-k.css(r,"marginTop",!0),left:t.left-i.left-k.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===k.css(e,"position"))e=e.offsetParent;return e||ie})}}),k.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;k.fn[t]=function(e){return _(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),k.each(["top","left"],function(e,n){k.cssHooks[n]=ze(y.pixelPosition,function(e,t){if(t)return t=_e(e,n),$e.test(t)?k(e).position()[n]+"px":t})}),k.each({Height:"height",Width:"width"},function(a,s){k.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){k.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return _(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?k.css(e,t,i):k.style(e,t,n,i)},s,n?e:void 0,n)}})}),k.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){k.fn[n]=function(e,t){return 0a;a++)for(i in 
o[a])n=o[a][i],o[a].hasOwnProperty(i)&&void 0!==n&&(e[i]=t.isPlainObject(n)?t.isPlainObject(e[i])?t.widget.extend({},e[i],n):t.widget.extend({},n):n);return e},t.widget.bridge=function(e,i){var n=i.prototype.widgetFullName||e;t.fn[e]=function(o){var a="string"==typeof o,r=s.call(arguments,1),h=this;return a?this.length||"instance"!==o?this.each(function(){var i,s=t.data(this,n);return"instance"===o?(h=s,!1):s?t.isFunction(s[o])&&"_"!==o.charAt(0)?(i=s[o].apply(s,r),i!==s&&void 0!==i?(h=i&&i.jquery?h.pushStack(i.get()):i,!1):void 0):t.error("no such method '"+o+"' for "+e+" widget instance"):t.error("cannot call methods on "+e+" prior to initialization; "+"attempted to call method '"+o+"'")}):h=void 0:(r.length&&(o=t.widget.extend.apply(null,[o].concat(r))),this.each(function(){var e=t.data(this,n);e?(e.option(o||{}),e._init&&e._init()):t.data(this,n,new i(o,this))})),h}},t.Widget=function(){},t.Widget._childConstructors=[],t.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",defaultElement:"
",options:{classes:{},disabled:!1,create:null},_createWidget:function(e,s){s=t(s||this.defaultElement||this)[0],this.element=t(s),this.uuid=i++,this.eventNamespace="."+this.widgetName+this.uuid,this.bindings=t(),this.hoverable=t(),this.focusable=t(),this.classesElementLookup={},s!==this&&(t.data(s,this.widgetFullName,this),this._on(!0,this.element,{remove:function(t){t.target===s&&this.destroy()}}),this.document=t(s.style?s.ownerDocument:s.document||s),this.window=t(this.document[0].defaultView||this.document[0].parentWindow)),this.options=t.widget.extend({},this.options,this._getCreateOptions(),e),this._create(),this.options.disabled&&this._setOptionDisabled(this.options.disabled),this._trigger("create",null,this._getCreateEventData()),this._init()},_getCreateOptions:function(){return{}},_getCreateEventData:t.noop,_create:t.noop,_init:t.noop,destroy:function(){var e=this;this._destroy(),t.each(this.classesElementLookup,function(t,i){e._removeClass(i,t)}),this.element.off(this.eventNamespace).removeData(this.widgetFullName),this.widget().off(this.eventNamespace).removeAttr("aria-disabled"),this.bindings.off(this.eventNamespace)},_destroy:t.noop,widget:function(){return this.element},option:function(e,i){var s,n,o,a=e;if(0===arguments.length)return t.widget.extend({},this.options);if("string"==typeof e)if(a={},s=e.split("."),e=s.shift(),s.length){for(n=a[e]=t.widget.extend({},this.options[e]),o=0;s.length-1>o;o++)n[s[o]]=n[s[o]]||{},n=n[s[o]];if(e=s.pop(),1===arguments.length)return void 0===n[e]?null:n[e];n[e]=i}else{if(1===arguments.length)return void 0===this.options[e]?null:this.options[e];a[e]=i}return this._setOptions(a),this},_setOptions:function(t){var e;for(e in t)this._setOption(e,t[e]);return this},_setOption:function(t,e){return"classes"===t&&this._setOptionClasses(e),this.options[t]=e,"disabled"===t&&this._setOptionDisabled(e),this},_setOptionClasses:function(e){var i,s,n;for(i in e)n=this.classesElementLookup[i],e[i]!==this.options.classes[i]&&n&&n.length&&(s=t(n.get()),this._removeClass(n,i),s.addClass(this._classes({element:s,keys:i,classes:e,add:!0})))},_setOptionDisabled:function(t){this._toggleClass(this.widget(),this.widgetFullName+"-disabled",null,!!t),t&&(this._removeClass(this.hoverable,null,"ui-state-hover"),this._removeClass(this.focusable,null,"ui-state-focus"))},enable:function(){return this._setOptions({disabled:!1})},disable:function(){return this._setOptions({disabled:!0})},_classes:function(e){function i(i,o){var a,r;for(r=0;i.length>r;r++)a=n.classesElementLookup[i[r]]||t(),a=e.add?t(t.unique(a.get().concat(e.element.get()))):t(a.not(e.element).get()),n.classesElementLookup[i[r]]=a,s.push(i[r]),o&&e.classes[i[r]]&&s.push(e.classes[i[r]])}var s=[],n=this;return e=t.extend({element:this.element,classes:this.options.classes||{}},e),this._on(e.element,{remove:"_untrackClassesElement"}),e.keys&&i(e.keys.match(/\S+/g)||[],!0),e.extra&&i(e.extra.match(/\S+/g)||[]),s.join(" ")},_untrackClassesElement:function(e){var i=this;t.each(i.classesElementLookup,function(s,n){-1!==t.inArray(e.target,n)&&(i.classesElementLookup[s]=t(n.not(e.target).get()))})},_removeClass:function(t,e,i){return this._toggleClass(t,e,i,!1)},_addClass:function(t,e,i){return this._toggleClass(t,e,i,!0)},_toggleClass:function(t,e,i,s){s="boolean"==typeof s?s:i;var n="string"==typeof t||null===t,o={extra:n?e:i,keys:n?t:e,element:n?this.element:t,add:s};return o.element.toggleClass(this._classes(o),s),this},_on:function(e,i,s){var n,o=this;"boolean"!=typeof 
e&&(s=i,i=e,e=!1),s?(i=n=t(i),this.bindings=this.bindings.add(i)):(s=i,i=this.element,n=this.widget()),t.each(s,function(s,a){function r(){return e||o.options.disabled!==!0&&!t(this).hasClass("ui-state-disabled")?("string"==typeof a?o[a]:a).apply(o,arguments):void 0}"string"!=typeof a&&(r.guid=a.guid=a.guid||r.guid||t.guid++);var h=s.match(/^([\w:-]*)\s*(.*)$/),l=h[1]+o.eventNamespace,c=h[2];c?n.on(l,c,r):i.on(l,r)})},_off:function(e,i){i=(i||"").split(" ").join(this.eventNamespace+" ")+this.eventNamespace,e.off(i).off(i),this.bindings=t(this.bindings.not(e).get()),this.focusable=t(this.focusable.not(e).get()),this.hoverable=t(this.hoverable.not(e).get())},_delay:function(t,e){function i(){return("string"==typeof t?s[t]:t).apply(s,arguments)}var s=this;return setTimeout(i,e||0)},_hoverable:function(e){this.hoverable=this.hoverable.add(e),this._on(e,{mouseenter:function(e){this._addClass(t(e.currentTarget),null,"ui-state-hover")},mouseleave:function(e){this._removeClass(t(e.currentTarget),null,"ui-state-hover")}})},_focusable:function(e){this.focusable=this.focusable.add(e),this._on(e,{focusin:function(e){this._addClass(t(e.currentTarget),null,"ui-state-focus")},focusout:function(e){this._removeClass(t(e.currentTarget),null,"ui-state-focus")}})},_trigger:function(e,i,s){var n,o,a=this.options[e];if(s=s||{},i=t.Event(i),i.type=(e===this.widgetEventPrefix?e:this.widgetEventPrefix+e).toLowerCase(),i.target=this.element[0],o=i.originalEvent)for(n in o)n in i||(i[n]=o[n]);return this.element.trigger(i,s),!(t.isFunction(a)&&a.apply(this.element[0],[i].concat(s))===!1||i.isDefaultPrevented())}},t.each({show:"fadeIn",hide:"fadeOut"},function(e,i){t.Widget.prototype["_"+e]=function(s,n,o){"string"==typeof n&&(n={effect:n});var a,r=n?n===!0||"number"==typeof n?i:n.effect||i:e;n=n||{},"number"==typeof n&&(n={duration:n}),a=!t.isEmptyObject(n),n.complete=o,n.delay&&s.delay(n.delay),a&&t.effects&&t.effects.effect[r]?s[e](n):r!==e&&s[r]?s[r](n.duration,n.easing,o):s.queue(function(i){t(this)[e](),o&&o.call(s[0]),i()})}}),t.widget,function(){function e(t,e,i){return[parseFloat(t[0])*(u.test(t[0])?e/100:1),parseFloat(t[1])*(u.test(t[1])?i/100:1)]}function i(e,i){return parseInt(t.css(e,i),10)||0}function s(e){var i=e[0];return 9===i.nodeType?{width:e.width(),height:e.height(),offset:{top:0,left:0}}:t.isWindow(i)?{width:e.width(),height:e.height(),offset:{top:e.scrollTop(),left:e.scrollLeft()}}:i.preventDefault?{width:0,height:0,offset:{top:i.pageY,left:i.pageX}}:{width:e.outerWidth(),height:e.outerHeight(),offset:e.offset()}}var n,o=Math.max,a=Math.abs,r=/left|center|right/,h=/top|center|bottom/,l=/[\+\-]\d+(\.[\d]+)?%?/,c=/^\w+/,u=/%$/,d=t.fn.position;t.position={scrollbarWidth:function(){if(void 0!==n)return n;var e,i,s=t("
"),o=s.children()[0];return t("body").append(s),e=o.offsetWidth,s.css("overflow","scroll"),i=o.offsetWidth,e===i&&(i=s[0].clientWidth),s.remove(),n=e-i},getScrollInfo:function(e){var i=e.isWindow||e.isDocument?"":e.element.css("overflow-x"),s=e.isWindow||e.isDocument?"":e.element.css("overflow-y"),n="scroll"===i||"auto"===i&&e.widthi?"left":e>0?"right":"center",vertical:0>r?"top":s>0?"bottom":"middle"};l>p&&p>a(e+i)&&(u.horizontal="center"),c>f&&f>a(s+r)&&(u.vertical="middle"),u.important=o(a(e),a(i))>o(a(s),a(r))?"horizontal":"vertical",n.using.call(this,t,u)}),h.offset(t.extend(D,{using:r}))})},t.ui.position={fit:{left:function(t,e){var i,s=e.within,n=s.isWindow?s.scrollLeft:s.offset.left,a=s.width,r=t.left-e.collisionPosition.marginLeft,h=n-r,l=r+e.collisionWidth-a-n;e.collisionWidth>a?h>0&&0>=l?(i=t.left+h+e.collisionWidth-a-n,t.left+=h-i):t.left=l>0&&0>=h?n:h>l?n+a-e.collisionWidth:n:h>0?t.left+=h:l>0?t.left-=l:t.left=o(t.left-r,t.left)},top:function(t,e){var i,s=e.within,n=s.isWindow?s.scrollTop:s.offset.top,a=e.within.height,r=t.top-e.collisionPosition.marginTop,h=n-r,l=r+e.collisionHeight-a-n;e.collisionHeight>a?h>0&&0>=l?(i=t.top+h+e.collisionHeight-a-n,t.top+=h-i):t.top=l>0&&0>=h?n:h>l?n+a-e.collisionHeight:n:h>0?t.top+=h:l>0?t.top-=l:t.top=o(t.top-r,t.top)}},flip:{left:function(t,e){var i,s,n=e.within,o=n.offset.left+n.scrollLeft,r=n.width,h=n.isWindow?n.scrollLeft:n.offset.left,l=t.left-e.collisionPosition.marginLeft,c=l-h,u=l+e.collisionWidth-r-h,d="left"===e.my[0]?-e.elemWidth:"right"===e.my[0]?e.elemWidth:0,p="left"===e.at[0]?e.targetWidth:"right"===e.at[0]?-e.targetWidth:0,f=-2*e.offset[0];0>c?(i=t.left+d+p+f+e.collisionWidth-r-o,(0>i||a(c)>i)&&(t.left+=d+p+f)):u>0&&(s=t.left-e.collisionPosition.marginLeft+d+p+f-h,(s>0||u>a(s))&&(t.left+=d+p+f))},top:function(t,e){var i,s,n=e.within,o=n.offset.top+n.scrollTop,r=n.height,h=n.isWindow?n.scrollTop:n.offset.top,l=t.top-e.collisionPosition.marginTop,c=l-h,u=l+e.collisionHeight-r-h,d="top"===e.my[1],p=d?-e.elemHeight:"bottom"===e.my[1]?e.elemHeight:0,f="top"===e.at[1]?e.targetHeight:"bottom"===e.at[1]?-e.targetHeight:0,m=-2*e.offset[1];0>c?(s=t.top+p+f+m+e.collisionHeight-r-o,(0>s||a(c)>s)&&(t.top+=p+f+m)):u>0&&(i=t.top-e.collisionPosition.marginTop+p+f+m-h,(i>0||u>a(i))&&(t.top+=p+f+m))}},flipfit:{left:function(){t.ui.position.flip.left.apply(this,arguments),t.ui.position.fit.left.apply(this,arguments)},top:function(){t.ui.position.flip.top.apply(this,arguments),t.ui.position.fit.top.apply(this,arguments)}}}}(),t.ui.position,t.extend(t.expr[":"],{data:t.expr.createPseudo?t.expr.createPseudo(function(e){return function(i){return!!t.data(i,e)}}):function(e,i,s){return!!t.data(e,s[3])}}),t.fn.extend({disableSelection:function(){var t="onselectstart"in document.createElement("div")?"selectstart":"mousedown";return function(){return this.on(t+".ui-disableSelection",function(t){t.preventDefault()})}}(),enableSelection:function(){return this.off(".ui-disableSelection")}}),t.ui.focusable=function(i,s){var n,o,a,r,h,l=i.nodeName.toLowerCase();return"area"===l?(n=i.parentNode,o=n.name,i.href&&o&&"map"===n.nodeName.toLowerCase()?(a=t("img[usemap='#"+o+"']"),a.length>0&&a.is(":visible")):!1):(/^(input|select|textarea|button|object)$/.test(l)?(r=!i.disabled,r&&(h=t(i).closest("fieldset")[0],h&&(r=!h.disabled))):r="a"===l?i.href||s:s,r&&t(i).is(":visible")&&e(t(i)))},t.extend(t.expr[":"],{focusable:function(e){return t.ui.focusable(e,null!=t.attr(e,"tabindex"))}}),t.ui.focusable,t.fn.form=function(){return"string"==typeof 
this[0].form?this.closest("form"):t(this[0].form)},t.ui.formResetMixin={_formResetHandler:function(){var e=t(this);setTimeout(function(){var i=e.data("ui-form-reset-instances");t.each(i,function(){this.refresh()})})},_bindFormResetHandler:function(){if(this.form=this.element.form(),this.form.length){var t=this.form.data("ui-form-reset-instances")||[];t.length||this.form.on("reset.ui-form-reset",this._formResetHandler),t.push(this),this.form.data("ui-form-reset-instances",t)}},_unbindFormResetHandler:function(){if(this.form.length){var e=this.form.data("ui-form-reset-instances");e.splice(t.inArray(this,e),1),e.length?this.form.data("ui-form-reset-instances",e):this.form.removeData("ui-form-reset-instances").off("reset.ui-form-reset")}}},"1.7"===t.fn.jquery.substring(0,3)&&(t.each(["Width","Height"],function(e,i){function s(e,i,s,o){return t.each(n,function(){i-=parseFloat(t.css(e,"padding"+this))||0,s&&(i-=parseFloat(t.css(e,"border"+this+"Width"))||0),o&&(i-=parseFloat(t.css(e,"margin"+this))||0)}),i}var n="Width"===i?["Left","Right"]:["Top","Bottom"],o=i.toLowerCase(),a={innerWidth:t.fn.innerWidth,innerHeight:t.fn.innerHeight,outerWidth:t.fn.outerWidth,outerHeight:t.fn.outerHeight};t.fn["inner"+i]=function(e){return void 0===e?a["inner"+i].call(this):this.each(function(){t(this).css(o,s(this,e)+"px")})},t.fn["outer"+i]=function(e,n){return"number"!=typeof e?a["outer"+i].call(this,e):this.each(function(){t(this).css(o,s(this,e,!0,n)+"px")})}}),t.fn.addBack=function(t){return this.add(null==t?this.prevObject:this.prevObject.filter(t))}),t.ui.keyCode={BACKSPACE:8,COMMA:188,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,LEFT:37,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SPACE:32,TAB:9,UP:38},t.ui.escapeSelector=function(){var t=/([!"#$%&'()*+,./:;<=>?@[\]^`{|}~])/g;return function(e){return e.replace(t,"\\$1")}}(),t.fn.labels=function(){var e,i,s,n,o;return this[0].labels&&this[0].labels.length?this.pushStack(this[0].labels):(n=this.eq(0).parents("label"),s=this.attr("id"),s&&(e=this.eq(0).parents().last(),o=e.add(e.length?e.siblings():this.siblings()),i="label[for='"+t.ui.escapeSelector(s)+"']",n=n.add(o.find(i).addBack(i))),this.pushStack(n))},t.fn.scrollParent=function(e){var i=this.css("position"),s="absolute"===i,n=e?/(auto|scroll|hidden)/:/(auto|scroll)/,o=this.parents().filter(function(){var e=t(this);return s&&"static"===e.css("position")?!1:n.test(e.css("overflow")+e.css("overflow-y")+e.css("overflow-x"))}).eq(0);return"fixed"!==i&&o.length?o:t(this[0].ownerDocument||document)},t.extend(t.expr[":"],{tabbable:function(e){var i=t.attr(e,"tabindex"),s=null!=i;return(!s||i>=0)&&t.ui.focusable(e,s)}}),t.fn.extend({uniqueId:function(){var t=0;return function(){return this.each(function(){this.id||(this.id="ui-id-"+ ++t)})}}(),removeUniqueId:function(){return this.each(function(){/^ui-id-\d+$/.test(this.id)&&t(this).removeAttr("id")})}}),t.ui.ie=!!/msie [\w.]+/.exec(navigator.userAgent.toLowerCase());var n=!1;t(document).on("mouseup",function(){n=!1}),t.widget("ui.mouse",{version:"1.12.1",options:{cancel:"input, textarea, button, select, option",distance:1,delay:0},_mouseInit:function(){var e=this;this.element.on("mousedown."+this.widgetName,function(t){return e._mouseDown(t)}).on("click."+this.widgetName,function(i){return!0===t.data(i.target,e.widgetName+".preventClickEvent")?(t.removeData(i.target,e.widgetName+".preventClickEvent"),i.stopImmediatePropagation(),!1):void 
0}),this.started=!1},_mouseDestroy:function(){this.element.off("."+this.widgetName),this._mouseMoveDelegate&&this.document.off("mousemove."+this.widgetName,this._mouseMoveDelegate).off("mouseup."+this.widgetName,this._mouseUpDelegate)},_mouseDown:function(e){if(!n){this._mouseMoved=!1,this._mouseStarted&&this._mouseUp(e),this._mouseDownEvent=e;var i=this,s=1===e.which,o="string"==typeof this.options.cancel&&e.target.nodeName?t(e.target).closest(this.options.cancel).length:!1;return s&&!o&&this._mouseCapture(e)?(this.mouseDelayMet=!this.options.delay,this.mouseDelayMet||(this._mouseDelayTimer=setTimeout(function(){i.mouseDelayMet=!0},this.options.delay)),this._mouseDistanceMet(e)&&this._mouseDelayMet(e)&&(this._mouseStarted=this._mouseStart(e)!==!1,!this._mouseStarted)?(e.preventDefault(),!0):(!0===t.data(e.target,this.widgetName+".preventClickEvent")&&t.removeData(e.target,this.widgetName+".preventClickEvent"),this._mouseMoveDelegate=function(t){return i._mouseMove(t)},this._mouseUpDelegate=function(t){return i._mouseUp(t)},this.document.on("mousemove."+this.widgetName,this._mouseMoveDelegate).on("mouseup."+this.widgetName,this._mouseUpDelegate),e.preventDefault(),n=!0,!0)):!0}},_mouseMove:function(e){if(this._mouseMoved){if(t.ui.ie&&(!document.documentMode||9>document.documentMode)&&!e.button)return this._mouseUp(e);if(!e.which)if(e.originalEvent.altKey||e.originalEvent.ctrlKey||e.originalEvent.metaKey||e.originalEvent.shiftKey)this.ignoreMissingWhich=!0;else if(!this.ignoreMissingWhich)return this._mouseUp(e)}return(e.which||e.button)&&(this._mouseMoved=!0),this._mouseStarted?(this._mouseDrag(e),e.preventDefault()):(this._mouseDistanceMet(e)&&this._mouseDelayMet(e)&&(this._mouseStarted=this._mouseStart(this._mouseDownEvent,e)!==!1,this._mouseStarted?this._mouseDrag(e):this._mouseUp(e)),!this._mouseStarted)},_mouseUp:function(e){this.document.off("mousemove."+this.widgetName,this._mouseMoveDelegate).off("mouseup."+this.widgetName,this._mouseUpDelegate),this._mouseStarted&&(this._mouseStarted=!1,e.target===this._mouseDownEvent.target&&t.data(e.target,this.widgetName+".preventClickEvent",!0),this._mouseStop(e)),this._mouseDelayTimer&&(clearTimeout(this._mouseDelayTimer),delete this._mouseDelayTimer),this.ignoreMissingWhich=!1,n=!1,e.preventDefault()},_mouseDistanceMet:function(t){return Math.max(Math.abs(this._mouseDownEvent.pageX-t.pageX),Math.abs(this._mouseDownEvent.pageY-t.pageY))>=this.options.distance},_mouseDelayMet:function(){return this.mouseDelayMet},_mouseStart:function(){},_mouseDrag:function(){},_mouseStop:function(){},_mouseCapture:function(){return!0}}),t.ui.plugin={add:function(e,i,s){var n,o=t.ui[e].prototype;for(n in s)o.plugins[n]=o.plugins[n]||[],o.plugins[n].push([i,s[n]])},call:function(t,e,i,s){var n,o=t.plugins[e];if(o&&(s||t.element[0].parentNode&&11!==t.element[0].parentNode.nodeType))for(n=0;o.length>n;n++)t.options[o[n][0]]&&o[n][1].apply(t.element,i)}},t.widget("ui.resizable",t.ui.mouse,{version:"1.12.1",widgetEventPrefix:"resize",options:{alsoResize:!1,animate:!1,animateDuration:"slow",animateEasing:"swing",aspectRatio:!1,autoHide:!1,classes:{"ui-resizable-se":"ui-icon ui-icon-gripsmall-diagonal-se"},containment:!1,ghost:!1,grid:!1,handles:"e,s,se",helper:!1,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:90,resize:null,start:null,stop:null},_num:function(t){return parseFloat(t)||0},_isNumber:function(t){return!isNaN(parseFloat(t))},_hasScroll:function(e,i){if("hidden"===t(e).css("overflow"))return!1;var 
s=i&&"left"===i?"scrollLeft":"scrollTop",n=!1;return e[s]>0?!0:(e[s]=1,n=e[s]>0,e[s]=0,n)},_create:function(){var e,i=this.options,s=this;this._addClass("ui-resizable"),t.extend(this,{_aspectRatio:!!i.aspectRatio,aspectRatio:i.aspectRatio,originalElement:this.element,_proportionallyResizeElements:[],_helper:i.helper||i.ghost||i.animate?i.helper||"ui-resizable-helper":null}),this.element[0].nodeName.match(/^(canvas|textarea|input|select|button|img)$/i)&&(this.element.wrap(t("
").css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),top:this.element.css("top"),left:this.element.css("left")})),this.element=this.element.parent().data("ui-resizable",this.element.resizable("instance")),this.elementIsWrapper=!0,e={marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom"),marginLeft:this.originalElement.css("marginLeft")},this.element.css(e),this.originalElement.css("margin",0),this.originalResizeStyle=this.originalElement.css("resize"),this.originalElement.css("resize","none"),this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"})),this.originalElement.css(e),this._proportionallyResize()),this._setupHandles(),i.autoHide&&t(this.element).on("mouseenter",function(){i.disabled||(s._removeClass("ui-resizable-autohide"),s._handles.show())}).on("mouseleave",function(){i.disabled||s.resizing||(s._addClass("ui-resizable-autohide"),s._handles.hide())}),this._mouseInit()},_destroy:function(){this._mouseDestroy();var e,i=function(e){t(e).removeData("resizable").removeData("ui-resizable").off(".resizable").find(".ui-resizable-handle").remove()};return this.elementIsWrapper&&(i(this.element),e=this.element,this.originalElement.css({position:e.css("position"),width:e.outerWidth(),height:e.outerHeight(),top:e.css("top"),left:e.css("left")}).insertAfter(e),e.remove()),this.originalElement.css("resize",this.originalResizeStyle),i(this.originalElement),this},_setOption:function(t,e){switch(this._super(t,e),t){case"handles":this._removeHandles(),this._setupHandles();break;default:}},_setupHandles:function(){var e,i,s,n,o,a=this.options,r=this;if(this.handles=a.handles||(t(".ui-resizable-handle",this.element).length?{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",nw:".ui-resizable-nw"}:"e,s,se"),this._handles=t(),this.handles.constructor===String)for("all"===this.handles&&(this.handles="n,e,s,w,se,sw,ne,nw"),s=this.handles.split(","),this.handles={},i=0;s.length>i;i++)e=t.trim(s[i]),n="ui-resizable-"+e,o=t("
"),this._addClass(o,"ui-resizable-handle "+n),o.css({zIndex:a.zIndex}),this.handles[e]=".ui-resizable-"+e,this.element.append(o);this._renderAxis=function(e){var i,s,n,o;e=e||this.element;for(i in this.handles)this.handles[i].constructor===String?this.handles[i]=this.element.children(this.handles[i]).first().show():(this.handles[i].jquery||this.handles[i].nodeType)&&(this.handles[i]=t(this.handles[i]),this._on(this.handles[i],{mousedown:r._mouseDown})),this.elementIsWrapper&&this.originalElement[0].nodeName.match(/^(textarea|input|select|button)$/i)&&(s=t(this.handles[i],this.element),o=/sw|ne|nw|se|n|s/.test(i)?s.outerHeight():s.outerWidth(),n=["padding",/ne|nw|n/.test(i)?"Top":/se|sw|s/.test(i)?"Bottom":/^e$/.test(i)?"Right":"Left"].join(""),e.css(n,o),this._proportionallyResize()),this._handles=this._handles.add(this.handles[i])},this._renderAxis(this.element),this._handles=this._handles.add(this.element.find(".ui-resizable-handle")),this._handles.disableSelection(),this._handles.on("mouseover",function(){r.resizing||(this.className&&(o=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i)),r.axis=o&&o[1]?o[1]:"se")}),a.autoHide&&(this._handles.hide(),this._addClass("ui-resizable-autohide"))},_removeHandles:function(){this._handles.remove()},_mouseCapture:function(e){var i,s,n=!1;for(i in this.handles)s=t(this.handles[i])[0],(s===e.target||t.contains(s,e.target))&&(n=!0);return!this.options.disabled&&n},_mouseStart:function(e){var i,s,n,o=this.options,a=this.element;return this.resizing=!0,this._renderProxy(),i=this._num(this.helper.css("left")),s=this._num(this.helper.css("top")),o.containment&&(i+=t(o.containment).scrollLeft()||0,s+=t(o.containment).scrollTop()||0),this.offset=this.helper.offset(),this.position={left:i,top:s},this.size=this._helper?{width:this.helper.width(),height:this.helper.height()}:{width:a.width(),height:a.height()},this.originalSize=this._helper?{width:a.outerWidth(),height:a.outerHeight()}:{width:a.width(),height:a.height()},this.sizeDiff={width:a.outerWidth()-a.width(),height:a.outerHeight()-a.height()},this.originalPosition={left:i,top:s},this.originalMousePosition={left:e.pageX,top:e.pageY},this.aspectRatio="number"==typeof o.aspectRatio?o.aspectRatio:this.originalSize.width/this.originalSize.height||1,n=t(".ui-resizable-"+this.axis).css("cursor"),t("body").css("cursor","auto"===n?this.axis+"-resize":n),this._addClass("ui-resizable-resizing"),this._propagate("start",e),!0},_mouseDrag:function(e){var i,s,n=this.originalMousePosition,o=this.axis,a=e.pageX-n.left||0,r=e.pageY-n.top||0,h=this._change[o];return this._updatePrevProperties(),h?(i=h.apply(this,[e,a,r]),this._updateVirtualBoundaries(e.shiftKey),(this._aspectRatio||e.shiftKey)&&(i=this._updateRatio(i,e)),i=this._respectSize(i,e),this._updateCache(i),this._propagate("resize",e),s=this._applyChanges(),!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize(),t.isEmptyObject(s)||(this._updatePrevProperties(),this._trigger("resize",e,this.ui()),this._applyChanges()),!1):!1},_mouseStop:function(e){this.resizing=!1;var i,s,n,o,a,r,h,l=this.options,c=this;return 
this._helper&&(i=this._proportionallyResizeElements,s=i.length&&/textarea/i.test(i[0].nodeName),n=s&&this._hasScroll(i[0],"left")?0:c.sizeDiff.height,o=s?0:c.sizeDiff.width,a={width:c.helper.width()-o,height:c.helper.height()-n},r=parseFloat(c.element.css("left"))+(c.position.left-c.originalPosition.left)||null,h=parseFloat(c.element.css("top"))+(c.position.top-c.originalPosition.top)||null,l.animate||this.element.css(t.extend(a,{top:h,left:r})),c.helper.height(c.size.height),c.helper.width(c.size.width),this._helper&&!l.animate&&this._proportionallyResize()),t("body").css("cursor","auto"),this._removeClass("ui-resizable-resizing"),this._propagate("stop",e),this._helper&&this.helper.remove(),!1},_updatePrevProperties:function(){this.prevPosition={top:this.position.top,left:this.position.left},this.prevSize={width:this.size.width,height:this.size.height}},_applyChanges:function(){var t={};return this.position.top!==this.prevPosition.top&&(t.top=this.position.top+"px"),this.position.left!==this.prevPosition.left&&(t.left=this.position.left+"px"),this.size.width!==this.prevSize.width&&(t.width=this.size.width+"px"),this.size.height!==this.prevSize.height&&(t.height=this.size.height+"px"),this.helper.css(t),t},_updateVirtualBoundaries:function(t){var e,i,s,n,o,a=this.options;o={minWidth:this._isNumber(a.minWidth)?a.minWidth:0,maxWidth:this._isNumber(a.maxWidth)?a.maxWidth:1/0,minHeight:this._isNumber(a.minHeight)?a.minHeight:0,maxHeight:this._isNumber(a.maxHeight)?a.maxHeight:1/0},(this._aspectRatio||t)&&(e=o.minHeight*this.aspectRatio,s=o.minWidth/this.aspectRatio,i=o.maxHeight*this.aspectRatio,n=o.maxWidth/this.aspectRatio,e>o.minWidth&&(o.minWidth=e),s>o.minHeight&&(o.minHeight=s),o.maxWidth>i&&(o.maxWidth=i),o.maxHeight>n&&(o.maxHeight=n)),this._vBoundaries=o},_updateCache:function(t){this.offset=this.helper.offset(),this._isNumber(t.left)&&(this.position.left=t.left),this._isNumber(t.top)&&(this.position.top=t.top),this._isNumber(t.height)&&(this.size.height=t.height),this._isNumber(t.width)&&(this.size.width=t.width)},_updateRatio:function(t){var e=this.position,i=this.size,s=this.axis;return this._isNumber(t.height)?t.width=t.height*this.aspectRatio:this._isNumber(t.width)&&(t.height=t.width/this.aspectRatio),"sw"===s&&(t.left=e.left+(i.width-t.width),t.top=null),"nw"===s&&(t.top=e.top+(i.height-t.height),t.left=e.left+(i.width-t.width)),t},_respectSize:function(t){var e=this._vBoundaries,i=this.axis,s=this._isNumber(t.width)&&e.maxWidth&&e.maxWidtht.width,a=this._isNumber(t.height)&&e.minHeight&&e.minHeight>t.height,r=this.originalPosition.left+this.originalSize.width,h=this.originalPosition.top+this.originalSize.height,l=/sw|nw|w/.test(i),c=/nw|ne|n/.test(i);return o&&(t.width=e.minWidth),a&&(t.height=e.minHeight),s&&(t.width=e.maxWidth),n&&(t.height=e.maxHeight),o&&l&&(t.left=r-e.minWidth),s&&l&&(t.left=r-e.maxWidth),a&&c&&(t.top=h-e.minHeight),n&&c&&(t.top=h-e.maxHeight),t.width||t.height||t.left||!t.top?t.width||t.height||t.top||!t.left||(t.left=null):t.top=null,t},_getPaddingPlusBorderDimensions:function(t){for(var e=0,i=[],s=[t.css("borderTopWidth"),t.css("borderRightWidth"),t.css("borderBottomWidth"),t.css("borderLeftWidth")],n=[t.css("paddingTop"),t.css("paddingRight"),t.css("paddingBottom"),t.css("paddingLeft")];4>e;e++)i[e]=parseFloat(s[e])||0,i[e]+=parseFloat(n[e])||0;return{height:i[0]+i[2],width:i[1]+i[3]}},_proportionallyResize:function(){if(this._proportionallyResizeElements.length)for(var 
t,e=0,i=this.helper||this.element;this._proportionallyResizeElements.length>e;e++)t=this._proportionallyResizeElements[e],this.outerDimensions||(this.outerDimensions=this._getPaddingPlusBorderDimensions(t)),t.css({height:i.height()-this.outerDimensions.height||0,width:i.width()-this.outerDimensions.width||0})},_renderProxy:function(){var e=this.element,i=this.options;this.elementOffset=e.offset(),this._helper?(this.helper=this.helper||t("
"),this._addClass(this.helper,this._helper),this.helper.css({width:this.element.outerWidth(),height:this.element.outerHeight(),position:"absolute",left:this.elementOffset.left+"px",top:this.elementOffset.top+"px",zIndex:++i.zIndex}),this.helper.appendTo("body").disableSelection()):this.helper=this.element +},_change:{e:function(t,e){return{width:this.originalSize.width+e}},w:function(t,e){var i=this.originalSize,s=this.originalPosition;return{left:s.left+e,width:i.width-e}},n:function(t,e,i){var s=this.originalSize,n=this.originalPosition;return{top:n.top+i,height:s.height-i}},s:function(t,e,i){return{height:this.originalSize.height+i}},se:function(e,i,s){return t.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[e,i,s]))},sw:function(e,i,s){return t.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[e,i,s]))},ne:function(e,i,s){return t.extend(this._change.n.apply(this,arguments),this._change.e.apply(this,[e,i,s]))},nw:function(e,i,s){return t.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[e,i,s]))}},_propagate:function(e,i){t.ui.plugin.call(this,e,[i,this.ui()]),"resize"!==e&&this._trigger(e,i,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}}),t.ui.plugin.add("resizable","animate",{stop:function(e){var i=t(this).resizable("instance"),s=i.options,n=i._proportionallyResizeElements,o=n.length&&/textarea/i.test(n[0].nodeName),a=o&&i._hasScroll(n[0],"left")?0:i.sizeDiff.height,r=o?0:i.sizeDiff.width,h={width:i.size.width-r,height:i.size.height-a},l=parseFloat(i.element.css("left"))+(i.position.left-i.originalPosition.left)||null,c=parseFloat(i.element.css("top"))+(i.position.top-i.originalPosition.top)||null;i.element.animate(t.extend(h,c&&l?{top:c,left:l}:{}),{duration:s.animateDuration,easing:s.animateEasing,step:function(){var s={width:parseFloat(i.element.css("width")),height:parseFloat(i.element.css("height")),top:parseFloat(i.element.css("top")),left:parseFloat(i.element.css("left"))};n&&n.length&&t(n[0]).css({width:s.width,height:s.height}),i._updateCache(s),i._propagate("resize",e)}})}}),t.ui.plugin.add("resizable","containment",{start:function(){var e,i,s,n,o,a,r,h=t(this).resizable("instance"),l=h.options,c=h.element,u=l.containment,d=u instanceof t?u.get(0):/parent/.test(u)?c.parent().get(0):u;d&&(h.containerElement=t(d),/document/.test(u)||u===document?(h.containerOffset={left:0,top:0},h.containerPosition={left:0,top:0},h.parentData={element:t(document),left:0,top:0,width:t(document).width(),height:t(document).height()||document.body.parentNode.scrollHeight}):(e=t(d),i=[],t(["Top","Right","Left","Bottom"]).each(function(t,s){i[t]=h._num(e.css("padding"+s))}),h.containerOffset=e.offset(),h.containerPosition=e.position(),h.containerSize={height:e.innerHeight()-i[3],width:e.innerWidth()-i[1]},s=h.containerOffset,n=h.containerSize.height,o=h.containerSize.width,a=h._hasScroll(d,"left")?d.scrollWidth:o,r=h._hasScroll(d)?d.scrollHeight:n,h.parentData={element:d,left:s.left,top:s.top,width:a,height:r}))},resize:function(e){var 
i,s,n,o,a=t(this).resizable("instance"),r=a.options,h=a.containerOffset,l=a.position,c=a._aspectRatio||e.shiftKey,u={top:0,left:0},d=a.containerElement,p=!0;d[0]!==document&&/static/.test(d.css("position"))&&(u=h),l.left<(a._helper?h.left:0)&&(a.size.width=a.size.width+(a._helper?a.position.left-h.left:a.position.left-u.left),c&&(a.size.height=a.size.width/a.aspectRatio,p=!1),a.position.left=r.helper?h.left:0),l.top<(a._helper?h.top:0)&&(a.size.height=a.size.height+(a._helper?a.position.top-h.top:a.position.top),c&&(a.size.width=a.size.height*a.aspectRatio,p=!1),a.position.top=a._helper?h.top:0),n=a.containerElement.get(0)===a.element.parent().get(0),o=/relative|absolute/.test(a.containerElement.css("position")),n&&o?(a.offset.left=a.parentData.left+a.position.left,a.offset.top=a.parentData.top+a.position.top):(a.offset.left=a.element.offset().left,a.offset.top=a.element.offset().top),i=Math.abs(a.sizeDiff.width+(a._helper?a.offset.left-u.left:a.offset.left-h.left)),s=Math.abs(a.sizeDiff.height+(a._helper?a.offset.top-u.top:a.offset.top-h.top)),i+a.size.width>=a.parentData.width&&(a.size.width=a.parentData.width-i,c&&(a.size.height=a.size.width/a.aspectRatio,p=!1)),s+a.size.height>=a.parentData.height&&(a.size.height=a.parentData.height-s,c&&(a.size.width=a.size.height*a.aspectRatio,p=!1)),p||(a.position.left=a.prevPosition.left,a.position.top=a.prevPosition.top,a.size.width=a.prevSize.width,a.size.height=a.prevSize.height)},stop:function(){var e=t(this).resizable("instance"),i=e.options,s=e.containerOffset,n=e.containerPosition,o=e.containerElement,a=t(e.helper),r=a.offset(),h=a.outerWidth()-e.sizeDiff.width,l=a.outerHeight()-e.sizeDiff.height;e._helper&&!i.animate&&/relative/.test(o.css("position"))&&t(this).css({left:r.left-n.left-s.left,width:h,height:l}),e._helper&&!i.animate&&/static/.test(o.css("position"))&&t(this).css({left:r.left-n.left-s.left,width:h,height:l})}}),t.ui.plugin.add("resizable","alsoResize",{start:function(){var e=t(this).resizable("instance"),i=e.options;t(i.alsoResize).each(function(){var e=t(this);e.data("ui-resizable-alsoresize",{width:parseFloat(e.width()),height:parseFloat(e.height()),left:parseFloat(e.css("left")),top:parseFloat(e.css("top"))})})},resize:function(e,i){var s=t(this).resizable("instance"),n=s.options,o=s.originalSize,a=s.originalPosition,r={height:s.size.height-o.height||0,width:s.size.width-o.width||0,top:s.position.top-a.top||0,left:s.position.left-a.left||0};t(n.alsoResize).each(function(){var e=t(this),s=t(this).data("ui-resizable-alsoresize"),n={},o=e.parents(i.originalElement[0]).length?["width","height"]:["width","height","top","left"];t.each(o,function(t,e){var i=(s[e]||0)+(r[e]||0);i&&i>=0&&(n[e]=i||null)}),e.css(n)})},stop:function(){t(this).removeData("ui-resizable-alsoresize")}}),t.ui.plugin.add("resizable","ghost",{start:function(){var e=t(this).resizable("instance"),i=e.size;e.ghost=e.originalElement.clone(),e.ghost.css({opacity:.25,display:"block",position:"relative",height:i.height,width:i.width,margin:0,left:0,top:0}),e._addClass(e.ghost,"ui-resizable-ghost"),t.uiBackCompat!==!1&&"string"==typeof e.options.ghost&&e.ghost.addClass(this.options.ghost),e.ghost.appendTo(e.helper)},resize:function(){var e=t(this).resizable("instance");e.ghost&&e.ghost.css({position:"relative",height:e.size.height,width:e.size.width})},stop:function(){var e=t(this).resizable("instance");e.ghost&&e.helper&&e.helper.get(0).removeChild(e.ghost.get(0))}}),t.ui.plugin.add("resizable","grid",{resize:function(){var 
e,i=t(this).resizable("instance"),s=i.options,n=i.size,o=i.originalSize,a=i.originalPosition,r=i.axis,h="number"==typeof s.grid?[s.grid,s.grid]:s.grid,l=h[0]||1,c=h[1]||1,u=Math.round((n.width-o.width)/l)*l,d=Math.round((n.height-o.height)/c)*c,p=o.width+u,f=o.height+d,m=s.maxWidth&&p>s.maxWidth,g=s.maxHeight&&f>s.maxHeight,_=s.minWidth&&s.minWidth>p,v=s.minHeight&&s.minHeight>f;s.grid=h,_&&(p+=l),v&&(f+=c),m&&(p-=l),g&&(f-=c),/^(se|s|e)$/.test(r)?(i.size.width=p,i.size.height=f):/^(ne)$/.test(r)?(i.size.width=p,i.size.height=f,i.position.top=a.top-d):/^(sw)$/.test(r)?(i.size.width=p,i.size.height=f,i.position.left=a.left-u):((0>=f-c||0>=p-l)&&(e=i._getPaddingPlusBorderDimensions(this)),f-c>0?(i.size.height=f,i.position.top=a.top-d):(f=c-e.height,i.size.height=f,i.position.top=a.top+o.height-f),p-l>0?(i.size.width=p,i.position.left=a.left-u):(p=l-e.width,i.size.width=p,i.position.left=a.left+o.width-p))}}),t.ui.resizable});/** + * Copyright (c) 2007 Ariel Flesler - aflesler ○ gmail • com | https://github.com/flesler + * Licensed under MIT + * @author Ariel Flesler + * @version 2.1.2 + */ +;(function(f){"use strict";"function"===typeof define&&define.amd?define(["jquery"],f):"undefined"!==typeof module&&module.exports?module.exports=f(require("jquery")):f(jQuery)})(function($){"use strict";function n(a){return!a.nodeName||-1!==$.inArray(a.nodeName.toLowerCase(),["iframe","#document","html","body"])}function h(a){return $.isFunction(a)||$.isPlainObject(a)?a:{top:a,left:a}}var p=$.scrollTo=function(a,d,b){return $(window).scrollTo(a,d,b)};p.defaults={axis:"xy",duration:0,limit:!0};$.fn.scrollTo=function(a,d,b){"object"=== typeof d&&(b=d,d=0);"function"===typeof b&&(b={onAfter:b});"max"===a&&(a=9E9);b=$.extend({},p.defaults,b);d=d||b.duration;var u=b.queue&&1=f[g]?0:Math.min(f[g],n));!a&&1-1){targetElements.on(evt+EVENT_NAMESPACE,function elementToggle(event){$.powerTip.toggle(this,event)})}else{targetElements.on(evt+EVENT_NAMESPACE,function elementOpen(event){$.powerTip.show(this,event)})}});$.each(options.closeEvents,function(idx,evt){if($.inArray(evt,options.openEvents)<0){targetElements.on(evt+EVENT_NAMESPACE,function elementClose(event){$.powerTip.hide(this,!isMouseEvent(event))})}});targetElements.on("keydown"+EVENT_NAMESPACE,function elementKeyDown(event){if(event.keyCode===27){$.powerTip.hide(this,true)}})}return targetElements};$.fn.powerTip.defaults={fadeInTime:200,fadeOutTime:100,followMouse:false,popupId:"powerTip",popupClass:null,intentSensitivity:7,intentPollInterval:100,closeDelay:100,placement:"n",smartPlacement:false,offset:10,mouseOnToPopup:false,manual:false,openEvents:["mouseenter","focus"],closeEvents:["mouseleave","blur"]};$.fn.powerTip.smartPlacementLists={n:["n","ne","nw","s"],e:["e","ne","se","w","nw","sw","n","s","e"],s:["s","se","sw","n"],w:["w","nw","sw","e","ne","se","n","s","w"],nw:["nw","w","sw","n","s","se","nw"],ne:["ne","e","se","n","s","sw","ne"],sw:["sw","w","nw","s","n","ne","sw"],se:["se","e","ne","s","n","nw","se"],"nw-alt":["nw-alt","n","ne-alt","sw-alt","s","se-alt","w","e"],"ne-alt":["ne-alt","n","nw-alt","se-alt","s","sw-alt","e","w"],"sw-alt":["sw-alt","s","se-alt","nw-alt","n","ne-alt","w","e"],"se-alt":["se-alt","s","sw-alt","ne-alt","n","nw-alt","e","w"]};$.powerTip={show:function apiShowTip(element,event){if(isMouseEvent(event)){trackMouse(event);session.previousX=event.pageX;session.previousY=event.pageY;$(element).data(DATA_DISPLAYCONTROLLER).show()}else{$(element).first().data(DATA_DISPLAYCONTROLLER).show(true,true)}return 
element},reposition:function apiResetPosition(element){$(element).first().data(DATA_DISPLAYCONTROLLER).resetPosition();return element},hide:function apiCloseTip(element,immediate){var displayController;immediate=element?immediate:true;if(element){displayController=$(element).first().data(DATA_DISPLAYCONTROLLER)}else if(session.activeHover){displayController=session.activeHover.data(DATA_DISPLAYCONTROLLER)}if(displayController){displayController.hide(immediate)}return element},toggle:function apiToggle(element,event){if(session.activeHover&&session.activeHover.is(element)){$.powerTip.hide(element,!isMouseEvent(event))}else{$.powerTip.show(element,event)}return element}};$.powerTip.showTip=$.powerTip.show;$.powerTip.closeTip=$.powerTip.hide;function CSSCoordinates(){var me=this;me.top="auto";me.left="auto";me.right="auto";me.bottom="auto";me.set=function(property,value){if($.isNumeric(value)){me[property]=Math.round(value)}}}function DisplayController(element,options,tipController){var hoverTimer=null,myCloseDelay=null;function openTooltip(immediate,forceOpen){cancelTimer();if(!element.data(DATA_HASACTIVEHOVER)){if(!immediate){session.tipOpenImminent=true;hoverTimer=setTimeout(function intentDelay(){hoverTimer=null;checkForIntent()},options.intentPollInterval)}else{if(forceOpen){element.data(DATA_FORCEDOPEN,true)}closeAnyDelayed();tipController.showTip(element)}}else{cancelClose()}}function closeTooltip(disableDelay){if(myCloseDelay){myCloseDelay=session.closeDelayTimeout=clearTimeout(myCloseDelay);session.delayInProgress=false}cancelTimer();session.tipOpenImminent=false;if(element.data(DATA_HASACTIVEHOVER)){element.data(DATA_FORCEDOPEN,false);if(!disableDelay){session.delayInProgress=true;session.closeDelayTimeout=setTimeout(function closeDelay(){session.closeDelayTimeout=null;tipController.hideTip(element);session.delayInProgress=false;myCloseDelay=null},options.closeDelay);myCloseDelay=session.closeDelayTimeout}else{tipController.hideTip(element)}}}function checkForIntent(){var xDifference=Math.abs(session.previousX-session.currentX),yDifference=Math.abs(session.previousY-session.currentY),totalDifference=xDifference+yDifference;if(totalDifference",{id:options.popupId});if($body.length===0){$body=$("body")}$body.append(tipElement);session.tooltips=session.tooltips?session.tooltips.add(tipElement):tipElement}if(options.followMouse){if(!tipElement.data(DATA_HASMOUSEMOVE)){$document.on("mousemove"+EVENT_NAMESPACE,positionTipOnCursor);$window.on("scroll"+EVENT_NAMESPACE,positionTipOnCursor);tipElement.data(DATA_HASMOUSEMOVE,true)}}function beginShowTip(element){element.data(DATA_HASACTIVEHOVER,true);tipElement.queue(function queueTipInit(next){showTip(element);next()})}function showTip(element){var tipContent;if(!element.data(DATA_HASACTIVEHOVER)){return}if(session.isTipOpen){if(!session.isClosing){hideTip(session.activeHover)}tipElement.delay(100).queue(function queueTipAgain(next){showTip(element);next()});return}element.trigger("powerTipPreRender");tipContent=getTooltipContent(element);if(tipContent){tipElement.empty().append(tipContent)}else{return}element.trigger("powerTipRender");session.activeHover=element;session.isTipOpen=true;tipElement.data(DATA_MOUSEONTOTIP,options.mouseOnToPopup);tipElement.addClass(options.popupClass);if(!options.followMouse||element.data(DATA_FORCEDOPEN)){positionTipOnElement(element);session.isFixedTipOpen=true}else{positionTipOnCursor()}if(!element.data(DATA_FORCEDOPEN)&&!options.followMouse){$document.on("click"+EVENT_NAMESPACE,function 
documentClick(event){var target=event.target;if(target!==element[0]){if(options.mouseOnToPopup){if(target!==tipElement[0]&&!$.contains(tipElement[0],target)){$.powerTip.hide()}}else{$.powerTip.hide()}}})}if(options.mouseOnToPopup&&!options.manual){tipElement.on("mouseenter"+EVENT_NAMESPACE,function tipMouseEnter(){if(session.activeHover){session.activeHover.data(DATA_DISPLAYCONTROLLER).cancel()}});tipElement.on("mouseleave"+EVENT_NAMESPACE,function tipMouseLeave(){if(session.activeHover){session.activeHover.data(DATA_DISPLAYCONTROLLER).hide()}})}tipElement.fadeIn(options.fadeInTime,function fadeInCallback(){if(!session.desyncTimeout){session.desyncTimeout=setInterval(closeDesyncedTip,500)}element.trigger("powerTipOpen")})}function hideTip(element){session.isClosing=true;session.isTipOpen=false;session.desyncTimeout=clearInterval(session.desyncTimeout);element.data(DATA_HASACTIVEHOVER,false);element.data(DATA_FORCEDOPEN,false);$document.off("click"+EVENT_NAMESPACE);tipElement.off(EVENT_NAMESPACE);tipElement.fadeOut(options.fadeOutTime,function fadeOutCallback(){var coords=new CSSCoordinates;session.activeHover=null;session.isClosing=false;session.isFixedTipOpen=false;tipElement.removeClass();coords.set("top",session.currentY+options.offset);coords.set("left",session.currentX+options.offset);tipElement.css(coords);element.trigger("powerTipClose")})}function positionTipOnCursor(){var tipWidth,tipHeight,coords,collisions,collisionCount;if(!session.isFixedTipOpen&&(session.isTipOpen||session.tipOpenImminent&&tipElement.data(DATA_HASMOUSEMOVE))){tipWidth=tipElement.outerWidth();tipHeight=tipElement.outerHeight();coords=new CSSCoordinates;coords.set("top",session.currentY+options.offset);coords.set("left",session.currentX+options.offset);collisions=getViewportCollisions(coords,tipWidth,tipHeight);if(collisions!==Collision.none){collisionCount=countFlags(collisions);if(collisionCount===1){if(collisions===Collision.right){coords.set("left",session.scrollLeft+session.windowWidth-tipWidth)}else if(collisions===Collision.bottom){coords.set("top",session.scrollTop+session.windowHeight-tipHeight)}}else{coords.set("left",session.currentX-tipWidth-options.offset);coords.set("top",session.currentY-tipHeight-options.offset)}}tipElement.css(coords)}}function positionTipOnElement(element){var priorityList,finalPlacement;if(options.smartPlacement||options.followMouse&&element.data(DATA_FORCEDOPEN)){priorityList=$.fn.powerTip.smartPlacementLists[options.placement];$.each(priorityList,function(idx,pos){var collisions=getViewportCollisions(placeTooltip(element,pos),tipElement.outerWidth(),tipElement.outerHeight());finalPlacement=pos;return collisions!==Collision.none})}else{placeTooltip(element,options.placement);finalPlacement=options.placement}tipElement.removeClass("w nw sw e ne se n s w se-alt sw-alt ne-alt nw-alt");tipElement.addClass(finalPlacement)}function placeTooltip(element,placement){var iterationCount=0,tipWidth,tipHeight,coords=new CSSCoordinates;coords.set("top",0);coords.set("left",0);tipElement.css(coords);do{tipWidth=tipElement.outerWidth();tipHeight=tipElement.outerHeight();coords=placementCalculator.compute(element,placement,tipWidth,tipHeight,options.offset);tipElement.css(coords)}while(++iterationCount<=5&&(tipWidth!==tipElement.outerWidth()||tipHeight!==tipElement.outerHeight()));return coords}function closeDesyncedTip(){var isDesynced=false,hasDesyncableCloseEvent=$.grep(["mouseleave","mouseout","blur","focusout"],function(eventType){return 
$.inArray(eventType,options.closeEvents)!==-1}).length>0;if(session.isTipOpen&&!session.isClosing&&!session.delayInProgress&&hasDesyncableCloseEvent){if(session.activeHover.data(DATA_HASACTIVEHOVER)===false||session.activeHover.is(":disabled")){isDesynced=true}else if(!isMouseOver(session.activeHover)&&!session.activeHover.is(":focus")&&!session.activeHover.data(DATA_FORCEDOPEN)){if(tipElement.data(DATA_MOUSEONTOTIP)){if(!isMouseOver(tipElement)){isDesynced=true}}else{isDesynced=true}}if(isDesynced){hideTip(session.activeHover)}}}this.showTip=beginShowTip;this.hideTip=hideTip;this.resetPosition=positionTipOnElement}function isSvgElement(element){return Boolean(window.SVGElement&&element[0]instanceof SVGElement)}function isMouseEvent(event){return Boolean(event&&$.inArray(event.type,MOUSE_EVENTS)>-1&&typeof event.pageX==="number")}function initTracking(){if(!session.mouseTrackingActive){session.mouseTrackingActive=true;getViewportDimensions();$(getViewportDimensions);$document.on("mousemove"+EVENT_NAMESPACE,trackMouse);$window.on("resize"+EVENT_NAMESPACE,trackResize);$window.on("scroll"+EVENT_NAMESPACE,trackScroll)}}function getViewportDimensions(){session.scrollLeft=$window.scrollLeft();session.scrollTop=$window.scrollTop();session.windowWidth=$window.width();session.windowHeight=$window.height()}function trackResize(){session.windowWidth=$window.width();session.windowHeight=$window.height()}function trackScroll(){var x=$window.scrollLeft(),y=$window.scrollTop();if(x!==session.scrollLeft){session.currentX+=x-session.scrollLeft;session.scrollLeft=x}if(y!==session.scrollTop){session.currentY+=y-session.scrollTop;session.scrollTop=y}}function trackMouse(event){session.currentX=event.pageX;session.currentY=event.pageY}function isMouseOver(element){var elementPosition=element.offset(),elementBox=element[0].getBoundingClientRect(),elementWidth=elementBox.right-elementBox.left,elementHeight=elementBox.bottom-elementBox.top;return session.currentX>=elementPosition.left&&session.currentX<=elementPosition.left+elementWidth&&session.currentY>=elementPosition.top&&session.currentY<=elementPosition.top+elementHeight}function getTooltipContent(element){var tipText=element.data(DATA_POWERTIP),tipObject=element.data(DATA_POWERTIPJQ),tipTarget=element.data(DATA_POWERTIPTARGET),targetElement,content;if(tipText){if($.isFunction(tipText)){tipText=tipText.call(element[0])}content=tipText}else if(tipObject){if($.isFunction(tipObject)){tipObject=tipObject.call(element[0])}if(tipObject.length>0){content=tipObject.clone(true,true)}}else if(tipTarget){targetElement=$("#"+tipTarget);if(targetElement.length>0){content=targetElement.html()}}return content}function getViewportCollisions(coords,elementWidth,elementHeight){var viewportTop=session.scrollTop,viewportLeft=session.scrollLeft,viewportBottom=viewportTop+session.windowHeight,viewportRight=viewportLeft+session.windowWidth,collisions=Collision.none;if(coords.topviewportBottom||Math.abs(coords.bottom-session.windowHeight)>viewportBottom){collisions|=Collision.bottom}if(coords.leftviewportRight){collisions|=Collision.left}if(coords.left+elementWidth>viewportRight||coords.right1)){a.preventDefault();var c=a.originalEvent.changedTouches[0],d=document.createEvent("MouseEvents");d.initMouseEvent(b,!0,!0,window,1,c.screenX,c.screenY,c.clientX,c.clientY,!1,!1,!1,!1,0,null),a.target.dispatchEvent(d)}}if(a.support.touch="ontouchend"in document,a.support.touch){var e,b=a.ui.mouse.prototype,c=b._mouseInit,d=b._mouseDestroy;b._touchStart=function(a){var 
b=this;!e&&b._mouseCapture(a.originalEvent.changedTouches[0])&&(e=!0,b._touchMoved=!1,f(a,"mouseover"),f(a,"mousemove"),f(a,"mousedown"))},b._touchMove=function(a){e&&(this._touchMoved=!0,f(a,"mousemove"))},b._touchEnd=function(a){e&&(f(a,"mouseup"),f(a,"mouseout"),this._touchMoved||f(a,"click"),e=!1)},b._mouseInit=function(){var b=this;b.element.bind({touchstart:a.proxy(b,"_touchStart"),touchmove:a.proxy(b,"_touchMove"),touchend:a.proxy(b,"_touchEnd")}),c.call(b)},b._mouseDestroy=function(){var b=this;b.element.unbind({touchstart:a.proxy(b,"_touchStart"),touchmove:a.proxy(b,"_touchMove"),touchend:a.proxy(b,"_touchEnd")}),d.call(b)}}}(jQuery);/*! SmartMenus jQuery Plugin - v1.1.0 - September 17, 2017 + * http://www.smartmenus.org/ + * Copyright Vasil Dinkov, Vadikom Web Ltd. http://vadikom.com; Licensed MIT */(function(t){"function"==typeof define&&define.amd?define(["jquery"],t):"object"==typeof module&&"object"==typeof module.exports?module.exports=t(require("jquery")):t(jQuery)})(function($){function initMouseDetection(t){var e=".smartmenus_mouse";if(mouseDetectionEnabled||t)mouseDetectionEnabled&&t&&($(document).off(e),mouseDetectionEnabled=!1);else{var i=!0,s=null,o={mousemove:function(t){var e={x:t.pageX,y:t.pageY,timeStamp:(new Date).getTime()};if(s){var o=Math.abs(s.x-e.x),a=Math.abs(s.y-e.y);if((o>0||a>0)&&2>=o&&2>=a&&300>=e.timeStamp-s.timeStamp&&(mouse=!0,i)){var n=$(t.target).closest("a");n.is("a")&&$.each(menuTrees,function(){return $.contains(this.$root[0],n[0])?(this.itemEnter({currentTarget:n[0]}),!1):void 0}),i=!1}}s=e}};o[touchEvents?"touchstart":"pointerover pointermove pointerout MSPointerOver MSPointerMove MSPointerOut"]=function(t){isTouchEvent(t.originalEvent)&&(mouse=!1)},$(document).on(getEventsNS(o,e)),mouseDetectionEnabled=!0}}function isTouchEvent(t){return!/^(4|mouse)$/.test(t.pointerType)}function getEventsNS(t,e){e||(e="");var i={};for(var s in t)i[s.split(" ").join(e+" ")+e]=t[s];return i}var menuTrees=[],mouse=!1,touchEvents="ontouchstart"in window,mouseDetectionEnabled=!1,requestAnimationFrame=window.requestAnimationFrame||function(t){return setTimeout(t,1e3/60)},cancelAnimationFrame=window.cancelAnimationFrame||function(t){clearTimeout(t)},canAnimate=!!$.fn.animate;return $.SmartMenus=function(t,e){this.$root=$(t),this.opts=e,this.rootId="",this.accessIdPrefix="",this.$subArrow=null,this.activatedItems=[],this.visibleSubMenus=[],this.showTimeout=0,this.hideTimeout=0,this.scrollTimeout=0,this.clickActivated=!1,this.focusActivated=!1,this.zIndexInc=0,this.idInc=0,this.$firstLink=null,this.$firstSub=null,this.disabled=!1,this.$disableOverlay=null,this.$touchScrollingSub=null,this.cssTransforms3d="perspective"in t.style||"webkitPerspective"in t.style,this.wasCollapsible=!1,this.init()},$.extend($.SmartMenus,{hideAll:function(){$.each(menuTrees,function(){this.menuHideAll()})},destroy:function(){for(;menuTrees.length;)menuTrees[0].destroy();initMouseDetection(!0)},prototype:{init:function(t){var e=this;if(!t){menuTrees.push(this),this.rootId=((new Date).getTime()+Math.random()+"").replace(/\D/g,""),this.accessIdPrefix="sm-"+this.rootId+"-",this.$root.hasClass("sm-rtl")&&(this.opts.rightToLeftSubMenus=!0);var i=".smartmenus";this.$root.data("smartmenus",this).attr("data-smartmenus-id",this.rootId).dataSM("level",1).on(getEventsNS({"mouseover focusin":$.proxy(this.rootOver,this),"mouseout 
focusout":$.proxy(this.rootOut,this),keydown:$.proxy(this.rootKeyDown,this)},i)).on(getEventsNS({mouseenter:$.proxy(this.itemEnter,this),mouseleave:$.proxy(this.itemLeave,this),mousedown:$.proxy(this.itemDown,this),focus:$.proxy(this.itemFocus,this),blur:$.proxy(this.itemBlur,this),click:$.proxy(this.itemClick,this)},i),"a"),i+=this.rootId,this.opts.hideOnClick&&$(document).on(getEventsNS({touchstart:$.proxy(this.docTouchStart,this),touchmove:$.proxy(this.docTouchMove,this),touchend:$.proxy(this.docTouchEnd,this),click:$.proxy(this.docClick,this)},i)),$(window).on(getEventsNS({"resize orientationchange":$.proxy(this.winResize,this)},i)),this.opts.subIndicators&&(this.$subArrow=$("").addClass("sub-arrow"),this.opts.subIndicatorsText&&this.$subArrow.html(this.opts.subIndicatorsText)),initMouseDetection()}if(this.$firstSub=this.$root.find("ul").each(function(){e.menuInit($(this))}).eq(0),this.$firstLink=this.$root.find("a").eq(0),this.opts.markCurrentItem){var s=/(index|default)\.[^#\?\/]*/i,o=/#.*/,a=window.location.href.replace(s,""),n=a.replace(o,"");this.$root.find("a").each(function(){var t=this.href.replace(s,""),i=$(this);(t==a||t==n)&&(i.addClass("current"),e.opts.markCurrentTree&&i.parentsUntil("[data-smartmenus-id]","ul").each(function(){$(this).dataSM("parent-a").addClass("current")}))})}this.wasCollapsible=this.isCollapsible()},destroy:function(t){if(!t){var e=".smartmenus";this.$root.removeData("smartmenus").removeAttr("data-smartmenus-id").removeDataSM("level").off(e),e+=this.rootId,$(document).off(e),$(window).off(e),this.opts.subIndicators&&(this.$subArrow=null)}this.menuHideAll();var i=this;this.$root.find("ul").each(function(){var t=$(this);t.dataSM("scroll-arrows")&&t.dataSM("scroll-arrows").remove(),t.dataSM("shown-before")&&((i.opts.subMenusMinWidth||i.opts.subMenusMaxWidth)&&t.css({width:"",minWidth:"",maxWidth:""}).removeClass("sm-nowrap"),t.dataSM("scroll-arrows")&&t.dataSM("scroll-arrows").remove(),t.css({zIndex:"",top:"",left:"",marginLeft:"",marginTop:"",display:""})),0==(t.attr("id")||"").indexOf(i.accessIdPrefix)&&t.removeAttr("id")}).removeDataSM("in-mega").removeDataSM("shown-before").removeDataSM("scroll-arrows").removeDataSM("parent-a").removeDataSM("level").removeDataSM("beforefirstshowfired").removeAttr("role").removeAttr("aria-hidden").removeAttr("aria-labelledby").removeAttr("aria-expanded"),this.$root.find("a.has-submenu").each(function(){var t=$(this);0==t.attr("id").indexOf(i.accessIdPrefix)&&t.removeAttr("id")}).removeClass("has-submenu").removeDataSM("sub").removeAttr("aria-haspopup").removeAttr("aria-controls").removeAttr("aria-expanded").closest("li").removeDataSM("sub"),this.opts.subIndicators&&this.$root.find("span.sub-arrow").remove(),this.opts.markCurrentItem&&this.$root.find("a.current").removeClass("current"),t||(this.$root=null,this.$firstLink=null,this.$firstSub=null,this.$disableOverlay&&(this.$disableOverlay.remove(),this.$disableOverlay=null),menuTrees.splice($.inArray(this,menuTrees),1))},disable:function(t){if(!this.disabled){if(this.menuHideAll(),!t&&!this.opts.isPopup&&this.$root.is(":visible")){var e=this.$root.offset();this.$disableOverlay=$('
').css({position:"absolute",top:e.top,left:e.left,width:this.$root.outerWidth(),height:this.$root.outerHeight(),zIndex:this.getStartZIndex(!0),opacity:0}).appendTo(document.body)}this.disabled=!0}},docClick:function(t){return this.$touchScrollingSub?(this.$touchScrollingSub=null,void 0):((this.visibleSubMenus.length&&!$.contains(this.$root[0],t.target)||$(t.target).closest("a").length)&&this.menuHideAll(),void 0)},docTouchEnd:function(){if(this.lastTouch){if(!(!this.visibleSubMenus.length||void 0!==this.lastTouch.x2&&this.lastTouch.x1!=this.lastTouch.x2||void 0!==this.lastTouch.y2&&this.lastTouch.y1!=this.lastTouch.y2||this.lastTouch.target&&$.contains(this.$root[0],this.lastTouch.target))){this.hideTimeout&&(clearTimeout(this.hideTimeout),this.hideTimeout=0);var t=this;this.hideTimeout=setTimeout(function(){t.menuHideAll()},350)}this.lastTouch=null}},docTouchMove:function(t){if(this.lastTouch){var e=t.originalEvent.touches[0];this.lastTouch.x2=e.pageX,this.lastTouch.y2=e.pageY}},docTouchStart:function(t){var e=t.originalEvent.touches[0];this.lastTouch={x1:e.pageX,y1:e.pageY,target:e.target}},enable:function(){this.disabled&&(this.$disableOverlay&&(this.$disableOverlay.remove(),this.$disableOverlay=null),this.disabled=!1)},getClosestMenu:function(t){for(var e=$(t).closest("ul");e.dataSM("in-mega");)e=e.parent().closest("ul");return e[0]||null},getHeight:function(t){return this.getOffset(t,!0)},getOffset:function(t,e){var i;"none"==t.css("display")&&(i={position:t[0].style.position,visibility:t[0].style.visibility},t.css({position:"absolute",visibility:"hidden"}).show());var s=t[0].getBoundingClientRect&&t[0].getBoundingClientRect(),o=s&&(e?s.height||s.bottom-s.top:s.width||s.right-s.left);return o||0===o||(o=e?t[0].offsetHeight:t[0].offsetWidth),i&&t.hide().css(i),o},getStartZIndex:function(t){var e=parseInt(this[t?"$root":"$firstSub"].css("z-index"));return!t&&isNaN(e)&&(e=parseInt(this.$root.css("z-index"))),isNaN(e)?1:e},getTouchPoint:function(t){return t.touches&&t.touches[0]||t.changedTouches&&t.changedTouches[0]||t},getViewport:function(t){var e=t?"Height":"Width",i=document.documentElement["client"+e],s=window["inner"+e];return s&&(i=Math.min(i,s)),i},getViewportHeight:function(){return this.getViewport(!0)},getViewportWidth:function(){return this.getViewport()},getWidth:function(t){return this.getOffset(t)},handleEvents:function(){return!this.disabled&&this.isCSSOn()},handleItemEvents:function(t){return this.handleEvents()&&!this.isLinkInMegaMenu(t)},isCollapsible:function(){return"static"==this.$firstSub.css("position")},isCSSOn:function(){return"inline"!=this.$firstLink.css("display")},isFixed:function(){var t="fixed"==this.$root.css("position");return t||this.$root.parentsUntil("body").each(function(){return"fixed"==$(this).css("position")?(t=!0,!1):void 0}),t},isLinkInMegaMenu:function(t){return $(this.getClosestMenu(t[0])).hasClass("mega-menu")},isTouchMode:function(){return!mouse||this.opts.noMouseOver||this.isCollapsible()},itemActivate:function(t,e){var i=t.closest("ul"),s=i.dataSM("level");if(s>1&&(!this.activatedItems[s-2]||this.activatedItems[s-2][0]!=i.dataSM("parent-a")[0])){var o=this;$(i.parentsUntil("[data-smartmenus-id]","ul").get().reverse()).add(i).each(function(){o.itemActivate($(this).dataSM("parent-a"))})}if((!this.isCollapsible()||e)&&this.menuHideSubMenus(this.activatedItems[s-1]&&this.activatedItems[s-1][0]==t[0]?s:s-1),this.activatedItems[s-1]=t,this.$root.triggerHandler("activate.smapi",t[0])!==!1){var 
a=t.dataSM("sub");a&&(this.isTouchMode()||!this.opts.showOnClick||this.clickActivated)&&this.menuShow(a)}},itemBlur:function(t){var e=$(t.currentTarget);this.handleItemEvents(e)&&this.$root.triggerHandler("blur.smapi",e[0])},itemClick:function(t){var e=$(t.currentTarget);if(this.handleItemEvents(e)){if(this.$touchScrollingSub&&this.$touchScrollingSub[0]==e.closest("ul")[0])return this.$touchScrollingSub=null,t.stopPropagation(),!1;if(this.$root.triggerHandler("click.smapi",e[0])===!1)return!1;var i=$(t.target).is(".sub-arrow"),s=e.dataSM("sub"),o=s?2==s.dataSM("level"):!1,a=this.isCollapsible(),n=/toggle$/.test(this.opts.collapsibleBehavior),r=/link$/.test(this.opts.collapsibleBehavior),h=/^accordion/.test(this.opts.collapsibleBehavior);if(s&&!s.is(":visible")){if((!r||!a||i)&&(this.opts.showOnClick&&o&&(this.clickActivated=!0),this.itemActivate(e,h),s.is(":visible")))return this.focusActivated=!0,!1}else if(a&&(n||i))return this.itemActivate(e,h),this.menuHide(s),n&&(this.focusActivated=!1),!1;return this.opts.showOnClick&&o||e.hasClass("disabled")||this.$root.triggerHandler("select.smapi",e[0])===!1?!1:void 0}},itemDown:function(t){var e=$(t.currentTarget);this.handleItemEvents(e)&&e.dataSM("mousedown",!0)},itemEnter:function(t){var e=$(t.currentTarget);if(this.handleItemEvents(e)){if(!this.isTouchMode()){this.showTimeout&&(clearTimeout(this.showTimeout),this.showTimeout=0);var i=this;this.showTimeout=setTimeout(function(){i.itemActivate(e)},this.opts.showOnClick&&1==e.closest("ul").dataSM("level")?1:this.opts.showTimeout)}this.$root.triggerHandler("mouseenter.smapi",e[0])}},itemFocus:function(t){var e=$(t.currentTarget);this.handleItemEvents(e)&&(!this.focusActivated||this.isTouchMode()&&e.dataSM("mousedown")||this.activatedItems.length&&this.activatedItems[this.activatedItems.length-1][0]==e[0]||this.itemActivate(e,!0),this.$root.triggerHandler("focus.smapi",e[0]))},itemLeave:function(t){var e=$(t.currentTarget);this.handleItemEvents(e)&&(this.isTouchMode()||(e[0].blur(),this.showTimeout&&(clearTimeout(this.showTimeout),this.showTimeout=0)),e.removeDataSM("mousedown"),this.$root.triggerHandler("mouseleave.smapi",e[0]))},menuHide:function(t){if(this.$root.triggerHandler("beforehide.smapi",t[0])!==!1&&(canAnimate&&t.stop(!0,!0),"none"!=t.css("display"))){var e=function(){t.css("z-index","")};this.isCollapsible()?canAnimate&&this.opts.collapsibleHideFunction?this.opts.collapsibleHideFunction.call(this,t,e):t.hide(this.opts.collapsibleHideDuration,e):canAnimate&&this.opts.hideFunction?this.opts.hideFunction.call(this,t,e):t.hide(this.opts.hideDuration,e),t.dataSM("scroll")&&(this.menuScrollStop(t),t.css({"touch-action":"","-ms-touch-action":"","-webkit-transform":"",transform:""}).off(".smartmenus_scroll").removeDataSM("scroll").dataSM("scroll-arrows").hide()),t.dataSM("parent-a").removeClass("highlighted").attr("aria-expanded","false"),t.attr({"aria-expanded":"false","aria-hidden":"true"});var i=t.dataSM("level");this.activatedItems.splice(i-1,1),this.visibleSubMenus.splice($.inArray(t,this.visibleSubMenus),1),this.$root.triggerHandler("hide.smapi",t[0])}},menuHideAll:function(){this.showTimeout&&(clearTimeout(this.showTimeout),this.showTimeout=0);for(var 
t=this.opts.isPopup?1:0,e=this.visibleSubMenus.length-1;e>=t;e--)this.menuHide(this.visibleSubMenus[e]);this.opts.isPopup&&(canAnimate&&this.$root.stop(!0,!0),this.$root.is(":visible")&&(canAnimate&&this.opts.hideFunction?this.opts.hideFunction.call(this,this.$root):this.$root.hide(this.opts.hideDuration))),this.activatedItems=[],this.visibleSubMenus=[],this.clickActivated=!1,this.focusActivated=!1,this.zIndexInc=0,this.$root.triggerHandler("hideAll.smapi")},menuHideSubMenus:function(t){for(var e=this.activatedItems.length-1;e>=t;e--){var i=this.activatedItems[e].dataSM("sub");i&&this.menuHide(i)}},menuInit:function(t){if(!t.dataSM("in-mega")){t.hasClass("mega-menu")&&t.find("ul").dataSM("in-mega",!0);for(var e=2,i=t[0];(i=i.parentNode.parentNode)!=this.$root[0];)e++;var s=t.prevAll("a").eq(-1);s.length||(s=t.prevAll().find("a").eq(-1)),s.addClass("has-submenu").dataSM("sub",t),t.dataSM("parent-a",s).dataSM("level",e).parent().dataSM("sub",t);var o=s.attr("id")||this.accessIdPrefix+ ++this.idInc,a=t.attr("id")||this.accessIdPrefix+ ++this.idInc;s.attr({id:o,"aria-haspopup":"true","aria-controls":a,"aria-expanded":"false"}),t.attr({id:a,role:"group","aria-hidden":"true","aria-labelledby":o,"aria-expanded":"false"}),this.opts.subIndicators&&s[this.opts.subIndicatorsPos](this.$subArrow.clone())}},menuPosition:function(t){var e,i,s=t.dataSM("parent-a"),o=s.closest("li"),a=o.parent(),n=t.dataSM("level"),r=this.getWidth(t),h=this.getHeight(t),u=s.offset(),l=u.left,c=u.top,d=this.getWidth(s),m=this.getHeight(s),p=$(window),f=p.scrollLeft(),v=p.scrollTop(),b=this.getViewportWidth(),S=this.getViewportHeight(),g=a.parent().is("[data-sm-horizontal-sub]")||2==n&&!a.hasClass("sm-vertical"),M=this.opts.rightToLeftSubMenus&&!o.is("[data-sm-reverse]")||!this.opts.rightToLeftSubMenus&&o.is("[data-sm-reverse]"),w=2==n?this.opts.mainMenuSubOffsetX:this.opts.subMenusSubOffsetX,T=2==n?this.opts.mainMenuSubOffsetY:this.opts.subMenusSubOffsetY;if(g?(e=M?d-r-w:w,i=this.opts.bottomToTopSubMenus?-h-T:m+T):(e=M?w-r:d-w,i=this.opts.bottomToTopSubMenus?m-T-h:T),this.opts.keepInViewport){var y=l+e,I=c+i;if(M&&f>y?e=g?f-y+e:d-w:!M&&y+r>f+b&&(e=g?f+b-r-y+e:w-r),g||(S>h&&I+h>v+S?i+=v+S-h-I:(h>=S||v>I)&&(i+=v-I)),g&&(I+h>v+S+.49||v>I)||!g&&h>S+.49){var x=this;t.dataSM("scroll-arrows")||t.dataSM("scroll-arrows",$([$('')[0],$('')[0]]).on({mouseenter:function(){t.dataSM("scroll").up=$(this).hasClass("scroll-up"),x.menuScroll(t)},mouseleave:function(e){x.menuScrollStop(t),x.menuScrollOut(t,e)},"mousewheel DOMMouseScroll":function(t){t.preventDefault()}}).insertAfter(t));var A=".smartmenus_scroll";if(t.dataSM("scroll",{y:this.cssTransforms3d?0:i-m,step:1,itemH:m,subH:h,arrowDownH:this.getHeight(t.dataSM("scroll-arrows").eq(1))}).on(getEventsNS({mouseover:function(e){x.menuScrollOver(t,e)},mouseout:function(e){x.menuScrollOut(t,e)},"mousewheel DOMMouseScroll":function(e){x.menuScrollMousewheel(t,e)}},A)).dataSM("scroll-arrows").css({top:"auto",left:"0",marginLeft:e+(parseInt(t.css("border-left-width"))||0),width:r-(parseInt(t.css("border-left-width"))||0)-(parseInt(t.css("border-right-width"))||0),zIndex:t.css("z-index")}).eq(g&&this.opts.bottomToTopSubMenus?0:1).show(),this.isFixed()){var C={};C[touchEvents?"touchstart touchmove touchend":"pointerdown pointermove pointerup MSPointerDown MSPointerMove MSPointerUp"]=function(e){x.menuScrollTouch(t,e)},t.css({"touch-action":"none","-ms-touch-action":"none"}).on(getEventsNS(C,A))}}}t.css({top:"auto",left:"0",marginLeft:e,marginTop:i-m})},menuScroll:function(t,e,i){var 
s,o=t.dataSM("scroll"),a=t.dataSM("scroll-arrows"),n=o.up?o.upEnd:o.downEnd;if(!e&&o.momentum){if(o.momentum*=.92,s=o.momentum,.5>s)return this.menuScrollStop(t),void 0}else s=i||(e||!this.opts.scrollAccelerate?this.opts.scrollStep:Math.floor(o.step));var r=t.dataSM("level");if(this.activatedItems[r-1]&&this.activatedItems[r-1].dataSM("sub")&&this.activatedItems[r-1].dataSM("sub").is(":visible")&&this.menuHideSubMenus(r-1),o.y=o.up&&o.y>=n||!o.up&&n>=o.y?o.y:Math.abs(n-o.y)>s?o.y+(o.up?s:-s):n,t.css(this.cssTransforms3d?{"-webkit-transform":"translate3d(0, "+o.y+"px, 0)",transform:"translate3d(0, "+o.y+"px, 0)"}:{marginTop:o.y}),mouse&&(o.up&&o.y>o.downEnd||!o.up&&o.y0;t.dataSM("scroll-arrows").eq(i?0:1).is(":visible")&&(t.dataSM("scroll").up=i,this.menuScroll(t,!0))}e.preventDefault()},menuScrollOut:function(t,e){mouse&&(/^scroll-(up|down)/.test((e.relatedTarget||"").className)||(t[0]==e.relatedTarget||$.contains(t[0],e.relatedTarget))&&this.getClosestMenu(e.relatedTarget)==t[0]||t.dataSM("scroll-arrows").css("visibility","hidden"))},menuScrollOver:function(t,e){if(mouse&&!/^scroll-(up|down)/.test(e.target.className)&&this.getClosestMenu(e.target)==t[0]){this.menuScrollRefreshData(t);var i=t.dataSM("scroll"),s=$(window).scrollTop()-t.dataSM("parent-a").offset().top-i.itemH;t.dataSM("scroll-arrows").eq(0).css("margin-top",s).end().eq(1).css("margin-top",s+this.getViewportHeight()-i.arrowDownH).end().css("visibility","visible")}},menuScrollRefreshData:function(t){var e=t.dataSM("scroll"),i=$(window).scrollTop()-t.dataSM("parent-a").offset().top-e.itemH;this.cssTransforms3d&&(i=-(parseFloat(t.css("margin-top"))-i)),$.extend(e,{upEnd:i,downEnd:i+this.getViewportHeight()-e.subH})},menuScrollStop:function(t){return this.scrollTimeout?(cancelAnimationFrame(this.scrollTimeout),this.scrollTimeout=0,t.dataSM("scroll").step=1,!0):void 0},menuScrollTouch:function(t,e){if(e=e.originalEvent,isTouchEvent(e)){var i=this.getTouchPoint(e);if(this.getClosestMenu(i.target)==t[0]){var s=t.dataSM("scroll");if(/(start|down)$/i.test(e.type))this.menuScrollStop(t)?(e.preventDefault(),this.$touchScrollingSub=t):this.$touchScrollingSub=null,this.menuScrollRefreshData(t),$.extend(s,{touchStartY:i.pageY,touchStartTime:e.timeStamp});else if(/move$/i.test(e.type)){var o=void 0!==s.touchY?s.touchY:s.touchStartY;if(void 0!==o&&o!=i.pageY){this.$touchScrollingSub=t;var a=i.pageY>o;void 0!==s.up&&s.up!=a&&$.extend(s,{touchStartY:i.pageY,touchStartTime:e.timeStamp}),$.extend(s,{up:a,touchY:i.pageY}),this.menuScroll(t,!0,Math.abs(i.pageY-o))}e.preventDefault()}else void 0!==s.touchY&&((s.momentum=15*Math.pow(Math.abs(i.pageY-s.touchStartY)/(e.timeStamp-s.touchStartTime),2))&&(this.menuScrollStop(t),this.menuScroll(t),e.preventDefault()),delete s.touchY)}}},menuShow:function(t){if((t.dataSM("beforefirstshowfired")||(t.dataSM("beforefirstshowfired",!0),this.$root.triggerHandler("beforefirstshow.smapi",t[0])!==!1))&&this.$root.triggerHandler("beforeshow.smapi",t[0])!==!1&&(t.dataSM("shown-before",!0),canAnimate&&t.stop(!0,!0),!t.is(":visible"))){var 
e=t.dataSM("parent-a"),i=this.isCollapsible();if((this.opts.keepHighlighted||i)&&e.addClass("highlighted"),i)t.removeClass("sm-nowrap").css({zIndex:"",width:"auto",minWidth:"",maxWidth:"",top:"",left:"",marginLeft:"",marginTop:""});else{if(t.css("z-index",this.zIndexInc=(this.zIndexInc||this.getStartZIndex())+1),(this.opts.subMenusMinWidth||this.opts.subMenusMaxWidth)&&(t.css({width:"auto",minWidth:"",maxWidth:""}).addClass("sm-nowrap"),this.opts.subMenusMinWidth&&t.css("min-width",this.opts.subMenusMinWidth),this.opts.subMenusMaxWidth)){var s=this.getWidth(t);t.css("max-width",this.opts.subMenusMaxWidth),s>this.getWidth(t)&&t.removeClass("sm-nowrap").css("width",this.opts.subMenusMaxWidth)}this.menuPosition(t)}var o=function(){t.css("overflow","")};i?canAnimate&&this.opts.collapsibleShowFunction?this.opts.collapsibleShowFunction.call(this,t,o):t.show(this.opts.collapsibleShowDuration,o):canAnimate&&this.opts.showFunction?this.opts.showFunction.call(this,t,o):t.show(this.opts.showDuration,o),e.attr("aria-expanded","true"),t.attr({"aria-expanded":"true","aria-hidden":"false"}),this.visibleSubMenus.push(t),this.$root.triggerHandler("show.smapi",t[0])}},popupHide:function(t){this.hideTimeout&&(clearTimeout(this.hideTimeout),this.hideTimeout=0);var e=this;this.hideTimeout=setTimeout(function(){e.menuHideAll()},t?1:this.opts.hideTimeout)},popupShow:function(t,e){if(!this.opts.isPopup)return alert('SmartMenus jQuery Error:\n\nIf you want to show this menu via the "popupShow" method, set the isPopup:true option.'),void 0;if(this.hideTimeout&&(clearTimeout(this.hideTimeout),this.hideTimeout=0),this.$root.dataSM("shown-before",!0),canAnimate&&this.$root.stop(!0,!0),!this.$root.is(":visible")){this.$root.css({left:t,top:e});var i=this,s=function(){i.$root.css("overflow","")};canAnimate&&this.opts.showFunction?this.opts.showFunction.call(this,this.$root,s):this.$root.show(this.opts.showDuration,s),this.visibleSubMenus[0]=this.$root}},refresh:function(){this.destroy(!0),this.init(!0)},rootKeyDown:function(t){if(this.handleEvents())switch(t.keyCode){case 27:var e=this.activatedItems[0];if(e){this.menuHideAll(),e[0].focus();var i=e.dataSM("sub");i&&this.menuHide(i)}break;case 32:var s=$(t.target);if(s.is("a")&&this.handleItemEvents(s)){var i=s.dataSM("sub");i&&!i.is(":visible")&&(this.itemClick({currentTarget:t.target}),t.preventDefault())}}},rootOut:function(t){if(this.handleEvents()&&!this.isTouchMode()&&t.target!=this.$root[0]&&(this.hideTimeout&&(clearTimeout(this.hideTimeout),this.hideTimeout=0),!this.opts.showOnClick||!this.opts.hideOnClick)){var e=this;this.hideTimeout=setTimeout(function(){e.menuHideAll()},this.opts.hideTimeout)}},rootOver:function(t){this.handleEvents()&&!this.isTouchMode()&&t.target!=this.$root[0]&&this.hideTimeout&&(clearTimeout(this.hideTimeout),this.hideTimeout=0)},winResize:function(t){if(this.handleEvents()){if(!("onorientationchange"in window)||"orientationchange"==t.type){var e=this.isCollapsible();this.wasCollapsible&&e||(this.activatedItems.length&&this.activatedItems[this.activatedItems.length-1][0].blur(),this.menuHideAll()),this.wasCollapsible=e}}else if(this.$disableOverlay){var i=this.$root.offset();this.$disableOverlay.css({top:i.top,left:i.left,width:this.$root.outerWidth(),height:this.$root.outerHeight()})}}}}),$.fn.dataSM=function(t,e){return e?this.data(t+"_smartmenus",e):this.data(t+"_smartmenus")},$.fn.removeDataSM=function(t){return this.removeData(t+"_smartmenus")},$.fn.smartmenus=function(options){if("string"==typeof options){var 
args=arguments,method=options;return Array.prototype.shift.call(args),this.each(function(){var t=$(this).data("smartmenus");t&&t[method]&&t[method].apply(t,args)})}return this.each(function(){var dataOpts=$(this).data("sm-options")||null;if(dataOpts)try{dataOpts=eval("("+dataOpts+")")}catch(e){dataOpts=null,alert('ERROR\n\nSmartMenus jQuery init:\nInvalid "data-sm-options" attribute value syntax.')}new $.SmartMenus(this,$.extend({},$.fn.smartmenus.defaults,options,dataOpts))})},$.fn.smartmenus.defaults={isPopup:!1,mainMenuSubOffsetX:0,mainMenuSubOffsetY:0,subMenusSubOffsetX:0,subMenusSubOffsetY:0,subMenusMinWidth:"10em",subMenusMaxWidth:"20em",subIndicators:!0,subIndicatorsPos:"append",subIndicatorsText:"",scrollStep:30,scrollAccelerate:!0,showTimeout:250,hideTimeout:500,showDuration:0,showFunction:null,hideDuration:0,hideFunction:function(t,e){t.fadeOut(200,e)},collapsibleShowDuration:0,collapsibleShowFunction:function(t,e){t.slideDown(200,e)},collapsibleHideDuration:0,collapsibleHideFunction:function(t,e){t.slideUp(200,e)},showOnClick:!1,hideOnClick:!0,noMouseOver:!1,keepInViewport:!0,keepHighlighted:!0,markCurrentItem:!1,markCurrentTree:!0,rightToLeftSubMenus:!1,bottomToTopSubMenus:!1,collapsibleBehavior:"default"},$}); \ No newline at end of file diff --git a/web/html/doc/json_ofstream_8hpp.html b/web/html/doc/json_ofstream_8hpp.html new file mode 100644 index 000000000..9ade66af5 --- /dev/null +++ b/web/html/doc/json_ofstream_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/filestream/jsonOfstream.hpp File Reference + + + + + + + + + + + + + +
+
jsonOfstream.hpp File Reference

#include <fstream>
#include <openpose/core/common.hpp>

Go to the source code of this file.

Classes
    class op::JsonOfstream

Namespaces
    op
+
+ + + + diff --git a/web/html/doc/json_ofstream_8hpp_source.html b/web/html/doc/json_ofstream_8hpp_source.html new file mode 100644 index 000000000..57a5e040c --- /dev/null +++ b/web/html/doc/json_ofstream_8hpp_source.html @@ -0,0 +1,174 @@ + + + + + + + +OpenPose: include/openpose/filestream/jsonOfstream.hpp Source File + + + + + + + + + + + + + +
+
jsonOfstream.hpp

Go to the documentation of this file.

#ifndef OPENPOSE_FILESTREAM_JSON_OFSTREAM_HPP
#define OPENPOSE_FILESTREAM_JSON_OFSTREAM_HPP

#include <fstream> // std::ofstream
#include <openpose/core/common.hpp>

namespace op
{
    class OP_API JsonOfstream
    {
    public:
        explicit JsonOfstream(const std::string& filePath, const bool humanReadable = true);

        JsonOfstream(JsonOfstream&& jsonOfstream);

        JsonOfstream& operator=(JsonOfstream&& jsonOfstream);

        virtual ~JsonOfstream();

        void objectOpen();

        void objectClose();

        void arrayOpen();

        void arrayClose();

        void version(const std::string& version);

        void key(const std::string& string);

        template <typename T>
        inline void plainText(const T& value)
        {
            *upOfstream << value;
        }

        inline void comma()
        {
            *upOfstream << ",";
        }

        void enter();

    private:
        bool mHumanReadable;
        long long mBracesCounter;
        long long mBracketsCounter;
        std::unique_ptr<std::ofstream> upOfstream; // std::unique_ptr to solve std::move issue in GCC < 5

        DELETE_COPY(JsonOfstream);
    };
}

#endif // OPENPOSE_FILESTREAM_JSON_OFSTREAM_HPP
+
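As a minimal usage sketch of the JsonOfstream interface declared above (the output path and values are illustrative, and the exact formatting of the generated JSON is handled internally by the class):

```cpp
#include <openpose/filestream/jsonOfstream.hpp>

int main()
{
    // Roughly produces {"version":1.3,"people":0}; whitespace depends on humanReadable
    op::JsonOfstream json("output.json", /*humanReadable*/ true);
    json.objectOpen();
    json.version("1.3");   // emits the version entry
    json.comma();
    json.key("people");
    json.plainText(0);     // raw templated insertion into the underlying stream
    json.objectClose();
    return 0;
}   // the destructor flushes and closes the file
```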
+ + + + diff --git a/web/html/doc/keep_top_n_people_8hpp.html b/web/html/doc/keep_top_n_people_8hpp.html new file mode 100644 index 000000000..c84ecc2ad --- /dev/null +++ b/web/html/doc/keep_top_n_people_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/core/keepTopNPeople.hpp File Reference + + + + + + + + + + + + + +
+
keepTopNPeople.hpp File Reference

#include <openpose/core/common.hpp>

Go to the source code of this file.

Classes
    class op::KeepTopNPeople

Namespaces
    op
+
+ + + + diff --git a/web/html/doc/keep_top_n_people_8hpp_source.html b/web/html/doc/keep_top_n_people_8hpp_source.html new file mode 100644 index 000000000..03ccc9825 --- /dev/null +++ b/web/html/doc/keep_top_n_people_8hpp_source.html @@ -0,0 +1,131 @@ + + + + + + + +OpenPose: include/openpose/core/keepTopNPeople.hpp Source File + + + + + + + + + + + + + +
+
keepTopNPeople.hpp

Go to the documentation of this file.

#ifndef OPENPOSE_CORE_KEEP_TOP_N_PEOPLE_HPP
#define OPENPOSE_CORE_KEEP_TOP_N_PEOPLE_HPP

#include <openpose/core/common.hpp>

namespace op
{
    class OP_API KeepTopNPeople
    {
    public:
        explicit KeepTopNPeople(const int numberPeopleMax);

        virtual ~KeepTopNPeople();

        Array<float> keepTopPeople(const Array<float>& peopleArrays, const Array<float>& poseScores) const;

    private:
        const int mNumberPeopleMax;
    };
}

#endif // OPENPOSE_CORE_KEEP_TOP_N_PEOPLE_HPP
+
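A short sketch of how the filter above might be called, assuming poseKeypoints is the usual {people x parts x 3} op::Array<float> and poseScores holds one score per person:

```cpp
#include <openpose/core/keepTopNPeople.hpp>

// Return a copy of poseKeypoints that keeps at most the 2 best-scored people.
op::Array<float> keepTwoBestPeople(const op::Array<float>& poseKeypoints,
                                   const op::Array<float>& poseScores)
{
    const op::KeepTopNPeople keepTopNPeople(2);
    return keepTopNPeople.keepTopPeople(poseKeypoints, poseScores);
}
```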
+ + + + diff --git a/web/html/doc/keypoint_8hpp.html b/web/html/doc/keypoint_8hpp.html new file mode 100644 index 000000000..af65f055e --- /dev/null +++ b/web/html/doc/keypoint_8hpp.html @@ -0,0 +1,167 @@ + + + + + + + +OpenPose: include/openpose/utilities/keypoint.hpp File Reference + + + + + + + + + + + + + +
+
keypoint.hpp File Reference

#include <openpose/core/common.hpp>

Go to the source code of this file.

Namespaces
    op

Functions (each one templated on typename T)
    T op::getDistance (const Array<T> &keypoints, const int person, const int elementA, const int elementB)
    void op::averageKeypoints (Array<T> &keypointsA, const Array<T> &keypointsB, const int personA)
    void op::scaleKeypoints (Array<T> &keypoints, const T scale)
    void op::scaleKeypoints2d (Array<T> &keypoints, const T scaleX, const T scaleY)
    void op::scaleKeypoints2d (Array<T> &keypoints, const T scaleX, const T scaleY, const T offsetX, const T offsetY)
    void op::renderKeypointsCpu (Array<T> &frameArray, const Array<T> &keypoints, const std::vector<unsigned int> &pairs, const std::vector<T> colors, const T thicknessCircleRatio, const T thicknessLineRatioWRTCircle, const std::vector<T> &poseScales, const T threshold)
    Rectangle<T> op::getKeypointsRectangle (const Array<T> &keypoints, const int person, const T threshold, const int firstIndex=0, const int lastIndex=-1)
    T op::getAverageScore (const Array<T> &keypoints, const int person)
    T op::getKeypointsArea (const Array<T> &keypoints, const int person, const T threshold)
    int op::getBiggestPerson (const Array<T> &keypoints, const T threshold)
    int op::getNonZeroKeypoints (const Array<T> &keypoints, const int person, const T threshold)
    T op::getDistanceAverage (const Array<T> &keypoints, const int personA, const int personB, const T threshold)
    T op::getDistanceAverage (const Array<T> &keypointsA, const int personA, const Array<T> &keypointsB, const int personB, const T threshold)
    Array<T> op::getKeypointsPerson (const Array<T> &keypoints, const int person, const bool noCopy=false)
    float op::getKeypointsRoi (const Array<T> &keypoints, const int personA, const int personB, const T threshold)
    float op::getKeypointsRoi (const Array<T> &keypointsA, const int personA, const Array<T> &keypointsB, const int personB, const T threshold)
    float op::getKeypointsRoi (const Rectangle<T> &rectangleA, const Rectangle<T> &rectangleB)
+
+ + + + diff --git a/web/html/doc/keypoint_8hpp.js b/web/html/doc/keypoint_8hpp.js new file mode 100644 index 000000000..b78bfa33b --- /dev/null +++ b/web/html/doc/keypoint_8hpp.js @@ -0,0 +1,20 @@ +var keypoint_8hpp = +[ + [ "averageKeypoints", "keypoint_8hpp.html#a1f931e210eb575a084b8e6f462b0b382", null ], + [ "getAverageScore", "keypoint_8hpp.html#a1110f4c0017c43ea1d0896a3225c55f8", null ], + [ "getBiggestPerson", "keypoint_8hpp.html#ace4af20d19066df9ec502c5a09097c24", null ], + [ "getDistance", "keypoint_8hpp.html#ac968b1c98c60b74be78225be27805706", null ], + [ "getDistanceAverage", "keypoint_8hpp.html#acf638f00b0a825c05683f8e23942a9d5", null ], + [ "getDistanceAverage", "keypoint_8hpp.html#aa053f4b0533d9e981aa171a1ef57fc30", null ], + [ "getKeypointsArea", "keypoint_8hpp.html#a1dd5dde18458975a36bdbd6dd38720a2", null ], + [ "getKeypointsPerson", "keypoint_8hpp.html#a75411d98f69051860379730e16103178", null ], + [ "getKeypointsRectangle", "keypoint_8hpp.html#ac74cba4141f2bee2b9d94dc171029a73", null ], + [ "getKeypointsRoi", "keypoint_8hpp.html#a6913c67141fcbbba84fc88ac8a45aa0f", null ], + [ "getKeypointsRoi", "keypoint_8hpp.html#ac9af122ccd8dcdafb11e37b6633245b4", null ], + [ "getKeypointsRoi", "keypoint_8hpp.html#a36296ff5a5945244c5131e3ae16057e1", null ], + [ "getNonZeroKeypoints", "keypoint_8hpp.html#aa9366cf1b4ac3494965749eeb5537da1", null ], + [ "renderKeypointsCpu", "keypoint_8hpp.html#aa7803aa62abc21471e7d966bd674a81a", null ], + [ "scaleKeypoints", "keypoint_8hpp.html#ac5fc565b24e499e306ca170b9139eeb6", null ], + [ "scaleKeypoints2d", "keypoint_8hpp.html#aae9e38fa6c56e188b4f649732f0d4cd3", null ], + [ "scaleKeypoints2d", "keypoint_8hpp.html#a6b9adf8f7e698e566414c9f44f0c85f1", null ] +]; \ No newline at end of file diff --git a/web/html/doc/keypoint_8hpp_source.html b/web/html/doc/keypoint_8hpp_source.html new file mode 100644 index 000000000..8a5c85289 --- /dev/null +++ b/web/html/doc/keypoint_8hpp_source.html @@ -0,0 +1,188 @@ + + + + + + + +OpenPose: include/openpose/utilities/keypoint.hpp Source File + + + + + + + + + + + + + +
+
+ +
+ +
+
+
keypoint.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_UTILITIES_KEYPOINT_HPP
+
2 #define OPENPOSE_UTILITIES_KEYPOINT_HPP
+
3 
+ +
5 
+
6 namespace op
+
7 {
+
8  template <typename T>
+
9  T getDistance(const Array<T>& keypoints, const int person, const int elementA, const int elementB);
+
10 
+
11  template <typename T>
+
12  void averageKeypoints(Array<T>& keypointsA, const Array<T>& keypointsB, const int personA);
+
13 
+
14  template <typename T>
+
15  void scaleKeypoints(Array<T>& keypoints, const T scale);
+
16 
+
17  template <typename T>
+
18  void scaleKeypoints2d(Array<T>& keypoints, const T scaleX, const T scaleY);
+
19 
+
20  template <typename T>
+
21  void scaleKeypoints2d(Array<T>& keypoints, const T scaleX, const T scaleY, const T offsetX, const T offsetY);
+
22 
+
23  template <typename T>
+ +
25  Array<T>& frameArray, const Array<T>& keypoints, const std::vector<unsigned int>& pairs,
+
26  const std::vector<T> colors, const T thicknessCircleRatio, const T thicknessLineRatioWRTCircle,
+
27  const std::vector<T>& poseScales, const T threshold);
+
28 
+
29  template <typename T>
+ +
31  const Array<T>& keypoints, const int person, const T threshold, const int firstIndex = 0,
+
32  const int lastIndex = -1);
+
33 
+
34  template <typename T>
+
35  T getAverageScore(const Array<T>& keypoints, const int person);
+
36 
+
37  template <typename T>
+
38  T getKeypointsArea(const Array<T>& keypoints, const int person, const T threshold);
+
39 
+
40  template <typename T>
+
41  int getBiggestPerson(const Array<T>& keypoints, const T threshold);
+
42 
+
43  template <typename T>
+
44  int getNonZeroKeypoints(const Array<T>& keypoints, const int person, const T threshold);
+
45 
+
46  template <typename T>
+
47  T getDistanceAverage(const Array<T>& keypoints, const int personA, const int personB, const T threshold);
+
48 
+
49  template <typename T>
+ +
51  const Array<T>& keypointsA, const int personA, const Array<T>& keypointsB, const int personB,
+
52  const T threshold);
+
53 
+
66  template <typename T>
+
67  Array<T> getKeypointsPerson(const Array<T>& keypoints, const int person, const bool noCopy = false);
+
68 
+
69  template <typename T>
+
70  float getKeypointsRoi(const Array<T>& keypoints, const int personA, const int personB, const T threshold);
+
71 
+
72  template <typename T>
+ +
74  const Array<T>& keypointsA, const int personA, const Array<T>& keypointsB, const int personB,
+
75  const T threshold);
+
76 
+
77  template <typename T>
+ +
79  const Rectangle<T>& rectangleA, const Rectangle<T>& rectangleB);
+
80 }
+
81 
+
82 #endif // OPENPOSE_UTILITIES_KEYPOINT_HPP
+ + + +
T getAverageScore(const Array< T > &keypoints, const int person)
+
T getKeypointsArea(const Array< T > &keypoints, const int person, const T threshold)
+
void averageKeypoints(Array< T > &keypointsA, const Array< T > &keypointsB, const int personA)
+
float getKeypointsRoi(const Array< T > &keypoints, const int personA, const int personB, const T threshold)
+
Array< T > getKeypointsPerson(const Array< T > &keypoints, const int person, const bool noCopy=false)
+
void renderKeypointsCpu(Array< T > &frameArray, const Array< T > &keypoints, const std::vector< unsigned int > &pairs, const std::vector< T > colors, const T thicknessCircleRatio, const T thicknessLineRatioWRTCircle, const std::vector< T > &poseScales, const T threshold)
+
int getNonZeroKeypoints(const Array< T > &keypoints, const int person, const T threshold)
+
void scaleKeypoints2d(Array< T > &keypoints, const T scaleX, const T scaleY)
+
void scaleKeypoints(Array< T > &keypoints, const T scale)
+
Rectangle< T > getKeypointsRectangle(const Array< T > &keypoints, const int person, const T threshold, const int firstIndex=0, const int lastIndex=-1)
+
T getDistance(const Array< T > &keypoints, const int person, const int elementA, const int elementB)
+
int getBiggestPerson(const Array< T > &keypoints, const T threshold)
+
T getDistanceAverage(const Array< T > &keypoints, const int personA, const int personB, const T threshold)
+ +
+
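A hedged sketch combining a few of the utilities above, assuming the usual {people x parts x 3 (x, y, score)} keypoint layout and that getBiggestPerson() returns a negative index when no person passes the threshold:

```cpp
#include <openpose/utilities/keypoint.hpp>

// Select the largest detected person and rescale all keypoints by 0.5 in x and y.
void processKeypoints(op::Array<float>& poseKeypoints)
{
    const float threshold = 0.05f;  // illustrative confidence threshold
    const int biggestPerson = op::getBiggestPerson(poseKeypoints, threshold);
    if (biggestPerson >= 0)
    {
        // Bounding rectangle and number of confident keypoints of that person
        const op::Rectangle<float> rectangle =
            op::getKeypointsRectangle(poseKeypoints, biggestPerson, threshold);
        const int visibleParts = op::getNonZeroKeypoints(poseKeypoints, biggestPerson, threshold);
        UNUSED(rectangle);
        UNUSED(visibleParts);
    }
    // E.g., map from net-output resolution to a half-size display resolution
    op::scaleKeypoints2d(poseKeypoints, 0.5f, 0.5f);
}
```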
+ + + + diff --git a/web/html/doc/keypoint_saver_8hpp.html b/web/html/doc/keypoint_saver_8hpp.html new file mode 100644 index 000000000..efdf974fd --- /dev/null +++ b/web/html/doc/keypoint_saver_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/filestream/keypointSaver.hpp File Reference + + + + + + + + + + + + + +
+
keypointSaver.hpp File Reference

#include <openpose/core/common.hpp>
#include <openpose/filestream/enumClasses.hpp>
#include <openpose/filestream/fileSaver.hpp>

Go to the source code of this file.

Classes
    class op::KeypointSaver

Namespaces
    op
+
+ + + + diff --git a/web/html/doc/keypoint_saver_8hpp_source.html b/web/html/doc/keypoint_saver_8hpp_source.html new file mode 100644 index 000000000..523f29d7f --- /dev/null +++ b/web/html/doc/keypoint_saver_8hpp_source.html @@ -0,0 +1,138 @@ + + + + + + + +OpenPose: include/openpose/filestream/keypointSaver.hpp Source File + + + + + + + + + + + + + +
+
keypointSaver.hpp

Go to the documentation of this file.

#ifndef OPENPOSE_FILESTREAM_KEYPOINT_SAVER_HPP
#define OPENPOSE_FILESTREAM_KEYPOINT_SAVER_HPP

#include <openpose/core/common.hpp>
#include <openpose/filestream/enumClasses.hpp>
#include <openpose/filestream/fileSaver.hpp>

namespace op
{
    class OP_API KeypointSaver : public FileSaver
    {
    public:
        KeypointSaver(const std::string& directoryPath, const DataFormat format);

        virtual ~KeypointSaver();

        void saveKeypoints(const std::vector<Array<float>>& keypointVector, const std::string& fileName,
                           const std::string& keypointName) const;

    private:
        const DataFormat mFormat;
    };
}

#endif // OPENPOSE_FILESTREAM_KEYPOINT_SAVER_HPP
+
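A sketch of saving one frame's keypoints with the class above; it assumes the DataFormat enumeration from openpose/filestream/enumClasses.hpp provides a Json value and that the output directory already exists:

```cpp
#include <openpose/filestream/keypointSaver.hpp>

// Store the pose keypoints of a single frame under the output/ directory.
void savePoseKeypoints(const op::Array<float>& poseKeypoints)
{
    const op::KeypointSaver keypointSaver("output/", op::DataFormat::Json);
    const std::vector<op::Array<float>> keypointVector{poseKeypoints};
    keypointSaver.saveKeypoints(keypointVector, "frame_000000", "pose");
}
```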
+ + + + diff --git a/web/html/doc/keypoint_scaler_8hpp.html b/web/html/doc/keypoint_scaler_8hpp.html new file mode 100644 index 000000000..aafdf7b6b --- /dev/null +++ b/web/html/doc/keypoint_scaler_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/core/keypointScaler.hpp File Reference + + + + + + + + + + + + + +
+
keypointScaler.hpp File Reference

#include <openpose/core/common.hpp>
#include <openpose/core/enumClasses.hpp>

Go to the source code of this file.

Classes
    class op::KeypointScaler

Namespaces
    op
+
+ + + + diff --git a/web/html/doc/keypoint_scaler_8hpp_source.html b/web/html/doc/keypoint_scaler_8hpp_source.html new file mode 100644 index 000000000..8a6e0bef2 --- /dev/null +++ b/web/html/doc/keypoint_scaler_8hpp_source.html @@ -0,0 +1,144 @@ + + + + + + + +OpenPose: include/openpose/core/keypointScaler.hpp Source File + + + + + + + + + + + + + +
+
keypointScaler.hpp

Go to the documentation of this file.

#ifndef OPENPOSE_CORE_KEYPOINT_SCALER_HPP
#define OPENPOSE_CORE_KEYPOINT_SCALER_HPP

#include <openpose/core/common.hpp>
#include <openpose/core/enumClasses.hpp>

namespace op
{
    class OP_API KeypointScaler
    {
    public:
        explicit KeypointScaler(const ScaleMode scaleMode);

        virtual ~KeypointScaler();

        void scale(Array<float>& arrayToScale, const double scaleInputToOutput, const double scaleNetToOutput,
                   const Point<int>& producerSize) const;

        void scale(std::vector<Array<float>>& arraysToScale, const double scaleInputToOutput,
                   const double scaleNetToOutput, const Point<int>& producerSize) const;

        void scale(std::vector<std::vector<std::array<float,3>>>& poseCandidates, const double scaleInputToOutput,
                   const double scaleNetToOutput, const Point<int>& producerSize) const;

    private:
        const ScaleMode mScaleMode;
    };
}

#endif // OPENPOSE_CORE_KEYPOINT_SCALER_HPP
+
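A sketch of the scaler above, assuming op::ScaleMode (from openpose/core/enumClasses.hpp) offers an InputResolution value and that the scale factors come from the resize stage of the pipeline:

```cpp
#include <openpose/core/keypointScaler.hpp>

// Map keypoints from net-output coordinates back to the original input resolution.
void scaleToInputResolution(op::Array<float>& poseKeypoints,
                            const double scaleInputToOutput,
                            const double scaleNetToOutput,
                            const op::Point<int>& producerSize)
{
    const op::KeypointScaler keypointScaler(op::ScaleMode::InputResolution);
    keypointScaler.scale(poseKeypoints, scaleInputToOutput, scaleNetToOutput, producerSize);
}
```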
+ + + + diff --git a/web/html/doc/macros_8hpp.html b/web/html/doc/macros_8hpp.html new file mode 100644 index 000000000..a6e200c3a --- /dev/null +++ b/web/html/doc/macros_8hpp.html @@ -0,0 +1,518 @@ + + + + + + + +OpenPose: include/openpose/core/macros.hpp File Reference + + + + + + + + + + + + + +
+
macros.hpp File Reference

#include <chrono>
#include <memory>
#include <ostream>
#include <string>
#include <thread>
#include <vector>

Go to the source code of this file.

Macros
    #define OP_API
    #define UNUSED(unusedVariable)   (void)(unusedVariable)
    #define DELETE_COPY(className)
    #define COMPILE_TEMPLATE_BASIC_TYPES_CLASS(className)   COMPILE_TEMPLATE_BASIC_TYPES(className, class)
    #define COMPILE_TEMPLATE_BASIC_TYPES_STRUCT(className)   COMPILE_TEMPLATE_BASIC_TYPES(className, struct)
    #define COMPILE_TEMPLATE_BASIC_TYPES(className, classType)
    #define COMPILE_TEMPLATE_FLOATING_TYPES_CLASS(className)   COMPILE_TEMPLATE_FLOATING_TYPES(className, class)
    #define COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT(className)   COMPILE_TEMPLATE_FLOATING_TYPES(className, struct)
    #define COMPILE_TEMPLATE_FLOATING_TYPES(className, classType)
    #define COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS(className)   COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, class)
    #define COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT(className)   COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, struct)
    #define COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, classType)
    #define OVERLOAD_C_OUT(className)

Variables
    const std::string OPEN_POSE_NAME_STRING = "OpenPose"
    const std::string OPEN_POSE_VERSION_STRING = "1.7.0"
    const std::string OPEN_POSE_NAME_AND_VERSION = OPEN_POSE_NAME_STRING + " " + OPEN_POSE_VERSION_STRING

Macro Definition Documentation

#define COMPILE_TEMPLATE_BASIC_TYPES(className, classType)
Value:
    template classType OP_API className<char>; \
    template classType OP_API className<signed char>; \
    template classType OP_API className<short>; \
    template classType OP_API className<int>; \
    template classType OP_API className<long>; \
    template classType OP_API className<long long>; \
    template classType OP_API className<unsigned char>; \
    template classType OP_API className<unsigned short>; \
    template classType OP_API className<unsigned int>; \
    template classType OP_API className<unsigned long>; \
    template classType OP_API className<unsigned long long>; \
    template classType OP_API className<float>; \
    template classType OP_API className<double>; \
    template classType OP_API className<long double>
Definition at line 39 of file macros.hpp.

#define COMPILE_TEMPLATE_BASIC_TYPES_CLASS(className)   COMPILE_TEMPLATE_BASIC_TYPES(className, class)
Definition at line 37 of file macros.hpp.

#define COMPILE_TEMPLATE_BASIC_TYPES_STRUCT(className)   COMPILE_TEMPLATE_BASIC_TYPES(className, struct)
Definition at line 38 of file macros.hpp.

#define COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, classType)
Value:
    char gInstantiationGuard##className; \
    template classType OP_API className<int>; \
    template classType OP_API className<unsigned int>; \
    template classType OP_API className<float>; \
    template classType OP_API className<double>
Definition at line 66 of file macros.hpp.

#define COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS(className)   COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, class)
Definition at line 64 of file macros.hpp.

#define COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT(className)   COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, struct)
Definition at line 65 of file macros.hpp.

#define COMPILE_TEMPLATE_FLOATING_TYPES(className, classType)
Value:
    char gInstantiationGuard##className; \
    template classType OP_API className<float>; \
    template classType OP_API className<double>
Definition at line 58 of file macros.hpp.

#define COMPILE_TEMPLATE_FLOATING_TYPES_CLASS(className)   COMPILE_TEMPLATE_FLOATING_TYPES(className, class)
Definition at line 56 of file macros.hpp.

#define COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT(className)   COMPILE_TEMPLATE_FLOATING_TYPES(className, struct)
Definition at line 57 of file macros.hpp.

#define DELETE_COPY(className)
Value:
    className(const className&) = delete; \
    className& operator=(const className&) = delete
Definition at line 32 of file macros.hpp.

#define OP_API
Definition at line 18 of file macros.hpp.

#define OVERLOAD_C_OUT(className)
Value:
    template<typename T> std::ostream &operator<<(std::ostream& ostream, const op::className<T>& obj) \
    { \
        ostream << obj.toString(); \
        return ostream; \
    }
cout operator overload calling toString() function.
Returns: std::ostream containing output from toString().
Definition at line 77 of file macros.hpp.

#define UNUSED(unusedVariable)   (void)(unusedVariable)
Definition at line 30 of file macros.hpp.

Variable Documentation

const std::string OPEN_POSE_NAME_AND_VERSION = OPEN_POSE_NAME_STRING + " " + OPEN_POSE_VERSION_STRING
Definition at line 14 of file macros.hpp.

const std::string OPEN_POSE_NAME_STRING = "OpenPose"
Definition at line 12 of file macros.hpp.

const std::string OPEN_POSE_VERSION_STRING = "1.7.0"
Definition at line 13 of file macros.hpp.
+
+ + + + diff --git a/web/html/doc/macros_8hpp.js b/web/html/doc/macros_8hpp.js new file mode 100644 index 000000000..58f99ac74 --- /dev/null +++ b/web/html/doc/macros_8hpp.js @@ -0,0 +1,19 @@ +var macros_8hpp = +[ + [ "COMPILE_TEMPLATE_BASIC_TYPES", "macros_8hpp.html#a6bf32c65e0f388d5b09d8b2424416c0e", null ], + [ "COMPILE_TEMPLATE_BASIC_TYPES_CLASS", "macros_8hpp.html#a60e010d8a2352d94b8b57d97cf4a7d73", null ], + [ "COMPILE_TEMPLATE_BASIC_TYPES_STRUCT", "macros_8hpp.html#ac5627744abe5fd0c8eacfe9c7f8bd32e", null ], + [ "COMPILE_TEMPLATE_FLOATING_INT_TYPES", "macros_8hpp.html#ad0aef3afcb2a9da69c3453426f56b0ac", null ], + [ "COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS", "macros_8hpp.html#a7bacf9f65110ec8292bc69e1eb0f426e", null ], + [ "COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT", "macros_8hpp.html#acc5af19a77b18cf5aa2e1f82e2e484dd", null ], + [ "COMPILE_TEMPLATE_FLOATING_TYPES", "macros_8hpp.html#a80404791b46a15fd601feaa11f1e5028", null ], + [ "COMPILE_TEMPLATE_FLOATING_TYPES_CLASS", "macros_8hpp.html#a1eadbb31e92e7fbc799bf7cf4d2a6f50", null ], + [ "COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT", "macros_8hpp.html#af9fed593b7a4237bc6ede717a1ae70f0", null ], + [ "DELETE_COPY", "macros_8hpp.html#abef96b5dd35dd9d44ad27ddf0e2f5f2e", null ], + [ "OP_API", "macros_8hpp.html#a4ba443bb7a0e5dbe8054a9ac37a5e000", null ], + [ "OVERLOAD_C_OUT", "macros_8hpp.html#aa883b8ec96d2804b37d3bfb0bd4c5f16", null ], + [ "UNUSED", "macros_8hpp.html#af57a843cfdae82e064838c20b3b54851", null ], + [ "OPEN_POSE_NAME_AND_VERSION", "macros_8hpp.html#adcf24c45000a4f44f355f1cc3062ea49", null ], + [ "OPEN_POSE_NAME_STRING", "macros_8hpp.html#afda430d83b9513af7270f1d680bf5471", null ], + [ "OPEN_POSE_VERSION_STRING", "macros_8hpp.html#a7de0b663a3aa8043a346ebf2c411bda3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/macros_8hpp_source.html b/web/html/doc/macros_8hpp_source.html new file mode 100644 index 000000000..91765da3d --- /dev/null +++ b/web/html/doc/macros_8hpp_source.html @@ -0,0 +1,188 @@ + + + + + + + +OpenPose: include/openpose/core/macros.hpp Source File + + + + + + + + + + + + + +
+
+ +
+ +
+
+
macros.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_MACROS_HPP
+
2 #define OPENPOSE_CORE_MACROS_HPP
+
3 
+
4 #include <chrono> // std::chrono:: functionaligy, e.g., std::chrono::milliseconds
+
5 #include <memory> // std::shared_ptr
+
6 #include <ostream>
+
7 #include <string>
+
8 #include <thread> // std::this_thread
+
9 #include <vector>
+
10 
+
11 // OpenPose name and version
+
12 const std::string OPEN_POSE_NAME_STRING = "OpenPose";
+
13 const std::string OPEN_POSE_VERSION_STRING = "1.7.0";
+ +
15 // #define COMMERCIAL_LICENSE
+
16 
+
17 #ifndef _WIN32
+
18  #define OP_API
+
19 #elif defined OP_EXPORTS
+
20  #define OP_API __declspec(dllexport)
+
21 #else
+
22  #define OP_API __declspec(dllimport)
+
23 #endif
+
24 
+
25 // Disable some Windows Warnings
+
26 #ifdef _WIN32
+
27  #pragma warning(disable: 4251) // 'XXX': class 'YYY' needs to have dll-interface to be used by clients of class 'ZZZ'
+
28 #endif
+
29 
+
30 #define UNUSED(unusedVariable) (void)(unusedVariable)
+
31 
+
32 #define DELETE_COPY(className) \
+
33  className(const className&) = delete; \
+
34  className& operator=(const className&) = delete
+
35 
+
36 // Instantiate a class with all the basic types
+
37 #define COMPILE_TEMPLATE_BASIC_TYPES_CLASS(className) COMPILE_TEMPLATE_BASIC_TYPES(className, class)
+
38 #define COMPILE_TEMPLATE_BASIC_TYPES_STRUCT(className) COMPILE_TEMPLATE_BASIC_TYPES(className, struct)
+
39 #define COMPILE_TEMPLATE_BASIC_TYPES(className, classType) \
+
40  template classType OP_API className<char>; \
+
41  template classType OP_API className<signed char>; \
+
42  template classType OP_API className<short>; \
+
43  template classType OP_API className<int>; \
+
44  template classType OP_API className<long>; \
+
45  template classType OP_API className<long long>; \
+
46  template classType OP_API className<unsigned char>; \
+
47  template classType OP_API className<unsigned short>; \
+
48  template classType OP_API className<unsigned int>; \
+
49  template classType OP_API className<unsigned long>; \
+
50  template classType OP_API className<unsigned long long>; \
+
51  template classType OP_API className<float>; \
+
52  template classType OP_API className<double>; \
+
53  template classType OP_API className<long double>
+
54 
+
55 // Instantiate a class with float and double specifications
+
56 #define COMPILE_TEMPLATE_FLOATING_TYPES_CLASS(className) COMPILE_TEMPLATE_FLOATING_TYPES(className, class)
+
57 #define COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT(className) COMPILE_TEMPLATE_FLOATING_TYPES(className, struct)
+
58 #define COMPILE_TEMPLATE_FLOATING_TYPES(className, classType) \
+
59  char gInstantiationGuard##className; \
+
60  template classType OP_API className<float>; \
+
61  template classType OP_API className<double>
+
62 
+
63 // Instantiate a class with float and double specifications
+
64 #define COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS(className) COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, class)
+
65 #define COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT(className) COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, struct)
+
66 #define COMPILE_TEMPLATE_FLOATING_INT_TYPES(className, classType) \
+
67  char gInstantiationGuard##className; \
+
68  template classType OP_API className<int>; \
+
69  template classType OP_API className<unsigned int>; \
+
70  template classType OP_API className<float>; \
+
71  template classType OP_API className<double>
+
72 
+
77 #define OVERLOAD_C_OUT(className) \
+
78  template<typename T> std::ostream &operator<<(std::ostream& ostream, const op::className<T>& obj) \
+
79  { \
+
80  ostream << obj.toString(); \
+
81  return ostream; \
+
82  }
+
83 
+
84 // PIMPL does not work if function arguments need the 3rd-party class. Alternative:
+
85 // stackoverflow.com/questions/13978775/how-to-avoid-include-dependency-to-external-library?answertab=active#tab-top
+
86 struct dim3;
+
87 
+
88 #endif // OPENPOSE_CORE_MACROS_HPP
+
const std::string OPEN_POSE_VERSION_STRING
Definition: macros.hpp:13
+
const std::string OPEN_POSE_NAME_AND_VERSION
Definition: macros.hpp:14
+
const std::string OPEN_POSE_NAME_STRING
Definition: macros.hpp:12
+
+
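A small, self-contained sketch of how the helper macros above are typically used; MyPair is a made-up example class, not part of OpenPose:

```cpp
#include <string>
#include <openpose/core/macros.hpp>

namespace op   // OVERLOAD_C_OUT hardcodes op::, so the example class lives in that namespace
{
    template <typename T>
    class MyPair   // hypothetical class used only to illustrate the macros
    {
    public:
        MyPair(const T first_, const T second_) : first{first_}, second{second_} {}
        std::string toString() const
            { return "(" + std::to_string(first) + ", " + std::to_string(second) + ")"; }
        T first, second;
        DELETE_COPY(MyPair);   // deletes the copy constructor and copy assignment
    };
}

OVERLOAD_C_OUT(MyPair)         // defines operator<< that forwards to toString()

// COMPILE_TEMPLATE_FLOATING_TYPES_CLASS(MyPair) would go into the .cpp file to
// explicitly instantiate MyPair<float> and MyPair<double>.

int ignoreSecondArgument(const int used, const int unused)
{
    UNUSED(unused);            // silences unused-parameter warnings portably
    return used;
}
```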
+ + + + diff --git a/web/html/doc/matrix_8hpp.html b/web/html/doc/matrix_8hpp.html new file mode 100644 index 000000000..df55d231b --- /dev/null +++ b/web/html/doc/matrix_8hpp.html @@ -0,0 +1,447 @@ + + + + + + + +OpenPose: include/openpose/core/matrix.hpp File Reference + + + + + + + + + + + + + +
+
matrix.hpp File Reference

#include <memory>
#include <openpose/core/macros.hpp>

Go to the source code of this file.

Classes
    class op::Matrix

Namespaces
    op

Macros
    #define OP_OP2CVMAT(opMat)    (*((cv::Mat*)((opMat).getCvMat())))
    #define OP_OP2CVCONSTMAT(opMat)    (*((cv::Mat*)((opMat).getConstCvMat())))
    #define OP_CV2OPMAT(cvMat)    (op::Matrix((void*)&(cvMat)))
    #define OP_CV2OPCONSTMAT(cvMat)    (op::Matrix((const void*)&(cvMat)))
    #define OP_OP2CVVECTORMAT(cvMats, opMats)
    #define OP_CV2OPVECTORMAT(opMats, cvMats)
    #define OP_MAT_VOID_FUNCTION(opMat, function)
    #define OP_CONST_MAT_VOID_FUNCTION(opMat, function)
    #define OP_MAT_RETURN_FUNCTION(outputVariable, opMat, function)
    #define OP_CONST_MAT_RETURN_FUNCTION(outputVariable, opMat, function)

Macro Definition Documentation

#define OP_CONST_MAT_RETURN_FUNCTION(outputVariable, opMat, function)
Value:
    { \
        const cv::Mat cvMat = OP_OP2CVCONSTMAT(opMat); \
        outputVariable = cvMat.function; \
    }
Definition at line 86 of file matrix.hpp.

#define OP_CONST_MAT_VOID_FUNCTION(opMat, function)
Value:
    { \
        const cv::Mat cvMat = OP_OP2CVCONSTMAT(opMat); \
        cvMat.function; \
    }
Definition at line 76 of file matrix.hpp.

#define OP_CV2OPCONSTMAT(cvMat)    (op::Matrix((const void*)&(cvMat)))
Definition at line 34 of file matrix.hpp.

#define OP_CV2OPMAT(cvMat)    (op::Matrix((void*)&(cvMat)))
Definition at line 27 of file matrix.hpp.

#define OP_CV2OPVECTORMAT(opMats, cvMats)
Value:
    std::vector<op::Matrix> opMats; \
    for (auto& cvMat : (cvMats)) \
    { \
        const auto opMat = OP_CV2OPMAT(cvMat); \
        opMats.emplace_back(opMat); \
    }
Definition at line 55 of file matrix.hpp.

#define OP_MAT_RETURN_FUNCTION(outputVariable, opMat, function)
Value:
    { \
        cv::Mat cvMat = OP_OP2CVMAT(cvMat, opMat); \
        outputVariable = cvMat.function; \
    }
Definition at line 81 of file matrix.hpp.

#define OP_MAT_VOID_FUNCTION(opMat, function)
Value:
    { \
        cv::Mat cvMat = OP_OP2CVMAT(cvMat, opMat); \
        cvMat.function; \
    }
Definition at line 71 of file matrix.hpp.

#define OP_OP2CVCONSTMAT(opMat)    (*((cv::Mat*)((opMat).getConstCvMat())))
Definition at line 20 of file matrix.hpp.

#define OP_OP2CVMAT(opMat)    (*((cv::Mat*)((opMat).getCvMat())))
Definition at line 13 of file matrix.hpp.

#define OP_OP2CVVECTORMAT(cvMats, opMats)
Value:
    std::vector<cv::Mat> cvMats; \
    for (auto& opMat : (opMats)) \
    { \
        const auto cvMat = OP_OP2CVCONSTMAT(opMat); \
        cvMats.emplace_back(cvMat); \
    }
Definition at line 42 of file matrix.hpp.
+
+ + + + diff --git a/web/html/doc/matrix_8hpp.js b/web/html/doc/matrix_8hpp.js new file mode 100644 index 000000000..10083b36c --- /dev/null +++ b/web/html/doc/matrix_8hpp.js @@ -0,0 +1,14 @@ +var matrix_8hpp = +[ + [ "Matrix", "classop_1_1_matrix.html", "classop_1_1_matrix" ], + [ "OP_CONST_MAT_RETURN_FUNCTION", "matrix_8hpp.html#adb6fa4cc9ba470382895a448b7cf1257", null ], + [ "OP_CONST_MAT_VOID_FUNCTION", "matrix_8hpp.html#a1b810570f8207983b20ea93e8f9f71a2", null ], + [ "OP_CV2OPCONSTMAT", "matrix_8hpp.html#ad0bd05468e4619f7061bb513fc2cb86d", null ], + [ "OP_CV2OPMAT", "matrix_8hpp.html#a00c8b0a04adbe37ba8b6d08e0ba23287", null ], + [ "OP_CV2OPVECTORMAT", "matrix_8hpp.html#ad2790de0442f8b1a303b781ffe171c6e", null ], + [ "OP_MAT_RETURN_FUNCTION", "matrix_8hpp.html#a1a8232a2c14792f9315d85004973c33c", null ], + [ "OP_MAT_VOID_FUNCTION", "matrix_8hpp.html#a2bab8a00953b4ba71a8b965347f7dd92", null ], + [ "OP_OP2CVCONSTMAT", "matrix_8hpp.html#a1c9288885fc29db5560426556d3fba41", null ], + [ "OP_OP2CVMAT", "matrix_8hpp.html#af06d0e620916e1f08ca609fb02f25dc8", null ], + [ "OP_OP2CVVECTORMAT", "matrix_8hpp.html#a1a8d8a14fa0269d045f8d8c8228098af", null ] +]; \ No newline at end of file diff --git a/web/html/doc/matrix_8hpp_source.html b/web/html/doc/matrix_8hpp_source.html new file mode 100644 index 000000000..275a3c70c --- /dev/null +++ b/web/html/doc/matrix_8hpp_source.html @@ -0,0 +1,278 @@ + + + + + + + +OpenPose: include/openpose/core/matrix.hpp Source File + + + + + + + + + + + + + +
+
+ +
+ +
+
+
matrix.hpp
+
+
+Go to the documentation of this file.
1 #ifndef OPENPOSE_CORE_MAT_HPP
+
2 #define OPENPOSE_CORE_MAT_HPP
+
3 
+
4 #include <memory> // std::shared_ptr
+ +
6 
+
7 namespace op
+
8 {
+
9  // Convert from Matrix into cv::Mat. Usage example:
+
10  // #include <opencv2/core/core.hpp>
+
11  // ...
+
12  // cv::Mat opMat = OP2CVMAT(cv::Mat());
+
13  #define OP_OP2CVMAT(opMat) \
+
14  (*((cv::Mat*)((opMat).getCvMat())))
+
15 
+
16  // Convert from Matrix into const cv::Mat. Usage example:
+
17  // #include <opencv2/core/core.hpp>
+
18  // ...
+
19  // cv::Mat opMat = OP2CVCONSTMAT(cv::Mat());
+
20  #define OP_OP2CVCONSTMAT(opMat) \
+
21  (*((cv::Mat*)((opMat).getConstCvMat())))
+
22 
+
23  // Convert from cv::Mat into Matrix. Usage example:
+
24  // #include <opencv2/core/core.hpp>
+
25  // ...
+
26  // Matrix opMat = CV2OPMAT(Matrix());
+
27  #define OP_CV2OPMAT(cvMat) \
+
28  (op::Matrix((void*)&(cvMat)))
+
29 
+
30  // Convert from cv::Mat into const Matrix. Usage example:
+
31  // #include <opencv2/core/core.hpp>
+
32  // ...
+
33  // Matrix opMat = CV2OPCONSTMAT(Matrix());
+
34  #define OP_CV2OPCONSTMAT(cvMat) \
+
35  (op::Matrix((const void*)&(cvMat)))
+
36 
+
37  // Convert from std::vector<Matrix> into std::vector<cv::Mat>. Usage example:
+
38  // #include <opencv2/core/core.hpp>
+
39  // ...
+
40  // std::vector<Matrix> opMats; // Assume filled
+
41  // OP_OP2CVVECTORMAT(cvMats, opMats);
+
42  #define OP_OP2CVVECTORMAT(cvMats, opMats) \
+
43  std::vector<cv::Mat> cvMats; \
+
44  for (auto& opMat : (opMats)) \
+
45  { \
+
46  const auto cvMat = OP_OP2CVCONSTMAT(opMat); \
+
47  cvMats.emplace_back(cvMat); \
+
48  }
+
49 
+
50  // Convert from std::vector<cv::Mat> into std::vector<Matrix>. Usage example:
+
51  // #include <opencv2/core/core.hpp>
+
52  // ...
+
53  // std::vector<cv::Mat> cvMats; // Assume filled
+
54  // OP_CV2OPVECTORMAT(opMats, cvMats);
+
55  #define OP_CV2OPVECTORMAT(opMats, cvMats) \
+
56  std::vector<op::Matrix> opMats; \
+
57  for (auto& cvMat : (cvMats)) \
+
58  { \
+
59  const auto opMat = OP_CV2OPMAT(cvMat); \
+
60  opMats.emplace_back(opMat); \
+
61  }
+
62 
+
63  // Convert from std::vector<cv::Mat> into std::vector<Matrix>. Usage example:
+
64  // #include <opencv2/core/core.hpp>
+
65  // ...
+
66  // // Equivalents:
+
67  // OP_CV_VOID_FUNCTION(opMat, size());
+
68  // // and
+
69  // OP_OP2CVMAT(cvMat, opMat);
+
70  // cvMat.size();
+
71  #define OP_MAT_VOID_FUNCTION(opMat, function) \
+
72  { \
+
73  cv::Mat cvMat = OP_OP2CVMAT(cvMat, opMat); \
+
74  cvMat.function; \
+
75  }
+
76  #define OP_CONST_MAT_VOID_FUNCTION(opMat, function) \
+
77  { \
+
78  const cv::Mat cvMat = OP_OP2CVCONSTMAT(opMat); \
+
79  cvMat.function; \
+
80  }
+
81  #define OP_MAT_RETURN_FUNCTION(outputVariable, opMat, function) \
+
82  { \
+
83  cv::Mat cvMat = OP_OP2CVMAT(cvMat, opMat); \
+
84  outputVariable = cvMat.function; \
+
85  }
+
86  #define OP_CONST_MAT_RETURN_FUNCTION(outputVariable, opMat, function) \
+
87  { \
+
88  const cv::Mat cvMat = OP_OP2CVCONSTMAT(opMat); \
+
89  outputVariable = cvMat.function; \
+
90  }
+
91 
+
95  class OP_API Matrix
+
96  {
+
97  public:
+
105  static void splitCvMatIntoVectorMatrix(std::vector<Matrix>& matrixesResized, const void* const cvMatPtr);
+
106 
+ +
108 
+
113  explicit Matrix(const void* cvMatPtr);
+
114 
+
118  explicit Matrix(const int rows, const int cols, const int type);
+
119 
+
127  explicit Matrix(const int rows, const int cols, const int type, void* cvMatPtr);
+
128 
+
129  Matrix clone() const;
+
130 
+
134  void* getCvMat();
+
135 
+
139  const void* getConstCvMat() const;
+
140 
+
145  unsigned char* data();
+
150  const unsigned char* dataConst() const;
+
156  unsigned char* dataPseudoConst() const;
+
157 
+
161  static Matrix eye(const int rows, const int cols, const int type);
+
165  int cols() const;
+
169  int rows() const;
+
173  int size(const int dimension) const;
+
177  int dims() const;
+
178 
+
182  bool isContinuous() const;
+
183  bool isSubmatrix() const;
+
184  size_t elemSize() const;
+
185  size_t elemSize1() const;
+
186  int type() const;
+
187  int depth() const;
+
188  int channels() const;
+
189  size_t step1(const int i = 0) const;
+
190  bool empty() const;
+
191  size_t total() const;
+
192  int checkVector(const int elemChannels, const int depth = -1, const bool requireContinuous = true) const;
+
193 
+
197  void setTo(const double value);
+
198  void copyTo(Matrix& outputMat) const;
+
199 
+
200  private:
+
201  // PIMPL idiom
+
202  // http://www.cppsamples.com/common-tasks/pimpl.html
+
203  struct ImplMatrix;
+
204  std::shared_ptr<ImplMatrix> spImpl;
+
205  };
+
206 }
+
207 
+
208 #endif // OPENPOSE_CORE_MAT_HPP
+ +
static void splitCvMatIntoVectorMatrix(std::vector< Matrix > &matrixesResized, const void *const cvMatPtr)
+
size_t total() const
+
const void * getConstCvMat() const
+
int rows() const
+
size_t elemSize() const
+
size_t step1(const int i=0) const
+
int channels() const
+
Matrix(const int rows, const int cols, const int type, void *cvMatPtr)
+
void copyTo(Matrix &outputMat) const
+
unsigned char * data()
+
Matrix(const int rows, const int cols, const int type)
+
int checkVector(const int elemChannels, const int depth=-1, const bool requireContinuous=true) const
+
static Matrix eye(const int rows, const int cols, const int type)
+
bool empty() const
+
int size(const int dimension) const
+
void * getCvMat()
+
const unsigned char * dataConst() const
+
bool isSubmatrix() const
+
int dims() const
+
int type() const
+
unsigned char * dataPseudoConst() const
+
Matrix clone() const
+
int cols() const
+
void setTo(const double value)
+ +
int depth() const
+
size_t elemSize1() const
+
bool isContinuous() const
+
Matrix(const void *cvMatPtr)
+ +
#define OP_API
Definition: macros.hpp:18
+ +
+
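Following the usage comments embedded in the header above, a sketch of converting between cv::Mat and op::Matrix (it assumes the OpenCV headers are available and that the macros behave as documented in those comments):

```cpp
#include <opencv2/core/core.hpp>
#include <openpose/core/matrix.hpp>

void convertBetweenOpAndCv()
{
    cv::Mat cvFrame(480, 640, CV_8UC3, cv::Scalar(0, 0, 0));

    // cv::Mat -> op::Matrix and back again
    const op::Matrix opFrame = OP_CV2OPMAT(cvFrame);
    cv::Mat cvAgain = OP_OP2CVCONSTMAT(opFrame);

    // Whole-vector conversions: each macro declares and fills its first-named variable
    std::vector<cv::Mat> cvMats{cvFrame};
    OP_CV2OPVECTORMAT(opMats, cvMats);     // declares std::vector<op::Matrix> opMats
    OP_OP2CVVECTORMAT(cvMatsBack, opMats); // declares std::vector<cv::Mat> cvMatsBack

    UNUSED(cvAgain);
    UNUSED(cvMatsBack);
}
```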
+ + + + diff --git a/web/html/doc/maximum_base_8hpp.html b/web/html/doc/maximum_base_8hpp.html new file mode 100644 index 000000000..3084fa4dd --- /dev/null +++ b/web/html/doc/maximum_base_8hpp.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/net/maximumBase.hpp File Reference + + + + + + + + + + + + + +
+
maximumBase.hpp File Reference

#include <openpose/core/common.hpp>

Go to the source code of this file.

Namespaces
    op

Functions (each one templated on typename T)
    void op::maximumCpu (T *targetPtr, const T *const sourcePtr, const std::array<int, 4> &targetSize, const std::array<int, 4> &sourceSize)
    void op::maximumGpu (T *targetPtr, const T *const sourcePtr, const std::array<int, 4> &targetSize, const std::array<int, 4> &sourceSize)
+
+ + + + diff --git a/web/html/doc/maximum_base_8hpp.js b/web/html/doc/maximum_base_8hpp.js new file mode 100644 index 000000000..ad564dc87 --- /dev/null +++ b/web/html/doc/maximum_base_8hpp.js @@ -0,0 +1,5 @@ +var maximum_base_8hpp = +[ + [ "maximumCpu", "maximum_base_8hpp.html#ae0fea41041a70ae8449a77f46ffe8100", null ], + [ "maximumGpu", "maximum_base_8hpp.html#a8ec109805adf02f9872a6af37d602caa", null ] +]; \ No newline at end of file diff --git a/web/html/doc/maximum_base_8hpp_source.html b/web/html/doc/maximum_base_8hpp_source.html new file mode 100644 index 000000000..04cf7f4aa --- /dev/null +++ b/web/html/doc/maximum_base_8hpp_source.html @@ -0,0 +1,123 @@ + + + + + + + +OpenPose: include/openpose/net/maximumBase.hpp Source File + + + + + + + + + + + + + +
+
maximumBase.hpp

Go to the documentation of this file.

#ifndef OPENPOSE_NET_MAXIMUM_BASE_HPP
#define OPENPOSE_NET_MAXIMUM_BASE_HPP

#include <openpose/core/common.hpp>

namespace op
{
    template <typename T>
    void maximumCpu(T* targetPtr, const T* const sourcePtr, const std::array<int, 4>& targetSize,
                    const std::array<int, 4>& sourceSize);

    // Windows: Cuda functions do not include OP_API
    template <typename T>
    void maximumGpu(T* targetPtr, const T* const sourcePtr, const std::array<int, 4>& targetSize,
                    const std::array<int, 4>& sourceSize);
}

#endif // OPENPOSE_NET_MAXIMUM_BASE_HPP
+
+ + + + diff --git a/web/html/doc/maximum_caffe_8hpp.html b/web/html/doc/maximum_caffe_8hpp.html new file mode 100644 index 000000000..16199a786 --- /dev/null +++ b/web/html/doc/maximum_caffe_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/net/maximumCaffe.hpp File Reference + + + + + + + + + + + + + +
+
maximumCaffe.hpp File Reference

#include <openpose/core/common.hpp>

Go to the source code of this file.

Classes
    class op::MaximumCaffe< T >

Namespaces
    op
+
+ + + + diff --git a/web/html/doc/maximum_caffe_8hpp_source.html b/web/html/doc/maximum_caffe_8hpp_source.html new file mode 100644 index 000000000..14a29182a --- /dev/null +++ b/web/html/doc/maximum_caffe_8hpp_source.html @@ -0,0 +1,158 @@ + + + + + + + +OpenPose: include/openpose/net/maximumCaffe.hpp Source File + + + + + + + + + + + + + +
+
maximumCaffe.hpp

Go to the documentation of this file.

#ifndef OPENPOSE_NET_MAXIMUM_CAFFE_HPP
#define OPENPOSE_NET_MAXIMUM_CAFFE_HPP

#include <openpose/core/common.hpp>

namespace op
{
    // It mostly follows the Caffe::layer implementation, so Caffe users can easily use it. However, in order to keep
    // the compatibility with any generic Caffe version, we keep this 'layer' inside our library rather than in the
    // Caffe code.
    template <typename T>
    class MaximumCaffe
    {
    public:
        explicit MaximumCaffe();

        virtual ~MaximumCaffe();

        virtual void LayerSetUp(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);

        virtual void Reshape(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);

        virtual inline const char* type() const { return "Maximum"; }

        virtual void Forward(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);

        virtual void Forward_cpu(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);

        virtual void Forward_gpu(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);

        virtual void Backward_cpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
                                  const std::vector<ArrayCpuGpu<T>*>& bottom);

        virtual void Backward_gpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
                                  const std::vector<ArrayCpuGpu<T>*>& bottom);

    private:
        std::array<int, 4> mBottomSize;
        std::array<int, 4> mTopSize;
    };
}

#endif // OPENPOSE_NET_MAXIMUM_CAFFE_HPP
+
+ + + + diff --git a/web/html/doc/md_doc_00_index.html b/web/html/doc/md_doc_00_index.html new file mode 100644 index 000000000..b61f54a93 --- /dev/null +++ b/web/html/doc/md_doc_00_index.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: OpenPose Doc + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc
+
+
+

The OpenPose documentation is available in 2 different formats, choose your preferred one!

+

Most users can simply use the OpenPose Demo without reading any C++/Python code. Users that need to add additional functionality (new inputs, outputs, etc) should check the C++/Python APIs:

+
    +
  • If you face issues with any of these steps, remember to check the FAQ section.
  • +
  • The first step for any software: install it!
  • +
  • OpenPose Demo: Choose your input (e.g., images, video, webcam), set of algorithms (body, hand, face), output (e.g., display, JSON keypoint saving, image+keypoints), and run OpenPose from your terminal or PowerShell!
      +
    • E.g.: Given an input video (--video), extract body (by default), face (--face), and hand (--hand) keypoints, save the keypoints in a JSON file (--write_json), and display (by default) the results on the screen. You can remove any of the flags to remove that particular functionality, or add any other.
      # Ubuntu
      +
      ./build/examples/openpose/openpose.bin --video examples/media/video.avi --face --hand --write_json output_json_folder/
      +
      +
      :: Windows - Portable Demo
      +
      bin\OpenPoseDemo.exe --video examples\media\video.avi --face --hand --write_json output_json_folder/
      +
    • +
    +
  • +
  • Output information: Learn about the output format, keypoint index ordering, etc.
  • +
  • OpenPose Python API: Almost all the OpenPose functionality, but in Python! If you want to read a specific input, and/or add your custom post-processing function, and/or implement your own display/saving.
  • +
  • OpenPose C++ API: If you want to read a specific input, and/or add your custom post-processing function, and/or implement your own display/saving.
  • +
  • Maximizing OpenPose speed and benchmark: Check the OpenPose Benchmark as well as some hints to speed up and/or reduce the memory requirements for OpenPose.
  • +
  • Calibration toolbox and 3D OpenPose: Calibrate your cameras for 3D OpenPose (or any other stereo vision tasks) and start obtaining 3D keypoints!
  • +
  • Standalone face or hand detector is useful if you want to do any of the following:
      +
    • Face keypoint detection without body keypoint detection: Pros: Speedup and RAM/GPU memory reduction. Cons: Worse accuracy and a lower number of detected faces.
    • +
    • Use your own face/hand detector: You can use the hand and/or face keypoint detectors with your own face or hand detectors, rather than using the body detector. E.g., useful for camera views at which the hands are visible but not the body (OpenPose detector would fail).
    • +
    +
  • +
+
+
+
+ + + + diff --git a/web/html/doc/md_doc_01_demo.html b/web/html/doc/md_doc_01_demo.html new file mode 100644 index 000000000..e8199755f --- /dev/null +++ b/web/html/doc/md_doc_01_demo.html @@ -0,0 +1,360 @@ + + + + + + + +OpenPose: OpenPose Doc - Demo + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Demo
+
+
+

Forget about the OpenPose code, just download the portable Windows binaries (or compile the code from source) and use the demo by following this tutorial!

+

+Contents

+
    +
  1. Quick Start
      +
    1. Running on Images, Video, or Webcam
    2. +
    3. Face and Hands
    4. +
    5. Different Outputs (JSON, Images, Video, UI)
    6. +
    7. Only Skeleton without Background Image
    8. +
    9. Not Running All GPUs
    10. +
    11. Maximum Accuracy Configuration
        +
      1. Additional Model with Maximum Accuracy
      2. +
      3. Additional Model with Lower False Positives
      4. +
      +
    12. +
    13. 3-D Reconstruction
    14. +
    15. Tracking
    16. +
    17. Kinect 2.0 as Webcam on Windows 10
    18. +
    19. Main Flags
    20. +
    +
  2. +
  3. Advanced Quick Start
  4. +
  5. Bug Solving
      +
    1. Improving Memory and Speed but Decreasing Accuracy
    2. +
    3. Mac OSX Additional Step
    4. +
    5. FAQ
    6. +
    +
  6. +
+

+Quick Start

+

In Ubuntu, Mac, and other Unix systems, use Terminal or Terminator. In Windows, use Windows PowerShell. Watch any YouTube video tutorial if you are not familiar with these tools. Make sure that you are in the root directory of the project when running any command (i.e., in the OpenPose folder, not inside build/, windows/, or bin/). In addition, examples/media/video.avi and examples/media exist, so there is no need to change any lines of code.

+

Test OpenPose by running the following. The expected visual result should look like doc/02_output.md#ui-and-visual-output.

# Ubuntu and Mac
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi
+
:: Windows - Portable Demo
+
bin\OpenPoseDemo.exe --video examples/media/video.avi
+

If you are only using the OpenPose demo, we highly recommend using the latest Windows portable version of OpenPose. If you still want to use the demo with Visual Studio, you can copy the bin/*.dll files into the final DLL bin location following doc/installation/0_index.md#windows, or you could also simply modify the default flag values from include/flags.hpp. If you have copied the DLLs, you can execute this:

:: Windows - Library - Assuming you have copied the DLLs following doc/installation/0_index.md#windows
+
build\x64\Release\OpenPoseDemo.exe --video examples/media/video.avi
+

If it worked, continue with the next section. Otherwise:

+

+Running on Images, Video, or Webcam

+
    +
  • Directory with images (--image_dir {DIRECTORY_PATH}):
    # Ubuntu and Mac
    +
    ./build/examples/openpose/openpose.bin --image_dir examples/media/
    +
    :: Windows - Portable Demo
    +
    bin\OpenPoseDemo.exe --image_dir examples/media/
    +
  • +
  • Video (--video {VIDEO_PATH}):
    # Ubuntu and Mac
    +
    ./build/examples/openpose/openpose.bin --video examples/media/video.avi
    +
    :: Windows - Portable Demo
    +
    bin\OpenPoseDemo.exe --video examples/media/video.avi
    +
  • +
  • Webcam is used by default (i.e., if no --image_dir or --video flags are used). Optionally, if you have more than 1 camera, you could use --camera {CAMERA_NUMBER} to select the right one:
    # Ubuntu and Mac
    +
    ./build/examples/openpose/openpose.bin
    +
    ./build/examples/openpose/openpose.bin --camera 0
    +
    ./build/examples/openpose/openpose.bin --camera 1
    +
    :: Windows - Portable Demo
    +
    bin\OpenPoseDemo.exe
    +
    bin\OpenPoseDemo.exe --camera 0
    +
    bin\OpenPoseDemo.exe --camera 1
    +
  • +
+

+Face and Hands

+

Simply add --face and/or --hand to any command:

# Ubuntu and Mac
+
./build/examples/openpose/openpose.bin --image_dir examples/media/ --face --hand
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --face --hand
+
./build/examples/openpose/openpose.bin --face --hand
+
:: Windows - Portable Demo
+
bin\OpenPoseDemo.exe --image_dir examples/media/ --face --hand
+
bin\OpenPoseDemo.exe --video examples/media/video.avi --face --hand
+
bin\OpenPoseDemo.exe --face --hand
+

+Different Outputs (JSON, Images, Video, UI)

+

All the output options are complementary to each other. E.g., whether you display the images with the skeletons on the UI (or not) is independent of whether you save them on disk (or not).

+
    +
  • Save the skeletons in a set of JSON files with --write_json {OUTPUT_JSON_PATH}, see doc/02_output.md to understand its format.
    # Ubuntu and Mac (same flags for Windows)
    +
    ./build/examples/openpose/openpose.bin --image_dir examples/media/ --write_json output_jsons/
    +
    ./build/examples/openpose/openpose.bin --video examples/media/video.avi --write_json output_jsons/
    +
    ./build/examples/openpose/openpose.bin --write_json output_jsons/
    +
  • +
  • Save on disk the visual output of OpenPose (the images with the skeletons overlaid) as an output video (--write_video {OUTPUT_VIDEO_PATH}) or as a set of images (--write_images {OUTPUT_IMAGE_DIRECTORY_PATH}):
    # Ubuntu and Mac (same flags for Windows)
    +
    ./build/examples/openpose/openpose.bin --video examples/media/video.avi --write_video output/result.avi
    +
    ./build/examples/openpose/openpose.bin --image_dir examples/media/ --write_video output/result.avi
    +
    ./build/examples/openpose/openpose.bin --video examples/media/video.avi --write_images output_images/
    +
    ./build/examples/openpose/openpose.bin --video examples/media/video.avi --write_images output_images/ --write_images_format jpg
    +
    ./build/examples/openpose/openpose.bin --image_dir examples/media/ --write_images output_images/
    +
    ./build/examples/openpose/openpose.bin --image_dir examples/media/ --write_images output_images/ --write_images_format jpg
    +
  • +
  • You can also disable the UI visualization with --display 0. However, some kind of output must be generated, i.e., set at least one of --write_json, --write_video, or --write_images when using --display 0.
    # Ubuntu and Mac (same flags for Windows)
    +
    ./build/examples/openpose/openpose.bin --video examples/media/video.avi --write_images output_images/ --display 0
    +
  • +
  • To speed up OpenPose even further when using --display 0, also add --render_pose 0 if you are not using --write_video or --write_images (so OpenPose will not overlay skeletons over the input images).
    # Ubuntu and Mac (same flags for Windows)
    +
    ./build/examples/openpose/openpose.bin --video examples/media/video.avi --write_json output_jsons/ --display 0 --render_pose 0
    +
  • +
+

+Only Skeleton without Background Image

+

You can also visualize/save the skeleton without the original image overlaid or blended by adding --disable_blending:

# Ubuntu and Mac (same flags for Windows)
+
# Only body
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --disable_blending
+

+Not Running All GPUs

+

By default, OpenPose will use all the GPUs available in your machine. Otherwise, --num_gpu sets the number of total GPUs and --num_gpu_start the first GPU to use. E.g., --num_gpu 2 --num_gpu_start 1 will use GPUs with IDs 1 and 2 while ignoring GPU ID 0 (assuming there are at least 3 GPUs):

:: Windows - Portable Demo (same flags for Ubuntu and Mac)
+
bin\OpenPoseDemo.exe --video examples/media/video.avi --num_gpu 2 --num_gpu_start 1
+

+Maximum Accuracy Configuration

+

This command provides the most accurate results we have been able to achieve for body, hand and face keypoint detection.

# Ubuntu and Mac: Body
+
./build/examples/openpose/openpose.bin --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25
+
# Ubuntu and Mac: Body + Hand + Face
+
./build/examples/openpose/openpose.bin --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25 --hand --hand_scale_number 6 --hand_scale_range 0.4 --face
+
:: Windows - Portable Demo: Body
+
bin\OpenPoseDemo.exe --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25
+
:: Windows - Portable Demo: Body + Hand + Face
+
bin\OpenPoseDemo.exe --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25 --hand --hand_scale_number 6 --hand_scale_range 0.4 --face
+
    +
  • Required:
      +
    • BODY_25 (default model). COCO is less accurate (but still usable), while MPI is not supported (i.e., MPI accuracy and speed will drop by using these settings).
    • +
    • Nvidia GPU with at least 16 GB of memory. 8 or 12 GB could work in some subcases detailed here.
        +
      • BODY_25 (body + foot, default model): Nvidia GPU with at least about 10.5 GB of memory. E.g., Titan X(P), some Quadro models, P100, V100.
      • +
      • BODY_25 + face + hands: Nvidia GPU with at least about 16 GB of memory. E.g., V100.
      • +
      • COCO Body + face + hands: Nvidia GPU with at least about 6.7 GB of memory. E.g., 2070, 2080.
      • +
      +
    • +
    • It won't work on CPU/OpenCL modes; your only option there is to manually crop each person, rescale it, and feed it into the default OpenPose.
    • +
    +
  • +
  • Additional information:
      +
    • It runs at about 2 FPS on a Titan X for BODY_25 (1 FPS for COCO).
    • +
    • Increasing --net_resolution will greatly reduce the speed, while it does not guarantee that the accuracy will increase. Thus, we recommend only using the exact flags and values detailed here (or alternatively making your own accuracy analysis if using other values).
    • +
    • (Not recommended, use at your own risk) You can add --maximize_positives, which harms the visual/qualitative accuracy but increases the accuracy metric on the COCO challenge. It reduces the thresholds to accept a person candidate (i.e., more false and true positives), which maximizes average recall but could harm average precision. Our experience: it looks much worse visually, but it improves the challenge accuracy numbers.
    • +
    • If you are operating on Ubuntu, you can check the experimental scripts that we use to test our accuracy (we do not officially support them, i.e., we will not answer questions about them, and they might change continuously). They are placed in openpose/scripts/tests/ and called pose_accuracy_coco_test_dev.sh and pose_accuracy_coco_val.sh.
    • +
    +
  • +
+

+Additional Model with Maximum Accuracy

+

Disclaimer: It is more accurate but also slower, requires more GPU memory, and must use the Nvidia GPU version.

+

Our paper accuracy numbers do not match the default model numbers. We released our best model at the time but found better ones later.

+

For our best model, you can download the BODY_25B pre-trained model from the OpenPose training repository: BODY_25B Model - Option 1 (Maximum Accuracy, Less Speed).

+

+Additional Model with Lower False Positives

+

Disclaimer: It must use the Nvidia GPU version.

+

Do you need a model with less false positives but the same runtime performance and GPU requirements? You can download the BODY_25B pre-trained model from the OpenPose training repository: BODY_25B Model - Option 2 (Recommended).

+

+3-D Reconstruction

+
    +
  1. Real-time demo
    # Ubuntu and Mac
    +
    ./build/examples/openpose/openpose.bin --flir_camera --3d --number_people_max 1
    +
    # With face and hands
    +
    ./build/examples/openpose/openpose.bin --flir_camera --3d --number_people_max 1 --face --hand
    +
    :: Windows - Portable Demo
    +
    bin\OpenPoseDemo.exe --flir_camera --3d --number_people_max 1
    +
    :: With face and hands
    +
    bin\OpenPoseDemo.exe --flir_camera --3d --number_people_max 1 --face --hand
    +
  2. +
  3. Saving 3-D keypoints and video
    # Ubuntu and Mac (same flags for Windows)
    +
    ./build/examples/openpose/openpose.bin --flir_camera --3d --number_people_max 1 --write_json output_folder_path/ --write_video_3d output_folder_path/video_3d.avi
    +
  4. +
  5. Fast stereo camera image saving (without keypoint detection) for later post-processing
    # Ubuntu and Mac (same flags for Windows)
    +
    # Saving video
    +
    # Note: saving in PNG rather than JPG will improve image quality, but slow down FPS (depending on hard disk writing speed and camera number)
    +
    ./build/examples/openpose/openpose.bin --flir_camera --num_gpu 0 --write_video output_folder_path/video.avi --write_video_fps 5
    +
    # Saving images
    +
    # Note: saving in PNG rather than JPG will improve image quality, but slow down FPS (depending on hard disk writing speed and camera number)
    +
    ./build/examples/openpose/openpose.bin --flir_camera --num_gpu 0 --write_images output_folder_path/ --write_images_format jpg
    +
  6. +
  7. Reading and processing previously saved stereo camera images
    # Ubuntu and Mac (same flags for Windows)
    +
    # Optionally add `--face` and/or `--hand` to include face and/or hands
    +
    # Assuming 3 cameras
    +
    # Note: We highly recommend to reduce `--output_resolution`. E.g., for 3 cameras recording at 1920x1080, the resulting image is (3x1920)x1080, so we recommend e.g. 640x360 (x3 reduction).
    +
    # Video
    +
    ./build/examples/openpose/openpose.bin --video output_folder_path/video.avi --3d_views 3 --3d --number_people_max 1 --output_resolution {desired_output_resolution}
    +
    # Images
    +
    ./build/examples/openpose/openpose.bin --image_dir output_folder_path/ --3d_views 3 --3d --number_people_max 1 --output_resolution {desired_output_resolution}
    +
  8. +
  9. Reconstruction when the keypoint is visible in at least x camera views out of the total n cameras
    # Ubuntu and Mac (same flags for Windows)
    +
    # Reconstruction when a keypoint is visible in at least 2 camera views (assuming `n` >= 2)
    +
    ./build/examples/openpose/openpose.bin --flir_camera --3d --number_people_max 1 --3d_min_views 2 --output_resolution {desired_output_resolution}
    +
    # Reconstruction when a keypoint is visible in at least max(2, min(4, n-1)) camera views
    +
    ./build/examples/openpose/openpose.bin --flir_camera --3d --number_people_max 1 --output_resolution {desired_output_resolution}
    +
  10. +
+

+Tracking

+
    +
  1. Huge runtime speed-up by reducing the accuracy:
    :: Windows - Portable Demo (same flags for Ubuntu and Mac)
    +
    # Using OpenPose 1 frame, tracking the following e.g., 5 frames
    +
    bin\OpenPoseDemo.exe --tracking 5 --number_people_max 1
    +
  2. +
  3. Runtime speed up while keeping most of the accuracy:
    :: Windows - Portable Demo (same flags for Ubuntu and Mac)
    +
    # Using OpenPose 1 frame and tracking another frame
    +
    bin\OpenPoseDemo.exe --tracking 1 --number_people_max 1
    +
  4. +
  5. Visual smoothness:
    :: Windows - Portable Demo (same flags for Ubuntu and Mac)
    +
    # Running both OpenPose and tracking on each frame. Note: There is no speed up/slow down
    +
    bin\OpenPoseDemo.exe --tracking 0 --number_people_max 1
    +
  6. +
+

+Kinect 2.0 as Webcam on Windows 10

+

Since the Windows 10 Anniversary Update, Kinect 2.0 can be read as a normal webcam. All you need to do is go to the Device Manager, expand the Kinect sensor devices tab, right click, and update the driver of the WDF KinectSensor Interface. If you already have another webcam, disconnect it or use --camera 2.

+

+Main Flags

+

These are the most common flags, but check doc/advanced/demo_advanced.md for a full list and description of all of them.

+
    +
  • --face: Enables face keypoint detection.
  • +
  • --hand: Enables hand keypoint detection.
  • +
  • --video input.mp4: Read video input.mp4.
  • +
  • --camera 3: Read webcam number 3.
  • +
  • --image_dir path_with_images/: Run on the directory path_with_images/ with images.
  • +
  • --ip_camera http://iris.not.iac.es/axis-cgi/mjpg/video.cgi?resolution=320x240?x.mjpeg: Run on a streamed IP camera. See examples of public IP cameras here.
  • +
  • --write_video path.avi: Save processed images as video.
  • +
  • --write_images folder_path: Save processed images on a folder.
  • +
  • --write_keypoint path/: Output JSON, XML or YML files with the people pose data on a folder.
  • +
  • --process_real_time: For video, it might skip frames in order to display them in real time.
  • +
  • --disable_blending: If enabled, it will render the results (keypoint skeletons or heatmaps) on a black background, not showing the original image. Related: part_to_show and alpha_pose.
  • +
  • --part_to_show: Prediction channel to visualize.
  • +
  • --display 0: Display window not opened. Useful for servers and/or to slightly speed up OpenPose.
  • +
  • --num_gpu 2 --num_gpu_start 1: Parallelize over this number of GPUs starting by the desired device id. By default it uses all the available GPUs.
  • +
  • --model_pose MPI: Model to use; it affects the number of keypoints, speed, and accuracy.
  • +
  • --logging_level 3: Logging messages threshold, range [0,255]: 0 will output any message and 255 will output none. Current messages are in the range [1-4], with 1 for low-priority messages and 4 for important ones.
  • +
+

+Advanced Quick Start

+

In order to learn about many more flags, check doc/advanced/demo_advanced.md.

+

+Bug Solving

+

+Improving Memory and Speed but Decreasing Accuracy

+

If you have an Nvidia GPU that does not run out of memory when running OpenPose, you should skip this step!

+

Use net_resolution at your own risk: If your GPU runs out of memory or you do not have an Nvidia GPU, you can reduce --net_resolution to improve the speed and reduce the memory requirements, but it will also greatly reduce the accuracy! The lower the resolution, the lower the accuracy but the better the speed and memory usage.

# Ubuntu and Mac
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --net_resolution -1x320
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --net_resolution -1x256
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --net_resolution -1x196
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --net_resolution -1x128
+
:: Windows - Portable Demo
+
bin\OpenPoseDemo.exe --video examples/media/video.avi --net_resolution -1x320
+
bin\OpenPoseDemo.exe --video examples/media/video.avi --net_resolution -1x256
+
bin\OpenPoseDemo.exe --video examples/media/video.avi --net_resolution -1x196
+
bin\OpenPoseDemo.exe --video examples/media/video.avi --net_resolution -1x128
+

Additional notes:

    +
  • The default resolution is -1x368; any smaller resolution will improve the speed.
  • +
  • The -1 means that the resolution will be adapted to maintain the aspect ratio of the input source. E.g., -1x368, 656x-1, and 656x368 will result in the same exact resolution for 720p and 1080p input images.
  • +
  • For videos, using -1 is recommended to let OpenPose find the ideal resolution. For a folder of images of different sizes, not adding -1 is recommended, since using -1 with images of completely different aspect ratios might result in out-of-memory issues. E.g., if a folder contains 2 images of resolution 100x11040 and 10000x368, then using the default -1x368 will result in network resolutions of 3x368 and 10000x368, causing an obvious out-of-memory error for the 10000x368 image.
  • +
+

+Mac OSX Additional Step

+

If you are not using Mac, or you are using Mac with CPU_only, you can skip this section.

+

If you are using a Mac with OPENCL support selected and it has an AMD graphics card, the machine has 2 GPUs that are not compatible with each other (AMD and Intel). You will have to manually select one of them (the AMD one should be more powerful). To do that, first check which device number your graphics card is listed under. Most likely, your AMD device will be device 2.

clinfo
+

For any OpenPose command you run, add the following 2 flags to use your AMD card for acceleration (where num_gpu_start should be the ID number given above).

./build/examples/openpose/openpose.bin --num_gpu 1 --num_gpu_start 2
+

If you only have an integrated Intel graphics card, then it will most probably be device 1. In that case, always add the following 2 flags to use it for acceleration.

./build/examples/openpose/openpose.bin --num_gpu 1 --num_gpu_start 1
+

+FAQ

+

Check doc/05_faq.md to see if you can find your error, issue, or concern.

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_02_output.html b/web/html/doc/md_doc_02_output.html new file mode 100644 index 000000000..6ff2d03c8 --- /dev/null +++ b/web/html/doc/md_doc_02_output.html @@ -0,0 +1,317 @@ + + + + + + + +OpenPose: OpenPose Doc - Output + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Output
+
+
+

+Contents

+
    +
  1. UI and Visual Output
  2. +
  3. JSON-UI Mapping
      +
    1. Pose Output Format (BODY_25)
    2. +
    3. Pose Output Format (COCO)
    4. +
    5. Face Output Format
    6. +
    7. Hand Output Format
    8. +
    +
  4. +
  5. JSON Output Format
  6. +
  7. Keypoints in C++/Python
      +
    1. Keypoint Ordering in C++/Python
    2. +
    3. Keypoint Format in Datum (Advanced)
    4. +
    +
  8. +
  9. Reading Saved Results
  10. +
  11. Advanced
      +
    1. Camera Matrix Output Format
    2. +
    3. Heatmaps
    4. +
    +
  12. +
+

+UI and Visual Output

+

The visual GUI should show the original image with the poses blended on it, similar to the pose in the following GIF:

+

+

+JSON-UI Mapping

+

The output of the JSON files consists of a set of keypoints, whose ordering is related to the UI output as follows:

+

+Pose Output Format (BODY_25)

+

+

+Pose Output Format (COCO)

+

+

+Face Output Format

+

+

+Hand Output Format

+

+

+JSON Output Format

+

There are 2 alternatives to save the OpenPose output, but both of them follow the keypoint ordering described in the Keypoint Ordering in C++/Python section (which you should read next).

+
    +
  1. The --write_json flag saves the people pose data into JSON files. Each file represents a frame and contains a people array of objects, where each object has the following fields (a minimal Python parsing sketch is shown at the end of this section):
      +
    1. pose_keypoints_2d: Body part locations (x, y) and detection confidence (c) formatted as x0,y0,c0,x1,y1,c1,.... The coordinates x and y can be normalized to the range [0,1], [-1,1], [0, source size], [0, output size], etc. (see the flag --keypoint_scale for more information), while the confidence score (c) is in the range [0,1]. Additionally, the number of keypoints output in this array will remain consistent between JSON files.
    2. +
    3. face_keypoints_2d, hand_left_keypoints_2d, and hand_right_keypoints_2d are analogous to pose_keypoints_2d but applied to the face and hand parts.
    4. +
    5. pose_keypoints_3d, face_keypoints_3d, hand_left_keypoints_3d, and hand_right_keypoints_3d are analogous but applied to the 3-D parts. They are empty if --3d is not enabled. Their format is x0,y0,z0,c0,x1,y1,z1,c1,..., where c is 1 or 0 depending on whether the 3-D reconstruction was successful or not.
    6. +
    7. part_candidates (optional and advanced): The body part candidates before being assembled into people. Empty if --part_candidates is not enabled (see that flag for more details).
      {
      +
      "version":1.1,
      +
      "people":[
      +
      {
      +
      "pose_keypoints_2d":[582.349,507.866,0.845918,746.975,631.307,0.587007,...],
      +
      "face_keypoints_2d":[468.725,715.636,0.189116,554.963,652.863,0.665039,...],
      +
      "hand_left_keypoints_2d":[746.975,631.307,0.587007,615.659,617.567,0.377899,...],
      +
      "hand_right_keypoints_2d":[617.581,472.65,0.797508,0,0,0,723.431,462.783,0.88765,...]
      +
      "pose_keypoints_3d":[582.349,507.866,507.866,0.845918,507.866,746.975,631.307,0.587007,...],
      +
      "face_keypoints_3d":[468.725,715.636,715.636,0.189116,715.636,554.963,652.863,0.665039,...],
      +
      "hand_left_keypoints_3d":[746.975,631.307,631.307,0.587007,631.307,615.659,617.567,0.377899,...],
      +
      "hand_right_keypoints_3d":[617.581,472.65,472.65,0.797508,472.65,0,0,0,723.431,462.783,0.88765,...]
      +
      }
      +
      ],
      +
      // If `--part_candidates` enabled
      +
      "part_candidates":[
      +
      {
      +
      "0":[296.994,258.976,0.845918,238.996,365.027,0.189116],
      +
      "1":[381.024,321.984,0.587007],
      +
      "2":[313.996,314.97,0.377899],
      +
      "3":[238.996,365.027,0.189116],
      +
      "4":[283.015,332.986,0.665039],
      +
      "5":[457.987,324.003,0.430488,283.015,332.986,0.665039],
      +
      "6":[],
      +
      "7":[],
      +
      "8":[],
      +
      "9":[],
      +
      "10":[],
      +
      "11":[],
      +
      "12":[],
      +
      "13":[],
      +
      "14":[293.001,242.991,0.674305],
      +
      "15":[314.978,241,0.797508],
      +
      "16":[],
      +
      "17":[369.007,235.964,0.88765]
      +
      }
      +
      ]
      +
      }
      +
    8. +
    +
  2. +
  3. (Deprecated) --write_keypoint uses the OpenCV cv::FileStorage default formats, i.e., JSON (if OpenCV 3 or higher), XML, and YML. It only prints 2D body information (no 3D or face/hands).
  4. +
+

(Low level details) If you wanted to change anything in this file format (e.g., the output file name), you could check and/or modify peopleJsonSaver.cpp.

+

+Keypoints in C++/Python

+

+Keypoint Ordering in C++/Python

+

The body part mapping order of any body model (e.g., BODY_25, COCO, MPI) can be extracted from the C++ and Python APIs.

+
    +
  • In C++, getPoseBodyPartMapping(const PoseModel poseModel) is available in poseParameters.hpp:
    // C++ API call
    +
    #include <openpose/pose/poseParameters.hpp>
    +
    const auto& poseBodyPartMappingBody25 = getPoseBodyPartMapping(PoseModel::BODY_25);
    +
    const auto& poseBodyPartMappingCoco = getPoseBodyPartMapping(PoseModel::COCO_18);
    +
    const auto& poseBodyPartMappingMpi = getPoseBodyPartMapping(PoseModel::MPI_15);
    +
    const auto& poseBodyPartMappingBody25B = getPoseBodyPartMapping(PoseModel::BODY_25B);
    +
    const auto& poseBodyPartMappingBody135 = getPoseBodyPartMapping(PoseModel::BODY_135);
    +
    +
    // Result for BODY_25 (25 body parts consisting of COCO + foot)
    +
    // const std::map<unsigned int, std::string> POSE_BODY_25_BODY_PARTS {
    +
    // {0, "Nose"},
    +
    // {1, "Neck"},
    +
    // {2, "RShoulder"},
    +
    // {3, "RElbow"},
    +
    // {4, "RWrist"},
    +
    // {5, "LShoulder"},
    +
    // {6, "LElbow"},
    +
    // {7, "LWrist"},
    +
    // {8, "MidHip"},
    +
    // {9, "RHip"},
    +
    // {10, "RKnee"},
    +
    // {11, "RAnkle"},
    +
    // {12, "LHip"},
    +
    // {13, "LKnee"},
    +
    // {14, "LAnkle"},
    +
    // {15, "REye"},
    +
    // {16, "LEye"},
    +
    // {17, "REar"},
    +
    // {18, "LEar"},
    +
    // {19, "LBigToe"},
    +
    // {20, "LSmallToe"},
    +
    // {21, "LHeel"},
    +
    // {22, "RBigToe"},
    +
    // {23, "RSmallToe"},
    +
    // {24, "RHeel"},
    +
    // {25, "Background"}
    +
    // };
    +
  • +
  • You can also check them in Python (a small lookup sketch is shown after this list):
    poseModel = op.PoseModel.BODY_25
    +
    print(op.getPoseBodyPartMapping(poseModel))
    +
    print(op.getPoseNumberBodyParts(poseModel))
    +
    print(op.getPosePartPairs(poseModel))
    +
    print(op.getPoseMapIndex(poseModel))
    +
  • +
+

+Keypoint Format in Datum (Advanced)

+

This section is only for advanced users that plan to use the C++ API. Not needed for the OpenPose demo and/or Python API.

+

There are 3 different keypoint Array<float> elements in the Datum class:

+
    +
  1. Array<float> poseKeypoints: In order to access the person with index person and the body part with index part (where the part index matches POSE_COCO_BODY_PARTS or POSE_MPI_BODY_PARTS), you can simply do:
    // Common parameters needed
    +
    const auto numberPeopleDetected = poseKeypoints.getSize(0);
    +
    const auto numberBodyParts = poseKeypoints.getSize(1);
    +
    // Easy version
    +
    const auto x = poseKeypoints[{person, part, 0}];
    +
    const auto y = poseKeypoints[{person, part, 1}];
    +
    const auto score = poseKeypoints[{person, part, 2}];
    +
    // Slightly more efficient version
    +
    // If you want to access these elements in a huge loop, you can compute the index
    +
    // by yourself, but it is usually not enough of a speed-up to be worth it
    +
    const auto baseIndex = poseKeypoints.getSize(2)*(person*numberBodyParts + part);
    +
    const auto x = poseKeypoints[baseIndex];
    +
    const auto y = poseKeypoints[baseIndex + 1];
    +
    const auto score = poseKeypoints[baseIndex + 2];
    +
  2. +
  3. Array<float> faceKeypoints: It is completely analogous to poseKeypoints.
    // Common parameters needed
    +
    const auto numberPeopleDetected = faceKeypoints.getSize(0);
    +
    const auto numberFaceParts = faceKeypoints.getSize(1);
    +
    // Easy version
    +
    const auto x = faceKeypoints[{person, part, 0}];
    +
    const auto y = faceKeypoints[{person, part, 1}];
    +
    const auto score = faceKeypoints[{person, part, 2}];
    +
    // Slightly more efficient version
    +
    const auto baseIndex = faceKeypoints.getSize(2)*(person*numberFaceParts + part);
    +
    const auto x = faceKeypoints[baseIndex];
    +
    const auto y = faceKeypoints[baseIndex + 1];
    +
    const auto score = faceKeypoints[baseIndex + 2];
    +
  4. +
  5. std::array<Array<float>, 2> handKeypoints, where handKeypoints[0] corresponds to the left hand and handKeypoints[1] to the right one. Each handKeypoints[i] is analogous to poseKeypoints and faceKeypoints:
    // Common parameters needed
    +
    const auto numberPeopleDetected = handKeypoints[0].getSize(0); // = handKeypoints[1].getSize(0)
    +
    const auto numberHandParts = handKeypoints[0].getSize(1); // = handKeypoints[1].getSize(1)
    +
    +
    // Easy version
    +
    // Left Hand
    +
    const auto xL = handKeypoints[0][{person, part, 0}];
    +
    const auto yL = handKeypoints[0][{person, part, 1}];
    +
    const auto scoreL = handKeypoints[0][{person, part, 2}];
    +
    // Right Hand
    +
    const auto xR = handKeypoints[1][{person, part, 0}];
    +
    const auto yR = handKeypoints[1][{person, part, 1}];
    +
    const auto scoreR = handKeypoints[1][{person, part, 2}];
    +
    +
    // Slightly more efficient version
    +
    const auto baseIndex = handKeypoints[0].getSize(2)*(person*numberHandParts + part);
    +
    // Left Hand
    +
    const auto xL = handKeypoints[0][baseIndex];
    +
    const auto yL = handKeypoints[0][baseIndex + 1];
    +
    const auto scoreL = handKeypoints[0][baseIndex + 2];
    +
    // Right Hand
    +
    const auto xR = handKeypoints[1][baseIndex];
    +
    const auto yR = handKeypoints[1][baseIndex + 1];
    +
    const auto scoreR = handKeypoints[1][baseIndex + 2];
    +
  6. +
+

+Reading Saved Results

+

We use the standard formats (JSON, PNG, JPG, ...) to save our results, so there are many open-source libraries to read them in most programming languages (especially Python). For C++, you might want to check include/openpose/filestream/fileStream.hpp. In particular, use loadData (for JSON, XML, and YML files) and loadImage (for image formats such as PNG or JPG) to load the data into cv::Mat format.

+

+Advanced

+

+Camera Matrix Output Format

+

If you need to use the camera calibration or 3D modules, the camera matrix output format is detailed in doc/advanced/calibration_module.md#camera-matrix-output-format.

+

+Heatmaps

+

If you need to use heatmaps, check doc/advanced/heatmap_output.md.

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_03_python_api.html b/web/html/doc/md_doc_03_python_api.html new file mode 100644 index 000000000..9b9047a51 --- /dev/null +++ b/web/html/doc/md_doc_03_python_api.html @@ -0,0 +1,203 @@ + + + + + + + +OpenPose: OpenPose Doc - Python API + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Python API
+
+
+

+Contents

+
    +
  1. Introduction
  2. +
  3. Advanced Introduction (Optional)
  4. +
  5. Compatibility
  6. +
  7. Installation
  8. +
  9. Testing And Developing
  10. +
  11. Exporting Python OpenPose
  12. +
  13. Common Issues
  14. +
+

+Introduction

+

Almost all the OpenPose functionality, but in Python!

+

When should you look at the Python or C++ APIs? If you want to read a specific input, and/or add your custom post-processing function, and/or implement your own display/saving.

+

You should be familiar with the OpenPose Demo and the main OpenPose flags before trying to read the C++ or Python API examples. Otherwise, it will be way harder to follow.

+

+Advanced Introduction (Optional)

+

This module exposes a Python API for OpenPose. It is effectively a wrapper that replicates most of the functionality of the op::Wrapper class and allows you to populate and retrieve data from the op::Datum class using standard Python and Numpy constructs.

+

The Python API is analogous to the C++ function calls. You may find them in python/openpose/openpose_python.cpp#L194.

+

The Python API is rather simple: op::Array<float> and cv::Mat objects get cast to numpy arrays automatically. Every other data structure based on the standard library is automatically converted into Python objects. For example, an std::vector<std::vector<float>> would become [[item, item], [item, item]], etc. We also provide castings of op::Rectangle and op::Point, which simply expose setters and getters for [x, y, width, height], etc.

+

+Compatibility

+

The OpenPose Python module is compatible with both Python 2 and Python 3 (default and recommended). In addition, it will also run in all OpenPose compatible operating systems. It uses Pybind11 for mapping between C++ and Python datatypes.

+

To compile, enable BUILD_PYTHON in CMake-gui, or run cmake -DBUILD_PYTHON=ON .. from your build directory. In Windows, make sure you compile the whole solution (clicking the green play button does not compile the whole solution!). You can do that by right-clicking on the OpenPose project solution and clicking Build Solution (or by individually building the PyOpenPose module).

+

Pybind selects the latest version of Python by default (Python 3). To use Python 2, change PYTHON_EXECUTABLE and PYTHON_LIBRARY flags in CMake-gui to your desired Python version.

+
# Ubuntu
+
PYTHON_EXECUTABLE=/usr/bin/python2.7
+
PYTHON_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython2.7m.so
+
# Mac OSX
+
PYTHON_EXECUTABLE=/usr/local/bin/python2.7
+
PYTHON_LIBRARY=/usr/local/opt/python/Frameworks/Python.framework/Versions/2.7/lib/libpython2.7m.dylib
+
:: Windows
+
PYTHON_EXECUTABLE=C:/Users/user/AppData/Local/Programs/Python/Python27/python.exe
+

If run via the command line, you may need to run cmake twice in order for this change to take effect.

+

+Installation

+

Make sure you followed the Python steps in doc/installation/0_index.md#cmake-configuration.

+

+Testing And Developing

+

All the Python examples from the Tutorial API Python module can be found in build/examples/tutorial_api_python in your build folder. Navigate directly to this path to run examples.

+
# From command line
+
cd build/examples/tutorial_api_python
+
+
# Python 3 (default version)
+
python3 01_body_from_image.py
+
python3 02_whole_body_from_image.py
+
# python3 [any_other_python_example.py]
+
+
# Python 2
+
python2 01_body_from_image.py
+
python2 02_whole_body_from_image.py
+
# python2 [any_other_python_example.py]
+

For quick prototyping, you can simply duplicate and rename any of the existing sample files in build/examples/tutorial_api_python within that same folder and start building in there. These files are copied from the existing example files at compile time. There are 2 alternatives:

    +
  • You can either duplicate and create your files in examples/tutorial_api_python/, but you will have to recompile OpenPose every time you make changes to your Python files so that they are copied over to the build/ folder.
  • +
  • Or you can directly edit them in build/examples/tutorial_api_python. This does not require rebuilding, but cleaning OpenPose will remove the whole build/ folder, so make sure to back your files up!
  • +
+

+Exporting Python OpenPose

+

Note: This step is only required if you are moving the *.py files outside their original location, or writing new *.py scripts outside build/examples/tutorial_api_python.

+

Ubuntu/OSX:

+
    +
  • Option a, installing OpenPose: On an Ubuntu or OSX based system, you could install OpenPose by running sudo make install; you could then set the OpenPose path in your Python scripts to the OpenPose installation path (default: /usr/local/python) and start using OpenPose from any location. Take a look at build/examples/tutorial_api_python/01_body_from_image.py for an example.
  • +
  • Option b, not installing OpenPose: To move the OpenPose Python API demos to a different folder, ensure that the line sys.path.append('{OpenPose_path}/python') is properly set in your *.py files, where {OpenPose_path} points to your build folder of OpenPose. Take a look at build/examples/tutorial_api_python/01_body_from_image.py for an example.
  • +
+

Windows:

+
    +
  • Ensure that the folders build/x{86/64}/Release and build/bin are copied along with build/python. As noted in the example, the paths for these can be changed in the following two variables (a combined import sketch is shown below):
  • +
+
sys.path.append(dir_path + '/../../python/openpose/Release');
+
os.environ['PATH'] = os.environ['PATH'] + ';' + dir_path + '/../../{x86/x64}/Release;' + dir_path + '/../../bin;'
+

+Common Issues

+

+Do not use PIL

+

In order to read images in Python, make sure to use OpenCV (do not use PIL). We found that feeding a PIL image format to OpenPose results in the input image appearing in grey and duplicated 9 times (so the output skeletons appear 3 times smaller than they should be, and duplicated 9 times).

+

+Cannot Import Name PyOpenPose

+

The error in general is that PyOpenPose cannot be found (an error similar to: ImportError: cannot import name pyopenpose). Ensure first that the BUILD_PYTHON flag is set to ON. If the error persists, check the following:

+

In the script you are running, check for the following line, and run the following command in the same location as the file:

+

Ubuntu/OSX:

+

sys.path.append('../../python');

+
ls ../../python/openpose
+

Check the contents of this location. It should contain one of the following files:

+
pyopenpose.cpython-35m-x86_64-linux-gnu.so
+
pyopenpose.so
+

If you do not have either of those, you may not have compiled OpenPose successfully, or you may be running the examples from the source folder instead of the build folder. If you have the first one, you have compiled PyOpenPose for Python 3 and have to run the scripts with python3; vice versa for the second one. Follow the testing examples above for the exact commands.

+

Windows:

+

Problem 1: If you are in Windows, and you fail to install the required third party Python libraries, it might print an error similar to: Exception: Error: OpenPose library could not be found. Did you enable BUILD_PYTHON in CMake and have this Python script in the right folder?. From GitHub issue #941:

I had a similar issue with Visual Studio (VS). I am pretty sure that the issue is that while you are compiling OpenPose in VS, it tries to import cv2 (python-opencv) and it fails. So make sure that if you open cmd.exe and run Python, you can actually import cv2 without errors. I could not, but I had cv2 installed in a IPython environment (Anaconda), so I activated that environment, and then ran (change this to adapt it to your VS version and location of OpenPose.sln):
+
+
C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\MSBuild.exe C:\path\to\OpenPose.sln
+

Problem 2: Python for OpenPose needs to be compiled in Release mode for now. This can be done in Visual Studio. Once that is done, check this line:

+

sys.path.append(dir_path + '/../../python/openpose/Release');

+
dir ../../python/openpose/Release
+

Check the contents of this location. It should contain one of the following files:

+
pyopenpose.cp36-win_amd64.pyd
+
pyopenpose.pyd
+

If such a folder does not exist, you need to compile in Release mode as seen above. If you have the first one, you have compiled PyOpenPose for Python 3 and have to run the scripts with python3; vice versa for the second one. Follow the testing examples above for the exact commands. If that still does not work, check this line:

+

os.environ['PATH'] = os.environ['PATH'] + ';' + dir_path + '/../../x64/Release;' + dir_path + '/../../bin;'

+
dir ../../x64/Release
+
dir ../../bin
+

Ensure that both of these paths exist, as PyOpenPose needs to reference those libraries. If they don't exist, change the path so that they point to the correct location in your build folder.

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_04_cpp_api.html b/web/html/doc/md_doc_04_cpp_api.html new file mode 100644 index 000000000..0394fb3da --- /dev/null +++ b/web/html/doc/md_doc_04_cpp_api.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: OpenPose Doc - C++ API + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - C++ API
+
+
+

+Contents

+
    +
  1. Introduction
  2. +
  3. Adding your Custom Code
  4. +
+

+Introduction

+

Extend the OpenPose functionality with all the power and performance of C++!

+

When should you look at the Python or C++ APIs? If you want to read a specific input, and/or add your custom post-processing function, and/or implement your own display/saving.

+

You should be familiar with the OpenPose Demo and the main OpenPose flags before trying to read the C++ or Python API examples. Otherwise, it will be way harder to follow.

+

+Adding your Custom Code

+

Once you are familiar with the command line demo, then you should explore the different C++ examples in the OpenPose C++ API folder.

+

For quick prototyping, you can simply duplicate and rename any of the existing sample files from the OpenPose C++ API folder into the examples/user_code/ folder and start building in there. Add the name of your new file(s) into the CMake file from that folder, and CMake will automatically compile it together with the whole OpenPose project.

+

See examples/user_code/README.md for more details.

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_05_faq.html b/web/html/doc/md_doc_05_faq.html new file mode 100644 index 000000000..18176a0f0 --- /dev/null +++ b/web/html/doc/md_doc_05_faq.html @@ -0,0 +1,283 @@ + + + + + + + +OpenPose: OpenPose Doc - Frequently Asked Questions (FAQ) + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Frequently Asked Questions (FAQ)
+
+
+

+Contents

+
    +
  1. FAQ
      +
    1. Errors
        +
      1. Download Server Down
      2. +
      3. Out of Memory Error
      4. +
      5. Video/Webcam Not Working
      6. +
      7. Cannot Find OpenPose.dll Error
      8. +
      9. Free Invalid Pointer Error
      10. +
      11. Source Directory does not Contain CMakeLists.txt (Windows)
      12. +
      13. Always Zero People Detected
      14. +
      15. Very Few People Detected
      16. +
      17. Check Failed for ReadProtoFromBinaryFile (Failed to Parse NetParameter File)
      18. +
      19. 3D OpenPose Returning Wrong Results: 0, NaN, Infinity, etc.
      20. +
      21. Protobuf Clip Param Caffe Error
      22. +
      23. The Human Skeleton Looks like Dotted Lines Rather than Solid Lines
      24. +
      25. CUDA_cublas_device_LIBRARY Not Found
      26. +
      27. CMake-GUI Error While Getting Default Caffe
      28. +
      29. Libgomp Out of Memory Error
      30. +
      31. Runtime Error with Turing GPU (Tesla T4) or Volta GPU
      32. +
      33. Obscure CMake Error about Caffe or Pybind.
      34. +
      +
    2. +
    3. Speed Performance Issues
        +
      1. Speed Up, Memory Reduction, and Benchmark
      2. +
      3. How to Measure the Latency Time?
      4. +
      5. CPU Version Too Slow
      6. +
      7. Profiling Speed and Estimating FPS without Display
      8. +
      9. Webcam Slower than Images
      10. +
      +
    4. +
    5. Accuracy Issues
        +
      1. Is Maximum Accuracy Configuration Possible on Lower End GPUs?
      2. +
      +
    6. +
    7. Other FAQ
        +
      1. How Should I Link my IP Camera?
      2. +
      3. Difference between BODY_25 vs. COCO vs. MPI
      4. +
      5. Huge RAM Usage
      6. +
      +
    8. +
    +
  2. +
+

+FAQ

+

+Errors

+

+Download Server Down

+

Q: The download server is down, Download Hash mismatch, Error 502: Bad Gateway, etc. I.e., I cannot download the OpenPose models and/or 3rd party libraries. A: There are 2 alternatives. Option 1 (recommended): Download the files from the links in 1602#issuecomment-641653411 and place them in the locations indicated by doc/1_prerequisites.md. Option 2: If you downloaded the models and dependencies to some local server, you could set the advanced CMake property DOWNLOAD_SERVER to link OpenPose to your local mirror server instead.

+

+Out of Memory Error

+

Q: Out of memory error - I get an error similar to: Check failed: error == cudaSuccess (2 vs. 0) out of memory.

+

A: Make sure you have a GPU with at least 4 GB of memory. If your GPU is between 2 and 4 GB, it should be fine for body-only settings, but you can also reduce the --net_resolution if it does not fit (check Speed Up, Memory Reduction, and Benchmark for the consequences of this).

+

(Only if you are compiling OpenPose by yourself, this does not apply to the portable OpenPose binaries for Windows because they already include cuDNN): If you have a GPU with >= 4 GB of memory, and you still face this error, most probably cuDNN is not installed/enabled. The default Caffe model uses >12 GB of GPU memory, cuDNN reduces it to ~2.2 GB for BODY_25 (default) and ~1.5 GB for COCO (--model_pose COCO). Note that you still need at least about 2.2 GB free for the default OpenPose to run. I.e., GPUs with only 2 GB will not fit the default OpenPose, and you will have to either switch to the COCO model (slower and less accurate), or reduce the --net_resolution (faster speed but also lower accuracy).

+

Also, hands and face increase the GPU memory requirements, and 4 GB GPUs might run a bit short in some cases.

+

+Video/Webcam Not Working

+

Q: Video and/or webcam are not working - Using a folder with images does work, but the video and/or the webcam do not. Note: often on Windows.

+

A: OpenCV has some issues with some camera drivers and video codecs (especially on Windows). Follow the same steps as in the Webcam is slow question to check that the webcam is working. After re-compiling OpenCV, you can also try this OpenCV example for video.

+

+Cannot Find OpenPose.dll Error (Windows)

+

Q: System cannot find the file specified (Openpose.dll) error when trying to release. Note: often on Windows.

+

A: Visual Studio (VS) and the doc/installation/0_index.md section is only intended if you plan to modify the OpenPose code or integrate it with another library or project. If you just want to use the OpenPose demo, simply follow doc/installation/0_index.md#windows-portable-demo and download the OpenPose binaries in the Releases section.

+

If you need to compile it with Visual Studio (VS), then keep reading. With this error, VS is simply saying that there were errors while compiling the OpenPose library. Try compiling only the OpenPose library (not the demo) by right-clicking on it, then Set as StartUp Project, and finally right click + Build. Then, at the bottom left part of VS, press Error List and you should see which errors VS encountered while compiling. That way, VS gives you the exact error so you can identify and share the exact issue.

+

If there were no errors, then setting OpenPoseDemo as the main project again and pressing F5 (or the green play button) should work.

+

Note: OpenPose is not an executable, but a library. So clicking F5 or the green button instead of Build will give you an error similar to openpose.dll is not a valid Win32 application.

+

+Free Invalid Pointer Error

+

Q: I am getting an error of the type: munmap_chunk()/free/invalid pointer.

+

A: In order to run OpenCV 3.X and Caffe simultaneously, OpenCV must be compiled without WITH_GTK and with WITH_QT flags. On Ubuntu 16.04 the qt5 package is "qt5-default" and the OpenCV cmake option is WITH_QT.

+

+Source Directory does not Contain CMakeLists.txt (Windows)

+

Q: I am getting an error of the type: The source directory {path to file} does not contain a CMakeLists.txt file..

+

A: You might not have write access to that folder. If you are in Windows, you should not try to install it in Program Files.

+

+Always Zero People Detected

+

Q: Always 0 people detected and displayed in default video and images.

+

A: This answer assumes that not a single person is ever detected. If in your case it works sometimes, then check Very Few People Detected. This always-0-people problem usually occurs in 2 situations: 1) when you select --num_gpu 0, and 2) when the caffemodel has not been properly downloaded. E.g., if the connection drops when downloading the models.

+

For problem 1, setting --num_gpu 0 means that no processing is done, so you can use this setting, e.g., to record webcam video. This functionality is kept for backward compatibility. You were most probably trying to run in CPU-only mode; for that, install OpenPose in CPU-only mode following doc/installation/0_index.md.

+

For problem 2, try the following solutions (in this order):

+
    +
  1. Assuming that default OpenPose (i.e., BODY_25 model) failed, try with --model_pose COCO and --model_pose MPII models. If any of them work, the caffemodel files of the other models were corrupted while being downloaded. Otherwise, it will most probably be a Caffe/protobuf issue.
  2. +
  3. Assuming that the model is corrupted, remove the current models in the model folder and download them manually from the links in doc/installation/0_index.md. Alternatively, remove them and re-run CMake. If this does not work, try downloading the COCO_25 model from the browser following this Dropbox link.
  4. +
  5. If none of the OpenPose models are working, make sure Caffe is working properly and that you can run the Caffe examples with other caffemodel / prototxt files.
  6. +
+
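As an illustration of step 1, and assuming the standard Ubuntu path of the demo binary and the bundled sample images (both assumptions; adapt the paths to your installation), the alternative models can be tested roughly as follows:

# Try the COCO and MPI models on the sample images:
./build/examples/openpose/openpose.bin --image_dir examples/media/ --model_pose COCO
./build/examples/openpose/openpose.bin --image_dir examples/media/ --model_pose MPI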

+Very Few People Detected

+

Q: Low detection rate - It can detect people in some images (usually with higher contrast and bigger people), but it fails for most images with low resolution or small people.

+

A: Images with low resolution or very small people will simply not work very well. However, results can be highly improved by using the maximum accuracy configuration detailed in doc/01_demo.md#maximum-accuracy-configuration.
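For reference, the maximum accuracy configuration referenced above looks roughly like the following; the flag values come from doc/01_demo.md, while the binary and image paths are assumptions for a standard Ubuntu build (it also requires a GPU with a large amount of memory):

# Illustrative maximum-accuracy run (very GPU-memory hungry):
./build/examples/openpose/openpose.bin --image_dir examples/media/ --net_resolution "1312x736" --scale_number 4 --scale_gap 0.25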

+

+Check Failed for ReadProtoFromBinaryFile (Failed to Parse NetParameter File)

+

Q: I am facing an error similar to: Check failed: ReadProtoFromBinaryFile(param_file, param) Failed to parse NetParameter file: models/pose/body_25/pose_iter_584000.caffemodel

+

A: Same answer as for Always Zero People Detected.

+

+3D OpenPose Returning Wrong Results: 0, NaN, Infinity, etc.

+

Q: 3D OpenPose is returning wrong results.

+

A: In 99.99% of cases, this is due to wrong or poor calibration. Repeat the calibration, making sure that the final reprojection error is about 0.1 pixels or less.

+

+Protobuf Clip Param Caffe Error

+

Q: Runtime error similar to:

[libprotobuf ERROR google/protobuf/message_lite.cc:123] Can't parse message of type "caffe.NetParameter" because it is missing required fields: layer[0].clip_param.min, layer[0].clip_param.max
F0821 14:26:29.665053 22812 upgrade_proto.cpp:97] Check failed: ReadProtoFromBinaryFile(param_file, param) Failed to parse NetParameter file: models/pose/body_25/pose_iter_584000.caffemodel

A: This error has been solved in the latest OpenPose versions. Completely remove OpenPose and re-download the latest version (just cleaning the compilation or removing the build/ folder will not work).

+

If you want to use your custom Caffe and it has this error: this error only happens on some Ubuntu machines. Following #787, compile your own Caffe with an older version of it. The hacky way (quick but not recommended) is to follow #787#issuecomment-415476837; the elegant way (compatible with future OpenPose versions) is to build your own Caffe independently, following doc/installation/0_index.md#custom-caffe-ubuntu-only.

+

+The Human Skeleton Looks like Dotted Lines Rather than Solid Lines

+

Q: When I use the demo to process my images, the skeletons are drawn as dotted lines. How can I make them solid lines?

+

A: The reason is that your input image size is too small. You can either 1) manually rescale your images up, or 2) use a bigger --output_resolution so OpenPose will resize them up.
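For example (the binary path, image folder, and resolution value below are illustrative; pick an output resolution close to HD or above):

# Render the output at a larger resolution so the skeleton is drawn with solid lines:
./build/examples/openpose/openpose.bin --image_dir examples/media/ --output_resolution "1280x720"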

+

+CUDA_cublas_device_LIBRARY Not Found

+

Q: I encounter an error similar to the following:

CMake Error: The following variables are used in this project, but they are set to NOTFOUND.
Please set them or make sure they are set and tested correctly in the CMake files:
CUDA_cublas_device_LIBRARY (ADVANCED)
    linked by target "caffe" in directory /home/jakebmalis/Documents/openpose/3rdparty/caffe/src/caffe

A: Make sure to download and install CMake-GUI following the doc/1_prerequisites.md section. This is a known problem with CMake-GUI versions from 3.8 to 3.11 (unfortunately, default Ubuntu 18 CMake-GUI uses 3.10). You will need a CMake version >= 3.12.

+

+CMake-GUI Error While Getting Default Caffe

+

Q: It seems to me that CMake-GUI does not download Caffe at all. I tried to wipe everything and install OpenPose again, but received the same error. I also checked whether CMake follows the ifs in CMakeLists.txt correctly and reaches the branches where it establishes that Caffe needs to be downloaded, and it seems to do so.

+

A: There are 2 solutions to try. First, if you were using an old OpenPose version and just updated it, simply remove that OpenPose folder completely, then re-download and re-compile OpenPose. Second, and only if after re-cloning master and running CMake-GUI the 3rdparty/caffe/ folder stays empty, manually trigger the git submodules to update. So the clone step becomes:

git clone https://github.com/CMU-Perceptual-Computing-Lab/openpose
cd openpose
git submodule init
git submodule update

+Libgomp Out of Memory Error

+

Q: When I start OpenPose, I receive an error similar to the following: libgomp: Out of memory allocating 927712937064 bytes.

+

A: Reinstall and upgrade CUDA, cuDNN, Python, and OpenCV from scratch (GitHub issue #1160).

+

+Runtime Error with Turing GPU (Tesla T4) or Volta GPU

+

Q: When I start OpenPose, I receive a runtime error for new GPU architectures.

+

A: To solve this problem, 1) make sure you are using CUDA 10 or higher, and 2) change line 7 in {OPENPOSE_PATH}/3rdparty/caffe/cmake/Cuda.cmake, from set(Caffe_known_gpu_archs "30 35 50 52 60 61") to set(Caffe_known_gpu_archs "30 35 50 52 60 61 75").
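Equivalently, the edit in step 2 can be applied from the command line. The following sed call is only a sketch: it assumes the line has exactly the original contents quoted above and that you run it from {OPENPOSE_PATH}:

# Append compute capability 7.5 (Turing) to the known GPU architectures in Caffe's CMake config:
sed -i 's/set(Caffe_known_gpu_archs "30 35 50 52 60 61")/set(Caffe_known_gpu_archs "30 35 50 52 60 61 75")/' 3rdparty/caffe/cmake/Cuda.cmake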

+

+Obscure CMake Error about Caffe or Pybind

+

Q: There appear some weird and obscure errors on CMake about Caffe and/or Pybind.

+

A: Check doc/installation/0_index.md to run the git submodule command, i.e.,

git submodule update --init --recursive --remote
+

+Speed Performance Issues

+

+Speed Up, Memory Reduction, and Benchmark

+

Q: Low speed - OpenPose is quite slow, is it normal? How can I speed it up?

+

A: Check doc/06_maximizing_openpose_speed.md to discover the approximate speed of your graphics card and some speed tips.

+

+How to Measure the Latency Time?

+

Q: How to measure/calculate/estimate the latency/lag time?

+

A: Profile the OpenPose speed. For 1-GPU or CPU-only systems (use --disable_multi_thread for simplicity in multi-GPU systems for latency measurement), the latency will be roughly the sum of all the reported measurements.
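A possible invocation for such a measurement, assuming OpenPose was compiled with the PROFILER_ENABLED CMake flag and using the standard Ubuntu binary and sample video paths (both assumptions):

# Single-threaded run so that the reported per-worker times roughly add up to the total latency:
./build/examples/openpose/openpose.bin --video examples/media/video.avi --disable_multi_thread --profile_speed 100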

+

+CPU Version Too Slow

+

Q: The CPU version is insanely slow compared to the GPU version.

+

A: Check doc/06_maximizing_openpose_speed.md#cpu-version to discover the approximate speed and some speed tips.

+

+Profiling Speed and Estimating FPS without Display

+

Check the doc/06_maximizing_openpose_speed.md#profiling-speed section.

+

+Webcam Slower than Images

+

Q: Webcam is slow - Using a folder with images matches the FPS benchmarks, but the webcam gives lower FPS. Note: often on Windows.

+

A: OpenCV has some issues with some camera drivers (especially on Windows). The first step should be to compile OpenCV yourself and re-compile OpenPose after that (following the doc/installation/0_index.md#reinstallation section). If the speed is still low, you can better debug it by running a webcam OpenCV example (e.g., this C++ example). If you are able to get the proper FPS with the OpenCV demo but OpenPose FPS is still low, then let us know!

+

+Accuracy Issues

+

+Is Maximum Accuracy Configuration Possible on Lower End GPUs?

+

Q: I've read that this command provides the most accurate results possible with OpenPose so far: https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/doc/01_demo.md#maximum-accuracy-configuration. However, an 8GB GPU (e.g., 1080 or 2080) will run out of memory. Is there any method to achieve the same accuracy on the GPU using less memory, even if it means sacrificing speed?

+

A: Unfortunately, there is no way at the moment; Caffe simply requires that much memory for it. You can try --scale_number 3 instead of 4, reducing the net_resolution a bit (e.g., 720 instead of 736), and starting the computer without a GUI (the desktop GUI alone takes about 1GB of GPU memory).
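A sketch of such a reduced-memory variant, combining the suggestions above (the binary path and the exact values are illustrative, not an official configuration):

# Lower-memory approximation of the maximum-accuracy configuration:
./build/examples/openpose/openpose.bin --image_dir examples/media/ --net_resolution "1280x720" --scale_number 3 --scale_gap 0.25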

+

+Other FAQ

+

+How Should I Link my IP Camera?

+

Q: How should I link my IP camera using the HTTP protocol?

+

A: Usually with http://CamIP:PORT_NO./video?x.mjpeg.
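For instance, assuming the demo exposes the --ip_camera input flag (an assumption; check the flag list of your OpenPose version) and the standard Ubuntu binary path, the stream URL above would be passed roughly like this:

# Read an HTTP MJPEG stream from the IP camera (replace CamIP and PORT_NO. with your camera's values):
./build/examples/openpose/openpose.bin --ip_camera "http://CamIP:PORT_NO./video?x.mjpeg"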

+

+Difference between BODY_25 vs. COCO vs. MPI

+

The COCO model will eventually be removed. The BODY_25 model is faster, more accurate, and includes foot keypoints. However, COCO requires less GPU memory (it fits into 2GB GPUs with the default settings) and runs faster in CPU-only mode. The MPI model is only meant for people requiring the MPI keypoint structure; it is also slower than BODY_25 and far less accurate.

+

Key differences:

  • BODY_25 is the fastest one on the GPU, the most accurate one, and the one with the highest number of keypoints (including foot keypoints!), so it is highly recommended. It also uses the largest amount of RAM/GPU memory.
  • COCO is faster than BODY_25 on the CPU, but slower on the GPU and less accurate.
  • MPI is the fastest on the CPU but the least accurate of all 3.

+Huge RAM Usage

+

Q: During the execution of the demo, the CPU usage oscillates between 50 and 99%, but the RAM is almost at 100%.

+

A: Highly reducing the --net_resolution and following the tips in the Speed Up, Memory Reduction, and Benchmark section are the only ways to reduce RAM usage. Alternatively, you can disable USE_MKL in CMake, which will highly reduce the RAM usage of the Caffe version, but it might also reduce the program speed.
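A minimal sketch of such a reduced-resolution run (the binary path, image folder, and value are illustrative; lower values reduce memory further at the cost of accuracy):

# Reduce the network input resolution to cut RAM/GPU memory usage:
./build/examples/openpose/openpose.bin --image_dir examples/media/ --net_resolution "-1x256"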

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_06_maximizing_openpose_speed.html b/web/html/doc/md_doc_06_maximizing_openpose_speed.html new file mode 100644 index 000000000..a7a920f63 --- /dev/null +++ b/web/html/doc/md_doc_06_maximizing_openpose_speed.html @@ -0,0 +1,138 @@ + + + + + + + +OpenPose: OpenPose Doc - Maximizing the OpenPose Speed + + + + + + + + + + + + + +
OpenPose Doc - Maximizing the OpenPose Speed
+
+
+

+Contents

+
  1. OpenPose Benchmark
  2. Profiling Speed
  3. CPU Version
  4. Speed Up Preserving Accuracy
  5. Speed Up and Memory Reduction

+OpenPose Benchmark

+

Check the OpenPose Benchmark to discover the approximate expected speed of your graphics card.

+

+CPU Version

+

The CPU version runs at about 0.3 FPS on the COCO model, and at about 0.1 FPS (i.e., about 15 sec / frame) on the default BODY_25 model. Switch to the COCO model and/or reduce the net_resolution (as indicated in the Speed Up and Memory Reduction section) to speed it up. Contradictory fact: the BODY_25 model is about 5x slower than COCO in the CPU-only version, but about 40% faster in the GPU version.

+

On Ubuntu (for OS versions older than 20), you can also boost CPU-only speed by 2-3x by following installation/0_index.md#faster-cpu-version-ubuntu-only.

+

+Profiling Speed

+

OpenPose displays the FPS in the basic GUI. However, more complex speed metrics can be obtained from the command line while running OpenPose. In order to obtain those, compile OpenPose with the PROFILER_ENABLED flag on CMake-gui. OpenPose will automatically display time measurements for each subthread after processing F frames (by default F = 1000, but it can be modified with the --profile_speed flag, e.g. --profile_speed 100).

+
  • Time measurement for 1 graphic card: The FPS will be the slowest time displayed in your terminal command line (as OpenPose is multi-threaded). Times are in milliseconds, so FPS = 1000/millisecond_measurement.
  • Time measurement for >1 graphic cards: Assuming n graphic cards, you will have to wait up to n x F frames to visualize each graphic card speed (as the frames are split among them). In addition, the FPS would be: FPS = minFPS(speed_per_GPU/n, worst_time_measurement_other_than_GPUs). For < 4 GPUs, this is usually FPS = speed_per_GPU/n.

Make sure that wPoseExtractor is the slowest timing. Otherwise, the input producer (video/webcam codec issues with OpenCV, images too big, etc.) or the GUI display (use OpenGL support, as detailed in the next section, Speed Up Preserving Accuracy) might not be optimized.
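As an example of the multi-GPU case above, and assuming a 2-GPU machine plus the standard Ubuntu binary and sample video paths (both assumptions), a profiling run could look as follows; remember that the per-GPU timings only appear after up to n x F processed frames:

# Profile a 2-GPU run; with the default F = 1000, expect to wait up to 2000 frames:
./build/examples/openpose/openpose.bin --video examples/media/video.avi --num_gpu 2 --profile_speed 1000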

+

+Speed Up Preserving Accuracy

+

Some speed tips to maximize the OpenPose runtime speed while preserving the accuracy (do not expect miracles, but it might help a bit boosting the framerate):

1. Enable the `WITH_OPENCV_WITH_OPENGL` flag in CMake to get a much faster GUI display (see the example CMake call after this list). It reduces the lag and increases the speed of displaying images by telling OpenCV to render them using OpenGL support. This speeds up display rendering about 3x; e.g., it reduces the display time for HD-resolution images from about 30 ms to about 3-10 ms. It requires OpenCV to be compiled with OpenGL support, and it provokes a visual aspect-ratio artifact when rendering a folder with images of different resolutions. Note: Default OpenCV in Ubuntu 16 (from apt-get install) does have OpenGL support included. Nevertheless, the default one from Ubuntu 18 and the Windows portable binaries do not.
+2. Switch from GPU rendering to CPU rendering to get approximately +0.5 FPS (`--render_pose 1`).
+3. Use cuDNN 5.1 or 7.2 (cuDNN 6 is ~10% slower).
+4. Use the `BODY_25` model for maximum speed and accuracy simultaneously (both COCO and MPII models are slower and less accurate). However, it does increase GPU memory usage, so it might run out of memory more easily on low-memory GPUs.
+5. Enable the AVX flag in CMake-GUI (if your computer supports it).
+
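A sketch of the CMake call for tip 1, assuming an out-of-source build directory inside the OpenPose folder and an OpenCV build that already includes OpenGL support (both are assumptions about your setup):

# Re-configure and re-build OpenPose with the OpenGL-based display path enabled:
cd build
cmake -DWITH_OPENCV_WITH_OPENGL=ON ..
make -j`nproc`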

+Speed Up and Memory Reduction

+

Some speed tips to highly maximize the OpenPose speed, but keep in mind the accuracy trade-off:

1. Reduce the `--net_resolution` (e.g., to 320x176) (lower accuracy). Note: For maximum accuracy, follow [doc/01_demo.md#maximum-accuracy-configuration](01_demo.md#maximum-accuracy-configuration).
+2. For face, reduce the `--face_net_resolution`. The resolution 320x320 usually works pretty decently.
+3. Points 1-2 will also reduce the GPU memory usage (or RAM memory for CPU version).
+4. Use the `BODY_25` model for maximum speed. Use `MPI_4_layers` model for minimum GPU memory usage (but lower accuracy, speed, and number of parts).
+
+
+
+ + + + diff --git a/web/html/doc/md_doc_07_major_released_features.html b/web/html/doc/md_doc_07_major_released_features.html new file mode 100644 index 000000000..3b9007165 --- /dev/null +++ b/web/html/doc/md_doc_07_major_released_features.html @@ -0,0 +1,123 @@ + + + + + + + +OpenPose: OpenPose Doc - Major Released Features + + + + + + + + + + + + + +
OpenPose Doc - Major Released Features
+
+
+
+
+
+
+ + + + diff --git a/web/html/doc/md_doc_08_release_notes.html b/web/html/doc/md_doc_08_release_notes.html new file mode 100644 index 000000000..80c38da6d --- /dev/null +++ b/web/html/doc/md_doc_08_release_notes.html @@ -0,0 +1,680 @@ + + + + + + + +OpenPose: OpenPose Doc - Release Notes + + + + + + + + + + + + + +
OpenPose Doc - Release Notes
+
+
+

+OpenPose 1.0.0rc1 (Apr 24, 2017)

+
    +
  1. Initial version, main functionality:
      +
    1. Body keypoint detection and rendering in Ubuntu 14 and 16.
    2. +
    3. It can read an image directory, video or webcam.
    4. +
    5. It can display the results or store them on disk.
    6. +
    +
  2. +
+

+OpenPose 1.0.0rc2 (May 10, 2017)

+
    +
  1. Main improvements:
      +
    1. Rendering max resolution from 720p to >32k images.
    2. +
    3. Highly improved documentation.
    4. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. Demo renamed from rtpose to openpose.
    2. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Demo uses exec instead of start, so it works with more OpenCV custom compiled versions.
    2. +
    +
  6. +
+

+OpenPose 1.0.0rc3 (Jun 8, 2017)

+
    +
  1. Main improvements:
      +
    1. Added face keypoint detection.
    2. +
    3. Added Windows 10 compatibility.
    4. +
    5. Auto-detection of the number of GPUs.
    6. +
    7. MPI visualization is more similar to COCO one.
    8. +
    9. Rendering max resolution from 720p to >32k images.
    10. +
    11. GUI info adder working when the worker TDatum has more than 1 Datum.
    12. +
    13. It prints out the error description before throwing the exception (so that it is written on the Windows cmd).
    14. +
    15. Highly improved documentation.
    16. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. Flag --write_pose renamed as --write_keypoint and it also applies to face and/or hands.
    2. +
    3. Flag --write_pose_json renamed as --write_keypoint_json and it also applies to face and/or hands.
    4. +
    5. Flag --write_pose_format renamed as --write_keypoint_format and it also applies to face and/or hands.
    6. +
    7. PoseSaver and its JSON variant renamed as KeypointSaver.
    8. +
    9. PoseJsonCocoSaver renamed as CocoJsonSaver.
    10. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. All visualization functions moved to the same thread, so it works with most OpenCV custom compiled versions.
    2. +
    3. Fixed error on debug mode: Too many resources requested for launch.
    4. +
    +
  6. +
+

+OpenPose 1.0.0 (Jul 8, 2017)

+
    +
  1. Main improvements:
      +
    1. Added hand keypoint detection.
    2. +
    3. Windows branch merged to master branch.
    4. +
    5. Face and hands use Maximum instead of Nms, since there is only 1 person / detection.
    6. +
    7. Increased accuracy on multi-scale (added Datum::scaleRatios to save the relative scale ratio when multi-scale).
    8. +
    9. Increased speed ~5% by adding CPU rendering (but GPU is the default rendering).
    10. +
    11. Rendering colors modified, visually better results.
    12. +
    13. Rendering threshold for pose, face and hands becomes user-configurable.
    14. +
    15. Check() functions give more feedback.
    16. +
    17. WCocoJsonSaver finished and removed its 3599-image limit.
    18. +
    19. Added --camera_fps so generated video (--write_video) will use that frame rate.
    20. +
    21. Reduced the number of printed information messages. Default logging priority threshold increased to Priority::Max.
    22. +
    23. GFlags to OpenPose configuration parameters reader moved from each demo to utilities/flagsToOpenPose.
    24. +
    25. Nms classes do not use numberParts for Reshape, they deduce the value.
    26. +
    27. Improved documentation.
    28. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. Render flags renamed in the demo in order to incorporate the CPU/GPU rendering.
    2. +
    3. Keypoints saved in JSON files (--write_keypoint_json) are now saved as pose_keypoints, face_keypoints, hand_left_keypoints, and hand_right_keypoints. They all were previously saved as body_parts.
    4. +
    5. Flag --num_scales renamed as --scale_number.
    6. +
    7. All hand and pose flags renamed such as they start by --hand_ and --face_ respectively.
    8. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Fixed bug in Array::getConstCvMat() if mVolume=0, now returning empty cv::Mat.
    2. +
    3. Fixed bug: --process_real_time threw error with webcam.
    4. +
    5. Fixed bug: Face not working when input and output resolutions are different.
    6. +
    7. Fixed some bugs that prevented the debug version to run.
    8. +
    9. Face saved in JSON files were called --body_parts. Now they are called --face_keypoints.
    10. +
    +
  6. +
+

+OpenPose 1.0.1 (Jul 11, 2017)

+
    +
  1. Main improvements:
      +
    1. Windows library turned into DLL dynamic library (i.e., portable).
    2. +
    3. Improved documentation.
    4. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. openpose/utilities/macros.hpp moved to openpose/utilities/macros.hpp.
    2. +
    +
  4. +
+

+OpenPose 1.0.2 (Sep 3, 2017)

+
    +
  1. Main improvements:
      +
    1. Added OpenCV 3.3 compatibility.
    2. +
    3. Caffe turned into a DLL library.
    4. +
    5. OpenPose is now completely portable across Windows 10 computers (with Nvidia graphic card).
    6. +
    7. Added OpenPose 1.0.1 portable demo.
    8. +
    9. Removed Python and some unnecessary boost dependencies on the VS project.
    10. +
    11. Replaced all double quotes by angle brackets in include statements (issue #61).
    12. +
    13. Added 3-D reconstruction demo.
    14. +
    15. Auto-detection of the camera index.
    16. +
    17. Speed up of about 30% in floatPtrToUCharCvMat.
    18. +
    19. COCO extractor now extracts image ID from the image name itself (format "string_%d"). Before, only working with validation test, now applicable to e.g., test sets.
    20. +
    21. Changed display texts, added OpenPose name.
    22. +
    +
  2. +
  3. Main bugs fixed:
      +
    1. Pycaffe can now be imported from Python.
    2. +
    3. Fixed Tutorial/Wrapper VS linking errors.
    4. +
    +
  4. +
+

+OpenPose 1.1.0 (Sep 19, 2017)

+
    +
  1. Main improvements:
      +
    1. Added CMake installer for Ubuntu.
    2. +
    3. Added how to use keypoint data in examples/tutorial_wrapper/.
    4. +
    5. Added flag for warnings of type -Wsign-compare and removed in code.
    6. +
    7. Slightly improved accuracy by considering ears-shoulder connection (e.g., +0.4 mAP for 1 scale in validation set).
    8. +
    +
  2. +
  3. Main bugs fixed:
      +
    1. Windows version crashing with std::map copy.
    2. +
    +
  4. +
+

+OpenPose 1.2.0 (Nov 3, 2017)

+
    +
  1. Main improvements:
      +
    1. Speed increase when processing images with different aspect ratios. E.g., ~20% increase over 3.7k COCO validation images on 1 scale.
    2. +
    3. Huge speed increase and memory reduction when processing multi-scale. E.g., over 3.7k COCO validation images on 4 scales: ~40% (~770 to ~450 sec) speed increase, ~25% memory reduction (from ~8.9 to ~6.7 GB / GPU).
    4. +
    5. Slight increase in accuracy thanks to the fixed minor bugs.
    6. +
    7. Added IP camera support.
    8. +
    9. Output images can now have the input size: OpenPose is able to change its size for each image, and a fixed size is no longer required.
        +
      1. FrameDisplayer accepts variable size images by rescaling every time a frame with a bigger width or height is displayed (gui module).
      2. +
      3. OpOutputToCvMat & GuiInfoAdder do not require knowing the output size at construction time; it is deduced from each image.
      4. +
      5. CvMatToOutput and Renderers allow keeping the input resolution as output for images (core module).
      6. +
      +
    10. +
    11. New standalone face keypoint detector based on OpenCV face detector: much faster if body keypoint detection is not required but much less accurate.
    12. +
    13. Face and hand keypoint detectors now can return each keypoint heatmap.
    14. +
    15. The flag USE_CUDNN is no longer required; USE_CAFFE and USE_CUDA (replacing the old CPU_ONLY) are no longer required to use the library, only to build it. In addition, Boost, Caffe, and its dependencies have been removed from the OpenPose header files. Only OpenCV include and lib directories are required when building a project using OpenPose.
    16. +
    17. OpenPose successfully compiles if the flags USE_CAFFE and/or USE_CUDA are not enabled, although it will give an error saying they are required.
    18. +
    19. COCO JSON file outputs 0 as score for non-detected keypoints.
    20. +
    21. Added example for OpenPose for user asynchronous output and cleaned all tutorial_wrapper/ examples.
    22. +
    23. Added -1 option for --net_resolution in order to auto-select the best possible aspect ratio given the user input.
    24. +
    25. Net resolution can be dynamically changed (e.g., for images with different size).
    26. +
    27. Added example to add functionality/modules to OpenPose.
    28. +
    29. Added --disable_multi_thread flag in order to allow debug and/or highly reduce the latency (e.g., when using webcam in real-time).
    30. +
    31. Allowed to output images without any rendering.
    32. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. OpenPose able to change its size and initial size dynamically:
        +
      1. Flag --resolution renamed as --output_resolution.
      2. +
      3. FrameDisplayer, GuiInfoAdder and Gui constructors arguments modified (gui module).
      4. +
      5. OpOutputToCvMat constructor removed (core module).
      6. +
      7. New Renders classes to split GpuRenderers from CpuRenderers.
      8. +
      9. Etc.
      10. +
      +
    2. +
    3. OpenPose able to change its net resolution size dynamically:
        +
      1. Changed several functions on core/, pose/, face/, and hand/ modules.
      2. +
      +
    4. +
    5. CPU_ONLY changed by USE_CUDA to keep format.
    6. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Scaling resize issue fixed: approximately 1-pixel offset due to not considering 0-based indexes.
    2. +
    3. Ubuntu installer script now works even if Python pip was not installed previously.
    4. +
    5. Flags to set first and last frame as well as jumping frames backward and forward now works on the image directory reader.
    6. +
    +
  6. +
+

+OpenPose 1.2.1 (Jan 9, 2018)

+
    +
  1. Main improvements:
      +
    1. Heatmaps can be saved in floating format.
    2. +
    3. More efficient non-processing version (i.e., if all keypoint extractors are disabled, and only image extraction and display/saving operations are performed).
    4. +
    5. Heat maps scaling: Added --heatmaps_scale to OpenPoseDemo, added option not to scale the heatmaps, and added custom float format to save heatmaps in floating format.
    6. +
    7. Detector of the number of GPUs also considers the initial GPU index given by the user.
    8. +
    9. Added --write_json as a new version of --write_keypoint_json. It includes the body part candidates (if enabled), as well as any extra information added in the future (e.g., person ID).
    10. +
    11. Body part candidates can be retrieved in Datum and saved with --write_json.
    12. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. PoseParameters split into PoseParameters and PoseParametersRender and const parameters turned into functions for more clarity.
    2. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Render working on images > 4K (#324).
    2. +
    3. Cleaned redundant arguments on getAverageScore and getKeypointsArea.
    4. +
    5. Slight speed up when heatmaps must be returned to the user (not doing a double copy anymore).
    6. +
    +
  6. +
+

+OpenPose 1.3.0 (Mar 24, 2018)

+
    +
  1. Main improvements:
      +
    1. Output of --write_json uses less hard disk space (enters and tabs removed).
    2. +
    3. Removed Boost dependencies.
    4. +
    5. Caffe added as a submodule.
    6. +
    7. CMake installer compatible with Windows.
    8. +
    9. Added freeglut download script (3-D reconstruction demo for Windows).
    10. +
    11. Added Debug version for Windows (CMake).
    12. +
    13. Runtime verbose about average speed configurable by user with PROFILER_ENABLED option (CMake/Makefile.config) and --profile_speed flag.
    14. +
    15. Lighter Caffe version compiled by CMake in Ubuntu: disabled Caffe extra support (e.g., OpenCV, Python) and doc.
    16. +
    17. Renamed CMake binaries (Ubuntu) to match old Makefile format: _bin by .bin.
    18. +
    19. 3-D reconstruction demo cleaned, implemented in Ubuntu too, and now defined as module of OpenPose rather than just a demo.
    20. +
    21. CMake as default installer in documentation.
    22. +
    23. Added flag: number_people_max to optionally select the maximum number of people to be detected.
    24. +
    25. 3-D reconstruction module forces the user to set number_people_max 1 to avoid errors (as it assumes only 1 person per image).
    26. +
    27. Removed old windows/ version. CMake is the only Windows version available.
    28. +
    29. Camera parameters (flir camera) are read from disk at runtime rather than being compiled.
    30. +
    31. 3-D reconstruction module can be implemented with different camera brands or custom image sources.
    32. +
    33. Flag --write_json includes 3-D keypoints.
    34. +
    35. 3-D reconstruction module can be used with images and videos. Flag --3d_views added to allow --image_dir and --video to load stereo images.
    36. +
    37. Flag --camera_resolution applicable to --flir_camera.
    38. +
    39. Throwing an error message if requested GPU IDs do not exist (e.g., asking for 2 GPUs starting with ID 1 if there are only 2 GPUs in total).
    40. +
    41. VideoSaver (--write_video) compatible with multi-camera setting. It will save all the different views concatenated.
    42. +
    43. The OpenPose small GUI rescales the verbose text to the displayed image, to avoid the text being either too big or too small.
    44. +
    45. OpenPose small GUI shows the frame number w.r.t. the original producer, rather than the frame id. E.g., if video is started at frame 30, OpenPose will display 30 rather than 0 in the first frame.
    46. +
    47. OpenPose GUI: 'l' and 'k' functionality swapped.
    48. +
    49. 3-D reconstruction module: Added flag --3d_min_views to select minimum number of cameras required for 3-D reconstruction.
    50. +
    51. Flir camera producer n times faster for n cameras (multi-threaded). If the number of cameras is greater than the number of the computer threads, the speed up might not be exactly n times.
    52. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. Flag no_display renamed as display, able to select between NoDisplay, Display2D, Display3D, and DisplayAll.
    2. +
    3. 3-D reconstruction demo is now inside the OpenPose demo binary.
    4. +
    5. Renamed *_keypoints by *_keypoints_2d to avoid confusion with 3d ones in --write_json output file.
    6. +
    7. CvMatToOpInput requires PoseModel to know the normalization to be performed.
    8. +
    9. Created net/ module in order to reduce core/ number of classes and files and for future scalability.
    10. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Slight speed up (around 1%) for performing the non-maximum suppression stage only in the body part heatmaps channels, and not also in the PAF channels.
    2. +
    3. Fixed core-dumped in PoseRenderer with GUI when changed element to be rendered to something else than skeleton.
    4. +
    5. 3-D visualizer does not crash on exit anymore.
    6. +
    7. Fake pause ('m' key pressed) works again.
    8. +
    +
  6. +
+

+OpenPose 1.4.0 (Sep 01, 2018)

+
    +
  1. Main improvements:
      +
    1. Model BODY_25 released, that includes the 17 COCO keypoints + neck + midhip + 6 foot keypoints. It is also about 3% more accurate and 30% faster than the original COCO model.
    2. +
    3. New calibration module: Intrinsic and extrinsic camera calibration toolbox based on OpenCV.
    4. +
    5. Improvements involving Flir cameras:
        +
      1. Added software trigger and a dedicated thread to keep reading images so latency is removed and runtime is faster (analogously to webcamReader).
      2. +
      3. Undistortion of the images is x3.5 faster per camera, i.e., x3.5 Flir camera producer reading w.r.t previous multi-threaded version, which was x number_cameras faster than the original version.
      4. +
      5. Added flag flir_camera_index to allow running on all the cameras at once, or only on 1 camera at the time.
      6. +
      7. Added flag frame_keep_distortion not to undistort the images. E.g., useful when recording images for camera calibration.
      8. +
      9. Changed Spinnaker::DEFAULT image extraction mode by Spinnaker::IPP, which does not show a pixelated image while keeping a very similar runtime.
      10. +
      +
    6. +
    7. 3-D reconstruction:
        +
      1. Added non-linear minimization to further improve 3-D triangulation accuracy by ~5% (Ubuntu only).
      2. +
      3. It is only run if reprojection error is more than a minimum threshold (improve speed with already good quality results) and also less than another outlier threshold.
      4. +
      5. Outliers are removed from the final result if >= 3 camera views.
      6. +
      7. Applied RANSAC if >=4 camera views.
      8. +
      9. Latency highly reduced in multi-GPU setting. Each GPU processes a different camera view, instead of a different time-instant sequence.
      10. +
      +
    8. +
    9. CMake: All libraries as a single variable (simpler to add/remove libraries).
    10. +
    11. Averaged latency reduced to half.
    12. +
    13. 15% speed up for the default CMake version. CMake was not setting Release mode by default.
    14. +
    15. Light speed up, and body approach much more invariant to number of people. Removed checkEQ from tight loop in bodyPartConnectorBase, which took a huge time exponential to the number of people.
    16. +
    17. Datum includes extrinsic and intrinsic camera parameters.
    18. +
    19. Function scaleKeypoints(Array<float>& keypoints, const float scale) also accepts 3D keypoints.
    20. +
    21. 3D keypoints and camera parameters in meters (instead of millimeters) in order to reduce numerical errors.
    22. +
    23. New PoseExtractor class to contain future ID and tracking algorithms as well as the current OpenPose keypoint detection algorithm.
    24. +
    25. Added initial alpha versions of the tracking and identification modules (for now disabled but available in the source code), including PersonIdExtractor and PersonTracker. PersonIdExtractor includes greedy matrix OP-LK matching.
    26. +
    27. Added catches to all demos for higher debug information.
    28. +
    29. GUI includes the capability of dynamically enabling/disabling the face, hand, and 3-D rendering, as well as more clear visualization for skeleton, background, heatmap addition, and PAF addition channels.
    30. +
    31. When GUI changes some parameter from PoseExtractorNet, there is a log to notify the user of the change.
    32. +
    33. Deprecated flag --write_keypoint_json removed (--write_json is the equivalent since version 1.2.1).
    34. +
    35. Speed up of cvMatToOpOutput and opOutputToCvMat: Datum::outputData is now H x W x C instead of C x H x W, making it much faster to be copied to/from Datum::cvOutputData.
    36. +
    37. Much faster GUI display by adding the WITH_OPENCV_WITH_OPENGL flag to tell whether to use OpenGL support for OpenCV.
    38. +
    39. Turned sanity check error into warning when using dynamic net_resolution for image_dir in CPU/OpenCL versions.
    40. +
    41. Minimized CPU usage when queues are empty or full, in order to prevent problems such as general computer slow down, overheating, or excessive power usage.
    42. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. Removed scale parameter from hand and face rectangle extractor (causing wrong results if custom --output_resolution).
    2. +
    3. Functions scaleKeypoints, other than scaleKeypoints(Array<float>& keypoints, const float scale), renamed as scaleKeypoints2d.
    4. +
    5. (W)PoseExtractor renamed to (W)PoseExtractorNet to distinguish from the new PoseExtractor. Analogously with (W)FaceExtractorNet and (W)HandExtractorNet.
    6. +
    7. Experimental module removed and internal tracking directory moved to main openpose directory.
    8. +
    9. Switched GUI shortcuts for the kind of channel to render (skeleton, heatmap, PAF, ...) in order to make it more intuitive: 1 for skeleton, 1 for background heatmap, 2 for adding all heatmaps, 3 for adding all PAFs, and 4 to 0 for the initial heatmaps.
    10. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Fixed hand and face extraction and rendering scaling issues when --output_resolution is not the default one.
    2. +
    3. Part candidates (--part_candidates) are saved with the same scale than the final keypoints itself.
    4. +
    5. Fixed bug in keepTopNPeople.hpp (--number_people_max) that provoked core dumped if lots of values equal to the threshold.
    6. +
    7. Flir cameras: Cameras sorted by serial number. Video and images recorded from flir cameras were (and are) assigned the camera parameters based on serial number order, so it would fail if the cameras order was not the same as if sorted by serial number.
    8. +
    9. CPU version working in non-Nvidia Windows machines.
    10. +
    +
  6. +
+

+OpenPose 1.5.0 (May 16, 2019)

+
    +
  1. Main improvements:
      +
    1. Added initial single-person tracker for further speed up or visual smoothing (--tracking flag).
    2. +
    3. Speed up of the CUDA functions of OpenPose:
        +
      1. Greedy body part connector implemented in CUDA: +~30% speedup in Nvidia (CUDA) version with default flags and +~10% in maximum accuracy configuration. In addition, it provides a small 0.5% boost in accuracy (default flags).
      2. +
      3. +5-30% additional speedup for the body part connector of point 1.
      4. +
      5. About 2-4x speedup for NMS.
      6. +
      7. About 2x speedup for image resize and about 2x speedup for multi-scale resize.
      8. +
      9. About 25-30% speedup for rendering.
      10. +
      11. Reduced latency and increased speed by moving the resize in CvMatToOpOutput and OpOutputToCvMat to CUDA. The linear speedup generalizes better to a higher number of GPUs.
      12. +
      +
    4. +
    5. Unity binding of OpenPose released. OpenPose adds the flag BUILD_UNITY_SUPPORT on CMake, which enables special Unity code so it can be built as a Unity plugin.
    6. +
    7. If the camera is unplugged, OpenPose GUI and command line will display a warning and try to reconnect it.
    8. +
    9. Wrapper classes simplified and renamed. Wrapper renamed as WrapperT, and created Wrapper as the non-templated class equivalent.
    10. +
    11. API and examples improved:
        +
      1. New header file flags.hpp that includes all OpenPose flags, removing the need to copy them repeatedly on each OpenPose example file.
      2. +
      3. Renamed tutorial_wrapper as tutorial_api_cpp as well as new examples were added.
      4. +
      +
        +
      1. Renamed tutorial_python as tutorial_api_python as well as new examples were added.
      2. +
      3. Renamed tutorial_thread as tutorial_api_thread, focused on the multi-thread mechanism.
      4. +
      5. Removed tutorial_pose, the directory tutorial_api_cpp includes much cleaner and commented examples.
      6. +
      7. Examples do not end in core dumped if an OpenPose exception occurred during initialization, but they are rather closed returning -1. However, it will still result in core dumped if the exception occurs during multithreading execution.
      8. +
      9. Added new examples, including examples to extract face and/or hand from images.
      10. +
      11. Added --no_display flag for the examples that do not use OpenPose output.
      12. +
      13. Given that display can be disabled in all examples, they all have been added to the Travis build so they can be tested.
      14. +
      +
    12. +
    13. Added a virtual destructor to almost all classes, so they can be inherited. Exceptions (for performance reasons): Array, Point, Rectangle, CvMatToOpOutput, OpOutputToCvMat.
    14. +
    15. Auxiliary classes in errorAndLog turned into namespaces (Profiler must be kept as class to allow static parameters).
    16. +
    17. Added flags:
        +
      1. Added flag --frame_step to allow the user to select the step or gap between processed frames. E.g., --frame_step 5 would read and process frames 0, 5, 10, etc.
      2. +
      3. Previously hardcoded COCO_CHALLENGE variable turned into a user configurable flag --maximize_positives.
      4. +
      5. Added flag --verbose to plot the progress.
      6. +
      7. Added flag --fps_max to limit the maximum processing frame rate of OpenPose (useful to display results at a maximum desired speed).
      8. +
      9. Added sanity checks to avoid --frame_last being smaller than --frame_first or higher than the total number of frames. Also added the flags --prototxt_path and --caffemodel_path to allow custom ProtoTxt and CaffeModel paths.
      10. +
      11. Added the flags --face_detector and --hand_detector, which enable the user to select the face/hand rectangle detector that is used for the later face/hand keypoint detection. It includes OpenCV (for face), and also allows the user to provide their own input. The flag --hand_tracking is removed and integrated into this flag too.
      12. +
      13. Added the flag --upsampling_ratio, which controls the upsampling that OpenPose performs on the frame before the greedy association parsing algorithm.
      14. +
      15. Added the flag --body (replacing --body_disable), which adds the possibility of disabling the OpenPose pose network but still running the greedy association parsing algorithm (on top of the user heatmaps, see the associated tutorial_api_cpp example).
      16. +
      +
    18. +
    19. Array improvements for Pybind11 compatibility:
        +
      1. Array::getStride() to get step size of each dimension of the array.
      2. +
      3. Array::getPybindPtr() to get an editable const pointer.
      4. +
      5. Array::pData as binding of spData.
      6. +
      7. Array::Array that takes as input a pointer, so it does not re-allocate memory.
      8. +
      +
    20. +
    21. Producer defined inside Wrapper rather than being defined on each example.
    22. +
    23. Reduced many Visual Studio warnings (e.g., uncontrolled conversions between types).
    24. +
    25. Added new keypoint-related auxiliary functions in utilities/keypoints.hpp.
    26. +
    27. Function resizeFixedAspectRatio can take already allocated memory (e.g., faster if target is an Array<T> object, no intermediate cv::Mat required).
    28. +
    29. Added compatibility for OpenCV 4.0, while preserving 2.4.X and 3.X compatibility.
    30. +
    31. Improved and added several functions to utilities/keypoints.hpp and Array to simplify keypoint post-processing.
    32. +
    33. Removed warnings from Spinnaker SDK at compiling time.
    34. +
    35. All bash scripts incorporate #!/bin/bash to tell the terminal that they are bash scripts.
    36. +
    37. Added find_package(Protobuf) to allow specific versions of Protobuf.
    38. +
    39. Video saving improvements:
        +
      1. Video (--write_video) can be generated from images (--image_dir), as long as they maintain the same resolution.
      2. +
      3. Video with the 3D output can be saved with the new --write_video_3d flag.
      4. +
      5. Added the capability of saving videos in MP4 format (by using the ffmpeg library).
      6. +
      7. Added the flag write_video_with_audio to enable saving these output MP4 videos with audio.
      8. +
      +
    40. +
    41. Frame undistortion can be applied not only to FLIR cameras, but also to all other input sources (image, webcam, video, etc.).
    42. +
    43. Calibration improvements:
        +
      1. Improved chessboard orientation detection, more robust and less errors.
      2. +
      3. Triangulation functions (triangulate and triangulateWithOptimization) public, so calibration can use them for bundle adjustment.
      4. +
      5. Added bundle adjustment refinement for camera extrinsic calibration.
      6. +
      7. Added CameraMatrixInitial field into the XML calibration files to keep the information of the original camera extrinsic parameters when bundle adjustment is run.
      8. +
      +
    44. +
    45. Added Mac OpenCL compatibility.
    46. +
    47. Added documentation for Nvidia TX2 with JetPack 3.3.
    48. +
    49. Added Travis build check for several configurations: Ubuntu (14/16)/Mac/Windows, CPU/CUDA/OpenCL, with/without Python, and Release/Debug.
    50. +
    51. Assigned 755 access to all sh scripts (some of them were only 644).
    52. +
    53. Replaced the old Python wrapper for an updated Pybind11 wrapper version, that includes all the functionality of the C++ API.
    54. +
    55. Function getFilesOnDirectory() can extract all basic image file types at once without requiring the user to manually enumerate them.
    56. +
    57. Maximum queue size per OpenPose thread is configurable through the Wrapper class.
    58. +
    59. Added pre-processing capabilities to Wrapper (WorkerType::PreProcessing), which will be run right after the image has been read.
    60. +
    61. Removed boost::shared_ptr and caffe::Blob dependencies from the headers. No 3rdparty dependencies left on headers (except dim3 for CUDA).
    62. +
    63. Added Array poseNetOutput to Datum so that the user can introduce his custom network output.
    64. +
    65. OpenPose will never provoke a core dumped or crash. Exceptions in threads (errorWorker() instead of error()) lead to stopping the threads and reporting the error from the main thread, while exceptions in destructors (errorDestructor() instead of error()) are reported with std::cerr but not thrown as std::exceptions.
    66. +
    67. When reading a directory of images, they will be sorted in natural order (rather than regular sort).
    68. +
    69. Windows updates:
        +
      1. Upgraded OpenCV version for Windows from 3.1 to 4.0.1, which provides stable 30 FPS for webcams (vs. 10 FPS that OpenCV 3.1 provides by default on Windows).
      2. +
      3. Upgrade VS2015 to VS2017, allowing CUDA 10 and 20XX Nvidia cards.
      4. +
      +
    70. +
    71. Output JSON updated to version 1.3, which now includes the person IDs (if any).
    72. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. By default, python example tutorial_developer/python_2_pose_from_heatmaps.py was using 2 scales starting at -1x736, changed to 1 scale at -1x368.
    2. +
    3. WrapperStructPose default parameters changed to match those of the OpenPose demo binary.
    4. +
    5. WrapperT.configure() changed from 1 function that requires all arguments to individual functions that take 1 argument each.
    6. +
    7. Added Forward to all net classes that automatically selects between CUDA, OpenCL, or CPU-only versions depending on the defines.
    8. +
    9. Removed old COCO 2014 validation scripts.
    10. +
    11. WrapperStructOutput split into WrapperStructOutput and WrapperStructGui.
    12. +
    13. Replaced flags:
        +
      1. Replaced --camera_fps flag by --write_video_fps, given that it was a confusing name: It did not affect the webcam FPS, but only the FPS of the output video. In addition, the default value changed from 30 to -1.
      2. +
      3. Flag --hand_tracking is a subcase of --hand_detector, so it has been removed and incorporated as --hand_detector 3.
      4. +
      +
    14. +
    15. Renamed --frame_keep_distortion as --frame_undistort, which performs the opposite operation (the default value has been also changed to the opposite).
    16. +
    17. Renamed --camera_parameter_folder as --camera_parameter_path because it could also take a whole XML file path rather than its parent directory.
    18. +
    19. Default value of flag --scale_gap changed from 0.3 to 0.25.
    20. +
    21. Moved most sh scripts into the scripts/ directory. Only models/getModels.sh and the *.bat files are kept under models/ and 3rdparty/windows.
    22. +
    23. For Python compatibility and scalability increase, template TDatums used for include/openpose/wrapper/wrapper.hpp has changed from std::vector<Datum> to std::vector<std::shared_ptr<Datum>>, including the respective changes in all the worker classes. In addition, some template classes have been simplified to only take 1 template parameter for user simplicity.
    24. +
    25. Renamed intRound, charRound, etc. by positiveIntRound, positiveCharRound, etc. so that people can realize it is not safe for negative numbers.
    26. +
    27. Replaced flag --write_coco_foot_json by --write_coco_json_variants in order to generalize to any COCO JSON format (i.e., hand, face, etc).
    28. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. CMake-GUI was forced to Release mode, now also allowed Debug modes.
    2. +
    3. NMS returns in index 0 the number of found peaks. However, while the number of peaks was truncated to a maximum of 127, this index 0 was saving the real number instead of the truncated one.
    4. +
    5. Template functions could not be imported in Windows for projects using the OpenPose library DLL.
    6. +
    7. Function scaleKeypoints2d was not working if any of the scales was 1 (e.g., fail if scaleX = 1 but scaleY != 1, or if any offset was not 0).
    8. +
    9. Fixed bug in KeepTopNPeople that could provoke segmentation fault for number_people_max > 1.
    10. +
    11. Camera parameter reader can now take directory paths even if they are not finished in / (e.g., ~/Desktop/ worked but ~/Desktop did not).
    12. +
    13. 3D module: If the image area was smaller than HD resolution image area, the 3D keypoints were not properly estimated.
    14. +
    15. OpenCL fixes.
    16. +
    17. If manual CUDA architectures are set in CMake, they are also set for Caffe rather than only for OpenPose.
    18. +
    19. Fixed flag --hand_alpha_pose.
    20. +
    +
  6. +
+

+OpenPose 1.5.1 (Sep 03, 2019)

+
    +
  1. Main improvements:
      +
    1. Highly improved 3D triangulation for >3 cameras by fixing some small bugs.
    2. +
    3. Added community-based support for Nvidia NVCaffe.
    4. +
    5. Increased accuracy very lightly for CUDA version (about 0.01%) by adapting the threshold in process() in bodyPartConnectorBase.cu to defaultNmsThreshold. This also removes any possibility of future bugs in that function for using a default NMS threshold higher than 0.15 (which was the hard-coded value used previously).
    6. +
    7. Increased mAP but reduced mAR (both about 0.01%) as well as reduction of false positives. Step 1: removed legs where only knee/ankle/feet are found. Step 2: If no people are found in an image, removePeopleBelowThresholdsAndFillFaces is re-run with maximizePositives = true.
    8. +
    9. Number of maximum people is not limited by the maximum number of max peaks anymore. However, the number of body part candidates for a specific keypoint (e.g., nose) is still limited to the number of max peaks.
    10. +
    11. Added more checks during destructors of CUDA-related functions and safer CUDA frees.
    12. +
    13. Improved accuracy of CPU version about 0.2% by following the CUDA/OpenCL approach of assigning the minimum possible PAF score to keypoints that are very close to each other.
    14. +
    15. Added Windows auto-testing (AppVeyor).
    16. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. --3d_min_views default value (-1) no longer means that all camera views are required. Instead, it will be equal to max(2, min(4, #cameras-1)). This should provide a good trade-off between recall and precision.
    2. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Windows: Added back support for OpenGL and Spinnaker, as well as DLLs for debug compilation.
    2. +
    3. 06_face_from_image.cpp, 07_hand_from_image.cpp, and 09_keypoints_from_heatmaps working again, they stopped working in version 1.5.0 with the GPU image resize for the GUI.
    4. +
    +
  6. +
+

+OpenPose 1.6.0 (Apr 26, 2020)

+
    +
  1. Main improvements:
      +
    1. Multi-camera (3D) working on Asynchronous mode.
        +
      1. Functions WrapperT::waitAndEmplace() and WrapperT::tryEmplace() improved, allowing multi-camera/3-D (TDatums of size > 1).
      2. +
      3. Added createMultiviewTDatum() to auto-generate a TDatums for multi-camera/3-D from a single cv::Mat (that is split) and the desired camera parameter matrices.
      4. +
      5. Added examples/tutorial_api_cpp/11_asynchronous_custom_input_multi_camera.cpp for a test example.
      6. +
      +
    2. +
    3. Created Matrix as container of cv::Mat, and String as container of std::string.
    4. +
    5. After replacing cv::Mat by Matrix, headers do not contain any 3rd-party library includes nor functions. This way, OpenPose can be exported without needing 3rd-party includes nor static library files (e.g., lib files in Windows), allowing people to use their own versions of OpenCV, Eigen, etc. without conflicting with OpenPose. Dynamic library files (e.g., dll files in Windows, so in Ubuntu) are still required.
    6. +
    7. Created the openpose_private directory with some internal headers that, if exported with OpenPose, would require including 3rd-party headers and static library files.
    8. +
    9. Default OpenCV version for Windows upgraded to version 4.2.0, extracted from their official website: section Releases, subsection OpenCV - 4.2.0, Windows version.
    10. +
    11. In all *.cpp files, their include of their analog *.hpp file has been moved to the first line of those *.cpp files to slightly speed up compiling time.
    12. +
    13. String is used in include/openpose/wrapper/ to avoid std::string to cause errors for using different std DLLs.
    14. +
    15. Added ScaleMode::ZeroToOneFixedAspect and ScaleMode::PlusMinusOneFixedAspect. Compared to ZeroToOne and PlusMinusOne, the new ones also preserve the aspect ratio of each axis.
    16. +
    17. Added more verbose to wrapper when it has been configured, showing the values of some of its parameters.
    18. +
    19. Removed many Visual Studio (Windows) warnings.
    20. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. All headers moved into openpose_private, all 3rd-party library calls in headers, and std::string calls in include/openpose/wrapper/.
    2. +
    3. Renamed dLog() as opLogIfDebug(), log() as opLog(), check() as checkBool(), and also renamed all the checkX() functions in include/openpose/utilities/check.hpp. This avoids compiling crashes when exporting OpenPose to other projects which contain other 3rd-party libraries that define functions with the same popular names with #define.
    4. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. The debug version of OpenPose actually targets debug lib/DLL files of 3rd-party libraries.
    2. +
    3. The debug version no longer prints on console a huge log message from Caffe with the network when starting OpenPose (fixed by using the right debug libraries).
    4. +
    5. Natural sort now works properly with filenames containing numbers longer than the limit of an int.
    6. +
    7. The optionally auto-generated bin directory only contains the required DLLs (depending on the CMake configuration), instead of all of them.
    8. +
    9. When WrapperStructFace and WrapperStructHand are not called and configured for Wrapper, setting the body to CPU rendering was not working.
    10. +
    11. Skeleton rendering bugs:
        +
      1. All or some skeletons were not properly displayed or completely missing on images with many people (e.g., videos with about 32 people).
      2. +
      3. All or some skeletons were not properly displayed or completely missing on images where the multiplication of people and image resolution was too big (e.g., videos with about 32 people on 4k resolution).
      4. +
      5. Flag output_resolution was not working with GPU resize, redirected to CPU in those cases.
      6. +
      +
    12. +
    +
  6. +
+

+OpenPose 1.7.0 (Nov 15, 2020)

+
    +
  1. Main improvements:
      +
    1. Added compatibility with CUDA 11.X and cuDNN 8.X.
    2. +
    3. Added compatibility with Ubuntu 20.04.
    4. +
    5. Added Asynchronous mode to Python API.
    6. +
    7. Added DOWNLOAD_SERVER variable to CMake. It specifies the link where the models and 3rd party libraries will be downloaded from.
    8. +
    9. Installation documentation highly simplified and improved.
    10. +
    11. Removed all compiler warnings for Ubuntu 20.04 (GCC and Clang) as well as some for Windows 10.
    12. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. USE_MKL disabled by default in Ubuntu. Reason: Not compatible with non-intel CPUs or Ubuntu 20.
    2. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. 90 and 270-degree rotations working again.
    2. +
    3. C++ tutorial API demos only try to cv::imshow the image if it is not empty (avoiding the assert that it would trigger otherwise).
    4. +
    5. Several typos fixed in the documentation.
    6. +
    +
  6. +
+

+Current version - Future OpenPose 1.7.1

+
    +
  1. Main improvements:
      +
    1. (For images and custom inputs only): Flag --net_resolution_dynamic added to avoid out of memory errors in low GPUs (set by default to 1). It also allows maintaining the maximum possible accuracy (for big GPUs), which was the previous default until OpenPose v1.7.0.
    2. +
    3. Webcam speed boosted on Windows (by updating OpenCV).
    4. +
    5. Third party versions updated:
        +
      1. OpenCV (Windows): From 4.2 to 4.5.
      2. +
      3. Eigen: From 3.3.4 to 3.3.8.
      4. +
      5. wget (Windows): From 1.19.1 to 1.20.3.
      6. +
      +
    6. +
    7. AppVeyor now tests the actual OpenPoseDemo.exe example (Windows).
    8. +
    9. Documentation improvements:
        +
      1. Highly simplified README.md.
      2. +
      3. Highly simplified doc, restructured to improve simplicity and now divided into subdirectories (advanced, deprecated, installation, etc).
      4. +
      5. More examples added to the demo quick start documentation.
      6. +
      +
    10. +
    11. Calibration documentation links to the included chessboard pdf.
    12. +
    13. Deprecated examples directories tutorial_add_module and tutorial_api_thread (and renamed as deprecated). They still compile, but we no longer support them.
    14. +
    15. GitHub Pages autogenerated into cmu-perceptual-computing-lab.github.io/openpose with README.md, doc/ and include/openpose folders.
    16. +
    +
  2. +
  3. Functions or parameters renamed:
      +
    1. Added GitHub Actions (workflows) to test the Ubuntu and Mac OSX versions (replacing the deprecated Travis). Travis was giving many issues that were not OpenPose errors, making it unusable. Its code has been left for now (but commented out).
    2. +
    3. Doc highly reordered and renamed in order to fit the Doxygen and GitHub Markdown styles simultaneously.
    4. +
    +
  4. +
  5. Main bugs fixed:
      +
    1. Eigen working again on Windows.
    2. +
    3. OpenPose links to the right OpenCV DLL Files on Windows (it was wrongly linking to v14 rather than v15).
    4. +
    5. AppVeyor auto-testing working again by disabling artifacts (Windows).
    6. +
    7. All CI tests pass (after switching to GitHub actions).
    8. +
    +
  6. +
+

+All OpenPose Versions

+

Download and/or check any OpenPose version from https://github.com/CMU-Perceptual-Computing-Lab/openpose/releases.

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_09_authors_and_contributors.html b/web/html/doc/md_doc_09_authors_and_contributors.html new file mode 100644 index 000000000..639472dda --- /dev/null +++ b/web/html/doc/md_doc_09_authors_and_contributors.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: OpenPose Doc - Authors and Contributors + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Authors and Contributors
+
+
+

+Authors

+

OpenPose is authored by Ginés Hidalgo, Zhe Cao, Tomas Simon, Shih-En Wei, Yaadhav Raaj, Hanbyul Joo, and Yaser Sheikh. It is maintained by Ginés Hidalgo and Yaadhav Raaj.

+

OpenPose would not be possible without the CMU Panoptic Studio dataset. The body pose estimation work is based on the following 2 original repositories: the CVPR 2017 repository and the ECCV 2016 repository.

+

+Contributors

+

We would also like to thank the following people, who have contributed to key components of OpenPose:

    +
  1. Bikramjot Hanzra: Former OpenPose maintainer, CMake (Ubuntu and Windows) version, and initial Travis Build version for Ubuntu.
  2. +
  3. Donglai Xiang: Camera calibration toolbox improvement, including the implementation of its bundle adjustment algorithm.
  4. +
  5. Luis Fernando Fraga: Implementation of the Lucas-Kanade algorithm and the person ID extractor.
  6. +
  7. Akash Patwal: Speedup of the CUDA image resize and visual skeleton rendering, as well as an extension that allows OpenPose to speed up linearly across more than 4 GPUs.
  8. +
  9. Helen Medina: First Windows version.
  10. +
  11. Matthijs van der Burgh: First GitHub Actions CI version for Ubuntu and Mac, and ported all the deprecated Travis CI tests into the new CI system.
  12. +
+

We would also like to thank all the people who have helped OpenPose in any way.

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_10_community_projects.html b/web/html/doc/md_doc_10_community_projects.html new file mode 100644 index 000000000..98e15a74d --- /dev/null +++ b/web/html/doc/md_doc_10_community_projects.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: OpenPose Doc - Community-based Projects + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Community-based Projects
+
+
+

Here we list all the community-created OpenPose projects that were shared with us. Do you want to share yours? Simply create a pull request that adds your demo and a short description of it to this file!

+
    +
  1. ROS OpenPose: ROS wrapper for OpenPose
  2. +
  3. Hand gesture classification application - OpenHand: Third-party application that eases the creation of hand keypoint datasets and real-time hand gesture classification. You can deploy your own Neural Network classification model on top of OpenPose and play with it in real-time through a GUI!
  4. +
  5. Integrated to Huggingface Spaces with Gradio. See demo: Hugging Face Spaces
  6. +
  7. RealSense2OpenPose3D: Use an Intel RealSense RGB-D camera to add depth to OpenPose. Generates JSON files with 3D keypoints.
  8. +
+

Disclaimer: We do not support any of these projects; we are simply listing them. GitHub issues or questions about them will result in strict user bans and the posts being deleted.

+
+
+
+ + + + diff --git a/web/html/doc/md_doc_advanced_3d_reconstruction_module.html b/web/html/doc/md_doc_advanced_3d_reconstruction_module.html new file mode 100644 index 000000000..ad6674c06 --- /dev/null +++ b/web/html/doc/md_doc_advanced_3d_reconstruction_module.html @@ -0,0 +1,214 @@ + + + + + + + +OpenPose: OpenPose Advanced Doc - 3-D Reconstruction Module and Demo + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Advanced Doc - 3-D Reconstruction Module and Demo
+
+
+

+Contents

+
    +
  1. Introduction
  2. +
  3. Installation
  4. +
  5. Non Linear Optimization
  6. +
  7. Features
  8. +
  9. Required Hardware
  10. +
  11. Camera Calibration
  12. +
  13. Camera Ordering
  14. +
  15. Quick Start
  16. +
  17. Expected Visual Results
  18. +
  19. Using a Different Camera Brand
  20. +
  21. Known Bug
  22. +
+

+Introduction

+

This module performs 3-D keypoint (body, face, and hand) reconstruction and rendering for 1 person. We will not keep updating it nor solving questions/issues about it at the moment. It requires the user to be familiar with computer vision and camera calibration, including extraction of intrinsic and extrinsic parameters.

+

+Installation

+

Check doc/installation/2_additional_settings.md#3d-reconstruction-module for installation steps.

+

+Non Linear Optimization

+

In order to increase the 3-D reconstruction accuracy, OpenPose optionally performs non-linear optimization if Ceres solver support is enabled (only available in Ubuntu for now). To enable it, check doc/installation/2_additional_settings.md#3d-reconstruction-module for more details.

+

+Features

+
    +
  • Auto detection of all FLIR cameras connected to your machine, and image streaming from all of them.
  • +
  • Hardware trigger and buffer NewestFirstOverwrite modes enabled. Hence, the algorithm will always get the last synchronized frame from each camera, deleting the rest.
  • +
  • 3-D reconstruction of body, face, and hands for 1 person.
  • +
  • If more than 1 person is detected per camera, the algorithm will just try to match person 0 on each camera, which will potentially correspond to different people in the scene. Thus, the 3-D reconstruction will completely fail.
  • +
  • Only points whose confidence is above the threshold with respect to each one of the cameras are reprojected (and later rendered). An alternative for > 4 cameras could potentially do 3-D reprojection and render all points with good views in more than N different cameras (not implemented here).
  • +
  • Only the Direct Linear Transformation (DLT) algorithm is applied for reconstruction (see the triangulation sketch after this list). Non-linear optimization methods (e.g., from Ceres Solver) would potentially improve the results (not implemented).
  • +
  • Basic OpenGL rendering with the freeglut library.
  • +
+
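The following is a minimal, self-contained two-view DLT triangulation sketch in C++/OpenCV (referenced from the DLT bullet above). It is an illustration, not OpenPose's internal code: the projection matrices and the matched 2-D point are made-up placeholders, and in the real module one projection matrix per FLIR camera would be built from the calibrated intrinsics and extrinsics.

    // Minimal two-view DLT triangulation sketch (C++/OpenCV). Values are illustrative only.
    #include <opencv2/opencv.hpp>
    #include <iostream>

    int main()
    {
        // Hypothetical 3x4 projection matrices P = K * [R | t] for cameras 0 and 1.
        const cv::Mat P0 = (cv::Mat_<double>(3, 4) << 1000, 0, 640, 0,    0, 1000, 360, 0, 0, 0, 1, 0);
        const cv::Mat P1 = (cv::Mat_<double>(3, 4) << 1000, 0, 640, -200, 0, 1000, 360, 0, 0, 0, 1, 0);
        // One matched 2-D keypoint (in pixels) seen by both cameras.
        const cv::Mat x0 = (cv::Mat_<double>(2, 1) << 700., 400.);
        const cv::Mat x1 = (cv::Mat_<double>(2, 1) << 650., 400.);
        cv::Mat point4D;
        cv::triangulatePoints(P0, P1, x0, x1, point4D); // DLT triangulation
        point4D.convertTo(point4D, CV_64F);
        point4D /= point4D.at<double>(3, 0);            // de-homogenize
        std::cout << "3-D point: " << cv::Mat(point4D.rowRange(0, 3).t()) << std::endl;
        return 0;
    }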

+Required Hardware

+

This demo assumes n arbitrary stereo cameras from the FLIR company (formerly Point Grey). Ideally, any USB-3 FLIR model should work, but we have only tested the following specific hardware:

+
    +
  1. Camera details: +
  2. +
  3. Fujinon 3 MP Varifocal Lens (3.8-13mm, 3.4x Zoom) for each camera. +
  4. +
  5. 4-Port PCI Express (PCIe) USB 3.0 Card Adapter with 4 dedicated channels. +
  6. +
  7. USB 3.0 cable for each FLIR camera. +
  8. +
+

+Camera Calibration

+

The user must manually obtain the intrinsic and extrinsic parameters of the stereo cameras. Note: we assume FLIR cameras, which is the default specified by the flag --camera_parameter_path "models/cameraParameters/flir/". Otherwise, change the path to your camera name accordingly.

+

There are 2 alternatives to calibrate the cameras:

    +
  1. Using the OpenPose calibration toolbox, doc/advanced/calibration_module.md.
  2. +
  3. Using your own calibration toolbox (or if you already know the camera parameters of your cameras):
      +
    1. Create an XML file for each camera, named models/cameraParameters/flir/{camera_serial_number}.xml.
    2. +
    3. The elements inside each XML file are the extrinsic parameters of the camera (CameraMatrix), the intrinsic parameters (Intrinsics), and the distortion coefficients (Distortion). Copy the format from models/cameraParameters/flir/17012332.xml.example (see also the reading sketch after this list). The extrinsic parameters let you set the coordinate origin (so that the 3-D keypoints are distances with respect to that origin).
        +
      • E.g., in order to set camera 1 as the coordinate center, set its CameraMatrix to the 3x4 identity matrix, and the CameraMatrix of each other camera to its extrinsic parameters with respect to the main camera, M_1_i.
      • +
      • VERY IMPORTANT: The intrinsic camera matrix should be an upper triangular matrix.
      • +
      +
    4. +
    5. The program can use any arbitrary number of cameras. Even if lots of cameras are added in models/cameraParameters/flir/, the program will check at runtime which FLIR cameras are detected and simply read those camera parameters. If the file corresponding to any of the cameras detected at runtime is not found, OpenPose will return an error.
    6. +
    7. In the example XML, OpenPose uses the 8-distortion-parameter version of OpenCV. The distortion parameters are internally used by the OpenCV function undistort() to rectify the images. This function can take 4-, 5-, or 8-parameter distortion coefficients (OpenCV 3.X also adds 12- and 14-parameter alternatives). Therefore, any of these versions (4, 5, 8, 12, or 14) will work in 3D OpenPose.
    8. +
    +
  4. +
+
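As a reference for the XML format described above, here is a minimal C++/OpenCV sketch (not part of OpenPose) that reads the three fields of one such file with cv::FileStorage and undistorts an image with them. The file path, serial number, and image name are placeholders.

    // Minimal sketch: read CameraMatrix / Intrinsics / Distortion from a camera XML file
    // (format as in models/cameraParameters/flir/17012332.xml.example) and undistort an image.
    #include <opencv2/opencv.hpp>
    #include <iostream>

    int main()
    {
        // Placeholder path; replace with your own camera serial number.
        const std::string xmlPath = "models/cameraParameters/flir/17012332.xml";
        cv::FileStorage fs(xmlPath, cv::FileStorage::READ);
        cv::Mat cameraExtrinsics, cameraIntrinsics, cameraDistortion;
        fs["CameraMatrix"] >> cameraExtrinsics;   // 3x4 extrinsics [R | t]
        fs["Intrinsics"]   >> cameraIntrinsics;   // 3x3 upper triangular intrinsics
        fs["Distortion"]   >> cameraDistortion;   // 4, 5, 8, 12, or 14 distortion coefficients
        fs.release();
        std::cout << "Extrinsics:\n" << cameraExtrinsics << "\nIntrinsics:\n" << cameraIntrinsics << std::endl;
        const cv::Mat distorted = cv::imread("image_from_that_camera.png"); // placeholder image
        if (!distorted.empty())
        {
            cv::Mat undistorted;
            cv::undistort(distorted, undistorted, cameraIntrinsics, cameraDistortion);
            cv::imwrite("image_undistorted.png", undistorted);
        }
        return 0;
    }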

+Camera Ordering

+

OpenPose displays the cameras sorted by serial number, starting on the left with the image corresponding to the lowest serial number. When the program is run, OpenPose prints the camera serial number associated with each detected camera index. If the number of detected cameras differs from the number of actual cameras, make sure the hardware is properly connected and the camera LEDs are on.

+

+Quick Start

+

Check the doc/01_demo.md#3-d-reconstruction for basic examples.

+

+Expected Visual Results

+

The visual GUI should show 3 screens.

+
    +
  1. The Windows command line or Ubuntu bash terminal.
  2. +
  3. The different cameras 2-D keypoint estimations.
  4. +
  5. The final 3-D reconstruction.
  6. +
+

It should be similar to the following image.

+


+ Tianyi Zhao testing the OpenPose 3D Module

+

+Using a Different Camera Brand

+

You can copy and modify the OpenPose 3-D demo to use any camera brand by:

+
    +
  1. You can optionally turn off the WITH_FLIR_CAMERA while compiling CMake.
  2. +
  3. Copy examples/tutorial_api_cpp/14_synchronous_custom_input.cpp (or 18_synchronous_custom_all_and_datum.cpp).
  4. +
  5. Modify WUserInput and add your custom code there. Your code should fill Datum::name, Datum::cameraMatrix, Datum::cvInputData, and Datum::cvOutputData (fill cvOutputData = cvInputData); see the schematic sketch after this list.
  6. +
  7. Remove WUserPostProcessing and WUserOutput (unless you want to have your custom post-processing and/or output).
  8. +
+

Note that your custom code should retrieve synchronized images from your cameras or any other source, as well as their intrinsic and extrinsic camera parameters.

+
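As a schematic illustration of the data each view must provide in the steps above, here is a tiny self-contained C++ sketch. The FakeDatum struct and makeFakeFrame function are stand-ins (not the real op::Datum API); in your actual WUserInput code you would fill the equivalently named op::Datum members instead.

    // Schematic only: a stand-in struct mirroring the op::Datum fields named above.
    #include <opencv2/opencv.hpp>
    #include <string>
    #include <vector>

    struct FakeDatum // stand-in for op::Datum, for illustration only
    {
        std::string name;     // e.g., frame name or number
        cv::Mat cameraMatrix; // 3x4 extrinsics [R | t] of this camera view
        cv::Mat cvInputData;  // the image captured by this camera
        cv::Mat cvOutputData; // must be filled with cvInputData
    };

    // Build one datum per synchronized camera view for a given frame.
    std::vector<FakeDatum> makeFakeFrame(const std::vector<cv::Mat>& syncedImages,
                                         const std::vector<cv::Mat>& cameraExtrinsics)
    {
        std::vector<FakeDatum> datums(syncedImages.size());
        for (size_t i = 0; i < syncedImages.size(); ++i)
        {
            datums[i].name = "frame_000000000000"; // placeholder name
            datums[i].cameraMatrix = cameraExtrinsics[i];
            datums[i].cvInputData = syncedImages[i];
            datums[i].cvOutputData = syncedImages[i]; // cvOutputData = cvInputData
        }
        return datums;
    }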

+Known Bug

+

FreeGLUT is quite a lightweight library. Due to that, there is a known bug in the 3D module:

+
    +
  1. The window must be closed with the Esc key. Clicking the close button will cause a core dump or std::exception error in OpenPose. Reason: there is no way to control the behaviour of the exit button in a FreeGLUT program. Feel free to let us know or create a pull request if you find a workaround applicable to 3-D OpenPose. Another alternative is using --disable_multi_thread in OpenPose, which avoids the issue but slows down the program, especially on multi-GPU systems.
  2. +
+
+
+
+ + + + diff --git a/web/html/doc/md_doc_advanced_calibration_module.html b/web/html/doc/md_doc_advanced_calibration_module.html new file mode 100644 index 000000000..cc003f6ae --- /dev/null +++ b/web/html/doc/md_doc_advanced_calibration_module.html @@ -0,0 +1,274 @@ + + + + + + + +OpenPose: OpenPose Advanced Doc - Calibration Module and Demo + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Advanced Doc - Calibration Module and Demo
+
+
+

+Contents

+
    +
  1. Introduction
  2. +
  3. Example Chessboard
  4. +
  5. Installing the Calibration Module
  6. +
  7. Running Calibration
      +
    1. General Quality Tips
    2. +
    3. Step 1 - Distortion and Intrinsic Parameter Calibration
    4. +
    5. Step 2 - Extrinsic Parameter Calibration
    6. +
    +
  8. +
  9. Camera Matrix Output Format
  10. +
  11. Using a Different Camera Brand
  12. +
  13. Naming Convention for the Output Images
  14. +
+

+Introduction

+

This experimental module performs camera calibration (distortion, intrinsic, and extrinsic camera parameter extraction). It computes and saves the intrinsic parameters of the input images. It is built on top of OpenCV, but it aims to simplify the process for people with no calibration or computer vision background at all (or for lazy people like myself).

+

Note: We are not aiming to have the best calibration toolbox, but the simplest one. If very high-quality calibration is required, there are surely many other toolboxes with better extrinsic parameter estimation tools.

+

VERY IMPORTANT: OpenPose requires an upper triangular matrix for the intrinsic camera matrix. If you plan to use your own camera intrinsics, be aware of this.

+

+Example Chessboard

+

doc/Chessboard_in_PDF/ contains a chessboard example that you can use to follow this documentation. You can simply print the PDF version (doc/Chessboard_in_PDF/pattern.pdf), making sure your printer maintains the aspect ratio and, if possible, does not zoom in/out the image. Simply replace the "127" used below with the size of each square on your printed chessboard.

+

+Installing the Calibration Module

+

Check doc/installation/0_index.md#calibration-module for installation steps.

+

+Running Calibration

+

Note: In order to maximize calibration quality, do not reuse the same video sequence for both intrinsic and extrinsic parameter estimation. The intrinsic parameter calibration should be run camera by camera, where each recorded video sequence should focus on covering all regions of the camera view and should be repeated from several distances. The extrinsic video sequence should instead focus on making sure that the chessboard is visible from at least 2 cameras at a time. So for a 3-camera calibration, you would need 1 video sequence per camera as well as a final sequence for the extrinsic parameter calibration.

+

+General Quality Tips

+
    +
  1. Keep the same orientation of the chessboard, i.e., do not rotate it more than ~15-30 degrees with respect to its center (i.e., going from a w x h number of squares to a h x w one). Our algorithm assumes that the origin is the corner at the top left, so rotating the chessboard will change this origin across frames, resulting in many frames being rejected for the final calibration, i.e., lower calibration accuracy.
  2. +
  3. Cover several distances, and within each distance, cover all parts of the image view (all corners and center).
  4. +
  5. Save the images in PNG format (the default behavior) in order to improve calibration quality. PNG images are bigger than their JPG equivalents, but they do not lose information to compression.
  6. +
  7. Use a chessboard as big as possible, ideally one with at least 8x6 squares and a square size of at least 100 millimeters. This especially affects the extrinsic calibration quality.
  8. +
  9. Intrinsics: About 400 image views are recommended for a high-quality calibration. You should get at least 150 images for a good calibration, and no more than 500. Calibrating a camera takes about 3 minutes with about 100 images, about 1.5 h with 200 images, and about 9.5 h with 450 images. The required RAM also grows exponentially.
  10. +
  11. Extrinsics: Recommended at least 250 images per camera for high quality calibration.
  12. +
+

+Step 1 - Distortion and Intrinsic Parameter Calibration

+
    +
  1. Run OpenPose and save images for your desired camera. Use a grid (chessboard) pattern and move around all the image area. Depending on the images source:
      +
    1. Webcam calibration: ./build/examples/openpose/openpose.bin --num_gpu 0 --write_images {intrinsic_images_folder_path}.
    2. +
    3. Flir camera calibration: Add the flags --flir_camera --flir_camera_index 0 (or the desired flir camera index) to the webcam command.
    4. +
    5. Calibration from video sequence: Add the flag --video {video_path} to the webcam command.
    6. +
    7. Any other camera brand: Simply save your images in {intrinsic_images_folder_path}, file names are not relevant.
    8. +
    +
  2. +
  3. Get familiar with the calibration parameters used in point 3 (i.e., grid_square_size_mm, grid_number_inner_corners, etc.) by running the --help flag:
    ./build/examples/calibration/calibration.bin --help
    +
  4. +
  5. Extract and save the intrinsic parameters:
    ./build/examples/calibration/calibration.bin --mode 1 --grid_square_size_mm 40.0 --grid_number_inner_corners "9x5" --camera_serial_number 18079958 --calibration_image_dir {intrinsic_images_folder_path}
    +
  6. +
  7. In this case, the intrinsic parameters would have been generated as {intrinsic_images_folder_path}/18079958.xml.
  8. +
  9. Run steps 1-4 for each one of your cameras.
  10. +
  11. After you calibrate the camera intrinsics, when you run OpenPose with those cameras, real-life straight lines should look like (almost) perfectly straight lines in the image. Otherwise, the calibration was not good. Try checking straight patterns such as wall or ceiling edges:
    # With distortion (straight lines might not look straight but rather slightly curved)
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --flir_camera_index 0
    +
    # Without distortion (straight lines should look like straight lines)
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --flir_camera_index 0 --frame_undistort
    +
  12. +
+

Examples:

+
    +
  1. Full example for a folder of images, a video, webcam streaming, etc.:
    # Ubuntu and Mac
    +
    # Get images for calibration (only if target is not `--image_dir`)
    +
    # If video
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --video examples/media/video_chessboard.avi --write_images ~/Desktop/Calib_intrinsics
    +
    # If webcam
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --webcam --write_images ~/Desktop/Calib_intrinsics
    +
    # Run calibration
    +
    ./build/examples/calibration/calibration.bin --mode 1 --grid_square_size_mm 30.0 --grid_number_inner_corners "8x6" --calibration_image_dir ~/Desktop/Calib_intrinsics/ --camera_parameter_folder models/cameraParameters/ --camera_serial_number frame_intrinsics
    +
    # Output: {OpenPose path}/models/cameraParameters/frame_intrinsics.xml
    +
    # Visualize undistorted images
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --image_dir ~/Desktop/Calib_intrinsics/ --frame_undistort --camera_parameter_path "models/cameraParameters/frame_intrinsics.xml"
    +
    # If video
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --video examples/media/video_chessboard.avi --frame_undistort --camera_parameter_path "models/cameraParameters/frame_intrinsics.xml"
    +
    # If webcam
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --webcam --frame_undistort --camera_parameter_path "models/cameraParameters/frame_intrinsics.xml"
    +
  2. +
  3. Full example for 4-view Flir/Point Grey camera system:
    # Ubuntu and Mac
    +
    # Get images for calibration
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --flir_camera_index 0 --write_images ~/Desktop/intrinsics_0
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --flir_camera_index 1 --write_images ~/Desktop/intrinsics_1
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --flir_camera_index 2 --write_images ~/Desktop/intrinsics_2
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --flir_camera_index 3 --write_images ~/Desktop/intrinsics_3
    +
    # Run calibration
    +
    # - Note: If your computer has enough RAM memory, you can run all of them at the same time in order to speed up the time (they are not internally multi-threaded).
    +
    ./build/examples/calibration/calibration.bin --mode 1 --grid_square_size_mm 127.0 --grid_number_inner_corners "9x6" --camera_serial_number 17012332 --calibration_image_dir ~/Desktop/intrinsics_0
    +
    ./build/examples/calibration/calibration.bin --mode 1 --grid_square_size_mm 127.0 --grid_number_inner_corners "9x6" --camera_serial_number 17092861 --calibration_image_dir ~/Desktop/intrinsics_1
    +
    ./build/examples/calibration/calibration.bin --mode 1 --grid_square_size_mm 127.0 --grid_number_inner_corners "9x6" --camera_serial_number 17092865 --calibration_image_dir ~/Desktop/intrinsics_2
    +
    ./build/examples/calibration/calibration.bin --mode 1 --grid_square_size_mm 127.0 --grid_number_inner_corners "9x6" --camera_serial_number 18079957 --calibration_image_dir ~/Desktop/intrinsics_3
    +
    # Visualize undistorted images
    +
    # - Camera parameters will be saved on their respective serial number files, so OpenPose will automatically find them
    +
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --frame_undistort
    +
  4. +
  5. For Windows, simply run build\x64\Release\calibration.exe (or the one from the binary portable demo) with the same flags as above.
  6. +
+

+Step 2 - Extrinsic Parameter Calibration

+
    +
  1. VERY IMPORTANT NOTE: If you want to re-run the extrinsic parameter calibration over the same intrinsic XML files (e.g., if you move the camera location but you know the intrinsics are the same), you must manually reset the camera matrix of each XML file that will be used for --combine_cam0_extrinsics to 1 0 0 0 0 1 0 0 0 0 1 0 (i.e., the 3x4 [I | 0] matrix).
  2. +
  3. After intrinsics calibration, save undistorted images for all the camera views:
    ./build/examples/openpose/openpose.bin --num_gpu 0 --flir_camera --frame_undistort --write_images ~/Desktop/extrinsics
    +
  4. +
  5. Run the extrinsic calibration tool between each pair of close cameras. In this example:
      +
    • We assume camera 0 to the right, 1 in the middle-right, 2 in the middle-left, and 3 in the left.
    • +
    • We assume camera 1 as the coordinate origin.
      # Ubuntu and Mac
      +
      ./build/examples/calibration/calibration.bin --mode 2 --grid_square_size_mm 127.0 --grid_number_inner_corners 9x6 --omit_distortion --calibration_image_dir ~/Desktop/extrinsics/ --cam0 1 --cam1 0
      +
      ./build/examples/calibration/calibration.bin --mode 2 --grid_square_size_mm 127.0 --grid_number_inner_corners 9x6 --omit_distortion --calibration_image_dir ~/Desktop/extrinsics/ --cam0 1 --cam1 2
      +
      ./build/examples/calibration/calibration.bin --mode 2 --grid_square_size_mm 127.0 --grid_number_inner_corners 9x6 --omit_distortion --calibration_image_dir ~/Desktop/extrinsics/ --cam0 1 --cam1 3
      +
      # Potentially more accurate equivalent for the calibration between cameras 1 and 3: If cameras 3 and 1 are too far from each other and the calibration chessboard is not visible from both cameras at the same time often enough, the calibration can be run between camera 3 and camera 2, which is closer to 3. In that case, the `combine_cam0_extrinsics` flag is required; it tells the calibration toolbox that cam0 is not the global origin (in this case, the global origin is camera 1).
      +
      # Note: Wait until calibration of camera index 2 with respect to 1 is completed, as information from camera 2 XML calibration file will be used:
      +
      ./build/examples/calibration/calibration.bin --mode 2 --grid_square_size_mm 127.0 --grid_number_inner_corners 9x6 --omit_distortion --calibration_image_dir ~/Desktop/extrinsics/ --cam0 2 --cam1 3 --combine_cam0_extrinsics
      +
      :: Windows
      +
      :: build\x64\Release\calibration.exe with the same flags as above
      +
    • +
    +
  6. +
  7. If you use Ceres solver (WITH_CERES flag in CMake), you can improve the calibration results by performing an additional Bundle Adjustment refinement step on top of the previous results. We use camera 0 as the baseline for the internal computation, so try to avoid weird camera configurations in which camera 0 is completely isolated from the other cameras. Ideally, camera 0 should physically be the closest to all other cameras (i.e., the one more centered). But in practice, the accuracy improvement is almost none (as long as it is not too far from the others). To perform this bundle adjustment refinement for the example above, simply run the following line:
    # Ubuntu and Mac
    +
    ./build/examples/calibration/calibration.bin --mode 3 --grid_square_size_mm 127.0 --grid_number_inner_corners 9x6 --omit_distortion --calibration_image_dir ~/Desktop/extrinsics/ --number_cameras 4
    +
    :: Windows
    +
    :: Ceres-compatible version not implemented for Windows yet. Make a pull request if you have a working version in Windows.
    +
  8. +
  9. Hint to verify extrinsic calibration is successful:
      +
    1. Our final reprojection error (after rescaling) for the bundle adjustment step is usually about 0.1-0.15 pixels.
    2. +
    3. Translation vector - Global distance:
        +
      1. Manually open each one of the generated XML files from the folder indicated by the flag --camera_parameter_path (or the default one indicated by the --help flag if the former was not used).
      2. +
      3. The field CameraMatrix is a 3 x 4 matrix (you can see that the subfield rows in that file is 3 and cols is 4).
      4. +
      5. Order the matrix in that 3 x 4 shape (e.g., by copying in a different text file with the shape of 3 rows and 4 columns).
      6. +
      7. The first 3 components of the last column of the CameraMatrix field define the global translation (in meters) with respect to the global origin (in our case, camera 1).
      8. +
      9. Thus, the distance between that camera and the origin camera 1 should be (approximately) equal to the L2-norm of the translation vector.
      10. +
      +
    4. +
    5. Translation vector - Relative x-y-z distances:
        +
      1. The 3x1 translation vector represents the x, y, and z distances to the origin camera, respectively. The camera is looking along the positive z axis, the y axis is down, and the x axis is right. This should match the real distance between both cameras.
      2. +
      +
    6. +
    +
  10. +
+

+Camera Matrix Output Format

+

Your CameraMatrix will look something like:

<CameraMatrix type_id="opencv-matrix">
+
<rows>3</rows>
+
<cols>4</cols>
+
<dt>d</dt>
+
<data>
+
8.4965260991319647e-01 1.1164693980389649e-01 -5.1538859446064478e-01 2.1494190603291283e+00
+
-1.5848315388246692e-01 9.8621217567379460e-01 -4.7630184633558698e-02 -4.5237471366168569e-01
+
5.0296474270386005e-01 1.2214952060972525e-01 8.5563190813085876e-01 1.1418502919988400e+00</data></CameraMatrix>
+

This is a 3x4 matrix, which represents rotation (R as a 3x3 matrix) and translation (t as a 3x1 matrix) in the following format: [R | t]. They represent the rotation and translation with respect to the world origin. When calibrating with OpenPose, we set one of the cameras as the origin, but this can be easily modified with some manual post-processing.

+
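To relate this to the sanity checks above, here is a short C++/OpenCV sketch (an illustration, not an OpenPose tool) that reads such a CameraMatrix, splits it into R and t, and prints the L2-norm of t, i.e., the approximate distance (in meters) from that camera to the origin camera. The XML path is a placeholder.

    // Sketch: split a 3x4 CameraMatrix into R (3x3) and t (3x1) and compute |t|.
    #include <opencv2/opencv.hpp>
    #include <iostream>

    int main()
    {
        // Placeholder path; use one of your generated camera XML files.
        cv::FileStorage fs("models/cameraParameters/flir/18079958.xml", cv::FileStorage::READ);
        cv::Mat cameraMatrix; // 3x4, [R | t]
        fs["CameraMatrix"] >> cameraMatrix;
        fs.release();
        const cv::Mat R = cameraMatrix(cv::Rect(0, 0, 3, 3)); // rotation
        const cv::Mat t = cameraMatrix(cv::Rect(3, 0, 1, 3)); // translation (meters)
        std::cout << "R:\n" << R << "\nt:\n" << t
                  << "\nDistance to origin camera (m): " << cv::norm(t) << std::endl;
        return 0;
    }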

+Using a Different Camera Brand

+

If you plan to use the calibration tool without using OpenPose, you can manually save a video sequence of your desired camera into each of the camera image folders (i.e., in the above example, the ~/Desktop/intrinsics_0, ~/Desktop/intrinsics_1, etc. folders).

+

If you eventually want to run that camera with OpenPose, check doc/advanced/3d_reconstruction_module.md#using-a-different-camera-brand.

+

+Naming Convention for the Output Images

+

The naming convention for the saved images is the following: [%12d]_rendered[CAMERA_NUMBER_MINUS_1].png, where [CAMERA_NUMBER_MINUS_1] is nothing for camera 0, _1 for camera 1, _2 for camera 2, etc. E.g., for 4 cameras:

000000000000_rendered.png
+
000000000000_rendered_1.png
+
000000000000_rendered_2.png
+
000000000000_rendered_3.png
+
000000000001_rendered.png
+
000000000001_rendered_1.png
+
000000000001_rendered_2.png
+
000000000001_rendered_3.png
+
[...]
+

OpenPose generates them with the base name [%12d]_rendered. Ideally, any other base name should work as long as the termination [CAMERA_NUMBER_MINUS_1] is kept consistent across all the camera views. E.g., you could also name them as follows (assuming 4 cameras):

a.png, a_1.png, a_2.png, a_3.png,
+
b.png, b_1.png, b_2.png, b_3.png,
+
etc.
+

Again, the critical step is to keep the file termination fixed as _1, _2, etc.

+
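If you generate such images yourself (e.g., with a non-FLIR camera), the following small C++ sketch illustrates one way to produce names that follow the convention above. The 12-digit zero padding matches the example names and is an assumption about the [%12d] pattern.

    // Sketch: generate file names following the [%12d]_rendered[_CAMERA_NUMBER_MINUS_1].png convention.
    #include <cstdio>
    #include <string>

    std::string imageName(const int frameIndex, const int cameraIndex)
    {
        char buffer[64];
        if (cameraIndex == 0)
            std::snprintf(buffer, sizeof(buffer), "%012d_rendered.png", frameIndex);
        else
            std::snprintf(buffer, sizeof(buffer), "%012d_rendered_%d.png", frameIndex, cameraIndex);
        return std::string(buffer);
    }
    // imageName(0, 0) -> "000000000000_rendered.png"
    // imageName(0, 3) -> "000000000000_rendered_3.png"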
+
+
+ + + + diff --git a/web/html/doc/md_doc_advanced_demo_advanced.html b/web/html/doc/md_doc_advanced_demo_advanced.html new file mode 100644 index 000000000..dc480ec25 --- /dev/null +++ b/web/html/doc/md_doc_advanced_demo_advanced.html @@ -0,0 +1,347 @@ + + + + + + + +OpenPose: OpenPose Advanced Doc - Demo - Advanced + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Advanced Doc - Demo - Advanced
+
+
+

This document is a more detailed continuation of doc/01_demo.md, and it assumes the user is quite familiar with the OpenPose demo and the contents of doc/01_demo.md.

+

+Contents

+
    +
  1. More Advanced Common Settings
      +
    1. Reducing Latency/Lag
    2. +
    3. Advanced Hands
    4. +
    5. Rendering Face and Hands without Pose
    6. +
    7. Debugging Information
    8. +
    9. Heat Maps Storing
    10. +
    11. BODY_25 vs. COCO vs. MPI Models
    12. +
    +
  2. +
  3. Help Flag
  4. +
  5. All Flags
  6. +
+

+More Advanced Common Settings

+

+Reducing Latency/Lag

+

In general, there are 3 ways to reduce the latency (each with some drawbacks):

+
    +
  • Reducing --output_resolution: It will slightly reduce the latency and increase the FPS. But the quality of the displayed image will deteriorate.
  • +
  • Reducing --net_resolution and/or --face_net_resolution and/or --hand_net_resolution: It will increase the FPS and reduce the latency. But the accuracy will drop, especially for small people in the image. Note: For maximum accuracy, follow doc/01_demo.md#maximum-accuracy-configuration.
  • +
  • Enabling --disable_multi_thread: The latency should be reduced. But the speed will drop to 1-GPU speed (as it will only use 1 GPU). Note that it is only practical for body; if hands and face are also extracted, it is usually not worth it.
  • +
+

+Advanced Hands

+
# Fast method for speed
+
./build/examples/openpose/openpose.bin --hand
+
# Best results found with 6 scales
+
./build/examples/openpose/openpose.bin --hand --hand_scale_number 6 --hand_scale_range 0.4
+
# Adding tracking to Webcam (if FPS per GPU > 10 FPS) and Video
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --hand --hand_detector 3
+
# Multi-scale + tracking is also possible
+
./build/examples/openpose/openpose.bin --video examples/media/video.avi --hand --hand_scale_number 6 --hand_scale_range 0.4 --hand_detector 3
+

+Rendering Face and Hands without Pose

+
# CPU rendering (faster)
+
./build/examples/openpose/openpose.bin --render_pose 0 --face --face_render 1 --hand --hand_render 1
+
# GPU rendering
+
./build/examples/openpose/openpose.bin --render_pose 0 --face --face_render 2 --hand --hand_render 2
+

+Debugging Information

+
# Basic information
+
./build/examples/openpose/openpose.bin --logging_level 3
+
# Showing all messages
+
./build/examples/openpose/openpose.bin --logging_level 0
+

+Heat Maps Storing

+

The following command will save all the body part heat maps, the background heat map, and the Part Affinity Fields (PAFs) in the folder output_heatmaps_folder. It will save them in PNG format. Instead of saving each of the 67 heatmaps (18 body parts + background + 2 x 19 PAFs) individually, the library concatenates them vertically into a huge (width x #heatmaps) x (height) matrix. The PAF channels are multiplied by 2 because there is one heatmap for the x-coordinates and one for the y-coordinates. The order is body parts + bkg + PAFs, following the sequence of POSE_BODY_PART_MAPPING in include/openpose/pose/poseParameters.hpp. A sketch for splitting the saved file back into individual heat maps is shown after the command below.

./build/examples/openpose/openpose.bin --video examples/media/video.avi --heatmaps_add_parts --heatmaps_add_bkg --heatmaps_add_PAFs --write_heatmaps output_heatmaps_folder/
+
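As a reading aid (not an official OpenPose tool), the following C++/OpenCV sketch splits one of the saved PNG files back into individual heat maps, assuming the side-by-side layout along the width implied by the (width x #heatmaps) x (height) description above. The file name, the number of heat maps, and the default --heatmaps_scale 2 (integer [0,255] output) are assumptions you must adapt to your own run.

    // Sketch: split a concatenated heat-map PNG (width = #heatmaps x net_width) into single maps.
    #include <opencv2/opencv.hpp>
    #include <vector>

    int main()
    {
        // Placeholder file name; use one of the PNGs written into output_heatmaps_folder/.
        const cv::Mat concatenated = cv::imread("output_heatmaps_folder/some_frame_heatmaps.png",
                                                cv::IMREAD_GRAYSCALE);
        if (concatenated.empty())
            return -1;
        const int numberHeatmaps = 67; // assumption: must match your model and enabled heatmap flags
        const int mapWidth = concatenated.cols / numberHeatmaps;
        std::vector<cv::Mat> heatmaps;
        for (int i = 0; i < numberHeatmaps; ++i)
            heatmaps.emplace_back(concatenated(cv::Rect(i * mapWidth, 0, mapWidth, concatenated.rows)).clone());
        // Order: body part heat maps, then background, then the PAF x/y channels.
        return 0;
    }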

+BODY_25 vs. COCO vs. MPI Models

+

The BODY_25 model (--model_pose BODY_25) includes both body and foot keypoints and it is based on OpenPose: Realtime Multi-Person 2D Pose Estimation using Part Affinity Fields. COCO and MPI models are slower, less accurate, and do not contain foot keypoints. They are based on our older paper Realtime Multi-Person 2D Pose Estimation using Part Affinity Fields. We highly recommend only using the BODY_25 model.

+

There is an exception: for the CPU version, the COCO and MPI models seem to be faster. Accuracy is still better for the BODY_25 model.

+

+Help Flag

+

We recommend following the next section (All Flags), which shows all the flags in this document and sorts them by category.

+

However, you could add the flag --help at any point to see all the available OpenPose flags. Check only the flags for examples/openpose/openpose.cpp itself (i.e., the ones in the section Flags from examples/openpose/openpose.cpp:).

# Ubuntu and Mac
+
./build/examples/openpose/openpose.bin --help
+
:: Windows - Portable Demo
+
bin\OpenPoseDemo.exe --help
+

+All Flags

+

Now that you are more familiar with OpenPose, this is a list with all the available flags. Each one is divided into flag name, default value, and description.

+
    +
  1. Debugging/Other
  2. +
+
    +
  • DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any opLog() message, while 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for low priority messages and 4 for important ones.");
  • +
  • DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful for 1) cases where low latency is needed (e.g., webcam in real-time scenarios with low-range GPU devices); and 2) debugging OpenPose when it is crashing, to locate the error.");
  • +
  • DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some runtime statistics at this frame number.");
  • +
+
    +
  1. Producer
  2. +
+
    +
  • DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative number (by default), to auto-detect and open the first available camera.");
  • +
  • DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the default 1280x720 for `--camera`, or the maximum flir camera resolution available for `--flir_camera`");
  • +
  • DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default example video.");
  • +
  • DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20 images. Read all standard formats (jpg, png, bmp, etc.).");
  • +
  • DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera.");
  • +
  • DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir camera index to run, where 0 corresponds to the detected flir camera with the lowest serial number, and `n` to the `n`-th lowest serial number camera.");
  • +
  • DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP.");
  • +
  • DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e., the first frame has index 0.");
  • +
  • DEFINE_uint64(frame_step, 1, "Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames 0, 5, 10, etc..");
  • +
  • DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to 10, it will process 11 frames (0-10).");
  • +
  • DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g., for real time webcam demonstrations).");
  • +
  • DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270.");
  • +
  • DEFINE_bool(frames_repeat, false, "Repeat frames when finished.");
  • +
  • DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g., for video). If the processing time is too long, it will skip frames. If it is too fast, it will slow it down.");
  • +
  • DEFINE_string(camera_parameter_path, "models/cameraParameters/flir", "String with the folder where the camera parameters are located. If there is only 1 XML file (for single video, webcam, or images from the same camera), you must specify the whole XML file path (ending in .xml).");
  • +
  • DEFINE_bool(frame_undistort, false, "If false (default), it will not undistort the image; if true, it will undistort it based on the camera parameters found in `camera_parameter_path`");
  • +
+
    +
  1. OpenPose
  2. +
+
    +
  • DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located.");
  • +
  • DEFINE_string(prototxt_path, "", "The combination `--model_folder` + `--prototxt_path` represents the whole path to the prototxt file. If empty, it will use the default OpenPose ProtoTxt file.");
  • +
  • DEFINE_string(caffemodel_path, "", "The combination `--model_folder` + `--caffemodel_path` represents the whole path to the caffemodel file. If empty, it will use the default OpenPose CaffeModel file.");
  • +
  • DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the input image resolution.");
  • +
  • DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your machine.");
  • +
  • DEFINE_int32(num_gpu_start, 0, "GPU device start number.");
  • +
  • DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e., the scale of the (x,y) coordinates that will be saved with the `write_json` & `write_keypoint` flags. Select `0` to scale it to the original source resolution; `1` to scale it to the net output size (set with `net_resolution`); `2` to scale it to the final output size (set with `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left corner of the image, and (1,1) the bottom-right one; and 4 for the range [-1,1], where (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Not related with `scale_number` and `scale_gap`.");
  • +
  • DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with the top scores. The score is based on the person area over the image, the body part scores, as well as the joint scores (between each pair of connected body parts). Useful if you know the exact number of people in the scene, so it can remove false positives (if all the people have been detected). However, it might also include false negatives by removing very small or highly occluded people. -1 will keep them all.");
  • +
  • DEFINE_bool(maximize_positives, false, "It reduces the thresholds to accept a person candidate. It highly increases both false and true positives. I.e., it maximizes average recall but could harm average precision.");
  • +
  • DEFINE_double(fps_max, -1., "Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as possible. Example usage: If OpenPose is displaying images too quickly, this can reduce the speed so the user can better analyze each frame from the GUI.");
  • +
+
    +
  1. OpenPose Body Pose
  2. +
+
    +
  • DEFINE_int32(body, 1, "Select 0 to disable body keypoint detection (e.g., for faster but less accurate face keypoint detection, custom hand detector, etc.), 1 (default) for body keypoint estimation, and 2 to disable its internal body pose estimation network but still run the greedy association parsing algorithm");
  • +
  • DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g., `BODY_25` (fastest for CUDA version, most accurate, and includes foot keypoints), `COCO` (18 keypoints), `MPI` (15 keypoints, least accurate model but fastest on CPU), `MPI_4_layers` (15 keypoints, even faster but less accurate).");
  • +
  • DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is decreased, the speed increases. For maximum speed-accuracy balance, it should keep the closest aspect ratio possible to the images or videos to be processed. Using `-1` in any of the dimensions, OP will choose the optimal aspect ratio depending on the user's input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions, e.g., full HD (1920x1080) and HD (1280x720) resolutions.");
  • +
  • DEFINE_double(net_resolution_dynamic, 1., "This flag only applies to images or custom inputs (not to video or webcam). If it is zero or a negative value, it means that using `-1` in `net_resolution` will behave as explained in its description. Otherwise, and to avoid out of memory errors, the `-1` in `net_resolution` will clip to this value times the default 16/9 aspect ratio value (which is 656 width for a 368 height). E.g., `net_resolution_dynamic 10 net_resolution -1x368` will clip to 6560x368 (10 x 656). Recommended 1 for small GPUs (to avoid out of memory errors but maximize speed) and 0 for big GPUs (for maximum accuracy and speed).");
  • +
  • DEFINE_int32(scale_number, 1, "Number of scales to average.");
  • +
  • DEFINE_double(scale_gap, 0.25, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1. If you want to change the initial scale, you actually want to multiply the `net_resolution` by your desired initial scale.");
  • +
  • DEFINE_double(upsampling_ratio, 0., "Upsampling ratio between the `net_resolution` and the output net results. A value less or equal than 0 (default) will use the network default value (recommended).");
  • +
+
    +
  1. OpenPose Body Pose Heatmaps and Part Candidates
  2. +
+
    +
  • DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps. If more than one `add_heatmaps_X` flag is enabled, it will place them in sequential memory order: body parts + bkg + PAFs. It will follow the order on POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will considerably decrease. Not required for OpenPose, enable it only if you intend to explicitly use this information later.");
  • +
  • DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to background.");
  • +
  • DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs.");
  • +
  • DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer rounded [0,255]; and 3 for no scaling.");
  • +
  • DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the op::Datum::poseCandidates array with the body part candidates. Candidates refer to all the detected body parts, before being assembled into people. Note that the number of candidates is equal or higher than the number of final body parts (i.e., after being assembled into people). The empty body parts are filled with 0s. Program speed will slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly use this information.");
  • +
+
    +
  1. OpenPose Face
  2. +
+
    +
  • DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g. `model_folder`. Note that this will considerably slow down the performance and increase the required GPU memory. In addition, the greater the number of people in the image, the slower OpenPose will be.");
  • +
  • DEFINE_int32(face_detector, 0, "Kind of face rectangle detector. Select 0 (default) to select OpenPose body detector (most accurate one and fastest one if body is enabled), 1 to select OpenCV face detector (not implemented for hands), 2 to indicate that it will be provided by the user, or 3 to also apply hand tracking (only for hand). Hand tracking might improve hand keypoint detection for webcam (if the frame rate is high enough, i.e., >7 FPS per GPU) and video. This is not person ID tracking, it simply looks for hands in positions at which hands were located in previous frames, but it does not guarantee the same person ID among frames.");
  • +
  • DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint detector. 320x320 usually works fine while giving a substantial speed up when there are multiple faces in the image.");
  • +
+
    +
  1. OpenPose Hand
  2. +
+
    +
  • DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g. `model_folder`. Analogously to `--face`, it will also slow down the performance, increase the required GPU memory and its speed depends on the number of people.");
  • +
  • DEFINE_int32(hand_detector, 0, "Kind of hand rectangle detector. Analogous to `--face_detector`.");
  • +
  • DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint detector.");
  • +
  • DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4.");
  • +
  • DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.");
  • +
+
    +
  1. OpenPose 3-D Reconstruction
  2. +
+
    +
  • DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system. 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction results. Note that it will only display 1 person. If multiple people are present, it will fail.");
  • +
  • DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will require max(2, min(4, #cameras-1)) cameras to see the keypoint in order to reconstruct it.");
  • +
  • DEFINE_int32(3d_views, -1, "Complementary option for `--image_dir` or `--video`. OpenPose will read as many images per iteration, allowing tasks such as stereo camera processing (`--3d`). Note that `--camera_parameter_path` must be set. OpenPose must find as many `xml` files in the parameter folder as this number indicates.");
  • +
+
    +
  1. Extra algorithms
  2. +
+
    +
  • DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames.");
  • +
  • DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The value indicates the number of frames where tracking is run between each OpenPose keypoint detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint detector and tracking for potentially higher accuracy than only OpenPose.");
  • +
  • DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing the number of threads will increase the speed but also the global system latency.");
  • +
+
    +
  1. OpenPose Rendering
  2. +
+
    +
  • DEFINE_int32(part_to_show, 0, "Prediction channel to visualize: 0 (default) for all the body parts, 1 for the background heat map, 2 for the superposition of heatmaps, 3 for the superposition of PAFs, 4-(4+#keypoints) for each body part heat map, the following ones for each body part pair PAF.");
  • +
  • DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black background, instead of being rendered into the original image. Related: `part_to_show`, `alpha_pose`, and `alpha_heatmap`.");
  • +
+
    +
  1. OpenPose Rendering Pose
  2. +
+
    +
  • DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be rendered. Note: Rendered refers only to visual display in the OpenPose basic GUI, not in the saved results. Generally, a high threshold (> 0.5) will only render very clear body parts; while small thresholds (~0.1) will also output guessed and occluded keypoints, but also more false positives (i.e., wrong detections).");
  • +
  • DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render both `outputData` and `cvOutputData` with the original image and desired body part to be shown (i.e., keypoints, heat maps or PAFs).");
  • +
  • DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will hide it. Only valid for GPU rendering.");
  • +
  • DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the heatmap, 0 will only show the frame. Only valid for GPU rendering.");
  • +
+
    +
  1. OpenPose Rendering Face
  2. +
+
    +
  • DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints.");
  • +
  • DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same configuration that `render_pose` is using.");
  • +
  • DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face.");
  • +
  • DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face.");
  • +
+
    +
  1. OpenPose Rendering Hand
  2. +
+
    +
  • DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints.");
  • +
  • DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same configuration that `render_pose` is using.");
  • +
  • DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand.");
  • +
  • DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand.");
  • +
+
    +
  1. Display
  2. +
+
    +
  • DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle).");
  • +
  • DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g., number of current frame and people). It does not affect the pose rendering.");
  • +
  • DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server and/or to slightly speed up the processing if visual output is not required); 2 for 2-D display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display.");
  • +
+
    +
  1. Command Line Interface Verbose
  2. +
+
    +
  • DEFINE_double(cli_verbose, -1.f, "If -1, it will be disabled (default). If it is a positive integer number, it will print on the command line every `verbose` frames. If number in the range (0,1), it will print the progress every `verbose` times the total of frames.");
  • +
+
    +
  1. Result Saving
  2. +
+
    +
  • DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format.");
  • +
  • DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV function cv::imwrite for all compatible extensions.");
  • +
  • DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag `write_video_fps` controls FPS. Alternatively, the video extension can be `.mp4`, resulting in a file with a much smaller size and allowing `--write_video_with_audio`. However, that would require: 1) Ubuntu or Mac system, 2) FFmpeg library installed (`sudo apt-get install ffmpeg`), 3) the temporary creation of a folder with the same file path as the final video (without the extension) to store the intermediate frames that will later be used to generate the final MP4 video.");
  • +
  • DEFINE_double(write_video_fps, -1., "Frame rate for the recorded video. By default, it will try to get the input frames producer frame rate (e.g., input video or webcam frame rate). If the input frames producer does not have a set FPS (e.g., image_dir or webcam if OpenCV not compiled with its support), set this value accordingly (e.g., to the frame rate displayed by the OpenPose GUI).");
  • +
  • DEFINE_bool(write_video_with_audio, false, "If the input is video and the output is so too, it will save the video with audio. It requires the output video file path finishing in `.mp4` format (see `write_video` for details).");
  • +
  • DEFINE_string(write_video_3d, "", "Analogous to `--write_video`, but applied to the 3D output.");
  • +
  • DEFINE_string(write_video_adam, "", "Experimental, not available yet. Analogous to `--write_video`, but applied to Adam model.");
  • +
  • DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled).");
  • +
  • DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format. If foot, face, hands, etc. JSON is also desired (`--write_coco_json_variants`), they are saved with a different file name suffix.");
  • +
  • DEFINE_int32(write_coco_json_variants, 1, "Add 1 for body, add 2 for foot, 4 for face, and/or 8 for hands. Use 0 to use all the possible candidates. E.g., 7 would mean body+foot+face COCO JSON.");
  • +
  • DEFINE_int32(write_coco_json_variant, 0, "Currently, this option is experimental and only takes effect on car JSON generation. It selects the COCO variant for cocoJsonSaver.");
  • +
  • DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag must be enabled.");
  • +
  • DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`. For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for floating values. See `doc/02_output.md` for more details.");
  • +
  • DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format with `write_keypoint_format`.");
  • +
  • DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml, yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead.");
  • +
+
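For reference, each JSON file written by `--write_json` contains a people array whose keypoints are stored as flat [x0, y0, c0, x1, y1, c1, ...] lists (see doc/02_output.md). Below is a minimal reading sketch in Python; the file name is only an example and depends on your input and output settings:

import json
import numpy as np

# Example path: OpenPose writes one JSON file per processed frame
with open("output_json/video_000000000000_keypoints.json") as f:
    frame = json.load(f)

for person in frame["people"]:
    # Flat list of (x, y, confidence) triplets -> (num_keypoints, 3) array
    pose = np.array(person["pose_keypoints_2d"], dtype=np.float32).reshape(-1, 3)
    print(pose.shape)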
    +
  1. Result Saving - Extra Algorithms
  2. +
+
    +
  • DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`.");
  • +
+
    +
  1. UDP Communication
  2. +
+
    +
  • DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`.");
  • +
  • DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication.");
  • +
+
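Note that the flags listed above can also be set through the Python API by passing them as entries of the params dictionary given to WrapperPython.configure(). A minimal sketch follows; the import path and the values below are examples and depend on how the Python module was built and installed:

import pyopenpose as op  # the import path depends on your build/installation

# Keys mirror the command-line flag names; values below are examples
params = {
    "model_folder": "models/",
    "display": 0,            # no GUI, e.g., for headless servers
    "no_gui_verbose": True,
    "write_json": "output_json/",
}
opWrapper = op.WrapperPython()
opWrapper.configure(params)
opWrapper.start()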
+
+
+ + + + diff --git a/web/html/doc/md_doc_advanced_deployment.html b/web/html/doc/md_doc_advanced_deployment.html new file mode 100644 index 000000000..e30ace258 --- /dev/null +++ b/web/html/doc/md_doc_advanced_deployment.html @@ -0,0 +1,164 @@ + + + + + + + +OpenPose: OpenPose Advanced Doc - Deploying/Exporting OpenPose to Other Projects + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Advanced Doc - Deploying/Exporting OpenPose to Other Projects
+
+
+

+Contents

+
    +
  1. Introduction
  2. +
  3. Third-Party Libraries
  4. +
  5. Private OpenPose Include Directory
  6. +
  7. Crash and Core Dumped Avoidance
  8. +
  9. Deploying OpenPose
      +
    1. Windows
    2. +
    3. CMake (Windows, Ubuntu, and Mac)
    4. +
    +
  10. +
+

+Introduction

+

Starting in OpenPose 1.6.0 (GitHub code in or after October 2019), OpenPose has considerably refactored its code to remove OpenCV from its headers. This makes the OpenPose 1.6 headers different from previous versions and a bit harder to use. However, it allows OpenPose to be exported to other projects without requiring any third-party libraries (except in some special cases detailed below). The greatest benefit of this change: if your project already uses OpenCV, and you add your own version of OpenPose, the OpenCV version used in OpenPose and the one used in your project will not interfere with each other anymore, even if they are different versions!

+

+Third-Party Libraries

+

While compiling OpenPose from source, the static library files (*.a for Ubuntu, *.lib for Windows, etc.) and include/ directories of all the third-party libraries detailed in doc/installation/0_index.md are required (GFlags, Glog, OpenCV, Caffe, etc.). However, when deploying OpenPose, fewer dependencies are required:

    +
  • GFlags and Glog are required only if the include/openpose/flags.hpp file is going to be used (e.g., when intending to use the command-line interface).
  • +
  • OpenCV can be optionally included if your project already uses it (but make sure to use the same binaries and include directory of OpenCV for both OpenPose and your project or weird runtime crashes will occur!). Including OpenCV does not increase the functionality of OpenPose, but it makes it easier to use by adding some functions that directly take cv::Mat matrices as input (rather than raw pointers). However, it is optional starting in OpenPose 1.6.0.
  • +
  • Caffe or any other 3rd-party libraries are not required.
  • +
+

The static library files (*.a for Ubuntu, *.lib for Windows, etc.) and include/ directories are the files that must be included in your project settings. However, the runtime library files (*.so for Ubuntu, *.dll for Windows, etc.), which are always required, must simply be placed together with the final executable or in default system paths. I.e., these files are only used during runtime, so they do not require any configuration in your project settings. E.g., for Windows, you can simply copy the content of the auto-generated build/bin/ directory into the path where your executable is located.

+

+Private OpenPose Include Directory

+

Inside include/, there are 2 directories: openpose/ and openpose_private/. Adding the openpose_private/ directory will require including more libraries (e.g., OpenCV and Eigen). This directory exposes some extra functions used internally, but in most cases this functionality is not required at all, so the include/ directory should only contain the openpose/ directory when exported.

+

Windows-only: In addition, Windows users have to manually add OP_API to all the functions/classes from openpose_private/ that they desire to use and then re-compile OpenPose.

+

+Crash and Core Dumped Avoidance

+

If your project already uses OpenCV, and you add your own version of OpenPose, the OpenCV version of OpenPose and the one from your project will not interfere anymore, even if they are different versions. However, you cannot use the OpenCV functions of OpenPose from a different project if that project uses a different version of OpenCV. Otherwise, very cryptic runtime DLL errors might occur! Make sure you either:

    +
  • Compile OpenPose and your project with the same version of OpenCV.
  • +
  • Or if that is not possible (new since OpenPose 1.6.0), use the non-OpenCV analog functions of OpenPose to avoid cryptic DLL runtime crashes.
  • +
+

+Deploying OpenPose

+

+Windows

+

First of all, make sure to read all the sections above.

+

Second, note that the CMake option should also work for Windows. Alternatively, we also show the more Windows-like version in which *.dll, *.lib, and include/ files are copied, which might be easier to apply when using the portable binaries.

+

+CMake (Windows, Ubuntu, and Mac)

+

First of all, make sure to read all the sections above.

+

If you only intend to use the OpenPose demo, you might skip this step. This step is only recommended if you plan to use the OpenPose API from other projects.

+

To install the OpenPose headers and libraries into the system environment path (e.g., /usr/local/ or /usr/), run the following command.

cd build/
+
sudo make install
+

Once the installation is complete, you can use OpenPose in your other projects via the find_package CMake command. Below is a small example CMakeLists.txt. In order to use this script, you also need to copy FindGFlags.cmake and FindGlog.cmake into your <project_root_directory>/cmake/Modules/ (create the directory if necessary).

cmake_minimum_required(VERSION 2.8.7)
+
+
add_definitions(-std=c++11)
+
+
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules")
+
+
find_package(GFlags)
+
find_package(Glog)
+
find_package(OpenCV)
+
find_package(OpenPose REQUIRED)
+
+
include_directories(${OpenPose_INCLUDE_DIRS} ${GFLAGS_INCLUDE_DIR} ${GLOG_INCLUDE_DIR} ${OpenCV_INCLUDE_DIRS})
+
+
add_executable(example.bin example.cpp)
+
+
target_link_libraries(example.bin ${OpenPose_LIBS} ${GFLAGS_LIBRARY} ${GLOG_LIBRARY} ${OpenCV_LIBS})
+

If Caffe was built with OpenPose, it will automatically find it. Otherwise, you will need to link Caffe again as shown below (otherwise, you might get an error like /usr/bin/ld: cannot find -lcaffe).

link_directories(<path_to_caffe_installation>/caffe/build/install/lib)
+
+
+
+ + + + diff --git a/web/html/doc/md_doc_advanced_heatmap_output.html b/web/html/doc/md_doc_advanced_heatmap_output.html new file mode 100644 index 000000000..2ce08f535 --- /dev/null +++ b/web/html/doc/md_doc_advanced_heatmap_output.html @@ -0,0 +1,148 @@ + + + + + + + +OpenPose: OpenPose Advanced Doc - Heatmap Output + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Advanced Doc - Heatmap Output
+
+
+

+Contents

+
    +
  1. Keypoints
  2. +
  3. UI and Visual Heatmap Output
  4. +
  5. Heatmap Ordering
  6. +
  7. Heatmap Saving in Float Format
  8. +
  9. Heatmap Scaling
  10. +
+

+Keypoints

+

Check doc/output_keypoints.md for the basic output information. This document is for users that want to use the heatmaps.

+

+UI and Visual Heatmap Output

+

If you choose to visualize a body part or a PAF (Part Affinity Field) heat map with the command option --part_to_show, the visual GUI should show something similar to one of the following images:

+

+

+

+Heatmap Ordering

+

Regarding the heat map storage format, instead of saving each of the 57 heatmaps (18 body parts + background + 2 x 19 PAFs) individually, the library concatenates them into a huge (width x #heat maps) x (height) matrix (i.e., concatenated by columns). E.g., columns [0, individual heat map width] contain the first heat map, columns [individual heat map width + 1, 2 * individual heat map width] contain the second heat map, etc. Note that some image viewers are not able to display the resulting images due to their size. However, Chrome and Firefox are able to open them properly.

+

The saving order is body parts + background + PAFs. Any of them can be disabled with program flags. If background is disabled, then the final image will be body parts + PAFs. The body parts and background follow the order of getPoseBodyPartMapping(const PoseModel poseModel).

+

The PAFs follow the order specified on getPosePartPairs(const PoseModel poseModel) together with getPoseMapIndex(const PoseModel poseModel). E.g., assuming COCO (see example code below), the PAF channels in COCO start in 19 (smallest number in getPoseMapIndex, equal to #body parts + 1), and end up in 56 (highest one). Then, we can match its value from getPosePartPairs. For instance, 19 (x-channel) and 20 (y-channel) in getPoseMapIndex correspond to PAF from body part 1 to 8; 21 and 22 correspond to x,y channels in the joint from body part 8 to 9, etc. Note that if the smallest channel is odd (19), then all the x-channels are odd, and all the y-channels even. If the smallest channel is even, then the opposite will happen.

// C++ API call
+
#include <openpose/pose/poseParameters.hpp>
+
const auto& posePartPairsBody25 = getPosePartPairs(PoseModel::BODY_25);
+
const auto& posePartPairsCoco = getPosePartPairs(PoseModel::COCO_18);
+
const auto& posePartPairsMpi = getPosePartPairs(PoseModel::MPI_15);
+
+
// getPosePartPairs(PoseModel::BODY_25) result
+
// Each index is the key value corresponding to each body part in `getPoseBodyPartMapping`. E.g., 1 for "Neck", 2 for "RShoulder", etc.
+
// 1,8, 1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 8,9, 9,10, 10,11, 8,12, 12,13, 13,14, 1,0, 0,15, 15,17, 0,16, 16,18, 2,17, 5,18, 14,19,19,20,14,21, 11,22,22,23,11,24
+
+
// getPoseMapIndex(PoseModel::BODY_25) result
+
// 0,1, 14,15, 22,23, 16,17, 18,19, 24,25, 26,27, 6,7, 2,3, 4,5, 8,9, 10,11, 12,13, 30,31, 32,33, 36,37, 34,35, 38,39, 20,21, 28,29, 40,41,42,43,44,45, 46,47,48,49,50,51
+
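As a rough illustration of this column-wise layout (a sketch, not part of the official API), the following Python snippet splits a concatenated heatmap image saved by --write_heatmaps back into individual maps; the file name and the number of maps are example values that depend on the model and flags used:

import cv2  # assumes opencv-python is installed

# Example file name; one concatenated image is saved per frame
concatenated = cv2.imread("output_heatmaps/frame_000000000000_pose_heatmaps.png", cv2.IMREAD_GRAYSCALE)
num_maps = 57  # e.g., COCO: 18 body parts + background + 2 x 19 PAF channels
single_width = concatenated.shape[1] // num_maps
# Map i occupies columns [i * single_width, (i + 1) * single_width)
heatmaps = [concatenated[:, i * single_width:(i + 1) * single_width] for i in range(num_maps)]
print(len(heatmaps), heatmaps[0].shape)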

+Heatmap Saving in Float Format

+

If you save the heatmaps in floating format by using the flag --write_heatmaps_format float, you can later read them in Python with:

# Load custom float format - Example in Python, assuming a (18 x 300 x 500) size array
+
import numpy as np  # needed for np.fromfile
+
x = np.fromfile(heatMapFullPath, dtype=np.float32)  # heatMapFullPath: path to the saved `.float` file
+
assert x[0] == 3 # First value stores the number of dimensions (18x300x500 = 3 dimensions)
+
shape_x = x[1:1+int(x[0])]
+
assert len(shape_x) == 3 # Number of dimensions
+
assert shape_x[0] == 18 # Size of the first dimension
+
assert shape_x[1] == 300 # Size of the second dimension
+
assert shape_x[2] == 500 # Size of the third dimension
+
arrayData = x[1+int(round(x[0])):]
+
arrayData = arrayData.reshape(shape_x.astype(int))  # Reshape back to (18 x 300 x 500); assumes row-major (C) order
+

+Heatmap Scaling

+

Note that --net_resolution sets the size of the network, and thus also the size of the output heatmaps. These heatmaps are resized while keeping the aspect ratio. When the aspect ratios of the input and the network are not the same, padding is added at the bottom and/or right of the output heatmaps.

+
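As a rough sketch of that resize-and-pad behavior (assumed arithmetic, not the exact internal code), the content area and padding of the output can be estimated as follows; all sizes are example values:

# Example values: --net_resolution and input image size
net_w, net_h = 656, 368
img_w, img_h = 1920, 1080

scale = min(net_w / img_w, net_h / img_h)              # resize while keeping the aspect ratio
content_w, content_h = round(img_w * scale), round(img_h * scale)
pad_right, pad_bottom = net_w - content_w, net_h - content_h
print(content_w, content_h, pad_right, pad_bottom)     # -> 654 368 2 0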
+
+
+ + + + diff --git a/web/html/doc/md_doc_advanced_standalone_face_or_hand_keypoint_detector.html b/web/html/doc/md_doc_advanced_standalone_face_or_hand_keypoint_detector.html new file mode 100644 index 000000000..9fa0201c4 --- /dev/null +++ b/web/html/doc/md_doc_advanced_standalone_face_or_hand_keypoint_detector.html @@ -0,0 +1,117 @@ + + + + + + + +OpenPose: OpenPose Advanced Doc - Standalone Face or Hand Keypoint Detector + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Advanced Doc - Standalone Face or Hand Keypoint Detector
+
+
+

For camera views in which the hands are visible but not the rest of the body, or if you do not need the body keypoint detector and want to speed up the process, you can use the OpenPose face or hand keypoint detectors with your own face or hand detectors, rather than using the body keypoint detector as the initial detector for those.

+

+OpenCV-based Face Keypoint Detector

+

Note that this method will be faster than the current system if there are few people in the image, but it is also much less accurate (the OpenCV face detector only works with big, frontal faces, while OpenPose works with more scales and face rotations).

./build/examples/openpose/openpose.bin --body 0 --face --face_detector 1
+

+Custom Standalone Face or Hand Keypoint Detector

+

Check the examples in examples/tutorial_api_cpp/, in particular examples/tutorial_api_cpp/06_face_from_image.cpp and examples/tutorial_api_cpp/07_hand_from_image.cpp. They provide examples of face and/or hand keypoint detection given a known bounding box or rectangle for the face and/or hand locations. These examples are equivalent to using the following flags:

# Face
+
examples/tutorial_api_cpp/06_face_from_image.cpp --body 0 --face --face_detector 2
+
# Hands
+
examples/tutorial_api_cpp/07_hand_from_image.cpp --body 0 --hand --hand_detector 2
+

Note: both the FaceExtractor and HandExtractor classes require square rectangles as input.

+

Advanced solution: If you want to use the whole OpenPose framework, you can use the synchronous examples of the tutorial_api_cpp folder with the configuration used for examples/tutorial_api_cpp/06_face_from_image.cpp and examples/tutorial_api_cpp/07_hand_from_image.cpp.

+
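A Python counterpart of these examples also exists (e.g., examples/tutorial_api_python/06_face_from_image.py in recent releases). The condensed sketch below shows the idea of feeding your own face rectangles; treat the import path, image path, and rectangle values as assumptions that depend on your build:

import cv2
import pyopenpose as op  # the import path depends on your build/installation

params = {"model_folder": "models/", "body": 0, "face": True, "face_detector": 2}
opWrapper = op.WrapperPython()
opWrapper.configure(params)
opWrapper.start()

datum = op.Datum()
datum.cvInputData = cv2.imread("examples/media/COCO_val2014_000000000241.jpg")
# Square face rectangle (x, y, width, height) provided by your own detector
datum.faceRectangles = [op.Rectangle(330.0, 277.0, 48.7, 48.7)]
opWrapper.emplaceAndPop(op.VectorDatum([datum]))
print(datum.faceKeypoints)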

+Cropping the Image for Hand/Face Keypoint Detection

+

If you are using your own hand or face images, you should leave about 10-20% margin between the end of the hand/face and the sides (left, top, right, bottom) of the image. We trained with that configuration, so it should be the ideal one for maximizing detection.

+

We did not use any solid-color-based padding; we simply cropped from the whole image. Thus, if you can, crop from the image rather than adding color-based padding. Otherwise, black padding should work well.

+
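A minimal sketch of such a crop (a hypothetical helper, not an OpenPose function): it builds a square region around a detected face/hand box, leaves roughly a 15% margin on each side, and clamps it to the image borders:

def square_crop_with_margin(image, x, y, w, h, margin=0.15):
    # image is a NumPy/OpenCV array; (x, y, w, h) is the detected box
    side = max(w, h) * (1.0 + 2.0 * margin)      # square side with margin on both sides
    cx, cy = x + w / 2.0, y + h / 2.0            # box center
    x0 = int(max(0, cx - side / 2.0))
    y0 = int(max(0, cy - side / 2.0))
    x1 = int(min(image.shape[1], cx + side / 2.0))
    y1 = int(min(image.shape[0], cy + side / 2.0))
    return image[y0:y1, x0:x1]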
+
+
+ + + + diff --git a/web/html/doc/md_doc_installation_0_index.html b/web/html/doc/md_doc_installation_0_index.html new file mode 100644 index 000000000..c9c260830 --- /dev/null +++ b/web/html/doc/md_doc_installation_0_index.html @@ -0,0 +1,349 @@ + + + + + + + +OpenPose: OpenPose Doc - Installation + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Installation
+
+
+

+Contents

+
    +
  1. Operating Systems, Requirements, and Dependencies
  2. +
  3. Windows Portable Demo
  4. +
  5. Compiling and Running OpenPose from Source
      +
    1. Problems and Errors Installing OpenPose
    2. +
    3. Prerequisites
    4. +
    5. Clone OpenPose
    6. +
    7. CMake Configuration
    8. +
    9. Compilation
    10. +
    11. Running OpenPose
    12. +
    13. Custom User Code
    14. +
    +
  6. +
  7. Compiling and Running OpenPose from Source on ROS, Docker, and Google Colab - Community-Based Work
  8. +
  9. OpenPose Live Demo at Tiyaro - Community-Based Work
  10. +
  11. Uninstalling, Reinstalling, or Updating OpenPose
  12. +
  13. Additional Settings (Optional)
  14. +
+

+Operating Systems, Requirements, and Dependencies

+
    +
  • Operating Systems +
  • +
  • Requirements for the default configuration
      +
    • CUDA (Nvidia GPU) version:
        +
      • NVIDIA graphics card with at least 1.6 GB available (the nvidia-smi command checks the available GPU memory in Ubuntu).
      • +
      • At least 2.5 GB of free RAM memory for BODY_25 model or 2 GB for COCO model (assuming cuDNN installed).
      • +
      • Highly recommended: cuDNN.
      • +
      +
    • +
    • OpenCL (AMD GPU) version:
        +
      • Vega series graphics card
      • +
      • At least 2 GB of free RAM memory.
      • +
      +
    • +
    • CPU-only (no GPU) version:
        +
      • Around 8GB of free RAM memory.
      • +
      +
    • +
    • Highly recommended: a CPU with at least 8 cores.
    • +
    +
  • +
  • Advanced tip: You might need more resources with a greater --net_resolution and/or scale_number or less resources by reducing the net resolution and/or using the MPI and MPI_4 models.
  • +
  • Dependencies:
      +
    • OpenCV (all 2.X and 3.X versions are compatible).
    • +
    • Caffe and all its dependencies. Have you ported OpenPose into another DL framework (Tensorflow, Caffe2, Pytorch, ...)? Email us (gines@alumni.cmu.edu) or feel free to make a pull request if you implemented any of those!
    • +
    • The demo and tutorials additionally use GFlags.
    • +
    +
  • +
+

+Windows Portable Demo

+

If you just want to use OpenPose without compiling or writing any code, simply use the latest portable version of OpenPose for Windows.

    +
  1. For maximum speed, you should run OpenPose on a machine with an Nvidia GPU (i.e., use the GPU version). If so, you must upgrade your Nvidia drivers to the latest version (in the Nvidia "GeForce Experience" software or its website).
  2. +
  3. Download the latest OpenPose version from the Releases section.
  4. +
  5. Follow the Instructions.txt file inside the downloaded zip file to download the models required by OpenPose (about 500 MB).
  6. +
  7. Then, you can run OpenPose from the PowerShell command-line by following doc/01_demo.md.
  8. +
+

Note: If you are using the GPU-accelerated version and are seeing Cuda check failed (3 vs. 0): initialization error when running OpenPose, you can fix it by doing one of these:

    +
  • Upgrade your Nvidia drivers. If the error persists, make sure your machine does not contain any CUDA version (or if so, that it is the same as the one used by the OpenPose portable demo files). Otherwise, uninstall that CUDA version. If you need to keep that CUDA version installed, follow Compiling and Running OpenPose from Source for that particular CUDA version instead.
  • +
  • Download an older OpenPose version (v1.6.0 does not show this error).
  • +
+

+Compiling and Running OpenPose from Source

+

The instructions in the following subsections describe the steps to build OpenPose using CMake-GUI. These instructions are only recommended if you plan to modify the OpenPose code or integrate it with another library or project. You can stop reading this document if you just wanted to run OpenPose on Windows without compiling or modifying any code.

+

+Problems and Errors Installing OpenPose

+

Any problem installing OpenPose while following these guidelines? Check doc/05_faq.md and/or check existing GitHub issues. If you don't find your issue, post a new one. We will not respond to duplicated issues, nor to GitHub issues about Caffe, OpenCV, or CUDA installation errors, nor to issues that do not fill in all the information that the GitHub template asks for.

+

+Prerequisites

+

Make sure to download and install the prerequisites for your particular operating system.

+

+Clone OpenPose

+

The first step is to clone the OpenPose repository.

+
    +
  1. Windows: You might use GitHub Desktop or clone it from Powershell.
  2. +
  3. Ubuntu, Mac, or Windows Powershell:
    git clone https://github.com/CMU-Perceptual-Computing-Lab/openpose
    +
    cd openpose/
    +
    git submodule update --init --recursive --remote
    +
  4. +
+

+CMake Configuration

+
    +
  1. Go to the OpenPose folder and open CMake-GUI from it. On Windows, double click on CMake-gui. On Ubuntu, Mac, or Windows Powershell:
    cd {OpenPose_folder}
    +
    mkdir build/
    +
    cd build/
    +
    cmake-gui ..
    +
  2. +
  3. Select the OpenPose directory as project source directory, and a non-existing or empty sub-directory (e.g., build) where the Makefile files (Ubuntu) or Visual Studio solution (Windows) will be generated. If build does not exist, it will ask you whether to create it. Press Yes.
  4. +
+

+
    +
  1. Press the Configure button, keep the generator in Unix Makefiles (Ubuntu) or set it to your 64-bit Visual Studio version (Windows), and press Finish. Note for Windows users: CMake-GUI has changed its design after version 14. For versions older than 14, you usually select Visual Studio XX 20XX Win64 as the generator (X depends on your VS version), while the Optional toolset to use must be empty. However, new CMake versions require you to select only the VS version as the generator, e.g., Visual Studio 16 2019, and then you must manually choose x64 for the Optional platform for generator. See the following images as an example.
  2. +
+

+
    +
  1. Enabling Python (optional step, only apply it if you plan to use the Python API): Enable the BUILD_PYTHON flag and click Configure again.
  2. +
  3. Set the GPU_MODE flag to the proper value and click Configure again:
      +
    1. If your machine has an Nvidia GPU, you should most probably not modify this flag and skip this step. Cases in which you might have to change it:
        +
      • If you have a Nvidia GPU with 2GB of memory or less: Then you will have to follow some of the tricks in doc/06_maximizing_openpose_speed.md or change GPU_MODE back to CPU_ONLY.
      • +
      • If you cannot install CUDA, then you can also set GPU_MODE to CPU_ONLY.
      • +
      +
    2. +
    3. Mac OSX and machines with a non-Nvidia GPU (Intel or AMD GPUs): Set the GPU_MODE flag to CPU_ONLY (easier to install but slower runtime) or OPENCL (GPU-accelerated, it is harder to install but provides a faster runtime speed). For more details on OpenCL support, see doc/1_prerequisites.md and OpenCL Version.
    4. +
    5. If your machine does not have any GPU, set the GPU_MODE flag to CPU_ONLY.
    6. +
    +
  4. +
  5. If this step is successful, the Configuring done text will appear in the bottom box in the last line. Otherwise, some red text will appear in that same bottom box.
  6. +
+

+
    +
  1. Press the Generate button and proceed to Compilation. You can now close CMake.
  2. +
+

Note: For other optional and custom options (e.g., using your custom Caffe or OpenCV versions), see the Additional Settings (Optional) documentation.

+

+Compilation

+

+Ubuntu

+

Run the following commands in your terminal.

cd build/
+
make -j`nproc`
+

+Mac

+

Run the following commands in your terminal:

cd build/
+
make -j`sysctl -n hw.logicalcpu`
+

Advanced tip: Mac provides both logicalcpu and physicalcpu, but we want the logical number for maximum speed.

+

If the default compilation fails with Caffe errors, install Caffe separately and set BUILD_CAFFE to false in the CMake config. Steps:

    +
  • Re-create the build folder: rm -rf build; mkdir build; cd build.
  • +
  • brew uninstall caffe to remove the version of Caffe previously installed via cmake.
  • +
  • brew install caffe to install Caffe separately.
  • +
  • Run cmake-gui and make the following adjustments to the cmake config:
      +
    1. BUILD_CAFFE set to false.
    2. +
    3. Caffe_INCLUDE_DIRS set to /usr/local/include/caffe.
    4. +
    5. Caffe_LIBS set to /usr/local/lib/libcaffe.dylib.
    6. +
    7. Run Configure and Generate from CMake GUI.
    8. +
    +
  • +
+

If you face an OpenCV error at compile time similar to fatal error: 'opencv2/highgui/highgui.hpp' file not found, please apply the following patch (this error has been reported in the latest OSX 10.14):

cd 3rdparty/caffe; git apply ../../scripts/osx/mac_opencl_patch.txt
+

+Windows

+

In order to build the project, select and run only one of the 2 following alternatives.

+
    +
  • CMake-GUI alternative (recommended):
      +
    1. Open the Visual Studio solution (Windows) by clicking in Open Project in CMake (or alternatively build/OpenPose.sln). Then, set the configuration from Debug to Release.
    2. +
    3. Press F7 (or Build menu and click on Build Solution).
    4. +
    5. Important for Python version: Make sure not to skip step 2, it is not enough to click on F5 (Run), you must also Build Solution for the Python bindings to be generated.
    6. +
    7. After it has compiled, and if you have a webcam, you can press the green triangle icon (alternatively F5) to run the OpenPose demo with the default settings on the webcam.
    8. +
    +
  • +
  • Command-line build alternative (not recommended). NOTE: The command line alternative is not officially supported, but it was added in GitHub issue #1198. For any questions or bug report about this command-line version, comment in that GitHub issue.
      +
    1. Run the "MSVS 2019 Developer Command Console" and, from the openpose folder, run the following batch commands:
    mkdir build
    cd build
    cmake .. -G "Visual Studio 16 2019" -A x64 -T v142
    cmake --build . --config Release
    copy x64\Release\* bin\
    2. +
    3. If you want a clean build, run the following batch commands:
    cmake --clean-first .
    cmake --build . --config Release
    copy x64\Release\* bin\
    4. +
    +
  • +
+

NOTE: To set GPU_MODE flag to CPU_ONLY when building the tool via cli, append -D GPU_MODE:STRINGS=CPU_ONLY to the cmake command.

+

VERY IMPORTANT NOTE: In order to use OpenPose outside Visual Studio, and assuming you have not unchecked the BUILD_BIN_FOLDER flag in CMake, copy all DLLs from {build_directory}/bin into the folder where the generated openpose.dll and *.exe demos are, e.g., {build_directory}/x64/Release for the 64-bit release version.

+

If you are facing errors with these instructions, these are a set of alternative instructions created by the community:

+

We welcome users to send us their installation videos (e.g., sharing them as GitHub issue or doing a pull request) and we will post them here.

+

+Running OpenPose

+

Check OpenPose was properly installed by running any demo example: doc/01_demo.md.

+

+Custom User Code

+

You can quickly add your custom code so that quick prototypes can be easily tested without having to create a whole new project just for it. See examples/user_code/README.md for more details.

+

+Compiling and Running OpenPose from Source on ROS, Docker, and Google Colab - Community-Based Work

+

If you do not want to use the Windows portable binaries nor compile OpenPose from source, we add links to some community-based work based on OpenPose. Note: We do not support them, and we will remove new GitHub issues opened asking about them as well as block those users from posting again. If you face any issue, comment only in the GitHub issue links specified below, or ask their owners.

+ +

+OpenPose Live Demo at Tiyaro - Community-Based Work

+

You can find a Live Demo of the OpenPose API at Tiyaro.ai. We do not officially support it, but a Tiyaro co-founder added this support in [2129]. Feel free to comment on that post if you have questions about it.

+

+

+Uninstalling, Reinstalling, or Updating OpenPose

+

OpenPose can be easily uninstalled:

    +
  1. (Ubuntu and Mac) If you ran sudo make install (which we do not recommend), then run sudo make uninstall in build/.
  2. +
  3. Remove the OpenPose folder.
  4. +
+

In order to update it or reinstall it:

    +
  1. Follow the above steps to uninstall it.
  2. +
  3. Follow the Compiling and Running OpenPose from Source steps again.
  4. +
+

+Additional Settings (Optional)

+

Check the Additional Settings (Optional) documentation if you want to:

    +
  1. Deploy or Export OpenPose to Other Projects
  2. +
  3. Maximum Speed
  4. +
  5. Faster CPU Version (Ubuntu Only)
  6. +
  7. OpenCL Version
  8. +
  9. COCO and MPI Models
  10. +
  11. 3D Reconstruction Module
  12. +
  13. Calibration Module
  14. +
  15. Unity Compatible Version
  16. +
  17. Compile without cuDNN
  18. +
  19. Custom Caffe
  20. +
  21. Custom NVIDIA NVCaffe
  22. +
  23. Custom OpenCV
  24. +
  25. Doxygen Documentation Autogeneration (Ubuntu Only)
  26. +
  27. CMake Command Line Configuration (Ubuntu Only)
  28. +
+
+
+
+ + + + diff --git a/web/html/doc/md_doc_installation_1_prerequisites.html b/web/html/doc/md_doc_installation_1_prerequisites.html new file mode 100644 index 000000000..572777272 --- /dev/null +++ b/web/html/doc/md_doc_installation_1_prerequisites.html @@ -0,0 +1,257 @@ + + + + + + + +OpenPose: OpenPose Doc - Installation - Prerequisites + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Installation - Prerequisites
+
+
+

+Contents

+
    +
  1. General Tips
  2. +
  3. Ubuntu Prerequisites
  4. +
  5. Mac OS Prerequisites
  6. +
  7. Windows Prerequisites
  8. +
+

+General Tips

+

These tips are very important and avoid many bugs:

    +
  • Install the latest CUDA version or make sure your GPU is compatible with the CUDA version you have in your system. E.g., Nvidia 30XX GPUs require at least CUDA 11, others (GTX 20XX, V100, Volta or Turing GPUs) require at least CUDA 10.
  • +
  • CMake automatically downloads all the OpenPose models. However, some firewall or company networks block these downloads. If so, you might need to download them manually: +
  • +
+

+Ubuntu Prerequisites

+
    +
  1. Anaconda should not be installed on your system or should be deactivated. Anaconda includes a Protobuf version that is incompatible with Caffe. Either you uninstall anaconda and install protobuf via apt-get, or you deactivate Conda with the command conda deactivate (twice if you are not in the base environment).
  2. +
  3. Install CMake GUI:
      +
    • Ubuntu 20: Run the command sudo apt-get install cmake-qt-gui.
    • +
    • Ubuntu 18: Download and compile CMake-gui from source. The default CMake-gui version (3.10) installed via sudo apt-get install cmake-qt-gui provokes some compiling errors. Required CMake version >= 3.12.
        +
      • Uninstall your current Cmake-gui version by running sudo apt purge cmake-qt-gui.
      • +
      • Install OpenSSL for building CMake by running sudo apt install libssl-dev.
      • +
      • Run sudo apt-get install qtbase5-dev.
      • +
      • Download the Latest Release of CMake Unix/Linux Source from the CMake download website, called cmake-X.X.X.tar.gz.
      • +
      • Unzip it and go inside that folder from the terminal.
      • +
      • Run ./configure --qt-gui. Make sure no error occurred.
      • +
      • Run ./bootstrap && make -j`nproc` && sudo make install -j`nproc`. Make sure no error occurred.
      • +
      • Assuming your CMake downloaded folder is in {CMAKE_FOLDER_PATH}, every time these instructions mention cmake-gui, you will have to replace that command with {CMAKE_FOLDER_PATH}/bin/cmake-gui.
      • +
      +
    • +
    • Ubuntu 14 or 16: Run the command sudo apt-get install cmake-qt-gui. Note: If you prefer to use CMake through the command line, see doc/installation/0_index.md#CMake-Command-Line-Configuration-(Ubuntu-Only).
    • +
    +
  4. +
  5. Nvidia GPU version prerequisites:
      +
    1. Note: OpenPose has been tested extensively with CUDA 11.7.1 (cuDNN 8.5.0) for Ubuntu 20. Older OpenPose versions (v1.6.X and v1.5.X) were tested with CUDA 10.1 (cuDNN 7.5.1) for Ubuntu 18 and CUDA 8.0 (cuDNN 5.1) for Ubuntu 14 and 16. We highly recommend using those combinations to minimize potential installation issues. Other combinations should also work, but we do not provide any support about installation/compilation issues related to CUDA/cuDNN or their integration with OpenPose. Note: If Secure Boot is enabled (by default it is not), the MOK key installation part might be mandatory. For that, record the public key output path and run sudo mokutil --import PATH_TO_PUBLIC_KEY manually if the automatic install failed.
    2. +
    3. Upgrade your Nvidia drivers to the latest version.
        +
      • For Ubuntu 20, download (515.65)
      • +
      +
    4. +
    5. CUDA: You can simply run sudo bash ./scripts/ubuntu/install_cuda.sh if you are not too familiar with CUDA. If you are, then you could also do one of the following instead:
        +
      • Ubuntu 20 (CUDA 11.7.1): Download CUDA 11.7.1 from their official website. Most Ubuntu computers use the Architecture named x86_64, and we personally recommend the Installer Type named runfile (local). Then, follow the Nvidia website installation instructions. When installing, make sure to enable the symbolic link in /usr/local/cuda to minimize potential future errors. If the (Nvidia) drivers were installed manually, untick the "install driver" option.
      • +
      • Ubuntu 18 (CUDA 10.1): Analog to the instructions for Ubuntu 20, but using CUDA version 10.1.
      • +
      • Ubuntu 14 or 16 (CUDA 8 or 10): Run sudo ./scripts/ubuntu/install_cuda.sh (if Ubuntu 16 or 14 and for Graphic cards up to 10XX) or alternatively download and install it from their website.
      • +
      +
    6. +
    7. cuDNN:
        +
      • Download it (usually called cuDNN Library for Linux (x86_64)):
          +
        • Ubuntu 20: cuDNN 8.5.0. cuDNN is currently not recommended due to performance degradation issues outlined in #1864.
        • +
        • Ubuntu 18: cuDNN 7.5.1.
        • +
        • Ubuntu 14 or 16 (cuDNN 5.1 or 7.2): Run sudo ./scripts/ubuntu/install_cudnn_up_to_Ubuntu16.sh (if Ubuntu 16 or 14 and for Graphic cards up to 10XX) or alternatively download it from their website.
        • +
        +
      • +
      • And install it:
          +
        • In order to manually install it (any version), just unzip it and copy (merge) its contents on the CUDA folder, usually /usr/local/cuda-{version}/ in Ubuntu and C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v{version}\ in Windows.
        • +
        +
      • +
      +
    8. +
    +
  6. +
  7. OpenCL / AMD GPU version prerequisites (only if you do not have an Nvidia GPU and want to run on AMD graphic cards):
      +
    • Ubuntu 20 or 18: Not tested and not officially supported. Try at your own risk. You might want to use the CPU version if no Nvidia GPU is available.
    • +
    • Ubuntu 14 or 16:
        +
      1. Download 3rd party ROCM driver for Ubuntu from AMD - OpenCL.
      2. +
      3. Install sudo apt-get install libviennacl-dev.
      4. +
      +
    • +
    +
  8. +
  9. Install Caffe, OpenCV, and Caffe prerequisites:
      +
    • OpenCV must be already installed on your machine. It can be installed with sudo apt-get install libopencv-dev. You could also use your own compiled OpenCV version.
    • +
    • Caffe prerequisites: By default, OpenPose uses Caffe under the hood. If you have not used Caffe previously, install its dependencies by running sudo bash ./scripts/ubuntu/install_deps.sh after installing your desired CUDA and cuDNN versions.
    • +
    • CMake config generation prerequisites (they might be already installed by default): sudo apt install protobuf-compiler libgoogle-glog-dev.
    • +
    • OpenPose make prerequisites (they might be already installed by default): sudo apt install libboost-all-dev libhdf5-dev libatlas-base-dev.
    • +
    +
  10. +
  11. Python prerequisites (optional, only if you plan to use the Python API): python-dev, Numpy (for array management), and OpenCV (for image loading).
    # Python 3 (default and recommended)
    +
    sudo apt-get install python3-dev
    +
    sudo pip3 install numpy opencv-python
    +
    +
    # Python 2
    +
    sudo apt-get install python-dev
    +
    sudo pip install numpy opencv-python
    +
  12. +
+

+Mac OS Prerequisites

+
    +
  1. If you don't have brew, install it by running bash scripts/osx/install_brew.sh on your terminal.
  2. +
  3. Install CMake GUI: Run the command brew install --cask cmake.
  4. +
  5. Install Caffe, OpenCV, and Caffe prerequisites: Run bash scripts/osx/install_deps.sh.
  6. +
+

+Windows Prerequisites

+

NOTE: These instructions are only required when compiling OpenPose from source. If you simply want to use the OpenPose binaries for Windows, skip this step.

+
    +
  1. Install CMake GUI: Download and install the Latest Release of CMake Windows win64-x64 Installer from the CMake download website, called cmake-X.X.X-win64-x64.msi.
  2. +
  3. Install Microsoft Visual Studio (VS) 2019 Enterprise, Microsoft Visual Studio (VS) 2017 Enterprise or VS 2015 Enterprise Update 3:
      +
    • IMPORTANT: Enable all C++-related flags when selecting the components to install.
    • +
    • Different VS versions:
        +
      • If Visual Studio 2019 Community (or 2017) is desired, we do not officially support it, but it should run similarly to VS 2017/2019 Enterprise.
      • +
      +
    • +
    +
  4. +
  5. Nvidia GPU version prerequisites:
      +
    1. Note: OpenPose has been tested extensively with CUDA 11.1.1 (cuDNN 8.1.0) for VS2019. Older OpenPose versions (v1.6.X and v1.5.X) were tested with CUDA 10.1 (cuDNN 7.5.1) for VS2017 and CUDA 8.0 (cuDNN 5.1) for VS2015. We highly recommend using those combinations to minimize potential installation issues. Other combinations should also work, but we do not provide any support about installation/compilation issues related to CUDA/cuDNN or their integration with OpenPose.
    2. +
    3. Upgrade your Nvidia drivers to the latest version (in the Nvidia "GeForce Experience" software or its website).
    4. +
    5. Install one out of CUDA 11.1.1, CUDA 10.1, or CUDA 8:
        +
      • Install CUDA 11.1.1/10.0/8.0 after Visual Studio 2019/2017/2015 is installed to assure that the CUDA installation will generate all necessary files for VS. If CUDA was installed before installing VS, then re-install CUDA.
      • +
      • Important installation tips:
          +
        • If CMake returns an error message similar to CUDA_TOOLKIT_ROOT_DIR not found or specified or any other CUDA component missing, then: 1) Re-install Visual Studio 2015; 2) Reboot your PC; 3) Re-install CUDA (in this order!).
        • +
        +
      • +
      +
    6. +
    7. cuDNN 8.1.0, cuDNN 7.5.1, or cuDNN 5.1:
        +
      • In order to manually install it, just unzip it and copy (merge) the contents on the CUDA folder, usually C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v{version} in Windows and /usr/local/cuda-{version}/ in Ubuntu.
      • +
      +
    8. +
    +
  6. +
  7. AMD GPU version prerequisites (only if you do not have an Nvidia GPU and want to run on AMD graphic cards):
      +
    1. Download the official AMD drivers for Windows from AMD - Windows.
    2. +
    3. The libviennacl package comes packaged inside OpenPose for Windows (i.e., no further action required).
    4. +
    +
  8. +
  9. Caffe, OpenCV, and Caffe prerequisites:
      +
    • CMake automatically downloads all the Windows DLLs. Alternatively, you might prefer to download them manually:
        +
      • Dependencies:
          +
        • Note: Leave the zip files in 3rdparty/windows/ so that CMake does not try to download them again.
        • +
        • Caffe (if you are not sure which one you need, download the default one): +
        • +
        • Caffe dependencies: Unzip as 3rdparty/windows/caffe3rdparty/.
        • +
        • OpenCV 4.2.0: Unzip as 3rdparty/windows/opencv/.
        • +
        +
      • +
      +
    • +
    +
  10. +
  11. Python prerequisites (optional, only if you plan to use the Python API): Install any Python 3.X version for Windows, and then:
    pip install numpy opencv-python
    +
  12. +
+
+
+
+ + + + diff --git a/web/html/doc/md_doc_installation_2_additional_settings.html b/web/html/doc/md_doc_installation_2_additional_settings.html new file mode 100644 index 000000000..259cbf175 --- /dev/null +++ b/web/html/doc/md_doc_installation_2_additional_settings.html @@ -0,0 +1,263 @@ + + + + + + + +OpenPose: OpenPose Doc - Installation - Additional Settings (Optional) + + + + + + + + + + + + + +
+
+ + + + + + + +
+
OpenPose +  1.7.0 +
+
The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
+
+
+ + + + + + + +
+
+ +
+
+
+ +
+ +
+
+ + +
+ +
+ +
+
+
OpenPose Doc - Installation - Additional Settings (Optional)
+
+
+

+Contents

+
    +
  1. Additional Settings (Optional)
      +
    1. Deploy or Export OpenPose to Other Projects
    2. +
    3. Maximum Speed
    4. +
    5. Faster CPU Version (Ubuntu Only)
    6. +
    7. OpenCL Version
    8. +
    9. COCO and MPI Models
    10. +
    11. 3D Reconstruction Module
    12. +
    13. Calibration Module
    14. +
    15. Unity Compatible Version
    16. +
    17. Compile without cuDNN
    18. +
    19. Custom Caffe
    20. +
    21. Custom NVIDIA NVCaffe
    22. +
    23. Custom OpenCV
    24. +
    25. Doxygen Documentation Autogeneration (Ubuntu Only)
    26. +
    27. CMake Command Line Configuration (Ubuntu Only)
    28. +
    +
  2. +
+

+Additional Settings (Optional)

+

+Deploy or Export OpenPose to Other Projects

+

See doc/advanced/deployment.md.

+

+Maximum Speed

+

Check the OpenPose Benchmark as well as some hints to speed up and/or reduce the memory requirements to run OpenPose on doc/06_maximizing_openpose_speed.md.

+

+Faster CPU Version (Ubuntu Only)

+

NOTE: The accuracy of the CPU/OpenCL versions is a bit lower than that of the CUDA version, so the results will vary very slightly. In practice, the difference is barely noticeable, so you are safe using these versions.

+

This step is only supported for Intel CPUs on Ubuntu versions 16 and 14. It does not compile on Ubuntu 20, and we have not tested it on Ubuntu 18.

+

After setting the GPU_MODE flag to CPU_ONLY and clicking Configure, search for USE_MKL and set it to true. Then, click Configure again. This way, OpenPose will link against the Intel MKL (Math Kernel Library) version of Caffe. This speeds up the CPU version on Ubuntu roughly 2-3x, making it as fast as the Windows CPU-only version.

+

The default CPU version takes about 0.2 images per second on Ubuntu (~50x slower than the GPU version), while the MKL version provides a roughly 2x speedup at ~0.4 images per second. As of now, OpenPose does not support MKL on Windows, but it will at a later date. Also, the MKL version does not support variable resolution, so a folder of images of different resolutions requires a fixed net resolution (e.g., --net_resolution 656x368).

+

For MKL, the user can configure the environment variables MKL_NUM_THREADS and OMP_NUM_THREADS. They are set to an optimal value by default (i.e., to the number of threads of the machine). However, they can be tweaked by running the following commands in the terminal window, right before running any OpenPose application. E.g.:

+
# Optimal number = Number of threads (used by default)
+
export MKL_NUM_THREADS="8"
+
export OMP_NUM_THREADS="8"
+

Increasing the number of threads results in a higher RAM memory usage. You can check the doc/06_maximizing_openpose_speed.md for more information about speed and memory requirements in several CPUs and GPUs.

+

+OpenCL Version

+

NOTE: The accuracy of the CPU/OpenCL versions is a bit lower than that of the CUDA version, so the results will vary very slightly. In practice, the difference is barely noticeable, so you are safe using these versions.

+

If you have an AMD graphics card, you can compile OpenPose with the OpenCL option. To manually select the OpenCL Version, open CMake GUI mentioned above, and set the GPU_MODE flag to OPENCL (or non-UI CMake with GPU_MODE=OPENCL). Very important: If you compiled previously the CPU-only or CUDA versions on that same OpenPose folder, you will have to manually delete the build directory and run the installation steps from scratch. Otherwise, many weird errors will appear.

+

The OpenCL version has been tested on Ubuntu, Windows and OSX. This has been tested only on AMD Vega series and NVIDIA 10 series graphics cards. Please email us if you have issues with other operating systems or graphics cards. Running on OSX on a Mac with an AMD graphics card requires special instructions which can be seen in the section below.

+

Lastly, the OpenCL version does not support variable --net_resolution, so processing a folder of images of different resolutions requires a fixed resolution flag (e.g., --net_resolution 656x368). This should be fixed by the Caffe author in a future patch.

+

+COCO and MPI Models

+

By default, the body COCO and MPI models are not downloaded (they are slower and less accurate than BODY_25, so not useful in most cases!). But you can download them by turning on the DOWNLOAD_BODY_COCO_MODEL or DOWNLOAD_BODY_MPI_MODEL flags. Check the differences between these models in doc/05_faq.md#difference-between-body_25-vs-coco-vs-mpi.

+

+3D Reconstruction Module

+

You can include the 3D reconstruction module by:

+
    +
  1. Install the FLIR camera software, Spinnaker SDK. It is proprietary software, so we cannot provide a direct download link. Note: You might skip this step if you intend to use the 3-D OpenPose module with a different camera brand.
      +
    1. Ubuntu: Get and install the latest Spinnaker SDK version in its default path. OpenPose will automatically find it. Otherwise, set the right path with CMake.
    2. +
    3. Windows: Download the latest Spinnaker SDK version from https://www.ptgrey.com/support/downloads.
        +
      • Copy {PointGreyParentDirectory}\Point Grey Research\Spinnaker\bin64\vs2015\ as {OpenPoseDirectory}\3rdparty\windows\spinnaker\bin\. You can remove all the *.exe files.
      • +
      • Copy {PointGreyParentDirectory}\Point Grey Research\Spinnaker\include\ as {OpenPoseDirectory}\3rdparty\windows\spinnaker\include\.
      • +
      • Copy Spinnaker_v140.lib and Spinnakerd_v140.lib from {PointGreyParentDirectory}\Point Grey Research\Spinnaker\lib64\vs2015\ into {OpenPoseDirectory}\3rdparty\windows\spinnaker\lib\.
      • +
      • (Optional) Spinnaker SDK overview: https://www.ptgrey.com/spinnaker-sdk.
      • +
      +
    4. +
    +
  2. +
  3. Install the 3D visualizer, FreeGLUT:
      +
    1. Ubuntu: run sudo apt-get update && sudo apt-get install build-essential freeglut3 freeglut3-dev libxmu-dev libxi-dev and reboot your PC.
    2. +
    3. Windows:
        +
      1. It is automatically downloaded by the CMake installer.
      2. +
      3. Alternatively, if you prefer to download it yourself, you could either:
          +
        1. Double click on 3rdparty\windows\getFreeglut.bat.
        2. +
        3. Download this version from our server and unzip it in {OpenPoseDirectory}\3rdparty\windows\freeglut\.
        4. +
        5. Download the latest MSVC Package from http://www.transmissionzero.co.uk/software/freeglut-devel/.
            +
          • Copy {freeglutParentDirectory}\freeglut\bin\x64\ as {OpenPoseDirectory}\3rdparty\windows\freeglut\bin\.
          • +
          • Copy {freeglutParentDirectory}\freeglut\include\ as {OpenPoseDirectory}\3rdparty\windows\freeglut\include\.
          • +
          • Copy {freeglutParentDirectory}\freeglut\lib\x64\ as {OpenPoseDirectory}\3rdparty\windows\freeglut\lib\.
          • +
          +
        6. +
        +
      4. +
      +
    4. +
    +
  4. +
  5. Follow the CMake installation steps. In addition, set the WITH_FLIR_CAMERA (only if Spinnaker was installed) and WITH_3D_RENDERER options.
  6. +
  7. Increased accuracy with Ceres solver (Ubuntu only): For extra 3-D reconstruction accuracy, run sudo apt-get install libeigen3-dev, install Ceres solver, and enable WITH_CERES in CMake when installing OpenPose. Ceres is harder to install in Windows, so we have not tested it so far in there. Feel free to make a pull request if you do.
  8. +
+

After installation, check the doc/advanced/3d_reconstruction_module.md instructions.

+

+Calibration Module

+

The intrinsic camera calibration toolbox is included by default.

+

To enable the extrinsic camera parameter estimation toolbox, you must also enable WITH_EIGEN in CMake during CMake Configuration. You can apply any of the 3 following options (but only 1 of them!):

    +
  • Recommended: Simply set the WITH_EIGEN flag to AUTOBUILD. CMake will automatically download Eigen and configure OpenPose to use it. If you prefer to download it manually (or if your firewall blocks CMake from downloading it): +
  • +
  • Advanced (not recommended): If you set WITH_EIGEN to FIND, you must have Eigen already installed in your system. Note that Eigen <= 3.3.6 is not supported by CUDA >=9.1. In order to install it (make sure that Eigen version is compatible with CUDA!):
      +
    • Run sudo apt-get install libeigen3-dev and point CMake to the installed Eigen version.
    • +
    +
  • +
  • Advanced (not recommended): Or you could also use your own version of Eigen by setting WITH_EIGEN to AUTOBUILD, click Configure to let CMake download the zip file, and replace 3rdparty/eigen/ by your own version.
  • +
+

After installation, check the doc/advanced/calibration_module.md instructions.

+

+Unity Compatible Version

+

Check Unity Plugin.

+

However, the OpenPose Unity version will crash if it faces an error while it is not used inside Unity. Thus, do not use it outside Unity, although this version would work as long as no errors occur.

+

+Compile without cuDNN

+

The cuDNN library is not mandatory, but required for full keypoint detection accuracy. In case your graphics card is not compatible with cuDNN, you can disable it by unchecking USE_CUDNN in CMake.

+

Then, you would have to reduce the --net_resolution flag to fit the model into the GPU memory. You can try values like 640x320, 320x240, 320x160, or 160x80 to see your GPU memory capabilities. After finding the maximum approximate resolution that your GPU can handle without throwing an out-of-memory error, adjust the net_resolution ratio to your image or video to be processed (see the --net_resolution explanation from doc/advanced/demo_advanced.md), or use -1 (e.g., --net_resolution -1x320).

+
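As a rough helper for that tuning (an assumption-based sketch, not an official tool), the width that matches your input aspect ratio for a given height can be computed as follows, rounding both dimensions to multiples of 16 as the flag expects; this roughly mirrors what the -1 width does:

def net_resolution_for(image_width, image_height, target_height=320):
    # Round both dimensions to multiples of 16 (assumed requirement of --net_resolution)
    height = max(16, (target_height // 16) * 16)
    width = max(16, round(image_width / image_height * height / 16) * 16)
    return f"{width}x{height}"

print(net_resolution_for(1920, 1080))  # -> "576x320" for a 16:9 input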

+Custom Caffe

+

OpenPose uses a custom fork of Caffe (rather than the official Caffe master). Our custom fork is only updated if it works on our machines, but we try to keep it updated with the latest Caffe version. This version works on a newly formatted machine (Ubuntu 16.04 LTS) and in all our machines (CUDA 8 and 10 tested). The default GPU version is the master branch, which is also compatible with CUDA 10 without changes (the official Caffe version might require some changes for it). We also use the OpenCL and CPU tags if their CMake flags are selected. We only modified some Caffe compilation flags and minor details.

+

Alternatively, you can use your own Caffe distribution on Ubuntu/Mac by 1) disabling BUILD_CAFFE, 2) setting Caffe_INCLUDE_DIRS to {CAFFE_PATH}/include/caffe, and 3) setting Caffe_LIBS to {CAFFE_PATH}/build/lib/libcaffe.so, as shown in the image below. Note that cuDNN-compatible Caffe version is required in order to get the maximum possible accuracy in OpenPose.

+

+

For Windows, simply replace the Caffe DLLs and include folder with your custom ones.

+

+Custom NVIDIA NVCaffe

+

This functionality was added by the community, and we do not officially support it. New pull requests with additional functionality or fixing any bug are welcome!

+

It has been tested with the official Nvidia Docker image nvcr.io/nvidia/caffe:18.12-py2.

+

For questions and issues, please only post on the related Pull Request #1169. New GitHub issues about this topic (i.e., outside PR #1169) will be automatically closed with no answer.

+

Windows support has not been added. Replace set_property(CACHE DL_FRAMEWORK PROPERTY STRINGS CAFFE) by set_property(CACHE DL_FRAMEWORK PROPERTY STRINGS CAFFE NV_CAFFE) in CMakeLists.txt if you intend to use it for Windows, and feel free to do a pull request of it working!

+

To use an NVIDIA NVCaffe Docker image instead of the standard Caffe, set the following CMake flags:

+
    +
  1. Set the DL_FRAMEWORK variable to NV_CAFFE.
  2. +
  3. Set the BUILD_CAFFE variable to OFF.
  4. +
  5. Set the correct Caffe_INCLUDE_DIRS and Caffe_LIBS paths following Custom Caffe.
  6. +
+

In addition, peter-uhrig.de/openpose-with-nvcaffe-in-a-singularity-container-with-support-for-multiple-architectures/ contains a detailed step-by-step guide to install a portable container with NVCaffe and support for multiple NVidia cards as well as CPU.

+

+Custom OpenCV

+

If you have built OpenCV from source and OpenPose cannot find it automatically, you can set the OPENCV_DIR variable to the directory where you built OpenCV (Ubuntu and Mac). For Windows, simply replace the OpenCV DLLs and include folder with your custom ones.

+

+Doxygen Documentation Autogeneration (Ubuntu Only)

+

You can generate the documentation by setting the BUILD_DOCS flag. The documentation will be generated in doc/doxygen/html/index.html. You can simply open it with double-click (your default browser should automatically display it).

+

+CMake Command Line Configuration (Ubuntu Only)

+

Note that this step is unnecessary if you already used the CMake GUI alternative.

+

Create a build folder in the root OpenPose folder, where you will build the library –

cd openpose
+
mkdir build
+
cd build
+

The next step is to generate the Makefiles. Now there can be multiple scenarios based on what the user already has, e.g., Caffe might already be installed and the user might be interested in building OpenPose against that version of Caffe instead of requiring OpenPose to build Caffe from scratch.

+

+Scenario 1 - Caffe not installed and OpenCV installed using apt-get

+

In the build directory, run the below command –

cmake ..
+

+Scenario 2 - Caffe installed and OpenCV built from source

+

In this example, we assume that Caffe and OpenCV are already present. The user needs to supply the paths of the libraries and the include directories to CMake. For OpenCV, specify the include directories and the libraries directory using OpenCV_INCLUDE_DIRS and OpenCV_LIBS_DIR variables respectively. Alternatively, the user can also specify the path to the OpenCVConfig.cmake file by setting the OpenCV_CONFIG_FILE variable. For Caffe, specify the include directory and library using the Caffe_INCLUDE_DIRS and Caffe_LIBS variables. This will be where you installed Caffe. Below is an example of the same.

cmake -DOpenCV_INCLUDE_DIRS=/home/"${USER}"/softwares/opencv/build/install/include \
+
-DOpenCV_LIBS_DIR=/home/"${USER}"/softwares/opencv/build/install/lib \
+
-DCaffe_INCLUDE_DIRS=/home/"${USER}"/softwares/caffe/build/install/include \
+
-DCaffe_LIBS=/home/"${USER}"/softwares/caffe/build/install/lib/libcaffe.so -DBUILD_CAFFE=OFF ..
+
cmake -DOpenCV_CONFIG_FILE=/home/"${USER}"/softwares/opencv/build/install/share/OpenCV/OpenCVConfig.cmake \
+
-DCaffe_INCLUDE_DIRS=/home/"${USER}"/softwares/caffe/build/install/include \
+
-DCaffe_LIBS=/home/"${USER}"/softwares/caffe/build/install/lib/libcaffe.so -DBUILD_CAFFE=OFF ..
+

+Scenario 3 - OpenCV already installed

+

If Caffe is not already present but OpenCV is, then use the below command.

cmake -DOpenCV_INCLUDE_DIRS=/home/"${USER}"/softwares/opencv/build/install/include \
+
-DOpenCV_LIBS_DIR=/home/"${USER}"/softwares/opencv/build/install/lib ..
+
cmake -DOpenCV_CONFIG_FILE=/home/"${USER}"/softwares/opencv/build/install/share/OpenCV/OpenCVConfig.cmake ..
+

+Any Other Scenario

+

You can check the CMake online documentation for all the options that CMake provides, and their analogs to the CMake-GUI options shown in this document.

+
+
+
diff --git a/web/html/doc/md_doc_installation_deprecated_installation_deprecated.html b/web/html/doc/md_doc_installation_deprecated_installation_deprecated.html new file mode 100644 index 000000000..d00efd7ec --- /dev/null +++ b/web/html/doc/md_doc_installation_deprecated_installation_deprecated.html @@ -0,0 +1,269 @@
OpenPose Doc - Installation (deprecated)
+
+
+

NOTE: Do not use this document, see doc/installation/0_index.md instead. This deprecated installation document is kept just for backwards compatibility, but it should not be used.

+

+Contents

+
  1. Operating Systems
  2. Requirements
  3. Clone OpenPose
  4. Update OpenPose
  5. Ubuntu
  6. Windows
  7. Doxygen Documentation Autogeneration
  8. Custom Caffe
  9. Compiling without cuDNN
+

+Operating Systems

+

See doc/installation/0_index.md#operating-systems.

+

+Requirements

+

See doc/installation/0_index.md#requirements.

+

+Clone OpenPose

+

See doc/installation/0_index.md#clone-openpose.

+

+Update OpenPose

+

See doc/installation/0_index.md#update-openpose.

+

+Ubuntu

+

+Installation - CMake

+

This is the recommended installation method; it is simpler and offers more customization settings. See doc/installation/0_index.md.

+

+Prerequisites (Script Compilation or Manual Compilation)

+

CUDA, cuDNN, OpenCV and Atlas must be already installed on your machine:

  1. CUDA must be installed. You should reboot your machine after installing CUDA.
  2. cuDNN: Once you have downloaded it, just unzip it and copy (merge) the contents into the CUDA folder, e.g., /usr/local/cuda-8.0/. Note: We found OpenPose working ~10% faster with cuDNN 5.1 compared to cuDNN 6. Otherwise, check the section Compiling without cuDNN.
  3. OpenCV can be installed with apt-get install libopencv-dev. If you have compiled OpenCV 3 on your own, follow Manual Compilation. After both Makefile.config files have been generated, edit them and uncomment the line # OPENCV_VERSION := 3. You might alternatively modify all Makefile.config.UbuntuXX files and then run the scripts in step 2.
  4. In addition, OpenCV 3 does not incorporate the opencv_contrib module by default. Assuming you have OpenCV 3 compiled with the contrib module and you want to use it, append opencv_contrib at the end of the line LIBRARIES += opencv_core opencv_highgui opencv_imgproc in the Makefile file.
  5. Atlas can be installed with sudo apt-get install libatlas-base-dev. Instead of Atlas, you can use OpenBLAS or Intel MKL by modifying the line BLAS := atlas in the same way as previously mentioned for the OpenCV version selection.
+

+Installation - Script Compilation

+

Build Caffe & the OpenPose library and download the required Caffe models for Ubuntu 14.04 or 16.04 (auto-detected by the script) and CUDA 8:

bash scripts/ubuntu_deprecated/install_caffe_and_openpose_if_cuda8.sh
+

Highly important: This script only works with CUDA 8 and Ubuntu 14 or 16. Otherwise, see doc/installation/0_index.md or Installation - Manual Compilation.

+

+Installation - Manual Compilation

+

As an alternative to the script installation, if you want to use CUDA 7, avoid sh scripts, change some configuration settings (e.g., the OpenCV version), etc., then:

  1. Install the Caffe prerequisites.
  2. Compile Caffe and OpenPose by running these lines:
    # Install Caffe
    git submodule update --init --recursive --remote
    cd 3rdparty/caffe/
    # Select your desired Makefile file (run only one of the next 4 commands)
    cp Makefile.config.Ubuntu14_cuda7.example Makefile.config # Ubuntu 14, cuda 7
    cp Makefile.config.Ubuntu14_cuda8.example Makefile.config # Ubuntu 14, cuda 8
    cp Makefile.config.Ubuntu16_cuda7.example Makefile.config # Ubuntu 16, cuda 7
    cp Makefile.config.Ubuntu16_cuda8.example Makefile.config # Ubuntu 16, cuda 8
    # Change any custom flag from the resulting Makefile.config (e.g., OpenCV 3, Atlas/OpenBLAS/MKL, etc.)
    # Compile Caffe
    make all -j`nproc` && make distribute -j`nproc`
    # Install OpenPose
    cd ../../models/
    bash ./getModels.sh # It just downloads the Caffe trained models
    cd ..
    cp scripts/ubuntu/Makefile.example Makefile
    # Same file cp command as the one used for Caffe
    cp scripts/ubuntu_deprecated/Makefile.config.Ubuntu14_cuda7.example Makefile.config
    # Change any custom flag from the resulting Makefile.config (e.g., OpenCV 3, Atlas/OpenBLAS/MKL, etc.)
    make all -j`nproc`

+

NOTE: If you want to use your own Caffe distribution, follow the steps in the Custom Caffe section and later re-compile the OpenPose library with: bash ./install_openpose_if_cuda8.sh. These steps only need to be performed once. If you are interested in making changes to the OpenPose library, you can simply recompile it with: make clean && make all -j`nproc`. Highly important: There are 2 analogous Makefile.config.Ubuntu##.example files, one in the main folder and one in 3rdparty/caffe/, corresponding to the OpenPose and Caffe configuration files respectively. Any change must be done to both files (e.g., the OpenCV 3 flag, the Atlas/OpenBLAS/MKL flag, etc.). E.g., for CUDA 8 and Ubuntu 16: 3rdparty/caffe/Makefile.config.Ubuntu16_cuda8.example and scripts/ubuntu_deprecated/Makefile.config.Ubuntu16_cuda8.example.

+

+Reinstallation

+

If you updated some software that our library or the 3rdparty dependencies use, or you simply want to reinstall it:

  1. Clean the OpenPose and Caffe compilation folders:
    make clean && cd 3rdparty/caffe && make clean
  2. Repeat the Installation steps. You do not need to download the models again.
+

+Uninstallation

+

You just need to remove the OpenPose folder, by default called openpose/. E.g., rm -rf openpose/.

+

+Windows

+

+Installation - Library

+
  1. Install the prerequisites:
    1. Microsoft Visual Studio (VS) 2015 Enterprise Update 3.
      • If Visual Studio 2017 Community is desired, we do not officially support it, but it might compile by first enabling CUDA 8.0 in VS2017, or by using VS2017 with CUDA 9 after checking the .vcxproj file and changing the necessary paths from CUDA 8 to 9.
      • VS 2015 Enterprise Update 1 will give some compiler errors, and VS 2015 Community has not been tested.
    2. CUDA 8: Install it in the default location, C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v8.0. Otherwise, modify the Visual Studio project solution accordingly. Install CUDA 8.0 after Visual Studio 2015 is installed to ensure that the CUDA installation generates all the necessary files for VS. If CUDA was already installed, re-install it after installing VS!
    3. cuDNN 5.1: Once you have downloaded it, just unzip it and copy (merge) the contents into the CUDA folder, C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v8.0.

+CMake Installer

+

This is the recommended installation method; it is simpler and offers more customization settings. See doc/installation/0_index.md. Note that it is a beta version; post any issue you find on GitHub.

+

+Deprecated Windows Installer

+

Note: This installer will not incorporate any new features; we recommend using the CMake installer.

+
  1. Download the OpenPose dependencies and models (body, face and hand models) by double-clicking on {openpose_path}\windows\download_3rdparty_and_models.bat. Alternatively, you might prefer to download them manually.
  2. Open the Visual Studio project sln file by double-clicking on {openpose_path}\windows\OpenPose.sln.
  3. In order to verify OpenPose is working, try compiling and executing the demo:
    1. Right click on OpenPoseDemo --> Set as StartUp Project.
    2. Change Debug to Release mode.
    3. Compile it and run it with F5 or the green play icon.
  4. If you have a webcam connected, OpenPose will automatically start after being compiled.
  5. In order to use the created exe file from the command line (i.e., outside Visual Studio), you have to:
    1. Copy all the DLLs located in {openpose_folder}\3rdparty\windows\caffe\bin\ into the exe folder: {openpose_folder}\windows\x64\Release.
    2. Copy all the DLLs located in {openpose_folder}\3rdparty\windows\opencv\x64\vc15\bin\ into the exe folder: {openpose_folder}\windows\x64\Release.
    3. Open the Windows cmd (Windows button + X, then A).
    4. Go to the OpenPose directory, assuming OpenPose has been downloaded to C:\openpose: cd C:\openpose\.
    5. Run the tutorial commands.
  6. Check OpenPose was properly installed by running it on the default images, video or webcam: doc/01_demo.md.

+Uninstallation

+

You just need to remove the OpenPose or portable demo folder.

+

+Reinstallation

+

If you updated some software that our library or the 3rdparty dependencies use, or you simply want to reinstall it:

  1. Open the Visual Studio project sln file by double-clicking on {openpose_path}\windows\OpenPose.sln.
  2. Clean the OpenPose project by right-clicking on Solution 'OpenPose' and selecting Clean Solution.
  3. Compile it and run it with F5 or the green play icon.
+

+Doxygen Documentation Autogeneration

+

See doc/installation/0_index.md#doxygen-documentation-autogeneration-ubuntu-only.

+

+Custom Caffe

+

We only modified some Caffe compilation flags and minor details. You can use your own Caffe distribution; these are the files we added and modified:

+
  1. Added files: install_caffe.sh, as well as Makefile.config.Ubuntu14.example, Makefile.config.Ubuntu16.example, Makefile.config.Ubuntu14_cuda_7.example and Makefile.config.Ubuntu16_cuda_7.example (extracted from Makefile.config.example). Basically, you must enable cuDNN.
  2. Edited file: Makefile. Search for "# OpenPose: " to find the edited code. We basically added the C++11 flag to avoid issues on some old computers.
  3. Optional - deleted Caffe file: Makefile.config.example.
  4. In order to link it to OpenPose:
    1. Run make all && make distribute in your Caffe version.
    2. Open the OpenPose Makefile config file: ./Makefile.config.UbuntuX.example (where X depends on your OS and CUDA version).
    3. Modify the Caffe folder directory variable (CAFFE_DIR) to your custom Caffe distribute folder location in the previous OpenPose Makefile config file.
+

+Compiling without cuDNN

+

See doc/installation/0_index.md#compiling-without-cudnn.

+
+
+
diff --git a/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx1.html b/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx1.html new file mode 100644 index 000000000..c87d0b862 --- /dev/null +++ b/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx1.html @@ -0,0 +1,173 @@
OpenPose Doc - Installation on Nvidia Jetson TX1
+
+
+

+Introduction

+

We do not officially support the TX1, but thanks to @dreinsdo we have these instructions describing how he made OpenPose work on his TX1. He contributed this documentation in a GitHub issue post; if you face any issue, feel free to post on that issue thread.

+

+Purpose

+

This document describes the full procedure for installing OpenPose on the Jetson TX1. Other, less involved, procedures may have been found successful by community members, and we encourage you to share your alternatives.

+

+Preliminary remarks

+

This procedure covers moving the Jetson file system to a larger drive (because the TX1 eMMC is limited), building a custom kernel (because the onboard camera did not work with OpenPose), building OpenCV from source (because the stock JetPack 3.1 OpenCV build lacks OpenPose dependencies), and customizing the OpenPose Makefiles (because they are not compatible with the TX1 CUDA arch). We used a PS3 Eye camera in place of the onboard camera and a 120 GB SSD, but most USB webcams and SATA drives should work fine.

+

+Contents

+ +

+Prep the TX1

+
  1. Flash the Jetson TX1 with JetPack 3.1 per the JetPack installation guide. Be sure to complete both the OS flashing and the CUDA / cuDNN installation parts before proceeding.
  2. Move the file system to the SATA drive. Follow the steps of the JetsonHacks article Install Samsung SSD on NVIDIA Jetson TX1.
+

+Build custom kernel

+

This step is required because we were not able to use OpenPose with the onboard TX1 camera. The steps are a combination of two JetsonHacks articles: Build Kernel and ttyACM Module – NVIDIA Jetson TX1 and Sony PlayStation Eye – NVIDIA Jetson TX1. If you are using a different webcam, then include the driver for that webcam in place of the PS3 Eye driver in step 3.

  1. Get the install scripts from the JetsonHacks GitHub. This link is to the zip of the 'JetPack 3.1' release. If you git clone from the master branch, then you will get the most recent kernel build files, which are not compatible with JetPack 3.1.
  2. Unzip the downloaded files, enter the unzipped directory and run the script to get the kernel sources.
    $ cd buildJetsonTX1Kernel
    $ sudo ./getKernelSources.sh
  3. The script will open the editor for the kernel configuration. Find the driver for your webcam and select it with a checkbox (not a dot). Save the configuration and quit the config window.
  4. Make the kernel.
    $ sudo ./makeKernel.sh
  5. Replace the current kernel with the newly built kernel image.
    $ sudo cp /usr/src/kernel/kernel-4.4/arch/arm64/boot/Image ($PATH_TO_EMMC)/boot/Image
    Replace $PATH_TO_EMMC with the path to your eMMC. This is required because the Jetson initially boots to eMMC and loads the kernel from there, even with the SATA drive connected.
+

+Build OpenCV from source

+

Follow JK Jung's steps from How to Install OpenCV (3.4.0) on Jetson TX2 verbatim, with the following exception: omit installation of Python3 dependencies, i.e. skip the following lines.

$ sudo apt-get install python3-dev python3-pip python3-tk
+
$ sudo pip3 install numpy
+
$ sudo pip3 install matplotlib
+

+Install Openpose

+

The following steps detail the modification of three files to install OpenPose. Modified versions of the files are attached and may alternatively be used. To use them, be sure to rename both Makefile configs to Makefile.config.Ubuntu16_cuda8_JetsonTX2.

  1. Clone from the master branch.
    $ git clone https://github.com/CMU-Perceptual-Computing-Lab/openpose
    $ cd openpose
  2. Modify the Makefile config. For the installation procedure we will use the TX2 files for JetPack 3.1.
    $ gedit scripts/ubuntu/Makefile.config.Ubuntu16_cuda8_JetsonTX2
    Uncomment the OpenCV line:
    OPENCV_VERSION := 3
    Replace all the 'CUDA_ARCH :=' lines with the following:
    CUDA_ARCH := -gencode arch=compute_53,code=[sm_53,compute_53]
    Add cuDNN (we are not sure if this is necessary; we have not retried the install without it):
    USE_CUDNN := 1
  3. Correct the error in the install script path.
    $ gedit scripts/ubuntu/install_caffe_and_openpose_JetsonTX2_JetPack3.1.sh
    Replace
    executeShInItsFolder "install_openpose_JetsonTX2_JetPack3.1.sh" "./scripts/ubuntu/" "./"
    with
    executeShInItsFolder "./scripts/ubuntu/install_openpose_JetsonTX2_JetPack3.1.sh" "./" "./"
  4. Start the install process. When you initially call the install script, the Caffe repo will be cloned and the associated files downloaded. As soon as Caffe starts compiling, halt the process and change the Makefile config as in step 2.
    bash ./scripts/ubuntu/install_caffe_and_openpose_JetsonTX2_JetPack3.1.sh
    Once Caffe begins to compile, press CTRL+C.
    $ gedit 3rdparty/caffe/Makefile.config.Ubuntu16_cuda8_JetsonTX2
    Make the same changes as in step 2. The cuDNN switch should already be on.
  5. Restart the installation process.
    bash ./scripts/ubuntu/install_caffe_and_openpose_JetsonTX2_JetPack3.1.sh
+

+Usage

+

To get decent FPS, you need to lower the net resolution:

./build/examples/openpose/openpose.bin -camera_resolution 640x480 -net_resolution 128x96
+

To activate hand or face keypoint detection, complete this command with the following options (warning: running both simultaneously will cause an out-of-memory error):

# Body and face
+
./build/examples/openpose/openpose.bin --face -face_net_resolution 256x256
+
# Body and hands
+
./build/examples/openpose/openpose.bin --hand -hand_net_resolution 256x256
+
# All body, face, and hands
+
./build/examples/openpose/openpose.bin --face -face_net_resolution 256x256 --hand -hand_net_resolution 256x256
+
+
+
diff --git a/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html b/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html new file mode 100644 index 000000000..62d25d4b1 --- /dev/null +++ b/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html @@ -0,0 +1,129 @@
OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.1
+
+
+

Note that OpenPose for the Nvidia Jetson TX2 was developed and is maintained by the community. The OpenPose authors cannot provide official support for it.

+

+Contents

+
  1. Requirements and Dependencies
  2. Installation
  3. Usage
+

+Requirements and Dependencies

+

Jetson TX2 just flashed with JetPack 3.1

+

Notes:

+
  • Installation is similar to Jetson TX1 and you can follow this video tutorial.
  • If you are installing from a virtual machine host, installation may need to be done in two steps; please refer to this solution.
  • Be sure to complete both the OS flashing and the CUDA / cuDNN installation parts before installation.
+

Dependencies:

- OpenCV (all 2.X and 3.X versions are compatible).
+- Caffe and all its dependencies.
+- The demo and tutorials additionally use GFlags.
+

+Installation

+

Use the following script to install both Caffe and OpenPose:

bash ./scripts/ubuntu/install_caffe_and_openpose_JetsonTX2_JetPack3.1.sh
+

+Usage

+

For now, it is recommended to use an external camera with the demo. To get decent FPS, you need to lower the net resolution:

./build/examples/openpose/openpose.bin -camera_resolution 640x480 -net_resolution 128x96
+

To activate hand or face keypoint detection, complete this command with the following options (warning: running both simultaneously will cause an out-of-memory error):

--hand -hand_net_resolution 256x256
+
--face -face_net_resolution 256x256
+
+
+
diff --git a/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html b/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html new file mode 100644 index 000000000..77440e7f2 --- /dev/null +++ b/web/html/doc/md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html @@ -0,0 +1,145 @@
OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.3
+
+
+

Note that OpenPose for the Nvidia Jetson TX2 was developed and is maintained by the community. The OpenPose authors cannot provide official support for it.

+

+Contents

+
  1. Requirements and Dependencies
  2. Installation
  3. Usage
+

+Requirements and Dependencies

+

Jetson TX2 just flashed with JetPack 3.3

+

Notes:

+
  • Installation is similar to Jetson TX1 and you can follow this step-by-step tutorial.
  • If you are installing from a virtual machine host, installation may need to be done in two steps; please refer to this solution.
  • Be sure to complete both the OS flashing and the CUDA / cuDNN installation parts before installation.
+

Dependencies:

- OpenCV (3.X versions are compatible).
+- Caffe and all its dependencies.
+- The demo and tutorials additionally use GFlags.
+

+Installation

+

Use the following script to install both Caffe and OpenPose:

bash ./scripts/ubuntu/install_caffe_and_openpose_JetsonTX2_JetPack3.3.sh
+

Optional: If you want to build the Python libraries, then:

  1. Edit the BUILD_PYTHON flag in CMakeLists.txt: option(BUILD_PYTHON ...).
  2. In both places where this appears, set the flag to ON: -DBUILD_python=ON -DBUILD_python_layer=ON.
  3. There are additional flags that need to be set: PYTHON_EXECUTABLE=/usr/bin/python2.7 and PYTHON_LIBRARY=/usr/lib/aarch64-linux-gnu/libpython2.7.so for Python 2.7. Therefore, set them inside build.
  4. Now run make. You should see a file called "pyopenpose.so" in /home/nvidia/openpose/build/python/openpose if Python was set to 2.7. Otherwise, it will be pyopenpose.cpython-35m-aarch64-linux-gnu.
  5. Finally, run sudo make install inside build to copy the files to /usr/local/python, and set PYTHONPATH accordingly in .bashrc: export PYTHONPATH="${PYTHONPATH}:/usr/local/python"
+

+Usage

+

For now, it is recommended to use an external camera with the demo. To get decent FPS, you need to lower the net resolution:

./build/examples/openpose/openpose.bin -camera_resolution 640x480 -net_resolution 128x96
+

To activate hand or face keypoint detection, complete this command with the following options (warning: running both simultaneously will cause an out-of-memory error):

--hand -hand_net_resolution 256x256
+
--face -face_net_resolution 256x256
+
+
+
diff --git a/web/html/doc/md_doc_very_advanced_library_structure_0_index.html b/web/html/doc/md_doc_very_advanced_library_structure_0_index.html new file mode 100644 index 000000000..4b2d7e76c --- /dev/null +++ b/web/html/doc/md_doc_very_advanced_library_structure_0_index.html @@ -0,0 +1,110 @@
OpenPose Very Advanced Doc - Library Structure
+
+
+

As a user, you do not need to know anything about this section! This section is intended for OpenPose internal developers. It is exposed publicly, but you can skip this whole folder if you are just trying to use OpenPose or create new code/demos using OpenPose.

+

Even if you want to, e.g., change internal functions and/or extend the OpenPose functionality, the easiest solution as a user is to follow the OpenPose C++ API (doc/04_cpp_api.md). If the new functionality is cool, make a pull request so we can add it to OpenPose!

+

In order to learn the basics about how OpenPose works internally:

  1. See the Doxygen documentation on http://cmu-perceptual-computing-lab.github.io/openpose or build that Doxygen doc from the source code.
  2. Take a look at the library Quick Start section from the main README (or its Doxygen analog).
  3. OpenPose Overview: Learn the basics about the library source code in doc/very_advanced/library_structure/1_library_deep_overview.md.
  4. Extending Functionality: Learn how to extend the library in doc/very_advanced/library_structure/2_library_extend_functionality.md.
  5. Adding An Extra Module: Learn how to add an extra module in doc/very_advanced/library_structure/3_library_add_new_module.md.
+
+
+
diff --git a/web/html/doc/md_doc_very_advanced_library_structure_1_library_deep_overview.html b/web/html/doc/md_doc_very_advanced_library_structure_1_library_deep_overview.html new file mode 100644 index 000000000..1716b4606 --- /dev/null +++ b/web/html/doc/md_doc_very_advanced_library_structure_1_library_deep_overview.html @@ -0,0 +1,372 @@
OpenPose Very Advanced Doc - Library Structure - Deep Overview
+
+
+

Note: Read doc/very_advanced/library_structure/0_index.md before this page.

+

+Modules Diagram

+

+

+Debugging C++ Code

+

+Finding Segmentation Faults

+

This is the fastest method to debug a segmentation fault. Usual scenario: you are editing the OpenPose source code and suddenly OpenPose returns a segmentation fault when executed. In order to find where it occurs:

+
  1. Select one of the 2 options:
    1. Switch to debug mode.
    2. Go to openpose/utilities/errorAndLog.hpp and modify dLog: comment out #ifndef NDEBUG and its else and endif.
  2. Call OpenPose with --logging_level 0 --disable_multi_thread.
  3. At this point you have an idea of which file/class the segmentation fault is coming from. Now you can further isolate the error by iteratively adding the following line all over the code until you find the exact position of the segmentation fault (a minimal sketch follows this list): opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
  4. After you have found the segmentation fault, remember to remove all the extra opLog() calls that you temporarily added.
+
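For illustration only, this is a minimal sketch of how such opLog() checkpoints can be sprinkled inside a function while hunting for the crash. Only the opLog() call itself is taken from the step above; the function name and body are hypothetical, and the header path is an assumption based on the errorAndLog.hpp file mentioned in step 1.

    #include <openpose/utilities/errorAndLog.hpp> // assumed header providing op::opLog and op::Priority

    void someSuspectedFunction() // hypothetical function being narrowed down
    {
        op::opLog("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); // checkpoint 1
        // ... first half of the original function body ...
        op::opLog("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); // checkpoint 2
        // ... second half of the original function body ...
        op::opLog("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__); // checkpoint 3
        // The last checkpoint printed before the crash brackets the offending code.
    }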

+Accuracy

+

+Checking OpenPose Accuracy Quantitatively

+
  1. Download the OpenPose training code: https://github.com/CMU-Perceptual-Computing-Lab/openpose_train
  2. Download the val2017 set from COCO: http://images.cocodataset.org/zips/val2017.zip
  3. Get the JSONs in OpenPose: examples/tests/pose_accuracy_coco_val.sh
  4. Get the accuracy (Matlab): validation/f_getValidations.m
+

+Checking Ground-Truth Labels

+

From the COCO dataset:

  1. Download the 2014 or 2017 Train/Val annotations.
  2. Download the COCO API.
  3. With the COCO API (either the Python, Matlab, or LUA one), you can check any image with the image ID (equivalent to the number in the image name).
+

+OpenPose Coding Style

+
  1. Error-prone mistakes:
    1. Never use std::mutex.lock and/or std::mutex.unlock. Use std::unique_lock<std::mutex> if unlock is required, or std::lock_guard<std::mutex> otherwise.
    2. Never use the new keyword with std::shared_ptr; use std::make_shared<> instead.
  2. Naming:
    1. Class parameters should start with m, class pointers with p, shared_ptrs with sp, unique_ptrs with up, and static parameters with s.
    2. Function and class parameters follow the same coding style other than the previous point.
    3. Parameters should not contain special characters, simply letters and numbers (preferably only letters) separated with upper case. E.g., mThisIsAParameter, thisIsAParameter.
    4. In addition, the names should be self-explanatory and not abbreviated. Good examples: counter, thisIs. Bad examples: ctr, var.
  3. Length:
    1. Lines should contain up to 120 characters.
  4. Comments:
    1. Only // comments are allowed in the code; /* */ should not be used.
    2. There should be (at least) a 1-line comment for each block of code inside each function.
  5. Loops and statements:
    1. There should be a space between the keyword (if, for, etc.) and the parenthesis, e.g., if (true). Wrong: if(true). Note: this way they can be easily located with Ctrl + F.
    2. Braces should be added on the following line with respect to the loop/statement keyword. See the example in the next point.
    3. 1-line loops/statements should not contain braces. E.g.,
      if (booleanParameter)
          anotherParameter = 25;
      else
      {
          anotherParameter = 2;
          differentParameter = 3;
      }
  6. Includes:
    1. There cannot be any include to a 3rd party in the headers (other than OpenCV core: opencv2/core/core.hpp).
      1. The PImpl idiom can be checked (e.g., in include/openpose/pose/poseExtractorCaffe.hpp) for an idea of how to avoid it.
      2. Otherwise, check the class-defining examples in include/openpose/core/macros.hpp (point 1 is highly preferred).
    2. They should be sorted in this order:
      1. Std libraries.
      2. OS libraries.
      3. 3rd party libraries (e.g., Caffe, OpenCV).
      4. OpenPose libraries.
      5. If it is a cpp file, the last one should be its own hpp.
    3. Inside each of the previous groups, includes should be sorted alphabetically.
  7. Function arguments:
    1. They should first include the variables to be edited, and then the const variables.
    2. Any variable that is not going to be modified must be added with const.
  8. Pointers:
    1. Pointers must be avoided if possible.
    2. If a pointer must be used, std::unique_ptr must always be used.
    3. If the pointer must be shared, then std::shared_ptr.
    4. No delete keyword is allowed in OpenPose.
+

+Main Modules

+

In order to use and/or slightly extend the OpenPose library, we explain its main components in this section. doc/very_advanced/library_structure/UML contains the class diagram of all these modules.

+
  1. The basic module: core.
  2. The multi-threading module: thread.
  3. The multi-person keypoint detection module: pose.
+

+Basic Module: core

+

+Array<T> - The OpenPose Basic Raw Data Container

+

This template class implements a multidimensional data array. It is our basic data container, analogous to cv::Mat in OpenCV, Tensor in Torch and TensorFlow, or Blob in Caffe. It wraps a cv::Mat and a std::shared_ptr, both of them pointing to the same raw data, i.e., they share the same memory, so we can read the data in both formats with no performance impact. For instance, op::Datum contains several op::Array<float> members, e.g., the one holding the pose data.

+

+Construction And Data allocation

+

There are different ways to allocate the memory (a minimal sketch follows the list):

+
  1. The constructor Array(const std::vector<int>& size), which calls reset(size).
  2. The constructor Array(const int size), which calls reset(size).
  3. The reset(const std::vector<int>& size) function: It allocates the memory indicated by size. The allocated memory equals the product of all elements in the size vector. Internally, it is saved as a 1-D std::shared_ptr<T[]>.
  4. The reset(const int size) function: equivalent for 1-dimensional data (i.e., a vector).
  5. The setFrom(const cv::Mat& cvMat) function: It calls reset() and copies the data from cvMat.
+
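The following is a minimal allocation sketch using only the constructors and functions listed above. The header path and the concrete dimensions are assumptions for illustration (the header is assumed to live under include/openpose/core, matching the repository layout).

    #include <opencv2/core/core.hpp>
    #include <openpose/core/array.hpp> // assumed header location of op::Array<T>

    int main()
    {
        // 3-D allocation, e.g., 1 person x 18 body parts x 3 values (x, y, score)
        op::Array<float> poseLike({1, 18, 3});   // constructor -> reset({1, 18, 3})
        op::Array<float> flatArray(25);          // 1-D constructor -> reset(25)

        poseLike.reset({2, 18, 3});              // re-allocate, e.g., for 2 people

        // reset() + copy from an existing cv::Mat
        const cv::Mat cvMat = cv::Mat::zeros(18, 3, CV_32FC1);
        op::Array<float> fromMat;
        fromMat.setFrom(cvMat);
        return 0;
    }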

+Data access

+

The data can be accessed as a raw pointer, a shared pointer, or a cv::Mat. So, given your Array<T> array (a short access sketch follows the list):

+
  1. Similar to std::vector: array[index] or array.at(index). If the code is in debug mode, they both have the same functionality. In release mode, the only difference is that the at function checks whether the index is within the limits of the data.
  2. As a const cv::Mat: array.getConstCvMat(). We do not allow directly modifying the cv::Mat, since some operations might change the dimensional size of the data. If you want to do so, you can clone this cv::Mat, perform any desired operation, and copy it back to the Array class with setFrom().
  3. As a raw pointer: T* getPtr() and const T* const getConstPtr(). Similar to std::shared_ptr::get(). For instance, CUDA code usually requires raw pointers to access its data.
+
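A short access sketch based only on the functions listed above and in the next section (the header path is again an assumption):

    #include <cstddef>
    #include <openpose/core/array.hpp> // assumed header location of op::Array<T>

    float sumAllElements(const op::Array<float>& array)
    {
        if (array.empty())                       // no data allocated
            return 0.f;
        float sum = 0.f;
        // getVolume() = product of all dimension sizes; getConstPtr() = read-only raw pointer
        const float* const dataPtr = array.getConstPtr();
        for (std::size_t i = 0; i < array.getVolume(); i++)
            sum += dataPtr[i];
        // Equivalent element access with bounds checking:  sum += array.at(0);
        // Read-only OpenCV view of the same memory:        const cv::Mat mat = array.getConstCvMat();
        return sum;
    }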

+Dimensionality Information

+

There are several functions to get information about the allocated data:

+
  1. bool empty(): Similar to cv::Mat::empty(). It checks whether internal data has been allocated.
  2. std::vector<int> getSize(): It returns the size of each dimension.
  3. int getSize(const int index): It returns the size of the index dimension.
  4. size_t getNumberDimensions(): It returns the number of dimensions (i.e., getSize().size()).
  5. size_t getVolume(): It returns the total internal number of T objects, i.e., the product of all dimension sizes.
+

+Datum - The OpenPose Basic Piece of Information Between Threads

+

The Datum class has all the variables that our Workers need to share with each other. The user can inherit from op::Datum in order to add extra functionality (e.g., if they want to add new Workers that require extra information between them). We highly recommend not modifying the op::Datum source code. Instead, just inherit from it and tell the Workers and ThreadManager to use your inherited class. No changes are needed in the OpenPose source code for this task.

// Custom class inheriting from op::Datum
struct UserDatum : public op::Datum {/* op::Datum + extra variables */};

// Worker and ThreadManager example initialization
op::WGui<std::shared_ptr<std::vector<std::shared_ptr<UserDatum>>>> userGUI(/* constructor arguments */);
op::ThreadManager<std::shared_ptr<std::vector<std::shared_ptr<UserDatum>>>> userThreadManager;

Since UserDatum inherits from op::Datum, all the original OpenPose code will compile and run with your inherited version of op::Datum.

+

+Multi-Threading Module - thread

+

+The ThreadManager<T> Template Class

+

It manages and automates the multi-threading configuration and execution. The user just needs to add the desired Worker<T> classes to be executed and the parallelization mode, and this class will take care of it.

+

+Constructor

+

Just call op::ThreadManager<TypedefDatumsSP> threadManager.

+

+Adding a Worker Sequence

+

There are 4 ways to add a sequence of workers:

+
  1. void add(const std::vector<std::tuple<unsigned long long, std::vector<TWorker>, unsigned long long, unsigned long long>>& threadWorkerQueues).
  2. void add(const std::vector<std::tuple<unsigned long long, TWorker, unsigned long long, unsigned long long>>& threadWorkerQueues).
  3. void add(const unsigned long long threadId, const std::vector<TWorker>& tWorkers, const unsigned long long queueInId, const unsigned long long queueOutId).
  4. void add(const unsigned long long threadId, const TWorker& tWorker, const unsigned long long queueInId, const unsigned long long queueOutId).
+

+Threading Configuration Modes

+

There are 3 basic configuration modes: single-threading, multi-threading and smart multi-threading (mix of single- and multi-threading):

+
  1. Single-threading, with 2 variations:
    1. Just call threadManager.add(0, std::vector<TypedefWorker> VECTOR_WITH_ALL_WORKERS, 0, 1);
    2. Add the workers one by one, but keeping the same threadId:
      auto threadId = 0;
      auto queueIn = 0;
      auto queueOut = 0;
      threadManager.add(threadId, {wDatumProducer, wCvMatToOpInput}, queueIn++, queueOut++); // Thread 0, queues 0 -> 1
      threadManager.add(threadId, wPose, queueIn++, queueOut++);                             // Thread 0, queues 1 -> 2
  2. Multi-threading: Just increase the thread id for each new sequence:
      auto threadId = 0;
      auto queueIn = 0;
      auto queueOut = 0;
      threadManager.add(threadId++, wDatumProducer, queueIn++, queueOut++);  // Thread 0, queues 0 -> 1
      threadManager.add(threadId++, wCvMatToOpInput, queueIn++, queueOut++); // Thread 1, queues 1 -> 2
      threadManager.add(threadId++, wPose, queueIn++, queueOut++);           // Thread 2, queues 2 -> 3
  3. Smart multi-threading: Some classes are much faster than others (e.g., pose estimation takes ~100 ms while extracting frames from a video only takes ~10 ms). In addition, any machine has a limited number of threads. Therefore, the library allows the user to merge the faster threads in order to potentially speed up the code. Check the command line demo to see a more complete example.
      auto threadId = 0;
      auto queueIn = 0;
      auto queueOut = 0;
      threadManager.add(threadId++, {wDatumProducer, wCvMatToOpInput}, queueIn++, queueOut++); // Thread 0, queues 0 -> 1, 2 workers merged together into 1 thread
      threadManager.add(threadId++, wPose, queueIn++, queueOut++);                             // Thread 1, queues 1 -> 2, 1 worker
+

+Thread Id:

+

In order to have X different threads, you just need X different thread ids in the add() function. There should not be any missing thread or queue id. I.e., when start is called, all the thread ids from 0 to max_thread_id must have been added with the add() function, as well as all queue ids from 0 to the maximum queue id introduced.

+

The threads will be started following the thread id order (first the lowest id, last the highest one). In practice, thread id ordering might negatively affect the program execution by adding some lag. I.e., if the thread ids are assigned in complete opposite order to the temporal order of the Workers (e.g., first the GUI and lastly the webcam reader), then during the first few iterations the GUI Worker will have an empty queue until all other Workers have processed at least one frame.

+

Within each thread, the Workers are executed in the order that they have been added to ThreadManager by the add() function.

+

+Queue Id:

+

In addition, each queue id is forced to be the input and output of at least 1 Worker sequence. Special cases are queue id 0 (only forced to be the input of >= 1 Workers) and max_queue_id (only forced to be the output of >= 1 Workers). This prevents users from accidentally forgetting to connect some queue ids.

+

Recursive queuing is allowed. E.g., a Worker might work from queue 0 to 1, another one from 1 to 2, and a third one from 2 to 1, creating a recursive queue/threading. However, the index 0 is reserved for the first queue, and the maximum index for the last one.

+

+The Worker<T> Template Class - The Parent Class of All Workers

+

Classes starting by the letter W + upper case letter (e.g., WGui) directly or indirectly inherit from Worker<T>. They can be directly added to the ThreadManager class so they can access and/or modify the data as well as be parallelized automatically.

+

The easiest way to create your own Worker is to inherit from Worker<T> and implement the work() function such that it just calls a wrapper to your desired functionality (check the source code of some of our basic Workers). Since the Worker classes are templates, they are always compiled. Therefore, including your desired functionality in a different file lets you compile it only once; otherwise, it would be compiled every time any code that uses your Worker is compiled.

+

All OpenPose Workers are templates, i.e., they are not only limited to work with the default op::Datum. However, if you intend to use some of our Workers, your custom TDatums class (the one substituting op::Datum) should implement the same variables and functions that those Workers use. The easiest solution is to inherit from op::Datum and extend its functionality.

+

+Creating New Workers

+

Users can directly implement their own Worker from Worker<T> or any other sub-inherited Worker[...]<T> class and add it to ThreadManager. For that, they just need to do one of the following:

+
  1. Inherit from Worker<T> and implement the functionality work(T& tDatum), i.e., it will use and modify tDatum.
  2. Inherit from WorkerProducer<T> and implement the functionality T work(), i.e., it will create and return tDatum.
  3. Inherit from WorkerConsumer<T> and implement the functionality work(const T& tDatum), i.e., it will use but not modify tDatum.
+

We suggest that users also start their inherited Worker<T> classes with the letter W for code clarity; this is required if they want to send us a pull request. A minimal sketch of such a Worker follows.
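The sketch below only relies on the work(T&) interface described above. The header path, the additional initializationOnThread() method, the WUserPostProcessing name, and the poseKeypoints member mentioned in the comments are assumptions for illustration, not details taken from this document.

    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp> // assumed umbrella header exposing op::Worker and op::Datum

    // Convention: user Worker class names start with W
    class WUserPostProcessing : public op::Worker<std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>>
    {
    public:
        // Assumed to be part of the Worker interface (per-thread initialization); left empty in this sketch
        void initializationOnThread() override {}

        // work(T& tDatum): use and modify the shared datums (option 1 of the list above)
        void work(std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>& datumsPtr) override
        {
            if (datumsPtr != nullptr)
                for (auto& datumPtr : *datumsPtr)
                {
                    // Hypothetical custom processing of each op::Datum, e.g., reading
                    // datumPtr->poseKeypoints (member name assumed, not given in this document).
                    (void)datumPtr;
                }
        }
    };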

+

+All Workers Wrap a Non-Worker Class

+

All Workers wrap and call a non-Worker non-template equivalent which actually performs their functionality. E.g., WPoseExtractor<T> and PoseExtractor. In this way, threading and functionality are completely decoupled. This gives us the best of templates and normal classes:

+
  1. Templates allow us to use different classes, e.g., the user could use their own specific equivalent to op::Datum. However, they must be compiled every time any function that uses them changes.
  2. Classes can be compiled only once, and later the algorithm just uses them. However, they can only be used with specific arguments.
+

By separating the functionality and its Worker<T> wrapper, we get the best of both, eliminating the cons. In this way, the user is able to:

+
  1. Change std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>> for a custom class, implementing their own Worker templates, but using the already implemented functionality to create new custom Worker templates.
  2. Create a Worker that wraps several non-Worker classes.
+

+Multi-Person Key-Point Detection Module - pose

+

The human body pose detection is wrapped into the WPoseExtractor<T> worker and its equivalent non-template PoseExtractor. In addition, the pose can be rendered and/or blended into the original frame with the (W)PoseRenderer class.

+

+PoseExtractor Class

+

Currently, only PoseExtractorCaffe is implemented, which uses the Caffe framework. We might add other famous frameworks later (e.g., Torch or TensorFlow). If you compile our library with any other framework, please email us or make a pull request! We are really interested in adding any other Deep Net framework, and the code is mostly prepared for it. Just create the equivalent PoseExtractorDesiredFramework and make the pull request!

+

+Constructor

+

In order to be initialized, PoseExtractorCaffe has the following constructor and parameters: PoseExtractorCaffe(const Point<int>& netInputSize, const Point<int>& netOutputSize, const Point<int>& outputSize, const int scaleNumber, const double scaleGap, const PoseModel poseModel, const std::string& modelsFolder, const int gpuId).

+
  1. netInputSize is the resolution of the first layer of the deep net. I.e., the frames given to this class must have that size.
  2. netOutputSize is the resolution of the last layer of the deep net. I.e., the resulting heatmaps will have this size. Currently, it must be set to the same size as netInputSize.
  3. outputSize is the final desired resolution to be used. The human pose keypoint locations will be scaled to this output size. However, the heatmaps will have the netOutputSize size due to performance.
  4. scaleNumber and scaleGap specify the multi-scale parameters. Explained in doc/advanced/demo_advanced.md.
  5. poseModel specifies the model to load (e.g., COCO or MPI).
  6. modelsFolder is the path to the folder where the Caffe models are located.
  7. gpuId specifies the GPU where the deep net will run. To parallelize the process along the number of available GPUs, just create the class with the same number of parameters but a different GPU id.
+

+Detect Human Pose

+

In order to detect the human pose (a minimal end-to-end sketch follows this section):

+
  1. First run the deep net over the desired target image by using forwardPass(const Array<float>& inputNetData, const Point<int>& inputDataSize). inputNetData is the input image scaled to netInputSize, while inputDataSize indicates the original frame resolution before being rescaled to netInputSize (this is required given that we resize the images keeping the original aspect ratio).
  2. Afterwards, you can choose to get:
    1. The people pose as an op::Array<float>: Array<float> getPose().
    2. The scale used (keeping the aspect ratio) to rescale from netOutputSize to outputSize: double getScaleNetToOutput().
    3. The people pose as a constant GPU float pointer (not implemented yet): const float* getPoseGpuConstPtr().
    4. The heatmap data as a constant CPU or GPU float pointer: const float* getHeatMapCpuConstPtr() and const float* getHeatMapGpuConstPtr().
+

Due to performance reasons, we only copy the people pose data given by getPose(). However, we do not copy the heatmap and GPU pose values and just give you a raw pointer to them. Hence, you need to manually copy the data if you intend to use it later, since we reuse that memory on each forwardPass.
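A minimal sketch combining the constructor and detection calls listed above. It follows the (older) signatures given in this document, so the concrete values, the PoseModel enum value, and the header path are assumptions and may differ from current OpenPose releases.

    #include <openpose/headers.hpp> // assumed umbrella header for op::PoseExtractorCaffe, op::Array, op::Point

    void detectPoseOnce(const op::Array<float>& inputNetData, const op::Point<int>& originalFrameSize)
    {
        // Constructor parameters as listed above (the concrete values are illustrative assumptions)
        const op::Point<int> netInputSize{656, 368};
        const op::Point<int> netOutputSize = netInputSize;   // currently must match netInputSize
        const op::Point<int> outputSize{1280, 720};
        const int scaleNumber = 1;
        const double scaleGap = 0.3;
        const auto poseModel = op::PoseModel::COCO_18;       // enum value assumed; the text only says e.g. COCO or MPI
        op::PoseExtractorCaffe poseExtractor(netInputSize, netOutputSize, outputSize, scaleNumber,
                                             scaleGap, poseModel, "models/", 0 /*gpuId*/);
        // (Any per-thread initialization required by the real API is not shown here.)

        // 1. Run the deep net on the frame already scaled to netInputSize
        poseExtractor.forwardPass(inputNetData, originalFrameSize);

        // 2. Retrieve the results
        const op::Array<float> pose = poseExtractor.getPose();               // copied people pose
        const double scaleNetToOutput = poseExtractor.getScaleNetToOutput();
        (void)pose; (void)scaleNetToOutput;
        // Heatmap/GPU pointers (e.g., getHeatMapCpuConstPtr()) are raw pointers reused on each
        // forwardPass, so copy that data manually if you need it later.
    }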

+

+PoseRenderer Class

+

After estimating the pose, you usually want to visualize it. PoseRenderer does this work for you.

+

+Constructor

+

In order to be initialized, PoseRenderer has the following constructor and parameters: PoseRenderer(const Point<int>& netOutputSize, const Point<int>& outputSize, const PoseModel poseModel, const std::shared_ptr<PoseExtractor>& poseExtractor, const float alphaKeypoint, const float alphaHeatMap).

+
  1. netOutputSize, outputSize and poseModel are the same as the ones used for PoseExtractorCaffe.
  2. poseExtractor is the pose extractor used previously. It is only used for heatmap and PAF rendering, since the GPU data is not copied to op::Datum for performance purposes. If any of the heatmaps are going to be rendered, PoseRenderer must be placed in the same thread as PoseExtractor. Otherwise, it will throw a runtime exception.
  3. alphaKeypoint and alphaHeatMap control the blending coefficient between the original frame and the rendered pose or heatmap/PAF, respectively. A value of alphaKeypoint = 1 will render the pose with no transparency at all, while alphaKeypoint = 0 will make it invisible. In addition, alphaHeatMap = 1 would only show the heatmap, while alphaHeatMap = 0 would only show the original frame.
+

+Render Human Pose

+

In order to render the detected human pose, run std::pair<int, std::string> renderPose(Array<float>& outputData, const Array<float>& pose, const double scaleNetToOutput) (a short sketch follows the list).

+
  1. outputData is the Array<float> where the original image resized to outputSize is located.
  2. pose is given by PoseExtractor::getPose().
  3. scaleNetToOutput is given by PoseExtractor::getScaleNetToOutput().
  4. The resulting std::pair has the rendered element id and its name. E.g., <0, "Nose"> or <19, "Part Affinity Fields">.
+
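A minimal rendering sketch using the constructor and renderPose() call described above, continuing the hypothetical poseExtractor example from the previous section. The alpha values, the PoseModel enum value, and the header path are assumptions.

    #include <memory>
    #include <string>
    #include <utility>
    #include <openpose/headers.hpp> // assumed umbrella header for op::PoseRenderer and op::PoseExtractor

    void renderDetectedPose(op::Array<float>& outputData,                       // original image resized to outputSize
                            const op::Array<float>& pose,                       // from PoseExtractor::getPose()
                            const double scaleNetToOutput,                      // from PoseExtractor::getScaleNetToOutput()
                            const std::shared_ptr<op::PoseExtractor>& poseExtractor)
    {
        const op::Point<int> netOutputSize{656, 368};
        const op::Point<int> outputSize{1280, 720};
        const float alphaKeypoint = 0.6f;  // 1 = fully opaque pose, 0 = invisible pose
        const float alphaHeatMap = 0.7f;   // only relevant if heatmaps/PAFs are rendered
        op::PoseRenderer poseRenderer(netOutputSize, outputSize, op::PoseModel::COCO_18 /*assumed enum value*/,
                                      poseExtractor, alphaKeypoint, alphaHeatMap);

        // Blend the detected pose into outputData; the pair holds the rendered element id and its name
        const std::pair<int, std::string> renderedInfo = poseRenderer.renderPose(outputData, pose, scaleNetToOutput);
        (void)renderedInfo; // e.g., <0, "Nose"> or <19, "Part Affinity Fields">
    }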
+
+
diff --git a/web/html/doc/md_doc_very_advanced_library_structure_2_library_extend_functionality.html b/web/html/doc/md_doc_very_advanced_library_structure_2_library_extend_functionality.html new file mode 100644 index 000000000..3bda3c975 --- /dev/null +++ b/web/html/doc/md_doc_very_advanced_library_structure_2_library_extend_functionality.html @@ -0,0 +1,112 @@
OpenPose Very Advanced Doc - Library Structure - How to Extend Functionality
+
+
+

If you intend to extend the functionality of our library:

+
  1. Read the README.md page.
  2. Check the basic library overview doc in doc/very_advanced/library_structure/1_library_deep_overview.md.
  3. Read, understand and play with the basic real-time pose demo source code, the OpenPose demo (doc/01_demo.md) and the C++ API (doc/04_cpp_api.md). They include all the functionality of our library and have been properly commented.
  4. Read, understand and play with the other tutorials in examples/. They include more specific examples.
  5. Check the basic UML diagram in doc/very_advanced/library_structure/UML to get an idea of the relations between modules.
  6. Take a look at the structure of the already existing modules.
  7. The C++ header files include documentation in Doxygen format. Create this documentation by compiling the include folder with Doxygen. This documentation is slowly but continuously improved.
  8. You can also take a look at the source code or ask us on GitHub.
+
+
+
diff --git a/web/html/doc/md_doc_very_advanced_library_structure_3_library_add_new_module.html b/web/html/doc/md_doc_very_advanced_library_structure_3_library_add_new_module.html new file mode 100644 index 000000000..706bb7f34 --- /dev/null +++ b/web/html/doc/md_doc_very_advanced_library_structure_3_library_add_new_module.html @@ -0,0 +1,137 @@
OpenPose Very Advanced Doc - Library Structure - Steps to Add a New Module
+
+
+

+Developing Steps

+

In order to add a new module, these are the recommended development steps:

+
  1. Create a folder with its name in the experimental/ module, e.g., experimental/hair/.
  2. Implement all the functionality in one Worker. I.e., inherit from Worker and implement all the functionality in that class (copy the examples from any Worker subclass).
    1. The first letter of the class name should be W (e.g., WHairExtractor).
    2. To initially simplify development:
      1. Optionally (for higher debug info), you might initially create the Worker as a non-templated class, assuming it uses std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>> instead of directly using a template class (following the examples/tutorial_api_cpp synchronous examples). While developing, templates provide more confusing debugging info. Turn the class into a template after it has been initially developed.
      2. Optionally (for development speed), use op::Datum as the unique argument of your auxiliary functions within that Worker.
      3. Use the OpenPose Wrapper class in ThreadManagerMode::SingleThread mode (e.g., it allows you to directly use cv::imshow).
      4. If you are using your own custom Caffe, initially change the Caffe for your version. It should directly work.
    3. Copy the design from pose/WPoseExtractor.
  3. To test it:
    1. Add the functionality to Wrapper, and use the experimental namespace for the new struct (e.g., experimental::HairStruct) that the Wrapper will use. Do not change any function name from Wrapper; just add a new configure with the new HairStruct, or modify the existing ones without changing their names.
    2. Add a demo (e.g., examples/openpose/rthair.cpp) to test it.
  4. Split the Worker into as many Workers as required.
  5. If the Workers need extra data from Datum, simply add the new variables required into Datum (without removing/modifying any previous variables!).
  6. Also read the release steps below before starting this development phase.
+

+Release Steps

+

After the code is running and ready to be merged, in order to officially release the new module:

+
  1. Move the functionality of each Worker class to the non-template class (e.g., WHairExtractor to HairExtractor). WHairExtractor will simply wrap HairExtractor. This will reduce compiling time for the user. See examples from other modules.
  2. If you are using a custom Caffe version, move the custom code into the OpenPose library and change Caffe back to the default (most updated) version.
  3. Move the module from experimental/hair/ to hair/.
  4. Remove the experimental namespaces (e.g., from Wrapper and Hair) and turn the Workers into template classes.
  5. Integrate the new flags into the OpenPose Demo in examples/openpose/ and optionally add some tutorial examples in examples/tutorial_api_cpp.
+
+
+
diff --git a/web/html/doc/menu.js b/web/html/doc/menu.js new file mode 100644 index 000000000..2fe2214f2 --- /dev/null +++ b/web/html/doc/menu.js @@ -0,0 +1,51 @@
diff --git a/web/html/doc/menudata.js b/web/html/doc/menudata.js new file mode 100644 index 000000000..f416c2b93 --- /dev/null +++ b/web/html/doc/menudata.js @@ -0,0 +1,176 @@
diff --git a/web/html/doc/namespacemembers.html b/web/html/doc/namespacemembers.html new file mode 100644 index 000000000..86b4bfe39 --- /dev/null +++ b/web/html/doc/namespacemembers.html @@ -0,0 +1,106 @@
OpenPose: Namespace Members
    [Doxygen page banner repeated on every generated page: "OpenPose 1.7.0 — The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints", followed by the standard search box and navigation tabs.]
    Here is a list of all namespace members with links to the namespace documentation for each member:
    - a -

      +
    • addBkgChannel() +: op +
    • +
    • averageKeypoints() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_c.html b/web/html/doc/namespacemembers_c.html new file mode 100644 index 000000000..eb875b07c --- /dev/null +++ b/web/html/doc/namespacemembers_c.html @@ -0,0 +1,160 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - c -

      +
    • checkBool() +: op +
    • +
    • checkEqual() +: op +
    • +
    • checkGreaterOrEqual() +: op +
    • +
    • checkGreaterThan() +: op +
    • +
    • checkLessOrEqual() +: op +
    • +
    • checkLessThan() +: op +
    • +
    • checkNoNullNorEmpty() +: op +
    • +
    • checkNotEqual() +: op +
    • +
    • checkWorkerErrors() +: op +
    • +
    • CocoJsonFormat +: op +
    • +
    • COMPILE_TEMPLATE_DATUM() +: op +
    • +
    • configureThreadManager() +: op +
    • +
    • connectBodyPartsCpu() +: op +
    • +
    • connectBodyPartsGpu() +: op +
    • +
    • connectBodyPartsOcl() +: op +
    • +
    • createMultiviewTDatum() +: op +
    • +
    • createPeopleVector() +: op +
    • +
    • createProducer() +: op +
    • +
    • CUDA_NUM_THREADS +: op +
    • +
    • cudaCheck() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_d.html b/web/html/doc/namespacemembers_d.html new file mode 100644 index 000000000..55443f45a --- /dev/null +++ b/web/html/doc/namespacemembers_d.html @@ -0,0 +1,130 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - d -

      +
    • DataFormat +: op +
    • +
    • dataFormatToString() +: op +
    • +
    • datumProducerConstructor() +: op +
    • +
    • datumProducerConstructorRunningAndGetDatumApplyPlayerControls() +: op +
    • +
    • datumProducerConstructorRunningAndGetDatumFrameIntegrity() +: op +
    • +
    • datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning() +: op +
    • +
    • datumProducerConstructorRunningAndGetNextFrameNumber() +: op +
    • +
    • datumProducerConstructorTooManyConsecutiveEmptyFrames() +: op +
    • +
    • Detector +: op +
    • +
    • DisplayMode +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_dup.js b/web/html/doc/namespacemembers_dup.js new file mode 100644 index 000000000..f61a50a16 --- /dev/null +++ b/web/html/doc/namespacemembers_dup.js @@ -0,0 +1,22 @@ +var namespacemembers_dup = +[ + [ "a", "namespacemembers.html", null ], + [ "c", "namespacemembers_c.html", null ], + [ "d", "namespacemembers_d.html", null ], + [ "e", "namespacemembers_e.html", null ], + [ "f", "namespacemembers_f.html", null ], + [ "g", "namespacemembers_g.html", null ], + [ "h", "namespacemembers_h.html", null ], + [ "k", "namespacemembers_k.html", null ], + [ "l", "namespacemembers_l.html", null ], + [ "m", "namespacemembers_m.html", null ], + [ "n", "namespacemembers_n.html", null ], + [ "o", "namespacemembers_o.html", null ], + [ "p", "namespacemembers_p.html", null ], + [ "r", "namespacemembers_r.html", null ], + [ "s", "namespacemembers_s.html", null ], + [ "t", "namespacemembers_t.html", null ], + [ "u", "namespacemembers_u.html", null ], + [ "v", "namespacemembers_v.html", null ], + [ "w", "namespacemembers_w.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/namespacemembers_e.html b/web/html/doc/namespacemembers_e.html new file mode 100644 index 000000000..21f589358 --- /dev/null +++ b/web/html/doc/namespacemembers_e.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - e -

      +
    • ElementToRender +: op +
    • +
    • error() +: op +
    • +
    • errorDestructor() +: op +
    • +
    • ErrorMode +: op +
    • +
    • errorWorker() +: op +
    • +
    • estimateAndSaveExtrinsics() +: op +
    • +
    • estimateAndSaveIntrinsics() +: op +
    • +
    • estimateAndSaveSiftFile() +: op +
    • +
    • existDirectory() +: op +
    • +
    • existFile() +: op +
    • +
    • Extensions +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_enum.html b/web/html/doc/namespacemembers_enum.html new file mode 100644 index 000000000..cba25ee8e --- /dev/null +++ b/web/html/doc/namespacemembers_enum.html @@ -0,0 +1,164 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    • CocoJsonFormat +: op +
    • +
    • DataFormat +: op +
    • +
    • Detector +: op +
    • +
    • DisplayMode +: op +
    • +
    • ElementToRender +: op +
    • +
    • ErrorMode +: op +
    • +
    • Extensions +: op +
    • +
    • FullScreenMode +: op +
    • +
    • GpuMode +: op +
    • +
    • HeatMapType +: op +
    • +
    • LogMode +: op +
    • +
    • PoseMode +: op +
    • +
    • PoseModel +: op +
    • +
    • PoseProperty +: op +
    • +
    • Priority +: op +
    • +
    • ProducerFpsMode +: op +
    • +
    • ProducerProperty +: op +
    • +
    • ProducerType +: op +
    • +
    • RenderMode +: op +
    • +
    • ScaleMode +: op +
    • +
    • ThreadManagerMode +: op +
    • +
    • WorkerType +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_f.html b/web/html/doc/namespacemembers_f.html new file mode 100644 index 000000000..121f30e2a --- /dev/null +++ b/web/html/doc/namespacemembers_f.html @@ -0,0 +1,181 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - f -

      +
    • F135 +: op +
    • +
    • FACE_CCN_DECREASE_FACTOR +: op +
    • +
    • FACE_COLORS_RENDER +: op +
    • +
    • FACE_DEFAULT_ALPHA_HEAT_MAP +: op +
    • +
    • FACE_DEFAULT_ALPHA_KEYPOINT +: op +
    • +
    • FACE_MAX_FACES +: op +
    • +
    • FACE_NUMBER_PARTS +: op +
    • +
    • FACE_PAIRS_RENDER +: op +
    • +
    • FACE_PROTOTXT +: op +
    • +
    • FACE_SCALES_RENDER +: op +
    • +
    • FACE_TRAINED_MODEL +: op +
    • +
    • fastMax() +: op +
    • +
    • fastMin() +: op +
    • +
    • fastTruncate() +: op +
    • +
    • flagsToDetector() +: op +
    • +
    • flagsToDisplayMode() +: op +
    • +
    • flagsToHeatMaps() +: op +
    • +
    • flagsToHeatMapScaleMode() +: op +
    • +
    • flagsToPoint() +: op +
    • +
    • flagsToPoseMode() +: op +
    • +
    • flagsToPoseModel() +: op +
    • +
    • flagsToProducer() +: op +
    • +
    • flagsToProducerType() +: op +
    • +
    • flagsToRenderMode() +: op +
    • +
    • flagsToScaleMode() +: op +
    • +
    • formatAsDirectory() +: op +
    • +
    • FullScreenMode +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func.html b/web/html/doc/namespacemembers_func.html new file mode 100644 index 000000000..644f827c1 --- /dev/null +++ b/web/html/doc/namespacemembers_func.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - a -

      +
    • addBkgChannel() +: op +
    • +
    • averageKeypoints() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func.js b/web/html/doc/namespacemembers_func.js new file mode 100644 index 000000000..14f827743 --- /dev/null +++ b/web/html/doc/namespacemembers_func.js @@ -0,0 +1,21 @@ +var namespacemembers_func = +[ + [ "a", "namespacemembers_func.html", null ], + [ "c", "namespacemembers_func_c.html", null ], + [ "d", "namespacemembers_func_d.html", null ], + [ "e", "namespacemembers_func_e.html", null ], + [ "f", "namespacemembers_func_f.html", null ], + [ "g", "namespacemembers_func_g.html", null ], + [ "k", "namespacemembers_func_k.html", null ], + [ "l", "namespacemembers_func_l.html", null ], + [ "m", "namespacemembers_func_m.html", null ], + [ "n", "namespacemembers_func_n.html", null ], + [ "o", "namespacemembers_func_o.html", null ], + [ "p", "namespacemembers_func_p.html", null ], + [ "r", "namespacemembers_func_r.html", null ], + [ "s", "namespacemembers_func_s.html", null ], + [ "t", "namespacemembers_func_t.html", null ], + [ "u", "namespacemembers_func_u.html", null ], + [ "v", "namespacemembers_func_v.html", null ], + [ "w", "namespacemembers_func_w.html", null ] +]; \ No newline at end of file diff --git a/web/html/doc/namespacemembers_func_c.html b/web/html/doc/namespacemembers_func_c.html new file mode 100644 index 000000000..99fc551cb --- /dev/null +++ b/web/html/doc/namespacemembers_func_c.html @@ -0,0 +1,154 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - c -

      +
    • checkBool() +: op +
    • +
    • checkEqual() +: op +
    • +
    • checkGreaterOrEqual() +: op +
    • +
    • checkGreaterThan() +: op +
    • +
    • checkLessOrEqual() +: op +
    • +
    • checkLessThan() +: op +
    • +
    • checkNoNullNorEmpty() +: op +
    • +
    • checkNotEqual() +: op +
    • +
    • checkWorkerErrors() +: op +
    • +
    • COMPILE_TEMPLATE_DATUM() +: op +
    • +
    • configureThreadManager() +: op +
    • +
    • connectBodyPartsCpu() +: op +
    • +
    • connectBodyPartsGpu() +: op +
    • +
    • connectBodyPartsOcl() +: op +
    • +
    • createMultiviewTDatum() +: op +
    • +
    • createPeopleVector() +: op +
    • +
    • createProducer() +: op +
    • +
    • cudaCheck() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_d.html b/web/html/doc/namespacemembers_func_d.html new file mode 100644 index 000000000..e7e2b1614 --- /dev/null +++ b/web/html/doc/namespacemembers_func_d.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - d -

      +
    • dataFormatToString() +: op +
    • +
    • datumProducerConstructor() +: op +
    • +
    • datumProducerConstructorRunningAndGetDatumApplyPlayerControls() +: op +
    • +
    • datumProducerConstructorRunningAndGetDatumFrameIntegrity() +: op +
    • +
    • datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning() +: op +
    • +
    • datumProducerConstructorRunningAndGetNextFrameNumber() +: op +
    • +
    • datumProducerConstructorTooManyConsecutiveEmptyFrames() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_e.html b/web/html/doc/namespacemembers_func_e.html new file mode 100644 index 000000000..c0422d95d --- /dev/null +++ b/web/html/doc/namespacemembers_func_e.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - e -

      +
    • error() +: op +
    • +
    • errorDestructor() +: op +
    • +
    • errorWorker() +: op +
    • +
    • estimateAndSaveExtrinsics() +: op +
    • +
    • estimateAndSaveIntrinsics() +: op +
    • +
    • estimateAndSaveSiftFile() +: op +
    • +
    • existDirectory() +: op +
    • +
    • existFile() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_f.html b/web/html/doc/namespacemembers_func_f.html new file mode 100644 index 000000000..001f28c4d --- /dev/null +++ b/web/html/doc/namespacemembers_func_f.html @@ -0,0 +1,145 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - f -

      +
    • fastMax() +: op +
    • +
    • fastMin() +: op +
    • +
    • fastTruncate() +: op +
    • +
    • flagsToDetector() +: op +
    • +
    • flagsToDisplayMode() +: op +
    • +
    • flagsToHeatMaps() +: op +
    • +
    • flagsToHeatMapScaleMode() +: op +
    • +
    • flagsToPoint() +: op +
    • +
    • flagsToPoseMode() +: op +
    • +
    • flagsToPoseModel() +: op +
    • +
    • flagsToProducer() +: op +
    • +
    • flagsToProducerType() +: op +
    • +
    • flagsToRenderMode() +: op +
    • +
    • flagsToScaleMode() +: op +
    • +
    • formatAsDirectory() +: op +
    • +
    +
    +
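    The flagsTo*() helpers indexed above convert the demo's command-line flags (gflags) into the corresponding op:: enums and types used to configure the wrapper. A minimal sketch, assuming the signatures used in the OpenPose tutorial examples (the exact overloads live in include/openpose/utilities/flagsToOpenPose.hpp and may differ between versions); the literal values below stand in for the usual FLAGS_* definitions:

```cpp
#include <openpose/headers.hpp>

// Hedged sketch: convert typical flag values into op:: types before wrapper configuration.
void sketchFlagConversion()
{
    // "BODY_25" is the default pose model name; flagsToPoseModel maps it to op::PoseModel.
    const auto poseModel = op::flagsToPoseModel(op::String("BODY_25"));
    // "-1x368" is the usual net input resolution string; flagsToPoint parses "WxH".
    const auto netInputSize = op::flagsToPoint(op::String("-1x368"), "-1x368");
    // 0 selects the default keypoint scaling; flagsToScaleMode maps it to op::ScaleMode.
    const auto keypointScaleMode = op::flagsToScaleMode(0);
    // These values are then typically forwarded into op::WrapperStructPose (see namespaceop.html below).
    (void)poseModel; (void)netInputSize; (void)keypointScaleMode;
}
```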
    + + + + diff --git a/web/html/doc/namespacemembers_func_g.html b/web/html/doc/namespacemembers_func_g.html new file mode 100644 index 000000000..c55731a0c --- /dev/null +++ b/web/html/doc/namespacemembers_func_g.html @@ -0,0 +1,268 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - g -

      +
    • getAverageScore() +: op +
    • +
    • getBiggestPerson() +: op +
    • +
    • getCudaGpuNumber() +: op +
    • +
    • getCvCapPropFrameCount() +: op +
    • +
    • getCvCapPropFrameFps() +: op +
    • +
    • getCvCapPropFrameHeight() +: op +
    • +
    • getCvCapPropFrameWidth() +: op +
    • +
    • getCvFourcc() +: op +
    • +
    • getCvImwriteJpegQuality() +: op +
    • +
    • getCvImwritePngCompression() +: op +
    • +
    • getCvLoadImageAnydepth() +: op +
    • +
    • getCvLoadImageGrayScale() +: op +
    • +
    • getDistance() +: op +
    • +
    • getDistanceAverage() +: op +
    • +
    • getErrorModes() +: op::ConfigureError +
    • +
    • getFileExtension() +: op +
    • +
    • getFileNameAndExtension() +: op +
    • +
    • getFileNameNoExtension() +: op +
    • +
    • getFileParentFolderPath() +: op +
    • +
    • getFilesOnDirectory() +: op +
    • +
    • getFirstNumberOnString() +: op +
    • +
    • getFullFilePathNoExtension() +: op +
    • +
    • getGpuMode() +: op +
    • +
    • getGpuNumber() +: op +
    • +
    • getIfInMainThreadOrEmpty() +: op +
    • +
    • getIfNotInMainThreadOrEmpty() +: op +
    • +
    • getKeypointsArea() +: op +
    • +
    • getKeypointsPerson() +: op +
    • +
    • getKeypointsRectangle() +: op +
    • +
    • getKeypointsRoi() +: op +
    • +
    • getLastNumber() +: op +
    • +
    • getLogModes() +: op::ConfigureLog +
    • +
    • getNonZeroKeypoints() +: op +
    • +
    • getNumberCudaBlocks() +: op +
    • +
    • getNumberCudaThreadsAndBlocks() +: op +
    • +
    • getNumberElementsToRender() +: op +
    • +
    • getPoseBodyPartMapping() +: op +
    • +
    • getPoseBodyPartPairsRender() +: op +
    • +
    • getPoseColors() +: op +
    • +
    • getPoseDefaultConnectInterMinAboveThreshold() +: op +
    • +
    • getPoseDefaultConnectInterThreshold() +: op +
    • +
    • getPoseDefaultConnectMinSubsetScore() +: op +
    • +
    • getPoseDefaultMinSubsetCnt() +: op +
    • +
    • getPoseDefaultNmsThreshold() +: op +
    • +
    • getPoseMapIndex() +: op +
    • +
    • getPoseMaxPeaks() +: op +
    • +
    • getPoseNetDecreaseFactor() +: op +
    • +
    • getPoseNumberBodyParts() +: op +
    • +
    • getPosePartPairs() +: op +
    • +
    • getPoseProtoTxt() +: op +
    • +
    • getPoseScales() +: op +
    • +
    • getPoseTrainedModel() +: op +
    • +
    • getPriorityThreshold() +: op::ConfigureLog +
    • +
    • getThreadId() +: op +
    • +
    • getTimerInit() +: op +
    • +
    • getTimeSeconds() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_k.html b/web/html/doc/namespacemembers_func_k.html new file mode 100644 index 000000000..c8a2e2382 --- /dev/null +++ b/web/html/doc/namespacemembers_func_k.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - k -

      +
    • keepRoiInside() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_l.html b/web/html/doc/namespacemembers_func_l.html new file mode 100644 index 000000000..4bf0edf21 --- /dev/null +++ b/web/html/doc/namespacemembers_func_l.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - l -

      +
    • loadData() +: op +
    • +
    • loadHandDetectorTxt() +: op +
    • +
    • loadImage() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_m.html b/web/html/doc/namespacemembers_func_m.html new file mode 100644 index 000000000..149dd5bc2 --- /dev/null +++ b/web/html/doc/namespacemembers_func_m.html @@ -0,0 +1,112 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - m -

      +
    • makeDirectory() +: op +
    • +
    • maximumCpu() +: op +
    • +
    • maximumGpu() +: op +
    • +
    • mergeVectors() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_n.html b/web/html/doc/namespacemembers_func_n.html new file mode 100644 index 000000000..77f77f95a --- /dev/null +++ b/web/html/doc/namespacemembers_func_n.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - n -

      +
    • nmsCpu() +: op +
    • +
    • nmsGpu() +: op +
    • +
    • nmsOcl() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_o.html b/web/html/doc/namespacemembers_func_o.html new file mode 100644 index 000000000..21c18cda3 --- /dev/null +++ b/web/html/doc/namespacemembers_func_o.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - o -

      +
    • opLog() +: op +
    • +
    • opLogIfDebug() +: op +
    • +
    +
    +
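    opLog() and opLogIfDebug(), together with error() and the check*() helpers indexed on the earlier pages, form the library's logging and error-handling layer. A minimal sketch, assuming the default overloads from include/openpose/utilities/errorAndLog.hpp (priority values and exact parameter lists may differ between versions):

```cpp
#include <openpose/headers.hpp>

// Hedged sketch of typical op:: logging and error handling.
void sketchLogging()
{
    // Logged only if the configured priority threshold allows Priority::High messages.
    op::opLog("Starting processing...", op::Priority::High);
    try
    {
        // error() logs the message with file/line/function context and then throws.
        op::error("Something went wrong.", __LINE__, __FUNCTION__, __FILE__);
    }
    catch (const std::exception& e)
    {
        op::opLog(e.what(), op::Priority::High);
    }
}
```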
    + + + + diff --git a/web/html/doc/namespacemembers_func_p.html b/web/html/doc/namespacemembers_func_p.html new file mode 100644 index 000000000..1792247bf --- /dev/null +++ b/web/html/doc/namespacemembers_func_p.html @@ -0,0 +1,130 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - p -

      +
    • pafPtrIntoVector() +: op +
    • +
    • pafVectorIntoPeopleVector() +: op +
    • +
    • peopleVectorToPeopleArray() +: op +
    • +
    • poseBodyPartMapStringToKey() +: op +
    • +
    • positiveCharRound() +: op +
    • +
    • positiveIntRound() +: op +
    • +
    • positiveLongLongRound() +: op +
    • +
    • positiveLongRound() +: op +
    • +
    • positiveSCharRound() +: op +
    • +
    • printTime() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_r.html b/web/html/doc/namespacemembers_func_r.html new file mode 100644 index 000000000..c50dc3061 --- /dev/null +++ b/web/html/doc/namespacemembers_func_r.html @@ -0,0 +1,178 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - r -

      +
    • recenter() +: op +
    • +
    • refineAndSaveExtrinsics() +: op +
    • +
    • remove0sFromString() +: op +
    • +
    • removeAllOcurrencesOfSubString() +: op +
    • +
    • removePeopleBelowThresholdsAndFillFaces() +: op +
    • +
    • removeSpecialsCharacters() +: op +
    • +
    • renderFaceKeypointsCpu() +: op +
    • +
    • renderFaceKeypointsGpu() +: op +
    • +
    • renderHandKeypointsCpu() +: op +
    • +
    • renderHandKeypointsGpu() +: op +
    • +
    • renderKeypointsCpu() +: op +
    • +
    • renderPoseDistanceGpu() +: op +
    • +
    • renderPoseHeatMapGpu() +: op +
    • +
    • renderPoseHeatMapsGpu() +: op +
    • +
    • renderPoseKeypointsCpu() +: op +
    • +
    • renderPoseKeypointsGpu() +: op +
    • +
    • renderPosePAFGpu() +: op +
    • +
    • renderPosePAFsGpu() +: op +
    • +
    • reorderAndNormalize() +: op +
    • +
    • replaceAll() +: op +
    • +
    • resizeAndMergeCpu() +: op +
    • +
    • resizeAndMergeGpu() +: op +
    • +
    • resizeAndMergeOcl() +: op +
    • +
    • resizeAndPadRbgGpu() +: op +
    • +
    • resizeGetScaleFactor() +: op +
    • +
    • rotateAndFlipFrame() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_s.html b/web/html/doc/namespacemembers_func_s.html new file mode 100644 index 000000000..dd78a76a2 --- /dev/null +++ b/web/html/doc/namespacemembers_func_s.html @@ -0,0 +1,136 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - s -

      +
    • saveData() +: op +
    • +
    • saveFloatArray() +: op +
    • +
    • saveImage() +: op +
    • +
    • savePeopleJson() +: op +
    • +
    • scaleKeypoints() +: op +
    • +
    • scaleKeypoints2d() +: op +
    • +
    • setErrorModes() +: op::ConfigureError +
    • +
    • setLogModes() +: op::ConfigureLog +
    • +
    • setMainThread() +: op +
    • +
    • setPriorityThreshold() +: op::ConfigureLog +
    • +
    • splitString() +: op +
    • +
    • stringToDataFormat() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_t.html b/web/html/doc/namespacemembers_func_t.html new file mode 100644 index 000000000..0300dd624 --- /dev/null +++ b/web/html/doc/namespacemembers_func_t.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - t -

      +
    • threadIdPP() +: op +
    • +
    • toFixedLengthString() +: op +
    • +
    • toLower() +: op +
    • +
    • toUpper() +: op +
    • +
    • transpose() +: op +
    • +
    • tToString() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_u.html b/web/html/doc/namespacemembers_func_u.html new file mode 100644 index 000000000..8551e16be --- /dev/null +++ b/web/html/doc/namespacemembers_func_u.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - u -

      +
    • uCharCvMatToFloatPtr() +: op +
    • +
    • uCharImageCast() +: op +
    • +
    • uCharRound() +: op +
    • +
    • uIntRound() +: op +
    • +
    • uLongLongRound() +: op +
    • +
    • ulongRound() +: op +
    • +
    • unrollArrayToUCharCvMat() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_v.html b/web/html/doc/namespacemembers_func_v.html new file mode 100644 index 000000000..0acf5341c --- /dev/null +++ b/web/html/doc/namespacemembers_func_v.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - v -

      +
    • vectorsAreEqual() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_func_w.html b/web/html/doc/namespacemembers_func_w.html new file mode 100644 index 000000000..96b9a4003 --- /dev/null +++ b/web/html/doc/namespacemembers_func_w.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - w -

      +
    • wrapperConfigureSanityChecks() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_g.html b/web/html/doc/namespacemembers_g.html new file mode 100644 index 000000000..6faa418cf --- /dev/null +++ b/web/html/doc/namespacemembers_g.html @@ -0,0 +1,271 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - g -

      +
    • getAverageScore() +: op +
    • +
    • getBiggestPerson() +: op +
    • +
    • getCudaGpuNumber() +: op +
    • +
    • getCvCapPropFrameCount() +: op +
    • +
    • getCvCapPropFrameFps() +: op +
    • +
    • getCvCapPropFrameHeight() +: op +
    • +
    • getCvCapPropFrameWidth() +: op +
    • +
    • getCvFourcc() +: op +
    • +
    • getCvImwriteJpegQuality() +: op +
    • +
    • getCvImwritePngCompression() +: op +
    • +
    • getCvLoadImageAnydepth() +: op +
    • +
    • getCvLoadImageGrayScale() +: op +
    • +
    • getDistance() +: op +
    • +
    • getDistanceAverage() +: op +
    • +
    • getErrorModes() +: op::ConfigureError +
    • +
    • getFileExtension() +: op +
    • +
    • getFileNameAndExtension() +: op +
    • +
    • getFileNameNoExtension() +: op +
    • +
    • getFileParentFolderPath() +: op +
    • +
    • getFilesOnDirectory() +: op +
    • +
    • getFirstNumberOnString() +: op +
    • +
    • getFullFilePathNoExtension() +: op +
    • +
    • getGpuMode() +: op +
    • +
    • getGpuNumber() +: op +
    • +
    • getIfInMainThreadOrEmpty() +: op +
    • +
    • getIfNotInMainThreadOrEmpty() +: op +
    • +
    • getKeypointsArea() +: op +
    • +
    • getKeypointsPerson() +: op +
    • +
    • getKeypointsRectangle() +: op +
    • +
    • getKeypointsRoi() +: op +
    • +
    • getLastNumber() +: op +
    • +
    • getLogModes() +: op::ConfigureLog +
    • +
    • getNonZeroKeypoints() +: op +
    • +
    • getNumberCudaBlocks() +: op +
    • +
    • getNumberCudaThreadsAndBlocks() +: op +
    • +
    • getNumberElementsToRender() +: op +
    • +
    • getPoseBodyPartMapping() +: op +
    • +
    • getPoseBodyPartPairsRender() +: op +
    • +
    • getPoseColors() +: op +
    • +
    • getPoseDefaultConnectInterMinAboveThreshold() +: op +
    • +
    • getPoseDefaultConnectInterThreshold() +: op +
    • +
    • getPoseDefaultConnectMinSubsetScore() +: op +
    • +
    • getPoseDefaultMinSubsetCnt() +: op +
    • +
    • getPoseDefaultNmsThreshold() +: op +
    • +
    • getPoseMapIndex() +: op +
    • +
    • getPoseMaxPeaks() +: op +
    • +
    • getPoseNetDecreaseFactor() +: op +
    • +
    • getPoseNumberBodyParts() +: op +
    • +
    • getPosePartPairs() +: op +
    • +
    • getPoseProtoTxt() +: op +
    • +
    • getPoseScales() +: op +
    • +
    • getPoseTrainedModel() +: op +
    • +
    • getPriorityThreshold() +: op::ConfigureLog +
    • +
    • getThreadId() +: op +
    • +
    • getTimerInit() +: op +
    • +
    • getTimeSeconds() +: op +
    • +
    • GpuMode +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_h.html b/web/html/doc/namespacemembers_h.html new file mode 100644 index 000000000..10218eeb4 --- /dev/null +++ b/web/html/doc/namespacemembers_h.html @@ -0,0 +1,136 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - h -

      +
    • H135 +: op +
    • +
    • HAND_CCN_DECREASE_FACTOR +: op +
    • +
    • HAND_COLORS_RENDER +: op +
    • +
    • HAND_DEFAULT_ALPHA_HEAT_MAP +: op +
    • +
    • HAND_DEFAULT_ALPHA_KEYPOINT +: op +
    • +
    • HAND_MAX_HANDS +: op +
    • +
    • HAND_NUMBER_PARTS +: op +
    • +
    • HAND_PAIRS_RENDER +: op +
    • +
    • HAND_PROTOTXT +: op +
    • +
    • HAND_SCALES_RENDER +: op +
    • +
    • HAND_TRAINED_MODEL +: op +
    • +
    • HeatMapType +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_k.html b/web/html/doc/namespacemembers_k.html new file mode 100644 index 000000000..dcce730b1 --- /dev/null +++ b/web/html/doc/namespacemembers_k.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - k -

      +
    • keepRoiInside() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_l.html b/web/html/doc/namespacemembers_l.html new file mode 100644 index 000000000..07298e798 --- /dev/null +++ b/web/html/doc/namespacemembers_l.html @@ -0,0 +1,112 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - l -

      +
    • loadData() +: op +
    • +
    • loadHandDetectorTxt() +: op +
    • +
    • loadImage() +: op +
    • +
    • LogMode +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_m.html b/web/html/doc/namespacemembers_m.html new file mode 100644 index 000000000..b78b210cc --- /dev/null +++ b/web/html/doc/namespacemembers_m.html @@ -0,0 +1,112 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - m -

      +
    • makeDirectory() +: op +
    • +
    • maximumCpu() +: op +
    • +
    • maximumGpu() +: op +
    • +
    • mergeVectors() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_n.html b/web/html/doc/namespacemembers_n.html new file mode 100644 index 000000000..a8012a672 --- /dev/null +++ b/web/html/doc/namespacemembers_n.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - n -

      +
    • nmsCpu() +: op +
    • +
    • nmsGpu() +: op +
    • +
    • nmsOcl() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_o.html b/web/html/doc/namespacemembers_o.html new file mode 100644 index 000000000..76da36336 --- /dev/null +++ b/web/html/doc/namespacemembers_o.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - o -

      +
    • opLog() +: op +
    • +
    • opLogIfDebug() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_p.html b/web/html/doc/namespacemembers_p.html new file mode 100644 index 000000000..dc3ded9be --- /dev/null +++ b/web/html/doc/namespacemembers_p.html @@ -0,0 +1,160 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - p -

      +
    • pafPtrIntoVector() +: op +
    • +
    • pafVectorIntoPeopleVector() +: op +
    • +
    • peopleVectorToPeopleArray() +: op +
    • +
    • POSE_DEFAULT_ALPHA_HEAT_MAP +: op +
    • +
    • POSE_DEFAULT_ALPHA_KEYPOINT +: op +
    • +
    • POSE_MAX_PEOPLE +: op +
    • +
    • poseBodyPartMapStringToKey() +: op +
    • +
    • PoseMode +: op +
    • +
    • PoseModel +: op +
    • +
    • PoseProperty +: op +
    • +
    • positiveCharRound() +: op +
    • +
    • positiveIntRound() +: op +
    • +
    • positiveLongLongRound() +: op +
    • +
    • positiveLongRound() +: op +
    • +
    • positiveSCharRound() +: op +
    • +
    • printTime() +: op +
    • +
    • Priority +: op +
    • +
    • ProducerFpsMode +: op +
    • +
    • ProducerProperty +: op +
    • +
    • ProducerType +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_r.html b/web/html/doc/namespacemembers_r.html new file mode 100644 index 000000000..30b4b0671 --- /dev/null +++ b/web/html/doc/namespacemembers_r.html @@ -0,0 +1,181 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - r -

      +
    • recenter() +: op +
    • +
    • refineAndSaveExtrinsics() +: op +
    • +
    • remove0sFromString() +: op +
    • +
    • removeAllOcurrencesOfSubString() +: op +
    • +
    • removePeopleBelowThresholdsAndFillFaces() +: op +
    • +
    • removeSpecialsCharacters() +: op +
    • +
    • renderFaceKeypointsCpu() +: op +
    • +
    • renderFaceKeypointsGpu() +: op +
    • +
    • renderHandKeypointsCpu() +: op +
    • +
    • renderHandKeypointsGpu() +: op +
    • +
    • renderKeypointsCpu() +: op +
    • +
    • RenderMode +: op +
    • +
    • renderPoseDistanceGpu() +: op +
    • +
    • renderPoseHeatMapGpu() +: op +
    • +
    • renderPoseHeatMapsGpu() +: op +
    • +
    • renderPoseKeypointsCpu() +: op +
    • +
    • renderPoseKeypointsGpu() +: op +
    • +
    • renderPosePAFGpu() +: op +
    • +
    • renderPosePAFsGpu() +: op +
    • +
    • reorderAndNormalize() +: op +
    • +
    • replaceAll() +: op +
    • +
    • resizeAndMergeCpu() +: op +
    • +
    • resizeAndMergeGpu() +: op +
    • +
    • resizeAndMergeOcl() +: op +
    • +
    • resizeAndPadRbgGpu() +: op +
    • +
    • resizeGetScaleFactor() +: op +
    • +
    • rotateAndFlipFrame() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_s.html b/web/html/doc/namespacemembers_s.html new file mode 100644 index 000000000..e7a6fc50a --- /dev/null +++ b/web/html/doc/namespacemembers_s.html @@ -0,0 +1,139 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - s -

      +
    • saveData() +: op +
    • +
    • saveFloatArray() +: op +
    • +
    • saveImage() +: op +
    • +
    • savePeopleJson() +: op +
    • +
    • scaleKeypoints() +: op +
    • +
    • scaleKeypoints2d() +: op +
    • +
    • ScaleMode +: op +
    • +
    • setErrorModes() +: op::ConfigureError +
    • +
    • setLogModes() +: op::ConfigureLog +
    • +
    • setMainThread() +: op +
    • +
    • setPriorityThreshold() +: op::ConfigureLog +
    • +
    • splitString() +: op +
    • +
    • stringToDataFormat() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_t.html b/web/html/doc/namespacemembers_t.html new file mode 100644 index 000000000..175fb354f --- /dev/null +++ b/web/html/doc/namespacemembers_t.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - t -

      +
    • threadIdPP() +: op +
    • +
    • ThreadManagerMode +: op +
    • +
    • toFixedLengthString() +: op +
    • +
    • toLower() +: op +
    • +
    • toUpper() +: op +
    • +
    • transpose() +: op +
    • +
    • tToString() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_type.html b/web/html/doc/namespacemembers_type.html new file mode 100644 index 000000000..200185a57 --- /dev/null +++ b/web/html/doc/namespacemembers_type.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    • Wrapper +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_u.html b/web/html/doc/namespacemembers_u.html new file mode 100644 index 000000000..2d9ae1596 --- /dev/null +++ b/web/html/doc/namespacemembers_u.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - u -

      +
    • uCharCvMatToFloatPtr() +: op +
    • +
    • uCharImageCast() +: op +
    • +
    • uCharRound() +: op +
    • +
    • uIntRound() +: op +
    • +
    • uLongLongRound() +: op +
    • +
    • ulongRound() +: op +
    • +
    • unrollArrayToUCharCvMat() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_v.html b/web/html/doc/namespacemembers_v.html new file mode 100644 index 000000000..017a1adcc --- /dev/null +++ b/web/html/doc/namespacemembers_v.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - v -

      +
    • vectorsAreEqual() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_vars.html b/web/html/doc/namespacemembers_vars.html new file mode 100644 index 000000000..a33adbb72 --- /dev/null +++ b/web/html/doc/namespacemembers_vars.html @@ -0,0 +1,176 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    • CUDA_NUM_THREADS +: op +
    • +
    • F135 +: op +
    • +
    • FACE_CCN_DECREASE_FACTOR +: op +
    • +
    • FACE_COLORS_RENDER +: op +
    • +
    • FACE_DEFAULT_ALPHA_HEAT_MAP +: op +
    • +
    • FACE_DEFAULT_ALPHA_KEYPOINT +: op +
    • +
    • FACE_MAX_FACES +: op +
    • +
    • FACE_NUMBER_PARTS +: op +
    • +
    • FACE_PAIRS_RENDER +: op +
    • +
    • FACE_PROTOTXT +: op +
    • +
    • FACE_SCALES_RENDER +: op +
    • +
    • FACE_TRAINED_MODEL +: op +
    • +
    • H135 +: op +
    • +
    • HAND_CCN_DECREASE_FACTOR +: op +
    • +
    • HAND_COLORS_RENDER +: op +
    • +
    • HAND_DEFAULT_ALPHA_HEAT_MAP +: op +
    • +
    • HAND_DEFAULT_ALPHA_KEYPOINT +: op +
    • +
    • HAND_MAX_HANDS +: op +
    • +
    • HAND_NUMBER_PARTS +: op +
    • +
    • HAND_PAIRS_RENDER +: op +
    • +
    • HAND_PROTOTXT +: op +
    • +
    • HAND_SCALES_RENDER +: op +
    • +
    • HAND_TRAINED_MODEL +: op +
    • +
    • POSE_DEFAULT_ALPHA_HEAT_MAP +: op +
    • +
    • POSE_DEFAULT_ALPHA_KEYPOINT +: op +
    • +
    • POSE_MAX_PEOPLE +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespacemembers_w.html b/web/html/doc/namespacemembers_w.html new file mode 100644 index 000000000..955575bed --- /dev/null +++ b/web/html/doc/namespacemembers_w.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Namespace Members + + + + + + + + + + + + + +
    - w -

      +
    • WorkerType +: op +
    • +
    • Wrapper +: op +
    • +
    • wrapperConfigureSanityChecks() +: op +
    • +
    +
    +
    + + + + diff --git a/web/html/doc/namespaceop.html b/web/html/doc/namespaceop.html new file mode 100644 index 000000000..bbad720cf --- /dev/null +++ b/web/html/doc/namespaceop.html @@ -0,0 +1,10346 @@ + + + + + + + +OpenPose: op Namespace Reference + + + + + + + + + + + + + +
    op Namespace Reference
    +
    +
    + + + + + + +

    +Namespaces

     ConfigureError
     
     ConfigureLog
     
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    +Classes

    class  CameraParameterReader
     
    class  PoseTriangulation
     
    class  WPoseTriangulation
     
    class  Array
     
    class  ArrayCpuGpu
     
    class  CvMatToOpInput
     
    class  CvMatToOpOutput
     
    struct  Datum
     
    class  GpuRenderer
     
    class  KeepTopNPeople
     
    class  KeypointScaler
     
    class  Matrix
     
    class  OpOutputToCvMat
     
    struct  Point
     
    struct  Rectangle
     
    class  Renderer
     
    class  ScaleAndSizeExtractor
     
    class  String
     
    class  VerbosePrinter
     
    class  WCvMatToOpInput
     
    class  WCvMatToOpOutput
     
    class  WKeepTopNPeople
     
    class  WKeypointScaler
     
    class  WOpOutputToCvMat
     
    class  WScaleAndSizeExtractor
     
    class  WVerbosePrinter
     
    class  FaceCpuRenderer
     
    class  FaceDetector
     
    class  FaceDetectorOpenCV
     
    class  FaceExtractorCaffe
     
    class  FaceExtractorNet
     
    class  FaceGpuRenderer
     
    class  FaceRenderer
     
    class  WFaceDetector
     
    class  WFaceDetectorOpenCV
     
    class  WFaceExtractorNet
     
    class  WFaceRenderer
     
    class  CocoJsonSaver
     
    class  FileSaver
     
    class  HeatMapSaver
     
    class  ImageSaver
     
    class  JsonOfstream
     
    class  KeypointSaver
     
    class  PeopleJsonSaver
     
    class  UdpSender
     
    class  VideoSaver
     
    class  WCocoJsonSaver
     
    class  WFaceSaver
     
    class  WHandSaver
     
    class  WHeatMapSaver
     
    class  WImageSaver
     
    class  WPeopleJsonSaver
     
    class  WPoseSaver
     
    class  WUdpSender
     
    class  WVideoSaver
     
    class  WVideoSaver3D
     
    class  FrameDisplayer
     
    class  Gui
     
    class  Gui3D
     
    class  GuiInfoAdder
     
    class  WGui
     
    class  WGui3D
     
    class  WGuiInfoAdder
     
    class  HandCpuRenderer
     
    class  HandDetector
     
    class  HandDetectorFromTxt
     
    class  HandExtractorCaffe
     
    class  HandExtractorNet
     
    class  HandGpuRenderer
     
    class  HandRenderer
     
    class  WHandDetector
     
    class  WHandDetectorFromTxt
     
    class  WHandDetectorTracking
     
    class  WHandDetectorUpdate
     
    class  WHandExtractorNet
     
    class  WHandRenderer
     
    class  BodyPartConnectorCaffe
     
    class  MaximumCaffe
     
    class  Net
     
    class  NetCaffe
     
    class  NetOpenCv
     
    class  NmsCaffe
     
    class  ResizeAndMergeCaffe
     
    class  PoseCpuRenderer
     
    class  PoseExtractor
     
    class  PoseExtractorCaffe
     
    class  PoseExtractorNet
     
    class  PoseGpuRenderer
     
    class  PoseRenderer
     
    class  WPoseExtractor
     
    class  WPoseExtractorNet
     
    class  WPoseRenderer
     
    class  DatumProducer
     
    class  FlirReader
     
    class  ImageDirectoryReader
     
    class  IpCameraReader
     
    class  Producer
     
    class  SpinnakerWrapper
     
    class  VideoCaptureReader
     
    class  VideoReader
     
    class  WDatumProducer
     
    class  WebcamReader
     
    class  PriorityQueue
     
    class  Queue
     
    class  QueueBase
     
    class  SubThread
     
    class  SubThreadNoQueue
     
    class  SubThreadQueueIn
     
    class  SubThreadQueueInOut
     
    class  SubThreadQueueOut
     
    class  Thread
     
    class  ThreadManager
     
    class  WFpsMax
     
    class  WIdGenerator
     
    class  Worker
     
    class  WorkerConsumer
     
    class  WorkerProducer
     
    class  WQueueAssembler
     
    class  WQueueOrderer
     
    class  PersonIdExtractor
     
    class  PersonTracker
     
    class  WPersonIdExtractor
     
    class  PointerContainerGreater
     
    class  PointerContainerLess
     
    class  Profiler
     
    class  WrapperT
     
    struct  WrapperStructExtra
     
    struct  WrapperStructFace
     
    struct  WrapperStructGui
     
    struct  WrapperStructHand
     
    struct  WrapperStructInput
     
    struct  WrapperStructOutput
     
    struct  WrapperStructPose
     

Typedefs

typedef WrapperT< BASE_DATUM > Wrapper
     

Enumerations

enum class ScaleMode : unsigned char { InputResolution, NetOutputResolution, OutputResolution, ZeroToOne, ZeroToOneFixedAspect, PlusMinusOne, PlusMinusOneFixedAspect, UnsignedChar, NoScale }
enum class HeatMapType : unsigned char { Parts, Background, PAFs }
enum class RenderMode : unsigned char { None, Auto, Cpu, Gpu }
enum class ElementToRender : unsigned char { Skeleton, Background, AddKeypoints, AddPAFs }
enum class DataFormat : unsigned char { Json, Xml, Yaml, Yml }
enum class CocoJsonFormat : unsigned char { Body, Hand21, Hand42, Face, Foot, Car, Size }
enum class GpuMode : unsigned char { Cuda = 0, OpenCL = 1, NoGpu = 2, Size }
enum class DisplayMode : unsigned short { NoDisplay, DisplayAll, Display2D, Display3D, DisplayAdam }
enum class FullScreenMode : bool { FullScreen, Windowed }
enum class PoseModel : unsigned char { BODY_25 = 0, COCO_18, MPI_15, MPI_15_4, BODY_19, BODY_19_X2, BODY_19N, BODY_25E, CAR_12, BODY_25D, BODY_23, CAR_22, BODY_19E, BODY_25B, BODY_135, Size }
enum class PoseProperty : unsigned char { NMSThreshold = 0, ConnectInterMinAboveThreshold, ConnectInterThreshold, ConnectMinSubsetCnt, ConnectMinSubsetScore, Size }
enum class ProducerFpsMode : bool { OriginalFps, RetrievalFps }
enum class ProducerProperty : unsigned char { AutoRepeat = 0, Flip, Rotation, FrameStep, NumberViews, Size }
enum class ProducerType : unsigned char { FlirCamera, ImageDirectory, IPCamera, Video, Webcam, None }
enum class ThreadManagerMode : unsigned char { Asynchronous, AsynchronousIn, AsynchronousOut, Synchronous }
enum class ErrorMode : unsigned char { StdRuntimeError, FileLogging, StdCerr, All }
enum class LogMode : unsigned char { FileLogging, StdCout, All }
enum class Priority : unsigned char { None = 0, Low = 1, Normal = 2, High = 3, Max = 4, NoOutput = 255 }
enum class Extensions : unsigned char { Images, Size }
enum class PoseMode : unsigned char { Disabled = 0, Enabled, NoNetwork, Size }
enum class Detector : unsigned char { Body = 0, OpenCV, Provided, BodyWithTracking, Size }
enum class WorkerType : unsigned char { Input = 0, PreProcessing, PostProcessing, Output, Size }
     

Functions

     COMPILE_TEMPLATE_DATUM (WPoseTriangulation)
     
    OP_API void estimateAndSaveIntrinsics (const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int flags, const std::string &outputParameterFolder, const std::string &imageFolder, const std::string &serialNumber, const bool saveImagesWithCorners=false)
     
    OP_API void estimateAndSaveExtrinsics (const std::string &parameterFolder, const std::string &imageFolder, const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int index0, const int index1, const bool imagesAreUndistorted, const bool combineCam0Extrinsics)
     
    OP_API void refineAndSaveExtrinsics (const std::string &parameterFolder, const std::string &imageFolder, const Point< int > &gridInnerCorners, const float gridSquareSizeMm, const int numberCameras, const bool imagesAreUndistorted, const bool saveImagesWithCorners=false)
     
    OP_API void estimateAndSaveSiftFile (const Point< int > &gridInnerCorners, const std::string &imageFolder, const int numberCameras, const bool saveImagesWithCorners=false)
     
    template<typename T >
    Rectangle< T > recenter (const Rectangle< T > &rectangle, const T newWidth, const T newHeight)
     
     COMPILE_TEMPLATE_DATUM (WCvMatToOpInput)
     
     COMPILE_TEMPLATE_DATUM (WCvMatToOpOutput)
     
     COMPILE_TEMPLATE_DATUM (WKeepTopNPeople)
     
     COMPILE_TEMPLATE_DATUM (WKeypointScaler)
     
     COMPILE_TEMPLATE_DATUM (WOpOutputToCvMat)
     
     COMPILE_TEMPLATE_DATUM (WScaleAndSizeExtractor)
     
     COMPILE_TEMPLATE_DATUM (WVerbosePrinter)
     
    OP_API void renderFaceKeypointsCpu (Array< float > &frameArray, const Array< float > &faceKeypoints, const float renderThreshold)
     
    void renderFaceKeypointsGpu (float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const Point< unsigned int > &frameSize, const float *const facePtr, const int numberPeople, const float renderThreshold, const float alphaColorToAdd=FACE_DEFAULT_ALPHA_KEYPOINT)
     
     COMPILE_TEMPLATE_DATUM (WFaceDetector)
     
     COMPILE_TEMPLATE_DATUM (WFaceDetectorOpenCV)
     
     COMPILE_TEMPLATE_DATUM (WFaceExtractorNet)
     
     COMPILE_TEMPLATE_DATUM (WFaceRenderer)
     
    OP_API std::string dataFormatToString (const DataFormat dataFormat)
     
    OP_API DataFormat stringToDataFormat (const std::string &dataFormat)
     
    OP_API void saveFloatArray (const Array< float > &array, const std::string &fullFilePath)
     
    OP_API void saveData (const std::vector< Matrix > &opMats, const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)
     
    OP_API void saveData (const Matrix &opMat, const std::string cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)
     
OP_API std::vector< Matrix > loadData (const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)
     
    OP_API Matrix loadData (const std::string &cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)
     
    OP_API void savePeopleJson (const Array< float > &keypoints, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &keypointName, const std::string &fileName, const bool humanReadable)
     
    OP_API void savePeopleJson (const std::vector< std::pair< Array< float >, std::string >> &keypointVector, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &fileName, const bool humanReadable)
     
    OP_API void saveImage (const Matrix &matrix, const std::string &fullFilePath, const std::vector< int > &openCvCompressionParams={getCvImwriteJpegQuality(), 100, getCvImwritePngCompression(), 9})
     
    OP_API Matrix loadImage (const std::string &fullFilePath, const int openCvFlags=getCvLoadImageAnydepth())
     
    OP_API std::vector< std::array< Rectangle< float >, 2 > > loadHandDetectorTxt (const std::string &txtFilePath)
     
     COMPILE_TEMPLATE_DATUM (WCocoJsonSaver)
     
     COMPILE_TEMPLATE_DATUM (WFaceSaver)
     
     COMPILE_TEMPLATE_DATUM (WHandSaver)
     
     COMPILE_TEMPLATE_DATUM (WHeatMapSaver)
     
     COMPILE_TEMPLATE_DATUM (WImageSaver)
     
     COMPILE_TEMPLATE_DATUM (WPeopleJsonSaver)
     
     COMPILE_TEMPLATE_DATUM (WPoseSaver)
     
     COMPILE_TEMPLATE_DATUM (WUdpSender)
     
     COMPILE_TEMPLATE_DATUM (WVideoSaver)
     
     COMPILE_TEMPLATE_DATUM (WVideoSaver3D)
     
    OP_API void cudaCheck (const int line=-1, const std::string &function="", const std::string &file="")
     
    OP_API int getCudaGpuNumber ()
     
    unsigned int getNumberCudaBlocks (const unsigned int totalRequired, const unsigned int numberCudaThreads=CUDA_NUM_THREADS)
     
    OP_API void getNumberCudaThreadsAndBlocks (dim3 &numberCudaThreads, dim3 &numberCudaBlocks, const Point< unsigned int > &frameSize)
     
    template<typename T >
    void reorderAndNormalize (T *targetPtr, const unsigned char *const srcPtr, const int width, const int height, const int channels)
     
    template<typename T >
    void uCharImageCast (unsigned char *targetPtr, const T *const srcPtr, const int volume)
     
    OP_API int getGpuNumber ()
     
    OP_API GpuMode getGpuMode ()
     
     COMPILE_TEMPLATE_DATUM (WGui)
     
     COMPILE_TEMPLATE_DATUM (WGui3D)
     
     COMPILE_TEMPLATE_DATUM (WGuiInfoAdder)
     
    OP_API void renderHandKeypointsCpu (Array< float > &frameArray, const std::array< Array< float >, 2 > &handKeypoints, const float renderThreshold)
     
    void renderHandKeypointsGpu (float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const Point< unsigned int > &frameSize, const float *const handsPtr, const int numberHands, const float renderThreshold, const float alphaColorToAdd=HAND_DEFAULT_ALPHA_KEYPOINT)
     
     COMPILE_TEMPLATE_DATUM (WHandDetector)
     
     COMPILE_TEMPLATE_DATUM (WHandDetectorFromTxt)
     
     COMPILE_TEMPLATE_DATUM (WHandDetectorTracking)
     
     COMPILE_TEMPLATE_DATUM (WHandDetectorUpdate)
     
     COMPILE_TEMPLATE_DATUM (WHandExtractorNet)
     
     COMPILE_TEMPLATE_DATUM (WHandRenderer)
     
    template<typename T >
    void connectBodyPartsCpu (Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor=1.f, const bool maximizePositives=false)
     
    template<typename T >
    void connectBodyPartsGpu (Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor, const bool maximizePositives, Array< T > pairScoresCpu, T *pairScoresGpuPtr, const unsigned int *const bodyPartPairsGpuPtr, const unsigned int *const mapIdxGpuPtr, const T *const peaksGpuPtr)
     
    template<typename T >
    void connectBodyPartsOcl (Array< T > &poseKeypoints, Array< T > &poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor=1.f, const bool maximizePositives=false, Array< T > pairScoresCpu=Array< T >{}, T *pairScoresGpuPtr=nullptr, const unsigned int *const bodyPartPairsGpuPtr=nullptr, const unsigned int *const mapIdxGpuPtr=nullptr, const T *const peaksGpuPtr=nullptr, const int gpuID=0)
     
    template<typename T >
    std::vector< std::pair< std::vector< int >, T > > createPeopleVector (const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > &heatMapSize, const int maxPeaks, const T interThreshold, const T interMinAboveThreshold, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs, const T defaultNmsThreshold, const Array< T > &precomputedPAFs=Array< T >())
     
    template<typename T >
    void removePeopleBelowThresholdsAndFillFaces (std::vector< int > &validSubsetIndexes, int &numberPeople, std::vector< std::pair< std::vector< int >, T >> &subsets, const unsigned int numberBodyParts, const int minSubsetCnt, const T minSubsetScore, const bool maximizePositives, const T *const peaksPtr)
     
    template<typename T >
    void peopleVectorToPeopleArray (Array< T > &poseKeypoints, Array< T > &poseScores, const T scaleFactor, const std::vector< std::pair< std::vector< int >, T >> &subsets, const std::vector< int > &validSubsetIndexes, const T *const peaksPtr, const int numberPeople, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs)
     
    template<typename T >
    std::vector< std::tuple< T, T, int, int, int > > pafPtrIntoVector (const Array< T > &pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyPartPairs)
     
    template<typename T >
    std::vector< std::pair< std::vector< int >, T > > pafVectorIntoPeopleVector (const std::vector< std::tuple< T, T, int, int, int >> &pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > &bodyPartPairs, const unsigned int numberBodyParts)
     
    template<typename T >
    void maximumCpu (T *targetPtr, const T *const sourcePtr, const std::array< int, 4 > &targetSize, const std::array< int, 4 > &sourceSize)
     
    template<typename T >
    void maximumGpu (T *targetPtr, const T *const sourcePtr, const std::array< int, 4 > &targetSize, const std::array< int, 4 > &sourceSize)
     
    template<typename T >
    void nmsCpu (T *targetPtr, int *kernelPtr, const T *const sourcePtr, const T threshold, const std::array< int, 4 > &targetSize, const std::array< int, 4 > &sourceSize, const Point< T > &offset)
     
    template<typename T >
    void nmsGpu (T *targetPtr, int *kernelPtr, const T *const sourcePtr, const T threshold, const std::array< int, 4 > &targetSize, const std::array< int, 4 > &sourceSize, const Point< T > &offset)
     
    template<typename T >
    void nmsOcl (T *targetPtr, uint8_t *kernelGpuPtr, uint8_t *kernelCpuPtr, const T *const sourcePtr, const T threshold, const std::array< int, 4 > &targetSize, const std::array< int, 4 > &sourceSize, const Point< T > &offset, const int gpuID=0)
     
    template<typename T >
    void resizeAndMergeCpu (T *targetPtr, const std::vector< const T * > &sourcePtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f})
     
    template<typename T >
    void resizeAndMergeGpu (T *targetPtr, const std::vector< const T * > &sourcePtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f})
     
    template<typename T >
    void resizeAndMergeOcl (T *targetPtr, const std::vector< const T * > &sourcePtrs, std::vector< T * > &sourceTempPtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f}, const int gpuID=0)
     
    template<typename T >
    void resizeAndPadRbgGpu (T *targetPtr, const T *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)
     
    template<typename T >
    void resizeAndPadRbgGpu (T *targetPtr, const unsigned char *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)
     
    OP_API const std::map< unsigned int, std::string > & getPoseBodyPartMapping (const PoseModel poseModel)
     
    OP_API const std::string & getPoseProtoTxt (const PoseModel poseModel)
     
    OP_API const std::string & getPoseTrainedModel (const PoseModel poseModel)
     
    OP_API unsigned int getPoseNumberBodyParts (const PoseModel poseModel)
     
    OP_API const std::vector< unsigned int > & getPosePartPairs (const PoseModel poseModel)
     
    OP_API const std::vector< unsigned int > & getPoseMapIndex (const PoseModel poseModel)
     
    OP_API unsigned int getPoseMaxPeaks ()
     
    OP_API float getPoseNetDecreaseFactor (const PoseModel poseModel)
     
    OP_API unsigned int poseBodyPartMapStringToKey (const PoseModel poseModel, const std::string &string)
     
    OP_API unsigned int poseBodyPartMapStringToKey (const PoseModel poseModel, const std::vector< std::string > &strings)
     
    OP_API float getPoseDefaultNmsThreshold (const PoseModel poseModel, const bool maximizePositives=false)
     
    OP_API float getPoseDefaultConnectInterMinAboveThreshold (const bool maximizePositives=false)
     
    OP_API float getPoseDefaultConnectInterThreshold (const PoseModel poseModel, const bool maximizePositives=false)
     
    OP_API unsigned int getPoseDefaultMinSubsetCnt (const bool maximizePositives=false)
     
    OP_API float getPoseDefaultConnectMinSubsetScore (const bool maximizePositives=false)
     
    OP_API bool addBkgChannel (const PoseModel poseModel)
     
    OP_API const std::vector< float > & getPoseScales (const PoseModel poseModel)
     
    OP_API const std::vector< float > & getPoseColors (const PoseModel poseModel)
     
    OP_API const std::vector< unsigned int > & getPoseBodyPartPairsRender (const PoseModel poseModel)
     
    OP_API unsigned int getNumberElementsToRender (const PoseModel poseModel)
     
    OP_API void renderPoseKeypointsCpu (Array< float > &frameArray, const Array< float > &poseKeypoints, const PoseModel poseModel, const float renderThreshold, const bool blendOriginalFrame=true)
     
    void renderPoseKeypointsGpu (float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const PoseModel poseModel, const int numberPeople, const Point< unsigned int > &frameSize, const float *const posePtr, const float renderThreshold, const bool googlyEyes=false, const bool blendOriginalFrame=true, const float alphaBlending=POSE_DEFAULT_ALPHA_KEYPOINT)
     
    void renderPoseHeatMapGpu (float *frame, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void renderPoseHeatMapsGpu (float *frame, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void renderPosePAFGpu (float *framePtr, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void renderPosePAFsGpu (float *framePtr, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void renderPoseDistanceGpu (float *framePtr, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
     COMPILE_TEMPLATE_DATUM (WPoseExtractor)
     
     COMPILE_TEMPLATE_DATUM (WPoseExtractorNet)
     
     COMPILE_TEMPLATE_DATUM (WPoseRenderer)
     
    OP_API void datumProducerConstructor (const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long frameFirst, const unsigned long long frameStep, const unsigned long long frameLast)
     
    OP_API void datumProducerConstructorTooManyConsecutiveEmptyFrames (unsigned int &numberConsecutiveEmptyFrames, const bool emptyFrame)
     
    OP_API bool datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning (const std::shared_ptr< Producer > &producerSharedPtr, const unsigned long long numberFramesToProcess, const unsigned long long globalCounter)
     
    OP_API void datumProducerConstructorRunningAndGetDatumApplyPlayerControls (const std::shared_ptr< Producer > &producerSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> &videoSeekSharedPtr)
     
    OP_API unsigned long long datumProducerConstructorRunningAndGetNextFrameNumber (const std::shared_ptr< Producer > &producerSharedPtr)
     
    OP_API void datumProducerConstructorRunningAndGetDatumFrameIntegrity (Matrix &matrix)
     
OP_API std::shared_ptr< Producer > createProducer (const ProducerType producerType=ProducerType::None, const std::string &producerString="", const Point< int > &cameraResolution=Point< int >{-1,-1}, const std::string &cameraParameterPath="models/cameraParameters/", const bool undistortImage=true, const int numberViews=-1)
     
     COMPILE_TEMPLATE_DATUM (PriorityQueue)
     
     COMPILE_TEMPLATE_DATUM (Queue)
     
     COMPILE_TEMPLATE_DATUM (SubThread)
     
     COMPILE_TEMPLATE_DATUM (SubThreadNoQueue)
     
     COMPILE_TEMPLATE_DATUM (SubThreadQueueIn)
     
     COMPILE_TEMPLATE_DATUM (SubThreadQueueInOut)
     
     COMPILE_TEMPLATE_DATUM (SubThreadQueueOut)
     
     COMPILE_TEMPLATE_DATUM (Thread)
     
     COMPILE_TEMPLATE_DATUM (ThreadManager)
     
     COMPILE_TEMPLATE_DATUM (WFpsMax)
     
     COMPILE_TEMPLATE_DATUM (WIdGenerator)
     
     COMPILE_TEMPLATE_DATUM (Worker)
     
     COMPILE_TEMPLATE_DATUM (WorkerConsumer)
     
     COMPILE_TEMPLATE_DATUM (WorkerProducer)
     
     COMPILE_TEMPLATE_DATUM (WQueueOrderer)
     
     COMPILE_TEMPLATE_DATUM (WPersonIdExtractor)
     
    template<typename T >
    void checkBool (const bool condition, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T , typename T1 , typename T2 >
    void checkEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T , typename T1 , typename T2 >
    void checkNotEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T , typename T1 , typename T2 >
    void checkLessOrEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T , typename T1 , typename T2 >
    void checkLessThan (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T , typename T1 , typename T2 >
    void checkGreaterOrEqual (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T , typename T1 , typename T2 >
    void checkGreaterThan (const T1 &conditionA, const T2 &conditionB, const T &message="", const int line=-1, const std::string &function="", const std::string &file="")
     
    OP_API void setMainThread ()
     
    OP_API std::string getThreadId ()
     
    OP_API bool getIfInMainThreadOrEmpty ()
     
    OP_API bool getIfNotInMainThreadOrEmpty ()
     
    template<typename T >
    std::string tToString (const T &message)
     
    OP_API void error (const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T >
    void error (const T &message, const int line=-1, const std::string &function="", const std::string &file="")
     
    OP_API void checkWorkerErrors ()
     
    OP_API void errorWorker (const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T >
    void errorWorker (const T &message, const int line=-1, const std::string &function="", const std::string &file="")
     
    OP_API void errorDestructor (const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T >
    void errorDestructor (const T &message, const int line=-1, const std::string &function="", const std::string &file="")
     
    OP_API void opLog (const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T >
    void opLog (const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T >
    void opLogIfDebug (const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
     
    template<typename T >
    char positiveCharRound (const T a)
     
    template<typename T >
    signed char positiveSCharRound (const T a)
     
    template<typename T >
    int positiveIntRound (const T a)
     
    template<typename T >
    long positiveLongRound (const T a)
     
    template<typename T >
    long long positiveLongLongRound (const T a)
     
    template<typename T >
    unsigned char uCharRound (const T a)
     
    template<typename T >
    unsigned int uIntRound (const T a)
     
    template<typename T >
    unsigned long ulongRound (const T a)
     
    template<typename T >
    unsigned long long uLongLongRound (const T a)
     
    template<typename T >
T fastMax (const T a, const T b)
     
    template<typename T >
T fastMin (const T a, const T b)
     
    template<class T >
T fastTruncate (T value, T min=0, T max=1)
     
    OP_API void makeDirectory (const std::string &directoryPath)
     
    OP_API bool existDirectory (const std::string &directoryPath)
     
    OP_API bool existFile (const std::string &filePath)
     
    OP_API std::string formatAsDirectory (const std::string &directoryPathString)
     
    OP_API std::string getFileNameAndExtension (const std::string &fullPath)
     
    OP_API std::string getFileNameNoExtension (const std::string &fullPath)
     
    OP_API std::string getFileExtension (const std::string &fullPath)
     
    OP_API std::string getFullFilePathNoExtension (const std::string &fullPath)
     
    OP_API std::string getFileParentFolderPath (const std::string &fullPath)
     
    OP_API std::vector< std::string > getFilesOnDirectory (const std::string &directoryPath, const std::vector< std::string > &extensions={})
     
    OP_API std::vector< std::string > getFilesOnDirectory (const std::string &directoryPath, const std::string &extension)
     
    OP_API std::vector< std::string > getFilesOnDirectory (const std::string &directoryPath, const Extensions extensions)
     
    OP_API std::string removeSpecialsCharacters (const std::string &stringToVariate)
     
    OP_API void removeAllOcurrencesOfSubString (std::string &stringToModify, const std::string &substring)
     
    OP_API void replaceAll (std::string &stringText, const char charToChange, const char charToAdd)
     
    OP_API PoseMode flagsToPoseMode (const int poseModeInt)
     
    OP_API PoseModel flagsToPoseModel (const String &poseModeString)
     
    OP_API ScaleMode flagsToScaleMode (const int keypointScaleMode)
     
    OP_API ScaleMode flagsToHeatMapScaleMode (const int heatMapScaleMode)
     
    OP_API Detector flagsToDetector (const int detector)
     
    OP_API ProducerType flagsToProducerType (const String &imageDirectory, const String &videoPath, const String &ipCameraPath, const int webcamIndex, const bool flirCamera)
     
OP_API std::pair< ProducerType, String > flagsToProducer (const String &imageDirectory, const String &videoPath, const String &ipCameraPath=String(""), const int webcamIndex=-1, const bool flirCamera=false, const int flirCameraIndex=-1)
     
OP_API std::vector< HeatMapType > flagsToHeatMaps (const bool heatMapsAddParts=false, const bool heatMapsAddBkg=false, const bool heatMapsAddPAFs=false)
     
    OP_API RenderMode flagsToRenderMode (const int renderFlag, const bool gpuBuggy=false, const int renderPoseFlag=-2)
     
    OP_API DisplayMode flagsToDisplayMode (const int display, const bool enabled3d)
     
    OP_API Point< int > flagsToPoint (const String &pointString, const String &pointExample)
     
    template<typename T >
T getDistance (const Array< T > &keypoints, const int person, const int elementA, const int elementB)
     
    template<typename T >
    void averageKeypoints (Array< T > &keypointsA, const Array< T > &keypointsB, const int personA)
     
    template<typename T >
    void scaleKeypoints (Array< T > &keypoints, const T scale)
     
    template<typename T >
    void scaleKeypoints2d (Array< T > &keypoints, const T scaleX, const T scaleY)
     
    template<typename T >
    void scaleKeypoints2d (Array< T > &keypoints, const T scaleX, const T scaleY, const T offsetX, const T offsetY)
     
    template<typename T >
    void renderKeypointsCpu (Array< T > &frameArray, const Array< T > &keypoints, const std::vector< unsigned int > &pairs, const std::vector< T > colors, const T thicknessCircleRatio, const T thicknessLineRatioWRTCircle, const std::vector< T > &poseScales, const T threshold)
     
    template<typename T >
    Rectangle< T > getKeypointsRectangle (const Array< T > &keypoints, const int person, const T threshold, const int firstIndex=0, const int lastIndex=-1)
     
    template<typename T >
T getAverageScore (const Array< T > &keypoints, const int person)
     
    template<typename T >
T getKeypointsArea (const Array< T > &keypoints, const int person, const T threshold)
     
    template<typename T >
    int getBiggestPerson (const Array< T > &keypoints, const T threshold)
     
    template<typename T >
    int getNonZeroKeypoints (const Array< T > &keypoints, const int person, const T threshold)
     
    template<typename T >
T getDistanceAverage (const Array< T > &keypoints, const int personA, const int personB, const T threshold)
     
    template<typename T >
T getDistanceAverage (const Array< T > &keypointsA, const int personA, const Array< T > &keypointsB, const int personB, const T threshold)
     
    template<typename T >
    Array< T > getKeypointsPerson (const Array< T > &keypoints, const int person, const bool noCopy=false)
     
    template<typename T >
    float getKeypointsRoi (const Array< T > &keypoints, const int personA, const int personB, const T threshold)
     
    template<typename T >
    float getKeypointsRoi (const Array< T > &keypointsA, const int personA, const Array< T > &keypointsB, const int personB, const T threshold)
     
    template<typename T >
    float getKeypointsRoi (const Rectangle< T > &rectangleA, const Rectangle< T > &rectangleB)
     
    OP_API void unrollArrayToUCharCvMat (Matrix &matResult, const Array< float > &array)
     
    OP_API void uCharCvMatToFloatPtr (float *floatPtrImage, const Matrix &matImage, const int normalize)
     
    OP_API double resizeGetScaleFactor (const Point< int > &initialSize, const Point< int > &targetSize)
     
    OP_API void keepRoiInside (Rectangle< int > &roi, const int imageWidth, const int imageHeight)
     
    OP_API void transpose (Matrix &matrix)
     
    OP_API void rotateAndFlipFrame (Matrix &frame, const double rotationAngle, const bool flipFrame=false)
     
    OP_API int getCvCapPropFrameCount ()
     
    OP_API int getCvCapPropFrameFps ()
     
    OP_API int getCvCapPropFrameWidth ()
     
    OP_API int getCvCapPropFrameHeight ()
     
    OP_API int getCvFourcc (const char c1, const char c2, const char c3, const char c4)
     
    OP_API int getCvImwriteJpegQuality ()
     
    OP_API int getCvImwritePngCompression ()
     
    OP_API int getCvLoadImageAnydepth ()
     
    OP_API int getCvLoadImageGrayScale ()
     
    template<typename TPointerContainer >
    bool checkNoNullNorEmpty (const TPointerContainer &tPointerContainer)
     
    OP_API std::chrono::time_point< std::chrono::high_resolution_clock > getTimerInit ()
     
    OP_API double getTimeSeconds (const std::chrono::time_point< std::chrono::high_resolution_clock > &timerInit)
     
    OP_API void printTime (const std::chrono::time_point< std::chrono::high_resolution_clock > &timerInit, const std::string &firstMessage, const std::string &secondMessage, const Priority priority)
     
    template<typename T >
    bool vectorsAreEqual (const std::vector< T > &vectorA, const std::vector< T > &vectorB)
     
    template<typename T >
    std::vector< T > mergeVectors (const std::vector< T > &vectorA, const std::vector< T > &vectorB)
     
    OP_API unsigned long long getLastNumber (const std::string &string)
     
    template<typename T >
    std::string toFixedLengthString (const T number, const unsigned long long stringLength=0)
     
    OP_API std::vector< std::string > splitString (const std::string &stringToSplit, const std::string &delimiter)
     
    OP_API std::string toLower (const std::string &string)
     
    OP_API std::string toUpper (const std::string &string)
     
    OP_API std::string remove0sFromString (const std::string &string)
     
    OP_API std::string getFirstNumberOnString (const std::string &string)
     
    OP_API void wrapperConfigureSanityChecks (WrapperStructPose &wrapperStructPose, const WrapperStructFace &wrapperStructFace, const WrapperStructHand &wrapperStructHand, const WrapperStructExtra &wrapperStructExtra, const WrapperStructInput &wrapperStructInput, const WrapperStructOutput &wrapperStructOutput, const WrapperStructGui &wrapperStructGui, const bool renderOutput, const bool userInputAndPreprocessingWsEmpty, const bool userOutputWsEmpty, const std::shared_ptr< Producer > &producerSharedPtr, const ThreadManagerMode threadManagerMode)
     
    OP_API void threadIdPP (unsigned long long &threadId, const bool multiThreadEnabled)
     
    template<typename TDatum , typename TDatums = std::vector<std::shared_ptr<TDatum>>, typename TDatumsSP = std::shared_ptr<TDatums>, typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
    void configureThreadManager (ThreadManager< TDatumsSP > &threadManager, const bool multiThreadEnabled, const ThreadManagerMode threadManagerMode, const WrapperStructPose &wrapperStructPose, const WrapperStructFace &wrapperStructFace, const WrapperStructHand &wrapperStructHand, const WrapperStructExtra &wrapperStructExtra, const WrapperStructInput &wrapperStructInput, const WrapperStructOutput &wrapperStructOutput, const WrapperStructGui &wrapperStructGui, const std::array< std::vector< TWorker >, int(WorkerType::Size)> &userWs, const std::array< bool, int(WorkerType::Size)> &userWsOnNewThread)
     
    template<typename TDatum , typename TDatums = std::vector<std::shared_ptr<TDatum>>, typename TDatumsSP = std::shared_ptr<TDatums>>
    void createMultiviewTDatum (TDatumsSP &tDatumsSP, unsigned long long &frameCounter, const CameraParameterReader &cameraParameterReader, const void *const cvMatPtr)
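
Several of the keypoint utilities listed above compose naturally, for example picking the largest detected person and clamping a region of interest around it. A minimal sketch, assuming only the declarations shown in this list (the 0.05f confidence threshold is an arbitrary value for illustration):

#include <openpose/headers.hpp>

op::Rectangle<int> biggestPersonRoi(const op::Array<float>& poseKeypoints,
                                    const int imageWidth, const int imageHeight)
{
    const float threshold = 0.05f;                        // confidence threshold (illustrative value)
    const int person = op::getBiggestPerson(poseKeypoints, threshold);
    if (person < 0)                                       // assumed "no person found" convention
        return op::Rectangle<int>{};
    const auto rect = op::getKeypointsRectangle(poseKeypoints, person, threshold);
    op::Rectangle<int> roi{int(rect.x), int(rect.y), int(rect.width), int(rect.height)};
    op::keepRoiInside(roi, imageWidth, imageHeight);      // clamp the rectangle to the image bounds
    return roi;
}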
     

Variables

    const auto FACE_MAX_FACES = POSE_MAX_PEOPLE
     
    const auto FACE_NUMBER_PARTS = 70u
     
    const std::vector< unsigned int > FACE_PAIRS_RENDER {FACE_PAIRS_RENDER_GPU}
     
    const std::vector< float > FACE_COLORS_RENDER {FACE_COLORS_RENDER_GPU}
     
    const std::vector< float > FACE_SCALES_RENDER {FACE_SCALES_RENDER_GPU}
     
    const auto FACE_CCN_DECREASE_FACTOR = 8.f
     
    const std::string FACE_PROTOTXT {"face/pose_deploy.prototxt"}
     
    const std::string FACE_TRAINED_MODEL {"face/pose_iter_116000.caffemodel"}
     
    const auto FACE_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT
     
    const auto FACE_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP
     
    const auto CUDA_NUM_THREADS = 512u
     
    const auto HAND_MAX_HANDS = 2*POSE_MAX_PEOPLE
     
    const auto HAND_NUMBER_PARTS = 21u
     
    const std::vector< unsigned int > HAND_PAIRS_RENDER {HAND_PAIRS_RENDER_GPU}
     
    const std::vector< float > HAND_COLORS_RENDER {HAND_COLORS_RENDER_GPU}
     
    const std::vector< float > HAND_SCALES_RENDER {HAND_SCALES_RENDER_GPU}
     
    const auto HAND_CCN_DECREASE_FACTOR = 8.f
     
    const std::string HAND_PROTOTXT {"hand/pose_deploy.prototxt"}
     
    const std::string HAND_TRAINED_MODEL {"hand/pose_iter_102000.caffemodel"}
     
    const auto HAND_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT
     
    const auto HAND_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP
     
    const auto POSE_MAX_PEOPLE = 127u
     
    const auto POSE_DEFAULT_ALPHA_KEYPOINT = 0.6f
     
    const auto POSE_DEFAULT_ALPHA_HEAT_MAP = 0.7f
     
    const auto H135 = 25
     
    const auto F135 = H135 + 40
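
CUDA_NUM_THREADS above is the default CUDA block size, and op::getNumberCudaBlocks from the function list derives the matching 1-D grid size from it. A small sketch; the include path openpose/gpu/cuda.hpp is assumed to be where these are declared:

#include <openpose/gpu/cuda.hpp>

unsigned int blocksForVolume(const unsigned int volume)
{
    // Number of blocks needed to cover `volume` elements with the default
    // op::CUDA_NUM_THREADS (512) threads per block; a custom block size could be
    // passed as the second argument, e.g. op::getNumberCudaBlocks(volume, 256u).
    return op::getNumberCudaBlocks(volume);
}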
     

    Typedef Documentation


◆ Wrapper

typedef WrapperT<BASE_DATUM> op::Wrapper

Definition at line 249 of file wrapper.hpp.
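
op::Wrapper is simply WrapperT instantiated with the default datum type. A minimal usage sketch, assuming the configure()/exec() members that WrapperT declares in wrapper.hpp (they are not reproduced on this page):

#include <openpose/headers.hpp>

int main()
{
    op::Wrapper opWrapper{op::ThreadManagerMode::Synchronous};
    opWrapper.configure(op::WrapperStructPose{});    // default body-pose settings
    opWrapper.configure(op::WrapperStructInput{});   // default input/producer settings
    opWrapper.configure(op::WrapperStructOutput{});  // default output settings
    opWrapper.exec();                                // assumed: blocks until the producer is exhausted
    return 0;
}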

    Enumeration Type Documentation


◆ CocoJsonFormat

enum op::CocoJsonFormat : unsigned char [strong]

Enumerator: Body, Hand21, Hand42, Face, Foot, Car, Size

Definition at line 14 of file enumClasses.hpp.

◆ DataFormat

enum op::DataFormat : unsigned char [strong]

Enumerator: Json, Xml, Yaml, Yml

Definition at line 6 of file enumClasses.hpp.
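
The DataFormat values select the serialization backend of op::saveData and op::loadData from the function list above. A short sketch; the matrix name "intrinsics" and the file name are made up for the example:

#include <openpose/headers.hpp>

op::Matrix saveAndReloadIntrinsics(const op::Matrix& intrinsics)
{
    // Serializes the matrix under the name "intrinsics" into a JSON file...
    op::saveData(intrinsics, "intrinsics", "camera_parameters", op::DataFormat::Json);
    // ...and reads it back with the same name, file and format.
    return op::loadData("intrinsics", "camera_parameters", op::DataFormat::Json);
}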

◆ Detector

enum op::Detector : unsigned char [strong]

Enumerator: Body, OpenCV, Provided, BodyWithTracking, Size

Definition at line 14 of file enumClasses.hpp.

◆ DisplayMode

enum op::DisplayMode : unsigned short [strong]

GUI display modes. An enum class with the different output screen options, e.g., 2-D, 3-D, all, or none.

Enumerator:
NoDisplay: No display.
DisplayAll: All (2-D and 3-D/Adam) displays.
Display2D: Only 2-D display.
Display3D: Only 3-D display.
DisplayAdam: Only Adam display.

Definition at line 10 of file enumClasses.hpp.
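
DisplayMode is normally derived from the demo's integer display flag through op::flagsToDisplayMode, declared in the function list above. A small sketch; the flag convention in the comment is an assumption taken from the demo documentation:

#include <openpose/headers.hpp>

op::DisplayMode displayModeFromFlag(const int displayFlag, const bool enabled3d)
{
    // displayFlag: -1 auto, 0 no display, 1 all, 2 only 2-D, 3 only 3-D (assumed demo convention).
    return op::flagsToDisplayMode(displayFlag, enabled3d);
}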

◆ ElementToRender

enum op::ElementToRender : unsigned char [strong]

Enumerator: Skeleton, Background, AddKeypoints, AddPAFs

Definition at line 34 of file enumClasses.hpp.

◆ ErrorMode

enum op::ErrorMode : unsigned char [strong]

Enumerator: StdRuntimeError, FileLogging, StdCerr, All

Definition at line 6 of file enumClasses.hpp.

◆ Extensions

enum op::Extensions : unsigned char [strong]

Enumerator: Images, Size

Definition at line 31 of file enumClasses.hpp.

◆ FullScreenMode

enum op::FullScreenMode : bool [strong]

Full screen modes. An enum class with the different full screen mode options, i.e., full screen or windowed.

Enumerator:
FullScreen: Full screen mode.
Windowed: Windowed mode, depending on the frame output size.

Definition at line 23 of file enumClasses.hpp.

◆ GpuMode

enum op::GpuMode : unsigned char [strong]

Enumerator: Cuda, OpenCL, NoGpu, Size

Definition at line 6 of file enumClasses.hpp.

◆ HeatMapType

enum op::HeatMapType : unsigned char [strong]

Enumerator: Parts, Background, PAFs

Definition at line 19 of file enumClasses.hpp.

◆ LogMode

enum op::LogMode : unsigned char [strong]

Enumerator: FileLogging, StdCout, All

Definition at line 14 of file enumClasses.hpp.

◆ PoseMode

enum op::PoseMode : unsigned char [strong]

Enumerator: Disabled, Enabled, NoNetwork, Size

Definition at line 6 of file enumClasses.hpp.

◆ PoseModel

enum op::PoseModel : unsigned char [strong]

An enum class in which all the possible types of pose estimation models are included.

Enumerator:
BODY_25: COCO + 6 foot keypoints + neck + lower abs model, with 25+1 components (see poseParameters.hpp for details).
COCO_18: COCO model + neck, with 18+1 components (see poseParameters.hpp for details).
MPI_15: MPI model, with 15+1 components (see poseParameters.hpp for details).
MPI_15_4: Variation of the MPI model, reduced number of CNN stages to 4: faster but less accurate.
BODY_19, BODY_19_X2, BODY_19N, BODY_25E, CAR_12, BODY_25D, BODY_23, CAR_22, BODY_19E, BODY_25B, BODY_135: Experimental. Do not use.
Size

Definition at line 9 of file enumClasses.hpp.
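
Only the first four models are intended for end users; the rest are flagged experimental. The model is usually parsed from its string name and then queried through the getPose*() helpers in the function list above. A short sketch:

#include <openpose/headers.hpp>
#include <string>

void printBodyPartNames()
{
    const op::PoseModel poseModel = op::flagsToPoseModel(op::String("BODY_25"));
    const auto numberBodyParts = op::getPoseNumberBodyParts(poseModel);   // 25 for BODY_25
    const auto& partMapping = op::getPoseBodyPartMapping(poseModel);      // index -> part name
    for (auto part = 0u; part < numberBodyParts; part++)
        op::opLog(std::to_string(part) + ": " + partMapping.at(part));
}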

◆ PoseProperty

enum op::PoseProperty : unsigned char [strong]

Enumerator: NMSThreshold, ConnectInterMinAboveThreshold, ConnectInterThreshold, ConnectMinSubsetCnt, ConnectMinSubsetScore, Size

Definition at line 32 of file enumClasses.hpp.

◆ Priority

enum op::Priority : unsigned char [strong]

Enumerator: None, Low, Normal, High, Max, NoOutput

Definition at line 21 of file enumClasses.hpp.
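
Priority is consumed by op::opLog and op::opLogIfDebug from the function list above; messages whose priority is below the configured threshold are dropped. A minimal sketch:

#include <openpose/headers.hpp>

void logExample()
{
    // Printed unless logging is disabled altogether (Priority::NoOutput threshold).
    op::opLog("Starting processing...", op::Priority::Max);
    // Printed only if the configured threshold is Priority::Normal or lower; the call
    // site is attached for easier debugging.
    op::opLog("Per-frame detail", op::Priority::Normal, __LINE__, __FUNCTION__, __FILE__);
}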

◆ ProducerFpsMode

enum op::ProducerFpsMode : bool [strong]

Enumerator:
OriginalFps: The frames will be extracted at the original source fps (frames might be skipped or repeated).
RetrievalFps: The frames will be extracted when the software retrieves them (frames will not be skipped or repeated).

Definition at line 6 of file enumClasses.hpp.

◆ ProducerProperty

enum op::ProducerProperty : unsigned char [strong]

Enumerator: AutoRepeat, Flip, Rotation, FrameStep, NumberViews, Size

Definition at line 14 of file enumClasses.hpp.

◆ ProducerType

enum op::ProducerType : unsigned char [strong]

Type of producers. An enum class in which all the possible types of Producer are included. In order to add a new Producer, include its name in this enum and add a new 'else if' statement inside ProducerFactory::createProducer().

Enumerator:
FlirCamera: Stereo FLIR (Point-Grey) camera reader. Based on the Spinnaker SDK.
ImageDirectory: An image directory reader. It is able to read images from a folder with an interface similar to the OpenCV cv::VideoCapture.
IPCamera: An IP camera frames extractor, extending the functionality of cv::VideoCapture.
Video: A video frames extractor, extending the functionality of cv::VideoCapture.
Webcam: A webcam frames extractor, extending the functionality of cv::VideoCapture.
None: No type defined. Default state when no specific Producer has been picked yet.

Definition at line 29 of file enumClasses.hpp.
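
op::createProducer, declared in the function list above, is the factory that turns a ProducerType into a concrete Producer. A short sketch; passing the webcam index as the producer string is an assumption based on the flagsToProducer declaration:

#include <openpose/headers.hpp>
#include <memory>
#include <string>

std::shared_ptr<op::Producer> openVideoFile(const std::string& videoPath)
{
    // Video file reader; the remaining parameters keep their defaults.
    return op::createProducer(op::ProducerType::Video, videoPath);
}

std::shared_ptr<op::Producer> openWebcam()
{
    // Webcam 0 at a requested 1280x720 resolution.
    return op::createProducer(op::ProducerType::Webcam, "0", op::Point<int>{1280, 720});
}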

◆ RenderMode

enum op::RenderMode : unsigned char [strong]

Enumerator: None, Auto, Cpu, Gpu

Definition at line 26 of file enumClasses.hpp.

◆ ScaleMode

enum op::ScaleMode : unsigned char [strong]

Enumerator: InputResolution, NetOutputResolution, OutputResolution, ZeroToOne, ZeroToOneFixedAspect, PlusMinusOne, PlusMinusOneFixedAspect, UnsignedChar, NoScale

Definition at line 6 of file enumClasses.hpp.
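
ScaleMode appears in two roles: the coordinate space of output keypoints and the value range of exported heat maps. Both are usually derived from demo-style integer flags with op::flagsToScaleMode and op::flagsToHeatMapScaleMode from the function list above; the concrete integer meanings in the comments are assumptions:

#include <openpose/headers.hpp>
#include <string>

void resolveScaleModes(const int keypointScaleFlag, const int heatMapScaleFlag)
{
    // E.g. 0 is assumed to map to ScaleMode::InputResolution for keypoints.
    const op::ScaleMode keypointScaleMode = op::flagsToScaleMode(keypointScaleFlag);
    // E.g. 2 is assumed to map to ScaleMode::ZeroToOne for heat maps.
    const op::ScaleMode heatMapScaleMode = op::flagsToHeatMapScaleMode(heatMapScaleFlag);
    op::opLog("Keypoint scale mode: " + std::to_string(int(keypointScaleMode)));
    op::opLog("Heat map scale mode: " + std::to_string(int(heatMapScaleMode)));
}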

◆ ThreadManagerMode

enum op::ThreadManagerMode : unsigned char [strong]

ThreadManager synchronization mode.

Enumerator:
Asynchronous: The first and last queues of ThreadManager are given to the user, so the user must push elements to the first queue and retrieve them from the last one after they have been processed. Recommended for prototyping environments (easier to test but more error-prone and potentially slower in performance).
AsynchronousIn: Similar to Asynchronous, but only the input (first) queue is given to the user.
AsynchronousOut: Similar to Asynchronous, but only the output (last) queue is given to the user.
Synchronous: Everything runs inside the ThreadManager. Recommended for production environments (more difficult to set up but faster in performance and less error-prone).

Definition at line 9 of file enumClasses.hpp.
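
The mode decides which ends of the internal queues the caller drives. A sketch of the fully asynchronous pattern, assuming the WrapperT members start(), emplaceAndPop() and stop() from wrapper.hpp (not listed on this page):

#include <openpose/headers.hpp>
#include <string>

void processSingleImage(const op::Matrix& inputImage)
{
    // Asynchronous: the user pushes input and pops output; no producer or GUI workers are added.
    op::Wrapper opWrapper{op::ThreadManagerMode::Asynchronous};
    opWrapper.configure(op::WrapperStructPose{});
    opWrapper.start();
    const auto datumsPtr = opWrapper.emplaceAndPop(inputImage);   // push one frame, wait for its result
    if (datumsPtr != nullptr && !datumsPtr->empty())
        op::opLog("People detected: " + std::to_string(datumsPtr->at(0)->poseKeypoints.getSize(0)));
    opWrapper.stop();
}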

◆ WorkerType

enum op::WorkerType : unsigned char [strong]

Enumerator: Input, PreProcessing, PostProcessing, Output, Size

Definition at line 23 of file enumClasses.hpp.

    Function Documentation


◆ addBkgChannel()

OP_API bool op::addBkgChannel (const PoseModel poseModel)

◆ averageKeypoints()

template<typename T >
void op::averageKeypoints (Array< T > & keypointsA, const Array< T > & keypointsB, const int personA)

◆ checkBool()

template<typename T >
void op::checkBool (const bool condition, const T & message = "", const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 10 of file check.hpp.

◆ checkEqual()

template<typename T , typename T1 , typename T2 >
void op::checkEqual (const T1 & conditionA, const T2 & conditionB, const T & message = "", const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 19 of file check.hpp.

◆ checkGreaterOrEqual()

template<typename T , typename T1 , typename T2 >
void op::checkGreaterOrEqual (const T1 & conditionA, const T2 & conditionB, const T & message = "", const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 59 of file check.hpp.

◆ checkGreaterThan()

template<typename T , typename T1 , typename T2 >
void op::checkGreaterThan (const T1 & conditionA, const T2 & conditionB, const T & message = "", const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 69 of file check.hpp.

◆ checkLessOrEqual()

template<typename T , typename T1 , typename T2 >
void op::checkLessOrEqual (const T1 & conditionA, const T2 & conditionB, const T & message = "", const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 39 of file check.hpp.

◆ checkLessThan()

template<typename T , typename T1 , typename T2 >
void op::checkLessThan (const T1 & conditionA, const T2 & conditionB, const T & message = "", const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 49 of file check.hpp.

◆ checkNoNullNorEmpty()

template<typename TPointerContainer >
bool op::checkNoNullNorEmpty (const TPointerContainer & tPointerContainer) [inline]

Definition at line 7 of file pointerContainer.hpp.

◆ checkNotEqual()

template<typename T , typename T1 , typename T2 >
void op::checkNotEqual (const T1 & conditionA, const T2 & conditionB, const T & message = "", const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 29 of file check.hpp.

◆ checkWorkerErrors()

OP_API void op::checkWorkerErrors ()
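
The check*() helpers fail through op::error when their condition does not hold, attaching the call site passed in the trailing arguments. A minimal sketch:

#include <openpose/headers.hpp>

void validateKeypoints(const op::Array<float>& keypoints)
{
    // Throws (via op::error) with file/line/function information if the checks fail.
    op::checkBool(!keypoints.empty(), "Keypoint array is empty.", __LINE__, __FUNCTION__, __FILE__);
    op::checkEqual(keypoints.getSize(2), 3, "Expected (x, y, score) triplets.",
                   __LINE__, __FUNCTION__, __FILE__);
}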

◆ COMPILE_TEMPLATE_DATUM() [1/50] to [50/50]

op::COMPILE_TEMPLATE_DATUM (PriorityQueue)
op::COMPILE_TEMPLATE_DATUM (Queue)
op::COMPILE_TEMPLATE_DATUM (SubThread)
op::COMPILE_TEMPLATE_DATUM (SubThreadNoQueue)
op::COMPILE_TEMPLATE_DATUM (SubThreadQueueIn)
op::COMPILE_TEMPLATE_DATUM (SubThreadQueueInOut)
op::COMPILE_TEMPLATE_DATUM (SubThreadQueueOut)
op::COMPILE_TEMPLATE_DATUM (Thread)
op::COMPILE_TEMPLATE_DATUM (ThreadManager)
op::COMPILE_TEMPLATE_DATUM (WCocoJsonSaver)
op::COMPILE_TEMPLATE_DATUM (WCvMatToOpInput)
op::COMPILE_TEMPLATE_DATUM (WCvMatToOpOutput)
op::COMPILE_TEMPLATE_DATUM (WFaceDetector)
op::COMPILE_TEMPLATE_DATUM (WFaceDetectorOpenCV)
op::COMPILE_TEMPLATE_DATUM (WFaceExtractorNet)
op::COMPILE_TEMPLATE_DATUM (WFaceRenderer)
op::COMPILE_TEMPLATE_DATUM (WFaceSaver)
op::COMPILE_TEMPLATE_DATUM (WFpsMax)
op::COMPILE_TEMPLATE_DATUM (WGui)
op::COMPILE_TEMPLATE_DATUM (WGui3D)
op::COMPILE_TEMPLATE_DATUM (WGuiInfoAdder)
op::COMPILE_TEMPLATE_DATUM (WHandDetector)
op::COMPILE_TEMPLATE_DATUM (WHandDetectorFromTxt)
op::COMPILE_TEMPLATE_DATUM (WHandDetectorTracking)
op::COMPILE_TEMPLATE_DATUM (WHandDetectorUpdate)
op::COMPILE_TEMPLATE_DATUM (WHandExtractorNet)
op::COMPILE_TEMPLATE_DATUM (WHandRenderer)
op::COMPILE_TEMPLATE_DATUM (WHandSaver)
op::COMPILE_TEMPLATE_DATUM (WHeatMapSaver)
op::COMPILE_TEMPLATE_DATUM (WIdGenerator)
op::COMPILE_TEMPLATE_DATUM (WImageSaver)
op::COMPILE_TEMPLATE_DATUM (WKeepTopNPeople)
op::COMPILE_TEMPLATE_DATUM (WKeypointScaler)
op::COMPILE_TEMPLATE_DATUM (WOpOutputToCvMat)
op::COMPILE_TEMPLATE_DATUM (Worker)
op::COMPILE_TEMPLATE_DATUM (WorkerConsumer)
op::COMPILE_TEMPLATE_DATUM (WorkerProducer)
op::COMPILE_TEMPLATE_DATUM (WPeopleJsonSaver)
op::COMPILE_TEMPLATE_DATUM (WPersonIdExtractor)
op::COMPILE_TEMPLATE_DATUM (WPoseExtractor)
op::COMPILE_TEMPLATE_DATUM (WPoseExtractorNet)
op::COMPILE_TEMPLATE_DATUM (WPoseRenderer)
op::COMPILE_TEMPLATE_DATUM (WPoseSaver)
op::COMPILE_TEMPLATE_DATUM (WPoseTriangulation)
op::COMPILE_TEMPLATE_DATUM (WQueueOrderer)
op::COMPILE_TEMPLATE_DATUM (WScaleAndSizeExtractor)
op::COMPILE_TEMPLATE_DATUM (WUdpSender)
op::COMPILE_TEMPLATE_DATUM (WVerbosePrinter)
op::COMPILE_TEMPLATE_DATUM (WVideoSaver)
op::COMPILE_TEMPLATE_DATUM (WVideoSaver3D)

◆ configureThreadManager()

template<typename TDatum , typename TDatums = std::vector<std::shared_ptr<TDatum>>, typename TDatumsSP = std::shared_ptr<TDatums>, typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
void op::configureThreadManager (ThreadManager< TDatumsSP > & threadManager, const bool multiThreadEnabled, const ThreadManagerMode threadManagerMode, const WrapperStructPose & wrapperStructPose, const WrapperStructFace & wrapperStructFace, const WrapperStructHand & wrapperStructHand, const WrapperStructExtra & wrapperStructExtra, const WrapperStructInput & wrapperStructInput, const WrapperStructOutput & wrapperStructOutput, const WrapperStructGui & wrapperStructGui, const std::array< std::vector< TWorker >, int(WorkerType::Size)> & userWs, const std::array< bool, int(WorkerType::Size)> & userWsOnNewThread)

Sets up the ThreadManager from the TWorkers (private internal function). After any configure() call, the TWorkers are initialized. This function resets the ThreadManager and adds the TWorkers to it. Common code for start() and exec().

Definition at line 98 of file wrapperAuxiliary.hpp.

◆ connectBodyPartsCpu()

template<typename T >
void op::connectBodyPartsCpu (Array< T > & poseKeypoints, Array< T > & poseScores, const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > & heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor = 1.f, const bool maximizePositives = false)

◆ connectBodyPartsGpu()

template<typename T >
void op::connectBodyPartsGpu (Array< T > & poseKeypoints, Array< T > & poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > & heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor, const bool maximizePositives, Array< T > pairScoresCpu, T * pairScoresGpuPtr, const unsigned int *const bodyPartPairsGpuPtr, const unsigned int *const mapIdxGpuPtr, const T *const peaksGpuPtr)

◆ connectBodyPartsOcl()

template<typename T >
void op::connectBodyPartsOcl (Array< T > & poseKeypoints, Array< T > & poseScores, const T *const heatMapGpuPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > & heatMapSize, const int maxPeaks, const T interMinAboveThreshold, const T interThreshold, const int minSubsetCnt, const T minSubsetScore, const T defaultNmsThreshold, const T scaleFactor = 1.f, const bool maximizePositives = false, Array< T > pairScoresCpu = Array< T >{}, T * pairScoresGpuPtr = nullptr, const unsigned int *const bodyPartPairsGpuPtr = nullptr, const unsigned int *const mapIdxGpuPtr = nullptr, const T *const peaksGpuPtr = nullptr, const int gpuID = 0)

◆ createMultiviewTDatum()

template<typename TDatum , typename TDatums = std::vector<std::shared_ptr<TDatum>>, typename TDatumsSP = std::shared_ptr<TDatums>>
void op::createMultiviewTDatum (TDatumsSP & tDatumsSP, unsigned long long & frameCounter, const CameraParameterReader & cameraParameterReader, const void *const cvMatPtr)

It fills the camera parameters and splits the cvMat depending on how many camera parameter matrices are found. For example usage, check examples/tutorial_api_cpp/11_asynchronous_custom_input_multi_camera.cpp

Definition at line 1226 of file wrapperAuxiliary.hpp.

◆ createPeopleVector()

template<typename T >
std::vector<std::pair<std::vector<int>, T> > op::createPeopleVector (const T *const heatMapPtr, const T *const peaksPtr, const PoseModel poseModel, const Point< int > & heatMapSize, const int maxPeaks, const T interThreshold, const T interMinAboveThreshold, const std::vector< unsigned int > & bodyPartPairs, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs, const T defaultNmsThreshold, const Array< T > & precomputedPAFs = Array< T >())

◆ createProducer()

OP_API std::shared_ptr<Producer> op::createProducer (const ProducerType producerType = ProducerType::None, const std::string & producerString = "", const Point< int > & cameraResolution = Point< int >{-1,-1}, const std::string & cameraParameterPath = "models/cameraParameters/", const bool undistortImage = true, const int numberViews = -1)

This function returns the desired producer given the input parameters.
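
A minimal sketch of building a producer from these parameters; the video path is only a placeholder (the demo video shipped with OpenPose) and the remaining arguments keep their documented defaults:

    #include <openpose/producer/headers.hpp>

    // Video-file producer with the default camera-parameter path and undistortion flag.
    const auto producerSharedPtr = op::createProducer(
        op::ProducerType::Video, "examples/media/video.avi");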

◆ cudaCheck()

OP_API void op::cudaCheck (const int line = -1, const std::string & function = "", const std::string & file = "")

◆ dataFormatToString()

OP_API std::string op::dataFormatToString (const DataFormat dataFormat)

◆ datumProducerConstructor()

OP_API void op::datumProducerConstructor (const std::shared_ptr< Producer > & producerSharedPtr, const unsigned long long frameFirst, const unsigned long long frameStep, const unsigned long long frameLast)

◆ datumProducerConstructorRunningAndGetDatumApplyPlayerControls()

OP_API void op::datumProducerConstructorRunningAndGetDatumApplyPlayerControls (const std::shared_ptr< Producer > & producerSharedPtr, const std::shared_ptr< std::pair< std::atomic< bool >, std::atomic< int >>> & videoSeekSharedPtr)

◆ datumProducerConstructorRunningAndGetDatumFrameIntegrity()

OP_API void op::datumProducerConstructorRunningAndGetDatumFrameIntegrity (Matrix & matrix)

◆ datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning()

OP_API bool op::datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning (const std::shared_ptr< Producer > & producerSharedPtr, const unsigned long long numberFramesToProcess, const unsigned long long globalCounter)

◆ datumProducerConstructorRunningAndGetNextFrameNumber()

OP_API unsigned long long op::datumProducerConstructorRunningAndGetNextFrameNumber (const std::shared_ptr< Producer > & producerSharedPtr)

◆ datumProducerConstructorTooManyConsecutiveEmptyFrames()

OP_API void op::datumProducerConstructorTooManyConsecutiveEmptyFrames (unsigned int & numberConsecutiveEmptyFrames, const bool emptyFrame)

◆ error() [1/2]

OP_API void op::error (const std::string & message, const int line = -1, const std::string & function = "", const std::string & file = "")

Differences between the different kinds of errors:

• error() is a normal error in the code.
• errorWorker() is an error that occurred on a thread. The machine will stop the threads, go back to the main thread, and then throw the error.
• errorDestructor() is an error that occurred in a destructor. Throwing an exception from a destructor provokes a core dump, so we simply output an error message via std::cerr.
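
The usual calling convention, as used throughout the OpenPose examples, forwards the caller's location so the log points at the right line, function, and file:

    #include <openpose/utilities/errorAndLog.hpp>

    void loadModel(const std::string& modelPath)
    {
        if (modelPath.empty())
            op::error("Model path is empty.", __LINE__, __FUNCTION__, __FILE__);
    }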

◆ error() [2/2]

template<typename T >
inline void op::error (const T & message, const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 46 of file errorAndLog.hpp.

◆ errorDestructor() [1/2]

OP_API void op::errorDestructor (const std::string & message, const int line = -1, const std::string & function = "", const std::string & file = "")

◆ errorDestructor() [2/2]

template<typename T >
inline void op::errorDestructor (const T & message, const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 72 of file errorAndLog.hpp.

◆ errorWorker() [1/2]

OP_API void op::errorWorker (const std::string & message, const int line = -1, const std::string & function = "", const std::string & file = "")

◆ errorWorker() [2/2]

template<typename T >
inline void op::errorWorker (const T & message, const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 60 of file errorAndLog.hpp.

◆ estimateAndSaveExtrinsics()

OP_API void op::estimateAndSaveExtrinsics (const std::string & parameterFolder, const std::string & imageFolder, const Point< int > & gridInnerCorners, const float gridSquareSizeMm, const int index0, const int index1, const bool imagesAreUndistorted, const bool combineCam0Extrinsics)

◆ estimateAndSaveIntrinsics()

OP_API void op::estimateAndSaveIntrinsics (const Point< int > & gridInnerCorners, const float gridSquareSizeMm, const int flags, const std::string & outputParameterFolder, const std::string & imageFolder, const std::string & serialNumber, const bool saveImagesWithCorners = false)

This function estimates and saves the intrinsic parameters (K and the distortion coefficients).

Parameters
    gridInnerCorners        The Point<int> of the board, i.e., the number of squares by width and height.
    gridSquareSizeMm        Floating-point number with the size of a square in your defined unit (point, millimeter, etc.).
    flags                   Integer with the OpenCV flags for calibration (e.g., CALIB_RATIONAL_MODEL, CALIB_THIN_PRISM_MODEL, or CALIB_TILTED_MODEL).
    outputParameterFolder   String with the folder where the resulting parameter file is written.

◆ estimateAndSaveSiftFile()

OP_API void op::estimateAndSaveSiftFile (const Point< int > & gridInnerCorners, const std::string & imageFolder, const int numberCameras, const bool saveImagesWithCorners = false)

◆ existDirectory()

OP_API bool op::existDirectory (const std::string & directoryPath)

◆ existFile()

OP_API bool op::existFile (const std::string & filePath)

◆ fastMax()

template<typename T >
inline T op::fastMax (const T a, const T b)

Definition at line 73 of file fastMath.hpp.

◆ fastMin()

template<typename T >
inline T op::fastMin (const T a, const T b)

Definition at line 79 of file fastMath.hpp.

◆ fastTruncate()

template<class T >
inline T op::fastTruncate (T value, T min = 0, T max = 1)

Definition at line 85 of file fastMath.hpp.
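
A small sketch of the fastMath helpers above; that fastTruncate clamps value into [min, max] is inferred from its name and default arguments rather than from its body, which is not reproduced on this page:

    #include <openpose/utilities/fastMath.hpp>

    const float alpha   = op::fastTruncate(1.3f, 0.f, 1.f); // assumed to clamp -> 1.f
    const int   larger  = op::fastMax(3, 7);                 // -> 7
    const int   smaller = op::fastMin(3, 7);                 // -> 3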

◆ flagsToDetector()

OP_API Detector op::flagsToDetector (const int detector)

◆ flagsToDisplayMode()

OP_API DisplayMode op::flagsToDisplayMode (const int display, const bool enabled3d)

◆ flagsToHeatMaps()

OP_API std::vector<HeatMapType> op::flagsToHeatMaps (const bool heatMapsAddParts = false, const bool heatMapsAddBkg = false, const bool heatMapsAddPAFs = false)

◆ flagsToHeatMapScaleMode()

OP_API ScaleMode op::flagsToHeatMapScaleMode (const int heatMapScaleMode)

◆ flagsToPoint()

OP_API Point<int> op::flagsToPoint (const String & pointString, const String & pointExample)

E.g., const Point<int> netInputSize = flagsToPoint(op::String(FLAGS_net_resolution), "-1x368");
E.g., const Point<int> resolution = flagsToPoint(resolutionString, "1280x720");

◆ flagsToPoseMode()

OP_API PoseMode op::flagsToPoseMode (const int poseModeInt)

◆ flagsToPoseModel()

OP_API PoseModel op::flagsToPoseModel (const String & poseModeString)

◆ flagsToProducer()

OP_API std::pair<ProducerType, String> op::flagsToProducer (const String & imageDirectory, const String & videoPath, const String & ipCameraPath = String(""), const int webcamIndex = -1, const bool flirCamera = false, const int flirCameraIndex = -1)

◆ flagsToProducerType()

OP_API ProducerType op::flagsToProducerType (const String & imageDirectory, const String & videoPath, const String & ipCameraPath, const int webcamIndex, const bool flirCamera)

◆ flagsToRenderMode()

OP_API RenderMode op::flagsToRenderMode (const int renderFlag, const bool gpuBuggy = false, const int renderPoseFlag = -2)

◆ flagsToScaleMode()

OP_API ScaleMode op::flagsToScaleMode (const int keypointScaleMode)

◆ formatAsDirectory()

OP_API std::string op::formatAsDirectory (const std::string & directoryPathString)

This function makes sure that directoryPathString is properly formatted, i.e., it changes all '\' into '/' and makes sure that the string finishes with '/'.

Parameters
    directoryPathString    std::string with the directory path to be formatted.

Returns
    std::string with the formatted directory path.
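
An illustration of the documented behavior (the expected outputs in the comments follow from the description above, not from a test run):

    #include <openpose/utilities/fileSystem.hpp>

    const auto posixStyle = op::formatAsDirectory("models/cameraParameters");     // "models/cameraParameters/"
    const auto winStyle   = op::formatAsDirectory("models\\cameraParameters\\");  // "models/cameraParameters/"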

◆ getAverageScore()

template<typename T >
T op::getAverageScore (const Array< T > & keypoints, const int person)

◆ getBiggestPerson()

template<typename T >
int op::getBiggestPerson (const Array< T > & keypoints, const T threshold)

◆ getCudaGpuNumber()

OP_API int op::getCudaGpuNumber ()

◆ getCvCapPropFrameCount()

OP_API int op::getCvCapPropFrameCount ()

Wrapper of CV_CAP_PROP_FRAME_COUNT to avoid leaving OpenCV dependencies on headers.

◆ getCvCapPropFrameFps()

OP_API int op::getCvCapPropFrameFps ()

Wrapper of CV_CAP_PROP_FRAME_FPS to avoid leaving OpenCV dependencies on headers.

◆ getCvCapPropFrameHeight()

OP_API int op::getCvCapPropFrameHeight ()

Wrapper of CV_CAP_PROP_FRAME_HEIGHT to avoid leaving OpenCV dependencies on headers.

◆ getCvCapPropFrameWidth()

OP_API int op::getCvCapPropFrameWidth ()

Wrapper of CV_CAP_PROP_FRAME_WIDTH to avoid leaving OpenCV dependencies on headers.

◆ getCvFourcc()

OP_API int op::getCvFourcc (const char c1, const char c2, const char c3, const char c4)

Wrapper of CV_FOURCC to avoid leaving OpenCV dependencies on headers.

◆ getCvImwriteJpegQuality()

OP_API int op::getCvImwriteJpegQuality ()

Wrapper of CV_IMWRITE_JPEG_QUALITY to avoid leaving OpenCV dependencies on headers.

◆ getCvImwritePngCompression()

OP_API int op::getCvImwritePngCompression ()

Wrapper of CV_IMWRITE_PNG_COMPRESSION to avoid leaving OpenCV dependencies on headers.

◆ getCvLoadImageAnydepth()

OP_API int op::getCvLoadImageAnydepth ()

Wrapper of CV_LOAD_IMAGE_ANYDEPTH to avoid leaving OpenCV dependencies on headers.

◆ getCvLoadImageGrayScale()

OP_API int op::getCvLoadImageGrayScale ()

Wrapper of CV_LOAD_IMAGE_GRAYSCALE to avoid leaving OpenCV dependencies on headers.

◆ getDistance()

template<typename T >
T op::getDistance (const Array< T > & keypoints, const int person, const int elementA, const int elementB)

◆ getDistanceAverage() [1/2]

template<typename T >
T op::getDistanceAverage (const Array< T > & keypoints, const int personA, const int personB, const T threshold)

◆ getDistanceAverage() [2/2]

template<typename T >
T op::getDistanceAverage (const Array< T > & keypointsA, const int personA, const Array< T > & keypointsB, const int personB, const T threshold)

◆ getFileExtension()

OP_API std::string op::getFileExtension (const std::string & fullPath)

This function extracts the extension from a full path. E.g., if fullPath is /media/document.txt, the output will be txt.

Parameters
    fullPath    std::string with the full path.

Returns
    std::string with the file extension.

◆ getFileNameAndExtension()

OP_API std::string op::getFileNameAndExtension (const std::string & fullPath)

This function extracts the file name and extension from a full path.

Parameters
    fullPath    std::string with the full path.

Returns
    std::string with the file name with extension.

◆ getFileNameNoExtension()

OP_API std::string op::getFileNameNoExtension (const std::string & fullPath)

This function extracts the file name (without extension) from a full path.

Parameters
    fullPath    std::string with the full path.

Returns
    std::string with the file name without extension.

◆ getFileParentFolderPath()

OP_API std::string op::getFileParentFolderPath (const std::string & fullPath)

This function extracts the full path of the folder in which the file is contained.

Parameters
    fullPath    std::string with the full path.

Returns
    std::string with the full path of the containing folder.

◆ getFilesOnDirectory() [1/3]

OP_API std::vector<std::string> op::getFilesOnDirectory (const std::string & directoryPath, const Extensions extensions)

This function extracts all the files in a directory path with the desired group of extensions (e.g., Extensions::Images).

Parameters
    directoryPath    std::string with the directory path.
    extensions       Extensions with the kind of extensions desired (e.g., Extensions::Images).

Returns
    std::vector<std::string> with the existing file names.

◆ getFilesOnDirectory() [2/3]

OP_API std::vector<std::string> op::getFilesOnDirectory (const std::string & directoryPath, const std::string & extension)

Analogous to getFilesOnDirectory(const std::string& directoryPath, const std::vector<std::string>& extensions) for 1 specific extension.

Parameters
    directoryPath    std::string with the directory path.
    extension        std::string with the extension of the desired files.

Returns
    std::vector<std::string> with the existing file names.

◆ getFilesOnDirectory() [3/3]

OP_API std::vector<std::string> op::getFilesOnDirectory (const std::string & directoryPath, const std::vector< std::string > & extensions = {})

This function extracts all the files in a directory path with the desired extensions. If no extension is specified, then all the file names are returned.

Parameters
    directoryPath    std::string with the directory path.
    extensions       std::vector<std::string> with the extensions of the desired files.

Returns
    std::vector<std::string> with the existing file names.

◆ getFirstNumberOnString()

OP_API std::string op::getFirstNumberOnString (const std::string & string)

◆ getFullFilePathNoExtension()

OP_API std::string op::getFullFilePathNoExtension (const std::string & fullPath)

This function extracts the full file path without its extension from a full file path.

Parameters
    fullPath    std::string with the full path.

Returns
    std::string with the full file path without extension.
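
Putting the path helpers above together on the example path used in this documentation, /media/document.txt (the commented outputs follow from the descriptions; the exact parent-folder form with a trailing '/' is an assumption):

    #include <string>
    #include <openpose/utilities/fileSystem.hpp>

    const std::string fullPath = "/media/document.txt";
    const auto extension    = op::getFileExtension(fullPath);           // "txt"
    const auto nameAndExt   = op::getFileNameAndExtension(fullPath);    // "document.txt"
    const auto nameOnly     = op::getFileNameNoExtension(fullPath);     // "document"
    const auto parentFolder = op::getFileParentFolderPath(fullPath);    // "/media/" (assumed)
    const auto noExtension  = op::getFullFilePathNoExtension(fullPath); // "/media/document"
    // All .txt files inside a folder:
    const auto txtFiles = op::getFilesOnDirectory("/media/", std::string{"txt"});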

◆ getGpuMode()

OP_API GpuMode op::getGpuMode ()

◆ getGpuNumber()

OP_API int op::getGpuNumber ()

◆ getIfInMainThreadOrEmpty()

OP_API bool op::getIfInMainThreadOrEmpty ()

◆ getIfNotInMainThreadOrEmpty()

OP_API bool op::getIfNotInMainThreadOrEmpty ()

◆ getKeypointsArea()

template<typename T >
T op::getKeypointsArea (const Array< T > & keypoints, const int person, const T threshold)

◆ getKeypointsPerson()

template<typename T >
Array<T> op::getKeypointsPerson (const Array< T > & keypoints, const int person, const bool noCopy = false)

Creates an Array<T> with the data of a specific person.

Parameters
    keypoints    Array<T> with the original data array to slice.
    person       Index of the person to extract.
    noCopy       Whether to avoid copying the data. Performing the copy never leads to undefined behavior; however, if noCopy == true, then:
                 1. It is faster, as no data copy is involved, but...
                 2. If the Array keypoints goes out of scope, then using the resulting Array provokes undefined behavior.
                 3. If the returned Array is modified, the information in the Array keypoints will also be modified.

Returns
    Array<T> with the same dimensions as keypoints except that the first dimension is 1. E.g., if keypoints is {p,k,m}, the resulting Array<T> is {1,k,m}.
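
A short sketch of slicing one person out of a pose keypoints array; the {people, parts, 3} layout (x, y, score) matches the output format described in the OpenPose documentation:

    #include <openpose/utilities/keypoint.hpp>

    void inspectFirstPerson(const op::Array<float>& poseKeypoints)
    {
        if (!poseKeypoints.empty())
        {
            // noCopy == false: safe, independent copy of person 0 with dimensions {1, parts, 3}.
            const auto person0 = op::getKeypointsPerson(poseKeypoints, 0, false);
        }
    }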

◆ getKeypointsRectangle()

template<typename T >
Rectangle<T> op::getKeypointsRectangle (const Array< T > & keypoints, const int person, const T threshold, const int firstIndex = 0, const int lastIndex = -1)

◆ getKeypointsRoi() [1/3]

template<typename T >
float op::getKeypointsRoi (const Array< T > & keypoints, const int personA, const int personB, const T threshold)

◆ getKeypointsRoi() [2/3]

template<typename T >
float op::getKeypointsRoi (const Array< T > & keypointsA, const int personA, const Array< T > & keypointsB, const int personB, const T threshold)

◆ getKeypointsRoi() [3/3]

template<typename T >
float op::getKeypointsRoi (const Rectangle< T > & rectangleA, const Rectangle< T > & rectangleB)

◆ getLastNumber()

OP_API unsigned long long op::getLastNumber (const std::string & string)

◆ getNonZeroKeypoints()

template<typename T >
int op::getNonZeroKeypoints (const Array< T > & keypoints, const int person, const T threshold)

◆ getNumberCudaBlocks()

inline unsigned int op::getNumberCudaBlocks (const unsigned int totalRequired, const unsigned int numberCudaThreads = CUDA_NUM_THREADS)

Definition at line 15 of file cuda.hpp.
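
How a CUDA launch is typically sized with this helper; the ceil-division formula in the comment is the usual idiom and an assumption here, since the one-line body is not reproduced on this page:

    #include <openpose/gpu/cuda.hpp>

    void launchSizingExample(const unsigned int totalRequired)
    {
        // numberBlocks ~= ceil(totalRequired / CUDA_NUM_THREADS)
        const auto numberBlocks = op::getNumberCudaBlocks(totalRequired);
        (void)numberBlocks; // e.g., kernel<<<numberBlocks, CUDA_NUM_THREADS>>>(...);
    }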

◆ getNumberCudaThreadsAndBlocks()

OP_API void op::getNumberCudaThreadsAndBlocks (dim3 & numberCudaThreads, dim3 & numberCudaBlocks, const Point< unsigned int > & frameSize)

◆ getNumberElementsToRender()

OP_API unsigned int op::getNumberElementsToRender (const PoseModel poseModel)

◆ getPoseBodyPartMapping()

OP_API const std::map<unsigned int, std::string>& op::getPoseBodyPartMapping (const PoseModel poseModel)

◆ getPoseBodyPartPairsRender()

OP_API const std::vector<unsigned int>& op::getPoseBodyPartPairsRender (const PoseModel poseModel)

◆ getPoseColors()

OP_API const std::vector<float>& op::getPoseColors (const PoseModel poseModel)

◆ getPoseDefaultConnectInterMinAboveThreshold()

OP_API float op::getPoseDefaultConnectInterMinAboveThreshold (const bool maximizePositives = false)

◆ getPoseDefaultConnectInterThreshold()

OP_API float op::getPoseDefaultConnectInterThreshold (const PoseModel poseModel, const bool maximizePositives = false)

◆ getPoseDefaultConnectMinSubsetScore()

OP_API float op::getPoseDefaultConnectMinSubsetScore (const bool maximizePositives = false)

◆ getPoseDefaultMinSubsetCnt()

OP_API unsigned int op::getPoseDefaultMinSubsetCnt (const bool maximizePositives = false)

◆ getPoseDefaultNmsThreshold()

OP_API float op::getPoseDefaultNmsThreshold (const PoseModel poseModel, const bool maximizePositives = false)

◆ getPoseMapIndex()

OP_API const std::vector<unsigned int>& op::getPoseMapIndex (const PoseModel poseModel)

◆ getPoseMaxPeaks()

OP_API unsigned int op::getPoseMaxPeaks ()

◆ getPoseNetDecreaseFactor()

OP_API float op::getPoseNetDecreaseFactor (const PoseModel poseModel)

◆ getPoseNumberBodyParts()

OP_API unsigned int op::getPoseNumberBodyParts (const PoseModel poseModel)

◆ getPosePartPairs()

OP_API const std::vector<unsigned int>& op::getPosePartPairs (const PoseModel poseModel)

◆ getPoseProtoTxt()

OP_API const std::string& op::getPoseProtoTxt (const PoseModel poseModel)

◆ getPoseScales()

OP_API const std::vector<float>& op::getPoseScales (const PoseModel poseModel)

◆ getPoseTrainedModel()

OP_API const std::string& op::getPoseTrainedModel (const PoseModel poseModel)

◆ getThreadId()

OP_API std::string op::getThreadId ()

◆ getTimerInit()

OP_API std::chrono::time_point<std::chrono::high_resolution_clock> op::getTimerInit ()

◆ getTimeSeconds()

OP_API double op::getTimeSeconds (const std::chrono::time_point< std::chrono::high_resolution_clock > & timerInit)

◆ keepRoiInside()

OP_API void op::keepRoiInside (Rectangle< int > & roi, const int imageWidth, const int imageHeight)

◆ loadData() [1/2]

OP_API Matrix op::loadData (const std::string & cvMatName, const std::string & fileNameNoExtension, const DataFormat dataFormat)

◆ loadData() [2/2]

OP_API std::vector<Matrix> op::loadData (const std::vector< std::string > & cvMatNames, const std::string & fileNameNoExtension, const DataFormat dataFormat)

◆ loadHandDetectorTxt()

OP_API std::vector<std::array<Rectangle<float>, 2> > op::loadHandDetectorTxt (const std::string & txtFilePath)

◆ loadImage()

OP_API Matrix op::loadImage (const std::string & fullFilePath, const int openCvFlags = getCvLoadImageAnydepth())
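
A minimal sketch of reading an image into an op::Matrix; the sample path is the demo image shipped with OpenPose and is used here only as a placeholder:

    #include <openpose/filestream/headers.hpp>
    #include <openpose/utilities/errorAndLog.hpp>

    op::Matrix readDemoImage()
    {
        // Placeholder path: a demo image shipped under examples/media/.
        const auto image = op::loadImage("examples/media/COCO_val2014_000000000192.jpg");
        if (image.empty())
            op::error("Could not load the image.", __LINE__, __FUNCTION__, __FILE__);
        return image;
    }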

◆ makeDirectory()

OP_API void op::makeDirectory (const std::string & directoryPath)

◆ maximumCpu()

template<typename T >
void op::maximumCpu (T * targetPtr, const T *const sourcePtr, const std::array< int, 4 > & targetSize, const std::array< int, 4 > & sourceSize)

◆ maximumGpu()

template<typename T >
void op::maximumGpu (T * targetPtr, const T *const sourcePtr, const std::array< int, 4 > & targetSize, const std::array< int, 4 > & sourceSize)

◆ mergeVectors()

template<typename T >
std::vector<T> op::mergeVectors (const std::vector< T > & vectorA, const std::vector< T > & vectorB)

std::vector<T> concatenator. Auxiliary function that concatenates std::vectors of any class type T. It assumes basic copy semantics (ideal for smart pointers, raw pointers, etc.), so note that the copies still share the same internal data. It will not work for elements that cannot be copied.

Parameters
    vectorA    First std::vector<T> to be concatenated.
    vectorB    Second std::vector<T> to be concatenated.

Returns
    Concatenated std::vector<T> of both vectorA and vectorB.

Definition at line 40 of file standard.hpp.
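
A short example of the shared-data point made above: merging vectors of shared pointers copies the pointers, so the merged vector refers to the same pointees as vectorA and vectorB:

    #include <memory>
    #include <vector>
    #include <openpose/utilities/standard.hpp>

    const std::vector<std::shared_ptr<int>> vectorA{std::make_shared<int>(1)};
    const std::vector<std::shared_ptr<int>> vectorB{std::make_shared<int>(2)};
    const auto merged = op::mergeVectors(vectorA, vectorB); // merged.size() == 2; shares the two ints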

◆ nmsCpu()

template<typename T >
void op::nmsCpu (T * targetPtr, int * kernelPtr, const T *const sourcePtr, const T threshold, const std::array< int, 4 > & targetSize, const std::array< int, 4 > & sourceSize, const Point< T > & offset)

◆ nmsGpu()

template<typename T >
void op::nmsGpu (T * targetPtr, int * kernelPtr, const T *const sourcePtr, const T threshold, const std::array< int, 4 > & targetSize, const std::array< int, 4 > & sourceSize, const Point< T > & offset)

◆ nmsOcl()

template<typename T >
void op::nmsOcl (T * targetPtr, uint8_t * kernelGpuPtr, uint8_t * kernelCpuPtr, const T *const sourcePtr, const T threshold, const std::array< int, 4 > & targetSize, const std::array< int, 4 > & sourceSize, const Point< T > & offset, const int gpuID = 0)

◆ opLog() [1/2]

OP_API void op::opLog (const std::string & message, const Priority priority = Priority::Max, const int line = -1, const std::string & function = "", const std::string & file = "")

◆ opLog() [2/2]

template<typename T >
inline void op::opLog (const T & message, const Priority priority = Priority::Max, const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 86 of file errorAndLog.hpp.

◆ opLogIfDebug()

template<typename T >
inline void op::opLogIfDebug (const T & message, const Priority priority = Priority::Max, const int line = -1, const std::string & function = "", const std::string & file = "")

Definition at line 97 of file errorAndLog.hpp.
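
A sketch of the usual logging pattern from the OpenPose examples; op::Priority::High and op::Priority::Low are assumed to be valid enum values alongside the Priority::Max default shown above:

    #include <openpose/utilities/errorAndLog.hpp>

    void logExample()
    {
        op::opLog("Starting OpenPose demo...", op::Priority::High);
        // Assumption: opLogIfDebug only logs in debug builds, as its name suggests.
        op::opLogIfDebug("Per-frame detail.", op::Priority::Low,
                         __LINE__, __FUNCTION__, __FILE__);
    }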

◆ pafPtrIntoVector()

template<typename T >
std::vector<std::tuple<T, T, int, int, int> > op::pafPtrIntoVector (const Array< T > & pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > & bodyPartPairs, const unsigned int numberBodyPartPairs)

◆ pafVectorIntoPeopleVector()

template<typename T >
std::vector<std::pair<std::vector<int>, T> > op::pafVectorIntoPeopleVector (const std::vector< std::tuple< T, T, int, int, int >> & pairScores, const T *const peaksPtr, const int maxPeaks, const std::vector< unsigned int > & bodyPartPairs, const unsigned int numberBodyParts)

◆ peopleVectorToPeopleArray()

template<typename T >
void op::peopleVectorToPeopleArray (Array< T > & poseKeypoints, Array< T > & poseScores, const T scaleFactor, const std::vector< std::pair< std::vector< int >, T >> & subsets, const std::vector< int > & validSubsetIndexes, const T *const peaksPtr, const int numberPeople, const unsigned int numberBodyParts, const unsigned int numberBodyPartPairs)

◆ poseBodyPartMapStringToKey() [1/2]

OP_API unsigned int op::poseBodyPartMapStringToKey (const PoseModel poseModel, const std::string & string)

◆ poseBodyPartMapStringToKey() [2/2]

OP_API unsigned int op::poseBodyPartMapStringToKey (const PoseModel poseModel, const std::vector< std::string > & strings)

◆ positiveCharRound()

template<typename T >
inline char op::positiveCharRound (const T a)

Definition at line 17 of file fastMath.hpp.

◆ positiveIntRound()

template<typename T >
inline int op::positiveIntRound (const T a)

Definition at line 29 of file fastMath.hpp.

◆ positiveLongLongRound()

template<typename T >
inline long long op::positiveLongLongRound (const T a)

Definition at line 41 of file fastMath.hpp.

◆ positiveLongRound()

template<typename T >
inline long op::positiveLongRound (const T a)

Definition at line 35 of file fastMath.hpp.

◆ positiveSCharRound()

template<typename T >
inline signed char op::positiveSCharRound (const T a)

Definition at line 23 of file fastMath.hpp.
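
These helpers round a non-negative value to the named integer type; the "add 0.5 and truncate" behavior in the comments is inferred from the "positive" prefix, since the one-line bodies are not reproduced on this page:

    #include <openpose/utilities/fastMath.hpp>

    const int       scaledWidth  = op::positiveIntRound(655.7);            // assumed -> 656
    const long long scaledFrames = op::positiveLongLongRound(29.97 * 10.); // assumed -> 300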

◆ printTime()

OP_API void op::printTime (const std::chrono::time_point< std::chrono::high_resolution_clock > & timerInit, const std::string & firstMessage, const std::string & secondMessage, const Priority priority)

◆ recenter()

template<typename T >
Rectangle<T> op::recenter (const Rectangle< T > & rectangle, const T newWidth, const T newHeight)

◆ refineAndSaveExtrinsics()

OP_API void op::refineAndSaveExtrinsics (const std::string & parameterFolder, const std::string & imageFolder, const Point< int > & gridInnerCorners, const float gridSquareSizeMm, const int numberCameras, const bool imagesAreUndistorted, const bool saveImagesWithCorners = false)

◆ remove0sFromString()

OP_API std::string op::remove0sFromString (const std::string & string)

◆ removeAllOcurrencesOfSubString()

OP_API void op::removeAllOcurrencesOfSubString (std::string & stringToModify, const std::string & substring)

◆ removePeopleBelowThresholdsAndFillFaces()

template<typename T >
void op::removePeopleBelowThresholdsAndFillFaces (std::vector< int > & validSubsetIndexes, int & numberPeople, std::vector< std::pair< std::vector< int >, T >> & subsets, const unsigned int numberBodyParts, const int minSubsetCnt, const T minSubsetScore, const bool maximizePositives, const T *const peaksPtr)

◆ removeSpecialsCharacters()

OP_API std::string op::removeSpecialsCharacters (const std::string & stringToVariate)

◆ renderFaceKeypointsCpu()

OP_API void op::renderFaceKeypointsCpu (Array< float > & frameArray, const Array< float > & faceKeypoints, const float renderThreshold)

◆ renderFaceKeypointsGpu()

void op::renderFaceKeypointsGpu (float * framePtr, float * maxPtr, float * minPtr, float * scalePtr, const Point< unsigned int > & frameSize, const float *const facePtr, const int numberPeople, const float renderThreshold, const float alphaColorToAdd = FACE_DEFAULT_ALPHA_KEYPOINT)

◆ renderHandKeypointsCpu()

OP_API void op::renderHandKeypointsCpu (Array< float > & frameArray, const std::array< Array< float >, 2 > & handKeypoints, const float renderThreshold)

◆ renderHandKeypointsGpu()

void op::renderHandKeypointsGpu (float * framePtr, float * maxPtr, float * minPtr, float * scalePtr, const Point< unsigned int > & frameSize, const float *const handsPtr, const int numberHands, const float renderThreshold, const float alphaColorToAdd = HAND_DEFAULT_ALPHA_KEYPOINT)

◆ renderKeypointsCpu()

template<typename T >
void op::renderKeypointsCpu (Array< T > & frameArray, const Array< T > & keypoints, const std::vector< unsigned int > & pairs, const std::vector< T > colors, const T thicknessCircleRatio, const T thicknessLineRatioWRTCircle, const std::vector< T > & poseScales, const T threshold)

◆ renderPoseDistanceGpu()

void op::renderPoseDistanceGpu (float * framePtr, const Point< unsigned int > & frameSize, const float *const heatMapPtr, const Point< int > & heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP)

◆ renderPoseHeatMapGpu()

void op::renderPoseHeatMapGpu (float * frame, const Point< unsigned int > & frameSize, const float *const heatMapPtr, const Point< int > & heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP)

◆ renderPoseHeatMapsGpu()

void op::renderPoseHeatMapsGpu (float * frame, const PoseModel poseModel, const Point< unsigned int > & frameSize, const float *const heatMapPtr, const Point< int > & heatMapSize, const float scaleToKeepRatio, const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP)

◆ renderPoseKeypointsCpu()

OP_API void op::renderPoseKeypointsCpu (Array< float > & frameArray, const Array< float > & poseKeypoints, const PoseModel poseModel, const float renderThreshold, const bool blendOriginalFrame = true)

◆ renderPoseKeypointsGpu()

void op::renderPoseKeypointsGpu (float * framePtr, float * maxPtr, float * minPtr, float * scalePtr, const PoseModel poseModel, const int numberPeople, const Point< unsigned int > & frameSize, const float *const posePtr, const float renderThreshold, const bool googlyEyes = false, const bool blendOriginalFrame = true, const float alphaBlending = POSE_DEFAULT_ALPHA_KEYPOINT)

◆ renderPosePAFGpu()

void op::renderPosePAFGpu (float * framePtr, const PoseModel poseModel, const Point< unsigned int > & frameSize, const float *const heatMapPtr, const Point< int > & heatMapSize, const float scaleToKeepRatio, const int part, const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP)

◆ renderPosePAFsGpu()

void op::renderPosePAFsGpu (float * framePtr, const PoseModel poseModel, const Point< unsigned int > & frameSize, const float *const heatMapPtr, const Point< int > & heatMapSize, const float scaleToKeepRatio, const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP)

    ◆ reorderAndNormalize()

    + +
    +
    +
    +template<typename T >
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    void op::reorderAndNormalize (T * targetPtr,
    const unsigned char *const srcPtr,
    const int width,
    const int height,
    const int channels 
    )
    +
    + +
    +
    + +

    ◆ replaceAll()

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    OP_API void op::replaceAll (std::string & stringText,
    const char charToChange,
    const char charToAdd 
    )

◆ resizeAndMergeCpu()

template<typename T >
    void op::resizeAndMergeCpu (T * targetPtr,
    const std::vector< const T * > & sourcePtrs,
    const std::array< int, 4 > & targetSize,
    const std::vector< std::array< int, 4 >> & sourceSizes,
    const std::vector< T > & scaleInputToNetInputs = {1.f} 
    )

◆ resizeAndMergeGpu()

template<typename T >
    void op::resizeAndMergeGpu (T * targetPtr,
    const std::vector< const T * > & sourcePtrs,
    const std::array< int, 4 > & targetSize,
    const std::vector< std::array< int, 4 >> & sourceSizes,
    const std::vector< T > & scaleInputToNetInputs = {1.f} 
    )

◆ resizeAndMergeOcl()

template<typename T >
    void op::resizeAndMergeOcl (T * targetPtr,
    const std::vector< const T * > & sourcePtrs,
    std::vector< T * > & sourceTempPtrs,
    const std::array< int, 4 > & targetSize,
    const std::vector< std::array< int, 4 >> & sourceSizes,
    const std::vector< T > & scaleInputToNetInputs = {1.f},
    const int gpuID = 0 
    )

◆ resizeAndPadRbgGpu() [1/2]

template<typename T >
    void op::resizeAndPadRbgGpu (T * targetPtr,
    const T *const srcPtr,
    const int sourceWidth,
    const int sourceHeight,
    const int targetWidth,
    const int targetHeight,
    const T scaleFactor 
    )

◆ resizeAndPadRbgGpu() [2/2]

template<typename T >
    void op::resizeAndPadRbgGpu (T * targetPtr,
    const unsigned char *const srcPtr,
    const int sourceWidth,
    const int sourceHeight,
    const int targetWidth,
    const int targetHeight,
    const T scaleFactor 
    )

◆ resizeGetScaleFactor()

    OP_API double op::resizeGetScaleFactor (const Point< int > & initialSize,
    const Point< int > & targetSize 
    )
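
A minimal sketch of how this helper is typically used (the resolutions are arbitrary example values):

    #include <openpose/headers.hpp>
    #include <iostream>

    int main()
    {
        // Scale needed to fit a 1920x1080 frame into a 656x368 net input while
        // keeping the aspect ratio.
        const op::Point<int> initialSize{1920, 1080};
        const op::Point<int> targetSize{656, 368};
        const auto scaleFactor = op::resizeGetScaleFactor(initialSize, targetSize);
        std::cout << "scale factor: " << scaleFactor << std::endl;
        return 0;
    }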

◆ rotateAndFlipFrame()

OP_API void op::rotateAndFlipFrame (Matrix & frame,
    const double rotationAngle,
    const bool flipFrame = false 
    )

It performs rotation and flipping over the desired Mat.

Parameters
    cvMat    Mat with the frame matrix to be rotated and/or flipped.
    rotationAngle    How much the cvMat element should be rotated. 0 would mean no rotation.
    flipFrame    Whether to flip the cvMat element. Set to false to disable it.
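
A minimal sketch combining this function with loadImage and saveImage from the same namespace (file names are placeholders):

    #include <openpose/headers.hpp>

    int main()
    {
        op::Matrix frame = op::loadImage("input.png");
        op::rotateAndFlipFrame(frame, 90., /*flipFrame*/ true); // rotate 90 degrees and mirror
        op::saveImage(frame, "rotated.png");
        return 0;
    }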

    ◆ saveData() [1/2]

OP_API void op::saveData (const Matrix & opMat,
    const std::string cvMatName,
    const std::string & fileNameNoExtension,
    const DataFormat dataFormat 
    )
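
An illustrative sketch of saving one Matrix under a named key (the key, file name and the use of loadImage to obtain a Matrix are arbitrary choices for the example):

    #include <openpose/headers.hpp>

    int main()
    {
        const op::Matrix someMatrix = op::loadImage("pattern.png"); // any op::Matrix works here
        // Stores the matrix under the key "cameraMatrix" in a JSON file named after
        // fileNameNoExtension (e.g. calibration.json).
        op::saveData(someMatrix, "cameraMatrix", "calibration", op::DataFormat::Json);
        return 0;
    }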

◆ saveData() [2/2]

    OP_API void op::saveData (const std::vector< Matrix > & opMats,
    const std::vector< std::string > & cvMatNames,
    const std::string & fileNameNoExtension,
    const DataFormat dataFormat 
    )

◆ saveFloatArray()

    OP_API void op::saveFloatArray (const Array< float > & array,
    const std::string & fullFilePath 
    )

◆ saveImage()

OP_API void op::saveImage (const Matrix & matrix,
    const std::string & fullFilePath,
    const std::vector< int > & openCvCompressionParams = {getCvImwriteJpegQuality(), 100, getCvImwritePngCompression(), 9} 
    )

◆ savePeopleJson() [1/2]

    OP_API void op::savePeopleJson (const Array< float > & keypoints,
    const std::vector< std::vector< std::array< float, 3 >>> & candidates,
    const std::string & keypointName,
    const std::string & fileName,
    const bool humanReadable 
    )
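
An illustrative sketch of dumping the body keypoints of one frame to a people JSON file (the key name, output path and empty candidate list are assumptions for the example):

    #include <openpose/headers.hpp>
    #include <array>
    #include <vector>

    void writePoseJson(const op::Array<float>& poseKeypoints)
    {
        const std::vector<std::vector<std::array<float, 3>>> candidates{}; // no part candidates
        op::savePeopleJson(poseKeypoints, candidates, "pose_keypoints_2d",
                           "frame_000000_keypoints.json", /*humanReadable*/ true);
    }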

◆ savePeopleJson() [2/2]

    OP_API void op::savePeopleJson (const std::vector< std::pair< Array< float >, std::string >> & keypointVector,
    const std::vector< std::vector< std::array< float, 3 >>> & candidates,
    const std::string & fileName,
    const bool humanReadable 
    )

◆ scaleKeypoints()

template<typename T >
    void op::scaleKeypoints (Array< T > & keypoints,
    const T scale 
    )

◆ scaleKeypoints2d() [1/2]

template<typename T >
    void op::scaleKeypoints2d (Array< T > & keypoints,
    const T scaleX,
    const T scaleY 
    )

◆ scaleKeypoints2d() [2/2]

template<typename T >
    void op::scaleKeypoints2d (Array< T > & keypoints,
    const T scaleX,
    const T scaleY,
    const T offsetX,
    const T offsetY 
    )
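
A usage sketch of the scale-plus-offset overload family (the resolutions are made-up example values):

    #include <openpose/headers.hpp>

    // Map keypoints detected on a 656x368 net input back to a 1280x720 frame.
    void rescaleToFrame(op::Array<float>& poseKeypoints)
    {
        const auto scaleX = 1280.f / 656.f;
        const auto scaleY = 720.f / 368.f;
        op::scaleKeypoints2d(poseKeypoints, scaleX, scaleY);              // scale only
        // op::scaleKeypoints2d(poseKeypoints, scaleX, scaleY, 0.f, 0.f); // scale + offset
    }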

◆ setMainThread()

    OP_API void op::setMainThread ()

◆ splitString()

    OP_API std::vector<std::string> op::splitString (const std::string & stringToSplit,
    const std::string & delimiter 
    )
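
A short usage sketch (the input string mimics a typical "-1x368" resolution flag):

    #include <openpose/headers.hpp>
    #include <iostream>

    int main()
    {
        const auto tokens = op::splitString("-1x368", "x");
        for (const auto& token : tokens)
            std::cout << token << std::endl; // prints "-1" then "368"
        return 0;
    }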

◆ stringToDataFormat()

    OP_API DataFormat op::stringToDataFormat (const std::string & dataFormat)

◆ threadIdPP()

    OP_API void op::threadIdPP (unsigned long long & threadId,
    const bool multiThreadEnabled 
    )

Thread ID increase (private internal function). In multi-threading mode it increases the thread ID; in single-threading mode (used for debugging) it leaves it unchanged. Note that mThreadId must be re-initialized to 0 before starting a new Wrapper(T) configuration.

Parameters
    threadId    unsigned long long element with the current thread id value. It will be edited to the next desired thread id number.

    ◆ toFixedLengthString()

template<typename T >
    std::string op::toFixedLengthString (const T number,
    const unsigned long long stringLength = 0 
    )

This template function turns an integer number into a fixed-length std::string.

Parameters
    number    T integer corresponding to the integer to be formatted.
    stringLength    unsigned long long indicating the final length. If 0, the final length is the original number length.

Returns
    std::string with the formatted value.
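
A sketch of the typical use, zero-padding a frame index for file names (the width of 6 is an arbitrary choice):

    #include <openpose/headers.hpp>
    #include <iostream>

    int main()
    {
        const auto fileIndex = op::toFixedLengthString(42, 6ull);
        std::cout << fileIndex << std::endl; // "000042"
        return 0;
    }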

    ◆ toLower()

    OP_API std::string op::toLower (const std::string & string)

◆ toUpper()

    OP_API std::string op::toUpper (const std::string & string)

◆ transpose()

OP_API void op::transpose (Matrix & matrix)

◆ tToString()

template<typename T >
    std::string op::tToString (const T & message)

    Definition at line 21 of file errorAndLog.hpp.


    ◆ uCharCvMatToFloatPtr()

    OP_API void op::uCharCvMatToFloatPtr (float * floatPtrImage,
const Matrix & matImage,
    const int normalize 
    )

◆ uCharImageCast()

template<typename T >
    void op::uCharImageCast (unsigned char * targetPtr,
    const T *const srcPtr,
    const int volume 
    )

◆ uCharRound()

template<typename T >
    unsigned char op::uCharRound (const T a)
inline

    Definition at line 48 of file fastMath.hpp.
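
The uCharRound/uIntRound/uLongLongRound/ulongRound family are small rounding helpers from fastMath.hpp; a usage sketch with arbitrary input values:

    #include <openpose/headers.hpp>
    #include <iostream>

    int main()
    {
        std::cout << static_cast<int>(op::uCharRound(3.6f)) << std::endl; // 4
        std::cout << op::uIntRound(127.2) << std::endl;                   // 127
        std::cout << op::ulongRound(2.5e3) << std::endl;                  // 2500
        return 0;
    }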

◆ uIntRound()

template<typename T >
    unsigned int op::uIntRound (const T a)
inline

    Definition at line 54 of file fastMath.hpp.

◆ uLongLongRound()

template<typename T >
    unsigned long long op::uLongLongRound (const T a)
inline

    Definition at line 66 of file fastMath.hpp.

◆ ulongRound()

template<typename T >
    unsigned long op::ulongRound (const T a)
inline

    Definition at line 60 of file fastMath.hpp.

◆ unrollArrayToUCharCvMat()

OP_API void op::unrollArrayToUCharCvMat (Matrix & matResult,
    const Array< float > & array 
    )

◆ vectorsAreEqual()

template<typename T >
    bool op::vectorsAreEqual (const std::vector< T > & vectorA,
    const std::vector< T > & vectorB 
    )

    Definition at line 9 of file standard.hpp.


    ◆ wrapperConfigureSanityChecks()

OP_API void op::wrapperConfigureSanityChecks (WrapperStructPose & wrapperStructPose,
const WrapperStructFace & wrapperStructFace,
const WrapperStructHand & wrapperStructHand,
const WrapperStructExtra & wrapperStructExtra,
const WrapperStructInput & wrapperStructInput,
const WrapperStructOutput & wrapperStructOutput,
const WrapperStructGui & wrapperStructGui,
    const bool renderOutput,
    const bool userInputAndPreprocessingWsEmpty,
    const bool userOutputWsEmpty,
    const std::shared_ptr< Producer > & producerSharedPtr,
    const ThreadManagerMode threadManagerMode 
    )

It checks that no wrong/contradictory flags are enabled for Wrapper(T).

Parameters
    wrapperStructPose
    wrapperStructFace
    wrapperStructHand
    wrapperStructExtra
    wrapperStructInput
    wrapperStructOutput
    renderOutput
    userOutputWsEmpty
    producerSharedPtr
    threadManagerMode

    Variable Documentation


    ◆ CUDA_NUM_THREADS

    const auto op::CUDA_NUM_THREADS = 512u

    Definition at line 9 of file cuda.hpp.
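
CUDA_NUM_THREADS is the per-block thread count used by the CUDA kernels; together with op::getNumberCudaBlocks (from the same cuda.hpp header) it gives the usual launch configuration. A sketch with a hypothetical kernel, left commented out so the snippet stays host-only:

    #include <openpose/gpu/cuda.hpp>

    void launchExample(float* data, const unsigned int totalElements)
    {
        const auto numberThreads = op::CUDA_NUM_THREADS;                  // 512 threads per block
        const auto numberBlocks = op::getNumberCudaBlocks(totalElements); // enough blocks to cover totalElements
        // exampleKernel<<<numberBlocks, numberThreads>>>(data, totalElements);
        (void)data; (void)numberThreads; (void)numberBlocks;
    }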

◆ F135

const auto op::F135 = H135 + 40

Definition at line 187 of file poseParametersRender.hpp.

◆ FACE_CCN_DECREASE_FACTOR

const auto op::FACE_CCN_DECREASE_FACTOR = 8.f

Definition at line 24 of file faceParameters.hpp.

◆ FACE_COLORS_RENDER

const std::vector<float> op::FACE_COLORS_RENDER {FACE_COLORS_RENDER_GPU}

Definition at line 20 of file faceParameters.hpp.

◆ FACE_DEFAULT_ALPHA_HEAT_MAP

const auto op::FACE_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP

Definition at line 30 of file faceParameters.hpp.

◆ FACE_DEFAULT_ALPHA_KEYPOINT

const auto op::FACE_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT

Definition at line 29 of file faceParameters.hpp.

◆ FACE_MAX_FACES

const auto op::FACE_MAX_FACES = POSE_MAX_PEOPLE

Definition at line 9 of file faceParameters.hpp.

◆ FACE_NUMBER_PARTS

const auto op::FACE_NUMBER_PARTS = 70u

Definition at line 11 of file faceParameters.hpp.

◆ FACE_PAIRS_RENDER

const std::vector<unsigned int> op::FACE_PAIRS_RENDER {FACE_PAIRS_RENDER_GPU}

Definition at line 18 of file faceParameters.hpp.

◆ FACE_PROTOTXT

const std::string op::FACE_PROTOTXT {"face/pose_deploy.prototxt"}

Definition at line 25 of file faceParameters.hpp.

◆ FACE_SCALES_RENDER

const std::vector<float> op::FACE_SCALES_RENDER {FACE_SCALES_RENDER_GPU}

Definition at line 21 of file faceParameters.hpp.

◆ FACE_TRAINED_MODEL

const std::string op::FACE_TRAINED_MODEL {"face/pose_iter_116000.caffemodel"}

Definition at line 26 of file faceParameters.hpp.

◆ H135

const auto op::H135 = 25

Definition at line 186 of file poseParametersRender.hpp.

◆ HAND_CCN_DECREASE_FACTOR

const auto op::HAND_CCN_DECREASE_FACTOR = 8.f

Definition at line 43 of file handParameters.hpp.

◆ HAND_COLORS_RENDER

const std::vector<float> op::HAND_COLORS_RENDER {HAND_COLORS_RENDER_GPU}

Definition at line 38 of file handParameters.hpp.

◆ HAND_DEFAULT_ALPHA_HEAT_MAP

const auto op::HAND_DEFAULT_ALPHA_HEAT_MAP = POSE_DEFAULT_ALPHA_HEAT_MAP

Definition at line 49 of file handParameters.hpp.

◆ HAND_DEFAULT_ALPHA_KEYPOINT

const auto op::HAND_DEFAULT_ALPHA_KEYPOINT = POSE_DEFAULT_ALPHA_KEYPOINT

Definition at line 48 of file handParameters.hpp.

◆ HAND_MAX_HANDS

const auto op::HAND_MAX_HANDS = 2*POSE_MAX_PEOPLE

Definition at line 9 of file handParameters.hpp.

◆ HAND_NUMBER_PARTS

const auto op::HAND_NUMBER_PARTS = 21u

Definition at line 11 of file handParameters.hpp.

◆ HAND_PAIRS_RENDER

const std::vector<unsigned int> op::HAND_PAIRS_RENDER {HAND_PAIRS_RENDER_GPU}

Definition at line 15 of file handParameters.hpp.

◆ HAND_PROTOTXT

const std::string op::HAND_PROTOTXT {"hand/pose_deploy.prototxt"}

Definition at line 44 of file handParameters.hpp.

◆ HAND_SCALES_RENDER

const std::vector<float> op::HAND_SCALES_RENDER {HAND_SCALES_RENDER_GPU}

Definition at line 39 of file handParameters.hpp.

◆ HAND_TRAINED_MODEL

const std::string op::HAND_TRAINED_MODEL {"hand/pose_iter_102000.caffemodel"}

Definition at line 45 of file handParameters.hpp.

◆ POSE_DEFAULT_ALPHA_HEAT_MAP

const auto op::POSE_DEFAULT_ALPHA_HEAT_MAP = 0.7f

Definition at line 11 of file poseParametersRender.hpp.

◆ POSE_DEFAULT_ALPHA_KEYPOINT

const auto op::POSE_DEFAULT_ALPHA_KEYPOINT = 0.6f

Definition at line 10 of file poseParametersRender.hpp.

◆ POSE_MAX_PEOPLE

const auto op::POSE_MAX_PEOPLE = 127u

Definition at line 14 of file poseParameters.hpp.

    + + + + diff --git a/web/html/doc/namespaceop.js b/web/html/doc/namespaceop.js new file mode 100644 index 000000000..9c9ea0cd9 --- /dev/null +++ b/web/html/doc/namespaceop.js @@ -0,0 +1,568 @@ +var namespaceop = +[ + [ "ConfigureError", "namespaceop_1_1_configure_error.html", [ + [ "getErrorModes", "namespaceop_1_1_configure_error.html#ae8dbbccc9a2ca8a4670716ac5fdd8d53", null ], + [ "setErrorModes", "namespaceop_1_1_configure_error.html#a96e56b0ddbe2cb17443b93aaba05d672", null ] + ] ], + [ "ConfigureLog", "namespaceop_1_1_configure_log.html", [ + [ "getLogModes", "namespaceop_1_1_configure_log.html#a5ab07ae8c026e4f7782a113778d9082d", null ], + [ "getPriorityThreshold", "namespaceop_1_1_configure_log.html#a0e5c3fad2ace3eb129dd1d97afd59558", null ], + [ "setLogModes", "namespaceop_1_1_configure_log.html#a2f41e9a74bbda434ef16189c32a13aba", null ], + [ "setPriorityThreshold", "namespaceop_1_1_configure_log.html#a149393c3c87c82a5cf14417c6b430d30", null ] + ] ], + [ "CameraParameterReader", "classop_1_1_camera_parameter_reader.html", "classop_1_1_camera_parameter_reader" ], + [ "PoseTriangulation", "classop_1_1_pose_triangulation.html", "classop_1_1_pose_triangulation" ], + [ "WPoseTriangulation", "classop_1_1_w_pose_triangulation.html", "classop_1_1_w_pose_triangulation" ], + [ "Array", "classop_1_1_array.html", "classop_1_1_array" ], + [ "ArrayCpuGpu", "classop_1_1_array_cpu_gpu.html", "classop_1_1_array_cpu_gpu" ], + [ "CvMatToOpInput", "classop_1_1_cv_mat_to_op_input.html", "classop_1_1_cv_mat_to_op_input" ], + [ "CvMatToOpOutput", "classop_1_1_cv_mat_to_op_output.html", "classop_1_1_cv_mat_to_op_output" ], + [ "Datum", "structop_1_1_datum.html", "structop_1_1_datum" ], + [ "GpuRenderer", "classop_1_1_gpu_renderer.html", "classop_1_1_gpu_renderer" ], + [ "KeepTopNPeople", "classop_1_1_keep_top_n_people.html", "classop_1_1_keep_top_n_people" ], + [ "KeypointScaler", "classop_1_1_keypoint_scaler.html", "classop_1_1_keypoint_scaler" ], + [ "Matrix", "classop_1_1_matrix.html", "classop_1_1_matrix" ], + [ "OpOutputToCvMat", "classop_1_1_op_output_to_cv_mat.html", "classop_1_1_op_output_to_cv_mat" ], + [ "Point", "structop_1_1_point.html", "structop_1_1_point" ], + [ "Rectangle", "structop_1_1_rectangle.html", "structop_1_1_rectangle" ], + [ "Renderer", "classop_1_1_renderer.html", "classop_1_1_renderer" ], + [ "ScaleAndSizeExtractor", "classop_1_1_scale_and_size_extractor.html", "classop_1_1_scale_and_size_extractor" ], + [ "String", "classop_1_1_string.html", "classop_1_1_string" ], + [ "VerbosePrinter", "classop_1_1_verbose_printer.html", "classop_1_1_verbose_printer" ], + [ "WCvMatToOpInput", "classop_1_1_w_cv_mat_to_op_input.html", "classop_1_1_w_cv_mat_to_op_input" ], + [ "WCvMatToOpOutput", "classop_1_1_w_cv_mat_to_op_output.html", "classop_1_1_w_cv_mat_to_op_output" ], + [ "WKeepTopNPeople", "classop_1_1_w_keep_top_n_people.html", "classop_1_1_w_keep_top_n_people" ], + [ "WKeypointScaler", "classop_1_1_w_keypoint_scaler.html", "classop_1_1_w_keypoint_scaler" ], + [ "WOpOutputToCvMat", "classop_1_1_w_op_output_to_cv_mat.html", "classop_1_1_w_op_output_to_cv_mat" ], + [ "WScaleAndSizeExtractor", "classop_1_1_w_scale_and_size_extractor.html", "classop_1_1_w_scale_and_size_extractor" ], + [ "WVerbosePrinter", "classop_1_1_w_verbose_printer.html", "classop_1_1_w_verbose_printer" ], + [ "FaceCpuRenderer", "classop_1_1_face_cpu_renderer.html", "classop_1_1_face_cpu_renderer" ], + [ "FaceDetector", "classop_1_1_face_detector.html", "classop_1_1_face_detector" ], + [ "FaceDetectorOpenCV", 
"classop_1_1_face_detector_open_c_v.html", "classop_1_1_face_detector_open_c_v" ], + [ "FaceExtractorCaffe", "classop_1_1_face_extractor_caffe.html", "classop_1_1_face_extractor_caffe" ], + [ "FaceExtractorNet", "classop_1_1_face_extractor_net.html", "classop_1_1_face_extractor_net" ], + [ "FaceGpuRenderer", "classop_1_1_face_gpu_renderer.html", "classop_1_1_face_gpu_renderer" ], + [ "FaceRenderer", "classop_1_1_face_renderer.html", "classop_1_1_face_renderer" ], + [ "WFaceDetector", "classop_1_1_w_face_detector.html", "classop_1_1_w_face_detector" ], + [ "WFaceDetectorOpenCV", "classop_1_1_w_face_detector_open_c_v.html", "classop_1_1_w_face_detector_open_c_v" ], + [ "WFaceExtractorNet", "classop_1_1_w_face_extractor_net.html", "classop_1_1_w_face_extractor_net" ], + [ "WFaceRenderer", "classop_1_1_w_face_renderer.html", "classop_1_1_w_face_renderer" ], + [ "CocoJsonSaver", "classop_1_1_coco_json_saver.html", "classop_1_1_coco_json_saver" ], + [ "FileSaver", "classop_1_1_file_saver.html", "classop_1_1_file_saver" ], + [ "HeatMapSaver", "classop_1_1_heat_map_saver.html", "classop_1_1_heat_map_saver" ], + [ "ImageSaver", "classop_1_1_image_saver.html", "classop_1_1_image_saver" ], + [ "JsonOfstream", "classop_1_1_json_ofstream.html", "classop_1_1_json_ofstream" ], + [ "KeypointSaver", "classop_1_1_keypoint_saver.html", "classop_1_1_keypoint_saver" ], + [ "PeopleJsonSaver", "classop_1_1_people_json_saver.html", "classop_1_1_people_json_saver" ], + [ "UdpSender", "classop_1_1_udp_sender.html", "classop_1_1_udp_sender" ], + [ "VideoSaver", "classop_1_1_video_saver.html", "classop_1_1_video_saver" ], + [ "WCocoJsonSaver", "classop_1_1_w_coco_json_saver.html", "classop_1_1_w_coco_json_saver" ], + [ "WFaceSaver", "classop_1_1_w_face_saver.html", "classop_1_1_w_face_saver" ], + [ "WHandSaver", "classop_1_1_w_hand_saver.html", "classop_1_1_w_hand_saver" ], + [ "WHeatMapSaver", "classop_1_1_w_heat_map_saver.html", "classop_1_1_w_heat_map_saver" ], + [ "WImageSaver", "classop_1_1_w_image_saver.html", "classop_1_1_w_image_saver" ], + [ "WPeopleJsonSaver", "classop_1_1_w_people_json_saver.html", "classop_1_1_w_people_json_saver" ], + [ "WPoseSaver", "classop_1_1_w_pose_saver.html", "classop_1_1_w_pose_saver" ], + [ "WUdpSender", "classop_1_1_w_udp_sender.html", "classop_1_1_w_udp_sender" ], + [ "WVideoSaver", "classop_1_1_w_video_saver.html", "classop_1_1_w_video_saver" ], + [ "WVideoSaver3D", "classop_1_1_w_video_saver3_d.html", "classop_1_1_w_video_saver3_d" ], + [ "FrameDisplayer", "classop_1_1_frame_displayer.html", "classop_1_1_frame_displayer" ], + [ "Gui", "classop_1_1_gui.html", "classop_1_1_gui" ], + [ "Gui3D", "classop_1_1_gui3_d.html", "classop_1_1_gui3_d" ], + [ "GuiInfoAdder", "classop_1_1_gui_info_adder.html", "classop_1_1_gui_info_adder" ], + [ "WGui", "classop_1_1_w_gui.html", "classop_1_1_w_gui" ], + [ "WGui3D", "classop_1_1_w_gui3_d.html", "classop_1_1_w_gui3_d" ], + [ "WGuiInfoAdder", "classop_1_1_w_gui_info_adder.html", "classop_1_1_w_gui_info_adder" ], + [ "HandCpuRenderer", "classop_1_1_hand_cpu_renderer.html", "classop_1_1_hand_cpu_renderer" ], + [ "HandDetector", "classop_1_1_hand_detector.html", "classop_1_1_hand_detector" ], + [ "HandDetectorFromTxt", "classop_1_1_hand_detector_from_txt.html", "classop_1_1_hand_detector_from_txt" ], + [ "HandExtractorCaffe", "classop_1_1_hand_extractor_caffe.html", "classop_1_1_hand_extractor_caffe" ], + [ "HandExtractorNet", "classop_1_1_hand_extractor_net.html", "classop_1_1_hand_extractor_net" ], + [ "HandGpuRenderer", 
"classop_1_1_hand_gpu_renderer.html", "classop_1_1_hand_gpu_renderer" ], + [ "HandRenderer", "classop_1_1_hand_renderer.html", "classop_1_1_hand_renderer" ], + [ "WHandDetector", "classop_1_1_w_hand_detector.html", "classop_1_1_w_hand_detector" ], + [ "WHandDetectorFromTxt", "classop_1_1_w_hand_detector_from_txt.html", "classop_1_1_w_hand_detector_from_txt" ], + [ "WHandDetectorTracking", "classop_1_1_w_hand_detector_tracking.html", "classop_1_1_w_hand_detector_tracking" ], + [ "WHandDetectorUpdate", "classop_1_1_w_hand_detector_update.html", "classop_1_1_w_hand_detector_update" ], + [ "WHandExtractorNet", "classop_1_1_w_hand_extractor_net.html", "classop_1_1_w_hand_extractor_net" ], + [ "WHandRenderer", "classop_1_1_w_hand_renderer.html", "classop_1_1_w_hand_renderer" ], + [ "BodyPartConnectorCaffe", "classop_1_1_body_part_connector_caffe.html", "classop_1_1_body_part_connector_caffe" ], + [ "MaximumCaffe", "classop_1_1_maximum_caffe.html", "classop_1_1_maximum_caffe" ], + [ "Net", "classop_1_1_net.html", "classop_1_1_net" ], + [ "NetCaffe", "classop_1_1_net_caffe.html", "classop_1_1_net_caffe" ], + [ "NetOpenCv", "classop_1_1_net_open_cv.html", "classop_1_1_net_open_cv" ], + [ "NmsCaffe", "classop_1_1_nms_caffe.html", "classop_1_1_nms_caffe" ], + [ "ResizeAndMergeCaffe", "classop_1_1_resize_and_merge_caffe.html", "classop_1_1_resize_and_merge_caffe" ], + [ "PoseCpuRenderer", "classop_1_1_pose_cpu_renderer.html", "classop_1_1_pose_cpu_renderer" ], + [ "PoseExtractor", "classop_1_1_pose_extractor.html", "classop_1_1_pose_extractor" ], + [ "PoseExtractorCaffe", "classop_1_1_pose_extractor_caffe.html", "classop_1_1_pose_extractor_caffe" ], + [ "PoseExtractorNet", "classop_1_1_pose_extractor_net.html", "classop_1_1_pose_extractor_net" ], + [ "PoseGpuRenderer", "classop_1_1_pose_gpu_renderer.html", "classop_1_1_pose_gpu_renderer" ], + [ "PoseRenderer", "classop_1_1_pose_renderer.html", "classop_1_1_pose_renderer" ], + [ "WPoseExtractor", "classop_1_1_w_pose_extractor.html", "classop_1_1_w_pose_extractor" ], + [ "WPoseExtractorNet", "classop_1_1_w_pose_extractor_net.html", "classop_1_1_w_pose_extractor_net" ], + [ "WPoseRenderer", "classop_1_1_w_pose_renderer.html", "classop_1_1_w_pose_renderer" ], + [ "DatumProducer", "classop_1_1_datum_producer.html", "classop_1_1_datum_producer" ], + [ "FlirReader", "classop_1_1_flir_reader.html", "classop_1_1_flir_reader" ], + [ "ImageDirectoryReader", "classop_1_1_image_directory_reader.html", "classop_1_1_image_directory_reader" ], + [ "IpCameraReader", "classop_1_1_ip_camera_reader.html", "classop_1_1_ip_camera_reader" ], + [ "Producer", "classop_1_1_producer.html", "classop_1_1_producer" ], + [ "SpinnakerWrapper", "classop_1_1_spinnaker_wrapper.html", "classop_1_1_spinnaker_wrapper" ], + [ "VideoCaptureReader", "classop_1_1_video_capture_reader.html", "classop_1_1_video_capture_reader" ], + [ "VideoReader", "classop_1_1_video_reader.html", "classop_1_1_video_reader" ], + [ "WDatumProducer", "classop_1_1_w_datum_producer.html", "classop_1_1_w_datum_producer" ], + [ "WebcamReader", "classop_1_1_webcam_reader.html", "classop_1_1_webcam_reader" ], + [ "PriorityQueue", "classop_1_1_priority_queue.html", "classop_1_1_priority_queue" ], + [ "Queue", "classop_1_1_queue.html", "classop_1_1_queue" ], + [ "QueueBase", "classop_1_1_queue_base.html", "classop_1_1_queue_base" ], + [ "SubThread", "classop_1_1_sub_thread.html", "classop_1_1_sub_thread" ], + [ "SubThreadNoQueue", "classop_1_1_sub_thread_no_queue.html", "classop_1_1_sub_thread_no_queue" ], + [ 
"SubThreadQueueIn", "classop_1_1_sub_thread_queue_in.html", "classop_1_1_sub_thread_queue_in" ], + [ "SubThreadQueueInOut", "classop_1_1_sub_thread_queue_in_out.html", "classop_1_1_sub_thread_queue_in_out" ], + [ "SubThreadQueueOut", "classop_1_1_sub_thread_queue_out.html", "classop_1_1_sub_thread_queue_out" ], + [ "Thread", "classop_1_1_thread.html", "classop_1_1_thread" ], + [ "ThreadManager", "classop_1_1_thread_manager.html", "classop_1_1_thread_manager" ], + [ "WFpsMax", "classop_1_1_w_fps_max.html", "classop_1_1_w_fps_max" ], + [ "WIdGenerator", "classop_1_1_w_id_generator.html", "classop_1_1_w_id_generator" ], + [ "Worker", "classop_1_1_worker.html", "classop_1_1_worker" ], + [ "WorkerConsumer", "classop_1_1_worker_consumer.html", "classop_1_1_worker_consumer" ], + [ "WorkerProducer", "classop_1_1_worker_producer.html", "classop_1_1_worker_producer" ], + [ "WQueueAssembler", "classop_1_1_w_queue_assembler.html", "classop_1_1_w_queue_assembler" ], + [ "WQueueOrderer", "classop_1_1_w_queue_orderer.html", "classop_1_1_w_queue_orderer" ], + [ "PersonIdExtractor", "classop_1_1_person_id_extractor.html", "classop_1_1_person_id_extractor" ], + [ "PersonTracker", "classop_1_1_person_tracker.html", "classop_1_1_person_tracker" ], + [ "WPersonIdExtractor", "classop_1_1_w_person_id_extractor.html", "classop_1_1_w_person_id_extractor" ], + [ "PointerContainerGreater", "classop_1_1_pointer_container_greater.html", "classop_1_1_pointer_container_greater" ], + [ "PointerContainerLess", "classop_1_1_pointer_container_less.html", "classop_1_1_pointer_container_less" ], + [ "Profiler", "classop_1_1_profiler.html", null ], + [ "WrapperT", "classop_1_1_wrapper_t.html", "classop_1_1_wrapper_t" ], + [ "WrapperStructExtra", "structop_1_1_wrapper_struct_extra.html", "structop_1_1_wrapper_struct_extra" ], + [ "WrapperStructFace", "structop_1_1_wrapper_struct_face.html", "structop_1_1_wrapper_struct_face" ], + [ "WrapperStructGui", "structop_1_1_wrapper_struct_gui.html", "structop_1_1_wrapper_struct_gui" ], + [ "WrapperStructHand", "structop_1_1_wrapper_struct_hand.html", "structop_1_1_wrapper_struct_hand" ], + [ "WrapperStructInput", "structop_1_1_wrapper_struct_input.html", "structop_1_1_wrapper_struct_input" ], + [ "WrapperStructOutput", "structop_1_1_wrapper_struct_output.html", "structop_1_1_wrapper_struct_output" ], + [ "WrapperStructPose", "structop_1_1_wrapper_struct_pose.html", "structop_1_1_wrapper_struct_pose" ], + [ "Wrapper", "namespaceop.html#a790dea3c007bed742fbc8cdd5757d026", null ], + [ "CocoJsonFormat", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715ac", [ + [ "Body", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715acaac101b32dda4448cf13a93fe283dddd8", null ], + [ "Hand21", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca9909f7cecc318ee0049ad0f3b409b3b3", null ], + [ "Hand42", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca1d9502bb9f6efc989b3578dcfde7901e", null ], + [ "Face", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca8af5861002f3c157f9ba842bba10aa3f", null ], + [ "Foot", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca129e74dde7b475c8848310e16754c965", null ], + [ "Car", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715acae9989db5dabeea617f40c8dbfd07f5fb", null ], + [ "Size", "namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "DataFormat", "namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7e", [ + [ "Json", "namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7eaeed8d85b888a6c015834240885ee6333", null ], + [ "Xml", 
"namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea9ec8e4e3ab4c7eeba097f27d7364d743", null ], + [ "Yaml", "namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea65f6036bfc9798ce230c5d8567551315", null ], + [ "Yml", "namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea55eeca17b45365c188d0edbd35f6e0c3", null ] + ] ], + [ "Detector", "namespaceop.html#a1070db47220e17cf37df40411350f6fb", [ + [ "Body", "namespaceop.html#a1070db47220e17cf37df40411350f6fbaac101b32dda4448cf13a93fe283dddd8", null ], + [ "OpenCV", "namespaceop.html#a1070db47220e17cf37df40411350f6fba5bd4c87976f48e6a53919d53e14025e9", null ], + [ "Provided", "namespaceop.html#a1070db47220e17cf37df40411350f6fba900b06e1ae224594f075e0c882c73532", null ], + [ "BodyWithTracking", "namespaceop.html#a1070db47220e17cf37df40411350f6fba65c691a85367d21881220b7a3d923747", null ], + [ "Size", "namespaceop.html#a1070db47220e17cf37df40411350f6fba6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "DisplayMode", "namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6", [ + [ "NoDisplay", "namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a28b652e57d2da6b7c939166be21efd9a", null ], + [ "DisplayAll", "namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a105036ef087117869f656cd72bfd8dd6", null ], + [ "Display2D", "namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a3bd9369403112127ae7db2f866002be2", null ], + [ "Display3D", "namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6ae18221460ca8434295f980225fd6a91b", null ], + [ "DisplayAdam", "namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a442304e26339521bc296bdc47ff5fddf", null ] + ] ], + [ "ElementToRender", "namespaceop.html#a37a23e10d9cbc428c793c3df1d62993e", [ + [ "Skeleton", "namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea6ab48f7ed56efc362f41853c5616bf75", null ], + [ "Background", "namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaa9ded1e5ce5d75814730bb4caaf49419", null ], + [ "AddKeypoints", "namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea5f4badd072493724e560fa43d0cf2c71", null ], + [ "AddPAFs", "namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaca9f686d0a3d6b8bfe5865b59b2fc84f", null ] + ] ], + [ "ErrorMode", "namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400c", [ + [ "StdRuntimeError", "namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cafe50b062b9b9100a72e68b48fe26fc50", null ], + [ "FileLogging", "namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca68ec2bf5b1662d1d27a523dcfc3c702a", null ], + [ "StdCerr", "namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca002f2100f8870e7c823894f492e4d337", null ], + [ "All", "namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cab1c94ca2fbc3e78fc30069c8d0f01680", null ] + ] ], + [ "Extensions", "namespaceop.html#a553bd31855c20a0d14e4c44a20bd91da", [ + [ "Images", "namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daafff0d600f8a0b5e19e88bfb821dd1157", null ], + [ "Size", "namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daa6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "FullScreenMode", "namespaceop.html#a6c22a72ce93c64e7582cb670492a50bf", [ + [ "FullScreen", "namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfae7ec409749889353b8f83a6b04159420", null ], + [ "Windowed", "namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd", null ] + ] ], + [ "GpuMode", "namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7f", [ + [ "Cuda", "namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa8b95dcff7397d0693c03e394af5552aa", null ], + [ "OpenCL", "namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa7982b09a852b37f2afb1227eaf552e47", null ], + [ "NoGpu", 
"namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa3c1472839b807c90abff3c7c36dff458", null ], + [ "Size", "namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "HeatMapType", "namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226b", [ + [ "Parts", "namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba9ce2d07469b39a72159ed8b0e0e597ca", null ], + [ "Background", "namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226baa9ded1e5ce5d75814730bb4caaf49419", null ], + [ "PAFs", "namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba21c5c3f60f4881b8d5477f5628db74f1", null ] + ] ], + [ "LogMode", "namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94", [ + [ "FileLogging", "namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94a68ec2bf5b1662d1d27a523dcfc3c702a", null ], + [ "StdCout", "namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94aa544d56d9492a20da20018000b5043b6", null ], + [ "All", "namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94ab1c94ca2fbc3e78fc30069c8d0f01680", null ] + ] ], + [ "PoseMode", "namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09", [ + [ "Disabled", "namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09ab9f5c797ebbf55adccdd8539a65a0241", null ], + [ "Enabled", "namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a00d23a76e43b46dae9ec7aa9dcbebb32", null ], + [ "NoNetwork", "namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09aa6e20e86de146a7b524d32c9b1fea7f4", null ], + [ "Size", "namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "PoseModel", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261f", [ + [ "BODY_25", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9a87ca5ab7b20c2bd4f8d5379956e6f6", null ], + [ "COCO_18", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0c4a19d9254adcb3ca1f0f527ee141fd", null ], + [ "MPI_15", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fad788fbec25069f2884ee1ed97e0af2b9", null ], + [ "MPI_15_4", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa71e915c88449606c6498d33dd7c98e84", null ], + [ "BODY_19", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fae3ae2003e0e0458bdc49480fb19c876e", null ], + [ "BODY_19_X2", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faca4c7eb29b1f3402e78aa384ce8fd5a9", null ], + [ "BODY_19N", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9c6c21b2b0a410880f46637db622e392", null ], + [ "BODY_25E", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faef29c97ffaed7b0d41ee9bb0d20550cc", null ], + [ "CAR_12", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa66ae79a5ac5fa502ae8bbecd3e07e71c", null ], + [ "BODY_25D", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa529c87ac399e5fd6f0fa4a360c032568", null ], + [ "BODY_23", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa003cc3795b0eeed2af2dfd34ed482794", null ], + [ "CAR_22", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa372b9885bba8bc32ad323fffcf99e39e", null ], + [ "BODY_19E", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa08956a1731b54bbdce3f97f1361efc23", null ], + [ "BODY_25B", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0b93cfdf906412bd7c8560ccd180cec6", null ], + [ "BODY_135", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261facfbe6a39619f4ca5a1fa2db000a17e0d", null ], + [ "Size", "namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "PoseProperty", "namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0e", [ + [ "NMSThreshold", 
"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea83be5d7f6f29b19cf24f7393551c0439", null ], + [ "ConnectInterMinAboveThreshold", "namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0eaf7405796a5c90a93fc3c8ffa89eb432d", null ], + [ "ConnectInterThreshold", "namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea240f10f3a39507d858c743971fd4298f", null ], + [ "ConnectMinSubsetCnt", "namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea7bf312724768faebba41ca3585a91f19", null ], + [ "ConnectMinSubsetScore", "namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea04576b26f5dc3637bf3c8168fba1641d", null ], + [ "Size", "namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "Priority", "namespaceop.html#adc43fb9031418e7f8112816a3b535d14", [ + [ "None", "namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6adf97f83acf6453d4a6a4b1070f3754", null ], + [ "Low", "namespaceop.html#adc43fb9031418e7f8112816a3b535d14a28d0edd045e05cf5af64e35ae0c4c6ef", null ], + [ "Normal", "namespaceop.html#adc43fb9031418e7f8112816a3b535d14a960b44c579bc2f6818d2daaf9e4c16f0", null ], + [ "High", "namespaceop.html#adc43fb9031418e7f8112816a3b535d14a655d20c1ca69519ca647684edbb2db35", null ], + [ "Max", "namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6a061313d22e51e0f25b7cd4dc065233", null ], + [ "NoOutput", "namespaceop.html#adc43fb9031418e7f8112816a3b535d14a828d496739024f4af00df1e277d96ebd", null ] + ] ], + [ "ProducerFpsMode", "namespaceop.html#ac0230b669b296920c0cfc41b7587268f", [ + [ "OriginalFps", "namespaceop.html#ac0230b669b296920c0cfc41b7587268fa0123c3afc0fac5edaf8b1672cb12626c", null ], + [ "RetrievalFps", "namespaceop.html#ac0230b669b296920c0cfc41b7587268fa6bcd0f3b66e42d1aacd18d1c3b532473", null ] + ] ], + [ "ProducerProperty", "namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774", [ + [ "AutoRepeat", "namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3ebbca1b84060b0caaf823639739945d", null ], + [ "Flip", "namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a9ffbd422925a6839ee820ddbc59278c5", null ], + [ "Rotation", "namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774af1a42bd417390fc63b030a519624607a", null ], + [ "FrameStep", "namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a63eacc5ed21c0ecb8bc583e10dc3ae58", null ], + [ "NumberViews", "namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3b6cff57206f4ce645622b2e55f784a6", null ], + [ "Size", "namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "ProducerType", "namespaceop.html#a54b73745852c270cfd891eed0f6f2332", [ + [ "FlirCamera", "namespaceop.html#a54b73745852c270cfd891eed0f6f2332af436d4d7a472ac39a7cb227e3ea24f8d", null ], + [ "ImageDirectory", "namespaceop.html#a54b73745852c270cfd891eed0f6f2332a54a365e86ee42cff91ca36532c9bbabf", null ], + [ "IPCamera", "namespaceop.html#a54b73745852c270cfd891eed0f6f2332af40a40a04a078c4449cda2f326d7fb18", null ], + [ "Video", "namespaceop.html#a54b73745852c270cfd891eed0f6f2332a34e2d1989a1dbf75cd631596133ee5ee", null ], + [ "Webcam", "namespaceop.html#a54b73745852c270cfd891eed0f6f2332ae2faa2a74b6a4134d0b3e84c7c0e2a01", null ], + [ "None", "namespaceop.html#a54b73745852c270cfd891eed0f6f2332a6adf97f83acf6453d4a6a4b1070f3754", null ] + ] ], + [ "RenderMode", "namespaceop.html#afce557f02e337e16150d00bdf72ec033", [ + [ "None", "namespaceop.html#afce557f02e337e16150d00bdf72ec033a6adf97f83acf6453d4a6a4b1070f3754", null ], + [ "Auto", "namespaceop.html#afce557f02e337e16150d00bdf72ec033a06b9281e396db002010bde1de57262eb", null ], + [ "Cpu", 
"namespaceop.html#afce557f02e337e16150d00bdf72ec033a54c82ef76ecbbd4c2293e09bae01b54e", null ], + [ "Gpu", "namespaceop.html#afce557f02e337e16150d00bdf72ec033a3432ca64f06615abf07ab44c10cada38", null ] + ] ], + [ "ScaleMode", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bda", [ + [ "InputResolution", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa46f9a0da0a5d448fd0cc8b3aa0a9b228", null ], + [ "NetOutputResolution", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa668a2bc599fd07445eae0730d043c96d", null ], + [ "OutputResolution", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa73c42013aac51c335d50d103f30fcb99", null ], + [ "ZeroToOne", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa4b942544cb3e764bbb8d33f8a8744855", null ], + [ "ZeroToOneFixedAspect", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaafa90ddb034be42f1cdf13a6829eed2ad", null ], + [ "PlusMinusOne", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaab7e7b2beae3435e73021d6d9a6a3fd8a", null ], + [ "PlusMinusOneFixedAspect", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaee080e43c505aa85cdda0e480b0abc06", null ], + [ "UnsignedChar", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaa93f121640d609f8772397a0f40f40d6", null ], + [ "NoScale", "namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa6089ccf7c3fe93a62745e51200419c60", null ] + ] ], + [ "ThreadManagerMode", "namespaceop.html#a3593e2d53bec533f0048ef3973eebd36", [ + [ "Asynchronous", "namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a288aae25bc408055f50c21c991903a44", null ], + [ "AsynchronousIn", "namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a435b3ab344c03bfc0e4530a2e75f5e44", null ], + [ "AsynchronousOut", "namespaceop.html#a3593e2d53bec533f0048ef3973eebd36ac68f8680ccf3a65dfcfc63356112c9f9", null ], + [ "Synchronous", "namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a2fe4167817733fec8e6ba1afddf78f1b", null ] + ] ], + [ "WorkerType", "namespaceop.html#a970a2a768a2ace81605b1558c9fdec18", [ + [ "Input", "namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a324118a6721dd6b8a9b9f4e327df2bf5", null ], + [ "PreProcessing", "namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a05318bd0215d16e009798570b53755d2", null ], + [ "PostProcessing", "namespaceop.html#a970a2a768a2ace81605b1558c9fdec18aa52d6088cbae537944827c8f8c69c570", null ], + [ "Output", "namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a29c2c02a361c9d7028472e5d92cd4a54", null ], + [ "Size", "namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "addBkgChannel", "namespaceop.html#a13b86d097fd5f36612e9858e9348ea57", null ], + [ "averageKeypoints", "namespaceop.html#a1f931e210eb575a084b8e6f462b0b382", null ], + [ "checkBool", "namespaceop.html#a410201fcc46274e24726c5a601bc1721", null ], + [ "checkEqual", "namespaceop.html#aaff52f436911aa17bebb999cd91a44fd", null ], + [ "checkGreaterOrEqual", "namespaceop.html#a92e8cd01741c90fbfdfaa33a13803f34", null ], + [ "checkGreaterThan", "namespaceop.html#a3dd874d4341b99431819f9fa6b678ca9", null ], + [ "checkLessOrEqual", "namespaceop.html#a7ecfc02dca25534a071acf3136ff175e", null ], + [ "checkLessThan", "namespaceop.html#a1e71130dc8f280e4664c711128b18b42", null ], + [ "checkNoNullNorEmpty", "namespaceop.html#a02164ca0af9e838190f584f5d1d8465e", null ], + [ "checkNotEqual", "namespaceop.html#aaada2594361f6f929af5b1f9d50387eb", null ], + [ "checkWorkerErrors", "namespaceop.html#a865a4cd0ba3b596667dc7242756837bd", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#aa65c081c13e0d0453938a3c41d04dc49", null 
], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#aa7f93261bd6d87f86c45e933607a0678", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#af98c8e514e79d4718fb1fc64dc0e431b", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a36492d15f864f7c813a573789ea554aa", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a506578f3e723f992eabb627a371351ba", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a63605cf0e6f4049beacf6094995272e8", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#aee90a0429c2d14da0c3a85cd67a17821", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ae5dac6cf1ccdf461838f9795be8fda03", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ac06eeab84c4861ef08834855b48750a6", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#af46e80e6bac0f815006759df4c9d00c3", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a9076fc1719030c2a74f21682999d2315", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a6d12bd1e42cfb63d2f780bed55fa01fb", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a196f17357cd1c1bb02e24e4e8a0e6ec3", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#abf3a59fc4662f07e6ba19b95bd4da32f", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ab5b47f0069e9f397ff891194b20d28f2", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#af42afa53c725d556c14928b2603883e3", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a57c4f3ada0db4882a4106d4dedf08012", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#adfc12925650978828707c1c0dcbebd0e", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ade3b2e4b105242a3cf41def3def1691d", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a54b38240e45009f7e6a25d956ac96fe0", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ae88e9ced5d14fa221205b492ff76c56b", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a0424a8e4dc8ceb5e8d8a2230c157a7fd", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a767385c8d3ebe736e1752825ab4d4ea0", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a361310c59d16e88a4d2450a80f078f01", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a5cc3f625b2644b1aade85a9458b5503a", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ae5cc3e92ffd9696f01ce7824ebbd0759", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a635579f5f8d20b8e65f4f94da4d3d2f2", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a602d5d238fe0c7096698cf36b7dee9ab", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a7ac10b9f503668695643c366e25f3b68", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ad22c543a4376e943b728e657fab5ed9f", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a505ea16cc6c2c0068bbf4e7269dc8e0a", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#aaee32c4c68404e5086844bcb911b7a20", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a47758c703fccdbb65c26dc7bc4022237", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a1d9f50688522ed7368acc33a09ae9ece", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a5642545fda1c3bbaf60810cf0e2d2c1d", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a01aa5c6e24026536367cf47a64e9bba5", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a5660f0e72781ce6d7db9eb78b582e5c6", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a774871462f7fefb8cadea1e49f501e45", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a674a652ad38b355285417529fc050847", null ], + [ "COMPILE_TEMPLATE_DATUM", 
"namespaceop.html#a020603e3ad6326cb1dce43485157f768", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ab1e242b1ae7ff3300324fbfedebb52fc", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#ae76afeeeaedaebe6941f41a4bdf50e2a", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a31ad937a2e52ea08ce925031d26616b9", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a53f346232d0743f3dd0f547de1fc508f", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#add981a5f6a49d35cc316a54c613497f3", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#aaca98fe6101cda512a43c513182ae5cc", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#af9e0d9e4028c0589b5eeeaed42a5088c", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a89984557f6968584d1938afe7b9f32bd", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a49bd4106b0cd1cb81980329b06c0d2c8", null ], + [ "COMPILE_TEMPLATE_DATUM", "namespaceop.html#a0db530b6f607aa43e8f9154b308d207a", null ], + [ "configureThreadManager", "namespaceop.html#a4adaee31db7ae1d3f963daa9e022e62f", null ], + [ "connectBodyPartsCpu", "namespaceop.html#a2ae13dae91c41b29063b48158ccbcc4e", null ], + [ "connectBodyPartsGpu", "namespaceop.html#a927468f6931ddb1e7d1e6e6e59b8bd36", null ], + [ "connectBodyPartsOcl", "namespaceop.html#a77a4d87bbee791dfba0667aa10bcca99", null ], + [ "createMultiviewTDatum", "namespaceop.html#a3da2a2a2f5ac58cfba53ea0d43ac6751", null ], + [ "createPeopleVector", "namespaceop.html#ae5d883da8c8f11356d5e1b61bc3a99b6", null ], + [ "createProducer", "namespaceop.html#a6a34909c6c4d79a215f163291111d556", null ], + [ "cudaCheck", "namespaceop.html#a2af8422ada0de882cc222920ca15c6d2", null ], + [ "dataFormatToString", "namespaceop.html#a9d121f33179e41075f4602eb6527e658", null ], + [ "datumProducerConstructor", "namespaceop.html#ad72abbc7b2600f543e4ee8e28392711e", null ], + [ "datumProducerConstructorRunningAndGetDatumApplyPlayerControls", "namespaceop.html#a177ffd3101c7a1f5cf32e100474a1234", null ], + [ "datumProducerConstructorRunningAndGetDatumFrameIntegrity", "namespaceop.html#a427c6244ee27171037bc201f401de16a", null ], + [ "datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning", "namespaceop.html#a71c68de51a3608e782854c298b91cd62", null ], + [ "datumProducerConstructorRunningAndGetNextFrameNumber", "namespaceop.html#a71cdc487bbec12ddbe4bac9123745494", null ], + [ "datumProducerConstructorTooManyConsecutiveEmptyFrames", "namespaceop.html#a5001474237d31d72c9145a84ec5143da", null ], + [ "error", "namespaceop.html#a5f092bd36c716a894cb035e1ead2aca3", null ], + [ "error", "namespaceop.html#a42d364d9fbd1a719341bd7187d97cf18", null ], + [ "errorDestructor", "namespaceop.html#a825f15fdf9dc9cb7473c20f970f15b60", null ], + [ "errorDestructor", "namespaceop.html#a758b08be140e27dd2642d286a383be54", null ], + [ "errorWorker", "namespaceop.html#a61af88aac41ef77ab4e8816023fe32f0", null ], + [ "errorWorker", "namespaceop.html#a96d1720ea5d160cfd4c8404060a9bebd", null ], + [ "estimateAndSaveExtrinsics", "namespaceop.html#aed964859fbd282bd29f2b818a3bf10dd", null ], + [ "estimateAndSaveIntrinsics", "namespaceop.html#a1fd317d44606181c63ef8a4e5676a09e", null ], + [ "estimateAndSaveSiftFile", "namespaceop.html#a37cdfa8dd466c3df9e7da5724a909143", null ], + [ "existDirectory", "namespaceop.html#a6fc2ee2d2c256695fb7b2b953ee7f762", null ], + [ "existFile", "namespaceop.html#ac1f4b95440d2fb57fc715558d039b947", null ], + [ "fastMax", "namespaceop.html#a9f4b99449c0c73e2c89ee1a1eff007c7", null ], + [ "fastMin", 
"namespaceop.html#a6e1d1f90ef06cc7af576fdaad4b4e320", null ], + [ "fastTruncate", "namespaceop.html#a2dafd3db8f922405b38240345dd1dce5", null ], + [ "flagsToDetector", "namespaceop.html#a9f585930a5246e4a9a70145fa8763447", null ], + [ "flagsToDisplayMode", "namespaceop.html#afdf2dd76cbae54789a139d9415790f82", null ], + [ "flagsToHeatMaps", "namespaceop.html#ad3b02ca66d11f4129372f4a9f98c6437", null ], + [ "flagsToHeatMapScaleMode", "namespaceop.html#aed9ab5282e3e60f22dc11c301af897e6", null ], + [ "flagsToPoint", "namespaceop.html#a0e1275fd8690a55200fcd193c94dcf08", null ], + [ "flagsToPoseMode", "namespaceop.html#af5ec8b7e6271798cbd09475766c64d2f", null ], + [ "flagsToPoseModel", "namespaceop.html#a60ab295fba5d41b31d6ba5a4942889a9", null ], + [ "flagsToProducer", "namespaceop.html#a8264a6feec695adef80d40940863d511", null ], + [ "flagsToProducerType", "namespaceop.html#a1ca09f1d0e1f01d95842e99ebeef0631", null ], + [ "flagsToRenderMode", "namespaceop.html#a70f65da8f70ebd07b093932927187c90", null ], + [ "flagsToScaleMode", "namespaceop.html#abe3f4d783191416b8e62e54c953fe36b", null ], + [ "formatAsDirectory", "namespaceop.html#ab38ea91ef7b7dad700d8e4a4654d48f5", null ], + [ "getAverageScore", "namespaceop.html#a1110f4c0017c43ea1d0896a3225c55f8", null ], + [ "getBiggestPerson", "namespaceop.html#ace4af20d19066df9ec502c5a09097c24", null ], + [ "getCudaGpuNumber", "namespaceop.html#ad9b7765a4396ee4470585ded07285563", null ], + [ "getCvCapPropFrameCount", "namespaceop.html#a0e60b0e4e89a7f08de54ad40c2d46a60", null ], + [ "getCvCapPropFrameFps", "namespaceop.html#aaf7199f3821a6f954cfae134ec8c7e19", null ], + [ "getCvCapPropFrameHeight", "namespaceop.html#a264496927e7b331ad628d7dc4a683194", null ], + [ "getCvCapPropFrameWidth", "namespaceop.html#a71866b00e7d1077137094f78ec83b62b", null ], + [ "getCvFourcc", "namespaceop.html#a4059a24a786c4f2def977715dd2e6747", null ], + [ "getCvImwriteJpegQuality", "namespaceop.html#ad86d86621b1f485f261d620373748ed1", null ], + [ "getCvImwritePngCompression", "namespaceop.html#a289d19386824250545f248a79aed283c", null ], + [ "getCvLoadImageAnydepth", "namespaceop.html#a84730c1ab201fe836fe87787589af88a", null ], + [ "getCvLoadImageGrayScale", "namespaceop.html#ace6c48833ba117b7d036179bdaf31a7a", null ], + [ "getDistance", "namespaceop.html#ac968b1c98c60b74be78225be27805706", null ], + [ "getDistanceAverage", "namespaceop.html#acf638f00b0a825c05683f8e23942a9d5", null ], + [ "getDistanceAverage", "namespaceop.html#aa053f4b0533d9e981aa171a1ef57fc30", null ], + [ "getFileExtension", "namespaceop.html#a515273b013402d8c75780330588421bc", null ], + [ "getFileNameAndExtension", "namespaceop.html#a573544858d0a9c29c9707eeda3a21c98", null ], + [ "getFileNameNoExtension", "namespaceop.html#a6f37638480139a4076eef4d0c7dc6cd1", null ], + [ "getFileParentFolderPath", "namespaceop.html#a2e35510c95e5525aae7a398b03b32488", null ], + [ "getFilesOnDirectory", "namespaceop.html#a858f70fa9d84ad85c60f19a2229ebbde", null ], + [ "getFilesOnDirectory", "namespaceop.html#adb26da2c52486e926d98471b5387c7e1", null ], + [ "getFilesOnDirectory", "namespaceop.html#a3ff74a37eb4bf12e31bc5aa95b69f9e3", null ], + [ "getFirstNumberOnString", "namespaceop.html#a844c35ea57a8bc67f33f49deb5070652", null ], + [ "getFullFilePathNoExtension", "namespaceop.html#ac1737c19228b83a5e93ae51e5d9556eb", null ], + [ "getGpuMode", "namespaceop.html#a971a7caa96be5b715b5c22f6e5dc6ad1", null ], + [ "getGpuNumber", "namespaceop.html#aaad222b087dd041c35de2f3414c1a01f", null ], + [ "getIfInMainThreadOrEmpty", 
"namespaceop.html#ad5e1c975a1b7dce9b02bc8cdf3d45a01", null ], + [ "getIfNotInMainThreadOrEmpty", "namespaceop.html#abdedc8f1fd2f723dae5bb8ff20b93a93", null ], + [ "getKeypointsArea", "namespaceop.html#a1dd5dde18458975a36bdbd6dd38720a2", null ], + [ "getKeypointsPerson", "namespaceop.html#a75411d98f69051860379730e16103178", null ], + [ "getKeypointsRectangle", "namespaceop.html#ac74cba4141f2bee2b9d94dc171029a73", null ], + [ "getKeypointsRoi", "namespaceop.html#a6913c67141fcbbba84fc88ac8a45aa0f", null ], + [ "getKeypointsRoi", "namespaceop.html#ac9af122ccd8dcdafb11e37b6633245b4", null ], + [ "getKeypointsRoi", "namespaceop.html#a36296ff5a5945244c5131e3ae16057e1", null ], + [ "getLastNumber", "namespaceop.html#ab670c693d8e4a540cfe75ce8383b6d10", null ], + [ "getNonZeroKeypoints", "namespaceop.html#aa9366cf1b4ac3494965749eeb5537da1", null ], + [ "getNumberCudaBlocks", "namespaceop.html#a4ba080c11cc9758051db97ce2a11c023", null ], + [ "getNumberCudaThreadsAndBlocks", "namespaceop.html#a17da233ea322ae172ff5bda7caaf2124", null ], + [ "getNumberElementsToRender", "namespaceop.html#aebff78a4cfbef1cf1b2e03066d88564c", null ], + [ "getPoseBodyPartMapping", "namespaceop.html#aab3de911b04b96c1850cc05c6947e184", null ], + [ "getPoseBodyPartPairsRender", "namespaceop.html#a11bd7e53698eabe32b69b48708cf7b19", null ], + [ "getPoseColors", "namespaceop.html#abb49286241ba7a1d754b31dee333274a", null ], + [ "getPoseDefaultConnectInterMinAboveThreshold", "namespaceop.html#a8e377d8da8f109cb8be8e4edbb2ea90a", null ], + [ "getPoseDefaultConnectInterThreshold", "namespaceop.html#aabfd35e57744b44481c09f56c90cc8b8", null ], + [ "getPoseDefaultConnectMinSubsetScore", "namespaceop.html#ae7636f6e8974ecb2ed96d43dd5ec261d", null ], + [ "getPoseDefaultMinSubsetCnt", "namespaceop.html#a863c96f1fb23d96c5d605867cfe5f99f", null ], + [ "getPoseDefaultNmsThreshold", "namespaceop.html#acd8cab258d7e98affa5c317a9a03e862", null ], + [ "getPoseMapIndex", "namespaceop.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7", null ], + [ "getPoseMaxPeaks", "namespaceop.html#a96a81e831f8c965825162dba09095477", null ], + [ "getPoseNetDecreaseFactor", "namespaceop.html#ad7ca8d89f9045481075902c8bd98b8f4", null ], + [ "getPoseNumberBodyParts", "namespaceop.html#a54a6c42a42a0a7e539061f5e30abb4bc", null ], + [ "getPosePartPairs", "namespaceop.html#a307b2c7b1506415a4ba44590fe8a7258", null ], + [ "getPoseProtoTxt", "namespaceop.html#ae0730c6559abdb976423ecf81eac4620", null ], + [ "getPoseScales", "namespaceop.html#a016abefba53293ed2ffe3a3c3bd88dd0", null ], + [ "getPoseTrainedModel", "namespaceop.html#ade70b024ee461ae04e7233bf3937c5c6", null ], + [ "getThreadId", "namespaceop.html#a5a3db1a0d272d8fb5ea723845beee150", null ], + [ "getTimerInit", "namespaceop.html#ae0e92a0d8867d1b02f1c43ae4c0c9e09", null ], + [ "getTimeSeconds", "namespaceop.html#a01dd208c992c8e07623579f77dcfb59b", null ], + [ "keepRoiInside", "namespaceop.html#a5f85de4dca2733d03470d42617f83d4e", null ], + [ "loadData", "namespaceop.html#a1c2921f841ab87033b535b5ae8a4d526", null ], + [ "loadData", "namespaceop.html#a9f14054fbf4e63fc85d10c83f2f9ecb7", null ], + [ "loadHandDetectorTxt", "namespaceop.html#a0ce96f84c6e380b261802c7e2639dc7d", null ], + [ "loadImage", "namespaceop.html#a871a61f08021460e0f24f51583546a75", null ], + [ "makeDirectory", "namespaceop.html#acc650faa23df88ca16a09a2d2a522960", null ], + [ "maximumCpu", "namespaceop.html#ae0fea41041a70ae8449a77f46ffe8100", null ], + [ "maximumGpu", "namespaceop.html#a8ec109805adf02f9872a6af37d602caa", null ], + [ "mergeVectors", 
"namespaceop.html#aa3a3e2acfb27ecbd187d01c8dcd41899", null ], + [ "nmsCpu", "namespaceop.html#a6a97f255cc323f1c1babe4c598727196", null ], + [ "nmsGpu", "namespaceop.html#a28c5ac530845231600fb93c0be44ad6d", null ], + [ "nmsOcl", "namespaceop.html#a37dce2abad2568d7664654e4598002af", null ], + [ "opLog", "namespaceop.html#aa72861fea0671209aca1ea5fa385891a", null ], + [ "opLog", "namespaceop.html#a838b69fead43c8a848d059b5f9d63baf", null ], + [ "opLogIfDebug", "namespaceop.html#a91dd00cbb8fb646e6612455eb0f1b3e9", null ], + [ "pafPtrIntoVector", "namespaceop.html#aaec4a34b015f898d28be2b9f2aba0d38", null ], + [ "pafVectorIntoPeopleVector", "namespaceop.html#a36f0207c6263e7174f4c79eba7c4df3f", null ], + [ "peopleVectorToPeopleArray", "namespaceop.html#a3dbd17f2f656a2bc751441a42b5b9516", null ], + [ "poseBodyPartMapStringToKey", "namespaceop.html#a3df938ef93037c534c5d342720d5fb70", null ], + [ "poseBodyPartMapStringToKey", "namespaceop.html#aacf6e688031bb116e4878b811e8dbc23", null ], + [ "positiveCharRound", "namespaceop.html#ab5eb10c958f3f37fb82d29361ad81467", null ], + [ "positiveIntRound", "namespaceop.html#a699ef17b0f27b8bc2c4d4a03e46e6be1", null ], + [ "positiveLongLongRound", "namespaceop.html#a1b479fea39a56c041a8a51aecf024bed", null ], + [ "positiveLongRound", "namespaceop.html#a57eee48e4cefd583a81cfc907586c035", null ], + [ "positiveSCharRound", "namespaceop.html#ab71596bc88b87ea5920f19f978d6d6ac", null ], + [ "printTime", "namespaceop.html#ab0908bcc0abb00c49ecbe7fc373b58c9", null ], + [ "recenter", "namespaceop.html#a7cd131c9ddd8f3987508e89e0881b9e0", null ], + [ "refineAndSaveExtrinsics", "namespaceop.html#a50526c188f2ba94b07e0945c0871fd2c", null ], + [ "remove0sFromString", "namespaceop.html#a2f610ba8a71cf16628df2f4d270b7d34", null ], + [ "removeAllOcurrencesOfSubString", "namespaceop.html#a82471a2af285bada830bac3c95a8440b", null ], + [ "removePeopleBelowThresholdsAndFillFaces", "namespaceop.html#ae01dd412590493f5f732594e8332d3f0", null ], + [ "removeSpecialsCharacters", "namespaceop.html#a8664658afa7be03e173cec9eff2873ad", null ], + [ "renderFaceKeypointsCpu", "namespaceop.html#a5fc85e8500dbeda3b75c1b6ecfac91cd", null ], + [ "renderFaceKeypointsGpu", "namespaceop.html#ab8b2748a5bcf823e59b66549e6a24cfe", null ], + [ "renderHandKeypointsCpu", "namespaceop.html#afb5b711819f94b51f32460861d9cea38", null ], + [ "renderHandKeypointsGpu", "namespaceop.html#a865db81a5bc4f81cf9fc7c7f3ce81be3", null ], + [ "renderKeypointsCpu", "namespaceop.html#aa7803aa62abc21471e7d966bd674a81a", null ], + [ "renderPoseDistanceGpu", "namespaceop.html#a056c64afca17423e038590e4ef2f712b", null ], + [ "renderPoseHeatMapGpu", "namespaceop.html#a3ceb3476e4154a6e9e06b3613a12c040", null ], + [ "renderPoseHeatMapsGpu", "namespaceop.html#aa1225091307f8d0bf07dd032389f8961", null ], + [ "renderPoseKeypointsCpu", "namespaceop.html#a99a08148f440bd96546076e15f0de04c", null ], + [ "renderPoseKeypointsGpu", "namespaceop.html#ad0069d4c6204b35893f4158d04d615f1", null ], + [ "renderPosePAFGpu", "namespaceop.html#a9275c58ba881ea94e054117392a67381", null ], + [ "renderPosePAFsGpu", "namespaceop.html#a3ba62b3d5cc275fc1700bf0c5e6bf578", null ], + [ "reorderAndNormalize", "namespaceop.html#a8587bab6b02056384b7c424555cd50d8", null ], + [ "replaceAll", "namespaceop.html#a5fe477200af87dadb07c8d6a75b4414b", null ], + [ "resizeAndMergeCpu", "namespaceop.html#adb8ffc1a6a2cc2949d80d8e8ad4e2190", null ], + [ "resizeAndMergeGpu", "namespaceop.html#a8982332c4263696d0e023997f0e4c753", null ], + [ "resizeAndMergeOcl", 
"namespaceop.html#a97b053019720782f2f81bc1b41f036d6", null ], + [ "resizeAndPadRbgGpu", "namespaceop.html#ad5495d8c6a65afbedef3af7a8844bfcc", null ], + [ "resizeAndPadRbgGpu", "namespaceop.html#a2f1ef915c8efc724c0bf40f0348f20a2", null ], + [ "resizeGetScaleFactor", "namespaceop.html#a24ebdcb8395dea0429f220de6a715d6e", null ], + [ "rotateAndFlipFrame", "namespaceop.html#af65d1b7c5b708f30780e4b2bcfccedcb", null ], + [ "saveData", "namespaceop.html#a7b9bcb57dd8488ade8ea288342eaed08", null ], + [ "saveData", "namespaceop.html#aafac1158605748694e3c3ed4eb34b3b7", null ], + [ "saveFloatArray", "namespaceop.html#ac1080e627185a65b88ec788184a95552", null ], + [ "saveImage", "namespaceop.html#a8c9d3469086a12607b097731848b6dea", null ], + [ "savePeopleJson", "namespaceop.html#af9c189f7c80092570699c8b9d5686fea", null ], + [ "savePeopleJson", "namespaceop.html#a1e986a510a29bfd8c682f65a8b399551", null ], + [ "scaleKeypoints", "namespaceop.html#ac5fc565b24e499e306ca170b9139eeb6", null ], + [ "scaleKeypoints2d", "namespaceop.html#aae9e38fa6c56e188b4f649732f0d4cd3", null ], + [ "scaleKeypoints2d", "namespaceop.html#a6b9adf8f7e698e566414c9f44f0c85f1", null ], + [ "setMainThread", "namespaceop.html#a7eb0121791185c13a6c3dd88994e0eab", null ], + [ "splitString", "namespaceop.html#ae80a103d8a4308bc435342b3d31404c8", null ], + [ "stringToDataFormat", "namespaceop.html#a46e815df32db67d78a94367b7f97df25", null ], + [ "threadIdPP", "namespaceop.html#af65a4564afcad06b72468679f6bee52b", null ], + [ "toFixedLengthString", "namespaceop.html#a42292d44d10f55cb1d83a296183e9b31", null ], + [ "toLower", "namespaceop.html#a3290f48d24c9992dd00d339ce49cfac7", null ], + [ "toUpper", "namespaceop.html#a7a815e303884fb2b3346c8cc19d61b23", null ], + [ "transpose", "namespaceop.html#a75c4194e0eae0ef28c6829def462dad2", null ], + [ "tToString", "namespaceop.html#af548fe1a2ad2b392a25afe9b0b87b8dd", null ], + [ "uCharCvMatToFloatPtr", "namespaceop.html#a532d08cb2ef011f9cad29c01d3431d6e", null ], + [ "uCharImageCast", "namespaceop.html#a6aeab543a61ef23ed58a6e29401424ae", null ], + [ "uCharRound", "namespaceop.html#a61240e5fbd4ea84a2cfdc89407bcb1ae", null ], + [ "uIntRound", "namespaceop.html#a8525e440d6ac1b558e72637dc4f4e3c4", null ], + [ "uLongLongRound", "namespaceop.html#a757a5cc88734e7be9e910e7d8213c282", null ], + [ "ulongRound", "namespaceop.html#aaafe2e235a1a3a146bb026b71c521c7b", null ], + [ "unrollArrayToUCharCvMat", "namespaceop.html#a1910d9f194831570be6ffe683209e7b3", null ], + [ "vectorsAreEqual", "namespaceop.html#af63e418966741f7efebacc9519174a0a", null ], + [ "wrapperConfigureSanityChecks", "namespaceop.html#acc4a5460e02ae510e854724513eea822", null ], + [ "CUDA_NUM_THREADS", "namespaceop.html#ac7bbf63b37bf6762c47557ad227e036d", null ], + [ "F135", "namespaceop.html#a593bb53120d8db14cab814dfb5d9ed2c", null ], + [ "FACE_CCN_DECREASE_FACTOR", "namespaceop.html#aa6701cc08e1a8651798ef3bf8437375b", null ], + [ "FACE_COLORS_RENDER", "namespaceop.html#a3fbae1778780ae5bf4ffcc84cdef1870", null ], + [ "FACE_DEFAULT_ALPHA_HEAT_MAP", "namespaceop.html#a4f191afed46fea5e3ce5b2a8756e1ddd", null ], + [ "FACE_DEFAULT_ALPHA_KEYPOINT", "namespaceop.html#a15f6c39797cee87f6aa941d93f22b78b", null ], + [ "FACE_MAX_FACES", "namespaceop.html#a9b4b92c621cc5962a72898899d2f2534", null ], + [ "FACE_NUMBER_PARTS", "namespaceop.html#a8a05bdc38612c38e28b96bba5b4679b8", null ], + [ "FACE_PAIRS_RENDER", "namespaceop.html#a1245f62cf98c4ee7591dfc8807ef355d", null ], + [ "FACE_PROTOTXT", "namespaceop.html#a4d07868d77fb11253b413ed579e04c22", null ], + [ 
"FACE_SCALES_RENDER", "namespaceop.html#a00c56c20997f734b2bd44d6f85b86cf0", null ], + [ "FACE_TRAINED_MODEL", "namespaceop.html#abd0ef2306478c3295283e7f1b59e3aff", null ], + [ "H135", "namespaceop.html#ae37c577c1054c89da4a6736342d491aa", null ], + [ "HAND_CCN_DECREASE_FACTOR", "namespaceop.html#aed0d108f5ada623eeb0ed41f896f8e97", null ], + [ "HAND_COLORS_RENDER", "namespaceop.html#a450bb646e7573322d8f622bfdbab4833", null ], + [ "HAND_DEFAULT_ALPHA_HEAT_MAP", "namespaceop.html#a76c1f1ea90b73e13e93f72413b3cab0e", null ], + [ "HAND_DEFAULT_ALPHA_KEYPOINT", "namespaceop.html#aa8cc53d2fe5353f9d87d50c32a8c1a95", null ], + [ "HAND_MAX_HANDS", "namespaceop.html#a182585e2e944cdb62f3dededdd85d1fc", null ], + [ "HAND_NUMBER_PARTS", "namespaceop.html#a41b6fb82924c5532cf10151e6ce497f2", null ], + [ "HAND_PAIRS_RENDER", "namespaceop.html#a335d707e98d311d39d9a9dab0e325391", null ], + [ "HAND_PROTOTXT", "namespaceop.html#a3fe70bd1eacdd78aef3344c83533ffc7", null ], + [ "HAND_SCALES_RENDER", "namespaceop.html#a4e9bbc2167923763c5982d6d1f41f560", null ], + [ "HAND_TRAINED_MODEL", "namespaceop.html#ac13af59538bcb8a1709f20010681d1c7", null ], + [ "POSE_DEFAULT_ALPHA_HEAT_MAP", "namespaceop.html#af45cddacd69fff73a4ea4acbbbac43e0", null ], + [ "POSE_DEFAULT_ALPHA_KEYPOINT", "namespaceop.html#a21fcb98366f6ea8895fc7f527f232db5", null ], + [ "POSE_MAX_PEOPLE", "namespaceop.html#a522d4552d2aeabe367f4d3bf371e6b3e", null ] +]; \ No newline at end of file diff --git a/web/html/doc/namespaceop_1_1_configure_error.html b/web/html/doc/namespaceop_1_1_configure_error.html new file mode 100644 index 000000000..6d748a5a8 --- /dev/null +++ b/web/html/doc/namespaceop_1_1_configure_error.html @@ -0,0 +1,147 @@ + + + + + + + +OpenPose: op::ConfigureError Namespace Reference + + + + + + + + + + + + + +
+ op::ConfigureError Namespace Reference
+ Functions:
+ OP_API std::vector< ErrorMode > getErrorModes ()
+ OP_API void setErrorModes (const std::vector< ErrorMode > &errorModes)
+ Function Documentation: op::ConfigureError::getErrorModes(), op::ConfigureError::setErrorModes()
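The page above documents OpenPose's error-mode configuration API. As a rough usage sketch (not part of the generated docs): the ErrorMode enumerator names used below are assumed from openpose/utilities/enumClasses.hpp and may differ in your OpenPose version.

    #include <openpose/headers.hpp>

    int main()
    {
        // Report errors on std::cerr and in the log file instead of only throwing.
        // ErrorMode::StdCerr and ErrorMode::FileLogging are assumed enumerator names.
        op::ConfigureError::setErrorModes({op::ErrorMode::StdCerr, op::ErrorMode::FileLogging});

        // Inspect which error modes are currently active.
        const auto errorModes = op::ConfigureError::getErrorModes();
        return errorModes.empty() ? 1 : 0;
    }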
    + + + + diff --git a/web/html/doc/namespaceop_1_1_configure_log.html b/web/html/doc/namespaceop_1_1_configure_log.html new file mode 100644 index 000000000..f1002640f --- /dev/null +++ b/web/html/doc/namespaceop_1_1_configure_log.html @@ -0,0 +1,186 @@ + + + + + + + +OpenPose: op::ConfigureLog Namespace Reference + + + + + + + + + + + + + +
+ op::ConfigureLog Namespace Reference
+ Functions:
+ OP_API Priority getPriorityThreshold ()
+ OP_API const std::vector< LogMode > & getLogModes ()
+ OP_API void setPriorityThreshold (const Priority priorityThreshold)
+ OP_API void setLogModes (const std::vector< LogMode > &loggingModes)
+ Function Documentation: op::ConfigureLog::getLogModes(), op::ConfigureLog::getPriorityThreshold(), op::ConfigureLog::setLogModes(), op::ConfigureLog::setPriorityThreshold()
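Similarly, op::ConfigureLog controls which log sinks are used and the minimum message priority. A minimal sketch follows; the Priority and LogMode enumerator names are assumed from openpose/utilities/enumClasses.hpp, and while opLog appears in the op namespace index earlier in this diff, the exact overload used here is an assumption.

    #include <openpose/headers.hpp>

    int main()
    {
        // Print only messages with priority >= High, to std::cout and the log file.
        // Priority::High, LogMode::StdCout and LogMode::FileLogging are assumed enumerator names.
        op::ConfigureLog::setPriorityThreshold(op::Priority::High);
        op::ConfigureLog::setLogModes({op::LogMode::StdCout, op::LogMode::FileLogging});

        // Assumed opLog(message, priority) overload; remaining parameters default.
        op::opLog("Logging configured.", op::Priority::High);
        return 0;
    }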
    + + + + diff --git a/web/html/doc/namespaces.html b/web/html/doc/namespaces.html new file mode 100644 index 000000000..ad8ad435d --- /dev/null +++ b/web/html/doc/namespaces.html @@ -0,0 +1,240 @@ + + + + + + + +OpenPose: Namespace List + + + + + + + + + + + + + +
+ Namespace List
+ Here is a list of all namespaces with brief descriptions:
+ [detail level 1 2]
+ N op
    + + + + diff --git a/web/html/doc/namespaces_dup.js b/web/html/doc/namespaces_dup.js new file mode 100644 index 000000000..51242d711 --- /dev/null +++ b/web/html/doc/namespaces_dup.js @@ -0,0 +1,4 @@ +var namespaces_dup = +[ + [ "op", "namespaceop.html", "namespaceop" ] +]; \ No newline at end of file diff --git a/web/html/doc/nav_f.png b/web/html/doc/nav_f.png new file mode 100644 index 000000000..72a58a529 Binary files /dev/null and b/web/html/doc/nav_f.png differ diff --git a/web/html/doc/nav_g.png b/web/html/doc/nav_g.png new file mode 100644 index 000000000..2093a237a Binary files /dev/null and b/web/html/doc/nav_g.png differ diff --git a/web/html/doc/nav_h.png b/web/html/doc/nav_h.png new file mode 100644 index 000000000..33389b101 Binary files /dev/null and b/web/html/doc/nav_h.png differ diff --git a/web/html/doc/navtree.css b/web/html/doc/navtree.css new file mode 100644 index 000000000..33341a67d --- /dev/null +++ b/web/html/doc/navtree.css @@ -0,0 +1,146 @@ +#nav-tree .children_ul { + margin:0; + padding:4px; +} + +#nav-tree ul { + list-style:none outside none; + margin:0px; + padding:0px; +} + +#nav-tree li { + white-space:nowrap; + margin:0px; + padding:0px; +} + +#nav-tree .plus { + margin:0px; +} + +#nav-tree .selected { + background-image: url('tab_a.png'); + background-repeat:repeat-x; + color: #fff; + text-shadow: 0px 1px 1px rgba(0, 0, 0, 1.0); +} + +#nav-tree img { + margin:0px; + padding:0px; + border:0px; + vertical-align: middle; +} + +#nav-tree a { + text-decoration:none; + padding:0px; + margin:0px; + outline:none; +} + +#nav-tree .label { + margin:0px; + padding:0px; + font: 12px 'Lucida Grande',Geneva,Helvetica,Arial,sans-serif; +} + +#nav-tree .label a { + padding:2px; +} + +#nav-tree .selected a { + text-decoration:none; + color:#fff; +} + +#nav-tree .children_ul { + margin:0px; + padding:0px; +} + +#nav-tree .item { + margin:0px; + padding:0px; +} + +#nav-tree { + padding: 0px 0px; + background-color: #FAFAFF; + font-size:14px; + overflow:auto; +} + +#doc-content { + overflow:auto; + display:block; + padding:0px; + margin:0px; + -webkit-overflow-scrolling : touch; /* iOS 5+ */ +} + +#side-nav { + padding:0 6px 0 0; + margin: 0px; + display:block; + position: absolute; + left: 0px; + width: 250px; +} + +.ui-resizable .ui-resizable-handle { + display:block; +} + +.ui-resizable-e { + background-image:url("splitbar.png"); + background-size:100%; + background-repeat:repeat-y; + background-attachment: scroll; + cursor:ew-resize; + height:100%; + right:0; + top:0; + width:6px; +} + +.ui-resizable-handle { + display:none; + font-size:0.1px; + position:absolute; + z-index:1; +} + +#nav-tree-contents { + margin: 6px 0px 0px 0px; +} + +#nav-tree { + background-image:url('nav_h.png'); + background-repeat:repeat-x; + background-color: #F9FAFC; + -webkit-overflow-scrolling : touch; /* iOS 5+ */ +} + +#nav-sync { + position:absolute; + top:5px; + right:24px; + z-index:0; +} + +#nav-sync img { + opacity:0.3; +} + +#nav-sync img:hover { + opacity:0.9; +} + +@media print +{ + #nav-tree { display: none; } + div.ui-resizable-handle { display: none; position: relative; } +} + diff --git a/web/html/doc/navtree.js b/web/html/doc/navtree.js new file mode 100644 index 000000000..1e272d31d --- /dev/null +++ b/web/html/doc/navtree.js @@ -0,0 +1,546 @@ +/* + @licstart The following is the entire license notice for the JavaScript code in this file. 
+ + The MIT License (MIT) + + Copyright (C) 1997-2020 by Dimitri van Heesch + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software + and associated documentation files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, publish, distribute, + sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or + substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + @licend The above is the entire license notice for the JavaScript code in this file + */ +var navTreeSubIndices = new Array(); +var arrowDown = '▼'; +var arrowRight = '►'; + +function getData(varName) +{ + var i = varName.lastIndexOf('/'); + var n = i>=0 ? varName.substring(i+1) : varName; + return eval(n.replace(/\-/g,'_')); +} + +function stripPath(uri) +{ + return uri.substring(uri.lastIndexOf('/')+1); +} + +function stripPath2(uri) +{ + var i = uri.lastIndexOf('/'); + var s = uri.substring(i+1); + var m = uri.substring(0,i+1).match(/\/d\w\/d\w\w\/$/); + return m ? uri.substring(i-6) : s; +} + +function hashValue() +{ + return $(location).attr('hash').substring(1).replace(/[^\w\-]/g,''); +} + +function hashUrl() +{ + return '#'+hashValue(); +} + +function pathName() +{ + return $(location).attr('pathname').replace(/[^-A-Za-z0-9+&@#/%?=~_|!:,.;\(\)]/g, ''); +} + +function localStorageSupported() +{ + try { + return 'localStorage' in window && window['localStorage'] !== null && window.localStorage.getItem; + } + catch(e) { + return false; + } +} + +function storeLink(link) +{ + if (!$("#nav-sync").hasClass('sync') && localStorageSupported()) { + window.localStorage.setItem('navpath',link); + } +} + +function deleteLink() +{ + if (localStorageSupported()) { + window.localStorage.setItem('navpath',''); + } +} + +function cachedLink() +{ + if (localStorageSupported()) { + return window.localStorage.getItem('navpath'); + } else { + return ''; + } +} + +function getScript(scriptName,func,show) +{ + var head = document.getElementsByTagName("head")[0]; + var script = document.createElement('script'); + script.id = scriptName; + script.type = 'text/javascript'; + script.onload = func; + script.src = scriptName+'.js'; + head.appendChild(script); +} + +function createIndent(o,domNode,node,level) +{ + var level=-1; + var n = node; + while (n.parentNode) { level++; n=n.parentNode; } + if (node.childrenData) { + var imgNode = document.createElement("span"); + imgNode.className = 'arrow'; + imgNode.style.paddingLeft=(16*level).toString()+'px'; + imgNode.innerHTML=arrowRight; + node.plus_img = imgNode; + node.expandToggle = document.createElement("a"); + node.expandToggle.href = "javascript:void(0)"; + node.expandToggle.onclick = function() { + if (node.expanded) { + $(node.getChildrenUL()).slideUp("fast"); + node.plus_img.innerHTML=arrowRight; + node.expanded = false; + } else { + 
expandNode(o, node, false, false); + } + } + node.expandToggle.appendChild(imgNode); + domNode.appendChild(node.expandToggle); + } else { + var span = document.createElement("span"); + span.className = 'arrow'; + span.style.width = 16*(level+1)+'px'; + span.innerHTML = ' '; + domNode.appendChild(span); + } +} + +var animationInProgress = false; + +function gotoAnchor(anchor,aname,updateLocation) +{ + var pos, docContent = $('#doc-content'); + var ancParent = $(anchor.parent()); + if (ancParent.hasClass('memItemLeft') || + ancParent.hasClass('memtitle') || + ancParent.hasClass('fieldname') || + ancParent.hasClass('fieldtype') || + ancParent.is(':header')) + { + pos = ancParent.position().top; + } else if (anchor.position()) { + pos = anchor.position().top; + } + if (pos) { + var dist = Math.abs(Math.min( + pos-docContent.offset().top, + docContent[0].scrollHeight- + docContent.height()-docContent.scrollTop())); + animationInProgress=true; + docContent.animate({ + scrollTop: pos + docContent.scrollTop() - docContent.offset().top + },Math.max(50,Math.min(500,dist)),function(){ + if (updateLocation) window.location.href=aname; + animationInProgress=false; + }); + } +} + +function newNode(o, po, text, link, childrenData, lastNode) +{ + var node = new Object(); + node.children = Array(); + node.childrenData = childrenData; + node.depth = po.depth + 1; + node.relpath = po.relpath; + node.isLast = lastNode; + + node.li = document.createElement("li"); + po.getChildrenUL().appendChild(node.li); + node.parentNode = po; + + node.itemDiv = document.createElement("div"); + node.itemDiv.className = "item"; + + node.labelSpan = document.createElement("span"); + node.labelSpan.className = "label"; + + createIndent(o,node.itemDiv,node,0); + node.itemDiv.appendChild(node.labelSpan); + node.li.appendChild(node.itemDiv); + + var a = document.createElement("a"); + node.labelSpan.appendChild(a); + node.label = document.createTextNode(text); + node.expanded = false; + a.appendChild(node.label); + if (link) { + var url; + if (link.substring(0,1)=='^') { + url = link.substring(1); + link = url; + } else { + url = node.relpath+link; + } + a.className = stripPath(link.replace('#',':')); + if (link.indexOf('#')!=-1) { + var aname = '#'+link.split('#')[1]; + var srcPage = stripPath(pathName()); + var targetPage = stripPath(link.split('#')[0]); + a.href = srcPage!=targetPage ? 
url : "javascript:void(0)"; + a.onclick = function(){ + storeLink(link); + if (!$(a).parent().parent().hasClass('selected')) + { + $('.item').removeClass('selected'); + $('.item').removeAttr('id'); + $(a).parent().parent().addClass('selected'); + $(a).parent().parent().attr('id','selected'); + } + var anchor = $(aname); + gotoAnchor(anchor,aname,true); + }; + } else { + a.href = url; + a.onclick = function() { storeLink(link); } + } + } else { + if (childrenData != null) + { + a.className = "nolink"; + a.href = "javascript:void(0)"; + a.onclick = node.expandToggle.onclick; + } + } + + node.childrenUL = null; + node.getChildrenUL = function() { + if (!node.childrenUL) { + node.childrenUL = document.createElement("ul"); + node.childrenUL.className = "children_ul"; + node.childrenUL.style.display = "none"; + node.li.appendChild(node.childrenUL); + } + return node.childrenUL; + }; + + return node; +} + +function showRoot() +{ + var headerHeight = $("#top").height(); + var footerHeight = $("#nav-path").height(); + var windowHeight = $(window).height() - headerHeight - footerHeight; + (function (){ // retry until we can scroll to the selected item + try { + var navtree=$('#nav-tree'); + navtree.scrollTo('#selected',100,{offset:-windowHeight/2}); + } catch (err) { + setTimeout(arguments.callee, 0); + } + })(); +} + +function expandNode(o, node, imm, showRoot) +{ + if (node.childrenData && !node.expanded) { + if (typeof(node.childrenData)==='string') { + var varName = node.childrenData; + getScript(node.relpath+varName,function(){ + node.childrenData = getData(varName); + expandNode(o, node, imm, showRoot); + }, showRoot); + } else { + if (!node.childrenVisited) { + getNode(o, node); + } + $(node.getChildrenUL()).slideDown("fast"); + node.plus_img.innerHTML = arrowDown; + node.expanded = true; + } + } +} + +function glowEffect(n,duration) +{ + n.addClass('glow').delay(duration).queue(function(next){ + $(this).removeClass('glow');next(); + }); +} + +function highlightAnchor() +{ + var aname = hashUrl(); + var anchor = $(aname); + if (anchor.parent().attr('class')=='memItemLeft'){ + var rows = $('.memberdecls tr[class$="'+hashValue()+'"]'); + glowEffect(rows.children(),300); // member without details + } else if (anchor.parent().attr('class')=='fieldname'){ + glowEffect(anchor.parent().parent(),1000); // enum value + } else if (anchor.parent().attr('class')=='fieldtype'){ + glowEffect(anchor.parent().parent(),1000); // struct field + } else if (anchor.parent().is(":header")) { + glowEffect(anchor.parent(),1000); // section header + } else { + glowEffect(anchor.next(),1000); // normal member + } +} + +function selectAndHighlight(hash,n) +{ + var a; + if (hash) { + var link=stripPath(pathName())+':'+hash.substring(1); + a=$('.item a[class$="'+link+'"]'); + } + if (a && a.length) { + a.parent().parent().addClass('selected'); + a.parent().parent().attr('id','selected'); + highlightAnchor(); + } else if (n) { + $(n.itemDiv).addClass('selected'); + $(n.itemDiv).attr('id','selected'); + } + if ($('#nav-tree-contents .item:first').hasClass('selected')) { + $('#nav-sync').css('top','30px'); + } else { + $('#nav-sync').css('top','5px'); + } + showRoot(); +} + +function showNode(o, node, index, hash) +{ + if (node && node.childrenData) { + if (typeof(node.childrenData)==='string') { + var varName = node.childrenData; + getScript(node.relpath+varName,function(){ + node.childrenData = getData(varName); + showNode(o,node,index,hash); + },true); + } else { + if (!node.childrenVisited) { + getNode(o, node); + } + 
$(node.getChildrenUL()).css({'display':'block'}); + node.plus_img.innerHTML = arrowDown; + node.expanded = true; + var n = node.children[o.breadcrumbs[index]]; + if (index+11) hash = '#'+parts[1].replace(/[^\w\-]/g,''); + else hash=''; + } + if (hash.match(/^#l\d+$/)) { + var anchor=$('a[name='+hash.substring(1)+']'); + glowEffect(anchor.parent(),1000); // line number + hash=''; // strip line number anchors + } + var url=root+hash; + var i=-1; + while (NAVTREEINDEX[i+1]<=url) i++; + if (i==-1) { i=0; root=NAVTREE[0][1]; } // fallback: show index + if (navTreeSubIndices[i]) { + gotoNode(o,i,root,hash,relpath) + } else { + getScript(relpath+'navtreeindex'+i,function(){ + navTreeSubIndices[i] = eval('NAVTREEINDEX'+i); + if (navTreeSubIndices[i]) { + gotoNode(o,i,root,hash,relpath); + } + },true); + } +} + +function showSyncOff(n,relpath) +{ + n.html(''); +} + +function showSyncOn(n,relpath) +{ + n.html(''); +} + +function toggleSyncButton(relpath) +{ + var navSync = $('#nav-sync'); + if (navSync.hasClass('sync')) { + navSync.removeClass('sync'); + showSyncOff(navSync,relpath); + storeLink(stripPath2(pathName())+hashUrl()); + } else { + navSync.addClass('sync'); + showSyncOn(navSync,relpath); + deleteLink(); + } +} + +var loadTriggered = false; +var readyTriggered = false; +var loadObject,loadToRoot,loadUrl,loadRelPath; + +$(window).on('load',function(){ + if (readyTriggered) { // ready first + navTo(loadObject,loadToRoot,loadUrl,loadRelPath); + showRoot(); + } + loadTriggered=true; +}); + +function initNavTree(toroot,relpath) +{ + var o = new Object(); + o.toroot = toroot; + o.node = new Object(); + o.node.li = document.getElementById("nav-tree-contents"); + o.node.childrenData = NAVTREE; + o.node.children = new Array(); + o.node.childrenUL = document.createElement("ul"); + o.node.getChildrenUL = function() { return o.node.childrenUL; }; + o.node.li.appendChild(o.node.childrenUL); + o.node.depth = 0; + o.node.relpath = relpath; + o.node.expanded = false; + o.node.isLast = true; + o.node.plus_img = document.createElement("span"); + o.node.plus_img.className = 'arrow'; + o.node.plus_img.innerHTML = arrowRight; + + if (localStorageSupported()) { + var navSync = $('#nav-sync'); + if (cachedLink()) { + showSyncOff(navSync,relpath); + navSync.removeClass('sync'); + } else { + showSyncOn(navSync,relpath); + } + navSync.click(function(){ toggleSyncButton(relpath); }); + } + + if (loadTriggered) { // load before ready + navTo(o,toroot,hashUrl(),relpath); + showRoot(); + } else { // ready before load + loadObject = o; + loadToRoot = toroot; + loadUrl = hashUrl(); + loadRelPath = relpath; + readyTriggered=true; + } + + $(window).bind('hashchange', function(){ + if (window.location.hash && window.location.hash.length>1){ + var a; + if ($(location).attr('hash')){ + var clslink=stripPath(pathName())+':'+hashValue(); + a=$('.item a[class$="'+clslink.replace(/apt-get", "md_doc_installation_2_additional_settings.html#autotoc_md207", null ], + [ "Scenario 2 - Caffe installed and OpenCV build from source", "md_doc_installation_2_additional_settings.html#autotoc_md208", null ], + [ "Scenario 3 - OpenCV already installed", "md_doc_installation_2_additional_settings.html#autotoc_md209", null ], + [ "Any Other Scenario", "md_doc_installation_2_additional_settings.html#autotoc_md210", null ] + ] ] + ] ] + ] ], + [ "OpenPose Doc - Installation (deprecated)", "md_doc_installation_deprecated_installation_deprecated.html", [ + [ "Contents", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md211", 
null ], + [ "Operating Systems", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md212", null ], + [ "Requirements", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md213", null ], + [ "Clone OpenPose", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md214", null ], + [ "Update OpenPose", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md215", null ], + [ "Ubuntu", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md216", [ + [ "Installation - CMake", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md217", null ], + [ "Prerequisites (Script Compilation or Manual Compilation)", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md218", null ], + [ "Installation - Script Compilation", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md219", null ], + [ "Installation - Manual Compilation", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md220", null ], + [ "Install Caffe", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md221", null ] + ] ], + [ "Select your desired Makefile file (run only one of the next 4 commands)", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md222", null ], + [ "Change any custom flag from the resulting Makefile.config (e.g., OpenCV 3, Atlas/OpenBLAS/MKL, etc.)", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md223", null ], + [ "Compile Caffe", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md224", null ], + [ "Same file cp command as the one used for Caffe", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md226", null ], + [ "Change any custom flag from the resulting Makefile.config (e.g., OpenCV 3, Atlas/OpenBLAS/MKL, etc.)", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md227", [ + [ "Windows", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md230", [ + [ "Install OpenPose", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md225", null ], + [ "Reinstallation", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md228", null ], + [ "Uninstallation", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md229", null ], + [ "Installation - Library", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md231", [ + [ "CMake Installer", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md232", null ], + [ "Deprecated Windows Installer", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md233", null ] + ] ], + [ "Uninstallation", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md234", null ], + [ "Reinstallation", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md235", null ] + ] ], + [ "Doxygen Documentation Autogeneration", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md236", null ], + [ "Custom Caffe", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md237", null ], + [ "Compiling without cuDNN", "md_doc_installation_deprecated_installation_deprecated.html#autotoc_md238", null ] + ] ] + ] ], + [ "OpenPose Doc - Installation on Nvidia Jetson TX1", "md_doc_installation_jetson_tx_installation_jetson_tx1.html", [ + [ "Introduction", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md239", null ], + [ "Purpose", 
"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md240", null ], + [ "Preliminary remarks", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md241", null ], + [ "Contents", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md242", null ], + [ "Prep the TX1", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md243", null ], + [ "Build custom kernel", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md244", null ], + [ "Build OpenCV from source", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md245", null ], + [ "Install Openpose", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md246", null ], + [ "Usage", "md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md247", null ] + ] ], + [ "OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.1", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html", [ + [ "Contents", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md248", null ], + [ "Requirements and Dependencies", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md249", null ], + [ "Installation", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md250", null ], + [ "Usage", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md251", null ] + ] ], + [ "OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.3", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html", [ + [ "Contents", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md252", null ], + [ "Requirements and Dependencies", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md253", null ], + [ "Installation", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md254", null ], + [ "Usage", "md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md255", null ] + ] ], + [ "OpenPose Very Advanced Doc - Library Structure", "md_doc_very_advanced_library_structure_0_index.html", null ], + [ "OpenPose Very Advanced Doc - Library Structure - Deep Overview", "md_doc_very_advanced_library_structure_1_library_deep_overview.html", [ + [ "Modules Diagram", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md256", null ], + [ "Debugging C++ Code", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md257", [ + [ "Finding Segmentation Faults", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md258", null ] + ] ], + [ "Accuracy", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md259", [ + [ "Checking OpenPose Accuracy Quantitatively", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md260", null ], + [ "Checking Ground-Truth Labels", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md261", null ] + ] ], + [ "OpenPose Coding Style", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md262", null ], + [ "Main Modules", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md263", null ], + [ "Basic Module: core", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md264", [ + [ "Array - The OpenPose Basic Raw Data Container", 
"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md265", [ + [ "Construction And Data allocation", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md266", null ], + [ "Data access", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md267", null ], + [ "Dimensionality Information", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md268", null ] + ] ], + [ "Datum - The OpenPose Basic Piece of Information Between Threads", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md269", null ] + ] ], + [ "Multi-Threading Module - thread", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md270", [ + [ "The ThreadManager Template Class", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md271", [ + [ "Constructor", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md272", null ], + [ "Adding a Worker Sequence", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md273", null ], + [ "Threading Configuration Modes", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md274", null ], + [ "Thread Id:", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md275", null ], + [ "Queue Id:", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md276", null ] + ] ], + [ "The Worker Template Class - The Parent Class of All Workers", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md277", null ], + [ "Creating New Workers", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md278", null ], + [ "All Workers Wrap a Non-Worker Class", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md279", null ] + ] ], + [ "Multi-Person Key-Point Detection module - pose", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md280", [ + [ "PoseExtractor Class", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md281", [ + [ "Constructor", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md282", null ], + [ "Detect Human Pose", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md283", null ] + ] ], + [ "PoseRenderer Class", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md284", [ + [ "Constructor", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md285", null ], + [ "Render Human Pose", "md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md286", null ] + ] ] + ] ] + ] ], + [ "OpenPose Very Advanced Doc - Library Structure - How to Extend Functionality", "md_doc_very_advanced_library_structure_2_library_extend_functionality.html", null ], + [ "OpenPose Very Advanced Doc - Library Structure - Steps to Add a New Module", "md_doc_very_advanced_library_structure_3_library_add_new_module.html", [ + [ "Developing Steps", "md_doc_very_advanced_library_structure_3_library_add_new_module.html#autotoc_md287", null ], + [ "Release Steps", "md_doc_very_advanced_library_structure_3_library_add_new_module.html#autotoc_md288", null ] + ] ], + [ "Namespaces", "namespaces.html", [ + [ "Namespace List", "namespaces.html", "namespaces_dup" ], + [ "Namespace Members", "namespacemembers.html", [ + [ "All", "namespacemembers.html", "namespacemembers_dup" ], + [ "Functions", 
"namespacemembers_func.html", "namespacemembers_func" ], + [ "Variables", "namespacemembers_vars.html", null ], + [ "Typedefs", "namespacemembers_type.html", null ], + [ "Enumerations", "namespacemembers_enum.html", null ] + ] ] + ] ], + [ "Classes", "annotated.html", [ + [ "Class List", "annotated.html", "annotated_dup" ], + [ "Class Index", "classes.html", null ], + [ "Class Hierarchy", "hierarchy.html", "hierarchy" ], + [ "Class Members", "functions.html", [ + [ "All", "functions.html", "functions_dup" ], + [ "Functions", "functions_func.html", "functions_func" ], + [ "Variables", "functions_vars.html", null ] + ] ] + ] ], + [ "Files", "files.html", [ + [ "File List", "files.html", "files_dup" ], + [ "File Members", "globals.html", [ + [ "All", "globals.html", null ], + [ "Functions", "globals_func.html", null ], + [ "Variables", "globals_vars.html", null ], + [ "Macros", "globals_defs.html", null ] + ] ] + ] ] + ] ] +]; + +var NAVTREEINDEX = +[ +"3d_2headers_8hpp.html", +"classop_1_1_gpu_renderer.html#a5d729aab549908c758953be742dd0115", +"classop_1_1_pose_extractor_caffe.html#ae5d41065ea3eaf37d2c9663aa35554d6", +"classop_1_1_w_coco_json_saver.html#a508c1105406b3cc55dc6bd1b299f6ed3", +"classop_1_1_wrapper_t.html#af3d5d56e63b8c6faee0d7954db95c69d", +"flags_8hpp.html#a8bd040787ac075ae4cf483be01fe2c5f", +"macros_8hpp.html#a7bacf9f65110ec8292bc69e1eb0f426e", +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md212", +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715acae9989db5dabeea617f40c8dbfd07f5fb", +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fad788fbec25069f2884ee1ed97e0af2b9", +"resize_and_merge_caffe_8hpp.html", +"utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94" +]; + +var SYNCONMSG = 'click to disable panel synchronisation'; +var SYNCOFFMSG = 'click to enable panel synchronisation'; \ No newline at end of file diff --git a/web/html/doc/navtreeindex0.js b/web/html/doc/navtreeindex0.js new file mode 100644 index 000000000..8e1837774 --- /dev/null +++ b/web/html/doc/navtreeindex0.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX0 = +{ +"3d_2headers_8hpp.html":[30,0,1,0,0,1], +"3d_2headers_8hpp_source.html":[30,0,1,0,0,1], +"annotated.html":[29,0], +"array_8hpp.html":[30,0,1,0,2,0], +"array_8hpp_source.html":[30,0,1,0,2,0], +"array_cpu_gpu_8hpp.html":[30,0,1,0,2,1], +"array_cpu_gpu_8hpp_source.html":[30,0,1,0,2,1], +"body_part_connector_base_8hpp.html":[30,0,1,0,8,0], +"body_part_connector_base_8hpp.html#a2ae13dae91c41b29063b48158ccbcc4e":[30,0,1,0,8,0,0], +"body_part_connector_base_8hpp.html#a36f0207c6263e7174f4c79eba7c4df3f":[30,0,1,0,8,0,5], +"body_part_connector_base_8hpp.html#a3dbd17f2f656a2bc751441a42b5b9516":[30,0,1,0,8,0,6], +"body_part_connector_base_8hpp.html#a77a4d87bbee791dfba0667aa10bcca99":[30,0,1,0,8,0,2], +"body_part_connector_base_8hpp.html#a927468f6931ddb1e7d1e6e6e59b8bd36":[30,0,1,0,8,0,1], +"body_part_connector_base_8hpp.html#aaec4a34b015f898d28be2b9f2aba0d38":[30,0,1,0,8,0,4], +"body_part_connector_base_8hpp.html#ae01dd412590493f5f732594e8332d3f0":[30,0,1,0,8,0,7], +"body_part_connector_base_8hpp.html#ae5d883da8c8f11356d5e1b61bc3a99b6":[30,0,1,0,8,0,3], +"body_part_connector_base_8hpp_source.html":[30,0,1,0,8,0], +"body_part_connector_caffe_8hpp.html":[30,0,1,0,8,1], +"body_part_connector_caffe_8hpp_source.html":[30,0,1,0,8,1], +"bvh_saver_8hpp.html":[30,0,1,0,4,0], +"bvh_saver_8hpp_source.html":[30,0,1,0,4,0], +"calibration_2headers_8hpp.html":[30,0,1,0,1,1], +"calibration_2headers_8hpp_source.html":[30,0,1,0,1,1], 
+"camera_parameter_estimation_8hpp.html":[30,0,1,0,1,0], +"camera_parameter_estimation_8hpp.html#a1fd317d44606181c63ef8a4e5676a09e":[30,0,1,0,1,0,1], +"camera_parameter_estimation_8hpp.html#a37cdfa8dd466c3df9e7da5724a909143":[30,0,1,0,1,0,2], +"camera_parameter_estimation_8hpp.html#a50526c188f2ba94b07e0945c0871fd2c":[30,0,1,0,1,0,3], +"camera_parameter_estimation_8hpp.html#aed964859fbd282bd29f2b818a3bf10dd":[30,0,1,0,1,0,0], +"camera_parameter_estimation_8hpp_source.html":[30,0,1,0,1,0], +"camera_parameter_reader_8hpp.html":[30,0,1,0,0,0], +"camera_parameter_reader_8hpp_source.html":[30,0,1,0,0,0], +"check_8hpp.html":[30,0,1,0,14,0], +"check_8hpp.html#a1e71130dc8f280e4664c711128b18b42":[30,0,1,0,14,0,5], +"check_8hpp.html#a3dd874d4341b99431819f9fa6b678ca9":[30,0,1,0,14,0,3], +"check_8hpp.html#a410201fcc46274e24726c5a601bc1721":[30,0,1,0,14,0,0], +"check_8hpp.html#a7ecfc02dca25534a071acf3136ff175e":[30,0,1,0,14,0,4], +"check_8hpp.html#a92e8cd01741c90fbfdfaa33a13803f34":[30,0,1,0,14,0,2], +"check_8hpp.html#aaada2594361f6f929af5b1f9d50387eb":[30,0,1,0,14,0,6], +"check_8hpp.html#aaff52f436911aa17bebb999cd91a44fd":[30,0,1,0,14,0,1], +"check_8hpp_source.html":[30,0,1,0,14,0], +"classes.html":[29,1], +"classop_1_1_array.html":[29,0,0,3], +"classop_1_1_array.html#a0ad0232daa69783cf2c8f7a0ff5b3b0c":[29,0,0,3,40], +"classop_1_1_array.html#a0e1d5ce14d11caa3b92306ee677af4cc":[29,0,0,3,31], +"classop_1_1_array.html#a12e538b09e98bf0900163031602ed2ed":[29,0,0,3,35], +"classop_1_1_array.html#a28f09d11de753a741334ee8094296acb":[29,0,0,3,42], +"classop_1_1_array.html#a3252c38318d81a8b8fb6f71f4d4c2642":[29,0,0,3,39], +"classop_1_1_array.html#a38de9c4ba539b8134fcac91287722044":[29,0,0,3,24], +"classop_1_1_array.html#a3f26a48c35cde008970078a66ff6e5c7":[29,0,0,3,34], +"classop_1_1_array.html#a416e95541761c557c50b79b5e1b33389":[29,0,0,3,6], +"classop_1_1_array.html#a4568f646a97fa8cea443b864d91a28df":[29,0,0,3,22], +"classop_1_1_array.html#a48c1ba1f7017b5aa8e0451079dd3a6d3":[29,0,0,3,1], +"classop_1_1_array.html#a4a24dfa0d0f1f3769bf3bfcea47e2220":[29,0,0,3,11], +"classop_1_1_array.html#a530010928025b3f64743505d732b1308":[29,0,0,3,18], +"classop_1_1_array.html#a5a68cca98a3ebaf565f1e546eebd9f01":[29,0,0,3,8], +"classop_1_1_array.html#a5ed838d2b9933b6a80906d0e0db39742":[29,0,0,3,27], +"classop_1_1_array.html#a5eff0723f0bbd192248e602bfbb6956f":[29,0,0,3,19], +"classop_1_1_array.html#a6e0afd5f447efbfc29efbeac62716eff":[29,0,0,3,10], +"classop_1_1_array.html#a793b9851c7490bc98d4dd52020c0cd3c":[29,0,0,3,0], +"classop_1_1_array.html#a7a7d854d63815e10e158fe889d17a88e":[29,0,0,3,9], +"classop_1_1_array.html#a85d749e637a7528325f86b80595a91d1":[29,0,0,3,20], +"classop_1_1_array.html#a8eebb6c34642cdf19ac74c7ed38d128b":[29,0,0,3,12], +"classop_1_1_array.html#a90895562def04a81db0b3e7eaa3722c7":[29,0,0,3,5], +"classop_1_1_array.html#a959ede0df7e535d2d3ac40d098541c27":[29,0,0,3,3], +"classop_1_1_array.html#a9b43d8d495a233c384a75a3f33eae75f":[29,0,0,3,16], +"classop_1_1_array.html#a9c8e006e0eea472485f37971330ecbab":[29,0,0,3,29], +"classop_1_1_array.html#a9cd386050e94c29b3c4ee40cafcacc46":[29,0,0,3,4], +"classop_1_1_array.html#a9f4b51216faaa967d81598a0cedcf78f":[29,0,0,3,41], +"classop_1_1_array.html#aa173085fa7ec7c7af3a443c617edd97a":[29,0,0,3,15], +"classop_1_1_array.html#aa40dc59e800d3c4cce623d560c0e0fad":[29,0,0,3,30], +"classop_1_1_array.html#aada0f1bd6e9eb73b4f977e62da536f58":[29,0,0,3,32], +"classop_1_1_array.html#ab033fba3d9140020dd89edb10fe8b109":[29,0,0,3,25], 
+"classop_1_1_array.html#ab0b95bf5488cccad3bce7413251b04de":[29,0,0,3,14], +"classop_1_1_array.html#ab4123b36e0816793e206365397dd8f79":[29,0,0,3,23], +"classop_1_1_array.html#ac4e9514cfee78a3a0236c1a6265376d8":[29,0,0,3,33], +"classop_1_1_array.html#ac5e77d6926d1d344cf54c88036fc8a9c":[29,0,0,3,17], +"classop_1_1_array.html#ac7183eb2f4e78a6941da3a2079b9ed32":[29,0,0,3,36], +"classop_1_1_array.html#ac833fdcb245fcc3135ce65227bb9e4b2":[29,0,0,3,2], +"classop_1_1_array.html#add2eeccd967cdf0900449649cb6f5afb":[29,0,0,3,38], +"classop_1_1_array.html#ae0c3d1a662f6c213da16ac87e53120fc":[29,0,0,3,37], +"classop_1_1_array.html#ae388368128afac05369172198911e05d":[29,0,0,3,28], +"classop_1_1_array.html#ae74dec8220582072f85fb3ec430238ce":[29,0,0,3,13], +"classop_1_1_array.html#aee364306687e39e754117c98ad844157":[29,0,0,3,26], +"classop_1_1_array.html#af4715967fd2b028a97fd30257e697275":[29,0,0,3,21], +"classop_1_1_array.html#af488c66ddac6cb75f7690ba8207599ed":[29,0,0,3,43], +"classop_1_1_array.html#afb4e1f55747898d29aa13606ded9991f":[29,0,0,3,7], +"classop_1_1_array_cpu_gpu.html":[29,0,0,4], +"classop_1_1_array_cpu_gpu.html#a0234d36fab318cc2c672134fd4009301":[29,0,0,4,3], +"classop_1_1_array_cpu_gpu.html#a0a307838959472e8e8815d76305c1bf6":[29,0,0,4,30], +"classop_1_1_array_cpu_gpu.html#a16dc8c19cc0b0442c1be6c859fe7d33c":[29,0,0,4,29], +"classop_1_1_array_cpu_gpu.html#a1ae24508718592592421f9491bcf50f0":[29,0,0,4,18], +"classop_1_1_array_cpu_gpu.html#a1cc1cc3226543f5a2eb4c8ddcb5ec8a5":[29,0,0,4,28], +"classop_1_1_array_cpu_gpu.html#a280202f2a968ea68795d31accf5072bb":[29,0,0,4,37], +"classop_1_1_array_cpu_gpu.html#a292b819460cbf56fd36e7435cd99c49a":[29,0,0,4,15], +"classop_1_1_array_cpu_gpu.html#a2eb57d0cb4f902b275d126e4b6f706f2":[29,0,0,4,7], +"classop_1_1_array_cpu_gpu.html#a3e44f7ede3ff5ef0db6df30ecd4562c5":[29,0,0,4,34], +"classop_1_1_array_cpu_gpu.html#a3f10532b10ec840aa9e1dac3ccc7ee25":[29,0,0,4,14], +"classop_1_1_array_cpu_gpu.html#a425d12f8d702ac9a57fb9a5f48cea152":[29,0,0,4,35], +"classop_1_1_array_cpu_gpu.html#a4836fabbedf7e1ef97bfbd4d33db3d96":[29,0,0,4,13], +"classop_1_1_array_cpu_gpu.html#a4dd6f5e4d7f54e921c7c296078a594f0":[29,0,0,4,32], +"classop_1_1_array_cpu_gpu.html#a4fb245f1557f61192ab54c24f4baf487":[29,0,0,4,1], +"classop_1_1_array_cpu_gpu.html#a5011662a5cf4bc7f7c1a2d966dcc44cd":[29,0,0,4,39], +"classop_1_1_array_cpu_gpu.html#a50f82490bab162626760d420f5f6779c":[29,0,0,4,9], +"classop_1_1_array_cpu_gpu.html#a675012bf11b17cb7b7c41b5dc1beccf6":[29,0,0,4,5], +"classop_1_1_array_cpu_gpu.html#a678e65cb71d2cc2e1070499465894892":[29,0,0,4,23], +"classop_1_1_array_cpu_gpu.html#a6dd38e027220beada2f8f55f9d073d53":[29,0,0,4,36], +"classop_1_1_array_cpu_gpu.html#a6eafc0638925b776bb78c68c1fef972a":[29,0,0,4,20], +"classop_1_1_array_cpu_gpu.html#a7c92a38509887af087eafd7522047429":[29,0,0,4,17], +"classop_1_1_array_cpu_gpu.html#a7cf928cf41b4477c59c91a0f45c0443c":[29,0,0,4,4], +"classop_1_1_array_cpu_gpu.html#a7e982b668191924e6665645790fa18a2":[29,0,0,4,11], +"classop_1_1_array_cpu_gpu.html#a9aa5001613f7199de933eef152db40b0":[29,0,0,4,19], +"classop_1_1_array_cpu_gpu.html#a9e3c6d812430d638187441e9d5cacfcc":[29,0,0,4,27], +"classop_1_1_array_cpu_gpu.html#aa0717b11c87da804e6da0d7aca4a5414":[29,0,0,4,16], +"classop_1_1_array_cpu_gpu.html#aa3e701c15f11e563e0b442c28143188d":[29,0,0,4,8], +"classop_1_1_array_cpu_gpu.html#aa87f658e2ff9699908f5cb823e988188":[29,0,0,4,24], +"classop_1_1_array_cpu_gpu.html#aba03b602ed1c745b3ba344d7ccedfd30":[29,0,0,4,21], 
+"classop_1_1_array_cpu_gpu.html#ac0bb37052ae356e85d681f52a4716f3c":[29,0,0,4,22], +"classop_1_1_array_cpu_gpu.html#ac5d005ccb8a3b8aba935e5276fcd20e4":[29,0,0,4,12], +"classop_1_1_array_cpu_gpu.html#acd9ea6e75dd2eb516d6a91bac91e43e4":[29,0,0,4,10], +"classop_1_1_array_cpu_gpu.html#ad5d631890ff61a2e70695b797f1a6f34":[29,0,0,4,2], +"classop_1_1_array_cpu_gpu.html#ad6e6a4da3987e9558d89b877f9ec7e82":[29,0,0,4,31], +"classop_1_1_array_cpu_gpu.html#ad79b3b3cf4180535211e20e086262837":[29,0,0,4,25], +"classop_1_1_array_cpu_gpu.html#adeb393edfae4967cb510a8c7a2d07d80":[29,0,0,4,6], +"classop_1_1_array_cpu_gpu.html#aee39459d54376c7ec98155b4add7f961":[29,0,0,4,0], +"classop_1_1_array_cpu_gpu.html#af24813492bad97de4e4c628fe356abe7":[29,0,0,4,26], +"classop_1_1_array_cpu_gpu.html#af817bde68da318a8f9dd08feabe3c286":[29,0,0,4,33], +"classop_1_1_array_cpu_gpu.html#af9f32307732772c708ff45c52b4e7dd0":[29,0,0,4,38], +"classop_1_1_body_part_connector_caffe.html":[29,0,0,76], +"classop_1_1_body_part_connector_caffe.html#a03364fbed0c71e76eb5fb1f61a397de8":[29,0,0,76,5], +"classop_1_1_body_part_connector_caffe.html#a0bad959b2da005b62cab962327ccba01":[29,0,0,76,16], +"classop_1_1_body_part_connector_caffe.html#a104744fdab14d4c1335eb8778edea21e":[29,0,0,76,15], +"classop_1_1_body_part_connector_caffe.html#a51324177e60bf260f6c2def76e9e3d7d":[29,0,0,76,7], +"classop_1_1_body_part_connector_caffe.html#a52cc46828bc7720a62fbbe841022331e":[29,0,0,76,4], +"classop_1_1_body_part_connector_caffe.html#a6442721373481873ddeb9ffd7c6fdb7b":[29,0,0,76,13], +"classop_1_1_body_part_connector_caffe.html#a6d859f2e218b1ea707fddcaf0911886d":[29,0,0,76,12], +"classop_1_1_body_part_connector_caffe.html#a75d0a3f3c8dca99c8a9e1b680098da16":[29,0,0,76,11], +"classop_1_1_body_part_connector_caffe.html#a789c77e69e5590a78b22e1e5f5cc4efc":[29,0,0,76,10], +"classop_1_1_body_part_connector_caffe.html#a8ad522722607c9ff6299337019f04175":[29,0,0,76,2], +"classop_1_1_body_part_connector_caffe.html#a94e2364fa13ea79b2d6fd72c5db34765":[29,0,0,76,0], +"classop_1_1_body_part_connector_caffe.html#a9b9fa9490fef0121a70c3d6d749272f7":[29,0,0,76,14], +"classop_1_1_body_part_connector_caffe.html#a9dbcac7de4a57a58733462f3ce1db10c":[29,0,0,76,6], +"classop_1_1_body_part_connector_caffe.html#ab0beade5f7d8e56e881231e46f9306ec":[29,0,0,76,1], +"classop_1_1_body_part_connector_caffe.html#abf26360f5d25fab82705270dae5f5d86":[29,0,0,76,8], +"classop_1_1_body_part_connector_caffe.html#ace9cba081581a4c58fcfbef73b6dd11b":[29,0,0,76,3], +"classop_1_1_body_part_connector_caffe.html#aec0d6f32107a6222406e73ca9ae4942d":[29,0,0,76,17], +"classop_1_1_body_part_connector_caffe.html#af4520659b0cfb644a3c2d6ceb0e81a21":[29,0,0,76,9], +"classop_1_1_camera_parameter_reader.html":[29,0,0,0], +"classop_1_1_camera_parameter_reader.html#a2be8ff6d89e5f623f476c75afe3c5c3b":[29,0,0,0,11], +"classop_1_1_camera_parameter_reader.html#a4c819945b0df95bcfb6f8d79451290d5":[29,0,0,0,10], +"classop_1_1_camera_parameter_reader.html#a6db1e0c2b4ed63407d12ff0de97cb098":[29,0,0,0,6], +"classop_1_1_camera_parameter_reader.html#a7210bc5ebfe6e90a52524b7f0f3f3991":[29,0,0,0,15], +"classop_1_1_camera_parameter_reader.html#a8122bb2a8a07555b5341141356fa37c7":[29,0,0,0,4], +"classop_1_1_camera_parameter_reader.html#a88c9f18f6b0f3e5d09240e65a5e04beb":[29,0,0,0,5], +"classop_1_1_camera_parameter_reader.html#a8d97033970f3e71657da070cd87fd70c":[29,0,0,0,9], +"classop_1_1_camera_parameter_reader.html#a8edb22b20d1ed044335ec0d2175eeabf":[29,0,0,0,3], 
+"classop_1_1_camera_parameter_reader.html#a906fd316f09d901280a5fe10a4a54541":[29,0,0,0,12], +"classop_1_1_camera_parameter_reader.html#a975e5a340bd1b77d680007797ec9eeea":[29,0,0,0,7], +"classop_1_1_camera_parameter_reader.html#aae3c60cbed99e1b5706c96577732ddae":[29,0,0,0,2], +"classop_1_1_camera_parameter_reader.html#ab7a4c3ef7ac8d8a41e5711ec85b7be4b":[29,0,0,0,0], +"classop_1_1_camera_parameter_reader.html#acf2b4d428d18f2663f4df640171b254d":[29,0,0,0,8], +"classop_1_1_camera_parameter_reader.html#acfa701389b1e566e1ea49cfd2605bbf8":[29,0,0,0,1], +"classop_1_1_camera_parameter_reader.html#ae33e8637012413719b389649d1e5448a":[29,0,0,0,13], +"classop_1_1_camera_parameter_reader.html#aee02b82d0c5fd51dd3ba5a2267f7b370":[29,0,0,0,14], +"classop_1_1_coco_json_saver.html":[29,0,0,37], +"classop_1_1_coco_json_saver.html#a2c3777cb55d09ee3911d4ed69334b17f":[29,0,0,37,2], +"classop_1_1_coco_json_saver.html#a6d596768658b4b32430d3686be557e33":[29,0,0,37,0], +"classop_1_1_coco_json_saver.html#a8bbfab84a7816cb0f189f243246f744b":[29,0,0,37,1], +"classop_1_1_cv_mat_to_op_input.html":[29,0,0,5], +"classop_1_1_cv_mat_to_op_input.html#a449eacb6cce9678c3ae399c68a45a2e5":[29,0,0,5,0], +"classop_1_1_cv_mat_to_op_input.html#ad7c70d7843d64ab0dce9a8a1d993e5b5":[29,0,0,5,2], +"classop_1_1_cv_mat_to_op_input.html#adbe9ae80914d9c1d224c1fe753519090":[29,0,0,5,1], +"classop_1_1_cv_mat_to_op_output.html":[29,0,0,6], +"classop_1_1_cv_mat_to_op_output.html#a054c4dd7892ad540405413b071459b42":[29,0,0,6,0], +"classop_1_1_cv_mat_to_op_output.html#abc8953e080adc30fa52345322ae8445a":[29,0,0,6,1], +"classop_1_1_cv_mat_to_op_output.html#ad0ac01a9866ea00c873da7e2552c5b08":[29,0,0,6,3], +"classop_1_1_cv_mat_to_op_output.html#ad15a20bf40389e7dea888e982bd64e8b":[29,0,0,6,2], +"classop_1_1_datum_producer.html":[29,0,0,92], +"classop_1_1_datum_producer.html#a39da4822705d23ca7e600b69f39e69be":[29,0,0,92,2], +"classop_1_1_datum_producer.html#a4d52ee6961e2c5c9564f49d203a2865e":[29,0,0,92,0], +"classop_1_1_datum_producer.html#ad12f3202a265c989430d15bf7476a326":[29,0,0,92,1], +"classop_1_1_face_cpu_renderer.html":[29,0,0,26], +"classop_1_1_face_cpu_renderer.html#a233f2a83930d07e4d420b43c8a660f32":[29,0,0,26,2], +"classop_1_1_face_cpu_renderer.html#a5c5e1e9b016bd33b5740beb04fc0fb49":[29,0,0,26,1], +"classop_1_1_face_cpu_renderer.html#aa2f7c7f0a5a8df2dbb94c8a01fa41e2a":[29,0,0,26,3], +"classop_1_1_face_cpu_renderer.html#afb0dcfff75c4a89d5971d7b0bbd0b51b":[29,0,0,26,0], +"classop_1_1_face_detector.html":[29,0,0,27], +"classop_1_1_face_detector.html#a66ff3806053a5f86d01724f5029e0859":[29,0,0,27,1], +"classop_1_1_face_detector.html#a6db84197d64104da0c26f49ecf8facd1":[29,0,0,27,2], +"classop_1_1_face_detector.html#adfeab6977c93b7bef66c1dfbcf6f8150":[29,0,0,27,0], +"classop_1_1_face_detector_open_c_v.html":[29,0,0,28], +"classop_1_1_face_detector_open_c_v.html#a88eae893ff7f7664243cadf0f84500da":[29,0,0,28,1], +"classop_1_1_face_detector_open_c_v.html#a8c4d55863b726448762a142fa91bb69d":[29,0,0,28,0], +"classop_1_1_face_detector_open_c_v.html#aba2826bad3f87ce3967e1f999f941fc5":[29,0,0,28,2], +"classop_1_1_face_extractor_caffe.html":[29,0,0,29], +"classop_1_1_face_extractor_caffe.html#a4450e656f21a8cb7f1d9bf5f545012f1":[29,0,0,29,1], +"classop_1_1_face_extractor_caffe.html#ad78fc3e86428d89a513e8e3be10fc47f":[29,0,0,29,2], +"classop_1_1_face_extractor_caffe.html#add2a24d9bd5e03ff90034239e90523c2":[29,0,0,29,3], +"classop_1_1_face_extractor_caffe.html#adedc0e50f2eacd8e02c5bd8b0563b2ee":[29,0,0,29,0], +"classop_1_1_face_extractor_net.html":[29,0,0,30], 
+"classop_1_1_face_extractor_net.html#a125b052c75a5e39890e140e962b37838":[29,0,0,30,0], +"classop_1_1_face_extractor_net.html#a18911596f5ba442d50718f54a3d64fe0":[29,0,0,30,3], +"classop_1_1_face_extractor_net.html#a1ba97136b2cc006cd066e3e950f0c179":[29,0,0,30,5], +"classop_1_1_face_extractor_net.html#a3bf177dbf1a3effbe6b15545e6102d6e":[29,0,0,30,14], +"classop_1_1_face_extractor_net.html#a43bd29f8c1fc0dbef051bd574df2deca":[29,0,0,30,12], +"classop_1_1_face_extractor_net.html#a4cd488333e450cfbb19aab8910e7f138":[29,0,0,30,1], +"classop_1_1_face_extractor_net.html#a5d3437e6a4a0fd834232b0afaab95a8a":[29,0,0,30,11], +"classop_1_1_face_extractor_net.html#a637f9c4c19e110be435cd05052248f86":[29,0,0,30,9], +"classop_1_1_face_extractor_net.html#a6a9a02b46596283cab6f8a4640161081":[29,0,0,30,7], +"classop_1_1_face_extractor_net.html#a6c00e96ddf7465062d6f0b51a7a1348d":[29,0,0,30,8], +"classop_1_1_face_extractor_net.html#a6c5d14660690396edb1a939b11962a68":[29,0,0,30,2], +"classop_1_1_face_extractor_net.html#a6d6d5d6bd912bb940058a2b958aadf61":[29,0,0,30,6], +"classop_1_1_face_extractor_net.html#aa3f6566e8b857262f57e18a88c90b9be":[29,0,0,30,13], +"classop_1_1_face_extractor_net.html#acf72945f62375b6ac8939c463a616f4a":[29,0,0,30,15], +"classop_1_1_face_extractor_net.html#ae18226cef1478a929df9061c7d699c6f":[29,0,0,30,10], +"classop_1_1_face_extractor_net.html#aee0d7b760214c805466ae515938b5190":[29,0,0,30,4], +"classop_1_1_face_gpu_renderer.html":[29,0,0,31], +"classop_1_1_face_gpu_renderer.html#a344b4f1d256d6ad805273eb8ba29cde1":[29,0,0,31,0], +"classop_1_1_face_gpu_renderer.html#a6ebd9287927529ffaa4200890190896b":[29,0,0,31,2], +"classop_1_1_face_gpu_renderer.html#a94758beab4bfbfed02cc8330a63abaeb":[29,0,0,31,1], +"classop_1_1_face_gpu_renderer.html#ae54b7538a6ed6a5eaedcbdc117a0d61c":[29,0,0,31,3], +"classop_1_1_face_renderer.html":[29,0,0,32], +"classop_1_1_face_renderer.html#a8ba7bad616bd2cf673d8faa846bf95b5":[29,0,0,32,0], +"classop_1_1_face_renderer.html#aa34ce7a0602b0994cc3043b80627a31c":[29,0,0,32,1], +"classop_1_1_face_renderer.html#acbbdaca16f4115a5a68d006f4f325397":[29,0,0,32,2], +"classop_1_1_file_saver.html":[29,0,0,38], +"classop_1_1_file_saver.html#a080e6bb80adad7a3d534356cdfe40211":[29,0,0,38,1], +"classop_1_1_file_saver.html#a52aab3187cefc2e878790aa440a842aa":[29,0,0,38,3], +"classop_1_1_file_saver.html#a5940f007f3346580124cd1b6b27492e6":[29,0,0,38,2], +"classop_1_1_file_saver.html#aa4632ae62ac77dbad85523845ce79999":[29,0,0,38,0], +"classop_1_1_flir_reader.html":[29,0,0,93], +"classop_1_1_flir_reader.html#a3d383e03a405dcbff566a86253db90af":[29,0,0,93,7], +"classop_1_1_flir_reader.html#a5101cdbcd46e51bf7f35995a3d87e900":[29,0,0,93,2], +"classop_1_1_flir_reader.html#a66d6144c5dcb0dd3cbadcd6f8eefa9e0":[29,0,0,93,1], +"classop_1_1_flir_reader.html#a711db0919bd7516fde3e641c13259637":[29,0,0,93,6], +"classop_1_1_flir_reader.html#a7ddcdf533c778df342a50c24c280499b":[29,0,0,93,5], +"classop_1_1_flir_reader.html#a8fa5c03b6ce95372ce47013c01c782a5":[29,0,0,93,0], +"classop_1_1_flir_reader.html#ab28f40422c9edff8594d855bbef91f58":[29,0,0,93,8], +"classop_1_1_flir_reader.html#acb45c9a89ebc92c0a8ee69a0ec4d0476":[29,0,0,93,4], +"classop_1_1_flir_reader.html#ad3b940d5ed672ef17406843b102e9715":[29,0,0,93,3], +"classop_1_1_flir_reader.html#af14f63c79272781429341dc3a0720485":[29,0,0,93,9], +"classop_1_1_frame_displayer.html":[29,0,0,56], +"classop_1_1_frame_displayer.html#a21a746ef46172c6a18ea72da6e7b5721":[29,0,0,56,0], +"classop_1_1_frame_displayer.html#a23263864af418160f489072716ba9951":[29,0,0,56,3], 
+"classop_1_1_frame_displayer.html#a2df856e4cf7542c7cda2757553674fb8":[29,0,0,56,5], +"classop_1_1_frame_displayer.html#aa99517efbef90cd8a6e171a713c37501":[29,0,0,56,2], +"classop_1_1_frame_displayer.html#ab3dea1eefac57cf129b4828ecd856fb4":[29,0,0,56,1], +"classop_1_1_frame_displayer.html#ad83a47005c52f066587f49d62c109802":[29,0,0,56,6], +"classop_1_1_frame_displayer.html#af5d2e1c8bcd2012c66347252e8dbc543":[29,0,0,56,4], +"classop_1_1_gpu_renderer.html":[29,0,0,8], +"classop_1_1_gpu_renderer.html#a3ef06d85a62cd4049d5e8ac1e94d8fd8":[29,0,0,8,1] +}; diff --git a/web/html/doc/navtreeindex1.js b/web/html/doc/navtreeindex1.js new file mode 100644 index 000000000..1124164fe --- /dev/null +++ b/web/html/doc/navtreeindex1.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX1 = +{ +"classop_1_1_gpu_renderer.html#a5d729aab549908c758953be742dd0115":[29,0,0,8,7], +"classop_1_1_gpu_renderer.html#a6355f70d16c6427b028fa4596ce5d985":[29,0,0,8,4], +"classop_1_1_gpu_renderer.html#a63eb7ae0b440a5552ed9342043a8f369":[29,0,0,8,3], +"classop_1_1_gpu_renderer.html#a9852b2017e972637b47250bb7fbc53ea":[29,0,0,8,0], +"classop_1_1_gpu_renderer.html#ac7c1ab0eebf1d54b55cc65a5560bad7b":[29,0,0,8,2], +"classop_1_1_gpu_renderer.html#acc83c7b857db7d35132febaebfcb84df":[29,0,0,8,5], +"classop_1_1_gpu_renderer.html#afa58647bfd9efa02629e4b81bbe48c6e":[29,0,0,8,6], +"classop_1_1_gui.html":[29,0,0,57], +"classop_1_1_gui.html#a07cf9b4e7757979666d097278df02c20":[29,0,0,57,2], +"classop_1_1_gui.html#a0ad7be7018e634769da8d22d60e7edc0":[29,0,0,57,8], +"classop_1_1_gui.html#a1084d79f61d08f0551832de1ca337c70":[29,0,0,57,0], +"classop_1_1_gui.html#a5b95cbfa7cd4018977f4eb1fc095823b":[29,0,0,57,6], +"classop_1_1_gui.html#a5e7e30073c0f7ee18904b25fc638b4e2":[29,0,0,57,1], +"classop_1_1_gui.html#a8e9a67dd507598654a5db06273d50c94":[29,0,0,57,5], +"classop_1_1_gui.html#a8fc6182d0124dd24e26e0fc139074061":[29,0,0,57,3], +"classop_1_1_gui.html#a94cfbf759e88467bfcab18fcd2c987f2":[29,0,0,57,7], +"classop_1_1_gui.html#abeff19fe8eceeacfb9115a059cdde4ad":[29,0,0,57,4], +"classop_1_1_gui3_d.html":[29,0,0,58], +"classop_1_1_gui3_d.html#a04abf8036928d58daf9417c5b5a41693":[29,0,0,58,3], +"classop_1_1_gui3_d.html#a23ead7d9d09b3f0b3ba81b284d49b4a4":[29,0,0,58,0], +"classop_1_1_gui3_d.html#a2fff0519028b406fe9ffc984ecd1dfa9":[29,0,0,58,1], +"classop_1_1_gui3_d.html#a4247c56f90a535944b8aa14def754eaa":[29,0,0,58,2], +"classop_1_1_gui3_d.html#abd245c07a53d1d25c237aff22a2b6e6f":[29,0,0,58,4], +"classop_1_1_gui3_d.html#af10162684889706894f13a308970ba32":[29,0,0,58,5], +"classop_1_1_gui_info_adder.html":[29,0,0,59], +"classop_1_1_gui_info_adder.html#a6f2f2d449d48ca7e21729d03796a540c":[29,0,0,59,2], +"classop_1_1_gui_info_adder.html#a942af111d6bc41991db4bca3e573b8e9":[29,0,0,59,1], +"classop_1_1_gui_info_adder.html#af23e17f9eeb51c7473cd0940292efa61":[29,0,0,59,0], +"classop_1_1_hand_cpu_renderer.html":[29,0,0,63], +"classop_1_1_hand_cpu_renderer.html#a3145d482c0378288e7ba3e42091a56c2":[29,0,0,63,0], +"classop_1_1_hand_cpu_renderer.html#a66a7d318b240c73687320bf092363409":[29,0,0,63,2], +"classop_1_1_hand_cpu_renderer.html#a8269f1879939d1b403787f982f10258d":[29,0,0,63,1], +"classop_1_1_hand_cpu_renderer.html#ae9e43ff22b0aae81dd88df3a313b0b0f":[29,0,0,63,3], +"classop_1_1_hand_detector.html":[29,0,0,64], +"classop_1_1_hand_detector.html#a20b127dd7b51afcd336d1f16b40ee0b1":[29,0,0,64,0], +"classop_1_1_hand_detector.html#a58513169f01ab7c705979f1f2a88b571":[29,0,0,64,4], +"classop_1_1_hand_detector.html#a731a19ff54389b1f56b0aae76af6debe":[29,0,0,64,2], 
+"classop_1_1_hand_detector.html#a963972f9ecb769786b5f60018da443e4":[29,0,0,64,3], +"classop_1_1_hand_detector.html#ae70826e6de6a8f26c240d0152578375e":[29,0,0,64,1], +"classop_1_1_hand_detector_from_txt.html":[29,0,0,65], +"classop_1_1_hand_detector_from_txt.html#a1e6ba23fa1486e92a3bdca36b2e86d22":[29,0,0,65,2], +"classop_1_1_hand_detector_from_txt.html#a8fb6eb6ef5d5689cfdb502b5bc43685f":[29,0,0,65,1], +"classop_1_1_hand_detector_from_txt.html#a94ef5e925c5d25b181c56ae79bb1eed2":[29,0,0,65,0], +"classop_1_1_hand_extractor_caffe.html":[29,0,0,66], +"classop_1_1_hand_extractor_caffe.html#a2f8e53c8d4f4d509b4a1842f042fa548":[29,0,0,66,2], +"classop_1_1_hand_extractor_caffe.html#a703c8b8d15de55bc2b6bbaee633a6384":[29,0,0,66,0], +"classop_1_1_hand_extractor_caffe.html#ace3ee9d717887ee9dc0f00ce69bd0c82":[29,0,0,66,3], +"classop_1_1_hand_extractor_caffe.html#aee681b43b8691ac1f07e08616522f6af":[29,0,0,66,1], +"classop_1_1_hand_extractor_net.html":[29,0,0,67], +"classop_1_1_hand_extractor_net.html#a0981f4dfd15ce4a13de9d166cad9e1d4":[29,0,0,67,10], +"classop_1_1_hand_extractor_net.html#a0aa50449396fd075bec29e0393a1ff9e":[29,0,0,67,2], +"classop_1_1_hand_extractor_net.html#a270f22a05dbae6d156d79f0386cfbf4b":[29,0,0,67,15], +"classop_1_1_hand_extractor_net.html#a2ee9d38650ed3138fa74fae2cad4bd77":[29,0,0,67,9], +"classop_1_1_hand_extractor_net.html#a3743bf97fd19ee7d52ffd1019baa0c46":[29,0,0,67,1], +"classop_1_1_hand_extractor_net.html#a37d86377da41c576c4d54027a9762733":[29,0,0,67,6], +"classop_1_1_hand_extractor_net.html#a5c4174ed2c09ff7c15edfc5d971f4aef":[29,0,0,67,12], +"classop_1_1_hand_extractor_net.html#a7f97a5b842d20d3d37d3469418faac7b":[29,0,0,67,11], +"classop_1_1_hand_extractor_net.html#a88a35f29d3c53c259756bc07b2bfb093":[29,0,0,67,5], +"classop_1_1_hand_extractor_net.html#a8bcb44ea2618dea01c00255021425637":[29,0,0,67,0], +"classop_1_1_hand_extractor_net.html#aad7c29237d50e77e606bb32c20c60d24":[29,0,0,67,7], +"classop_1_1_hand_extractor_net.html#aaf0386c8c15a37cf79e9f3f4b1ced2e8":[29,0,0,67,14], +"classop_1_1_hand_extractor_net.html#ab59a77d051991734b0c74b122671f097":[29,0,0,67,8], +"classop_1_1_hand_extractor_net.html#ac5e36cd33696a684a4447acccec28fdd":[29,0,0,67,16], +"classop_1_1_hand_extractor_net.html#ae9617434c4dc7e390c18d596b868297d":[29,0,0,67,4], +"classop_1_1_hand_extractor_net.html#af03c8872258c644086bda26a3aaf95b5":[29,0,0,67,13], +"classop_1_1_hand_extractor_net.html#af064ccee582800f39ed3eac5d69a4134":[29,0,0,67,3], +"classop_1_1_hand_gpu_renderer.html":[29,0,0,68], +"classop_1_1_hand_gpu_renderer.html#a0489f10ddc9e37e87084ebf9a5138f3a":[29,0,0,68,2], +"classop_1_1_hand_gpu_renderer.html#a0d2f742b995a6f34e414f9731db847d5":[29,0,0,68,0], +"classop_1_1_hand_gpu_renderer.html#a8206b59519e8214e06af9994a6038dae":[29,0,0,68,3], +"classop_1_1_hand_gpu_renderer.html#ad6a87a582129d7ed18a520dc9cd6c3fc":[29,0,0,68,1], +"classop_1_1_hand_renderer.html":[29,0,0,69], +"classop_1_1_hand_renderer.html#a2693c02336fb373a42405ccc7cff29bd":[29,0,0,69,2], +"classop_1_1_hand_renderer.html#a66ca52089ca021542816a085d39ee640":[29,0,0,69,0], +"classop_1_1_hand_renderer.html#adb91ae2a8ccf24671ad86e99e786b120":[29,0,0,69,1], +"classop_1_1_heat_map_saver.html":[29,0,0,39], +"classop_1_1_heat_map_saver.html#a150c053182074a1cc846c3ced7a674fb":[29,0,0,39,1], +"classop_1_1_heat_map_saver.html#a48aeaad854bfff14fd2642dc13071137":[29,0,0,39,2], +"classop_1_1_heat_map_saver.html#aa6a339b70a9535a018584b93c932b12d":[29,0,0,39,0], +"classop_1_1_image_directory_reader.html":[29,0,0,94], 
+"classop_1_1_image_directory_reader.html#a10157e6234426dd809ffe83ebfbfd274":[29,0,0,94,0], +"classop_1_1_image_directory_reader.html#a1965a4dca2ddb86b24e69e2da90b9dbf":[29,0,0,94,6], +"classop_1_1_image_directory_reader.html#a46ce23209afe6d3ca90db545b69cd04a":[29,0,0,94,3], +"classop_1_1_image_directory_reader.html#a7551a8567f42f7cfb68020e149921438":[29,0,0,94,1], +"classop_1_1_image_directory_reader.html#aa05bfd69272e81115ba23a3c0731b596":[29,0,0,94,2], +"classop_1_1_image_directory_reader.html#adbf9ff392cd52a585332dbdcd46ffb81":[29,0,0,94,4], +"classop_1_1_image_directory_reader.html#af74e192f8cba5c10f8e252674a85185a":[29,0,0,94,5], +"classop_1_1_image_saver.html":[29,0,0,40], +"classop_1_1_image_saver.html#a0262916d0af8cc3be81b3375e0520e62":[29,0,0,40,2], +"classop_1_1_image_saver.html#a723387e62a6b701202dd6cf35c57429f":[29,0,0,40,0], +"classop_1_1_image_saver.html#a8c1f4ae3799db276753707879e59bee1":[29,0,0,40,3], +"classop_1_1_image_saver.html#ab11a6e42a910021fd072cdf287b796ed":[29,0,0,40,1], +"classop_1_1_ip_camera_reader.html":[29,0,0,95], +"classop_1_1_ip_camera_reader.html#a0c1582090cc7c54dd9cb752207b52986":[29,0,0,95,3], +"classop_1_1_ip_camera_reader.html#aa7ad6adac6e401193e03d279176dd889":[29,0,0,95,2], +"classop_1_1_ip_camera_reader.html#ac13cc7da97a31d8e69eaa64b2a7b31ba":[29,0,0,95,5], +"classop_1_1_ip_camera_reader.html#ac26913b4ff841f56f43bb53b012a2401":[29,0,0,95,4], +"classop_1_1_ip_camera_reader.html#ad90e52c898ddf32503ce94685977aae0":[29,0,0,95,1], +"classop_1_1_ip_camera_reader.html#af3a67a2705107e04e79672fa087847c5":[29,0,0,95,0], +"classop_1_1_json_ofstream.html":[29,0,0,41], +"classop_1_1_json_ofstream.html#a32f058b961605d418df1258a1dc5e6a0":[29,0,0,41,6], +"classop_1_1_json_ofstream.html#a3f940d3ad51d0acb7126d62a5617fd69":[29,0,0,41,3], +"classop_1_1_json_ofstream.html#a45eeb25d9413fc31786f315b46c341cc":[29,0,0,41,8], +"classop_1_1_json_ofstream.html#a4cc6d50256354b3dc7385e2db01aabc0":[29,0,0,41,4], +"classop_1_1_json_ofstream.html#a5c38e36c1449d808dd4ab6558d65289c":[29,0,0,41,9], +"classop_1_1_json_ofstream.html#a5c4b866df81cf36d8f6dcdfc8414de8f":[29,0,0,41,2], +"classop_1_1_json_ofstream.html#a6a5e0e4f3cdf8f93fb1bdef8cb63b0a2":[29,0,0,41,12], +"classop_1_1_json_ofstream.html#aa432ff172e10bb9e3b6e2bfa6124c648":[29,0,0,41,11], +"classop_1_1_json_ofstream.html#ab8220b4336ccc8998cc38f0fa5c36918":[29,0,0,41,1], +"classop_1_1_json_ofstream.html#ae4468279f789c8026d431b2ef62646f9":[29,0,0,41,5], +"classop_1_1_json_ofstream.html#af0c7f763e7e809810c00b394a260672e":[29,0,0,41,7], +"classop_1_1_json_ofstream.html#afa4b3e1dee27f5afd0017b95c0f5e364":[29,0,0,41,0], +"classop_1_1_json_ofstream.html#aff76578c824c0314e33231884b40147e":[29,0,0,41,10], +"classop_1_1_keep_top_n_people.html":[29,0,0,9], +"classop_1_1_keep_top_n_people.html#a556a0d8d97985e0b73fc78e372be6ea8":[29,0,0,9,2], +"classop_1_1_keep_top_n_people.html#a7675c9c3668a2610827da67818a67741":[29,0,0,9,1], +"classop_1_1_keep_top_n_people.html#ae9419ae35da5b0547989f19795a26808":[29,0,0,9,0], +"classop_1_1_keypoint_saver.html":[29,0,0,42], +"classop_1_1_keypoint_saver.html#a903a4fa8be0b0cb5008d015126ac0e59":[29,0,0,42,1], +"classop_1_1_keypoint_saver.html#aa6d9eb36cfd40c5cfa3995420cdf3dfa":[29,0,0,42,0], +"classop_1_1_keypoint_saver.html#aad663949dc5f2262ebdc69ed0d0caa1b":[29,0,0,42,2], +"classop_1_1_keypoint_scaler.html":[29,0,0,10], +"classop_1_1_keypoint_scaler.html#a0f556c1b0fad63c7c3551a5d4fd72219":[29,0,0,10,0], +"classop_1_1_keypoint_scaler.html#a42e46aea4d43fcf0886f06c9700148ea":[29,0,0,10,4], 
+"classop_1_1_keypoint_scaler.html#a5797e76ffea7e3b6a4080b04f50f0c0f":[29,0,0,10,1], +"classop_1_1_keypoint_scaler.html#a687a64bbca93d54292f191762efe20d7":[29,0,0,10,3], +"classop_1_1_keypoint_scaler.html#a9c2d575ce49bb6112b2a099cb92f07cc":[29,0,0,10,2], +"classop_1_1_matrix.html":[29,0,0,11], +"classop_1_1_matrix.html#a09859c3f88b8c75c7d12f53667304450":[29,0,0,11,25], +"classop_1_1_matrix.html#a1beb13525ec86c9827a7116eb2d175b7":[29,0,0,11,17], +"classop_1_1_matrix.html#a3099b24c0ee295014d95c99a20615fdd":[29,0,0,11,21], +"classop_1_1_matrix.html#a401c028c88a65b69c0c029cfc990f631":[29,0,0,11,14], +"classop_1_1_matrix.html#a41ec72e2d80f73025c4c0837364c8193":[29,0,0,11,24], +"classop_1_1_matrix.html#a4555d0f39c54ad5f7adcb39fe06503cc":[29,0,0,11,4], +"classop_1_1_matrix.html#a53786b5c97e1cded5accbcb3cd6b992d":[29,0,0,11,3], +"classop_1_1_matrix.html#a6714cef92d6dce3089841ea124cd2b7d":[29,0,0,11,8], +"classop_1_1_matrix.html#a69d3316b25c1fce55f067e92b31e4d57":[29,0,0,11,9], +"classop_1_1_matrix.html#a770bbf0242b96b2e746d7f1e30dbf8fc":[29,0,0,11,2], +"classop_1_1_matrix.html#a77cd25c8e23a4eec148039ab4832cde1":[29,0,0,11,5], +"classop_1_1_matrix.html#a7f9df7fbdc9ef76e158f72d306f88ec2":[29,0,0,11,16], +"classop_1_1_matrix.html#a93188dad84f0f0a20f7a631edd32a620":[29,0,0,11,23], +"classop_1_1_matrix.html#a9326d59a12659563d123ea6587fd4415":[29,0,0,11,18], +"classop_1_1_matrix.html#a9af637b50e808c1d84e179cc6acb45b4":[29,0,0,11,10], +"classop_1_1_matrix.html#aa0ab094e21bab6757f502866bce7e79c":[29,0,0,11,20], +"classop_1_1_matrix.html#aabfd2f25b2459aac510e1e31b207fcf3":[29,0,0,11,13], +"classop_1_1_matrix.html#aac1572705e72a18198a8b2d32d1b5c24":[29,0,0,11,26], +"classop_1_1_matrix.html#ab65ba706b58675da9a4512d448d44370":[29,0,0,11,11], +"classop_1_1_matrix.html#abc101fe6c039f6ef2311c5e9cef4c293":[29,0,0,11,6], +"classop_1_1_matrix.html#ac2171dc14ef5480496c05c115b6dd579":[29,0,0,11,7], +"classop_1_1_matrix.html#ad74c013aa1aaed93271275cce6c77972":[29,0,0,11,22], +"classop_1_1_matrix.html#adbdc98003dd0f666c845ac2acf592bd8":[29,0,0,11,0], +"classop_1_1_matrix.html#ae33558724a713e9a36f8dc0062d267a8":[29,0,0,11,12], +"classop_1_1_matrix.html#ae459fb36ef45c1215a7db39af8a8e6cf":[29,0,0,11,15], +"classop_1_1_matrix.html#ae82b851dd176317d72df95461a4bad76":[29,0,0,11,19], +"classop_1_1_matrix.html#af9dc44c30ec3ae5f8e7ba8f76516985a":[29,0,0,11,1], +"classop_1_1_maximum_caffe.html":[29,0,0,77], +"classop_1_1_maximum_caffe.html#a0b438980e5c2fce978e9de80f75afcd3":[29,0,0,77,1], +"classop_1_1_maximum_caffe.html#a47047083e35d2af5a969acbf7cb55674":[29,0,0,77,7], +"classop_1_1_maximum_caffe.html#a51604d40efcfa63c5a46dc257c72cf9c":[29,0,0,77,4], +"classop_1_1_maximum_caffe.html#a66bd0aa863a97647ae6350d1f886ea51":[29,0,0,77,0], +"classop_1_1_maximum_caffe.html#a6e44cdf4dc3fce4d1dcc75ce29bc051e":[29,0,0,77,6], +"classop_1_1_maximum_caffe.html#a8d047aa2e08e49199f9f422191e9bdd2":[29,0,0,77,9], +"classop_1_1_maximum_caffe.html#a91989f6e0a2c1349c33815a8cd659e52":[29,0,0,77,3], +"classop_1_1_maximum_caffe.html#ab9fb5ce2358801ac4e85fa25f052baa4":[29,0,0,77,8], +"classop_1_1_maximum_caffe.html#ad21700230d1f1ac1139e8ce76574232c":[29,0,0,77,2], +"classop_1_1_maximum_caffe.html#ae88c10cadaef2e4e7347ef7f8c101b67":[29,0,0,77,5], +"classop_1_1_net.html":[29,0,0,78], +"classop_1_1_net.html#a222cfe3d19800824b742b218b466586b":[29,0,0,78,2], +"classop_1_1_net.html#a65193e857c721f2f606ea6b010953dbc":[29,0,0,78,1], +"classop_1_1_net.html#a6e9e801f2c9950a798d0d2fa94a6c8f2":[29,0,0,78,3], 
+"classop_1_1_net.html#ae20a74df1a401eb17d5b75b406574919":[29,0,0,78,0], +"classop_1_1_net_caffe.html":[29,0,0,79], +"classop_1_1_net_caffe.html#a08b71387287339e68327dd6d4cb1e8b3":[29,0,0,79,4], +"classop_1_1_net_caffe.html#a37648c14f06ee46ca395c9d38635fade":[29,0,0,79,3], +"classop_1_1_net_caffe.html#a439b30ec5d10c68cb620130ff5e1812a":[29,0,0,79,2], +"classop_1_1_net_caffe.html#a84007645c88de286e9d306461a044e8d":[29,0,0,79,1], +"classop_1_1_net_caffe.html#af6d9ee03568d2783e0e4ed0b78a21c3d":[29,0,0,79,0], +"classop_1_1_net_open_cv.html":[29,0,0,80], +"classop_1_1_net_open_cv.html#a30ec3c3ee2ffe0a95656f6b11151243f":[29,0,0,80,1], +"classop_1_1_net_open_cv.html#a932f2f53f61e05bc0fd164a707f692b9":[29,0,0,80,4], +"classop_1_1_net_open_cv.html#a9f4981ac196b094183c52caa6ce283db":[29,0,0,80,3], +"classop_1_1_net_open_cv.html#aa62d557f44d2d44f08b8b1dd3efd54fb":[29,0,0,80,2], +"classop_1_1_net_open_cv.html#af46f57f8a4093c927dd39109ad0411e9":[29,0,0,80,0], +"classop_1_1_nms_caffe.html":[29,0,0,81], +"classop_1_1_nms_caffe.html#a0702488e5d899a6610535f6741601978":[29,0,0,81,1], +"classop_1_1_nms_caffe.html#a1dd658e4bc9e080867a99e9b57f1baa8":[29,0,0,81,10], +"classop_1_1_nms_caffe.html#a263d87a3282cbc03182e4d8759ca9f3a":[29,0,0,81,4], +"classop_1_1_nms_caffe.html#a3d1d4cee2b93d5bc0d88c25019b17715":[29,0,0,81,3], +"classop_1_1_nms_caffe.html#a5f257eb561fc705c2b74489b12269b49":[29,0,0,81,11], +"classop_1_1_nms_caffe.html#a8289f4e680cd16405555002a61de735b":[29,0,0,81,5], +"classop_1_1_nms_caffe.html#a8520f4df4fb2d26a1289b1bcaa814e93":[29,0,0,81,6], +"classop_1_1_nms_caffe.html#a8c7e69c32f1fff92893284ed70278f48":[29,0,0,81,8], +"classop_1_1_nms_caffe.html#abbaee841e5cb64f97c94da67ef4349c9":[29,0,0,81,2], +"classop_1_1_nms_caffe.html#abe113059484596e82efd8b5f6d346ab5":[29,0,0,81,9], +"classop_1_1_nms_caffe.html#ad1719736dc5e459a1d8b28837e94f989":[29,0,0,81,7], +"classop_1_1_nms_caffe.html#adc88733fceaefc359a95f067c62c3b07":[29,0,0,81,12], +"classop_1_1_nms_caffe.html#afb808d9a264ce50664f8641e477d9e2d":[29,0,0,81,0], +"classop_1_1_op_output_to_cv_mat.html":[29,0,0,12], +"classop_1_1_op_output_to_cv_mat.html#a60affeb41b26b1357cf8c797c7e16ecb":[29,0,0,12,0], +"classop_1_1_op_output_to_cv_mat.html#aaee9dc07945e0857de33308b12c9bd09":[29,0,0,12,2], +"classop_1_1_op_output_to_cv_mat.html#af150c89ff5edbe4f4bd727b7162e9b36":[29,0,0,12,3], +"classop_1_1_op_output_to_cv_mat.html#afe99e538dfcca6396b0672db1ec2f17f":[29,0,0,12,1], +"classop_1_1_people_json_saver.html":[29,0,0,43], +"classop_1_1_people_json_saver.html#a4a84666529a0418ccf9256c9942ea3f8":[29,0,0,43,1], +"classop_1_1_people_json_saver.html#aa6e2f479d959752c5c0f71cd8b4427c2":[29,0,0,43,0], +"classop_1_1_people_json_saver.html#ac0c0609652f89a3de44bcc940a82e235":[29,0,0,43,2], +"classop_1_1_person_id_extractor.html":[29,0,0,119], +"classop_1_1_person_id_extractor.html#a1aebf8006d814a02d7fa55f0609a7ab7":[29,0,0,119,3], +"classop_1_1_person_id_extractor.html#a5916ec673bdbe127386b7f496b188828":[29,0,0,119,0], +"classop_1_1_person_id_extractor.html#a7ff9f8faf42bff0dbd7207105c149a1e":[29,0,0,119,1], +"classop_1_1_person_id_extractor.html#a8d0b309bdf1ce96ed1aa2bd3df6f6dbc":[29,0,0,119,2], +"classop_1_1_person_tracker.html":[29,0,0,120], +"classop_1_1_person_tracker.html#a05eaf85bd389ad965f9960c9db31d873":[29,0,0,120,3], +"classop_1_1_person_tracker.html#a35cd3cd6c946f560220c9459a5dd7ee7":[29,0,0,120,4], +"classop_1_1_person_tracker.html#a68f46367bd719196974aa5b1bd23cb7d":[29,0,0,120,2], 
+"classop_1_1_person_tracker.html#a840ed2e06c1cc4dfc89e6083b2a8bc37":[29,0,0,120,1], +"classop_1_1_person_tracker.html#aa88f617ff9f1ff509c54b2cbf51e764a":[29,0,0,120,0], +"classop_1_1_pointer_container_greater.html":[29,0,0,122], +"classop_1_1_pointer_container_greater.html#a7c571ddbcfd6eaaaf33bb6abe4b22aaa":[29,0,0,122,0], +"classop_1_1_pointer_container_less.html":[29,0,0,123], +"classop_1_1_pointer_container_less.html#af34bafbf659ff4768dbb33fe7454cb21":[29,0,0,123,0], +"classop_1_1_pose_cpu_renderer.html":[29,0,0,83], +"classop_1_1_pose_cpu_renderer.html#a5863733d560345d4890557b0f8c0d08e":[29,0,0,83,0], +"classop_1_1_pose_cpu_renderer.html#a98541b982847713472411402314efd96":[29,0,0,83,2], +"classop_1_1_pose_cpu_renderer.html#ad4994dcc005a5e283abc012e8889c481":[29,0,0,83,1], +"classop_1_1_pose_extractor.html":[29,0,0,84], +"classop_1_1_pose_extractor.html#a15d81f74033c643465864f8ab6e48bba":[29,0,0,84,2], +"classop_1_1_pose_extractor.html#a291521decad2465df13dc769fe9cc4e5":[29,0,0,84,11], +"classop_1_1_pose_extractor.html#a487be38105b0d3f310142d99e0ca6b12":[29,0,0,84,7], +"classop_1_1_pose_extractor.html#a6c0abd998181d03d7890ec7abdee5efe":[29,0,0,84,4], +"classop_1_1_pose_extractor.html#a95f6235ab496ada0b8cbc4b614637ac0":[29,0,0,84,6], +"classop_1_1_pose_extractor.html#a9f98eef4ac08cacefe74e002ac086582":[29,0,0,84,1], +"classop_1_1_pose_extractor.html#aa7b59f4bfe89219e75995bc048efe4de":[29,0,0,84,3], +"classop_1_1_pose_extractor.html#aab1cccc9ad99f6b007abaa14600ea6df":[29,0,0,84,10], +"classop_1_1_pose_extractor.html#ab464ae97522336cf69dec1c1561c431d":[29,0,0,84,12], +"classop_1_1_pose_extractor.html#acd50fa337aef1d658b6fed3edc717ada":[29,0,0,84,0], +"classop_1_1_pose_extractor.html#adab126d32216aa9a27cc78d7158d6616":[29,0,0,84,13], +"classop_1_1_pose_extractor.html#adc430a6b1b2bf4df75ebf088f97be8af":[29,0,0,84,5], +"classop_1_1_pose_extractor.html#ae798335b1606804c87220d3c72423dad":[29,0,0,84,9], +"classop_1_1_pose_extractor.html#aee77aa0ca773abe442a278d9e9e69376":[29,0,0,84,8], +"classop_1_1_pose_extractor_caffe.html":[29,0,0,85], +"classop_1_1_pose_extractor_caffe.html#a1444ad1ee245a5bcd9e0b5b55395d6d8":[29,0,0,85,3], +"classop_1_1_pose_extractor_caffe.html#a3359641c1199c712a07859dcb76b7dcf":[29,0,0,85,1], +"classop_1_1_pose_extractor_caffe.html#a350900a3b326f4ed7d3dcb9531055523":[29,0,0,85,7], +"classop_1_1_pose_extractor_caffe.html#a499d975f7b6add768425271b2af19a2e":[29,0,0,85,4], +"classop_1_1_pose_extractor_caffe.html#a682152a072d07b1b0764c2f7aab09ab7":[29,0,0,85,0], +"classop_1_1_pose_extractor_caffe.html#a6ffc941073b66868177c91cc9e025098":[29,0,0,85,8], +"classop_1_1_pose_extractor_caffe.html#a9e8056cd50ba679636c5d5055f5a563b":[29,0,0,85,5], +"classop_1_1_pose_extractor_caffe.html#a9f8677779c9c07c0fd4ac265cd8d2d8f":[29,0,0,85,2], +"classop_1_1_pose_extractor_caffe.html#ac4737f29b467f6c0daad5f54aa20524b":[29,0,0,85,6] +}; diff --git a/web/html/doc/navtreeindex10.js b/web/html/doc/navtreeindex10.js new file mode 100644 index 000000000..d5549d865 --- /dev/null +++ b/web/html/doc/navtreeindex10.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX10 = +{ +"resize_and_merge_caffe_8hpp.html":[30,0,1,0,8,11], +"resize_and_merge_caffe_8hpp_source.html":[30,0,1,0,8,11], +"scale_and_size_extractor_8hpp.html":[30,0,1,0,2,17], +"scale_and_size_extractor_8hpp_source.html":[30,0,1,0,2,17], +"spinnaker_wrapper_8hpp.html":[30,0,1,0,10,7], +"spinnaker_wrapper_8hpp_source.html":[30,0,1,0,10,7], +"standard_8hpp.html":[30,0,1,0,14,11], +"standard_8hpp.html#aa3a3e2acfb27ecbd187d01c8dcd41899":[30,0,1,0,14,11,0], 
+"standard_8hpp.html#af63e418966741f7efebacc9519174a0a":[30,0,1,0,14,11,1], +"standard_8hpp_source.html":[30,0,1,0,14,11], +"structop_1_1_datum.html":[29,0,0,7], +"structop_1_1_datum.html#a0aa21ea7a3adea0126003b778509f2d2":[29,0,0,7,18], +"structop_1_1_datum.html#a0b2f6955a1751fc79b107789272effad":[29,0,0,7,23], +"structop_1_1_datum.html#a0e416771f275be98c83aaff01e482a71":[29,0,0,7,42], +"structop_1_1_datum.html#a16b968aec06e9b904751216402972e74":[29,0,0,7,3], +"structop_1_1_datum.html#a1f9ba4bd5be779a911c8c8e7962ea727":[29,0,0,7,16], +"structop_1_1_datum.html#a24f3bfcb0ffffeb5742eb1530bc9e367":[29,0,0,7,9], +"structop_1_1_datum.html#a27bb38102b5ebecd9b13a3619e658316":[29,0,0,7,27], +"structop_1_1_datum.html#a2d4940d8cb12d95b8588cd0280f6524c":[29,0,0,7,2], +"structop_1_1_datum.html#a32752199884dcb51b7157daa098063e1":[29,0,0,7,7], +"structop_1_1_datum.html#a32d164c01acf6b4f7eb1323d74edbdca":[29,0,0,7,32], +"structop_1_1_datum.html#a35212700ef2a2ac290a6666e2993a192":[29,0,0,7,19], +"structop_1_1_datum.html#a42b953c082f479eddc527da9a3a4cc75":[29,0,0,7,34], +"structop_1_1_datum.html#a42f9aef848c6335c5a81cad374319f0b":[29,0,0,7,1], +"structop_1_1_datum.html#a44af7162e180c6856ce909057f43d8e1":[29,0,0,7,44], +"structop_1_1_datum.html#a46ff336119fd0d67c8223b1a9371731d":[29,0,0,7,30], +"structop_1_1_datum.html#a52d75e3273490624414f0602785bb608":[29,0,0,7,28], +"structop_1_1_datum.html#a5429e97e0ab9b0e2209a3947af668381":[29,0,0,7,36], +"structop_1_1_datum.html#a55dd5354e09696ed6896923755f1c85b":[29,0,0,7,35], +"structop_1_1_datum.html#a59d455dbddc50d700809c5e102c40d4e":[29,0,0,7,26], +"structop_1_1_datum.html#a652ac1e7de13ec9a886dece75848cfea":[29,0,0,7,39], +"structop_1_1_datum.html#a65deddd49d0fbca81f367198fc600015":[29,0,0,7,29], +"structop_1_1_datum.html#a6cf96c250c236a03f13da69e1d4336d9":[29,0,0,7,43], +"structop_1_1_datum.html#a6d629b1f6f7b958fe4cf2ef4cdf57c5b":[29,0,0,7,38], +"structop_1_1_datum.html#a72c75834671aebe44705738fb5efc3c5":[29,0,0,7,0], +"structop_1_1_datum.html#a72ee10bf507aea368cfd3dba3dd38cb5":[29,0,0,7,8], +"structop_1_1_datum.html#a79a05bec9871522cfab5d33cc7b63614":[29,0,0,7,11], +"structop_1_1_datum.html#a8337f6ff81ba8231ceeabc840372bff9":[29,0,0,7,5], +"structop_1_1_datum.html#a8b930d61467f98702ebea68f39fc762b":[29,0,0,7,24], +"structop_1_1_datum.html#a8f6f5fd181abe3bdfd6f7bdf8a165782":[29,0,0,7,40], +"structop_1_1_datum.html#a9a44196a197d5c050e626efe8b016e84":[29,0,0,7,22], +"structop_1_1_datum.html#a9d67e55fbc26399e4efd2385c1899541":[29,0,0,7,6], +"structop_1_1_datum.html#aa27ee36fd2e1fb0dfc5c1e6869e2073e":[29,0,0,7,15], +"structop_1_1_datum.html#aa3e5b74f3d54bc880f47831c3932dfa9":[29,0,0,7,13], +"structop_1_1_datum.html#ab87c493347456b592b616e9f656a5d60":[29,0,0,7,46], +"structop_1_1_datum.html#ab97601a7628b46619f4a071cf1613ce6":[29,0,0,7,12], +"structop_1_1_datum.html#aba90dccffb5a830296231bd430c4766c":[29,0,0,7,37], +"structop_1_1_datum.html#ac734d4262a5a7892c6d4094cdd2bcc7c":[29,0,0,7,33], +"structop_1_1_datum.html#ad137a102ef753734a9413762d72e6d46":[29,0,0,7,4], +"structop_1_1_datum.html#ad70b95f61637fe23092bca8f0a4fb088":[29,0,0,7,17], +"structop_1_1_datum.html#ae0c0f33a6b75c7f47e11112dd33f23c1":[29,0,0,7,20], +"structop_1_1_datum.html#ae2aad08cc74ee43e1242b403d47be2ff":[29,0,0,7,14], +"structop_1_1_datum.html#ae6adcdacea12a9cfa445cf0cac1985b0":[29,0,0,7,31], +"structop_1_1_datum.html#ae740051202ca0db8358d5308143bb1b3":[29,0,0,7,10], +"structop_1_1_datum.html#aeb6d10e4fa40a20d38118bf1be3112d8":[29,0,0,7,45], 
+"structop_1_1_datum.html#aebd19bf50725a5cd87de1efd96f6ebfe":[29,0,0,7,21], +"structop_1_1_datum.html#aef6c478313691ab5101664c1df55aa58":[29,0,0,7,25], +"structop_1_1_datum.html#afb117821de7aff9ac3c219ef3bbc0c14":[29,0,0,7,41], +"structop_1_1_point.html":[29,0,0,13], +"structop_1_1_point.html#a075741b8963b342bb068976afcf579af":[29,0,0,13,11], +"structop_1_1_point.html#a0b362efa00fc5a0d35f743f3c01fa1d0":[29,0,0,13,7], +"structop_1_1_point.html#a0e94c712c194c0b317eef4d8995e52f3":[29,0,0,13,22], +"structop_1_1_point.html#a2f7900c0d58fb297b3b039cfb3c98a3e":[29,0,0,13,21], +"structop_1_1_point.html#a44559988e3980e21568b5d9dd2897368":[29,0,0,13,1], +"structop_1_1_point.html#a5821bc77a416629916e671793df3ce3b":[29,0,0,13,26], +"structop_1_1_point.html#a60488ca743d82fe8dd574b01f992460c":[29,0,0,13,10], +"structop_1_1_point.html#a6aaab75fe6d1c8b4c935c2da385fd7ee":[29,0,0,13,17], +"structop_1_1_point.html#a73d1088b5d0f3370499ca5c6e80b544a":[29,0,0,13,24], +"structop_1_1_point.html#a812d4ef29d102f4ad18f32ae54eb17ec":[29,0,0,13,25], +"structop_1_1_point.html#a872607032f6b4fa8982f88a74c88c6bd":[29,0,0,13,18], +"structop_1_1_point.html#a8961164fe93cd91fcf55f56200730578":[29,0,0,13,12], +"structop_1_1_point.html#a8a82a0d663d9572fa28394f7562ebfb2":[29,0,0,13,4], +"structop_1_1_point.html#a9f80114d18ec8055360222d975bcd5a8":[29,0,0,13,0], +"structop_1_1_point.html#aada0e9eecee2fb30fb903b32f9f33047":[29,0,0,13,19], +"structop_1_1_point.html#ab3b92e4a40cd58d948647484f21dd9ef":[29,0,0,13,2], +"structop_1_1_point.html#abf2bb2d2d5b3dba3424b489b99faa760":[29,0,0,13,14], +"structop_1_1_point.html#ac8596f2b3b50464b6c6eaa34b0a2c48b":[29,0,0,13,20], +"structop_1_1_point.html#ac85e32b9381abc2af106fe96dba81b08":[29,0,0,13,3], +"structop_1_1_point.html#ad42deecd0077f7c962ca383cbc87e08f":[29,0,0,13,13], +"structop_1_1_point.html#ad5005ff994bfcd1846854d6af103a3a6":[29,0,0,13,9], +"structop_1_1_point.html#ad599eeba7a0137c3c138e5542bb2e9ed":[29,0,0,13,15], +"structop_1_1_point.html#ad66e33cf5d57e78c80220881406e41ce":[29,0,0,13,5], +"structop_1_1_point.html#ae7afe35869eea79f72bd8b74fae4a2f1":[29,0,0,13,23], +"structop_1_1_point.html#af656ee43b596b5bb68139404a54c5a63":[29,0,0,13,8], +"structop_1_1_point.html#af8e49e33dad417d05ce8fb5f9dd68762":[29,0,0,13,16], +"structop_1_1_point.html#afb53230d0d884ca5432e948605b5c2e6":[29,0,0,13,6], +"structop_1_1_rectangle.html":[29,0,0,14], +"structop_1_1_rectangle.html#a0112ddaa9782f3ccbb76a319b05f030b":[29,0,0,14,0], +"structop_1_1_rectangle.html#a0b0b8be8a0b300204a2afff4f219879b":[29,0,0,14,5], +"structop_1_1_rectangle.html#a0d0ae826039b0961fae8723708809cdf":[29,0,0,14,16], +"structop_1_1_rectangle.html#a1c9a572db2c17fb02a7d19e965c1d3dc":[29,0,0,14,12], +"structop_1_1_rectangle.html#a2d3d7951770da3954d5af9e365f5780c":[29,0,0,14,7], +"structop_1_1_rectangle.html#a5a9a60fdfd9c88ab8ded6275d64333ea":[29,0,0,14,2], +"structop_1_1_rectangle.html#a5b319240c995c81bfa1d73a2461d49fd":[29,0,0,14,3], +"structop_1_1_rectangle.html#a5db9f0e8c946d837a1d351cc0bc72811":[29,0,0,14,15], +"structop_1_1_rectangle.html#a640050d6186148b425bedba8c33cf1ea":[29,0,0,14,13], +"structop_1_1_rectangle.html#a64e6891af0088a4ad271a725601b8043":[29,0,0,14,18], +"structop_1_1_rectangle.html#a65620c7efbb3db95d85c90c2be3a851d":[29,0,0,14,9], +"structop_1_1_rectangle.html#a66e38889d2b413df95a9995e93103ff7":[29,0,0,14,6], +"structop_1_1_rectangle.html#ab4473fb43ab826ffb10c2be18cb96f24":[29,0,0,14,4], +"structop_1_1_rectangle.html#abd3476f9a32ad2058ea67c75c2a547a2":[29,0,0,14,10], 
+"structop_1_1_rectangle.html#abea1a6760629dc4ed99875dae9d5ac36":[29,0,0,14,11], +"structop_1_1_rectangle.html#ac4ae58fe6ffd2f811f5cbc48661c1856":[29,0,0,14,17], +"structop_1_1_rectangle.html#adba48a35368d4a4d55896899b217d523":[29,0,0,14,8], +"structop_1_1_rectangle.html#af1c7f96c34132924fa9237248894e63d":[29,0,0,14,14], +"structop_1_1_rectangle.html#afbb0da8956e35178d3f28d2b1d998175":[29,0,0,14,1], +"structop_1_1_wrapper_struct_extra.html":[29,0,0,126], +"structop_1_1_wrapper_struct_extra.html#a08578de8a074415df3e645d3ddb27b8b":[29,0,0,126,1], +"structop_1_1_wrapper_struct_extra.html#a70cdc27c953962810333fafe011f86dd":[29,0,0,126,0], +"structop_1_1_wrapper_struct_extra.html#a86ae9d1faa008aaeed4d6fa6ff03f0fb":[29,0,0,126,5], +"structop_1_1_wrapper_struct_extra.html#aa157c20ca959fd952a85866a119183ca":[29,0,0,126,4], +"structop_1_1_wrapper_struct_extra.html#ad41edf2717e5446a250efc05512ee07f":[29,0,0,126,2], +"structop_1_1_wrapper_struct_extra.html#ae8a3562b010c4fa31e6a9722947301c6":[29,0,0,126,3], +"structop_1_1_wrapper_struct_face.html":[29,0,0,127], +"structop_1_1_wrapper_struct_face.html#a0fb08ed60a50f19713df6f62ee685593":[29,0,0,127,0], +"structop_1_1_wrapper_struct_face.html#a30d45f383e5c9d72709b5281f24f1ed0":[29,0,0,127,3], +"structop_1_1_wrapper_struct_face.html#a49f609ae1c075f272bbaf32e128cc3a9":[29,0,0,127,1], +"structop_1_1_wrapper_struct_face.html#a65a0244cbcea06621c6f8c41e519990f":[29,0,0,127,2], +"structop_1_1_wrapper_struct_face.html#a8fadeca500dde0df2a35f864bf05ee05":[29,0,0,127,4], +"structop_1_1_wrapper_struct_face.html#a982e3f1a13358a522e1882d17cb80d57":[29,0,0,127,7], +"structop_1_1_wrapper_struct_face.html#a9845712fd6ebb66fccb0c1647e3491a0":[29,0,0,127,5], +"structop_1_1_wrapper_struct_face.html#accc6e564598130b9bf0a6d0ec9c304c4":[29,0,0,127,6], +"structop_1_1_wrapper_struct_gui.html":[29,0,0,128], +"structop_1_1_wrapper_struct_gui.html#a2a979a7daa368cc847ae6d9a168ff556":[29,0,0,128,1], +"structop_1_1_wrapper_struct_gui.html#a41638659ae2237d4ebfac635f4cc7842":[29,0,0,128,0], +"structop_1_1_wrapper_struct_gui.html#a9dbb0bfce2593b0a560ed738e11708ce":[29,0,0,128,3], +"structop_1_1_wrapper_struct_gui.html#ac1d393d3ce6be9304017c1aa3afd8f13":[29,0,0,128,2], +"structop_1_1_wrapper_struct_hand.html":[29,0,0,129], +"structop_1_1_wrapper_struct_hand.html#a223b29ce9a234c3fb8a7864cfe2919fc":[29,0,0,129,0], +"structop_1_1_wrapper_struct_hand.html#a2759e92ee811d7a8eb69e1b7eba29d08":[29,0,0,129,3], +"structop_1_1_wrapper_struct_hand.html#a667ad7eed9f4f96b460f331d25f3d87f":[29,0,0,129,4], +"structop_1_1_wrapper_struct_hand.html#a6a54d5b5766d23412c87bd10c26cb291":[29,0,0,129,5], +"structop_1_1_wrapper_struct_hand.html#a716f9c98cbee1a4a70d5978875795c4d":[29,0,0,129,1], +"structop_1_1_wrapper_struct_hand.html#a8074cf22f8926d7f4d1d60cacae99c3e":[29,0,0,129,2], +"structop_1_1_wrapper_struct_hand.html#a90ddd24ee55b6aabd9a1728ccd91525e":[29,0,0,129,6], +"structop_1_1_wrapper_struct_hand.html#a9655c0dfa83eefde174d09e622482089":[29,0,0,129,7], +"structop_1_1_wrapper_struct_hand.html#adaea15e182e5c75129293873cd94e35f":[29,0,0,129,8], +"structop_1_1_wrapper_struct_hand.html#aea6263dc96708b11fab72416d810b3d6":[29,0,0,129,9], +"structop_1_1_wrapper_struct_input.html":[29,0,0,130], +"structop_1_1_wrapper_struct_input.html#a2ee8db5c1fbade720719bb1464e59175":[29,0,0,130,0], +"structop_1_1_wrapper_struct_input.html#a2eeea9ee711a1dcbec99c3dc871fbc47":[29,0,0,130,12], +"structop_1_1_wrapper_struct_input.html#a4c77c6257dec58ac0a5e18cfe5b38a26":[29,0,0,130,1], 
+"structop_1_1_wrapper_struct_input.html#a5cffb282052bdd812217e54f0b2ec7d5":[29,0,0,130,5], +"structop_1_1_wrapper_struct_input.html#a5ee9722814fe2b5a695511cabd12b613":[29,0,0,130,4], +"structop_1_1_wrapper_struct_input.html#a6aec09a94fdf393d6ab3b23857c376da":[29,0,0,130,10], +"structop_1_1_wrapper_struct_input.html#a7c80f6a3687696ba30d3ce0902ac162f":[29,0,0,130,7], +"structop_1_1_wrapper_struct_input.html#a86df98e50b680b30afe100d8b2b50685":[29,0,0,130,6], +"structop_1_1_wrapper_struct_input.html#ac4349e123d359f436cc01d4068231dc2":[29,0,0,130,8], +"structop_1_1_wrapper_struct_input.html#acc72b8efe09ec3888823ed5680a19fe4":[29,0,0,130,3], +"structop_1_1_wrapper_struct_input.html#acd6a460d6c0a64bc818539b67fcafea7":[29,0,0,130,11], +"structop_1_1_wrapper_struct_input.html#adac2c3e58e1e75a96e52904762c37c42":[29,0,0,130,9], +"structop_1_1_wrapper_struct_input.html#ae2078c540324a9cdc8500dce5d361bee":[29,0,0,130,2], +"structop_1_1_wrapper_struct_input.html#ae7183e10862dbdbed422f042f1a71ed1":[29,0,0,130,13], +"structop_1_1_wrapper_struct_output.html":[29,0,0,131], +"structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de":[29,0,0,131,6], +"structop_1_1_wrapper_struct_output.html#a17c339a9c6c97e1dfdb29b3af0bdab77":[29,0,0,131,1], +"structop_1_1_wrapper_struct_output.html#a183afe9fa45aa69a9d79b9434163ed3e":[29,0,0,131,13], +"structop_1_1_wrapper_struct_output.html#a29583f73263bdffe1903ea64a9c09463":[29,0,0,131,8], +"structop_1_1_wrapper_struct_output.html#a3f6370fa1cb1f13922e36831c564588c":[29,0,0,131,19], +"structop_1_1_wrapper_struct_output.html#a49d8f54f546bfe6a6c644280c5e50458":[29,0,0,131,15], +"structop_1_1_wrapper_struct_output.html#a4b829e1d007943cba3f4a23be25bc74d":[29,0,0,131,17], +"structop_1_1_wrapper_struct_output.html#a70278a7418053ced9de2447cc78f4240":[29,0,0,131,9], +"structop_1_1_wrapper_struct_output.html#a8658c8313ac1d8ddb177d83de2e1bfe7":[29,0,0,131,5], +"structop_1_1_wrapper_struct_output.html#a873bcabae9cf7039830cae565009ce2b":[29,0,0,131,2], +"structop_1_1_wrapper_struct_output.html#a8899e8af7df7dad1215a09f61fc8198b":[29,0,0,131,12], +"structop_1_1_wrapper_struct_output.html#abfa84cf0ae76a1c07f9d33b98e9f6d84":[29,0,0,131,4], +"structop_1_1_wrapper_struct_output.html#aca7b610f478c36b643fcbd73931c9278":[29,0,0,131,7], +"structop_1_1_wrapper_struct_output.html#acb0e1a13713fd796c9452684bdb7cdaf":[29,0,0,131,10], +"structop_1_1_wrapper_struct_output.html#ad338fd4719d6f243bb64bc67f68bc7c9":[29,0,0,131,14], +"structop_1_1_wrapper_struct_output.html#ad595edffced2bfd80c3bee183f32f505":[29,0,0,131,18], +"structop_1_1_wrapper_struct_output.html#ad996d177c4b84e2d38c105f637559094":[29,0,0,131,16], +"structop_1_1_wrapper_struct_output.html#ae12454443c1c8b1c74f257eaac4927d3":[29,0,0,131,11], +"structop_1_1_wrapper_struct_output.html#ae8975341f779a89d68a125cbfb17d940":[29,0,0,131,0], +"structop_1_1_wrapper_struct_output.html#aec57f5b4f6920cd43c2f6e55a21c769b":[29,0,0,131,3], +"structop_1_1_wrapper_struct_pose.html":[29,0,0,132], +"structop_1_1_wrapper_struct_pose.html#a02c4ab6b56e4da4b3ed0da4eae8ac0fc":[29,0,0,132,18], +"structop_1_1_wrapper_struct_pose.html#a054c88e977084707e80eb31dd0a658ab":[29,0,0,132,13], +"structop_1_1_wrapper_struct_pose.html#a0f00648621ca97fde61287be23671523":[29,0,0,132,15], +"structop_1_1_wrapper_struct_pose.html#a16c4fb26e6ce76dfa577e0f4b5747733":[29,0,0,132,8], +"structop_1_1_wrapper_struct_pose.html#a25ee056d914f1cdc990a8a7956810313":[29,0,0,132,27], +"structop_1_1_wrapper_struct_pose.html#a2a5cceaf05cf228b47d2b001e05efeb8":[29,0,0,132,11], 
+"structop_1_1_wrapper_struct_pose.html#a322ff95b6a2838fe0d55afb28d2a4224":[29,0,0,132,24], +"structop_1_1_wrapper_struct_pose.html#a35147b6fb9e300d79b71637793053a1b":[29,0,0,132,21], +"structop_1_1_wrapper_struct_pose.html#a4d3ad84b14697d5f1009fa29e2ff1998":[29,0,0,132,3], +"structop_1_1_wrapper_struct_pose.html#a536ea76d50e94d513066e9e5767d0c03":[29,0,0,132,9], +"structop_1_1_wrapper_struct_pose.html#a646ae142f821411d22d772b76960d585":[29,0,0,132,25], +"structop_1_1_wrapper_struct_pose.html#a6de869a73fd338bd41e390fcb1a5bcf3":[29,0,0,132,5], +"structop_1_1_wrapper_struct_pose.html#a782ba82c6aeabea8fa625042fdf09408":[29,0,0,132,7], +"structop_1_1_wrapper_struct_pose.html#a80ead0f411ddab86f643345e4effe805":[29,0,0,132,19], +"structop_1_1_wrapper_struct_pose.html#a84edcbf2237d579adc88badaa17c9795":[29,0,0,132,14], +"structop_1_1_wrapper_struct_pose.html#a8a6273145f5e2f2ccade81865cbdfecb":[29,0,0,132,22], +"structop_1_1_wrapper_struct_pose.html#a8bafec1b3ee2f2a6473fd604925e265a":[29,0,0,132,17], +"structop_1_1_wrapper_struct_pose.html#a8be188d871061079432ead77b278fe0d":[29,0,0,132,10], +"structop_1_1_wrapper_struct_pose.html#aa2cee9019b708d48cc18313615d0189e":[29,0,0,132,4], +"structop_1_1_wrapper_struct_pose.html#aa459f2f26c1f1a929af55b8c2d39ccf6":[29,0,0,132,12], +"structop_1_1_wrapper_struct_pose.html#ab6810e97aa62a728aa09dbbe6b9b6c06":[29,0,0,132,6], +"structop_1_1_wrapper_struct_pose.html#ac1233492c750fbd98df353bffa8f9b78":[29,0,0,132,2], +"structop_1_1_wrapper_struct_pose.html#acff912f14ba3c0ba706ea99e1cef790e":[29,0,0,132,16], +"structop_1_1_wrapper_struct_pose.html#ad0f4992658b9d624184dcecf79e54e43":[29,0,0,132,20], +"structop_1_1_wrapper_struct_pose.html#ad23a9c103a60709eed9d7b7381828e5e":[29,0,0,132,26], +"structop_1_1_wrapper_struct_pose.html#ad6b5ea0cef8eb81d20ab39099ba7716e":[29,0,0,132,23], +"structop_1_1_wrapper_struct_pose.html#ad73981c6ad9b23f511ef6f12136bf8e7":[29,0,0,132,1], +"structop_1_1_wrapper_struct_pose.html#af3c639dd4de2bfebe1376a0ab7666c86":[29,0,0,132,0], +"sub_thread_8hpp.html":[30,0,1,0,11,5], +"sub_thread_8hpp.html#af98c8e514e79d4718fb1fc64dc0e431b":[30,0,1,0,11,5,1], +"sub_thread_8hpp_source.html":[30,0,1,0,11,5], +"sub_thread_no_queue_8hpp.html":[30,0,1,0,11,6], +"sub_thread_no_queue_8hpp.html#a36492d15f864f7c813a573789ea554aa":[30,0,1,0,11,6,1], +"sub_thread_no_queue_8hpp_source.html":[30,0,1,0,11,6], +"sub_thread_queue_in_8hpp.html":[30,0,1,0,11,7], +"sub_thread_queue_in_8hpp.html#a506578f3e723f992eabb627a371351ba":[30,0,1,0,11,7,1], +"sub_thread_queue_in_8hpp_source.html":[30,0,1,0,11,7], +"sub_thread_queue_in_out_8hpp.html":[30,0,1,0,11,8], +"sub_thread_queue_in_out_8hpp.html#a63605cf0e6f4049beacf6094995272e8":[30,0,1,0,11,8,1], +"sub_thread_queue_in_out_8hpp_source.html":[30,0,1,0,11,8], +"sub_thread_queue_out_8hpp.html":[30,0,1,0,11,9], +"sub_thread_queue_out_8hpp.html#aee90a0429c2d14da0c3a85cd67a17821":[30,0,1,0,11,9,1], +"sub_thread_queue_out_8hpp_source.html":[30,0,1,0,11,9], +"thread_2enum_classes_8hpp.html":[30,0,1,0,11,0], +"thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36":[30,0,1,0,11,0,0], +"thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36a288aae25bc408055f50c21c991903a44":[30,0,1,0,11,0,0,0], +"thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36a2fe4167817733fec8e6ba1afddf78f1b":[30,0,1,0,11,0,0,3], +"thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36a435b3ab344c03bfc0e4530a2e75f5e44":[30,0,1,0,11,0,0,1], 
+"thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36ac68f8680ccf3a65dfcfc63356112c9f9":[30,0,1,0,11,0,0,2], +"thread_2enum_classes_8hpp_source.html":[30,0,1,0,11,0], +"thread_2headers_8hpp.html":[30,0,1,0,11,1], +"thread_2headers_8hpp_source.html":[30,0,1,0,11,1], +"thread_8hpp.html":[30,0,1,0,11,10], +"thread_8hpp.html#ae5dac6cf1ccdf461838f9795be8fda03":[30,0,1,0,11,10,1], +"thread_8hpp_source.html":[30,0,1,0,11,10], +"thread_manager_8hpp.html":[30,0,1,0,11,11], +"thread_manager_8hpp.html#ac06eeab84c4861ef08834855b48750a6":[30,0,1,0,11,11,1], +"thread_manager_8hpp_source.html":[30,0,1,0,11,11], +"tracking_2headers_8hpp.html":[30,0,1,0,12,0], +"tracking_2headers_8hpp_source.html":[30,0,1,0,12,0], +"udp_sender_8hpp.html":[30,0,1,0,4,11], +"udp_sender_8hpp_source.html":[30,0,1,0,4,11], +"unity_2headers_8hpp.html":[30,0,1,0,13,0], +"unity_2headers_8hpp_source.html":[30,0,1,0,13,0], +"unity_binding_8hpp.html":[30,0,1,0,13,1], +"unity_binding_8hpp_source.html":[30,0,1,0,13,1], +"utilities_2enum_classes_8hpp.html":[30,0,1,0,14,1], +"utilities_2enum_classes_8hpp.html#a553bd31855c20a0d14e4c44a20bd91da":[30,0,1,0,14,1,1], +"utilities_2enum_classes_8hpp.html#a553bd31855c20a0d14e4c44a20bd91daa6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,14,1,1,1], +"utilities_2enum_classes_8hpp.html#a553bd31855c20a0d14e4c44a20bd91daafff0d600f8a0b5e19e88bfb821dd1157":[30,0,1,0,14,1,1,0], +"utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400c":[30,0,1,0,14,1,0], +"utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400ca002f2100f8870e7c823894f492e4d337":[30,0,1,0,14,1,0,2], +"utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400ca68ec2bf5b1662d1d27a523dcfc3c702a":[30,0,1,0,14,1,0,1], +"utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400cab1c94ca2fbc3e78fc30069c8d0f01680":[30,0,1,0,14,1,0,3], +"utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400cafe50b062b9b9100a72e68b48fe26fc50":[30,0,1,0,14,1,0,0] +}; diff --git a/web/html/doc/navtreeindex11.js b/web/html/doc/navtreeindex11.js new file mode 100644 index 000000000..020c8fba3 --- /dev/null +++ b/web/html/doc/navtreeindex11.js @@ -0,0 +1,213 @@ +var NAVTREEINDEX11 = +{ +"utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94":[30,0,1,0,14,1,2], +"utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94a68ec2bf5b1662d1d27a523dcfc3c702a":[30,0,1,0,14,1,2,0], +"utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94aa544d56d9492a20da20018000b5043b6":[30,0,1,0,14,1,2,1], +"utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94ab1c94ca2fbc3e78fc30069c8d0f01680":[30,0,1,0,14,1,2,2], +"utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14":[30,0,1,0,14,1,3], +"utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a28d0edd045e05cf5af64e35ae0c4c6ef":[30,0,1,0,14,1,3,1], +"utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a655d20c1ca69519ca647684edbb2db35":[30,0,1,0,14,1,3,3], +"utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a6a061313d22e51e0f25b7cd4dc065233":[30,0,1,0,14,1,3,4], +"utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a6adf97f83acf6453d4a6a4b1070f3754":[30,0,1,0,14,1,3,0], +"utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a828d496739024f4af00df1e277d96ebd":[30,0,1,0,14,1,3,5], +"utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a960b44c579bc2f6818d2daaf9e4c16f0":[30,0,1,0,14,1,3,2], 
+"utilities_2enum_classes_8hpp_source.html":[30,0,1,0,14,1], +"utilities_2headers_8hpp.html":[30,0,1,0,14,6], +"utilities_2headers_8hpp_source.html":[30,0,1,0,14,6], +"utilities_2string_8hpp.html":[30,0,1,0,14,12], +"utilities_2string_8hpp.html#a2f610ba8a71cf16628df2f4d270b7d34":[30,0,1,0,14,12,2], +"utilities_2string_8hpp.html#a3290f48d24c9992dd00d339ce49cfac7":[30,0,1,0,14,12,5], +"utilities_2string_8hpp.html#a42292d44d10f55cb1d83a296183e9b31":[30,0,1,0,14,12,4], +"utilities_2string_8hpp.html#a7a815e303884fb2b3346c8cc19d61b23":[30,0,1,0,14,12,6], +"utilities_2string_8hpp.html#a844c35ea57a8bc67f33f49deb5070652":[30,0,1,0,14,12,0], +"utilities_2string_8hpp.html#ab670c693d8e4a540cfe75ce8383b6d10":[30,0,1,0,14,12,1], +"utilities_2string_8hpp.html#ae80a103d8a4308bc435342b3d31404c8":[30,0,1,0,14,12,3], +"utilities_2string_8hpp_source.html":[30,0,1,0,14,12], +"verbose_printer_8hpp.html":[30,0,1,0,2,19], +"verbose_printer_8hpp_source.html":[30,0,1,0,2,19], +"video_capture_reader_8hpp.html":[30,0,1,0,10,8], +"video_capture_reader_8hpp_source.html":[30,0,1,0,10,8], +"video_reader_8hpp.html":[30,0,1,0,10,9], +"video_reader_8hpp_source.html":[30,0,1,0,10,9], +"video_saver_8hpp.html":[30,0,1,0,4,12], +"video_saver_8hpp_source.html":[30,0,1,0,4,12], +"w_bvh_saver_8hpp.html":[30,0,1,0,4,13], +"w_bvh_saver_8hpp_source.html":[30,0,1,0,4,13], +"w_coco_json_saver_8hpp.html":[30,0,1,0,4,14], +"w_coco_json_saver_8hpp.html#af46e80e6bac0f815006759df4c9d00c3":[30,0,1,0,4,14,1], +"w_coco_json_saver_8hpp_source.html":[30,0,1,0,4,14], +"w_cv_mat_to_op_input_8hpp.html":[30,0,1,0,2,20], +"w_cv_mat_to_op_input_8hpp.html#a9076fc1719030c2a74f21682999d2315":[30,0,1,0,2,20,1], +"w_cv_mat_to_op_input_8hpp_source.html":[30,0,1,0,2,20], +"w_cv_mat_to_op_output_8hpp.html":[30,0,1,0,2,21], +"w_cv_mat_to_op_output_8hpp.html#a6d12bd1e42cfb63d2f780bed55fa01fb":[30,0,1,0,2,21,1], +"w_cv_mat_to_op_output_8hpp_source.html":[30,0,1,0,2,21], +"w_datum_producer_8hpp.html":[30,0,1,0,10,10], +"w_datum_producer_8hpp_source.html":[30,0,1,0,10,10], +"w_face_detector_8hpp.html":[30,0,1,0,3,10], +"w_face_detector_8hpp.html#a196f17357cd1c1bb02e24e4e8a0e6ec3":[30,0,1,0,3,10,1], +"w_face_detector_8hpp_source.html":[30,0,1,0,3,10], +"w_face_detector_open_c_v_8hpp.html":[30,0,1,0,3,11], +"w_face_detector_open_c_v_8hpp.html#abf3a59fc4662f07e6ba19b95bd4da32f":[30,0,1,0,3,11,1], +"w_face_detector_open_c_v_8hpp_source.html":[30,0,1,0,3,11], +"w_face_extractor_net_8hpp.html":[30,0,1,0,3,12], +"w_face_extractor_net_8hpp.html#ab5b47f0069e9f397ff891194b20d28f2":[30,0,1,0,3,12,1], +"w_face_extractor_net_8hpp_source.html":[30,0,1,0,3,12], +"w_face_renderer_8hpp.html":[30,0,1,0,3,13], +"w_face_renderer_8hpp.html#af42afa53c725d556c14928b2603883e3":[30,0,1,0,3,13,1], +"w_face_renderer_8hpp_source.html":[30,0,1,0,3,13], +"w_face_saver_8hpp.html":[30,0,1,0,4,15], +"w_face_saver_8hpp.html#a57c4f3ada0db4882a4106d4dedf08012":[30,0,1,0,4,15,1], +"w_face_saver_8hpp_source.html":[30,0,1,0,4,15], +"w_fps_max_8hpp.html":[30,0,1,0,11,12], +"w_fps_max_8hpp.html#adfc12925650978828707c1c0dcbebd0e":[30,0,1,0,11,12,1], +"w_fps_max_8hpp_source.html":[30,0,1,0,11,12], +"w_gui3_d_8hpp.html":[30,0,1,0,6,8], +"w_gui3_d_8hpp.html#a54b38240e45009f7e6a25d956ac96fe0":[30,0,1,0,6,8,1], +"w_gui3_d_8hpp_source.html":[30,0,1,0,6,8], +"w_gui_8hpp.html":[30,0,1,0,6,7], +"w_gui_8hpp.html#ade3b2e4b105242a3cf41def3def1691d":[30,0,1,0,6,7,1], +"w_gui_8hpp_source.html":[30,0,1,0,6,7], +"w_gui_adam_8hpp.html":[30,0,1,0,6,9], +"w_gui_adam_8hpp_source.html":[30,0,1,0,6,9], 
+"w_gui_info_adder_8hpp.html":[30,0,1,0,6,10], +"w_gui_info_adder_8hpp.html#ae88e9ced5d14fa221205b492ff76c56b":[30,0,1,0,6,10,1], +"w_gui_info_adder_8hpp_source.html":[30,0,1,0,6,10], +"w_hand_detector_8hpp.html":[30,0,1,0,7,10], +"w_hand_detector_8hpp.html#a0424a8e4dc8ceb5e8d8a2230c157a7fd":[30,0,1,0,7,10,1], +"w_hand_detector_8hpp_source.html":[30,0,1,0,7,10], +"w_hand_detector_from_txt_8hpp.html":[30,0,1,0,7,11], +"w_hand_detector_from_txt_8hpp.html#a767385c8d3ebe736e1752825ab4d4ea0":[30,0,1,0,7,11,1], +"w_hand_detector_from_txt_8hpp_source.html":[30,0,1,0,7,11], +"w_hand_detector_tracking_8hpp.html":[30,0,1,0,7,12], +"w_hand_detector_tracking_8hpp.html#a361310c59d16e88a4d2450a80f078f01":[30,0,1,0,7,12,1], +"w_hand_detector_tracking_8hpp_source.html":[30,0,1,0,7,12], +"w_hand_detector_update_8hpp.html":[30,0,1,0,7,13], +"w_hand_detector_update_8hpp.html#a5cc3f625b2644b1aade85a9458b5503a":[30,0,1,0,7,13,1], +"w_hand_detector_update_8hpp_source.html":[30,0,1,0,7,13], +"w_hand_extractor_net_8hpp.html":[30,0,1,0,7,14], +"w_hand_extractor_net_8hpp.html#ae5cc3e92ffd9696f01ce7824ebbd0759":[30,0,1,0,7,14,1], +"w_hand_extractor_net_8hpp_source.html":[30,0,1,0,7,14], +"w_hand_renderer_8hpp.html":[30,0,1,0,7,15], +"w_hand_renderer_8hpp.html#a635579f5f8d20b8e65f4f94da4d3d2f2":[30,0,1,0,7,15,1], +"w_hand_renderer_8hpp_source.html":[30,0,1,0,7,15], +"w_hand_saver_8hpp.html":[30,0,1,0,4,16], +"w_hand_saver_8hpp.html#a602d5d238fe0c7096698cf36b7dee9ab":[30,0,1,0,4,16,1], +"w_hand_saver_8hpp_source.html":[30,0,1,0,4,16], +"w_heat_map_saver_8hpp.html":[30,0,1,0,4,17], +"w_heat_map_saver_8hpp.html#a7ac10b9f503668695643c366e25f3b68":[30,0,1,0,4,17,1], +"w_heat_map_saver_8hpp_source.html":[30,0,1,0,4,17], +"w_id_generator_8hpp.html":[30,0,1,0,11,13], +"w_id_generator_8hpp.html#ad22c543a4376e943b728e657fab5ed9f":[30,0,1,0,11,13,1], +"w_id_generator_8hpp_source.html":[30,0,1,0,11,13], +"w_image_saver_8hpp.html":[30,0,1,0,4,18], +"w_image_saver_8hpp.html#a505ea16cc6c2c0068bbf4e7269dc8e0a":[30,0,1,0,4,18,1], +"w_image_saver_8hpp_source.html":[30,0,1,0,4,18], +"w_joint_angle_estimation_8hpp.html":[30,0,1,0,0,4], +"w_joint_angle_estimation_8hpp_source.html":[30,0,1,0,0,4], +"w_keep_top_n_people_8hpp.html":[30,0,1,0,2,22], +"w_keep_top_n_people_8hpp.html#aaee32c4c68404e5086844bcb911b7a20":[30,0,1,0,2,22,1], +"w_keep_top_n_people_8hpp_source.html":[30,0,1,0,2,22], +"w_keypoint_scaler_8hpp.html":[30,0,1,0,2,23], +"w_keypoint_scaler_8hpp.html#a47758c703fccdbb65c26dc7bc4022237":[30,0,1,0,2,23,1], +"w_keypoint_scaler_8hpp_source.html":[30,0,1,0,2,23], +"w_op_output_to_cv_mat_8hpp.html":[30,0,1,0,2,24], +"w_op_output_to_cv_mat_8hpp.html#a1d9f50688522ed7368acc33a09ae9ece":[30,0,1,0,2,24,1], +"w_op_output_to_cv_mat_8hpp_source.html":[30,0,1,0,2,24], +"w_people_json_saver_8hpp.html":[30,0,1,0,4,19], +"w_people_json_saver_8hpp.html#a774871462f7fefb8cadea1e49f501e45":[30,0,1,0,4,19,1], +"w_people_json_saver_8hpp_source.html":[30,0,1,0,4,19], +"w_person_id_extractor_8hpp.html":[30,0,1,0,12,3], +"w_person_id_extractor_8hpp.html#a674a652ad38b355285417529fc050847":[30,0,1,0,12,3,1], +"w_person_id_extractor_8hpp_source.html":[30,0,1,0,12,3], +"w_pose_extractor_8hpp.html":[30,0,1,0,9,11], +"w_pose_extractor_8hpp.html#a020603e3ad6326cb1dce43485157f768":[30,0,1,0,9,11,1], +"w_pose_extractor_8hpp_source.html":[30,0,1,0,9,11], +"w_pose_extractor_net_8hpp.html":[30,0,1,0,9,12], +"w_pose_extractor_net_8hpp.html#ab1e242b1ae7ff3300324fbfedebb52fc":[30,0,1,0,9,12,1], +"w_pose_extractor_net_8hpp_source.html":[30,0,1,0,9,12], 
+"w_pose_renderer_8hpp.html":[30,0,1,0,9,13], +"w_pose_renderer_8hpp.html#ae76afeeeaedaebe6941f41a4bdf50e2a":[30,0,1,0,9,13,1], +"w_pose_renderer_8hpp_source.html":[30,0,1,0,9,13], +"w_pose_saver_8hpp.html":[30,0,1,0,4,20], +"w_pose_saver_8hpp.html#a31ad937a2e52ea08ce925031d26616b9":[30,0,1,0,4,20,1], +"w_pose_saver_8hpp_source.html":[30,0,1,0,4,20], +"w_pose_triangulation_8hpp.html":[30,0,1,0,0,5], +"w_pose_triangulation_8hpp.html#a53f346232d0743f3dd0f547de1fc508f":[30,0,1,0,0,5,1], +"w_pose_triangulation_8hpp_source.html":[30,0,1,0,0,5], +"w_queue_assembler_8hpp.html":[30,0,1,0,11,17], +"w_queue_assembler_8hpp_source.html":[30,0,1,0,11,17], +"w_queue_orderer_8hpp.html":[30,0,1,0,11,18], +"w_queue_orderer_8hpp.html#add981a5f6a49d35cc316a54c613497f3":[30,0,1,0,11,18,1], +"w_queue_orderer_8hpp_source.html":[30,0,1,0,11,18], +"w_scale_and_size_extractor_8hpp.html":[30,0,1,0,2,25], +"w_scale_and_size_extractor_8hpp.html#aaca98fe6101cda512a43c513182ae5cc":[30,0,1,0,2,25,1], +"w_scale_and_size_extractor_8hpp_source.html":[30,0,1,0,2,25], +"w_udp_sender_8hpp.html":[30,0,1,0,4,21], +"w_udp_sender_8hpp.html#af9e0d9e4028c0589b5eeeaed42a5088c":[30,0,1,0,4,21,1], +"w_udp_sender_8hpp_source.html":[30,0,1,0,4,21], +"w_verbose_printer_8hpp.html":[30,0,1,0,2,26], +"w_verbose_printer_8hpp.html#a89984557f6968584d1938afe7b9f32bd":[30,0,1,0,2,26,1], +"w_verbose_printer_8hpp_source.html":[30,0,1,0,2,26], +"w_video_saver3_d_8hpp.html":[30,0,1,0,4,23], +"w_video_saver3_d_8hpp.html#a0db530b6f607aa43e8f9154b308d207a":[30,0,1,0,4,23,1], +"w_video_saver3_d_8hpp_source.html":[30,0,1,0,4,23], +"w_video_saver_8hpp.html":[30,0,1,0,4,22], +"w_video_saver_8hpp.html#a49bd4106b0cd1cb81980329b06c0d2c8":[30,0,1,0,4,22,1], +"w_video_saver_8hpp_source.html":[30,0,1,0,4,22], +"webcam_reader_8hpp.html":[30,0,1,0,10,11], +"webcam_reader_8hpp_source.html":[30,0,1,0,10,11], +"worker_8hpp.html":[30,0,1,0,11,14], +"worker_8hpp.html#a5642545fda1c3bbaf60810cf0e2d2c1d":[30,0,1,0,11,14,1], +"worker_8hpp_source.html":[30,0,1,0,11,14], +"worker_consumer_8hpp.html":[30,0,1,0,11,15], +"worker_consumer_8hpp.html#a01aa5c6e24026536367cf47a64e9bba5":[30,0,1,0,11,15,1], +"worker_consumer_8hpp_source.html":[30,0,1,0,11,15], +"worker_producer_8hpp.html":[30,0,1,0,11,16], +"worker_producer_8hpp.html#a5660f0e72781ce6d7db9eb78b582e5c6":[30,0,1,0,11,16,1], +"worker_producer_8hpp_source.html":[30,0,1,0,11,16], +"wrapper_2enum_classes_8hpp.html":[30,0,1,0,15,0], +"wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fb":[30,0,1,0,15,0,0], +"wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba5bd4c87976f48e6a53919d53e14025e9":[30,0,1,0,15,0,0,1], +"wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba65c691a85367d21881220b7a3d923747":[30,0,1,0,15,0,0,3], +"wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,15,0,0,4], +"wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba900b06e1ae224594f075e0c882c73532":[30,0,1,0,15,0,0,2], +"wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fbaac101b32dda4448cf13a93fe283dddd8":[30,0,1,0,15,0,0,0], +"wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09":[30,0,1,0,15,0,1], +"wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09a00d23a76e43b46dae9ec7aa9dcbebb32":[30,0,1,0,15,0,1,1], +"wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09a6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,15,0,1,3], 
+"wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09aa6e20e86de146a7b524d32c9b1fea7f4":[30,0,1,0,15,0,1,2], +"wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09ab9f5c797ebbf55adccdd8539a65a0241":[30,0,1,0,15,0,1,0], +"wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18":[30,0,1,0,15,0,2], +"wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a05318bd0215d16e009798570b53755d2":[30,0,1,0,15,0,2,1], +"wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a29c2c02a361c9d7028472e5d92cd4a54":[30,0,1,0,15,0,2,3], +"wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a324118a6721dd6b8a9b9f4e327df2bf5":[30,0,1,0,15,0,2,0], +"wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,15,0,2,4], +"wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18aa52d6088cbae537944827c8f8c69c570":[30,0,1,0,15,0,2,2], +"wrapper_2enum_classes_8hpp_source.html":[30,0,1,0,15,0], +"wrapper_2headers_8hpp.html":[30,0,1,0,15,1], +"wrapper_2headers_8hpp_source.html":[30,0,1,0,15,1], +"wrapper_8hpp.html":[30,0,1,0,15,2], +"wrapper_8hpp.html#a790dea3c007bed742fbc8cdd5757d026":[30,0,1,0,15,2,1], +"wrapper_8hpp_source.html":[30,0,1,0,15,2], +"wrapper_auxiliary_8hpp.html":[30,0,1,0,15,3], +"wrapper_auxiliary_8hpp.html#a3da2a2a2f5ac58cfba53ea0d43ac6751":[30,0,1,0,15,3,1], +"wrapper_auxiliary_8hpp.html#a4adaee31db7ae1d3f963daa9e022e62f":[30,0,1,0,15,3,0], +"wrapper_auxiliary_8hpp.html#acc4a5460e02ae510e854724513eea822":[30,0,1,0,15,3,3], +"wrapper_auxiliary_8hpp.html#af65a4564afcad06b72468679f6bee52b":[30,0,1,0,15,3,2], +"wrapper_auxiliary_8hpp_source.html":[30,0,1,0,15,3], +"wrapper_struct_extra_8hpp.html":[30,0,1,0,15,4], +"wrapper_struct_extra_8hpp_source.html":[30,0,1,0,15,4], +"wrapper_struct_face_8hpp.html":[30,0,1,0,15,5], +"wrapper_struct_face_8hpp_source.html":[30,0,1,0,15,5], +"wrapper_struct_gui_8hpp.html":[30,0,1,0,15,6], +"wrapper_struct_gui_8hpp_source.html":[30,0,1,0,15,6], +"wrapper_struct_hand_8hpp.html":[30,0,1,0,15,7], +"wrapper_struct_hand_8hpp_source.html":[30,0,1,0,15,7], +"wrapper_struct_input_8hpp.html":[30,0,1,0,15,8], +"wrapper_struct_input_8hpp_source.html":[30,0,1,0,15,8], +"wrapper_struct_output_8hpp.html":[30,0,1,0,15,9], +"wrapper_struct_output_8hpp_source.html":[30,0,1,0,15,9], +"wrapper_struct_pose_8hpp.html":[30,0,1,0,15,10], +"wrapper_struct_pose_8hpp_source.html":[30,0,1,0,15,10] +}; diff --git a/web/html/doc/navtreeindex2.js b/web/html/doc/navtreeindex2.js new file mode 100644 index 000000000..fd3363f01 --- /dev/null +++ b/web/html/doc/navtreeindex2.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX2 = +{ +"classop_1_1_pose_extractor_caffe.html#ae5d41065ea3eaf37d2c9663aa35554d6":[29,0,0,85,9], +"classop_1_1_pose_extractor_net.html":[29,0,0,86], +"classop_1_1_pose_extractor_net.html#a28923c846dc7c731d3571c72a50acd2f":[29,0,0,86,18], +"classop_1_1_pose_extractor_net.html#a3e73f27594e61bf451b8e9fff7695f62":[29,0,0,86,7], +"classop_1_1_pose_extractor_net.html#a3e88bd2122835db768c123d1026ce30f":[29,0,0,86,14], +"classop_1_1_pose_extractor_net.html#a3fe7256d9860f4c624f5cf928556bc28":[29,0,0,86,3], +"classop_1_1_pose_extractor_net.html#a43317a6868ffa7391586f2b8b599ecdf":[29,0,0,86,15], +"classop_1_1_pose_extractor_net.html#a4959a9c9d433d9297e5daef0e3a0eabc":[29,0,0,86,17], +"classop_1_1_pose_extractor_net.html#a49e1dcb9f9d049131df866b7538507cd":[29,0,0,86,12], +"classop_1_1_pose_extractor_net.html#a528c3056546b0759fafb249a02edd1b6":[29,0,0,86,24], 
+"classop_1_1_pose_extractor_net.html#a546f0d6e0c62c7c7e2d44de848f9a174":[29,0,0,86,13], +"classop_1_1_pose_extractor_net.html#a5503fceecf280b6b1ed6e3251de46e26":[29,0,0,86,0], +"classop_1_1_pose_extractor_net.html#a56d7dd1157e70786850169897bcf6883":[29,0,0,86,6], +"classop_1_1_pose_extractor_net.html#a67ea32116dfaff15cc16e5a0a2bef822":[29,0,0,86,25], +"classop_1_1_pose_extractor_net.html#a7e49f2339e21ff784689ec78c9d69b75":[29,0,0,86,20], +"classop_1_1_pose_extractor_net.html#a80cb59fa161a7ecd3d6a016354ab9002":[29,0,0,86,9], +"classop_1_1_pose_extractor_net.html#a840c6fbdbf59d088d966ad26d45572a4":[29,0,0,86,2], +"classop_1_1_pose_extractor_net.html#a8595789b244399ecd9c4b2a774f2c74b":[29,0,0,86,23], +"classop_1_1_pose_extractor_net.html#a95c48a9fc5368af73a54aa66e44b4bc2":[29,0,0,86,4], +"classop_1_1_pose_extractor_net.html#a963c679df20b16d475aa3a7c0661135c":[29,0,0,86,1], +"classop_1_1_pose_extractor_net.html#aa8bf8cdfdede22410e2dfcea5d3f0cdc":[29,0,0,86,19], +"classop_1_1_pose_extractor_net.html#aa9138224f4977da54517398ba044b7c3":[29,0,0,86,5], +"classop_1_1_pose_extractor_net.html#aaaa4c619868bbf6306a549280002a2c6":[29,0,0,86,22], +"classop_1_1_pose_extractor_net.html#aab49f9af9f5d7e4e64957dc0feb60ca7":[29,0,0,86,21], +"classop_1_1_pose_extractor_net.html#abee987adbe411ca71b6b37ab9cd89a41":[29,0,0,86,8], +"classop_1_1_pose_extractor_net.html#ac67c1d8fcba15ccfb284f10776e9fd89":[29,0,0,86,16], +"classop_1_1_pose_extractor_net.html#ad1b526d42f690a8857c0ccdc88ff88ac":[29,0,0,86,10], +"classop_1_1_pose_extractor_net.html#ad6e1c91c60cf0041c196fd4347bbcdf5":[29,0,0,86,11], +"classop_1_1_pose_gpu_renderer.html":[29,0,0,87], +"classop_1_1_pose_gpu_renderer.html#a1582e63e33192d79f80b5879ba04d448":[29,0,0,87,0], +"classop_1_1_pose_gpu_renderer.html#a4705b3c47cd9ac8174e357999960a28f":[29,0,0,87,3], +"classop_1_1_pose_gpu_renderer.html#a9e94ab926baf360dd6b23e14fba09836":[29,0,0,87,2], +"classop_1_1_pose_gpu_renderer.html#afe3959a08624dd71cc5797eb3938e748":[29,0,0,87,1], +"classop_1_1_pose_renderer.html":[29,0,0,88], +"classop_1_1_pose_renderer.html#a1dfd34d42fa69913a9702e0a0ebcd04e":[29,0,0,88,0], +"classop_1_1_pose_renderer.html#a8ff2470d813201e992cd5e07bab23386":[29,0,0,88,1], +"classop_1_1_pose_renderer.html#a9fea1f9ce47b4b5f1015cae13f4ddcb1":[29,0,0,88,5], +"classop_1_1_pose_renderer.html#ad1e96ceb47bf205b56b50c6b2792f9e3":[29,0,0,88,3], +"classop_1_1_pose_renderer.html#aecc0a9296ca880ad6ceaf38ecd8c3c53":[29,0,0,88,4], +"classop_1_1_pose_renderer.html#af861d8213f1444b3246402061cea1b33":[29,0,0,88,2], +"classop_1_1_pose_triangulation.html":[29,0,0,1], +"classop_1_1_pose_triangulation.html#a3f4764c7063d9849b75a354a6a92f062":[29,0,0,1,1], +"classop_1_1_pose_triangulation.html#a519abdb2477c518a565803a5ef5bdc1e":[29,0,0,1,3], +"classop_1_1_pose_triangulation.html#a7858f0c4adf7845c2be072e0985af3ee":[29,0,0,1,0], +"classop_1_1_pose_triangulation.html#a90436697faa45a3676087426763014f4":[29,0,0,1,2], +"classop_1_1_pose_triangulation.html#adc3cf7eb81cb9e7d7f72fda0602ed89b":[29,0,0,1,4], +"classop_1_1_priority_queue.html":[29,0,0,102], +"classop_1_1_priority_queue.html#a469b458b035822f01b212c089d4245bc":[29,0,0,102,1], +"classop_1_1_priority_queue.html#a8e468dfaed310e54987cbb8cb1cef909":[29,0,0,102,2], +"classop_1_1_priority_queue.html#acecdd3c5789942777652b66d08578d93":[29,0,0,102,0], +"classop_1_1_producer.html":[29,0,0,96], +"classop_1_1_producer.html#a024e55b4ec769cdbc40ee21613a6ef6f":[29,0,0,96,20], +"classop_1_1_producer.html#a07f416a256a3f7e906748701ad569030":[29,0,0,96,8], 
+"classop_1_1_producer.html#a0d711ebc149dd71159ebc2902ccd8113":[29,0,0,96,7], +"classop_1_1_producer.html#a2853a47b12ab1f32138b6d944c322ebd":[29,0,0,96,5], +"classop_1_1_producer.html#a366881a952ad34071cc719477f08b968":[29,0,0,96,3], +"classop_1_1_producer.html#a58590e4a409d31f839184b4bf030a68b":[29,0,0,96,15], +"classop_1_1_producer.html#a6c5be8c556b0a744e11a11de3f185049":[29,0,0,96,6], +"classop_1_1_producer.html#a7753ffb0daa486ab0f82873b3567f95e":[29,0,0,96,17], +"classop_1_1_producer.html#a8b48342b2c4003a080b17ac411f3454f":[29,0,0,96,1], +"classop_1_1_producer.html#a94d561f95384dfa0cd91113882869d06":[29,0,0,96,4], +"classop_1_1_producer.html#a9a9424027e5bc8e0fba7c65eccc460e0":[29,0,0,96,13], +"classop_1_1_producer.html#aad1f861eaea12a3590e1beb286d023b7":[29,0,0,96,9], +"classop_1_1_producer.html#aaec98c35fe9f2695cd31be3e2d437a61":[29,0,0,96,0], +"classop_1_1_producer.html#ab23d9eeac2c1820be9191ab9f7bb1777":[29,0,0,96,11], +"classop_1_1_producer.html#ab30c7b3e34d962e0b17458d9a0947f6b":[29,0,0,96,18], +"classop_1_1_producer.html#ab35d570dc35573433ec86e3fce25e545":[29,0,0,96,10], +"classop_1_1_producer.html#abbfbe53757f75e5e77266b04e9d0fea1":[29,0,0,96,2], +"classop_1_1_producer.html#ac72a751759ae8b5a0a99552580f7fbad":[29,0,0,96,14], +"classop_1_1_producer.html#ad6d701ad0867491736374d8ea753c00e":[29,0,0,96,12], +"classop_1_1_producer.html#af11f1bbfbd61b9534c02c3e4839e19b0":[29,0,0,96,19], +"classop_1_1_producer.html#afad3eadd16cca0de2c2be8b083c0d56d":[29,0,0,96,16], +"classop_1_1_profiler.html":[29,0,0,124], +"classop_1_1_queue.html":[29,0,0,103], +"classop_1_1_queue.html#a056600a7cf4503235ba4e172cee63a7f":[29,0,0,103,1], +"classop_1_1_queue.html#a22f6d214fe4dfc743b3abf00e049c504":[29,0,0,103,2], +"classop_1_1_queue.html#ae2b845322940bfc89b6342137d8ac372":[29,0,0,103,0], +"classop_1_1_queue_base.html":[29,0,0,104], +"classop_1_1_queue_base.html#a04f7160c199f90b8f8e91ddfd40e92fb":[29,0,0,104,28], +"classop_1_1_queue_base.html#a17a52df2e912a346c412418c62268425":[29,0,0,104,10], +"classop_1_1_queue_base.html#a1ccdec39ea65a83edc54661acc283134":[29,0,0,104,3], +"classop_1_1_queue_base.html#a1d55f40e032cd5d43d63ba02040b3117":[29,0,0,104,25], +"classop_1_1_queue_base.html#a22c5e2964e9d9c18a9f02b8d2e0f30b4":[29,0,0,104,26], +"classop_1_1_queue_base.html#a247f435c95709f3246d352eee4f757af":[29,0,0,104,4], +"classop_1_1_queue_base.html#a2c7b3d0fa6502c644c3083dd68332542":[29,0,0,104,22], +"classop_1_1_queue_base.html#a32ac0e4b14a310aee62ce817e86c0356":[29,0,0,104,15], +"classop_1_1_queue_base.html#a35f0547f6020f22e49835b147b7ec52e":[29,0,0,104,19], +"classop_1_1_queue_base.html#a49c1d6740f2ce7f26eae606f109b5738":[29,0,0,104,31], +"classop_1_1_queue_base.html#a5b28915cc58e040aca673bdfdf7c8be3":[29,0,0,104,12], +"classop_1_1_queue_base.html#a5e52b4ab7e310373e3d1f1d42cfe4549":[29,0,0,104,17], +"classop_1_1_queue_base.html#a68b51dafaba93179fcef78731aaf1703":[29,0,0,104,14], +"classop_1_1_queue_base.html#a74d9b247804a226cf9a0758b25bd3ba9":[29,0,0,104,5], +"classop_1_1_queue_base.html#a77bf3592bbb6ac586cd4c2b0aea98e62":[29,0,0,104,27], +"classop_1_1_queue_base.html#a7905841f953be7099847cc7b5b17ae0c":[29,0,0,104,16], +"classop_1_1_queue_base.html#a7b3f810bb6e729be3afe3313c4b2f31b":[29,0,0,104,9], +"classop_1_1_queue_base.html#a7c382bb98f5b769cde37b06d67cb0530":[29,0,0,104,29], +"classop_1_1_queue_base.html#a80c6e2dda17afa82aae83aeadad1f7e0":[29,0,0,104,18], +"classop_1_1_queue_base.html#a84da9e045acec02e3900153eea2bd92d":[29,0,0,104,21], 
+"classop_1_1_queue_base.html#a8a5d53c7b66fd0ef34b3e276f586e355":[29,0,0,104,20], +"classop_1_1_queue_base.html#a8b5e59161a0b175d12955b552a90a47f":[29,0,0,104,24], +"classop_1_1_queue_base.html#a8d218f599b84194909691c72ee0de8d0":[29,0,0,104,6], +"classop_1_1_queue_base.html#a8fd69ac0ffcda02d0d26102e2ebd2841":[29,0,0,104,13], +"classop_1_1_queue_base.html#a9f529f94ff3b98e3ac11d796caa31239":[29,0,0,104,11], +"classop_1_1_queue_base.html#aad7a6a666dcf70834d9d18ae6d92cb2c":[29,0,0,104,8], +"classop_1_1_queue_base.html#ab28c5805dd23117c8d6d82d59617bb95":[29,0,0,104,23], +"classop_1_1_queue_base.html#ad124d414b7c2680e5312ee163d18410f":[29,0,0,104,7], +"classop_1_1_queue_base.html#adc5df8a039d360831db06e3c610bf015":[29,0,0,104,2], +"classop_1_1_queue_base.html#aea7941746e2403a09356b9c6a208784c":[29,0,0,104,0], +"classop_1_1_queue_base.html#aef098201d9084083adba5ceeb45b12fa":[29,0,0,104,1], +"classop_1_1_queue_base.html#af2c0f21c6b4f4639661b59aa247ae407":[29,0,0,104,30], +"classop_1_1_renderer.html":[29,0,0,15], +"classop_1_1_renderer.html#a00caf604fad781dfcf3bc311ef6a6623":[29,0,0,15,0], +"classop_1_1_renderer.html#a039e88897ed844551cadb115ea98e9ef":[29,0,0,15,8], +"classop_1_1_renderer.html#a298a5a58bab80b7252db7d3386a0ed8a":[29,0,0,15,6], +"classop_1_1_renderer.html#a3cf2d07dc9df42db4648398367c72dbb":[29,0,0,15,3], +"classop_1_1_renderer.html#a3e04644546dd9990a16d0b6861b60553":[29,0,0,15,17], +"classop_1_1_renderer.html#a44e13a965a9b0fca119ea897ad1348e0":[29,0,0,15,5], +"classop_1_1_renderer.html#a88449a7c29a48e157cd6b16089825be7":[29,0,0,15,13], +"classop_1_1_renderer.html#a9d46c28d88225af94468c757ab1b26c1":[29,0,0,15,10], +"classop_1_1_renderer.html#aa8339054ed113d99ca70208d0cee5aa9":[29,0,0,15,9], +"classop_1_1_renderer.html#ab226d47f554735fa3e0372ce429747c3":[29,0,0,15,12], +"classop_1_1_renderer.html#ab776e07b5b2f3a3b0aca0ce95d67796b":[29,0,0,15,2], +"classop_1_1_renderer.html#abaea1725725ff775aed0c120b2ba3d1f":[29,0,0,15,7], +"classop_1_1_renderer.html#abd45555a9864e799309b72902b6cec30":[29,0,0,15,1], +"classop_1_1_renderer.html#aca8ebf0c0a50b87f0be82afa090155a0":[29,0,0,15,16], +"classop_1_1_renderer.html#ace2490fa3c5a87443e4d1e64007cd1ff":[29,0,0,15,15], +"classop_1_1_renderer.html#ad2ac64e018f2b925d0c8d45883928b68":[29,0,0,15,4], +"classop_1_1_renderer.html#adc4cd0a62008325c5c7df6df2f95a167":[29,0,0,15,14], +"classop_1_1_renderer.html#afd48a9cb0be184303dce2969fa2f8e02":[29,0,0,15,11], +"classop_1_1_resize_and_merge_caffe.html":[29,0,0,82], +"classop_1_1_resize_and_merge_caffe.html#a13d984c2ec4b5440a694b9a2dfa64521":[29,0,0,82,6], +"classop_1_1_resize_and_merge_caffe.html#a2f3f7903827e3abc3dab35ebdad002a6":[29,0,0,82,4], +"classop_1_1_resize_and_merge_caffe.html#a30805a265fa887eff04b1200dbda91a5":[29,0,0,82,0], +"classop_1_1_resize_and_merge_caffe.html#a4836b2f08273896f58c2d63a15c871e8":[29,0,0,82,2], +"classop_1_1_resize_and_merge_caffe.html#a5dc1aa7c462bd8df8b6a8377418e19d4":[29,0,0,82,1], +"classop_1_1_resize_and_merge_caffe.html#a65e81f3ac60a58a29f302d818d5b0c8f":[29,0,0,82,5], +"classop_1_1_resize_and_merge_caffe.html#a90e4af20eee1bfaf152937199f3ad068":[29,0,0,82,11], +"classop_1_1_resize_and_merge_caffe.html#aa16862bbc207fef227d53d37223512b8":[29,0,0,82,10], +"classop_1_1_resize_and_merge_caffe.html#aba74db20a0aca30b797f590548de4272":[29,0,0,82,7], +"classop_1_1_resize_and_merge_caffe.html#abd4c8a363c569fbb4187cd928c481334":[29,0,0,82,9], +"classop_1_1_resize_and_merge_caffe.html#acfa7742f943fd741acf0bf383c572655":[29,0,0,82,3], 
+"classop_1_1_resize_and_merge_caffe.html#ad7441a1f8db85f6239830603fb7a6325":[29,0,0,82,8], +"classop_1_1_scale_and_size_extractor.html":[29,0,0,16], +"classop_1_1_scale_and_size_extractor.html#a4618beea6f87df0c4eac6c6a204bd269":[29,0,0,16,0], +"classop_1_1_scale_and_size_extractor.html#a90bc64fe3c8ee45cfe5f3bd73a8bb3c9":[29,0,0,16,1], +"classop_1_1_scale_and_size_extractor.html#aa05b7698ff8417072787009c85a14421":[29,0,0,16,2], +"classop_1_1_spinnaker_wrapper.html":[29,0,0,97], +"classop_1_1_spinnaker_wrapper.html#a2135a9d3c9dbab4c1e0ee6be6c31b93a":[29,0,0,97,5], +"classop_1_1_spinnaker_wrapper.html#a427bf92ca3fc9011b01c57833b078154":[29,0,0,97,2], +"classop_1_1_spinnaker_wrapper.html#a51e869f56a6517bd55783ea039066d7d":[29,0,0,97,7], +"classop_1_1_spinnaker_wrapper.html#a5d1ba90b4d1987423b330de2fdcdb702":[29,0,0,97,0], +"classop_1_1_spinnaker_wrapper.html#a6e66639ee75708486b3d9aa4598607c0":[29,0,0,97,8], +"classop_1_1_spinnaker_wrapper.html#a76849430ae48ba14cbdd0b68bca133fb":[29,0,0,97,4], +"classop_1_1_spinnaker_wrapper.html#a8ae3e45fba6f9d0943cbd9038e98b066":[29,0,0,97,1], +"classop_1_1_spinnaker_wrapper.html#aad97f57040a953cbce0f20c6b3303202":[29,0,0,97,6], +"classop_1_1_spinnaker_wrapper.html#aaf441c78eeb921886a09412d8af9ddbc":[29,0,0,97,3], +"classop_1_1_string.html":[29,0,0,17], +"classop_1_1_string.html#a5848aace0a849fafffb3a2ae78d05156":[29,0,0,17,2], +"classop_1_1_string.html#a5f1c9f53adedf082ee0cad43fa6140be":[29,0,0,17,0], +"classop_1_1_string.html#a82003e99b9f3e9bd0054873deac970da":[29,0,0,17,4], +"classop_1_1_string.html#ad8384eb6141b3fc53e5dc246be77cf6c":[29,0,0,17,1], +"classop_1_1_string.html#aeae63b12cb517a5cdaf55b836a92a49c":[29,0,0,17,3], +"classop_1_1_sub_thread.html":[29,0,0,105], +"classop_1_1_sub_thread.html#a14330cbc1117f32b6d69c1733ccdeb61":[29,0,0,105,4], +"classop_1_1_sub_thread.html#a3e8e044b20842d15b1caedf8a78be622":[29,0,0,105,2], +"classop_1_1_sub_thread.html#a6ee67e375611e8df2d09b3234dedf36c":[29,0,0,105,1], +"classop_1_1_sub_thread.html#a8debc3b655463847fed2c547d13326f7":[29,0,0,105,3], +"classop_1_1_sub_thread.html#aa551df0d8f0b30aaf9e0840ecf29d749":[29,0,0,105,0], +"classop_1_1_sub_thread.html#ad9f2d3be9e05739b102fad350e1a1364":[29,0,0,105,5], +"classop_1_1_sub_thread_no_queue.html":[29,0,0,106], +"classop_1_1_sub_thread_no_queue.html#a43504502c36461305d656fb87b914749":[29,0,0,106,2], +"classop_1_1_sub_thread_no_queue.html#acb7edd02e1724e0fd131235666009f42":[29,0,0,106,3], +"classop_1_1_sub_thread_no_queue.html#ad88bbbe72f4777603d71e322b0fd20ed":[29,0,0,106,1], +"classop_1_1_sub_thread_no_queue.html#afbaf89d7a8fb4d19f67064fb954a31eb":[29,0,0,106,0], +"classop_1_1_sub_thread_queue_in.html":[29,0,0,107], +"classop_1_1_sub_thread_queue_in.html#a11aa71a818430c4eb435a1626e54f29a":[29,0,0,107,0], +"classop_1_1_sub_thread_queue_in.html#a7e9bd6ca09bb77a8de76ae8a02ee8ed4":[29,0,0,107,2], +"classop_1_1_sub_thread_queue_in.html#a8a479c4ddc5b42f1dbf329c4a0c235c3":[29,0,0,107,1], +"classop_1_1_sub_thread_queue_in_out.html":[29,0,0,108], +"classop_1_1_sub_thread_queue_in_out.html#a87d122e11adc7363d9b24c7f796d3d33":[29,0,0,108,1], +"classop_1_1_sub_thread_queue_in_out.html#aa5b9beea615b8b968c5da74dd66a6d78":[29,0,0,108,0], +"classop_1_1_sub_thread_queue_in_out.html#abb65911e9d9b6d5efe782ca0e599be3b":[29,0,0,108,2], +"classop_1_1_sub_thread_queue_out.html":[29,0,0,109], +"classop_1_1_sub_thread_queue_out.html#a0ff5f79e63038ffa5b4aca24cfea7e7c":[29,0,0,109,2], +"classop_1_1_sub_thread_queue_out.html#aa4a827932f632f1f30b5650a4fcc77ff":[29,0,0,109,0], 
+"classop_1_1_sub_thread_queue_out.html#ab61e068d6dddd2914b25638ebeff0f3b":[29,0,0,109,1], +"classop_1_1_thread.html":[29,0,0,110], +"classop_1_1_thread.html#a0617df4103c25bb04ee2c75f05ea2978":[29,0,0,110,4], +"classop_1_1_thread.html#a151e4e647917f2351cc05a8861588e2a":[29,0,0,110,2], +"classop_1_1_thread.html#a16d1835e2bd7c5ae988f4bc225b3ca09":[29,0,0,110,7], +"classop_1_1_thread.html#a3ed032f4c42ef1797873122aa96a055d":[29,0,0,110,6], +"classop_1_1_thread.html#a6ae463dc996ca6941a303b0c41288063":[29,0,0,110,1], +"classop_1_1_thread.html#a820b9416b96c69cb1fc6773b9a53a47a":[29,0,0,110,3], +"classop_1_1_thread.html#a92e5dd0f60a0485e7d0fad3e82bb74f3":[29,0,0,110,9], +"classop_1_1_thread.html#a9d3408a329a475da22a8e2a0bdf5f68d":[29,0,0,110,0], +"classop_1_1_thread.html#ac898abffd6ed18456b97ef1b72935ec6":[29,0,0,110,8], +"classop_1_1_thread.html#ad6c3721793d0f65ffe755ab74534afed":[29,0,0,110,5], +"classop_1_1_thread_manager.html":[29,0,0,111], +"classop_1_1_thread_manager.html#a01c2d62e539896e36564457ab9cac25c":[29,0,0,111,9], +"classop_1_1_thread_manager.html#a03c6587dbc60b266bee04b9714647fba":[29,0,0,111,1], +"classop_1_1_thread_manager.html#a36bd8060a4f7f449a8aa35d9a166270d":[29,0,0,111,14], +"classop_1_1_thread_manager.html#a393a9f04c70a002f5ceb5e301eea5cff":[29,0,0,111,15], +"classop_1_1_thread_manager.html#a472a1ebee700c3449bac4d6d2bb0c3a8":[29,0,0,111,10], +"classop_1_1_thread_manager.html#a48ea53b3de4d09c84db18e2c31ce1be1":[29,0,0,111,5], +"classop_1_1_thread_manager.html#a59916fc3428aaf5c487e1dd373d437cd":[29,0,0,111,12], +"classop_1_1_thread_manager.html#a5b7c5ea46c360496e261094c5e1397a7":[29,0,0,111,7], +"classop_1_1_thread_manager.html#a67a2d7cecc749be414e6896a88ec268d":[29,0,0,111,4], +"classop_1_1_thread_manager.html#a762acc9eb60bd10857da1f416e169f3d":[29,0,0,111,3], +"classop_1_1_thread_manager.html#a7a24fd902ebd4b5fd81166547a5654d9":[29,0,0,111,13], +"classop_1_1_thread_manager.html#a7bad63adddf7a35a436911ada2a1c519":[29,0,0,111,6], +"classop_1_1_thread_manager.html#a8134abeaec65b5647ae92e34f3ad420b":[29,0,0,111,2], +"classop_1_1_thread_manager.html#a8b7d17f4a330495389e646bb21907303":[29,0,0,111,0], +"classop_1_1_thread_manager.html#a8d5ffd9473557ff0f90ac1c6a1bae3ad":[29,0,0,111,11], +"classop_1_1_thread_manager.html#abfa315257b3e8cd022573f439b4936ec":[29,0,0,111,16], +"classop_1_1_thread_manager.html#ace408d1d281193a9f3d3d6561181ef56":[29,0,0,111,8], +"classop_1_1_udp_sender.html":[29,0,0,44], +"classop_1_1_udp_sender.html#a2e8b52e1fd5a3383ebc9063ce21f6f06":[29,0,0,44,2], +"classop_1_1_udp_sender.html#a80fb12e5d4357e5dbb37c8a7b660c67c":[29,0,0,44,0], +"classop_1_1_udp_sender.html#ac85192d475d5e84b9dcc839d5e240585":[29,0,0,44,1], +"classop_1_1_verbose_printer.html":[29,0,0,18], +"classop_1_1_verbose_printer.html#a5c4ef10db4aba13be43b92ab4e6c4d3e":[29,0,0,18,1], +"classop_1_1_verbose_printer.html#a79d2dc59b75a0164f60d875ef78523da":[29,0,0,18,0], +"classop_1_1_verbose_printer.html#ab85c8d6555a52eb77042646dfe798fbf":[29,0,0,18,2], +"classop_1_1_video_capture_reader.html":[29,0,0,98], +"classop_1_1_video_capture_reader.html#a06348fd9a290fc2ece2f3c2e4dc9bc70":[29,0,0,98,4], +"classop_1_1_video_capture_reader.html#a2f73e10efe7f9b24a6cb75af2167de58":[29,0,0,98,8], +"classop_1_1_video_capture_reader.html#a33aabaf7c82773f117f6842ff900fa18":[29,0,0,98,5], +"classop_1_1_video_capture_reader.html#a3fe940326900ac6a2289de85664b14be":[29,0,0,98,1], +"classop_1_1_video_capture_reader.html#a64e5cbfb1c556d64cabcebc6eb94eaf1":[29,0,0,98,3], 
+"classop_1_1_video_capture_reader.html#a7ea52eabf5133a1a01d38f95b1a4b601":[29,0,0,98,2], +"classop_1_1_video_capture_reader.html#a9f58d9280a26d94ff4ba6cd93f4928a0":[29,0,0,98,6], +"classop_1_1_video_capture_reader.html#ab0c6519396faae82ec1b49262ed454a2":[29,0,0,98,7], +"classop_1_1_video_capture_reader.html#ab2929b7d2d002b58ebaf7b9b56999cca":[29,0,0,98,10], +"classop_1_1_video_capture_reader.html#ab85b68c93854dd7c2ad437477e819506":[29,0,0,98,9], +"classop_1_1_video_capture_reader.html#ae07295c083ce99b032ce219ea15405d9":[29,0,0,98,0], +"classop_1_1_video_reader.html":[29,0,0,99], +"classop_1_1_video_reader.html#a057a7d0c498c48639b38c10ac7efc183":[29,0,0,99,2], +"classop_1_1_video_reader.html#a0dd53334327642368d41ec860e64e756":[29,0,0,99,5], +"classop_1_1_video_reader.html#a219e3901e489a293e85fe9a872e7fb78":[29,0,0,99,0], +"classop_1_1_video_reader.html#a26cee6225a62c4e120ae9ea2e4a9a41c":[29,0,0,99,1], +"classop_1_1_video_reader.html#a503e70039e2cfecfe2d31771df509733":[29,0,0,99,4], +"classop_1_1_video_reader.html#a508eed918fbe3bfe3eff4c1ebacb3463":[29,0,0,99,3], +"classop_1_1_video_saver.html":[29,0,0,45], +"classop_1_1_video_saver.html#a0c5dadfa4f687283c370e7890ae5037f":[29,0,0,45,2], +"classop_1_1_video_saver.html#a413aba00e90b40f6cd62144c98d7723c":[29,0,0,45,0], +"classop_1_1_video_saver.html#a4ecf895fc5cd7508ac139a7b69fc25e7":[29,0,0,45,4], +"classop_1_1_video_saver.html#a6f6914d16434cebc9a6c596472b212aa":[29,0,0,45,3], +"classop_1_1_video_saver.html#acfb839eb14ac032055930932db966e84":[29,0,0,45,1], +"classop_1_1_w_coco_json_saver.html":[29,0,0,46], +"classop_1_1_w_coco_json_saver.html#a49ba32973e43c176c88d17aa805f1ab5":[29,0,0,46,1] +}; diff --git a/web/html/doc/navtreeindex3.js b/web/html/doc/navtreeindex3.js new file mode 100644 index 000000000..4a9d392b9 --- /dev/null +++ b/web/html/doc/navtreeindex3.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX3 = +{ +"classop_1_1_w_coco_json_saver.html#a508c1105406b3cc55dc6bd1b299f6ed3":[29,0,0,46,0], +"classop_1_1_w_coco_json_saver.html#a5cca095ff23c3134ab0addc9a4feabaf":[29,0,0,46,2], +"classop_1_1_w_coco_json_saver.html#af152a61abc9ab46da651c9d87e6775f0":[29,0,0,46,3], +"classop_1_1_w_cv_mat_to_op_input.html":[29,0,0,19], +"classop_1_1_w_cv_mat_to_op_input.html#a82c13641d071fdb5db50afdee7cfa849":[29,0,0,19,0], +"classop_1_1_w_cv_mat_to_op_input.html#a8ae2eb423f1fe70f4154716b38b62719":[29,0,0,19,1], +"classop_1_1_w_cv_mat_to_op_input.html#aa7faa9e2671a85d36aad3366a7958f58":[29,0,0,19,3], +"classop_1_1_w_cv_mat_to_op_input.html#ac03534bbe3b6c3c45efb61b5d78402da":[29,0,0,19,2], +"classop_1_1_w_cv_mat_to_op_output.html":[29,0,0,20], +"classop_1_1_w_cv_mat_to_op_output.html#a04cd3d8e91d731a36e3c7830631e47af":[29,0,0,20,0], +"classop_1_1_w_cv_mat_to_op_output.html#a0bf2e43d2586c83fdd5cb0b1b54aefca":[29,0,0,20,3], +"classop_1_1_w_cv_mat_to_op_output.html#ad4c957d391e371b7ee56cdb5be6b1452":[29,0,0,20,2], +"classop_1_1_w_cv_mat_to_op_output.html#add97e472ab242fe72221cf0591801f81":[29,0,0,20,1], +"classop_1_1_w_datum_producer.html":[29,0,0,100], +"classop_1_1_w_datum_producer.html#a4381eaec4625824ebaa2d23f0cf1be48":[29,0,0,100,2], +"classop_1_1_w_datum_producer.html#a728efd416b307b5ffa25c44b0fbf7760":[29,0,0,100,0], +"classop_1_1_w_datum_producer.html#a858e64351ef6d3942bc7d53678badcc7":[29,0,0,100,1], +"classop_1_1_w_datum_producer.html#aac2674f961492fa299da18d716a617b4":[29,0,0,100,3], +"classop_1_1_w_face_detector.html":[29,0,0,33], +"classop_1_1_w_face_detector.html#a721ced99378516c04cb3cff296cc274a":[29,0,0,33,3], 
+"classop_1_1_w_face_detector.html#a77355426bc59b212a8eb1730ff6289f3":[29,0,0,33,0], +"classop_1_1_w_face_detector.html#ac0aa45b289e6800bb76bfbfc8a216035":[29,0,0,33,1], +"classop_1_1_w_face_detector.html#afaca53a669f0cd43103f7317aded75d3":[29,0,0,33,2], +"classop_1_1_w_face_detector_open_c_v.html":[29,0,0,34], +"classop_1_1_w_face_detector_open_c_v.html#a2942e145f9c4c720aad7c810a3d0f3f3":[29,0,0,34,1], +"classop_1_1_w_face_detector_open_c_v.html#a4d3a4a29bcb7b8c141ae1917634ca4c9":[29,0,0,34,3], +"classop_1_1_w_face_detector_open_c_v.html#a8c765201f0cc9440f8d172c8d8c76a62":[29,0,0,34,0], +"classop_1_1_w_face_detector_open_c_v.html#ad7dce5824ba32bc07d2474c20b23e62d":[29,0,0,34,2], +"classop_1_1_w_face_extractor_net.html":[29,0,0,35], +"classop_1_1_w_face_extractor_net.html#a31bd32d4b9922ea456c97343c94501ac":[29,0,0,35,0], +"classop_1_1_w_face_extractor_net.html#aa47940fb2ed940a53c7a305ce45817a3":[29,0,0,35,3], +"classop_1_1_w_face_extractor_net.html#ac04b0bec061a6cbc6a6afacb3f8d15c7":[29,0,0,35,2], +"classop_1_1_w_face_extractor_net.html#ae781bd1a7d450983a9aa168860d4e96d":[29,0,0,35,1], +"classop_1_1_w_face_renderer.html":[29,0,0,36], +"classop_1_1_w_face_renderer.html#a2f06bfea6521c7528fc7b07b9b067351":[29,0,0,36,1], +"classop_1_1_w_face_renderer.html#a51e2a661867adee200f5c4029a585e5d":[29,0,0,36,0], +"classop_1_1_w_face_renderer.html#a7b72c70dc02c9209d84992caad6ad7d0":[29,0,0,36,2], +"classop_1_1_w_face_renderer.html#aa52166ea2d5e0f201c94d5c4fe74216e":[29,0,0,36,3], +"classop_1_1_w_face_saver.html":[29,0,0,47], +"classop_1_1_w_face_saver.html#a026bfad8cd9e0d1289a1db473cef34a0":[29,0,0,47,3], +"classop_1_1_w_face_saver.html#a5dc60ede4b88594d59ece4ce3e4683d6":[29,0,0,47,0], +"classop_1_1_w_face_saver.html#ae27f54e5aead73b6eb604d0a0a06e18f":[29,0,0,47,1], +"classop_1_1_w_face_saver.html#ae8401789881462eb8438c65e9d2d3fb2":[29,0,0,47,2], +"classop_1_1_w_fps_max.html":[29,0,0,112], +"classop_1_1_w_fps_max.html#a8b9f49fb22b18dbee786922af15ba939":[29,0,0,112,3], +"classop_1_1_w_fps_max.html#af48214bbb4ed5c84efe1adf845aa9318":[29,0,0,112,1], +"classop_1_1_w_fps_max.html#af8c5f74f0271d227b2c70b4415366332":[29,0,0,112,2], +"classop_1_1_w_fps_max.html#afc487c8404a9d4794bcccdd43f0368f6":[29,0,0,112,0], +"classop_1_1_w_gui.html":[29,0,0,60], +"classop_1_1_w_gui.html#a3c55ca3290f64181201890fae10e4002":[29,0,0,60,1], +"classop_1_1_w_gui.html#a4e4db210b87f78cc1238dd3ab2bedaa4":[29,0,0,60,2], +"classop_1_1_w_gui.html#a4e7d3f5b3ddaf02109738b4348250611":[29,0,0,60,0], +"classop_1_1_w_gui.html#a664e1f76211510e38b8d5f5bed37ffcb":[29,0,0,60,3], +"classop_1_1_w_gui3_d.html":[29,0,0,61], +"classop_1_1_w_gui3_d.html#a62b93d2704634170339827ee1f93fa97":[29,0,0,61,1], +"classop_1_1_w_gui3_d.html#a7da4f85892e0d7d9e105c6d471a706a3":[29,0,0,61,2], +"classop_1_1_w_gui3_d.html#ab61a31574460ff87efa99ed7362474ed":[29,0,0,61,0], +"classop_1_1_w_gui3_d.html#afe019cff8fd5ed2f59f59d886de7473a":[29,0,0,61,3], +"classop_1_1_w_gui_info_adder.html":[29,0,0,62], +"classop_1_1_w_gui_info_adder.html#a0b2c539b72ef09106ab0306dc88c5ac5":[29,0,0,62,0], +"classop_1_1_w_gui_info_adder.html#ab369f542339af87ff652fc6e8e5408dd":[29,0,0,62,1], +"classop_1_1_w_gui_info_adder.html#ae620275d6570fd5c74f33728cd340217":[29,0,0,62,2], +"classop_1_1_w_gui_info_adder.html#ae90a68c6ef7b4f45595a020efd232612":[29,0,0,62,3], +"classop_1_1_w_hand_detector.html":[29,0,0,70], +"classop_1_1_w_hand_detector.html#a5c29c944205ee0727f76c282ef55ae52":[29,0,0,70,2], +"classop_1_1_w_hand_detector.html#a7a740a7f9275b7016013728dbed001d0":[29,0,0,70,1], 
+"classop_1_1_w_hand_detector.html#aa82ef40fad1d343b5856b41ec4dbcd5c":[29,0,0,70,3], +"classop_1_1_w_hand_detector.html#ac44b474c7d8bd4876e32ceb9c9a322fe":[29,0,0,70,0], +"classop_1_1_w_hand_detector_from_txt.html":[29,0,0,71], +"classop_1_1_w_hand_detector_from_txt.html#a01a5f73b0a8a1b8998937e7ba3d747a3":[29,0,0,71,0], +"classop_1_1_w_hand_detector_from_txt.html#a51ebff94734350463fcf507a84eeefdc":[29,0,0,71,3], +"classop_1_1_w_hand_detector_from_txt.html#acd7d37555c09a58dc660811724930276":[29,0,0,71,2], +"classop_1_1_w_hand_detector_from_txt.html#ae51bcc36e790b298d3cd0c231d4b3640":[29,0,0,71,1], +"classop_1_1_w_hand_detector_tracking.html":[29,0,0,72], +"classop_1_1_w_hand_detector_tracking.html#a20ef6206194a873c2cfa7fe13d905d92":[29,0,0,72,2], +"classop_1_1_w_hand_detector_tracking.html#a7c849c5a423ffc150c6a4aee9055d34e":[29,0,0,72,3], +"classop_1_1_w_hand_detector_tracking.html#a7d884dfd00822de27742a2392fb210bb":[29,0,0,72,1], +"classop_1_1_w_hand_detector_tracking.html#ad2a5ac720f4ed651f4cf5e42d21c05dd":[29,0,0,72,0], +"classop_1_1_w_hand_detector_update.html":[29,0,0,73], +"classop_1_1_w_hand_detector_update.html#a29d71b3c1ee52f04bd52b932db350b59":[29,0,0,73,1], +"classop_1_1_w_hand_detector_update.html#a729aaa628e4f4c24e7cb9afca1cdc761":[29,0,0,73,2], +"classop_1_1_w_hand_detector_update.html#abd8b56fbfbd2a619a4f37d148592f61b":[29,0,0,73,0], +"classop_1_1_w_hand_detector_update.html#af9287dc0a3c67abd35974c1c74614f3c":[29,0,0,73,3], +"classop_1_1_w_hand_extractor_net.html":[29,0,0,74], +"classop_1_1_w_hand_extractor_net.html#a21ffee48567b1c7c8994e4effef6cffe":[29,0,0,74,3], +"classop_1_1_w_hand_extractor_net.html#a464a629c6ecd9727da53453af8266e1d":[29,0,0,74,0], +"classop_1_1_w_hand_extractor_net.html#a7904f62b91d658a06ed89f0bfd307642":[29,0,0,74,2], +"classop_1_1_w_hand_extractor_net.html#ab46b680c14fb2a0cb171b040da484eda":[29,0,0,74,1], +"classop_1_1_w_hand_renderer.html":[29,0,0,75], +"classop_1_1_w_hand_renderer.html#a2ee88145b38fea1a6a2bb7987a33bd40":[29,0,0,75,2], +"classop_1_1_w_hand_renderer.html#a30121b55c601aed3644996d010b6bf8c":[29,0,0,75,0], +"classop_1_1_w_hand_renderer.html#ab18c8602c8bf65e3e762b2ff06def220":[29,0,0,75,1], +"classop_1_1_w_hand_renderer.html#ad178e8d413b3b15edc53625e1f5119d7":[29,0,0,75,3], +"classop_1_1_w_hand_saver.html":[29,0,0,48], +"classop_1_1_w_hand_saver.html#aa234a68d1cc7ec97fefbf30239149baa":[29,0,0,48,2], +"classop_1_1_w_hand_saver.html#ab41ecc429abfe0a1424facd6ee4acd1f":[29,0,0,48,0], +"classop_1_1_w_hand_saver.html#abf4a45c6ebe82fca1e0f0db1d3e2af79":[29,0,0,48,1], +"classop_1_1_w_hand_saver.html#afc3976b394070927b9396163137317e5":[29,0,0,48,3], +"classop_1_1_w_heat_map_saver.html":[29,0,0,49], +"classop_1_1_w_heat_map_saver.html#a20e82b121a580c578f69cbb0401c4cb0":[29,0,0,49,2], +"classop_1_1_w_heat_map_saver.html#a5b72d5f3bcbdacb26ba440b80eef0109":[29,0,0,49,0], +"classop_1_1_w_heat_map_saver.html#a5fd729a47f0cdbe94001219f971f8f51":[29,0,0,49,3], +"classop_1_1_w_heat_map_saver.html#aa651ec613c81cf9a19222428bd59feed":[29,0,0,49,1], +"classop_1_1_w_id_generator.html":[29,0,0,113], +"classop_1_1_w_id_generator.html#a03bd005cf88749702fb8a29c20d4cb91":[29,0,0,113,3], +"classop_1_1_w_id_generator.html#a50a1b7929810daae87ee6443c659edad":[29,0,0,113,2], +"classop_1_1_w_id_generator.html#a6112733ee0b537d4d91191f93f0a84f8":[29,0,0,113,0], +"classop_1_1_w_id_generator.html#ad9e160c5120aa850fbe2285f78e062e2":[29,0,0,113,1], +"classop_1_1_w_image_saver.html":[29,0,0,50], +"classop_1_1_w_image_saver.html#a11add012ee88b64a4f36d3f63cb65ee0":[29,0,0,50,0], 
+"classop_1_1_w_image_saver.html#a198bbfcf625354ddda419e0121d0cb33":[29,0,0,50,3], +"classop_1_1_w_image_saver.html#a78655ea3d4dac28bdf7e2e4a80b5a337":[29,0,0,50,2], +"classop_1_1_w_image_saver.html#ab8371a260e35cdea5010327240c9a53d":[29,0,0,50,1], +"classop_1_1_w_keep_top_n_people.html":[29,0,0,21], +"classop_1_1_w_keep_top_n_people.html#a56371016b6fe1fbacdba8d558685719b":[29,0,0,21,2], +"classop_1_1_w_keep_top_n_people.html#a5928a091e0990706ab2ea5e5e07629dd":[29,0,0,21,3], +"classop_1_1_w_keep_top_n_people.html#ad23785b42b85c166e5080f47591cccaa":[29,0,0,21,1], +"classop_1_1_w_keep_top_n_people.html#aebe939c354cfb62cb6d950f73d14731b":[29,0,0,21,0], +"classop_1_1_w_keypoint_scaler.html":[29,0,0,22], +"classop_1_1_w_keypoint_scaler.html#a31624e262988b0840a8ddbf098e56e9b":[29,0,0,22,0], +"classop_1_1_w_keypoint_scaler.html#aacad5116921e2ff746fbdf9f6c0cbb25":[29,0,0,22,3], +"classop_1_1_w_keypoint_scaler.html#aba4fb004818f3adc22959e382a90cd2c":[29,0,0,22,2], +"classop_1_1_w_keypoint_scaler.html#af4e30e78dba64f2784a1757bc2eb9f8b":[29,0,0,22,1], +"classop_1_1_w_op_output_to_cv_mat.html":[29,0,0,23], +"classop_1_1_w_op_output_to_cv_mat.html#a5a4e433aa0c7cc62a5f97cc63a67c3fa":[29,0,0,23,1], +"classop_1_1_w_op_output_to_cv_mat.html#a6f632a83de4cdc731c3f52d1541060f3":[29,0,0,23,0], +"classop_1_1_w_op_output_to_cv_mat.html#adea2e8b1d33e6c091640c7d904dac7cd":[29,0,0,23,2], +"classop_1_1_w_op_output_to_cv_mat.html#ae3fc21569d56a648c606b23fcc016349":[29,0,0,23,3], +"classop_1_1_w_people_json_saver.html":[29,0,0,51], +"classop_1_1_w_people_json_saver.html#a386b5b64f2eee08cb344b242f5adb122":[29,0,0,51,1], +"classop_1_1_w_people_json_saver.html#a5d4239596a996723a20a1031d32c7446":[29,0,0,51,2], +"classop_1_1_w_people_json_saver.html#ac12dfe8c1414ec36ace474ecbf148f67":[29,0,0,51,0], +"classop_1_1_w_people_json_saver.html#af874a16a06a9a3452a0e3792ac15647e":[29,0,0,51,3], +"classop_1_1_w_person_id_extractor.html":[29,0,0,121], +"classop_1_1_w_person_id_extractor.html#a14a6cc9c6c70acd4847482fd71e4972b":[29,0,0,121,0], +"classop_1_1_w_person_id_extractor.html#a15f33c528ac92d30be226e784256be01":[29,0,0,121,1], +"classop_1_1_w_person_id_extractor.html#a4066bf1c8cad753c74de1ceabdd76505":[29,0,0,121,3], +"classop_1_1_w_person_id_extractor.html#a72b888875be18eb3fc8d0a8c267630de":[29,0,0,121,2], +"classop_1_1_w_pose_extractor.html":[29,0,0,89], +"classop_1_1_w_pose_extractor.html#a9b621ed9915da9bf3cce49db547de9e6":[29,0,0,89,2], +"classop_1_1_w_pose_extractor.html#ae0f02aaefccab05bbbd919dd7a9e0f61":[29,0,0,89,3], +"classop_1_1_w_pose_extractor.html#ae85b1ec41bf47dcf1aed7bdae1d91915":[29,0,0,89,0], +"classop_1_1_w_pose_extractor.html#aedf9cc53f7dfdb1ec2aa77651ca37eef":[29,0,0,89,1], +"classop_1_1_w_pose_extractor_net.html":[29,0,0,90], +"classop_1_1_w_pose_extractor_net.html#a18d4a120314ec44d1722cc164aaba7a8":[29,0,0,90,2], +"classop_1_1_w_pose_extractor_net.html#a3d691e30c419c70e23a4d7b3c92adb4b":[29,0,0,90,3], +"classop_1_1_w_pose_extractor_net.html#aa085377f965ffc8385d34d77a2e65e5a":[29,0,0,90,1], +"classop_1_1_w_pose_extractor_net.html#aa0f6b7ec6f36fe2a27649ac2c7490c09":[29,0,0,90,0], +"classop_1_1_w_pose_renderer.html":[29,0,0,91], +"classop_1_1_w_pose_renderer.html#a10b1631d78d8270ed2a16e538b30eb76":[29,0,0,91,3], +"classop_1_1_w_pose_renderer.html#aba989a73cef9a807879ad2196725c61c":[29,0,0,91,2], +"classop_1_1_w_pose_renderer.html#ae74189143175b89ccd36662cec4de72e":[29,0,0,91,0], +"classop_1_1_w_pose_renderer.html#ae748fc721246c2a3ad8ffd32adf5e9e7":[29,0,0,91,1], +"classop_1_1_w_pose_saver.html":[29,0,0,52], 
+"classop_1_1_w_pose_saver.html#a039027281498168b57df8dfeefd82cd8":[29,0,0,52,3], +"classop_1_1_w_pose_saver.html#a4f0774832e12389593361186f1b83128":[29,0,0,52,2], +"classop_1_1_w_pose_saver.html#a62394c885abe4d95bece4469ac3657e9":[29,0,0,52,1], +"classop_1_1_w_pose_saver.html#aa9dd0f4649c9e8efef10201caf9e4cfd":[29,0,0,52,0], +"classop_1_1_w_pose_triangulation.html":[29,0,0,2], +"classop_1_1_w_pose_triangulation.html#a439c75d19eae34fdd20f2f1c4ee18e48":[29,0,0,2,0], +"classop_1_1_w_pose_triangulation.html#a495b29e03933d750827acc0531c72c78":[29,0,0,2,3], +"classop_1_1_w_pose_triangulation.html#a5711329db1768eb77d2d96575c9fb668":[29,0,0,2,2], +"classop_1_1_w_pose_triangulation.html#ae88fe6766fbcca1a682306af99684fa3":[29,0,0,2,1], +"classop_1_1_w_queue_assembler.html":[29,0,0,117], +"classop_1_1_w_queue_assembler.html#a02bb2d4e47689903434c05a911a5ba15":[29,0,0,117,2], +"classop_1_1_w_queue_assembler.html#abe8d97c0749cd8d968c8df2727b643e6":[29,0,0,117,1], +"classop_1_1_w_queue_assembler.html#ad3b1ca56d18e1e234773ba15efea7158":[29,0,0,117,3], +"classop_1_1_w_queue_assembler.html#ad4a4ec3e060ad6483331156a5a62af25":[29,0,0,117,0], +"classop_1_1_w_queue_orderer.html":[29,0,0,118], +"classop_1_1_w_queue_orderer.html#a01bc7495ec992cc9c54a040534cb3634":[29,0,0,118,3], +"classop_1_1_w_queue_orderer.html#a1ea314eeaa8d99fbf33885d9a4c6d044":[29,0,0,118,4], +"classop_1_1_w_queue_orderer.html#a3303add5fa8cc36593d3d859ffdd8ae0":[29,0,0,118,0], +"classop_1_1_w_queue_orderer.html#a720b2cd490e2267258bf5d5776f00095":[29,0,0,118,1], +"classop_1_1_w_queue_orderer.html#a85598f83f6f3a30b7ddce9bc7beddf33":[29,0,0,118,2], +"classop_1_1_w_scale_and_size_extractor.html":[29,0,0,24], +"classop_1_1_w_scale_and_size_extractor.html#a8e6ef291bd809987f06fbb1cc2173b0f":[29,0,0,24,0], +"classop_1_1_w_scale_and_size_extractor.html#ac1203ef395a836b13f5586432f284c41":[29,0,0,24,2], +"classop_1_1_w_scale_and_size_extractor.html#afddf54d061dc5325e78252a3bba482b9":[29,0,0,24,3], +"classop_1_1_w_scale_and_size_extractor.html#afe30e073c4410146e2c8ba8f2752737f":[29,0,0,24,1], +"classop_1_1_w_udp_sender.html":[29,0,0,53], +"classop_1_1_w_udp_sender.html#a22a5ec90fe83ed654bd0aef112fac98b":[29,0,0,53,0], +"classop_1_1_w_udp_sender.html#a567d9fe2adc85ae086379696573112e3":[29,0,0,53,2], +"classop_1_1_w_udp_sender.html#a615fc6a537ca9f624022698391c11a54":[29,0,0,53,3], +"classop_1_1_w_udp_sender.html#a684854618fbd74bce420ed44d867f8cd":[29,0,0,53,1], +"classop_1_1_w_verbose_printer.html":[29,0,0,25], +"classop_1_1_w_verbose_printer.html#a32ea8ffef9a255ee33d6d56a550706f8":[29,0,0,25,1], +"classop_1_1_w_verbose_printer.html#a6ece5acbf5f8a7a3e900c5029a56271d":[29,0,0,25,0], +"classop_1_1_w_verbose_printer.html#a9d21f5db0e70ba4cad73cf2bdf6c9fe2":[29,0,0,25,2], +"classop_1_1_w_verbose_printer.html#af98586e3da7cedd902f70e6521c0ffc4":[29,0,0,25,3], +"classop_1_1_w_video_saver.html":[29,0,0,54], +"classop_1_1_w_video_saver.html#a04dc4e6f039d047a0da6f94283c145d9":[29,0,0,54,0], +"classop_1_1_w_video_saver.html#a40bcb8ccf137c6cbee3ca31e6cc3bfbf":[29,0,0,54,3], +"classop_1_1_w_video_saver.html#ac0057c1bbfb3e193c891f167d56fcbab":[29,0,0,54,1], +"classop_1_1_w_video_saver.html#ada90f76b28e4bafe9c8ecbb9bcbb2d14":[29,0,0,54,2], +"classop_1_1_w_video_saver3_d.html":[29,0,0,55], +"classop_1_1_w_video_saver3_d.html#a39482b591eafa150fee3db7027ae093f":[29,0,0,55,1], +"classop_1_1_w_video_saver3_d.html#a570d2b868a6c3d3932671d56b0dbb531":[29,0,0,55,0], +"classop_1_1_w_video_saver3_d.html#ad5a050f5646af36bf8d91909e8f47b2f":[29,0,0,55,2], 
+"classop_1_1_w_video_saver3_d.html#adef743533fbab522d55c43768d28469e":[29,0,0,55,3], +"classop_1_1_webcam_reader.html":[29,0,0,101], +"classop_1_1_webcam_reader.html#a15fa1b26adfb75c0f072dcdb44c8fc44":[29,0,0,101,0], +"classop_1_1_webcam_reader.html#a38ccbdf61f21fba0694362077cb6bdb1":[29,0,0,101,2], +"classop_1_1_webcam_reader.html#a58c315e577c12486e5ab1b941d4cce04":[29,0,0,101,3], +"classop_1_1_webcam_reader.html#a6a065fcf3d6dca624741adc0f77da11d":[29,0,0,101,4], +"classop_1_1_webcam_reader.html#ae66b26829cc2d6e3f02109d4431a7bc2":[29,0,0,101,5], +"classop_1_1_webcam_reader.html#aea29bfce4df5493d662ed3a892f364d2":[29,0,0,101,1], +"classop_1_1_worker.html":[29,0,0,114], +"classop_1_1_worker.html#a5008fc4ef4e41366ba0022f5cd79edba":[29,0,0,114,0], +"classop_1_1_worker.html#a567902b58e492421a6ad771e730ddf53":[29,0,0,114,5], +"classop_1_1_worker.html#a5df10dd8a245df1a6d8df18978490899":[29,0,0,114,4], +"classop_1_1_worker.html#a6e4e84bd2052919bc48df1ec4b913ecf":[29,0,0,114,2], +"classop_1_1_worker.html#a6ebe180098e00ac062a1bb31d462df60":[29,0,0,114,1], +"classop_1_1_worker.html#a9acadd6df7af03b31b9e354ae815f781":[29,0,0,114,8], +"classop_1_1_worker.html#aa5be4df9d4d8302728c653870e7d2a23":[29,0,0,114,3], +"classop_1_1_worker.html#ad689b232d68f3b3e0b41f9e219b01134":[29,0,0,114,7], +"classop_1_1_worker.html#ae45ac828f6e8f6055203c224e50f145b":[29,0,0,114,6], +"classop_1_1_worker_consumer.html":[29,0,0,115], +"classop_1_1_worker_consumer.html#a26cf5c40df363d94d603fce92a5b69eb":[29,0,0,115,2], +"classop_1_1_worker_consumer.html#a7383747b3bdc6ac79e6f9afbf2c28d27":[29,0,0,115,1], +"classop_1_1_worker_consumer.html#a9aaa75f194df6b3ed4994c8e95aa0ab5":[29,0,0,115,0], +"classop_1_1_worker_producer.html":[29,0,0,116], +"classop_1_1_worker_producer.html#a0259f0b387e2b868388ba0a6769f4691":[29,0,0,116,1], +"classop_1_1_worker_producer.html#a364992ef862fe84a78416e2b556daae7":[29,0,0,116,2], +"classop_1_1_worker_producer.html#a8f2592f70d723de4b818c97b25c5a476":[29,0,0,116,0], +"classop_1_1_wrapper_t.html":[29,0,0,125], +"classop_1_1_wrapper_t.html#a0577721c5e714861b27ad4ff356980bc":[29,0,0,125,13], +"classop_1_1_wrapper_t.html#a061ea09aac902a8a44438feffd18998f":[29,0,0,125,17], +"classop_1_1_wrapper_t.html#a0b502ef38ee46749733ae3dda7e5fd31":[29,0,0,125,15], +"classop_1_1_wrapper_t.html#a0e0aea3f8bf81458c0662c46f4d345d5":[29,0,0,125,25], +"classop_1_1_wrapper_t.html#a3818c026e33cc573ba8b5722daa003a7":[29,0,0,125,11], +"classop_1_1_wrapper_t.html#a3c3b605e0787b55ffd00725c09a1cd53":[29,0,0,125,23], +"classop_1_1_wrapper_t.html#a3ce073fb177c316aaeab406c1f4808db":[29,0,0,125,3], +"classop_1_1_wrapper_t.html#a442ff1e4fec93ec28457f7c7c4b4bfbb":[29,0,0,125,22], +"classop_1_1_wrapper_t.html#a478b8bd7deb43322f220593552fe683d":[29,0,0,125,12], +"classop_1_1_wrapper_t.html#a4d9396d4490b90f32a45d4a80d2cd5c7":[29,0,0,125,20], +"classop_1_1_wrapper_t.html#a55af0ab1f0ea4329f2c0bb3feb92b835":[29,0,0,125,19], +"classop_1_1_wrapper_t.html#a65e310384f3b898c4c3621e0e1ee6883":[29,0,0,125,1], +"classop_1_1_wrapper_t.html#a6ba81304df06fbec71103973ce0041c5":[29,0,0,125,9], +"classop_1_1_wrapper_t.html#a7508886116ccfbbb8567a1921591751e":[29,0,0,125,8], +"classop_1_1_wrapper_t.html#a79fa1a518495e1e3684f05943d1c04f8":[29,0,0,125,18], +"classop_1_1_wrapper_t.html#a7a37b4a945171fd42d1ab16b0b7e8205":[29,0,0,125,4], +"classop_1_1_wrapper_t.html#a8111d8cdb984e996410ace159a896992":[29,0,0,125,16], +"classop_1_1_wrapper_t.html#a94151754dddc2a37044aea26b9dac6c7":[29,0,0,125,0], 
+"classop_1_1_wrapper_t.html#a98a7310bc4062fb72f5d26e37d6d7c70":[29,0,0,125,7], +"classop_1_1_wrapper_t.html#aa89055f5cf4e762071479f5fec8d2faf":[29,0,0,125,14], +"classop_1_1_wrapper_t.html#aaa18264f99da260efb8fa12dd293ee75":[29,0,0,125,5], +"classop_1_1_wrapper_t.html#aaca8a5dc6f342470c8241fda5cd6cdb9":[29,0,0,125,10], +"classop_1_1_wrapper_t.html#abcb907a2718260a14c0472279254df84":[29,0,0,125,24], +"classop_1_1_wrapper_t.html#ad9d83f0332c27aa64cde22c66755deec":[29,0,0,125,2], +"classop_1_1_wrapper_t.html#ae2c6cf519701c320ae53c597ae54a7aa":[29,0,0,125,21] +}; diff --git a/web/html/doc/navtreeindex4.js b/web/html/doc/navtreeindex4.js new file mode 100644 index 000000000..9272c06df --- /dev/null +++ b/web/html/doc/navtreeindex4.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX4 = +{ +"classop_1_1_wrapper_t.html#af3d5d56e63b8c6faee0d7954db95c69d":[29,0,0,125,6], +"coco_json_saver_8hpp.html":[30,0,1,0,4,1], +"coco_json_saver_8hpp_source.html":[30,0,1,0,4,1], +"common_8hpp.html":[30,0,1,0,2,2], +"common_8hpp_source.html":[30,0,1,0,2,2], +"core_2enum_classes_8hpp.html":[30,0,1,0,2,6], +"core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226b":[30,0,1,0,2,6,1], +"core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226ba21c5c3f60f4881b8d5477f5628db74f1":[30,0,1,0,2,6,1,2], +"core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226ba9ce2d07469b39a72159ed8b0e0e597ca":[30,0,1,0,2,6,1,0], +"core_2enum_classes_8hpp.html#a1c3dbc214e7552f7ef9cc753ee97226baa9ded1e5ce5d75814730bb4caaf49419":[30,0,1,0,2,6,1,1], +"core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993e":[30,0,1,0,2,6,0], +"core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993ea5f4badd072493724e560fa43d0cf2c71":[30,0,1,0,2,6,0,2], +"core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993ea6ab48f7ed56efc362f41853c5616bf75":[30,0,1,0,2,6,0,0], +"core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993eaa9ded1e5ce5d75814730bb4caaf49419":[30,0,1,0,2,6,0,1], +"core_2enum_classes_8hpp.html#a37a23e10d9cbc428c793c3df1d62993eaca9f686d0a3d6b8bfe5865b59b2fc84f":[30,0,1,0,2,6,0,3], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bda":[30,0,1,0,2,6,3], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa46f9a0da0a5d448fd0cc8b3aa0a9b228":[30,0,1,0,2,6,3,0], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa4b942544cb3e764bbb8d33f8a8744855":[30,0,1,0,2,6,3,3], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa6089ccf7c3fe93a62745e51200419c60":[30,0,1,0,2,6,3,8], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa668a2bc599fd07445eae0730d043c96d":[30,0,1,0,2,6,3,1], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaa73c42013aac51c335d50d103f30fcb99":[30,0,1,0,2,6,3,2], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaaa93f121640d609f8772397a0f40f40d6":[30,0,1,0,2,6,3,7], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaab7e7b2beae3435e73021d6d9a6a3fd8a":[30,0,1,0,2,6,3,5], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaaee080e43c505aa85cdda0e480b0abc06":[30,0,1,0,2,6,3,6], +"core_2enum_classes_8hpp.html#af72fe4ed32846c12f41b049d3d0e1bdaafa90ddb034be42f1cdf13a6829eed2ad":[30,0,1,0,2,6,3,4], +"core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033":[30,0,1,0,2,6,2], +"core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a06b9281e396db002010bde1de57262eb":[30,0,1,0,2,6,2,1], 
+"core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a3432ca64f06615abf07ab44c10cada38":[30,0,1,0,2,6,2,3], +"core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a54c82ef76ecbbd4c2293e09bae01b54e":[30,0,1,0,2,6,2,2], +"core_2enum_classes_8hpp.html#afce557f02e337e16150d00bdf72ec033a6adf97f83acf6453d4a6a4b1070f3754":[30,0,1,0,2,6,2,0], +"core_2enum_classes_8hpp_source.html":[30,0,1,0,2,6], +"core_2headers_8hpp.html":[30,0,1,0,2,8], +"core_2headers_8hpp_source.html":[30,0,1,0,2,8], +"core_2string_8hpp.html":[30,0,1,0,2,18], +"core_2string_8hpp_source.html":[30,0,1,0,2,18], +"cuda_8hpp.html":[30,0,1,0,5,0], +"cuda_8hpp.html#a17da233ea322ae172ff5bda7caaf2124":[30,0,1,0,5,0,3], +"cuda_8hpp.html#a2af8422ada0de882cc222920ca15c6d2":[30,0,1,0,5,0,0], +"cuda_8hpp.html#a4ba080c11cc9758051db97ce2a11c023":[30,0,1,0,5,0,2], +"cuda_8hpp.html#a6aeab543a61ef23ed58a6e29401424ae":[30,0,1,0,5,0,5], +"cuda_8hpp.html#a8587bab6b02056384b7c424555cd50d8":[30,0,1,0,5,0,4], +"cuda_8hpp.html#ac7bbf63b37bf6762c47557ad227e036d":[30,0,1,0,5,0,6], +"cuda_8hpp.html#ad9b7765a4396ee4470585ded07285563":[30,0,1,0,5,0,1], +"cuda_8hpp_source.html":[30,0,1,0,5,0], +"cv_mat_to_op_input_8hpp.html":[30,0,1,0,2,3], +"cv_mat_to_op_input_8hpp_source.html":[30,0,1,0,2,3], +"cv_mat_to_op_output_8hpp.html":[30,0,1,0,2,4], +"cv_mat_to_op_output_8hpp_source.html":[30,0,1,0,2,4], +"datum_8hpp.html":[30,0,1,0,2,5], +"datum_8hpp.html#a03de732ffb0edab021fb745b21a05fdd":[30,0,1,0,2,5,1], +"datum_8hpp.html#aa0a67922cf9df1e30dad2c32785b147e":[30,0,1,0,2,5,2], +"datum_8hpp.html#ad11d52b69bc54e48ceb2f5787f700431":[30,0,1,0,2,5,5], +"datum_8hpp.html#ae2331967a21fec02341dec3ca39d3809":[30,0,1,0,2,5,3], +"datum_8hpp.html#af87cd873cebb915837ae27248f67e822":[30,0,1,0,2,5,4], +"datum_8hpp_source.html":[30,0,1,0,2,5], +"datum_producer_8hpp.html":[30,0,1,0,10,0], +"datum_producer_8hpp.html#a177ffd3101c7a1f5cf32e100474a1234":[30,0,1,0,10,0,2], +"datum_producer_8hpp.html#a427c6244ee27171037bc201f401de16a":[30,0,1,0,10,0,3], +"datum_producer_8hpp.html#a5001474237d31d72c9145a84ec5143da":[30,0,1,0,10,0,6], +"datum_producer_8hpp.html#a71c68de51a3608e782854c298b91cd62":[30,0,1,0,10,0,4], +"datum_producer_8hpp.html#a71cdc487bbec12ddbe4bac9123745494":[30,0,1,0,10,0,5], +"datum_producer_8hpp.html#ad72abbc7b2600f543e4ee8e28392711e":[30,0,1,0,10,0,1], +"datum_producer_8hpp_source.html":[30,0,1,0,10,0], +"dir_2012eae92ff1c095841b0424b32ded73.html":[30,0,1,0,0], +"dir_2fe3b209ba91c67403a5b318dad0feb6.html":[30,0,0,1], +"dir_30b75edf6ab089fde7a8426886bd6b03.html":[30,0,1,0,13], +"dir_3521bb8cf95d1c22170a875d1710b83f.html":[30,0,0,2], +"dir_3c425d4d4c1956c7a8db48efb69a2718.html":[30,0,1,0,15], +"dir_3dc351f869bb7a8f0afe68ebb7e681e8.html":[30,0,1,0,10], +"dir_40f12b33899adef613f503ab305e6d57.html":[30,0,1,0,11], +"dir_50b707fa6cdf56bfdc2ad79b44ee2e7d.html":[30,0,1,0,5], +"dir_6b0b8e919b15d8dea67b0fa2c092513b.html":[30,0,1,0,8], +"dir_6beae53d0373f2b1f5bd56e91c6780e1.html":[30,0,1,0,14], +"dir_76d1d371018acee6886a69c6ed6f0be5.html":[30,0,1,0,7], +"dir_7de49b63a676cf8462ac8f05c3f89409.html":[30,0,1,0,2], +"dir_85db440c4601460769d5050789bfa56d.html":[30,0,1,0,1], +"dir_860fec895be5700f57450c90b9398659.html":[30,0,0,1,0], +"dir_8f5ad9f4aef96c85fa295a06f65e3f88.html":[30,0,1,0,12], +"dir_a43890ebc8412b28a6c3727ba389c186.html":[30,0,0,0], +"dir_bb0b1a6a2f2ff4919a87df39c7181fc7.html":[30,0,1,0,9], +"dir_bb6374c98f96ead39f6032c111104f04.html":[30,0,0,2,0], +"dir_c7344abab9be755d8deee77d51a9abb7.html":[30,0,1,0,3], 
+"dir_d30173e4a8fdb18630e5d2bb2948a3be.html":[30,0,0,1,1], +"dir_d44c64559bbebec7f509842c48db8b23.html":[30,0,1], +"dir_e68e8157741866f444e17edd764ebbae.html":[30,0,0], +"dir_fbdcb2ba3f7af3b3a55342a5bedf43b8.html":[30,0,1,0,6], +"dir_fdee266290491355a8cca55fd72320d8.html":[30,0,1,0], +"dir_fec53438e526959e34f7f645cf03dfd4.html":[30,0,1,0,4], +"error_and_log_8hpp.html":[30,0,1,0,14,2], +"error_and_log_8hpp.html#a0e5c3fad2ace3eb129dd1d97afd59558":[30,0,1,0,14,2,11], +"error_and_log_8hpp.html#a149393c3c87c82a5cf14417c6b430d30":[30,0,1,0,14,2,19], +"error_and_log_8hpp.html#a2f41e9a74bbda434ef16189c32a13aba":[30,0,1,0,14,2,17], +"error_and_log_8hpp.html#a42d364d9fbd1a719341bd7187d97cf18":[30,0,1,0,14,2,2], +"error_and_log_8hpp.html#a5a3db1a0d272d8fb5ea723845beee150":[30,0,1,0,14,2,12], +"error_and_log_8hpp.html#a5ab07ae8c026e4f7782a113778d9082d":[30,0,1,0,14,2,10], +"error_and_log_8hpp.html#a5f092bd36c716a894cb035e1ead2aca3":[30,0,1,0,14,2,1], +"error_and_log_8hpp.html#a61af88aac41ef77ab4e8816023fe32f0":[30,0,1,0,14,2,5], +"error_and_log_8hpp.html#a758b08be140e27dd2642d286a383be54":[30,0,1,0,14,2,4], +"error_and_log_8hpp.html#a7eb0121791185c13a6c3dd88994e0eab":[30,0,1,0,14,2,18], +"error_and_log_8hpp.html#a825f15fdf9dc9cb7473c20f970f15b60":[30,0,1,0,14,2,3], +"error_and_log_8hpp.html#a838b69fead43c8a848d059b5f9d63baf":[30,0,1,0,14,2,14], +"error_and_log_8hpp.html#a865a4cd0ba3b596667dc7242756837bd":[30,0,1,0,14,2,0], +"error_and_log_8hpp.html#a91dd00cbb8fb646e6612455eb0f1b3e9":[30,0,1,0,14,2,15], +"error_and_log_8hpp.html#a96d1720ea5d160cfd4c8404060a9bebd":[30,0,1,0,14,2,6], +"error_and_log_8hpp.html#a96e56b0ddbe2cb17443b93aaba05d672":[30,0,1,0,14,2,16], +"error_and_log_8hpp.html#aa72861fea0671209aca1ea5fa385891a":[30,0,1,0,14,2,13], +"error_and_log_8hpp.html#abdedc8f1fd2f723dae5bb8ff20b93a93":[30,0,1,0,14,2,9], +"error_and_log_8hpp.html#ad5e1c975a1b7dce9b02bc8cdf3d45a01":[30,0,1,0,14,2,8], +"error_and_log_8hpp.html#ae8dbbccc9a2ca8a4670716ac5fdd8d53":[30,0,1,0,14,2,7], +"error_and_log_8hpp.html#af548fe1a2ad2b392a25afe9b0b87b8dd":[30,0,1,0,14,2,20], +"error_and_log_8hpp_source.html":[30,0,1,0,14,2], +"face_2headers_8hpp.html":[30,0,1,0,3,8], +"face_2headers_8hpp_source.html":[30,0,1,0,3,8], +"face_cpu_renderer_8hpp.html":[30,0,1,0,3,0], +"face_cpu_renderer_8hpp_source.html":[30,0,1,0,3,0], +"face_detector_8hpp.html":[30,0,1,0,3,1], +"face_detector_8hpp_source.html":[30,0,1,0,3,1], +"face_detector_open_c_v_8hpp.html":[30,0,1,0,3,2], +"face_detector_open_c_v_8hpp_source.html":[30,0,1,0,3,2], +"face_extractor_caffe_8hpp.html":[30,0,1,0,3,3], +"face_extractor_caffe_8hpp_source.html":[30,0,1,0,3,3], +"face_extractor_net_8hpp.html":[30,0,1,0,3,4], +"face_extractor_net_8hpp_source.html":[30,0,1,0,3,4], +"face_gpu_renderer_8hpp.html":[30,0,1,0,3,5], +"face_gpu_renderer_8hpp_source.html":[30,0,1,0,3,5], +"face_parameters_8hpp.html":[30,0,1,0,3,6], +"face_parameters_8hpp.html#a00c56c20997f734b2bd44d6f85b86cf0":[30,0,1,0,3,6,11], +"face_parameters_8hpp.html#a1245f62cf98c4ee7591dfc8807ef355d":[30,0,1,0,3,6,9], +"face_parameters_8hpp.html#a15f6c39797cee87f6aa941d93f22b78b":[30,0,1,0,3,6,6], +"face_parameters_8hpp.html#a1a7ddb1a137c44091a1b4161725adfa0":[30,0,1,0,3,6,2], +"face_parameters_8hpp.html#a3fbae1778780ae5bf4ffcc84cdef1870":[30,0,1,0,3,6,4], +"face_parameters_8hpp.html#a4d07868d77fb11253b413ed579e04c22":[30,0,1,0,3,6,10], +"face_parameters_8hpp.html#a4f191afed46fea5e3ce5b2a8756e1ddd":[30,0,1,0,3,6,5], +"face_parameters_8hpp.html#a740a6228babfde5f18fba6fc033ef0ed":[30,0,1,0,3,6,0], 
+"face_parameters_8hpp.html#a7e2f64c1349d6a881c6ceb49757e099a":[30,0,1,0,3,6,1], +"face_parameters_8hpp.html#a8a05bdc38612c38e28b96bba5b4679b8":[30,0,1,0,3,6,8], +"face_parameters_8hpp.html#a9b4b92c621cc5962a72898899d2f2534":[30,0,1,0,3,6,7], +"face_parameters_8hpp.html#aa6701cc08e1a8651798ef3bf8437375b":[30,0,1,0,3,6,3], +"face_parameters_8hpp.html#abd0ef2306478c3295283e7f1b59e3aff":[30,0,1,0,3,6,12], +"face_parameters_8hpp_source.html":[30,0,1,0,3,6], +"face_renderer_8hpp.html":[30,0,1,0,3,7], +"face_renderer_8hpp_source.html":[30,0,1,0,3,7], +"fast_math_8hpp.html":[30,0,1,0,14,3], +"fast_math_8hpp.html#a1b479fea39a56c041a8a51aecf024bed":[30,0,1,0,14,3,5], +"fast_math_8hpp.html#a2dafd3db8f922405b38240345dd1dce5":[30,0,1,0,14,3,2], +"fast_math_8hpp.html#a57eee48e4cefd583a81cfc907586c035":[30,0,1,0,14,3,6], +"fast_math_8hpp.html#a61240e5fbd4ea84a2cfdc89407bcb1ae":[30,0,1,0,14,3,8], +"fast_math_8hpp.html#a699ef17b0f27b8bc2c4d4a03e46e6be1":[30,0,1,0,14,3,4], +"fast_math_8hpp.html#a6e1d1f90ef06cc7af576fdaad4b4e320":[30,0,1,0,14,3,1], +"fast_math_8hpp.html#a757a5cc88734e7be9e910e7d8213c282":[30,0,1,0,14,3,10], +"fast_math_8hpp.html#a8525e440d6ac1b558e72637dc4f4e3c4":[30,0,1,0,14,3,9], +"fast_math_8hpp.html#a9f4b99449c0c73e2c89ee1a1eff007c7":[30,0,1,0,14,3,0], +"fast_math_8hpp.html#aaafe2e235a1a3a146bb026b71c521c7b":[30,0,1,0,14,3,11], +"fast_math_8hpp.html#ab5eb10c958f3f37fb82d29361ad81467":[30,0,1,0,14,3,3], +"fast_math_8hpp.html#ab71596bc88b87ea5920f19f978d6d6ac":[30,0,1,0,14,3,7], +"fast_math_8hpp_source.html":[30,0,1,0,14,3], +"file_saver_8hpp.html":[30,0,1,0,4,3], +"file_saver_8hpp_source.html":[30,0,1,0,4,3], +"file_stream_8hpp.html":[30,0,1,0,4,4], +"file_stream_8hpp.html#a0ce96f84c6e380b261802c7e2639dc7d":[30,0,1,0,4,4,3], +"file_stream_8hpp.html#a1c2921f841ab87033b535b5ae8a4d526":[30,0,1,0,4,4,1], +"file_stream_8hpp.html#a1e986a510a29bfd8c682f65a8b399551":[30,0,1,0,4,4,10], +"file_stream_8hpp.html#a46e815df32db67d78a94367b7f97df25":[30,0,1,0,4,4,11], +"file_stream_8hpp.html#a7b9bcb57dd8488ade8ea288342eaed08":[30,0,1,0,4,4,5], +"file_stream_8hpp.html#a871a61f08021460e0f24f51583546a75":[30,0,1,0,4,4,4], +"file_stream_8hpp.html#a8c9d3469086a12607b097731848b6dea":[30,0,1,0,4,4,8], +"file_stream_8hpp.html#a9d121f33179e41075f4602eb6527e658":[30,0,1,0,4,4,0], +"file_stream_8hpp.html#a9f14054fbf4e63fc85d10c83f2f9ecb7":[30,0,1,0,4,4,2], +"file_stream_8hpp.html#aafac1158605748694e3c3ed4eb34b3b7":[30,0,1,0,4,4,6], +"file_stream_8hpp.html#ac1080e627185a65b88ec788184a95552":[30,0,1,0,4,4,7], +"file_stream_8hpp.html#af9c189f7c80092570699c8b9d5686fea":[30,0,1,0,4,4,9], +"file_stream_8hpp_source.html":[30,0,1,0,4,4], +"file_system_8hpp.html":[30,0,1,0,14,4], +"file_system_8hpp.html#a2e35510c95e5525aae7a398b03b32488":[30,0,1,0,14,4,6], +"file_system_8hpp.html#a3ff74a37eb4bf12e31bc5aa95b69f9e3":[30,0,1,0,14,4,9], +"file_system_8hpp.html#a515273b013402d8c75780330588421bc":[30,0,1,0,14,4,3], +"file_system_8hpp.html#a573544858d0a9c29c9707eeda3a21c98":[30,0,1,0,14,4,4], +"file_system_8hpp.html#a5fe477200af87dadb07c8d6a75b4414b":[30,0,1,0,14,4,14], +"file_system_8hpp.html#a6f37638480139a4076eef4d0c7dc6cd1":[30,0,1,0,14,4,5], +"file_system_8hpp.html#a6fc2ee2d2c256695fb7b2b953ee7f762":[30,0,1,0,14,4,0], +"file_system_8hpp.html#a82471a2af285bada830bac3c95a8440b":[30,0,1,0,14,4,12], +"file_system_8hpp.html#a858f70fa9d84ad85c60f19a2229ebbde":[30,0,1,0,14,4,7], +"file_system_8hpp.html#a8664658afa7be03e173cec9eff2873ad":[30,0,1,0,14,4,13], 
+"file_system_8hpp.html#ab38ea91ef7b7dad700d8e4a4654d48f5":[30,0,1,0,14,4,2], +"file_system_8hpp.html#ac1737c19228b83a5e93ae51e5d9556eb":[30,0,1,0,14,4,10], +"file_system_8hpp.html#ac1f4b95440d2fb57fc715558d039b947":[30,0,1,0,14,4,1], +"file_system_8hpp.html#acc650faa23df88ca16a09a2d2a522960":[30,0,1,0,14,4,11], +"file_system_8hpp.html#adb26da2c52486e926d98471b5387c7e1":[30,0,1,0,14,4,8], +"file_system_8hpp_source.html":[30,0,1,0,14,4], +"files.html":[30,0], +"filestream_2enum_classes_8hpp.html":[30,0,1,0,4,2], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715ac":[30,0,1,0,4,2,0], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca129e74dde7b475c8848310e16754c965":[30,0,1,0,4,2,0,4], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca1d9502bb9f6efc989b3578dcfde7901e":[30,0,1,0,4,2,0,2], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,4,2,0,6], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca8af5861002f3c157f9ba842bba10aa3f":[30,0,1,0,4,2,0,3], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715aca9909f7cecc318ee0049ad0f3b409b3b3":[30,0,1,0,4,2,0,1], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715acaac101b32dda4448cf13a93fe283dddd8":[30,0,1,0,4,2,0,0], +"filestream_2enum_classes_8hpp.html#a5418b76dad5b4aea1133325f4aa715acae9989db5dabeea617f40c8dbfd07f5fb":[30,0,1,0,4,2,0,5], +"filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7e":[30,0,1,0,4,2,1], +"filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7ea55eeca17b45365c188d0edbd35f6e0c3":[30,0,1,0,4,2,1,3], +"filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7ea65f6036bfc9798ce230c5d8567551315":[30,0,1,0,4,2,1,2], +"filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7ea9ec8e4e3ab4c7eeba097f27d7364d743":[30,0,1,0,4,2,1,1], +"filestream_2enum_classes_8hpp.html#ae52c21a24cf2c21e3b419c127924fd7eaeed8d85b888a6c015834240885ee6333":[30,0,1,0,4,2,1,0], +"filestream_2enum_classes_8hpp_source.html":[30,0,1,0,4,2], +"filestream_2headers_8hpp.html":[30,0,1,0,4,5], +"filestream_2headers_8hpp_source.html":[30,0,1,0,4,5], +"flags_8hpp.html":[30,0,1,0,16], +"flags_8hpp.html#a02962b73af4084b90494b777ff1826c1":[30,0,1,0,16,67], +"flags_8hpp.html#a064e35f07a2835d7b4b5d31f0f625865":[30,0,1,0,16,9], +"flags_8hpp.html#a08c988c91c179c16944f9f703c24324b":[30,0,1,0,16,87], +"flags_8hpp.html#a0fe12ed9bee07b6120d595b3a1b85b15":[30,0,1,0,16,28], +"flags_8hpp.html#a13dcbbdf12e9e72eb29ccf25d7a7cd42":[30,0,1,0,16,36], +"flags_8hpp.html#a166e98128271506645ce14000faace73":[30,0,1,0,16,31], +"flags_8hpp.html#a17550ab833803b7862beaab957642af6":[30,0,1,0,16,59], +"flags_8hpp.html#a1da3324efb2a917d0714100dcdb13aba":[30,0,1,0,16,64], +"flags_8hpp.html#a1f0ce14d63633af19e375d6fbcccc463":[30,0,1,0,16,76], +"flags_8hpp.html#a245491f4f0bb36e5e8655877402f50eb":[30,0,1,0,16,4], +"flags_8hpp.html#a2c213e3a0c01a36f52667d1707b49062":[30,0,1,0,16,48], +"flags_8hpp.html#a2c805a3cd1797a9f67783ed5c9b7c5b1":[30,0,1,0,16,10], +"flags_8hpp.html#a2d8a3ae1f10dd657619e2a5d2dcb4b61":[30,0,1,0,16,55], +"flags_8hpp.html#a33562cf43d115a4d26f9958aa04c15ff":[30,0,1,0,16,20], +"flags_8hpp.html#a349e235659cc7b31dcf5db0d3d468fce":[30,0,1,0,16,29], +"flags_8hpp.html#a456ac1650914494fbdacd53b55420e2b":[30,0,1,0,16,68], +"flags_8hpp.html#a4670eb3cdedb3f3bac3886e2c21a7750":[30,0,1,0,16,12], +"flags_8hpp.html#a46b9edf947872e29ea5cbd7a95bee719":[30,0,1,0,16,45], 
+"flags_8hpp.html#a488d04acd61a19fe00cd2e56844dd8c5":[30,0,1,0,16,84], +"flags_8hpp.html#a4a4d36b7f90a4d53a0fa29f86bbbb9aa":[30,0,1,0,16,80], +"flags_8hpp.html#a4d830b4bcbd7998d8c6de665c0531ce9":[30,0,1,0,16,14], +"flags_8hpp.html#a4ebc35e01d48db77575a1cdd53ac0815":[30,0,1,0,16,77], +"flags_8hpp.html#a4fff2a82464bb9e180e04f70f0d5cbad":[30,0,1,0,16,71], +"flags_8hpp.html#a513eca9e40da3a149e02c0c1fb6d10d3":[30,0,1,0,16,42], +"flags_8hpp.html#a547efed657b6e562d8d5f071124fcf17":[30,0,1,0,16,49], +"flags_8hpp.html#a5690d1f0bce6904d9ccea011b0a0262f":[30,0,1,0,16,75], +"flags_8hpp.html#a61f245285b5a4b77b1d923276fe6f995":[30,0,1,0,16,56], +"flags_8hpp.html#a6561fc0841b80f5c19a1c4bc549175e9":[30,0,1,0,16,57], +"flags_8hpp.html#a66ec3a67de281684d9ff60c7b80c9430":[30,0,1,0,16,82], +"flags_8hpp.html#a6d2331153c7051c742d11dcb0a4220ec":[30,0,1,0,16,58], +"flags_8hpp.html#a71a0fc42dd98d1739571e4f7fed4873c":[30,0,1,0,16,40], +"flags_8hpp.html#a796a3b14805d5e0b01b21b9bab844382":[30,0,1,0,16,41], +"flags_8hpp.html#a7ffa026d9b667e5551909aba895f0dfb":[30,0,1,0,16,74], +"flags_8hpp.html#a807c2eb873d9e727f14d6c7ee6e02e11":[30,0,1,0,16,38], +"flags_8hpp.html#a80cdeb8d094d26ae5840a74ccff8613c":[30,0,1,0,16,60], +"flags_8hpp.html#a81e3bebeb0cec269b90097fb5856c96f":[30,0,1,0,16,65], +"flags_8hpp.html#a81edc2bb181cd79c98bfae1520f8ab71":[30,0,1,0,16,72], +"flags_8hpp.html#a83309bdfd7daadfb89be65edf399ac9a":[30,0,1,0,16,23], +"flags_8hpp.html#a844330d264f5648ae7d99b76f72f391a":[30,0,1,0,16,52], +"flags_8hpp.html#a85ed9ce7f145fad05a50344a6fdbee37":[30,0,1,0,16,86], +"flags_8hpp.html#a87455dc2555757a087e99d8b52138835":[30,0,1,0,16,21], +"flags_8hpp.html#a8763644943c3413220cfe6bf8f385d44":[30,0,1,0,16,83] +}; diff --git a/web/html/doc/navtreeindex5.js b/web/html/doc/navtreeindex5.js new file mode 100644 index 000000000..001ef9e98 --- /dev/null +++ b/web/html/doc/navtreeindex5.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX5 = +{ +"flags_8hpp.html#a8bd040787ac075ae4cf483be01fe2c5f":[30,0,1,0,16,51], +"flags_8hpp.html#a8dfdbe0ff3a68f46e440f379621f8f0a":[30,0,1,0,16,24], +"flags_8hpp.html#a8e45f79c948490e55be06e3541b3681f":[30,0,1,0,16,69], +"flags_8hpp.html#a91ba99b997951b09eab545a40c019f85":[30,0,1,0,16,81], +"flags_8hpp.html#a9973307b6bd2af114083ba1badf4c297":[30,0,1,0,16,16], +"flags_8hpp.html#a99f9d7e0dcbf9f6ceddf589dc482d17a":[30,0,1,0,16,88], +"flags_8hpp.html#a9b1025da93c8ab21c0fdfe4941f26ad4":[30,0,1,0,16,3], +"flags_8hpp.html#a9b80aae5395b7d99c980198374bde9f2":[30,0,1,0,16,22], +"flags_8hpp.html#a9fbfea8bf51a80ff2254f329366a19b8":[30,0,1,0,16,89], +"flags_8hpp.html#aa0cc9af40fd8fdee2d8c61da206913dc":[30,0,1,0,16,70], +"flags_8hpp.html#aa2bc11c618a37698d88f7ae100e1729f":[30,0,1,0,16,37], +"flags_8hpp.html#aa3c62563ce9d99c25d4a2977f253c6c7":[30,0,1,0,16,47], +"flags_8hpp.html#aac91c51c83200f18076e7354067ccbb0":[30,0,1,0,16,79], +"flags_8hpp.html#aae4cfb31c1a5934475d8cbd6b2f8268e":[30,0,1,0,16,50], +"flags_8hpp.html#ab1d4b66fac361d1f3f450cd6bc5311d4":[30,0,1,0,16,63], +"flags_8hpp.html#ab598c69ed7164089afcdd4149c24a5eb":[30,0,1,0,16,39], +"flags_8hpp.html#ab7c61c5b25d4b3db1284761933c66aed":[30,0,1,0,16,90], +"flags_8hpp.html#ab8ac5e6be119dc06f351810053ea8bcf":[30,0,1,0,16,15], +"flags_8hpp.html#abada704ec59515bb12563262a3f21aeb":[30,0,1,0,16,6], +"flags_8hpp.html#abcc67acb9ca2d225394445eb6017bc4d":[30,0,1,0,16,62], +"flags_8hpp.html#abd20da73260490fba6e09a17c235fc4a":[30,0,1,0,16,73], +"flags_8hpp.html#abd5499ff7014225c02e6149bde93e3a3":[30,0,1,0,16,85], +"flags_8hpp.html#abd8fa8293ee3a05d4c5a2b6082460ab0":[30,0,1,0,16,8], 
+"flags_8hpp.html#abecb5274ddd75ca51fb40064478b1ed3":[30,0,1,0,16,32], +"flags_8hpp.html#ac0db13ec99b09bf9bd38582da33cff1d":[30,0,1,0,16,7], +"flags_8hpp.html#ac2cc95296b63a048521a2c41dce82b45":[30,0,1,0,16,27], +"flags_8hpp.html#ac5e8f82d85a3eb0ee72a64569395497c":[30,0,1,0,16,53], +"flags_8hpp.html#ac9b1dcda85ac079222769931cad6bebc":[30,0,1,0,16,35], +"flags_8hpp.html#acd0c383a2043852c83e284b669a5cf7e":[30,0,1,0,16,61], +"flags_8hpp.html#ace91dac10649fcbe836a71459b2f2584":[30,0,1,0,16,25], +"flags_8hpp.html#ad677c3dfae13a1ec9a3ee2fabe9e37a0":[30,0,1,0,16,1], +"flags_8hpp.html#ad696d262dc852c2f872470b90c25fafe":[30,0,1,0,16,46], +"flags_8hpp.html#ad90e61b31f6bd48c3514195da36ff31c":[30,0,1,0,16,30], +"flags_8hpp.html#ad9114bc8e1fc8f306e5296eaae5d542f":[30,0,1,0,16,11], +"flags_8hpp.html#adbd3e76b28ecc7415ccb782c3419a9de":[30,0,1,0,16,43], +"flags_8hpp.html#add0ca9baf682a84f3236e7f5c001db06":[30,0,1,0,16,54], +"flags_8hpp.html#add5d5807feef88090f8c9d11bf904ba8":[30,0,1,0,16,66], +"flags_8hpp.html#adf7a6ca551fbd934deb6784bfe37f897":[30,0,1,0,16,17], +"flags_8hpp.html#ae7ff5e3adea9c5f572455ec30dd3fbff":[30,0,1,0,16,44], +"flags_8hpp.html#ae9ea69ae8e64ee99bb4516199f847980":[30,0,1,0,16,5], +"flags_8hpp.html#aea6dc9d3cb9ea69426d012d1f41fadf0":[30,0,1,0,16,26], +"flags_8hpp.html#aefe90773deaaa525b3b115d37b46e281":[30,0,1,0,16,13], +"flags_8hpp.html#af12ef9f66fbf74e05c08b69caf54821e":[30,0,1,0,16,0], +"flags_8hpp.html#af1f0085881667603ed4e0404d7140bdc":[30,0,1,0,16,78], +"flags_8hpp.html#af437a0d8f293cd02b992a94b268571a4":[30,0,1,0,16,33], +"flags_8hpp.html#af4490397ad693c3d51835699a5dcddf3":[30,0,1,0,16,34], +"flags_8hpp.html#af4ef631dab577c13f59e10b626c580f6":[30,0,1,0,16,2], +"flags_8hpp.html#af50c82bf332c1a699f0615e7eb734c51":[30,0,1,0,16,18], +"flags_8hpp.html#af9d388afd71b21640a573e6e8cad4c1a":[30,0,1,0,16,19], +"flags_8hpp_source.html":[30,0,1,0,16], +"flags_to_open_pose_8hpp.html":[30,0,1,0,14,5], +"flags_to_open_pose_8hpp.html#a0e1275fd8690a55200fcd193c94dcf08":[30,0,1,0,14,5,4], +"flags_to_open_pose_8hpp.html#a1ca09f1d0e1f01d95842e99ebeef0631":[30,0,1,0,14,5,8], +"flags_to_open_pose_8hpp.html#a60ab295fba5d41b31d6ba5a4942889a9":[30,0,1,0,14,5,6], +"flags_to_open_pose_8hpp.html#a70f65da8f70ebd07b093932927187c90":[30,0,1,0,14,5,9], +"flags_to_open_pose_8hpp.html#a8264a6feec695adef80d40940863d511":[30,0,1,0,14,5,7], +"flags_to_open_pose_8hpp.html#a9f585930a5246e4a9a70145fa8763447":[30,0,1,0,14,5,0], +"flags_to_open_pose_8hpp.html#abe3f4d783191416b8e62e54c953fe36b":[30,0,1,0,14,5,10], +"flags_to_open_pose_8hpp.html#ad3b02ca66d11f4129372f4a9f98c6437":[30,0,1,0,14,5,2], +"flags_to_open_pose_8hpp.html#aed9ab5282e3e60f22dc11c301af897e6":[30,0,1,0,14,5,3], +"flags_to_open_pose_8hpp.html#af5ec8b7e6271798cbd09475766c64d2f":[30,0,1,0,14,5,5], +"flags_to_open_pose_8hpp.html#afdf2dd76cbae54789a139d9415790f82":[30,0,1,0,14,5,1], +"flags_to_open_pose_8hpp_source.html":[30,0,1,0,14,5], +"flir_reader_8hpp.html":[30,0,1,0,10,2], +"flir_reader_8hpp_source.html":[30,0,1,0,10,2], +"frame_displayer_8hpp.html":[30,0,1,0,6,1], +"frame_displayer_8hpp_source.html":[30,0,1,0,6,1], +"functions.html":[29,3,0], +"functions.html":[29,3,0,0], +"functions_b.html":[29,3,0,1], +"functions_c.html":[29,3,0,2], +"functions_d.html":[29,3,0,3], +"functions_e.html":[29,3,0,4], +"functions_f.html":[29,3,0,5], +"functions_func.html":[29,3,1], +"functions_func.html":[29,3,1,0], +"functions_func_b.html":[29,3,1,1], +"functions_func_c.html":[29,3,1,2], +"functions_func_d.html":[29,3,1,3], +"functions_func_e.html":[29,3,1,4], 
+"functions_func_f.html":[29,3,1,5], +"functions_func_g.html":[29,3,1,6], +"functions_func_h.html":[29,3,1,7], +"functions_func_i.html":[29,3,1,8], +"functions_func_j.html":[29,3,1,9], +"functions_func_k.html":[29,3,1,10], +"functions_func_l.html":[29,3,1,11], +"functions_func_m.html":[29,3,1,12], +"functions_func_n.html":[29,3,1,13], +"functions_func_o.html":[29,3,1,14], +"functions_func_p.html":[29,3,1,15], +"functions_func_q.html":[29,3,1,16], +"functions_func_r.html":[29,3,1,17], +"functions_func_s.html":[29,3,1,18], +"functions_func_t.html":[29,3,1,19], +"functions_func_u.html":[29,3,1,20], +"functions_func_v.html":[29,3,1,21], +"functions_func_w.html":[29,3,1,22], +"functions_func_~.html":[29,3,1,23], +"functions_g.html":[29,3,0,6], +"functions_h.html":[29,3,0,7], +"functions_i.html":[29,3,0,8], +"functions_j.html":[29,3,0,9], +"functions_k.html":[29,3,0,10], +"functions_l.html":[29,3,0,11], +"functions_m.html":[29,3,0,12], +"functions_n.html":[29,3,0,13], +"functions_o.html":[29,3,0,14], +"functions_p.html":[29,3,0,15], +"functions_q.html":[29,3,0,16], +"functions_r.html":[29,3,0,17], +"functions_s.html":[29,3,0,18], +"functions_t.html":[29,3,0,19], +"functions_u.html":[29,3,0,20], +"functions_v.html":[29,3,0,21], +"functions_vars.html":[29,3,2], +"functions_w.html":[29,3,0,22], +"functions_x.html":[29,3,0,23], +"functions_y.html":[29,3,0,24], +"functions_~.html":[29,3,0,25], +"globals.html":[30,1,0], +"globals_defs.html":[30,1,3], +"globals_func.html":[30,1,1], +"globals_vars.html":[30,1,2], +"gpu_2enum_classes_8hpp.html":[30,0,1,0,5,1], +"gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7f":[30,0,1,0,5,1,0], +"gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa3c1472839b807c90abff3c7c36dff458":[30,0,1,0,5,1,0,2], +"gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,5,1,0,3], +"gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa7982b09a852b37f2afb1227eaf552e47":[30,0,1,0,5,1,0,1], +"gpu_2enum_classes_8hpp.html#adbb34b5c8f2b6f0c051f831f18582e7fa8b95dcff7397d0693c03e394af5552aa":[30,0,1,0,5,1,0,0], +"gpu_2enum_classes_8hpp_source.html":[30,0,1,0,5,1], +"gpu_2headers_8hpp.html":[30,0,1,0,5,3], +"gpu_2headers_8hpp_source.html":[30,0,1,0,5,3], +"gpu_8hpp.html":[30,0,1,0,5,2], +"gpu_8hpp.html#a971a7caa96be5b715b5c22f6e5dc6ad1":[30,0,1,0,5,2,0], +"gpu_8hpp.html#aaad222b087dd041c35de2f3414c1a01f":[30,0,1,0,5,2,1], +"gpu_8hpp_source.html":[30,0,1,0,5,2], +"gpu_renderer_8hpp.html":[30,0,1,0,2,7], +"gpu_renderer_8hpp_source.html":[30,0,1,0,2,7], +"gui3_d_8hpp.html":[30,0,1,0,6,3], +"gui3_d_8hpp_source.html":[30,0,1,0,6,3], +"gui_2enum_classes_8hpp.html":[30,0,1,0,6,0], +"gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bf":[30,0,1,0,6,0,1], +"gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd":[30,0,1,0,6,0,1,1], +"gui_2enum_classes_8hpp.html#a6c22a72ce93c64e7582cb670492a50bfae7ec409749889353b8f83a6b04159420":[30,0,1,0,6,0,1,0], +"gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6":[30,0,1,0,6,0,0], +"gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a105036ef087117869f656cd72bfd8dd6":[30,0,1,0,6,0,0,1], +"gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a28b652e57d2da6b7c939166be21efd9a":[30,0,1,0,6,0,0,0], +"gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a3bd9369403112127ae7db2f866002be2":[30,0,1,0,6,0,0,2], 
+"gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6a442304e26339521bc296bdc47ff5fddf":[30,0,1,0,6,0,0,4], +"gui_2enum_classes_8hpp.html#ae52707752b1872b39f0306cc4f6c6ae6ae18221460ca8434295f980225fd6a91b":[30,0,1,0,6,0,0,3], +"gui_2enum_classes_8hpp_source.html":[30,0,1,0,6,0], +"gui_2headers_8hpp.html":[30,0,1,0,6,6], +"gui_2headers_8hpp_source.html":[30,0,1,0,6,6], +"gui_8hpp.html":[30,0,1,0,6,2], +"gui_8hpp_source.html":[30,0,1,0,6,2], +"gui_adam_8hpp.html":[30,0,1,0,6,4], +"gui_adam_8hpp_source.html":[30,0,1,0,6,4], +"gui_info_adder_8hpp.html":[30,0,1,0,6,5], +"gui_info_adder_8hpp_source.html":[30,0,1,0,6,5], +"hand_2headers_8hpp.html":[30,0,1,0,7,8], +"hand_2headers_8hpp_source.html":[30,0,1,0,7,8], +"hand_cpu_renderer_8hpp.html":[30,0,1,0,7,0], +"hand_cpu_renderer_8hpp_source.html":[30,0,1,0,7,0], +"hand_detector_8hpp.html":[30,0,1,0,7,1], +"hand_detector_8hpp_source.html":[30,0,1,0,7,1], +"hand_detector_from_txt_8hpp.html":[30,0,1,0,7,2], +"hand_detector_from_txt_8hpp_source.html":[30,0,1,0,7,2], +"hand_extractor_caffe_8hpp.html":[30,0,1,0,7,3], +"hand_extractor_caffe_8hpp_source.html":[30,0,1,0,7,3], +"hand_extractor_net_8hpp.html":[30,0,1,0,7,4], +"hand_extractor_net_8hpp_source.html":[30,0,1,0,7,4], +"hand_gpu_renderer_8hpp.html":[30,0,1,0,7,5], +"hand_gpu_renderer_8hpp_source.html":[30,0,1,0,7,5], +"hand_parameters_8hpp.html":[30,0,1,0,7,6], +"hand_parameters_8hpp.html#a182585e2e944cdb62f3dededdd85d1fc":[30,0,1,0,7,6,7], +"hand_parameters_8hpp.html#a335d707e98d311d39d9a9dab0e325391":[30,0,1,0,7,6,9], +"hand_parameters_8hpp.html#a3fe70bd1eacdd78aef3344c83533ffc7":[30,0,1,0,7,6,10], +"hand_parameters_8hpp.html#a41b6fb82924c5532cf10151e6ce497f2":[30,0,1,0,7,6,8], +"hand_parameters_8hpp.html#a450bb646e7573322d8f622bfdbab4833":[30,0,1,0,7,6,4], +"hand_parameters_8hpp.html#a4e9bbc2167923763c5982d6d1f41f560":[30,0,1,0,7,6,11], +"hand_parameters_8hpp.html#a5ca06d0202756b9a3b8825ccbafc0558":[30,0,1,0,7,6,0], +"hand_parameters_8hpp.html#a5ec40add22e28bc75596c75a7be8a692":[30,0,1,0,7,6,1], +"hand_parameters_8hpp.html#a76c1f1ea90b73e13e93f72413b3cab0e":[30,0,1,0,7,6,5], +"hand_parameters_8hpp.html#a799d629d7fddd7f0daf40ccdae0293b9":[30,0,1,0,7,6,2], +"hand_parameters_8hpp.html#aa8cc53d2fe5353f9d87d50c32a8c1a95":[30,0,1,0,7,6,6], +"hand_parameters_8hpp.html#ac13af59538bcb8a1709f20010681d1c7":[30,0,1,0,7,6,12], +"hand_parameters_8hpp.html#aed0d108f5ada623eeb0ed41f896f8e97":[30,0,1,0,7,6,3], +"hand_parameters_8hpp_source.html":[30,0,1,0,7,6], +"hand_renderer_8hpp.html":[30,0,1,0,7,7], +"hand_renderer_8hpp_source.html":[30,0,1,0,7,7], +"headers_8hpp.html":[30,0,1,0,17], +"headers_8hpp_source.html":[30,0,1,0,17], +"heat_map_saver_8hpp.html":[30,0,1,0,4,6], +"heat_map_saver_8hpp_source.html":[30,0,1,0,4,6], +"hierarchy.html":[29,2], +"image_directory_reader_8hpp.html":[30,0,1,0,10,4], +"image_directory_reader_8hpp_source.html":[30,0,1,0,10,4], +"image_saver_8hpp.html":[30,0,1,0,4,7], +"image_saver_8hpp_source.html":[30,0,1,0,4,7], +"index.html":[], +"index.html#autotoc_md1":[0], +"index.html#autotoc_md10":[5], +"index.html#autotoc_md11":[6], +"index.html#autotoc_md12":[7], +"index.html#autotoc_md13":[8], +"index.html#autotoc_md2":[1], +"index.html#autotoc_md3":[1,0], +"index.html#autotoc_md4":[1,1], +"index.html#autotoc_md5":[1,2], +"index.html#autotoc_md6":[1,3], +"index.html#autotoc_md7":[2], +"index.html#autotoc_md8":[3], +"index.html#autotoc_md9":[4], +"ip_camera_reader_8hpp.html":[30,0,1,0,10,5], +"ip_camera_reader_8hpp_source.html":[30,0,1,0,10,5], 
+"joint_angle_estimation_8hpp.html":[30,0,1,0,0,2], +"joint_angle_estimation_8hpp_source.html":[30,0,1,0,0,2], +"json_ofstream_8hpp.html":[30,0,1,0,4,8], +"json_ofstream_8hpp_source.html":[30,0,1,0,4,8], +"keep_top_n_people_8hpp.html":[30,0,1,0,2,9], +"keep_top_n_people_8hpp_source.html":[30,0,1,0,2,9], +"keypoint_8hpp.html":[30,0,1,0,14,7], +"keypoint_8hpp.html#a1110f4c0017c43ea1d0896a3225c55f8":[30,0,1,0,14,7,1], +"keypoint_8hpp.html#a1dd5dde18458975a36bdbd6dd38720a2":[30,0,1,0,14,7,6], +"keypoint_8hpp.html#a1f931e210eb575a084b8e6f462b0b382":[30,0,1,0,14,7,0], +"keypoint_8hpp.html#a36296ff5a5945244c5131e3ae16057e1":[30,0,1,0,14,7,11], +"keypoint_8hpp.html#a6913c67141fcbbba84fc88ac8a45aa0f":[30,0,1,0,14,7,9], +"keypoint_8hpp.html#a6b9adf8f7e698e566414c9f44f0c85f1":[30,0,1,0,14,7,16], +"keypoint_8hpp.html#a75411d98f69051860379730e16103178":[30,0,1,0,14,7,7], +"keypoint_8hpp.html#aa053f4b0533d9e981aa171a1ef57fc30":[30,0,1,0,14,7,5], +"keypoint_8hpp.html#aa7803aa62abc21471e7d966bd674a81a":[30,0,1,0,14,7,13], +"keypoint_8hpp.html#aa9366cf1b4ac3494965749eeb5537da1":[30,0,1,0,14,7,12], +"keypoint_8hpp.html#aae9e38fa6c56e188b4f649732f0d4cd3":[30,0,1,0,14,7,15], +"keypoint_8hpp.html#ac5fc565b24e499e306ca170b9139eeb6":[30,0,1,0,14,7,14], +"keypoint_8hpp.html#ac74cba4141f2bee2b9d94dc171029a73":[30,0,1,0,14,7,8], +"keypoint_8hpp.html#ac968b1c98c60b74be78225be27805706":[30,0,1,0,14,7,3], +"keypoint_8hpp.html#ac9af122ccd8dcdafb11e37b6633245b4":[30,0,1,0,14,7,10], +"keypoint_8hpp.html#ace4af20d19066df9ec502c5a09097c24":[30,0,1,0,14,7,2], +"keypoint_8hpp.html#acf638f00b0a825c05683f8e23942a9d5":[30,0,1,0,14,7,4], +"keypoint_8hpp_source.html":[30,0,1,0,14,7], +"keypoint_saver_8hpp.html":[30,0,1,0,4,9], +"keypoint_saver_8hpp_source.html":[30,0,1,0,4,9], +"keypoint_scaler_8hpp.html":[30,0,1,0,2,10], +"keypoint_scaler_8hpp_source.html":[30,0,1,0,2,10], +"macros_8hpp.html":[30,0,1,0,2,11], +"macros_8hpp.html#a1eadbb31e92e7fbc799bf7cf4d2a6f50":[30,0,1,0,2,11,7], +"macros_8hpp.html#a4ba443bb7a0e5dbe8054a9ac37a5e000":[30,0,1,0,2,11,10], +"macros_8hpp.html#a60e010d8a2352d94b8b57d97cf4a7d73":[30,0,1,0,2,11,1], +"macros_8hpp.html#a6bf32c65e0f388d5b09d8b2424416c0e":[30,0,1,0,2,11,0] +}; diff --git a/web/html/doc/navtreeindex6.js b/web/html/doc/navtreeindex6.js new file mode 100644 index 000000000..3263bb433 --- /dev/null +++ b/web/html/doc/navtreeindex6.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX6 = +{ +"macros_8hpp.html#a7bacf9f65110ec8292bc69e1eb0f426e":[30,0,1,0,2,11,4], +"macros_8hpp.html#a7de0b663a3aa8043a346ebf2c411bda3":[30,0,1,0,2,11,15], +"macros_8hpp.html#a80404791b46a15fd601feaa11f1e5028":[30,0,1,0,2,11,6], +"macros_8hpp.html#aa883b8ec96d2804b37d3bfb0bd4c5f16":[30,0,1,0,2,11,11], +"macros_8hpp.html#abef96b5dd35dd9d44ad27ddf0e2f5f2e":[30,0,1,0,2,11,9], +"macros_8hpp.html#ac5627744abe5fd0c8eacfe9c7f8bd32e":[30,0,1,0,2,11,2], +"macros_8hpp.html#acc5af19a77b18cf5aa2e1f82e2e484dd":[30,0,1,0,2,11,5], +"macros_8hpp.html#ad0aef3afcb2a9da69c3453426f56b0ac":[30,0,1,0,2,11,3], +"macros_8hpp.html#adcf24c45000a4f44f355f1cc3062ea49":[30,0,1,0,2,11,13], +"macros_8hpp.html#af57a843cfdae82e064838c20b3b54851":[30,0,1,0,2,11,12], +"macros_8hpp.html#af9fed593b7a4237bc6ede717a1ae70f0":[30,0,1,0,2,11,8], +"macros_8hpp.html#afda430d83b9513af7270f1d680bf5471":[30,0,1,0,2,11,14], +"macros_8hpp_source.html":[30,0,1,0,2,11], +"matrix_8hpp.html":[30,0,1,0,2,12], +"matrix_8hpp.html#a00c8b0a04adbe37ba8b6d08e0ba23287":[30,0,1,0,2,12,4], +"matrix_8hpp.html#a1a8232a2c14792f9315d85004973c33c":[30,0,1,0,2,12,6], 
+"matrix_8hpp.html#a1a8d8a14fa0269d045f8d8c8228098af":[30,0,1,0,2,12,10], +"matrix_8hpp.html#a1b810570f8207983b20ea93e8f9f71a2":[30,0,1,0,2,12,2], +"matrix_8hpp.html#a1c9288885fc29db5560426556d3fba41":[30,0,1,0,2,12,8], +"matrix_8hpp.html#a2bab8a00953b4ba71a8b965347f7dd92":[30,0,1,0,2,12,7], +"matrix_8hpp.html#ad0bd05468e4619f7061bb513fc2cb86d":[30,0,1,0,2,12,3], +"matrix_8hpp.html#ad2790de0442f8b1a303b781ffe171c6e":[30,0,1,0,2,12,5], +"matrix_8hpp.html#adb6fa4cc9ba470382895a448b7cf1257":[30,0,1,0,2,12,1], +"matrix_8hpp.html#af06d0e620916e1f08ca609fb02f25dc8":[30,0,1,0,2,12,9], +"matrix_8hpp_source.html":[30,0,1,0,2,12], +"maximum_base_8hpp.html":[30,0,1,0,8,3], +"maximum_base_8hpp.html#a8ec109805adf02f9872a6af37d602caa":[30,0,1,0,8,3,1], +"maximum_base_8hpp.html#ae0fea41041a70ae8449a77f46ffe8100":[30,0,1,0,8,3,0], +"maximum_base_8hpp_source.html":[30,0,1,0,8,3], +"maximum_caffe_8hpp.html":[30,0,1,0,8,4], +"maximum_caffe_8hpp_source.html":[30,0,1,0,8,4], +"md_doc_00_index.html":[0], +"md_doc_01_demo.html":[1], +"md_doc_01_demo.html#autotoc_md14":[1,9], +"md_doc_01_demo.html#autotoc_md15":[1,10], +"md_doc_01_demo.html#autotoc_md16":[1,10,0], +"md_doc_01_demo.html#autotoc_md17":[1,10,1], +"md_doc_01_demo.html#autotoc_md18":[1,11], +"md_doc_01_demo.html#autotoc_md19":[1,12], +"md_doc_01_demo.html#autotoc_md20":[1,13], +"md_doc_01_demo.html#autotoc_md21":[1,13,0], +"md_doc_01_demo.html#autotoc_md22":[1,13,0,0], +"md_doc_01_demo.html#autotoc_md23":[1,13,0,1], +"md_doc_01_demo.html#autotoc_md24":[1,13,1], +"md_doc_01_demo.html#autotoc_md25":[1,13,2], +"md_doc_01_demo.html#autotoc_md26":[1,14], +"md_doc_01_demo.html#autotoc_md27":[1,14,0], +"md_doc_01_demo.html#autotoc_md28":[1,15], +"md_doc_01_demo.html#autotoc_md29":[1,16], +"md_doc_01_demo.html#autotoc_md30":[1,16,0], +"md_doc_01_demo.html#autotoc_md31":[1,16,1], +"md_doc_01_demo.html#autotoc_md32":[1,16,2], +"md_doc_02_output.html":[2], +"md_doc_02_output.html#autotoc_md33":[2,0], +"md_doc_02_output.html#autotoc_md34":[2,1], +"md_doc_02_output.html#autotoc_md35":[2,2], +"md_doc_02_output.html#autotoc_md36":[2,2,0], +"md_doc_02_output.html#autotoc_md37":[2,2,1], +"md_doc_02_output.html#autotoc_md38":[2,2,2], +"md_doc_02_output.html#autotoc_md39":[2,2,3], +"md_doc_02_output.html#autotoc_md40":[2,3], +"md_doc_02_output.html#autotoc_md41":[2,4], +"md_doc_02_output.html#autotoc_md42":[2,4,0], +"md_doc_02_output.html#autotoc_md43":[2,4,1], +"md_doc_02_output.html#autotoc_md44":[2,5], +"md_doc_02_output.html#autotoc_md45":[2,6], +"md_doc_02_output.html#autotoc_md46":[2,6,0], +"md_doc_02_output.html#autotoc_md47":[2,6,1], +"md_doc_03_python_api.html":[3], +"md_doc_03_python_api.html#autotoc_md48":[3,0], +"md_doc_03_python_api.html#autotoc_md49":[3,1], +"md_doc_03_python_api.html#autotoc_md50":[3,2], +"md_doc_03_python_api.html#autotoc_md51":[3,3], +"md_doc_03_python_api.html#autotoc_md52":[3,4], +"md_doc_03_python_api.html#autotoc_md53":[3,5], +"md_doc_03_python_api.html#autotoc_md54":[3,6], +"md_doc_03_python_api.html#autotoc_md55":[3,7], +"md_doc_03_python_api.html#autotoc_md56":[3,7,0], +"md_doc_03_python_api.html#autotoc_md57":[3,7,1], +"md_doc_04_cpp_api.html":[4], +"md_doc_04_cpp_api.html#autotoc_md58":[4,0], +"md_doc_04_cpp_api.html#autotoc_md59":[4,1], +"md_doc_04_cpp_api.html#autotoc_md60":[4,2], +"md_doc_05_faq.html":[5], +"md_doc_05_faq.html#autotoc_md61":[5,0], +"md_doc_05_faq.html#autotoc_md62":[5,1], +"md_doc_05_faq.html#autotoc_md63":[5,1,0], +"md_doc_05_faq.html#autotoc_md64":[5,1,0,0], +"md_doc_05_faq.html#autotoc_md65":[5,1,0,1], 
+"md_doc_05_faq.html#autotoc_md66":[5,1,0,2], +"md_doc_05_faq.html#autotoc_md67":[5,1,0,3], +"md_doc_05_faq.html#autotoc_md68":[5,1,0,4], +"md_doc_05_faq.html#autotoc_md69":[5,1,0,5], +"md_doc_05_faq.html#autotoc_md70":[5,1,0,6], +"md_doc_05_faq.html#autotoc_md71":[5,1,0,7], +"md_doc_05_faq.html#autotoc_md72":[5,1,0,8], +"md_doc_05_faq.html#autotoc_md73":[5,1,0,9], +"md_doc_05_faq.html#autotoc_md74":[5,1,0,10], +"md_doc_05_faq.html#autotoc_md75":[5,1,0,11], +"md_doc_05_faq.html#autotoc_md76":[5,1,0,12], +"md_doc_05_faq.html#autotoc_md77":[5,1,0,13], +"md_doc_05_faq.html#autotoc_md78":[5,1,0,14], +"md_doc_05_faq.html#autotoc_md79":[5,1,0,15], +"md_doc_05_faq.html#autotoc_md80":[5,1,0,16], +"md_doc_05_faq.html#autotoc_md81":[5,1,1], +"md_doc_05_faq.html#autotoc_md82":[5,1,1,0], +"md_doc_05_faq.html#autotoc_md83":[5,1,1,1], +"md_doc_05_faq.html#autotoc_md84":[5,1,1,2], +"md_doc_05_faq.html#autotoc_md85":[5,1,1,3], +"md_doc_05_faq.html#autotoc_md86":[5,1,1,4], +"md_doc_05_faq.html#autotoc_md87":[5,1,2], +"md_doc_05_faq.html#autotoc_md88":[5,1,2,0], +"md_doc_05_faq.html#autotoc_md89":[5,1,3], +"md_doc_05_faq.html#autotoc_md90":[5,1,3,0], +"md_doc_05_faq.html#autotoc_md91":[5,1,3,1], +"md_doc_05_faq.html#autotoc_md92":[5,1,3,2], +"md_doc_06_maximizing_openpose_speed.html":[6], +"md_doc_06_maximizing_openpose_speed.html#autotoc_md93":[6,0], +"md_doc_06_maximizing_openpose_speed.html#autotoc_md94":[6,1], +"md_doc_06_maximizing_openpose_speed.html#autotoc_md95":[6,1,0], +"md_doc_06_maximizing_openpose_speed.html#autotoc_md96":[6,1,1], +"md_doc_06_maximizing_openpose_speed.html#autotoc_md97":[6,2], +"md_doc_06_maximizing_openpose_speed.html#autotoc_md98":[6,3], +"md_doc_07_major_released_features.html":[7], +"md_doc_08_release_notes.html":[8], +"md_doc_08_release_notes.html#autotoc_md100":[8,1], +"md_doc_08_release_notes.html#autotoc_md101":[8,2], +"md_doc_08_release_notes.html#autotoc_md102":[8,3], +"md_doc_08_release_notes.html#autotoc_md103":[8,4], +"md_doc_08_release_notes.html#autotoc_md104":[8,5], +"md_doc_08_release_notes.html#autotoc_md105":[8,6], +"md_doc_08_release_notes.html#autotoc_md106":[8,7], +"md_doc_08_release_notes.html#autotoc_md107":[8,8], +"md_doc_08_release_notes.html#autotoc_md108":[8,9], +"md_doc_08_release_notes.html#autotoc_md109":[8,10], +"md_doc_08_release_notes.html#autotoc_md110":[8,11], +"md_doc_08_release_notes.html#autotoc_md111":[8,12], +"md_doc_08_release_notes.html#autotoc_md112":[8,13], +"md_doc_08_release_notes.html#autotoc_md113":[8,14], +"md_doc_08_release_notes.html#autotoc_md114":[8,15], +"md_doc_08_release_notes.html#autotoc_md115":[8,16], +"md_doc_08_release_notes.html#autotoc_md99":[8,0], +"md_doc_09_authors_and_contributors.html":[9], +"md_doc_09_authors_and_contributors.html#autotoc_md116":[9,0], +"md_doc_09_authors_and_contributors.html#autotoc_md117":[9,1], +"md_doc_10_community_projects.html":[10], +"md_doc_advanced_3d_reconstruction_module.html":[11], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md118":[11,0], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md119":[11,1], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md120":[11,2], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md121":[11,3], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md122":[11,4], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md123":[11,5], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md124":[11,6], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md125":[11,7], 
+"md_doc_advanced_3d_reconstruction_module.html#autotoc_md126":[11,8], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md127":[11,9], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md128":[11,10], +"md_doc_advanced_3d_reconstruction_module.html#autotoc_md129":[11,11], +"md_doc_advanced_calibration_module.html":[12], +"md_doc_advanced_calibration_module.html#autotoc_md130":[12,0], +"md_doc_advanced_calibration_module.html#autotoc_md131":[12,1], +"md_doc_advanced_calibration_module.html#autotoc_md132":[12,2], +"md_doc_advanced_calibration_module.html#autotoc_md133":[12,3], +"md_doc_advanced_calibration_module.html#autotoc_md134":[12,4], +"md_doc_advanced_calibration_module.html#autotoc_md135":[12,4,0], +"md_doc_advanced_calibration_module.html#autotoc_md136":[12,4,1], +"md_doc_advanced_calibration_module.html#autotoc_md137":[12,4,2], +"md_doc_advanced_calibration_module.html#autotoc_md138":[12,5], +"md_doc_advanced_calibration_module.html#autotoc_md139":[12,6], +"md_doc_advanced_calibration_module.html#autotoc_md140":[12,7], +"md_doc_advanced_demo_advanced.html":[13], +"md_doc_advanced_demo_advanced.html#autotoc_md141":[13,0], +"md_doc_advanced_demo_advanced.html#autotoc_md142":[13,1], +"md_doc_advanced_demo_advanced.html#autotoc_md143":[13,1,0], +"md_doc_advanced_demo_advanced.html#autotoc_md144":[13,1,1], +"md_doc_advanced_demo_advanced.html#autotoc_md145":[13,1,2], +"md_doc_advanced_demo_advanced.html#autotoc_md146":[13,1,3], +"md_doc_advanced_demo_advanced.html#autotoc_md147":[13,1,4], +"md_doc_advanced_demo_advanced.html#autotoc_md148":[13,1,5], +"md_doc_advanced_demo_advanced.html#autotoc_md149":[13,2], +"md_doc_advanced_demo_advanced.html#autotoc_md150":[13,3], +"md_doc_advanced_deployment.html":[14], +"md_doc_advanced_deployment.html#autotoc_md151":[14,0], +"md_doc_advanced_deployment.html#autotoc_md152":[14,0,0], +"md_doc_advanced_deployment.html#autotoc_md153":[14,0,1], +"md_doc_advanced_deployment.html#autotoc_md154":[14,0,2], +"md_doc_advanced_deployment.html#autotoc_md155":[14,0,3], +"md_doc_advanced_deployment.html#autotoc_md156":[14,1], +"md_doc_advanced_deployment.html#autotoc_md157":[14,1,0], +"md_doc_advanced_deployment.html#autotoc_md158":[14,1,1], +"md_doc_advanced_heatmap_output.html":[15], +"md_doc_advanced_heatmap_output.html#autotoc_md159":[15,0], +"md_doc_advanced_heatmap_output.html#autotoc_md160":[15,1], +"md_doc_advanced_heatmap_output.html#autotoc_md161":[15,2], +"md_doc_advanced_heatmap_output.html#autotoc_md162":[15,3], +"md_doc_advanced_heatmap_output.html#autotoc_md163":[15,4], +"md_doc_advanced_heatmap_output.html#autotoc_md164":[15,5], +"md_doc_advanced_standalone_face_or_hand_keypoint_detector.html":[16], +"md_doc_advanced_standalone_face_or_hand_keypoint_detector.html#autotoc_md165":[16,0], +"md_doc_advanced_standalone_face_or_hand_keypoint_detector.html#autotoc_md166":[16,1], +"md_doc_advanced_standalone_face_or_hand_keypoint_detector.html#autotoc_md167":[16,2], +"md_doc_installation_0_index.html":[17], +"md_doc_installation_0_index.html#autotoc_md168":[17,0], +"md_doc_installation_0_index.html#autotoc_md169":[17,1], +"md_doc_installation_0_index.html#autotoc_md170":[17,2], +"md_doc_installation_0_index.html#autotoc_md171":[17,3], +"md_doc_installation_0_index.html#autotoc_md172":[17,3,0], +"md_doc_installation_0_index.html#autotoc_md173":[17,3,1], +"md_doc_installation_0_index.html#autotoc_md174":[17,3,2], +"md_doc_installation_0_index.html#autotoc_md175":[17,3,3], +"md_doc_installation_0_index.html#autotoc_md176":[17,3,4], 
+"md_doc_installation_0_index.html#autotoc_md177":[17,3,4,0], +"md_doc_installation_0_index.html#autotoc_md178":[17,3,4,1], +"md_doc_installation_0_index.html#autotoc_md179":[17,3,4,2], +"md_doc_installation_0_index.html#autotoc_md180":[17,3,5], +"md_doc_installation_0_index.html#autotoc_md181":[17,3,6], +"md_doc_installation_0_index.html#autotoc_md182":[17,4], +"md_doc_installation_0_index.html#autotoc_md183":[17,5], +"md_doc_installation_0_index.html#autotoc_md184":[17,6], +"md_doc_installation_0_index.html#autotoc_md185":[17,7], +"md_doc_installation_1_prerequisites.html":[18], +"md_doc_installation_1_prerequisites.html#autotoc_md186":[18,0], +"md_doc_installation_1_prerequisites.html#autotoc_md187":[18,1], +"md_doc_installation_1_prerequisites.html#autotoc_md188":[18,2], +"md_doc_installation_1_prerequisites.html#autotoc_md189":[18,3], +"md_doc_installation_1_prerequisites.html#autotoc_md190":[18,4], +"md_doc_installation_2_additional_settings.html":[19], +"md_doc_installation_2_additional_settings.html#autotoc_md191":[19,0], +"md_doc_installation_2_additional_settings.html#autotoc_md192":[19,1], +"md_doc_installation_2_additional_settings.html#autotoc_md193":[19,1,0], +"md_doc_installation_2_additional_settings.html#autotoc_md194":[19,1,1], +"md_doc_installation_2_additional_settings.html#autotoc_md195":[19,1,2], +"md_doc_installation_2_additional_settings.html#autotoc_md196":[19,1,3], +"md_doc_installation_2_additional_settings.html#autotoc_md197":[19,1,4], +"md_doc_installation_2_additional_settings.html#autotoc_md198":[19,1,5], +"md_doc_installation_2_additional_settings.html#autotoc_md199":[19,1,6], +"md_doc_installation_2_additional_settings.html#autotoc_md200":[19,1,7], +"md_doc_installation_2_additional_settings.html#autotoc_md201":[19,1,8], +"md_doc_installation_2_additional_settings.html#autotoc_md202":[19,1,9], +"md_doc_installation_2_additional_settings.html#autotoc_md203":[19,1,10], +"md_doc_installation_2_additional_settings.html#autotoc_md204":[19,1,11], +"md_doc_installation_2_additional_settings.html#autotoc_md205":[19,1,12], +"md_doc_installation_2_additional_settings.html#autotoc_md206":[19,1,13], +"md_doc_installation_2_additional_settings.html#autotoc_md207":[19,1,13,0], +"md_doc_installation_2_additional_settings.html#autotoc_md208":[19,1,13,1], +"md_doc_installation_2_additional_settings.html#autotoc_md209":[19,1,13,2], +"md_doc_installation_2_additional_settings.html#autotoc_md210":[19,1,13,3], +"md_doc_installation_deprecated_installation_deprecated.html":[20], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md211":[20,0] +}; diff --git a/web/html/doc/navtreeindex7.js b/web/html/doc/navtreeindex7.js new file mode 100644 index 000000000..34e026b91 --- /dev/null +++ b/web/html/doc/navtreeindex7.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX7 = +{ +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md212":[20,1], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md213":[20,2], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md214":[20,3], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md215":[20,4], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md216":[20,5], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md217":[20,5,0], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md218":[20,5,1], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md219":[20,5,2], 
+"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md220":[20,5,3], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md221":[20,5,4], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md222":[20,6], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md223":[20,7], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md224":[20,8], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md225":[0], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md226":[20,9], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md227":[20,10], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md228":[1], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md229":[2], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md230":[20,10,0], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md231":[20,10,0,3], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md232":[20,10,0,3,0], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md233":[20,10,0,3,1], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md234":[20,10,0,4], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md235":[20,10,0,5], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md236":[20,10,1], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md237":[20,10,2], +"md_doc_installation_deprecated_installation_deprecated.html#autotoc_md238":[20,10,3], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html":[21], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md239":[21,0], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md240":[21,1], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md241":[21,2], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md242":[21,3], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md243":[21,4], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md244":[21,5], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md245":[21,6], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md246":[21,7], +"md_doc_installation_jetson_tx_installation_jetson_tx1.html#autotoc_md247":[21,8], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html":[22], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md248":[22,0], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md249":[22,1], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md250":[22,2], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html#autotoc_md251":[22,3], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html":[23], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md252":[23,0], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md253":[23,1], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md254":[23,2], +"md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html#autotoc_md255":[23,3], +"md_doc_very_advanced_library_structure_0_index.html":[24], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html":[25], 
+"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md256":[25,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md257":[25,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md258":[25,1,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md259":[25,2], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md260":[25,2,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md261":[25,2,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md262":[25,3], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md263":[25,4], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md264":[25,5], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md265":[25,5,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md266":[25,5,0,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md267":[25,5,0,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md268":[25,5,0,2], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md269":[25,5,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md270":[25,6], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md271":[25,6,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md272":[25,6,0,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md273":[25,6,0,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md274":[25,6,0,2], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md275":[25,6,0,3], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md276":[25,6,0,4], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md277":[25,6,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md278":[25,6,2], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md279":[25,6,3], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md280":[25,7], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md281":[25,7,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md282":[25,7,0,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md283":[25,7,0,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md284":[25,7,1], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md285":[25,7,1,0], +"md_doc_very_advanced_library_structure_1_library_deep_overview.html#autotoc_md286":[25,7,1,1], +"md_doc_very_advanced_library_structure_2_library_extend_functionality.html":[26], +"md_doc_very_advanced_library_structure_3_library_add_new_module.html":[27], +"md_doc_very_advanced_library_structure_3_library_add_new_module.html#autotoc_md287":[27,0], +"md_doc_very_advanced_library_structure_3_library_add_new_module.html#autotoc_md288":[27,1], +"namespacemembers.html":[28,1,0,0], +"namespacemembers.html":[28,1,0], +"namespacemembers_c.html":[28,1,0,1], +"namespacemembers_d.html":[28,1,0,2], +"namespacemembers_e.html":[28,1,0,3], +"namespacemembers_enum.html":[28,1,4], 
+"namespacemembers_f.html":[28,1,0,4], +"namespacemembers_func.html":[28,1,1], +"namespacemembers_func.html":[28,1,1,0], +"namespacemembers_func_c.html":[28,1,1,1], +"namespacemembers_func_d.html":[28,1,1,2], +"namespacemembers_func_e.html":[28,1,1,3], +"namespacemembers_func_f.html":[28,1,1,4], +"namespacemembers_func_g.html":[28,1,1,5], +"namespacemembers_func_k.html":[28,1,1,6], +"namespacemembers_func_l.html":[28,1,1,7], +"namespacemembers_func_m.html":[28,1,1,8], +"namespacemembers_func_n.html":[28,1,1,9], +"namespacemembers_func_o.html":[28,1,1,10], +"namespacemembers_func_p.html":[28,1,1,11], +"namespacemembers_func_r.html":[28,1,1,12], +"namespacemembers_func_s.html":[28,1,1,13], +"namespacemembers_func_t.html":[28,1,1,14], +"namespacemembers_func_u.html":[28,1,1,15], +"namespacemembers_func_v.html":[28,1,1,16], +"namespacemembers_func_w.html":[28,1,1,17], +"namespacemembers_g.html":[28,1,0,5], +"namespacemembers_h.html":[28,1,0,6], +"namespacemembers_k.html":[28,1,0,7], +"namespacemembers_l.html":[28,1,0,8], +"namespacemembers_m.html":[28,1,0,9], +"namespacemembers_n.html":[28,1,0,10], +"namespacemembers_o.html":[28,1,0,11], +"namespacemembers_p.html":[28,1,0,12], +"namespacemembers_r.html":[28,1,0,13], +"namespacemembers_s.html":[28,1,0,14], +"namespacemembers_t.html":[28,1,0,15], +"namespacemembers_type.html":[28,1,3], +"namespacemembers_u.html":[28,1,0,16], +"namespacemembers_v.html":[28,1,0,17], +"namespacemembers_vars.html":[28,1,2], +"namespacemembers_w.html":[28,1,0,18], +"namespaceop.html":[28,0,0], +"namespaceop.html#a00c56c20997f734b2bd44d6f85b86cf0":[28,0,0,408], +"namespaceop.html#a016abefba53293ed2ffe3a3c3bd88dd0":[28,0,0,313], +"namespaceop.html#a01aa5c6e24026536367cf47a64e9bba5":[28,0,0,204], +"namespaceop.html#a01dd208c992c8e07623579f77dcfb59b":[28,0,0,317], +"namespaceop.html#a020603e3ad6326cb1dce43485157f768":[28,0,0,208], +"namespaceop.html#a02164ca0af9e838190f584f5d1d8465e":[28,0,0,166], +"namespaceop.html#a0424a8e4dc8ceb5e8d8a2230c157a7fd":[28,0,0,190], +"namespaceop.html#a056c64afca17423e038590e4ef2f712b":[28,0,0,355], +"namespaceop.html#a0ce96f84c6e380b261802c7e2639dc7d":[28,0,0,321], +"namespaceop.html#a0db530b6f607aa43e8f9154b308d207a":[28,0,0,218], +"namespaceop.html#a0e1275fd8690a55200fcd193c94dcf08":[28,0,0,252], +"namespaceop.html#a0e60b0e4e89a7f08de54ad40c2d46a60":[28,0,0,263], +"namespaceop.html#a1070db47220e17cf37df40411350f6fb":[28,0,0,138], +"namespaceop.html#a1070db47220e17cf37df40411350f6fba5bd4c87976f48e6a53919d53e14025e9":[28,0,0,138,1], +"namespaceop.html#a1070db47220e17cf37df40411350f6fba65c691a85367d21881220b7a3d923747":[28,0,0,138,3], +"namespaceop.html#a1070db47220e17cf37df40411350f6fba6f6cb72d544962fa333e2e34ce64f719":[28,0,0,138,4], +"namespaceop.html#a1070db47220e17cf37df40411350f6fba900b06e1ae224594f075e0c882c73532":[28,0,0,138,2], +"namespaceop.html#a1070db47220e17cf37df40411350f6fbaac101b32dda4448cf13a93fe283dddd8":[28,0,0,138,0], +"namespaceop.html#a1110f4c0017c43ea1d0896a3225c55f8":[28,0,0,260], +"namespaceop.html#a11bd7e53698eabe32b69b48708cf7b19":[28,0,0,300], +"namespaceop.html#a1245f62cf98c4ee7591dfc8807ef355d":[28,0,0,406], +"namespaceop.html#a13b86d097fd5f36612e9858e9348ea57":[28,0,0,158], +"namespaceop.html#a15f6c39797cee87f6aa941d93f22b78b":[28,0,0,403], +"namespaceop.html#a177ffd3101c7a1f5cf32e100474a1234":[28,0,0,229], +"namespaceop.html#a17da233ea322ae172ff5bda7caaf2124":[28,0,0,297], +"namespaceop.html#a182585e2e944cdb62f3dededdd85d1fc":[28,0,0,415], +"namespaceop.html#a1910d9f194831570be6ffe683209e7b3":[28,0,0,395], 
+"namespaceop.html#a196f17357cd1c1bb02e24e4e8a0e6ec3":[28,0,0,181], +"namespaceop.html#a1b479fea39a56c041a8a51aecf024bed":[28,0,0,340], +"namespaceop.html#a1c2921f841ab87033b535b5ae8a4d526":[28,0,0,319], +"namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226b":[28,0,0,145], +"namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba21c5c3f60f4881b8d5477f5628db74f1":[28,0,0,145,2], +"namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba9ce2d07469b39a72159ed8b0e0e597ca":[28,0,0,145,0], +"namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226baa9ded1e5ce5d75814730bb4caaf49419":[28,0,0,145,1], +"namespaceop.html#a1ca09f1d0e1f01d95842e99ebeef0631":[28,0,0,256], +"namespaceop.html#a1d9f50688522ed7368acc33a09ae9ece":[28,0,0,202], +"namespaceop.html#a1dd5dde18458975a36bdbd6dd38720a2":[28,0,0,288], +"namespaceop.html#a1e71130dc8f280e4664c711128b18b42":[28,0,0,165], +"namespaceop.html#a1e986a510a29bfd8c682f65a8b399551":[28,0,0,376], +"namespaceop.html#a1f931e210eb575a084b8e6f462b0b382":[28,0,0,159], +"namespaceop.html#a1fd317d44606181c63ef8a4e5676a09e":[28,0,0,241], +"namespaceop.html#a21fcb98366f6ea8895fc7f527f232db5":[28,0,0,422], +"namespaceop.html#a24ebdcb8395dea0429f220de6a715d6e":[28,0,0,369], +"namespaceop.html#a264496927e7b331ad628d7dc4a683194":[28,0,0,265], +"namespaceop.html#a289d19386824250545f248a79aed283c":[28,0,0,269], +"namespaceop.html#a28c5ac530845231600fb93c0be44ad6d":[28,0,0,328], +"namespaceop.html#a2ae13dae91c41b29063b48158ccbcc4e":[28,0,0,220], +"namespaceop.html#a2af8422ada0de882cc222920ca15c6d2":[28,0,0,226], +"namespaceop.html#a2dafd3db8f922405b38240345dd1dce5":[28,0,0,247], +"namespaceop.html#a2e35510c95e5525aae7a398b03b32488":[28,0,0,278], +"namespaceop.html#a2f1ef915c8efc724c0bf40f0348f20a2":[28,0,0,368], +"namespaceop.html#a2f610ba8a71cf16628df2f4d270b7d34":[28,0,0,346], +"namespaceop.html#a307b2c7b1506415a4ba44590fe8a7258":[28,0,0,311], +"namespaceop.html#a31ad937a2e52ea08ce925031d26616b9":[28,0,0,211], +"namespaceop.html#a3290f48d24c9992dd00d339ce49cfac7":[28,0,0,385], +"namespaceop.html#a335d707e98d311d39d9a9dab0e325391":[28,0,0,417], +"namespaceop.html#a3593e2d53bec533f0048ef3973eebd36":[28,0,0,156], +"namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a288aae25bc408055f50c21c991903a44":[28,0,0,156,0], +"namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a2fe4167817733fec8e6ba1afddf78f1b":[28,0,0,156,3], +"namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a435b3ab344c03bfc0e4530a2e75f5e44":[28,0,0,156,1], +"namespaceop.html#a3593e2d53bec533f0048ef3973eebd36ac68f8680ccf3a65dfcfc63356112c9f9":[28,0,0,156,2], +"namespaceop.html#a361310c59d16e88a4d2450a80f078f01":[28,0,0,192], +"namespaceop.html#a36296ff5a5945244c5131e3ae16057e1":[28,0,0,293], +"namespaceop.html#a36492d15f864f7c813a573789ea554aa":[28,0,0,172], +"namespaceop.html#a36f0207c6263e7174f4c79eba7c4df3f":[28,0,0,334], +"namespaceop.html#a37a23e10d9cbc428c793c3df1d62993e":[28,0,0,140], +"namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea5f4badd072493724e560fa43d0cf2c71":[28,0,0,140,2], +"namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea6ab48f7ed56efc362f41853c5616bf75":[28,0,0,140,0], +"namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaa9ded1e5ce5d75814730bb4caaf49419":[28,0,0,140,1], +"namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaca9f686d0a3d6b8bfe5865b59b2fc84f":[28,0,0,140,3], +"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0e":[28,0,0,149], +"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea04576b26f5dc3637bf3c8168fba1641d":[28,0,0,149,4], 
+"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea240f10f3a39507d858c743971fd4298f":[28,0,0,149,2], +"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea6f6cb72d544962fa333e2e34ce64f719":[28,0,0,149,5], +"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea7bf312724768faebba41ca3585a91f19":[28,0,0,149,3], +"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea83be5d7f6f29b19cf24f7393551c0439":[28,0,0,149,0], +"namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0eaf7405796a5c90a93fc3c8ffa89eb432d":[28,0,0,149,1], +"namespaceop.html#a37cdfa8dd466c3df9e7da5724a909143":[28,0,0,242], +"namespaceop.html#a37dce2abad2568d7664654e4598002af":[28,0,0,329], +"namespaceop.html#a3ba62b3d5cc275fc1700bf0c5e6bf578":[28,0,0,361], +"namespaceop.html#a3ceb3476e4154a6e9e06b3613a12c040":[28,0,0,356], +"namespaceop.html#a3da2a2a2f5ac58cfba53ea0d43ac6751":[28,0,0,223], +"namespaceop.html#a3dbd17f2f656a2bc751441a42b5b9516":[28,0,0,335], +"namespaceop.html#a3dd874d4341b99431819f9fa6b678ca9":[28,0,0,163], +"namespaceop.html#a3df938ef93037c534c5d342720d5fb70":[28,0,0,336], +"namespaceop.html#a3fbae1778780ae5bf4ffcc84cdef1870":[28,0,0,401], +"namespaceop.html#a3fe70bd1eacdd78aef3344c83533ffc7":[28,0,0,418], +"namespaceop.html#a3ff74a37eb4bf12e31bc5aa95b69f9e3":[28,0,0,281], +"namespaceop.html#a4059a24a786c4f2def977715dd2e6747":[28,0,0,267], +"namespaceop.html#a410201fcc46274e24726c5a601bc1721":[28,0,0,160], +"namespaceop.html#a41b6fb82924c5532cf10151e6ce497f2":[28,0,0,416], +"namespaceop.html#a42292d44d10f55cb1d83a296183e9b31":[28,0,0,384], +"namespaceop.html#a427c6244ee27171037bc201f401de16a":[28,0,0,230], +"namespaceop.html#a42d364d9fbd1a719341bd7187d97cf18":[28,0,0,235], +"namespaceop.html#a450bb646e7573322d8f622bfdbab4833":[28,0,0,412], +"namespaceop.html#a46e815df32db67d78a94367b7f97df25":[28,0,0,382], +"namespaceop.html#a47758c703fccdbb65c26dc7bc4022237":[28,0,0,201], +"namespaceop.html#a49bd4106b0cd1cb81980329b06c0d2c8":[28,0,0,217], +"namespaceop.html#a4adaee31db7ae1d3f963daa9e022e62f":[28,0,0,219], +"namespaceop.html#a4ba080c11cc9758051db97ce2a11c023":[28,0,0,296], +"namespaceop.html#a4d07868d77fb11253b413ed579e04c22":[28,0,0,407], +"namespaceop.html#a4e9bbc2167923763c5982d6d1f41f560":[28,0,0,419], +"namespaceop.html#a4f191afed46fea5e3ce5b2a8756e1ddd":[28,0,0,402], +"namespaceop.html#a5001474237d31d72c9145a84ec5143da":[28,0,0,233], +"namespaceop.html#a50526c188f2ba94b07e0945c0871fd2c":[28,0,0,345], +"namespaceop.html#a505ea16cc6c2c0068bbf4e7269dc8e0a":[28,0,0,199], +"namespaceop.html#a506578f3e723f992eabb627a371351ba":[28,0,0,173], +"namespaceop.html#a515273b013402d8c75780330588421bc":[28,0,0,275], +"namespaceop.html#a522d4552d2aeabe367f4d3bf371e6b3e":[28,0,0,423], +"namespaceop.html#a532d08cb2ef011f9cad29c01d3431d6e":[28,0,0,389], +"namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09":[28,0,0,147], +"namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a00d23a76e43b46dae9ec7aa9dcbebb32":[28,0,0,147,1], +"namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a6f6cb72d544962fa333e2e34ce64f719":[28,0,0,147,3], +"namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09aa6e20e86de146a7b524d32c9b1fea7f4":[28,0,0,147,2], +"namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09ab9f5c797ebbf55adccdd8539a65a0241":[28,0,0,147,0], +"namespaceop.html#a53f346232d0743f3dd0f547de1fc508f":[28,0,0,212], +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715ac":[28,0,0,136], +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca129e74dde7b475c8848310e16754c965":[28,0,0,136,4], 
+"namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca1d9502bb9f6efc989b3578dcfde7901e":[28,0,0,136,2], +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca6f6cb72d544962fa333e2e34ce64f719":[28,0,0,136,6], +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca8af5861002f3c157f9ba842bba10aa3f":[28,0,0,136,3], +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca9909f7cecc318ee0049ad0f3b409b3b3":[28,0,0,136,1], +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715acaac101b32dda4448cf13a93fe283dddd8":[28,0,0,136,0] +}; diff --git a/web/html/doc/navtreeindex8.js b/web/html/doc/navtreeindex8.js new file mode 100644 index 000000000..4186ba138 --- /dev/null +++ b/web/html/doc/navtreeindex8.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX8 = +{ +"namespaceop.html#a5418b76dad5b4aea1133325f4aa715acae9989db5dabeea617f40c8dbfd07f5fb":[28,0,0,136,5], +"namespaceop.html#a54a6c42a42a0a7e539061f5e30abb4bc":[28,0,0,310], +"namespaceop.html#a54b38240e45009f7e6a25d956ac96fe0":[28,0,0,188], +"namespaceop.html#a54b73745852c270cfd891eed0f6f2332":[28,0,0,153], +"namespaceop.html#a54b73745852c270cfd891eed0f6f2332a34e2d1989a1dbf75cd631596133ee5ee":[28,0,0,153,3], +"namespaceop.html#a54b73745852c270cfd891eed0f6f2332a54a365e86ee42cff91ca36532c9bbabf":[28,0,0,153,1], +"namespaceop.html#a54b73745852c270cfd891eed0f6f2332a6adf97f83acf6453d4a6a4b1070f3754":[28,0,0,153,5], +"namespaceop.html#a54b73745852c270cfd891eed0f6f2332ae2faa2a74b6a4134d0b3e84c7c0e2a01":[28,0,0,153,4], +"namespaceop.html#a54b73745852c270cfd891eed0f6f2332af40a40a04a078c4449cda2f326d7fb18":[28,0,0,153,2], +"namespaceop.html#a54b73745852c270cfd891eed0f6f2332af436d4d7a472ac39a7cb227e3ea24f8d":[28,0,0,153,0], +"namespaceop.html#a553bd31855c20a0d14e4c44a20bd91da":[28,0,0,142], +"namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daa6f6cb72d544962fa333e2e34ce64f719":[28,0,0,142,1], +"namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daafff0d600f8a0b5e19e88bfb821dd1157":[28,0,0,142,0], +"namespaceop.html#a5642545fda1c3bbaf60810cf0e2d2c1d":[28,0,0,203], +"namespaceop.html#a5660f0e72781ce6d7db9eb78b582e5c6":[28,0,0,205], +"namespaceop.html#a573544858d0a9c29c9707eeda3a21c98":[28,0,0,276], +"namespaceop.html#a57c4f3ada0db4882a4106d4dedf08012":[28,0,0,185], +"namespaceop.html#a57eee48e4cefd583a81cfc907586c035":[28,0,0,341], +"namespaceop.html#a593bb53120d8db14cab814dfb5d9ed2c":[28,0,0,399], +"namespaceop.html#a5a3db1a0d272d8fb5ea723845beee150":[28,0,0,315], +"namespaceop.html#a5cc3f625b2644b1aade85a9458b5503a":[28,0,0,193], +"namespaceop.html#a5f092bd36c716a894cb035e1ead2aca3":[28,0,0,234], +"namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400c":[28,0,0,141], +"namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca002f2100f8870e7c823894f492e4d337":[28,0,0,141,2], +"namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca68ec2bf5b1662d1d27a523dcfc3c702a":[28,0,0,141,1], +"namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cab1c94ca2fbc3e78fc30069c8d0f01680":[28,0,0,141,3], +"namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cafe50b062b9b9100a72e68b48fe26fc50":[28,0,0,141,0], +"namespaceop.html#a5f85de4dca2733d03470d42617f83d4e":[28,0,0,318], +"namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94":[28,0,0,146], +"namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94a68ec2bf5b1662d1d27a523dcfc3c702a":[28,0,0,146,0], +"namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94aa544d56d9492a20da20018000b5043b6":[28,0,0,146,1], +"namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94ab1c94ca2fbc3e78fc30069c8d0f01680":[28,0,0,146,2], +"namespaceop.html#a5fc85e8500dbeda3b75c1b6ecfac91cd":[28,0,0,350], 
+"namespaceop.html#a5fe477200af87dadb07c8d6a75b4414b":[28,0,0,363], +"namespaceop.html#a602d5d238fe0c7096698cf36b7dee9ab":[28,0,0,196], +"namespaceop.html#a60ab295fba5d41b31d6ba5a4942889a9":[28,0,0,254], +"namespaceop.html#a61240e5fbd4ea84a2cfdc89407bcb1ae":[28,0,0,391], +"namespaceop.html#a61af88aac41ef77ab4e8816023fe32f0":[28,0,0,238], +"namespaceop.html#a635579f5f8d20b8e65f4f94da4d3d2f2":[28,0,0,195], +"namespaceop.html#a63605cf0e6f4049beacf6094995272e8":[28,0,0,174], +"namespaceop.html#a674a652ad38b355285417529fc050847":[28,0,0,207], +"namespaceop.html#a6913c67141fcbbba84fc88ac8a45aa0f":[28,0,0,291], +"namespaceop.html#a699ef17b0f27b8bc2c4d4a03e46e6be1":[28,0,0,339], +"namespaceop.html#a6a34909c6c4d79a215f163291111d556":[28,0,0,225], +"namespaceop.html#a6a97f255cc323f1c1babe4c598727196":[28,0,0,327], +"namespaceop.html#a6aeab543a61ef23ed58a6e29401424ae":[28,0,0,390], +"namespaceop.html#a6b9adf8f7e698e566414c9f44f0c85f1":[28,0,0,379], +"namespaceop.html#a6c22a72ce93c64e7582cb670492a50bf":[28,0,0,143], +"namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd":[28,0,0,143,1], +"namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfae7ec409749889353b8f83a6b04159420":[28,0,0,143,0], +"namespaceop.html#a6d12bd1e42cfb63d2f780bed55fa01fb":[28,0,0,180], +"namespaceop.html#a6e1d1f90ef06cc7af576fdaad4b4e320":[28,0,0,246], +"namespaceop.html#a6f37638480139a4076eef4d0c7dc6cd1":[28,0,0,277], +"namespaceop.html#a6fc2ee2d2c256695fb7b2b953ee7f762":[28,0,0,243], +"namespaceop.html#a70f65da8f70ebd07b093932927187c90":[28,0,0,257], +"namespaceop.html#a71866b00e7d1077137094f78ec83b62b":[28,0,0,266], +"namespaceop.html#a71c68de51a3608e782854c298b91cd62":[28,0,0,231], +"namespaceop.html#a71cdc487bbec12ddbe4bac9123745494":[28,0,0,232], +"namespaceop.html#a75411d98f69051860379730e16103178":[28,0,0,289], +"namespaceop.html#a757a5cc88734e7be9e910e7d8213c282":[28,0,0,393], +"namespaceop.html#a758b08be140e27dd2642d286a383be54":[28,0,0,237], +"namespaceop.html#a75c4194e0eae0ef28c6829def462dad2":[28,0,0,387], +"namespaceop.html#a767385c8d3ebe736e1752825ab4d4ea0":[28,0,0,191], +"namespaceop.html#a76c1f1ea90b73e13e93f72413b3cab0e":[28,0,0,413], +"namespaceop.html#a774871462f7fefb8cadea1e49f501e45":[28,0,0,206], +"namespaceop.html#a77a4d87bbee791dfba0667aa10bcca99":[28,0,0,222], +"namespaceop.html#a790dea3c007bed742fbc8cdd5757d026":[28,0,0,135], +"namespaceop.html#a7a815e303884fb2b3346c8cc19d61b23":[28,0,0,386], +"namespaceop.html#a7ac10b9f503668695643c366e25f3b68":[28,0,0,197], +"namespaceop.html#a7b9bcb57dd8488ade8ea288342eaed08":[28,0,0,371], +"namespaceop.html#a7cd131c9ddd8f3987508e89e0881b9e0":[28,0,0,344], +"namespaceop.html#a7eb0121791185c13a6c3dd88994e0eab":[28,0,0,380], +"namespaceop.html#a7ecfc02dca25534a071acf3136ff175e":[28,0,0,164], +"namespaceop.html#a82471a2af285bada830bac3c95a8440b":[28,0,0,347], +"namespaceop.html#a825f15fdf9dc9cb7473c20f970f15b60":[28,0,0,236], +"namespaceop.html#a8264a6feec695adef80d40940863d511":[28,0,0,255], +"namespaceop.html#a838b69fead43c8a848d059b5f9d63baf":[28,0,0,331], +"namespaceop.html#a844c35ea57a8bc67f33f49deb5070652":[28,0,0,282], +"namespaceop.html#a84730c1ab201fe836fe87787589af88a":[28,0,0,270], +"namespaceop.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7":[28,0,0,307], +"namespaceop.html#a8525e440d6ac1b558e72637dc4f4e3c4":[28,0,0,392], +"namespaceop.html#a8587bab6b02056384b7c424555cd50d8":[28,0,0,362], +"namespaceop.html#a858f70fa9d84ad85c60f19a2229ebbde":[28,0,0,279], +"namespaceop.html#a863c96f1fb23d96c5d605867cfe5f99f":[28,0,0,305], 
+"namespaceop.html#a865a4cd0ba3b596667dc7242756837bd":[28,0,0,168], +"namespaceop.html#a865db81a5bc4f81cf9fc7c7f3ce81be3":[28,0,0,353], +"namespaceop.html#a8664658afa7be03e173cec9eff2873ad":[28,0,0,349], +"namespaceop.html#a871a61f08021460e0f24f51583546a75":[28,0,0,322], +"namespaceop.html#a8982332c4263696d0e023997f0e4c753":[28,0,0,365], +"namespaceop.html#a89984557f6968584d1938afe7b9f32bd":[28,0,0,216], +"namespaceop.html#a8a05bdc38612c38e28b96bba5b4679b8":[28,0,0,405], +"namespaceop.html#a8c9d3469086a12607b097731848b6dea":[28,0,0,374], +"namespaceop.html#a8e377d8da8f109cb8be8e4edbb2ea90a":[28,0,0,302], +"namespaceop.html#a8ec109805adf02f9872a6af37d602caa":[28,0,0,325], +"namespaceop.html#a9076fc1719030c2a74f21682999d2315":[28,0,0,179], +"namespaceop.html#a91dd00cbb8fb646e6612455eb0f1b3e9":[28,0,0,332], +"namespaceop.html#a927468f6931ddb1e7d1e6e6e59b8bd36":[28,0,0,221], +"namespaceop.html#a9275c58ba881ea94e054117392a67381":[28,0,0,360], +"namespaceop.html#a92e8cd01741c90fbfdfaa33a13803f34":[28,0,0,162], +"namespaceop.html#a96a81e831f8c965825162dba09095477":[28,0,0,308], +"namespaceop.html#a96d1720ea5d160cfd4c8404060a9bebd":[28,0,0,239], +"namespaceop.html#a970a2a768a2ace81605b1558c9fdec18":[28,0,0,157], +"namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a05318bd0215d16e009798570b53755d2":[28,0,0,157,1], +"namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a29c2c02a361c9d7028472e5d92cd4a54":[28,0,0,157,3], +"namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a324118a6721dd6b8a9b9f4e327df2bf5":[28,0,0,157,0], +"namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a6f6cb72d544962fa333e2e34ce64f719":[28,0,0,157,4], +"namespaceop.html#a970a2a768a2ace81605b1558c9fdec18aa52d6088cbae537944827c8f8c69c570":[28,0,0,157,2], +"namespaceop.html#a971a7caa96be5b715b5c22f6e5dc6ad1":[28,0,0,284], +"namespaceop.html#a97b053019720782f2f81bc1b41f036d6":[28,0,0,366], +"namespaceop.html#a99a08148f440bd96546076e15f0de04c":[28,0,0,358], +"namespaceop.html#a9b4b92c621cc5962a72898899d2f2534":[28,0,0,404], +"namespaceop.html#a9d121f33179e41075f4602eb6527e658":[28,0,0,227], +"namespaceop.html#a9f14054fbf4e63fc85d10c83f2f9ecb7":[28,0,0,320], +"namespaceop.html#a9f4b99449c0c73e2c89ee1a1eff007c7":[28,0,0,245], +"namespaceop.html#a9f585930a5246e4a9a70145fa8763447":[28,0,0,248], +"namespaceop.html#aa053f4b0533d9e981aa171a1ef57fc30":[28,0,0,274], +"namespaceop.html#aa1225091307f8d0bf07dd032389f8961":[28,0,0,357], +"namespaceop.html#aa3a3e2acfb27ecbd187d01c8dcd41899":[28,0,0,326], +"namespaceop.html#aa65c081c13e0d0453938a3c41d04dc49":[28,0,0,169], +"namespaceop.html#aa6701cc08e1a8651798ef3bf8437375b":[28,0,0,400], +"namespaceop.html#aa72861fea0671209aca1ea5fa385891a":[28,0,0,330], +"namespaceop.html#aa7803aa62abc21471e7d966bd674a81a":[28,0,0,354], +"namespaceop.html#aa7f93261bd6d87f86c45e933607a0678":[28,0,0,170], +"namespaceop.html#aa8cc53d2fe5353f9d87d50c32a8c1a95":[28,0,0,414], +"namespaceop.html#aa9366cf1b4ac3494965749eeb5537da1":[28,0,0,295], +"namespaceop.html#aaad222b087dd041c35de2f3414c1a01f":[28,0,0,285], +"namespaceop.html#aaada2594361f6f929af5b1f9d50387eb":[28,0,0,167], +"namespaceop.html#aaafe2e235a1a3a146bb026b71c521c7b":[28,0,0,394], +"namespaceop.html#aab3de911b04b96c1850cc05c6947e184":[28,0,0,299], +"namespaceop.html#aabfd35e57744b44481c09f56c90cc8b8":[28,0,0,303], +"namespaceop.html#aaca98fe6101cda512a43c513182ae5cc":[28,0,0,214], +"namespaceop.html#aacf6e688031bb116e4878b811e8dbc23":[28,0,0,337], +"namespaceop.html#aae9e38fa6c56e188b4f649732f0d4cd3":[28,0,0,378], 
+"namespaceop.html#aaec4a34b015f898d28be2b9f2aba0d38":[28,0,0,333], +"namespaceop.html#aaee32c4c68404e5086844bcb911b7a20":[28,0,0,200], +"namespaceop.html#aaf7199f3821a6f954cfae134ec8c7e19":[28,0,0,264], +"namespaceop.html#aafac1158605748694e3c3ed4eb34b3b7":[28,0,0,372], +"namespaceop.html#aaff52f436911aa17bebb999cd91a44fd":[28,0,0,161], +"namespaceop.html#ab0908bcc0abb00c49ecbe7fc373b58c9":[28,0,0,343], +"namespaceop.html#ab1e242b1ae7ff3300324fbfedebb52fc":[28,0,0,209], +"namespaceop.html#ab38ea91ef7b7dad700d8e4a4654d48f5":[28,0,0,259], +"namespaceop.html#ab5b47f0069e9f397ff891194b20d28f2":[28,0,0,183], +"namespaceop.html#ab5eb10c958f3f37fb82d29361ad81467":[28,0,0,338], +"namespaceop.html#ab670c693d8e4a540cfe75ce8383b6d10":[28,0,0,294], +"namespaceop.html#ab71596bc88b87ea5920f19f978d6d6ac":[28,0,0,342], +"namespaceop.html#ab8b2748a5bcf823e59b66549e6a24cfe":[28,0,0,351], +"namespaceop.html#abb49286241ba7a1d754b31dee333274a":[28,0,0,301], +"namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774":[28,0,0,152], +"namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3b6cff57206f4ce645622b2e55f784a6":[28,0,0,152,4], +"namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3ebbca1b84060b0caaf823639739945d":[28,0,0,152,0], +"namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a63eacc5ed21c0ecb8bc583e10dc3ae58":[28,0,0,152,3], +"namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a6f6cb72d544962fa333e2e34ce64f719":[28,0,0,152,5], +"namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a9ffbd422925a6839ee820ddbc59278c5":[28,0,0,152,1], +"namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774af1a42bd417390fc63b030a519624607a":[28,0,0,152,2], +"namespaceop.html#abd0ef2306478c3295283e7f1b59e3aff":[28,0,0,409], +"namespaceop.html#abdedc8f1fd2f723dae5bb8ff20b93a93":[28,0,0,287], +"namespaceop.html#abe3f4d783191416b8e62e54c953fe36b":[28,0,0,258], +"namespaceop.html#abf3a59fc4662f07e6ba19b95bd4da32f":[28,0,0,182], +"namespaceop.html#ac0230b669b296920c0cfc41b7587268f":[28,0,0,151], +"namespaceop.html#ac0230b669b296920c0cfc41b7587268fa0123c3afc0fac5edaf8b1672cb12626c":[28,0,0,151,0], +"namespaceop.html#ac0230b669b296920c0cfc41b7587268fa6bcd0f3b66e42d1aacd18d1c3b532473":[28,0,0,151,1], +"namespaceop.html#ac06eeab84c4861ef08834855b48750a6":[28,0,0,177], +"namespaceop.html#ac1080e627185a65b88ec788184a95552":[28,0,0,373], +"namespaceop.html#ac13af59538bcb8a1709f20010681d1c7":[28,0,0,420], +"namespaceop.html#ac1737c19228b83a5e93ae51e5d9556eb":[28,0,0,283], +"namespaceop.html#ac1f4b95440d2fb57fc715558d039b947":[28,0,0,244], +"namespaceop.html#ac5fc565b24e499e306ca170b9139eeb6":[28,0,0,377], +"namespaceop.html#ac74cba4141f2bee2b9d94dc171029a73":[28,0,0,290], +"namespaceop.html#ac7bbf63b37bf6762c47557ad227e036d":[28,0,0,398], +"namespaceop.html#ac968b1c98c60b74be78225be27805706":[28,0,0,272], +"namespaceop.html#ac9af122ccd8dcdafb11e37b6633245b4":[28,0,0,292], +"namespaceop.html#acc4a5460e02ae510e854724513eea822":[28,0,0,397], +"namespaceop.html#acc650faa23df88ca16a09a2d2a522960":[28,0,0,323], +"namespaceop.html#acd8cab258d7e98affa5c317a9a03e862":[28,0,0,306], +"namespaceop.html#ace4af20d19066df9ec502c5a09097c24":[28,0,0,261], +"namespaceop.html#ace6c48833ba117b7d036179bdaf31a7a":[28,0,0,271], +"namespaceop.html#acf638f00b0a825c05683f8e23942a9d5":[28,0,0,273], +"namespaceop.html#ad0069d4c6204b35893f4158d04d615f1":[28,0,0,359], +"namespaceop.html#ad22c543a4376e943b728e657fab5ed9f":[28,0,0,198], +"namespaceop.html#ad3b02ca66d11f4129372f4a9f98c6437":[28,0,0,250], +"namespaceop.html#ad5495d8c6a65afbedef3af7a8844bfcc":[28,0,0,367], 
+"namespaceop.html#ad5e1c975a1b7dce9b02bc8cdf3d45a01":[28,0,0,286], +"namespaceop.html#ad72abbc7b2600f543e4ee8e28392711e":[28,0,0,228], +"namespaceop.html#ad7ca8d89f9045481075902c8bd98b8f4":[28,0,0,309], +"namespaceop.html#ad86d86621b1f485f261d620373748ed1":[28,0,0,268], +"namespaceop.html#ad9b7765a4396ee4470585ded07285563":[28,0,0,262], +"namespaceop.html#adb26da2c52486e926d98471b5387c7e1":[28,0,0,280], +"namespaceop.html#adb8ffc1a6a2cc2949d80d8e8ad4e2190":[28,0,0,364], +"namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7f":[28,0,0,144], +"namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa3c1472839b807c90abff3c7c36dff458":[28,0,0,144,2], +"namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa6f6cb72d544962fa333e2e34ce64f719":[28,0,0,144,3], +"namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa7982b09a852b37f2afb1227eaf552e47":[28,0,0,144,1], +"namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa8b95dcff7397d0693c03e394af5552aa":[28,0,0,144,0], +"namespaceop.html#adc43fb9031418e7f8112816a3b535d14":[28,0,0,150], +"namespaceop.html#adc43fb9031418e7f8112816a3b535d14a28d0edd045e05cf5af64e35ae0c4c6ef":[28,0,0,150,1], +"namespaceop.html#adc43fb9031418e7f8112816a3b535d14a655d20c1ca69519ca647684edbb2db35":[28,0,0,150,3], +"namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6a061313d22e51e0f25b7cd4dc065233":[28,0,0,150,4], +"namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6adf97f83acf6453d4a6a4b1070f3754":[28,0,0,150,0], +"namespaceop.html#adc43fb9031418e7f8112816a3b535d14a828d496739024f4af00df1e277d96ebd":[28,0,0,150,5], +"namespaceop.html#adc43fb9031418e7f8112816a3b535d14a960b44c579bc2f6818d2daaf9e4c16f0":[28,0,0,150,2], +"namespaceop.html#add981a5f6a49d35cc316a54c613497f3":[28,0,0,213], +"namespaceop.html#ade3b2e4b105242a3cf41def3def1691d":[28,0,0,187], +"namespaceop.html#ade70b024ee461ae04e7233bf3937c5c6":[28,0,0,314], +"namespaceop.html#adfc12925650978828707c1c0dcbebd0e":[28,0,0,186], +"namespaceop.html#ae01dd412590493f5f732594e8332d3f0":[28,0,0,348], +"namespaceop.html#ae0730c6559abdb976423ecf81eac4620":[28,0,0,312], +"namespaceop.html#ae0e92a0d8867d1b02f1c43ae4c0c9e09":[28,0,0,316], +"namespaceop.html#ae0fea41041a70ae8449a77f46ffe8100":[28,0,0,324], +"namespaceop.html#ae37c577c1054c89da4a6736342d491aa":[28,0,0,410], +"namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6":[28,0,0,139], +"namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a105036ef087117869f656cd72bfd8dd6":[28,0,0,139,1], +"namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a28b652e57d2da6b7c939166be21efd9a":[28,0,0,139,0], +"namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a3bd9369403112127ae7db2f866002be2":[28,0,0,139,2], +"namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a442304e26339521bc296bdc47ff5fddf":[28,0,0,139,4], +"namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6ae18221460ca8434295f980225fd6a91b":[28,0,0,139,3], +"namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7e":[28,0,0,137], +"namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea55eeca17b45365c188d0edbd35f6e0c3":[28,0,0,137,3], +"namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea65f6036bfc9798ce230c5d8567551315":[28,0,0,137,2], +"namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea9ec8e4e3ab4c7eeba097f27d7364d743":[28,0,0,137,1], +"namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7eaeed8d85b888a6c015834240885ee6333":[28,0,0,137,0], +"namespaceop.html#ae5cc3e92ffd9696f01ce7824ebbd0759":[28,0,0,194], +"namespaceop.html#ae5d883da8c8f11356d5e1b61bc3a99b6":[28,0,0,224], +"namespaceop.html#ae5dac6cf1ccdf461838f9795be8fda03":[28,0,0,176], 
+"namespaceop.html#ae7636f6e8974ecb2ed96d43dd5ec261d":[28,0,0,304], +"namespaceop.html#ae76afeeeaedaebe6941f41a4bdf50e2a":[28,0,0,210], +"namespaceop.html#ae80a103d8a4308bc435342b3d31404c8":[28,0,0,381], +"namespaceop.html#ae88e9ced5d14fa221205b492ff76c56b":[28,0,0,189], +"namespaceop.html#aebff78a4cfbef1cf1b2e03066d88564c":[28,0,0,298], +"namespaceop.html#aed0d108f5ada623eeb0ed41f896f8e97":[28,0,0,411], +"namespaceop.html#aed964859fbd282bd29f2b818a3bf10dd":[28,0,0,240], +"namespaceop.html#aed9ab5282e3e60f22dc11c301af897e6":[28,0,0,251], +"namespaceop.html#aee90a0429c2d14da0c3a85cd67a17821":[28,0,0,175], +"namespaceop.html#af42afa53c725d556c14928b2603883e3":[28,0,0,184], +"namespaceop.html#af45cddacd69fff73a4ea4acbbbac43e0":[28,0,0,421], +"namespaceop.html#af46e80e6bac0f815006759df4c9d00c3":[28,0,0,178], +"namespaceop.html#af548fe1a2ad2b392a25afe9b0b87b8dd":[28,0,0,388], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261f":[28,0,0,148], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa003cc3795b0eeed2af2dfd34ed482794":[28,0,0,148,10], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa08956a1731b54bbdce3f97f1361efc23":[28,0,0,148,12], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0b93cfdf906412bd7c8560ccd180cec6":[28,0,0,148,13], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0c4a19d9254adcb3ca1f0f527ee141fd":[28,0,0,148,1], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa372b9885bba8bc32ad323fffcf99e39e":[28,0,0,148,11], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa529c87ac399e5fd6f0fa4a360c032568":[28,0,0,148,9], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa66ae79a5ac5fa502ae8bbecd3e07e71c":[28,0,0,148,8], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa6f6cb72d544962fa333e2e34ce64f719":[28,0,0,148,15], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa71e915c88449606c6498d33dd7c98e84":[28,0,0,148,3], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9a87ca5ab7b20c2bd4f8d5379956e6f6":[28,0,0,148,0], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9c6c21b2b0a410880f46637db622e392":[28,0,0,148,6], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faca4c7eb29b1f3402e78aa384ce8fd5a9":[28,0,0,148,5], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261facfbe6a39619f4ca5a1fa2db000a17e0d":[28,0,0,148,14] +}; diff --git a/web/html/doc/navtreeindex9.js b/web/html/doc/navtreeindex9.js new file mode 100644 index 000000000..fa6cd9406 --- /dev/null +++ b/web/html/doc/navtreeindex9.js @@ -0,0 +1,253 @@ +var NAVTREEINDEX9 = +{ +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fad788fbec25069f2884ee1ed97e0af2b9":[28,0,0,148,2], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fae3ae2003e0e0458bdc49480fb19c876e":[28,0,0,148,4], +"namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faef29c97ffaed7b0d41ee9bb0d20550cc":[28,0,0,148,7], +"namespaceop.html#af5ec8b7e6271798cbd09475766c64d2f":[28,0,0,253], +"namespaceop.html#af63e418966741f7efebacc9519174a0a":[28,0,0,396], +"namespaceop.html#af65a4564afcad06b72468679f6bee52b":[28,0,0,383], +"namespaceop.html#af65d1b7c5b708f30780e4b2bcfccedcb":[28,0,0,370], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bda":[28,0,0,155], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa46f9a0da0a5d448fd0cc8b3aa0a9b228":[28,0,0,155,0], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa4b942544cb3e764bbb8d33f8a8744855":[28,0,0,155,3], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa6089ccf7c3fe93a62745e51200419c60":[28,0,0,155,8], 
+"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa668a2bc599fd07445eae0730d043c96d":[28,0,0,155,1], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa73c42013aac51c335d50d103f30fcb99":[28,0,0,155,2], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaa93f121640d609f8772397a0f40f40d6":[28,0,0,155,7], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaab7e7b2beae3435e73021d6d9a6a3fd8a":[28,0,0,155,5], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaee080e43c505aa85cdda0e480b0abc06":[28,0,0,155,6], +"namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaafa90ddb034be42f1cdf13a6829eed2ad":[28,0,0,155,4], +"namespaceop.html#af98c8e514e79d4718fb1fc64dc0e431b":[28,0,0,171], +"namespaceop.html#af9c189f7c80092570699c8b9d5686fea":[28,0,0,375], +"namespaceop.html#af9e0d9e4028c0589b5eeeaed42a5088c":[28,0,0,215], +"namespaceop.html#afb5b711819f94b51f32460861d9cea38":[28,0,0,352], +"namespaceop.html#afce557f02e337e16150d00bdf72ec033":[28,0,0,154], +"namespaceop.html#afce557f02e337e16150d00bdf72ec033a06b9281e396db002010bde1de57262eb":[28,0,0,154,1], +"namespaceop.html#afce557f02e337e16150d00bdf72ec033a3432ca64f06615abf07ab44c10cada38":[28,0,0,154,3], +"namespaceop.html#afce557f02e337e16150d00bdf72ec033a54c82ef76ecbbd4c2293e09bae01b54e":[28,0,0,154,2], +"namespaceop.html#afce557f02e337e16150d00bdf72ec033a6adf97f83acf6453d4a6a4b1070f3754":[28,0,0,154,0], +"namespaceop.html#afdf2dd76cbae54789a139d9415790f82":[28,0,0,249], +"namespaceop_1_1_configure_error.html":[28,0,0,0], +"namespaceop_1_1_configure_error.html#a96e56b0ddbe2cb17443b93aaba05d672":[28,0,0,0,1], +"namespaceop_1_1_configure_error.html#ae8dbbccc9a2ca8a4670716ac5fdd8d53":[28,0,0,0,0], +"namespaceop_1_1_configure_log.html":[28,0,0,1], +"namespaceop_1_1_configure_log.html#a0e5c3fad2ace3eb129dd1d97afd59558":[28,0,0,1,1], +"namespaceop_1_1_configure_log.html#a149393c3c87c82a5cf14417c6b430d30":[28,0,0,1,3], +"namespaceop_1_1_configure_log.html#a2f41e9a74bbda434ef16189c32a13aba":[28,0,0,1,2], +"namespaceop_1_1_configure_log.html#a5ab07ae8c026e4f7782a113778d9082d":[28,0,0,1,0], +"namespaces.html":[28,0], +"net_2headers_8hpp.html":[30,0,1,0,8,2], +"net_2headers_8hpp_source.html":[30,0,1,0,8,2], +"net_8hpp.html":[30,0,1,0,8,5], +"net_8hpp_source.html":[30,0,1,0,8,5], +"net_caffe_8hpp.html":[30,0,1,0,8,6], +"net_caffe_8hpp_source.html":[30,0,1,0,8,6], +"net_open_cv_8hpp.html":[30,0,1,0,8,7], +"net_open_cv_8hpp_source.html":[30,0,1,0,8,7], +"nms_base_8hpp.html":[30,0,1,0,8,8], +"nms_base_8hpp.html#a28c5ac530845231600fb93c0be44ad6d":[30,0,1,0,8,8,1], +"nms_base_8hpp.html#a37dce2abad2568d7664654e4598002af":[30,0,1,0,8,8,2], +"nms_base_8hpp.html#a6a97f255cc323f1c1babe4c598727196":[30,0,1,0,8,8,0], +"nms_base_8hpp_source.html":[30,0,1,0,8,8], +"nms_caffe_8hpp.html":[30,0,1,0,8,9], +"nms_caffe_8hpp_source.html":[30,0,1,0,8,9], +"op_output_to_cv_mat_8hpp.html":[30,0,1,0,2,13], +"op_output_to_cv_mat_8hpp_source.html":[30,0,1,0,2,13], +"open_cv_8hpp.html":[30,0,1,0,14,8], +"open_cv_8hpp.html#a0e60b0e4e89a7f08de54ad40c2d46a60":[30,0,1,0,14,8,0], +"open_cv_8hpp.html#a1910d9f194831570be6ffe683209e7b3":[30,0,1,0,14,8,14], +"open_cv_8hpp.html#a24ebdcb8395dea0429f220de6a715d6e":[30,0,1,0,14,8,10], +"open_cv_8hpp.html#a264496927e7b331ad628d7dc4a683194":[30,0,1,0,14,8,2], +"open_cv_8hpp.html#a289d19386824250545f248a79aed283c":[30,0,1,0,14,8,6], +"open_cv_8hpp.html#a4059a24a786c4f2def977715dd2e6747":[30,0,1,0,14,8,4], +"open_cv_8hpp.html#a532d08cb2ef011f9cad29c01d3431d6e":[30,0,1,0,14,8,13], +"open_cv_8hpp.html#a5f85de4dca2733d03470d42617f83d4e":[30,0,1,0,14,8,9], 
+"open_cv_8hpp.html#a71866b00e7d1077137094f78ec83b62b":[30,0,1,0,14,8,3], +"open_cv_8hpp.html#a75c4194e0eae0ef28c6829def462dad2":[30,0,1,0,14,8,12], +"open_cv_8hpp.html#a84730c1ab201fe836fe87787589af88a":[30,0,1,0,14,8,7], +"open_cv_8hpp.html#aaf7199f3821a6f954cfae134ec8c7e19":[30,0,1,0,14,8,1], +"open_cv_8hpp.html#ace6c48833ba117b7d036179bdaf31a7a":[30,0,1,0,14,8,8], +"open_cv_8hpp.html#ad86d86621b1f485f261d620373748ed1":[30,0,1,0,14,8,5], +"open_cv_8hpp.html#af65d1b7c5b708f30780e4b2bcfccedcb":[30,0,1,0,14,8,11], +"open_cv_8hpp_source.html":[30,0,1,0,14,8], +"pages.html":[], +"people_json_saver_8hpp.html":[30,0,1,0,4,10], +"people_json_saver_8hpp_source.html":[30,0,1,0,4,10], +"person_id_extractor_8hpp.html":[30,0,1,0,12,1], +"person_id_extractor_8hpp_source.html":[30,0,1,0,12,1], +"person_tracker_8hpp.html":[30,0,1,0,12,2], +"person_tracker_8hpp_source.html":[30,0,1,0,12,2], +"point_8hpp.html":[30,0,1,0,2,14], +"point_8hpp_source.html":[30,0,1,0,2,14], +"pointer_container_8hpp.html":[30,0,1,0,14,9], +"pointer_container_8hpp.html#a02164ca0af9e838190f584f5d1d8465e":[30,0,1,0,14,9,2], +"pointer_container_8hpp_source.html":[30,0,1,0,14,9], +"pose_2enum_classes_8hpp.html":[30,0,1,0,9,0], +"pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0e":[30,0,1,0,9,0,1], +"pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea04576b26f5dc3637bf3c8168fba1641d":[30,0,1,0,9,0,1,4], +"pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea240f10f3a39507d858c743971fd4298f":[30,0,1,0,9,0,1,2], +"pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,9,0,1,5], +"pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea7bf312724768faebba41ca3585a91f19":[30,0,1,0,9,0,1,3], +"pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea83be5d7f6f29b19cf24f7393551c0439":[30,0,1,0,9,0,1,0], +"pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0eaf7405796a5c90a93fc3c8ffa89eb432d":[30,0,1,0,9,0,1,1], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261f":[30,0,1,0,9,0,0], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa003cc3795b0eeed2af2dfd34ed482794":[30,0,1,0,9,0,0,10], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa08956a1731b54bbdce3f97f1361efc23":[30,0,1,0,9,0,0,12], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0b93cfdf906412bd7c8560ccd180cec6":[30,0,1,0,9,0,0,13], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0c4a19d9254adcb3ca1f0f527ee141fd":[30,0,1,0,9,0,0,1], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa372b9885bba8bc32ad323fffcf99e39e":[30,0,1,0,9,0,0,11], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa529c87ac399e5fd6f0fa4a360c032568":[30,0,1,0,9,0,0,9], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa66ae79a5ac5fa502ae8bbecd3e07e71c":[30,0,1,0,9,0,0,8], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,9,0,0,15], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa71e915c88449606c6498d33dd7c98e84":[30,0,1,0,9,0,0,3], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9a87ca5ab7b20c2bd4f8d5379956e6f6":[30,0,1,0,9,0,0,0], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9c6c21b2b0a410880f46637db622e392":[30,0,1,0,9,0,0,6], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261faca4c7eb29b1f3402e78aa384ce8fd5a9":[30,0,1,0,9,0,0,5], 
+"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261facfbe6a39619f4ca5a1fa2db000a17e0d":[30,0,1,0,9,0,0,14], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fad788fbec25069f2884ee1ed97e0af2b9":[30,0,1,0,9,0,0,2], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fae3ae2003e0e0458bdc49480fb19c876e":[30,0,1,0,9,0,0,4], +"pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261faef29c97ffaed7b0d41ee9bb0d20550cc":[30,0,1,0,9,0,0,7], +"pose_2enum_classes_8hpp_source.html":[30,0,1,0,9,0], +"pose_2headers_8hpp.html":[30,0,1,0,9,1], +"pose_2headers_8hpp_source.html":[30,0,1,0,9,1], +"pose_cpu_renderer_8hpp.html":[30,0,1,0,9,2], +"pose_cpu_renderer_8hpp_source.html":[30,0,1,0,9,2], +"pose_extractor_8hpp.html":[30,0,1,0,9,3], +"pose_extractor_8hpp_source.html":[30,0,1,0,9,3], +"pose_extractor_caffe_8hpp.html":[30,0,1,0,9,4], +"pose_extractor_caffe_8hpp_source.html":[30,0,1,0,9,4], +"pose_extractor_net_8hpp.html":[30,0,1,0,9,5], +"pose_extractor_net_8hpp_source.html":[30,0,1,0,9,5], +"pose_gpu_renderer_8hpp.html":[30,0,1,0,9,6], +"pose_gpu_renderer_8hpp_source.html":[30,0,1,0,9,6], +"pose_parameters_8hpp.html":[30,0,1,0,9,7], +"pose_parameters_8hpp.html#a13b86d097fd5f36612e9858e9348ea57":[30,0,1,0,9,7,0], +"pose_parameters_8hpp.html#a307b2c7b1506415a4ba44590fe8a7258":[30,0,1,0,9,7,11], +"pose_parameters_8hpp.html#a3df938ef93037c534c5d342720d5fb70":[30,0,1,0,9,7,14], +"pose_parameters_8hpp.html#a522d4552d2aeabe367f4d3bf371e6b3e":[30,0,1,0,9,7,16], +"pose_parameters_8hpp.html#a54a6c42a42a0a7e539061f5e30abb4bc":[30,0,1,0,9,7,10], +"pose_parameters_8hpp.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7":[30,0,1,0,9,7,7], +"pose_parameters_8hpp.html#a863c96f1fb23d96c5d605867cfe5f99f":[30,0,1,0,9,7,5], +"pose_parameters_8hpp.html#a8e377d8da8f109cb8be8e4edbb2ea90a":[30,0,1,0,9,7,2], +"pose_parameters_8hpp.html#a96a81e831f8c965825162dba09095477":[30,0,1,0,9,7,8], +"pose_parameters_8hpp.html#aab3de911b04b96c1850cc05c6947e184":[30,0,1,0,9,7,1], +"pose_parameters_8hpp.html#aabfd35e57744b44481c09f56c90cc8b8":[30,0,1,0,9,7,3], +"pose_parameters_8hpp.html#aacf6e688031bb116e4878b811e8dbc23":[30,0,1,0,9,7,15], +"pose_parameters_8hpp.html#acd8cab258d7e98affa5c317a9a03e862":[30,0,1,0,9,7,6], +"pose_parameters_8hpp.html#ad7ca8d89f9045481075902c8bd98b8f4":[30,0,1,0,9,7,9], +"pose_parameters_8hpp.html#ade70b024ee461ae04e7233bf3937c5c6":[30,0,1,0,9,7,13], +"pose_parameters_8hpp.html#ae0730c6559abdb976423ecf81eac4620":[30,0,1,0,9,7,12], +"pose_parameters_8hpp.html#ae7636f6e8974ecb2ed96d43dd5ec261d":[30,0,1,0,9,7,4], +"pose_parameters_8hpp_source.html":[30,0,1,0,9,7], +"pose_parameters_render_8hpp.html":[30,0,1,0,9,8], +"pose_parameters_render_8hpp.html#a0065da73d9e649360d458fc670ee0f95":[30,0,1,0,9,8,19], +"pose_parameters_render_8hpp.html#a016abefba53293ed2ffe3a3c3bd88dd0":[30,0,1,0,9,8,30], +"pose_parameters_render_8hpp.html#a04ebdf33bf0ff159d144dab0ebf1c2ce":[30,0,1,0,9,8,14], +"pose_parameters_render_8hpp.html#a0afb6a9782a4ad8bd3ac41bd2436fefc":[30,0,1,0,9,8,20], +"pose_parameters_render_8hpp.html#a11bd7e53698eabe32b69b48708cf7b19":[30,0,1,0,9,8,28], +"pose_parameters_render_8hpp.html#a17cec2005928720d6da0e83ba26cca01":[30,0,1,0,9,8,3], +"pose_parameters_render_8hpp.html#a1b97e47c182baf7de08af03a8ba397e3":[30,0,1,0,9,8,13], +"pose_parameters_render_8hpp.html#a1e4980010228bfd1e9e1387c23a3ab6a":[30,0,1,0,9,8,12], +"pose_parameters_render_8hpp.html#a216b861af0ff0c237be529dc204ed05e":[30,0,1,0,9,8,10], +"pose_parameters_render_8hpp.html#a21fcb98366f6ea8895fc7f527f232db5":[30,0,1,0,9,8,34], 
+"pose_parameters_render_8hpp.html#a253206407787fc26629e6e46f60d7be2":[30,0,1,0,9,8,16], +"pose_parameters_render_8hpp.html#a2c5ec8c89146a0535f4f29f861f4e248":[30,0,1,0,9,8,1], +"pose_parameters_render_8hpp.html#a32e98c9dd9e6f38c597c7924582570d0":[30,0,1,0,9,8,26], +"pose_parameters_render_8hpp.html#a426402ce79f98928f30037da33c2a349":[30,0,1,0,9,8,2], +"pose_parameters_render_8hpp.html#a456b8ce498f455af926215d91f6b6087":[30,0,1,0,9,8,9], +"pose_parameters_render_8hpp.html#a45b08569481c3bf02eceab0d911b2bf6":[30,0,1,0,9,8,0], +"pose_parameters_render_8hpp.html#a497bfbf7fddb6e960565ec70bb6b2ad1":[30,0,1,0,9,8,4], +"pose_parameters_render_8hpp.html#a593bb53120d8db14cab814dfb5d9ed2c":[30,0,1,0,9,8,31], +"pose_parameters_render_8hpp.html#a5afab27fbbebc71b8753a20dd6c9a322":[30,0,1,0,9,8,22], +"pose_parameters_render_8hpp.html#a5f3db3bbb18fe8d978661f3c5417c110":[30,0,1,0,9,8,7], +"pose_parameters_render_8hpp.html#a6be8d3dedaf015f795625d1df19876aa":[30,0,1,0,9,8,15], +"pose_parameters_render_8hpp.html#a7382830f0c24beaea601444cb5962f06":[30,0,1,0,9,8,17], +"pose_parameters_render_8hpp.html#a791ed14d0f2a65f850c94154b996826c":[30,0,1,0,9,8,5], +"pose_parameters_render_8hpp.html#a7987426d997b6b040302d25fd07403ac":[30,0,1,0,9,8,25], +"pose_parameters_render_8hpp.html#a7fdd75b1478d65f11ebc77144662958c":[30,0,1,0,9,8,24], +"pose_parameters_render_8hpp.html#a8b293ab02337be3f90218c5b824ece06":[30,0,1,0,9,8,21], +"pose_parameters_render_8hpp.html#a8cd3d34880f73dc73b2feb28370e86ec":[30,0,1,0,9,8,23], +"pose_parameters_render_8hpp.html#aa257db7f46ddaa7fe838f659b8e5ed66":[30,0,1,0,9,8,8], +"pose_parameters_render_8hpp.html#aaecdba75da05e8bfc90e4393c88ab6e6":[30,0,1,0,9,8,18], +"pose_parameters_render_8hpp.html#abb49286241ba7a1d754b31dee333274a":[30,0,1,0,9,8,29], +"pose_parameters_render_8hpp.html#ae30e7b56c09200d60f05acba38a8bf05":[30,0,1,0,9,8,11], +"pose_parameters_render_8hpp.html#ae37c577c1054c89da4a6736342d491aa":[30,0,1,0,9,8,32], +"pose_parameters_render_8hpp.html#aeb1e2dd8178c15024e372185e2e5cf54":[30,0,1,0,9,8,6], +"pose_parameters_render_8hpp.html#aebff78a4cfbef1cf1b2e03066d88564c":[30,0,1,0,9,8,27], +"pose_parameters_render_8hpp.html#af45cddacd69fff73a4ea4acbbbac43e0":[30,0,1,0,9,8,33], +"pose_parameters_render_8hpp_source.html":[30,0,1,0,9,8], +"pose_renderer_8hpp.html":[30,0,1,0,9,9], +"pose_renderer_8hpp_source.html":[30,0,1,0,9,9], +"pose_triangulation_8hpp.html":[30,0,1,0,0,3], +"pose_triangulation_8hpp_source.html":[30,0,1,0,0,3], +"priority_queue_8hpp.html":[30,0,1,0,11,2], +"priority_queue_8hpp.html#aa65c081c13e0d0453938a3c41d04dc49":[30,0,1,0,11,2,1], +"priority_queue_8hpp_source.html":[30,0,1,0,11,2], +"producer_2enum_classes_8hpp.html":[30,0,1,0,10,1], +"producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332":[30,0,1,0,10,1,2], +"producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332a34e2d1989a1dbf75cd631596133ee5ee":[30,0,1,0,10,1,2,3], +"producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332a54a365e86ee42cff91ca36532c9bbabf":[30,0,1,0,10,1,2,1], +"producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332a6adf97f83acf6453d4a6a4b1070f3754":[30,0,1,0,10,1,2,5], +"producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332ae2faa2a74b6a4134d0b3e84c7c0e2a01":[30,0,1,0,10,1,2,4], +"producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332af40a40a04a078c4449cda2f326d7fb18":[30,0,1,0,10,1,2,2], +"producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332af436d4d7a472ac39a7cb227e3ea24f8d":[30,0,1,0,10,1,2,0], 
+"producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774":[30,0,1,0,10,1,1], +"producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a3b6cff57206f4ce645622b2e55f784a6":[30,0,1,0,10,1,1,4], +"producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a3ebbca1b84060b0caaf823639739945d":[30,0,1,0,10,1,1,0], +"producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a63eacc5ed21c0ecb8bc583e10dc3ae58":[30,0,1,0,10,1,1,3], +"producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a6f6cb72d544962fa333e2e34ce64f719":[30,0,1,0,10,1,1,5], +"producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a9ffbd422925a6839ee820ddbc59278c5":[30,0,1,0,10,1,1,1], +"producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774af1a42bd417390fc63b030a519624607a":[30,0,1,0,10,1,1,2], +"producer_2enum_classes_8hpp.html#ac0230b669b296920c0cfc41b7587268f":[30,0,1,0,10,1,0], +"producer_2enum_classes_8hpp.html#ac0230b669b296920c0cfc41b7587268fa0123c3afc0fac5edaf8b1672cb12626c":[30,0,1,0,10,1,0,0], +"producer_2enum_classes_8hpp.html#ac0230b669b296920c0cfc41b7587268fa6bcd0f3b66e42d1aacd18d1c3b532473":[30,0,1,0,10,1,0,1], +"producer_2enum_classes_8hpp_source.html":[30,0,1,0,10,1], +"producer_2headers_8hpp.html":[30,0,1,0,10,3], +"producer_2headers_8hpp_source.html":[30,0,1,0,10,3], +"producer_8hpp.html":[30,0,1,0,10,6], +"producer_8hpp.html#a6a34909c6c4d79a215f163291111d556":[30,0,1,0,10,6,1], +"producer_8hpp_source.html":[30,0,1,0,10,6], +"profiler_8hpp.html":[30,0,1,0,14,10], +"profiler_8hpp.html#a01dd208c992c8e07623579f77dcfb59b":[30,0,1,0,14,10,6], +"profiler_8hpp.html#a543c2d65f7d0e835513310d83fc08589":[30,0,1,0,14,10,2], +"profiler_8hpp.html#a6211ca30ec696c346d0b3f2c056e05e6":[30,0,1,0,14,10,4], +"profiler_8hpp.html#a774eaef2d2d68028026f52d554a8ba45":[30,0,1,0,14,10,1], +"profiler_8hpp.html#ab0908bcc0abb00c49ecbe7fc373b58c9":[30,0,1,0,14,10,7], +"profiler_8hpp.html#ae0e92a0d8867d1b02f1c43ae4c0c9e09":[30,0,1,0,14,10,5], +"profiler_8hpp.html#ae1f762d7d0c1f5ad10304ef82bd85516":[30,0,1,0,14,10,3], +"profiler_8hpp_source.html":[30,0,1,0,14,10], +"queue_8hpp.html":[30,0,1,0,11,3], +"queue_8hpp.html#aa7f93261bd6d87f86c45e933607a0678":[30,0,1,0,11,3,1], +"queue_8hpp_source.html":[30,0,1,0,11,3], +"queue_base_8hpp.html":[30,0,1,0,11,4], +"queue_base_8hpp_source.html":[30,0,1,0,11,4], +"rectangle_8hpp.html":[30,0,1,0,2,15], +"rectangle_8hpp.html#a7cd131c9ddd8f3987508e89e0881b9e0":[30,0,1,0,2,15,1], +"rectangle_8hpp_source.html":[30,0,1,0,2,15], +"render_face_8hpp.html":[30,0,1,0,3,9], +"render_face_8hpp.html#a5fc85e8500dbeda3b75c1b6ecfac91cd":[30,0,1,0,3,9,0], +"render_face_8hpp.html#ab8b2748a5bcf823e59b66549e6a24cfe":[30,0,1,0,3,9,1], +"render_face_8hpp_source.html":[30,0,1,0,3,9], +"render_hand_8hpp.html":[30,0,1,0,7,9], +"render_hand_8hpp.html#a865db81a5bc4f81cf9fc7c7f3ce81be3":[30,0,1,0,7,9,1], +"render_hand_8hpp.html#afb5b711819f94b51f32460861d9cea38":[30,0,1,0,7,9,0], +"render_hand_8hpp_source.html":[30,0,1,0,7,9], +"render_pose_8hpp.html":[30,0,1,0,9,10], +"render_pose_8hpp.html#a056c64afca17423e038590e4ef2f712b":[30,0,1,0,9,10,0], +"render_pose_8hpp.html#a3ba62b3d5cc275fc1700bf0c5e6bf578":[30,0,1,0,9,10,6], +"render_pose_8hpp.html#a3ceb3476e4154a6e9e06b3613a12c040":[30,0,1,0,9,10,1], +"render_pose_8hpp.html#a9275c58ba881ea94e054117392a67381":[30,0,1,0,9,10,5], +"render_pose_8hpp.html#a99a08148f440bd96546076e15f0de04c":[30,0,1,0,9,10,3], +"render_pose_8hpp.html#aa1225091307f8d0bf07dd032389f8961":[30,0,1,0,9,10,2], 
+"render_pose_8hpp.html#ad0069d4c6204b35893f4158d04d615f1":[30,0,1,0,9,10,4], +"render_pose_8hpp_source.html":[30,0,1,0,9,10], +"renderer_8hpp.html":[30,0,1,0,2,16], +"renderer_8hpp_source.html":[30,0,1,0,2,16], +"resize_and_merge_base_8hpp.html":[30,0,1,0,8,10], +"resize_and_merge_base_8hpp.html#a2f1ef915c8efc724c0bf40f0348f20a2":[30,0,1,0,8,10,4], +"resize_and_merge_base_8hpp.html#a8982332c4263696d0e023997f0e4c753":[30,0,1,0,8,10,1], +"resize_and_merge_base_8hpp.html#a97b053019720782f2f81bc1b41f036d6":[30,0,1,0,8,10,2], +"resize_and_merge_base_8hpp.html#ad5495d8c6a65afbedef3af7a8844bfcc":[30,0,1,0,8,10,3], +"resize_and_merge_base_8hpp.html#adb8ffc1a6a2cc2949d80d8e8ad4e2190":[30,0,1,0,8,10,0], +"resize_and_merge_base_8hpp_source.html":[30,0,1,0,8,10] +}; diff --git a/web/html/doc/net_2headers_8hpp.html b/web/html/doc/net_2headers_8hpp.html new file mode 100644 index 000000000..460c5fd18 --- /dev/null +++ b/web/html/doc/net_2headers_8hpp.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: include/openpose/net/headers.hpp File Reference + + + + + + + + + + + + + +
diff --git a/web/html/doc/net_2headers_8hpp_source.html b/web/html/doc/net_2headers_8hpp_source.html new file mode 100644 index 000000000..21859c0ea --- /dev/null +++ b/web/html/doc/net_2headers_8hpp_source.html @@ -0,0 +1,129 @@
+OpenPose: include/openpose/net/headers.hpp Source File
+headers.hpp, source listing as recoverable from the generated page:
+#ifndef OPENPOSE_NET_HEADERS_HPP
+#define OPENPOSE_NET_HEADERS_HPP
+
+// net module
+#include <openpose/net/net.hpp>
+#include <openpose/net/nmsBase.hpp>
+// (the remaining net-module #include lines were hyperlinks in the page and are not recoverable here)
+
+#endif // OPENPOSE_NET_HEADERS_HPP
diff --git a/web/html/doc/net_8hpp.html b/web/html/doc/net_8hpp.html new file mode 100644 index 000000000..90e77cfae --- /dev/null +++ b/web/html/doc/net_8hpp.html @@ -0,0 +1,118 @@
+OpenPose: include/openpose/net/net.hpp File Reference
+Classes: class op::Net
+Namespaces: op
diff --git a/web/html/doc/net_8hpp_source.html b/web/html/doc/net_8hpp_source.html new file mode 100644 index 000000000..81160a42e --- /dev/null +++ b/web/html/doc/net_8hpp_source.html @@ -0,0 +1,131 @@
+OpenPose: include/openpose/net/net.hpp Source File
+net.hpp, source listing as recoverable from the generated page:
+#ifndef OPENPOSE_NET_NET_HPP
+#define OPENPOSE_NET_NET_HPP
+
+// (one #include line was a hyperlink in the page and is not recoverable here)
+
+namespace op
+{
+    class OP_API Net
+    {
+    public:
+        virtual ~Net(){}
+
+        virtual void initializationOnThread() = 0;
+
+        virtual void forwardPass(const Array<float>& inputData) const = 0;
+
+        virtual std::shared_ptr<ArrayCpuGpu<float>> getOutputBlobArray() const = 0;
+    };
+}
+
+#endif // OPENPOSE_NET_NET_HPP
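The abstract op::Net interface above is the contract that the NetCaffe and NetOpenCv backends added later in this diff implement. Below is a minimal sketch of a custom backend, assuming an OpenPose installation on the include path; DummyNet and its no-op bodies are illustrative and not part of OpenPose, and the aggregate core header used for op::Array and op::ArrayCpuGpu is an assumption.

#include <memory>
#include <openpose/core/common.hpp>   // op::Array, op::ArrayCpuGpu (assumed aggregate header)
#include <openpose/net/net.hpp>

// Hypothetical no-op backend: marks where a real implementation would load
// weights and run inference.
class DummyNet : public op::Net
{
public:
    void initializationOnThread() override
    {
        // A real backend (e.g. NetCaffe, added later in this diff) loads its model here,
        // on the worker thread that will run inference.
    }

    void forwardPass(const op::Array<float>& inputData) const override
    {
        (void)inputData;   // a real backend runs the network on this input blob
    }

    std::shared_ptr<op::ArrayCpuGpu<float>> getOutputBlobArray() const override
    {
        return nullptr;    // a real backend returns the blob holding its network output
    }
};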
diff --git a/web/html/doc/net_caffe_8hpp.html b/web/html/doc/net_caffe_8hpp.html new file mode 100644 index 000000000..07bc6b715 --- /dev/null +++ b/web/html/doc/net_caffe_8hpp.html @@ -0,0 +1,119 @@
+OpenPose: include/openpose/net/netCaffe.hpp File Reference
+Classes: class op::NetCaffe
+Namespaces: op
diff --git a/web/html/doc/net_caffe_8hpp_source.html b/web/html/doc/net_caffe_8hpp_source.html new file mode 100644 index 000000000..8b2073053 --- /dev/null +++ b/web/html/doc/net_caffe_8hpp_source.html @@ -0,0 +1,149 @@
+OpenPose: include/openpose/net/netCaffe.hpp Source File
+netCaffe.hpp, source listing as recoverable from the generated page (hyperlinked member lines, including the DELETE_COPY macro call, restored from the member summary on the same page):
+#ifndef OPENPOSE_NET_NET_CAFFE_HPP
+#define OPENPOSE_NET_NET_CAFFE_HPP
+
+// (one #include line was a hyperlink in the page and is not recoverable here)
+#include <openpose/net/net.hpp>
+
+namespace op
+{
+    class OP_API NetCaffe : public Net
+    {
+    public:
+        NetCaffe(const std::string& caffeProto, const std::string& caffeTrainedModel, const int gpuId = 0,
+                 const bool enableGoogleLogging = true, const std::string& lastBlobName = "net_output");
+
+        virtual ~NetCaffe();
+
+        void initializationOnThread();
+
+        void forwardPass(const Array<float>& inputNetData) const;
+
+        std::shared_ptr<ArrayCpuGpu<float>> getOutputBlobArray() const;
+
+    private:
+        // PIMPL idiom
+        // http://www.cppsamples.com/common-tasks/pimpl.html
+        struct ImplNetCaffe;
+        std::unique_ptr<ImplNetCaffe> upImpl;
+
+        // PIMPL requires DELETE_COPY & destructor, or extra code
+        // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+        DELETE_COPY(NetCaffe);
+    };
+}
+
+#endif // OPENPOSE_NET_NET_CAFFE_HPP
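A brief usage sketch for the class above, assuming OpenPose and its Caffe dependency are built and that op::Array can be constructed from a size vector; the model file paths, the 368x656 input size, and the aggregate core header include are placeholders and assumptions rather than values taken from this diff.

#include <memory>
#include <openpose/core/common.hpp>   // op::Array (assumed aggregate header)
#include <openpose/net/netCaffe.hpp>

int main()
{
    // Placeholder model files; any matching Caffe prototxt/caffemodel pair works here.
    op::NetCaffe netCaffe{"models/pose_deploy.prototxt", "models/pose_model.caffemodel", 0 /*gpuId*/};
    netCaffe.initializationOnThread();                       // loads the network on the calling thread

    op::Array<float> inputNetData({1, 3, 368, 656});         // NCHW input blob (illustrative size)
    netCaffe.forwardPass(inputNetData);                      // runs Caffe inference on the blob
    const auto outputBlob = netCaffe.getOutputBlobArray();   // shared handle to the network output
    return outputBlob == nullptr ? 1 : 0;
}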
diff --git a/web/html/doc/net_open_cv_8hpp.html b/web/html/doc/net_open_cv_8hpp.html new file mode 100644 index 000000000..c56c75b79 --- /dev/null +++ b/web/html/doc/net_open_cv_8hpp.html @@ -0,0 +1,119 @@
+OpenPose: include/openpose/net/netOpenCv.hpp File Reference
+Classes: class op::NetOpenCv
+Namespaces: op
diff --git a/web/html/doc/net_open_cv_8hpp_source.html b/web/html/doc/net_open_cv_8hpp_source.html new file mode 100644 index 000000000..976b56266 --- /dev/null +++ b/web/html/doc/net_open_cv_8hpp_source.html @@ -0,0 +1,148 @@
+OpenPose: include/openpose/net/netOpenCv.hpp Source File
+netOpenCv.hpp, source listing as recoverable from the generated page (hyperlinked member lines, including the DELETE_COPY macro call, restored from the member summary on the same page):
+#ifndef OPENPOSE_NET_NET_OPEN_CV_HPP
+#define OPENPOSE_NET_NET_OPEN_CV_HPP
+
+// (one #include line was a hyperlink in the page and is not recoverable here)
+#include <openpose/net/net.hpp>
+
+namespace op
+{
+    class OP_API NetOpenCv : public Net
+    {
+    public:
+        NetOpenCv(const std::string& caffeProto, const std::string& caffeTrainedModel, const int gpuId = 0);
+
+        virtual ~NetOpenCv();
+
+        void initializationOnThread();
+
+        void forwardPass(const Array<float>& inputNetData) const;
+
+        std::shared_ptr<ArrayCpuGpu<float>> getOutputBlobArray() const;
+
+    private:
+        // PIMPL idiom
+        // http://www.cppsamples.com/common-tasks/pimpl.html
+        struct ImplNetOpenCv;
+        std::unique_ptr<ImplNetOpenCv> upImpl;
+
+        // PIMPL requires DELETE_COPY & destructor, or extra code
+        // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+        DELETE_COPY(NetOpenCv);
+    };
+}
+
+#endif // OPENPOSE_NET_NET_OPEN_CV_HPP
diff --git a/web/html/doc/nms_base_8hpp.html b/web/html/doc/nms_base_8hpp.html new file mode 100644 index 000000000..6bc226bb3 --- /dev/null +++ b/web/html/doc/nms_base_8hpp.html @@ -0,0 +1,125 @@
+OpenPose: include/openpose/net/nmsBase.hpp File Reference
+Namespaces: op
+Functions (all template<typename T>; full signatures in the source listing below): op::nmsCpu, op::nmsGpu, op::nmsOcl
    +
    + + + + diff --git a/web/html/doc/nms_base_8hpp.js b/web/html/doc/nms_base_8hpp.js new file mode 100644 index 000000000..54f22dc8f --- /dev/null +++ b/web/html/doc/nms_base_8hpp.js @@ -0,0 +1,6 @@ +var nms_base_8hpp = +[ + [ "nmsCpu", "nms_base_8hpp.html#a6a97f255cc323f1c1babe4c598727196", null ], + [ "nmsGpu", "nms_base_8hpp.html#a28c5ac530845231600fb93c0be44ad6d", null ], + [ "nmsOcl", "nms_base_8hpp.html#a37dce2abad2568d7664654e4598002af", null ] +]; \ No newline at end of file diff --git a/web/html/doc/nms_base_8hpp_source.html b/web/html/doc/nms_base_8hpp_source.html new file mode 100644 index 000000000..bda54c301 --- /dev/null +++ b/web/html/doc/nms_base_8hpp_source.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/net/nmsBase.hpp Source File + + + + + + + + + + + + + +
    +
+nmsBase.hpp, source listing as recoverable from the generated page:
+#ifndef OPENPOSE_NET_NMS_BASE_HPP
+#define OPENPOSE_NET_NMS_BASE_HPP
+
+// (one #include line was a hyperlink in the page and is not recoverable here)
+
+namespace op
+{
+    template <typename T>
+    void nmsCpu(
+        T* targetPtr, int* kernelPtr, const T* const sourcePtr, const T threshold, const std::array<int, 4>& targetSize,
+        const std::array<int, 4>& sourceSize, const Point<T>& offset);
+
+    // Windows: Cuda functions do not include OP_API
+    template <typename T>
+    void nmsGpu(
+        T* targetPtr, int* kernelPtr, const T* const sourcePtr, const T threshold, const std::array<int, 4>& targetSize,
+        const std::array<int, 4>& sourceSize, const Point<T>& offset);
+
+    // Windows: OpenCL functions do not include OP_API
+    template <typename T>
+    void nmsOcl(
+        T* targetPtr, uint8_t* kernelGpuPtr, uint8_t* kernelCpuPtr, const T* const sourcePtr, const T threshold, const std::array<int, 4>& targetSize,
+        const std::array<int, 4>& sourceSize, const Point<T>& offset, const int gpuID = 0);
+}
+
+#endif // OPENPOSE_NET_NMS_BASE_HPP
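The three templates above expose the same peak-style non-maximum suppression for CPU, CUDA, and OpenCL buffers. The standalone snippet below only illustrates the underlying idea (keep a value if it beats a threshold and all of its neighbours); it does not use the op:: buffer layout, which this page does not document.

#include <cstdio>
#include <vector>

int main()
{
    const int h = 4, w = 5;
    const float threshold = 0.5f;
    const std::vector<float> heatMap = {
        0.1f, 0.2f, 0.1f, 0.0f, 0.0f,
        0.2f, 0.9f, 0.3f, 0.1f, 0.0f,
        0.1f, 0.3f, 0.2f, 0.6f, 0.2f,
        0.0f, 0.1f, 0.2f, 0.3f, 0.1f};

    // A pixel is a peak if it exceeds the threshold and its 4 neighbours.
    for (int y = 1; y < h - 1; ++y)
        for (int x = 1; x < w - 1; ++x)
        {
            const float v = heatMap[y * w + x];
            if (v > threshold
                && v > heatMap[y * w + x - 1] && v > heatMap[y * w + x + 1]
                && v > heatMap[(y - 1) * w + x] && v > heatMap[(y + 1) * w + x])
                std::printf("peak at (%d, %d), score %.2f\n", x, y, v);
        }
    return 0;
}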
diff --git a/web/html/doc/nms_caffe_8hpp.html b/web/html/doc/nms_caffe_8hpp.html new file mode 100644 index 000000000..7574122d4 --- /dev/null +++ b/web/html/doc/nms_caffe_8hpp.html @@ -0,0 +1,118 @@
+OpenPose: include/openpose/net/nmsCaffe.hpp File Reference
+Classes: class op::NmsCaffe< T >
+Namespaces: op
diff --git a/web/html/doc/nms_caffe_8hpp_source.html b/web/html/doc/nms_caffe_8hpp_source.html new file mode 100644 index 000000000..eb16467c2 --- /dev/null +++ b/web/html/doc/nms_caffe_8hpp_source.html @@ -0,0 +1,181 @@
+OpenPose: include/openpose/net/nmsCaffe.hpp Source File
+nmsCaffe.hpp, source listing as recoverable from the generated page (the hyperlinked DELETE_COPY macro call restored from the macro reference on the same page):
+#ifndef OPENPOSE_NET_NMS_CAFFE_HPP
+#define OPENPOSE_NET_NMS_CAFFE_HPP
+
+// (one #include line was a hyperlink in the page and is not recoverable here)
+
+namespace op
+{
+    // It mostly follows the Caffe::layer implementation, so Caffe users can easily use it. However, in order to keep
+    // the compatibility with any generic Caffe version, we keep this 'layer' inside our library rather than in the
+    // Caffe code.
+    template <typename T>
+    class NmsCaffe
+    {
+    public:
+        explicit NmsCaffe();
+
+        virtual ~NmsCaffe();
+
+        virtual void LayerSetUp(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
+
+        virtual void Reshape(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top,
+                             const int maxPeaks, const int outputChannels = -1, const int gpuID = 0);
+
+        virtual inline const char* type() const { return "Nms"; }
+
+        void setThreshold(const T threshold);
+
+        // Empirically gives better results (copied from Matlab original code)
+        void setOffset(const Point<T>& offset);
+
+        virtual void Forward(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
+
+        virtual void Forward_cpu(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
+
+        virtual void Forward_gpu(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
+
+        virtual void Forward_ocl(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
+
+        virtual void Backward_cpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
+                                  const std::vector<ArrayCpuGpu<T>*>& bottom);
+
+        virtual void Backward_gpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
+                                  const std::vector<ArrayCpuGpu<T>*>& bottom);
+
+    private:
+        T mThreshold;
+        Point<T> mOffset;
+        int mGpuID;
+
+        // PIMPL idiom
+        // http://www.cppsamples.com/common-tasks/pimpl.html
+        struct ImplNmsCaffe;
+        std::unique_ptr<ImplNmsCaffe> upImpl;
+
+        // PIMPL requires DELETE_COPY & destructor, or extra code
+        // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+        DELETE_COPY(NmsCaffe);
+    };
+}
+
+#endif // OPENPOSE_NET_NMS_CAFFE_HPP
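A sketch of the Caffe-layer-style call order suggested by the interface above. It assumes the caller already owns the bottom/top blobs (in OpenPose they come from the pose network); the maxPeaks value, the 0.05 threshold, and the (0.5, 0.5) offset are illustrative numbers rather than OpenPose defaults, and the aggregate core header include is an assumption.

#include <vector>
#include <openpose/core/common.hpp>    // op::ArrayCpuGpu, op::Point (assumed aggregate header)
#include <openpose/net/nmsCaffe.hpp>

void runNms(const std::vector<op::ArrayCpuGpu<float>*>& bottom,
            const std::vector<op::ArrayCpuGpu<float>*>& top,
            const int maxPeaks)
{
    op::NmsCaffe<float> nms;
    nms.LayerSetUp(bottom, top);                   // one-time wiring of input/output blobs
    nms.Reshape(bottom, top, maxPeaks);            // outputChannels and gpuID keep their defaults
    nms.setThreshold(0.05f);                       // confidence cut-off (illustrative)
    nms.setOffset(op::Point<float>{0.5f, 0.5f});   // sub-pixel offset (illustrative)
    nms.Forward(bottom, top);                      // the Forward_cpu/_gpu/_ocl variants above are the per-device entry points
}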
diff --git a/web/html/doc/op_output_to_cv_mat_8hpp.html b/web/html/doc/op_output_to_cv_mat_8hpp.html new file mode 100644 index 000000000..75648ae6a --- /dev/null +++ b/web/html/doc/op_output_to_cv_mat_8hpp.html @@ -0,0 +1,118 @@
+OpenPose: include/openpose/core/opOutputToCvMat.hpp File Reference
+Classes: class op::OpOutputToCvMat
+Namespaces: op
diff --git a/web/html/doc/op_output_to_cv_mat_8hpp_source.html b/web/html/doc/op_output_to_cv_mat_8hpp_source.html new file mode 100644 index 000000000..cd86bc15a --- /dev/null +++ b/web/html/doc/op_output_to_cv_mat_8hpp_source.html @@ -0,0 +1,143 @@
+OpenPose: include/openpose/core/opOutputToCvMat.hpp Source File
+opOutputToCvMat.hpp, source listing as recoverable from the generated page (hyperlinked lines restored from the member summary on the same page):
+#ifndef OPENPOSE_CORE_OP_OUTPUT_TO_CV_MAT_HPP
+#define OPENPOSE_CORE_OP_OUTPUT_TO_CV_MAT_HPP
+
+// (one #include line was a hyperlink in the page and is not recoverable here)
+
+namespace op
+{
+    class OP_API OpOutputToCvMat
+    {
+    public:
+        OpOutputToCvMat(const bool gpuResize = false);
+
+        virtual ~OpOutputToCvMat();
+
+        void setSharedParameters(
+            const std::tuple<std::shared_ptr<float*>, std::shared_ptr<bool>, std::shared_ptr<unsigned long long>>& tuple);
+
+        Matrix formatToCvMat(const Array<float>& outputData);
+
+    private:
+        const bool mGpuResize;
+        // Shared variables
+        std::shared_ptr<float*> spOutputImageFloatCuda;
+        std::shared_ptr<unsigned long long> spOutputMaxSize;
+        std::shared_ptr<bool> spGpuMemoryAllocated;
+        // Local variables
+        unsigned char* pOutputImageUCharCuda;
+        unsigned long long mOutputMaxSizeUChar;
+    };
+}
+
+#endif // OPENPOSE_CORE_OP_OUTPUT_TO_CV_MAT_HPP
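A usage sketch for the converter above: it turns OpenPose's rendered float output back into an op::Matrix for display or saving. Where outputData comes from is left abstract here (in the full pipeline it is part of the wrapper's output datum), and the aggregate core header include is an assumption.

#include <openpose/core/common.hpp>            // op::Array, op::Matrix (assumed aggregate header)
#include <openpose/core/opOutputToCvMat.hpp>

op::Matrix renderToMatrix(const op::Array<float>& outputData)
{
    op::OpOutputToCvMat opOutputToCvMat{/*gpuResize=*/false};   // CPU path; no shared CUDA buffers needed
    return opOutputToCvMat.formatToCvMat(outputData);           // wraps the float buffer as a displayable matrix
}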
diff --git a/web/html/doc/open.png b/web/html/doc/open.png new file mode 100644 index 000000000..30f75c7ef Binary files /dev/null and b/web/html/doc/open.png differ
diff --git a/web/html/doc/open_cv_8hpp.html b/web/html/doc/open_cv_8hpp.html new file mode 100644 index 000000000..a998565c3 --- /dev/null +++ b/web/html/doc/open_cv_8hpp.html @@ -0,0 +1,146 @@
+OpenPose: include/openpose/utilities/openCv.hpp File Reference
+Namespaces: op
+Functions (all OP_API; full signatures in the source listing below): op::unrollArrayToUCharCvMat, op::uCharCvMatToFloatPtr, op::resizeGetScaleFactor, op::keepRoiInside, op::transpose, op::rotateAndFlipFrame, op::getCvCapPropFrameCount, op::getCvCapPropFrameFps, op::getCvCapPropFrameWidth, op::getCvCapPropFrameHeight, op::getCvFourcc, op::getCvImwriteJpegQuality, op::getCvImwritePngCompression, op::getCvLoadImageAnydepth, op::getCvLoadImageGrayScale
    +
    + + + + diff --git a/web/html/doc/open_cv_8hpp.js b/web/html/doc/open_cv_8hpp.js new file mode 100644 index 000000000..0d3555662 --- /dev/null +++ b/web/html/doc/open_cv_8hpp.js @@ -0,0 +1,18 @@ +var open_cv_8hpp = +[ + [ "getCvCapPropFrameCount", "open_cv_8hpp.html#a0e60b0e4e89a7f08de54ad40c2d46a60", null ], + [ "getCvCapPropFrameFps", "open_cv_8hpp.html#aaf7199f3821a6f954cfae134ec8c7e19", null ], + [ "getCvCapPropFrameHeight", "open_cv_8hpp.html#a264496927e7b331ad628d7dc4a683194", null ], + [ "getCvCapPropFrameWidth", "open_cv_8hpp.html#a71866b00e7d1077137094f78ec83b62b", null ], + [ "getCvFourcc", "open_cv_8hpp.html#a4059a24a786c4f2def977715dd2e6747", null ], + [ "getCvImwriteJpegQuality", "open_cv_8hpp.html#ad86d86621b1f485f261d620373748ed1", null ], + [ "getCvImwritePngCompression", "open_cv_8hpp.html#a289d19386824250545f248a79aed283c", null ], + [ "getCvLoadImageAnydepth", "open_cv_8hpp.html#a84730c1ab201fe836fe87787589af88a", null ], + [ "getCvLoadImageGrayScale", "open_cv_8hpp.html#ace6c48833ba117b7d036179bdaf31a7a", null ], + [ "keepRoiInside", "open_cv_8hpp.html#a5f85de4dca2733d03470d42617f83d4e", null ], + [ "resizeGetScaleFactor", "open_cv_8hpp.html#a24ebdcb8395dea0429f220de6a715d6e", null ], + [ "rotateAndFlipFrame", "open_cv_8hpp.html#af65d1b7c5b708f30780e4b2bcfccedcb", null ], + [ "transpose", "open_cv_8hpp.html#a75c4194e0eae0ef28c6829def462dad2", null ], + [ "uCharCvMatToFloatPtr", "open_cv_8hpp.html#a532d08cb2ef011f9cad29c01d3431d6e", null ], + [ "unrollArrayToUCharCvMat", "open_cv_8hpp.html#a1910d9f194831570be6ffe683209e7b3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/open_cv_8hpp_source.html b/web/html/doc/open_cv_8hpp_source.html new file mode 100644 index 000000000..017e20666 --- /dev/null +++ b/web/html/doc/open_cv_8hpp_source.html @@ -0,0 +1,162 @@ + + + + + + + +OpenPose: include/openpose/utilities/openCv.hpp Source File + + + + + + + + + + + + + +
    +
+openCv.hpp, source listing as recoverable from the generated page (hyperlinked declarations restored from the function summary on the same page; the Doxygen comment blocks between declarations are not recoverable here):
+#ifndef OPENPOSE_UTILITIES_OPEN_CV_HPP
+#define OPENPOSE_UTILITIES_OPEN_CV_HPP
+
+// (one #include line was a hyperlink in the page and is not recoverable here)
+
+namespace op
+{
+    OP_API void unrollArrayToUCharCvMat(Matrix& matResult, const Array<float>& array);
+
+    OP_API void uCharCvMatToFloatPtr(float* floatPtrImage, const Matrix& matImage, const int normalize);
+
+    OP_API double resizeGetScaleFactor(const Point<int>& initialSize, const Point<int>& targetSize);
+
+    OP_API void keepRoiInside(Rectangle<int>& roi, const int imageWidth, const int imageHeight);
+
+    OP_API void transpose(Matrix& matrix);
+
+    OP_API void rotateAndFlipFrame(Matrix& frame, const double rotationAngle, const bool flipFrame = false);
+
+    OP_API int getCvCapPropFrameCount();
+
+    OP_API int getCvCapPropFrameFps();
+
+    OP_API int getCvCapPropFrameWidth();
+
+    OP_API int getCvCapPropFrameHeight();
+
+    OP_API int getCvFourcc(const char c1, const char c2, const char c3, const char c4);
+
+    OP_API int getCvImwriteJpegQuality();
+
+    OP_API int getCvImwritePngCompression();
+
+    OP_API int getCvLoadImageAnydepth();
+
+    OP_API int getCvLoadImageGrayScale();
+}
+
+#endif // OPENPOSE_UTILITIES_OPEN_CV_HPP
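A sketch exercising a few of the helpers above on a frame that is assumed to have been read elsewhere (for example by an op::Producer); the 640x480 and 368x368 sizes, the ROI values, and the aggregate core header include are illustrative assumptions.

#include <openpose/core/common.hpp>            // op::Matrix, op::Point, op::Rectangle (assumed aggregate header)
#include <openpose/utilities/openCv.hpp>

void prepareFrame(op::Matrix& frame)
{
    // Scale factor needed to fit a 640x480 frame into a 368x368 network input
    const double scale = op::resizeGetScaleFactor(op::Point<int>{640, 480},
                                                  op::Point<int>{368, 368});
    (void)scale;

    // Clamp a region of interest so it never leaves the image
    op::Rectangle<int> roi{600, 400, 100, 100};   // x, y, width, height (assumed field order)
    op::keepRoiInside(roi, 640, 480);

    // Rotate 90 degrees and mirror the frame in place
    op::rotateAndFlipFrame(frame, 90.0, true);
}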
diff --git a/web/html/doc/pages.html b/web/html/doc/pages.html new file mode 100644 index 000000000..55c9a79a5 --- /dev/null +++ b/web/html/doc/pages.html @@ -0,0 +1,132 @@
+OpenPose: Related Pages
    + + + + + + diff --git a/web/html/doc/people_json_saver_8hpp.html b/web/html/doc/people_json_saver_8hpp.html new file mode 100644 index 000000000..3877ba7fe --- /dev/null +++ b/web/html/doc/people_json_saver_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/filestream/peopleJsonSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
peopleJsonSaver.hpp File Reference

Go to the source code of this file.

Classes
    class  op::PeopleJsonSaver

Namespaces
    op
    + + + + diff --git a/web/html/doc/people_json_saver_8hpp_source.html b/web/html/doc/people_json_saver_8hpp_source.html new file mode 100644 index 000000000..32fd4a2bc --- /dev/null +++ b/web/html/doc/people_json_saver_8hpp_source.html @@ -0,0 +1,134 @@ + + + + + + + +OpenPose: include/openpose/filestream/peopleJsonSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
peopleJsonSaver.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list; unrecoverable link-only lines are marked with a comment.)

#ifndef OPENPOSE_FILESTREAM_PEOPLE_JSON_SAVER_HPP
#define OPENPOSE_FILESTREAM_PEOPLE_JSON_SAVER_HPP

// (two include directives were rendered as hyperlinks; their targets are not preserved in this page)

namespace op
{
    class OP_API PeopleJsonSaver  // (any base-class list on this line was a hyperlink and is not preserved)
    {
    public:
        PeopleJsonSaver(const std::string& directoryPath);

        virtual ~PeopleJsonSaver();

        void save(
            const std::vector<std::pair<Array<float>, std::string>>& keypointVector,
            const std::vector<std::vector<std::array<float,3>>>& candidates, const std::string& fileName,
            const bool humanReadable = true) const;
    };
}

#endif // OPENPOSE_FILESTREAM_PEOPLE_JSON_SAVER_HPP
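A minimal sketch of how save() might be driven, assuming a built OpenPose library and the <openpose/headers.hpp> umbrella header; the output folder, the file name, the "pose_keypoints_2d" key and the zero-filled keypoint array are all placeholders.

#include <array>
#include <string>
#include <utility>
#include <vector>
#include <openpose/headers.hpp>

int main()
{
    // Writes one JSON file per call into the given directory (hypothetical path).
    const op::PeopleJsonSaver peopleJsonSaver{"output_json/"};

    // Dummy keypoints: 1 person x 25 parts x (x, y, score), all zeros.
    const op::Array<float> poseKeypoints{std::vector<int>{1, 25, 3}, 0.f};
    const std::vector<std::pair<op::Array<float>, std::string>> keypointVector{
        {poseKeypoints, "pose_keypoints_2d"}};

    // No part candidates in this sketch.
    const std::vector<std::vector<std::array<float, 3>>> candidates{};

    peopleJsonSaver.save(keypointVector, candidates, "frame_000000000000", /*humanReadable*/ true);
    return 0;
}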
    + + + + diff --git a/web/html/doc/person_id_extractor_8hpp.html b/web/html/doc/person_id_extractor_8hpp.html new file mode 100644 index 000000000..2f7459625 --- /dev/null +++ b/web/html/doc/person_id_extractor_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/tracking/personIdExtractor.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
personIdExtractor.hpp File Reference

Go to the source code of this file.

Classes
    class  op::PersonIdExtractor

Namespaces
    op
    + + + + diff --git a/web/html/doc/person_id_extractor_8hpp_source.html b/web/html/doc/person_id_extractor_8hpp_source.html new file mode 100644 index 000000000..d5ebd0487 --- /dev/null +++ b/web/html/doc/person_id_extractor_8hpp_source.html @@ -0,0 +1,145 @@ + + + + + + + +OpenPose: include/openpose/tracking/personIdExtractor.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
personIdExtractor.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list; unrecoverable link-only lines are marked with a comment.)

#ifndef OPENPOSE_TRACKING_PERSON_ID_EXTRACTOR_HPP
#define OPENPOSE_TRACKING_PERSON_ID_EXTRACTOR_HPP

// (one include directive was rendered as a hyperlink; its target is not preserved in this page)

namespace op
{
    class OP_API PersonIdExtractor
    {
    public:
        PersonIdExtractor(const float confidenceThreshold = 0.1f, const float inlierRatioThreshold = 0.5f,
                          const float distanceThreshold = 30.f, const int numberFramesToDeletePerson = 10);

        virtual ~PersonIdExtractor();

        Array<long long> extractIds(const Array<float>& poseKeypoints, const Matrix& cvMatInput,
                                    const unsigned long long imageViewIndex = 0ull);

        Array<long long> extractIdsLockThread(const Array<float>& poseKeypoints, const Matrix& cvMatInput,
                                              const unsigned long long imageViewIndex,
                                              const long long frameId);

    private:
        // PIMPL idiom
        // http://www.cppsamples.com/common-tasks/pimpl.html
        struct ImplPersonIdExtractor;
        std::shared_ptr<ImplPersonIdExtractor> spImpl;

        DELETE_COPY(PersonIdExtractor);  // (hyperlinked line; reconstructed from the DELETE_COPY(className) macro referenced on this page)
    };
}

#endif // OPENPOSE_TRACKING_PERSON_ID_EXTRACTOR_HPP
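A construction and call sketch, assuming <openpose/headers.hpp> and a built library; the zero-filled keypoints and the empty op::Matrix are placeholders (a real call needs the producer's frame and the extractor's keypoints), so the example only shows the API shape.

#include <openpose/headers.hpp>

int main()
{
    // Looser confidence threshold than the 0.1f default; remaining parameters keep their defaults.
    op::PersonIdExtractor personIdExtractor{/*confidenceThreshold*/ 0.05f};

    // Placeholders for what the pose estimator and the producer would normally provide.
    const op::Array<float> poseKeypoints{std::vector<int>{1, 25, 3}, 0.f};  // 1 person x 25 parts x (x, y, score)
    const op::Matrix cvMatInput;                                            // empty placeholder frame

    // One id (long long) per detected person in poseKeypoints.
    const auto poseIds = personIdExtractor.extractIds(poseKeypoints, cvMatInput);
    return poseIds.getVolume() == 1 ? 0 : 1;
}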
    + + + + diff --git a/web/html/doc/person_tracker_8hpp.html b/web/html/doc/person_tracker_8hpp.html new file mode 100644 index 000000000..435ed1c59 --- /dev/null +++ b/web/html/doc/person_tracker_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/tracking/personTracker.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
personTracker.hpp File Reference

Go to the source code of this file.

Classes
    class  op::PersonTracker

Namespaces
    op
    + + + + diff --git a/web/html/doc/person_tracker_8hpp_source.html b/web/html/doc/person_tracker_8hpp_source.html new file mode 100644 index 000000000..0db6f1d09 --- /dev/null +++ b/web/html/doc/person_tracker_8hpp_source.html @@ -0,0 +1,147 @@ + + + + + + + +OpenPose: include/openpose/tracking/personTracker.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
personTracker.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list; unrecoverable link-only lines are marked with a comment.)

#ifndef OPENPOSE_OPENPOSE_PRIVATE_TRACKING_PERSON_TRACKER_HPP
#define OPENPOSE_OPENPOSE_PRIVATE_TRACKING_PERSON_TRACKER_HPP

// (one include directive was rendered as a hyperlink; its target is not preserved in this page)

namespace op
{
    class OP_API PersonTracker
    {
    public:
        PersonTracker(const bool mergeResults, const int levels = 3, const int patchSize = 31,
                      const float confidenceThreshold = 0.05f, const bool trackVelocity = false,
                      const bool scaleVarying = false, const float rescale = 640);

        virtual ~PersonTracker();

        void track(Array<float>& poseKeypoints, Array<long long>& poseIds, const Matrix& cvMatInput);

        void trackLockThread(Array<float>& poseKeypoints, Array<long long>& poseIds, const Matrix& cvMatInput,
                             const long long frameId);

        bool getMergeResults() const;

    private:
        // PIMPL idiom
        // http://www.cppsamples.com/common-tasks/pimpl.html
        struct ImplPersonTracker;
        std::shared_ptr<ImplPersonTracker> spImpl;

        DELETE_COPY(PersonTracker);  // (hyperlinked line; reconstructed from the DELETE_COPY(className) macro referenced on this page)
    };
}

#endif // OPENPOSE_OPENPOSE_PRIVATE_TRACKING_PERSON_TRACKER_HPP
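A call-shape sketch for the tracker, under the same assumptions as the previous examples (<openpose/headers.hpp>, built library, placeholder inputs). The header does not document the tracking algorithm itself, so nothing is claimed about what track() does beyond updating its in/out arguments.

#include <openpose/headers.hpp>

int main()
{
    // mergeResults = true asks the tracker to merge its output with the detector's keypoints each frame.
    op::PersonTracker personTracker{/*mergeResults*/ true};

    // Placeholders for the pose estimator / id extractor outputs and the current frame.
    op::Array<float> poseKeypoints{std::vector<int>{1, 25, 3}, 0.f};
    op::Array<long long> poseIds{std::vector<int>{1}, 0ll};
    const op::Matrix cvMatInput;  // empty placeholder frame

    // track() updates poseKeypoints and poseIds in place for the given frame.
    personTracker.track(poseKeypoints, poseIds, cvMatInput);

    return personTracker.getMergeResults() ? 0 : 1;
}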
    + + + + diff --git a/web/html/doc/point_8hpp.html b/web/html/doc/point_8hpp.html new file mode 100644 index 000000000..38d14ef76 --- /dev/null +++ b/web/html/doc/point_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/core/point.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
point.hpp File Reference

#include <string>
#include <openpose/core/macros.hpp>

Go to the source code of this file.

Classes
    struct  op::Point< T >

Namespaces
    op
    + + + + diff --git a/web/html/doc/point_8hpp_source.html b/web/html/doc/point_8hpp_source.html new file mode 100644 index 000000000..efdb33cdc --- /dev/null +++ b/web/html/doc/point_8hpp_source.html @@ -0,0 +1,231 @@ + + + + + + + +OpenPose: include/openpose/core/point.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
point.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list and this file's File Reference include list; unrecoverable link-only lines are marked with a comment.)

#ifndef OPENPOSE_CORE_POINT_HPP
#define OPENPOSE_CORE_POINT_HPP

#include <string>
#include <openpose/core/macros.hpp>

namespace op
{
    template<typename T>
    struct Point
    {
        T x;
        T y;

        Point(const T x = 0, const T y = 0);

        Point<T>(const Point<T>& point);

        Point<T>& operator=(const Point<T>& point);

        Point<T>(Point<T>&& point);

        Point<T>& operator=(Point<T>&& point);  // (hyperlinked line; signature taken from this page's symbol list)

        inline T area() const
        {
            return x * y;
        }

        std::string toString() const;

        // ------------------------------ Comparison operators ------------------------------ //
        inline bool operator<(const Point<T>& point) const
        {
            return area() < point.area();
        }

        inline bool operator>(const Point<T>& point) const
        {
            return area() > point.area();
        }

        inline bool operator<=(const Point<T>& point) const
        {
            return area() <= point.area();
        }

        inline bool operator>=(const Point<T>& point) const
        {
            return area() >= point.area();
        }

        inline bool operator==(const Point<T>& point) const
        {
            return area() == point.area();
        }

        inline bool operator!=(const Point<T>& point) const
        {
            return area() != point.area();
        }

        // ------------------------------ Basic Operators ------------------------------ //
        Point<T>& operator+=(const Point<T>& point);

        Point<T> operator+(const Point<T>& point) const;

        Point<T>& operator+=(const T value);

        Point<T> operator+(const T value) const;

        Point<T>& operator-=(const Point<T>& point);

        Point<T> operator-(const Point<T>& point) const;

        Point<T>& operator-=(const T value);

        Point<T> operator-(const T value) const;

        Point<T>& operator*=(const T value);

        Point<T> operator*(const T value) const;

        Point<T>& operator/=(const T value);

        Point<T> operator/(const T value) const;
    };

    // Static methods
    // (hyperlinked line not fully preserved; the page's symbol list shows an OVERLOAD_C_OUT(className) macro,
    //  so this is presumably OVERLOAD_C_OUT(Point))
}

#endif // OPENPOSE_CORE_POINT_HPP
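A small sketch of the value semantics above, compiled against the OpenPose library (the template is declared here but instantiated in the library). Note the detail visible in the header itself: the comparison operators compare areas (x*y), not individual coordinates. The scalar operator+ is assumed to offset both coordinates.

#include <iostream>
#include <openpose/core/point.hpp>

int main()
{
    const op::Point<int> resolution{1280, 720};
    const op::Point<int> netInput{656, 368};

    // Arithmetic operators return new points; compound operators modify in place.
    op::Point<int> padded = resolution + 2;   // assumption: adds the scalar to both x and y
    padded -= op::Point<int>{2, 2};           // back to {1280, 720}

    // Comparisons are by area (x*y), as shown in the inline definitions above.
    const bool smaller = netInput < resolution;  // 656*368 < 1280*720

    std::cout << padded.toString() << " area=" << resolution.area()
              << " smaller=" << smaller << std::endl;
    return 0;
}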
    + + + + diff --git a/web/html/doc/pointer_container_8hpp.html b/web/html/doc/pointer_container_8hpp.html new file mode 100644 index 000000000..bb6c1b82e --- /dev/null +++ b/web/html/doc/pointer_container_8hpp.html @@ -0,0 +1,126 @@ + + + + + + + +OpenPose: include/openpose/utilities/pointerContainer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
pointerContainer.hpp File Reference

Go to the source code of this file.

Classes
    class  op::PointerContainerGreater< TDatumsSP >
    class  op::PointerContainerLess< TDatumsSP >

Namespaces
    op

Functions
    template<typename TPointerContainer >
    bool  op::checkNoNullNorEmpty (const TPointerContainer &tPointerContainer)
    + + + + diff --git a/web/html/doc/pointer_container_8hpp.js b/web/html/doc/pointer_container_8hpp.js new file mode 100644 index 000000000..47897f820 --- /dev/null +++ b/web/html/doc/pointer_container_8hpp.js @@ -0,0 +1,6 @@ +var pointer_container_8hpp = +[ + [ "PointerContainerGreater", "classop_1_1_pointer_container_greater.html", "classop_1_1_pointer_container_greater" ], + [ "PointerContainerLess", "classop_1_1_pointer_container_less.html", "classop_1_1_pointer_container_less" ], + [ "checkNoNullNorEmpty", "pointer_container_8hpp.html#a02164ca0af9e838190f584f5d1d8465e", null ] +]; \ No newline at end of file diff --git a/web/html/doc/pointer_container_8hpp_source.html b/web/html/doc/pointer_container_8hpp_source.html new file mode 100644 index 000000000..640706557 --- /dev/null +++ b/web/html/doc/pointer_container_8hpp_source.html @@ -0,0 +1,150 @@ + + + + + + + +OpenPose: include/openpose/utilities/pointerContainer.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
pointerContainer.hpp
Go to the documentation of this file.
(The two class-declaration lines were hyperlinks in the generated page; the class names are taken from this file's File Reference listing above.)

#ifndef OPENPOSE_UTILITIES_POINTER_CONTAINER_HPP
#define OPENPOSE_UTILITIES_POINTER_CONTAINER_HPP

namespace op
{
    template<typename TPointerContainer>
    inline bool checkNoNullNorEmpty(const TPointerContainer& tPointerContainer)
    {
        return (tPointerContainer != nullptr && tPointerContainer->size() > 0);
    }

    template<typename TDatumsSP>
    class PointerContainerGreater
    {
    public:
        bool operator() (const TDatumsSP& a, const TDatumsSP& b)
        {
            if (!b || b->empty())
                return true;
            else if (!a || a->empty())
                return false;
            else
                return *(*a)[0] > *(*b)[0];
        }
    };

    template<typename TDatumsSP>
    class PointerContainerLess
    {
    public:
        bool operator() (const TDatumsSP& a, const TDatumsSP& b)
        {
            if (!b || b->empty())
                return false;
            else if (!a || a->empty())
                return true;
            else
                return *(*a)[0] < *(*b)[0];
        }
    };
}

#endif // OPENPOSE_UTILITIES_POINTER_CONTAINER_HPP
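Because the comparators only require that the container's first element dereference to something comparable, they can be exercised without the rest of OpenPose. The sketch below substitutes std::shared_ptr<int> elements for the usual datum pointers; only the header above is needed.

#include <memory>
#include <queue>
#include <vector>
#include <openpose/utilities/pointerContainer.hpp>

int main()
{
    using TDatum = std::shared_ptr<int>;                   // stand-in for an OpenPose datum pointer
    using TDatums = std::shared_ptr<std::vector<TDatum>>;  // the "pointer container"

    const TDatums a = std::make_shared<std::vector<TDatum>>(1, std::make_shared<int>(1));
    const TDatums b = std::make_shared<std::vector<TDatum>>(1, std::make_shared<int>(2));
    const TDatums empty = nullptr;

    // checkNoNullNorEmpty: non-null and size() > 0.
    const bool okA = op::checkNoNullNorEmpty(a);          // true
    const bool okEmpty = op::checkNoNullNorEmpty(empty);  // false

    // With std::priority_queue, the "greater" comparator yields a min-heap on the first element.
    std::priority_queue<TDatums, std::vector<TDatums>, op::PointerContainerGreater<TDatums>> queue;
    queue.push(b);
    queue.push(a);

    return (okA && !okEmpty && *(*queue.top())[0] == 1) ? 0 : 1;
}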
    + + + + diff --git a/web/html/doc/pose_2enum_classes_8hpp.html b/web/html/doc/pose_2enum_classes_8hpp.html new file mode 100644 index 000000000..3d78f4b6b --- /dev/null +++ b/web/html/doc/pose_2enum_classes_8hpp.html @@ -0,0 +1,149 @@ + + + + + + + +OpenPose: include/openpose/pose/enumClasses.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
enumClasses.hpp File Reference

Go to the source code of this file.

Namespaces
    op

Enumerations
    enum class  op::PoseModel : unsigned char {
        op::BODY_25 = 0, op::COCO_18, op::MPI_15, op::MPI_15_4,
        op::BODY_19, op::BODY_19_X2, op::BODY_19N, op::BODY_25E,
        op::CAR_12, op::BODY_25D, op::BODY_23, op::CAR_22,
        op::BODY_19E, op::BODY_25B, op::BODY_135, op::Size
    }
    enum class  op::PoseProperty : unsigned char {
        op::NMSThreshold = 0, op::ConnectInterMinAboveThreshold, op::ConnectInterThreshold,
        op::ConnectMinSubsetCnt, op::ConnectMinSubsetScore, op::Size
    }
    + + + + diff --git a/web/html/doc/pose_2enum_classes_8hpp.js b/web/html/doc/pose_2enum_classes_8hpp.js new file mode 100644 index 000000000..127ced883 --- /dev/null +++ b/web/html/doc/pose_2enum_classes_8hpp.js @@ -0,0 +1,29 @@ +var pose_2enum_classes_8hpp = +[ + [ "PoseModel", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261f", [ + [ "BODY_25", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9a87ca5ab7b20c2bd4f8d5379956e6f6", null ], + [ "COCO_18", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0c4a19d9254adcb3ca1f0f527ee141fd", null ], + [ "MPI_15", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fad788fbec25069f2884ee1ed97e0af2b9", null ], + [ "MPI_15_4", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa71e915c88449606c6498d33dd7c98e84", null ], + [ "BODY_19", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fae3ae2003e0e0458bdc49480fb19c876e", null ], + [ "BODY_19_X2", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261faca4c7eb29b1f3402e78aa384ce8fd5a9", null ], + [ "BODY_19N", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9c6c21b2b0a410880f46637db622e392", null ], + [ "BODY_25E", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261faef29c97ffaed7b0d41ee9bb0d20550cc", null ], + [ "CAR_12", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa66ae79a5ac5fa502ae8bbecd3e07e71c", null ], + [ "BODY_25D", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa529c87ac399e5fd6f0fa4a360c032568", null ], + [ "BODY_23", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa003cc3795b0eeed2af2dfd34ed482794", null ], + [ "CAR_22", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa372b9885bba8bc32ad323fffcf99e39e", null ], + [ "BODY_19E", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa08956a1731b54bbdce3f97f1361efc23", null ], + [ "BODY_25B", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0b93cfdf906412bd7c8560ccd180cec6", null ], + [ "BODY_135", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261facfbe6a39619f4ca5a1fa2db000a17e0d", null ], + [ "Size", "pose_2enum_classes_8hpp.html#af5b3ce2a5d3de87cb31b9b67e96f261fa6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "PoseProperty", "pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0e", [ + [ "NMSThreshold", "pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea83be5d7f6f29b19cf24f7393551c0439", null ], + [ "ConnectInterMinAboveThreshold", "pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0eaf7405796a5c90a93fc3c8ffa89eb432d", null ], + [ "ConnectInterThreshold", "pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea240f10f3a39507d858c743971fd4298f", null ], + [ "ConnectMinSubsetCnt", "pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea7bf312724768faebba41ca3585a91f19", null ], + [ "ConnectMinSubsetScore", "pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea04576b26f5dc3637bf3c8168fba1641d", null ], + [ "Size", "pose_2enum_classes_8hpp.html#a37c58b781e5bcd9fee67a7768afc5d0ea6f6cb72d544962fa333e2e34ce64f719", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/pose_2enum_classes_8hpp_source.html b/web/html/doc/pose_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..8355c93fc --- /dev/null +++ b/web/html/doc/pose_2enum_classes_8hpp_source.html @@ -0,0 +1,163 @@ + + + + + + + +OpenPose: include/openpose/pose/enumClasses.hpp Source 
File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
enumClasses.hpp
Go to the documentation of this file.
(The PoseProperty enumerator lines were hyperlinks in the generated page; they are filled in from this file's File Reference listing above.)

#ifndef OPENPOSE_POSE_ENUM_CLASSES_HPP
#define OPENPOSE_POSE_ENUM_CLASSES_HPP

namespace op
{
    enum class PoseModel : unsigned char
    {
        BODY_25 = 0,
        COCO_18,
        MPI_15,
        MPI_15_4,
        BODY_19,
        BODY_19_X2,
        BODY_19N,
        BODY_25E,
        CAR_12,
        BODY_25D,
        BODY_23,
        CAR_22,
        BODY_19E,
        BODY_25B,
        BODY_135,
        Size,
    };

    enum class PoseProperty : unsigned char
    {
        NMSThreshold = 0,
        ConnectInterMinAboveThreshold,
        ConnectInterThreshold,
        ConnectMinSubsetCnt,
        ConnectMinSubsetScore,
        Size,
    };
}

#endif // OPENPOSE_POSE_ENUM_CLASSES_HPP
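Both enums are plain unsigned char scoped enums whose trailing Size enumerator can be used for counting or array sizing, as the sketch below shows. Only this header is required; the BODY_25 choice is just the commonly used body model.

#include <iostream>
#include <openpose/pose/enumClasses.hpp>

int main()
{
    // Size counts the enumerators that precede it: 15 pose models, 5 tunable pose properties.
    constexpr auto numberPoseModels = static_cast<unsigned char>(op::PoseModel::Size);
    constexpr auto numberPoseProperties = static_cast<unsigned char>(op::PoseProperty::Size);

    const auto model = op::PoseModel::BODY_25;
    std::cout << "model index " << static_cast<int>(model)
              << " of " << static_cast<int>(numberPoseModels)
              << ", tunable properties: " << static_cast<int>(numberPoseProperties) << std::endl;
    return 0;
}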
    + + + + diff --git a/web/html/doc/pose_2headers_8hpp.html b/web/html/doc/pose_2headers_8hpp.html new file mode 100644 index 000000000..0519890d9 --- /dev/null +++ b/web/html/doc/pose_2headers_8hpp.html @@ -0,0 +1,116 @@ + + + + + + + +OpenPose: include/openpose/pose/headers.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    + + + + + + diff --git a/web/html/doc/pose_2headers_8hpp_source.html b/web/html/doc/pose_2headers_8hpp_source.html new file mode 100644 index 000000000..7c4ed60d7 --- /dev/null +++ b/web/html/doc/pose_2headers_8hpp_source.html @@ -0,0 +1,133 @@ + + + + + + + +OpenPose: include/openpose/pose/headers.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    + + + + + + diff --git a/web/html/doc/pose_cpu_renderer_8hpp.html b/web/html/doc/pose_cpu_renderer_8hpp.html new file mode 100644 index 000000000..9e6380810 --- /dev/null +++ b/web/html/doc/pose_cpu_renderer_8hpp.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/pose/poseCpuRenderer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    poseCpuRenderer.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/pose_cpu_renderer_8hpp_source.html b/web/html/doc/pose_cpu_renderer_8hpp_source.html new file mode 100644 index 000000000..80b3ed14f --- /dev/null +++ b/web/html/doc/pose_cpu_renderer_8hpp_source.html @@ -0,0 +1,150 @@ + + + + + + + +OpenPose: include/openpose/pose/poseCpuRenderer.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
poseCpuRenderer.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list; include directives and any base-class list whose link text was lost are marked with a comment.)

#ifndef OPENPOSE_POSE_POSE_CPU_RENDERER_HPP
#define OPENPOSE_POSE_POSE_CPU_RENDERER_HPP

// (several include directives were rendered as hyperlinks; their targets are not preserved in this page)

namespace op
{
    class OP_API PoseCpuRenderer  // (any base-class list on this line was a hyperlink and is not preserved)
    {
    public:
        PoseCpuRenderer(
            const PoseModel poseModel, const float renderThreshold, const bool blendOriginalFrame = true,
            const float alphaKeypoint = POSE_DEFAULT_ALPHA_KEYPOINT,
            const float alphaHeatMap = POSE_DEFAULT_ALPHA_HEAT_MAP, const unsigned int elementToRender = 0u);

        virtual ~PoseCpuRenderer();

        std::pair<int, std::string> renderPose(
            Array<float>& outputData, const Array<float>& poseKeypoints, const float scaleInputToOutput,
            const float scaleNetToOutput = -1.f);

    private:
        DELETE_COPY(PoseCpuRenderer);  // (hyperlinked line; reconstructed from the DELETE_COPY(className) macro referenced on this page)
    };
}

#endif // OPENPOSE_POSE_POSE_CPU_RENDERER_HPP
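A call-shape sketch for the CPU renderer, assuming <openpose/headers.hpp> and a built library; the frame buffer layout and keypoint array are zero-filled placeholders, and the meaning of the returned std::pair<int, std::string> is treated as opaque status information here.

#include <openpose/headers.hpp>

int main()
{
    // CPU renderer for the 25-keypoint body model; keypoints under the threshold are not drawn.
    op::PoseCpuRenderer poseCpuRenderer{op::PoseModel::BODY_25, /*renderThreshold*/ 0.05f};

    // outputData is the image being drawn on; poseKeypoints would come from a pose extractor.
    op::Array<float> outputData{std::vector<int>{368, 656, 3}, 0.f};        // placeholder frame buffer
    const op::Array<float> poseKeypoints{std::vector<int>{1, 25, 3}, 0.f};  // placeholder keypoints

    // renderPose() draws poseKeypoints onto outputData, scaled by scaleInputToOutput.
    const auto elementRendered = poseCpuRenderer.renderPose(outputData, poseKeypoints, /*scaleInputToOutput*/ 1.f);
    return elementRendered.first >= 0 ? 0 : 1;
}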
    + + + + +
    +
    + + + + diff --git a/web/html/doc/pose_extractor_8hpp.html b/web/html/doc/pose_extractor_8hpp.html new file mode 100644 index 000000000..d6f0ceb0a --- /dev/null +++ b/web/html/doc/pose_extractor_8hpp.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: include/openpose/pose/poseExtractor.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    poseExtractor.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/pose_extractor_8hpp_source.html b/web/html/doc/pose_extractor_8hpp_source.html new file mode 100644 index 000000000..15a556081 --- /dev/null +++ b/web/html/doc/pose_extractor_8hpp_source.html @@ -0,0 +1,207 @@ + + + + + + + +OpenPose: include/openpose/pose/poseExtractor.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
poseExtractor.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list; include directives and any base-class list whose link text was lost are marked with a comment.)

#ifndef OPENPOSE_POSE_POSE_EXTRACTOR_HPP
#define OPENPOSE_POSE_POSE_EXTRACTOR_HPP

// (several include directives were rendered as hyperlinks; their targets are not preserved in this page)

namespace op
{
    class OP_API PoseExtractor
    {
    public:
        PoseExtractor(const std::shared_ptr<PoseExtractorNet>& poseExtractorNet,
                      const std::shared_ptr<KeepTopNPeople>& keepTopNPeople = nullptr,
                      const std::shared_ptr<PersonIdExtractor>& personIdExtractor = nullptr,
                      const std::shared_ptr<std::vector<std::shared_ptr<PersonTracker>>>& personTracker = {},
                      const int numberPeopleMax = -1, const int tracking = -1);

        virtual ~PoseExtractor();

        void initializationOnThread();

        void forwardPass(const std::vector<Array<float>>& inputNetData,
                         const Point<int>& inputDataSize,
                         const std::vector<double>& scaleRatios,
                         const Array<float>& poseNetOutput = Array<float>{},
                         const long long frameId = -1ll);

        // PoseExtractorNet functions
        Array<float> getHeatMapsCopy() const;

        std::vector<std::vector<std::array<float, 3>>> getCandidatesCopy() const;

        Array<float> getPoseKeypoints() const;

        Array<float> getPoseScores() const;

        float getScaleNetToOutput() const;

        // KeepTopNPeople functions
        void keepTopPeople(Array<float>& poseKeypoints, const Array<float>& poseScores) const;

        // PersonIdExtractor functions
        // Not thread-safe
        Array<long long> extractIds(const Array<float>& poseKeypoints, const Matrix& cvMatInput,
                                    const unsigned long long imageIndex = 0ull);

        // Same than extractIds but thread-safe
        Array<long long> extractIdsLockThread(const Array<float>& poseKeypoints, const Matrix& cvMatInput,
                                              const unsigned long long imageIndex,
                                              const long long frameId);

        // PersonTracker functions
        void track(Array<float>& poseKeypoints, Array<long long>& poseIds,
                   const Matrix& cvMatInput, const unsigned long long imageViewIndex = 0ull);

        void trackLockThread(Array<float>& poseKeypoints, Array<long long>& poseIds,
                             const Matrix& cvMatInput,
                             const unsigned long long imageViewIndex,
                             const long long frameId);

    private:
        const int mNumberPeopleMax;
        const int mTracking;
        const std::shared_ptr<PoseExtractorNet> spPoseExtractorNet;
        const std::shared_ptr<KeepTopNPeople> spKeepTopNPeople;
        const std::shared_ptr<PersonIdExtractor> spPersonIdExtractor;
        const std::shared_ptr<std::vector<std::shared_ptr<PersonTracker>>> spPersonTrackers;

        DELETE_COPY(PoseExtractor);  // (hyperlinked line; reconstructed from the DELETE_COPY(className) macro referenced on this page)
    };
}

#endif // OPENPOSE_POSE_POSE_EXTRACTOR_HPP
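PoseExtractor composes the lower-level pieces declared in the surrounding headers (a PoseExtractorNet back-end, an optional PersonIdExtractor, optional trackers). A rough wiring sketch, with placeholder paths and parameters and no error handling, might look like this; real inputs for forwardPass() come from OpenPose's scale-and-resize stage, so the call itself is left as a comment.

#include <memory>
#include <openpose/headers.hpp>

int main()
{
    // Concrete network back-end (see poseExtractorCaffe.hpp); "models/" is a placeholder path.
    const auto poseExtractorNet = std::make_shared<op::PoseExtractorCaffe>(
        op::PoseModel::BODY_25, "models/", /*gpuId*/ 0);

    // Optional helpers; passing nullptr skips the corresponding step.
    const auto personIdExtractor = std::make_shared<op::PersonIdExtractor>();

    op::PoseExtractor poseExtractor{poseExtractorNet, /*keepTopNPeople*/ nullptr, personIdExtractor};

    // Intended to run on the worker thread (cf. checkThread() in poseExtractorNet.hpp).
    poseExtractor.initializationOnThread();

    // poseExtractor.forwardPass(inputNetData, inputDataSize, scaleRatios);
    // Results would then be read back via getPoseKeypoints(), getPoseScores(), getCandidatesCopy(), ...
    return 0;
}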
    + + + + + + +
    +
    + + + + diff --git a/web/html/doc/pose_extractor_caffe_8hpp.html b/web/html/doc/pose_extractor_caffe_8hpp.html new file mode 100644 index 000000000..c53afe37c --- /dev/null +++ b/web/html/doc/pose_extractor_caffe_8hpp.html @@ -0,0 +1,126 @@ + + + + + + + +OpenPose: include/openpose/pose/poseExtractorCaffe.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    poseExtractorCaffe.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/pose_extractor_caffe_8hpp_source.html b/web/html/doc/pose_extractor_caffe_8hpp_source.html new file mode 100644 index 000000000..94121f4d5 --- /dev/null +++ b/web/html/doc/pose_extractor_caffe_8hpp_source.html @@ -0,0 +1,206 @@ + + + + + + + +OpenPose: include/openpose/pose/poseExtractorCaffe.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
poseExtractorCaffe.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list; include directives and any base-class list whose link text was lost are marked with a comment.)

#ifndef OPENPOSE_POSE_POSE_EXTRACTOR_CAFFE_HPP
#define OPENPOSE_POSE_POSE_EXTRACTOR_CAFFE_HPP

// (several include directives were rendered as hyperlinks; their targets are not preserved in this page)

namespace op
{
    class OP_API PoseExtractorCaffe  // (any base-class list on this line was a hyperlink and is not preserved)
    {
    public:
        PoseExtractorCaffe(
            const PoseModel poseModel, const std::string& modelFolder, const int gpuId,
            const std::vector<HeatMapType>& heatMapTypes = {},
            const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect,
            const bool addPartCandidates = false, const bool maximizePositives = false,
            const std::string& protoTxtPath = "", const std::string& caffeModelPath = "",
            const float upsamplingRatio = 0.f, const bool enableNet = true,
            const bool enableGoogleLogging = true);

        virtual ~PoseExtractorCaffe();

        virtual void netInitializationOnThread();

        virtual void forwardPass(
            const std::vector<Array<float>>& inputNetData, const Point<int>& inputDataSize,
            const std::vector<double>& scaleInputToNetInputs = {1.f},
            const Array<float>& poseNetOutput = Array<float>{});

        const float* getCandidatesCpuConstPtr() const;

        const float* getCandidatesGpuConstPtr() const;

        const float* getHeatMapCpuConstPtr() const;

        const float* getHeatMapGpuConstPtr() const;

        std::vector<int> getHeatMapSize() const;

        const float* getPoseGpuConstPtr() const;

    private:
        // Used when increasing spNets
        const PoseModel mPoseModel;
        const int mGpuId;
        const std::string mModelFolder;
        const std::string mProtoTxtPath;
        const std::string mCaffeModelPath;
        const float mUpsamplingRatio;
        const bool mEnableNet;
        const bool mEnableGoogleLogging;
        // General parameters
        std::vector<std::shared_ptr<Net>> spNets;
        std::shared_ptr<ResizeAndMergeCaffe<float>> spResizeAndMergeCaffe;
        std::shared_ptr<NmsCaffe<float>> spNmsCaffe;
        std::shared_ptr<BodyPartConnectorCaffe<float>> spBodyPartConnectorCaffe;
        std::shared_ptr<MaximumCaffe<float>> spMaximumCaffe;
        std::vector<std::vector<int>> mNetInput4DSizes;
        // Init with thread
        std::vector<std::shared_ptr<ArrayCpuGpu<float>>> spCaffeNetOutputBlobs;
        std::shared_ptr<ArrayCpuGpu<float>> spHeatMapsBlob;
        std::shared_ptr<ArrayCpuGpu<float>> spPeaksBlob;
        std::shared_ptr<ArrayCpuGpu<float>> spMaximumPeaksBlob;

        DELETE_COPY(PoseExtractorCaffe);  // (hyperlinked line; reconstructed from the DELETE_COPY(className) macro referenced on this page)
    };
}

#endif // OPENPOSE_POSE_POSE_EXTRACTOR_CAFFE_HPP
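The constructor's enableNet and forwardPass()'s poseNetOutput parameters suggest a mode in which no Caffe net is loaded and the network output is supplied by the caller. The sketch below illustrates that call shape only; the blob shapes, the model folder, and the assumption that results are then read through the PoseExtractorNet-style getters are all hypothetical.

#include <memory>
#include <vector>
#include <openpose/headers.hpp>

int main()
{
    // enableNet = false: the caller provides the network output via poseNetOutput (e.g. from a custom engine).
    op::PoseExtractorCaffe poseExtractorCaffe{
        op::PoseModel::BODY_25, "models/", /*gpuId*/ 0,
        /*heatMapTypes*/ {}, op::ScaleMode::ZeroToOneFixedAspect,
        /*addPartCandidates*/ false, /*maximizePositives*/ false,
        /*protoTxtPath*/ "", /*caffeModelPath*/ "",
        /*upsamplingRatio*/ 0.f, /*enableNet*/ false};

    poseExtractorCaffe.netInitializationOnThread();

    // Placeholder shapes: one 368x656 RGB net input and a matching, externally computed net output
    // (78 channels assumed for the BODY_25 heatmap+PAF layout).
    const std::vector<op::Array<float>> inputNetData{op::Array<float>{std::vector<int>{1, 3, 368, 656}, 0.f}};
    const op::Point<int> inputDataSize{1280, 720};
    const op::Array<float> poseNetOutput{std::vector<int>{1, 78, 46, 82}, 0.f};

    poseExtractorCaffe.forwardPass(inputNetData, inputDataSize, {1.}, poseNetOutput);
    // Keypoints would then be queried through the extractor interface (getPoseKeypoints(), ...).
    return 0;
}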
    + + + + + + + + +
    +
    + + + + diff --git a/web/html/doc/pose_extractor_net_8hpp.html b/web/html/doc/pose_extractor_net_8hpp.html new file mode 100644 index 000000000..d829ebce4 --- /dev/null +++ b/web/html/doc/pose_extractor_net_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/pose/poseExtractorNet.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
poseExtractorNet.hpp File Reference

#include <atomic>
#include <openpose/core/common.hpp>
#include <openpose/core/enumClasses.hpp>
#include <openpose/pose/poseParameters.hpp>

Go to the source code of this file.

Classes
    class  op::PoseExtractorNet

Namespaces
    op
    +
    + + + + diff --git a/web/html/doc/pose_extractor_net_8hpp_source.html b/web/html/doc/pose_extractor_net_8hpp_source.html new file mode 100644 index 000000000..4672910b4 --- /dev/null +++ b/web/html/doc/pose_extractor_net_8hpp_source.html @@ -0,0 +1,221 @@ + + + + + + + +OpenPose: include/openpose/pose/poseExtractorNet.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
poseExtractorNet.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list and this file's File Reference include list; link-only lines that cannot be recovered are marked with a comment.)

#ifndef OPENPOSE_POSE_POSE_EXTRACTOR_NET_HPP
#define OPENPOSE_POSE_POSE_EXTRACTOR_NET_HPP

#include <atomic>
#include <openpose/core/common.hpp>
#include <openpose/core/enumClasses.hpp>
#include <openpose/pose/poseParameters.hpp>

namespace op
{
    class OP_API PoseExtractorNet
    {
    public:
        PoseExtractorNet(const PoseModel poseModel,
                         const std::vector<HeatMapType>& heatMapTypes = {},
                         const ScaleMode heatMapScaleMode = ScaleMode::ZeroToOneFixedAspect,
                         const bool addPartCandidates = false,
                         const bool maximizePositives = false);

        virtual ~PoseExtractorNet();

        // (hyperlinked declaration on this line not preserved)

        virtual void forwardPass(
            const std::vector<Array<float>>& inputNetData, const Point<int>& inputDataSize,
            const std::vector<double>& scaleRatios = {1.f}, const Array<float>& poseNetOutput = Array<float>{}) = 0;

        virtual const float* getCandidatesCpuConstPtr() const = 0;

        virtual const float* getCandidatesGpuConstPtr() const = 0;

        virtual const float* getHeatMapCpuConstPtr() const = 0;

        virtual const float* getHeatMapGpuConstPtr() const = 0;

        virtual std::vector<int> getHeatMapSize() const = 0;

        Array<float> getHeatMapsCopy() const;

        std::vector<std::vector<std::array<float,3>>> getCandidatesCopy() const;

        virtual const float* getPoseGpuConstPtr() const = 0;

        Array<float> getPoseKeypoints() const;

        Array<float> getPoseScores() const;

        float getScaleNetToOutput() const;

        double get(const PoseProperty property) const;

        void set(const PoseProperty property, const double value);

        void increase(const PoseProperty property, const double value);

        void clear();

    protected:
        const PoseModel mPoseModel;
        Array<float> mPoseKeypoints;
        Array<float> mPoseScores;
        // (two further protected declarations on these lines were hyperlinks and are not preserved)

        void checkThread() const;

        virtual void netInitializationOnThread() = 0;

    private:
        const std::vector<HeatMapType> mHeatMapTypes;
        const ScaleMode mHeatMapScaleMode;
        const bool mAddPartCandidates;
        std::array<std::atomic<double>, (int)PoseProperty::Size> mProperties;
        std::thread::id mThreadId;

        DELETE_COPY(PoseExtractorNet);  // (hyperlinked line; reconstructed from the DELETE_COPY(className) macro referenced on this page)
    };
}

#endif // OPENPOSE_POSE_POSE_EXTRACTOR_NET_HPP
    + + + +
    +
    + + + + diff --git a/web/html/doc/pose_gpu_renderer_8hpp.html b/web/html/doc/pose_gpu_renderer_8hpp.html new file mode 100644 index 000000000..d84d84191 --- /dev/null +++ b/web/html/doc/pose_gpu_renderer_8hpp.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: include/openpose/pose/poseGpuRenderer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    poseGpuRenderer.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/pose_gpu_renderer_8hpp_source.html b/web/html/doc/pose_gpu_renderer_8hpp_source.html new file mode 100644 index 000000000..a7cb8c595 --- /dev/null +++ b/web/html/doc/pose_gpu_renderer_8hpp_source.html @@ -0,0 +1,165 @@ + + + + + + + +OpenPose: include/openpose/pose/poseGpuRenderer.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
poseGpuRenderer.hpp
Go to the documentation of this file.
(Lines rendered as hyperlinks in the generated page are reconstructed from the page's own symbol list; include directives and any base-class list whose link text was lost are marked with a comment.)

#ifndef OPENPOSE_POSE_POSE_GPU_RENDERER_HPP
#define OPENPOSE_POSE_POSE_GPU_RENDERER_HPP

// (several include directives were rendered as hyperlinks; their targets are not preserved in this page)

namespace op
{
    class OP_API PoseGpuRenderer  // (any base-class list on this line was a hyperlink and is not preserved)
    {
    public:
        PoseGpuRenderer(
            const PoseModel poseModel, const std::shared_ptr<PoseExtractorNet>& poseExtractorNet,
            const float renderThreshold, const bool blendOriginalFrame = true,
            const float alphaKeypoint = POSE_DEFAULT_ALPHA_KEYPOINT,
            const float alphaHeatMap = POSE_DEFAULT_ALPHA_HEAT_MAP, const unsigned int elementToRender = 0u);

        virtual ~PoseGpuRenderer();

        void initializationOnThread();

        std::pair<int, std::string> renderPose(Array<float>& outputData, const Array<float>& poseKeypoints,
                                               const float scaleInputToOutput,
                                               const float scaleNetToOutput = -1.f);

    private:
        const std::shared_ptr<PoseExtractorNet> spPoseExtractorNet;
        // Init with thread
        float* pGpuPose;   // GPU aux memory
        float* pMaxPtr;    // GPU aux memory
        float* pMinPtr;    // GPU aux memory
        float* pScalePtr;  // GPU aux memory

        DELETE_COPY(PoseGpuRenderer);  // (hyperlinked line; reconstructed from the DELETE_COPY(className) macro referenced on this page)
    };
}

#endif // OPENPOSE_POSE_POSE_GPU_RENDERER_HPP
    + + + + + +
    +
    + + + + diff --git a/web/html/doc/pose_parameters_8hpp.html b/web/html/doc/pose_parameters_8hpp.html new file mode 100644 index 000000000..91f56ef9b --- /dev/null +++ b/web/html/doc/pose_parameters_8hpp.html @@ -0,0 +1,156 @@ + + + + + + + +OpenPose: include/openpose/pose/poseParameters.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
poseParameters.hpp File Reference

#include <map>
#include <openpose/core/common.hpp>
#include <openpose/pose/enumClasses.hpp>

Go to the source code of this file.

Namespaces
    op

Functions
    OP_API const std::map< unsigned int, std::string > &  op::getPoseBodyPartMapping (const PoseModel poseModel)
    OP_API const std::string &  op::getPoseProtoTxt (const PoseModel poseModel)
    OP_API const std::string &  op::getPoseTrainedModel (const PoseModel poseModel)
    OP_API unsigned int  op::getPoseNumberBodyParts (const PoseModel poseModel)
    OP_API const std::vector< unsigned int > &  op::getPosePartPairs (const PoseModel poseModel)
    OP_API const std::vector< unsigned int > &  op::getPoseMapIndex (const PoseModel poseModel)
    OP_API unsigned int  op::getPoseMaxPeaks ()
    OP_API float  op::getPoseNetDecreaseFactor (const PoseModel poseModel)
    OP_API unsigned int  op::poseBodyPartMapStringToKey (const PoseModel poseModel, const std::string &string)
    OP_API unsigned int  op::poseBodyPartMapStringToKey (const PoseModel poseModel, const std::vector< std::string > &strings)
    OP_API float  op::getPoseDefaultNmsThreshold (const PoseModel poseModel, const bool maximizePositives=false)
    OP_API float  op::getPoseDefaultConnectInterMinAboveThreshold (const bool maximizePositives=false)
    OP_API float  op::getPoseDefaultConnectInterThreshold (const PoseModel poseModel, const bool maximizePositives=false)
    OP_API unsigned int  op::getPoseDefaultMinSubsetCnt (const bool maximizePositives=false)
    OP_API float  op::getPoseDefaultConnectMinSubsetScore (const bool maximizePositives=false)
    OP_API bool  op::addBkgChannel (const PoseModel poseModel)

Variables
    const auto  op::POSE_MAX_PEOPLE = 127u
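These are pure lookup functions keyed on the PoseModel enum, so they can be queried without running any network. A small sketch, assuming a built OpenPose library and the <openpose/headers.hpp> umbrella header (part names such as "Nose" for index 0 are what the BODY_25 mapping is expected to return):

#include <iostream>
#include <openpose/headers.hpp>

int main()
{
    const auto poseModel = op::PoseModel::BODY_25;

    // Number of keypoints and the human-readable name of each body part for this model.
    const auto numberBodyParts = op::getPoseNumberBodyParts(poseModel);   // 25 for BODY_25
    const auto& bodyPartMapping = op::getPoseBodyPartMapping(poseModel);  // e.g. {0: "Nose", 1: "Neck", ...}

    // Default post-processing thresholds used by the body-part connector.
    const auto nmsThreshold = op::getPoseDefaultNmsThreshold(poseModel);
    const auto minSubsetScore = op::getPoseDefaultConnectMinSubsetScore();

    std::cout << numberBodyParts << " parts, part 0 = " << bodyPartMapping.at(0)
              << ", NMS threshold = " << nmsThreshold
              << ", min subset score = " << minSubsetScore
              << ", POSE_MAX_PEOPLE = " << op::POSE_MAX_PEOPLE << std::endl;
    return 0;
}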
     
    +
    +
    + + + + diff --git a/web/html/doc/pose_parameters_8hpp.js b/web/html/doc/pose_parameters_8hpp.js new file mode 100644 index 000000000..542832c45 --- /dev/null +++ b/web/html/doc/pose_parameters_8hpp.js @@ -0,0 +1,20 @@ +var pose_parameters_8hpp = +[ + [ "addBkgChannel", "pose_parameters_8hpp.html#a13b86d097fd5f36612e9858e9348ea57", null ], + [ "getPoseBodyPartMapping", "pose_parameters_8hpp.html#aab3de911b04b96c1850cc05c6947e184", null ], + [ "getPoseDefaultConnectInterMinAboveThreshold", "pose_parameters_8hpp.html#a8e377d8da8f109cb8be8e4edbb2ea90a", null ], + [ "getPoseDefaultConnectInterThreshold", "pose_parameters_8hpp.html#aabfd35e57744b44481c09f56c90cc8b8", null ], + [ "getPoseDefaultConnectMinSubsetScore", "pose_parameters_8hpp.html#ae7636f6e8974ecb2ed96d43dd5ec261d", null ], + [ "getPoseDefaultMinSubsetCnt", "pose_parameters_8hpp.html#a863c96f1fb23d96c5d605867cfe5f99f", null ], + [ "getPoseDefaultNmsThreshold", "pose_parameters_8hpp.html#acd8cab258d7e98affa5c317a9a03e862", null ], + [ "getPoseMapIndex", "pose_parameters_8hpp.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7", null ], + [ "getPoseMaxPeaks", "pose_parameters_8hpp.html#a96a81e831f8c965825162dba09095477", null ], + [ "getPoseNetDecreaseFactor", "pose_parameters_8hpp.html#ad7ca8d89f9045481075902c8bd98b8f4", null ], + [ "getPoseNumberBodyParts", "pose_parameters_8hpp.html#a54a6c42a42a0a7e539061f5e30abb4bc", null ], + [ "getPosePartPairs", "pose_parameters_8hpp.html#a307b2c7b1506415a4ba44590fe8a7258", null ], + [ "getPoseProtoTxt", "pose_parameters_8hpp.html#ae0730c6559abdb976423ecf81eac4620", null ], + [ "getPoseTrainedModel", "pose_parameters_8hpp.html#ade70b024ee461ae04e7233bf3937c5c6", null ], + [ "poseBodyPartMapStringToKey", "pose_parameters_8hpp.html#a3df938ef93037c534c5d342720d5fb70", null ], + [ "poseBodyPartMapStringToKey", "pose_parameters_8hpp.html#aacf6e688031bb116e4878b811e8dbc23", null ], + [ "POSE_MAX_PEOPLE", "pose_parameters_8hpp.html#a522d4552d2aeabe367f4d3bf371e6b3e", null ] +]; \ No newline at end of file diff --git a/web/html/doc/pose_parameters_8hpp_source.html b/web/html/doc/pose_parameters_8hpp_source.html new file mode 100644 index 000000000..018c3b4ba --- /dev/null +++ b/web/html/doc/pose_parameters_8hpp_source.html @@ -0,0 +1,159 @@ + + + + + + + +OpenPose: include/openpose/pose/poseParameters.hpp Source File + + + + + + + + + + + + + +
    poseParameters.hpp
Go to the documentation of this file.

 1 #ifndef OPENPOSE_POSE_POSE_PARAMETERS_HPP
 2 #define OPENPOSE_POSE_POSE_PARAMETERS_HPP
 3
 4 #include <map>
 5 #include <openpose/core/common.hpp>
 6 #include <openpose/pose/enumClasses.hpp>
 7
 8 namespace op
 9 {
10     // Constant Global Parameters
11     // For OpenCL-NMS in Ubuntu, (POSE_MAX_PEOPLE+1)*3(x,y,score) must be divisible by 32. Easy fix:
12     // POSE_MAX_PEOPLE = 32n - 1
13     // For OpenCL-NMS in Windows, it must be by 64, so 64n - 1
14     const auto POSE_MAX_PEOPLE = 127u;
15
16     // Model functions
17     OP_API const std::map<unsigned int, std::string>& getPoseBodyPartMapping(const PoseModel poseModel);
18     OP_API const std::string& getPoseProtoTxt(const PoseModel poseModel);
19     OP_API const std::string& getPoseTrainedModel(const PoseModel poseModel);
20     OP_API unsigned int getPoseNumberBodyParts(const PoseModel poseModel);
21     OP_API const std::vector<unsigned int>& getPosePartPairs(const PoseModel poseModel);
22     OP_API const std::vector<unsigned int>& getPoseMapIndex(const PoseModel poseModel);
23     OP_API unsigned int getPoseMaxPeaks();
24     OP_API float getPoseNetDecreaseFactor(const PoseModel poseModel);
25     OP_API unsigned int poseBodyPartMapStringToKey(const PoseModel poseModel, const std::string& string);
26     OP_API unsigned int poseBodyPartMapStringToKey(const PoseModel poseModel, const std::vector<std::string>& strings);
27
28     // Default NSM and body connector parameters
29     OP_API float getPoseDefaultNmsThreshold(const PoseModel poseModel, const bool maximizePositives = false);
30     OP_API float getPoseDefaultConnectInterMinAboveThreshold(const bool maximizePositives = false);
31     OP_API float getPoseDefaultConnectInterThreshold(const PoseModel poseModel, const bool maximizePositives = false);
32     OP_API unsigned int getPoseDefaultMinSubsetCnt(const bool maximizePositives = false);
33     OP_API float getPoseDefaultConnectMinSubsetScore(const bool maximizePositives = false);
34     OP_API bool addBkgChannel(const PoseModel poseModel);
35 }
36
37 #endif // OPENPOSE_POSE_POSE_PARAMETERS_HPP
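The comment on lines 11-13 states a constraint rather than a formula. A stand-alone sketch (not part of the header) that makes it concrete by checking the shipped value at compile time:

#include <openpose/pose/poseParameters.hpp>

// (POSE_MAX_PEOPLE + 1) * 3 entries (x, y, score) must be divisible by 32 on Ubuntu
// and by 64 on Windows for the OpenCL NMS kernels, i.e. POSE_MAX_PEOPLE = 64n - 1.
// With POSE_MAX_PEOPLE = 127u: (127 + 1) * 3 = 384 = 12 * 32 = 6 * 64.
static_assert((op::POSE_MAX_PEOPLE + 1u) * 3u % 32u == 0u, "OpenCL NMS (Ubuntu) constraint violated");
static_assert((op::POSE_MAX_PEOPLE + 1u) * 3u % 64u == 0u, "OpenCL NMS (Windows) constraint violated");

int main() { return 0; }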
    + + + + diff --git a/web/html/doc/pose_parameters_render_8hpp.html b/web/html/doc/pose_parameters_render_8hpp.html new file mode 100644 index 000000000..6a7a0cb4a --- /dev/null +++ b/web/html/doc/pose_parameters_render_8hpp.html @@ -0,0 +1,830 @@ + + + + + + + +OpenPose: include/openpose/pose/poseParametersRender.hpp File Reference + + + + + + + + + + + + + +
    poseParametersRender.hpp File Reference

    Go to the source code of this file.

Namespaces

     op
     
Macros

    #define POSE_BODY_25_PAIRS_RENDER_GPU    1,8, 1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 8,9, 9,10, 10,11, 8,12, 12,13, 13,14, 1,0, 0,15, 15,17, 0,16, 16,18, 14,19,19,20,14,21, 11,22,22,23,11,24
     
    #define POSE_BODY_25_SCALES_RENDER_GPU   1
     
    #define POSE_BODY_25_COLORS_RENDER_GPU
     
    #define POSE_COCO_PAIRS_RENDER_GPU    1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 1,8, 8,9, 9,10, 1,11, 11,12, 12,13, 1,0, 0,14, 14,16, 0,15, 15,17
     
    #define POSE_COCO_SCALES_RENDER_GPU   1
     
    #define POSE_COCO_COLORS_RENDER_GPU
     
    #define POSE_MPI_PAIRS_RENDER_GPU    0,1, 1,2, 2,3, 3,4, 1,5, 5,6, 6,7, 1,14, 14,8, 8,9, 9,10, 14,11, 11,12, 12,13
     
    #define POSE_MPI_SCALES_RENDER_GPU   1
     
    #define POSE_MPI_COLORS_RENDER_GPU
     
    #define POSE_BODY_19_PAIRS_RENDER_GPU    1,8, 1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 8,9, 9,10, 10,11, 8,12, 12,13, 13,14, 1,0, 0,15, 15,17, 0,16, 16,18
     
    #define POSE_BODY_19_SCALES_RENDER_GPU   1
     
    #define POSE_BODY_19_COLORS_RENDER_GPU
     
    #define POSE_BODY_23_PAIRS_RENDER_GPU    1,7, 4,10, 0,1, 0,4, 1,2, 2,3, 4,5, 5,6, 7,8, 8,9, 10,11, 11,12, 0,13, 13,15, 0,14, 14,16, 12,17,17,18,12,19, 9,20,20,21,9,22
     
    #define POSE_BODY_23_SCALES_RENDER_GPU   1
     
    #define POSE_BODY_23_COLORS_RENDER_GPU
     
    #define POSE_BODY_25B_PAIRS_RENDER_GPU
     
    #define POSE_BODY_25B_SCALES_RENDER_GPU   1
     
    #define POSE_BODY_25B_COLORS_RENDER_GPU
     
    #define POSE_BODY_135_PAIRS_RENDER_GPU
     
    #define POSE_BODY_135_SCALES_RENDER_GPU
     
    #define POSE_BODY_135_COLORS_RENDER_GPU
     
    #define POSE_CAR_12_PAIRS_RENDER_GPU    4,5, 4,6, 4,0, 0,2, 4,8, 8,10, 5,7, 5,1, 1,3, 5,9, 9,11, 0,1, 8,9, 2,3, 6,7, 10,11, 6,2,7,3, 6,10,7,11
     
    #define POSE_CAR_12_SCALES_RENDER_GPU   0.5
     
    #define POSE_CAR_12_COLORS_RENDER_GPU
     
    #define POSE_CAR_22_PAIRS_RENDER_GPU
     
    #define POSE_CAR_22_SCALES_RENDER_GPU   0.625
     
    #define POSE_CAR_22_COLORS_RENDER_GPU
     
Functions

    OP_API const std::vector< float > & op::getPoseScales (const PoseModel poseModel)
     
    OP_API const std::vector< float > & op::getPoseColors (const PoseModel poseModel)
     
    OP_API const std::vector< unsigned int > & op::getPoseBodyPartPairsRender (const PoseModel poseModel)
     
    OP_API unsigned int op::getNumberElementsToRender (const PoseModel poseModel)
     
Variables

    const auto op::POSE_DEFAULT_ALPHA_KEYPOINT = 0.6f
     
    const auto op::POSE_DEFAULT_ALPHA_HEAT_MAP = 0.7f
     
    const auto op::H135 = 25
     
    const auto op::F135 = H135 + 40
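These entries are parallel, flattened tables: getPoseBodyPartPairsRender returns keypoint indices two at a time (one limb per pair), getPoseColors one RGB triplet per keypoint, and getPoseScales the drawing scale (a single global value for most models); H135 and F135 mark where the hand (index 25) and face (index 65) keypoints start inside the BODY_135 layout. A small illustrative consistency check, assuming only the header above (the asserts restate these conventions and are not OpenPose code):

#include <cassert>
#include <openpose/pose/poseParametersRender.hpp>

int main()
{
    const auto poseModel = op::PoseModel::BODY_25;
    const auto& pairs  = op::getPoseBodyPartPairsRender(poseModel); // 2 indices per limb
    const auto& colors = op::getPoseColors(poseModel);              // 3 floats (RGB) per keypoint
    const auto& scales = op::getPoseScales(poseModel);              // drawing scale factor(s)
    (void)scales;

    assert(pairs.size() % 2u == 0u);
    assert(colors.size() % 3u == 0u);
    // Every keypoint referenced by a render pair must have its own color triplet.
    for (const auto part : pairs)
        assert(3u * part + 2u < colors.size());

    // BODY_135 index layout: body keypoints in [0, 25), hands in [H135, H135 + 40), face from F135 on.
    assert(op::F135 == op::H135 + 40);
    return 0;
}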
     

    Macro Definition Documentation

    + +

    ◆ POSE_BODY_135_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_135_COLORS_RENDER_GPU
    +
    + +

    Definition at line 219 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_135_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_135_PAIRS_RENDER_GPU
    +
    +Value:
    0,1, 0,2, 1,3, 2,4, 5,7, 6,8, 7,9, 8,10, 5,11, 6,12, 11,13, 12,14, 13,15, 14,16, \
    +
    15,19, 19,20, 15,21, 16,22, 22,23, 16,24, 5,6, 17,18, 11,12, \
    +
    \
    +
    9,H135+0, H135+0,H135+1, H135+1,H135+2, H135+2,H135+3, 9,H135+4, H135+4,H135+5, H135+5,H135+6, H135+6,H135+7, \
    +
    9,H135+8, H135+8,H135+9, H135+9,H135+10, H135+10,H135+11, 9,H135+12, H135+12,H135+13, H135+13,H135+14, H135+14,H135+15, \
    +
    9,H135+16, H135+16,H135+17, H135+17,H135+18, H135+18,H135+19, \
    +
    \
    +
    10,H135+20, H135+20,H135+21, H135+21,H135+22, H135+22,H135+23, 10,H135+24, H135+24,H135+25, H135+25,H135+26, H135+26,H135+27, \
    +
    10,H135+28, H135+28,H135+29, H135+29,H135+30, H135+30,H135+31, 10,H135+32, H135+32,H135+33, H135+33,H135+34, H135+34,H135+35, \
    +
    10,H135+36, H135+36,H135+37, H135+37,H135+38, H135+38,H135+39, \
    +
    \
    +
    F135+0,F135+1, F135+1,F135+2, F135+2,F135+3, F135+3,F135+4, F135+4,F135+5, F135+5,F135+6, F135+6,F135+7, F135+7,F135+8, F135+8,F135+9, F135+9,F135+10, F135+10,F135+11, F135+11,F135+12, F135+12,F135+13, F135+13,F135+14, F135+14,F135+15, F135+15,F135+16, F135+17,F135+18, F135+18,F135+19, F135+19,F135+20, \
    +
    F135+20,F135+21, F135+22,F135+23, F135+23,F135+24, F135+24,F135+25, F135+25,F135+26, F135+27,F135+28, F135+28,F135+29, F135+29,F135+30, F135+31,F135+32, F135+32,F135+33, F135+33,F135+34, F135+34,F135+35, F135+36,F135+37, F135+37,F135+38, F135+38,F135+39, F135+39,F135+40, F135+40,F135+41, \
    +
    F135+41,F135+36, F135+42,F135+43, F135+43,F135+44, F135+44,F135+45, F135+45,F135+46, F135+46,F135+47, F135+47,F135+42, F135+48,F135+49, F135+49,F135+50, F135+50,F135+51, F135+51,F135+52, F135+52,F135+53, F135+53,F135+54, F135+54,F135+55, F135+55,F135+56, F135+56,F135+57, F135+57,F135+58, \
    +
    F135+58,F135+59, F135+59,F135+48, F135+60,F135+61, F135+61,F135+62, F135+62,F135+63, F135+63,F135+64, F135+64,F135+65, F135+65,F135+66, F135+66,F135+67, F135+67,F135+60
    +
    const auto F135
    +
    const auto H135
    +
    +

    Definition at line 190 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_135_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_135_SCALES_RENDER_GPU
    +
    +Value:
    1.f,1.f,1.f,1.f,1.f, 1.f,1.f,1.f,1.f,1.f, 1.f,1.f,1.f,1.f,1.f, 1.f,1.f, \
    +
    0.00f,0.00f, \
    +
    1.f,1.f,1.f,1.f,1.f,1.f, \
    +
    0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, \
    +
    0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, \
    +
    0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, \
    +
    0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, \
    +
    0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, \
    +
    0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f
    +
    +

    Definition at line 207 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_19_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_19_COLORS_RENDER_GPU
    +
    +Value:
    255.f, 0.f, 85.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 85.f, 0.f, \
    +
    255.f, 170.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    170.f, 255.f, 0.f, \
    +
    85.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    255.f, 0.f, 0.f, \
    +
    0.f, 255.f, 85.f, \
    +
    0.f, 255.f, 170.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 170.f, 255.f, \
    +
    0.f, 85.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    255.f, 0.f, 170.f, \
    +
    170.f, 0.f, 255.f, \
    +
    255.f, 0.f, 255.f, \
    +
    85.f, 0.f, 255.f
    +
    +

    Definition at line 93 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_19_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_19_PAIRS_RENDER_GPU    1,8, 1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 8,9, 9,10, 10,11, 8,12, 12,13, 13,14, 1,0, 0,15, 15,17, 0,16, 16,18
    +
    + +

    Definition at line 90 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_19_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_19_SCALES_RENDER_GPU   1
    +
    + +

    Definition at line 92 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_23_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_23_COLORS_RENDER_GPU
    +
    +Value:
    255.f, 0.f, 85.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 85.f, 0.f, \
    +
    255.f, 170.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    170.f, 255.f, 0.f, \
    +
    85.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    255.f, 0.f, 0.f, \
    +
    0.f, 255.f, 85.f, \
    +
    0.f, 255.f, 170.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 170.f, 255.f, \
    +
    0.f, 85.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    255.f, 0.f, 170.f, \
    +
    170.f, 0.f, 255.f, \
    +
    255.f, 0.f, 255.f, \
    +
    85.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 255.f, 255.f
    +
    +

    Definition at line 117 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_23_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_23_PAIRS_RENDER_GPU    1,7, 4,10, 0,1, 0,4, 1,2, 2,3, 4,5, 5,6, 7,8, 8,9, 10,11, 11,12, 0,13, 13,15, 0,14, 14,16, 12,17,17,18,12,19, 9,20,20,21,9,22
    +
    + +

    Definition at line 114 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_23_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_23_SCALES_RENDER_GPU   1
    +
    + +

    Definition at line 116 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_25_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_25_COLORS_RENDER_GPU
    +
    +Value:
    255.f, 0.f, 85.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 85.f, 0.f, \
    +
    255.f, 170.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    170.f, 255.f, 0.f, \
    +
    85.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    255.f, 0.f, 0.f, \
    +
    0.f, 255.f, 85.f, \
    +
    0.f, 255.f, 170.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 170.f, 255.f, \
    +
    0.f, 85.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    255.f, 0.f, 170.f, \
    +
    170.f, 0.f, 255.f, \
    +
    255.f, 0.f, 255.f, \
    +
    85.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 255.f, 255.f
    +
    +

    Definition at line 19 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_25_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_25_PAIRS_RENDER_GPU    1,8, 1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 8,9, 9,10, 10,11, 8,12, 12,13, 13,14, 1,0, 0,15, 15,17, 0,16, 16,18, 14,19,19,20,14,21, 11,22,22,23,11,24
    +
    + +

    Definition at line 16 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_25_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_25_SCALES_RENDER_GPU   1
    +
    + +

    Definition at line 18 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_25B_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_25B_COLORS_RENDER_GPU
    +
    +Value:
    255.f, 0.f, 85.f, \
    +
    170.f, 0.f, 255.f, \
    +
    255.f, 0.f, 170.f, \
    +
    85.f, 0.f, 255.f, \
    +
    255.f, 0.f, 255.f, \
    +
    170.f, 255.f, 0.f, \
    +
    255.f, 85.f, 0.f, \
    +
    85.f, 255.f, 0.f, \
    +
    255.f, 170.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    0.f, 170.f, 255.f, \
    +
    0.f, 255.f, 85.f, \
    +
    0.f, 85.f, 255.f, \
    +
    0.f, 255.f, 170.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 255.f, 255.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 0.f, 0.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 255.f, 255.f
    +
    +

    Definition at line 149 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_25B_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_25B_PAIRS_RENDER_GPU
    +
    +Value:
    0,1, 0,2, 1,3, 2,4, 5,7, 6,8, 7,9, 8,10, 5,11, 6,12, 11,13, 12,14, 13,15, 14,16, \
    +
    15,19, 19,20, 15,21, 16,22, 22,23, 16,24, 5,17, \
    +
    6,17, 17,18, 11,12
    +
    +

    Definition at line 144 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_BODY_25B_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_BODY_25B_SCALES_RENDER_GPU   1
    +
    + +

    Definition at line 148 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_CAR_12_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_CAR_12_COLORS_RENDER_GPU
    +
    +Value:
    0.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 75.f, 75.f, \
    +
    255.f, 75.f, 75.f, \
    +
    \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    255.f, 0.f, 255.f, \
    +
    255.f, 0.f, 255.f
    +
    +

    Definition at line 365 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_CAR_12_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_CAR_12_PAIRS_RENDER_GPU    4,5, 4,6, 4,0, 0,2, 4,8, 8,10, 5,7, 5,1, 1,3, 5,9, 9,11, 0,1, 8,9, 2,3, 6,7, 10,11, 6,2,7,3, 6,10,7,11
    +
    + +

    Definition at line 361 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_CAR_12_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_CAR_12_SCALES_RENDER_GPU   0.5
    +
    + +

    Definition at line 364 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_CAR_22_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_CAR_22_COLORS_RENDER_GPU
    +
    +Value:
    255.f, 128.f, 128.f, \
    +
    255.f, 0.f, 0.f, \
    +
    64.f, 0.f, 0.f, \
    +
    255.f, 0.f, 0.f, \
    +
    \
    +
    0.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    \
    +
    0.f, 0.f, 64.f, \
    +
    128.f, 128.f, 255.f, \
    +
    \
    +
    0.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    \
    +
    0.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    \
    +
    64.f, 0.f, 0.f, \
    +
    255.f, 128.f, 128.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 0.f, 0.f, \
    +
    \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    \
    +
    0.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    \
    +
    0.f, 0.f, 255.f, \
    +
    0.f, 0.f, 64.f
    +
    +

    Definition at line 386 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_CAR_22_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_CAR_22_PAIRS_RENDER_GPU
    +
    +Value:
    0,1,1,3,3,2,2,0, 6,7,7,16,16,17,17,6, 12,13,13,14,14,15,15,12, 6,8,7,8,6,9,7,9,6,4,7,5, 12,11,13,10, \
    +
    16,18,17,18,16,19,17,19, 6,21,7,20
    +
    +

    Definition at line 382 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_CAR_22_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_CAR_22_SCALES_RENDER_GPU   0.625
    +
    + +

    Definition at line 385 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_COCO_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_COCO_COLORS_RENDER_GPU
    +
    +Value:
    255.f, 0.f, 85.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 85.f, 0.f, \
    +
    255.f, 170.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    170.f, 255.f, 0.f, \
    +
    85.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    0.f, 255.f, 85.f, \
    +
    0.f, 255.f, 170.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 170.f, 255.f, \
    +
    0.f, 85.f, 255.f, \
    +
    0.f, 0.f, 255.f, \
    +
    255.f, 0.f, 170.f, \
    +
    170.f, 0.f, 255.f, \
    +
    255.f, 0.f, 255.f, \
    +
    85.f, 0.f, 255.f
    +
    +

    Definition at line 49 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_COCO_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_COCO_PAIRS_RENDER_GPU    1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 1,8, 8,9, 9,10, 1,11, 11,12, 12,13, 1,0, 0,14, 14,16, 0,15, 15,17
    +
    + +

    Definition at line 46 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_COCO_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_COCO_SCALES_RENDER_GPU   1
    +
    + +

    Definition at line 48 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_MPI_COLORS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_MPI_COLORS_RENDER_GPU
    +
    +Value:
    255.f, 0.f, 85.f, \
    +
    255.f, 0.f, 0.f, \
    +
    255.f, 85.f, 0.f, \
    +
    255.f, 170.f, 0.f, \
    +
    255.f, 255.f, 0.f, \
    +
    170.f, 255.f, 0.f, \
    +
    85.f, 255.f, 0.f, \
    +
    43.f, 255.f, 0.f, \
    +
    0.f, 255.f, 0.f, \
    +
    0.f, 255.f, 85.f, \
    +
    0.f, 255.f, 170.f, \
    +
    0.f, 255.f, 255.f, \
    +
    0.f, 170.f, 255.f, \
    +
    0.f, 85.f, 255.f, \
    +
    0.f, 0.f, 255.f
    +
    +

    Definition at line 73 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_MPI_PAIRS_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_MPI_PAIRS_RENDER_GPU    0,1, 1,2, 2,3, 3,4, 1,5, 5,6, 6,7, 1,14, 14,8, 8,9, 9,10, 14,11, 11,12, 12,13
    +
    + +

    Definition at line 70 of file poseParametersRender.hpp.

    + +
    +
    + +

    ◆ POSE_MPI_SCALES_RENDER_GPU

    + +
    +
    + + + + +
    #define POSE_MPI_SCALES_RENDER_GPU   1
    +
    + +

    Definition at line 72 of file poseParametersRender.hpp.

    + +
    +
    +
    +
    + + + + diff --git a/web/html/doc/pose_parameters_render_8hpp.js b/web/html/doc/pose_parameters_render_8hpp.js new file mode 100644 index 000000000..5c0c07cc8 --- /dev/null +++ b/web/html/doc/pose_parameters_render_8hpp.js @@ -0,0 +1,38 @@ +var pose_parameters_render_8hpp = +[ + [ "POSE_BODY_135_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#a45b08569481c3bf02eceab0d911b2bf6", null ], + [ "POSE_BODY_135_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a2c5ec8c89146a0535f4f29f861f4e248", null ], + [ "POSE_BODY_135_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#a426402ce79f98928f30037da33c2a349", null ], + [ "POSE_BODY_19_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#a17cec2005928720d6da0e83ba26cca01", null ], + [ "POSE_BODY_19_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a497bfbf7fddb6e960565ec70bb6b2ad1", null ], + [ "POSE_BODY_19_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#a791ed14d0f2a65f850c94154b996826c", null ], + [ "POSE_BODY_23_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#aeb1e2dd8178c15024e372185e2e5cf54", null ], + [ "POSE_BODY_23_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a5f3db3bbb18fe8d978661f3c5417c110", null ], + [ "POSE_BODY_23_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#aa257db7f46ddaa7fe838f659b8e5ed66", null ], + [ "POSE_BODY_25_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#a456b8ce498f455af926215d91f6b6087", null ], + [ "POSE_BODY_25_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a216b861af0ff0c237be529dc204ed05e", null ], + [ "POSE_BODY_25_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#ae30e7b56c09200d60f05acba38a8bf05", null ], + [ "POSE_BODY_25B_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#a1e4980010228bfd1e9e1387c23a3ab6a", null ], + [ "POSE_BODY_25B_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a1b97e47c182baf7de08af03a8ba397e3", null ], + [ "POSE_BODY_25B_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#a04ebdf33bf0ff159d144dab0ebf1c2ce", null ], + [ "POSE_CAR_12_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#a6be8d3dedaf015f795625d1df19876aa", null ], + [ "POSE_CAR_12_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a253206407787fc26629e6e46f60d7be2", null ], + [ "POSE_CAR_12_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#a7382830f0c24beaea601444cb5962f06", null ], + [ "POSE_CAR_22_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#aaecdba75da05e8bfc90e4393c88ab6e6", null ], + [ "POSE_CAR_22_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a0065da73d9e649360d458fc670ee0f95", null ], + [ "POSE_CAR_22_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#a0afb6a9782a4ad8bd3ac41bd2436fefc", null ], + [ "POSE_COCO_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#a8b293ab02337be3f90218c5b824ece06", null ], + [ "POSE_COCO_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a5afab27fbbebc71b8753a20dd6c9a322", null ], + [ "POSE_COCO_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#a8cd3d34880f73dc73b2feb28370e86ec", null ], + [ "POSE_MPI_COLORS_RENDER_GPU", "pose_parameters_render_8hpp.html#a7fdd75b1478d65f11ebc77144662958c", null ], + [ "POSE_MPI_PAIRS_RENDER_GPU", "pose_parameters_render_8hpp.html#a7987426d997b6b040302d25fd07403ac", null ], + [ "POSE_MPI_SCALES_RENDER_GPU", "pose_parameters_render_8hpp.html#a32e98c9dd9e6f38c597c7924582570d0", null ], + [ "getNumberElementsToRender", "pose_parameters_render_8hpp.html#aebff78a4cfbef1cf1b2e03066d88564c", null ], + [ "getPoseBodyPartPairsRender", 
"pose_parameters_render_8hpp.html#a11bd7e53698eabe32b69b48708cf7b19", null ], + [ "getPoseColors", "pose_parameters_render_8hpp.html#abb49286241ba7a1d754b31dee333274a", null ], + [ "getPoseScales", "pose_parameters_render_8hpp.html#a016abefba53293ed2ffe3a3c3bd88dd0", null ], + [ "F135", "pose_parameters_render_8hpp.html#a593bb53120d8db14cab814dfb5d9ed2c", null ], + [ "H135", "pose_parameters_render_8hpp.html#ae37c577c1054c89da4a6736342d491aa", null ], + [ "POSE_DEFAULT_ALPHA_HEAT_MAP", "pose_parameters_render_8hpp.html#af45cddacd69fff73a4ea4acbbbac43e0", null ], + [ "POSE_DEFAULT_ALPHA_KEYPOINT", "pose_parameters_render_8hpp.html#a21fcb98366f6ea8895fc7f527f232db5", null ] +]; \ No newline at end of file diff --git a/web/html/doc/pose_parameters_render_8hpp_source.html b/web/html/doc/pose_parameters_render_8hpp_source.html new file mode 100644 index 000000000..5f2868718 --- /dev/null +++ b/web/html/doc/pose_parameters_render_8hpp_source.html @@ -0,0 +1,539 @@ + + + + + + + +OpenPose: include/openpose/pose/poseParametersRender.hpp Source File + + + + + + + + + + + + + +
    poseParametersRender.hpp
Go to the documentation of this file.
    1 #ifndef OPENPOSE_POSE_POSE_PARAMETERS_RENDER_HPP
    +
    2 #define OPENPOSE_POSE_POSE_PARAMETERS_RENDER_HPP
    +
    3 
    + + +
    6 
    +
    7 namespace op
    +
    8 {
    +
    9  // Rendering parameters
    +
    10  const auto POSE_DEFAULT_ALPHA_KEYPOINT = 0.6f;
    +
    11  const auto POSE_DEFAULT_ALPHA_HEAT_MAP = 0.7f;
    +
    12 
    +
    13  // Model-Dependent Parameters
    +
    14  // CUDA-code Model-Dependent Parameters must be defined with #define
    +
    15  // BODY_25
    +
    16  #define POSE_BODY_25_PAIRS_RENDER_GPU \
    +
    17  1,8, 1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 8,9, 9,10, 10,11, 8,12, 12,13, 13,14, 1,0, 0,15, 15,17, 0,16, 16,18, 14,19,19,20,14,21, 11,22,22,23,11,24
    +
    18  #define POSE_BODY_25_SCALES_RENDER_GPU 1
    +
    19  #define POSE_BODY_25_COLORS_RENDER_GPU \
    +
    20  255.f, 0.f, 85.f, \
    +
    21  255.f, 0.f, 0.f, \
    +
    22  255.f, 85.f, 0.f, \
    +
    23  255.f, 170.f, 0.f, \
    +
    24  255.f, 255.f, 0.f, \
    +
    25  170.f, 255.f, 0.f, \
    +
    26  85.f, 255.f, 0.f, \
    +
    27  0.f, 255.f, 0.f, \
    +
    28  255.f, 0.f, 0.f, \
    +
    29  0.f, 255.f, 85.f, \
    +
    30  0.f, 255.f, 170.f, \
    +
    31  0.f, 255.f, 255.f, \
    +
    32  0.f, 170.f, 255.f, \
    +
    33  0.f, 85.f, 255.f, \
    +
    34  0.f, 0.f, 255.f, \
    +
    35  255.f, 0.f, 170.f, \
    +
    36  170.f, 0.f, 255.f, \
    +
    37  255.f, 0.f, 255.f, \
    +
    38  85.f, 0.f, 255.f, \
    +
    39  0.f, 0.f, 255.f, \
    +
    40  0.f, 0.f, 255.f, \
    +
    41  0.f, 0.f, 255.f, \
    +
    42  0.f, 255.f, 255.f, \
    +
    43  0.f, 255.f, 255.f, \
    +
    44  0.f, 255.f, 255.f
    +
    45  // COCO
    +
    46  #define POSE_COCO_PAIRS_RENDER_GPU \
    +
    47  1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 1,8, 8,9, 9,10, 1,11, 11,12, 12,13, 1,0, 0,14, 14,16, 0,15, 15,17
    +
    48  #define POSE_COCO_SCALES_RENDER_GPU 1
    +
    49  #define POSE_COCO_COLORS_RENDER_GPU \
    +
    50  255.f, 0.f, 85.f, \
    +
    51  255.f, 0.f, 0.f, \
    +
    52  255.f, 85.f, 0.f, \
    +
    53  255.f, 170.f, 0.f, \
    +
    54  255.f, 255.f, 0.f, \
    +
    55  170.f, 255.f, 0.f, \
    +
    56  85.f, 255.f, 0.f, \
    +
    57  0.f, 255.f, 0.f, \
    +
    58  0.f, 255.f, 85.f, \
    +
    59  0.f, 255.f, 170.f, \
    +
    60  0.f, 255.f, 255.f, \
    +
    61  0.f, 170.f, 255.f, \
    +
    62  0.f, 85.f, 255.f, \
    +
    63  0.f, 0.f, 255.f, \
    +
    64  255.f, 0.f, 170.f, \
    +
    65  170.f, 0.f, 255.f, \
    +
    66  255.f, 0.f, 255.f, \
    +
    67  85.f, 0.f, 255.f
    +
    68  // MPI
    +
    69  // MPI colors chosen such that they are closed to COCO colors
    +
    70  #define POSE_MPI_PAIRS_RENDER_GPU \
    +
    71  0,1, 1,2, 2,3, 3,4, 1,5, 5,6, 6,7, 1,14, 14,8, 8,9, 9,10, 14,11, 11,12, 12,13
    +
    72  #define POSE_MPI_SCALES_RENDER_GPU 1
    +
    73  #define POSE_MPI_COLORS_RENDER_GPU \
    +
    74  255.f, 0.f, 85.f, \
    +
    75  255.f, 0.f, 0.f, \
    +
    76  255.f, 85.f, 0.f, \
    +
    77  255.f, 170.f, 0.f, \
    +
    78  255.f, 255.f, 0.f, \
    +
    79  170.f, 255.f, 0.f, \
    +
    80  85.f, 255.f, 0.f, \
    +
    81  43.f, 255.f, 0.f, \
    +
    82  0.f, 255.f, 0.f, \
    +
    83  0.f, 255.f, 85.f, \
    +
    84  0.f, 255.f, 170.f, \
    +
    85  0.f, 255.f, 255.f, \
    +
    86  0.f, 170.f, 255.f, \
    +
    87  0.f, 85.f, 255.f, \
    +
    88  0.f, 0.f, 255.f
    +
    89  // BODY_19
    +
    90  #define POSE_BODY_19_PAIRS_RENDER_GPU \
    +
    91  1,8, 1,2, 1,5, 2,3, 3,4, 5,6, 6,7, 8,9, 9,10, 10,11, 8,12, 12,13, 13,14, 1,0, 0,15, 15,17, 0,16, 16,18
    +
    92  #define POSE_BODY_19_SCALES_RENDER_GPU 1
    +
    93  #define POSE_BODY_19_COLORS_RENDER_GPU \
    +
    94  255.f, 0.f, 85.f, \
    +
    95  255.f, 0.f, 0.f, \
    +
    96  255.f, 85.f, 0.f, \
    +
    97  255.f, 170.f, 0.f, \
    +
    98  255.f, 255.f, 0.f, \
    +
    99  170.f, 255.f, 0.f, \
    +
    100  85.f, 255.f, 0.f, \
    +
    101  0.f, 255.f, 0.f, \
    +
    102  255.f, 0.f, 0.f, \
    +
    103  0.f, 255.f, 85.f, \
    +
    104  0.f, 255.f, 170.f, \
    +
    105  0.f, 255.f, 255.f, \
    +
    106  0.f, 170.f, 255.f, \
    +
    107  0.f, 85.f, 255.f, \
    +
    108  0.f, 0.f, 255.f, \
    +
    109  255.f, 0.f, 170.f, \
    +
    110  170.f, 0.f, 255.f, \
    +
    111  255.f, 0.f, 255.f, \
    +
    112  85.f, 0.f, 255.f
    +
    113  // BODY_23
    +
    114  #define POSE_BODY_23_PAIRS_RENDER_GPU \
    +
    115  1,7, 4,10, 0,1, 0,4, 1,2, 2,3, 4,5, 5,6, 7,8, 8,9, 10,11, 11,12, 0,13, 13,15, 0,14, 14,16, 12,17,17,18,12,19, 9,20,20,21,9,22
    +
    116  #define POSE_BODY_23_SCALES_RENDER_GPU 1
    +
    117  #define POSE_BODY_23_COLORS_RENDER_GPU \
    +
    118  255.f, 0.f, 85.f, \
    +
    119  255.f, 0.f, 0.f, \
    +
    120  255.f, 85.f, 0.f, \
    +
    121  255.f, 170.f, 0.f, \
    +
    122  255.f, 255.f, 0.f, \
    +
    123  170.f, 255.f, 0.f, \
    +
    124  85.f, 255.f, 0.f, \
    +
    125  0.f, 255.f, 0.f, \
    +
    126  255.f, 0.f, 0.f, \
    +
    127  0.f, 255.f, 85.f, \
    +
    128  0.f, 255.f, 170.f, \
    +
    129  0.f, 255.f, 255.f, \
    +
    130  0.f, 170.f, 255.f, \
    +
    131  0.f, 85.f, 255.f, \
    +
    132  0.f, 0.f, 255.f, \
    +
    133  255.f, 0.f, 170.f, \
    +
    134  170.f, 0.f, 255.f, \
    +
    135  255.f, 0.f, 255.f, \
    +
    136  85.f, 0.f, 255.f, \
    +
    137  0.f, 0.f, 255.f, \
    +
    138  0.f, 0.f, 255.f, \
    +
    139  0.f, 0.f, 255.f, \
    +
    140  0.f, 255.f, 255.f, \
    +
    141  0.f, 255.f, 255.f, \
    +
    142  0.f, 255.f, 255.f
    +
    143  // BODY_25B
    +
    144  #define POSE_BODY_25B_PAIRS_RENDER_GPU \
    +
    145  0,1, 0,2, 1,3, 2,4, 5,7, 6,8, 7,9, 8,10, 5,11, 6,12, 11,13, 12,14, 13,15, 14,16, \
    +
    146  15,19, 19,20, 15,21, 16,22, 22,23, 16,24, 5,17, \
    +
    147  6,17, 17,18, 11,12
    +
    148  #define POSE_BODY_25B_SCALES_RENDER_GPU 1
    +
    149  #define POSE_BODY_25B_COLORS_RENDER_GPU \
    +
    150  255.f, 0.f, 85.f, \
    +
    151  170.f, 0.f, 255.f, \
    +
    152  255.f, 0.f, 170.f, \
    +
    153  85.f, 0.f, 255.f, \
    +
    154  255.f, 0.f, 255.f, \
    +
    155  170.f, 255.f, 0.f, \
    +
    156  255.f, 85.f, 0.f, \
    +
    157  85.f, 255.f, 0.f, \
    +
    158  255.f, 170.f, 0.f, \
    +
    159  0.f, 255.f, 0.f, \
    +
    160  255.f, 255.f, 0.f, \
    +
    161  0.f, 170.f, 255.f, \
    +
    162  0.f, 255.f, 85.f, \
    +
    163  0.f, 85.f, 255.f, \
    +
    164  0.f, 255.f, 170.f, \
    +
    165  0.f, 0.f, 255.f, \
    +
    166  0.f, 255.f, 255.f, \
    +
    167  255.f, 0.f, 0.f, \
    +
    168  255.f, 0.f, 0.f, \
    +
    169  0.f, 0.f, 255.f, \
    +
    170  0.f, 0.f, 255.f, \
    +
    171  0.f, 0.f, 255.f, \
    +
    172  0.f, 255.f, 255.f, \
    +
    173  0.f, 255.f, 255.f, \
    +
    174  0.f, 255.f, 255.f
    +
    175  // BODY_135
    +
    176  // Hand color selection
    +
    177  // http://www.perbang.dk/rgbgradient/
    +
    178  // 1. Main color
    +
    179  // - Each finger of the right hand: 11 steps from FF0000 to FF0001 and pick last 5 from HSV gradient.
    +
    180  // - Each finger of the left hand: 21 steps from FF0000 to FF0001, choosing 4 among first 6 (HSV grad.),
    +
    181  // and then green.
    +
    182  // Note: Choosing first 5 from 11 steps was giving 2 very close greens
    +
    183  // 2. Gradient color from wrist to finger tips
    +
    184  // - Inside each finger: 5 steps from main color to 000000, and selecting first 4 from RGB gradient.
    +
    185  // Note: Used HSV gradient for red finger.
    +
    186  const auto H135 = 25;
    +
    187  const auto F135 = H135 + 40;
    +
    188  // 15,19, 19,20, 15,21, 16,22, 22,23, 16,24, 5,17,
    +
    189  // 6,17, 17,18, 11,12,
    +
    190  #define POSE_BODY_135_PAIRS_RENDER_GPU \
    +
    191  0,1, 0,2, 1,3, 2,4, 5,7, 6,8, 7,9, 8,10, 5,11, 6,12, 11,13, 12,14, 13,15, 14,16, \
    +
    192  15,19, 19,20, 15,21, 16,22, 22,23, 16,24, 5,6, 17,18, 11,12, \
    +
    193  \
    +
    194  9,H135+0, H135+0,H135+1, H135+1,H135+2, H135+2,H135+3, 9,H135+4, H135+4,H135+5, H135+5,H135+6, H135+6,H135+7, \
    +
    195  9,H135+8, H135+8,H135+9, H135+9,H135+10, H135+10,H135+11, 9,H135+12, H135+12,H135+13, H135+13,H135+14, H135+14,H135+15, \
    +
    196  9,H135+16, H135+16,H135+17, H135+17,H135+18, H135+18,H135+19, \
    +
    197  \
    +
    198  10,H135+20, H135+20,H135+21, H135+21,H135+22, H135+22,H135+23, 10,H135+24, H135+24,H135+25, H135+25,H135+26, H135+26,H135+27, \
    +
    199  10,H135+28, H135+28,H135+29, H135+29,H135+30, H135+30,H135+31, 10,H135+32, H135+32,H135+33, H135+33,H135+34, H135+34,H135+35, \
    +
    200  10,H135+36, H135+36,H135+37, H135+37,H135+38, H135+38,H135+39, \
    +
    201  \
    +
    202  F135+0,F135+1, F135+1,F135+2, F135+2,F135+3, F135+3,F135+4, F135+4,F135+5, F135+5,F135+6, F135+6,F135+7, F135+7,F135+8, F135+8,F135+9, F135+9,F135+10, F135+10,F135+11, F135+11,F135+12, F135+12,F135+13, F135+13,F135+14, F135+14,F135+15, F135+15,F135+16, F135+17,F135+18, F135+18,F135+19, F135+19,F135+20, \
    +
    203  F135+20,F135+21, F135+22,F135+23, F135+23,F135+24, F135+24,F135+25, F135+25,F135+26, F135+27,F135+28, F135+28,F135+29, F135+29,F135+30, F135+31,F135+32, F135+32,F135+33, F135+33,F135+34, F135+34,F135+35, F135+36,F135+37, F135+37,F135+38, F135+38,F135+39, F135+39,F135+40, F135+40,F135+41, \
    +
    204  F135+41,F135+36, F135+42,F135+43, F135+43,F135+44, F135+44,F135+45, F135+45,F135+46, F135+46,F135+47, F135+47,F135+42, F135+48,F135+49, F135+49,F135+50, F135+50,F135+51, F135+51,F135+52, F135+52,F135+53, F135+53,F135+54, F135+54,F135+55, F135+55,F135+56, F135+56,F135+57, F135+57,F135+58, \
    +
    205  F135+58,F135+59, F135+59,F135+48, F135+60,F135+61, F135+61,F135+62, F135+62,F135+63, F135+63,F135+64, F135+64,F135+65, F135+65,F135+66, F135+66,F135+67, F135+67,F135+60
    +
    206  // Disabled really noisy values
    +
    207  #define POSE_BODY_135_SCALES_RENDER_GPU \
    +
    208  1.f,1.f,1.f,1.f,1.f, 1.f,1.f,1.f,1.f,1.f, 1.f,1.f,1.f,1.f,1.f, 1.f,1.f, \
    +
    209  0.00f,0.00f, \
    +
    210  1.f,1.f,1.f,1.f,1.f,1.f, \
    +
    211  0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, \
    +
    212  0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, 0.60f,0.60f,0.60f,0.60f,0.60f, \
    +
    213  0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, \
    +
    214  0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, \
    +
    215  0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f, \
    +
    216  0.45f,0.45f,0.45f,0.45f,0.45f, 0.45f,0.45f,0.45f,0.45f,0.45f
    +
    217  // First 0.45f row:
    +
    218  // 0.00f,0.00f,0.00f,0.00f,0.00f, 0.00f,0.00f,0.00f,0.00f,0.00f, 0.00f,0.00f,0.00f,0.00f,0.00f, 0.00f,0.00f,0.45f,0.45f,0.45f,
    +
    219  #define POSE_BODY_135_COLORS_RENDER_GPU \
    +
    220  255.f, 0.f, 85.f, \
    +
    221  170.f, 0.f, 255.f, \
    +
    222  255.f, 0.f, 170.f, \
    +
    223  85.f, 0.f, 255.f, \
    +
    224  255.f, 0.f, 255.f, \
    +
    225  170.f, 255.f, 0.f, \
    +
    226  255.f, 85.f, 0.f, \
    +
    227  85.f, 255.f, 0.f, \
    +
    228  255.f, 170.f, 0.f, \
    +
    229  0.f, 255.f, 0.f, \
    +
    230  255.f, 255.f, 0.f, \
    +
    231  0.f, 170.f, 255.f, \
    +
    232  0.f, 255.f, 85.f, \
    +
    233  0.f, 85.f, 255.f, \
    +
    234  0.f, 255.f, 170.f, \
    +
    235  0.f, 0.f, 255.f, \
    +
    236  0.f, 255.f, 255.f, \
    +
    237  255.f, 0.f, 0.f, \
    +
    238  255.f, 0.f, 0.f, \
    +
    239  \
    +
    240  0.f, 0.f, 255.f, \
    +
    241  0.f, 0.f, 255.f, \
    +
    242  0.f, 0.f, 255.f, \
    +
    243  0.f, 255.f, 255.f, \
    +
    244  0.f, 255.f, 255.f, \
    +
    245  0.f, 255.f, 255.f, \
    +
    246  \
    +
    247  255.f, 0.f, 0.f, \
    +
    248  191.f, 47.f, 47.f, \
    +
    249  127.f, 63.f, 63.f, \
    +
    250  63.f, 47.f, 47.f, \
    +
    251  255.f, 76.f, 0.f, \
    +
    252  191.f, 57.f, 0.f, \
    +
    253  127.f, 38.f, 0.f, \
    +
    254  63.f, 19.f, 0.f, \
    +
    255  255.f, 152.f, 0.f, \
    +
    256  191.f, 114.f, 0.f, \
    +
    257  127.f, 76.f, 0.f, \
    +
    258  63.f, 38.f, 0.f, \
    +
    259  255.f, 255.f, 0.f, \
    +
    260  191.f, 191.f, 0.f, \
    +
    261  127.f, 127.f, 0.f, \
    +
    262  63.f, 63.f, 0.f, \
    +
    263  0.f, 255.f, 0.f, \
    +
    264  0.f, 191.f, 0.f, \
    +
    265  0.f, 127.f, 0.f, \
    +
    266  0.f, 63.f, 0.f, \
    +
    267  \
    +
    268  255.f, 0.f, 153.f, \
    +
    269  191.f, 0.f, 114.f, \
    +
    270  127.f, 0.f, 76.f, \
    +
    271  63.f, 0.f, 38.f, \
    +
    272  203.f, 0.f, 255.f, \
    +
    273  152.f, 0.f, 191.f, \
    +
    274  101.f, 0.f, 127.f, \
    +
    275  50.f, 0.f, 63.f, \
    +
    276  50.f, 0.f, 255.f, \
    +
    277  37.f, 0.f, 191.f, \
    +
    278  25.f, 0.f, 127.f, \
    +
    279  12.f, 0.f, 63.f, \
    +
    280  0.f, 102.f, 255.f, \
    +
    281  0.f, 76.f, 191.f, \
    +
    282  0.f, 51.f, 127.f, \
    +
    283  0.f, 25.f, 63.f, \
    +
    284  0.f, 255.f, 255.f, \
    +
    285  0.f, 191.f, 191.f, \
    +
    286  0.f, 127.f, 127.f, \
    +
    287  0.f, 63.f, 63.f, \
    +
    288  \
    +
    289  255.f, 255.f, 255.f, \
    +
    290  255.f, 255.f, 255.f, \
    +
    291  255.f, 255.f, 255.f, \
    +
    292  255.f, 255.f, 255.f, \
    +
    293  255.f, 255.f, 255.f, \
    +
    294  255.f, 255.f, 255.f, \
    +
    295  255.f, 255.f, 255.f, \
    +
    296  255.f, 255.f, 255.f, \
    +
    297  255.f, 255.f, 255.f, \
    +
    298  255.f, 255.f, 255.f, \
    +
    299  255.f, 255.f, 255.f, \
    +
    300  255.f, 255.f, 255.f, \
    +
    301  255.f, 255.f, 255.f, \
    +
    302  255.f, 255.f, 255.f, \
    +
    303  255.f, 255.f, 255.f, \
    +
    304  255.f, 255.f, 255.f, \
    +
    305  255.f, 255.f, 255.f, \
    +
    306  255.f, 255.f, 255.f, \
    +
    307  255.f, 255.f, 255.f, \
    +
    308  255.f, 255.f, 255.f, \
    +
    309  255.f, 255.f, 255.f, \
    +
    310  255.f, 255.f, 255.f, \
    +
    311  255.f, 255.f, 255.f, \
    +
    312  255.f, 255.f, 255.f, \
    +
    313  255.f, 255.f, 255.f, \
    +
    314  255.f, 255.f, 255.f, \
    +
    315  255.f, 255.f, 255.f, \
    +
    316  255.f, 255.f, 255.f, \
    +
    317  255.f, 255.f, 255.f, \
    +
    318  255.f, 255.f, 255.f, \
    +
    319  255.f, 255.f, 255.f, \
    +
    320  255.f, 255.f, 255.f, \
    +
    321  255.f, 255.f, 255.f, \
    +
    322  255.f, 255.f, 255.f, \
    +
    323  255.f, 255.f, 255.f, \
    +
    324  255.f, 255.f, 255.f, \
    +
    325  255.f, 255.f, 255.f, \
    +
    326  255.f, 255.f, 255.f, \
    +
    327  255.f, 255.f, 255.f, \
    +
    328  255.f, 255.f, 255.f, \
    +
    329  255.f, 255.f, 255.f, \
    +
    330  255.f, 255.f, 255.f, \
    +
    331  255.f, 255.f, 255.f, \
    +
    332  255.f, 255.f, 255.f, \
    +
    333  255.f, 255.f, 255.f, \
    +
    334  255.f, 255.f, 255.f, \
    +
    335  255.f, 255.f, 255.f, \
    +
    336  255.f, 255.f, 255.f, \
    +
    337  255.f, 255.f, 255.f, \
    +
    338  255.f, 255.f, 255.f, \
    +
    339  255.f, 255.f, 255.f, \
    +
    340  255.f, 255.f, 255.f, \
    +
    341  255.f, 255.f, 255.f, \
    +
    342  255.f, 255.f, 255.f, \
    +
    343  255.f, 255.f, 255.f, \
    +
    344  255.f, 255.f, 255.f, \
    +
    345  255.f, 255.f, 255.f, \
    +
    346  255.f, 255.f, 255.f, \
    +
    347  255.f, 255.f, 255.f, \
    +
    348  255.f, 255.f, 255.f, \
    +
    349  255.f, 255.f, 255.f, \
    +
    350  255.f, 255.f, 255.f, \
    +
    351  255.f, 255.f, 255.f, \
    +
    352  255.f, 255.f, 255.f, \
    +
    353  255.f, 255.f, 255.f, \
    +
    354  255.f, 255.f, 255.f, \
    +
    355  255.f, 255.f, 255.f, \
    +
    356  255.f, 255.f, 255.f, \
    +
    357  255.f, 255.f, 255.f, \
    +
    358  255.f, 255.f, 255.f
    +
    359 
    +
    360  // CAR_12
    +
    361  #define POSE_CAR_12_PAIRS_RENDER_GPU \
    +
    362  4,5, 4,6, 4,0, 0,2, 4,8, 8,10, 5,7, 5,1, 1,3, 5,9, 9,11, 0,1, 8,9, 2,3, 6,7, 10,11, 6,2,7,3, 6,10,7,11
    +
    363  // 4,5, 4,6, 4,0, 0,2, 4,8, 8,10, 5,7, 5,1, 1,3, 5,9, 9,11
    +
    364  #define POSE_CAR_12_SCALES_RENDER_GPU 0.5
    +
    365  #define POSE_CAR_12_COLORS_RENDER_GPU \
    +
    366  0.f, 255.f, 0.f, \
    +
    367  0.f, 255.f, 0.f, \
    +
    368  255.f, 255.f, 0.f, \
    +
    369  255.f, 255.f, 0.f, \
    +
    370  \
    +
    371  255.f, 0.f, 0.f, \
    +
    372  255.f, 0.f, 0.f, \
    +
    373  255.f, 75.f, 75.f, \
    +
    374  255.f, 75.f, 75.f, \
    +
    375  \
    +
    376  0.f, 0.f, 255.f, \
    +
    377  0.f, 0.f, 255.f, \
    +
    378  255.f, 0.f, 255.f, \
    +
    379  255.f, 0.f, 255.f
    +
    380 
    +
    381  // CAR_22
    +
    382  #define POSE_CAR_22_PAIRS_RENDER_GPU \
    +
    383  0,1,1,3,3,2,2,0, 6,7,7,16,16,17,17,6, 12,13,13,14,14,15,15,12, 6,8,7,8,6,9,7,9,6,4,7,5, 12,11,13,10, \
    +
    384  16,18,17,18,16,19,17,19, 6,21,7,20
    +
    385  #define POSE_CAR_22_SCALES_RENDER_GPU 0.625
    +
    386  #define POSE_CAR_22_COLORS_RENDER_GPU \
    +
    387  255.f, 128.f, 128.f, \
    +
    388  255.f, 0.f, 0.f, \
    +
    389  64.f, 0.f, 0.f, \
    +
    390  255.f, 0.f, 0.f, \
    +
    391  \
    +
    392  0.f, 255.f, 0.f, \
    +
    393  0.f, 255.f, 0.f, \
    +
    394  \
    +
    395  0.f, 0.f, 64.f, \
    +
    396  128.f, 128.f, 255.f, \
    +
    397  \
    +
    398  0.f, 255.f, 0.f, \
    +
    399  0.f, 255.f, 0.f, \
    +
    400  \
    +
    401  0.f, 255.f, 0.f, \
    +
    402  0.f, 255.f, 0.f, \
    +
    403  \
    +
    404  64.f, 0.f, 0.f, \
    +
    405  255.f, 128.f, 128.f, \
    +
    406  255.f, 0.f, 0.f, \
    +
    407  255.f, 0.f, 0.f, \
    +
    408  \
    +
    409  0.f, 0.f, 255.f, \
    +
    410  0.f, 0.f, 255.f, \
    +
    411  \
    +
    412  0.f, 255.f, 0.f, \
    +
    413  0.f, 255.f, 0.f, \
    +
    414  \
    +
    415  0.f, 0.f, 255.f, \
    +
    416  0.f, 0.f, 64.f
    +
    417 
    +
    418  // Rendering functions
    +
    419  OP_API const std::vector<float>& getPoseScales(const PoseModel poseModel);
    +
    420  OP_API const std::vector<float>& getPoseColors(const PoseModel poseModel);
    +
    421  OP_API const std::vector<unsigned int>& getPoseBodyPartPairsRender(const PoseModel poseModel);
    +
    422  OP_API unsigned int getNumberElementsToRender(const PoseModel poseModel);
    +
    423 }
    +
    424 
    +
    425 #endif // OPENPOSE_POSE_POSE_PARAMETERS_RENDER_HPP
    + + + + diff --git a/web/html/doc/pose_renderer_8hpp.html b/web/html/doc/pose_renderer_8hpp.html new file mode 100644 index 000000000..921cd5512 --- /dev/null +++ b/web/html/doc/pose_renderer_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/pose/poseRenderer.hpp File Reference + + + + + + + + + + + + + +
    poseRenderer.hpp File Reference
#include <map>
#include <openpose/core/common.hpp>
#include <openpose/pose/enumClasses.hpp>

    Go to the source code of this file.

Classes

    class  op::PoseRenderer
     
Namespaces

     op
     
    + + + + diff --git a/web/html/doc/pose_renderer_8hpp_source.html b/web/html/doc/pose_renderer_8hpp_source.html new file mode 100644 index 000000000..184af5d12 --- /dev/null +++ b/web/html/doc/pose_renderer_8hpp_source.html @@ -0,0 +1,148 @@ + + + + + + + +OpenPose: include/openpose/pose/poseRenderer.hpp Source File + + + + + + + + + + + + + +
    poseRenderer.hpp
Go to the documentation of this file.

 1 #ifndef OPENPOSE_POSE_POSE_RENDERER_HPP
 2 #define OPENPOSE_POSE_POSE_RENDERER_HPP
 3
 4 #include <map>
 5 #include <openpose/core/common.hpp>
 6 #include <openpose/pose/enumClasses.hpp>
 7
 8 namespace op
 9 {
10     class OP_API PoseRenderer
11     {
12     public:
13         PoseRenderer(const PoseModel poseModel);
14
15         virtual ~PoseRenderer();
16
17         virtual void initializationOnThread(){};
18
19         virtual std::pair<int, std::string> renderPose(
20             Array<float>& outputData, const Array<float>& poseKeypoints, const float scaleInputToOutput,
21             const float scaleNetToOutput = -1.f) = 0;
22
23     protected:
24         const PoseModel mPoseModel;
25         const std::map<unsigned int, std::string> mPartIndexToName;
26
27     private:
28
29         DELETE_COPY(PoseRenderer);
30     };
31 }
32
33 #endif // OPENPOSE_POSE_POSE_RENDERER_HPP
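PoseRenderer is an abstract base: concrete renderers override the pure virtual renderPose. The subclass below is hypothetical and only illustrates the required signature; its name, its trivial body, and the meaning attached to the returned pair are assumptions, not OpenPose code:

#include <string>
#include <utility>
#include <openpose/pose/poseRenderer.hpp>

// Hypothetical no-op renderer: draws nothing into outputData.
class NullPoseRenderer : public op::PoseRenderer
{
public:
    explicit NullPoseRenderer(const op::PoseModel poseModel) :
        op::PoseRenderer{poseModel}
    {
    }

    std::pair<int, std::string> renderPose(
        op::Array<float>& outputData, const op::Array<float>& poseKeypoints,
        const float scaleInputToOutput, const float scaleNetToOutput) override
    {
        (void)outputData; (void)poseKeypoints; (void)scaleInputToOutput; (void)scaleNetToOutput;
        // The meaning of the returned pair is defined by OpenPose's concrete renderers;
        // this stub simply returns a neutral code and a note.
        return std::make_pair(0, std::string{"null renderer: nothing drawn"});
    }
};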
    + + + + diff --git a/web/html/doc/pose_triangulation_8hpp.html b/web/html/doc/pose_triangulation_8hpp.html new file mode 100644 index 000000000..4f49d4023 --- /dev/null +++ b/web/html/doc/pose_triangulation_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/3d/poseTriangulation.hpp File Reference + + + + + + + + + + + + + +
    poseTriangulation.hpp File Reference

    Go to the source code of this file.

Classes

    class  op::PoseTriangulation
     
Namespaces

     op
     
    + + + + diff --git a/web/html/doc/pose_triangulation_8hpp_source.html b/web/html/doc/pose_triangulation_8hpp_source.html new file mode 100644 index 000000000..9bac38ef5 --- /dev/null +++ b/web/html/doc/pose_triangulation_8hpp_source.html @@ -0,0 +1,142 @@ + + + + + + + +OpenPose: include/openpose/3d/poseTriangulation.hpp Source File + + + + + + + + + + + + + +
    poseTriangulation.hpp
Go to the documentation of this file.

 1 #ifndef OPENPOSE_3D_POSE_TRIANGULATION_HPP
 2 #define OPENPOSE_3D_POSE_TRIANGULATION_HPP
 3
 4 #include <openpose/core/common.hpp>
 5
 6 namespace op
 7 {
 8     class OP_API PoseTriangulation
 9     {
10     public:
11         PoseTriangulation(const int minViews3d);
12
13         virtual ~PoseTriangulation();
14
15         void initializationOnThread();
16
17         Array<float> reconstructArray(
18             const std::vector<Array<float>>& keypointsVector, const std::vector<Matrix>& cameraMatrices,
19             const std::vector<Point<int>>& imageSizes) const;
20
21         std::vector<Array<float>> reconstructArray(
22             const std::vector<std::vector<Array<float>>>& keypointsVector, const std::vector<Matrix>& cameraMatrices,
23             const std::vector<Point<int>>& imageSizes) const;
24
25     private:
26         const int mMinViews3d;
27     };
28 }
29
30 #endif // OPENPOSE_3D_POSE_TRIANGULATION_HPP
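A sketch of how the class is typically driven, assuming the caller already has per-camera 2-D keypoints, op::Matrix camera matrices, and image sizes from a calibrated multi-view setup; the helper name is made up, and passing -1 as minViews3d (taken here to mean the library's usual default) is an assumption:

#include <vector>
#include <openpose/3d/poseTriangulation.hpp>

// Hypothetical helper: reconstruct 3-D keypoints for one frame from N camera views.
// keypointsPerCamera[i], cameraMatrices[i], and imageSizes[i] must all describe camera i.
op::Array<float> reconstruct3dKeypoints(
    const std::vector<op::Array<float>>& keypointsPerCamera,
    const std::vector<op::Matrix>& cameraMatrices,
    const std::vector<op::Point<int>>& imageSizes)
{
    op::PoseTriangulation poseTriangulation{-1};
    poseTriangulation.initializationOnThread();
    return poseTriangulation.reconstructArray(keypointsPerCamera, cameraMatrices, imageSizes);
}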
    + + + + diff --git a/web/html/doc/priority_queue_8hpp.html b/web/html/doc/priority_queue_8hpp.html new file mode 100644 index 000000000..c6ac9d8f1 --- /dev/null +++ b/web/html/doc/priority_queue_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/priorityQueue.hpp File Reference + + + + + + + + + + + + + +
    priorityQueue.hpp File Reference
#include <queue>
#include <openpose/core/common.hpp>
#include <openpose/thread/queueBase.hpp>
#include <type_traits>

    Go to the source code of this file.

Classes

    class  op::PriorityQueue< TDatums, TQueue >
     
Namespaces

     op
     
Functions

     op::COMPILE_TEMPLATE_DATUM (PriorityQueue)
     
    + + + + diff --git a/web/html/doc/priority_queue_8hpp.js b/web/html/doc/priority_queue_8hpp.js new file mode 100644 index 000000000..67a8465a4 --- /dev/null +++ b/web/html/doc/priority_queue_8hpp.js @@ -0,0 +1,5 @@ +var priority_queue_8hpp = +[ + [ "PriorityQueue", "classop_1_1_priority_queue.html", "classop_1_1_priority_queue" ], + [ "COMPILE_TEMPLATE_DATUM", "priority_queue_8hpp.html#aa65c081c13e0d0453938a3c41d04dc49", null ] +]; \ No newline at end of file diff --git a/web/html/doc/priority_queue_8hpp_source.html b/web/html/doc/priority_queue_8hpp_source.html new file mode 100644 index 000000000..59e7d93f5 --- /dev/null +++ b/web/html/doc/priority_queue_8hpp_source.html @@ -0,0 +1,199 @@ + + + + + + + +OpenPose: include/openpose/thread/priorityQueue.hpp Source File + + + + + + + + + + + + + +
    priorityQueue.hpp
Go to the documentation of this file.

 1 #ifndef OPENPOSE_THREAD_PRIORITY_QUEUE_HPP
 2 #define OPENPOSE_THREAD_PRIORITY_QUEUE_HPP
 3
 4 #include <queue> // std::priority_queue
 5 #include <openpose/core/common.hpp>
 6 #include <openpose/thread/queueBase.hpp>
 7
 8 namespace op
 9 {
10     template<typename TDatums, typename TQueue = std::priority_queue<TDatums, std::vector<TDatums>, std::greater<TDatums>>>
11     class PriorityQueue : public QueueBase<TDatums, TQueue>
12     {
13     public:
14         explicit PriorityQueue(const long long maxSize = 256);
15
16         virtual ~PriorityQueue();
17
18         TDatums front() const;
19
20     private:
21         bool pop(TDatums& tDatums);
22
23         DELETE_COPY(PriorityQueue);
24     };
25 }
26
27
28
29
30
31 // Implementation
32 #include <type_traits> // std::is_same
33 namespace op
34 {
35     template<typename TDatums, typename TQueue>
36     PriorityQueue<TDatums, TQueue>::PriorityQueue(const long long maxSize) :
37         QueueBase<TDatums, TQueue>{maxSize}
38     {
39         // Check TDatums = underlying value type of TQueue
40         typedef typename TQueue::value_type underlyingValueType;
41         static_assert(std::is_same<TDatums, underlyingValueType>::value,
42             "Error: The type of the queue must be the same as the type of the container");
43     }
44
45     template<typename TDatums, typename TQueue>
46     PriorityQueue<TDatums, TQueue>::~PriorityQueue()
47     {
48     }
49
50     template<typename TDatums, typename TQueue>
51     TDatums PriorityQueue<TDatums, TQueue>::front() const
52     {
53         try
54         {
55             const std::lock_guard<std::mutex> lock{this->mMutex};
56             return this->mTQueue.top();
57         }
58         catch (const std::exception& e)
59         {
60             error(e.what(), __LINE__, __FUNCTION__, __FILE__);
61             return TDatums{};
62         }
63     }
64
65     template<typename TDatums, typename TQueue>
66     bool PriorityQueue<TDatums, TQueue>::pop(TDatums& tDatums)
67     {
68         try
69         {
70             if (this->mPopIsStopped || this->mTQueue.empty())
71                 return false;
72
73             tDatums = {std::move(this->mTQueue.top())};
74             this->mTQueue.pop();
75             this->mConditionVariable.notify_one();
76             return true;
77         }
78         catch (const std::exception& e)
79         {
80             error(e.what(), __LINE__, __FUNCTION__, __FILE__);
81             return false;
82         }
83     }
84
85     COMPILE_TEMPLATE_DATUM(PriorityQueue);
86 }
87
88 #endif // OPENPOSE_THREAD_PRIORITY_QUEUE_HPP
    + +
    virtual ~PriorityQueue()
    +
    TDatums front() const
    +
    PriorityQueue(const long long maxSize=256)
    + + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    + +
    +
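For reference, a minimal usage sketch of the priority queue documented above (not part of the generated pages): the element type `int`, the `maxSize` of 8, and the printed ordering are illustrative assumptions; `tryPush`/`tryPop` are inherited from the `op::QueueBase` base class shown later in this diff.

```cpp
// Minimal sketch (not from the repository): exercising op::PriorityQueue with plain
// ints. The default TQueue uses std::greater, so the smallest element is served first.
#include <iostream>
#include <openpose/thread/priorityQueue.hpp>

int main()
{
    op::PriorityQueue<int> priorityQueue{/*maxSize*/ 8};   // maxSize chosen arbitrarily

    for (const auto value : {5, 1, 3})
        priorityQueue.tryPush(value);      // Non-blocking push (inherited from QueueBase)

    int value = 0;
    while (priorityQueue.tryPop(value))    // Non-blocking pop: prints 1, 3, 5
        std::cout << value << std::endl;

    return 0;
}
```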
    + + + + diff --git a/web/html/doc/producer_2enum_classes_8hpp.html b/web/html/doc/producer_2enum_classes_8hpp.html new file mode 100644 index 000000000..a11ed0201 --- /dev/null +++ b/web/html/doc/producer_2enum_classes_8hpp.html @@ -0,0 +1,141 @@ + + + + + + + +OpenPose: include/openpose/producer/enumClasses.hpp File Reference + + + + + + + + + + + + + +
    enumClasses.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + + + +

    +Enumerations

    enum class  op::ProducerFpsMode : bool { op::OriginalFps +, op::RetrievalFps + }
     
    enum class  op::ProducerProperty : unsigned char {
    +  op::AutoRepeat = 0 +, op::Flip +, op::Rotation +, op::FrameStep +,
    +  op::NumberViews +, op::Size +
    + }
     
    enum class  op::ProducerType : unsigned char {
    +  op::FlirCamera +, op::ImageDirectory +, op::IPCamera +, op::Video +,
    +  op::Webcam +, op::None +
    + }
     
    +
    +
    + + + + diff --git a/web/html/doc/producer_2enum_classes_8hpp.js b/web/html/doc/producer_2enum_classes_8hpp.js new file mode 100644 index 000000000..a95656ac4 --- /dev/null +++ b/web/html/doc/producer_2enum_classes_8hpp.js @@ -0,0 +1,23 @@ +var producer_2enum_classes_8hpp = +[ + [ "ProducerFpsMode", "producer_2enum_classes_8hpp.html#ac0230b669b296920c0cfc41b7587268f", [ + [ "OriginalFps", "producer_2enum_classes_8hpp.html#ac0230b669b296920c0cfc41b7587268fa0123c3afc0fac5edaf8b1672cb12626c", null ], + [ "RetrievalFps", "producer_2enum_classes_8hpp.html#ac0230b669b296920c0cfc41b7587268fa6bcd0f3b66e42d1aacd18d1c3b532473", null ] + ] ], + [ "ProducerProperty", "producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774", [ + [ "AutoRepeat", "producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a3ebbca1b84060b0caaf823639739945d", null ], + [ "Flip", "producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a9ffbd422925a6839ee820ddbc59278c5", null ], + [ "Rotation", "producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774af1a42bd417390fc63b030a519624607a", null ], + [ "FrameStep", "producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a63eacc5ed21c0ecb8bc583e10dc3ae58", null ], + [ "NumberViews", "producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a3b6cff57206f4ce645622b2e55f784a6", null ], + [ "Size", "producer_2enum_classes_8hpp.html#abc501c56c6cf6cf1989c84b1692cb774a6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "ProducerType", "producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332", [ + [ "FlirCamera", "producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332af436d4d7a472ac39a7cb227e3ea24f8d", null ], + [ "ImageDirectory", "producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332a54a365e86ee42cff91ca36532c9bbabf", null ], + [ "IPCamera", "producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332af40a40a04a078c4449cda2f326d7fb18", null ], + [ "Video", "producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332a34e2d1989a1dbf75cd631596133ee5ee", null ], + [ "Webcam", "producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332ae2faa2a74b6a4134d0b3e84c7c0e2a01", null ], + [ "None", "producer_2enum_classes_8hpp.html#a54b73745852c270cfd891eed0f6f2332a6adf97f83acf6453d4a6a4b1070f3754", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/producer_2enum_classes_8hpp_source.html b/web/html/doc/producer_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..2b508539a --- /dev/null +++ b/web/html/doc/producer_2enum_classes_8hpp_source.html @@ -0,0 +1,152 @@ + + + + + + + +OpenPose: include/openpose/producer/enumClasses.hpp Source File + + + + + + + + + + + + + +
    enumClasses.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_PRODUCER_ENUM_CLASSES_HPP
    +
    2 #define OPENPOSE_PRODUCER_ENUM_CLASSES_HPP
    +
    3 
    +
    4 namespace op
    +
    5 {
    +
    6  enum class ProducerFpsMode : bool
    +
    7  {
    + + +
    12  };
    +
    13 
    +
    14  enum class ProducerProperty : unsigned char
    +
    15  {
    +
    16  AutoRepeat = 0,
    +
    17  Flip,
    +
    18  Rotation,
    +
    19  FrameStep,
    + +
    21  Size,
    +
    22  };
    +
    23 
    +
    29  enum class ProducerType : unsigned char
    +
    30  {
    +
    32  FlirCamera,
    + +
    38  IPCamera,
    +
    40  Video,
    +
    42  Webcam,
    +
    44  None,
    +
    45  };
    +
    46 }
    +
    47 
    +
    48 #endif // OPENPOSE_PRODUCER_ENUM_CLASSES_HPP
    + +
    ProducerType
    Definition: enumClasses.hpp:30
    + + + + + + +
    ProducerProperty
    Definition: enumClasses.hpp:15
    + + + + + + +
    ProducerFpsMode
    Definition: enumClasses.hpp:7
    + + +
    +
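A small illustrative sketch of how the enum above might be consumed; the helper name `producerTypeToString` is made up for this example and does not exist in OpenPose.

```cpp
// Hypothetical helper (illustration only): map op::ProducerType values to a label.
#include <string>
#include <openpose/producer/enumClasses.hpp>

std::string producerTypeToString(const op::ProducerType type)
{
    switch (type)
    {
        case op::ProducerType::FlirCamera:     return "FLIR camera";
        case op::ProducerType::ImageDirectory: return "image directory";
        case op::ProducerType::IPCamera:       return "IP camera";
        case op::ProducerType::Video:          return "video file";
        case op::ProducerType::Webcam:         return "webcam";
        case op::ProducerType::None:           return "no producer";
        default:                               return "unknown";
    }
}
```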
    + + + + diff --git a/web/html/doc/producer_2headers_8hpp.html b/web/html/doc/producer_2headers_8hpp.html new file mode 100644 index 000000000..995877c99 --- /dev/null +++ b/web/html/doc/producer_2headers_8hpp.html @@ -0,0 +1,114 @@ + + + + + + + +OpenPose: include/openpose/producer/headers.hpp File Reference + + + + + + + + + + + + + +
    + + + + + + diff --git a/web/html/doc/producer_2headers_8hpp_source.html b/web/html/doc/producer_2headers_8hpp_source.html new file mode 100644 index 000000000..57fe065bb --- /dev/null +++ b/web/html/doc/producer_2headers_8hpp_source.html @@ -0,0 +1,129 @@ + + + + + + + +OpenPose: include/openpose/producer/headers.hpp Source File + + + + + + + + + + + + + +
    headers.hpp
    +
    + +
    + + + + diff --git a/web/html/doc/producer_8hpp.html b/web/html/doc/producer_8hpp.html new file mode 100644 index 000000000..79d65bd15 --- /dev/null +++ b/web/html/doc/producer_8hpp.html @@ -0,0 +1,126 @@ + + + + + + + +OpenPose: include/openpose/producer/producer.hpp File Reference + + + + + + + + + + + + + +
    producer.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::Producer
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

    OP_API std::shared_ptr< Producer > op::createProducer (const ProducerType producerType=ProducerType::None, const std::string &producerString="", const Point< int > &cameraResolution=Point< int >{-1,-1}, const std::string &cameraParameterPath="models/cameraParameters/", const bool undistortImage=true, const int numberViews=-1)
     
    +
    +
    + + + + diff --git a/web/html/doc/producer_8hpp.js b/web/html/doc/producer_8hpp.js new file mode 100644 index 000000000..54fdadfb7 --- /dev/null +++ b/web/html/doc/producer_8hpp.js @@ -0,0 +1,5 @@ +var producer_8hpp = +[ + [ "Producer", "classop_1_1_producer.html", "classop_1_1_producer" ], + [ "createProducer", "producer_8hpp.html#a6a34909c6c4d79a215f163291111d556", null ] +]; \ No newline at end of file diff --git a/web/html/doc/producer_8hpp_source.html b/web/html/doc/producer_8hpp_source.html new file mode 100644 index 000000000..4ff3b35b0 --- /dev/null +++ b/web/html/doc/producer_8hpp_source.html @@ -0,0 +1,222 @@ + + + + + + + +OpenPose: include/openpose/producer/producer.hpp Source File + + + + + + + + + + + + + +
    producer.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_PRODUCER_PRODUCER_HPP
    +
    2 #define OPENPOSE_PRODUCER_PRODUCER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    + +
    15  {
    +
    16  public:
    +
    20  explicit Producer(const ProducerType type, const std::string& cameraParameterPath, const bool undistortImage,
    +
    21  const int mNumberViews);
    +
    22 
    +
    27  virtual ~Producer();
    +
    28 
    + +
    34 
    +
    39  std::vector<Matrix> getFrames();
    +
    40 
    +
    46  virtual std::vector<Matrix> getCameraMatrices();
    +
    47 
    +
    53  virtual std::vector<Matrix> getCameraExtrinsics();
    +
    54 
    +
    60  virtual std::vector<Matrix> getCameraIntrinsics();
    +
    61 
    +
    67  virtual std::string getNextFrameName() = 0;
    +
    68 
    +
    74  void setProducerFpsMode(const ProducerFpsMode fpsMode);
    +
    75 
    + +
    81  {
    +
    82  return mType;
    +
    83  }
    +
    84 
    +
    90  virtual bool isOpened() const = 0;
    +
    91 
    +
    96  virtual void release() = 0;
    +
    97 
    +
    105  virtual double get(const int capProperty) = 0;
    +
    106 
    +
    114  virtual void set(const int capProperty, const double value) = 0;
    +
    115 
    +
    120  double get(const ProducerProperty property);
    +
    121 
    +
    127  void set(const ProducerProperty property, const double value);
    +
    128 
    +
    129  protected:
    + +
    137 
    + +
    143 
    + +
    148 
    +
    153  virtual Matrix getRawFrame() = 0;
    +
    154 
    +
    160  virtual std::vector<Matrix> getRawFrames() = 0;
    +
    161 
    +
    162  private:
    +
    163  const ProducerType mType;
    +
    164  ProducerFpsMode mProducerFpsMode;
    +
    165  std::array<double, (int)ProducerProperty::Size> mProperties;
    +
    166  unsigned int mNumberEmptyFrames;
    +
    167  // For ProducerFpsMode::OriginalFps
    +
    168  bool mTrackingFps;
    +
    169  unsigned long long mFirstFrameTrackingFps;
    +
    170  unsigned long long mNumberFramesTrackingFps;
    +
    171  unsigned int mNumberSetPositionTrackingFps;
    +
    172  std::chrono::high_resolution_clock::time_point mClockTrackingFps;
    +
    173  // Camera parameters
    +
    174  CameraParameterReader mCameraParameterReader;
    +
    175 
    + +
    177  };
    +
    178 
    +
    182  OP_API std::shared_ptr<Producer> createProducer(
    +
    183  const ProducerType producerType = ProducerType::None, const std::string& producerString = "",
    +
    184  const Point<int>& cameraResolution = Point<int>{-1,-1},
    +
    185  const std::string& cameraParameterPath = "models/cameraParameters/", const bool undistortImage = true,
    +
    186  const int numberViews = -1);
    +
    187 }
    +
    188 
    +
    189 #endif // OPENPOSE_PRODUCER_PRODUCER_HPP
    + + + + +
    void setProducerFpsMode(const ProducerFpsMode fpsMode)
    +
    Matrix getFrame()
    +
    virtual std::vector< Matrix > getCameraMatrices()
    +
    virtual std::vector< Matrix > getCameraExtrinsics()
    +
    virtual double get(const int capProperty)=0
    +
    virtual bool isOpened() const =0
    +
    virtual std::vector< Matrix > getCameraIntrinsics()
    +
    virtual void release()=0
    +
    virtual ~Producer()
    +
    double get(const ProducerProperty property)
    +
    ProducerType getType()
    Definition: producer.hpp:80
    +
    std::vector< Matrix > getFrames()
    +
    Producer(const ProducerType type, const std::string &cameraParameterPath, const bool undistortImage, const int mNumberViews)
    +
    virtual Matrix getRawFrame()=0
    +
    virtual void set(const int capProperty, const double value)=0
    +
    virtual std::string getNextFrameName()=0
    +
    void checkFrameIntegrity(Matrix &frame)
    +
    void ifEndedResetOrRelease()
    +
    virtual std::vector< Matrix > getRawFrames()=0
    +
    void set(const ProducerProperty property, const double value)
    +
    void keepDesiredFrameRate()
    + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + +
    ProducerType
    Definition: enumClasses.hpp:30
    + +
    OP_API std::shared_ptr< Producer > createProducer(const ProducerType producerType=ProducerType::None, const std::string &producerString="", const Point< int > &cameraResolution=Point< int >{-1,-1}, const std::string &cameraParameterPath="models/cameraParameters/", const bool undistortImage=true, const int numberViews=-1)
    +
    ProducerProperty
    Definition: enumClasses.hpp:15
    + +
    ProducerFpsMode
    Definition: enumClasses.hpp:7
    + + +
    +
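A hedged usage sketch of `op::createProducer` as declared above: the video path is a placeholder, the frame cap of 100 is arbitrary, and the example assumes the OpenPose library is built and linked.

```cpp
// Hedged sketch: open a video through the factory declared above and pull frames.
// "examples/media/video.avi" is a placeholder path; error handling is omitted.
#include <memory>
#include <openpose/producer/producer.hpp>

int main()
{
    const std::shared_ptr<op::Producer> producer = op::createProducer(
        op::ProducerType::Video, "examples/media/video.avi");

    // Pull a bounded number of frames to keep the sketch simple
    for (auto i = 0; i < 100 && producer != nullptr && producer->isOpened(); ++i)
    {
        const op::Matrix frame = producer->getFrame();
        // ... hand `frame` to the rest of the pipeline here ...
        (void)frame;
    }

    if (producer != nullptr)
        producer->release();
    return 0;
}
```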
    + + + + diff --git a/web/html/doc/profiler_8hpp.html b/web/html/doc/profiler_8hpp.html new file mode 100644 index 000000000..865171c3c --- /dev/null +++ b/web/html/doc/profiler_8hpp.html @@ -0,0 +1,275 @@ + + + + + + + +OpenPose: include/openpose/utilities/profiler.hpp File Reference + + + + + + + + + + + + + +
    profiler.hpp File Reference
    +
    +
    +
    #include <chrono>
    +#include <string>
    +#include <openpose/core/macros.hpp>
    +#include <openpose/utilities/enumClasses.hpp>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::Profiler
     
    + + + +

    +Namespaces

     op
     
    + + + + + + + + + +

    +Macros

    #define OP_PROFILE_INIT(REPS)
     
    #define OP_PROFILE_END(finalTime, factor, REPS)
     
    #define OP_CUDA_PROFILE_INIT(REPS)
     
    #define OP_CUDA_PROFILE_END(finalTime, factor, REPS)
     
    + + + + + + + +

    +Functions

    OP_API std::chrono::time_point< std::chrono::high_resolution_clock > op::getTimerInit ()
     
    OP_API double op::getTimeSeconds (const std::chrono::time_point< std::chrono::high_resolution_clock > &timerInit)
     
    OP_API void op::printTime (const std::chrono::time_point< std::chrono::high_resolution_clock > &timerInit, const std::string &firstMessage, const std::string &secondMessage, const Priority priority)
     
    +

    Macro Definition Documentation

    + +

    ◆ OP_CUDA_PROFILE_END

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    #define OP_CUDA_PROFILE_END( finalTime,
     factor,
     REPS 
    )
    +
    +Value:
    } \
    +
    cudaDeviceSynchronize(); \
    +
    (finalTime) = (factor)/(float)(REPS)*getTimeSeconds(timerInit); \
    +
    cudaCheck(__LINE__, __FUNCTION__, __FILE__); \
    +
    }
    +
    OP_API double getTimeSeconds(const std::chrono::time_point< std::chrono::high_resolution_clock > &timerInit)
    +
    +

    Definition at line 59 of file profiler.hpp.

    + +
    +
    + +

    ◆ OP_CUDA_PROFILE_INIT

    + +
    +
    + + + + + + + + +
    #define OP_CUDA_PROFILE_INIT( REPS)
    +
    +Value:
    { \
    +
    cudaDeviceSynchronize(); \
    +
    const auto timerInit = getTimerInit(); \
    +
    for (auto rep = 0 ; rep < (REPS) ; ++rep) \
    +
    {
    +
    OP_API std::chrono::time_point< std::chrono::high_resolution_clock > getTimerInit()
    +
    +

    Definition at line 51 of file profiler.hpp.

    + +
    +
    + +

    ◆ OP_PROFILE_END

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    #define OP_PROFILE_END( finalTime,
     factor,
     REPS 
    )
    +
    +Value:
    } \
    +
    (finalTime) = (factor)/(float)(REPS)*getTimeSeconds(timerInit); \
    +
    }
    +
    +

    Definition at line 36 of file profiler.hpp.

    + +
    +
    + +

    ◆ OP_PROFILE_INIT

    + +
    +
    + + + + + + + + +
    #define OP_PROFILE_INIT( REPS)
    +
    +Value:
    { \
    +
    const auto timerInit = getTimerInit(); \
    +
    for (auto rep = 0 ; rep < (REPS) ; ++rep) \
    +
    {
    +
    +

    Definition at line 31 of file profiler.hpp.

    + +
    +
    +
    +
    + + + + diff --git a/web/html/doc/profiler_8hpp.js b/web/html/doc/profiler_8hpp.js new file mode 100644 index 000000000..eb0163db1 --- /dev/null +++ b/web/html/doc/profiler_8hpp.js @@ -0,0 +1,11 @@ +var profiler_8hpp = +[ + [ "Profiler", "classop_1_1_profiler.html", null ], + [ "OP_CUDA_PROFILE_END", "profiler_8hpp.html#a774eaef2d2d68028026f52d554a8ba45", null ], + [ "OP_CUDA_PROFILE_INIT", "profiler_8hpp.html#a543c2d65f7d0e835513310d83fc08589", null ], + [ "OP_PROFILE_END", "profiler_8hpp.html#ae1f762d7d0c1f5ad10304ef82bd85516", null ], + [ "OP_PROFILE_INIT", "profiler_8hpp.html#a6211ca30ec696c346d0b3f2c056e05e6", null ], + [ "getTimerInit", "profiler_8hpp.html#ae0e92a0d8867d1b02f1c43ae4c0c9e09", null ], + [ "getTimeSeconds", "profiler_8hpp.html#a01dd208c992c8e07623579f77dcfb59b", null ], + [ "printTime", "profiler_8hpp.html#ab0908bcc0abb00c49ecbe7fc373b58c9", null ] +]; \ No newline at end of file diff --git a/web/html/doc/profiler_8hpp_source.html b/web/html/doc/profiler_8hpp_source.html new file mode 100644 index 000000000..90e1626bc --- /dev/null +++ b/web/html/doc/profiler_8hpp_source.html @@ -0,0 +1,217 @@ + + + + + + + +OpenPose: include/openpose/utilities/profiler.hpp Source File + + + + + + + + + + + + + +
    profiler.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_UTILITIES_PROFILER_HPP
    +
    2 #define OPENPOSE_UTILITIES_PROFILER_HPP
    +
    3 
    +
    4 #include <chrono>
    +
    5 #include <string>
    + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  // The following functions provides basic functions to measure time. Usage example:
    +
    12  // const auto timerInit = getTimerInit();
    +
    13  // // [Some code in here]
    +
    14  // const auto timeSeconds = getTimeSeconds(timerInit);
    +
    15  // const printTime(timeSeconds, "Function X took ", " seconds.");
    +
    16  OP_API std::chrono::time_point<std::chrono::high_resolution_clock> getTimerInit();
    +
    17 
    +
    18  OP_API double getTimeSeconds(const std::chrono::time_point<std::chrono::high_resolution_clock>& timerInit);
    +
    19 
    + +
    21  const std::chrono::time_point<std::chrono::high_resolution_clock>& timerInit, const std::string& firstMessage,
    +
    22  const std::string& secondMessage, const Priority priority);
    +
    23 
    +
    24  // The following functions will run REPS times and average the final time in seconds. Usage example:
    +
    25  // const auto REPS = 1000;
    +
    26  // double time = 0.;
    +
    27  // OP_PROFILE_INIT(REPS);
    +
    28  // // [Some code in here]
    +
    29  // OP_PROFILE_END(time, 1e3, REPS); // Time in msec. 1 = sec, 1e3 = msec, 1e6 = usec, 1e9 = nsec, etc.
    +
    30  // opLog("Function X took " + std::to_string(time) + " milliseconds.");
    +
    31  #define OP_PROFILE_INIT(REPS) \
    +
    32  { \
    +
    33  const auto timerInit = getTimerInit(); \
    +
    34  for (auto rep = 0 ; rep < (REPS) ; ++rep) \
    +
    35  {
    +
    36  #define OP_PROFILE_END(finalTime, factor, REPS) \
    +
    37  } \
    +
    38  (finalTime) = (factor)/(float)(REPS)*getTimeSeconds(timerInit); \
    +
    39  }
    +
    40 
    +
    41  // The following functions will run REPS times, wait for the kernels to finish, and then average the final time
    +
    42  // in seconds. Usage example:
    +
    43  // const auto REPS = 1000;
    +
    44  // double time = 0.;
    +
    45  // OP_CUDA_PROFILE_INIT(REPS);
    +
    46  // // [Some code with CUDA calls in here]
    +
    47  // OP_CUDA_PROFILE_END(time, 1e3, REPS); // Time in msec. 1 = sec, 1e3 = msec, 1e6 = usec, 1e9 = nsec, etc.
    +
    48  // opLog("Function X took " + std::to_string(time) + " milliseconds.");
    +
    49  // Analogous to OP_PROFILE_INIT, but also waits for CUDA kernels to finish their asynchronous operations
    +
    50  // It requires: #include <cuda_runtime.h>
    +
    51  #define OP_CUDA_PROFILE_INIT(REPS) \
    +
    52  { \
    +
    53  cudaDeviceSynchronize(); \
    +
    54  const auto timerInit = getTimerInit(); \
    +
    55  for (auto rep = 0 ; rep < (REPS) ; ++rep) \
    +
    56  {
    +
    57  // Analogous to OP_PROFILE_END, but also waits for CUDA kernels to finish their asynchronous operations
    +
    58  // It requires: #include <cuda_runtime.h>
    +
    59  #define OP_CUDA_PROFILE_END(finalTime, factor, REPS) \
    +
    60  } \
    +
    61  cudaDeviceSynchronize(); \
    +
    62  (finalTime) = (factor)/(float)(REPS)*getTimeSeconds(timerInit); \
    +
    63  cudaCheck(__LINE__, __FUNCTION__, __FILE__); \
    +
    64  }
    +
    65 
    +
    66  // Enable PROFILER_ENABLED on Makefile.config or CMake in order to use this function. Otherwise nothing will be outputted.
    +
    67  // How to use - example:
    +
    68  // For GPU - It can only be applied in the main.cpp file:
    +
    69  // Profiler::profileGpuMemory(__LINE__, __FUNCTION__, __FILE__);
    +
    70  // For time:
    +
    71  // // ... inside continuous loop ...
    +
    72  // const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    73  // // functions to do...
    +
    74  // Profiler::timerEnd(profilerKey);
    +
    75  // Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__, NUMBER_ITERATIONS);
    + +
    77  {
    +
    78  public:
    +
    79  static unsigned long long DEFAULT_X;
    +
    80 
    +
    81  // Non-thread safe, it must be performed at the beginning of the code before any parallelization occurs
    +
    82  static void setDefaultX(const unsigned long long defaultX);
    +
    83 
    +
    84  static const std::string timerInit(const int line, const std::string& function, const std::string& file);
    +
    85 
    +
    86  static void timerEnd(const std::string& key);
    +
    87 
    + +
    89  const std::string& key, const int line, const std::string& function, const std::string& file,
    +
    90  const unsigned long long x = DEFAULT_X);
    +
    91 
    + +
    93  const std::string& key, const int line, const std::string& function, const std::string& file,
    +
    94  const unsigned long long x = DEFAULT_X);
    +
    95 
    +
    96  static void profileGpuMemory(const int line, const std::string& function, const std::string& file);
    +
    97  };
    +
    98 }
    +
    99 
    +
    100 #endif // OPENPOSE_UTILITIES_PROFILER_HPP
    + +
    static void printAveragedTimeMsEveryXIterations(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static unsigned long long DEFAULT_X
    Definition: profiler.hpp:79
    +
    static void setDefaultX(const unsigned long long defaultX)
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static void profileGpuMemory(const int line, const std::string &function, const std::string &file)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    #define OP_API
    Definition: macros.hpp:18
    + +
    OP_API double getTimeSeconds(const std::chrono::time_point< std::chrono::high_resolution_clock > &timerInit)
    +
    OP_API void printTime(const std::chrono::time_point< std::chrono::high_resolution_clock > &timerInit, const std::string &firstMessage, const std::string &secondMessage, const Priority priority)
    +
    Priority
    Definition: enumClasses.hpp:22
    +
    OP_API std::chrono::time_point< std::chrono::high_resolution_clock > getTimerInit()
    + +
    +
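The header's own comments describe the intended macro usage; the sketch below mirrors that example with a dummy workload. `using namespace op;` is needed because the macros expand to unqualified `getTimerInit()`/`getTimeSeconds()` calls.

```cpp
// Sketch mirroring the usage example in the header comments above.
#include <iostream>
#include <openpose/utilities/profiler.hpp>

using namespace op;   // The macros call getTimerInit()/getTimeSeconds() unqualified

void profileDummyWorkload()
{
    const auto REPS = 1000;
    double timeMs = 0.;
    OP_PROFILE_INIT(REPS);
    // [Code to be measured goes here; a tiny dummy loop for the sketch]
    volatile double sink = 0.;
    for (auto i = 0; i < 1000; ++i)
        sink = sink + i * 0.5;
    OP_PROFILE_END(timeMs, 1e3, REPS);   // 1e3 -> average time reported in milliseconds
    std::cout << "Dummy workload took " << timeMs << " ms on average." << std::endl;
}
```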
    + + + + diff --git a/web/html/doc/queue_8hpp.html b/web/html/doc/queue_8hpp.html new file mode 100644 index 000000000..85d3f47d3 --- /dev/null +++ b/web/html/doc/queue_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/queue.hpp File Reference + + + + + + + + + + + + + +
    queue.hpp File Reference
    +
    +
    +
    #include <queue>
    +#include <openpose/core/common.hpp>
    +#include <openpose/thread/queueBase.hpp>
    +#include <type_traits>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::Queue< TDatums, TQueue >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (Queue)
     
    +
    +
    + + + + diff --git a/web/html/doc/queue_8hpp.js b/web/html/doc/queue_8hpp.js new file mode 100644 index 000000000..3779c7ed9 --- /dev/null +++ b/web/html/doc/queue_8hpp.js @@ -0,0 +1,5 @@ +var queue_8hpp = +[ + [ "Queue", "classop_1_1_queue.html", "classop_1_1_queue" ], + [ "COMPILE_TEMPLATE_DATUM", "queue_8hpp.html#aa7f93261bd6d87f86c45e933607a0678", null ] +]; \ No newline at end of file diff --git a/web/html/doc/queue_8hpp_source.html b/web/html/doc/queue_8hpp_source.html new file mode 100644 index 000000000..ad370c979 --- /dev/null +++ b/web/html/doc/queue_8hpp_source.html @@ -0,0 +1,199 @@ + + + + + + + +OpenPose: include/openpose/thread/queue.hpp Source File + + + + + + + + + + + + + +
    queue.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_QUEUE_HPP
    +
    2 #define OPENPOSE_THREAD_QUEUE_HPP
    +
    3 
    +
    4 #include <queue> // std::queue
    + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums, typename TQueue = std::queue<TDatums>>
    +
    11  class Queue : public QueueBase<TDatums, TQueue>
    +
    12  {
    +
    13  public:
    +
    14  explicit Queue(const long long maxSize);
    +
    15 
    +
    16  virtual ~Queue();
    +
    17 
    +
    18  TDatums front() const;
    +
    19 
    +
    20  private:
    +
    21  bool pop(TDatums& tDatums);
    +
    22 
    +
    23  DELETE_COPY(Queue);
    +
    24  };
    +
    25 }
    +
    26 
    +
    27 
    +
    28 
    +
    29 
    +
    30 
    +
    31 // Implementation
    +
    32 #include <type_traits> // std::is_same
    +
    33 namespace op
    +
    34 {
    +
    35  template<typename TDatums, typename TQueue>
    +
    36  Queue<TDatums, TQueue>::Queue(const long long maxSize) :
    +
    37  QueueBase<TDatums, TQueue>{maxSize}
    +
    38  {
    +
    39  // Check TDatums = underlying value type of TQueue
    +
    40  typedef typename TQueue::value_type underlyingValueType;
    +
    41  static_assert(std::is_same<TDatums, underlyingValueType>::value,
    +
    42  "Error: The type of the queue must be the same as the type of the container");
    +
    43  }
    +
    44 
    +
    45  template<typename TDatums, typename TQueue>
    + +
    47  {
    +
    48  }
    +
    49 
    +
    50  template<typename TDatums, typename TQueue>
    + +
    52  {
    +
    53  try
    +
    54  {
    +
    55  const std::lock_guard<std::mutex> lock{this->mMutex};
    +
    56  return this->mTQueue.front();
    +
    57  }
    +
    58  catch (const std::exception& e)
    +
    59  {
    +
    60  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    61  return TDatums{};
    +
    62  }
    +
    63  }
    +
    64 
    +
    65  template<typename TDatums, typename TQueue>
    +
    66  bool Queue<TDatums, TQueue>::pop(TDatums& tDatums)
    +
    67  {
    +
    68  try
    +
    69  {
    +
    70  if (this->mPopIsStopped || this->mTQueue.empty())
    +
    71  return false;
    +
    72 
    +
    73  tDatums = {std::move(this->mTQueue.front())};
    +
    74  this->mTQueue.pop();
    +
    75  this->mConditionVariable.notify_one();
    +
    76  return true;
    +
    77  }
    +
    78  catch (const std::exception& e)
    +
    79  {
    +
    80  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    81  return false;
    +
    82  }
    +
    83  }
    +
    84 
    + +
    86 }
    +
    87 
    +
    88 #endif // OPENPOSE_THREAD_QUEUE_HPP
    + + +
    virtual ~Queue()
    Definition: queue.hpp:46
    +
    TDatums front() const
    Definition: queue.hpp:51
    +
    Queue(const long long maxSize)
    Definition: queue.hpp:36
    + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    + +
    +
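For contrast with `op::PriorityQueue`, a minimal FIFO sketch of `op::Queue` (element type `std::string` and `maxSize` are demo choices, not part of the header):

```cpp
// Minimal sketch (illustrative): op::Queue preserves insertion order, unlike
// op::PriorityQueue shown earlier in this diff.
#include <iostream>
#include <string>
#include <openpose/thread/queue.hpp>

int main()
{
    op::Queue<std::string> queue{/*maxSize*/ 4};
    for (const auto& name : {"first", "second", "third"})
        queue.tryPush(std::string{name});

    std::string element;
    while (queue.tryPop(element))
        std::cout << element << std::endl;   // Prints first, second, third
    return 0;
}
```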
    + + + + diff --git a/web/html/doc/queue_base_8hpp.html b/web/html/doc/queue_base_8hpp.html new file mode 100644 index 000000000..c58f2cb30 --- /dev/null +++ b/web/html/doc/queue_base_8hpp.html @@ -0,0 +1,123 @@ + + + + + + + +OpenPose: include/openpose/thread/queueBase.hpp File Reference + + + + + + + + + + + + + +
    queueBase.hpp File Reference
    +
    +
    +
    #include <condition_variable>
    +#include <mutex>
    +#include <queue>
    +#include <openpose/core/common.hpp>
    +#include <openpose/core/datum.hpp>
    +#include <openpose/utilities/fastMath.hpp>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::QueueBase< TDatums, TQueue >
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/queue_base_8hpp_source.html b/web/html/doc/queue_base_8hpp_source.html new file mode 100644 index 000000000..901ec7ffb --- /dev/null +++ b/web/html/doc/queue_base_8hpp_source.html @@ -0,0 +1,667 @@ + + + + + + + +OpenPose: include/openpose/thread/queueBase.hpp Source File + + + + + + + + + + + + + +
    queueBase.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_QUEUE_BASE_HPP
    +
    2 #define OPENPOSE_THREAD_QUEUE_BASE_HPP
    +
    3 
    +
    4 #include <condition_variable>
    +
    5 #include <mutex>
    +
    6 #include <queue> // std::queue & std::priority_queue
    + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums, typename TQueue>
    +
    12  class QueueBase
    +
    13  {
    +
    14  public:
    +
    15  explicit QueueBase(const long long maxSize = -1);
    +
    16 
    +
    17  virtual ~QueueBase();
    +
    18 
    +
    19  bool forceEmplace(TDatums& tDatums);
    +
    20 
    +
    21  bool tryEmplace(TDatums& tDatums);
    +
    22 
    +
    23  bool waitAndEmplace(TDatums& tDatums);
    +
    24 
    +
    25  bool forcePush(const TDatums& tDatums);
    +
    26 
    +
    27  bool tryPush(const TDatums& tDatums);
    +
    28 
    +
    29  bool waitAndPush(const TDatums& tDatums);
    +
    30 
    +
    31  bool tryPop(TDatums& tDatums);
    +
    32 
    +
    33  bool tryPop();
    +
    34 
    +
    35  bool waitAndPop(TDatums& tDatums);
    +
    36 
    +
    37  bool waitAndPop();
    +
    38 
    +
    39  bool empty() const;
    +
    40 
    +
    41  void stop();
    +
    42 
    +
    43  void stopPusher();
    +
    44 
    +
    45  void addPopper();
    +
    46 
    +
    47  void addPusher();
    +
    48 
    +
    49  bool isRunning() const;
    +
    50 
    +
    51  bool isFull() const;
    +
    52 
    +
    53  size_t size() const;
    +
    54 
    +
    55  void clear();
    +
    56 
    +
    57  virtual TDatums front() const = 0;
    +
    58 
    +
    59  protected:
    +
    60  mutable std::mutex mMutex;
    +
    61  long long mPoppers;
    +
    62  long long mPushers;
    +
    63  long long mMaxPoppersPushers;
    + + +
    66  std::condition_variable mConditionVariable;
    +
    67  TQueue mTQueue;
    +
    68 
    +
    69  virtual bool pop(TDatums& tDatums) = 0;
    +
    70 
    +
    71  unsigned long long getMaxSize() const;
    +
    72 
    +
    73  private:
    +
    74  const long long mMaxSize;
    +
    75 
    +
    76  bool emplace(TDatums& tDatums);
    +
    77 
    +
    78  bool push(const TDatums& tDatums);
    +
    79 
    +
    80  bool pop();
    +
    81 
    +
    82  void updateMaxPoppersPushers();
    +
    83 
    +
    84  DELETE_COPY(QueueBase);
    +
    85  };
    +
    86 }
    +
    87 
    +
    88 
    +
    89 
    +
    90 
    +
    91 
    +
    92 // Implementation
    +
    93 #include <openpose/core/datum.hpp>
    + +
    95 namespace op
    +
    96 {
    +
    97  template<typename TDatums, typename TQueue>
    +
    98  QueueBase<TDatums, TQueue>::QueueBase(const long long maxSize) :
    +
    99  mPoppers{0ll},
    +
    100  mPushers{0ll},
    +
    101  mPopIsStopped{false},
    +
    102  mPushIsStopped{false},
    +
    103  mMaxSize{maxSize}
    +
    104  {
    +
    105  }
    +
    106 
    +
    107  // Virtual destructor
    +
    108  template<typename TDatums, typename TQueue>
    + +
    110  {
    +
    111  try
    +
    112  {
    +
    113  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    114  stop();
    +
    115  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    116  }
    +
    117  catch (const std::exception& e)
    +
    118  {
    +
    119  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    120  }
    +
    121  }
    +
    122 
    +
    123  template<typename TDatums, typename TQueue>
    + +
    125  {
    +
    126  try
    +
    127  {
    +
    128  const std::lock_guard<std::mutex> lock{mMutex};
    +
    129  if (mTQueue.size() >= getMaxSize())
    +
    130  mTQueue.pop();
    +
    131  return emplace(tDatums);
    +
    132  }
    +
    133  catch (const std::exception& e)
    +
    134  {
    +
    135  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    136  return false;
    +
    137  }
    +
    138  }
    +
    139 
    +
    140  template<typename TDatums, typename TQueue>
    + +
    142  {
    +
    143  try
    +
    144  {
    +
    145  const std::lock_guard<std::mutex> lock{mMutex};
    +
    146  if (mTQueue.size() >= getMaxSize())
    +
    147  return false;
    +
    148  return emplace(tDatums);
    +
    149  }
    +
    150  catch (const std::exception& e)
    +
    151  {
    +
    152  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    153  return false;
    +
    154  }
    +
    155  }
    +
    156 
    +
    157  template<typename TDatums, typename TQueue>
    + +
    159  {
    +
    160  try
    +
    161  {
    +
    162  std::unique_lock<std::mutex> lock{mMutex};
    +
    163  mConditionVariable.wait(lock, [this]{return mTQueue.size() < getMaxSize() || mPushIsStopped; });
    +
    164  return emplace(tDatums);
    +
    165  }
    +
    166  catch (const std::exception& e)
    +
    167  {
    +
    168  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    169  return false;
    +
    170  }
    +
    171  }
    +
    172 
    +
    173  template<typename TDatums, typename TQueue>
    +
    174  bool QueueBase<TDatums, TQueue>::forcePush(const TDatums& tDatums)
    +
    175  {
    +
    176  try
    +
    177  {
    +
    178  const std::lock_guard<std::mutex> lock{mMutex};
    +
    179  if (mTQueue.size() >= getMaxSize())
    +
    180  mTQueue.pop();
    +
    181  return push(tDatums);
    +
    182  }
    +
    183  catch (const std::exception& e)
    +
    184  {
    +
    185  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    186  return false;
    +
    187  }
    +
    188  }
    +
    189 
    +
    190  template<typename TDatums, typename TQueue>
    +
    191  bool QueueBase<TDatums, TQueue>::tryPush(const TDatums& tDatums)
    +
    192  {
    +
    193  try
    +
    194  {
    +
    195  const std::lock_guard<std::mutex> lock{mMutex};
    +
    196  if (mTQueue.size() >= getMaxSize())
    +
    197  return false;
    +
    198  return push(tDatums);
    +
    199  }
    +
    200  catch (const std::exception& e)
    +
    201  {
    +
    202  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    203  return false;
    +
    204  }
    +
    205  }
    +
    206 
    +
    207  template<typename TDatums, typename TQueue>
    +
    208  bool QueueBase<TDatums, TQueue>::waitAndPush(const TDatums& tDatums)
    +
    209  {
    +
    210  try
    +
    211  {
    +
    212  std::unique_lock<std::mutex> lock{mMutex};
    +
    213  mConditionVariable.wait(lock, [this]{return mTQueue.size() < getMaxSize() || mPushIsStopped; });
    +
    214  return push(tDatums);
    +
    215  }
    +
    216  catch (const std::exception& e)
    +
    217  {
    +
    218  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    219  return false;
    +
    220  }
    +
    221  }
    +
    222 
    +
    223  template<typename TDatums, typename TQueue>
    +
    224  bool QueueBase<TDatums, TQueue>::tryPop(TDatums& tDatums)
    +
    225  {
    +
    226  try
    +
    227  {
    +
    228  const std::lock_guard<std::mutex> lock{mMutex};
    +
    229  return pop(tDatums);
    +
    230  }
    +
    231  catch (const std::exception& e)
    +
    232  {
    +
    233  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    234  return false;
    +
    235  }
    +
    236  }
    +
    237 
    +
    238  template<typename TDatums, typename TQueue>
    + +
    240  {
    +
    241  try
    +
    242  {
    +
    243  const std::lock_guard<std::mutex> lock{mMutex};
    +
    244  return pop();
    +
    245  }
    +
    246  catch (const std::exception& e)
    +
    247  {
    +
    248  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    249  return false;
    +
    250  }
    +
    251  }
    +
    252 
    +
    253  template<typename TDatums, typename TQueue>
    + +
    255  {
    +
    256  try
    +
    257  {
    +
    258  std::unique_lock<std::mutex> lock{mMutex};
    +
    259  mConditionVariable.wait(lock, [this]{return !mTQueue.empty() || mPopIsStopped; });
    +
    260  return pop(tDatums);
    +
    261  }
    +
    262  catch (const std::exception& e)
    +
    263  {
    +
    264  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    265  return false;
    +
    266  }
    +
    267  }
    +
    268 
    +
    269  template<typename TDatums, typename TQueue>
    + +
    271  {
    +
    272  try
    +
    273  {
    +
    274  std::unique_lock<std::mutex> lock{mMutex};
    +
    275  mConditionVariable.wait(lock, [this]{return !mTQueue.empty() || mPopIsStopped; });
    +
    276  return pop();
    +
    277  }
    +
    278  catch (const std::exception& e)
    +
    279  {
    +
    280  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    281  return false;
    +
    282  }
    +
    283  }
    +
    284 
    +
    285  template<typename TDatums, typename TQueue>
    + +
    287  {
    +
    288  try
    +
    289  {
    +
    290  const std::lock_guard<std::mutex> lock{mMutex};
    +
    291  return mTQueue.empty();
    +
    292  }
    +
    293  catch (const std::exception& e)
    +
    294  {
    +
    295  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    296  return false;
    +
    297  }
    +
    298  }
    +
    299 
    +
    300  template<typename TDatums, typename TQueue>
    + +
    302  {
    +
    303  try
    +
    304  {
    +
    305  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    306  const std::lock_guard<std::mutex> lock{mMutex};
    +
    307  mPopIsStopped = {true};
    +
    308  mPushIsStopped = {true};
    +
    309  while (!mTQueue.empty())
    +
    310  mTQueue.pop();
    +
    311  mConditionVariable.notify_all();
    +
    312  }
    +
    313  catch (const std::exception& e)
    +
    314  {
    +
    315  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    316  }
    +
    317  }
    +
    318 
    +
    319  template<typename TDatums, typename TQueue>
    + +
    321  {
    +
    322  try
    +
    323  {
    +
    324  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    325  const std::lock_guard<std::mutex> lock{mMutex};
    +
    326  mPushers--;
    +
    327  if (mPushers == 0)
    +
    328  {
    +
    329  mPushIsStopped = {true};
    +
    330  if (mTQueue.empty())
    +
    331  mPopIsStopped = {true};
    +
    332  mConditionVariable.notify_all();
    +
    333  }
    +
    334  }
    +
    335  catch (const std::exception& e)
    +
    336  {
    +
    337  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    338  }
    +
    339  }
    +
    340 
    +
    341  template<typename TDatums, typename TQueue>
    + +
    343  {
    +
    344  try
    +
    345  {
    +
    346  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    347  const std::lock_guard<std::mutex> lock{mMutex};
    +
    348  mPoppers++;
    +
    349  updateMaxPoppersPushers();
    +
    350  }
    +
    351  catch (const std::exception& e)
    +
    352  {
    +
    353  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    354  }
    +
    355  }
    +
    356 
    +
    357  template<typename TDatums, typename TQueue>
    + +
    359  {
    +
    360  try
    +
    361  {
    +
    362  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    363  const std::lock_guard<std::mutex> lock{mMutex};
    +
    364  mPushers++;
    +
    365  updateMaxPoppersPushers();
    +
    366  }
    +
    367  catch (const std::exception& e)
    +
    368  {
    +
    369  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    370  }
    +
    371  }
    +
    372 
    +
    373  template<typename TDatums, typename TQueue>
    + +
    375  {
    +
    376  try
    +
    377  {
    +
    378  const std::lock_guard<std::mutex> lock{mMutex};
    +
    379  return !(mPushIsStopped && (mPopIsStopped || mTQueue.empty()));
    +
    380  }
    +
    381  catch (const std::exception& e)
    +
    382  {
    +
    383  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    384  return true;
    +
    385  }
    +
    386  }
    +
    387 
    +
    388  template<typename TDatums, typename TQueue>
    + +
    390  {
    +
    391  try
    +
    392  {
    +
    393  // No mutex required because the size() and getMaxSize() are already thread-safe
    +
    394  return size() == getMaxSize();
    +
    395  }
    +
    396  catch (const std::exception& e)
    +
    397  {
    +
    398  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    399  return false;
    +
    400  }
    +
    401  }
    +
    402 
    +
    403  template<typename TDatums, typename TQueue>
    + +
    405  {
    +
    406  try
    +
    407  {
    +
    408  const std::lock_guard<std::mutex> lock{mMutex};
    +
    409  return mTQueue.size();
    +
    410  }
    +
    411  catch (const std::exception& e)
    +
    412  {
    +
    413  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    414  return 0;
    +
    415  }
    +
    416  }
    +
    417 
    +
    418  template<typename TDatums, typename TQueue>
    + +
    420  {
    +
    421  try
    +
    422  {
    +
    423  const std::lock_guard<std::mutex> lock{mMutex};
    +
    424  while (!mTQueue.empty())
    +
    425  mTQueue.pop();
    +
    426  }
    +
    427  catch (const std::exception& e)
    +
    428  {
    +
    429  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    430  }
    +
    431  }
    +
    432 
    +
    433  template<typename TDatums, typename TQueue>
    +
    434  unsigned long long QueueBase<TDatums, TQueue>::getMaxSize() const
    +
    435  {
    +
    436  try
    +
    437  {
    +
    438  return (mMaxSize > 0 ? mMaxSize : fastMax(1ll, mMaxPoppersPushers));
    +
    439  }
    +
    440  catch (const std::exception& e)
    +
    441  {
    +
    442  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    443  return false;
    +
    444  }
    +
    445  }
    +
    446 
    +
    447  template<typename TDatums, typename TQueue>
    +
    448  bool QueueBase<TDatums, TQueue>::emplace(TDatums& tDatums)
    +
    449  {
    +
    450  try
    +
    451  {
    +
    452  if (mPushIsStopped)
    +
    453  return false;
    +
    454 
    +
    455  mTQueue.emplace(tDatums);
    +
    456  mConditionVariable.notify_all();
    +
    457  return true;
    +
    458  }
    +
    459  catch (const std::exception& e)
    +
    460  {
    +
    461  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    462  return false;
    +
    463  }
    +
    464  }
    +
    465 
    +
    466  template<typename TDatums, typename TQueue>
    +
    467  bool QueueBase<TDatums, TQueue>::push(const TDatums& tDatums)
    +
    468  {
    +
    469  try
    +
    470  {
    +
    471  if (mPushIsStopped)
    +
    472  return false;
    +
    473 
    +
    474  mTQueue.push(tDatums);
    +
    475  mConditionVariable.notify_all();
    +
    476  return true;
    +
    477  }
    +
    478  catch (const std::exception& e)
    +
    479  {
    +
    480  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    481  return false;
    +
    482  }
    +
    483  }
    +
    484 
    +
    485  template<typename TDatums, typename TQueue>
    +
    486  bool QueueBase<TDatums, TQueue>::pop()
    +
    487  {
    +
    488  try
    +
    489  {
    +
    490  if (mPopIsStopped || mTQueue.empty())
    +
    491  return false;
    +
    492 
    +
    493  mTQueue.pop();
    +
    494  mConditionVariable.notify_all();
    +
    495  return true;
    +
    496  }
    +
    497  catch (const std::exception& e)
    +
    498  {
    +
    499  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    500  return false;
    +
    501  }
    +
    502  }
    +
    503 
    +
    504  template<typename TDatums, typename TQueue>
    +
    505  void QueueBase<TDatums, TQueue>::updateMaxPoppersPushers()
    +
    506  {
    +
    507  try
    +
    508  {
    +
    509  mMaxPoppersPushers = fastMax(mPoppers, mPushers);
    +
    510  }
    +
    511  catch (const std::exception& e)
    +
    512  {
    +
    513  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    514  }
    +
    515  }
    +
    516 
    +
    517  extern template class QueueBase<BASE_DATUMS_SH, std::queue<BASE_DATUMS_SH>>;
    +
    518  extern template class QueueBase<
    + +
    520  std::priority_queue<BASE_DATUMS_SH, std::vector<BASE_DATUMS_SH>,
    +
    521  std::greater<BASE_DATUMS_SH>>>;
    +
    522 }
    +
    523 
    +
    524 #endif // OPENPOSE_THREAD_QUEUE_BASE_HPP
    + +
    long long mPoppers
    Definition: queueBase.hpp:61
    +
    bool isFull() const
    Definition: queueBase.hpp:389
    +
    void addPusher()
    Definition: queueBase.hpp:358
    +
    long long mMaxPoppersPushers
    Definition: queueBase.hpp:63
    +
    std::mutex mMutex
    Definition: queueBase.hpp:60
    + +
    bool waitAndPop(TDatums &tDatums)
    Definition: queueBase.hpp:254
    +
    void stopPusher()
    Definition: queueBase.hpp:320
    +
    bool tryPush(const TDatums &tDatums)
    Definition: queueBase.hpp:191
    +
    TQueue mTQueue
    Definition: queueBase.hpp:67
    +
    virtual bool pop(TDatums &tDatums)=0
    + + +
    bool empty() const
    Definition: queueBase.hpp:286
    +
    bool mPopIsStopped
    Definition: queueBase.hpp:64
    +
    bool tryEmplace(TDatums &tDatums)
    Definition: queueBase.hpp:141
    +
    unsigned long long getMaxSize() const
    Definition: queueBase.hpp:434
    +
    long long mPushers
    Definition: queueBase.hpp:62
    +
    bool tryPop(TDatums &tDatums)
    Definition: queueBase.hpp:224
    +
    bool waitAndPop()
    Definition: queueBase.hpp:270
    +
    bool waitAndEmplace(TDatums &tDatums)
    Definition: queueBase.hpp:158
    +
    std::condition_variable mConditionVariable
    Definition: queueBase.hpp:66
    +
    bool forceEmplace(TDatums &tDatums)
    Definition: queueBase.hpp:124
    +
    size_t size() const
    Definition: queueBase.hpp:404
    +
    bool isRunning() const
    Definition: queueBase.hpp:374
    +
    virtual TDatums front() const =0
    +
    bool waitAndPush(const TDatums &tDatums)
    Definition: queueBase.hpp:208
    +
    bool forcePush(const TDatums &tDatums)
    Definition: queueBase.hpp:174
    +
    void addPopper()
    Definition: queueBase.hpp:342
    +
    QueueBase(const long long maxSize=-1)
    Definition: queueBase.hpp:98
    +
    virtual ~QueueBase()
    Definition: queueBase.hpp:109
    +
    bool mPushIsStopped
    Definition: queueBase.hpp:65
    + + +
    #define BASE_DATUMS_SH
    Definition: datum.hpp:405
    + + +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    T fastMax(const T a, const T b)
    Definition: fastMath.hpp:73
    +
    OP_API void opLog(const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    + +
    +
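A two-thread sketch of the blocking `waitAndPush`/`waitAndPop` calls that `op::QueueBase` provides, exercised here through `op::Queue`; the element type, item count, and `maxSize` are illustrative assumptions rather than anything prescribed by the API.

```cpp
// Two-thread sketch: a producer thread feeds an op::Queue (derived from op::QueueBase)
// while the main thread blocks on waitAndPop. Counts and sizes are demo choices only.
#include <iostream>
#include <thread>
#include <openpose/thread/queue.hpp>

int main()
{
    op::Queue<int> queue{/*maxSize*/ 2};   // Small capacity so waitAndPush also blocks
    queue.addPopper();                     // Register the consumer (main thread)
    const auto numberItems = 5;

    std::thread producerThread{[&]()
    {
        queue.addPusher();                 // Register the producer thread
        for (auto i = 0; i < numberItems; ++i)
            queue.waitAndPush(i);          // Blocks while the queue is full
        queue.stopPusher();                // Signal that no more items will arrive
    }};

    for (auto i = 0; i < numberItems; ++i)
    {
        auto element = -1;
        queue.waitAndPop(element);         // Blocks until an element is available
        std::cout << element << std::endl; // Prints 0..4 in FIFO order
    }

    producerThread.join();
    return 0;
}
```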
    + + + + diff --git a/web/html/doc/rectangle_8hpp.html b/web/html/doc/rectangle_8hpp.html new file mode 100644 index 000000000..e942e949b --- /dev/null +++ b/web/html/doc/rectangle_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/core/rectangle.hpp File Reference + + + + + + + + + + + + + +
    rectangle.hpp File Reference
    +
    +
    +
    #include <string>
    +#include <openpose/core/macros.hpp>
    +#include <openpose/core/point.hpp>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    struct  op::Rectangle< T >
     
    + + + +

    +Namespaces

     op
     
    + + + + +

    +Functions

    template<typename T >
    Rectangle< T > op::recenter (const Rectangle< T > &rectangle, const T newWidth, const T newHeight)
     
    +
    +
    + + + + diff --git a/web/html/doc/rectangle_8hpp.js b/web/html/doc/rectangle_8hpp.js new file mode 100644 index 000000000..e68066ab6 --- /dev/null +++ b/web/html/doc/rectangle_8hpp.js @@ -0,0 +1,5 @@ +var rectangle_8hpp = +[ + [ "Rectangle", "structop_1_1_rectangle.html", "structop_1_1_rectangle" ], + [ "recenter", "rectangle_8hpp.html#a7cd131c9ddd8f3987508e89e0881b9e0", null ] +]; \ No newline at end of file diff --git a/web/html/doc/rectangle_8hpp_source.html b/web/html/doc/rectangle_8hpp_source.html new file mode 100644 index 000000000..2f2b9b505 --- /dev/null +++ b/web/html/doc/rectangle_8hpp_source.html @@ -0,0 +1,188 @@ + + + + + + + +OpenPose: include/openpose/core/rectangle.hpp Source File + + + + + + + + + + + + + +
    rectangle.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_RECTANGLE_HPP
    +
    2 #define OPENPOSE_CORE_RECTANGLE_HPP
    +
    3 
    +
    4 #include <string>
    + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename T>
    +
    11  struct Rectangle
    +
    12  {
    +
    13  T x;
    +
    14  T y;
    +
    15  T width;
    +
    16  T height;
    +
    17 
    +
    18  Rectangle(const T x = 0, const T y = 0, const T width = 0, const T height = 0);
    +
    19 
    +
    28  Rectangle<T>(const Rectangle<T>& rectangle);
    +
    29 
    +
    36  Rectangle<T>& operator=(const Rectangle<T>& rectangle);
    +
    37 
    +
    43  Rectangle<T>(Rectangle<T>&& rectangle);
    +
    44 
    + +
    52 
    +
    53  Point<T> center() const;
    +
    54 
    +
    55  inline Point<T> topLeft() const
    +
    56  {
    +
    57  return Point<T>{x, y};
    +
    58  }
    +
    59 
    + +
    61 
    +
    62  inline T area() const
    +
    63  {
    +
    64  return width * height;
    +
    65  }
    +
    66 
    +
    67  void recenter(const T newWidth, const T newHeight);
    +
    68 
    +
    74  std::string toString() const;
    +
    75 
    +
    76  // ------------------------------ Basic Operators ------------------------------ //
    +
    77  Rectangle<T>& operator*=(const T value);
    +
    78 
    +
    79  Rectangle<T> operator*(const T value) const;
    +
    80 
    +
    81  Rectangle<T>& operator/=(const T value);
    +
    82 
    +
    83  Rectangle<T> operator/(const T value) const;
    +
    84  };
    +
    85 
    +
    86  // Static methods
    +
    87  template<typename T>
    +
    88  Rectangle<T> recenter(const Rectangle<T>& rectangle, const T newWidth, const T newHeight);
    +
    89 
    + +
    91 }
    +
    92 
    +
    93 #endif // OPENPOSE_CORE_RECTANGLE_HPP
    + +
    #define OVERLOAD_C_OUT(className)
    Definition: macros.hpp:77
    + +
    Rectangle< T > recenter(const Rectangle< T > &rectangle, const T newWidth, const T newHeight)
    + + + +
    Rectangle(const T x=0, const T y=0, const T width=0, const T height=0)
    +
    Point< T > center() const
    + +
    void recenter(const T newWidth, const T newHeight)
    +
    Rectangle< T > & operator*=(const T value)
    +
    T area() const
    Definition: rectangle.hpp:62
    + +
    Point< T > topLeft() const
    Definition: rectangle.hpp:55
    + +
    Rectangle< T > & operator/=(const T value)
    +
    Rectangle< T > operator*(const T value) const
    +
    Point< T > bottomRight() const
    +
    Rectangle< T > & operator=(const Rectangle< T > &rectangle)
    +
    Rectangle< T > & operator=(Rectangle< T > &&rectangle)
    + +
    Rectangle< T > operator/(const T value) const
    +
    std::string toString() const
    +
    +
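The members documented above make op::Rectangle<T> a small value type. A minimal usage sketch follows; the numeric values are illustrative, and the assumption that recenter() keeps the rectangle's center while changing its size is taken from the name, not from the sources:

    #include <iostream>
    #include <openpose/core/rectangle.hpp>

    int main()
    {
        // x, y, width, height (illustrative values)
        op::Rectangle<float> roi(10.f, 20.f, 100.f, 50.f);
        std::cout << roi.toString() << ", area = " << roi.area() << std::endl;

        // Free-function form returns a resized copy; the member recenter() resizes in place.
        const auto resized = op::recenter(roi, 80.f, 80.f);
        std::cout << resized.topLeft().x << ", " << resized.topLeft().y << std::endl;
        return 0;
    }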
    + + + + diff --git a/web/html/doc/render_face_8hpp.html b/web/html/doc/render_face_8hpp.html new file mode 100644 index 000000000..b06c312c9 --- /dev/null +++ b/web/html/doc/render_face_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/face/renderFace.hpp File Reference + + + + + + + + + + + + + +
    renderFace.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + +

    +Functions

    OP_API void op::renderFaceKeypointsCpu (Array< float > &frameArray, const Array< float > &faceKeypoints, const float renderThreshold)
     
    void op::renderFaceKeypointsGpu (float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const Point< unsigned int > &frameSize, const float *const facePtr, const int numberPeople, const float renderThreshold, const float alphaColorToAdd=FACE_DEFAULT_ALPHA_KEYPOINT)
     
    +
    +
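A hedged usage sketch for the CPU renderer declared above; frameArray and faceKeypoints are assumed to be produced upstream (e.g. by OpenPose's output conversion and face extraction stages), and the threshold is an illustrative value:

    #include <openpose/face/renderFace.hpp>

    // Draws the face keypoints whose confidence exceeds renderThreshold onto the frame.
    void drawFaces(op::Array<float>& frameArray, const op::Array<float>& faceKeypoints)
    {
        const float renderThreshold = 0.4f; // illustrative confidence cut-off
        op::renderFaceKeypointsCpu(frameArray, faceKeypoints, renderThreshold);
    }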
    + + + + diff --git a/web/html/doc/render_face_8hpp.js b/web/html/doc/render_face_8hpp.js new file mode 100644 index 000000000..74ec17e8a --- /dev/null +++ b/web/html/doc/render_face_8hpp.js @@ -0,0 +1,5 @@ +var render_face_8hpp = +[ + [ "renderFaceKeypointsCpu", "render_face_8hpp.html#a5fc85e8500dbeda3b75c1b6ecfac91cd", null ], + [ "renderFaceKeypointsGpu", "render_face_8hpp.html#ab8b2748a5bcf823e59b66549e6a24cfe", null ] +]; \ No newline at end of file diff --git a/web/html/doc/render_face_8hpp_source.html b/web/html/doc/render_face_8hpp_source.html new file mode 100644 index 000000000..945e75939 --- /dev/null +++ b/web/html/doc/render_face_8hpp_source.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: include/openpose/face/renderFace.hpp Source File + + + + + + + + + + + + + +
    renderFace.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FACE_RENDER_FACE_HPP
    +
    2 #define OPENPOSE_FACE_RENDER_FACE_HPP
    +
    3 
    + + +
    6 
    +
    7 namespace op
    +
    8 {
    + +
    10  Array<float>& frameArray, const Array<float>& faceKeypoints, const float renderThreshold);
    +
    11 
    + +
    13  float* framePtr, float* maxPtr, float* minPtr, float* scalePtr, const Point<unsigned int>& frameSize,
    +
    14  const float* const facePtr, const int numberPeople, const float renderThreshold,
    +
    15  const float alphaColorToAdd = FACE_DEFAULT_ALPHA_KEYPOINT);
    +
    16 }
    +
    17 
    +
    18 #endif // OPENPOSE_FACE_RENDER_FACE_HPP
    + + + +
    #define OP_API
    Definition: macros.hpp:18
    + +
    const auto FACE_DEFAULT_ALPHA_KEYPOINT
    +
    OP_API void renderFaceKeypointsCpu(Array< float > &frameArray, const Array< float > &faceKeypoints, const float renderThreshold)
    +
    void renderFaceKeypointsGpu(float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const Point< unsigned int > &frameSize, const float *const facePtr, const int numberPeople, const float renderThreshold, const float alphaColorToAdd=FACE_DEFAULT_ALPHA_KEYPOINT)
    + +
    +
    + + + + diff --git a/web/html/doc/render_hand_8hpp.html b/web/html/doc/render_hand_8hpp.html new file mode 100644 index 000000000..7c2a03dbb --- /dev/null +++ b/web/html/doc/render_hand_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/hand/renderHand.hpp File Reference + + + + + + + + + + + + + +
    renderHand.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + +

    +Functions

    OP_API void op::renderHandKeypointsCpu (Array< float > &frameArray, const std::array< Array< float >, 2 > &handKeypoints, const float renderThreshold)
     
    void op::renderHandKeypointsGpu (float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const Point< unsigned int > &frameSize, const float *const handsPtr, const int numberHands, const float renderThreshold, const float alphaColorToAdd=HAND_DEFAULT_ALPHA_KEYPOINT)
     
    +
    +
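A hedged sketch of the CPU entry point above; the two-element array holds the per-hand keypoint sets, the inputs are assumed to come from OpenPose's hand extraction stage, and the threshold is illustrative:

    #include <array>
    #include <openpose/hand/renderHand.hpp>

    // Draws both hands' keypoints onto the frame.
    void drawHands(op::Array<float>& frameArray,
                   const std::array<op::Array<float>, 2>& handKeypoints)
    {
        const float renderThreshold = 0.2f; // illustrative confidence cut-off
        op::renderHandKeypointsCpu(frameArray, handKeypoints, renderThreshold);
    }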
    + + + + diff --git a/web/html/doc/render_hand_8hpp.js b/web/html/doc/render_hand_8hpp.js new file mode 100644 index 000000000..7438a2233 --- /dev/null +++ b/web/html/doc/render_hand_8hpp.js @@ -0,0 +1,5 @@ +var render_hand_8hpp = +[ + [ "renderHandKeypointsCpu", "render_hand_8hpp.html#afb5b711819f94b51f32460861d9cea38", null ], + [ "renderHandKeypointsGpu", "render_hand_8hpp.html#a865db81a5bc4f81cf9fc7c7f3ce81be3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/render_hand_8hpp_source.html b/web/html/doc/render_hand_8hpp_source.html new file mode 100644 index 000000000..460e9dab0 --- /dev/null +++ b/web/html/doc/render_hand_8hpp_source.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: include/openpose/hand/renderHand.hpp Source File + + + + + + + + + + + + + +
    renderHand.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_HAND_GPU_HAND_RENDER_HPP
    +
    2 #define OPENPOSE_HAND_GPU_HAND_RENDER_HPP
    +
    3 
    + + +
    6 
    +
    7 namespace op
    +
    8 {
    + +
    10  Array<float>& frameArray, const std::array<Array<float>, 2>& handKeypoints, const float renderThreshold);
    +
    11 
    + +
    13  float* framePtr, float* maxPtr, float* minPtr, float* scalePtr, const Point<unsigned int>& frameSize,
    +
    14  const float* const handsPtr, const int numberHands, const float renderThreshold,
    +
    15  const float alphaColorToAdd = HAND_DEFAULT_ALPHA_KEYPOINT);
    +
    16 }
    +
    17 
    +
    18 #endif // OPENPOSE_HAND_GPU_HAND_RENDER_HPP
    + + + +
    #define OP_API
    Definition: macros.hpp:18
    + +
    void renderHandKeypointsGpu(float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const Point< unsigned int > &frameSize, const float *const handsPtr, const int numberHands, const float renderThreshold, const float alphaColorToAdd=HAND_DEFAULT_ALPHA_KEYPOINT)
    +
    const auto HAND_DEFAULT_ALPHA_KEYPOINT
    +
    OP_API void renderHandKeypointsCpu(Array< float > &frameArray, const std::array< Array< float >, 2 > &handKeypoints, const float renderThreshold)
    + +
    +
    + + + + diff --git a/web/html/doc/render_pose_8hpp.html b/web/html/doc/render_pose_8hpp.html new file mode 100644 index 000000000..f885e955e --- /dev/null +++ b/web/html/doc/render_pose_8hpp.html @@ -0,0 +1,132 @@ + + + + + + + +OpenPose: include/openpose/pose/renderPose.hpp File Reference + + + + + + + + + + + + + +
    renderPose.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + + + + + + + + + + + +

    +Functions

    OP_API void op::renderPoseKeypointsCpu (Array< float > &frameArray, const Array< float > &poseKeypoints, const PoseModel poseModel, const float renderThreshold, const bool blendOriginalFrame=true)
     
    void op::renderPoseKeypointsGpu (float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const PoseModel poseModel, const int numberPeople, const Point< unsigned int > &frameSize, const float *const posePtr, const float renderThreshold, const bool googlyEyes=false, const bool blendOriginalFrame=true, const float alphaBlending=POSE_DEFAULT_ALPHA_KEYPOINT)
     
    void op::renderPoseHeatMapGpu (float *frame, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void op::renderPoseHeatMapsGpu (float *frame, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void op::renderPosePAFGpu (float *framePtr, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void op::renderPosePAFsGpu (float *framePtr, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    void op::renderPoseDistanceGpu (float *framePtr, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
     
    +
    +
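A hedged sketch of the CPU keypoint renderer above (the GPU variants take raw device pointers and are normally driven internally by the wrapper); op::PoseModel::BODY_25 and the threshold are illustrative choices, not values taken from this header:

    #include <openpose/pose/renderPose.hpp>

    // Overlays body keypoints on the frame; blendOriginalFrame keeps the original image visible.
    void drawPose(op::Array<float>& frameArray, const op::Array<float>& poseKeypoints)
    {
        op::renderPoseKeypointsCpu(
            frameArray, poseKeypoints, op::PoseModel::BODY_25,
            0.05f /* illustrative render threshold */, true /* blendOriginalFrame */);
    }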
    + + + + diff --git a/web/html/doc/render_pose_8hpp.js b/web/html/doc/render_pose_8hpp.js new file mode 100644 index 000000000..54a1b6ad9 --- /dev/null +++ b/web/html/doc/render_pose_8hpp.js @@ -0,0 +1,10 @@ +var render_pose_8hpp = +[ + [ "renderPoseDistanceGpu", "render_pose_8hpp.html#a056c64afca17423e038590e4ef2f712b", null ], + [ "renderPoseHeatMapGpu", "render_pose_8hpp.html#a3ceb3476e4154a6e9e06b3613a12c040", null ], + [ "renderPoseHeatMapsGpu", "render_pose_8hpp.html#aa1225091307f8d0bf07dd032389f8961", null ], + [ "renderPoseKeypointsCpu", "render_pose_8hpp.html#a99a08148f440bd96546076e15f0de04c", null ], + [ "renderPoseKeypointsGpu", "render_pose_8hpp.html#ad0069d4c6204b35893f4158d04d615f1", null ], + [ "renderPosePAFGpu", "render_pose_8hpp.html#a9275c58ba881ea94e054117392a67381", null ], + [ "renderPosePAFsGpu", "render_pose_8hpp.html#a3ba62b3d5cc275fc1700bf0c5e6bf578", null ] +]; \ No newline at end of file diff --git a/web/html/doc/render_pose_8hpp_source.html b/web/html/doc/render_pose_8hpp_source.html new file mode 100644 index 000000000..25cc7859b --- /dev/null +++ b/web/html/doc/render_pose_8hpp_source.html @@ -0,0 +1,163 @@ + + + + + + + +OpenPose: include/openpose/pose/renderPose.hpp Source File + + + + + + + + + + + + + +
    renderPose.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_POSE_RENDER_POSE_HPP
    +
    2 #define OPENPOSE_POSE_RENDER_POSE_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    + +
    11  Array<float>& frameArray, const Array<float>& poseKeypoints, const PoseModel poseModel,
    +
    12  const float renderThreshold, const bool blendOriginalFrame = true);
    +
    13 
    + +
    15  float* framePtr, float* maxPtr, float* minPtr, float* scalePtr, const PoseModel poseModel,
    +
    16  const int numberPeople, const Point<unsigned int>& frameSize, const float* const posePtr,
    +
    17  const float renderThreshold, const bool googlyEyes = false, const bool blendOriginalFrame = true,
    +
    18  const float alphaBlending = POSE_DEFAULT_ALPHA_KEYPOINT);
    +
    19 
    + +
    21  float* frame, const Point<unsigned int>& frameSize, const float* const heatMapPtr, const Point<int>& heatMapSize,
    +
    22  const float scaleToKeepRatio, const unsigned int part,
    +
    23  const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP);
    +
    24 
    + +
    26  float* frame, const PoseModel poseModel, const Point<unsigned int>& frameSize, const float* const heatMapPtr,
    +
    27  const Point<int>& heatMapSize, const float scaleToKeepRatio,
    +
    28  const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP);
    +
    29 
    + +
    31  float* framePtr, const PoseModel poseModel, const Point<unsigned int>& frameSize, const float* const heatMapPtr,
    +
    32  const Point<int>& heatMapSize, const float scaleToKeepRatio, const int part,
    +
    33  const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP);
    +
    34 
    + +
    36  float* framePtr, const PoseModel poseModel, const Point<unsigned int>& frameSize, const float* const heatMapPtr,
    +
    37  const Point<int>& heatMapSize, const float scaleToKeepRatio,
    +
    38  const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP);
    +
    39 
    + +
    41  float* framePtr, const Point<unsigned int>& frameSize, const float* const heatMapPtr, const Point<int>& heatMapSize,
    +
    42  const float scaleToKeepRatio, const unsigned int part, const float alphaBlending = POSE_DEFAULT_ALPHA_HEAT_MAP);
    +
    43 }
    +
    44 
    +
    45 #endif // OPENPOSE_POSE_RENDER_POSE_HPP
    + + +
    #define OP_API
    Definition: macros.hpp:18
    + +
    void renderPoseDistanceGpu(float *framePtr, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
    +
    const auto POSE_DEFAULT_ALPHA_KEYPOINT
    +
    void renderPosePAFsGpu(float *framePtr, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
    +
    void renderPoseHeatMapGpu(float *frame, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const unsigned int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
    +
    void renderPosePAFGpu(float *framePtr, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const int part, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
    +
    OP_API void renderPoseKeypointsCpu(Array< float > &frameArray, const Array< float > &poseKeypoints, const PoseModel poseModel, const float renderThreshold, const bool blendOriginalFrame=true)
    +
    void renderPoseHeatMapsGpu(float *frame, const PoseModel poseModel, const Point< unsigned int > &frameSize, const float *const heatMapPtr, const Point< int > &heatMapSize, const float scaleToKeepRatio, const float alphaBlending=POSE_DEFAULT_ALPHA_HEAT_MAP)
    +
    void renderPoseKeypointsGpu(float *framePtr, float *maxPtr, float *minPtr, float *scalePtr, const PoseModel poseModel, const int numberPeople, const Point< unsigned int > &frameSize, const float *const posePtr, const float renderThreshold, const bool googlyEyes=false, const bool blendOriginalFrame=true, const float alphaBlending=POSE_DEFAULT_ALPHA_KEYPOINT)
    +
    const auto POSE_DEFAULT_ALPHA_HEAT_MAP
    +
    PoseModel
    Definition: enumClasses.hpp:10
    + + + +
    +
    + + + + diff --git a/web/html/doc/renderer_8hpp.html b/web/html/doc/renderer_8hpp.html new file mode 100644 index 000000000..e2df155c3 --- /dev/null +++ b/web/html/doc/renderer_8hpp.html @@ -0,0 +1,120 @@ + + + + + + + +OpenPose: include/openpose/core/renderer.hpp File Reference + + + + + + + + + + + + + +
    renderer.hpp File Reference
    +
    +
    +
#include <atomic>
#include <openpose/core/common.hpp>
#include <openpose/core/enumClasses.hpp>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::Renderer
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/renderer_8hpp_source.html b/web/html/doc/renderer_8hpp_source.html new file mode 100644 index 000000000..aae50fb4b --- /dev/null +++ b/web/html/doc/renderer_8hpp_source.html @@ -0,0 +1,182 @@ + + + + + + + +OpenPose: include/openpose/core/renderer.hpp Source File + + + + + + + + + + + + + +
    renderer.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_RENDERER_HPP
    +
    2 #define OPENPOSE_CORE_RENDERER_HPP
    +
    3 
    +
    4 #include <atomic>
    + + +
    7 
    +
    8 namespace op
    +
    9 {
    + +
    11  {
    +
    12  public:
    +
    13  explicit Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap,
    +
    14  const bool blendOriginalFrame = true, const unsigned int elementToRender = 0u,
    +
    15  const unsigned int numberElementsToRender = 0u);
    +
    16 
    +
    17  virtual ~Renderer();
    +
    18 
    +
    19  void increaseElementToRender(const int increment);
    +
    20 
    +
    21  void setElementToRender(const int elementToRender);
    +
    22 
    +
    23  void setElementToRender(const ElementToRender elementToRender);
    +
    24 
    +
    25  bool getBlendOriginalFrame() const;
    +
    26 
    +
    27  void setBlendOriginalFrame(const bool blendOriginalFrame);
    +
    28 
    +
    29  float getAlphaKeypoint() const;
    +
    30 
    +
    31  void setAlphaKeypoint(const float alphaKeypoint);
    +
    32 
    +
    33  float getAlphaHeatMap() const;
    +
    34 
    +
    35  void setAlphaHeatMap(const float alphaHeatMap);
    +
    36 
    +
    37  bool getShowGooglyEyes() const;
    +
    38 
    +
    39  void setShowGooglyEyes(const bool showGooglyEyes);
    +
    40 
    +
    41  protected:
    +
    42  const float mRenderThreshold;
    +
    43  std::atomic<bool> mBlendOriginalFrame;
    +
    44  std::shared_ptr<std::atomic<unsigned int>> spElementToRender;
    +
    45  std::shared_ptr<const unsigned int> spNumberElementsToRender;
    +
    46  std::atomic<bool> mShowGooglyEyes;
    +
    47 
    +
    48  private:
    +
    49  float mAlphaKeypoint;
    +
    50  float mAlphaHeatMap;
    +
    51 
    + +
    53  };
    +
    54 }
    +
    55 
    +
    56 #endif // OPENPOSE_CORE_RENDERER_HPP
    + +
    Renderer(const float renderThreshold, const float alphaKeypoint, const float alphaHeatMap, const bool blendOriginalFrame=true, const unsigned int elementToRender=0u, const unsigned int numberElementsToRender=0u)
    +
    void setAlphaKeypoint(const float alphaKeypoint)
    +
    void increaseElementToRender(const int increment)
    +
    float getAlphaKeypoint() const
    +
    std::shared_ptr< const unsigned int > spNumberElementsToRender
    Definition: renderer.hpp:45
    +
    bool getShowGooglyEyes() const
    +
    std::atomic< bool > mBlendOriginalFrame
    Definition: renderer.hpp:43
    +
    void setElementToRender(const ElementToRender elementToRender)
    +
    void setBlendOriginalFrame(const bool blendOriginalFrame)
    +
    void setShowGooglyEyes(const bool showGooglyEyes)
    +
    float getAlphaHeatMap() const
    +
    void setAlphaHeatMap(const float alphaHeatMap)
    +
    virtual ~Renderer()
    +
    std::shared_ptr< std::atomic< unsigned int > > spElementToRender
    Definition: renderer.hpp:44
    +
    std::atomic< bool > mShowGooglyEyes
    Definition: renderer.hpp:46
    +
    bool getBlendOriginalFrame() const
    +
    const float mRenderThreshold
    Definition: renderer.hpp:42
    +
    void setElementToRender(const int elementToRender)
    + + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + +
    ElementToRender
    Definition: enumClasses.hpp:35
    +
    +
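A hedged sketch of how the option setters above are used; a concrete renderer derived from op::Renderer (e.g. a pose, face or hand renderer) is assumed to be passed in, and the alpha values are illustrative:

    #include <openpose/core/renderer.hpp>

    void configureRenderer(op::Renderer& renderer)
    {
        renderer.setBlendOriginalFrame(true); // draw keypoints on top of the input frame
        renderer.setAlphaKeypoint(0.6f);      // illustrative keypoint opacity
        renderer.setAlphaHeatMap(0.7f);       // illustrative heat-map opacity
        renderer.increaseElementToRender(1);  // presumably steps to the next renderable element
    }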
    + + + + diff --git a/web/html/doc/resize.js b/web/html/doc/resize.js new file mode 100644 index 000000000..e1ad0fe3b --- /dev/null +++ b/web/html/doc/resize.js @@ -0,0 +1,140 @@ +/* + @licstart The following is the entire license notice for the JavaScript code in this file. + + The MIT License (MIT) + + Copyright (C) 1997-2020 by Dimitri van Heesch + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software + and associated documentation files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, publish, distribute, + sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or + substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + @licend The above is the entire license notice for the JavaScript code in this file + */ +function initResizable() +{ + var cookie_namespace = 'doxygen'; + var sidenav,navtree,content,header,collapsed,collapsedWidth=0,barWidth=6,desktop_vp=768,titleHeight; + + function readCookie(cookie) + { + var myCookie = cookie_namespace+"_"+cookie+"="; + if (document.cookie) { + var index = document.cookie.indexOf(myCookie); + if (index != -1) { + var valStart = index + myCookie.length; + var valEnd = document.cookie.indexOf(";", valStart); + if (valEnd == -1) { + valEnd = document.cookie.length; + } + var val = document.cookie.substring(valStart, valEnd); + return val; + } + } + return 0; + } + + function writeCookie(cookie, val, expiration) + { + if (val==undefined) return; + if (expiration == null) { + var date = new Date(); + date.setTime(date.getTime()+(10*365*24*60*60*1000)); // default expiration is one week + expiration = date.toGMTString(); + } + document.cookie = cookie_namespace + "_" + cookie + "=" + val + "; expires=" + expiration+"; path=/"; + } + + function resizeWidth() + { + var windowWidth = $(window).width() + "px"; + var sidenavWidth = $(sidenav).outerWidth(); + content.css({marginLeft:parseInt(sidenavWidth)+"px"}); + writeCookie('width',sidenavWidth-barWidth, null); + } + + function restoreWidth(navWidth) + { + var windowWidth = $(window).width() + "px"; + content.css({marginLeft:parseInt(navWidth)+barWidth+"px"}); + sidenav.css({width:navWidth + "px"}); + } + + function resizeHeight() + { + var headerHeight = header.outerHeight(); + var footerHeight = footer.outerHeight(); + var windowHeight = $(window).height() - headerHeight - footerHeight; + content.css({height:windowHeight + "px"}); + navtree.css({height:windowHeight + "px"}); + sidenav.css({height:windowHeight + "px"}); + var width=$(window).width(); + if (width!=collapsedWidth) { + if (width=desktop_vp) { + if (!collapsed) { + collapseExpand(); + } + } else if (width>desktop_vp && collapsedWidth0) { + restoreWidth(0); + collapsed=true; + } + else { + var width = readCookie('width'); + if (width>200 && width<$(window).width()) { 
restoreWidth(width); } else { restoreWidth(200); } + collapsed=false; + } + } + + header = $("#top"); + sidenav = $("#side-nav"); + content = $("#doc-content"); + navtree = $("#nav-tree"); + footer = $("#nav-path"); + $(".side-nav-resizable").resizable({resize: function(e, ui) { resizeWidth(); } }); + $(sidenav).resizable({ minWidth: 0 }); + $(window).resize(function() { resizeHeight(); }); + var device = navigator.userAgent.toLowerCase(); + var touch_device = device.match(/(iphone|ipod|ipad|android)/); + if (touch_device) { /* wider split bar for touch only devices */ + $(sidenav).css({ paddingRight:'20px' }); + $('.ui-resizable-e').css({ width:'20px' }); + $('#nav-sync').css({ right:'34px' }); + barWidth=20; + } + var width = readCookie('width'); + if (width) { restoreWidth(width); } else { resizeWidth(); } + resizeHeight(); + var url = location.href; + var i=url.indexOf("#"); + if (i>=0) window.location.hash=url.substr(i); + var _preventDefault = function(evt) { evt.preventDefault(); }; + $("#splitbar").bind("dragstart", _preventDefault).bind("selectstart", _preventDefault); + $(".ui-resizable-handle").dblclick(collapseExpand); + $(window).on('load',resizeHeight); +} +/* @license-end */ diff --git a/web/html/doc/resize_and_merge_base_8hpp.html b/web/html/doc/resize_and_merge_base_8hpp.html new file mode 100644 index 000000000..29c4149d2 --- /dev/null +++ b/web/html/doc/resize_and_merge_base_8hpp.html @@ -0,0 +1,131 @@ + + + + + + + +OpenPose: include/openpose/net/resizeAndMergeBase.hpp File Reference + + + + + + + + + + + + + +
    resizeAndMergeBase.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + + + + + + + + + + + + +

    +Functions

    template<typename T >
    void op::resizeAndMergeCpu (T *targetPtr, const std::vector< const T * > &sourcePtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f})
     
    template<typename T >
    void op::resizeAndMergeGpu (T *targetPtr, const std::vector< const T * > &sourcePtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f})
     
    template<typename T >
    void op::resizeAndMergeOcl (T *targetPtr, const std::vector< const T * > &sourcePtrs, std::vector< T * > &sourceTempPtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f}, const int gpuID=0)
     
    template<typename T >
    void op::resizeAndPadRbgGpu (T *targetPtr, const T *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)
     
    template<typename T >
    void op::resizeAndPadRbgGpu (T *targetPtr, const unsigned char *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)
     
    +
    +
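A hedged sketch of the CPU path above, upscaling a single heat map; the {num, channels, height, width} reading of the 4-int size arrays is an assumption (consistent with the Caffe-style blobs used elsewhere in the library), and all sizes are illustrative:

    #include <array>
    #include <vector>
    #include <openpose/net/resizeAndMergeBase.hpp>

    int main()
    {
        const std::array<int, 4> sourceSize{1, 1, 46, 46};   // assumed {num, channels, height, width}
        const std::array<int, 4> targetSize{1, 1, 368, 368};
        const std::vector<float> source(46 * 46, 0.f);
        std::vector<float> target(368 * 368, 0.f);

        const std::vector<const float*> sourcePtrs{source.data()};
        const std::vector<std::array<int, 4>> sourceSizes{sourceSize};

        // Resizes the source map into the target buffer; with several sources it would also
        // merge them, which is what the function's name suggests.
        op::resizeAndMergeCpu(target.data(), sourcePtrs, targetSize, sourceSizes);
        return 0;
    }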
    + + + + diff --git a/web/html/doc/resize_and_merge_base_8hpp.js b/web/html/doc/resize_and_merge_base_8hpp.js new file mode 100644 index 000000000..007b0b1ad --- /dev/null +++ b/web/html/doc/resize_and_merge_base_8hpp.js @@ -0,0 +1,8 @@ +var resize_and_merge_base_8hpp = +[ + [ "resizeAndMergeCpu", "resize_and_merge_base_8hpp.html#adb8ffc1a6a2cc2949d80d8e8ad4e2190", null ], + [ "resizeAndMergeGpu", "resize_and_merge_base_8hpp.html#a8982332c4263696d0e023997f0e4c753", null ], + [ "resizeAndMergeOcl", "resize_and_merge_base_8hpp.html#a97b053019720782f2f81bc1b41f036d6", null ], + [ "resizeAndPadRbgGpu", "resize_and_merge_base_8hpp.html#ad5495d8c6a65afbedef3af7a8844bfcc", null ], + [ "resizeAndPadRbgGpu", "resize_and_merge_base_8hpp.html#a2f1ef915c8efc724c0bf40f0348f20a2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/resize_and_merge_base_8hpp_source.html b/web/html/doc/resize_and_merge_base_8hpp_source.html new file mode 100644 index 000000000..d159b80c5 --- /dev/null +++ b/web/html/doc/resize_and_merge_base_8hpp_source.html @@ -0,0 +1,144 @@ + + + + + + + +OpenPose: include/openpose/net/resizeAndMergeBase.hpp Source File + + + + + + + + + + + + + +
    resizeAndMergeBase.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_NET_RESIZE_AND_MERGE_BASE_HPP
    +
    2 #define OPENPOSE_NET_RESIZE_AND_MERGE_BASE_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
    +
    8  template <typename T>
    + +
    10  T* targetPtr, const std::vector<const T*>& sourcePtrs, const std::array<int, 4>& targetSize,
    +
    11  const std::vector<std::array<int, 4>>& sourceSizes, const std::vector<T>& scaleInputToNetInputs = {1.f});
    +
    12 
    +
    13  // Windows: Cuda functions do not include OP_API
    +
    14  template <typename T>
    + +
    16  T* targetPtr, const std::vector<const T*>& sourcePtrs, const std::array<int, 4>& targetSize,
    +
    17  const std::vector<std::array<int, 4>>& sourceSizes, const std::vector<T>& scaleInputToNetInputs = {1.f});
    +
    18 
    +
    19  // Windows: OpenCL functions do not include OP_API
    +
    20  template <typename T>
    + +
    22  T* targetPtr, const std::vector<const T*>& sourcePtrs, std::vector<T*>& sourceTempPtrs,
    +
    23  const std::array<int, 4>& targetSize, const std::vector<std::array<int, 4>>& sourceSizes,
    +
    24  const std::vector<T>& scaleInputToNetInputs = {1.f}, const int gpuID = 0);
    +
    25 
    +
    26  // Functions for cvMatToOpInput/cvMatToOpOutput
    +
    27  template <typename T>
    + +
    29  T* targetPtr, const T* const srcPtr, const int sourceWidth, const int sourceHeight,
    +
    30  const int targetWidth, const int targetHeight, const T scaleFactor);
    +
    31 
    +
    32  template <typename T>
    + +
    34  T* targetPtr, const unsigned char* const srcPtr, const int sourceWidth, const int sourceHeight,
    +
    35  const int targetWidth, const int targetHeight, const T scaleFactor);
    +
    36 }
    +
    37 #endif // OPENPOSE_NET_RESIZE_AND_MERGE_BASE_HPP
    + + +
    void resizeAndMergeGpu(T *targetPtr, const std::vector< const T * > &sourcePtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f})
    +
    void resizeAndMergeOcl(T *targetPtr, const std::vector< const T * > &sourcePtrs, std::vector< T * > &sourceTempPtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f}, const int gpuID=0)
    +
    void resizeAndPadRbgGpu(T *targetPtr, const T *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)
    +
    void resizeAndMergeCpu(T *targetPtr, const std::vector< const T * > &sourcePtrs, const std::array< int, 4 > &targetSize, const std::vector< std::array< int, 4 >> &sourceSizes, const std::vector< T > &scaleInputToNetInputs={1.f})
    +
    +
    + + + + diff --git a/web/html/doc/resize_and_merge_caffe_8hpp.html b/web/html/doc/resize_and_merge_caffe_8hpp.html new file mode 100644 index 000000000..37cf52e5c --- /dev/null +++ b/web/html/doc/resize_and_merge_caffe_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/net/resizeAndMergeCaffe.hpp File Reference + + + + + + + + + + + + + +
    resizeAndMergeCaffe.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::ResizeAndMergeCaffe< T >
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/resize_and_merge_caffe_8hpp_source.html b/web/html/doc/resize_and_merge_caffe_8hpp_source.html new file mode 100644 index 000000000..62af52537 --- /dev/null +++ b/web/html/doc/resize_and_merge_caffe_8hpp_source.html @@ -0,0 +1,171 @@ + + + + + + + +OpenPose: include/openpose/net/resizeAndMergeCaffe.hpp Source File + + + + + + + + + + + + + +
    resizeAndMergeCaffe.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_NET_RESIZE_AND_MERGE_CAFFE_HPP
    +
    2 #define OPENPOSE_NET_RESIZE_AND_MERGE_CAFFE_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
    +
    8  // It mostly follows the Caffe::layer implementation, so Caffe users can easily use it. However, in order to keep
    +
    9  // the compatibility with any generic Caffe version, we keep this 'layer' inside our library rather than in the
    +
    10  // Caffe code.
    +
    11  template <typename T>
    + +
    13  {
    +
    14  public:
    +
    15  explicit ResizeAndMergeCaffe();
    +
    16 
    + +
    18 
    +
    19  virtual void LayerSetUp(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
    +
    20 
    +
    21  virtual void Reshape(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top,
    +
    22  const T netFactor, const T scaleFactor, const bool mergeFirstDimension = true,
    +
    23  const int gpuID = 0);
    +
    24 
    +
    25  virtual inline const char* type() const { return "ResizeAndMerge"; }
    +
    26 
    +
    27  void setScaleRatios(const std::vector<T>& scaleRatios);
    +
    28 
    +
    29  virtual void Forward(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
    +
    30 
    +
    31  virtual void Forward_cpu(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
    +
    32 
    +
    33  virtual void Forward_gpu(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
    +
    34 
    +
    35  virtual void Forward_ocl(const std::vector<ArrayCpuGpu<T>*>& bottom, const std::vector<ArrayCpuGpu<T>*>& top);
    +
    36 
    +
    37  virtual void Backward_cpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
    +
    38  const std::vector<ArrayCpuGpu<T>*>& bottom);
    +
    39 
    +
    40  virtual void Backward_gpu(const std::vector<ArrayCpuGpu<T>*>& top, const std::vector<bool>& propagate_down,
    +
    41  const std::vector<ArrayCpuGpu<T>*>& bottom);
    +
    42 
    +
    43  private:
    +
    44  std::vector<T*> mTempGPUData;
    +
    45  std::vector<T> mScaleRatios;
    +
    46  std::vector<std::array<int, 4>> mBottomSizes;
    +
    47  std::array<int, 4> mTopSize;
    +
    48  int mGpuID;
    +
    49 
    +
    50  DELETE_COPY(ResizeAndMergeCaffe);
    +
    51  };
    +
    52 }
    +
    53 
    +
    54 #endif // OPENPOSE_NET_RESIZE_AND_MERGE_CAFFE_HPP
    + + +
    virtual void Forward_gpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
    +
    virtual void Forward(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
    + +
    virtual void Backward_cpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
    +
    virtual ~ResizeAndMergeCaffe()
    +
    virtual void Forward_cpu(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
    +
    virtual const char * type() const
    +
    void setScaleRatios(const std::vector< T > &scaleRatios)
    +
    virtual void Forward_ocl(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
    +
    virtual void Reshape(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top, const T netFactor, const T scaleFactor, const bool mergeFirstDimension=true, const int gpuID=0)
    +
    virtual void Backward_gpu(const std::vector< ArrayCpuGpu< T > * > &top, const std::vector< bool > &propagate_down, const std::vector< ArrayCpuGpu< T > * > &bottom)
    +
    virtual void LayerSetUp(const std::vector< ArrayCpuGpu< T > * > &bottom, const std::vector< ArrayCpuGpu< T > * > &top)
    + + +
    +
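A hedged sketch of the call order this Caffe-style layer appears to expect, based only on the declarations above: Reshape() with the resize factors, optionally setScaleRatios(), then Forward(). The bottom/top vectors are assumed to wrap the network output and the merged heat-map blob, and every numeric value is a placeholder:

    #include <vector>
    #include <openpose/net/resizeAndMergeCaffe.hpp>

    void runResizeAndMerge(const std::vector<op::ArrayCpuGpu<float>*>& bottom,
                           const std::vector<op::ArrayCpuGpu<float>*>& top)
    {
        op::ResizeAndMergeCaffe<float> resizeAndMerge;
        resizeAndMerge.Reshape(bottom, top,
                               /*netFactor*/ 8.f, /*scaleFactor*/ 1.f,
                               /*mergeFirstDimension*/ true, /*gpuID*/ 0);
        resizeAndMerge.setScaleRatios({1.f}); // one ratio per input scale (placeholder)
        resizeAndMerge.Forward(bottom, top);  // mirrors Caffe's layer Forward() interface
    }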
    + + + + diff --git a/web/html/doc/scale_and_size_extractor_8hpp.html b/web/html/doc/scale_and_size_extractor_8hpp.html new file mode 100644 index 000000000..afcdc59ab --- /dev/null +++ b/web/html/doc/scale_and_size_extractor_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/core/scaleAndSizeExtractor.hpp File Reference + + + + + + + + + + + + + +
    scaleAndSizeExtractor.hpp File Reference
    +
    +
    +
#include <tuple>
#include <openpose/core/common.hpp>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::ScaleAndSizeExtractor
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/scale_and_size_extractor_8hpp_source.html b/web/html/doc/scale_and_size_extractor_8hpp_source.html new file mode 100644 index 000000000..784006908 --- /dev/null +++ b/web/html/doc/scale_and_size_extractor_8hpp_source.html @@ -0,0 +1,138 @@ + + + + + + + +OpenPose: include/openpose/core/scaleAndSizeExtractor.hpp Source File + + + + + + + + + + + + + +
    scaleAndSizeExtractor.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_SCALE_AND_SIZE_EXTRACTOR_HPP
    +
    2 #define OPENPOSE_CORE_SCALE_AND_SIZE_EXTRACTOR_HPP
    +
    3 
    +
    4 #include <tuple>
    + +
    6 
    +
    7 namespace op
    +
    8 {
    + +
    10  {
    +
    11  public:
    +
    12  ScaleAndSizeExtractor(const Point<int>& netInputResolution, const float netInputResolutionDynamicBehavior,
    +
    13  const Point<int>& outputResolution, const int scaleNumber = 1, const double scaleGap = 0.25);
    +
    14 
    + +
    16 
    +
    17  std::tuple<std::vector<double>, std::vector<Point<int>>, double, Point<int>> extract(
    +
    18  const Point<int>& inputResolution) const;
    +
    19 
    +
    20  private:
    +
    21  const Point<int> mNetInputResolution;
    +
    22  const float mNetInputResolutionDynamicBehavior;
    +
    23  const Point<int> mOutputSize;
    +
    24  const int mScaleNumber;
    +
    25  const double mScaleGap;
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 #endif // OPENPOSE_CORE_SCALE_AND_SIZE_EXTRACTOR_HPP
    + +
    ScaleAndSizeExtractor(const Point< int > &netInputResolution, const float netInputResolutionDynamicBehavior, const Point< int > &outputResolution, const int scaleNumber=1, const double scaleGap=0.25)
    + +
    std::tuple< std::vector< double >, std::vector< Point< int > >, double, Point< int > > extract(const Point< int > &inputResolution) const
    + +
    #define OP_API
    Definition: macros.hpp:18
    + + +
    +
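A hedged sketch of the extractor above; the resolutions and scale settings are placeholders, and the names chosen for the returned tuple elements are guesses based on the surrounding API rather than names from the sources:

    #include <tuple>
    #include <vector>
    #include <openpose/core/scaleAndSizeExtractor.hpp>

    int main()
    {
        const op::ScaleAndSizeExtractor extractor(
            op::Point<int>{656, 368},   // netInputResolution (placeholder)
            1.f,                        // netInputResolutionDynamicBehavior
            op::Point<int>{1280, 720},  // outputResolution (placeholder)
            1,                          // scaleNumber
            0.25);                      // scaleGap

        std::vector<double> scaleInputToNetInputs;
        std::vector<op::Point<int>> netInputSizes;
        double scaleInputToOutput;
        op::Point<int> outputResolution;
        std::tie(scaleInputToNetInputs, netInputSizes, scaleInputToOutput, outputResolution)
            = extractor.extract(op::Point<int>{1920, 1080}); // placeholder input resolution
        return 0;
    }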
    + + + + diff --git a/web/html/doc/search/all_0.html b/web/html/doc/search/all_0.html new file mode 100644 index 000000000..1ec5b2d59 --- /dev/null +++ b/web/html/doc/search/all_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_0.js b/web/html/doc/search/all_0.js new file mode 100644 index 000000000..a64f8d157 --- /dev/null +++ b/web/html/doc/search/all_0.js @@ -0,0 +1,14 @@ +var searchData= +[ + ['00_5findex_2emd_0',['00_index.md',['../00__index_8md.html',1,'']]], + ['01_5fdemo_2emd_1',['01_demo.md',['../01__demo_8md.html',1,'']]], + ['02_5foutput_2emd_2',['02_output.md',['../02__output_8md.html',1,'']]], + ['03_5fpython_5fapi_2emd_3',['03_python_api.md',['../03__python__api_8md.html',1,'']]], + ['04_5fcpp_5fapi_2emd_4',['04_cpp_api.md',['../04__cpp__api_8md.html',1,'']]], + ['05_5ffaq_2emd_5',['05_faq.md',['../05__faq_8md.html',1,'']]], + ['06_5fmaximizing_5fopenpose_5fspeed_2emd_6',['06_maximizing_openpose_speed.md',['../06__maximizing__openpose__speed_8md.html',1,'']]], + ['07_5fmajor_5freleased_5ffeatures_2emd_7',['07_major_released_features.md',['../07__major__released__features_8md.html',1,'']]], + ['08_5frelease_5fnotes_2emd_8',['08_release_notes.md',['../08__release__notes_8md.html',1,'']]], + ['09_5fauthors_5fand_5fcontributors_2emd_9',['09_authors_and_contributors.md',['../09__authors__and__contributors_8md.html',1,'']]], + ['0_5findex_2emd_10',['0_index.md',['../installation_20__index_8md.html',1,'(Global Namespace)'],['../very__advanced_2library__structure_20__index_8md.html',1,'(Global Namespace)']]] +]; diff --git a/web/html/doc/search/all_1.html b/web/html/doc/search/all_1.html new file mode 100644 index 000000000..9f80e9043 --- /dev/null +++ b/web/html/doc/search/all_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_1.js b/web/html/doc/search/all_1.js new file mode 100644 index 000000000..a201b4a8c --- /dev/null +++ b/web/html/doc/search/all_1.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['10_5fcommunity_5fprojects_2emd_11',['10_community_projects.md',['../10__community__projects_8md.html',1,'']]], + ['1_5flibrary_5fdeep_5foverview_2emd_12',['1_library_deep_overview.md',['../1__library__deep__overview_8md.html',1,'']]], + ['1_5fprerequisites_2emd_13',['1_prerequisites.md',['../1__prerequisites_8md.html',1,'']]] +]; diff --git a/web/html/doc/search/all_10.html b/web/html/doc/search/all_10.html new file mode 100644 index 000000000..3bf11961f --- /dev/null +++ b/web/html/doc/search/all_10.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_10.js b/web/html/doc/search/all_10.js new file mode 100644 index 000000000..73bd0537b --- /dev/null +++ b/web/html/doc/search/all_10.js @@ -0,0 +1,51 @@ +var searchData= +[ + ['macros_2ehpp_529',['macros.hpp',['../macros_8hpp.html',1,'']]], + ['makedirectory_530',['makeDirectory',['../namespaceop.html#acc650faa23df88ca16a09a2d2a522960',1,'op']]], + ['matrix_531',['Matrix',['../classop_1_1_matrix.html#adbdc98003dd0f666c845ac2acf592bd8',1,'op::Matrix::Matrix()'],['../classop_1_1_matrix.html#af9dc44c30ec3ae5f8e7ba8f76516985a',1,'op::Matrix::Matrix(const void *cvMatPtr)'],['../classop_1_1_matrix.html#a770bbf0242b96b2e746d7f1e30dbf8fc',1,'op::Matrix::Matrix(const int rows, const int cols, const int type)'],['../classop_1_1_matrix.html#a53786b5c97e1cded5accbcb3cd6b992d',1,'op::Matrix::Matrix(const int rows, const int cols, const int type, void *cvMatPtr)'],['../classop_1_1_matrix.html',1,'op::Matrix']]], + ['matrix_2ehpp_532',['matrix.hpp',['../matrix_8hpp.html',1,'']]], + ['max_533',['Max',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6a061313d22e51e0f25b7cd4dc065233',1,'op']]], + ['maximizepositives_534',['maximizePositives',['../structop_1_1_wrapper_struct_pose.html#a84edcbf2237d579adc88badaa17c9795',1,'op::WrapperStructPose']]], + ['maximumbase_2ehpp_535',['maximumBase.hpp',['../maximum_base_8hpp.html',1,'']]], + ['maximumcaffe_536',['MaximumCaffe',['../classop_1_1_maximum_caffe.html#a66bd0aa863a97647ae6350d1f886ea51',1,'op::MaximumCaffe::MaximumCaffe()'],['../classop_1_1_maximum_caffe.html',1,'op::MaximumCaffe< T >']]], + ['maximumcaffe_2ehpp_537',['maximumCaffe.hpp',['../maximum_caffe_8hpp.html',1,'']]], + ['maximumcpu_538',['maximumCpu',['../namespaceop.html#ae0fea41041a70ae8449a77f46ffe8100',1,'op']]], + ['maximumgpu_539',['maximumGpu',['../namespaceop.html#a8ec109805adf02f9872a6af37d602caa',1,'op']]], + ['mblendoriginalframe_540',['mBlendOriginalFrame',['../classop_1_1_renderer.html#a88449a7c29a48e157cd6b16089825be7',1,'op::Renderer']]], + ['mconditionvariable_541',['mConditionVariable',['../classop_1_1_queue_base.html#a8b5e59161a0b175d12955b552a90a47f',1,'op::QueueBase']]], + ['mdisplaymode_542',['mDisplayMode',['../classop_1_1_gui.html#a5b95cbfa7cd4018977f4eb1fc095823b',1,'op::Gui']]], + ['mdisplaymodeoriginal_543',['mDisplayModeOriginal',['../classop_1_1_gui.html#a94cfbf759e88467bfcab18fcd2c987f2',1,'op::Gui']]], + ['menabled_544',['mEnabled',['../classop_1_1_hand_extractor_net.html#a2ee9d38650ed3138fa74fae2cad4bd77',1,'op::HandExtractorNet::mEnabled()'],['../classop_1_1_face_extractor_net.html#a637f9c4c19e110be435cd05052248f86',1,'op::FaceExtractorNet::mEnabled()']]], + ['mergevectors_545',['mergeVectors',['../namespaceop.html#aa3a3e2acfb27ecbd187d01c8dcd41899',1,'op']]], + ['mfaceimagecrop_546',['mFaceImageCrop',['../classop_1_1_face_extractor_net.html#ae18226cef1478a929df9061c7d699c6f',1,'op::FaceExtractorNet']]], + ['mfacekeypoints_547',['mFaceKeypoints',['../classop_1_1_face_extractor_net.html#a5d3437e6a4a0fd834232b0afaab95a8a',1,'op::FaceExtractorNet']]], + ['mhandimagecrop_548',['mHandImageCrop',['../classop_1_1_hand_extractor_net.html#a0981f4dfd15ce4a13de9d166cad9e1d4',1,'op::HandExtractorNet']]], + ['mhandkeypoints_549',['mHandKeypoints',['../classop_1_1_hand_extractor_net.html#a7f97a5b842d20d3d37d3469418faac7b',1,'op::HandExtractorNet']]], + 
['mheatmaps_550',['mHeatMaps',['../classop_1_1_face_extractor_net.html#a43bd29f8c1fc0dbef051bd574df2deca',1,'op::FaceExtractorNet::mHeatMaps()'],['../classop_1_1_hand_extractor_net.html#a5c4174ed2c09ff7c15edfc5d971f4aef',1,'op::HandExtractorNet::mHeatMaps()']]], + ['mheatmapscalemode_551',['mHeatMapScaleMode',['../classop_1_1_face_extractor_net.html#aa3f6566e8b857262f57e18a88c90b9be',1,'op::FaceExtractorNet::mHeatMapScaleMode()'],['../classop_1_1_hand_extractor_net.html#af03c8872258c644086bda26a3aaf95b5',1,'op::HandExtractorNet::mHeatMapScaleMode()']]], + ['mheatmaptypes_552',['mHeatMapTypes',['../classop_1_1_hand_extractor_net.html#aaf0386c8c15a37cf79e9f3f4b1ced2e8',1,'op::HandExtractorNet::mHeatMapTypes()'],['../classop_1_1_face_extractor_net.html#a3bf177dbf1a3effbe6b15545e6102d6e',1,'op::FaceExtractorNet::mHeatMapTypes()']]], + ['minviews3d_553',['minViews3d',['../structop_1_1_wrapper_struct_extra.html#ae8a3562b010c4fa31e6a9722947301c6',1,'op::WrapperStructExtra']]], + ['mmaxpopperspushers_554',['mMaxPoppersPushers',['../classop_1_1_queue_base.html#a1d55f40e032cd5d43d63ba02040b3117',1,'op::QueueBase']]], + ['mmultiscalenumberandrange_555',['mMultiScaleNumberAndRange',['../classop_1_1_hand_extractor_net.html#a270f22a05dbae6d156d79f0386cfbf4b',1,'op::HandExtractorNet']]], + ['mmutex_556',['mMutex',['../classop_1_1_queue_base.html#a22c5e2964e9d9c18a9f02b8d2e0f30b4',1,'op::QueueBase']]], + ['mnetoutputsize_557',['mNetOutputSize',['../classop_1_1_face_extractor_net.html#acf72945f62375b6ac8939c463a616f4a',1,'op::FaceExtractorNet::mNetOutputSize()'],['../classop_1_1_hand_extractor_net.html#ac5e36cd33696a684a4447acccec28fdd',1,'op::HandExtractorNet::mNetOutputSize()'],['../classop_1_1_pose_extractor_net.html#aab49f9af9f5d7e4e64957dc0feb60ca7',1,'op::PoseExtractorNet::mNetOutputSize()']]], + ['modelfolder_558',['modelFolder',['../structop_1_1_wrapper_struct_pose.html#a0f00648621ca97fde61287be23671523',1,'op::WrapperStructPose']]], + ['mpartindextoname_559',['mPartIndexToName',['../classop_1_1_pose_renderer.html#aecc0a9296ca880ad6ceaf38ecd8c3c53',1,'op::PoseRenderer']]], + ['mpi_5f15_560',['MPI_15',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fad788fbec25069f2884ee1ed97e0af2b9',1,'op']]], + ['mpi_5f15_5f4_561',['MPI_15_4',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa71e915c88449606c6498d33dd7c98e84',1,'op']]], + ['mpopisstopped_562',['mPopIsStopped',['../classop_1_1_queue_base.html#a77bf3592bbb6ac586cd4c2b0aea98e62',1,'op::QueueBase']]], + ['mpoppers_563',['mPoppers',['../classop_1_1_queue_base.html#a04f7160c199f90b8f8e91ddfd40e92fb',1,'op::QueueBase']]], + ['mposekeypoints_564',['mPoseKeypoints',['../classop_1_1_pose_extractor_net.html#aaaa4c619868bbf6306a549280002a2c6',1,'op::PoseExtractorNet']]], + ['mposemodel_565',['mPoseModel',['../classop_1_1_pose_renderer.html#a9fea1f9ce47b4b5f1015cae13f4ddcb1',1,'op::PoseRenderer::mPoseModel()'],['../classop_1_1_pose_extractor_net.html#a8595789b244399ecd9c4b2a774f2c74b',1,'op::PoseExtractorNet::mPoseModel()']]], + ['mposescores_566',['mPoseScores',['../classop_1_1_pose_extractor_net.html#a528c3056546b0759fafb249a02edd1b6',1,'op::PoseExtractorNet']]], + ['mpushers_567',['mPushers',['../classop_1_1_queue_base.html#a7c382bb98f5b769cde37b06d67cb0530',1,'op::QueueBase']]], + ['mpushisstopped_568',['mPushIsStopped',['../classop_1_1_queue_base.html#af2c0f21c6b4f4639661b59aa247ae407',1,'op::QueueBase']]], + ['mrenderthreshold_569',['mRenderThreshold',['../classop_1_1_renderer.html#adc4cd0a62008325c5c7df6df2f95a167',1,'op::Renderer']]], + 
['mscalenettooutput_570',['mScaleNetToOutput',['../classop_1_1_pose_extractor_net.html#a67ea32116dfaff15cc16e5a0a2bef822',1,'op::PoseExtractorNet']]], + ['mshowgooglyeyes_571',['mShowGooglyEyes',['../classop_1_1_renderer.html#ace2490fa3c5a87443e4d1e64007cd1ff',1,'op::Renderer']]], + ['mtqueue_572',['mTQueue',['../classop_1_1_queue_base.html#a49c1d6740f2ce7f26eae606f109b5738',1,'op::QueueBase']]], + ['mutable_5fcpu_5fdata_573',['mutable_cpu_data',['../classop_1_1_array_cpu_gpu.html#a6eafc0638925b776bb78c68c1fef972a',1,'op::ArrayCpuGpu']]], + ['mutable_5fcpu_5fdiff_574',['mutable_cpu_diff',['../classop_1_1_array_cpu_gpu.html#aba03b602ed1c745b3ba344d7ccedfd30',1,'op::ArrayCpuGpu']]], + ['mutable_5fgpu_5fdata_575',['mutable_gpu_data',['../classop_1_1_array_cpu_gpu.html#ac0bb37052ae356e85d681f52a4716f3c',1,'op::ArrayCpuGpu']]], + ['mutable_5fgpu_5fdiff_576',['mutable_gpu_diff',['../classop_1_1_array_cpu_gpu.html#a678e65cb71d2cc2e1070499465894892',1,'op::ArrayCpuGpu']]] +]; diff --git a/web/html/doc/search/all_11.html b/web/html/doc/search/all_11.html new file mode 100644 index 000000000..c9f79d289 --- /dev/null +++ b/web/html/doc/search/all_11.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_11.js b/web/html/doc/search/all_11.js new file mode 100644 index 000000000..c3abf6cb3 --- /dev/null +++ b/web/html/doc/search/all_11.js @@ -0,0 +1,35 @@ +var searchData= +[ + ['name_577',['name',['../structop_1_1_datum.html#ae6adcdacea12a9cfa445cf0cac1985b0',1,'op::Datum']]], + ['net_578',['Net',['../classop_1_1_net.html',1,'op']]], + ['net_2ehpp_579',['net.hpp',['../net_8hpp.html',1,'']]], + ['netcaffe_580',['NetCaffe',['../classop_1_1_net_caffe.html#af6d9ee03568d2783e0e4ed0b78a21c3d',1,'op::NetCaffe::NetCaffe()'],['../classop_1_1_net_caffe.html',1,'op::NetCaffe']]], + ['netcaffe_2ehpp_581',['netCaffe.hpp',['../net_caffe_8hpp.html',1,'']]], + ['netinitializationonthread_582',['netInitializationOnThread',['../classop_1_1_face_extractor_caffe.html#add2a24d9bd5e03ff90034239e90523c2',1,'op::FaceExtractorCaffe::netInitializationOnThread()'],['../classop_1_1_face_extractor_net.html#a6a9a02b46596283cab6f8a4640161081',1,'op::FaceExtractorNet::netInitializationOnThread()'],['../classop_1_1_hand_extractor_caffe.html#ace3ee9d717887ee9dc0f00ce69bd0c82',1,'op::HandExtractorCaffe::netInitializationOnThread()'],['../classop_1_1_hand_extractor_net.html#aad7c29237d50e77e606bb32c20c60d24',1,'op::HandExtractorNet::netInitializationOnThread()'],['../classop_1_1_pose_extractor_caffe.html#ae5d41065ea3eaf37d2c9663aa35554d6',1,'op::PoseExtractorCaffe::netInitializationOnThread()'],['../classop_1_1_pose_extractor_net.html#aa8bf8cdfdede22410e2dfcea5d3f0cdc',1,'op::PoseExtractorNet::netInitializationOnThread()']]], + ['netinputsize_583',['netInputSize',['../structop_1_1_wrapper_struct_pose.html#acff912f14ba3c0ba706ea99e1cef790e',1,'op::WrapperStructPose::netInputSize()'],['../structop_1_1_wrapper_struct_hand.html#a6a54d5b5766d23412c87bd10c26cb291',1,'op::WrapperStructHand::netInputSize()'],['../structop_1_1_wrapper_struct_face.html#a9845712fd6ebb66fccb0c1647e3491a0',1,'op::WrapperStructFace::netInputSize()']]], + ['netinputsizedynamicbehavior_584',['netInputSizeDynamicBehavior',['../structop_1_1_wrapper_struct_pose.html#a8bafec1b3ee2f2a6473fd604925e265a',1,'op::WrapperStructPose']]], + ['netinputsizes_585',['netInputSizes',['../structop_1_1_datum.html#a32d164c01acf6b4f7eb1323d74edbdca',1,'op::Datum']]], + ['netopencv_586',['NetOpenCv',['../classop_1_1_net_open_cv.html#af46f57f8a4093c927dd39109ad0411e9',1,'op::NetOpenCv::NetOpenCv()'],['../classop_1_1_net_open_cv.html',1,'op::NetOpenCv']]], + ['netopencv_2ehpp_587',['netOpenCv.hpp',['../net_open_cv_8hpp.html',1,'']]], + ['netoutputresolution_588',['NetOutputResolution',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa668a2bc599fd07445eae0730d043c96d',1,'op']]], + ['netoutputsize_589',['netOutputSize',['../structop_1_1_datum.html#ac734d4262a5a7892c6d4094cdd2bcc7c',1,'op::Datum']]], + ['nmsbase_2ehpp_590',['nmsBase.hpp',['../nms_base_8hpp.html',1,'']]], + ['nmscaffe_591',['NmsCaffe',['../classop_1_1_nms_caffe.html#afb808d9a264ce50664f8641e477d9e2d',1,'op::NmsCaffe::NmsCaffe()'],['../classop_1_1_nms_caffe.html',1,'op::NmsCaffe< T >']]], + ['nmscaffe_2ehpp_592',['nmsCaffe.hpp',['../nms_caffe_8hpp.html',1,'']]], + ['nmscpu_593',['nmsCpu',['../namespaceop.html#a6a97f255cc323f1c1babe4c598727196',1,'op']]], + ['nmsgpu_594',['nmsGpu',['../namespaceop.html#a28c5ac530845231600fb93c0be44ad6d',1,'op']]], + ['nmsocl_595',['nmsOcl',['../namespaceop.html#a37dce2abad2568d7664654e4598002af',1,'op']]], + 
['nmsthreshold_596',['NMSThreshold',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea83be5d7f6f29b19cf24f7393551c0439',1,'op']]], + ['nodisplay_597',['NoDisplay',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a28b652e57d2da6b7c939166be21efd9a',1,'op']]], + ['nogpu_598',['NoGpu',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa3c1472839b807c90abff3c7c36dff458',1,'op']]], + ['none_599',['None',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a6adf97f83acf6453d4a6a4b1070f3754',1,'op::None()'],['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332a6adf97f83acf6453d4a6a4b1070f3754',1,'op::None()'],['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6adf97f83acf6453d4a6a4b1070f3754',1,'op::None()']]], + ['nonetwork_600',['NoNetwork',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09aa6e20e86de146a7b524d32c9b1fea7f4',1,'op']]], + ['nooutput_601',['NoOutput',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a828d496739024f4af00df1e277d96ebd',1,'op']]], + ['normal_602',['Normal',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a960b44c579bc2f6818d2daaf9e4c16f0',1,'op']]], + ['noscale_603',['NoScale',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa6089ccf7c3fe93a62745e51200419c60',1,'op']]], + ['num_604',['num',['../classop_1_1_array_cpu_gpu.html#aa87f658e2ff9699908f5cb823e988188',1,'op::ArrayCpuGpu']]], + ['num_5faxes_605',['num_axes',['../classop_1_1_array_cpu_gpu.html#ad79b3b3cf4180535211e20e086262837',1,'op::ArrayCpuGpu']]], + ['numberpeoplemax_606',['numberPeopleMax',['../structop_1_1_wrapper_struct_pose.html#a02c4ab6b56e4da4b3ed0da4eae8ac0fc',1,'op::WrapperStructPose']]], + ['numberviews_607',['numberViews',['../structop_1_1_wrapper_struct_input.html#adac2c3e58e1e75a96e52904762c37c42',1,'op::WrapperStructInput']]], + ['numberviews_608',['NumberViews',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3b6cff57206f4ce645622b2e55f784a6',1,'op']]] +]; diff --git a/web/html/doc/search/all_12.html b/web/html/doc/search/all_12.html new file mode 100644 index 000000000..ab934722c --- /dev/null +++ b/web/html/doc/search/all_12.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
+ Loading...
+ Searching...
+ No Matches
    + + diff --git a/web/html/doc/search/all_12.js b/web/html/doc/search/all_12.js new file mode 100644 index 000000000..9fd2f077d --- /dev/null +++ b/web/html/doc/search/all_12.js @@ -0,0 +1,85 @@ +var searchData= +[ + ['configureerror_609',['ConfigureError',['../namespaceop_1_1_configure_error.html',1,'op']]], + ['configurelog_610',['ConfigureLog',['../namespaceop_1_1_configure_log.html',1,'op']]], + ['objectclose_611',['objectClose',['../classop_1_1_json_ofstream.html#a45eeb25d9413fc31786f315b46c341cc',1,'op::JsonOfstream']]], + ['objectopen_612',['objectOpen',['../classop_1_1_json_ofstream.html#a5c38e36c1449d808dd4ab6558d65289c',1,'op::JsonOfstream']]], + ['offset_613',['offset',['../classop_1_1_array_cpu_gpu.html#af24813492bad97de4e4c628fe356abe7',1,'op::ArrayCpuGpu']]], + ['op_614',['op',['../namespaceop.html',1,'']]], + ['op_5fapi_615',['OP_API',['../macros_8hpp.html#a4ba443bb7a0e5dbe8054a9ac37a5e000',1,'macros.hpp']]], + ['op_5fconst_5fmat_5freturn_5ffunction_616',['OP_CONST_MAT_RETURN_FUNCTION',['../matrix_8hpp.html#adb6fa4cc9ba470382895a448b7cf1257',1,'matrix.hpp']]], + ['op_5fconst_5fmat_5fvoid_5ffunction_617',['OP_CONST_MAT_VOID_FUNCTION',['../matrix_8hpp.html#a1b810570f8207983b20ea93e8f9f71a2',1,'matrix.hpp']]], + ['op_5fcuda_5fprofile_5fend_618',['OP_CUDA_PROFILE_END',['../profiler_8hpp.html#a774eaef2d2d68028026f52d554a8ba45',1,'profiler.hpp']]], + ['op_5fcuda_5fprofile_5finit_619',['OP_CUDA_PROFILE_INIT',['../profiler_8hpp.html#a543c2d65f7d0e835513310d83fc08589',1,'profiler.hpp']]], + ['op_5fcv2opconstmat_620',['OP_CV2OPCONSTMAT',['../matrix_8hpp.html#ad0bd05468e4619f7061bb513fc2cb86d',1,'matrix.hpp']]], + ['op_5fcv2opmat_621',['OP_CV2OPMAT',['../matrix_8hpp.html#a00c8b0a04adbe37ba8b6d08e0ba23287',1,'matrix.hpp']]], + ['op_5fcv2opvectormat_622',['OP_CV2OPVECTORMAT',['../matrix_8hpp.html#ad2790de0442f8b1a303b781ffe171c6e',1,'matrix.hpp']]], + ['op_5fmat_5freturn_5ffunction_623',['OP_MAT_RETURN_FUNCTION',['../matrix_8hpp.html#a1a8232a2c14792f9315d85004973c33c',1,'matrix.hpp']]], + ['op_5fmat_5fvoid_5ffunction_624',['OP_MAT_VOID_FUNCTION',['../matrix_8hpp.html#a2bab8a00953b4ba71a8b965347f7dd92',1,'matrix.hpp']]], + ['op_5fop2cvconstmat_625',['OP_OP2CVCONSTMAT',['../matrix_8hpp.html#a1c9288885fc29db5560426556d3fba41',1,'matrix.hpp']]], + ['op_5fop2cvmat_626',['OP_OP2CVMAT',['../matrix_8hpp.html#af06d0e620916e1f08ca609fb02f25dc8',1,'matrix.hpp']]], + ['op_5fop2cvvectormat_627',['OP_OP2CVVECTORMAT',['../matrix_8hpp.html#a1a8d8a14fa0269d045f8d8c8228098af',1,'matrix.hpp']]], + ['op_5fprofile_5fend_628',['OP_PROFILE_END',['../profiler_8hpp.html#ae1f762d7d0c1f5ad10304ef82bd85516',1,'profiler.hpp']]], + ['op_5fprofile_5finit_629',['OP_PROFILE_INIT',['../profiler_8hpp.html#a6211ca30ec696c346d0b3f2c056e05e6',1,'profiler.hpp']]], + ['open_5fpose_5fname_5fand_5fversion_630',['OPEN_POSE_NAME_AND_VERSION',['../macros_8hpp.html#adcf24c45000a4f44f355f1cc3062ea49',1,'macros.hpp']]], + ['open_5fpose_5fname_5fstring_631',['OPEN_POSE_NAME_STRING',['../macros_8hpp.html#afda430d83b9513af7270f1d680bf5471',1,'macros.hpp']]], + ['open_5fpose_5fversion_5fstring_632',['OPEN_POSE_VERSION_STRING',['../macros_8hpp.html#a7de0b663a3aa8043a346ebf2c411bda3',1,'macros.hpp']]], + ['opencl_633',['OpenCL',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa7982b09a852b37f2afb1227eaf552e47',1,'op']]], + ['opencv_634',['OpenCV',['../namespaceop.html#a1070db47220e17cf37df40411350f6fba5bd4c87976f48e6a53919d53e14025e9',1,'op']]], + ['opencv_2ehpp_635',['openCv.hpp',['../open_cv_8hpp.html',1,'']]], + 
['openpose_20advanced_20doc_20_2d_203_2dd_20reconstruction_20module_20and_20demo_636',['OpenPose Advanced Doc - 3-D Reconstruction Module and Demo',['../md_doc_advanced_3d_reconstruction_module.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20calibration_20module_20and_20demo_637',['OpenPose Advanced Doc - Calibration Module and Demo',['../md_doc_advanced_calibration_module.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20demo_20_2d_20advanced_638',['OpenPose Advanced Doc - Demo - Advanced',['../md_doc_advanced_demo_advanced.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20deploying_2fexporting_20openpose_20to_20other_20projects_639',['OpenPose Advanced Doc - Deploying/Exporting OpenPose to Other Projects',['../md_doc_advanced_deployment.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20heatmap_20output_640',['OpenPose Advanced Doc - Heatmap Output',['../md_doc_advanced_heatmap_output.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20standalone_20face_20or_20hand_20keypoint_20detector_641',['OpenPose Advanced Doc - Standalone Face or Hand Keypoint Detector',['../md_doc_advanced_standalone_face_or_hand_keypoint_detector.html',1,'']]], + ['openpose_20doc_642',['OpenPose Doc',['../md_doc_00_index.html',1,'']]], + ['openpose_20doc_20_2d_20authors_20and_20contributors_643',['OpenPose Doc - Authors and Contributors',['../md_doc_09_authors_and_contributors.html',1,'']]], + ['openpose_20doc_20_2d_20c_2b_2b_20api_644',['OpenPose Doc - C++ API',['../md_doc_04_cpp_api.html',1,'']]], + ['openpose_20doc_20_2d_20community_2dbased_20projects_645',['OpenPose Doc - Community-based Projects',['../md_doc_10_community_projects.html',1,'']]], + ['openpose_20doc_20_2d_20demo_646',['OpenPose Doc - Demo',['../md_doc_01_demo.html',1,'']]], + ['openpose_20doc_20_2d_20frequently_20asked_20questions_20_28faq_29_647',['OpenPose Doc - Frequently Asked Questions (FAQ)',['../md_doc_05_faq.html',1,'']]], + ['openpose_20doc_20_2d_20installation_648',['OpenPose Doc - Installation',['../md_doc_installation_0_index.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20_28deprecated_29_649',['OpenPose Doc - Installation (deprecated)',['../md_doc_installation_deprecated_installation_deprecated.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20_2d_20additional_20settings_20_28optional_29_650',['OpenPose Doc - Installation - Additional Settings (Optional)',['../md_doc_installation_2_additional_settings.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20_2d_20prerequisites_651',['OpenPose Doc - Installation - Prerequisites',['../md_doc_installation_1_prerequisites.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20on_20nvidia_20jetson_20tx1_652',['OpenPose Doc - Installation on Nvidia Jetson TX1',['../md_doc_installation_jetson_tx_installation_jetson_tx1.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20on_20nvidia_20jetson_20tx2_20jetpack_203_2e1_653',['OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.1',['../md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20on_20nvidia_20jetson_20tx2_20jetpack_203_2e3_654',['OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.3',['../md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html',1,'']]], + ['openpose_20doc_20_2d_20major_20released_20features_655',['OpenPose Doc - Major Released Features',['../md_doc_07_major_released_features.html',1,'']]], + ['openpose_20doc_20_2d_20maximizing_20the_20openpose_20speed_656',['OpenPose Doc - Maximizing 
the OpenPose Speed',['../md_doc_06_maximizing_openpose_speed.html',1,'']]], + ['openpose_20doc_20_2d_20output_657',['OpenPose Doc - Output',['../md_doc_02_output.html',1,'']]], + ['openpose_20doc_20_2d_20python_20api_658',['OpenPose Doc - Python API',['../md_doc_03_python_api.html',1,'']]], + ['openpose_20doc_20_2d_20release_20notes_659',['OpenPose Doc - Release Notes',['../md_doc_08_release_notes.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_660',['OpenPose Very Advanced Doc - Library Structure',['../md_doc_very_advanced_library_structure_0_index.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_20_2d_20deep_20overview_661',['OpenPose Very Advanced Doc - Library Structure - Deep Overview',['../md_doc_very_advanced_library_structure_1_library_deep_overview.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_20_2d_20how_20to_20extend_20functionality_662',['OpenPose Very Advanced Doc - Library Structure - How to Extend Functionality',['../md_doc_very_advanced_library_structure_2_library_extend_functionality.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_20_2d_20steps_20to_20add_20a_20new_20module_663',['OpenPose Very Advanced Doc - Library Structure - Steps to Add a New Module',['../md_doc_very_advanced_library_structure_3_library_add_new_module.html',1,'']]], + ['operator_21_3d_664',['operator!=',['../structop_1_1_datum.html#a8337f6ff81ba8231ceeabc840372bff9',1,'op::Datum::operator!=()'],['../structop_1_1_point.html#a8a82a0d663d9572fa28394f7562ebfb2',1,'op::Point::operator!=()']]], + ['operator_28_29_665',['operator()',['../classop_1_1_pointer_container_greater.html#a7c571ddbcfd6eaaaf33bb6abe4b22aaa',1,'op::PointerContainerGreater::operator()()'],['../classop_1_1_pointer_container_less.html#af34bafbf659ff4768dbb33fe7454cb21',1,'op::PointerContainerLess::operator()()']]], + ['operator_2a_666',['operator*',['../structop_1_1_rectangle.html#a66e38889d2b413df95a9995e93103ff7',1,'op::Rectangle::operator*()'],['../structop_1_1_point.html#ad66e33cf5d57e78c80220881406e41ce',1,'op::Point::operator*(const T value) const']]], + ['operator_2a_3d_667',['operator*=',['../structop_1_1_point.html#afb53230d0d884ca5432e948605b5c2e6',1,'op::Point::operator*=()'],['../structop_1_1_rectangle.html#a2d3d7951770da3954d5af9e365f5780c',1,'op::Rectangle::operator*=()']]], + ['operator_2b_668',['operator+',['../structop_1_1_point.html#a0b362efa00fc5a0d35f743f3c01fa1d0',1,'op::Point::operator+(const Point< T > &point) const'],['../structop_1_1_point.html#af656ee43b596b5bb68139404a54c5a63',1,'op::Point::operator+(const T value) const']]], + ['operator_2b_3d_669',['operator+=',['../structop_1_1_point.html#a60488ca743d82fe8dd574b01f992460c',1,'op::Point::operator+=(const T value)'],['../structop_1_1_point.html#ad5005ff994bfcd1846854d6af103a3a6',1,'op::Point::operator+=(const Point< T > &point)']]], + ['operator_2d_670',['operator-',['../structop_1_1_point.html#a8961164fe93cd91fcf55f56200730578',1,'op::Point::operator-(const T value) const'],['../structop_1_1_point.html#a075741b8963b342bb068976afcf579af',1,'op::Point::operator-(const Point< T > &point) const']]], + ['operator_2d_3d_671',['operator-=',['../structop_1_1_point.html#abf2bb2d2d5b3dba3424b489b99faa760',1,'op::Point::operator-=(const T value)'],['../structop_1_1_point.html#ad42deecd0077f7c962ca383cbc87e08f',1,'op::Point::operator-=(const Point< T > &point)']]], + 
['operator_2f_672',['operator/',['../structop_1_1_rectangle.html#adba48a35368d4a4d55896899b217d523',1,'op::Rectangle::operator/()'],['../structop_1_1_point.html#ad599eeba7a0137c3c138e5542bb2e9ed',1,'op::Point::operator/(const T value) const']]], + ['operator_2f_3d_673',['operator/=',['../structop_1_1_point.html#af8e49e33dad417d05ce8fb5f9dd68762',1,'op::Point::operator/=()'],['../structop_1_1_rectangle.html#a65620c7efbb3db95d85c90c2be3a851d',1,'op::Rectangle::operator/=()']]], + ['operator_3c_674',['operator<',['../structop_1_1_datum.html#a9d67e55fbc26399e4efd2385c1899541',1,'op::Datum::operator<()'],['../structop_1_1_point.html#a6aaab75fe6d1c8b4c935c2da385fd7ee',1,'op::Point::operator<()']]], + ['operator_3c_3d_675',['operator<=',['../structop_1_1_datum.html#a32752199884dcb51b7157daa098063e1',1,'op::Datum::operator<=()'],['../structop_1_1_point.html#a872607032f6b4fa8982f88a74c88c6bd',1,'op::Point::operator<=()']]], + ['operator_3d_676',['operator=',['../structop_1_1_datum.html#a24f3bfcb0ffffeb5742eb1530bc9e367',1,'op::Datum::operator=()'],['../classop_1_1_array.html#a9c8e006e0eea472485f37971330ecbab',1,'op::Array::operator=(const Array< T > &array)'],['../classop_1_1_array.html#ae388368128afac05369172198911e05d',1,'op::Array::operator=(Array< T > &&array)'],['../classop_1_1_thread.html#a16d1835e2bd7c5ae988f4bc225b3ca09',1,'op::Thread::operator=()'],['../classop_1_1_json_ofstream.html#aff76578c824c0314e33231884b40147e',1,'op::JsonOfstream::operator=()'],['../structop_1_1_rectangle.html#abea1a6760629dc4ed99875dae9d5ac36',1,'op::Rectangle::operator=(Rectangle< T > &&rectangle)'],['../structop_1_1_rectangle.html#abd3476f9a32ad2058ea67c75c2a547a2',1,'op::Rectangle::operator=(const Rectangle< T > &rectangle)'],['../structop_1_1_point.html#ac8596f2b3b50464b6c6eaa34b0a2c48b',1,'op::Point::operator=()'],['../structop_1_1_datum.html#a72ee10bf507aea368cfd3dba3dd38cb5',1,'op::Datum::operator=()'],['../structop_1_1_point.html#aada0e9eecee2fb30fb903b32f9f33047',1,'op::Point::operator=(const Point< T > &point)']]], + ['operator_3d_3d_677',['operator==',['../structop_1_1_point.html#a2f7900c0d58fb297b3b039cfb3c98a3e',1,'op::Point::operator==()'],['../structop_1_1_datum.html#ae740051202ca0db8358d5308143bb1b3',1,'op::Datum::operator==()']]], + ['operator_3e_678',['operator>',['../structop_1_1_point.html#a0e94c712c194c0b317eef4d8995e52f3',1,'op::Point::operator>()'],['../structop_1_1_datum.html#a79a05bec9871522cfab5d33cc7b63614',1,'op::Datum::operator>()']]], + ['operator_3e_3d_679',['operator>=',['../structop_1_1_point.html#ae7afe35869eea79f72bd8b74fae4a2f1',1,'op::Point::operator>=()'],['../structop_1_1_datum.html#ab97601a7628b46619f4a071cf1613ce6',1,'op::Datum::operator>=()']]], + ['operator_5b_5d_680',['operator[]',['../classop_1_1_array.html#aa40dc59e800d3c4cce623d560c0e0fad',1,'op::Array::operator[](const int index)'],['../classop_1_1_array.html#ac4e9514cfee78a3a0236c1a6265376d8',1,'op::Array::operator[](const std::vector< int > &indexes) const'],['../classop_1_1_array.html#a0e1d5ce14d11caa3b92306ee677af4cc',1,'op::Array::operator[](const int index) const'],['../classop_1_1_array.html#aada0f1bd6e9eb73b4f977e62da536f58',1,'op::Array::operator[](const std::vector< int > &indexes)']]], + ['oplog_681',['opLog',['../namespaceop.html#a838b69fead43c8a848d059b5f9d63baf',1,'op::opLog(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#aa72861fea0671209aca1ea5fa385891a',1,'op::opLog(const std::string 
&message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")']]], + ['oplogifdebug_682',['opLogIfDebug',['../namespaceop.html#a91dd00cbb8fb646e6612455eb0f1b3e9',1,'op']]], + ['opoutputtocvmat_683',['OpOutputToCvMat',['../classop_1_1_op_output_to_cv_mat.html',1,'op::OpOutputToCvMat'],['../classop_1_1_op_output_to_cv_mat.html#a60affeb41b26b1357cf8c797c7e16ecb',1,'op::OpOutputToCvMat::OpOutputToCvMat()']]], + ['opoutputtocvmat_2ehpp_684',['opOutputToCvMat.hpp',['../op_output_to_cv_mat_8hpp.html',1,'']]], + ['originalfps_685',['OriginalFps',['../namespaceop.html#ac0230b669b296920c0cfc41b7587268fa0123c3afc0fac5edaf8b1672cb12626c',1,'op']]], + ['output_686',['Output',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a29c2c02a361c9d7028472e5d92cd4a54',1,'op']]], + ['outputdata_687',['outputData',['../structop_1_1_datum.html#a42b953c082f479eddc527da9a3a4cc75',1,'op::Datum']]], + ['outputresolution_688',['OutputResolution',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa73c42013aac51c335d50d103f30fcb99',1,'op']]], + ['outputsize_689',['outputSize',['../structop_1_1_wrapper_struct_pose.html#a80ead0f411ddab86f643345e4effe805',1,'op::WrapperStructPose']]], + ['overload_5fc_5fout_690',['OVERLOAD_C_OUT',['../macros_8hpp.html#aa883b8ec96d2804b37d3bfb0bd4c5f16',1,'macros.hpp']]] +]; diff --git a/web/html/doc/search/all_13.html b/web/html/doc/search/all_13.html new file mode 100644 index 000000000..51172c2f3 --- /dev/null +++ b/web/html/doc/search/all_13.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
+ Loading...
+ Searching...
+ No Matches
    + + diff --git a/web/html/doc/search/all_13.js b/web/html/doc/search/all_13.js new file mode 100644 index 000000000..776d24c15 --- /dev/null +++ b/web/html/doc/search/all_13.js @@ -0,0 +1,110 @@ +var searchData= +[ + ['pafptrintovector_691',['pafPtrIntoVector',['../namespaceop.html#aaec4a34b015f898d28be2b9f2aba0d38',1,'op']]], + ['pafs_692',['PAFs',['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba21c5c3f60f4881b8d5477f5628db74f1',1,'op']]], + ['pafvectorintopeoplevector_693',['pafVectorIntoPeopleVector',['../namespaceop.html#a36f0207c6263e7174f4c79eba7c4df3f',1,'op']]], + ['parts_694',['Parts',['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba9ce2d07469b39a72159ed8b0e0e597ca',1,'op']]], + ['peoplejsonsaver_695',['PeopleJsonSaver',['../classop_1_1_people_json_saver.html',1,'op::PeopleJsonSaver'],['../classop_1_1_people_json_saver.html#aa6e2f479d959752c5c0f71cd8b4427c2',1,'op::PeopleJsonSaver::PeopleJsonSaver()']]], + ['peoplejsonsaver_2ehpp_696',['peopleJsonSaver.hpp',['../people_json_saver_8hpp.html',1,'']]], + ['peoplevectortopeoplearray_697',['peopleVectorToPeopleArray',['../namespaceop.html#a3dbd17f2f656a2bc751441a42b5b9516',1,'op']]], + ['personidextractor_698',['PersonIdExtractor',['../classop_1_1_person_id_extractor.html',1,'op::PersonIdExtractor'],['../classop_1_1_person_id_extractor.html#a5916ec673bdbe127386b7f496b188828',1,'op::PersonIdExtractor::PersonIdExtractor()']]], + ['personidextractor_2ehpp_699',['personIdExtractor.hpp',['../person_id_extractor_8hpp.html',1,'']]], + ['persontracker_700',['PersonTracker',['../classop_1_1_person_tracker.html',1,'op::PersonTracker'],['../classop_1_1_person_tracker.html#aa88f617ff9f1ff509c54b2cbf51e764a',1,'op::PersonTracker::PersonTracker()']]], + ['persontracker_2ehpp_701',['personTracker.hpp',['../person_tracker_8hpp.html',1,'']]], + ['plaintext_702',['plainText',['../classop_1_1_json_ofstream.html#aa432ff172e10bb9e3b6e2bfa6124c648',1,'op::JsonOfstream']]], + ['plusminusone_703',['PlusMinusOne',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaab7e7b2beae3435e73021d6d9a6a3fd8a',1,'op']]], + ['plusminusonefixedaspect_704',['PlusMinusOneFixedAspect',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaee080e43c505aa85cdda0e480b0abc06',1,'op']]], + ['point_705',['Point',['../structop_1_1_point.html',1,'op::Point< T >'],['../structop_1_1_point.html#a9f80114d18ec8055360222d975bcd5a8',1,'op::Point::Point(const T x=0, const T y=0)'],['../structop_1_1_point.html#a44559988e3980e21568b5d9dd2897368',1,'op::Point::Point(const Point< T > &point)'],['../structop_1_1_point.html#ab3b92e4a40cd58d948647484f21dd9ef',1,'op::Point::Point(Point< T > &&point)']]], + ['point_2ehpp_706',['point.hpp',['../point_8hpp.html',1,'']]], + ['point_3c_20int_20_3e_707',['Point< int >',['../structop_1_1_point.html',1,'op']]], + ['pointercontainer_2ehpp_708',['pointerContainer.hpp',['../pointer_container_8hpp.html',1,'']]], + ['pointercontainergreater_709',['PointerContainerGreater',['../classop_1_1_pointer_container_greater.html',1,'op']]], + ['pointercontainerless_710',['PointerContainerLess',['../classop_1_1_pointer_container_less.html',1,'op']]], + ['pop_711',['pop',['../classop_1_1_queue_base.html#a5b28915cc58e040aca673bdfdf7c8be3',1,'op::QueueBase']]], + ['pose_5fbody_5f135_5fcolors_5frender_5fgpu_712',['POSE_BODY_135_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a45b08569481c3bf02eceab0d911b2bf6',1,'poseParametersRender.hpp']]], + 
['pose_5fbody_5f135_5fpairs_5frender_5fgpu_713',['POSE_BODY_135_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a2c5ec8c89146a0535f4f29f861f4e248',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f135_5fscales_5frender_5fgpu_714',['POSE_BODY_135_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a426402ce79f98928f30037da33c2a349',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f19_5fcolors_5frender_5fgpu_715',['POSE_BODY_19_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a17cec2005928720d6da0e83ba26cca01',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f19_5fpairs_5frender_5fgpu_716',['POSE_BODY_19_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a497bfbf7fddb6e960565ec70bb6b2ad1',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f19_5fscales_5frender_5fgpu_717',['POSE_BODY_19_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a791ed14d0f2a65f850c94154b996826c',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f23_5fcolors_5frender_5fgpu_718',['POSE_BODY_23_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#aeb1e2dd8178c15024e372185e2e5cf54',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f23_5fpairs_5frender_5fgpu_719',['POSE_BODY_23_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a5f3db3bbb18fe8d978661f3c5417c110',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f23_5fscales_5frender_5fgpu_720',['POSE_BODY_23_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#aa257db7f46ddaa7fe838f659b8e5ed66',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25_5fcolors_5frender_5fgpu_721',['POSE_BODY_25_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a456b8ce498f455af926215d91f6b6087',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25_5fpairs_5frender_5fgpu_722',['POSE_BODY_25_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a216b861af0ff0c237be529dc204ed05e',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25_5fscales_5frender_5fgpu_723',['POSE_BODY_25_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#ae30e7b56c09200d60f05acba38a8bf05',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25b_5fcolors_5frender_5fgpu_724',['POSE_BODY_25B_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a1e4980010228bfd1e9e1387c23a3ab6a',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25b_5fpairs_5frender_5fgpu_725',['POSE_BODY_25B_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a1b97e47c182baf7de08af03a8ba397e3',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25b_5fscales_5frender_5fgpu_726',['POSE_BODY_25B_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a04ebdf33bf0ff159d144dab0ebf1c2ce',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f12_5fcolors_5frender_5fgpu_727',['POSE_CAR_12_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a6be8d3dedaf015f795625d1df19876aa',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f12_5fpairs_5frender_5fgpu_728',['POSE_CAR_12_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a253206407787fc26629e6e46f60d7be2',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f12_5fscales_5frender_5fgpu_729',['POSE_CAR_12_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a7382830f0c24beaea601444cb5962f06',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f22_5fcolors_5frender_5fgpu_730',['POSE_CAR_22_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#aaecdba75da05e8bfc90e4393c88ab6e6',1,'poseParametersRender.hpp']]], + 
['pose_5fcar_5f22_5fpairs_5frender_5fgpu_731',['POSE_CAR_22_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a0065da73d9e649360d458fc670ee0f95',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f22_5fscales_5frender_5fgpu_732',['POSE_CAR_22_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a0afb6a9782a4ad8bd3ac41bd2436fefc',1,'poseParametersRender.hpp']]], + ['pose_5fcoco_5fcolors_5frender_5fgpu_733',['POSE_COCO_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a8b293ab02337be3f90218c5b824ece06',1,'poseParametersRender.hpp']]], + ['pose_5fcoco_5fpairs_5frender_5fgpu_734',['POSE_COCO_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a5afab27fbbebc71b8753a20dd6c9a322',1,'poseParametersRender.hpp']]], + ['pose_5fcoco_5fscales_5frender_5fgpu_735',['POSE_COCO_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a8cd3d34880f73dc73b2feb28370e86ec',1,'poseParametersRender.hpp']]], + ['pose_5fdefault_5falpha_5fheat_5fmap_736',['POSE_DEFAULT_ALPHA_HEAT_MAP',['../namespaceop.html#af45cddacd69fff73a4ea4acbbbac43e0',1,'op']]], + ['pose_5fdefault_5falpha_5fkeypoint_737',['POSE_DEFAULT_ALPHA_KEYPOINT',['../namespaceop.html#a21fcb98366f6ea8895fc7f527f232db5',1,'op']]], + ['pose_5fmax_5fpeople_738',['POSE_MAX_PEOPLE',['../namespaceop.html#a522d4552d2aeabe367f4d3bf371e6b3e',1,'op']]], + ['pose_5fmpi_5fcolors_5frender_5fgpu_739',['POSE_MPI_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a7fdd75b1478d65f11ebc77144662958c',1,'poseParametersRender.hpp']]], + ['pose_5fmpi_5fpairs_5frender_5fgpu_740',['POSE_MPI_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a7987426d997b6b040302d25fd07403ac',1,'poseParametersRender.hpp']]], + ['pose_5fmpi_5fscales_5frender_5fgpu_741',['POSE_MPI_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a32e98c9dd9e6f38c597c7924582570d0',1,'poseParametersRender.hpp']]], + ['posebodypartmapstringtokey_742',['poseBodyPartMapStringToKey',['../namespaceop.html#aacf6e688031bb116e4878b811e8dbc23',1,'op::poseBodyPartMapStringToKey(const PoseModel poseModel, const std::vector< std::string > &strings)'],['../namespaceop.html#a3df938ef93037c534c5d342720d5fb70',1,'op::poseBodyPartMapStringToKey(const PoseModel poseModel, const std::string &string)']]], + ['posecandidates_743',['poseCandidates',['../structop_1_1_datum.html#a55dd5354e09696ed6896923755f1c85b',1,'op::Datum']]], + ['posecpurenderer_744',['PoseCpuRenderer',['../classop_1_1_pose_cpu_renderer.html',1,'op::PoseCpuRenderer'],['../classop_1_1_pose_cpu_renderer.html#a5863733d560345d4890557b0f8c0d08e',1,'op::PoseCpuRenderer::PoseCpuRenderer()']]], + ['posecpurenderer_2ehpp_745',['poseCpuRenderer.hpp',['../pose_cpu_renderer_8hpp.html',1,'']]], + ['poseextractor_746',['PoseExtractor',['../classop_1_1_pose_extractor.html',1,'op::PoseExtractor'],['../classop_1_1_pose_extractor.html#acd50fa337aef1d658b6fed3edc717ada',1,'op::PoseExtractor::PoseExtractor()']]], + ['poseextractor_2ehpp_747',['poseExtractor.hpp',['../pose_extractor_8hpp.html',1,'']]], + ['poseextractorcaffe_748',['PoseExtractorCaffe',['../classop_1_1_pose_extractor_caffe.html',1,'op::PoseExtractorCaffe'],['../classop_1_1_pose_extractor_caffe.html#a682152a072d07b1b0764c2f7aab09ab7',1,'op::PoseExtractorCaffe::PoseExtractorCaffe()']]], + ['poseextractorcaffe_2ehpp_749',['poseExtractorCaffe.hpp',['../pose_extractor_caffe_8hpp.html',1,'']]], + 
['poseextractornet_750',['PoseExtractorNet',['../classop_1_1_pose_extractor_net.html',1,'op::PoseExtractorNet'],['../classop_1_1_pose_extractor_net.html#a5503fceecf280b6b1ed6e3251de46e26',1,'op::PoseExtractorNet::PoseExtractorNet()']]], + ['poseextractornet_2ehpp_751',['poseExtractorNet.hpp',['../pose_extractor_net_8hpp.html',1,'']]], + ['posegpurenderer_752',['PoseGpuRenderer',['../classop_1_1_pose_gpu_renderer.html',1,'op::PoseGpuRenderer'],['../classop_1_1_pose_gpu_renderer.html#a1582e63e33192d79f80b5879ba04d448',1,'op::PoseGpuRenderer::PoseGpuRenderer()']]], + ['posegpurenderer_2ehpp_753',['poseGpuRenderer.hpp',['../pose_gpu_renderer_8hpp.html',1,'']]], + ['poseheatmaps_754',['poseHeatMaps',['../structop_1_1_datum.html#a5429e97e0ab9b0e2209a3947af668381',1,'op::Datum']]], + ['poseids_755',['poseIds',['../structop_1_1_datum.html#aba90dccffb5a830296231bd430c4766c',1,'op::Datum']]], + ['posekeypoints_756',['poseKeypoints',['../structop_1_1_datum.html#a6d629b1f6f7b958fe4cf2ef4cdf57c5b',1,'op::Datum']]], + ['posekeypoints3d_757',['poseKeypoints3D',['../structop_1_1_datum.html#a652ac1e7de13ec9a886dece75848cfea',1,'op::Datum']]], + ['posemode_758',['poseMode',['../structop_1_1_wrapper_struct_pose.html#ad0f4992658b9d624184dcecf79e54e43',1,'op::WrapperStructPose']]], + ['posemode_759',['PoseMode',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09',1,'op']]], + ['posemodel_760',['poseModel',['../structop_1_1_wrapper_struct_pose.html#a35147b6fb9e300d79b71637793053a1b',1,'op::WrapperStructPose']]], + ['posemodel_761',['PoseModel',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261f',1,'op']]], + ['posenetoutput_762',['poseNetOutput',['../structop_1_1_datum.html#a8f6f5fd181abe3bdfd6f7bdf8a165782',1,'op::Datum']]], + ['poseparameters_2ehpp_763',['poseParameters.hpp',['../pose_parameters_8hpp.html',1,'']]], + ['poseparametersrender_2ehpp_764',['poseParametersRender.hpp',['../pose_parameters_render_8hpp.html',1,'']]], + ['poseproperty_765',['PoseProperty',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0e',1,'op']]], + ['poserenderer_766',['PoseRenderer',['../classop_1_1_pose_renderer.html',1,'op::PoseRenderer'],['../classop_1_1_pose_renderer.html#a1dfd34d42fa69913a9702e0a0ebcd04e',1,'op::PoseRenderer::PoseRenderer()']]], + ['poserenderer_2ehpp_767',['poseRenderer.hpp',['../pose_renderer_8hpp.html',1,'']]], + ['posescores_768',['poseScores',['../structop_1_1_datum.html#afb117821de7aff9ac3c219ef3bbc0c14',1,'op::Datum']]], + ['posetriangulation_769',['PoseTriangulation',['../classop_1_1_pose_triangulation.html',1,'op::PoseTriangulation'],['../classop_1_1_pose_triangulation.html#a7858f0c4adf7845c2be072e0985af3ee',1,'op::PoseTriangulation::PoseTriangulation()']]], + ['posetriangulation_2ehpp_770',['poseTriangulation.hpp',['../pose_triangulation_8hpp.html',1,'']]], + ['positivecharround_771',['positiveCharRound',['../namespaceop.html#ab5eb10c958f3f37fb82d29361ad81467',1,'op']]], + ['positiveintround_772',['positiveIntRound',['../namespaceop.html#a699ef17b0f27b8bc2c4d4a03e46e6be1',1,'op']]], + ['positivelonglonground_773',['positiveLongLongRound',['../namespaceop.html#a1b479fea39a56c041a8a51aecf024bed',1,'op']]], + ['positivelonground_774',['positiveLongRound',['../namespaceop.html#a57eee48e4cefd583a81cfc907586c035',1,'op']]], + ['positivescharround_775',['positiveSCharRound',['../namespaceop.html#ab71596bc88b87ea5920f19f978d6d6ac',1,'op']]], + ['postprocessing_776',['PostProcessing',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18aa52d6088cbae537944827c8f8c69c570',1,'op']]], + 
['preprocessing_777',['PreProcessing',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a05318bd0215d16e009798570b53755d2',1,'op']]], + ['printaveragedtimemseveryxiterations_778',['printAveragedTimeMsEveryXIterations',['../classop_1_1_profiler.html#a1192952d076f52b884b32fcd496df2ec',1,'op::Profiler']]], + ['printaveragedtimemsoniterationx_779',['printAveragedTimeMsOnIterationX',['../classop_1_1_profiler.html#a58b930a54a98bbc91af074395852da76',1,'op::Profiler']]], + ['printsize_780',['printSize',['../classop_1_1_array.html#a3f26a48c35cde008970078a66ff6e5c7',1,'op::Array']]], + ['printtime_781',['printTime',['../namespaceop.html#ab0908bcc0abb00c49ecbe7fc373b58c9',1,'op']]], + ['printverbose_782',['printVerbose',['../classop_1_1_verbose_printer.html#ab85c8d6555a52eb77042646dfe798fbf',1,'op::VerbosePrinter']]], + ['priority_783',['Priority',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14',1,'op']]], + ['priorityqueue_784',['PriorityQueue',['../classop_1_1_priority_queue.html',1,'op::PriorityQueue< TDatums, TQueue >'],['../classop_1_1_priority_queue.html#acecdd3c5789942777652b66d08578d93',1,'op::PriorityQueue::PriorityQueue()']]], + ['priorityqueue_2ehpp_785',['priorityQueue.hpp',['../priority_queue_8hpp.html',1,'']]], + ['producer_786',['Producer',['../classop_1_1_producer.html',1,'op::Producer'],['../classop_1_1_producer.html#aaec98c35fe9f2695cd31be3e2d437a61',1,'op::Producer::Producer()']]], + ['producer_2ehpp_787',['producer.hpp',['../producer_8hpp.html',1,'']]], + ['producerfpsmode_788',['ProducerFpsMode',['../namespaceop.html#ac0230b669b296920c0cfc41b7587268f',1,'op']]], + ['producerproperty_789',['ProducerProperty',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774',1,'op']]], + ['producerstring_790',['producerString',['../structop_1_1_wrapper_struct_input.html#a6aec09a94fdf393d6ab3b23857c376da',1,'op::WrapperStructInput']]], + ['producertype_791',['ProducerType',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332',1,'op']]], + ['producertype_792',['producerType',['../structop_1_1_wrapper_struct_input.html#acd6a460d6c0a64bc818539b67fcafea7',1,'op::WrapperStructInput']]], + ['profilegpumemory_793',['profileGpuMemory',['../classop_1_1_profiler.html#a6e828c0b4fef5671a094727b7919a948',1,'op::Profiler']]], + ['profiler_794',['Profiler',['../classop_1_1_profiler.html',1,'op']]], + ['profiler_2ehpp_795',['profiler.hpp',['../profiler_8hpp.html',1,'']]], + ['prototxtpath_796',['protoTxtPath',['../structop_1_1_wrapper_struct_pose.html#a8a6273145f5e2f2ccade81865cbdfecb',1,'op::WrapperStructPose']]], + ['provided_797',['Provided',['../namespaceop.html#a1070db47220e17cf37df40411350f6fba900b06e1ae224594f075e0c882c73532',1,'op']]] +]; diff --git a/web/html/doc/search/all_14.html b/web/html/doc/search/all_14.html new file mode 100644 index 000000000..afecf5634 --- /dev/null +++ b/web/html/doc/search/all_14.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
+ Loading...
+ Searching...
+ No Matches
    + + diff --git a/web/html/doc/search/all_14.js b/web/html/doc/search/all_14.js new file mode 100644 index 000000000..54c144b9f --- /dev/null +++ b/web/html/doc/search/all_14.js @@ -0,0 +1,8 @@ +var searchData= +[ + ['queue_798',['Queue',['../classop_1_1_queue.html',1,'op::Queue< TDatums, TQueue >'],['../classop_1_1_queue.html#ae2b845322940bfc89b6342137d8ac372',1,'op::Queue::Queue()']]], + ['queue_2ehpp_799',['queue.hpp',['../queue_8hpp.html',1,'']]], + ['queuebase_800',['QueueBase',['../classop_1_1_queue_base.html',1,'op::QueueBase< TDatums, TQueue >'],['../classop_1_1_queue_base.html#aea7941746e2403a09356b9c6a208784c',1,'op::QueueBase::QueueBase()']]], + ['queuebase_2ehpp_801',['queueBase.hpp',['../queue_base_8hpp.html',1,'']]], + ['queuebase_3c_20tdatums_2c_20std_3a_3aqueue_3c_20tdatums_20_3e_20_3e_802',['QueueBase< TDatums, std::queue< TDatums > >',['../classop_1_1_queue_base.html',1,'op']]] +]; diff --git a/web/html/doc/search/all_15.html b/web/html/doc/search/all_15.html new file mode 100644 index 000000000..69f382b31 --- /dev/null +++ b/web/html/doc/search/all_15.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
+ Loading...
+ Searching...
+ No Matches
    + + diff --git a/web/html/doc/search/all_15.js b/web/html/doc/search/all_15.js new file mode 100644 index 000000000..30da14cfb --- /dev/null +++ b/web/html/doc/search/all_15.js @@ -0,0 +1,62 @@ +var searchData= +[ + ['readcvmat_803',['readCvMat',['../classop_1_1_gui3_d.html#a04abf8036928d58daf9417c5b5a41693',1,'op::Gui3D']]], + ['readme_2emd_804',['README.md',['../_r_e_a_d_m_e_8md.html',1,'(Global Namespace)'],['../doc_2installation_2_r_e_a_d_m_e_8md.html',1,'(Global Namespace)'],['../doc_2_r_e_a_d_m_e_8md.html',1,'(Global Namespace)']]], + ['readparameters_805',['readParameters',['../classop_1_1_camera_parameter_reader.html#a2be8ff6d89e5f623f476c75afe3c5c3b',1,'op::CameraParameterReader::readParameters(const std::string &cameraParameterPath, const std::string &serialNumber)'],['../classop_1_1_camera_parameter_reader.html#a906fd316f09d901280a5fe10a4a54541',1,'op::CameraParameterReader::readParameters(const std::string &cameraParameterPath, const std::vector< std::string > &serialNumbers={})']]], + ['realtimeprocessing_806',['realTimeProcessing',['../structop_1_1_wrapper_struct_input.html#a2eeea9ee711a1dcbec99c3dc871fbc47',1,'op::WrapperStructInput']]], + ['recenter_807',['recenter',['../namespaceop.html#a7cd131c9ddd8f3987508e89e0881b9e0',1,'op::recenter()'],['../structop_1_1_rectangle.html#a1c9a572db2c17fb02a7d19e965c1d3dc',1,'op::Rectangle::recenter()']]], + ['reconstruct3d_808',['reconstruct3d',['../structop_1_1_wrapper_struct_extra.html#aa157c20ca959fd952a85866a119183ca',1,'op::WrapperStructExtra']]], + ['reconstructarray_809',['reconstructArray',['../classop_1_1_pose_triangulation.html#adc3cf7eb81cb9e7d7f72fda0602ed89b',1,'op::PoseTriangulation::reconstructArray(const std::vector< std::vector< Array< float >>> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) const'],['../classop_1_1_pose_triangulation.html#a519abdb2477c518a565803a5ef5bdc1e',1,'op::PoseTriangulation::reconstructArray(const std::vector< Array< float >> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) const']]], + ['record_810',['record',['../classop_1_1_coco_json_saver.html#a2c3777cb55d09ee3911d4ed69334b17f',1,'op::CocoJsonSaver']]], + ['rectangle_811',['Rectangle',['../structop_1_1_rectangle.html',1,'op::Rectangle< T >'],['../structop_1_1_rectangle.html#a0112ddaa9782f3ccbb76a319b05f030b',1,'op::Rectangle::Rectangle(const T x=0, const T y=0, const T width=0, const T height=0)'],['../structop_1_1_rectangle.html#afbb0da8956e35178d3f28d2b1d998175',1,'op::Rectangle::Rectangle(const Rectangle< T > &rectangle)'],['../structop_1_1_rectangle.html#a5a9a60fdfd9c88ab8ded6275d64333ea',1,'op::Rectangle::Rectangle(Rectangle< T > &&rectangle)']]], + ['rectangle_2ehpp_812',['rectangle.hpp',['../rectangle_8hpp.html',1,'']]], + ['refineandsaveextrinsics_813',['refineAndSaveExtrinsics',['../namespaceop.html#a50526c188f2ba94b07e0945c0871fd2c',1,'op']]], + ['release_814',['release',['../classop_1_1_flir_reader.html#ab28f40422c9edff8594d855bbef91f58',1,'op::FlirReader::release()'],['../classop_1_1_image_directory_reader.html#af74e192f8cba5c10f8e252674a85185a',1,'op::ImageDirectoryReader::release()'],['../classop_1_1_producer.html#a7753ffb0daa486ab0f82873b3567f95e',1,'op::Producer::release()'],['../classop_1_1_spinnaker_wrapper.html#a6e66639ee75708486b3d9aa4598607c0',1,'op::SpinnakerWrapper::release()'],['../classop_1_1_video_capture_reader.html#a2f73e10efe7f9b24a6cb75af2167de58',1,'op::VideoCaptureReader::release()']]], 
+ ['remove0sfromstring_815',['remove0sFromString',['../namespaceop.html#a2f610ba8a71cf16628df2f4d270b7d34',1,'op']]], + ['removeallocurrencesofsubstring_816',['removeAllOcurrencesOfSubString',['../namespaceop.html#a82471a2af285bada830bac3c95a8440b',1,'op']]], + ['removepeoplebelowthresholdsandfillfaces_817',['removePeopleBelowThresholdsAndFillFaces',['../namespaceop.html#ae01dd412590493f5f732594e8332d3f0',1,'op']]], + ['removespecialscharacters_818',['removeSpecialsCharacters',['../namespaceop.html#a8664658afa7be03e173cec9eff2873ad',1,'op']]], + ['renderer_819',['Renderer',['../classop_1_1_renderer.html',1,'op::Renderer'],['../classop_1_1_renderer.html#a00caf604fad781dfcf3bc311ef6a6623',1,'op::Renderer::Renderer()']]], + ['renderer_2ehpp_820',['renderer.hpp',['../renderer_8hpp.html',1,'']]], + ['renderface_821',['renderFace',['../classop_1_1_face_renderer.html#acbbdaca16f4115a5a68d006f4f325397',1,'op::FaceRenderer']]], + ['renderface_2ehpp_822',['renderFace.hpp',['../render_face_8hpp.html',1,'']]], + ['renderfaceinherited_823',['renderFaceInherited',['../classop_1_1_face_cpu_renderer.html#aa2f7c7f0a5a8df2dbb94c8a01fa41e2a',1,'op::FaceCpuRenderer::renderFaceInherited()'],['../classop_1_1_face_gpu_renderer.html#ae54b7538a6ed6a5eaedcbdc117a0d61c',1,'op::FaceGpuRenderer::renderFaceInherited()']]], + ['renderfacekeypointscpu_824',['renderFaceKeypointsCpu',['../namespaceop.html#a5fc85e8500dbeda3b75c1b6ecfac91cd',1,'op']]], + ['renderfacekeypointsgpu_825',['renderFaceKeypointsGpu',['../namespaceop.html#ab8b2748a5bcf823e59b66549e6a24cfe',1,'op']]], + ['renderhand_826',['renderHand',['../classop_1_1_hand_renderer.html#a2693c02336fb373a42405ccc7cff29bd',1,'op::HandRenderer']]], + ['renderhand_2ehpp_827',['renderHand.hpp',['../render_hand_8hpp.html',1,'']]], + ['renderhandinherited_828',['renderHandInherited',['../classop_1_1_hand_cpu_renderer.html#ae9e43ff22b0aae81dd88df3a313b0b0f',1,'op::HandCpuRenderer::renderHandInherited()'],['../classop_1_1_hand_gpu_renderer.html#a8206b59519e8214e06af9994a6038dae',1,'op::HandGpuRenderer::renderHandInherited()']]], + ['renderhandkeypointscpu_829',['renderHandKeypointsCpu',['../namespaceop.html#afb5b711819f94b51f32460861d9cea38',1,'op']]], + ['renderhandkeypointsgpu_830',['renderHandKeypointsGpu',['../namespaceop.html#a865db81a5bc4f81cf9fc7c7f3ce81be3',1,'op']]], + ['renderkeypointscpu_831',['renderKeypointsCpu',['../namespaceop.html#aa7803aa62abc21471e7d966bd674a81a',1,'op']]], + ['rendermode_832',['renderMode',['../structop_1_1_wrapper_struct_hand.html#a90ddd24ee55b6aabd9a1728ccd91525e',1,'op::WrapperStructHand']]], + ['rendermode_833',['RenderMode',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033',1,'op']]], + ['rendermode_834',['renderMode',['../structop_1_1_wrapper_struct_pose.html#ad6b5ea0cef8eb81d20ab39099ba7716e',1,'op::WrapperStructPose::renderMode()'],['../structop_1_1_wrapper_struct_face.html#accc6e564598130b9bf0a6d0ec9c304c4',1,'op::WrapperStructFace::renderMode()']]], + ['renderpose_835',['renderPose',['../classop_1_1_pose_renderer.html#ad1e96ceb47bf205b56b50c6b2792f9e3',1,'op::PoseRenderer::renderPose()'],['../classop_1_1_pose_gpu_renderer.html#a4705b3c47cd9ac8174e357999960a28f',1,'op::PoseGpuRenderer::renderPose()'],['../classop_1_1_pose_cpu_renderer.html#a98541b982847713472411402314efd96',1,'op::PoseCpuRenderer::renderPose()']]], + ['renderpose_2ehpp_836',['renderPose.hpp',['../render_pose_8hpp.html',1,'']]], + ['renderposedistancegpu_837',['renderPoseDistanceGpu',['../namespaceop.html#a056c64afca17423e038590e4ef2f712b',1,'op']]], + 
['renderposeheatmapgpu_838',['renderPoseHeatMapGpu',['../namespaceop.html#a3ceb3476e4154a6e9e06b3613a12c040',1,'op']]], + ['renderposeheatmapsgpu_839',['renderPoseHeatMapsGpu',['../namespaceop.html#aa1225091307f8d0bf07dd032389f8961',1,'op']]], + ['renderposekeypointscpu_840',['renderPoseKeypointsCpu',['../namespaceop.html#a99a08148f440bd96546076e15f0de04c',1,'op']]], + ['renderposekeypointsgpu_841',['renderPoseKeypointsGpu',['../namespaceop.html#ad0069d4c6204b35893f4158d04d615f1',1,'op']]], + ['renderposepafgpu_842',['renderPosePAFGpu',['../namespaceop.html#a9275c58ba881ea94e054117392a67381',1,'op']]], + ['renderposepafsgpu_843',['renderPosePAFsGpu',['../namespaceop.html#a3ba62b3d5cc275fc1700bf0c5e6bf578',1,'op']]], + ['renderthreshold_844',['renderThreshold',['../structop_1_1_wrapper_struct_pose.html#a322ff95b6a2838fe0d55afb28d2a4224',1,'op::WrapperStructPose::renderThreshold()'],['../structop_1_1_wrapper_struct_hand.html#a9655c0dfa83eefde174d09e622482089',1,'op::WrapperStructHand::renderThreshold()'],['../structop_1_1_wrapper_struct_face.html#a982e3f1a13358a522e1882d17cb80d57',1,'op::WrapperStructFace::renderThreshold()']]], + ['reorderandnormalize_845',['reorderAndNormalize',['../namespaceop.html#a8587bab6b02056384b7c424555cd50d8',1,'op']]], + ['replaceall_846',['replaceAll',['../namespaceop.html#a5fe477200af87dadb07c8d6a75b4414b',1,'op']]], + ['reset_847',['reset',['../classop_1_1_array.html#a12e538b09e98bf0900163031602ed2ed',1,'op::Array::reset()'],['../classop_1_1_thread_manager.html#a5b7c5ea46c360496e261094c5e1397a7',1,'op::ThreadManager::reset()'],['../classop_1_1_array.html#a3252c38318d81a8b8fb6f71f4d4c2642',1,'op::Array::reset(const std::vector< int > &sizes, T *const dataPtr)'],['../classop_1_1_array.html#ae0c3d1a662f6c213da16ac87e53120fc',1,'op::Array::reset(const int size, T *const dataPtr)'],['../classop_1_1_array.html#add2eeccd967cdf0900449649cb6f5afb',1,'op::Array::reset(const std::vector< int > &sizes, const T value)'],['../classop_1_1_array.html#ac7183eb2f4e78a6941da3a2079b9ed32',1,'op::Array::reset(const int size, const T value)'],['../classop_1_1_array.html#a0ad0232daa69783cf2c8f7a0ff5b3b0c',1,'op::Array::reset(const std::vector< int > &sizes={})']]], + ['resetwebcam_848',['resetWebcam',['../classop_1_1_video_capture_reader.html#ab85b68c93854dd7c2ad437477e819506',1,'op::VideoCaptureReader']]], + ['reshape_849',['Reshape',['../classop_1_1_resize_and_merge_caffe.html#abd4c8a363c569fbb4187cd928c481334',1,'op::ResizeAndMergeCaffe::Reshape()'],['../classop_1_1_nms_caffe.html#abe113059484596e82efd8b5f6d346ab5',1,'op::NmsCaffe::Reshape()'],['../classop_1_1_maximum_caffe.html#ab9fb5ce2358801ac4e85fa25f052baa4',1,'op::MaximumCaffe::Reshape()'],['../classop_1_1_body_part_connector_caffe.html#abf26360f5d25fab82705270dae5f5d86',1,'op::BodyPartConnectorCaffe::Reshape()'],['../classop_1_1_array_cpu_gpu.html#a1cc1cc3226543f5a2eb4c8ddcb5ec8a5',1,'op::ArrayCpuGpu::Reshape(const std::vector< int > &shape)'],['../classop_1_1_array_cpu_gpu.html#a9e3c6d812430d638187441e9d5cacfcc',1,'op::ArrayCpuGpu::Reshape(const int num, const int channels, const int height, const int width)']]], + ['resizeandmergebase_2ehpp_850',['resizeAndMergeBase.hpp',['../resize_and_merge_base_8hpp.html',1,'']]], + ['resizeandmergecaffe_851',['ResizeAndMergeCaffe',['../classop_1_1_resize_and_merge_caffe.html',1,'op::ResizeAndMergeCaffe< T >'],['../classop_1_1_resize_and_merge_caffe.html#a30805a265fa887eff04b1200dbda91a5',1,'op::ResizeAndMergeCaffe::ResizeAndMergeCaffe()']]], + 
['resizeandmergecaffe_2ehpp_852',['resizeAndMergeCaffe.hpp',['../resize_and_merge_caffe_8hpp.html',1,'']]], + ['resizeandmergecpu_853',['resizeAndMergeCpu',['../namespaceop.html#adb8ffc1a6a2cc2949d80d8e8ad4e2190',1,'op']]], + ['resizeandmergegpu_854',['resizeAndMergeGpu',['../namespaceop.html#a8982332c4263696d0e023997f0e4c753',1,'op']]], + ['resizeandmergeocl_855',['resizeAndMergeOcl',['../namespaceop.html#a97b053019720782f2f81bc1b41f036d6',1,'op']]], + ['resizeandpadrbggpu_856',['resizeAndPadRbgGpu',['../namespaceop.html#ad5495d8c6a65afbedef3af7a8844bfcc',1,'op::resizeAndPadRbgGpu(T *targetPtr, const T *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)'],['../namespaceop.html#a2f1ef915c8efc724c0bf40f0348f20a2',1,'op::resizeAndPadRbgGpu(T *targetPtr, const unsigned char *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)']]], + ['resizegetscalefactor_857',['resizeGetScaleFactor',['../namespaceop.html#a24ebdcb8395dea0429f220de6a715d6e',1,'op']]], + ['retrievalfps_858',['RetrievalFps',['../namespaceop.html#ac0230b669b296920c0cfc41b7587268fa6bcd0f3b66e42d1aacd18d1c3b532473',1,'op']]], + ['rotateandflipframe_859',['rotateAndFlipFrame',['../namespaceop.html#af65d1b7c5b708f30780e4b2bcfccedcb',1,'op']]], + ['rotation_860',['Rotation',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774af1a42bd417390fc63b030a519624607a',1,'op']]], + ['rows_861',['rows',['../classop_1_1_matrix.html#a3099b24c0ee295014d95c99a20615fdd',1,'op::Matrix']]] +]; diff --git a/web/html/doc/search/all_16.html b/web/html/doc/search/all_16.html new file mode 100644 index 000000000..b19867ad9 --- /dev/null +++ b/web/html/doc/search/all_16.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
+ Loading...
+ Searching...
+ No Matches
    + + diff --git a/web/html/doc/search/all_16.js b/web/html/doc/search/all_16.js new file mode 100644 index 000000000..e211ca0d8 --- /dev/null +++ b/web/html/doc/search/all_16.js @@ -0,0 +1,106 @@ +var searchData= +[ + ['save_862',['save',['../classop_1_1_people_json_saver.html#ac0c0609652f89a3de44bcc940a82e235',1,'op::PeopleJsonSaver']]], + ['savedata_863',['saveData',['../namespaceop.html#aafac1158605748694e3c3ed4eb34b3b7',1,'op::saveData(const std::vector< Matrix > &opMats, const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)'],['../namespaceop.html#a7b9bcb57dd8488ade8ea288342eaed08',1,'op::saveData(const Matrix &opMat, const std::string cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)']]], + ['savefloatarray_864',['saveFloatArray',['../namespaceop.html#ac1080e627185a65b88ec788184a95552',1,'op']]], + ['saveheatmaps_865',['saveHeatMaps',['../classop_1_1_heat_map_saver.html#a48aeaad854bfff14fd2642dc13071137',1,'op::HeatMapSaver']]], + ['saveimage_866',['saveImage',['../namespaceop.html#a8c9d3469086a12607b097731848b6dea',1,'op']]], + ['saveimages_867',['saveImages',['../classop_1_1_image_saver.html#a8c1f4ae3799db276753707879e59bee1',1,'op::ImageSaver::saveImages(const std::vector< Matrix > &matOutputDatas, const std::string &fileName) const'],['../classop_1_1_image_saver.html#a0262916d0af8cc3be81b3375e0520e62',1,'op::ImageSaver::saveImages(const Matrix &cvOutputData, const std::string &fileName) const']]], + ['savekeypoints_868',['saveKeypoints',['../classop_1_1_keypoint_saver.html#aad663949dc5f2262ebdc69ed0d0caa1b',1,'op::KeypointSaver']]], + ['savepeoplejson_869',['savePeopleJson',['../namespaceop.html#af9c189f7c80092570699c8b9d5686fea',1,'op::savePeopleJson(const Array< float > &keypoints, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &keypointName, const std::string &fileName, const bool humanReadable)'],['../namespaceop.html#a1e986a510a29bfd8c682f65a8b399551',1,'op::savePeopleJson(const std::vector< std::pair< Array< float >, std::string >> &keypointVector, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &fileName, const bool humanReadable)']]], + ['scale_870',['scale',['../classop_1_1_keypoint_scaler.html#a42e46aea4d43fcf0886f06c9700148ea',1,'op::KeypointScaler::scale(std::vector< std::vector< std::array< float, 3 >>> &poseCandidates, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const'],['../classop_1_1_keypoint_scaler.html#a687a64bbca93d54292f191762efe20d7',1,'op::KeypointScaler::scale(std::vector< Array< float >> &arraysToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const'],['../classop_1_1_keypoint_scaler.html#a9c2d575ce49bb6112b2a099cb92f07cc',1,'op::KeypointScaler::scale(Array< float > &arrayToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const']]], + ['scale_5fdata_871',['scale_data',['../classop_1_1_array_cpu_gpu.html#a16dc8c19cc0b0442c1be6c859fe7d33c',1,'op::ArrayCpuGpu']]], + ['scale_5fdiff_872',['scale_diff',['../classop_1_1_array_cpu_gpu.html#a0a307838959472e8e8815d76305c1bf6',1,'op::ArrayCpuGpu']]], + 
['scaleandsizeextractor_873',['ScaleAndSizeExtractor',['../classop_1_1_scale_and_size_extractor.html',1,'op::ScaleAndSizeExtractor'],['../classop_1_1_scale_and_size_extractor.html#a4618beea6f87df0c4eac6c6a204bd269',1,'op::ScaleAndSizeExtractor::ScaleAndSizeExtractor()']]], + ['scaleandsizeextractor_2ehpp_874',['scaleAndSizeExtractor.hpp',['../scale_and_size_extractor_8hpp.html',1,'']]], + ['scalegap_875',['scaleGap',['../structop_1_1_wrapper_struct_pose.html#a646ae142f821411d22d772b76960d585',1,'op::WrapperStructPose']]], + ['scaleinputtonetinputs_876',['scaleInputToNetInputs',['../structop_1_1_datum.html#a0e416771f275be98c83aaff01e482a71',1,'op::Datum']]], + ['scaleinputtooutput_877',['scaleInputToOutput',['../structop_1_1_datum.html#a6cf96c250c236a03f13da69e1d4336d9',1,'op::Datum']]], + ['scalekeypoints_878',['scaleKeypoints',['../namespaceop.html#ac5fc565b24e499e306ca170b9139eeb6',1,'op']]], + ['scalekeypoints2d_879',['scaleKeypoints2d',['../namespaceop.html#a6b9adf8f7e698e566414c9f44f0c85f1',1,'op::scaleKeypoints2d(Array< T > &keypoints, const T scaleX, const T scaleY, const T offsetX, const T offsetY)'],['../namespaceop.html#aae9e38fa6c56e188b4f649732f0d4cd3',1,'op::scaleKeypoints2d(Array< T > &keypoints, const T scaleX, const T scaleY)']]], + ['scalemode_880',['ScaleMode',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bda',1,'op']]], + ['scalenettooutput_881',['scaleNetToOutput',['../structop_1_1_datum.html#a44af7162e180c6856ce909057f43d8e1',1,'op::Datum']]], + ['scalerange_882',['scaleRange',['../structop_1_1_wrapper_struct_hand.html#adaea15e182e5c75129293873cd94e35f',1,'op::WrapperStructHand']]], + ['scalesnumber_883',['scalesNumber',['../structop_1_1_wrapper_struct_hand.html#aea6263dc96708b11fab72416d810b3d6',1,'op::WrapperStructHand::scalesNumber()'],['../structop_1_1_wrapper_struct_pose.html#ad23a9c103a60709eed9d7b7381828e5e',1,'op::WrapperStructPose::scalesNumber()']]], + ['sendjointangles_884',['sendJointAngles',['../classop_1_1_udp_sender.html#a2e8b52e1fd5a3383ebc9063ce21f6f06',1,'op::UdpSender']]], + ['set_885',['set',['../classop_1_1_pose_extractor_net.html#a7e49f2339e21ff784689ec78c9d69b75',1,'op::PoseExtractorNet::set()'],['../classop_1_1_webcam_reader.html#ae66b26829cc2d6e3f02109d4431a7bc2',1,'op::WebcamReader::set()'],['../classop_1_1_video_reader.html#a0dd53334327642368d41ec860e64e756',1,'op::VideoReader::set()'],['../classop_1_1_video_capture_reader.html#ab2929b7d2d002b58ebaf7b9b56999cca',1,'op::VideoCaptureReader::set()'],['../classop_1_1_producer.html#af11f1bbfbd61b9534c02c3e4839e19b0',1,'op::Producer::set(const ProducerProperty property, const double value)'],['../classop_1_1_producer.html#ab30c7b3e34d962e0b17458d9a0947f6b',1,'op::Producer::set(const int capProperty, const double value)=0'],['../classop_1_1_ip_camera_reader.html#ac13cc7da97a31d8e69eaa64b2a7b31ba',1,'op::IpCameraReader::set()'],['../classop_1_1_image_directory_reader.html#a1965a4dca2ddb86b24e69e2da90b9dbf',1,'op::ImageDirectoryReader::set()'],['../classop_1_1_flir_reader.html#af14f63c79272781429341dc3a0720485',1,'op::FlirReader::set()']]], + ['set_5fcpu_5fdata_886',['set_cpu_data',['../classop_1_1_array_cpu_gpu.html#ad6e6a4da3987e9558d89b877f9ec7e82',1,'op::ArrayCpuGpu']]], + ['set_5fgpu_5fdata_887',['set_gpu_data',['../classop_1_1_array_cpu_gpu.html#a4dd6f5e4d7f54e921c7c296078a594f0',1,'op::ArrayCpuGpu']]], + ['setalphaheatmap_888',['setAlphaHeatMap',['../classop_1_1_renderer.html#abaea1725725ff775aed0c120b2ba3d1f',1,'op::Renderer']]], + 
['setalphakeypoint_889',['setAlphaKeypoint',['../classop_1_1_renderer.html#a039e88897ed844551cadb115ea98e9ef',1,'op::Renderer']]], + ['setblendoriginalframe_890',['setBlendOriginalFrame',['../classop_1_1_renderer.html#aa8339054ed113d99ca70208d0cee5aa9',1,'op::Renderer']]], + ['setdefaultmaxsizequeues_891',['setDefaultMaxSizeQueues',['../classop_1_1_thread_manager.html#ace408d1d281193a9f3d3d6561181ef56',1,'op::ThreadManager::setDefaultMaxSizeQueues()'],['../classop_1_1_wrapper_t.html#aa89055f5cf4e762071479f5fec8d2faf',1,'op::WrapperT::setDefaultMaxSizeQueues()']]], + ['setdefaultnmsthreshold_892',['setDefaultNmsThreshold',['../classop_1_1_body_part_connector_caffe.html#af4520659b0cfb644a3c2d6ceb0e81a21',1,'op::BodyPartConnectorCaffe']]], + ['setdefaultx_893',['setDefaultX',['../classop_1_1_profiler.html#a2dc5cfa5fd91d4cadaa7e66695f1ee51',1,'op::Profiler']]], + ['setelementtorender_894',['setElementToRender',['../classop_1_1_renderer.html#a9d46c28d88225af94468c757ab1b26c1',1,'op::Renderer::setElementToRender(const ElementToRender elementToRender)'],['../classop_1_1_renderer.html#afd48a9cb0be184303dce2969fa2f8e02',1,'op::Renderer::setElementToRender(const int elementToRender)']]], + ['setenabled_895',['setEnabled',['../classop_1_1_hand_extractor_net.html#ab59a77d051991734b0c74b122671f097',1,'op::HandExtractorNet::setEnabled()'],['../classop_1_1_face_extractor_net.html#a6c00e96ddf7465062d6f0b51a7a1348d',1,'op::FaceExtractorNet::setEnabled()']]], + ['seterrormodes_896',['setErrorModes',['../namespaceop_1_1_configure_error.html#a96e56b0ddbe2cb17443b93aaba05d672',1,'op::ConfigureError']]], + ['setfrom_897',['setFrom',['../classop_1_1_array.html#a9f4b51216faaa967d81598a0cedcf78f',1,'op::Array']]], + ['setfullscreenmode_898',['setFullScreenMode',['../classop_1_1_frame_displayer.html#a2df856e4cf7542c7cda2757553674fb8',1,'op::FrameDisplayer']]], + ['setimage_899',['setImage',['../classop_1_1_gui.html#a8fc6182d0124dd24e26e0fc139074061',1,'op::Gui::setImage(const Matrix &cvMatOutput)'],['../classop_1_1_gui.html#abeff19fe8eceeacfb9115a059cdde4ad',1,'op::Gui::setImage(const std::vector< Matrix > &cvMatOutputs)']]], + ['setinterminabovethreshold_900',['setInterMinAboveThreshold',['../classop_1_1_body_part_connector_caffe.html#a789c77e69e5590a78b22e1e5f5cc4efc',1,'op::BodyPartConnectorCaffe']]], + ['setinterthreshold_901',['setInterThreshold',['../classop_1_1_body_part_connector_caffe.html#a75d0a3f3c8dca99c8a9e1b680098da16',1,'op::BodyPartConnectorCaffe']]], + ['setkeypoints_902',['setKeypoints',['../classop_1_1_gui3_d.html#abd245c07a53d1d25c237aff22a2b6e6f',1,'op::Gui3D']]], + ['setlogmodes_903',['setLogModes',['../namespaceop_1_1_configure_log.html#a2f41e9a74bbda434ef16189c32a13aba',1,'op::ConfigureLog']]], + ['setmainthread_904',['setMainThread',['../namespaceop.html#a7eb0121791185c13a6c3dd88994e0eab',1,'op']]], + ['setmaximizepositives_905',['setMaximizePositives',['../classop_1_1_body_part_connector_caffe.html#a6d859f2e218b1ea707fddcaf0911886d',1,'op::BodyPartConnectorCaffe']]], + ['setminsubsetcnt_906',['setMinSubsetCnt',['../classop_1_1_body_part_connector_caffe.html#a6442721373481873ddeb9ffd7c6fdb7b',1,'op::BodyPartConnectorCaffe']]], + ['setminsubsetscore_907',['setMinSubsetScore',['../classop_1_1_body_part_connector_caffe.html#a9b9fa9490fef0121a70c3d6d749272f7',1,'op::BodyPartConnectorCaffe']]], + ['setoffset_908',['setOffset',['../classop_1_1_nms_caffe.html#a1dd658e4bc9e080867a99e9b57f1baa8',1,'op::NmsCaffe']]], + 
['setposemodel_909',['setPoseModel',['../classop_1_1_body_part_connector_caffe.html#a104744fdab14d4c1335eb8778edea21e',1,'op::BodyPartConnectorCaffe']]], + ['setprioritythreshold_910',['setPriorityThreshold',['../namespaceop_1_1_configure_log.html#a149393c3c87c82a5cf14417c6b430d30',1,'op::ConfigureLog']]], + ['setproducerfpsmode_911',['setProducerFpsMode',['../classop_1_1_producer.html#a024e55b4ec769cdbc40ee21613a6ef6f',1,'op::Producer']]], + ['setscalenettooutput_912',['setScaleNetToOutput',['../classop_1_1_body_part_connector_caffe.html#a0bad959b2da005b62cab962327ccba01',1,'op::BodyPartConnectorCaffe']]], + ['setscaleratios_913',['setScaleRatios',['../classop_1_1_resize_and_merge_caffe.html#aa16862bbc207fef227d53d37223512b8',1,'op::ResizeAndMergeCaffe']]], + ['setsharedparameters_914',['setSharedParameters',['../classop_1_1_op_output_to_cv_mat.html#af150c89ff5edbe4f4bd727b7162e9b36',1,'op::OpOutputToCvMat::setSharedParameters()'],['../classop_1_1_gpu_renderer.html#acc83c7b857db7d35132febaebfcb84df',1,'op::GpuRenderer::setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)']]], + ['setsharedparametersandiflast_915',['setSharedParametersAndIfLast',['../classop_1_1_gpu_renderer.html#afa58647bfd9efa02629e4b81bbe48c6e',1,'op::GpuRenderer']]], + ['setshowgooglyeyes_916',['setShowGooglyEyes',['../classop_1_1_renderer.html#ab226d47f554735fa3e0372ce429747c3',1,'op::Renderer']]], + ['setthreshold_917',['setThreshold',['../classop_1_1_nms_caffe.html#a5f257eb561fc705c2b74489b12269b49',1,'op::NmsCaffe']]], + ['setto_918',['setTo',['../classop_1_1_array.html#a28f09d11de753a741334ee8094296acb',1,'op::Array::setTo()'],['../classop_1_1_matrix.html#ad74c013aa1aaed93271275cce6c77972',1,'op::Matrix::setTo()']]], + ['setundistortimage_919',['setUndistortImage',['../classop_1_1_camera_parameter_reader.html#ae33e8637012413719b389649d1e5448a',1,'op::CameraParameterReader']]], + ['setworker_920',['setWorker',['../classop_1_1_wrapper_t.html#a0b502ef38ee46749733ae3dda7e5fd31',1,'op::WrapperT']]], + ['shape_921',['shape',['../classop_1_1_array_cpu_gpu.html#af817bde68da318a8f9dd08feabe3c286',1,'op::ArrayCpuGpu::shape() const'],['../classop_1_1_array_cpu_gpu.html#a3e44f7ede3ff5ef0db6df30ecd4562c5',1,'op::ArrayCpuGpu::shape(const int index) const']]], + ['shape_5fstring_922',['shape_string',['../classop_1_1_array_cpu_gpu.html#a425d12f8d702ac9a57fb9a5f48cea152',1,'op::ArrayCpuGpu']]], + ['size_923',['Size',['../namespaceop.html#a1070db47220e17cf37df40411350f6fba6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daa6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()']]], + ['size_924',['size',['../classop_1_1_queue_base.html#a8fd69ac0ffcda02d0d26102e2ebd2841',1,'op::QueueBase::size()'],['../classop_1_1_matrix.html#a93188dad84f0f0a20f7a631edd32a620',1,'op::Matrix::size()']]], + 
['size_925',['Size',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()']]], + ['skeleton_926',['Skeleton',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea6ab48f7ed56efc362f41853c5616bf75',1,'op']]], + ['spelementtorender_927',['spElementToRender',['../classop_1_1_renderer.html#aca8ebf0c0a50b87f0be82afa090155a0',1,'op::Renderer']]], + ['spgpumemory_928',['spGpuMemory',['../classop_1_1_gpu_renderer.html#a5d729aab549908c758953be742dd0115',1,'op::GpuRenderer']]], + ['spinnakerwrapper_929',['SpinnakerWrapper',['../classop_1_1_spinnaker_wrapper.html',1,'op::SpinnakerWrapper'],['../classop_1_1_spinnaker_wrapper.html#a5d1ba90b4d1987423b330de2fdcdb702',1,'op::SpinnakerWrapper::SpinnakerWrapper()']]], + ['spinnakerwrapper_2ehpp_930',['spinnakerWrapper.hpp',['../spinnaker_wrapper_8hpp.html',1,'']]], + ['spisrunning_931',['spIsRunning',['../classop_1_1_gui.html#a0ad7be7018e634769da8d22d60e7edc0',1,'op::Gui']]], + ['splitcvmatintovectormatrix_932',['splitCvMatIntoVectorMatrix',['../classop_1_1_matrix.html#a042f230ce1b1f56458f90e09d7b4e7e4',1,'op::Matrix']]], + ['splitstring_933',['splitString',['../namespaceop.html#ae80a103d8a4308bc435342b3d31404c8',1,'op']]], + ['spnumberelementstorender_934',['spNumberElementsToRender',['../classop_1_1_renderer.html#a3e04644546dd9990a16d0b6861b60553',1,'op::Renderer']]], + ['standalone_5fface_5for_5fhand_5fkeypoint_5fdetector_2emd_935',['standalone_face_or_hand_keypoint_detector.md',['../standalone__face__or__hand__keypoint__detector_8md.html',1,'']]], + ['standard_2ehpp_936',['standard.hpp',['../standard_8hpp.html',1,'']]], + ['start_937',['start',['../classop_1_1_thread_manager.html#a01c2d62e539896e36564457ab9cac25c',1,'op::ThreadManager::start()'],['../classop_1_1_wrapper_t.html#a8111d8cdb984e996410ace159a896992',1,'op::WrapperT::start()']]], + ['startinthread_938',['startInThread',['../classop_1_1_thread.html#ac898abffd6ed18456b97ef1b72935ec6',1,'op::Thread']]], + ['stdcerr_939',['StdCerr',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca002f2100f8870e7c823894f492e4d337',1,'op']]], + ['stdcout_940',['StdCout',['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94aa544d56d9492a20da20018000b5043b6',1,'op']]], + ['stdruntimeerror_941',['StdRuntimeError',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cafe50b062b9b9100a72e68b48fe26fc50',1,'op']]], + ['step1_942',['step1',['../classop_1_1_matrix.html#a41ec72e2d80f73025c4c0837364c8193',1,'op::Matrix']]], + ['stop_943',['stop',['../classop_1_1_thread_manager.html#a472a1ebee700c3449bac4d6d2bb0c3a8',1,'op::ThreadManager::stop()'],['../classop_1_1_queue_base.html#a68b51dafaba93179fcef78731aaf1703',1,'op::QueueBase::stop()'],['../classop_1_1_worker.html#ae45ac828f6e8f6055203c224e50f145b',1,'op::Worker::stop()'],['../classop_1_1_wrapper_t.html#a061ea09aac902a8a44438feffd18998f',1,'op::WrapperT::stop()']]], + ['stopandjoin_944',['stopAndJoin',['../classop_1_1_thread.html#a92e5dd0f60a0485e7d0fad3e82bb74f3',1,'op::Thread']]], + ['stoppusher_945',['stopPusher',['../classop_1_1_queue_base.html#a32ac0e4b14a310aee62ce817e86c0356',1,'op::QueueBase']]], + 
['string_946',['String',['../classop_1_1_string.html',1,'op::String'],['../classop_1_1_string.html#a5848aace0a849fafffb3a2ae78d05156',1,'op::String::String(const std::string &string)'],['../classop_1_1_string.html#a5f1c9f53adedf082ee0cad43fa6140be',1,'op::String::String()'],['../classop_1_1_string.html#ad8384eb6141b3fc53e5dc246be77cf6c',1,'op::String::String(const char *charPtr)']]], + ['string_2ehpp_947',['string.hpp',['../utilities_2string_8hpp.html',1,'(Global Namespace)'],['../core_2string_8hpp.html',1,'(Global Namespace)']]], + ['stringtodataformat_948',['stringToDataFormat',['../namespaceop.html#a46e815df32db67d78a94367b7f97df25',1,'op']]], + ['subid_949',['subId',['../structop_1_1_datum.html#aeb6d10e4fa40a20d38118bf1be3112d8',1,'op::Datum']]], + ['subidmax_950',['subIdMax',['../structop_1_1_datum.html#ab87c493347456b592b616e9f656a5d60',1,'op::Datum']]], + ['subthread_951',['SubThread',['../classop_1_1_sub_thread.html',1,'op::SubThread< TDatums, TWorker >'],['../classop_1_1_sub_thread.html#aa551df0d8f0b30aaf9e0840ecf29d749',1,'op::SubThread::SubThread()']]], + ['subthread_2ehpp_952',['subThread.hpp',['../sub_thread_8hpp.html',1,'']]], + ['subthreadnoqueue_953',['SubThreadNoQueue',['../classop_1_1_sub_thread_no_queue.html',1,'op::SubThreadNoQueue< TDatums, TWorker >'],['../classop_1_1_sub_thread_no_queue.html#afbaf89d7a8fb4d19f67064fb954a31eb',1,'op::SubThreadNoQueue::SubThreadNoQueue()']]], + ['subthreadnoqueue_2ehpp_954',['subThreadNoQueue.hpp',['../sub_thread_no_queue_8hpp.html',1,'']]], + ['subthreadqueuein_955',['SubThreadQueueIn',['../classop_1_1_sub_thread_queue_in.html',1,'op::SubThreadQueueIn< TDatums, TWorker, TQueue >'],['../classop_1_1_sub_thread_queue_in.html#a11aa71a818430c4eb435a1626e54f29a',1,'op::SubThreadQueueIn::SubThreadQueueIn()']]], + ['subthreadqueuein_2ehpp_956',['subThreadQueueIn.hpp',['../sub_thread_queue_in_8hpp.html',1,'']]], + ['subthreadqueueinout_957',['SubThreadQueueInOut',['../classop_1_1_sub_thread_queue_in_out.html',1,'op::SubThreadQueueInOut< TDatums, TWorker, TQueue >'],['../classop_1_1_sub_thread_queue_in_out.html#aa5b9beea615b8b968c5da74dd66a6d78',1,'op::SubThreadQueueInOut::SubThreadQueueInOut()']]], + ['subthreadqueueinout_2ehpp_958',['subThreadQueueInOut.hpp',['../sub_thread_queue_in_out_8hpp.html',1,'']]], + ['subthreadqueueout_959',['SubThreadQueueOut',['../classop_1_1_sub_thread_queue_out.html',1,'op::SubThreadQueueOut< TDatums, TWorker, TQueue >'],['../classop_1_1_sub_thread_queue_out.html#aa4a827932f632f1f30b5650a4fcc77ff',1,'op::SubThreadQueueOut::SubThreadQueueOut()']]], + ['subthreadqueueout_2ehpp_960',['subThreadQueueOut.hpp',['../sub_thread_queue_out_8hpp.html',1,'']]], + ['sumsq_5fdata_961',['sumsq_data',['../classop_1_1_array_cpu_gpu.html#a6dd38e027220beada2f8f55f9d073d53',1,'op::ArrayCpuGpu']]], + ['sumsq_5fdiff_962',['sumsq_diff',['../classop_1_1_array_cpu_gpu.html#a280202f2a968ea68795d31accf5072bb',1,'op::ArrayCpuGpu']]], + ['switchfullscreenmode_963',['switchFullScreenMode',['../classop_1_1_frame_displayer.html#ad83a47005c52f066587f49d62c109802',1,'op::FrameDisplayer']]], + ['synchronous_964',['Synchronous',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a2fe4167817733fec8e6ba1afddf78f1b',1,'op']]] +]; diff --git a/web/html/doc/search/all_17.html b/web/html/doc/search/all_17.html new file mode 100644 index 000000000..1ad5d34b4 --- /dev/null +++ b/web/html/doc/search/all_17.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_17.js b/web/html/doc/search/all_17.js new file mode 100644 index 000000000..56cc991df --- /dev/null +++ b/web/html/doc/search/all_17.js @@ -0,0 +1,28 @@ +var searchData= +[ + ['thread_965',['Thread',['../classop_1_1_thread.html',1,'op::Thread< TDatums, TWorker >'],['../classop_1_1_thread.html#a9d3408a329a475da22a8e2a0bdf5f68d',1,'op::Thread::Thread(const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr=nullptr)'],['../classop_1_1_thread.html#a6ae463dc996ca6941a303b0c41288063',1,'op::Thread::Thread(Thread &&t)']]], + ['thread_2ehpp_966',['thread.hpp',['../thread_8hpp.html',1,'']]], + ['threadidpp_967',['threadIdPP',['../namespaceop.html#af65a4564afcad06b72468679f6bee52b',1,'op']]], + ['threadmanager_968',['ThreadManager',['../classop_1_1_thread_manager.html',1,'op::ThreadManager< TDatums, TWorker, TQueue >'],['../classop_1_1_thread_manager.html#a8b7d17f4a330495389e646bb21907303',1,'op::ThreadManager::ThreadManager()']]], + ['threadmanager_2ehpp_969',['threadManager.hpp',['../thread_manager_8hpp.html',1,'']]], + ['threadmanagermode_970',['ThreadManagerMode',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36',1,'op']]], + ['timerend_971',['timerEnd',['../classop_1_1_profiler.html#affff5b0b992e05276b2a699f97ad525d',1,'op::Profiler']]], + ['timerinit_972',['timerInit',['../classop_1_1_profiler.html#aea2c89bcd0a661a53f47e59b9bec2be7',1,'op::Profiler']]], + ['tofixedlengthstring_973',['toFixedLengthString',['../namespaceop.html#a42292d44d10f55cb1d83a296183e9b31',1,'op']]], + ['tolower_974',['toLower',['../namespaceop.html#a3290f48d24c9992dd00d339ce49cfac7',1,'op']]], + ['topleft_975',['topLeft',['../structop_1_1_rectangle.html#a640050d6186148b425bedba8c33cf1ea',1,'op::Rectangle']]], + ['tostring_976',['toString',['../classop_1_1_array.html#af488c66ddac6cb75f7690ba8207599ed',1,'op::Array::toString()'],['../structop_1_1_point.html#a73d1088b5d0f3370499ca5c6e80b544a',1,'op::Point::toString()'],['../structop_1_1_rectangle.html#af1c7f96c34132924fa9237248894e63d',1,'op::Rectangle::toString()']]], + ['total_977',['total',['../classop_1_1_matrix.html#a09859c3f88b8c75c7d12f53667304450',1,'op::Matrix']]], + ['toupper_978',['toUpper',['../namespaceop.html#a7a815e303884fb2b3346c8cc19d61b23',1,'op']]], + ['track_979',['track',['../classop_1_1_pose_extractor.html#ab464ae97522336cf69dec1c1561c431d',1,'op::PoseExtractor::track()'],['../classop_1_1_person_tracker.html#a05eaf85bd389ad965f9960c9db31d873',1,'op::PersonTracker::track()']]], + ['trackhands_980',['trackHands',['../classop_1_1_hand_detector.html#a963972f9ecb769786b5f60018da443e4',1,'op::HandDetector']]], + ['tracking_981',['tracking',['../structop_1_1_wrapper_struct_extra.html#a86ae9d1faa008aaeed4d6fa6ff03f0fb',1,'op::WrapperStructExtra']]], + ['tracklockthread_982',['trackLockThread',['../classop_1_1_person_tracker.html#a35cd3cd6c946f560220c9459a5dd7ee7',1,'op::PersonTracker::trackLockThread()'],['../classop_1_1_pose_extractor.html#adab126d32216aa9a27cc78d7158d6616',1,'op::PoseExtractor::trackLockThread()']]], + ['transpose_983',['transpose',['../namespaceop.html#a75c4194e0eae0ef28c6829def462dad2',1,'op']]], + ['tryemplace_984',['tryEmplace',['../classop_1_1_wrapper_t.html#a79fa1a518495e1e3684f05943d1c04f8',1,'op::WrapperT::tryEmplace()'],['../classop_1_1_queue_base.html#a7905841f953be7099847cc7b5b17ae0c',1,'op::QueueBase::tryEmplace()'],['../classop_1_1_thread_manager.html#a8d5ffd9473557ff0f90ac1c6a1bae3ad',1,'op::ThreadManager::tryEmplace()']]], + 
['trypop_985',['tryPop',['../classop_1_1_queue_base.html#a80c6e2dda17afa82aae83aeadad1f7e0',1,'op::QueueBase::tryPop(TDatums &tDatums)'],['../classop_1_1_queue_base.html#a5e52b4ab7e310373e3d1f1d42cfe4549',1,'op::QueueBase::tryPop()'],['../classop_1_1_wrapper_t.html#a55af0ab1f0ea4329f2c0bb3feb92b835',1,'op::WrapperT::tryPop()'],['../classop_1_1_thread_manager.html#a59916fc3428aaf5c487e1dd373d437cd',1,'op::ThreadManager::tryPop(TDatums &tDatums)']]], + ['trypush_986',['tryPush',['../classop_1_1_thread_manager.html#a7a24fd902ebd4b5fd81166547a5654d9',1,'op::ThreadManager::tryPush()'],['../classop_1_1_wrapper_t.html#a4d9396d4490b90f32a45d4a80d2cd5c7',1,'op::WrapperT::tryPush()'],['../classop_1_1_queue_base.html#a35f0547f6020f22e49835b147b7ec52e',1,'op::QueueBase::tryPush()']]], + ['trystop_987',['tryStop',['../classop_1_1_worker.html#ad689b232d68f3b3e0b41f9e219b01134',1,'op::Worker::tryStop()'],['../classop_1_1_w_queue_orderer.html#a01bc7495ec992cc9c54a040534cb3634',1,'op::WQueueOrderer::tryStop()']]], + ['ttostring_988',['tToString',['../namespaceop.html#af548fe1a2ad2b392a25afe9b0b87b8dd',1,'op']]], + ['type_989',['type',['../classop_1_1_resize_and_merge_caffe.html#a90e4af20eee1bfaf152937199f3ad068',1,'op::ResizeAndMergeCaffe::type()'],['../classop_1_1_nms_caffe.html#adc88733fceaefc359a95f067c62c3b07',1,'op::NmsCaffe::type()'],['../classop_1_1_maximum_caffe.html#a8d047aa2e08e49199f9f422191e9bdd2',1,'op::MaximumCaffe::type()'],['../classop_1_1_body_part_connector_caffe.html#aec0d6f32107a6222406e73ca9ae4942d',1,'op::BodyPartConnectorCaffe::type()'],['../classop_1_1_matrix.html#aac1572705e72a18198a8b2d32d1b5c24',1,'op::Matrix::type()']]] +]; diff --git a/web/html/doc/search/all_18.html b/web/html/doc/search/all_18.html new file mode 100644 index 000000000..507d0f856 --- /dev/null +++ b/web/html/doc/search/all_18.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_18.js b/web/html/doc/search/all_18.js new file mode 100644 index 000000000..b83763fec --- /dev/null +++ b/web/html/doc/search/all_18.js @@ -0,0 +1,23 @@ +var searchData= +[ + ['ucharcvmattofloatptr_990',['uCharCvMatToFloatPtr',['../namespaceop.html#a532d08cb2ef011f9cad29c01d3431d6e',1,'op']]], + ['ucharimagecast_991',['uCharImageCast',['../namespaceop.html#a6aeab543a61ef23ed58a6e29401424ae',1,'op']]], + ['ucharround_992',['uCharRound',['../namespaceop.html#a61240e5fbd4ea84a2cfdc89407bcb1ae',1,'op']]], + ['udphost_993',['udpHost',['../structop_1_1_wrapper_struct_output.html#a17c339a9c6c97e1dfdb29b3af0bdab77',1,'op::WrapperStructOutput']]], + ['udpport_994',['udpPort',['../structop_1_1_wrapper_struct_output.html#a873bcabae9cf7039830cae565009ce2b',1,'op::WrapperStructOutput']]], + ['udpsender_995',['UdpSender',['../classop_1_1_udp_sender.html',1,'op::UdpSender'],['../classop_1_1_udp_sender.html#a80fb12e5d4357e5dbb37c8a7b660c67c',1,'op::UdpSender::UdpSender()']]], + ['udpsender_2ehpp_996',['udpSender.hpp',['../udp_sender_8hpp.html',1,'']]], + ['uintround_997',['uIntRound',['../namespaceop.html#a8525e440d6ac1b558e72637dc4f4e3c4',1,'op']]], + ['ulonglonground_998',['uLongLongRound',['../namespaceop.html#a757a5cc88734e7be9e910e7d8213c282',1,'op']]], + ['ulonground_999',['ulongRound',['../namespaceop.html#aaafe2e235a1a3a146bb026b71c521c7b',1,'op']]], + ['undistort_1000',['undistort',['../classop_1_1_camera_parameter_reader.html#aee02b82d0c5fd51dd3ba5a2267f7b370',1,'op::CameraParameterReader']]], + ['undistortimage_1001',['undistortImage',['../structop_1_1_wrapper_struct_input.html#ae7183e10862dbdbed422f042f1a71ed1',1,'op::WrapperStructInput']]], + ['unitybinding_2ehpp_1002',['unityBinding.hpp',['../unity_binding_8hpp.html',1,'']]], + ['unrollarraytoucharcvmat_1003',['unrollArrayToUCharCvMat',['../namespaceop.html#a1910d9f194831570be6ffe683209e7b3',1,'op']]], + ['unsignedchar_1004',['UnsignedChar',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaa93f121640d609f8772397a0f40f40d6',1,'op']]], + ['unused_1005',['UNUSED',['../macros_8hpp.html#af57a843cfdae82e064838c20b3b54851',1,'macros.hpp']]], + ['update_1006',['Update',['../classop_1_1_array_cpu_gpu.html#af9f32307732772c708ff45c52b4e7dd0',1,'op::ArrayCpuGpu']]], + ['update_1007',['update',['../classop_1_1_gui3_d.html#af10162684889706894f13a308970ba32',1,'op::Gui3D::update()'],['../classop_1_1_gui.html#a8e9a67dd507598654a5db06273d50c94',1,'op::Gui::update()']]], + ['updatetracker_1008',['updateTracker',['../classop_1_1_hand_detector.html#a58513169f01ab7c705979f1f2a88b571',1,'op::HandDetector']]], + ['upsamplingratio_1009',['upsamplingRatio',['../structop_1_1_wrapper_struct_pose.html#a25ee056d914f1cdc990a8a7956810313',1,'op::WrapperStructPose']]] +]; diff --git a/web/html/doc/search/all_19.html b/web/html/doc/search/all_19.html new file mode 100644 index 000000000..e69289e9b --- /dev/null +++ b/web/html/doc/search/all_19.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_19.js b/web/html/doc/search/all_19.js new file mode 100644 index 000000000..1fd5deb0e --- /dev/null +++ b/web/html/doc/search/all_19.js @@ -0,0 +1,15 @@ +var searchData= +[ + ['vectorsareequal_1010',['vectorsAreEqual',['../namespaceop.html#af63e418966741f7efebacc9519174a0a',1,'op']]], + ['verbose_1011',['verbose',['../structop_1_1_wrapper_struct_output.html#aec57f5b4f6920cd43c2f6e55a21c769b',1,'op::WrapperStructOutput']]], + ['verboseprinter_1012',['VerbosePrinter',['../classop_1_1_verbose_printer.html',1,'op::VerbosePrinter'],['../classop_1_1_verbose_printer.html#a79d2dc59b75a0164f60d875ef78523da',1,'op::VerbosePrinter::VerbosePrinter()']]], + ['verboseprinter_2ehpp_1013',['verbosePrinter.hpp',['../verbose_printer_8hpp.html',1,'']]], + ['version_1014',['version',['../classop_1_1_json_ofstream.html#a6a5e0e4f3cdf8f93fb1bdef8cb63b0a2',1,'op::JsonOfstream']]], + ['video_1015',['Video',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332a34e2d1989a1dbf75cd631596133ee5ee',1,'op']]], + ['videocapturereader_1016',['VideoCaptureReader',['../classop_1_1_video_capture_reader.html',1,'op::VideoCaptureReader'],['../classop_1_1_video_capture_reader.html#ae07295c083ce99b032ce219ea15405d9',1,'op::VideoCaptureReader::VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)'],['../classop_1_1_video_capture_reader.html#a3fe940326900ac6a2289de85664b14be',1,'op::VideoCaptureReader::VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)']]], + ['videocapturereader_2ehpp_1017',['videoCaptureReader.hpp',['../video_capture_reader_8hpp.html',1,'']]], + ['videoreader_1018',['VideoReader',['../classop_1_1_video_reader.html',1,'op::VideoReader'],['../classop_1_1_video_reader.html#a219e3901e489a293e85fe9a872e7fb78',1,'op::VideoReader::VideoReader()']]], + ['videoreader_2ehpp_1019',['videoReader.hpp',['../video_reader_8hpp.html',1,'']]], + ['videosaver_1020',['VideoSaver',['../classop_1_1_video_saver.html',1,'op::VideoSaver'],['../classop_1_1_video_saver.html#a413aba00e90b40f6cd62144c98d7723c',1,'op::VideoSaver::VideoSaver()']]], + ['videosaver_2ehpp_1021',['videoSaver.hpp',['../video_saver_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/all_1a.html b/web/html/doc/search/all_1a.html new file mode 100644 index 000000000..e50f29b1c --- /dev/null +++ b/web/html/doc/search/all_1a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_1a.js b/web/html/doc/search/all_1a.js new file mode 100644 index 000000000..908480d99 --- /dev/null +++ b/web/html/doc/search/all_1a.js @@ -0,0 +1,144 @@ +var searchData= +[ + ['waitandemplace_1022',['waitAndEmplace',['../classop_1_1_thread_manager.html#a36bd8060a4f7f449a8aa35d9a166270d',1,'op::ThreadManager::waitAndEmplace()'],['../classop_1_1_wrapper_t.html#a442ff1e4fec93ec28457f7c7c4b4bfbb',1,'op::WrapperT::waitAndEmplace(TDatumsSP &tDatums)'],['../classop_1_1_wrapper_t.html#ae2c6cf519701c320ae53c597ae54a7aa',1,'op::WrapperT::waitAndEmplace(Matrix &matrix)'],['../classop_1_1_queue_base.html#a8a5d53c7b66fd0ef34b3e276f586e355',1,'op::QueueBase::waitAndEmplace(TDatums &tDatums)']]], + ['waitandpop_1023',['waitAndPop',['../classop_1_1_queue_base.html#a2c7b3d0fa6502c644c3083dd68332542',1,'op::QueueBase::waitAndPop(TDatums &tDatums)'],['../classop_1_1_queue_base.html#a84da9e045acec02e3900153eea2bd92d',1,'op::QueueBase::waitAndPop()'],['../classop_1_1_thread_manager.html#a393a9f04c70a002f5ceb5e301eea5cff',1,'op::ThreadManager::waitAndPop()'],['../classop_1_1_wrapper_t.html#a3c3b605e0787b55ffd00725c09a1cd53',1,'op::WrapperT::waitAndPop()']]], + ['waitandpush_1024',['waitAndPush',['../classop_1_1_thread_manager.html#abfa315257b3e8cd022573f439b4936ec',1,'op::ThreadManager::waitAndPush()'],['../classop_1_1_queue_base.html#ab28c5805dd23117c8d6d82d59617bb95',1,'op::QueueBase::waitAndPush()'],['../classop_1_1_wrapper_t.html#a0e0aea3f8bf81458c0662c46f4d345d5',1,'op::WrapperT::waitAndPush(const TDatumsSP &tDatums)'],['../classop_1_1_wrapper_t.html#abcb907a2718260a14c0472279254df84',1,'op::WrapperT::waitAndPush(const Matrix &matrix)']]], + ['wbvhsaver_2ehpp_1025',['wBvhSaver.hpp',['../w_bvh_saver_8hpp.html',1,'']]], + ['wcocojsonsaver_1026',['WCocoJsonSaver',['../classop_1_1_w_coco_json_saver.html',1,'op::WCocoJsonSaver< TDatums >'],['../classop_1_1_w_coco_json_saver.html#a508c1105406b3cc55dc6bd1b299f6ed3',1,'op::WCocoJsonSaver::WCocoJsonSaver()']]], + ['wcocojsonsaver_2ehpp_1027',['wCocoJsonSaver.hpp',['../w_coco_json_saver_8hpp.html',1,'']]], + ['wcvmattoopinput_1028',['WCvMatToOpInput',['../classop_1_1_w_cv_mat_to_op_input.html',1,'op::WCvMatToOpInput< TDatums >'],['../classop_1_1_w_cv_mat_to_op_input.html#a82c13641d071fdb5db50afdee7cfa849',1,'op::WCvMatToOpInput::WCvMatToOpInput()']]], + ['wcvmattoopinput_2ehpp_1029',['wCvMatToOpInput.hpp',['../w_cv_mat_to_op_input_8hpp.html',1,'']]], + ['wcvmattoopoutput_1030',['WCvMatToOpOutput',['../classop_1_1_w_cv_mat_to_op_output.html',1,'op::WCvMatToOpOutput< TDatums >'],['../classop_1_1_w_cv_mat_to_op_output.html#a04cd3d8e91d731a36e3c7830631e47af',1,'op::WCvMatToOpOutput::WCvMatToOpOutput()']]], + ['wcvmattoopoutput_2ehpp_1031',['wCvMatToOpOutput.hpp',['../w_cv_mat_to_op_output_8hpp.html',1,'']]], + ['wdatumproducer_1032',['WDatumProducer',['../classop_1_1_w_datum_producer.html',1,'op::WDatumProducer< TDatum >'],['../classop_1_1_w_datum_producer.html#a728efd416b307b5ffa25c44b0fbf7760',1,'op::WDatumProducer::WDatumProducer()']]], + ['wdatumproducer_2ehpp_1033',['wDatumProducer.hpp',['../w_datum_producer_8hpp.html',1,'']]], + ['webcam_1034',['Webcam',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332ae2faa2a74b6a4134d0b3e84c7c0e2a01',1,'op']]], + ['webcamreader_1035',['WebcamReader',['../classop_1_1_webcam_reader.html',1,'op::WebcamReader'],['../classop_1_1_webcam_reader.html#a15fa1b26adfb75c0f072dcdb44c8fc44',1,'op::WebcamReader::WebcamReader()']]], + 
['webcamreader_2ehpp_1036',['webcamReader.hpp',['../webcam_reader_8hpp.html',1,'']]], + ['wfacedetector_1037',['WFaceDetector',['../classop_1_1_w_face_detector.html',1,'op::WFaceDetector< TDatums >'],['../classop_1_1_w_face_detector.html#a77355426bc59b212a8eb1730ff6289f3',1,'op::WFaceDetector::WFaceDetector()']]], + ['wfacedetector_2ehpp_1038',['wFaceDetector.hpp',['../w_face_detector_8hpp.html',1,'']]], + ['wfacedetectoropencv_1039',['WFaceDetectorOpenCV',['../classop_1_1_w_face_detector_open_c_v.html',1,'op::WFaceDetectorOpenCV< TDatums >'],['../classop_1_1_w_face_detector_open_c_v.html#a8c765201f0cc9440f8d172c8d8c76a62',1,'op::WFaceDetectorOpenCV::WFaceDetectorOpenCV()']]], + ['wfacedetectoropencv_2ehpp_1040',['wFaceDetectorOpenCV.hpp',['../w_face_detector_open_c_v_8hpp.html',1,'']]], + ['wfaceextractornet_1041',['WFaceExtractorNet',['../classop_1_1_w_face_extractor_net.html',1,'op::WFaceExtractorNet< TDatums >'],['../classop_1_1_w_face_extractor_net.html#a31bd32d4b9922ea456c97343c94501ac',1,'op::WFaceExtractorNet::WFaceExtractorNet()']]], + ['wfaceextractornet_2ehpp_1042',['wFaceExtractorNet.hpp',['../w_face_extractor_net_8hpp.html',1,'']]], + ['wfacerenderer_1043',['WFaceRenderer',['../classop_1_1_w_face_renderer.html',1,'op::WFaceRenderer< TDatums >'],['../classop_1_1_w_face_renderer.html#a51e2a661867adee200f5c4029a585e5d',1,'op::WFaceRenderer::WFaceRenderer()']]], + ['wfacerenderer_2ehpp_1044',['wFaceRenderer.hpp',['../w_face_renderer_8hpp.html',1,'']]], + ['wfacesaver_1045',['WFaceSaver',['../classop_1_1_w_face_saver.html',1,'op::WFaceSaver< TDatums >'],['../classop_1_1_w_face_saver.html#a5dc60ede4b88594d59ece4ce3e4683d6',1,'op::WFaceSaver::WFaceSaver()']]], + ['wfacesaver_2ehpp_1046',['wFaceSaver.hpp',['../w_face_saver_8hpp.html',1,'']]], + ['wfpsmax_1047',['WFpsMax',['../classop_1_1_w_fps_max.html',1,'op::WFpsMax< TDatums >'],['../classop_1_1_w_fps_max.html#afc487c8404a9d4794bcccdd43f0368f6',1,'op::WFpsMax::WFpsMax()']]], + ['wfpsmax_2ehpp_1048',['wFpsMax.hpp',['../w_fps_max_8hpp.html',1,'']]], + ['wgui_1049',['WGui',['../classop_1_1_w_gui.html',1,'op::WGui< TDatums >'],['../classop_1_1_w_gui.html#a4e7d3f5b3ddaf02109738b4348250611',1,'op::WGui::WGui()']]], + ['wgui_2ehpp_1050',['wGui.hpp',['../w_gui_8hpp.html',1,'']]], + ['wgui3d_1051',['WGui3D',['../classop_1_1_w_gui3_d.html',1,'op::WGui3D< TDatums >'],['../classop_1_1_w_gui3_d.html#ab61a31574460ff87efa99ed7362474ed',1,'op::WGui3D::WGui3D()']]], + ['wgui3d_2ehpp_1052',['wGui3D.hpp',['../w_gui3_d_8hpp.html',1,'']]], + ['wguiadam_2ehpp_1053',['wGuiAdam.hpp',['../w_gui_adam_8hpp.html',1,'']]], + ['wguiinfoadder_1054',['WGuiInfoAdder',['../classop_1_1_w_gui_info_adder.html',1,'op::WGuiInfoAdder< TDatums >'],['../classop_1_1_w_gui_info_adder.html#a0b2c539b72ef09106ab0306dc88c5ac5',1,'op::WGuiInfoAdder::WGuiInfoAdder()']]], + ['wguiinfoadder_2ehpp_1055',['wGuiInfoAdder.hpp',['../w_gui_info_adder_8hpp.html',1,'']]], + ['whanddetector_1056',['WHandDetector',['../classop_1_1_w_hand_detector.html',1,'op::WHandDetector< TDatums >'],['../classop_1_1_w_hand_detector.html#ac44b474c7d8bd4876e32ceb9c9a322fe',1,'op::WHandDetector::WHandDetector()']]], + ['whanddetector_2ehpp_1057',['wHandDetector.hpp',['../w_hand_detector_8hpp.html',1,'']]], + ['whanddetectorfromtxt_1058',['WHandDetectorFromTxt',['../classop_1_1_w_hand_detector_from_txt.html',1,'op::WHandDetectorFromTxt< TDatums >'],['../classop_1_1_w_hand_detector_from_txt.html#a01a5f73b0a8a1b8998937e7ba3d747a3',1,'op::WHandDetectorFromTxt::WHandDetectorFromTxt()']]], + 
['whanddetectorfromtxt_2ehpp_1059',['wHandDetectorFromTxt.hpp',['../w_hand_detector_from_txt_8hpp.html',1,'']]], + ['whanddetectortracking_1060',['WHandDetectorTracking',['../classop_1_1_w_hand_detector_tracking.html',1,'op::WHandDetectorTracking< TDatums >'],['../classop_1_1_w_hand_detector_tracking.html#ad2a5ac720f4ed651f4cf5e42d21c05dd',1,'op::WHandDetectorTracking::WHandDetectorTracking()']]], + ['whanddetectortracking_2ehpp_1061',['wHandDetectorTracking.hpp',['../w_hand_detector_tracking_8hpp.html',1,'']]], + ['whanddetectorupdate_1062',['WHandDetectorUpdate',['../classop_1_1_w_hand_detector_update.html',1,'op::WHandDetectorUpdate< TDatums >'],['../classop_1_1_w_hand_detector_update.html#abd8b56fbfbd2a619a4f37d148592f61b',1,'op::WHandDetectorUpdate::WHandDetectorUpdate()']]], + ['whanddetectorupdate_2ehpp_1063',['wHandDetectorUpdate.hpp',['../w_hand_detector_update_8hpp.html',1,'']]], + ['whandextractornet_1064',['WHandExtractorNet',['../classop_1_1_w_hand_extractor_net.html',1,'op::WHandExtractorNet< TDatums >'],['../classop_1_1_w_hand_extractor_net.html#a464a629c6ecd9727da53453af8266e1d',1,'op::WHandExtractorNet::WHandExtractorNet()']]], + ['whandextractornet_2ehpp_1065',['wHandExtractorNet.hpp',['../w_hand_extractor_net_8hpp.html',1,'']]], + ['whandrenderer_1066',['WHandRenderer',['../classop_1_1_w_hand_renderer.html',1,'op::WHandRenderer< TDatums >'],['../classop_1_1_w_hand_renderer.html#a30121b55c601aed3644996d010b6bf8c',1,'op::WHandRenderer::WHandRenderer()']]], + ['whandrenderer_2ehpp_1067',['wHandRenderer.hpp',['../w_hand_renderer_8hpp.html',1,'']]], + ['whandsaver_1068',['WHandSaver',['../classop_1_1_w_hand_saver.html',1,'op::WHandSaver< TDatums >'],['../classop_1_1_w_hand_saver.html#ab41ecc429abfe0a1424facd6ee4acd1f',1,'op::WHandSaver::WHandSaver()']]], + ['whandsaver_2ehpp_1069',['wHandSaver.hpp',['../w_hand_saver_8hpp.html',1,'']]], + ['wheatmapsaver_1070',['WHeatMapSaver',['../classop_1_1_w_heat_map_saver.html',1,'op::WHeatMapSaver< TDatums >'],['../classop_1_1_w_heat_map_saver.html#a5b72d5f3bcbdacb26ba440b80eef0109',1,'op::WHeatMapSaver::WHeatMapSaver()']]], + ['wheatmapsaver_2ehpp_1071',['wHeatMapSaver.hpp',['../w_heat_map_saver_8hpp.html',1,'']]], + ['widgenerator_1072',['WIdGenerator',['../classop_1_1_w_id_generator.html',1,'op::WIdGenerator< TDatums >'],['../classop_1_1_w_id_generator.html#a6112733ee0b537d4d91191f93f0a84f8',1,'op::WIdGenerator::WIdGenerator()']]], + ['widgenerator_2ehpp_1073',['wIdGenerator.hpp',['../w_id_generator_8hpp.html',1,'']]], + ['width_1074',['width',['../structop_1_1_rectangle.html#a0d0ae826039b0961fae8723708809cdf',1,'op::Rectangle::width()'],['../classop_1_1_array_cpu_gpu.html#a5011662a5cf4bc7f7c1a2d966dcc44cd',1,'op::ArrayCpuGpu::width()']]], + ['wimagesaver_1075',['WImageSaver',['../classop_1_1_w_image_saver.html',1,'op::WImageSaver< TDatums >'],['../classop_1_1_w_image_saver.html#a11add012ee88b64a4f36d3f63cb65ee0',1,'op::WImageSaver::WImageSaver()']]], + ['wimagesaver_2ehpp_1076',['wImageSaver.hpp',['../w_image_saver_8hpp.html',1,'']]], + ['windowed_1077',['Windowed',['../namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd',1,'op']]], + ['wjointangleestimation_2ehpp_1078',['wJointAngleEstimation.hpp',['../w_joint_angle_estimation_8hpp.html',1,'']]], + ['wkeeptopnpeople_1079',['WKeepTopNPeople',['../classop_1_1_w_keep_top_n_people.html',1,'op::WKeepTopNPeople< TDatums >'],['../classop_1_1_w_keep_top_n_people.html#aebe939c354cfb62cb6d950f73d14731b',1,'op::WKeepTopNPeople::WKeepTopNPeople()']]], + 
['wkeeptopnpeople_2ehpp_1080',['wKeepTopNPeople.hpp',['../w_keep_top_n_people_8hpp.html',1,'']]], + ['wkeypointscaler_1081',['WKeypointScaler',['../classop_1_1_w_keypoint_scaler.html',1,'op::WKeypointScaler< TDatums >'],['../classop_1_1_w_keypoint_scaler.html#a31624e262988b0840a8ddbf098e56e9b',1,'op::WKeypointScaler::WKeypointScaler()']]], + ['wkeypointscaler_2ehpp_1082',['wKeypointScaler.hpp',['../w_keypoint_scaler_8hpp.html',1,'']]], + ['wopoutputtocvmat_1083',['WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html',1,'op::WOpOutputToCvMat< TDatums >'],['../classop_1_1_w_op_output_to_cv_mat.html#a6f632a83de4cdc731c3f52d1541060f3',1,'op::WOpOutputToCvMat::WOpOutputToCvMat()']]], + ['wopoutputtocvmat_2ehpp_1084',['wOpOutputToCvMat.hpp',['../w_op_output_to_cv_mat_8hpp.html',1,'']]], + ['work_1085',['work',['../classop_1_1_w_face_renderer.html#aa52166ea2d5e0f201c94d5c4fe74216e',1,'op::WFaceRenderer::work()'],['../classop_1_1_w_hand_detector_update.html#af9287dc0a3c67abd35974c1c74614f3c',1,'op::WHandDetectorUpdate::work()'],['../classop_1_1_w_face_extractor_net.html#aa47940fb2ed940a53c7a305ce45817a3',1,'op::WFaceExtractorNet::work()'],['../classop_1_1_w_gui_info_adder.html#ae90a68c6ef7b4f45595a020efd232612',1,'op::WGuiInfoAdder::work()'],['../classop_1_1_w_hand_detector.html#aa82ef40fad1d343b5856b41ec4dbcd5c',1,'op::WHandDetector::work()'],['../classop_1_1_w_hand_detector_from_txt.html#a51ebff94734350463fcf507a84eeefdc',1,'op::WHandDetectorFromTxt::work()'],['../classop_1_1_w_hand_detector_tracking.html#a7c849c5a423ffc150c6a4aee9055d34e',1,'op::WHandDetectorTracking::work()'],['../classop_1_1_w_face_detector_open_c_v.html#a4d3a4a29bcb7b8c141ae1917634ca4c9',1,'op::WFaceDetectorOpenCV::work()'],['../classop_1_1_w_face_detector.html#a721ced99378516c04cb3cff296cc274a',1,'op::WFaceDetector::work()'],['../classop_1_1_w_verbose_printer.html#af98586e3da7cedd902f70e6521c0ffc4',1,'op::WVerbosePrinter::work()'],['../classop_1_1_w_scale_and_size_extractor.html#afddf54d061dc5325e78252a3bba482b9',1,'op::WScaleAndSizeExtractor::work()'],['../classop_1_1_w_op_output_to_cv_mat.html#ae3fc21569d56a648c606b23fcc016349',1,'op::WOpOutputToCvMat::work()'],['../classop_1_1_w_keypoint_scaler.html#aacad5116921e2ff746fbdf9f6c0cbb25',1,'op::WKeypointScaler::work()'],['../classop_1_1_w_pose_triangulation.html#a495b29e03933d750827acc0531c72c78',1,'op::WPoseTriangulation::work()'],['../classop_1_1_w_cv_mat_to_op_input.html#aa7faa9e2671a85d36aad3366a7958f58',1,'op::WCvMatToOpInput::work()'],['../classop_1_1_w_cv_mat_to_op_output.html#a0bf2e43d2586c83fdd5cb0b1b54aefca',1,'op::WCvMatToOpOutput::work()'],['../classop_1_1_w_keep_top_n_people.html#a5928a091e0990706ab2ea5e5e07629dd',1,'op::WKeepTopNPeople::work()'],['../classop_1_1_w_pose_extractor_net.html#a3d691e30c419c70e23a4d7b3c92adb4b',1,'op::WPoseExtractorNet::work()'],['../classop_1_1_worker.html#a9acadd6df7af03b31b9e354ae815f781',1,'op::Worker::work()'],['../classop_1_1_worker_consumer.html#a7383747b3bdc6ac79e6f9afbf2c28d27',1,'op::WorkerConsumer::work()'],['../classop_1_1_worker_producer.html#a0259f0b387e2b868388ba0a6769f4691',1,'op::WorkerProducer::work()'],['../classop_1_1_w_id_generator.html#a03bd005cf88749702fb8a29c20d4cb91',1,'op::WIdGenerator::work()'],['../classop_1_1_w_fps_max.html#a8b9f49fb22b18dbee786922af15ba939',1,'op::WFpsMax::work()'],['../classop_1_1_sub_thread_queue_out.html#a0ff5f79e63038ffa5b4aca24cfea7e7c',1,'op::SubThreadQueueOut::work()'],['../classop_1_1_sub_thread_queue_in_out.html#abb65911e9d9b6d5efe782ca0e599be3b',1,'op::SubThreadQueueInO
ut::work()'],['../classop_1_1_sub_thread_queue_in.html#a7e9bd6ca09bb77a8de76ae8a02ee8ed4',1,'op::SubThreadQueueIn::work()'],['../classop_1_1_sub_thread_no_queue.html#acb7edd02e1724e0fd131235666009f42',1,'op::SubThreadNoQueue::work()'],['../classop_1_1_sub_thread.html#a14330cbc1117f32b6d69c1733ccdeb61',1,'op::SubThread::work()'],['../classop_1_1_w_pose_renderer.html#a10b1631d78d8270ed2a16e538b30eb76',1,'op::WPoseRenderer::work()'],['../classop_1_1_w_pose_extractor.html#ae0f02aaefccab05bbbd919dd7a9e0f61',1,'op::WPoseExtractor::work()'],['../classop_1_1_w_hand_renderer.html#ad178e8d413b3b15edc53625e1f5119d7',1,'op::WHandRenderer::work()'],['../classop_1_1_w_queue_assembler.html#ad3b1ca56d18e1e234773ba15efea7158',1,'op::WQueueAssembler::work()'],['../classop_1_1_w_queue_orderer.html#a1ea314eeaa8d99fbf33885d9a4c6d044',1,'op::WQueueOrderer::work()'],['../classop_1_1_w_person_id_extractor.html#a4066bf1c8cad753c74de1ceabdd76505',1,'op::WPersonIdExtractor::work()'],['../classop_1_1_w_hand_extractor_net.html#a21ffee48567b1c7c8994e4effef6cffe',1,'op::WHandExtractorNet::work()']]], + ['workconsumer_1086',['workConsumer',['../classop_1_1_w_people_json_saver.html#af874a16a06a9a3452a0e3792ac15647e',1,'op::WPeopleJsonSaver::workConsumer()'],['../classop_1_1_w_coco_json_saver.html#af152a61abc9ab46da651c9d87e6775f0',1,'op::WCocoJsonSaver::workConsumer()'],['../classop_1_1_w_face_saver.html#a026bfad8cd9e0d1289a1db473cef34a0',1,'op::WFaceSaver::workConsumer()'],['../classop_1_1_w_hand_saver.html#afc3976b394070927b9396163137317e5',1,'op::WHandSaver::workConsumer()'],['../classop_1_1_w_heat_map_saver.html#a5fd729a47f0cdbe94001219f971f8f51',1,'op::WHeatMapSaver::workConsumer()'],['../classop_1_1_w_image_saver.html#a198bbfcf625354ddda419e0121d0cb33',1,'op::WImageSaver::workConsumer()'],['../classop_1_1_w_pose_saver.html#a039027281498168b57df8dfeefd82cd8',1,'op::WPoseSaver::workConsumer()'],['../classop_1_1_w_udp_sender.html#a615fc6a537ca9f624022698391c11a54',1,'op::WUdpSender::workConsumer()'],['../classop_1_1_w_video_saver.html#a40bcb8ccf137c6cbee3ca31e6cc3bfbf',1,'op::WVideoSaver::workConsumer()'],['../classop_1_1_w_video_saver3_d.html#adef743533fbab522d55c43768d28469e',1,'op::WVideoSaver3D::workConsumer()'],['../classop_1_1_w_gui.html#a664e1f76211510e38b8d5f5bed37ffcb',1,'op::WGui::workConsumer()'],['../classop_1_1_w_gui3_d.html#afe019cff8fd5ed2f59f59d886de7473a',1,'op::WGui3D::workConsumer()'],['../classop_1_1_worker_consumer.html#a26cf5c40df363d94d603fce92a5b69eb',1,'op::WorkerConsumer::workConsumer()']]], + ['worker_1087',['Worker',['../classop_1_1_worker.html',1,'op::Worker< TDatums >'],['../classop_1_1_worker.html#a5008fc4ef4e41366ba0022f5cd79edba',1,'op::Worker::Worker()']]], + ['worker_2ehpp_1088',['worker.hpp',['../worker_8hpp.html',1,'']]], + ['worker_3c_20std_3a_3ashared_5fptr_3c_20tdatums_20_3e_20_3e_1089',['Worker< std::shared_ptr< TDatums > >',['../classop_1_1_worker.html',1,'op']]], + ['workerconsumer_1090',['WorkerConsumer',['../classop_1_1_worker_consumer.html',1,'op']]], + ['workerconsumer_2ehpp_1091',['workerConsumer.hpp',['../worker_consumer_8hpp.html',1,'']]], + ['workerproducer_1092',['WorkerProducer',['../classop_1_1_worker_producer.html',1,'op']]], + ['workerproducer_2ehpp_1093',['workerProducer.hpp',['../worker_producer_8hpp.html',1,'']]], + ['workerproducer_3c_20std_3a_3ashared_5fptr_3c_20std_3a_3avector_3c_20std_3a_3ashared_5fptr_3c_20tdatum_20_3e_20_3e_20_3e_20_3e_1094',['WorkerProducer< std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > 
>',['../classop_1_1_worker_producer.html',1,'op']]], + ['workertype_1095',['WorkerType',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18',1,'op']]], + ['workproducer_1096',['workProducer',['../classop_1_1_w_datum_producer.html#aac2674f961492fa299da18d716a617b4',1,'op::WDatumProducer::workProducer()'],['../classop_1_1_worker_producer.html#a364992ef862fe84a78416e2b556daae7',1,'op::WorkerProducer::workProducer()']]], + ['worktworkers_1097',['workTWorkers',['../classop_1_1_sub_thread.html#ad9f2d3be9e05739b102fad350e1a1364',1,'op::SubThread']]], + ['wpeoplejsonsaver_1098',['WPeopleJsonSaver',['../classop_1_1_w_people_json_saver.html',1,'op::WPeopleJsonSaver< TDatums >'],['../classop_1_1_w_people_json_saver.html#ac12dfe8c1414ec36ace474ecbf148f67',1,'op::WPeopleJsonSaver::WPeopleJsonSaver()']]], + ['wpeoplejsonsaver_2ehpp_1099',['wPeopleJsonSaver.hpp',['../w_people_json_saver_8hpp.html',1,'']]], + ['wpersonidextractor_1100',['WPersonIdExtractor',['../classop_1_1_w_person_id_extractor.html',1,'op::WPersonIdExtractor< TDatums >'],['../classop_1_1_w_person_id_extractor.html#a14a6cc9c6c70acd4847482fd71e4972b',1,'op::WPersonIdExtractor::WPersonIdExtractor()']]], + ['wpersonidextractor_2ehpp_1101',['wPersonIdExtractor.hpp',['../w_person_id_extractor_8hpp.html',1,'']]], + ['wposeextractor_1102',['WPoseExtractor',['../classop_1_1_w_pose_extractor.html',1,'op::WPoseExtractor< TDatums >'],['../classop_1_1_w_pose_extractor.html#ae85b1ec41bf47dcf1aed7bdae1d91915',1,'op::WPoseExtractor::WPoseExtractor()']]], + ['wposeextractor_2ehpp_1103',['wPoseExtractor.hpp',['../w_pose_extractor_8hpp.html',1,'']]], + ['wposeextractornet_1104',['WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html',1,'op::WPoseExtractorNet< TDatums >'],['../classop_1_1_w_pose_extractor_net.html#aa0f6b7ec6f36fe2a27649ac2c7490c09',1,'op::WPoseExtractorNet::WPoseExtractorNet()']]], + ['wposeextractornet_2ehpp_1105',['wPoseExtractorNet.hpp',['../w_pose_extractor_net_8hpp.html',1,'']]], + ['wposerenderer_1106',['WPoseRenderer',['../classop_1_1_w_pose_renderer.html',1,'op::WPoseRenderer< TDatums >'],['../classop_1_1_w_pose_renderer.html#ae74189143175b89ccd36662cec4de72e',1,'op::WPoseRenderer::WPoseRenderer()']]], + ['wposerenderer_2ehpp_1107',['wPoseRenderer.hpp',['../w_pose_renderer_8hpp.html',1,'']]], + ['wposesaver_1108',['WPoseSaver',['../classop_1_1_w_pose_saver.html',1,'op::WPoseSaver< TDatums >'],['../classop_1_1_w_pose_saver.html#aa9dd0f4649c9e8efef10201caf9e4cfd',1,'op::WPoseSaver::WPoseSaver()']]], + ['wposesaver_2ehpp_1109',['wPoseSaver.hpp',['../w_pose_saver_8hpp.html',1,'']]], + ['wposetriangulation_1110',['WPoseTriangulation',['../classop_1_1_w_pose_triangulation.html',1,'op::WPoseTriangulation< TDatums >'],['../classop_1_1_w_pose_triangulation.html#a439c75d19eae34fdd20f2f1c4ee18e48',1,'op::WPoseTriangulation::WPoseTriangulation()']]], + ['wposetriangulation_2ehpp_1111',['wPoseTriangulation.hpp',['../w_pose_triangulation_8hpp.html',1,'']]], + ['wqueueassembler_1112',['WQueueAssembler',['../classop_1_1_w_queue_assembler.html',1,'op::WQueueAssembler< TDatums >'],['../classop_1_1_w_queue_assembler.html#ad4a4ec3e060ad6483331156a5a62af25',1,'op::WQueueAssembler::WQueueAssembler()']]], + ['wqueueassembler_2ehpp_1113',['wQueueAssembler.hpp',['../w_queue_assembler_8hpp.html',1,'']]], + ['wqueueorderer_1114',['WQueueOrderer',['../classop_1_1_w_queue_orderer.html',1,'op::WQueueOrderer< TDatums >'],['../classop_1_1_w_queue_orderer.html#a3303add5fa8cc36593d3d859ffdd8ae0',1,'op::WQueueOrderer::WQueueOrderer()']]], + 
['wqueueorderer_2ehpp_1115',['wQueueOrderer.hpp',['../w_queue_orderer_8hpp.html',1,'']]], + ['wrapper_1116',['Wrapper',['../namespaceop.html#a790dea3c007bed742fbc8cdd5757d026',1,'op']]], + ['wrapper_2ehpp_1117',['wrapper.hpp',['../wrapper_8hpp.html',1,'']]], + ['wrapperauxiliary_2ehpp_1118',['wrapperAuxiliary.hpp',['../wrapper_auxiliary_8hpp.html',1,'']]], + ['wrapperconfiguresanitychecks_1119',['wrapperConfigureSanityChecks',['../namespaceop.html#acc4a5460e02ae510e854724513eea822',1,'op']]], + ['wrapperstructextra_1120',['WrapperStructExtra',['../structop_1_1_wrapper_struct_extra.html',1,'op::WrapperStructExtra'],['../structop_1_1_wrapper_struct_extra.html#a70cdc27c953962810333fafe011f86dd',1,'op::WrapperStructExtra::WrapperStructExtra()']]], + ['wrapperstructextra_2ehpp_1121',['wrapperStructExtra.hpp',['../wrapper_struct_extra_8hpp.html',1,'']]], + ['wrapperstructface_1122',['WrapperStructFace',['../structop_1_1_wrapper_struct_face.html',1,'op::WrapperStructFace'],['../structop_1_1_wrapper_struct_face.html#a0fb08ed60a50f19713df6f62ee685593',1,'op::WrapperStructFace::WrapperStructFace()']]], + ['wrapperstructface_2ehpp_1123',['wrapperStructFace.hpp',['../wrapper_struct_face_8hpp.html',1,'']]], + ['wrapperstructgui_1124',['WrapperStructGui',['../structop_1_1_wrapper_struct_gui.html',1,'op::WrapperStructGui'],['../structop_1_1_wrapper_struct_gui.html#a41638659ae2237d4ebfac635f4cc7842',1,'op::WrapperStructGui::WrapperStructGui()']]], + ['wrapperstructgui_2ehpp_1125',['wrapperStructGui.hpp',['../wrapper_struct_gui_8hpp.html',1,'']]], + ['wrapperstructhand_1126',['WrapperStructHand',['../structop_1_1_wrapper_struct_hand.html',1,'op::WrapperStructHand'],['../structop_1_1_wrapper_struct_hand.html#a223b29ce9a234c3fb8a7864cfe2919fc',1,'op::WrapperStructHand::WrapperStructHand()']]], + ['wrapperstructhand_2ehpp_1127',['wrapperStructHand.hpp',['../wrapper_struct_hand_8hpp.html',1,'']]], + ['wrapperstructinput_1128',['WrapperStructInput',['../structop_1_1_wrapper_struct_input.html',1,'op::WrapperStructInput'],['../structop_1_1_wrapper_struct_input.html#a2ee8db5c1fbade720719bb1464e59175',1,'op::WrapperStructInput::WrapperStructInput()']]], + ['wrapperstructinput_2ehpp_1129',['wrapperStructInput.hpp',['../wrapper_struct_input_8hpp.html',1,'']]], + ['wrapperstructoutput_1130',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html',1,'op::WrapperStructOutput'],['../structop_1_1_wrapper_struct_output.html#ae8975341f779a89d68a125cbfb17d940',1,'op::WrapperStructOutput::WrapperStructOutput()']]], + ['wrapperstructoutput_2ehpp_1131',['wrapperStructOutput.hpp',['../wrapper_struct_output_8hpp.html',1,'']]], + ['wrapperstructpose_1132',['WrapperStructPose',['../structop_1_1_wrapper_struct_pose.html',1,'op::WrapperStructPose'],['../structop_1_1_wrapper_struct_pose.html#af3c639dd4de2bfebe1376a0ab7666c86',1,'op::WrapperStructPose::WrapperStructPose()']]], + ['wrapperstructpose_2ehpp_1133',['wrapperStructPose.hpp',['../wrapper_struct_pose_8hpp.html',1,'']]], + ['wrappert_1134',['WrapperT',['../classop_1_1_wrapper_t.html',1,'op::WrapperT< TDatum, TDatums, TDatumsSP, TWorker >'],['../classop_1_1_wrapper_t.html#a94151754dddc2a37044aea26b9dac6c7',1,'op::WrapperT::WrapperT()']]], + ['write_1135',['write',['../classop_1_1_video_saver.html#a4ecf895fc5cd7508ac139a7b69fc25e7',1,'op::VideoSaver::write(const std::vector< Matrix > &matsToSave)'],['../classop_1_1_video_saver.html#a6f6914d16434cebc9a6c596472b212aa',1,'op::VideoSaver::write(const Matrix &matToSave)']]], + 
['writebvh_1136',['writeBvh',['../structop_1_1_wrapper_struct_output.html#abfa84cf0ae76a1c07f9d33b98e9f6d84',1,'op::WrapperStructOutput']]], + ['writecocojson_1137',['writeCocoJson',['../structop_1_1_wrapper_struct_output.html#a8658c8313ac1d8ddb177d83de2e1bfe7',1,'op::WrapperStructOutput']]], + ['writecocojsonvariant_1138',['writeCocoJsonVariant',['../structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de',1,'op::WrapperStructOutput']]], + ['writecocojsonvariants_1139',['writeCocoJsonVariants',['../structop_1_1_wrapper_struct_output.html#aca7b610f478c36b643fcbd73931c9278',1,'op::WrapperStructOutput']]], + ['writeheatmaps_1140',['writeHeatMaps',['../structop_1_1_wrapper_struct_output.html#a29583f73263bdffe1903ea64a9c09463',1,'op::WrapperStructOutput']]], + ['writeheatmapsformat_1141',['writeHeatMapsFormat',['../structop_1_1_wrapper_struct_output.html#a70278a7418053ced9de2447cc78f4240',1,'op::WrapperStructOutput']]], + ['writeimages_1142',['writeImages',['../structop_1_1_wrapper_struct_output.html#acb0e1a13713fd796c9452684bdb7cdaf',1,'op::WrapperStructOutput']]], + ['writeimagesformat_1143',['writeImagesFormat',['../structop_1_1_wrapper_struct_output.html#ae12454443c1c8b1c74f257eaac4927d3',1,'op::WrapperStructOutput']]], + ['writejson_1144',['writeJson',['../structop_1_1_wrapper_struct_output.html#a8899e8af7df7dad1215a09f61fc8198b',1,'op::WrapperStructOutput']]], + ['writekeypoint_1145',['writeKeypoint',['../structop_1_1_wrapper_struct_output.html#a183afe9fa45aa69a9d79b9434163ed3e',1,'op::WrapperStructOutput']]], + ['writekeypointformat_1146',['writeKeypointFormat',['../structop_1_1_wrapper_struct_output.html#ad338fd4719d6f243bb64bc67f68bc7c9',1,'op::WrapperStructOutput']]], + ['writeparameters_1147',['writeParameters',['../classop_1_1_camera_parameter_reader.html#a7210bc5ebfe6e90a52524b7f0f3f3991',1,'op::CameraParameterReader']]], + ['writevideo_1148',['writeVideo',['../structop_1_1_wrapper_struct_output.html#a49d8f54f546bfe6a6c644280c5e50458',1,'op::WrapperStructOutput']]], + ['writevideo3d_1149',['writeVideo3D',['../structop_1_1_wrapper_struct_output.html#ad996d177c4b84e2d38c105f637559094',1,'op::WrapperStructOutput']]], + ['writevideoadam_1150',['writeVideoAdam',['../structop_1_1_wrapper_struct_output.html#a4b829e1d007943cba3f4a23be25bc74d',1,'op::WrapperStructOutput']]], + ['writevideofps_1151',['writeVideoFps',['../structop_1_1_wrapper_struct_output.html#ad595edffced2bfd80c3bee183f32f505',1,'op::WrapperStructOutput']]], + ['writevideowithaudio_1152',['writeVideoWithAudio',['../structop_1_1_wrapper_struct_output.html#a3f6370fa1cb1f13922e36831c564588c',1,'op::WrapperStructOutput']]], + ['wscaleandsizeextractor_1153',['WScaleAndSizeExtractor',['../classop_1_1_w_scale_and_size_extractor.html',1,'op::WScaleAndSizeExtractor< TDatums >'],['../classop_1_1_w_scale_and_size_extractor.html#a8e6ef291bd809987f06fbb1cc2173b0f',1,'op::WScaleAndSizeExtractor::WScaleAndSizeExtractor()']]], + ['wscaleandsizeextractor_2ehpp_1154',['wScaleAndSizeExtractor.hpp',['../w_scale_and_size_extractor_8hpp.html',1,'']]], + ['wudpsender_1155',['WUdpSender',['../classop_1_1_w_udp_sender.html',1,'op::WUdpSender< TDatums >'],['../classop_1_1_w_udp_sender.html#a22a5ec90fe83ed654bd0aef112fac98b',1,'op::WUdpSender::WUdpSender()']]], + ['wudpsender_2ehpp_1156',['wUdpSender.hpp',['../w_udp_sender_8hpp.html',1,'']]], + ['wverboseprinter_1157',['WVerbosePrinter',['../classop_1_1_w_verbose_printer.html',1,'op::WVerbosePrinter< TDatums 
>'],['../classop_1_1_w_verbose_printer.html#a6ece5acbf5f8a7a3e900c5029a56271d',1,'op::WVerbosePrinter::WVerbosePrinter()']]], + ['wverboseprinter_2ehpp_1158',['wVerbosePrinter.hpp',['../w_verbose_printer_8hpp.html',1,'']]], + ['wvideosaver_1159',['WVideoSaver',['../classop_1_1_w_video_saver.html',1,'op::WVideoSaver< TDatums >'],['../classop_1_1_w_video_saver.html#a04dc4e6f039d047a0da6f94283c145d9',1,'op::WVideoSaver::WVideoSaver()']]], + ['wvideosaver_2ehpp_1160',['wVideoSaver.hpp',['../w_video_saver_8hpp.html',1,'']]], + ['wvideosaver3d_1161',['WVideoSaver3D',['../classop_1_1_w_video_saver3_d.html',1,'op::WVideoSaver3D< TDatums >'],['../classop_1_1_w_video_saver3_d.html#a570d2b868a6c3d3932671d56b0dbb531',1,'op::WVideoSaver3D::WVideoSaver3D()']]], + ['wvideosaver3d_2ehpp_1162',['wVideoSaver3D.hpp',['../w_video_saver3_d_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/all_1b.html b/web/html/doc/search/all_1b.html new file mode 100644 index 000000000..54186384d --- /dev/null +++ b/web/html/doc/search/all_1b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_1b.js b/web/html/doc/search/all_1b.js new file mode 100644 index 000000000..c8007942e --- /dev/null +++ b/web/html/doc/search/all_1b.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['x_1163',['x',['../structop_1_1_point.html#a812d4ef29d102f4ad18f32ae54eb17ec',1,'op::Point::x()'],['../structop_1_1_rectangle.html#ac4ae58fe6ffd2f811f5cbc48661c1856',1,'op::Rectangle::x()']]], + ['xml_1164',['Xml',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea9ec8e4e3ab4c7eeba097f27d7364d743',1,'op']]] +]; diff --git a/web/html/doc/search/all_1c.html b/web/html/doc/search/all_1c.html new file mode 100644 index 000000000..bf84a63ec --- /dev/null +++ b/web/html/doc/search/all_1c.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_1c.js b/web/html/doc/search/all_1c.js new file mode 100644 index 000000000..a636eb6d3 --- /dev/null +++ b/web/html/doc/search/all_1c.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['y_1165',['y',['../structop_1_1_point.html#a5821bc77a416629916e671793df3ce3b',1,'op::Point::y()'],['../structop_1_1_rectangle.html#a64e6891af0088a4ad271a725601b8043',1,'op::Rectangle::y()']]], + ['yaml_1166',['Yaml',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea65f6036bfc9798ce230c5d8567551315',1,'op']]], + ['yml_1167',['Yml',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea55eeca17b45365c188d0edbd35f6e0c3',1,'op']]] +]; diff --git a/web/html/doc/search/all_1d.html b/web/html/doc/search/all_1d.html new file mode 100644 index 000000000..a180afe97 --- /dev/null +++ b/web/html/doc/search/all_1d.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_1d.js b/web/html/doc/search/all_1d.js new file mode 100644 index 000000000..4cd575ebf --- /dev/null +++ b/web/html/doc/search/all_1d.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['zerotoone_1168',['ZeroToOne',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa4b942544cb3e764bbb8d33f8a8744855',1,'op']]], + ['zerotoonefixedaspect_1169',['ZeroToOneFixedAspect',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaafa90ddb034be42f1cdf13a6829eed2ad',1,'op']]] +]; diff --git a/web/html/doc/search/all_1e.html b/web/html/doc/search/all_1e.html new file mode 100644 index 000000000..f6695a10e --- /dev/null +++ b/web/html/doc/search/all_1e.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_1e.js b/web/html/doc/search/all_1e.js new file mode 100644 index 000000000..e9dbacf0c --- /dev/null +++ b/web/html/doc/search/all_1e.js @@ -0,0 +1,120 @@ +var searchData= +[ + ['_7ebodypartconnectorcaffe_1170',['~BodyPartConnectorCaffe',['../classop_1_1_body_part_connector_caffe.html#ab0beade5f7d8e56e881231e46f9306ec',1,'op::BodyPartConnectorCaffe']]], + ['_7ecameraparameterreader_1171',['~CameraParameterReader',['../classop_1_1_camera_parameter_reader.html#acfa701389b1e566e1ea49cfd2605bbf8',1,'op::CameraParameterReader']]], + ['_7ecocojsonsaver_1172',['~CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a8bbfab84a7816cb0f189f243246f744b',1,'op::CocoJsonSaver']]], + ['_7ecvmattoopinput_1173',['~CvMatToOpInput',['../classop_1_1_cv_mat_to_op_input.html#adbe9ae80914d9c1d224c1fe753519090',1,'op::CvMatToOpInput']]], + ['_7ecvmattoopoutput_1174',['~CvMatToOpOutput',['../classop_1_1_cv_mat_to_op_output.html#abc8953e080adc30fa52345322ae8445a',1,'op::CvMatToOpOutput']]], + ['_7edatum_1175',['~Datum',['../structop_1_1_datum.html#a16b968aec06e9b904751216402972e74',1,'op::Datum']]], + ['_7edatumproducer_1176',['~DatumProducer',['../classop_1_1_datum_producer.html#ad12f3202a265c989430d15bf7476a326',1,'op::DatumProducer']]], + ['_7efacecpurenderer_1177',['~FaceCpuRenderer',['../classop_1_1_face_cpu_renderer.html#a5c5e1e9b016bd33b5740beb04fc0fb49',1,'op::FaceCpuRenderer']]], + ['_7efacedetector_1178',['~FaceDetector',['../classop_1_1_face_detector.html#a66ff3806053a5f86d01724f5029e0859',1,'op::FaceDetector']]], + ['_7efacedetectoropencv_1179',['~FaceDetectorOpenCV',['../classop_1_1_face_detector_open_c_v.html#a88eae893ff7f7664243cadf0f84500da',1,'op::FaceDetectorOpenCV']]], + ['_7efaceextractorcaffe_1180',['~FaceExtractorCaffe',['../classop_1_1_face_extractor_caffe.html#a4450e656f21a8cb7f1d9bf5f545012f1',1,'op::FaceExtractorCaffe']]], + ['_7efaceextractornet_1181',['~FaceExtractorNet',['../classop_1_1_face_extractor_net.html#a4cd488333e450cfbb19aab8910e7f138',1,'op::FaceExtractorNet']]], + ['_7efacegpurenderer_1182',['~FaceGpuRenderer',['../classop_1_1_face_gpu_renderer.html#a94758beab4bfbfed02cc8330a63abaeb',1,'op::FaceGpuRenderer']]], + ['_7efacerenderer_1183',['~FaceRenderer',['../classop_1_1_face_renderer.html#a8ba7bad616bd2cf673d8faa846bf95b5',1,'op::FaceRenderer']]], + ['_7efilesaver_1184',['~FileSaver',['../classop_1_1_file_saver.html#a080e6bb80adad7a3d534356cdfe40211',1,'op::FileSaver']]], + ['_7eflirreader_1185',['~FlirReader',['../classop_1_1_flir_reader.html#a66d6144c5dcb0dd3cbadcd6f8eefa9e0',1,'op::FlirReader']]], + ['_7eframedisplayer_1186',['~FrameDisplayer',['../classop_1_1_frame_displayer.html#ab3dea1eefac57cf129b4828ecd856fb4',1,'op::FrameDisplayer']]], + ['_7egpurenderer_1187',['~GpuRenderer',['../classop_1_1_gpu_renderer.html#a3ef06d85a62cd4049d5e8ac1e94d8fd8',1,'op::GpuRenderer']]], + ['_7egui_1188',['~Gui',['../classop_1_1_gui.html#a5e7e30073c0f7ee18904b25fc638b4e2',1,'op::Gui']]], + ['_7egui3d_1189',['~Gui3D',['../classop_1_1_gui3_d.html#a2fff0519028b406fe9ffc984ecd1dfa9',1,'op::Gui3D']]], + ['_7eguiinfoadder_1190',['~GuiInfoAdder',['../classop_1_1_gui_info_adder.html#a942af111d6bc41991db4bca3e573b8e9',1,'op::GuiInfoAdder']]], + ['_7ehandcpurenderer_1191',['~HandCpuRenderer',['../classop_1_1_hand_cpu_renderer.html#a8269f1879939d1b403787f982f10258d',1,'op::HandCpuRenderer']]], + ['_7ehanddetector_1192',['~HandDetector',['../classop_1_1_hand_detector.html#ae70826e6de6a8f26c240d0152578375e',1,'op::HandDetector']]], + 
['_7ehanddetectorfromtxt_1193',['~HandDetectorFromTxt',['../classop_1_1_hand_detector_from_txt.html#a8fb6eb6ef5d5689cfdb502b5bc43685f',1,'op::HandDetectorFromTxt']]], + ['_7ehandextractorcaffe_1194',['~HandExtractorCaffe',['../classop_1_1_hand_extractor_caffe.html#aee681b43b8691ac1f07e08616522f6af',1,'op::HandExtractorCaffe']]], + ['_7ehandextractornet_1195',['~HandExtractorNet',['../classop_1_1_hand_extractor_net.html#a3743bf97fd19ee7d52ffd1019baa0c46',1,'op::HandExtractorNet']]], + ['_7ehandgpurenderer_1196',['~HandGpuRenderer',['../classop_1_1_hand_gpu_renderer.html#ad6a87a582129d7ed18a520dc9cd6c3fc',1,'op::HandGpuRenderer']]], + ['_7ehandrenderer_1197',['~HandRenderer',['../classop_1_1_hand_renderer.html#a66ca52089ca021542816a085d39ee640',1,'op::HandRenderer']]], + ['_7eheatmapsaver_1198',['~HeatMapSaver',['../classop_1_1_heat_map_saver.html#a150c053182074a1cc846c3ced7a674fb',1,'op::HeatMapSaver']]], + ['_7eimagedirectoryreader_1199',['~ImageDirectoryReader',['../classop_1_1_image_directory_reader.html#a7551a8567f42f7cfb68020e149921438',1,'op::ImageDirectoryReader']]], + ['_7eimagesaver_1200',['~ImageSaver',['../classop_1_1_image_saver.html#ab11a6e42a910021fd072cdf287b796ed',1,'op::ImageSaver']]], + ['_7eipcamerareader_1201',['~IpCameraReader',['../classop_1_1_ip_camera_reader.html#ad90e52c898ddf32503ce94685977aae0',1,'op::IpCameraReader']]], + ['_7ejsonofstream_1202',['~JsonOfstream',['../classop_1_1_json_ofstream.html#a5c4b866df81cf36d8f6dcdfc8414de8f',1,'op::JsonOfstream']]], + ['_7ekeeptopnpeople_1203',['~KeepTopNPeople',['../classop_1_1_keep_top_n_people.html#a7675c9c3668a2610827da67818a67741',1,'op::KeepTopNPeople']]], + ['_7ekeypointsaver_1204',['~KeypointSaver',['../classop_1_1_keypoint_saver.html#a903a4fa8be0b0cb5008d015126ac0e59',1,'op::KeypointSaver']]], + ['_7ekeypointscaler_1205',['~KeypointScaler',['../classop_1_1_keypoint_scaler.html#a5797e76ffea7e3b6a4080b04f50f0c0f',1,'op::KeypointScaler']]], + ['_7emaximumcaffe_1206',['~MaximumCaffe',['../classop_1_1_maximum_caffe.html#a0b438980e5c2fce978e9de80f75afcd3',1,'op::MaximumCaffe']]], + ['_7enet_1207',['~Net',['../classop_1_1_net.html#ae20a74df1a401eb17d5b75b406574919',1,'op::Net']]], + ['_7enetcaffe_1208',['~NetCaffe',['../classop_1_1_net_caffe.html#a84007645c88de286e9d306461a044e8d',1,'op::NetCaffe']]], + ['_7enetopencv_1209',['~NetOpenCv',['../classop_1_1_net_open_cv.html#a30ec3c3ee2ffe0a95656f6b11151243f',1,'op::NetOpenCv']]], + ['_7enmscaffe_1210',['~NmsCaffe',['../classop_1_1_nms_caffe.html#a0702488e5d899a6610535f6741601978',1,'op::NmsCaffe']]], + ['_7eopoutputtocvmat_1211',['~OpOutputToCvMat',['../classop_1_1_op_output_to_cv_mat.html#afe99e538dfcca6396b0672db1ec2f17f',1,'op::OpOutputToCvMat']]], + ['_7epeoplejsonsaver_1212',['~PeopleJsonSaver',['../classop_1_1_people_json_saver.html#a4a84666529a0418ccf9256c9942ea3f8',1,'op::PeopleJsonSaver']]], + ['_7epersonidextractor_1213',['~PersonIdExtractor',['../classop_1_1_person_id_extractor.html#a7ff9f8faf42bff0dbd7207105c149a1e',1,'op::PersonIdExtractor']]], + ['_7epersontracker_1214',['~PersonTracker',['../classop_1_1_person_tracker.html#a840ed2e06c1cc4dfc89e6083b2a8bc37',1,'op::PersonTracker']]], + ['_7eposecpurenderer_1215',['~PoseCpuRenderer',['../classop_1_1_pose_cpu_renderer.html#ad4994dcc005a5e283abc012e8889c481',1,'op::PoseCpuRenderer']]], + ['_7eposeextractor_1216',['~PoseExtractor',['../classop_1_1_pose_extractor.html#a9f98eef4ac08cacefe74e002ac086582',1,'op::PoseExtractor']]], + 
['_7eposeextractorcaffe_1217',['~PoseExtractorCaffe',['../classop_1_1_pose_extractor_caffe.html#a3359641c1199c712a07859dcb76b7dcf',1,'op::PoseExtractorCaffe']]], + ['_7eposeextractornet_1218',['~PoseExtractorNet',['../classop_1_1_pose_extractor_net.html#a963c679df20b16d475aa3a7c0661135c',1,'op::PoseExtractorNet']]], + ['_7eposegpurenderer_1219',['~PoseGpuRenderer',['../classop_1_1_pose_gpu_renderer.html#afe3959a08624dd71cc5797eb3938e748',1,'op::PoseGpuRenderer']]], + ['_7eposerenderer_1220',['~PoseRenderer',['../classop_1_1_pose_renderer.html#a8ff2470d813201e992cd5e07bab23386',1,'op::PoseRenderer']]], + ['_7eposetriangulation_1221',['~PoseTriangulation',['../classop_1_1_pose_triangulation.html#a3f4764c7063d9849b75a354a6a92f062',1,'op::PoseTriangulation']]], + ['_7epriorityqueue_1222',['~PriorityQueue',['../classop_1_1_priority_queue.html#a469b458b035822f01b212c089d4245bc',1,'op::PriorityQueue']]], + ['_7eproducer_1223',['~Producer',['../classop_1_1_producer.html#a8b48342b2c4003a080b17ac411f3454f',1,'op::Producer']]], + ['_7equeue_1224',['~Queue',['../classop_1_1_queue.html#a056600a7cf4503235ba4e172cee63a7f',1,'op::Queue']]], + ['_7equeuebase_1225',['~QueueBase',['../classop_1_1_queue_base.html#aef098201d9084083adba5ceeb45b12fa',1,'op::QueueBase']]], + ['_7erenderer_1226',['~Renderer',['../classop_1_1_renderer.html#abd45555a9864e799309b72902b6cec30',1,'op::Renderer']]], + ['_7eresizeandmergecaffe_1227',['~ResizeAndMergeCaffe',['../classop_1_1_resize_and_merge_caffe.html#a5dc1aa7c462bd8df8b6a8377418e19d4',1,'op::ResizeAndMergeCaffe']]], + ['_7escaleandsizeextractor_1228',['~ScaleAndSizeExtractor',['../classop_1_1_scale_and_size_extractor.html#a90bc64fe3c8ee45cfe5f3bd73a8bb3c9',1,'op::ScaleAndSizeExtractor']]], + ['_7espinnakerwrapper_1229',['~SpinnakerWrapper',['../classop_1_1_spinnaker_wrapper.html#a8ae3e45fba6f9d0943cbd9038e98b066',1,'op::SpinnakerWrapper']]], + ['_7esubthread_1230',['~SubThread',['../classop_1_1_sub_thread.html#a6ee67e375611e8df2d09b3234dedf36c',1,'op::SubThread']]], + ['_7esubthreadnoqueue_1231',['~SubThreadNoQueue',['../classop_1_1_sub_thread_no_queue.html#ad88bbbe72f4777603d71e322b0fd20ed',1,'op::SubThreadNoQueue']]], + ['_7esubthreadqueuein_1232',['~SubThreadQueueIn',['../classop_1_1_sub_thread_queue_in.html#a8a479c4ddc5b42f1dbf329c4a0c235c3',1,'op::SubThreadQueueIn']]], + ['_7esubthreadqueueinout_1233',['~SubThreadQueueInOut',['../classop_1_1_sub_thread_queue_in_out.html#a87d122e11adc7363d9b24c7f796d3d33',1,'op::SubThreadQueueInOut']]], + ['_7esubthreadqueueout_1234',['~SubThreadQueueOut',['../classop_1_1_sub_thread_queue_out.html#ab61e068d6dddd2914b25638ebeff0f3b',1,'op::SubThreadQueueOut']]], + ['_7ethread_1235',['~Thread',['../classop_1_1_thread.html#a151e4e647917f2351cc05a8861588e2a',1,'op::Thread']]], + ['_7ethreadmanager_1236',['~ThreadManager',['../classop_1_1_thread_manager.html#a03c6587dbc60b266bee04b9714647fba',1,'op::ThreadManager']]], + ['_7eudpsender_1237',['~UdpSender',['../classop_1_1_udp_sender.html#ac85192d475d5e84b9dcc839d5e240585',1,'op::UdpSender']]], + ['_7everboseprinter_1238',['~VerbosePrinter',['../classop_1_1_verbose_printer.html#a5c4ef10db4aba13be43b92ab4e6c4d3e',1,'op::VerbosePrinter']]], + ['_7evideocapturereader_1239',['~VideoCaptureReader',['../classop_1_1_video_capture_reader.html#a7ea52eabf5133a1a01d38f95b1a4b601',1,'op::VideoCaptureReader']]], + ['_7evideoreader_1240',['~VideoReader',['../classop_1_1_video_reader.html#a26cee6225a62c4e120ae9ea2e4a9a41c',1,'op::VideoReader']]], + 
['_7evideosaver_1241',['~VideoSaver',['../classop_1_1_video_saver.html#acfb839eb14ac032055930932db966e84',1,'op::VideoSaver']]], + ['_7ewcocojsonsaver_1242',['~WCocoJsonSaver',['../classop_1_1_w_coco_json_saver.html#a49ba32973e43c176c88d17aa805f1ab5',1,'op::WCocoJsonSaver']]], + ['_7ewcvmattoopinput_1243',['~WCvMatToOpInput',['../classop_1_1_w_cv_mat_to_op_input.html#a8ae2eb423f1fe70f4154716b38b62719',1,'op::WCvMatToOpInput']]], + ['_7ewcvmattoopoutput_1244',['~WCvMatToOpOutput',['../classop_1_1_w_cv_mat_to_op_output.html#add97e472ab242fe72221cf0591801f81',1,'op::WCvMatToOpOutput']]], + ['_7ewdatumproducer_1245',['~WDatumProducer',['../classop_1_1_w_datum_producer.html#a858e64351ef6d3942bc7d53678badcc7',1,'op::WDatumProducer']]], + ['_7ewebcamreader_1246',['~WebcamReader',['../classop_1_1_webcam_reader.html#aea29bfce4df5493d662ed3a892f364d2',1,'op::WebcamReader']]], + ['_7ewfacedetector_1247',['~WFaceDetector',['../classop_1_1_w_face_detector.html#ac0aa45b289e6800bb76bfbfc8a216035',1,'op::WFaceDetector']]], + ['_7ewfacedetectoropencv_1248',['~WFaceDetectorOpenCV',['../classop_1_1_w_face_detector_open_c_v.html#a2942e145f9c4c720aad7c810a3d0f3f3',1,'op::WFaceDetectorOpenCV']]], + ['_7ewfaceextractornet_1249',['~WFaceExtractorNet',['../classop_1_1_w_face_extractor_net.html#ae781bd1a7d450983a9aa168860d4e96d',1,'op::WFaceExtractorNet']]], + ['_7ewfacerenderer_1250',['~WFaceRenderer',['../classop_1_1_w_face_renderer.html#a2f06bfea6521c7528fc7b07b9b067351',1,'op::WFaceRenderer']]], + ['_7ewfacesaver_1251',['~WFaceSaver',['../classop_1_1_w_face_saver.html#ae27f54e5aead73b6eb604d0a0a06e18f',1,'op::WFaceSaver']]], + ['_7ewfpsmax_1252',['~WFpsMax',['../classop_1_1_w_fps_max.html#af48214bbb4ed5c84efe1adf845aa9318',1,'op::WFpsMax']]], + ['_7ewgui_1253',['~WGui',['../classop_1_1_w_gui.html#a3c55ca3290f64181201890fae10e4002',1,'op::WGui']]], + ['_7ewgui3d_1254',['~WGui3D',['../classop_1_1_w_gui3_d.html#a62b93d2704634170339827ee1f93fa97',1,'op::WGui3D']]], + ['_7ewguiinfoadder_1255',['~WGuiInfoAdder',['../classop_1_1_w_gui_info_adder.html#ab369f542339af87ff652fc6e8e5408dd',1,'op::WGuiInfoAdder']]], + ['_7ewhanddetector_1256',['~WHandDetector',['../classop_1_1_w_hand_detector.html#a7a740a7f9275b7016013728dbed001d0',1,'op::WHandDetector']]], + ['_7ewhanddetectorfromtxt_1257',['~WHandDetectorFromTxt',['../classop_1_1_w_hand_detector_from_txt.html#ae51bcc36e790b298d3cd0c231d4b3640',1,'op::WHandDetectorFromTxt']]], + ['_7ewhanddetectortracking_1258',['~WHandDetectorTracking',['../classop_1_1_w_hand_detector_tracking.html#a7d884dfd00822de27742a2392fb210bb',1,'op::WHandDetectorTracking']]], + ['_7ewhanddetectorupdate_1259',['~WHandDetectorUpdate',['../classop_1_1_w_hand_detector_update.html#a29d71b3c1ee52f04bd52b932db350b59',1,'op::WHandDetectorUpdate']]], + ['_7ewhandextractornet_1260',['~WHandExtractorNet',['../classop_1_1_w_hand_extractor_net.html#ab46b680c14fb2a0cb171b040da484eda',1,'op::WHandExtractorNet']]], + ['_7ewhandrenderer_1261',['~WHandRenderer',['../classop_1_1_w_hand_renderer.html#ab18c8602c8bf65e3e762b2ff06def220',1,'op::WHandRenderer']]], + ['_7ewhandsaver_1262',['~WHandSaver',['../classop_1_1_w_hand_saver.html#abf4a45c6ebe82fca1e0f0db1d3e2af79',1,'op::WHandSaver']]], + ['_7ewheatmapsaver_1263',['~WHeatMapSaver',['../classop_1_1_w_heat_map_saver.html#aa651ec613c81cf9a19222428bd59feed',1,'op::WHeatMapSaver']]], + ['_7ewidgenerator_1264',['~WIdGenerator',['../classop_1_1_w_id_generator.html#ad9e160c5120aa850fbe2285f78e062e2',1,'op::WIdGenerator']]], + 
['_7ewimagesaver_1265',['~WImageSaver',['../classop_1_1_w_image_saver.html#ab8371a260e35cdea5010327240c9a53d',1,'op::WImageSaver']]], + ['_7ewkeeptopnpeople_1266',['~WKeepTopNPeople',['../classop_1_1_w_keep_top_n_people.html#ad23785b42b85c166e5080f47591cccaa',1,'op::WKeepTopNPeople']]], + ['_7ewkeypointscaler_1267',['~WKeypointScaler',['../classop_1_1_w_keypoint_scaler.html#af4e30e78dba64f2784a1757bc2eb9f8b',1,'op::WKeypointScaler']]], + ['_7ewopoutputtocvmat_1268',['~WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html#a5a4e433aa0c7cc62a5f97cc63a67c3fa',1,'op::WOpOutputToCvMat']]], + ['_7eworker_1269',['~Worker',['../classop_1_1_worker.html#a6ebe180098e00ac062a1bb31d462df60',1,'op::Worker']]], + ['_7eworkerconsumer_1270',['~WorkerConsumer',['../classop_1_1_worker_consumer.html#a9aaa75f194df6b3ed4994c8e95aa0ab5',1,'op::WorkerConsumer']]], + ['_7eworkerproducer_1271',['~WorkerProducer',['../classop_1_1_worker_producer.html#a8f2592f70d723de4b818c97b25c5a476',1,'op::WorkerProducer']]], + ['_7ewpeoplejsonsaver_1272',['~WPeopleJsonSaver',['../classop_1_1_w_people_json_saver.html#a386b5b64f2eee08cb344b242f5adb122',1,'op::WPeopleJsonSaver']]], + ['_7ewpersonidextractor_1273',['~WPersonIdExtractor',['../classop_1_1_w_person_id_extractor.html#a15f33c528ac92d30be226e784256be01',1,'op::WPersonIdExtractor']]], + ['_7ewposeextractor_1274',['~WPoseExtractor',['../classop_1_1_w_pose_extractor.html#aedf9cc53f7dfdb1ec2aa77651ca37eef',1,'op::WPoseExtractor']]], + ['_7ewposeextractornet_1275',['~WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html#aa085377f965ffc8385d34d77a2e65e5a',1,'op::WPoseExtractorNet']]], + ['_7ewposerenderer_1276',['~WPoseRenderer',['../classop_1_1_w_pose_renderer.html#ae748fc721246c2a3ad8ffd32adf5e9e7',1,'op::WPoseRenderer']]], + ['_7ewposesaver_1277',['~WPoseSaver',['../classop_1_1_w_pose_saver.html#a62394c885abe4d95bece4469ac3657e9',1,'op::WPoseSaver']]], + ['_7ewposetriangulation_1278',['~WPoseTriangulation',['../classop_1_1_w_pose_triangulation.html#ae88fe6766fbcca1a682306af99684fa3',1,'op::WPoseTriangulation']]], + ['_7ewqueueassembler_1279',['~WQueueAssembler',['../classop_1_1_w_queue_assembler.html#abe8d97c0749cd8d968c8df2727b643e6',1,'op::WQueueAssembler']]], + ['_7ewqueueorderer_1280',['~WQueueOrderer',['../classop_1_1_w_queue_orderer.html#a720b2cd490e2267258bf5d5776f00095',1,'op::WQueueOrderer']]], + ['_7ewrappert_1281',['~WrapperT',['../classop_1_1_wrapper_t.html#a65e310384f3b898c4c3621e0e1ee6883',1,'op::WrapperT']]], + ['_7ewscaleandsizeextractor_1282',['~WScaleAndSizeExtractor',['../classop_1_1_w_scale_and_size_extractor.html#afe30e073c4410146e2c8ba8f2752737f',1,'op::WScaleAndSizeExtractor']]], + ['_7ewudpsender_1283',['~WUdpSender',['../classop_1_1_w_udp_sender.html#a684854618fbd74bce420ed44d867f8cd',1,'op::WUdpSender']]], + ['_7ewverboseprinter_1284',['~WVerbosePrinter',['../classop_1_1_w_verbose_printer.html#a32ea8ffef9a255ee33d6d56a550706f8',1,'op::WVerbosePrinter']]], + ['_7ewvideosaver_1285',['~WVideoSaver',['../classop_1_1_w_video_saver.html#ac0057c1bbfb3e193c891f167d56fcbab',1,'op::WVideoSaver']]], + ['_7ewvideosaver3d_1286',['~WVideoSaver3D',['../classop_1_1_w_video_saver3_d.html#a39482b591eafa150fee3db7027ae093f',1,'op::WVideoSaver3D']]] +]; diff --git a/web/html/doc/search/all_2.html b/web/html/doc/search/all_2.html new file mode 100644 index 000000000..02cfffc2e --- /dev/null +++ b/web/html/doc/search/all_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_2.js b/web/html/doc/search/all_2.js new file mode 100644 index 000000000..7a1ee8cdd --- /dev/null +++ b/web/html/doc/search/all_2.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['2_5fadditional_5fsettings_2emd_14',['2_additional_settings.md',['../2__additional__settings_8md.html',1,'']]], + ['2_5flibrary_5fextend_5ffunctionality_2emd_15',['2_library_extend_functionality.md',['../2__library__extend__functionality_8md.html',1,'']]] +]; diff --git a/web/html/doc/search/all_3.html b/web/html/doc/search/all_3.html new file mode 100644 index 000000000..39767b85b --- /dev/null +++ b/web/html/doc/search/all_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_3.js b/web/html/doc/search/all_3.js new file mode 100644 index 000000000..5de72ba6d --- /dev/null +++ b/web/html/doc/search/all_3.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['3_5flibrary_5fadd_5fnew_5fmodule_2emd_16',['3_library_add_new_module.md',['../3__library__add__new__module_8md.html',1,'']]], + ['3d_5freconstruction_5fmodule_2emd_17',['3d_reconstruction_module.md',['../3d__reconstruction__module_8md.html',1,'']]] +]; diff --git a/web/html/doc/search/all_4.html b/web/html/doc/search/all_4.html new file mode 100644 index 000000000..fc40463c8 --- /dev/null +++ b/web/html/doc/search/all_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_4.js b/web/html/doc/search/all_4.js new file mode 100644 index 000000000..b232da7ac --- /dev/null +++ b/web/html/doc/search/all_4.js @@ -0,0 +1,32 @@ +var searchData= +[ + ['add_18',['add',['../classop_1_1_thread_manager.html#a762acc9eb60bd10857da1f416e169f3d',1,'op::ThreadManager::add(const unsigned long long threadId, const TWorker &tWorker, const unsigned long long queueInId, const unsigned long long queueOutId)'],['../classop_1_1_thread_manager.html#a8134abeaec65b5647ae92e34f3ad420b',1,'op::ThreadManager::add(const unsigned long long threadId, const std::vector< TWorker > &tWorkers, const unsigned long long queueInId, const unsigned long long queueOutId)'],['../classop_1_1_thread.html#a820b9416b96c69cb1fc6773b9a53a47a',1,'op::Thread::add(const std::shared_ptr< SubThread< TDatums, TWorker >> &subThread)'],['../classop_1_1_thread.html#a0617df4103c25bb04ee2c75f05ea2978',1,'op::Thread::add(const std::vector< std::shared_ptr< SubThread< TDatums, TWorker >>> &subThreads)']]], + ['addbkgchannel_19',['addBkgChannel',['../namespaceop.html#a13b86d097fd5f36612e9858e9348ea57',1,'op']]], + ['addinfo_20',['addInfo',['../classop_1_1_gui_info_adder.html#a6f2f2d449d48ca7e21729d03796a540c',1,'op::GuiInfoAdder']]], + ['addkeypoints_21',['AddKeypoints',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea5f4badd072493724e560fa43d0cf2c71',1,'op']]], + ['addpafs_22',['AddPAFs',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaca9f686d0a3d6b8bfe5865b59b2fc84f',1,'op']]], + ['addpartcandidates_23',['addPartCandidates',['../structop_1_1_wrapper_struct_pose.html#ad73981c6ad9b23f511ef6f12136bf8e7',1,'op::WrapperStructPose']]], + ['addpopper_24',['addPopper',['../classop_1_1_queue_base.html#adc5df8a039d360831db06e3c610bf015',1,'op::QueueBase']]], + ['addpusher_25',['addPusher',['../classop_1_1_queue_base.html#a1ccdec39ea65a83edc54661acc283134',1,'op::QueueBase']]], + ['all_26',['All',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cab1c94ca2fbc3e78fc30069c8d0f01680',1,'op::All()'],['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94ab1c94ca2fbc3e78fc30069c8d0f01680',1,'op::All()']]], + ['alphaheatmap_27',['alphaHeatMap',['../structop_1_1_wrapper_struct_face.html#a49f609ae1c075f272bbaf32e128cc3a9',1,'op::WrapperStructFace::alphaHeatMap()'],['../structop_1_1_wrapper_struct_hand.html#a716f9c98cbee1a4a70d5978875795c4d',1,'op::WrapperStructHand::alphaHeatMap()'],['../structop_1_1_wrapper_struct_pose.html#ac1233492c750fbd98df353bffa8f9b78',1,'op::WrapperStructPose::alphaHeatMap()']]], + ['alphakeypoint_28',['alphaKeypoint',['../structop_1_1_wrapper_struct_face.html#a65a0244cbcea06621c6f8c41e519990f',1,'op::WrapperStructFace::alphaKeypoint()'],['../structop_1_1_wrapper_struct_pose.html#a4d3ad84b14697d5f1009fa29e2ff1998',1,'op::WrapperStructPose::alphaKeypoint()'],['../structop_1_1_wrapper_struct_hand.html#a8074cf22f8926d7f4d1d60cacae99c3e',1,'op::WrapperStructHand::alphaKeypoint()']]], + ['area_29',['area',['../structop_1_1_point.html#ac85e32b9381abc2af106fe96dba81b08',1,'op::Point::area()'],['../structop_1_1_rectangle.html#a5b319240c995c81bfa1d73a2461d49fd',1,'op::Rectangle::area()']]], + ['array_30',['Array',['../classop_1_1_array.html#a793b9851c7490bc98d4dd52020c0cd3c',1,'op::Array::Array(const int size)'],['../classop_1_1_array.html#a48c1ba1f7017b5aa8e0451079dd3a6d3',1,'op::Array::Array(const std::vector< int > &sizes={})'],['../classop_1_1_array.html#ac833fdcb245fcc3135ce65227bb9e4b2',1,'op::Array::Array(const int size, const T 
value)'],['../classop_1_1_array.html#a959ede0df7e535d2d3ac40d098541c27',1,'op::Array::Array(const std::vector< int > &sizes, const T value)'],['../classop_1_1_array.html#a9cd386050e94c29b3c4ee40cafcacc46',1,'op::Array::Array(const int size, T *const dataPtr)'],['../classop_1_1_array.html#a90895562def04a81db0b3e7eaa3722c7',1,'op::Array::Array(const std::vector< int > &sizes, T *const dataPtr)'],['../classop_1_1_array.html#a416e95541761c557c50b79b5e1b33389',1,'op::Array::Array(const Array< T > &array, const int index, const bool noCopy=false)'],['../classop_1_1_array.html#afb4e1f55747898d29aa13606ded9991f',1,'op::Array::Array(const Array< T2 > &array)'],['../classop_1_1_array.html#a5a68cca98a3ebaf565f1e546eebd9f01',1,'op::Array::Array(const Array< T > &array)'],['../classop_1_1_array.html#a7a7d854d63815e10e158fe889d17a88e',1,'op::Array::Array(Array< T > &&array)'],['../classop_1_1_array.html',1,'op::Array< T >']]], + ['array_2ehpp_31',['array.hpp',['../array_8hpp.html',1,'']]], + ['array_3c_20float_20_3e_32',['Array< float >',['../classop_1_1_array.html',1,'op']]], + ['array_3c_20long_20long_20_3e_33',['Array< long long >',['../classop_1_1_array.html',1,'op']]], + ['arrayclose_34',['arrayClose',['../classop_1_1_json_ofstream.html#a3f940d3ad51d0acb7126d62a5617fd69',1,'op::JsonOfstream']]], + ['arraycpugpu_35',['ArrayCpuGpu',['../classop_1_1_array_cpu_gpu.html#ad5d631890ff61a2e70695b797f1a6f34',1,'op::ArrayCpuGpu::ArrayCpuGpu(const Array< T > &array, const bool copyFromGpu)'],['../classop_1_1_array_cpu_gpu.html#aee39459d54376c7ec98155b4add7f961',1,'op::ArrayCpuGpu::ArrayCpuGpu()'],['../classop_1_1_array_cpu_gpu.html#a4fb245f1557f61192ab54c24f4baf487',1,'op::ArrayCpuGpu::ArrayCpuGpu(const void *caffeBlobTPtr)'],['../classop_1_1_array_cpu_gpu.html#a0234d36fab318cc2c672134fd4009301',1,'op::ArrayCpuGpu::ArrayCpuGpu(const int num, const int channels, const int height, const int width)'],['../classop_1_1_array_cpu_gpu.html',1,'op::ArrayCpuGpu< T >']]], + ['arraycpugpu_2ehpp_36',['arrayCpuGpu.hpp',['../array_cpu_gpu_8hpp.html',1,'']]], + ['arrayopen_37',['arrayOpen',['../classop_1_1_json_ofstream.html#a4cc6d50256354b3dc7385e2db01aabc0',1,'op::JsonOfstream']]], + ['asum_5fdata_38',['asum_data',['../classop_1_1_array_cpu_gpu.html#a7cf928cf41b4477c59c91a0f45c0443c',1,'op::ArrayCpuGpu']]], + ['asum_5fdiff_39',['asum_diff',['../classop_1_1_array_cpu_gpu.html#a675012bf11b17cb7b7c41b5dc1beccf6',1,'op::ArrayCpuGpu']]], + ['asynchronous_40',['Asynchronous',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a288aae25bc408055f50c21c991903a44',1,'op']]], + ['asynchronousin_41',['AsynchronousIn',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a435b3ab344c03bfc0e4530a2e75f5e44',1,'op']]], + ['asynchronousout_42',['AsynchronousOut',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36ac68f8680ccf3a65dfcfc63356112c9f9',1,'op']]], + ['at_43',['at',['../classop_1_1_array.html#ae74dec8220582072f85fb3ec430238ce',1,'op::Array::at(const std::vector< int > &indexes) const'],['../classop_1_1_array.html#a8eebb6c34642cdf19ac74c7ed38d128b',1,'op::Array::at(const std::vector< int > &indexes)'],['../classop_1_1_array.html#a4a24dfa0d0f1f3769bf3bfcea47e2220',1,'op::Array::at(const int index) const'],['../classop_1_1_array.html#a6e0afd5f447efbfc29efbeac62716eff',1,'op::Array::at(const int index)']]], + ['auto_44',['Auto',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a06b9281e396db002010bde1de57262eb',1,'op']]], + 
['autorepeat_45',['AutoRepeat',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3ebbca1b84060b0caaf823639739945d',1,'op']]], + ['averagekeypoints_46',['averageKeypoints',['../namespaceop.html#a1f931e210eb575a084b8e6f462b0b382',1,'op']]] +]; diff --git a/web/html/doc/search/all_5.html b/web/html/doc/search/all_5.html new file mode 100644 index 000000000..9dd9344b0 --- /dev/null +++ b/web/html/doc/search/all_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_5.js b/web/html/doc/search/all_5.js new file mode 100644 index 000000000..89008e34e --- /dev/null +++ b/web/html/doc/search/all_5.js @@ -0,0 +1,27 @@ +var searchData= +[ + ['background_47',['Background',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaa9ded1e5ce5d75814730bb4caaf49419',1,'op::Background()'],['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226baa9ded1e5ce5d75814730bb4caaf49419',1,'op::Background()']]], + ['backward_5fcpu_48',['Backward_cpu',['../classop_1_1_body_part_connector_caffe.html#a8ad522722607c9ff6299337019f04175',1,'op::BodyPartConnectorCaffe::Backward_cpu()'],['../classop_1_1_maximum_caffe.html#ad21700230d1f1ac1139e8ce76574232c',1,'op::MaximumCaffe::Backward_cpu()'],['../classop_1_1_nms_caffe.html#abbaee841e5cb64f97c94da67ef4349c9',1,'op::NmsCaffe::Backward_cpu()'],['../classop_1_1_resize_and_merge_caffe.html#a4836b2f08273896f58c2d63a15c871e8',1,'op::ResizeAndMergeCaffe::Backward_cpu()']]], + ['backward_5fgpu_49',['Backward_gpu',['../classop_1_1_body_part_connector_caffe.html#ace9cba081581a4c58fcfbef73b6dd11b',1,'op::BodyPartConnectorCaffe::Backward_gpu()'],['../classop_1_1_maximum_caffe.html#a91989f6e0a2c1349c33815a8cd659e52',1,'op::MaximumCaffe::Backward_gpu()'],['../classop_1_1_nms_caffe.html#a3d1d4cee2b93d5bc0d88c25019b17715',1,'op::NmsCaffe::Backward_gpu()'],['../classop_1_1_resize_and_merge_caffe.html#acfa7742f943fd741acf0bf383c572655',1,'op::ResizeAndMergeCaffe::Backward_gpu()']]], + ['base_5fdatum_50',['BASE_DATUM',['../datum_8hpp.html#a03de732ffb0edab021fb745b21a05fdd',1,'datum.hpp']]], + ['base_5fdatums_51',['BASE_DATUMS',['../datum_8hpp.html#aa0a67922cf9df1e30dad2c32785b147e',1,'datum.hpp']]], + ['base_5fdatums_5fsh_52',['BASE_DATUMS_SH',['../datum_8hpp.html#ae2331967a21fec02341dec3ca39d3809',1,'datum.hpp']]], + ['blendoriginalframe_53',['blendOriginalFrame',['../structop_1_1_wrapper_struct_pose.html#aa2cee9019b708d48cc18313615d0189e',1,'op::WrapperStructPose']]], + ['body_54',['Body',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715acaac101b32dda4448cf13a93fe283dddd8',1,'op::Body()'],['../namespaceop.html#a1070db47220e17cf37df40411350f6fbaac101b32dda4448cf13a93fe283dddd8',1,'op::Body()']]], + ['body_5f135_55',['BODY_135',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261facfbe6a39619f4ca5a1fa2db000a17e0d',1,'op']]], + ['body_5f19_56',['BODY_19',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fae3ae2003e0e0458bdc49480fb19c876e',1,'op']]], + ['body_5f19_5fx2_57',['BODY_19_X2',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faca4c7eb29b1f3402e78aa384ce8fd5a9',1,'op']]], + ['body_5f19e_58',['BODY_19E',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa08956a1731b54bbdce3f97f1361efc23',1,'op']]], + ['body_5f19n_59',['BODY_19N',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9c6c21b2b0a410880f46637db622e392',1,'op']]], + ['body_5f23_60',['BODY_23',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa003cc3795b0eeed2af2dfd34ed482794',1,'op']]], + ['body_5f25_61',['BODY_25',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9a87ca5ab7b20c2bd4f8d5379956e6f6',1,'op']]], + ['body_5f25b_62',['BODY_25B',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0b93cfdf906412bd7c8560ccd180cec6',1,'op']]], + ['body_5f25d_63',['BODY_25D',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa529c87ac399e5fd6f0fa4a360c032568',1,'op']]], + ['body_5f25e_64',['BODY_25E',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faef29c97ffaed7b0d41ee9bb0d20550cc',1,'op']]], + 
['bodypartconnectorbase_2ehpp_65',['bodyPartConnectorBase.hpp',['../body_part_connector_base_8hpp.html',1,'']]], + ['bodypartconnectorcaffe_66',['BodyPartConnectorCaffe',['../classop_1_1_body_part_connector_caffe.html#a94e2364fa13ea79b2d6fd72c5db34765',1,'op::BodyPartConnectorCaffe::BodyPartConnectorCaffe()'],['../classop_1_1_body_part_connector_caffe.html',1,'op::BodyPartConnectorCaffe< T >']]], + ['bodypartconnectorcaffe_2ehpp_67',['bodyPartConnectorCaffe.hpp',['../body_part_connector_caffe_8hpp.html',1,'']]], + ['bodywithtracking_68',['BodyWithTracking',['../namespaceop.html#a1070db47220e17cf37df40411350f6fba65c691a85367d21881220b7a3d923747',1,'op']]], + ['bottomright_69',['bottomRight',['../structop_1_1_rectangle.html#ab4473fb43ab826ffb10c2be18cb96f24',1,'op::Rectangle']]], + ['bvhsaver_2ehpp_70',['bvhSaver.hpp',['../bvh_saver_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/all_6.html b/web/html/doc/search/all_6.html new file mode 100644 index 000000000..f1e516d75 --- /dev/null +++ b/web/html/doc/search/all_6.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_6.js b/web/html/doc/search/all_6.js new file mode 100644 index 000000000..8e3f49510 --- /dev/null +++ b/web/html/doc/search/all_6.js @@ -0,0 +1,83 @@ +var searchData= +[ + ['caffemodelpath_71',['caffeModelPath',['../structop_1_1_wrapper_struct_pose.html#a6de869a73fd338bd41e390fcb1a5bcf3',1,'op::WrapperStructPose']]], + ['calibration_5fmodule_2emd_72',['calibration_module.md',['../calibration__module_8md.html',1,'']]], + ['cameraextrinsics_73',['cameraExtrinsics',['../structop_1_1_datum.html#aa3e5b74f3d54bc880f47831c3932dfa9',1,'op::Datum']]], + ['cameraintrinsics_74',['cameraIntrinsics',['../structop_1_1_datum.html#ae2aad08cc74ee43e1242b403d47be2ff',1,'op::Datum']]], + ['cameramatrix_75',['cameraMatrix',['../structop_1_1_datum.html#aa27ee36fd2e1fb0dfc5c1e6869e2073e',1,'op::Datum']]], + ['cameraparameterestimation_2ehpp_76',['cameraParameterEstimation.hpp',['../camera_parameter_estimation_8hpp.html',1,'']]], + ['cameraparameterpath_77',['cameraParameterPath',['../structop_1_1_wrapper_struct_input.html#a4c77c6257dec58ac0a5e18cfe5b38a26',1,'op::WrapperStructInput']]], + ['cameraparameterreader_78',['CameraParameterReader',['../classop_1_1_camera_parameter_reader.html#ab7a4c3ef7ac8d8a41e5711ec85b7be4b',1,'op::CameraParameterReader::CameraParameterReader()'],['../classop_1_1_camera_parameter_reader.html#aae3c60cbed99e1b5706c96577732ddae',1,'op::CameraParameterReader::CameraParameterReader(const std::string &serialNumber, const Matrix &cameraIntrinsics, const Matrix &cameraDistortion, const Matrix &cameraExtrinsics=Matrix(), const Matrix &cameraExtrinsicsInitial=Matrix())'],['../classop_1_1_camera_parameter_reader.html',1,'op::CameraParameterReader']]], + ['cameraparameterreader_2ehpp_79',['cameraParameterReader.hpp',['../camera_parameter_reader_8hpp.html',1,'']]], + ['cameraresolution_80',['cameraResolution',['../structop_1_1_wrapper_struct_input.html#ae2078c540324a9cdc8500dce5d361bee',1,'op::WrapperStructInput']]], + ['canonicalaxisindex_81',['CanonicalAxisIndex',['../classop_1_1_array_cpu_gpu.html#adeb393edfae4967cb510a8c7a2d07d80',1,'op::ArrayCpuGpu']]], + ['car_82',['Car',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715acae9989db5dabeea617f40c8dbfd07f5fb',1,'op']]], + ['car_5f12_83',['CAR_12',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa66ae79a5ac5fa502ae8bbecd3e07e71c',1,'op']]], + ['car_5f22_84',['CAR_22',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa372b9885bba8bc32ad323fffcf99e39e',1,'op']]], + ['center_85',['center',['../structop_1_1_rectangle.html#a0b0b8be8a0b300204a2afff4f219879b',1,'op::Rectangle']]], + ['channels_86',['channels',['../classop_1_1_array_cpu_gpu.html#a2eb57d0cb4f902b275d126e4b6f706f2',1,'op::ArrayCpuGpu::channels()'],['../classop_1_1_matrix.html#a4555d0f39c54ad5f7adcb39fe06503cc',1,'op::Matrix::channels()']]], + ['check_2ehpp_87',['check.hpp',['../check_8hpp.html',1,'']]], + ['checkandwork_88',['checkAndWork',['../classop_1_1_worker.html#a6e4e84bd2052919bc48df1ec4b913ecf',1,'op::Worker']]], + ['checkbool_89',['checkBool',['../namespaceop.html#a410201fcc46274e24726c5a601bc1721',1,'op']]], + ['checkequal_90',['checkEqual',['../namespaceop.html#aaff52f436911aa17bebb999cd91a44fd',1,'op']]], + ['checkframeintegrity_91',['checkFrameIntegrity',['../classop_1_1_producer.html#abbfbe53757f75e5e77266b04e9d0fea1',1,'op::Producer']]], + ['checkgreaterorequal_92',['checkGreaterOrEqual',['../namespaceop.html#a92e8cd01741c90fbfdfaa33a13803f34',1,'op']]], + 
['checkgreaterthan_93',['checkGreaterThan',['../namespaceop.html#a3dd874d4341b99431819f9fa6b678ca9',1,'op']]], + ['checkifrunningandgetdatum_94',['checkIfRunningAndGetDatum',['../classop_1_1_datum_producer.html#a39da4822705d23ca7e600b69f39e69be',1,'op::DatumProducer']]], + ['checklessorequal_95',['checkLessOrEqual',['../namespaceop.html#a7ecfc02dca25534a071acf3136ff175e',1,'op']]], + ['checklessthan_96',['checkLessThan',['../namespaceop.html#a1e71130dc8f280e4664c711128b18b42',1,'op']]], + ['checknonullnorempty_97',['checkNoNullNorEmpty',['../namespaceop.html#a02164ca0af9e838190f584f5d1d8465e',1,'op']]], + ['checknotequal_98',['checkNotEqual',['../namespaceop.html#aaada2594361f6f929af5b1f9d50387eb',1,'op']]], + ['checkthread_99',['checkThread',['../classop_1_1_pose_extractor_net.html#a840c6fbdbf59d088d966ad26d45572a4',1,'op::PoseExtractorNet']]], + ['checkvector_100',['checkVector',['../classop_1_1_matrix.html#a77cd25c8e23a4eec148039ab4832cde1',1,'op::Matrix']]], + ['checkworkererrors_101',['checkWorkerErrors',['../namespaceop.html#a865a4cd0ba3b596667dc7242756837bd',1,'op']]], + ['clear_102',['clear',['../classop_1_1_queue_base.html#a247f435c95709f3246d352eee4f757af',1,'op::QueueBase::clear()'],['../classop_1_1_pose_extractor_net.html#a3fe7256d9860f4c624f5cf928556bc28',1,'op::PoseExtractorNet::clear()']]], + ['clone_103',['clone',['../classop_1_1_matrix.html#abc101fe6c039f6ef2311c5e9cef4c293',1,'op::Matrix::clone()'],['../structop_1_1_datum.html#ad137a102ef753734a9413762d72e6d46',1,'op::Datum::clone()'],['../classop_1_1_array.html#ab0b95bf5488cccad3bce7413251b04de',1,'op::Array::clone()']]], + ['coco_5f18_104',['COCO_18',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0c4a19d9254adcb3ca1f0f527ee141fd',1,'op']]], + ['cocojsonformat_105',['CocoJsonFormat',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715ac',1,'op']]], + ['cocojsonsaver_106',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a6d596768658b4b32430d3686be557e33',1,'op::CocoJsonSaver::CocoJsonSaver()'],['../classop_1_1_coco_json_saver.html',1,'op::CocoJsonSaver']]], + ['cocojsonsaver_2ehpp_107',['cocoJsonSaver.hpp',['../coco_json_saver_8hpp.html',1,'']]], + ['cols_108',['cols',['../classop_1_1_matrix.html#ac2171dc14ef5480496c05c115b6dd579',1,'op::Matrix']]], + ['comma_109',['comma',['../classop_1_1_json_ofstream.html#ae4468279f789c8026d431b2ef62646f9',1,'op::JsonOfstream']]], + ['common_2ehpp_110',['common.hpp',['../common_8hpp.html',1,'']]], + ['compile_5ftemplate_5fbasic_5ftypes_111',['COMPILE_TEMPLATE_BASIC_TYPES',['../macros_8hpp.html#a6bf32c65e0f388d5b09d8b2424416c0e',1,'macros.hpp']]], + ['compile_5ftemplate_5fbasic_5ftypes_5fclass_112',['COMPILE_TEMPLATE_BASIC_TYPES_CLASS',['../macros_8hpp.html#a60e010d8a2352d94b8b57d97cf4a7d73',1,'macros.hpp']]], + ['compile_5ftemplate_5fbasic_5ftypes_5fstruct_113',['COMPILE_TEMPLATE_BASIC_TYPES_STRUCT',['../macros_8hpp.html#ac5627744abe5fd0c8eacfe9c7f8bd32e',1,'macros.hpp']]], + 
['compile_5ftemplate_5fdatum_114',['COMPILE_TEMPLATE_DATUM',['../namespaceop.html#af46e80e6bac0f815006759df4c9d00c3',1,'op::COMPILE_TEMPLATE_DATUM(WCocoJsonSaver)'],['../namespaceop.html#a53f346232d0743f3dd0f547de1fc508f',1,'op::COMPILE_TEMPLATE_DATUM(WPoseTriangulation)'],['../namespaceop.html#a9076fc1719030c2a74f21682999d2315',1,'op::COMPILE_TEMPLATE_DATUM(WCvMatToOpInput)'],['../namespaceop.html#a6d12bd1e42cfb63d2f780bed55fa01fb',1,'op::COMPILE_TEMPLATE_DATUM(WCvMatToOpOutput)'],['../namespaceop.html#aaee32c4c68404e5086844bcb911b7a20',1,'op::COMPILE_TEMPLATE_DATUM(WKeepTopNPeople)'],['../namespaceop.html#a47758c703fccdbb65c26dc7bc4022237',1,'op::COMPILE_TEMPLATE_DATUM(WKeypointScaler)'],['../namespaceop.html#a1d9f50688522ed7368acc33a09ae9ece',1,'op::COMPILE_TEMPLATE_DATUM(WOpOutputToCvMat)'],['../namespaceop.html#aaca98fe6101cda512a43c513182ae5cc',1,'op::COMPILE_TEMPLATE_DATUM(WScaleAndSizeExtractor)'],['../namespaceop.html#a89984557f6968584d1938afe7b9f32bd',1,'op::COMPILE_TEMPLATE_DATUM(WVerbosePrinter)'],['../namespaceop.html#a196f17357cd1c1bb02e24e4e8a0e6ec3',1,'op::COMPILE_TEMPLATE_DATUM(WFaceDetector)'],['../namespaceop.html#abf3a59fc4662f07e6ba19b95bd4da32f',1,'op::COMPILE_TEMPLATE_DATUM(WFaceDetectorOpenCV)'],['../namespaceop.html#ab5b47f0069e9f397ff891194b20d28f2',1,'op::COMPILE_TEMPLATE_DATUM(WFaceExtractorNet)'],['../namespaceop.html#af42afa53c725d556c14928b2603883e3',1,'op::COMPILE_TEMPLATE_DATUM(WFaceRenderer)'],['../datum_8hpp.html#af87cd873cebb915837ae27248f67e822',1,'COMPILE_TEMPLATE_DATUM(): datum.hpp'],['../namespaceop.html#a57c4f3ada0db4882a4106d4dedf08012',1,'op::COMPILE_TEMPLATE_DATUM(WFaceSaver)'],['../namespaceop.html#a602d5d238fe0c7096698cf36b7dee9ab',1,'op::COMPILE_TEMPLATE_DATUM(WHandSaver)'],['../namespaceop.html#a7ac10b9f503668695643c366e25f3b68',1,'op::COMPILE_TEMPLATE_DATUM(WHeatMapSaver)'],['../namespaceop.html#a505ea16cc6c2c0068bbf4e7269dc8e0a',1,'op::COMPILE_TEMPLATE_DATUM(WImageSaver)'],['../namespaceop.html#a774871462f7fefb8cadea1e49f501e45',1,'op::COMPILE_TEMPLATE_DATUM(WPeopleJsonSaver)'],['../namespaceop.html#a31ad937a2e52ea08ce925031d26616b9',1,'op::COMPILE_TEMPLATE_DATUM(WPoseSaver)'],['../namespaceop.html#af9e0d9e4028c0589b5eeeaed42a5088c',1,'op::COMPILE_TEMPLATE_DATUM(WUdpSender)'],['../namespaceop.html#a49bd4106b0cd1cb81980329b06c0d2c8',1,'op::COMPILE_TEMPLATE_DATUM(WVideoSaver)'],['../namespaceop.html#a0db530b6f607aa43e8f9154b308d207a',1,'op::COMPILE_TEMPLATE_DATUM(WVideoSaver3D)'],['../namespaceop.html#ade3b2e4b105242a3cf41def3def1691d',1,'op::COMPILE_TEMPLATE_DATUM(WGui)'],['../namespaceop.html#a54b38240e45009f7e6a25d956ac96fe0',1,'op::COMPILE_TEMPLATE_DATUM(WGui3D)'],['../namespaceop.html#ae88e9ced5d14fa221205b492ff76c56b',1,'op::COMPILE_TEMPLATE_DATUM(WGuiInfoAdder)'],['../namespaceop.html#a0424a8e4dc8ceb5e8d8a2230c157a7fd',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetector)'],['../namespaceop.html#a767385c8d3ebe736e1752825ab4d4ea0',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetectorFromTxt)'],['../namespaceop.html#a674a652ad38b355285417529fc050847',1,'op::COMPILE_TEMPLATE_DATUM(WPersonIdExtractor)'],['../namespaceop.html#add981a5f6a49d35cc316a54c613497f3',1,'op::COMPILE_TEMPLATE_DATUM(WQueueOrderer)'],['../namespaceop.html#a5660f0e72781ce6d7db9eb78b582e5c6',1,'op::COMPILE_TEMPLATE_DATUM(WorkerProducer)'],['../namespaceop.html#a01aa5c6e24026536367cf47a64e9bba5',1,'op::COMPILE_TEMPLATE_DATUM(WorkerConsumer)'],['../namespaceop.html#a5642545fda1c3bbaf60810cf0e2d2c1d',1,'op::COMPILE_TEMPLATE_DATUM(Worker)'],['../namespaceop.html#ad22c543a4376e943b728e657fab5ed
9f',1,'op::COMPILE_TEMPLATE_DATUM(WIdGenerator)'],['../namespaceop.html#adfc12925650978828707c1c0dcbebd0e',1,'op::COMPILE_TEMPLATE_DATUM(WFpsMax)'],['../namespaceop.html#ac06eeab84c4861ef08834855b48750a6',1,'op::COMPILE_TEMPLATE_DATUM(ThreadManager)'],['../namespaceop.html#ae5dac6cf1ccdf461838f9795be8fda03',1,'op::COMPILE_TEMPLATE_DATUM(Thread)'],['../namespaceop.html#aee90a0429c2d14da0c3a85cd67a17821',1,'op::COMPILE_TEMPLATE_DATUM(SubThreadQueueOut)'],['../namespaceop.html#a63605cf0e6f4049beacf6094995272e8',1,'op::COMPILE_TEMPLATE_DATUM(SubThreadQueueInOut)'],['../namespaceop.html#a506578f3e723f992eabb627a371351ba',1,'op::COMPILE_TEMPLATE_DATUM(SubThreadQueueIn)'],['../namespaceop.html#a36492d15f864f7c813a573789ea554aa',1,'op::COMPILE_TEMPLATE_DATUM(SubThreadNoQueue)'],['../namespaceop.html#af98c8e514e79d4718fb1fc64dc0e431b',1,'op::COMPILE_TEMPLATE_DATUM(SubThread)'],['../namespaceop.html#aa7f93261bd6d87f86c45e933607a0678',1,'op::COMPILE_TEMPLATE_DATUM(Queue)'],['../namespaceop.html#aa65c081c13e0d0453938a3c41d04dc49',1,'op::COMPILE_TEMPLATE_DATUM(PriorityQueue)'],['../namespaceop.html#ae76afeeeaedaebe6941f41a4bdf50e2a',1,'op::COMPILE_TEMPLATE_DATUM(WPoseRenderer)'],['../namespaceop.html#ab1e242b1ae7ff3300324fbfedebb52fc',1,'op::COMPILE_TEMPLATE_DATUM(WPoseExtractorNet)'],['../namespaceop.html#a020603e3ad6326cb1dce43485157f768',1,'op::COMPILE_TEMPLATE_DATUM(WPoseExtractor)'],['../namespaceop.html#a635579f5f8d20b8e65f4f94da4d3d2f2',1,'op::COMPILE_TEMPLATE_DATUM(WHandRenderer)'],['../namespaceop.html#ae5cc3e92ffd9696f01ce7824ebbd0759',1,'op::COMPILE_TEMPLATE_DATUM(WHandExtractorNet)'],['../namespaceop.html#a5cc3f625b2644b1aade85a9458b5503a',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetectorUpdate)'],['../namespaceop.html#a361310c59d16e88a4d2450a80f078f01',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetectorTracking)']]], + ['compile_5ftemplate_5ffloating_5fint_5ftypes_115',['COMPILE_TEMPLATE_FLOATING_INT_TYPES',['../macros_8hpp.html#ad0aef3afcb2a9da69c3453426f56b0ac',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5fint_5ftypes_5fclass_116',['COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS',['../macros_8hpp.html#a7bacf9f65110ec8292bc69e1eb0f426e',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5fint_5ftypes_5fstruct_117',['COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT',['../macros_8hpp.html#acc5af19a77b18cf5aa2e1f82e2e484dd',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5ftypes_118',['COMPILE_TEMPLATE_FLOATING_TYPES',['../macros_8hpp.html#a80404791b46a15fd601feaa11f1e5028',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5ftypes_5fclass_119',['COMPILE_TEMPLATE_FLOATING_TYPES_CLASS',['../macros_8hpp.html#a1eadbb31e92e7fbc799bf7cf4d2a6f50',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5ftypes_5fstruct_120',['COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT',['../macros_8hpp.html#af9fed593b7a4237bc6ede717a1ae70f0',1,'macros.hpp']]], + ['configure_121',['configure',['../classop_1_1_wrapper_t.html#a98a7310bc4062fb72f5d26e37d6d7c70',1,'op::WrapperT::configure(const WrapperStructOutput &wrapperStructOutput)'],['../classop_1_1_wrapper_t.html#a7a37b4a945171fd42d1ab16b0b7e8205',1,'op::WrapperT::configure(const WrapperStructGui &wrapperStructGui)'],['../classop_1_1_wrapper_t.html#af3d5d56e63b8c6faee0d7954db95c69d',1,'op::WrapperT::configure(const WrapperStructInput &wrapperStructInput)'],['../classop_1_1_wrapper_t.html#ad9d83f0332c27aa64cde22c66755deec',1,'op::WrapperT::configure(const WrapperStructExtra 
&wrapperStructExtra)'],['../classop_1_1_wrapper_t.html#aaa18264f99da260efb8fa12dd293ee75',1,'op::WrapperT::configure(const WrapperStructHand &wrapperStructHand)'],['../classop_1_1_wrapper_t.html#a3ce073fb177c316aaeab406c1f4808db',1,'op::WrapperT::configure(const WrapperStructFace &wrapperStructFace)'],['../classop_1_1_wrapper_t.html#a7508886116ccfbbb8567a1921591751e',1,'op::WrapperT::configure(const WrapperStructPose &wrapperStructPose)']]], + ['configurethreadmanager_122',['configureThreadManager',['../namespaceop.html#a4adaee31db7ae1d3f963daa9e022e62f',1,'op']]], + ['connectbodypartscpu_123',['connectBodyPartsCpu',['../namespaceop.html#a2ae13dae91c41b29063b48158ccbcc4e',1,'op']]], + ['connectbodypartsgpu_124',['connectBodyPartsGpu',['../namespaceop.html#a927468f6931ddb1e7d1e6e6e59b8bd36',1,'op']]], + ['connectbodypartsocl_125',['connectBodyPartsOcl',['../namespaceop.html#a77a4d87bbee791dfba0667aa10bcca99',1,'op']]], + ['connectinterminabovethreshold_126',['ConnectInterMinAboveThreshold',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0eaf7405796a5c90a93fc3c8ffa89eb432d',1,'op']]], + ['connectinterthreshold_127',['ConnectInterThreshold',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea240f10f3a39507d858c743971fd4298f',1,'op']]], + ['connectminsubsetcnt_128',['ConnectMinSubsetCnt',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea7bf312724768faebba41ca3585a91f19',1,'op']]], + ['connectminsubsetscore_129',['ConnectMinSubsetScore',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea04576b26f5dc3637bf3c8168fba1641d',1,'op']]], + ['copyto_130',['copyTo',['../classop_1_1_matrix.html#a6714cef92d6dce3089841ea124cd2b7d',1,'op::Matrix']]], + ['count_131',['count',['../classop_1_1_array_cpu_gpu.html#aa3e701c15f11e563e0b442c28143188d',1,'op::ArrayCpuGpu::count() const'],['../classop_1_1_array_cpu_gpu.html#a50f82490bab162626760d420f5f6779c',1,'op::ArrayCpuGpu::count(const int start_axis) const'],['../classop_1_1_array_cpu_gpu.html#acd9ea6e75dd2eb516d6a91bac91e43e4',1,'op::ArrayCpuGpu::count(const int start_axis, const int end_axis) const']]], + ['cpu_132',['Cpu',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a54c82ef76ecbbd4c2293e09bae01b54e',1,'op']]], + ['cpu_5fdata_133',['cpu_data',['../classop_1_1_array_cpu_gpu.html#a7e982b668191924e6665645790fa18a2',1,'op::ArrayCpuGpu']]], + ['cpu_5fdiff_134',['cpu_diff',['../classop_1_1_array_cpu_gpu.html#ac5d005ccb8a3b8aba935e5276fcd20e4',1,'op::ArrayCpuGpu']]], + ['cputogpumemoryifnotcopiedyet_135',['cpuToGpuMemoryIfNotCopiedYet',['../classop_1_1_gpu_renderer.html#ac7c1ab0eebf1d54b55cc65a5560bad7b',1,'op::GpuRenderer']]], + ['createarray_136',['createArray',['../classop_1_1_cv_mat_to_op_input.html#ad7c70d7843d64ab0dce9a8a1d993e5b5',1,'op::CvMatToOpInput::createArray()'],['../classop_1_1_cv_mat_to_op_output.html#ad15a20bf40389e7dea888e982bd64e8b',1,'op::CvMatToOpOutput::createArray()']]], + ['createmultiviewtdatum_137',['createMultiviewTDatum',['../namespaceop.html#a3da2a2a2f5ac58cfba53ea0d43ac6751',1,'op']]], + ['createpeoplevector_138',['createPeopleVector',['../namespaceop.html#ae5d883da8c8f11356d5e1b61bc3a99b6',1,'op']]], + ['createproducer_139',['createProducer',['../namespaceop.html#a6a34909c6c4d79a215f163291111d556',1,'op']]], + ['cuda_140',['Cuda',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa8b95dcff7397d0693c03e394af5552aa',1,'op']]], + ['cuda_2ehpp_141',['cuda.hpp',['../cuda_8hpp.html',1,'']]], + ['cuda_5fnum_5fthreads_142',['CUDA_NUM_THREADS',['../namespaceop.html#ac7bbf63b37bf6762c47557ad227e036d',1,'op']]], 
+ ['cudacheck_143',['cudaCheck',['../namespaceop.html#a2af8422ada0de882cc222920ca15c6d2',1,'op']]], + ['cvinputdata_144',['cvInputData',['../structop_1_1_datum.html#a1f9ba4bd5be779a911c8c8e7962ea727',1,'op::Datum']]], + ['cvmattoopinput_145',['CvMatToOpInput',['../classop_1_1_cv_mat_to_op_input.html#a449eacb6cce9678c3ae399c68a45a2e5',1,'op::CvMatToOpInput::CvMatToOpInput()'],['../classop_1_1_cv_mat_to_op_input.html',1,'op::CvMatToOpInput']]], + ['cvmattoopinput_2ehpp_146',['cvMatToOpInput.hpp',['../cv_mat_to_op_input_8hpp.html',1,'']]], + ['cvmattoopoutput_147',['CvMatToOpOutput',['../classop_1_1_cv_mat_to_op_output.html#a054c4dd7892ad540405413b071459b42',1,'op::CvMatToOpOutput::CvMatToOpOutput()'],['../classop_1_1_cv_mat_to_op_output.html',1,'op::CvMatToOpOutput']]], + ['cvmattoopoutput_2ehpp_148',['cvMatToOpOutput.hpp',['../cv_mat_to_op_output_8hpp.html',1,'']]], + ['cvoutputdata_149',['cvOutputData',['../structop_1_1_datum.html#ad70b95f61637fe23092bca8f0a4fb088',1,'op::Datum']]], + ['cvoutputdata3d_150',['cvOutputData3D',['../structop_1_1_datum.html#a0aa21ea7a3adea0126003b778509f2d2',1,'op::Datum']]] +]; diff --git a/web/html/doc/search/all_7.html b/web/html/doc/search/all_7.html new file mode 100644 index 000000000..8ddbf6c8e --- /dev/null +++ b/web/html/doc/search/all_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_7.js b/web/html/doc/search/all_7.js new file mode 100644 index 000000000..5feb7c274 --- /dev/null +++ b/web/html/doc/search/all_7.js @@ -0,0 +1,46 @@ +var searchData= +[ + ['data_151',['data',['../classop_1_1_matrix.html#a69d3316b25c1fce55f067e92b31e4d57',1,'op::Matrix']]], + ['data_5fat_152',['data_at',['../classop_1_1_array_cpu_gpu.html#a4836fabbedf7e1ef97bfbd4d33db3d96',1,'op::ArrayCpuGpu']]], + ['dataconst_153',['dataConst',['../classop_1_1_matrix.html#a9af637b50e808c1d84e179cc6acb45b4',1,'op::Matrix']]], + ['dataformat_154',['DataFormat',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7e',1,'op']]], + ['dataformattostring_155',['dataFormatToString',['../namespaceop.html#a9d121f33179e41075f4602eb6527e658',1,'op']]], + ['datapseudoconst_156',['dataPseudoConst',['../classop_1_1_matrix.html#ab65ba706b58675da9a4512d448d44370',1,'op::Matrix']]], + ['datum_157',['Datum',['../structop_1_1_datum.html#a2d4940d8cb12d95b8588cd0280f6524c',1,'op::Datum::Datum(Datum &&datum)'],['../structop_1_1_datum.html#a42f9aef848c6335c5a81cad374319f0b',1,'op::Datum::Datum(const Datum &datum)'],['../structop_1_1_datum.html#a72c75834671aebe44705738fb5efc3c5',1,'op::Datum::Datum()'],['../structop_1_1_datum.html',1,'op::Datum']]], + ['datum_2ehpp_158',['datum.hpp',['../datum_8hpp.html',1,'']]], + ['datumproducer_159',['DatumProducer',['../classop_1_1_datum_producer.html#a4d52ee6961e2c5c9564f49d203a2865e',1,'op::DatumProducer::DatumProducer()'],['../classop_1_1_datum_producer.html',1,'op::DatumProducer< TDatum >']]], + ['datumproducer_2ehpp_160',['datumProducer.hpp',['../datum_producer_8hpp.html',1,'']]], + ['datumproducerconstructor_161',['datumProducerConstructor',['../namespaceop.html#ad72abbc7b2600f543e4ee8e28392711e',1,'op']]], + ['datumproducerconstructorrunningandgetdatumapplyplayercontrols_162',['datumProducerConstructorRunningAndGetDatumApplyPlayerControls',['../namespaceop.html#a177ffd3101c7a1f5cf32e100474a1234',1,'op']]], + ['datumproducerconstructorrunningandgetdatumframeintegrity_163',['datumProducerConstructorRunningAndGetDatumFrameIntegrity',['../namespaceop.html#a427c6244ee27171037bc201f401de16a',1,'op']]], + ['datumproducerconstructorrunningandgetdatumisdatumproducerrunning_164',['datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning',['../namespaceop.html#a71c68de51a3608e782854c298b91cd62',1,'op']]], + ['datumproducerconstructorrunningandgetnextframenumber_165',['datumProducerConstructorRunningAndGetNextFrameNumber',['../namespaceop.html#a71cdc487bbec12ddbe4bac9123745494',1,'op']]], + ['datumproducerconstructortoomanyconsecutiveemptyframes_166',['datumProducerConstructorTooManyConsecutiveEmptyFrames',['../namespaceop.html#a5001474237d31d72c9145a84ec5143da',1,'op']]], + ['default_5fx_167',['DEFAULT_X',['../classop_1_1_profiler.html#a13de5fe55b2599c0626d5071d3851dec',1,'op::Profiler']]], + ['defaultparttorender_168',['defaultPartToRender',['../structop_1_1_wrapper_struct_pose.html#ab6810e97aa62a728aa09dbbe6b9b6c06',1,'op::WrapperStructPose']]], + ['define_5fbool_169',['DEFINE_bool',['../flags_8hpp.html#a245491f4f0bb36e5e8655877402f50eb',1,'DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera."): flags.hpp'],['../flags_8hpp.html#abd8fa8293ee3a05d4c5a2b6082460ab0',1,'DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle)."): flags.hpp'],['../flags_8hpp.html#ad677c3dfae13a1ec9a3ee2fabe9e37a0',1,'DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint 
skeletons or heatmaps) on a black" " background, instead of being rendered into the original image. Related: `part_to_show`," " `alpha_pose`, and `alpha_pose`."): flags.hpp'],['../flags_8hpp.html#aefe90773deaaa525b3b115d37b46e281',1,'DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."): flags.hpp'],['../flags_8hpp.html#af12ef9f66fbf74e05c08b69caf54821e',1,'DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" " results. Note that it will only display 1 person. If multiple people is present, it will" " fail."): flags.hpp'],['../flags_8hpp.html#a064e35f07a2835d7b4b5d31f0f625865',1,'DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g." " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" " the required GPU memory and its speed depends on the number of people."): flags.hpp'],['../flags_8hpp.html#af4ef631dab577c13f59e10b626c580f6',1,'DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" " for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with" " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" " error."): flags.hpp'],['../flags_8hpp.html#a9b1025da93c8ab21c0fdfe4941f26ad4',1,'DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." " `model_folder`. Note that this will considerable slow down the performance and increase" " the required GPU memory. In addition, the greater number of people on the image, the" " slower OpenPose will be."): flags.hpp'],['../flags_8hpp.html#ae9ea69ae8e64ee99bb4516199f847980',1,'DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g., for real time webcam demonstrations)."): flags.hpp'],['../flags_8hpp.html#ac0db13ec99b09bf9bd38582da33cff1d',1,'DEFINE_bool(frames_repeat, false, "Repeat frames when finished."): flags.hpp'],['../flags_8hpp.html#adf7a6ca551fbd934deb6784bfe37f897',1,'DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g., for video). If the processing time is" " too long, it will skip frames. If it is too fast, it will slow it down."): flags.hpp'],['../flags_8hpp.html#abada704ec59515bb12563262a3f21aeb',1,'DEFINE_bool(frame_undistort, false, "If false (default), it will not undistort the image, if true, it will undistortionate them" " based on the camera parameters found in `camera_parameter_path`"): flags.hpp'],['../flags_8hpp.html#a4d830b4bcbd7998d8c6de665c0531ce9',1,'DEFINE_bool(maximize_positives, false, "It reduces the thresholds to accept a person candidate. It highly increases both false and" " true positives. I.e., it maximizes average recall but could harm average precision."): flags.hpp'],['../flags_8hpp.html#a4670eb3cdedb3f3bac3886e2c21a7750',1,'DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" " memory order: body parts + bkg + PAFs. It will follow the order on" " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. 
Program speed will" " considerably decrease. Not required for OpenPose, enable it only if you intend to" " explicitly use this information later."): flags.hpp'],['../flags_8hpp.html#a2c805a3cd1797a9f67783ed5c9b7c5b1',1,'DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" " background."): flags.hpp'],['../flags_8hpp.html#ad9114bc8e1fc8f306e5296eaae5d542f',1,'DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."): flags.hpp'],['../flags_8hpp.html#a9973307b6bd2af114083ba1badf4c297',1,'DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" " the detected body parts, before being assembled into people. Note that the number of" " candidates is equal or higher than the number of final body parts (i.e., after being" " assembled into people). The empty body parts are filled with 0s. Program speed will" " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" " use this information."): flags.hpp'],['../flags_8hpp.html#af50c82bf332c1a699f0615e7eb734c51',1,'DEFINE_bool(write_video_with_audio, false, "If the input is video and the output is so too, it will save the video with audio. It" " requires the output video file path finishing in `.mp4` format (see `write_video` for" " details)."): flags.hpp'],['../flags_8hpp.html#ab8ac5e6be119dc06f351810053ea8bcf',1,'DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g., number of current frame and people). It" " does not affect the pose rendering."): flags.hpp']]], + ['define_5fdouble_170',['DEFINE_double',['../flags_8hpp.html#a0fe12ed9bee07b6120d595b3a1b85b15',1,'DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."): flags.hpp'],['../flags_8hpp.html#af4490397ad693c3d51835699a5dcddf3',1,'DEFINE_double(write_video_fps, -1., "Frame rate for the recorded video. By default, it will try to get the input frames producer" " frame rate (e.g., input video or webcam frame rate). If the input frames producer does not" " have a set FPS (e.g., image_dir or webcam if OpenCV not compiled with its support), set" " this value accordingly (e.g., to the frame rate displayed by the OpenPose GUI)."): flags.hpp'],['../flags_8hpp.html#a87455dc2555757a087e99d8b52138835',1,'DEFINE_double(cli_verbose, -1.f, "If -1, it will be disabled (default). If it is a positive integer number, it will print on" " the command line every `verbose` frames. 
If number in the range (0,1), it will print the" " progress every `verbose` times the total of frames."): flags.hpp'],['../flags_8hpp.html#aea6dc9d3cb9ea69426d012d1f41fadf0',1,'DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."): flags.hpp'],['../flags_8hpp.html#ac2cc95296b63a048521a2c41dce82b45',1,'DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."): flags.hpp'],['../flags_8hpp.html#a9b80aae5395b7d99c980198374bde9f2',1,'DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."): flags.hpp'],['../flags_8hpp.html#a83309bdfd7daadfb89be65edf399ac9a',1,'DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."): flags.hpp'],['../flags_8hpp.html#a8dfdbe0ff3a68f46e440f379621f8f0a',1,'DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."): flags.hpp'],['../flags_8hpp.html#af9d388afd71b21640a573e6e8cad4c1a',1,'DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" " heatmap, 0 will only show the frame. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a33562cf43d115a4d26f9958aa04c15ff',1,'DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" " hide it. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a166e98128271506645ce14000faace73',1,'DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" " rendered. Note: Rendered refers only to visual display in the OpenPose basic GUI, not in" " the saved results. Generally, a high threshold (> 0.5) will only render very clear body" " parts; while small thresholds (~0.1) will also output guessed and occluded keypoints," " but also more false positives (i.e., wrong detections)."): flags.hpp'],['../flags_8hpp.html#a349e235659cc7b31dcf5db0d3d468fce',1,'DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" " between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if" " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."): flags.hpp'],['../flags_8hpp.html#af437a0d8f293cd02b992a94b268571a4',1,'DEFINE_double(upsampling_ratio, 0., "Upsampling ratio between the `net_resolution` and the output net results. A value less" " or equal than 0 (default) will use the network default value (recommended)."): flags.hpp'],['../flags_8hpp.html#abecb5274ddd75ca51fb40064478b1ed3',1,'DEFINE_double(scale_gap, 0.25, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." " If you want to change the initial scale, you actually want to multiply the" " `net_resolution` by your desired initial scale."): flags.hpp'],['../flags_8hpp.html#ad90e61b31f6bd48c3514195da36ff31c',1,'DEFINE_double(net_resolution_dynamic, 1., "This flag only applies to images or custom inputs (not to video or webcam). If it is zero" " or a negative value, it means that using `-1` in `net_resolution` will behave as explained" " in its description. Otherwise, and to avoid out of memory errors, the `-1` in" " `net_resolution` will clip to this value times the default 16/9 aspect ratio value (which" " is 656 width for a 368 height). E.g., `net_resolution_dynamic 10 net_resolution -1x368`" " will clip to 6560x368 (10 x 656). 
Recommended 1 for small GPUs (to avoid out of memory" " errors but maximize speed) and 0 for big GPUs (for maximum accuracy and speed)."): flags.hpp'],['../flags_8hpp.html#ace91dac10649fcbe836a71459b2f2584',1,'DEFINE_double(fps_max, -1., "Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as" " possible. Example usage: If OpenPose is displaying images too quickly, this can reduce" " the speed so the user can analyze better each frame from the GUI."): flags.hpp']]], + ['define_5fint32_171',['DEFINE_int32',['../flags_8hpp.html#ad696d262dc852c2f872470b90c25fafe',1,'DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."): flags.hpp'],['../flags_8hpp.html#aa3c62563ce9d99c25d4a2977f253c6c7',1,'DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" " rounded [0,255]; and 3 for no scaling."): flags.hpp'],['../flags_8hpp.html#a6561fc0841b80f5c19a1c4bc549175e9',1,'DEFINE_int32(scale_number, 1, "Number of scales to average."): flags.hpp'],['../flags_8hpp.html#aa2bc11c618a37698d88f7ae100e1729f',1,'DEFINE_int32(body, 1, "Select 0 to disable body keypoint detection (e.g., for faster but less accurate face" " keypoint detection, custom hand detector, etc.), 1 (default) for body keypoint" " estimation, and 2 to disable its internal body pose estimation network but still" " still run the greedy association parsing algorithm"): flags.hpp'],['../flags_8hpp.html#ac5e8f82d85a3eb0ee72a64569395497c',1,'DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" " top scores. The score is based in person area over the image, body part score, as well as" " joint score (between each pair of connected body parts). Useful if you know the exact" " number of people in the scene, so it can remove false positives (if all the people have" " been detected. However, it might also include false negatives by removing very small or" " highly occluded people. -1 will keep them all."): flags.hpp'],['../flags_8hpp.html#a547efed657b6e562d8d5f071124fcf17',1,'DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e., the scale of the (x,y)" " coordinates that will be saved with the `write_json` & `write_keypoint` flags." " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" " size (set with `net_resolution`); `2` to scale it to the final output size (set with" " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non" " related with `scale_number` and `scale_gap`."): flags.hpp'],['../flags_8hpp.html#a844330d264f5648ae7d99b76f72f391a',1,'DEFINE_int32(num_gpu_start, 0, "GPU device start number."): flags.hpp'],['../flags_8hpp.html#a8bd040787ac075ae4cf483be01fe2c5f',1,'DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. 
If negative, it will use all the available GPUs in your" " machine."): flags.hpp'],['../flags_8hpp.html#adbd3e76b28ecc7415ccb782c3419a9de',1,'DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270."): flags.hpp'],['../flags_8hpp.html#a513eca9e40da3a149e02c0c1fb6d10d3',1,'DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" " camera index to run, where 0 corresponds to the detected flir camera with the lowest" " serial number, and `n` to the `n`-th lowest serial number camera."): flags.hpp'],['../flags_8hpp.html#a807c2eb873d9e727f14d6c7ee6e02e11',1,'DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" " number (by default), to auto-detect and open the first available camera."): flags.hpp'],['../flags_8hpp.html#a2d8a3ae1f10dd657619e2a5d2dcb4b61',1,'DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" " runtime statistics at this frame number."): flags.hpp'],['../flags_8hpp.html#aae4cfb31c1a5934475d8cbd6b2f8268e',1,'DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any opLog() message," " while 255 will not output any. Current OpenPose library messages are in the range 0-4:" " 1 for low priority messages and 4 for important ones."): flags.hpp'],['../flags_8hpp.html#a71a0fc42dd98d1739571e4f7fed4873c',1,'DEFINE_int32(face_detector, 0, "Kind of face rectangle detector. Select 0 (default) to select OpenPose body detector (most" " accurate one and fastest one if body is enabled), 1 to select OpenCV face detector (not" " implemented for hands), 2 to indicate that it will be provided by the user, or 3 to" " also apply hand tracking (only for hand). Hand tracking might improve hand keypoint" " detection for webcam (if the frame rate is high enough, i.e., >7 FPS per GPU) and video." " This is not person ID tracking, it simply looks for hands in positions at which hands were" " located in previous frames, but it does not guarantee the same person ID among frames."): flags.hpp'],['../flags_8hpp.html#ae7ff5e3adea9c5f572455ec30dd3fbff',1,'DEFINE_int32(hand_detector, 0, "Kind of hand rectangle detector. Analogous to `--face_detector`."): flags.hpp'],['../flags_8hpp.html#ac9b1dcda85ac079222769931cad6bebc',1,'DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will" " require max(2, min(4, #cameras-1)) cameras to see the keypoint in order to reconstruct" " it."): flags.hpp'],['../flags_8hpp.html#a13dcbbdf12e9e72eb29ccf25d7a7cd42',1,'DEFINE_int32(3d_views, -1, "Complementary option for `--image_dir` or `--video`. OpenPose will read as many images per" " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" " `--camera_parameter_path` must be set. OpenPose must find as many `xml` files in the" " parameter folder as this number indicates."): flags.hpp'],['../flags_8hpp.html#a6d2331153c7051c742d11dcb0a4220ec',1,'DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" " value indicates the number of frames where tracking is run between each OpenPose keypoint" " detection. 
Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" " detector and tracking for potentially higher accuracy than only OpenPose."): flags.hpp'],['../flags_8hpp.html#a2c213e3a0c01a36f52667d1707b49062',1,'DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" " the number of threads will increase the speed but also the global system latency."): flags.hpp'],['../flags_8hpp.html#add0ca9baf682a84f3236e7f5c001db06',1,'DEFINE_int32(part_to_show, 0, "Prediction channel to visualize: 0 (default) for all the body parts, 1 for the background" " heat map, 2 for the superposition of heatmaps, 3 for the superposition of PAFs," " 4-(4+#keypoints) for each body part heat map, the following ones for each body part pair" " PAF."): flags.hpp'],['../flags_8hpp.html#a61f245285b5a4b77b1d923276fe6f995',1,'DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" " (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if" " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" " both `outputData` and `cvOutputData` with the original image and desired body part to be" " shown (i.e., keypoints, heat maps or PAFs)."): flags.hpp'],['../flags_8hpp.html#a796a3b14805d5e0b01b21b9bab844382',1,'DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" " configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#a46b9edf947872e29ea5cbd7a95bee719',1,'DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" " configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#ab598c69ed7164089afcdd4149c24a5eb',1,'DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."): flags.hpp'],['../flags_8hpp.html#a80cdeb8d094d26ae5840a74ccff8613c',1,'DEFINE_int32(write_coco_json_variants, 1, "Add 1 for body, add 2 for foot, 4 for face, and/or 8 for hands. Use 0 to use all the" " possible candidates. E.g., 7 would mean body+foot+face COCO JSON."): flags.hpp'],['../flags_8hpp.html#a17550ab833803b7862beaab957642af6',1,'DEFINE_int32(write_coco_json_variant, 0, "Currently, this option is experimental and only makes effect on car JSON generation. It" " selects the COCO variant for cocoJsonSaver."): flags.hpp']]], + ['define_5fstring_172',['DEFINE_string',['../flags_8hpp.html#abcc67acb9ca2d225394445eb6017bc4d',1,'DEFINE_string(camera_parameter_path, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located. If there" " is only 1 XML file (for single video, webcam, or images from the same camera), you must" " specify the whole XML file path (ending in .xml)."): flags.hpp'],['../flags_8hpp.html#a81edc2bb181cd79c98bfae1520f8ab71',1,'DEFINE_string(prototxt_path, "", "The combination `--model_folder` + `--prototxt_path` represents the whole path to the" " prototxt file. 
If empty, it will use the default OpenPose ProtoTxt file."): flags.hpp'],['../flags_8hpp.html#a456ac1650914494fbdacd53b55420e2b',1,'DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."): flags.hpp'],['../flags_8hpp.html#a1f0ce14d63633af19e375d6fbcccc463',1,'DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g., `~/Desktop/mocapResult.bvh`."): flags.hpp'],['../flags_8hpp.html#abd5499ff7014225c02e6149bde93e3a3',1,'DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" " `write_video_fps` controls FPS. Alternatively, the video extension can be `.mp4`," " resulting in a file with a much smaller size and allowing `--write_video_with_audio`." " However, that would require: 1) Ubuntu or Mac system, 2) FFmpeg library installed" " (`sudo apt-get install ffmpeg`), 3) the creation temporarily of a folder with the same" " file path than the final video (without the extension) to storage the intermediate frames" " that will later be used to generate the final MP4 video."): flags.hpp'],['../flags_8hpp.html#a488d04acd61a19fe00cd2e56844dd8c5',1,'DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."): flags.hpp'],['../flags_8hpp.html#a8763644943c3413220cfe6bf8f385d44',1,'DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" " with `write_keypoint_format`."): flags.hpp'],['../flags_8hpp.html#aac91c51c83200f18076e7354067ccbb0',1,'DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" " floating values. See `doc/02_output.md` for more details."): flags.hpp'],['../flags_8hpp.html#af1f0085881667603ed4e0404d7140bdc',1,'DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" " must be enabled."): flags.hpp'],['../flags_8hpp.html#a4ebc35e01d48db77575a1cdd53ac0815',1,'DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format. If foot, face," " hands, etc. JSON is also desired (`--write_coco_json_variants`), they are saved with" " different file name suffix."): flags.hpp'],['../flags_8hpp.html#a66ec3a67de281684d9ff60c7b80c9430',1,'DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose" " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."): flags.hpp'],['../flags_8hpp.html#a08c988c91c179c16944f9f703c24324b',1,'DEFINE_string(write_video_adam, "", "Experimental, not available yet. Analogous to `--write_video`, but applied to Adam model."): flags.hpp'],['../flags_8hpp.html#a85ed9ce7f145fad05a50344a6fdbee37',1,'DEFINE_string(write_video_3d, "", "Analogous to `--write_video`, but applied to the 3D output."): flags.hpp'],['../flags_8hpp.html#acd0c383a2043852c83e284b669a5cf7e',1,'DEFINE_string(caffemodel_path, "", "The combination `--model_folder` + `--caffemodel_path` represents the whole path to the" " caffemodel file. 
If empty, it will use the default OpenPose CaffeModel file."): flags.hpp'],['../flags_8hpp.html#a91ba99b997951b09eab545a40c019f85',1,'DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV" " function cv::imwrite for all compatible extensions."): flags.hpp'],['../flags_8hpp.html#a4a4d36b7f90a4d53a0fa29f86bbbb9aa',1,'DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."): flags.hpp'],['../flags_8hpp.html#a81e3bebeb0cec269b90097fb5856c96f',1,'DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint" " detector."): flags.hpp'],['../flags_8hpp.html#a1da3324efb2a917d0714100dcdb13aba',1,'DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" " detector. 320x320 usually works fine while giving a substantial speed up when multiple" " faces on the image."): flags.hpp'],['../flags_8hpp.html#aa0cc9af40fd8fdee2d8c61da206913dc',1,'DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" " input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," " e.g., full HD (1980x1080) and HD (1280x720) resolutions."): flags.hpp'],['../flags_8hpp.html#a8e45f79c948490e55be06e3541b3681f',1,'DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g., `BODY_25` (fastest for CUDA version, most accurate, and includes" " foot keypoints), `COCO` (18 keypoints), `MPI` (15 keypoints, least accurate model but" " fastest on CPU), `MPI_4_layers` (15 keypoints, even faster but less accurate)."): flags.hpp'],['../flags_8hpp.html#a4fff2a82464bb9e180e04f70f0d5cbad',1,'DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" " input image resolution."): flags.hpp'],['../flags_8hpp.html#add5d5807feef88090f8c9d11bf904ba8',1,'DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" " images. Read all standard formats (jpg, png, bmp, etc.)."): flags.hpp'],['../flags_8hpp.html#a5690d1f0bce6904d9ccea011b0a0262f',1,'DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" " example video."): flags.hpp'],['../flags_8hpp.html#ab1d4b66fac361d1f3f450cd6bc5311d4',1,'DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" " `--flir_camera`"): flags.hpp'],['../flags_8hpp.html#a7ffa026d9b667e5551909aba895f0dfb',1,'DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication."): flags.hpp'],['../flags_8hpp.html#abd20da73260490fba6e09a17c235fc4a',1,'DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."): flags.hpp'],['../flags_8hpp.html#a02962b73af4084b90494b777ff1826c1',1,'DEFINE_string(ip_camera, "", "String with the IP camera URL. 
It supports protocols like RTSP and HTTP."): flags.hpp']]], + ['define_5ftemplate_5fdatum_173',['DEFINE_TEMPLATE_DATUM',['../datum_8hpp.html#ad11d52b69bc54e48ceb2f5787f700431',1,'datum.hpp']]], + ['define_5fuint64_174',['DEFINE_uint64',['../flags_8hpp.html#a99f9d7e0dcbf9f6ceddf589dc482d17a',1,'DEFINE_uint64(frame_first, 0, "Start on desired frame number. Indexes are 0-based, i.e., the first frame has index 0."): flags.hpp'],['../flags_8hpp.html#ab7c61c5b25d4b3db1284761933c66aed',1,'DEFINE_uint64(frame_step, 1, "Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames" " 0, 5, 10, etc.."): flags.hpp'],['../flags_8hpp.html#a9fbfea8bf51a80ff2254f329366a19b8',1,'DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to" " 10, it will process 11 frames (0-10)."): flags.hpp']]], + ['delete_5fcopy_175',['DELETE_COPY',['../macros_8hpp.html#abef96b5dd35dd9d44ad27ddf0e2f5f2e',1,'DELETE_COPY(): macros.hpp'],['../classop_1_1_sub_thread_no_queue.html#a43504502c36461305d656fb87b914749',1,'op::SubThreadNoQueue::DELETE_COPY()'],['../classop_1_1_hand_cpu_renderer.html#a66a7d318b240c73687320bf092363409',1,'op::HandCpuRenderer::DELETE_COPY()'],['../classop_1_1_face_cpu_renderer.html#a233f2a83930d07e4d420b43c8a660f32',1,'op::FaceCpuRenderer::DELETE_COPY()']]], + ['demo_5fadvanced_2emd_176',['demo_advanced.md',['../demo__advanced_8md.html',1,'']]], + ['deployment_2emd_177',['deployment.md',['../deployment_8md.html',1,'']]], + ['depth_178',['depth',['../classop_1_1_matrix.html#ae33558724a713e9a36f8dc0062d267a8',1,'op::Matrix']]], + ['detectfaces_179',['detectFaces',['../classop_1_1_face_detector_open_c_v.html#aba2826bad3f87ce3967e1f999f941fc5',1,'op::FaceDetectorOpenCV::detectFaces()'],['../classop_1_1_face_detector.html#a6db84197d64104da0c26f49ecf8facd1',1,'op::FaceDetector::detectFaces()']]], + ['detecthands_180',['detectHands',['../classop_1_1_hand_detector_from_txt.html#a1e6ba23fa1486e92a3bdca36b2e86d22',1,'op::HandDetectorFromTxt::detectHands()'],['../classop_1_1_hand_detector.html#a731a19ff54389b1f56b0aae76af6debe',1,'op::HandDetector::detectHands()']]], + ['detector_181',['Detector',['../namespaceop.html#a1070db47220e17cf37df40411350f6fb',1,'op']]], + ['detector_182',['detector',['../structop_1_1_wrapper_struct_face.html#a30d45f383e5c9d72709b5281f24f1ed0',1,'op::WrapperStructFace::detector()'],['../structop_1_1_wrapper_struct_hand.html#a2759e92ee811d7a8eb69e1b7eba29d08',1,'op::WrapperStructHand::detector()']]], + ['diff_5fat_183',['diff_at',['../classop_1_1_array_cpu_gpu.html#a3f10532b10ec840aa9e1dac3ccc7ee25',1,'op::ArrayCpuGpu']]], + ['dims_184',['dims',['../classop_1_1_matrix.html#aabfd2f25b2459aac510e1e31b207fcf3',1,'op::Matrix']]], + ['disabled_185',['Disabled',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09ab9f5c797ebbf55adccdd8539a65a0241',1,'op']]], + ['disablemultithreading_186',['disableMultiThreading',['../classop_1_1_wrapper_t.html#a6ba81304df06fbec71103973ce0041c5',1,'op::WrapperT']]], + ['display2d_187',['Display2D',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a3bd9369403112127ae7db2f866002be2',1,'op']]], + ['display3d_188',['Display3D',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6ae18221460ca8434295f980225fd6a91b',1,'op']]], + ['displayadam_189',['DisplayAdam',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a442304e26339521bc296bdc47ff5fddf',1,'op']]], + 
['displayall_190',['DisplayAll',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a105036ef087117869f656cd72bfd8dd6',1,'op']]], + ['displayframe_191',['displayFrame',['../classop_1_1_frame_displayer.html#a23263864af418160f489072716ba9951',1,'op::FrameDisplayer::displayFrame(const std::vector< Matrix > &frames, const int waitKeyValue=-1)'],['../classop_1_1_frame_displayer.html#aa99517efbef90cd8a6e171a713c37501',1,'op::FrameDisplayer::displayFrame(const Matrix &frame, const int waitKeyValue=-1)']]], + ['displaymode_192',['displayMode',['../structop_1_1_wrapper_struct_gui.html#a2a979a7daa368cc847ae6d9a168ff556',1,'op::WrapperStructGui']]], + ['displaymode_193',['DisplayMode',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6',1,'op']]] +]; diff --git a/web/html/doc/search/all_8.html b/web/html/doc/search/all_8.html new file mode 100644 index 000000000..83c55ae22 --- /dev/null +++ b/web/html/doc/search/all_8.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    [all_8.html: standard Doxygen search-results page stub ("Loading...", "Searching...", "No Matches")]
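The DEFINE_double / DEFINE_int32 / DEFINE_string / DEFINE_uint64 entries indexed above carry the full gflags declaration of each OpenPose command-line flag in their tooltip text, e.g. 'DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. ...")'. As a minimal sketch (not part of the generated Doxygen output; the helper name is hypothetical and the sample tooltip is abbreviated), the flag type, name and default value can be recovered from such a tooltip with a small regular expression:

// Hypothetical helper: pull the flag type, name and default value out of a
// tooltip string shaped like the DEFINE_* entries above. The default-value
// capture assumes the default itself contains no comma, which holds for the
// flags shown in this index.
function parseFlagTooltip(tooltip) {
  const match = tooltip.match(/^DEFINE_(\w+)\((\w+),\s*([^,]+),/);
  return match && { type: match[1], name: match[2], defaultValue: match[3].trim() };
}

// Abbreviated sample taken from the num_gpu entry above.
const sample = 'DEFINE_int32(num_gpu, -1, "The number of GPU devices to use."): flags.hpp';
console.log(parseFlagTooltip(sample));
// -> { type: 'int32', name: 'num_gpu', defaultValue: '-1' }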
    + + diff --git a/web/html/doc/search/all_8.js b/web/html/doc/search/all_8.js new file mode 100644 index 000000000..c6a92b492 --- /dev/null +++ b/web/html/doc/search/all_8.js @@ -0,0 +1,30 @@ +var searchData= +[ + ['elementrendered_194',['elementRendered',['../structop_1_1_datum.html#a35212700ef2a2ac290a6666e2993a192',1,'op::Datum']]], + ['elementtorender_195',['ElementToRender',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993e',1,'op']]], + ['elemsize_196',['elemSize',['../classop_1_1_matrix.html#a401c028c88a65b69c0c029cfc990f631',1,'op::Matrix']]], + ['elemsize1_197',['elemSize1',['../classop_1_1_matrix.html#ae459fb36ef45c1215a7db39af8a8e6cf',1,'op::Matrix']]], + ['emplaceandpop_198',['emplaceAndPop',['../classop_1_1_wrapper_t.html#aaca8a5dc6f342470c8241fda5cd6cdb9',1,'op::WrapperT::emplaceAndPop(const Matrix &matrix)'],['../classop_1_1_wrapper_t.html#a3818c026e33cc573ba8b5722daa003a7',1,'op::WrapperT::emplaceAndPop(TDatumsSP &tDatums)']]], + ['empty_199',['empty',['../classop_1_1_queue_base.html#a74d9b247804a226cf9a0758b25bd3ba9',1,'op::QueueBase::empty()'],['../classop_1_1_matrix.html#a7f9df7fbdc9ef76e158f72d306f88ec2',1,'op::Matrix::empty()'],['../classop_1_1_array.html#aa173085fa7ec7c7af3a443c617edd97a',1,'op::Array::empty()'],['../classop_1_1_string.html#aeae63b12cb517a5cdaf55b836a92a49c',1,'op::String::empty()']]], + ['enable_200',['enable',['../structop_1_1_wrapper_struct_face.html#a8fadeca500dde0df2a35f864bf05ee05',1,'op::WrapperStructFace::enable()'],['../structop_1_1_wrapper_struct_hand.html#a667ad7eed9f4f96b460f331d25f3d87f',1,'op::WrapperStructHand::enable()']]], + ['enabled_201',['Enabled',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a00d23a76e43b46dae9ec7aa9dcbebb32',1,'op']]], + ['enablegooglelogging_202',['enableGoogleLogging',['../structop_1_1_wrapper_struct_pose.html#a782ba82c6aeabea8fa625042fdf09408',1,'op::WrapperStructPose']]], + ['enter_203',['enter',['../classop_1_1_json_ofstream.html#a32f058b961605d418df1258a1dc5e6a0',1,'op::JsonOfstream']]], + ['enumclasses_2ehpp_204',['enumClasses.hpp',['../filestream_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../core_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../wrapper_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../utilities_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../thread_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../producer_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../pose_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../gui_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../gpu_2enum_classes_8hpp.html',1,'(Global Namespace)']]], + ['error_205',['error',['../namespaceop.html#a42d364d9fbd1a719341bd7187d97cf18',1,'op::error(const T &message, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#a5f092bd36c716a894cb035e1ead2aca3',1,'op::error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")']]], + ['errorandlog_2ehpp_206',['errorAndLog.hpp',['../error_and_log_8hpp.html',1,'']]], + ['errordestructor_207',['errorDestructor',['../namespaceop.html#a758b08be140e27dd2642d286a383be54',1,'op::errorDestructor(const T &message, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#a825f15fdf9dc9cb7473c20f970f15b60',1,'op::errorDestructor(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")']]], + 
['errormode_208',['ErrorMode',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400c',1,'op']]], + ['errorworker_209',['errorWorker',['../namespaceop.html#a96d1720ea5d160cfd4c8404060a9bebd',1,'op::errorWorker(const T &message, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#a61af88aac41ef77ab4e8816023fe32f0',1,'op::errorWorker(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")']]], + ['estimateandsaveextrinsics_210',['estimateAndSaveExtrinsics',['../namespaceop.html#aed964859fbd282bd29f2b818a3bf10dd',1,'op']]], + ['estimateandsaveintrinsics_211',['estimateAndSaveIntrinsics',['../namespaceop.html#a1fd317d44606181c63ef8a4e5676a09e',1,'op']]], + ['estimateandsavesiftfile_212',['estimateAndSaveSiftFile',['../namespaceop.html#a37cdfa8dd466c3df9e7da5724a909143',1,'op']]], + ['exec_213',['exec',['../classop_1_1_thread.html#ad6c3721793d0f65ffe755ab74534afed',1,'op::Thread::exec()'],['../classop_1_1_wrapper_t.html#a478b8bd7deb43322f220593552fe683d',1,'op::WrapperT::exec()'],['../classop_1_1_thread_manager.html#a67a2d7cecc749be414e6896a88ec268d',1,'op::ThreadManager::exec()']]], + ['existdirectory_214',['existDirectory',['../namespaceop.html#a6fc2ee2d2c256695fb7b2b953ee7f762',1,'op']]], + ['existfile_215',['existFile',['../namespaceop.html#ac1f4b95440d2fb57fc715558d039b947',1,'op']]], + ['extensions_216',['Extensions',['../namespaceop.html#a553bd31855c20a0d14e4c44a20bd91da',1,'op']]], + ['extract_217',['extract',['../classop_1_1_scale_and_size_extractor.html#aa05b7698ff8417072787009c85a14421',1,'op::ScaleAndSizeExtractor']]], + ['extractids_218',['extractIds',['../classop_1_1_person_id_extractor.html#a8d0b309bdf1ce96ed1aa2bd3df6f6dbc',1,'op::PersonIdExtractor::extractIds()'],['../classop_1_1_pose_extractor.html#a15d81f74033c643465864f8ab6e48bba',1,'op::PoseExtractor::extractIds()']]], + ['extractidslockthread_219',['extractIdsLockThread',['../classop_1_1_person_id_extractor.html#a1aebf8006d814a02d7fa55f0609a7ab7',1,'op::PersonIdExtractor::extractIdsLockThread()'],['../classop_1_1_pose_extractor.html#aa7b59f4bfe89219e75995bc048efe4de',1,'op::PoseExtractor::extractIdsLockThread()']]], + ['eye_220',['eye',['../classop_1_1_matrix.html#a78f16f08895693461fb20082260aec68',1,'op::Matrix']]] +]; diff --git a/web/html/doc/search/all_9.html b/web/html/doc/search/all_9.html new file mode 100644 index 000000000..1e263c134 --- /dev/null +++ b/web/html/doc/search/all_9.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    [all_9.html: standard Doxygen search-results page stub ("Loading...", "Searching...", "No Matches")]
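Each all_*.js file added in this diff defines one searchData array: every entry pairs a lower-cased key with a display name followed by one or more [url, flag, label] triples pointing at the documentation pages. A minimal sketch of walking that shape (the sample entry is abbreviated from the 'empty' entry in all_8.js above, and the numeric flag is left uninterpreted):

// Abbreviated copy of the 'empty' entry from all_8.js above; anchors shortened.
var searchData = [
  ['empty_199', ['empty',
    ['../classop_1_1_queue_base.html#a74d9...', 1, 'op::QueueBase::empty()'],
    ['../classop_1_1_matrix.html#a7f9d...', 1, 'op::Matrix::empty()']]],
];

// List every indexed symbol together with its link targets.
for (const [key, [displayName, ...links]] of searchData) {
  console.log(`${displayName} (${key}): ${links.length} target(s)`);
  for (const [url, , label] of links) {
    console.log(`  ${label} -> ${url}`);
  }
}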
    + + diff --git a/web/html/doc/search/all_9.js b/web/html/doc/search/all_9.js new file mode 100644 index 000000000..9dce9522e --- /dev/null +++ b/web/html/doc/search/all_9.js @@ -0,0 +1,88 @@ +var searchData= +[ + ['f135_221',['F135',['../namespaceop.html#a593bb53120d8db14cab814dfb5d9ed2c',1,'op']]], + ['face_222',['Face',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca8af5861002f3c157f9ba842bba10aa3f',1,'op']]], + ['face_5fccn_5fdecrease_5ffactor_223',['FACE_CCN_DECREASE_FACTOR',['../namespaceop.html#aa6701cc08e1a8651798ef3bf8437375b',1,'op']]], + ['face_5fcolors_5frender_224',['FACE_COLORS_RENDER',['../namespaceop.html#a3fbae1778780ae5bf4ffcc84cdef1870',1,'op']]], + ['face_5fcolors_5frender_5fgpu_225',['FACE_COLORS_RENDER_GPU',['../face_parameters_8hpp.html#a740a6228babfde5f18fba6fc033ef0ed',1,'faceParameters.hpp']]], + ['face_5fdefault_5falpha_5fheat_5fmap_226',['FACE_DEFAULT_ALPHA_HEAT_MAP',['../namespaceop.html#a4f191afed46fea5e3ce5b2a8756e1ddd',1,'op']]], + ['face_5fdefault_5falpha_5fkeypoint_227',['FACE_DEFAULT_ALPHA_KEYPOINT',['../namespaceop.html#a15f6c39797cee87f6aa941d93f22b78b',1,'op']]], + ['face_5fmax_5ffaces_228',['FACE_MAX_FACES',['../namespaceop.html#a9b4b92c621cc5962a72898899d2f2534',1,'op']]], + ['face_5fnumber_5fparts_229',['FACE_NUMBER_PARTS',['../namespaceop.html#a8a05bdc38612c38e28b96bba5b4679b8',1,'op']]], + ['face_5fpairs_5frender_230',['FACE_PAIRS_RENDER',['../namespaceop.html#a1245f62cf98c4ee7591dfc8807ef355d',1,'op']]], + ['face_5fpairs_5frender_5fgpu_231',['FACE_PAIRS_RENDER_GPU',['../face_parameters_8hpp.html#a7e2f64c1349d6a881c6ceb49757e099a',1,'faceParameters.hpp']]], + ['face_5fprototxt_232',['FACE_PROTOTXT',['../namespaceop.html#a4d07868d77fb11253b413ed579e04c22',1,'op']]], + ['face_5fscales_5frender_233',['FACE_SCALES_RENDER',['../namespaceop.html#a00c56c20997f734b2bd44d6f85b86cf0',1,'op']]], + ['face_5fscales_5frender_5fgpu_234',['FACE_SCALES_RENDER_GPU',['../face_parameters_8hpp.html#a1a7ddb1a137c44091a1b4161725adfa0',1,'faceParameters.hpp']]], + ['face_5ftrained_5fmodel_235',['FACE_TRAINED_MODEL',['../namespaceop.html#abd0ef2306478c3295283e7f1b59e3aff',1,'op']]], + ['facecpurenderer_236',['FaceCpuRenderer',['../classop_1_1_face_cpu_renderer.html#afb0dcfff75c4a89d5971d7b0bbd0b51b',1,'op::FaceCpuRenderer::FaceCpuRenderer()'],['../classop_1_1_face_cpu_renderer.html',1,'op::FaceCpuRenderer']]], + ['facecpurenderer_2ehpp_237',['faceCpuRenderer.hpp',['../face_cpu_renderer_8hpp.html',1,'']]], + ['facedetector_238',['FaceDetector',['../classop_1_1_face_detector.html#adfeab6977c93b7bef66c1dfbcf6f8150',1,'op::FaceDetector::FaceDetector()'],['../classop_1_1_face_detector.html',1,'op::FaceDetector']]], + ['facedetector_2ehpp_239',['faceDetector.hpp',['../face_detector_8hpp.html',1,'']]], + ['facedetectoropencv_240',['FaceDetectorOpenCV',['../classop_1_1_face_detector_open_c_v.html#a8c4d55863b726448762a142fa91bb69d',1,'op::FaceDetectorOpenCV::FaceDetectorOpenCV()'],['../classop_1_1_face_detector_open_c_v.html',1,'op::FaceDetectorOpenCV']]], + ['facedetectoropencv_2ehpp_241',['faceDetectorOpenCV.hpp',['../face_detector_open_c_v_8hpp.html',1,'']]], + ['faceextractorcaffe_242',['FaceExtractorCaffe',['../classop_1_1_face_extractor_caffe.html#adedc0e50f2eacd8e02c5bd8b0563b2ee',1,'op::FaceExtractorCaffe::FaceExtractorCaffe()'],['../classop_1_1_face_extractor_caffe.html',1,'op::FaceExtractorCaffe']]], + ['faceextractorcaffe_2ehpp_243',['faceExtractorCaffe.hpp',['../face_extractor_caffe_8hpp.html',1,'']]], + 
['faceextractornet_244',['FaceExtractorNet',['../classop_1_1_face_extractor_net.html#a125b052c75a5e39890e140e962b37838',1,'op::FaceExtractorNet::FaceExtractorNet()'],['../classop_1_1_face_extractor_net.html',1,'op::FaceExtractorNet']]], + ['faceextractornet_2ehpp_245',['faceExtractorNet.hpp',['../face_extractor_net_8hpp.html',1,'']]], + ['facegpurenderer_246',['FaceGpuRenderer',['../classop_1_1_face_gpu_renderer.html#a344b4f1d256d6ad805273eb8ba29cde1',1,'op::FaceGpuRenderer::FaceGpuRenderer()'],['../classop_1_1_face_gpu_renderer.html',1,'op::FaceGpuRenderer']]], + ['facegpurenderer_2ehpp_247',['faceGpuRenderer.hpp',['../face_gpu_renderer_8hpp.html',1,'']]], + ['faceheatmaps_248',['faceHeatMaps',['../structop_1_1_datum.html#ae0c0f33a6b75c7f47e11112dd33f23c1',1,'op::Datum']]], + ['facekeypoints_249',['faceKeypoints',['../structop_1_1_datum.html#aebd19bf50725a5cd87de1efd96f6ebfe',1,'op::Datum']]], + ['facekeypoints3d_250',['faceKeypoints3D',['../structop_1_1_datum.html#a9a44196a197d5c050e626efe8b016e84',1,'op::Datum']]], + ['faceparameters_2ehpp_251',['faceParameters.hpp',['../face_parameters_8hpp.html',1,'']]], + ['facerectangles_252',['faceRectangles',['../structop_1_1_datum.html#a0b2f6955a1751fc79b107789272effad',1,'op::Datum']]], + ['facerenderer_253',['FaceRenderer',['../classop_1_1_face_renderer.html',1,'op']]], + ['facerenderer_2ehpp_254',['faceRenderer.hpp',['../face_renderer_8hpp.html',1,'']]], + ['fastmath_2ehpp_255',['fastMath.hpp',['../fast_math_8hpp.html',1,'']]], + ['fastmax_256',['fastMax',['../namespaceop.html#a9f4b99449c0c73e2c89ee1a1eff007c7',1,'op']]], + ['fastmin_257',['fastMin',['../namespaceop.html#a6e1d1f90ef06cc7af576fdaad4b4e320',1,'op']]], + ['fasttruncate_258',['fastTruncate',['../namespaceop.html#a2dafd3db8f922405b38240345dd1dce5',1,'op']]], + ['filelogging_259',['FileLogging',['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94a68ec2bf5b1662d1d27a523dcfc3c702a',1,'op::FileLogging()'],['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca68ec2bf5b1662d1d27a523dcfc3c702a',1,'op::FileLogging()']]], + ['filesaver_260',['FileSaver',['../classop_1_1_file_saver.html#aa4632ae62ac77dbad85523845ce79999',1,'op::FileSaver::FileSaver()'],['../classop_1_1_file_saver.html',1,'op::FileSaver']]], + ['filesaver_2ehpp_261',['fileSaver.hpp',['../file_saver_8hpp.html',1,'']]], + ['filestream_2ehpp_262',['fileStream.hpp',['../file_stream_8hpp.html',1,'']]], + ['filesystem_2ehpp_263',['fileSystem.hpp',['../file_system_8hpp.html',1,'']]], + ['flags_2ehpp_264',['flags.hpp',['../flags_8hpp.html',1,'']]], + ['flagstodetector_265',['flagsToDetector',['../namespaceop.html#a9f585930a5246e4a9a70145fa8763447',1,'op']]], + ['flagstodisplaymode_266',['flagsToDisplayMode',['../namespaceop.html#afdf2dd76cbae54789a139d9415790f82',1,'op']]], + ['flagstoheatmaps_267',['flagsToHeatMaps',['../namespaceop.html#ad3b02ca66d11f4129372f4a9f98c6437',1,'op']]], + ['flagstoheatmapscalemode_268',['flagsToHeatMapScaleMode',['../namespaceop.html#aed9ab5282e3e60f22dc11c301af897e6',1,'op']]], + ['flagstoopenpose_2ehpp_269',['flagsToOpenPose.hpp',['../flags_to_open_pose_8hpp.html',1,'']]], + ['flagstopoint_270',['flagsToPoint',['../namespaceop.html#a0e1275fd8690a55200fcd193c94dcf08',1,'op']]], + ['flagstoposemode_271',['flagsToPoseMode',['../namespaceop.html#af5ec8b7e6271798cbd09475766c64d2f',1,'op']]], + ['flagstoposemodel_272',['flagsToPoseModel',['../namespaceop.html#a60ab295fba5d41b31d6ba5a4942889a9',1,'op']]], + 
['flagstoproducer_273',['flagsToProducer',['../namespaceop.html#a8264a6feec695adef80d40940863d511',1,'op']]], + ['flagstoproducertype_274',['flagsToProducerType',['../namespaceop.html#a1ca09f1d0e1f01d95842e99ebeef0631',1,'op']]], + ['flagstorendermode_275',['flagsToRenderMode',['../namespaceop.html#a70f65da8f70ebd07b093932927187c90',1,'op']]], + ['flagstoscalemode_276',['flagsToScaleMode',['../namespaceop.html#abe3f4d783191416b8e62e54c953fe36b',1,'op']]], + ['flip_277',['Flip',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a9ffbd422925a6839ee820ddbc59278c5',1,'op']]], + ['flircamera_278',['FlirCamera',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332af436d4d7a472ac39a7cb227e3ea24f8d',1,'op']]], + ['flirreader_279',['FlirReader',['../classop_1_1_flir_reader.html#a8fa5c03b6ce95372ce47013c01c782a5',1,'op::FlirReader::FlirReader()'],['../classop_1_1_flir_reader.html',1,'op::FlirReader']]], + ['flirreader_2ehpp_280',['flirReader.hpp',['../flir_reader_8hpp.html',1,'']]], + ['foot_281',['Foot',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca129e74dde7b475c8848310e16754c965',1,'op']]], + ['forceemplace_282',['forceEmplace',['../classop_1_1_queue_base.html#a8d218f599b84194909691c72ee0de8d0',1,'op::QueueBase']]], + ['forcepush_283',['forcePush',['../classop_1_1_queue_base.html#ad124d414b7c2680e5312ee163d18410f',1,'op::QueueBase']]], + ['formatasdirectory_284',['formatAsDirectory',['../namespaceop.html#ab38ea91ef7b7dad700d8e4a4654d48f5',1,'op']]], + ['formattocvmat_285',['formatToCvMat',['../classop_1_1_op_output_to_cv_mat.html#aaee9dc07945e0857de33308b12c9bd09',1,'op::OpOutputToCvMat']]], + ['forward_286',['Forward',['../classop_1_1_resize_and_merge_caffe.html#a2f3f7903827e3abc3dab35ebdad002a6',1,'op::ResizeAndMergeCaffe::Forward()'],['../classop_1_1_nms_caffe.html#a263d87a3282cbc03182e4d8759ca9f3a',1,'op::NmsCaffe::Forward()'],['../classop_1_1_maximum_caffe.html#a51604d40efcfa63c5a46dc257c72cf9c',1,'op::MaximumCaffe::Forward()'],['../classop_1_1_body_part_connector_caffe.html#a52cc46828bc7720a62fbbe841022331e',1,'op::BodyPartConnectorCaffe::Forward()']]], + ['forward_5fcpu_287',['Forward_cpu',['../classop_1_1_resize_and_merge_caffe.html#a65e81f3ac60a58a29f302d818d5b0c8f',1,'op::ResizeAndMergeCaffe::Forward_cpu()'],['../classop_1_1_nms_caffe.html#a8289f4e680cd16405555002a61de735b',1,'op::NmsCaffe::Forward_cpu()'],['../classop_1_1_maximum_caffe.html#ae88c10cadaef2e4e7347ef7f8c101b67',1,'op::MaximumCaffe::Forward_cpu()'],['../classop_1_1_body_part_connector_caffe.html#a03364fbed0c71e76eb5fb1f61a397de8',1,'op::BodyPartConnectorCaffe::Forward_cpu()']]], + ['forward_5fgpu_288',['Forward_gpu',['../classop_1_1_nms_caffe.html#a8520f4df4fb2d26a1289b1bcaa814e93',1,'op::NmsCaffe::Forward_gpu()'],['../classop_1_1_body_part_connector_caffe.html#a9dbcac7de4a57a58733462f3ce1db10c',1,'op::BodyPartConnectorCaffe::Forward_gpu()'],['../classop_1_1_resize_and_merge_caffe.html#a13d984c2ec4b5440a694b9a2dfa64521',1,'op::ResizeAndMergeCaffe::Forward_gpu()'],['../classop_1_1_maximum_caffe.html#a6e44cdf4dc3fce4d1dcc75ce29bc051e',1,'op::MaximumCaffe::Forward_gpu()']]], + ['forward_5focl_289',['Forward_ocl',['../classop_1_1_resize_and_merge_caffe.html#aba74db20a0aca30b797f590548de4272',1,'op::ResizeAndMergeCaffe::Forward_ocl()'],['../classop_1_1_nms_caffe.html#ad1719736dc5e459a1d8b28837e94f989',1,'op::NmsCaffe::Forward_ocl()'],['../classop_1_1_body_part_connector_caffe.html#a51324177e60bf260f6c2def76e9e3d7d',1,'op::BodyPartConnectorCaffe::Forward_ocl()']]], + 
['forwardpass_290',['forwardPass',['../classop_1_1_hand_extractor_caffe.html#a2f8e53c8d4f4d509b4a1842f042fa548',1,'op::HandExtractorCaffe::forwardPass()'],['../classop_1_1_face_extractor_caffe.html#ad78fc3e86428d89a513e8e3be10fc47f',1,'op::FaceExtractorCaffe::forwardPass()'],['../classop_1_1_face_extractor_net.html#a6c5d14660690396edb1a939b11962a68',1,'op::FaceExtractorNet::forwardPass()'],['../classop_1_1_hand_extractor_net.html#a0aa50449396fd075bec29e0393a1ff9e',1,'op::HandExtractorNet::forwardPass()'],['../classop_1_1_net.html#a65193e857c721f2f606ea6b010953dbc',1,'op::Net::forwardPass()'],['../classop_1_1_net_caffe.html#a439b30ec5d10c68cb620130ff5e1812a',1,'op::NetCaffe::forwardPass()'],['../classop_1_1_net_open_cv.html#aa62d557f44d2d44f08b8b1dd3efd54fb',1,'op::NetOpenCv::forwardPass()'],['../classop_1_1_pose_extractor.html#a6c0abd998181d03d7890ec7abdee5efe',1,'op::PoseExtractor::forwardPass()'],['../classop_1_1_pose_extractor_caffe.html#a9f8677779c9c07c0fd4ac265cd8d2d8f',1,'op::PoseExtractorCaffe::forwardPass()'],['../classop_1_1_pose_extractor_net.html#a95c48a9fc5368af73a54aa66e44b4bc2',1,'op::PoseExtractorNet::forwardPass()']]], + ['fpsmax_291',['fpsMax',['../structop_1_1_wrapper_struct_pose.html#a16c4fb26e6ce76dfa577e0f4b5747733',1,'op::WrapperStructPose']]], + ['framedisplayer_292',['FrameDisplayer',['../classop_1_1_frame_displayer.html#a21a746ef46172c6a18ea72da6e7b5721',1,'op::FrameDisplayer::FrameDisplayer()'],['../classop_1_1_frame_displayer.html',1,'op::FrameDisplayer']]], + ['framedisplayer_2ehpp_293',['frameDisplayer.hpp',['../frame_displayer_8hpp.html',1,'']]], + ['framefirst_294',['frameFirst',['../structop_1_1_wrapper_struct_input.html#acc72b8efe09ec3888823ed5680a19fe4',1,'op::WrapperStructInput']]], + ['frameflip_295',['frameFlip',['../structop_1_1_wrapper_struct_input.html#a5ee9722814fe2b5a695511cabd12b613',1,'op::WrapperStructInput']]], + ['framelast_296',['frameLast',['../structop_1_1_wrapper_struct_input.html#a5cffb282052bdd812217e54f0b2ec7d5',1,'op::WrapperStructInput']]], + ['framenumber_297',['frameNumber',['../structop_1_1_datum.html#a8b930d61467f98702ebea68f39fc762b',1,'op::Datum']]], + ['framerotate_298',['frameRotate',['../structop_1_1_wrapper_struct_input.html#a86df98e50b680b30afe100d8b2b50685',1,'op::WrapperStructInput']]], + ['framesrepeat_299',['framesRepeat',['../structop_1_1_wrapper_struct_input.html#a7c80f6a3687696ba30d3ce0902ac162f',1,'op::WrapperStructInput']]], + ['framestep_300',['frameStep',['../structop_1_1_wrapper_struct_input.html#ac4349e123d359f436cc01d4068231dc2',1,'op::WrapperStructInput']]], + ['framestep_301',['FrameStep',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a63eacc5ed21c0ecb8bc583e10dc3ae58',1,'op']]], + ['front_302',['front',['../classop_1_1_queue_base.html#aad7a6a666dcf70834d9d18ae6d92cb2c',1,'op::QueueBase::front()'],['../classop_1_1_priority_queue.html#a8e468dfaed310e54987cbb8cb1cef909',1,'op::PriorityQueue::front()'],['../classop_1_1_queue.html#a22f6d214fe4dfc743b3abf00e049c504',1,'op::Queue::front()']]], + ['fullscreen_303',['fullScreen',['../structop_1_1_wrapper_struct_gui.html#ac1d393d3ce6be9304017c1aa3afd8f13',1,'op::WrapperStructGui']]], + ['fullscreen_304',['FullScreen',['../namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfae7ec409749889353b8f83a6b04159420',1,'op']]], + ['fullscreenmode_305',['FullScreenMode',['../namespaceop.html#a6c22a72ce93c64e7582cb670492a50bf',1,'op']]] +]; diff --git a/web/html/doc/search/all_a.html b/web/html/doc/search/all_a.html new file mode 100644 index 000000000..3a6cac108 --- 
/dev/null +++ b/web/html/doc/search/all_a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    [all_a.html: standard Doxygen search-results page stub ("Loading...", "Searching...", "No Matches")]
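The keys in these index files look like the lower-cased identifier with every non-alphanumeric character escaped as a two-digit hex code and a running counter appended, e.g. forward_5focl_289 for Forward_ocl and enumclasses_2ehpp_204 for enumClasses.hpp. That is an observation about the generated data rather than documented Doxygen behaviour; a hypothetical decoder under that assumption:

// Hypothetical decoder, assuming (from the data above, not from Doxygen docs)
// that the trailing _<number> is a running counter and every remaining _xx is
// a two-digit hex escape of one character (_5f -> '_', _2e -> '.').
function decodeSearchKey(key) {
  return key
    .replace(/_\d+$/, '')                         // drop the counter suffix
    .replace(/_([0-9a-f]{2})/g,
      (_, hex) => String.fromCharCode(parseInt(hex, 16)));
}

console.log(decodeSearchKey('forward_5focl_289'));                  // forward_ocl
console.log(decodeSearchKey('enumclasses_2ehpp_204'));              // enumclasses.hpp
console.log(decodeSearchKey('face_5fccn_5fdecrease_5ffactor_223')); // face_ccn_decrease_factor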
    + + diff --git a/web/html/doc/search/all_a.js b/web/html/doc/search/all_a.js new file mode 100644 index 000000000..a697d7f61 --- /dev/null +++ b/web/html/doc/search/all_a.js @@ -0,0 +1,130 @@ +var searchData= +[ + ['get_306',['get',['../classop_1_1_webcam_reader.html#a38ccbdf61f21fba0694362077cb6bdb1',1,'op::WebcamReader::get()'],['../classop_1_1_video_reader.html#a057a7d0c498c48639b38c10ac7efc183',1,'op::VideoReader::get()'],['../classop_1_1_video_capture_reader.html#a64e5cbfb1c556d64cabcebc6eb94eaf1',1,'op::VideoCaptureReader::get()'],['../classop_1_1_producer.html#a94d561f95384dfa0cd91113882869d06',1,'op::Producer::get(const ProducerProperty property)'],['../classop_1_1_producer.html#a366881a952ad34071cc719477f08b968',1,'op::Producer::get(const int capProperty)=0'],['../classop_1_1_ip_camera_reader.html#aa7ad6adac6e401193e03d279176dd889',1,'op::IpCameraReader::get()'],['../classop_1_1_pose_extractor_net.html#aa9138224f4977da54517398ba044b7c3',1,'op::PoseExtractorNet::get()'],['../classop_1_1_flir_reader.html#a5101cdbcd46e51bf7f35995a3d87e900',1,'op::FlirReader::get()'],['../classop_1_1_image_directory_reader.html#aa05bfd69272e81115ba23a3c0731b596',1,'op::ImageDirectoryReader::get()']]], + ['getalphaheatmap_307',['getAlphaHeatMap',['../classop_1_1_renderer.html#ab776e07b5b2f3a3b0aca0ce95d67796b',1,'op::Renderer']]], + ['getalphakeypoint_308',['getAlphaKeypoint',['../classop_1_1_renderer.html#a3cf2d07dc9df42db4648398367c72dbb',1,'op::Renderer']]], + ['getaveragescore_309',['getAverageScore',['../namespaceop.html#a1110f4c0017c43ea1d0896a3225c55f8',1,'op']]], + ['getbiggestperson_310',['getBiggestPerson',['../namespaceop.html#ace4af20d19066df9ec502c5a09097c24',1,'op']]], + ['getblendoriginalframe_311',['getBlendOriginalFrame',['../classop_1_1_renderer.html#ad2ac64e018f2b925d0c8d45883928b68',1,'op::Renderer']]], + ['getcameradistortions_312',['getCameraDistortions',['../classop_1_1_camera_parameter_reader.html#a8edb22b20d1ed044335ec0d2175eeabf',1,'op::CameraParameterReader']]], + ['getcameraextrinsics_313',['getCameraExtrinsics',['../classop_1_1_spinnaker_wrapper.html#a427bf92ca3fc9011b01c57833b078154',1,'op::SpinnakerWrapper::getCameraExtrinsics()'],['../classop_1_1_camera_parameter_reader.html#a8122bb2a8a07555b5341141356fa37c7',1,'op::CameraParameterReader::getCameraExtrinsics()'],['../classop_1_1_flir_reader.html#ad3b940d5ed672ef17406843b102e9715',1,'op::FlirReader::getCameraExtrinsics()'],['../classop_1_1_producer.html#a2853a47b12ab1f32138b6d944c322ebd',1,'op::Producer::getCameraExtrinsics()']]], + ['getcameraextrinsicsinitial_314',['getCameraExtrinsicsInitial',['../classop_1_1_camera_parameter_reader.html#a88c9f18f6b0f3e5d09240e65a5e04beb',1,'op::CameraParameterReader']]], + ['getcameraintrinsics_315',['getCameraIntrinsics',['../classop_1_1_spinnaker_wrapper.html#aaf441c78eeb921886a09412d8af9ddbc',1,'op::SpinnakerWrapper::getCameraIntrinsics()'],['../classop_1_1_producer.html#a6c5be8c556b0a744e11a11de3f185049',1,'op::Producer::getCameraIntrinsics()'],['../classop_1_1_flir_reader.html#acb45c9a89ebc92c0a8ee69a0ec4d0476',1,'op::FlirReader::getCameraIntrinsics()'],['../classop_1_1_camera_parameter_reader.html#a6db1e0c2b4ed63407d12ff0de97cb098',1,'op::CameraParameterReader::getCameraIntrinsics() const']]], + 
['getcameramatrices_316',['getCameraMatrices',['../classop_1_1_camera_parameter_reader.html#a975e5a340bd1b77d680007797ec9eeea',1,'op::CameraParameterReader::getCameraMatrices()'],['../classop_1_1_flir_reader.html#a7ddcdf533c778df342a50c24c280499b',1,'op::FlirReader::getCameraMatrices()'],['../classop_1_1_spinnaker_wrapper.html#a76849430ae48ba14cbdd0b68bca133fb',1,'op::SpinnakerWrapper::getCameraMatrices()'],['../classop_1_1_producer.html#a0d711ebc149dd71159ebc2902ccd8113',1,'op::Producer::getCameraMatrices()']]], + ['getcameraserialnumbers_317',['getCameraSerialNumbers',['../classop_1_1_camera_parameter_reader.html#acf2b4d428d18f2663f4df640171b254d',1,'op::CameraParameterReader']]], + ['getcandidatescopy_318',['getCandidatesCopy',['../classop_1_1_pose_extractor_net.html#a56d7dd1157e70786850169897bcf6883',1,'op::PoseExtractorNet::getCandidatesCopy()'],['../classop_1_1_pose_extractor.html#adc430a6b1b2bf4df75ebf088f97be8af',1,'op::PoseExtractor::getCandidatesCopy()']]], + ['getcandidatescpuconstptr_319',['getCandidatesCpuConstPtr',['../classop_1_1_pose_extractor_caffe.html#a1444ad1ee245a5bcd9e0b5b55395d6d8',1,'op::PoseExtractorCaffe::getCandidatesCpuConstPtr()'],['../classop_1_1_pose_extractor_net.html#a3e73f27594e61bf451b8e9fff7695f62',1,'op::PoseExtractorNet::getCandidatesCpuConstPtr()']]], + ['getcandidatesgpuconstptr_320',['getCandidatesGpuConstPtr',['../classop_1_1_pose_extractor_caffe.html#a499d975f7b6add768425271b2af19a2e',1,'op::PoseExtractorCaffe::getCandidatesGpuConstPtr()'],['../classop_1_1_pose_extractor_net.html#abee987adbe411ca71b6b37ab9cd89a41',1,'op::PoseExtractorNet::getCandidatesGpuConstPtr()']]], + ['getconstcvmat_321',['getConstCvMat',['../classop_1_1_array.html#a9b43d8d495a233c384a75a3f33eae75f',1,'op::Array::getConstCvMat()'],['../classop_1_1_matrix.html#a1beb13525ec86c9827a7116eb2d175b7',1,'op::Matrix::getConstCvMat()']]], + ['getconstptr_322',['getConstPtr',['../classop_1_1_array.html#ac5e77d6926d1d344cf54c88036fc8a9c',1,'op::Array']]], + ['getcudagpunumber_323',['getCudaGpuNumber',['../namespaceop.html#ad9b7765a4396ee4470585ded07285563',1,'op']]], + ['getcvcappropframecount_324',['getCvCapPropFrameCount',['../namespaceop.html#a0e60b0e4e89a7f08de54ad40c2d46a60',1,'op']]], + ['getcvcappropframefps_325',['getCvCapPropFrameFps',['../namespaceop.html#aaf7199f3821a6f954cfae134ec8c7e19',1,'op']]], + ['getcvcappropframeheight_326',['getCvCapPropFrameHeight',['../namespaceop.html#a264496927e7b331ad628d7dc4a683194',1,'op']]], + ['getcvcappropframewidth_327',['getCvCapPropFrameWidth',['../namespaceop.html#a71866b00e7d1077137094f78ec83b62b',1,'op']]], + ['getcvfourcc_328',['getCvFourcc',['../namespaceop.html#a4059a24a786c4f2def977715dd2e6747',1,'op']]], + ['getcvimwritejpegquality_329',['getCvImwriteJpegQuality',['../namespaceop.html#ad86d86621b1f485f261d620373748ed1',1,'op']]], + ['getcvimwritepngcompression_330',['getCvImwritePngCompression',['../namespaceop.html#a289d19386824250545f248a79aed283c',1,'op']]], + ['getcvloadimageanydepth_331',['getCvLoadImageAnydepth',['../namespaceop.html#a84730c1ab201fe836fe87787589af88a',1,'op']]], + ['getcvloadimagegrayscale_332',['getCvLoadImageGrayScale',['../namespaceop.html#ace6c48833ba117b7d036179bdaf31a7a',1,'op']]], + ['getcvmat_333',['getCvMat',['../classop_1_1_array.html#a530010928025b3f64743505d732b1308',1,'op::Array::getCvMat()'],['../classop_1_1_matrix.html#a9326d59a12659563d123ea6587fd4415',1,'op::Matrix::getCvMat()']]], + ['getdistance_334',['getDistance',['../namespaceop.html#ac968b1c98c60b74be78225be27805706',1,'op']]], + 
['getdistanceaverage_335',['getDistanceAverage',['../namespaceop.html#aa053f4b0533d9e981aa171a1ef57fc30',1,'op::getDistanceAverage(const Array< T > &keypointsA, const int personA, const Array< T > &keypointsB, const int personB, const T threshold)'],['../namespaceop.html#acf638f00b0a825c05683f8e23942a9d5',1,'op::getDistanceAverage(const Array< T > &keypoints, const int personA, const int personB, const T threshold)']]], + ['getenabled_336',['getEnabled',['../classop_1_1_face_extractor_net.html#a18911596f5ba442d50718f54a3d64fe0',1,'op::FaceExtractorNet::getEnabled()'],['../classop_1_1_hand_extractor_net.html#af064ccee582800f39ed3eac5d69a4134',1,'op::HandExtractorNet::getEnabled()']]], + ['geterrormodes_337',['getErrorModes',['../namespaceop_1_1_configure_error.html#ae8dbbccc9a2ca8a4670716ac5fdd8d53',1,'op::ConfigureError']]], + ['getfacekeypoints_338',['getFaceKeypoints',['../classop_1_1_face_extractor_net.html#aee0d7b760214c805466ae515938b5190',1,'op::FaceExtractorNet']]], + ['getfileextension_339',['getFileExtension',['../namespaceop.html#a515273b013402d8c75780330588421bc',1,'op']]], + ['getfilenameandextension_340',['getFileNameAndExtension',['../namespaceop.html#a573544858d0a9c29c9707eeda3a21c98',1,'op']]], + ['getfilenamenoextension_341',['getFileNameNoExtension',['../namespaceop.html#a6f37638480139a4076eef4d0c7dc6cd1',1,'op']]], + ['getfileparentfolderpath_342',['getFileParentFolderPath',['../namespaceop.html#a2e35510c95e5525aae7a398b03b32488',1,'op']]], + ['getfilesondirectory_343',['getFilesOnDirectory',['../namespaceop.html#a3ff74a37eb4bf12e31bc5aa95b69f9e3',1,'op::getFilesOnDirectory(const std::string &directoryPath, const std::vector< std::string > &extensions={})'],['../namespaceop.html#adb26da2c52486e926d98471b5387c7e1',1,'op::getFilesOnDirectory(const std::string &directoryPath, const std::string &extension)'],['../namespaceop.html#a858f70fa9d84ad85c60f19a2229ebbde',1,'op::getFilesOnDirectory(const std::string &directoryPath, const Extensions extensions)']]], + ['getfirstnumberonstring_344',['getFirstNumberOnString',['../namespaceop.html#a844c35ea57a8bc67f33f49deb5070652',1,'op']]], + ['getframe_345',['getFrame',['../classop_1_1_producer.html#a07f416a256a3f7e906748701ad569030',1,'op::Producer']]], + ['getframes_346',['getFrames',['../classop_1_1_producer.html#aad1f861eaea12a3590e1beb286d023b7',1,'op::Producer']]], + ['getfullfilepathnoextension_347',['getFullFilePathNoExtension',['../namespaceop.html#ac1737c19228b83a5e93ae51e5d9556eb',1,'op']]], + ['getgpumode_348',['getGpuMode',['../namespaceop.html#a971a7caa96be5b715b5c22f6e5dc6ad1',1,'op']]], + ['getgpunumber_349',['getGpuNumber',['../namespaceop.html#aaad222b087dd041c35de2f3414c1a01f',1,'op']]], + ['gethandkeypoints_350',['getHandKeypoints',['../classop_1_1_hand_extractor_net.html#ae9617434c4dc7e390c18d596b868297d',1,'op::HandExtractorNet']]], + ['getheatmapcpuconstptr_351',['getHeatMapCpuConstPtr',['../classop_1_1_pose_extractor_net.html#a80cb59fa161a7ecd3d6a016354ab9002',1,'op::PoseExtractorNet::getHeatMapCpuConstPtr()'],['../classop_1_1_pose_extractor_caffe.html#a9e8056cd50ba679636c5d5055f5a563b',1,'op::PoseExtractorCaffe::getHeatMapCpuConstPtr()']]], + ['getheatmapgpuconstptr_352',['getHeatMapGpuConstPtr',['../classop_1_1_pose_extractor_net.html#ad1b526d42f690a8857c0ccdc88ff88ac',1,'op::PoseExtractorNet::getHeatMapGpuConstPtr()'],['../classop_1_1_pose_extractor_caffe.html#ac4737f29b467f6c0daad5f54aa20524b',1,'op::PoseExtractorCaffe::getHeatMapGpuConstPtr()']]], + 
['getheatmaps_353',['getHeatMaps',['../classop_1_1_hand_extractor_net.html#a88a35f29d3c53c259756bc07b2bfb093',1,'op::HandExtractorNet::getHeatMaps()'],['../classop_1_1_face_extractor_net.html#a1ba97136b2cc006cd066e3e950f0c179',1,'op::FaceExtractorNet::getHeatMaps()']]], + ['getheatmapscopy_354',['getHeatMapsCopy',['../classop_1_1_pose_extractor_net.html#ad6e1c91c60cf0041c196fd4347bbcdf5',1,'op::PoseExtractorNet::getHeatMapsCopy()'],['../classop_1_1_pose_extractor.html#a95f6235ab496ada0b8cbc4b614637ac0',1,'op::PoseExtractor::getHeatMapsCopy()']]], + ['getheatmapsize_355',['getHeatMapSize',['../classop_1_1_pose_extractor_caffe.html#a350900a3b326f4ed7d3dcb9531055523',1,'op::PoseExtractorCaffe::getHeatMapSize()'],['../classop_1_1_pose_extractor_net.html#a49e1dcb9f9d049131df866b7538507cd',1,'op::PoseExtractorNet::getHeatMapSize()']]], + ['getifinmainthreadorempty_356',['getIfInMainThreadOrEmpty',['../namespaceop.html#ad5e1c975a1b7dce9b02bc8cdf3d45a01',1,'op']]], + ['getifnotinmainthreadorempty_357',['getIfNotInMainThreadOrEmpty',['../namespaceop.html#abdedc8f1fd2f723dae5bb8ff20b93a93',1,'op']]], + ['getisrunningsharedptr_358',['getIsRunningSharedPtr',['../classop_1_1_thread_manager.html#a48ea53b3de4d09c84db18e2c31ce1be1',1,'op::ThreadManager']]], + ['getkeypointsarea_359',['getKeypointsArea',['../namespaceop.html#a1dd5dde18458975a36bdbd6dd38720a2',1,'op']]], + ['getkeypointsperson_360',['getKeypointsPerson',['../namespaceop.html#a75411d98f69051860379730e16103178',1,'op']]], + ['getkeypointsrectangle_361',['getKeypointsRectangle',['../namespaceop.html#ac74cba4141f2bee2b9d94dc171029a73',1,'op']]], + ['getkeypointsroi_362',['getKeypointsRoi',['../namespaceop.html#a36296ff5a5945244c5131e3ae16057e1',1,'op::getKeypointsRoi(const Rectangle< T > &rectangleA, const Rectangle< T > &rectangleB)'],['../namespaceop.html#ac9af122ccd8dcdafb11e37b6633245b4',1,'op::getKeypointsRoi(const Array< T > &keypointsA, const int personA, const Array< T > &keypointsB, const int personB, const T threshold)'],['../namespaceop.html#a6913c67141fcbbba84fc88ac8a45aa0f',1,'op::getKeypointsRoi(const Array< T > &keypoints, const int personA, const int personB, const T threshold)']]], + ['getlastnumber_363',['getLastNumber',['../namespaceop.html#ab670c693d8e4a540cfe75ce8383b6d10',1,'op']]], + ['getlogmodes_364',['getLogModes',['../namespaceop_1_1_configure_log.html#a5ab07ae8c026e4f7782a113778d9082d',1,'op::ConfigureLog']]], + ['getmaxsize_365',['getMaxSize',['../classop_1_1_queue_base.html#a7b3f810bb6e729be3afe3313c4b2f31b',1,'op::QueueBase']]], + ['getmergeresults_366',['getMergeResults',['../classop_1_1_person_tracker.html#a68f46367bd719196974aa5b1bd23cb7d',1,'op::PersonTracker']]], + ['getnextfilename_367',['getNextFileName',['../classop_1_1_file_saver.html#a5940f007f3346580124cd1b6b27492e6',1,'op::FileSaver::getNextFileName(const std::string &fileNameNoExtension) const'],['../classop_1_1_file_saver.html#a52aab3187cefc2e878790aa440a842aa',1,'op::FileSaver::getNextFileName(const unsigned long long index) const']]], + 
['getnextframename_368',['getNextFrameName',['../classop_1_1_webcam_reader.html#a58c315e577c12486e5ab1b941d4cce04',1,'op::WebcamReader::getNextFrameName()'],['../classop_1_1_video_reader.html#a508eed918fbe3bfe3eff4c1ebacb3463',1,'op::VideoReader::getNextFrameName()'],['../classop_1_1_video_capture_reader.html#a06348fd9a290fc2ece2f3c2e4dc9bc70',1,'op::VideoCaptureReader::getNextFrameName()'],['../classop_1_1_producer.html#ab35d570dc35573433ec86e3fce25e545',1,'op::Producer::getNextFrameName()'],['../classop_1_1_ip_camera_reader.html#a0c1582090cc7c54dd9cb752207b52986',1,'op::IpCameraReader::getNextFrameName()'],['../classop_1_1_image_directory_reader.html#a46ce23209afe6d3ca90db545b69cd04a',1,'op::ImageDirectoryReader::getNextFrameName()'],['../classop_1_1_flir_reader.html#a711db0919bd7516fde3e641c13259637',1,'op::FlirReader::getNextFrameName()']]], + ['getnonzerokeypoints_369',['getNonZeroKeypoints',['../namespaceop.html#aa9366cf1b4ac3494965749eeb5537da1',1,'op']]], + ['getnumbercameras_370',['getNumberCameras',['../classop_1_1_camera_parameter_reader.html#a8d97033970f3e71657da070cd87fd70c',1,'op::CameraParameterReader']]], + ['getnumbercudablocks_371',['getNumberCudaBlocks',['../namespaceop.html#a4ba080c11cc9758051db97ce2a11c023',1,'op']]], + ['getnumbercudathreadsandblocks_372',['getNumberCudaThreadsAndBlocks',['../namespaceop.html#a17da233ea322ae172ff5bda7caaf2124',1,'op']]], + ['getnumberdimensions_373',['getNumberDimensions',['../classop_1_1_array.html#a5eff0723f0bbd192248e602bfbb6956f',1,'op::Array']]], + ['getnumberelementstorender_374',['getNumberElementsToRender',['../namespaceop.html#aebff78a4cfbef1cf1b2e03066d88564c',1,'op']]], + ['getoutputblobarray_375',['getOutputBlobArray',['../classop_1_1_net_caffe.html#a37648c14f06ee46ca395c9d38635fade',1,'op::NetCaffe::getOutputBlobArray()'],['../classop_1_1_net_open_cv.html#a9f4981ac196b094183c52caa6ce283db',1,'op::NetOpenCv::getOutputBlobArray()'],['../classop_1_1_net.html#a222cfe3d19800824b742b218b466586b',1,'op::Net::getOutputBlobArray()']]], + ['getposebodypartmapping_376',['getPoseBodyPartMapping',['../namespaceop.html#aab3de911b04b96c1850cc05c6947e184',1,'op']]], + ['getposebodypartpairsrender_377',['getPoseBodyPartPairsRender',['../namespaceop.html#a11bd7e53698eabe32b69b48708cf7b19',1,'op']]], + ['getposecolors_378',['getPoseColors',['../namespaceop.html#abb49286241ba7a1d754b31dee333274a',1,'op']]], + ['getposedefaultconnectinterminabovethreshold_379',['getPoseDefaultConnectInterMinAboveThreshold',['../namespaceop.html#a8e377d8da8f109cb8be8e4edbb2ea90a',1,'op']]], + ['getposedefaultconnectinterthreshold_380',['getPoseDefaultConnectInterThreshold',['../namespaceop.html#aabfd35e57744b44481c09f56c90cc8b8',1,'op']]], + ['getposedefaultconnectminsubsetscore_381',['getPoseDefaultConnectMinSubsetScore',['../namespaceop.html#ae7636f6e8974ecb2ed96d43dd5ec261d',1,'op']]], + ['getposedefaultminsubsetcnt_382',['getPoseDefaultMinSubsetCnt',['../namespaceop.html#a863c96f1fb23d96c5d605867cfe5f99f',1,'op']]], + ['getposedefaultnmsthreshold_383',['getPoseDefaultNmsThreshold',['../namespaceop.html#acd8cab258d7e98affa5c317a9a03e862',1,'op']]], + ['getposegpuconstptr_384',['getPoseGpuConstPtr',['../classop_1_1_pose_extractor_net.html#a546f0d6e0c62c7c7e2d44de848f9a174',1,'op::PoseExtractorNet::getPoseGpuConstPtr()'],['../classop_1_1_pose_extractor_caffe.html#a6ffc941073b66868177c91cc9e025098',1,'op::PoseExtractorCaffe::getPoseGpuConstPtr()']]], + 
['getposekeypoints_385',['getPoseKeypoints',['../classop_1_1_pose_extractor.html#a487be38105b0d3f310142d99e0ca6b12',1,'op::PoseExtractor::getPoseKeypoints()'],['../classop_1_1_pose_extractor_net.html#a3e88bd2122835db768c123d1026ce30f',1,'op::PoseExtractorNet::getPoseKeypoints()']]], + ['getposemapindex_386',['getPoseMapIndex',['../namespaceop.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7',1,'op']]], + ['getposemaxpeaks_387',['getPoseMaxPeaks',['../namespaceop.html#a96a81e831f8c965825162dba09095477',1,'op']]], + ['getposenetdecreasefactor_388',['getPoseNetDecreaseFactor',['../namespaceop.html#ad7ca8d89f9045481075902c8bd98b8f4',1,'op']]], + ['getposenumberbodyparts_389',['getPoseNumberBodyParts',['../namespaceop.html#a54a6c42a42a0a7e539061f5e30abb4bc',1,'op']]], + ['getposepartpairs_390',['getPosePartPairs',['../namespaceop.html#a307b2c7b1506415a4ba44590fe8a7258',1,'op']]], + ['getposeprototxt_391',['getPoseProtoTxt',['../namespaceop.html#ae0730c6559abdb976423ecf81eac4620',1,'op']]], + ['getposescales_392',['getPoseScales',['../namespaceop.html#a016abefba53293ed2ffe3a3c3bd88dd0',1,'op']]], + ['getposescores_393',['getPoseScores',['../classop_1_1_pose_extractor.html#aee77aa0ca773abe442a278d9e9e69376',1,'op::PoseExtractor::getPoseScores()'],['../classop_1_1_pose_extractor_net.html#a43317a6868ffa7391586f2b8b599ecdf',1,'op::PoseExtractorNet::getPoseScores()']]], + ['getposetrainedmodel_394',['getPoseTrainedModel',['../namespaceop.html#ade70b024ee461ae04e7233bf3937c5c6',1,'op']]], + ['getprioritythreshold_395',['getPriorityThreshold',['../namespaceop_1_1_configure_log.html#a0e5c3fad2ace3eb129dd1d97afd59558',1,'op::ConfigureLog']]], + ['getpseudoconstptr_396',['getPseudoConstPtr',['../classop_1_1_array.html#a85d749e637a7528325f86b80595a91d1',1,'op::Array']]], + ['getptr_397',['getPtr',['../classop_1_1_array.html#af4715967fd2b028a97fd30257e697275',1,'op::Array']]], + ['getrawframe_398',['getRawFrame',['../classop_1_1_producer.html#ab23d9eeac2c1820be9191ab9f7bb1777',1,'op::Producer::getRawFrame()'],['../classop_1_1_video_capture_reader.html#a33aabaf7c82773f117f6842ff900fa18',1,'op::VideoCaptureReader::getRawFrame()']]], + ['getrawframes_399',['getRawFrames',['../classop_1_1_producer.html#ad6d701ad0867491736374d8ea753c00e',1,'op::Producer::getRawFrames()'],['../classop_1_1_spinnaker_wrapper.html#a2135a9d3c9dbab4c1e0ee6be6c31b93a',1,'op::SpinnakerWrapper::getRawFrames()'],['../classop_1_1_video_capture_reader.html#a9f58d9280a26d94ff4ba6cd93f4928a0',1,'op::VideoCaptureReader::getRawFrames()']]], + ['getresolution_400',['getResolution',['../classop_1_1_spinnaker_wrapper.html#aad97f57040a953cbce0f20c6b3303202',1,'op::SpinnakerWrapper']]], + ['getscalenettooutput_401',['getScaleNetToOutput',['../classop_1_1_pose_extractor.html#ae798335b1606804c87220d3c72423dad',1,'op::PoseExtractor::getScaleNetToOutput()'],['../classop_1_1_pose_extractor_net.html#ac67c1d8fcba15ccfb284f10776e9fd89',1,'op::PoseExtractorNet::getScaleNetToOutput()']]], + ['getsharedparameters_402',['getSharedParameters',['../classop_1_1_cv_mat_to_op_output.html#ad0ac01a9866ea00c873da7e2552c5b08',1,'op::CvMatToOpOutput::getSharedParameters()'],['../classop_1_1_gpu_renderer.html#a63eb7ae0b440a5552ed9342043a8f369',1,'op::GpuRenderer::getSharedParameters()']]], + ['getshowgooglyeyes_403',['getShowGooglyEyes',['../classop_1_1_renderer.html#a44e13a965a9b0fca119ea897ad1348e0',1,'op::Renderer']]], + ['getsize_404',['getSize',['../classop_1_1_array.html#a4568f646a97fa8cea443b864d91a28df',1,'op::Array::getSize() 
const'],['../classop_1_1_array.html#ab4123b36e0816793e206365397dd8f79',1,'op::Array::getSize(const int index) const']]], + ['getstdstring_405',['getStdString',['../classop_1_1_string.html#a82003e99b9f3e9bd0054873deac970da',1,'op::String']]], + ['getstride_406',['getStride',['../classop_1_1_array.html#a38de9c4ba539b8134fcac91287722044',1,'op::Array::getStride() const'],['../classop_1_1_array.html#ab033fba3d9140020dd89edb10fe8b109',1,'op::Array::getStride(const int index) const']]], + ['getthreadid_407',['getThreadId',['../namespaceop.html#a5a3db1a0d272d8fb5ea723845beee150',1,'op']]], + ['gettimerinit_408',['getTimerInit',['../namespaceop.html#ae0e92a0d8867d1b02f1c43ae4c0c9e09',1,'op']]], + ['gettimeseconds_409',['getTimeSeconds',['../namespaceop.html#a01dd208c992c8e07623579f77dcfb59b',1,'op']]], + ['gettworkerssize_410',['getTWorkersSize',['../classop_1_1_sub_thread.html#a3e8e044b20842d15b1caedf8a78be622',1,'op::SubThread']]], + ['gettype_411',['getType',['../classop_1_1_producer.html#a9a9424027e5bc8e0fba7c65eccc460e0',1,'op::Producer']]], + ['getundistortimage_412',['getUndistortImage',['../classop_1_1_camera_parameter_reader.html#a4c819945b0df95bcfb6f8d79451290d5',1,'op::CameraParameterReader']]], + ['getvolume_413',['getVolume',['../classop_1_1_array.html#a5ed838d2b9933b6a80906d0e0db39742',1,'op::Array::getVolume(const int indexA, const int indexB=-1) const'],['../classop_1_1_array.html#aee364306687e39e754117c98ad844157',1,'op::Array::getVolume() const']]], + ['gpu_414',['Gpu',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a3432ca64f06615abf07ab44c10cada38',1,'op']]], + ['gpu_2ehpp_415',['gpu.hpp',['../gpu_8hpp.html',1,'']]], + ['gpu_5fdata_416',['gpu_data',['../classop_1_1_array_cpu_gpu.html#a292b819460cbf56fd36e7435cd99c49a',1,'op::ArrayCpuGpu']]], + ['gpu_5fdiff_417',['gpu_diff',['../classop_1_1_array_cpu_gpu.html#aa0717b11c87da804e6da0d7aca4a5414',1,'op::ArrayCpuGpu']]], + ['gpu_5fshape_418',['gpu_shape',['../classop_1_1_array_cpu_gpu.html#a7c92a38509887af087eafd7522047429',1,'op::ArrayCpuGpu']]], + ['gpumode_419',['GpuMode',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7f',1,'op']]], + ['gpunumber_420',['gpuNumber',['../structop_1_1_wrapper_struct_pose.html#a536ea76d50e94d513066e9e5767d0c03',1,'op::WrapperStructPose']]], + ['gpunumberstart_421',['gpuNumberStart',['../structop_1_1_wrapper_struct_pose.html#a8be188d871061079432ead77b278fe0d',1,'op::WrapperStructPose']]], + ['gpurenderer_422',['GpuRenderer',['../classop_1_1_gpu_renderer.html#a9852b2017e972637b47250bb7fbc53ea',1,'op::GpuRenderer::GpuRenderer()'],['../classop_1_1_gpu_renderer.html',1,'op::GpuRenderer']]], + ['gpurenderer_2ehpp_423',['gpuRenderer.hpp',['../gpu_renderer_8hpp.html',1,'']]], + ['gputocpumemoryiflastrenderer_424',['gpuToCpuMemoryIfLastRenderer',['../classop_1_1_gpu_renderer.html#a6355f70d16c6427b028fa4596ce5d985',1,'op::GpuRenderer']]], + ['gui_425',['Gui',['../classop_1_1_gui.html#a1084d79f61d08f0551832de1ca337c70',1,'op::Gui::Gui()'],['../classop_1_1_gui.html',1,'op::Gui']]], + ['gui_2ehpp_426',['gui.hpp',['../gui_8hpp.html',1,'']]], + ['gui3d_427',['Gui3D',['../classop_1_1_gui3_d.html#a23ead7d9d09b3f0b3ba81b284d49b4a4',1,'op::Gui3D::Gui3D()'],['../classop_1_1_gui3_d.html',1,'op::Gui3D']]], + ['gui3d_2ehpp_428',['gui3D.hpp',['../gui3_d_8hpp.html',1,'']]], + ['guiadam_2ehpp_429',['guiAdam.hpp',['../gui_adam_8hpp.html',1,'']]], + 
['guiinfoadder_430',['GuiInfoAdder',['../classop_1_1_gui_info_adder.html#af23e17f9eeb51c7473cd0940292efa61',1,'op::GuiInfoAdder::GuiInfoAdder()'],['../classop_1_1_gui_info_adder.html',1,'op::GuiInfoAdder']]], + ['guiinfoadder_2ehpp_431',['guiInfoAdder.hpp',['../gui_info_adder_8hpp.html',1,'']]], + ['guiverbose_432',['guiVerbose',['../structop_1_1_wrapper_struct_gui.html#a9dbb0bfce2593b0a560ed738e11708ce',1,'op::WrapperStructGui']]] +]; diff --git a/web/html/doc/search/all_b.html b/web/html/doc/search/all_b.html new file mode 100644 index 000000000..130deb4ed --- /dev/null +++ b/web/html/doc/search/all_b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_b.js b/web/html/doc/search/all_b.js new file mode 100644 index 000000000..51718d161 --- /dev/null +++ b/web/html/doc/search/all_b.js @@ -0,0 +1,47 @@ +var searchData= +[ + ['h135_433',['H135',['../namespaceop.html#ae37c577c1054c89da4a6736342d491aa',1,'op']]], + ['hand21_434',['Hand21',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca9909f7cecc318ee0049ad0f3b409b3b3',1,'op']]], + ['hand42_435',['Hand42',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca1d9502bb9f6efc989b3578dcfde7901e',1,'op']]], + ['hand_5fccn_5fdecrease_5ffactor_436',['HAND_CCN_DECREASE_FACTOR',['../namespaceop.html#aed0d108f5ada623eeb0ed41f896f8e97',1,'op']]], + ['hand_5fcolors_5frender_437',['HAND_COLORS_RENDER',['../namespaceop.html#a450bb646e7573322d8f622bfdbab4833',1,'op']]], + ['hand_5fcolors_5frender_5fgpu_438',['HAND_COLORS_RENDER_GPU',['../hand_parameters_8hpp.html#a5ca06d0202756b9a3b8825ccbafc0558',1,'handParameters.hpp']]], + ['hand_5fdefault_5falpha_5fheat_5fmap_439',['HAND_DEFAULT_ALPHA_HEAT_MAP',['../namespaceop.html#a76c1f1ea90b73e13e93f72413b3cab0e',1,'op']]], + ['hand_5fdefault_5falpha_5fkeypoint_440',['HAND_DEFAULT_ALPHA_KEYPOINT',['../namespaceop.html#aa8cc53d2fe5353f9d87d50c32a8c1a95',1,'op']]], + ['hand_5fmax_5fhands_441',['HAND_MAX_HANDS',['../namespaceop.html#a182585e2e944cdb62f3dededdd85d1fc',1,'op']]], + ['hand_5fnumber_5fparts_442',['HAND_NUMBER_PARTS',['../namespaceop.html#a41b6fb82924c5532cf10151e6ce497f2',1,'op']]], + ['hand_5fpairs_5frender_443',['HAND_PAIRS_RENDER',['../namespaceop.html#a335d707e98d311d39d9a9dab0e325391',1,'op']]], + ['hand_5fpairs_5frender_5fgpu_444',['HAND_PAIRS_RENDER_GPU',['../hand_parameters_8hpp.html#a5ec40add22e28bc75596c75a7be8a692',1,'handParameters.hpp']]], + ['hand_5fprototxt_445',['HAND_PROTOTXT',['../namespaceop.html#a3fe70bd1eacdd78aef3344c83533ffc7',1,'op']]], + ['hand_5fscales_5frender_446',['HAND_SCALES_RENDER',['../namespaceop.html#a4e9bbc2167923763c5982d6d1f41f560',1,'op']]], + ['hand_5fscales_5frender_5fgpu_447',['HAND_SCALES_RENDER_GPU',['../hand_parameters_8hpp.html#a799d629d7fddd7f0daf40ccdae0293b9',1,'handParameters.hpp']]], + ['hand_5ftrained_5fmodel_448',['HAND_TRAINED_MODEL',['../namespaceop.html#ac13af59538bcb8a1709f20010681d1c7',1,'op']]], + ['handcpurenderer_449',['HandCpuRenderer',['../classop_1_1_hand_cpu_renderer.html#a3145d482c0378288e7ba3e42091a56c2',1,'op::HandCpuRenderer::HandCpuRenderer()'],['../classop_1_1_hand_cpu_renderer.html',1,'op::HandCpuRenderer']]], + ['handcpurenderer_2ehpp_450',['handCpuRenderer.hpp',['../hand_cpu_renderer_8hpp.html',1,'']]], + ['handdetector_451',['HandDetector',['../classop_1_1_hand_detector.html#a20b127dd7b51afcd336d1f16b40ee0b1',1,'op::HandDetector::HandDetector()'],['../classop_1_1_hand_detector.html',1,'op::HandDetector']]], + ['handdetector_2ehpp_452',['handDetector.hpp',['../hand_detector_8hpp.html',1,'']]], + ['handdetectorfromtxt_453',['HandDetectorFromTxt',['../classop_1_1_hand_detector_from_txt.html#a94ef5e925c5d25b181c56ae79bb1eed2',1,'op::HandDetectorFromTxt::HandDetectorFromTxt()'],['../classop_1_1_hand_detector_from_txt.html',1,'op::HandDetectorFromTxt']]], + ['handdetectorfromtxt_2ehpp_454',['handDetectorFromTxt.hpp',['../hand_detector_from_txt_8hpp.html',1,'']]], + ['handextractorcaffe_455',['HandExtractorCaffe',['../classop_1_1_hand_extractor_caffe.html#a703c8b8d15de55bc2b6bbaee633a6384',1,'op::HandExtractorCaffe::HandExtractorCaffe()'],['../classop_1_1_hand_extractor_caffe.html',1,'op::HandExtractorCaffe']]], + 
['handextractorcaffe_2ehpp_456',['handExtractorCaffe.hpp',['../hand_extractor_caffe_8hpp.html',1,'']]], + ['handextractornet_457',['HandExtractorNet',['../classop_1_1_hand_extractor_net.html#a8bcb44ea2618dea01c00255021425637',1,'op::HandExtractorNet::HandExtractorNet()'],['../classop_1_1_hand_extractor_net.html',1,'op::HandExtractorNet']]], + ['handextractornet_2ehpp_458',['handExtractorNet.hpp',['../hand_extractor_net_8hpp.html',1,'']]], + ['handgpurenderer_459',['HandGpuRenderer',['../classop_1_1_hand_gpu_renderer.html#a0d2f742b995a6f34e414f9731db847d5',1,'op::HandGpuRenderer::HandGpuRenderer()'],['../classop_1_1_hand_gpu_renderer.html',1,'op::HandGpuRenderer']]], + ['handgpurenderer_2ehpp_460',['handGpuRenderer.hpp',['../hand_gpu_renderer_8hpp.html',1,'']]], + ['handheatmaps_461',['handHeatMaps',['../structop_1_1_datum.html#aef6c478313691ab5101664c1df55aa58',1,'op::Datum']]], + ['handkeypoints_462',['handKeypoints',['../structop_1_1_datum.html#a59d455dbddc50d700809c5e102c40d4e',1,'op::Datum']]], + ['handkeypoints3d_463',['handKeypoints3D',['../structop_1_1_datum.html#a27bb38102b5ebecd9b13a3619e658316',1,'op::Datum']]], + ['handparameters_2ehpp_464',['handParameters.hpp',['../hand_parameters_8hpp.html',1,'']]], + ['handrectangles_465',['handRectangles',['../structop_1_1_datum.html#a52d75e3273490624414f0602785bb608',1,'op::Datum']]], + ['handrenderer_466',['HandRenderer',['../classop_1_1_hand_renderer.html',1,'op']]], + ['handrenderer_2ehpp_467',['handRenderer.hpp',['../hand_renderer_8hpp.html',1,'']]], + ['headers_2ehpp_468',['headers.hpp',['../wrapper_2headers_8hpp.html',1,'(Global Namespace)'],['../3d_2headers_8hpp.html',1,'(Global Namespace)'],['../calibration_2headers_8hpp.html',1,'(Global Namespace)'],['../core_2headers_8hpp.html',1,'(Global Namespace)'],['../face_2headers_8hpp.html',1,'(Global Namespace)'],['../filestream_2headers_8hpp.html',1,'(Global Namespace)'],['../gpu_2headers_8hpp.html',1,'(Global Namespace)'],['../gui_2headers_8hpp.html',1,'(Global Namespace)'],['../hand_2headers_8hpp.html',1,'(Global Namespace)'],['../headers_8hpp.html',1,'(Global Namespace)'],['../net_2headers_8hpp.html',1,'(Global Namespace)'],['../pose_2headers_8hpp.html',1,'(Global Namespace)'],['../producer_2headers_8hpp.html',1,'(Global Namespace)'],['../thread_2headers_8hpp.html',1,'(Global Namespace)'],['../tracking_2headers_8hpp.html',1,'(Global Namespace)'],['../unity_2headers_8hpp.html',1,'(Global Namespace)'],['../utilities_2headers_8hpp.html',1,'(Global Namespace)']]], + ['heatmap_5foutput_2emd_469',['heatmap_output.md',['../heatmap__output_8md.html',1,'']]], + ['heatmapsaver_470',['HeatMapSaver',['../classop_1_1_heat_map_saver.html#aa6a339b70a9535a018584b93c932b12d',1,'op::HeatMapSaver::HeatMapSaver()'],['../classop_1_1_heat_map_saver.html',1,'op::HeatMapSaver']]], + ['heatmapsaver_2ehpp_471',['heatMapSaver.hpp',['../heat_map_saver_8hpp.html',1,'']]], + ['heatmapscalemode_472',['heatMapScaleMode',['../structop_1_1_wrapper_struct_pose.html#a2a5cceaf05cf228b47d2b001e05efeb8',1,'op::WrapperStructPose']]], + ['heatmaptype_473',['HeatMapType',['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226b',1,'op']]], + ['heatmaptypes_474',['heatMapTypes',['../structop_1_1_wrapper_struct_pose.html#aa459f2f26c1f1a929af55b8c2d39ccf6',1,'op::WrapperStructPose']]], + ['height_475',['height',['../classop_1_1_array_cpu_gpu.html#a1ae24508718592592421f9491bcf50f0',1,'op::ArrayCpuGpu::height()'],['../structop_1_1_rectangle.html#a5db9f0e8c946d837a1d351cc0bc72811',1,'op::Rectangle::height()']]], + 
['high_476',['High',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a655d20c1ca69519ca647684edbb2db35',1,'op']]] +]; diff --git a/web/html/doc/search/all_c.html b/web/html/doc/search/all_c.html new file mode 100644 index 000000000..3dd5af06d --- /dev/null +++ b/web/html/doc/search/all_c.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_c.js b/web/html/doc/search/all_c.js new file mode 100644 index 000000000..5978bedec --- /dev/null +++ b/web/html/doc/search/all_c.js @@ -0,0 +1,32 @@ +var searchData= +[ + ['id_477',['id',['../structop_1_1_datum.html#a65deddd49d0fbca81f367198fc600015',1,'op::Datum']]], + ['identification_478',['identification',['../structop_1_1_wrapper_struct_extra.html#a08578de8a074415df3e645d3ddb27b8b',1,'op::WrapperStructExtra']]], + ['ifendedresetorrelease_479',['ifEndedResetOrRelease',['../classop_1_1_producer.html#ac72a751759ae8b5a0a99552580f7fbad',1,'op::Producer']]], + ['ikthreads_480',['ikThreads',['../structop_1_1_wrapper_struct_extra.html#ad41edf2717e5446a250efc05512ee07f',1,'op::WrapperStructExtra']]], + ['imagedirectory_481',['ImageDirectory',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332a54a365e86ee42cff91ca36532c9bbabf',1,'op']]], + ['imagedirectoryreader_482',['ImageDirectoryReader',['../classop_1_1_image_directory_reader.html#a10157e6234426dd809ffe83ebfbfd274',1,'op::ImageDirectoryReader::ImageDirectoryReader()'],['../classop_1_1_image_directory_reader.html',1,'op::ImageDirectoryReader']]], + ['imagedirectoryreader_2ehpp_483',['imageDirectoryReader.hpp',['../image_directory_reader_8hpp.html',1,'']]], + ['images_484',['Images',['../namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daafff0d600f8a0b5e19e88bfb821dd1157',1,'op']]], + ['imagesaver_485',['ImageSaver',['../classop_1_1_image_saver.html#a723387e62a6b701202dd6cf35c57429f',1,'op::ImageSaver::ImageSaver()'],['../classop_1_1_image_saver.html',1,'op::ImageSaver']]], + ['imagesaver_2ehpp_486',['imageSaver.hpp',['../image_saver_8hpp.html',1,'']]], + ['increase_487',['increase',['../classop_1_1_pose_extractor_net.html#a4959a9c9d433d9297e5daef0e3a0eabc',1,'op::PoseExtractorNet']]], + ['increaseelementtorender_488',['increaseElementToRender',['../classop_1_1_renderer.html#a298a5a58bab80b7252db7d3386a0ed8a',1,'op::Renderer']]], + 
['initializationonthread_489',['initializationOnThread',['../classop_1_1_net_caffe.html#a08b71387287339e68327dd6d4cb1e8b3',1,'op::NetCaffe::initializationOnThread()'],['../classop_1_1_w_gui_info_adder.html#ae620275d6570fd5c74f33728cd340217',1,'op::WGuiInfoAdder::initializationOnThread()'],['../classop_1_1_net.html#a6e9e801f2c9950a798d0d2fa94a6c8f2',1,'op::Net::initializationOnThread()'],['../classop_1_1_w_hand_renderer.html#a2ee88145b38fea1a6a2bb7987a33bd40',1,'op::WHandRenderer::initializationOnThread()'],['../classop_1_1_w_hand_extractor_net.html#a7904f62b91d658a06ed89f0bfd307642',1,'op::WHandExtractorNet::initializationOnThread()'],['../classop_1_1_w_hand_detector_update.html#a729aaa628e4f4c24e7cb9afca1cdc761',1,'op::WHandDetectorUpdate::initializationOnThread()'],['../classop_1_1_w_hand_detector_tracking.html#a20ef6206194a873c2cfa7fe13d905d92',1,'op::WHandDetectorTracking::initializationOnThread()'],['../classop_1_1_w_hand_detector_from_txt.html#acd7d37555c09a58dc660811724930276',1,'op::WHandDetectorFromTxt::initializationOnThread()'],['../classop_1_1_w_hand_detector.html#a5c29c944205ee0727f76c282ef55ae52',1,'op::WHandDetector::initializationOnThread()'],['../classop_1_1_hand_renderer.html#adb91ae2a8ccf24671ad86e99e786b120',1,'op::HandRenderer::initializationOnThread()'],['../classop_1_1_gui3_d.html#a4247c56f90a535944b8aa14def754eaa',1,'op::Gui3D::initializationOnThread()'],['../classop_1_1_hand_extractor_net.html#a37d86377da41c576c4d54027a9762733',1,'op::HandExtractorNet::initializationOnThread()'],['../classop_1_1_hand_gpu_renderer.html#a0489f10ddc9e37e87084ebf9a5138f3a',1,'op::HandGpuRenderer::initializationOnThread()'],['../classop_1_1_w_gui.html#a4e4db210b87f78cc1238dd3ab2bedaa4',1,'op::WGui::initializationOnThread()'],['../classop_1_1_w_gui3_d.html#a7da4f85892e0d7d9e105c6d471a706a3',1,'op::WGui3D::initializationOnThread()'],['../classop_1_1_net_open_cv.html#a932f2f53f61e05bc0fd164a707f692b9',1,'op::NetOpenCv::initializationOnThread()'],['../classop_1_1_pose_extractor.html#aab1cccc9ad99f6b007abaa14600ea6df',1,'op::PoseExtractor::initializationOnThread()'],['../classop_1_1_pose_extractor_net.html#a28923c846dc7c731d3571c72a50acd2f',1,'op::PoseExtractorNet::initializationOnThread()'],['../classop_1_1_pose_gpu_renderer.html#a9e94ab926baf360dd6b23e14fba09836',1,'op::PoseGpuRenderer::initializationOnThread()'],['../classop_1_1_pose_renderer.html#af861d8213f1444b3246402061cea1b33',1,'op::PoseRenderer::initializationOnThread()'],['../classop_1_1_w_pose_extractor.html#a9b621ed9915da9bf3cce49db547de9e6',1,'op::WPoseExtractor::initializationOnThread()'],['../classop_1_1_w_pose_extractor_net.html#a18d4a120314ec44d1722cc164aaba7a8',1,'op::WPoseExtractorNet::initializationOnThread()'],['../classop_1_1_w_pose_renderer.html#aba989a73cef9a807879ad2196725c61c',1,'op::WPoseRenderer::initializationOnThread()'],['../classop_1_1_w_datum_producer.html#a4381eaec4625824ebaa2d23f0cf1be48',1,'op::WDatumProducer::initializationOnThread()'],['../classop_1_1_sub_thread.html#a8debc3b655463847fed2c547d13326f7',1,'op::SubThread::initializationOnThread()'],['../classop_1_1_w_fps_max.html#af8c5f74f0271d227b2c70b4415366332',1,'op::WFpsMax::initializationOnThread()'],['../classop_1_1_w_id_generator.html#a50a1b7929810daae87ee6443c659edad',1,'op::WIdGenerator::initializationOnThread()'],['../classop_1_1_worker.html#aa5be4df9d4d8302728c653870e7d2a23',1,'op::Worker::initializationOnThread()'],['../classop_1_1_w_queue_assembler.html#a02bb2d4e47689903434c05a911a5ba15',1,'op::WQueueAssembler::initializationOnThread()'],['../
classop_1_1_w_queue_orderer.html#a85598f83f6f3a30b7ddce9bc7beddf33',1,'op::WQueueOrderer::initializationOnThread()'],['../classop_1_1_w_person_id_extractor.html#a72b888875be18eb3fc8d0a8c267630de',1,'op::WPersonIdExtractor::initializationOnThread()'],['../classop_1_1_frame_displayer.html#af5d2e1c8bcd2012c66347252e8dbc543',1,'op::FrameDisplayer::initializationOnThread()'],['../classop_1_1_gui.html#a07cf9b4e7757979666d097278df02c20',1,'op::Gui::initializationOnThread()'],['../classop_1_1_pose_triangulation.html#a90436697faa45a3676087426763014f4',1,'op::PoseTriangulation::initializationOnThread()'],['../classop_1_1_w_pose_triangulation.html#a5711329db1768eb77d2d96575c9fb668',1,'op::WPoseTriangulation::initializationOnThread()'],['../classop_1_1_w_cv_mat_to_op_input.html#ac03534bbe3b6c3c45efb61b5d78402da',1,'op::WCvMatToOpInput::initializationOnThread()'],['../classop_1_1_w_cv_mat_to_op_output.html#ad4c957d391e371b7ee56cdb5be6b1452',1,'op::WCvMatToOpOutput::initializationOnThread()'],['../classop_1_1_w_keep_top_n_people.html#a56371016b6fe1fbacdba8d558685719b',1,'op::WKeepTopNPeople::initializationOnThread()'],['../classop_1_1_w_keypoint_scaler.html#aba4fb004818f3adc22959e382a90cd2c',1,'op::WKeypointScaler::initializationOnThread()'],['../classop_1_1_w_op_output_to_cv_mat.html#adea2e8b1d33e6c091640c7d904dac7cd',1,'op::WOpOutputToCvMat::initializationOnThread()'],['../classop_1_1_w_scale_and_size_extractor.html#ac1203ef395a836b13f5586432f284c41',1,'op::WScaleAndSizeExtractor::initializationOnThread()'],['../classop_1_1_w_verbose_printer.html#a9d21f5db0e70ba4cad73cf2bdf6c9fe2',1,'op::WVerbosePrinter::initializationOnThread()'],['../classop_1_1_face_extractor_net.html#a6d6d5d6bd912bb940058a2b958aadf61',1,'op::FaceExtractorNet::initializationOnThread()'],['../classop_1_1_face_gpu_renderer.html#a6ebd9287927529ffaa4200890190896b',1,'op::FaceGpuRenderer::initializationOnThread()'],['../classop_1_1_face_renderer.html#aa34ce7a0602b0994cc3043b80627a31c',1,'op::FaceRenderer::initializationOnThread()'],['../classop_1_1_w_face_detector.html#afaca53a669f0cd43103f7317aded75d3',1,'op::WFaceDetector::initializationOnThread()'],['../classop_1_1_w_face_extractor_net.html#ac04b0bec061a6cbc6a6afacb3f8d15c7',1,'op::WFaceExtractorNet::initializationOnThread()'],['../classop_1_1_w_video_saver3_d.html#ad5a050f5646af36bf8d91909e8f47b2f',1,'op::WVideoSaver3D::initializationOnThread()'],['../classop_1_1_w_video_saver.html#ada90f76b28e4bafe9c8ecbb9bcbb2d14',1,'op::WVideoSaver::initializationOnThread()'],['../classop_1_1_w_udp_sender.html#a567d9fe2adc85ae086379696573112e3',1,'op::WUdpSender::initializationOnThread()'],['../classop_1_1_w_pose_saver.html#a4f0774832e12389593361186f1b83128',1,'op::WPoseSaver::initializationOnThread()'],['../classop_1_1_w_people_json_saver.html#a5d4239596a996723a20a1031d32c7446',1,'op::WPeopleJsonSaver::initializationOnThread()'],['../classop_1_1_w_image_saver.html#a78655ea3d4dac28bdf7e2e4a80b5a337',1,'op::WImageSaver::initializationOnThread()'],['../classop_1_1_w_heat_map_saver.html#a20e82b121a580c578f69cbb0401c4cb0',1,'op::WHeatMapSaver::initializationOnThread()'],['../classop_1_1_w_hand_saver.html#aa234a68d1cc7ec97fefbf30239149baa',1,'op::WHandSaver::initializationOnThread()'],['../classop_1_1_w_face_saver.html#ae8401789881462eb8438c65e9d2d3fb2',1,'op::WFaceSaver::initializationOnThread()'],['../classop_1_1_w_coco_json_saver.html#a5cca095ff23c3134ab0addc9a4feabaf',1,'op::WCocoJsonSaver::initializationOnThread()'],['../classop_1_1_w_face_renderer.html#a7b72c70dc02c9209d84992caad6ad7d0',1,'op::WF
aceRenderer::initializationOnThread()'],['../classop_1_1_w_face_detector_open_c_v.html#ad7dce5824ba32bc07d2474c20b23e62d',1,'op::WFaceDetectorOpenCV::initializationOnThread()']]], + ['initializationonthreadnoexception_490',['initializationOnThreadNoException',['../classop_1_1_worker.html#a5df10dd8a245df1a6d8df18978490899',1,'op::Worker']]], + ['input_491',['Input',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a324118a6721dd6b8a9b9f4e327df2bf5',1,'op']]], + ['inputnetdata_492',['inputNetData',['../structop_1_1_datum.html#a46ff336119fd0d67c8223b1a9371731d',1,'op::Datum']]], + ['inputresolution_493',['InputResolution',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa46f9a0da0a5d448fd0cc8b3aa0a9b228',1,'op']]], + ['installation_5fdeprecated_2emd_494',['installation_deprecated.md',['../installation__deprecated_8md.html',1,'']]], + ['installation_5fjetson_5ftx1_2emd_495',['installation_jetson_tx1.md',['../installation__jetson__tx1_8md.html',1,'']]], + ['installation_5fjetson_5ftx2_5fjetpack3_2e1_2emd_496',['installation_jetson_tx2_jetpack3.1.md',['../installation__jetson__tx2__jetpack3_81_8md.html',1,'']]], + ['installation_5fjetson_5ftx2_5fjetpack3_2e3_2emd_497',['installation_jetson_tx2_jetpack3.3.md',['../installation__jetson__tx2__jetpack3_83_8md.html',1,'']]], + ['ipcamera_498',['IPCamera',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332af40a40a04a078c4449cda2f326d7fb18',1,'op']]], + ['ipcamerareader_499',['IpCameraReader',['../classop_1_1_ip_camera_reader.html#af3a67a2705107e04e79672fa087847c5',1,'op::IpCameraReader::IpCameraReader()'],['../classop_1_1_ip_camera_reader.html',1,'op::IpCameraReader']]], + ['ipcamerareader_2ehpp_500',['ipCameraReader.hpp',['../ip_camera_reader_8hpp.html',1,'']]], + ['iscontinuous_501',['isContinuous',['../classop_1_1_matrix.html#ae82b851dd176317d72df95461a4bad76',1,'op::Matrix']]], + ['isfull_502',['isFull',['../classop_1_1_queue_base.html#a17a52df2e912a346c412418c62268425',1,'op::QueueBase']]], + ['isopened_503',['isOpened',['../classop_1_1_flir_reader.html#a3d383e03a405dcbff566a86253db90af',1,'op::FlirReader::isOpened()'],['../classop_1_1_webcam_reader.html#a6a065fcf3d6dca624741adc0f77da11d',1,'op::WebcamReader::isOpened()'],['../classop_1_1_video_reader.html#a503e70039e2cfecfe2d31771df509733',1,'op::VideoReader::isOpened()'],['../classop_1_1_video_capture_reader.html#ab0c6519396faae82ec1b49262ed454a2',1,'op::VideoCaptureReader::isOpened()'],['../classop_1_1_spinnaker_wrapper.html#a51e869f56a6517bd55783ea039066d7d',1,'op::SpinnakerWrapper::isOpened()'],['../classop_1_1_producer.html#a58590e4a409d31f839184b4bf030a68b',1,'op::Producer::isOpened()'],['../classop_1_1_ip_camera_reader.html#ac26913b4ff841f56f43bb53b012a2401',1,'op::IpCameraReader::isOpened()'],['../classop_1_1_image_directory_reader.html#adbf9ff392cd52a585332dbdcd46ffb81',1,'op::ImageDirectoryReader::isOpened()'],['../classop_1_1_video_saver.html#a0c5dadfa4f687283c370e7890ae5037f',1,'op::VideoSaver::isOpened()']]], + ['isrunning_504',['isRunning',['../classop_1_1_queue_base.html#a9f529f94ff3b98e3ac11d796caa31239',1,'op::QueueBase::isRunning()'],['../classop_1_1_thread.html#a3ed032f4c42ef1797873122aa96a055d',1,'op::Thread::isRunning()'],['../classop_1_1_thread_manager.html#a7bad63adddf7a35a436911ada2a1c519',1,'op::ThreadManager::isRunning()'],['../classop_1_1_worker.html#a567902b58e492421a6ad771e730ddf53',1,'op::Worker::isRunning()'],['../classop_1_1_wrapper_t.html#a0577721c5e714861b27ad4ff356980bc',1,'op::WrapperT::isRunning()']]], + 
['issubmatrix_505',['isSubmatrix',['../classop_1_1_matrix.html#aa0ab094e21bab6757f502866bce7e79c',1,'op::Matrix']]] +]; diff --git a/web/html/doc/search/all_d.html b/web/html/doc/search/all_d.html new file mode 100644 index 000000000..af7f2f0f5 --- /dev/null +++ b/web/html/doc/search/all_d.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_d.js b/web/html/doc/search/all_d.js new file mode 100644 index 000000000..6962b3967 --- /dev/null +++ b/web/html/doc/search/all_d.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['jointangleestimation_2ehpp_506',['jointAngleEstimation.hpp',['../joint_angle_estimation_8hpp.html',1,'']]], + ['json_507',['Json',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7eaeed8d85b888a6c015834240885ee6333',1,'op']]], + ['jsonofstream_508',['JsonOfstream',['../classop_1_1_json_ofstream.html#afa4b3e1dee27f5afd0017b95c0f5e364',1,'op::JsonOfstream::JsonOfstream(const std::string &filePath, const bool humanReadable=true)'],['../classop_1_1_json_ofstream.html#ab8220b4336ccc8998cc38f0fa5c36918',1,'op::JsonOfstream::JsonOfstream(JsonOfstream &&jsonOfstream)'],['../classop_1_1_json_ofstream.html',1,'op::JsonOfstream']]], + ['jsonofstream_2ehpp_509',['jsonOfstream.hpp',['../json_ofstream_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/all_e.html b/web/html/doc/search/all_e.html new file mode 100644 index 000000000..e25df423a --- /dev/null +++ b/web/html/doc/search/all_e.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_e.js b/web/html/doc/search/all_e.js new file mode 100644 index 000000000..3c8b81f63 --- /dev/null +++ b/web/html/doc/search/all_e.js @@ -0,0 +1,15 @@ +var searchData= +[ + ['keepdesiredframerate_510',['keepDesiredFrameRate',['../classop_1_1_producer.html#afad3eadd16cca0de2c2be8b083c0d56d',1,'op::Producer']]], + ['keeproiinside_511',['keepRoiInside',['../namespaceop.html#a5f85de4dca2733d03470d42617f83d4e',1,'op']]], + ['keeptopnpeople_512',['KeepTopNPeople',['../classop_1_1_keep_top_n_people.html#ae9419ae35da5b0547989f19795a26808',1,'op::KeepTopNPeople::KeepTopNPeople()'],['../classop_1_1_keep_top_n_people.html',1,'op::KeepTopNPeople']]], + ['keeptopnpeople_2ehpp_513',['keepTopNPeople.hpp',['../keep_top_n_people_8hpp.html',1,'']]], + ['keeptoppeople_514',['keepTopPeople',['../classop_1_1_keep_top_n_people.html#a556a0d8d97985e0b73fc78e372be6ea8',1,'op::KeepTopNPeople::keepTopPeople()'],['../classop_1_1_pose_extractor.html#a291521decad2465df13dc769fe9cc4e5',1,'op::PoseExtractor::keepTopPeople()']]], + ['key_515',['key',['../classop_1_1_json_ofstream.html#af0c7f763e7e809810c00b394a260672e',1,'op::JsonOfstream']]], + ['keypoint_2ehpp_516',['keypoint.hpp',['../keypoint_8hpp.html',1,'']]], + ['keypointsaver_517',['KeypointSaver',['../classop_1_1_keypoint_saver.html#aa6d9eb36cfd40c5cfa3995420cdf3dfa',1,'op::KeypointSaver::KeypointSaver()'],['../classop_1_1_keypoint_saver.html',1,'op::KeypointSaver']]], + ['keypointsaver_2ehpp_518',['keypointSaver.hpp',['../keypoint_saver_8hpp.html',1,'']]], + ['keypointscalemode_519',['keypointScaleMode',['../structop_1_1_wrapper_struct_pose.html#a054c88e977084707e80eb31dd0a658ab',1,'op::WrapperStructPose']]], + ['keypointscaler_520',['KeypointScaler',['../classop_1_1_keypoint_scaler.html#a0f556c1b0fad63c7c3551a5d4fd72219',1,'op::KeypointScaler::KeypointScaler()'],['../classop_1_1_keypoint_scaler.html',1,'op::KeypointScaler']]], + ['keypointscaler_2ehpp_521',['keypointScaler.hpp',['../keypoint_scaler_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/all_f.html b/web/html/doc/search/all_f.html new file mode 100644 index 000000000..b23da6ce4 --- /dev/null +++ b/web/html/doc/search/all_f.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/all_f.js b/web/html/doc/search/all_f.js new file mode 100644 index 000000000..57171688d --- /dev/null +++ b/web/html/doc/search/all_f.js @@ -0,0 +1,10 @@ +var searchData= +[ + ['layersetup_522',['LayerSetUp',['../classop_1_1_maximum_caffe.html#a47047083e35d2af5a969acbf7cb55674',1,'op::MaximumCaffe::LayerSetUp()'],['../classop_1_1_nms_caffe.html#a8c7e69c32f1fff92893284ed70278f48',1,'op::NmsCaffe::LayerSetUp()'],['../classop_1_1_resize_and_merge_caffe.html#ad7441a1f8db85f6239830603fb7a6325',1,'op::ResizeAndMergeCaffe::LayerSetUp()']]], + ['legacyshape_523',['LegacyShape',['../classop_1_1_array_cpu_gpu.html#a9aa5001613f7199de933eef152db40b0',1,'op::ArrayCpuGpu']]], + ['loaddata_524',['loadData',['../namespaceop.html#a9f14054fbf4e63fc85d10c83f2f9ecb7',1,'op::loadData(const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)'],['../namespaceop.html#a1c2921f841ab87033b535b5ae8a4d526',1,'op::loadData(const std::string &cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)']]], + ['loadhanddetectortxt_525',['loadHandDetectorTxt',['../namespaceop.html#a0ce96f84c6e380b261802c7e2639dc7d',1,'op']]], + ['loadimage_526',['loadImage',['../namespaceop.html#a871a61f08021460e0f24f51583546a75',1,'op']]], + ['logmode_527',['LogMode',['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94',1,'op']]], + ['low_528',['Low',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a28d0edd045e05cf5af64e35ae0c4c6ef',1,'op']]] +]; diff --git a/web/html/doc/search/classes_0.html b/web/html/doc/search/classes_0.html new file mode 100644 index 000000000..af8159ee6 --- /dev/null +++ b/web/html/doc/search/classes_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_0.js b/web/html/doc/search/classes_0.js new file mode 100644 index 000000000..a6fe7475f --- /dev/null +++ b/web/html/doc/search/classes_0.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['array_1287',['Array',['../classop_1_1_array.html',1,'op']]], + ['array_3c_20float_20_3e_1288',['Array< float >',['../classop_1_1_array.html',1,'op']]], + ['array_3c_20long_20long_20_3e_1289',['Array< long long >',['../classop_1_1_array.html',1,'op']]], + ['arraycpugpu_1290',['ArrayCpuGpu',['../classop_1_1_array_cpu_gpu.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_1.html b/web/html/doc/search/classes_1.html new file mode 100644 index 000000000..576e91689 --- /dev/null +++ b/web/html/doc/search/classes_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_1.js b/web/html/doc/search/classes_1.js new file mode 100644 index 000000000..fe0d93e07 --- /dev/null +++ b/web/html/doc/search/classes_1.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['bodypartconnectorcaffe_1291',['BodyPartConnectorCaffe',['../classop_1_1_body_part_connector_caffe.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_10.html b/web/html/doc/search/classes_10.html new file mode 100644 index 000000000..4af2c805a --- /dev/null +++ b/web/html/doc/search/classes_10.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_10.js b/web/html/doc/search/classes_10.js new file mode 100644 index 000000000..0084f82d2 --- /dev/null +++ b/web/html/doc/search/classes_10.js @@ -0,0 +1,11 @@ +var searchData= +[ + ['scaleandsizeextractor_1357',['ScaleAndSizeExtractor',['../classop_1_1_scale_and_size_extractor.html',1,'op']]], + ['spinnakerwrapper_1358',['SpinnakerWrapper',['../classop_1_1_spinnaker_wrapper.html',1,'op']]], + ['string_1359',['String',['../classop_1_1_string.html',1,'op']]], + ['subthread_1360',['SubThread',['../classop_1_1_sub_thread.html',1,'op']]], + ['subthreadnoqueue_1361',['SubThreadNoQueue',['../classop_1_1_sub_thread_no_queue.html',1,'op']]], + ['subthreadqueuein_1362',['SubThreadQueueIn',['../classop_1_1_sub_thread_queue_in.html',1,'op']]], + ['subthreadqueueinout_1363',['SubThreadQueueInOut',['../classop_1_1_sub_thread_queue_in_out.html',1,'op']]], + ['subthreadqueueout_1364',['SubThreadQueueOut',['../classop_1_1_sub_thread_queue_out.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_11.html b/web/html/doc/search/classes_11.html new file mode 100644 index 000000000..ed167c849 --- /dev/null +++ b/web/html/doc/search/classes_11.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_11.js b/web/html/doc/search/classes_11.js new file mode 100644 index 000000000..7914d54af --- /dev/null +++ b/web/html/doc/search/classes_11.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['thread_1365',['Thread',['../classop_1_1_thread.html',1,'op']]], + ['threadmanager_1366',['ThreadManager',['../classop_1_1_thread_manager.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_12.html b/web/html/doc/search/classes_12.html new file mode 100644 index 000000000..90bc9f579 --- /dev/null +++ b/web/html/doc/search/classes_12.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_12.js b/web/html/doc/search/classes_12.js new file mode 100644 index 000000000..d63e41dcd --- /dev/null +++ b/web/html/doc/search/classes_12.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['udpsender_1367',['UdpSender',['../classop_1_1_udp_sender.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_13.html b/web/html/doc/search/classes_13.html new file mode 100644 index 000000000..fadc16048 --- /dev/null +++ b/web/html/doc/search/classes_13.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_13.js b/web/html/doc/search/classes_13.js new file mode 100644 index 000000000..ba6474a8b --- /dev/null +++ b/web/html/doc/search/classes_13.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['verboseprinter_1368',['VerbosePrinter',['../classop_1_1_verbose_printer.html',1,'op']]], + ['videocapturereader_1369',['VideoCaptureReader',['../classop_1_1_video_capture_reader.html',1,'op']]], + ['videoreader_1370',['VideoReader',['../classop_1_1_video_reader.html',1,'op']]], + ['videosaver_1371',['VideoSaver',['../classop_1_1_video_saver.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_14.html b/web/html/doc/search/classes_14.html new file mode 100644 index 000000000..ad373c80c --- /dev/null +++ b/web/html/doc/search/classes_14.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_14.js b/web/html/doc/search/classes_14.js new file mode 100644 index 000000000..4a0229b97 --- /dev/null +++ b/web/html/doc/search/classes_14.js @@ -0,0 +1,57 @@ +var searchData= +[ + ['wcocojsonsaver_1372',['WCocoJsonSaver',['../classop_1_1_w_coco_json_saver.html',1,'op']]], + ['wcvmattoopinput_1373',['WCvMatToOpInput',['../classop_1_1_w_cv_mat_to_op_input.html',1,'op']]], + ['wcvmattoopoutput_1374',['WCvMatToOpOutput',['../classop_1_1_w_cv_mat_to_op_output.html',1,'op']]], + ['wdatumproducer_1375',['WDatumProducer',['../classop_1_1_w_datum_producer.html',1,'op']]], + ['webcamreader_1376',['WebcamReader',['../classop_1_1_webcam_reader.html',1,'op']]], + ['wfacedetector_1377',['WFaceDetector',['../classop_1_1_w_face_detector.html',1,'op']]], + ['wfacedetectoropencv_1378',['WFaceDetectorOpenCV',['../classop_1_1_w_face_detector_open_c_v.html',1,'op']]], + ['wfaceextractornet_1379',['WFaceExtractorNet',['../classop_1_1_w_face_extractor_net.html',1,'op']]], + ['wfacerenderer_1380',['WFaceRenderer',['../classop_1_1_w_face_renderer.html',1,'op']]], + ['wfacesaver_1381',['WFaceSaver',['../classop_1_1_w_face_saver.html',1,'op']]], + ['wfpsmax_1382',['WFpsMax',['../classop_1_1_w_fps_max.html',1,'op']]], + ['wgui_1383',['WGui',['../classop_1_1_w_gui.html',1,'op']]], + ['wgui3d_1384',['WGui3D',['../classop_1_1_w_gui3_d.html',1,'op']]], + ['wguiinfoadder_1385',['WGuiInfoAdder',['../classop_1_1_w_gui_info_adder.html',1,'op']]], + ['whanddetector_1386',['WHandDetector',['../classop_1_1_w_hand_detector.html',1,'op']]], + ['whanddetectorfromtxt_1387',['WHandDetectorFromTxt',['../classop_1_1_w_hand_detector_from_txt.html',1,'op']]], + ['whanddetectortracking_1388',['WHandDetectorTracking',['../classop_1_1_w_hand_detector_tracking.html',1,'op']]], + ['whanddetectorupdate_1389',['WHandDetectorUpdate',['../classop_1_1_w_hand_detector_update.html',1,'op']]], + ['whandextractornet_1390',['WHandExtractorNet',['../classop_1_1_w_hand_extractor_net.html',1,'op']]], + ['whandrenderer_1391',['WHandRenderer',['../classop_1_1_w_hand_renderer.html',1,'op']]], + ['whandsaver_1392',['WHandSaver',['../classop_1_1_w_hand_saver.html',1,'op']]], + ['wheatmapsaver_1393',['WHeatMapSaver',['../classop_1_1_w_heat_map_saver.html',1,'op']]], + ['widgenerator_1394',['WIdGenerator',['../classop_1_1_w_id_generator.html',1,'op']]], + ['wimagesaver_1395',['WImageSaver',['../classop_1_1_w_image_saver.html',1,'op']]], + ['wkeeptopnpeople_1396',['WKeepTopNPeople',['../classop_1_1_w_keep_top_n_people.html',1,'op']]], + ['wkeypointscaler_1397',['WKeypointScaler',['../classop_1_1_w_keypoint_scaler.html',1,'op']]], + ['wopoutputtocvmat_1398',['WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html',1,'op']]], + ['worker_1399',['Worker',['../classop_1_1_worker.html',1,'op']]], + ['worker_3c_20std_3a_3ashared_5fptr_3c_20tdatums_20_3e_20_3e_1400',['Worker< std::shared_ptr< TDatums > >',['../classop_1_1_worker.html',1,'op']]], + ['workerconsumer_1401',['WorkerConsumer',['../classop_1_1_worker_consumer.html',1,'op']]], + ['workerproducer_1402',['WorkerProducer',['../classop_1_1_worker_producer.html',1,'op']]], + ['workerproducer_3c_20std_3a_3ashared_5fptr_3c_20std_3a_3avector_3c_20std_3a_3ashared_5fptr_3c_20tdatum_20_3e_20_3e_20_3e_20_3e_1403',['WorkerProducer< std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > >',['../classop_1_1_worker_producer.html',1,'op']]], + ['wpeoplejsonsaver_1404',['WPeopleJsonSaver',['../classop_1_1_w_people_json_saver.html',1,'op']]], + 
['wpersonidextractor_1405',['WPersonIdExtractor',['../classop_1_1_w_person_id_extractor.html',1,'op']]], + ['wposeextractor_1406',['WPoseExtractor',['../classop_1_1_w_pose_extractor.html',1,'op']]], + ['wposeextractornet_1407',['WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html',1,'op']]], + ['wposerenderer_1408',['WPoseRenderer',['../classop_1_1_w_pose_renderer.html',1,'op']]], + ['wposesaver_1409',['WPoseSaver',['../classop_1_1_w_pose_saver.html',1,'op']]], + ['wposetriangulation_1410',['WPoseTriangulation',['../classop_1_1_w_pose_triangulation.html',1,'op']]], + ['wqueueassembler_1411',['WQueueAssembler',['../classop_1_1_w_queue_assembler.html',1,'op']]], + ['wqueueorderer_1412',['WQueueOrderer',['../classop_1_1_w_queue_orderer.html',1,'op']]], + ['wrapperstructextra_1413',['WrapperStructExtra',['../structop_1_1_wrapper_struct_extra.html',1,'op']]], + ['wrapperstructface_1414',['WrapperStructFace',['../structop_1_1_wrapper_struct_face.html',1,'op']]], + ['wrapperstructgui_1415',['WrapperStructGui',['../structop_1_1_wrapper_struct_gui.html',1,'op']]], + ['wrapperstructhand_1416',['WrapperStructHand',['../structop_1_1_wrapper_struct_hand.html',1,'op']]], + ['wrapperstructinput_1417',['WrapperStructInput',['../structop_1_1_wrapper_struct_input.html',1,'op']]], + ['wrapperstructoutput_1418',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html',1,'op']]], + ['wrapperstructpose_1419',['WrapperStructPose',['../structop_1_1_wrapper_struct_pose.html',1,'op']]], + ['wrappert_1420',['WrapperT',['../classop_1_1_wrapper_t.html',1,'op']]], + ['wscaleandsizeextractor_1421',['WScaleAndSizeExtractor',['../classop_1_1_w_scale_and_size_extractor.html',1,'op']]], + ['wudpsender_1422',['WUdpSender',['../classop_1_1_w_udp_sender.html',1,'op']]], + ['wverboseprinter_1423',['WVerbosePrinter',['../classop_1_1_w_verbose_printer.html',1,'op']]], + ['wvideosaver_1424',['WVideoSaver',['../classop_1_1_w_video_saver.html',1,'op']]], + ['wvideosaver3d_1425',['WVideoSaver3D',['../classop_1_1_w_video_saver3_d.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_2.html b/web/html/doc/search/classes_2.html new file mode 100644 index 000000000..956405e5a --- /dev/null +++ b/web/html/doc/search/classes_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_2.js b/web/html/doc/search/classes_2.js new file mode 100644 index 000000000..7a056ec93 --- /dev/null +++ b/web/html/doc/search/classes_2.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['cameraparameterreader_1292',['CameraParameterReader',['../classop_1_1_camera_parameter_reader.html',1,'op']]], + ['cocojsonsaver_1293',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html',1,'op']]], + ['cvmattoopinput_1294',['CvMatToOpInput',['../classop_1_1_cv_mat_to_op_input.html',1,'op']]], + ['cvmattoopoutput_1295',['CvMatToOpOutput',['../classop_1_1_cv_mat_to_op_output.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_3.html b/web/html/doc/search/classes_3.html new file mode 100644 index 000000000..d33343bc1 --- /dev/null +++ b/web/html/doc/search/classes_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_3.js b/web/html/doc/search/classes_3.js new file mode 100644 index 000000000..ab28f8d77 --- /dev/null +++ b/web/html/doc/search/classes_3.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['datum_1296',['Datum',['../structop_1_1_datum.html',1,'op']]], + ['datumproducer_1297',['DatumProducer',['../classop_1_1_datum_producer.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_4.html b/web/html/doc/search/classes_4.html new file mode 100644 index 000000000..8430b07fe --- /dev/null +++ b/web/html/doc/search/classes_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_4.js b/web/html/doc/search/classes_4.js new file mode 100644 index 000000000..82779730c --- /dev/null +++ b/web/html/doc/search/classes_4.js @@ -0,0 +1,13 @@ +var searchData= +[ + ['facecpurenderer_1298',['FaceCpuRenderer',['../classop_1_1_face_cpu_renderer.html',1,'op']]], + ['facedetector_1299',['FaceDetector',['../classop_1_1_face_detector.html',1,'op']]], + ['facedetectoropencv_1300',['FaceDetectorOpenCV',['../classop_1_1_face_detector_open_c_v.html',1,'op']]], + ['faceextractorcaffe_1301',['FaceExtractorCaffe',['../classop_1_1_face_extractor_caffe.html',1,'op']]], + ['faceextractornet_1302',['FaceExtractorNet',['../classop_1_1_face_extractor_net.html',1,'op']]], + ['facegpurenderer_1303',['FaceGpuRenderer',['../classop_1_1_face_gpu_renderer.html',1,'op']]], + ['facerenderer_1304',['FaceRenderer',['../classop_1_1_face_renderer.html',1,'op']]], + ['filesaver_1305',['FileSaver',['../classop_1_1_file_saver.html',1,'op']]], + ['flirreader_1306',['FlirReader',['../classop_1_1_flir_reader.html',1,'op']]], + ['framedisplayer_1307',['FrameDisplayer',['../classop_1_1_frame_displayer.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_5.html b/web/html/doc/search/classes_5.html new file mode 100644 index 000000000..c2f1b767b --- /dev/null +++ b/web/html/doc/search/classes_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_5.js b/web/html/doc/search/classes_5.js new file mode 100644 index 000000000..5cd39bab8 --- /dev/null +++ b/web/html/doc/search/classes_5.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['gpurenderer_1308',['GpuRenderer',['../classop_1_1_gpu_renderer.html',1,'op']]], + ['gui_1309',['Gui',['../classop_1_1_gui.html',1,'op']]], + ['gui3d_1310',['Gui3D',['../classop_1_1_gui3_d.html',1,'op']]], + ['guiinfoadder_1311',['GuiInfoAdder',['../classop_1_1_gui_info_adder.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_6.html b/web/html/doc/search/classes_6.html new file mode 100644 index 000000000..e39847ce8 --- /dev/null +++ b/web/html/doc/search/classes_6.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_6.js b/web/html/doc/search/classes_6.js new file mode 100644 index 000000000..40de88757 --- /dev/null +++ b/web/html/doc/search/classes_6.js @@ -0,0 +1,11 @@ +var searchData= +[ + ['handcpurenderer_1312',['HandCpuRenderer',['../classop_1_1_hand_cpu_renderer.html',1,'op']]], + ['handdetector_1313',['HandDetector',['../classop_1_1_hand_detector.html',1,'op']]], + ['handdetectorfromtxt_1314',['HandDetectorFromTxt',['../classop_1_1_hand_detector_from_txt.html',1,'op']]], + ['handextractorcaffe_1315',['HandExtractorCaffe',['../classop_1_1_hand_extractor_caffe.html',1,'op']]], + ['handextractornet_1316',['HandExtractorNet',['../classop_1_1_hand_extractor_net.html',1,'op']]], + ['handgpurenderer_1317',['HandGpuRenderer',['../classop_1_1_hand_gpu_renderer.html',1,'op']]], + ['handrenderer_1318',['HandRenderer',['../classop_1_1_hand_renderer.html',1,'op']]], + ['heatmapsaver_1319',['HeatMapSaver',['../classop_1_1_heat_map_saver.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_7.html b/web/html/doc/search/classes_7.html new file mode 100644 index 000000000..a2c4d1a39 --- /dev/null +++ b/web/html/doc/search/classes_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_7.js b/web/html/doc/search/classes_7.js new file mode 100644 index 000000000..4803daae1 --- /dev/null +++ b/web/html/doc/search/classes_7.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['imagedirectoryreader_1320',['ImageDirectoryReader',['../classop_1_1_image_directory_reader.html',1,'op']]], + ['imagesaver_1321',['ImageSaver',['../classop_1_1_image_saver.html',1,'op']]], + ['ipcamerareader_1322',['IpCameraReader',['../classop_1_1_ip_camera_reader.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_8.html b/web/html/doc/search/classes_8.html new file mode 100644 index 000000000..17003e480 --- /dev/null +++ b/web/html/doc/search/classes_8.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_8.js b/web/html/doc/search/classes_8.js new file mode 100644 index 000000000..f46d8dcd6 --- /dev/null +++ b/web/html/doc/search/classes_8.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['jsonofstream_1323',['JsonOfstream',['../classop_1_1_json_ofstream.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_9.html b/web/html/doc/search/classes_9.html new file mode 100644 index 000000000..b8afa8cba --- /dev/null +++ b/web/html/doc/search/classes_9.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_9.js b/web/html/doc/search/classes_9.js new file mode 100644 index 000000000..9d04c9e78 --- /dev/null +++ b/web/html/doc/search/classes_9.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['keeptopnpeople_1324',['KeepTopNPeople',['../classop_1_1_keep_top_n_people.html',1,'op']]], + ['keypointsaver_1325',['KeypointSaver',['../classop_1_1_keypoint_saver.html',1,'op']]], + ['keypointscaler_1326',['KeypointScaler',['../classop_1_1_keypoint_scaler.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_a.html b/web/html/doc/search/classes_a.html new file mode 100644 index 000000000..6788af270 --- /dev/null +++ b/web/html/doc/search/classes_a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_a.js b/web/html/doc/search/classes_a.js new file mode 100644 index 000000000..0d81bec0d --- /dev/null +++ b/web/html/doc/search/classes_a.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['matrix_1327',['Matrix',['../classop_1_1_matrix.html',1,'op']]], + ['maximumcaffe_1328',['MaximumCaffe',['../classop_1_1_maximum_caffe.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_b.html b/web/html/doc/search/classes_b.html new file mode 100644 index 000000000..3fcb49858 --- /dev/null +++ b/web/html/doc/search/classes_b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_b.js b/web/html/doc/search/classes_b.js new file mode 100644 index 000000000..050421544 --- /dev/null +++ b/web/html/doc/search/classes_b.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['net_1329',['Net',['../classop_1_1_net.html',1,'op']]], + ['netcaffe_1330',['NetCaffe',['../classop_1_1_net_caffe.html',1,'op']]], + ['netopencv_1331',['NetOpenCv',['../classop_1_1_net_open_cv.html',1,'op']]], + ['nmscaffe_1332',['NmsCaffe',['../classop_1_1_nms_caffe.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_c.html b/web/html/doc/search/classes_c.html new file mode 100644 index 000000000..2f7b1f3da --- /dev/null +++ b/web/html/doc/search/classes_c.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_c.js b/web/html/doc/search/classes_c.js new file mode 100644 index 000000000..fb0aa62ca --- /dev/null +++ b/web/html/doc/search/classes_c.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['opoutputtocvmat_1333',['OpOutputToCvMat',['../classop_1_1_op_output_to_cv_mat.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_d.html b/web/html/doc/search/classes_d.html new file mode 100644 index 000000000..f9011e70f --- /dev/null +++ b/web/html/doc/search/classes_d.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_d.js b/web/html/doc/search/classes_d.js new file mode 100644 index 000000000..1b18fbd3e --- /dev/null +++ b/web/html/doc/search/classes_d.js @@ -0,0 +1,20 @@ +var searchData= +[ + ['peoplejsonsaver_1334',['PeopleJsonSaver',['../classop_1_1_people_json_saver.html',1,'op']]], + ['personidextractor_1335',['PersonIdExtractor',['../classop_1_1_person_id_extractor.html',1,'op']]], + ['persontracker_1336',['PersonTracker',['../classop_1_1_person_tracker.html',1,'op']]], + ['point_1337',['Point',['../structop_1_1_point.html',1,'op']]], + ['point_3c_20int_20_3e_1338',['Point< int >',['../structop_1_1_point.html',1,'op']]], + ['pointercontainergreater_1339',['PointerContainerGreater',['../classop_1_1_pointer_container_greater.html',1,'op']]], + ['pointercontainerless_1340',['PointerContainerLess',['../classop_1_1_pointer_container_less.html',1,'op']]], + ['posecpurenderer_1341',['PoseCpuRenderer',['../classop_1_1_pose_cpu_renderer.html',1,'op']]], + ['poseextractor_1342',['PoseExtractor',['../classop_1_1_pose_extractor.html',1,'op']]], + ['poseextractorcaffe_1343',['PoseExtractorCaffe',['../classop_1_1_pose_extractor_caffe.html',1,'op']]], + ['poseextractornet_1344',['PoseExtractorNet',['../classop_1_1_pose_extractor_net.html',1,'op']]], + ['posegpurenderer_1345',['PoseGpuRenderer',['../classop_1_1_pose_gpu_renderer.html',1,'op']]], + ['poserenderer_1346',['PoseRenderer',['../classop_1_1_pose_renderer.html',1,'op']]], + ['posetriangulation_1347',['PoseTriangulation',['../classop_1_1_pose_triangulation.html',1,'op']]], + ['priorityqueue_1348',['PriorityQueue',['../classop_1_1_priority_queue.html',1,'op']]], + ['producer_1349',['Producer',['../classop_1_1_producer.html',1,'op']]], + ['profiler_1350',['Profiler',['../classop_1_1_profiler.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_e.html b/web/html/doc/search/classes_e.html new file mode 100644 index 000000000..bb33dcfa5 --- /dev/null +++ b/web/html/doc/search/classes_e.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_e.js b/web/html/doc/search/classes_e.js new file mode 100644 index 000000000..a39890631 --- /dev/null +++ b/web/html/doc/search/classes_e.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['queue_1351',['Queue',['../classop_1_1_queue.html',1,'op']]], + ['queuebase_1352',['QueueBase',['../classop_1_1_queue_base.html',1,'op']]], + ['queuebase_3c_20tdatums_2c_20std_3a_3aqueue_3c_20tdatums_20_3e_20_3e_1353',['QueueBase< TDatums, std::queue< TDatums > >',['../classop_1_1_queue_base.html',1,'op']]] +]; diff --git a/web/html/doc/search/classes_f.html b/web/html/doc/search/classes_f.html new file mode 100644 index 000000000..d1b67daa6 --- /dev/null +++ b/web/html/doc/search/classes_f.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/classes_f.js b/web/html/doc/search/classes_f.js new file mode 100644 index 000000000..588b4334d --- /dev/null +++ b/web/html/doc/search/classes_f.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['rectangle_1354',['Rectangle',['../structop_1_1_rectangle.html',1,'op']]], + ['renderer_1355',['Renderer',['../classop_1_1_renderer.html',1,'op']]], + ['resizeandmergecaffe_1356',['ResizeAndMergeCaffe',['../classop_1_1_resize_and_merge_caffe.html',1,'op']]] +]; diff --git a/web/html/doc/search/close.svg b/web/html/doc/search/close.svg new file mode 100644 index 000000000..a933eea1a --- /dev/null +++ b/web/html/doc/search/close.svg @@ -0,0 +1,31 @@ + + + + + + image/svg+xml + + + + + + + + diff --git a/web/html/doc/search/defines_0.html b/web/html/doc/search/defines_0.html new file mode 100644 index 000000000..15cc3de38 --- /dev/null +++ b/web/html/doc/search/defines_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_0.js b/web/html/doc/search/defines_0.js new file mode 100644 index 000000000..aed759011 --- /dev/null +++ b/web/html/doc/search/defines_0.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['base_5fdatum_2608',['BASE_DATUM',['../datum_8hpp.html#a03de732ffb0edab021fb745b21a05fdd',1,'datum.hpp']]], + ['base_5fdatums_2609',['BASE_DATUMS',['../datum_8hpp.html#aa0a67922cf9df1e30dad2c32785b147e',1,'datum.hpp']]], + ['base_5fdatums_5fsh_2610',['BASE_DATUMS_SH',['../datum_8hpp.html#ae2331967a21fec02341dec3ca39d3809',1,'datum.hpp']]] +]; diff --git a/web/html/doc/search/defines_1.html b/web/html/doc/search/defines_1.html new file mode 100644 index 000000000..c49009c71 --- /dev/null +++ b/web/html/doc/search/defines_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_1.js b/web/html/doc/search/defines_1.js new file mode 100644 index 000000000..636ab463c --- /dev/null +++ b/web/html/doc/search/defines_1.js @@ -0,0 +1,13 @@ +var searchData= +[ + ['compile_5ftemplate_5fbasic_5ftypes_2611',['COMPILE_TEMPLATE_BASIC_TYPES',['../macros_8hpp.html#a6bf32c65e0f388d5b09d8b2424416c0e',1,'macros.hpp']]], + ['compile_5ftemplate_5fbasic_5ftypes_5fclass_2612',['COMPILE_TEMPLATE_BASIC_TYPES_CLASS',['../macros_8hpp.html#a60e010d8a2352d94b8b57d97cf4a7d73',1,'macros.hpp']]], + ['compile_5ftemplate_5fbasic_5ftypes_5fstruct_2613',['COMPILE_TEMPLATE_BASIC_TYPES_STRUCT',['../macros_8hpp.html#ac5627744abe5fd0c8eacfe9c7f8bd32e',1,'macros.hpp']]], + ['compile_5ftemplate_5fdatum_2614',['COMPILE_TEMPLATE_DATUM',['../datum_8hpp.html#af87cd873cebb915837ae27248f67e822',1,'datum.hpp']]], + ['compile_5ftemplate_5ffloating_5fint_5ftypes_2615',['COMPILE_TEMPLATE_FLOATING_INT_TYPES',['../macros_8hpp.html#ad0aef3afcb2a9da69c3453426f56b0ac',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5fint_5ftypes_5fclass_2616',['COMPILE_TEMPLATE_FLOATING_INT_TYPES_CLASS',['../macros_8hpp.html#a7bacf9f65110ec8292bc69e1eb0f426e',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5fint_5ftypes_5fstruct_2617',['COMPILE_TEMPLATE_FLOATING_INT_TYPES_STRUCT',['../macros_8hpp.html#acc5af19a77b18cf5aa2e1f82e2e484dd',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5ftypes_2618',['COMPILE_TEMPLATE_FLOATING_TYPES',['../macros_8hpp.html#a80404791b46a15fd601feaa11f1e5028',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5ftypes_5fclass_2619',['COMPILE_TEMPLATE_FLOATING_TYPES_CLASS',['../macros_8hpp.html#a1eadbb31e92e7fbc799bf7cf4d2a6f50',1,'macros.hpp']]], + ['compile_5ftemplate_5ffloating_5ftypes_5fstruct_2620',['COMPILE_TEMPLATE_FLOATING_TYPES_STRUCT',['../macros_8hpp.html#af9fed593b7a4237bc6ede717a1ae70f0',1,'macros.hpp']]] +]; diff --git a/web/html/doc/search/defines_2.html b/web/html/doc/search/defines_2.html new file mode 100644 index 000000000..c55101115 --- /dev/null +++ b/web/html/doc/search/defines_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_2.js b/web/html/doc/search/defines_2.js new file mode 100644 index 000000000..305825aac --- /dev/null +++ b/web/html/doc/search/defines_2.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['define_5ftemplate_5fdatum_2621',['DEFINE_TEMPLATE_DATUM',['../datum_8hpp.html#ad11d52b69bc54e48ceb2f5787f700431',1,'datum.hpp']]], + ['delete_5fcopy_2622',['DELETE_COPY',['../macros_8hpp.html#abef96b5dd35dd9d44ad27ddf0e2f5f2e',1,'macros.hpp']]] +]; diff --git a/web/html/doc/search/defines_3.html b/web/html/doc/search/defines_3.html new file mode 100644 index 000000000..8c6d21536 --- /dev/null +++ b/web/html/doc/search/defines_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_3.js b/web/html/doc/search/defines_3.js new file mode 100644 index 000000000..5db083e93 --- /dev/null +++ b/web/html/doc/search/defines_3.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['face_5fcolors_5frender_5fgpu_2623',['FACE_COLORS_RENDER_GPU',['../face_parameters_8hpp.html#a740a6228babfde5f18fba6fc033ef0ed',1,'faceParameters.hpp']]], + ['face_5fpairs_5frender_5fgpu_2624',['FACE_PAIRS_RENDER_GPU',['../face_parameters_8hpp.html#a7e2f64c1349d6a881c6ceb49757e099a',1,'faceParameters.hpp']]], + ['face_5fscales_5frender_5fgpu_2625',['FACE_SCALES_RENDER_GPU',['../face_parameters_8hpp.html#a1a7ddb1a137c44091a1b4161725adfa0',1,'faceParameters.hpp']]] +]; diff --git a/web/html/doc/search/defines_4.html b/web/html/doc/search/defines_4.html new file mode 100644 index 000000000..f4afac124 --- /dev/null +++ b/web/html/doc/search/defines_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_4.js b/web/html/doc/search/defines_4.js new file mode 100644 index 000000000..9c7bc4c97 --- /dev/null +++ b/web/html/doc/search/defines_4.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['hand_5fcolors_5frender_5fgpu_2626',['HAND_COLORS_RENDER_GPU',['../hand_parameters_8hpp.html#a5ca06d0202756b9a3b8825ccbafc0558',1,'handParameters.hpp']]], + ['hand_5fpairs_5frender_5fgpu_2627',['HAND_PAIRS_RENDER_GPU',['../hand_parameters_8hpp.html#a5ec40add22e28bc75596c75a7be8a692',1,'handParameters.hpp']]], + ['hand_5fscales_5frender_5fgpu_2628',['HAND_SCALES_RENDER_GPU',['../hand_parameters_8hpp.html#a799d629d7fddd7f0daf40ccdae0293b9',1,'handParameters.hpp']]] +]; diff --git a/web/html/doc/search/defines_5.html b/web/html/doc/search/defines_5.html new file mode 100644 index 000000000..8c40d123b --- /dev/null +++ b/web/html/doc/search/defines_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_5.js b/web/html/doc/search/defines_5.js new file mode 100644 index 000000000..f2610c6cc --- /dev/null +++ b/web/html/doc/search/defines_5.js @@ -0,0 +1,19 @@ +var searchData= +[ + ['op_5fapi_2629',['OP_API',['../macros_8hpp.html#a4ba443bb7a0e5dbe8054a9ac37a5e000',1,'macros.hpp']]], + ['op_5fconst_5fmat_5freturn_5ffunction_2630',['OP_CONST_MAT_RETURN_FUNCTION',['../matrix_8hpp.html#adb6fa4cc9ba470382895a448b7cf1257',1,'matrix.hpp']]], + ['op_5fconst_5fmat_5fvoid_5ffunction_2631',['OP_CONST_MAT_VOID_FUNCTION',['../matrix_8hpp.html#a1b810570f8207983b20ea93e8f9f71a2',1,'matrix.hpp']]], + ['op_5fcuda_5fprofile_5fend_2632',['OP_CUDA_PROFILE_END',['../profiler_8hpp.html#a774eaef2d2d68028026f52d554a8ba45',1,'profiler.hpp']]], + ['op_5fcuda_5fprofile_5finit_2633',['OP_CUDA_PROFILE_INIT',['../profiler_8hpp.html#a543c2d65f7d0e835513310d83fc08589',1,'profiler.hpp']]], + ['op_5fcv2opconstmat_2634',['OP_CV2OPCONSTMAT',['../matrix_8hpp.html#ad0bd05468e4619f7061bb513fc2cb86d',1,'matrix.hpp']]], + ['op_5fcv2opmat_2635',['OP_CV2OPMAT',['../matrix_8hpp.html#a00c8b0a04adbe37ba8b6d08e0ba23287',1,'matrix.hpp']]], + ['op_5fcv2opvectormat_2636',['OP_CV2OPVECTORMAT',['../matrix_8hpp.html#ad2790de0442f8b1a303b781ffe171c6e',1,'matrix.hpp']]], + ['op_5fmat_5freturn_5ffunction_2637',['OP_MAT_RETURN_FUNCTION',['../matrix_8hpp.html#a1a8232a2c14792f9315d85004973c33c',1,'matrix.hpp']]], + ['op_5fmat_5fvoid_5ffunction_2638',['OP_MAT_VOID_FUNCTION',['../matrix_8hpp.html#a2bab8a00953b4ba71a8b965347f7dd92',1,'matrix.hpp']]], + ['op_5fop2cvconstmat_2639',['OP_OP2CVCONSTMAT',['../matrix_8hpp.html#a1c9288885fc29db5560426556d3fba41',1,'matrix.hpp']]], + ['op_5fop2cvmat_2640',['OP_OP2CVMAT',['../matrix_8hpp.html#af06d0e620916e1f08ca609fb02f25dc8',1,'matrix.hpp']]], + ['op_5fop2cvvectormat_2641',['OP_OP2CVVECTORMAT',['../matrix_8hpp.html#a1a8d8a14fa0269d045f8d8c8228098af',1,'matrix.hpp']]], + ['op_5fprofile_5fend_2642',['OP_PROFILE_END',['../profiler_8hpp.html#ae1f762d7d0c1f5ad10304ef82bd85516',1,'profiler.hpp']]], + ['op_5fprofile_5finit_2643',['OP_PROFILE_INIT',['../profiler_8hpp.html#a6211ca30ec696c346d0b3f2c056e05e6',1,'profiler.hpp']]], + ['overload_5fc_5fout_2644',['OVERLOAD_C_OUT',['../macros_8hpp.html#aa883b8ec96d2804b37d3bfb0bd4c5f16',1,'macros.hpp']]] +]; diff --git a/web/html/doc/search/defines_6.html b/web/html/doc/search/defines_6.html new file mode 100644 index 000000000..c6c0f4894 --- /dev/null +++ b/web/html/doc/search/defines_6.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_6.js b/web/html/doc/search/defines_6.js new file mode 100644 index 000000000..befc7e903 --- /dev/null +++ b/web/html/doc/search/defines_6.js @@ -0,0 +1,30 @@ +var searchData= +[ + ['pose_5fbody_5f135_5fcolors_5frender_5fgpu_2645',['POSE_BODY_135_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a45b08569481c3bf02eceab0d911b2bf6',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f135_5fpairs_5frender_5fgpu_2646',['POSE_BODY_135_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a2c5ec8c89146a0535f4f29f861f4e248',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f135_5fscales_5frender_5fgpu_2647',['POSE_BODY_135_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a426402ce79f98928f30037da33c2a349',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f19_5fcolors_5frender_5fgpu_2648',['POSE_BODY_19_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a17cec2005928720d6da0e83ba26cca01',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f19_5fpairs_5frender_5fgpu_2649',['POSE_BODY_19_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a497bfbf7fddb6e960565ec70bb6b2ad1',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f19_5fscales_5frender_5fgpu_2650',['POSE_BODY_19_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a791ed14d0f2a65f850c94154b996826c',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f23_5fcolors_5frender_5fgpu_2651',['POSE_BODY_23_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#aeb1e2dd8178c15024e372185e2e5cf54',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f23_5fpairs_5frender_5fgpu_2652',['POSE_BODY_23_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a5f3db3bbb18fe8d978661f3c5417c110',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f23_5fscales_5frender_5fgpu_2653',['POSE_BODY_23_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#aa257db7f46ddaa7fe838f659b8e5ed66',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25_5fcolors_5frender_5fgpu_2654',['POSE_BODY_25_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a456b8ce498f455af926215d91f6b6087',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25_5fpairs_5frender_5fgpu_2655',['POSE_BODY_25_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a216b861af0ff0c237be529dc204ed05e',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25_5fscales_5frender_5fgpu_2656',['POSE_BODY_25_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#ae30e7b56c09200d60f05acba38a8bf05',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25b_5fcolors_5frender_5fgpu_2657',['POSE_BODY_25B_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a1e4980010228bfd1e9e1387c23a3ab6a',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25b_5fpairs_5frender_5fgpu_2658',['POSE_BODY_25B_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a1b97e47c182baf7de08af03a8ba397e3',1,'poseParametersRender.hpp']]], + ['pose_5fbody_5f25b_5fscales_5frender_5fgpu_2659',['POSE_BODY_25B_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a04ebdf33bf0ff159d144dab0ebf1c2ce',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f12_5fcolors_5frender_5fgpu_2660',['POSE_CAR_12_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a6be8d3dedaf015f795625d1df19876aa',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f12_5fpairs_5frender_5fgpu_2661',['POSE_CAR_12_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a253206407787fc26629e6e46f60d7be2',1,'poseParametersRender.hpp']]], + 
['pose_5fcar_5f12_5fscales_5frender_5fgpu_2662',['POSE_CAR_12_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a7382830f0c24beaea601444cb5962f06',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f22_5fcolors_5frender_5fgpu_2663',['POSE_CAR_22_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#aaecdba75da05e8bfc90e4393c88ab6e6',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f22_5fpairs_5frender_5fgpu_2664',['POSE_CAR_22_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a0065da73d9e649360d458fc670ee0f95',1,'poseParametersRender.hpp']]], + ['pose_5fcar_5f22_5fscales_5frender_5fgpu_2665',['POSE_CAR_22_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a0afb6a9782a4ad8bd3ac41bd2436fefc',1,'poseParametersRender.hpp']]], + ['pose_5fcoco_5fcolors_5frender_5fgpu_2666',['POSE_COCO_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a8b293ab02337be3f90218c5b824ece06',1,'poseParametersRender.hpp']]], + ['pose_5fcoco_5fpairs_5frender_5fgpu_2667',['POSE_COCO_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a5afab27fbbebc71b8753a20dd6c9a322',1,'poseParametersRender.hpp']]], + ['pose_5fcoco_5fscales_5frender_5fgpu_2668',['POSE_COCO_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a8cd3d34880f73dc73b2feb28370e86ec',1,'poseParametersRender.hpp']]], + ['pose_5fmpi_5fcolors_5frender_5fgpu_2669',['POSE_MPI_COLORS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a7fdd75b1478d65f11ebc77144662958c',1,'poseParametersRender.hpp']]], + ['pose_5fmpi_5fpairs_5frender_5fgpu_2670',['POSE_MPI_PAIRS_RENDER_GPU',['../pose_parameters_render_8hpp.html#a7987426d997b6b040302d25fd07403ac',1,'poseParametersRender.hpp']]], + ['pose_5fmpi_5fscales_5frender_5fgpu_2671',['POSE_MPI_SCALES_RENDER_GPU',['../pose_parameters_render_8hpp.html#a32e98c9dd9e6f38c597c7924582570d0',1,'poseParametersRender.hpp']]] +]; diff --git a/web/html/doc/search/defines_7.html b/web/html/doc/search/defines_7.html new file mode 100644 index 000000000..81d230492 --- /dev/null +++ b/web/html/doc/search/defines_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/defines_7.js b/web/html/doc/search/defines_7.js new file mode 100644 index 000000000..aa23ddc1b --- /dev/null +++ b/web/html/doc/search/defines_7.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['unused_2672',['UNUSED',['../macros_8hpp.html#af57a843cfdae82e064838c20b3b54851',1,'macros.hpp']]] +]; diff --git a/web/html/doc/search/enums_0.html b/web/html/doc/search/enums_0.html new file mode 100644 index 000000000..141fff57b --- /dev/null +++ b/web/html/doc/search/enums_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_0.js b/web/html/doc/search/enums_0.js new file mode 100644 index 000000000..4e00aeea0 --- /dev/null +++ b/web/html/doc/search/enums_0.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['cocojsonformat_2489',['CocoJsonFormat',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715ac',1,'op']]] +]; diff --git a/web/html/doc/search/enums_1.html b/web/html/doc/search/enums_1.html new file mode 100644 index 000000000..d29f3b16d --- /dev/null +++ b/web/html/doc/search/enums_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_1.js b/web/html/doc/search/enums_1.js new file mode 100644 index 000000000..0fbcddba3 --- /dev/null +++ b/web/html/doc/search/enums_1.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['dataformat_2490',['DataFormat',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7e',1,'op']]], + ['detector_2491',['Detector',['../namespaceop.html#a1070db47220e17cf37df40411350f6fb',1,'op']]], + ['displaymode_2492',['DisplayMode',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6',1,'op']]] +]; diff --git a/web/html/doc/search/enums_2.html b/web/html/doc/search/enums_2.html new file mode 100644 index 000000000..59aadf2cb --- /dev/null +++ b/web/html/doc/search/enums_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_2.js b/web/html/doc/search/enums_2.js new file mode 100644 index 000000000..23c4cb031 --- /dev/null +++ b/web/html/doc/search/enums_2.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['elementtorender_2493',['ElementToRender',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993e',1,'op']]], + ['errormode_2494',['ErrorMode',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400c',1,'op']]], + ['extensions_2495',['Extensions',['../namespaceop.html#a553bd31855c20a0d14e4c44a20bd91da',1,'op']]] +]; diff --git a/web/html/doc/search/enums_3.html b/web/html/doc/search/enums_3.html new file mode 100644 index 000000000..87c174430 --- /dev/null +++ b/web/html/doc/search/enums_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_3.js b/web/html/doc/search/enums_3.js new file mode 100644 index 000000000..8af079651 --- /dev/null +++ b/web/html/doc/search/enums_3.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['fullscreenmode_2496',['FullScreenMode',['../namespaceop.html#a6c22a72ce93c64e7582cb670492a50bf',1,'op']]] +]; diff --git a/web/html/doc/search/enums_4.html b/web/html/doc/search/enums_4.html new file mode 100644 index 000000000..90dda139c --- /dev/null +++ b/web/html/doc/search/enums_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_4.js b/web/html/doc/search/enums_4.js new file mode 100644 index 000000000..216a9dd95 --- /dev/null +++ b/web/html/doc/search/enums_4.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['gpumode_2497',['GpuMode',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7f',1,'op']]] +]; diff --git a/web/html/doc/search/enums_5.html b/web/html/doc/search/enums_5.html new file mode 100644 index 000000000..c57b88590 --- /dev/null +++ b/web/html/doc/search/enums_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_5.js b/web/html/doc/search/enums_5.js new file mode 100644 index 000000000..ca345bc97 --- /dev/null +++ b/web/html/doc/search/enums_5.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['heatmaptype_2498',['HeatMapType',['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226b',1,'op']]] +]; diff --git a/web/html/doc/search/enums_6.html b/web/html/doc/search/enums_6.html new file mode 100644 index 000000000..ea5e0a682 --- /dev/null +++ b/web/html/doc/search/enums_6.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_6.js b/web/html/doc/search/enums_6.js new file mode 100644 index 000000000..901230bde --- /dev/null +++ b/web/html/doc/search/enums_6.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['logmode_2499',['LogMode',['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94',1,'op']]] +]; diff --git a/web/html/doc/search/enums_7.html b/web/html/doc/search/enums_7.html new file mode 100644 index 000000000..d18087574 --- /dev/null +++ b/web/html/doc/search/enums_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_7.js b/web/html/doc/search/enums_7.js new file mode 100644 index 000000000..62be64017 --- /dev/null +++ b/web/html/doc/search/enums_7.js @@ -0,0 +1,10 @@ +var searchData= +[ + ['posemode_2500',['PoseMode',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09',1,'op']]], + ['posemodel_2501',['PoseModel',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261f',1,'op']]], + ['poseproperty_2502',['PoseProperty',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0e',1,'op']]], + ['priority_2503',['Priority',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14',1,'op']]], + ['producerfpsmode_2504',['ProducerFpsMode',['../namespaceop.html#ac0230b669b296920c0cfc41b7587268f',1,'op']]], + ['producerproperty_2505',['ProducerProperty',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774',1,'op']]], + ['producertype_2506',['ProducerType',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332',1,'op']]] +]; diff --git a/web/html/doc/search/enums_8.html b/web/html/doc/search/enums_8.html new file mode 100644 index 000000000..71aad59ed --- /dev/null +++ b/web/html/doc/search/enums_8.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_8.js b/web/html/doc/search/enums_8.js new file mode 100644 index 000000000..b1f280303 --- /dev/null +++ b/web/html/doc/search/enums_8.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['rendermode_2507',['RenderMode',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033',1,'op']]] +]; diff --git a/web/html/doc/search/enums_9.html b/web/html/doc/search/enums_9.html new file mode 100644 index 000000000..22c3d51d5 --- /dev/null +++ b/web/html/doc/search/enums_9.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_9.js b/web/html/doc/search/enums_9.js new file mode 100644 index 000000000..cb7969b61 --- /dev/null +++ b/web/html/doc/search/enums_9.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['scalemode_2508',['ScaleMode',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bda',1,'op']]] +]; diff --git a/web/html/doc/search/enums_a.html b/web/html/doc/search/enums_a.html new file mode 100644 index 000000000..ca3b60e26 --- /dev/null +++ b/web/html/doc/search/enums_a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_a.js b/web/html/doc/search/enums_a.js new file mode 100644 index 000000000..6842c45b0 --- /dev/null +++ b/web/html/doc/search/enums_a.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['threadmanagermode_2509',['ThreadManagerMode',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36',1,'op']]] +]; diff --git a/web/html/doc/search/enums_b.html b/web/html/doc/search/enums_b.html new file mode 100644 index 000000000..e85817bad --- /dev/null +++ b/web/html/doc/search/enums_b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enums_b.js b/web/html/doc/search/enums_b.js new file mode 100644 index 000000000..e72785b84 --- /dev/null +++ b/web/html/doc/search/enums_b.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['workertype_2510',['WorkerType',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_0.html b/web/html/doc/search/enumvalues_0.html new file mode 100644 index 000000000..0d131d95b --- /dev/null +++ b/web/html/doc/search/enumvalues_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_0.js b/web/html/doc/search/enumvalues_0.js new file mode 100644 index 000000000..48913080a --- /dev/null +++ b/web/html/doc/search/enumvalues_0.js @@ -0,0 +1,11 @@ +var searchData= +[ + ['addkeypoints_2511',['AddKeypoints',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea5f4badd072493724e560fa43d0cf2c71',1,'op']]], + ['addpafs_2512',['AddPAFs',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaca9f686d0a3d6b8bfe5865b59b2fc84f',1,'op']]], + ['all_2513',['All',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cab1c94ca2fbc3e78fc30069c8d0f01680',1,'op::All()'],['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94ab1c94ca2fbc3e78fc30069c8d0f01680',1,'op::All()']]], + ['asynchronous_2514',['Asynchronous',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a288aae25bc408055f50c21c991903a44',1,'op']]], + ['asynchronousin_2515',['AsynchronousIn',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a435b3ab344c03bfc0e4530a2e75f5e44',1,'op']]], + ['asynchronousout_2516',['AsynchronousOut',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36ac68f8680ccf3a65dfcfc63356112c9f9',1,'op']]], + ['auto_2517',['Auto',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a06b9281e396db002010bde1de57262eb',1,'op']]], + ['autorepeat_2518',['AutoRepeat',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3ebbca1b84060b0caaf823639739945d',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_1.html b/web/html/doc/search/enumvalues_1.html new file mode 100644 index 000000000..cd9187ab3 --- /dev/null +++ b/web/html/doc/search/enumvalues_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_1.js b/web/html/doc/search/enumvalues_1.js new file mode 100644 index 000000000..5e5c9cbd5 --- /dev/null +++ b/web/html/doc/search/enumvalues_1.js @@ -0,0 +1,16 @@ +var searchData= +[ + ['background_2519',['Background',['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226baa9ded1e5ce5d75814730bb4caaf49419',1,'op::Background()'],['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993eaa9ded1e5ce5d75814730bb4caaf49419',1,'op::Background()']]], + ['body_2520',['Body',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715acaac101b32dda4448cf13a93fe283dddd8',1,'op::Body()'],['../namespaceop.html#a1070db47220e17cf37df40411350f6fbaac101b32dda4448cf13a93fe283dddd8',1,'op::Body()']]], + ['body_5f135_2521',['BODY_135',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261facfbe6a39619f4ca5a1fa2db000a17e0d',1,'op']]], + ['body_5f19_2522',['BODY_19',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fae3ae2003e0e0458bdc49480fb19c876e',1,'op']]], + ['body_5f19_5fx2_2523',['BODY_19_X2',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faca4c7eb29b1f3402e78aa384ce8fd5a9',1,'op']]], + ['body_5f19e_2524',['BODY_19E',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa08956a1731b54bbdce3f97f1361efc23',1,'op']]], + ['body_5f19n_2525',['BODY_19N',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9c6c21b2b0a410880f46637db622e392',1,'op']]], + ['body_5f23_2526',['BODY_23',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa003cc3795b0eeed2af2dfd34ed482794',1,'op']]], + ['body_5f25_2527',['BODY_25',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa9a87ca5ab7b20c2bd4f8d5379956e6f6',1,'op']]], + ['body_5f25b_2528',['BODY_25B',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0b93cfdf906412bd7c8560ccd180cec6',1,'op']]], + ['body_5f25d_2529',['BODY_25D',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa529c87ac399e5fd6f0fa4a360c032568',1,'op']]], + ['body_5f25e_2530',['BODY_25E',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261faef29c97ffaed7b0d41ee9bb0d20550cc',1,'op']]], + ['bodywithtracking_2531',['BodyWithTracking',['../namespaceop.html#a1070db47220e17cf37df40411350f6fba65c691a85367d21881220b7a3d923747',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_10.html b/web/html/doc/search/enumvalues_10.html new file mode 100644 index 000000000..dee29ccbe --- /dev/null +++ b/web/html/doc/search/enumvalues_10.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_10.js b/web/html/doc/search/enumvalues_10.js new file mode 100644 index 000000000..df9813819 --- /dev/null +++ b/web/html/doc/search/enumvalues_10.js @@ -0,0 +1,9 @@ +var searchData= +[ + ['size_2593',['Size',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daa6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a1070db47220e17cf37df40411350f6fba6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()'],['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a6f6cb72d544962fa333e2e34ce64f719',1,'op::Size()']]], + ['skeleton_2594',['Skeleton',['../namespaceop.html#a37a23e10d9cbc428c793c3df1d62993ea6ab48f7ed56efc362f41853c5616bf75',1,'op']]], + ['stdcerr_2595',['StdCerr',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca002f2100f8870e7c823894f492e4d337',1,'op']]], + ['stdcout_2596',['StdCout',['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94aa544d56d9492a20da20018000b5043b6',1,'op']]], + ['stdruntimeerror_2597',['StdRuntimeError',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400cafe50b062b9b9100a72e68b48fe26fc50',1,'op']]], + ['synchronous_2598',['Synchronous',['../namespaceop.html#a3593e2d53bec533f0048ef3973eebd36a2fe4167817733fec8e6ba1afddf78f1b',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_11.html b/web/html/doc/search/enumvalues_11.html new file mode 100644 index 000000000..6d0e91871 --- /dev/null +++ b/web/html/doc/search/enumvalues_11.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_11.js b/web/html/doc/search/enumvalues_11.js new file mode 100644 index 000000000..016a90d98 --- /dev/null +++ b/web/html/doc/search/enumvalues_11.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['unsignedchar_2599',['UnsignedChar',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaa93f121640d609f8772397a0f40f40d6',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_12.html b/web/html/doc/search/enumvalues_12.html new file mode 100644 index 000000000..6a98dc84c --- /dev/null +++ b/web/html/doc/search/enumvalues_12.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_12.js b/web/html/doc/search/enumvalues_12.js new file mode 100644 index 000000000..4f46a9fa9 --- /dev/null +++ b/web/html/doc/search/enumvalues_12.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['video_2600',['Video',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332a34e2d1989a1dbf75cd631596133ee5ee',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_13.html b/web/html/doc/search/enumvalues_13.html new file mode 100644 index 000000000..5faeaf819 --- /dev/null +++ b/web/html/doc/search/enumvalues_13.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_13.js b/web/html/doc/search/enumvalues_13.js new file mode 100644 index 000000000..c891b954d --- /dev/null +++ b/web/html/doc/search/enumvalues_13.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['webcam_2601',['Webcam',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332ae2faa2a74b6a4134d0b3e84c7c0e2a01',1,'op']]], + ['windowed_2602',['Windowed',['../namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfab13311ab51c4c34757f67f26580018dd',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_14.html b/web/html/doc/search/enumvalues_14.html new file mode 100644 index 000000000..3ee3508fa --- /dev/null +++ b/web/html/doc/search/enumvalues_14.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_14.js b/web/html/doc/search/enumvalues_14.js new file mode 100644 index 000000000..32100ad17 --- /dev/null +++ b/web/html/doc/search/enumvalues_14.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['xml_2603',['Xml',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea9ec8e4e3ab4c7eeba097f27d7364d743',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_15.html b/web/html/doc/search/enumvalues_15.html new file mode 100644 index 000000000..ba97b557c --- /dev/null +++ b/web/html/doc/search/enumvalues_15.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_15.js b/web/html/doc/search/enumvalues_15.js new file mode 100644 index 000000000..ced6e256c --- /dev/null +++ b/web/html/doc/search/enumvalues_15.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['yaml_2604',['Yaml',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea65f6036bfc9798ce230c5d8567551315',1,'op']]], + ['yml_2605',['Yml',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7ea55eeca17b45365c188d0edbd35f6e0c3',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_16.html b/web/html/doc/search/enumvalues_16.html new file mode 100644 index 000000000..c700a41da --- /dev/null +++ b/web/html/doc/search/enumvalues_16.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_16.js b/web/html/doc/search/enumvalues_16.js new file mode 100644 index 000000000..64527c8bc --- /dev/null +++ b/web/html/doc/search/enumvalues_16.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['zerotoone_2606',['ZeroToOne',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa4b942544cb3e764bbb8d33f8a8744855',1,'op']]], + ['zerotoonefixedaspect_2607',['ZeroToOneFixedAspect',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaafa90ddb034be42f1cdf13a6829eed2ad',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_2.html b/web/html/doc/search/enumvalues_2.html new file mode 100644 index 000000000..2b95d9204 --- /dev/null +++ b/web/html/doc/search/enumvalues_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_2.js b/web/html/doc/search/enumvalues_2.js new file mode 100644 index 000000000..b4b32caf5 --- /dev/null +++ b/web/html/doc/search/enumvalues_2.js @@ -0,0 +1,13 @@ +var searchData= +[ + ['car_2532',['Car',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715acae9989db5dabeea617f40c8dbfd07f5fb',1,'op']]], + ['car_5f12_2533',['CAR_12',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa66ae79a5ac5fa502ae8bbecd3e07e71c',1,'op']]], + ['car_5f22_2534',['CAR_22',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa372b9885bba8bc32ad323fffcf99e39e',1,'op']]], + ['coco_5f18_2535',['COCO_18',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa0c4a19d9254adcb3ca1f0f527ee141fd',1,'op']]], + ['connectinterminabovethreshold_2536',['ConnectInterMinAboveThreshold',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0eaf7405796a5c90a93fc3c8ffa89eb432d',1,'op']]], + ['connectinterthreshold_2537',['ConnectInterThreshold',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea240f10f3a39507d858c743971fd4298f',1,'op']]], + ['connectminsubsetcnt_2538',['ConnectMinSubsetCnt',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea7bf312724768faebba41ca3585a91f19',1,'op']]], + ['connectminsubsetscore_2539',['ConnectMinSubsetScore',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea04576b26f5dc3637bf3c8168fba1641d',1,'op']]], + ['cpu_2540',['Cpu',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a54c82ef76ecbbd4c2293e09bae01b54e',1,'op']]], + ['cuda_2541',['Cuda',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa8b95dcff7397d0693c03e394af5552aa',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_3.html b/web/html/doc/search/enumvalues_3.html new file mode 100644 index 000000000..bc0ac8a97 --- /dev/null +++ b/web/html/doc/search/enumvalues_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_3.js b/web/html/doc/search/enumvalues_3.js new file mode 100644 index 000000000..1d6202b7b --- /dev/null +++ b/web/html/doc/search/enumvalues_3.js @@ -0,0 +1,8 @@ +var searchData= +[ + ['disabled_2542',['Disabled',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09ab9f5c797ebbf55adccdd8539a65a0241',1,'op']]], + ['display2d_2543',['Display2D',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a3bd9369403112127ae7db2f866002be2',1,'op']]], + ['display3d_2544',['Display3D',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6ae18221460ca8434295f980225fd6a91b',1,'op']]], + ['displayadam_2545',['DisplayAdam',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a442304e26339521bc296bdc47ff5fddf',1,'op']]], + ['displayall_2546',['DisplayAll',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a105036ef087117869f656cd72bfd8dd6',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_4.html b/web/html/doc/search/enumvalues_4.html new file mode 100644 index 000000000..ef94dd8d7 --- /dev/null +++ b/web/html/doc/search/enumvalues_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_4.js b/web/html/doc/search/enumvalues_4.js new file mode 100644 index 000000000..af1600dca --- /dev/null +++ b/web/html/doc/search/enumvalues_4.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['enabled_2547',['Enabled',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09a00d23a76e43b46dae9ec7aa9dcbebb32',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_5.html b/web/html/doc/search/enumvalues_5.html new file mode 100644 index 000000000..1c2e2f33d --- /dev/null +++ b/web/html/doc/search/enumvalues_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_5.js b/web/html/doc/search/enumvalues_5.js new file mode 100644 index 000000000..9d60fcf70 --- /dev/null +++ b/web/html/doc/search/enumvalues_5.js @@ -0,0 +1,10 @@ +var searchData= +[ + ['face_2548',['Face',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca8af5861002f3c157f9ba842bba10aa3f',1,'op']]], + ['filelogging_2549',['FileLogging',['../namespaceop.html#a5f5a4cee9809deaf7201fb9caf5e400ca68ec2bf5b1662d1d27a523dcfc3c702a',1,'op::FileLogging()'],['../namespaceop.html#a5fa46d7c4b25c823d1cdcc8e9d460f94a68ec2bf5b1662d1d27a523dcfc3c702a',1,'op::FileLogging()']]], + ['flip_2550',['Flip',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a9ffbd422925a6839ee820ddbc59278c5',1,'op']]], + ['flircamera_2551',['FlirCamera',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332af436d4d7a472ac39a7cb227e3ea24f8d',1,'op']]], + ['foot_2552',['Foot',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca129e74dde7b475c8848310e16754c965',1,'op']]], + ['framestep_2553',['FrameStep',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a63eacc5ed21c0ecb8bc583e10dc3ae58',1,'op']]], + ['fullscreen_2554',['FullScreen',['../namespaceop.html#a6c22a72ce93c64e7582cb670492a50bfae7ec409749889353b8f83a6b04159420',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_6.html b/web/html/doc/search/enumvalues_6.html new file mode 100644 index 000000000..f985df91d --- /dev/null +++ b/web/html/doc/search/enumvalues_6.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_6.js b/web/html/doc/search/enumvalues_6.js new file mode 100644 index 000000000..beec2a43f --- /dev/null +++ b/web/html/doc/search/enumvalues_6.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['gpu_2555',['Gpu',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a3432ca64f06615abf07ab44c10cada38',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_7.html b/web/html/doc/search/enumvalues_7.html new file mode 100644 index 000000000..7fdf663dd --- /dev/null +++ b/web/html/doc/search/enumvalues_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_7.js b/web/html/doc/search/enumvalues_7.js new file mode 100644 index 000000000..cbae347a4 --- /dev/null +++ b/web/html/doc/search/enumvalues_7.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['hand21_2556',['Hand21',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca9909f7cecc318ee0049ad0f3b409b3b3',1,'op']]], + ['hand42_2557',['Hand42',['../namespaceop.html#a5418b76dad5b4aea1133325f4aa715aca1d9502bb9f6efc989b3578dcfde7901e',1,'op']]], + ['high_2558',['High',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a655d20c1ca69519ca647684edbb2db35',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_8.html b/web/html/doc/search/enumvalues_8.html new file mode 100644 index 000000000..674ccda69 --- /dev/null +++ b/web/html/doc/search/enumvalues_8.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_8.js b/web/html/doc/search/enumvalues_8.js new file mode 100644 index 000000000..cd3ecfe47 --- /dev/null +++ b/web/html/doc/search/enumvalues_8.js @@ -0,0 +1,8 @@ +var searchData= +[ + ['imagedirectory_2559',['ImageDirectory',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332a54a365e86ee42cff91ca36532c9bbabf',1,'op']]], + ['images_2560',['Images',['../namespaceop.html#a553bd31855c20a0d14e4c44a20bd91daafff0d600f8a0b5e19e88bfb821dd1157',1,'op']]], + ['input_2561',['Input',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a324118a6721dd6b8a9b9f4e327df2bf5',1,'op']]], + ['inputresolution_2562',['InputResolution',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa46f9a0da0a5d448fd0cc8b3aa0a9b228',1,'op']]], + ['ipcamera_2563',['IPCamera',['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332af40a40a04a078c4449cda2f326d7fb18',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_9.html b/web/html/doc/search/enumvalues_9.html new file mode 100644 index 000000000..60f15ee38 --- /dev/null +++ b/web/html/doc/search/enumvalues_9.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_9.js b/web/html/doc/search/enumvalues_9.js new file mode 100644 index 000000000..3631c53e4 --- /dev/null +++ b/web/html/doc/search/enumvalues_9.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['json_2564',['Json',['../namespaceop.html#ae52c21a24cf2c21e3b419c127924fd7eaeed8d85b888a6c015834240885ee6333',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_a.html b/web/html/doc/search/enumvalues_a.html new file mode 100644 index 000000000..ce1064c72 --- /dev/null +++ b/web/html/doc/search/enumvalues_a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_a.js b/web/html/doc/search/enumvalues_a.js new file mode 100644 index 000000000..2fea3fd44 --- /dev/null +++ b/web/html/doc/search/enumvalues_a.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['low_2565',['Low',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a28d0edd045e05cf5af64e35ae0c4c6ef',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_b.html b/web/html/doc/search/enumvalues_b.html new file mode 100644 index 000000000..ec768e7ff --- /dev/null +++ b/web/html/doc/search/enumvalues_b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_b.js b/web/html/doc/search/enumvalues_b.js new file mode 100644 index 000000000..cb9906461 --- /dev/null +++ b/web/html/doc/search/enumvalues_b.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['max_2566',['Max',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6a061313d22e51e0f25b7cd4dc065233',1,'op']]], + ['mpi_5f15_2567',['MPI_15',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fad788fbec25069f2884ee1ed97e0af2b9',1,'op']]], + ['mpi_5f15_5f4_2568',['MPI_15_4',['../namespaceop.html#af5b3ce2a5d3de87cb31b9b67e96f261fa71e915c88449606c6498d33dd7c98e84',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_c.html b/web/html/doc/search/enumvalues_c.html new file mode 100644 index 000000000..bdb4e1315 --- /dev/null +++ b/web/html/doc/search/enumvalues_c.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_c.js b/web/html/doc/search/enumvalues_c.js new file mode 100644 index 000000000..8ad3fc89a --- /dev/null +++ b/web/html/doc/search/enumvalues_c.js @@ -0,0 +1,13 @@ +var searchData= +[ + ['netoutputresolution_2569',['NetOutputResolution',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa668a2bc599fd07445eae0730d043c96d',1,'op']]], + ['nmsthreshold_2570',['NMSThreshold',['../namespaceop.html#a37c58b781e5bcd9fee67a7768afc5d0ea83be5d7f6f29b19cf24f7393551c0439',1,'op']]], + ['nodisplay_2571',['NoDisplay',['../namespaceop.html#ae52707752b1872b39f0306cc4f6c6ae6a28b652e57d2da6b7c939166be21efd9a',1,'op']]], + ['nogpu_2572',['NoGpu',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa3c1472839b807c90abff3c7c36dff458',1,'op']]], + ['none_2573',['None',['../namespaceop.html#afce557f02e337e16150d00bdf72ec033a6adf97f83acf6453d4a6a4b1070f3754',1,'op::None()'],['../namespaceop.html#a54b73745852c270cfd891eed0f6f2332a6adf97f83acf6453d4a6a4b1070f3754',1,'op::None()'],['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a6adf97f83acf6453d4a6a4b1070f3754',1,'op::None()']]], + ['nonetwork_2574',['NoNetwork',['../namespaceop.html#a53e7c7ac399de4698e1e609ec0474a09aa6e20e86de146a7b524d32c9b1fea7f4',1,'op']]], + ['nooutput_2575',['NoOutput',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a828d496739024f4af00df1e277d96ebd',1,'op']]], + ['normal_2576',['Normal',['../namespaceop.html#adc43fb9031418e7f8112816a3b535d14a960b44c579bc2f6818d2daaf9e4c16f0',1,'op']]], + ['noscale_2577',['NoScale',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa6089ccf7c3fe93a62745e51200419c60',1,'op']]], + ['numberviews_2578',['NumberViews',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774a3b6cff57206f4ce645622b2e55f784a6',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_d.html b/web/html/doc/search/enumvalues_d.html new file mode 100644 index 000000000..47a761596 --- /dev/null +++ b/web/html/doc/search/enumvalues_d.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_d.js b/web/html/doc/search/enumvalues_d.js new file mode 100644 index 000000000..712ae601b --- /dev/null +++ b/web/html/doc/search/enumvalues_d.js @@ -0,0 +1,8 @@ +var searchData= +[ + ['opencl_2579',['OpenCL',['../namespaceop.html#adbb34b5c8f2b6f0c051f831f18582e7fa7982b09a852b37f2afb1227eaf552e47',1,'op']]], + ['opencv_2580',['OpenCV',['../namespaceop.html#a1070db47220e17cf37df40411350f6fba5bd4c87976f48e6a53919d53e14025e9',1,'op']]], + ['originalfps_2581',['OriginalFps',['../namespaceop.html#ac0230b669b296920c0cfc41b7587268fa0123c3afc0fac5edaf8b1672cb12626c',1,'op']]], + ['output_2582',['Output',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a29c2c02a361c9d7028472e5d92cd4a54',1,'op']]], + ['outputresolution_2583',['OutputResolution',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaa73c42013aac51c335d50d103f30fcb99',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_e.html b/web/html/doc/search/enumvalues_e.html new file mode 100644 index 000000000..c7b30bf3e --- /dev/null +++ b/web/html/doc/search/enumvalues_e.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_e.js b/web/html/doc/search/enumvalues_e.js new file mode 100644 index 000000000..71fbb6e48 --- /dev/null +++ b/web/html/doc/search/enumvalues_e.js @@ -0,0 +1,10 @@ +var searchData= +[ + ['pafs_2584',['PAFs',['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba21c5c3f60f4881b8d5477f5628db74f1',1,'op']]], + ['parts_2585',['Parts',['../namespaceop.html#a1c3dbc214e7552f7ef9cc753ee97226ba9ce2d07469b39a72159ed8b0e0e597ca',1,'op']]], + ['plusminusone_2586',['PlusMinusOne',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaab7e7b2beae3435e73021d6d9a6a3fd8a',1,'op']]], + ['plusminusonefixedaspect_2587',['PlusMinusOneFixedAspect',['../namespaceop.html#af72fe4ed32846c12f41b049d3d0e1bdaaee080e43c505aa85cdda0e480b0abc06',1,'op']]], + ['postprocessing_2588',['PostProcessing',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18aa52d6088cbae537944827c8f8c69c570',1,'op']]], + ['preprocessing_2589',['PreProcessing',['../namespaceop.html#a970a2a768a2ace81605b1558c9fdec18a05318bd0215d16e009798570b53755d2',1,'op']]], + ['provided_2590',['Provided',['../namespaceop.html#a1070db47220e17cf37df40411350f6fba900b06e1ae224594f075e0c882c73532',1,'op']]] +]; diff --git a/web/html/doc/search/enumvalues_f.html b/web/html/doc/search/enumvalues_f.html new file mode 100644 index 000000000..9a7bb1266 --- /dev/null +++ b/web/html/doc/search/enumvalues_f.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/enumvalues_f.js b/web/html/doc/search/enumvalues_f.js new file mode 100644 index 000000000..3f91ace36 --- /dev/null +++ b/web/html/doc/search/enumvalues_f.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['retrievalfps_2591',['RetrievalFps',['../namespaceop.html#ac0230b669b296920c0cfc41b7587268fa6bcd0f3b66e42d1aacd18d1c3b532473',1,'op']]], + ['rotation_2592',['Rotation',['../namespaceop.html#abc501c56c6cf6cf1989c84b1692cb774af1a42bd417390fc63b030a519624607a',1,'op']]] +]; diff --git a/web/html/doc/search/files_0.html b/web/html/doc/search/files_0.html new file mode 100644 index 000000000..9498842a6 --- /dev/null +++ b/web/html/doc/search/files_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_0.js b/web/html/doc/search/files_0.js new file mode 100644 index 000000000..9a76d580f --- /dev/null +++ b/web/html/doc/search/files_0.js @@ -0,0 +1,14 @@ +var searchData= +[ + ['00_5findex_2emd_1429',['00_index.md',['../00__index_8md.html',1,'']]], + ['01_5fdemo_2emd_1430',['01_demo.md',['../01__demo_8md.html',1,'']]], + ['02_5foutput_2emd_1431',['02_output.md',['../02__output_8md.html',1,'']]], + ['03_5fpython_5fapi_2emd_1432',['03_python_api.md',['../03__python__api_8md.html',1,'']]], + ['04_5fcpp_5fapi_2emd_1433',['04_cpp_api.md',['../04__cpp__api_8md.html',1,'']]], + ['05_5ffaq_2emd_1434',['05_faq.md',['../05__faq_8md.html',1,'']]], + ['06_5fmaximizing_5fopenpose_5fspeed_2emd_1435',['06_maximizing_openpose_speed.md',['../06__maximizing__openpose__speed_8md.html',1,'']]], + ['07_5fmajor_5freleased_5ffeatures_2emd_1436',['07_major_released_features.md',['../07__major__released__features_8md.html',1,'']]], + ['08_5frelease_5fnotes_2emd_1437',['08_release_notes.md',['../08__release__notes_8md.html',1,'']]], + ['09_5fauthors_5fand_5fcontributors_2emd_1438',['09_authors_and_contributors.md',['../09__authors__and__contributors_8md.html',1,'']]], + ['0_5findex_2emd_1439',['0_index.md',['../installation_20__index_8md.html',1,'(Global Namespace)'],['../very__advanced_2library__structure_20__index_8md.html',1,'(Global Namespace)']]] +]; diff --git a/web/html/doc/search/files_1.html b/web/html/doc/search/files_1.html new file mode 100644 index 000000000..7050ef48a --- /dev/null +++ b/web/html/doc/search/files_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_1.js b/web/html/doc/search/files_1.js new file mode 100644 index 000000000..0a87e01ef --- /dev/null +++ b/web/html/doc/search/files_1.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['10_5fcommunity_5fprojects_2emd_1440',['10_community_projects.md',['../10__community__projects_8md.html',1,'']]], + ['1_5flibrary_5fdeep_5foverview_2emd_1441',['1_library_deep_overview.md',['../1__library__deep__overview_8md.html',1,'']]], + ['1_5fprerequisites_2emd_1442',['1_prerequisites.md',['../1__prerequisites_8md.html',1,'']]] +]; diff --git a/web/html/doc/search/files_10.html b/web/html/doc/search/files_10.html new file mode 100644 index 000000000..e52318ed3 --- /dev/null +++ b/web/html/doc/search/files_10.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_10.js b/web/html/doc/search/files_10.js new file mode 100644 index 000000000..5aaa73481 --- /dev/null +++ b/web/html/doc/search/files_10.js @@ -0,0 +1,8 @@ +var searchData= +[ + ['net_2ehpp_1517',['net.hpp',['../net_8hpp.html',1,'']]], + ['netcaffe_2ehpp_1518',['netCaffe.hpp',['../net_caffe_8hpp.html',1,'']]], + ['netopencv_2ehpp_1519',['netOpenCv.hpp',['../net_open_cv_8hpp.html',1,'']]], + ['nmsbase_2ehpp_1520',['nmsBase.hpp',['../nms_base_8hpp.html',1,'']]], + ['nmscaffe_2ehpp_1521',['nmsCaffe.hpp',['../nms_caffe_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_11.html b/web/html/doc/search/files_11.html new file mode 100644 index 000000000..02edc0913 --- /dev/null +++ b/web/html/doc/search/files_11.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_11.js b/web/html/doc/search/files_11.js new file mode 100644 index 000000000..e0e7b8ad8 --- /dev/null +++ b/web/html/doc/search/files_11.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['opencv_2ehpp_1522',['openCv.hpp',['../open_cv_8hpp.html',1,'']]], + ['opoutputtocvmat_2ehpp_1523',['opOutputToCvMat.hpp',['../op_output_to_cv_mat_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_12.html b/web/html/doc/search/files_12.html new file mode 100644 index 000000000..bd79f3c5f --- /dev/null +++ b/web/html/doc/search/files_12.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_12.js b/web/html/doc/search/files_12.js new file mode 100644 index 000000000..82f53b90e --- /dev/null +++ b/web/html/doc/search/files_12.js @@ -0,0 +1,20 @@ +var searchData= +[ + ['peoplejsonsaver_2ehpp_1524',['peopleJsonSaver.hpp',['../people_json_saver_8hpp.html',1,'']]], + ['personidextractor_2ehpp_1525',['personIdExtractor.hpp',['../person_id_extractor_8hpp.html',1,'']]], + ['persontracker_2ehpp_1526',['personTracker.hpp',['../person_tracker_8hpp.html',1,'']]], + ['point_2ehpp_1527',['point.hpp',['../point_8hpp.html',1,'']]], + ['pointercontainer_2ehpp_1528',['pointerContainer.hpp',['../pointer_container_8hpp.html',1,'']]], + ['posecpurenderer_2ehpp_1529',['poseCpuRenderer.hpp',['../pose_cpu_renderer_8hpp.html',1,'']]], + ['poseextractor_2ehpp_1530',['poseExtractor.hpp',['../pose_extractor_8hpp.html',1,'']]], + ['poseextractorcaffe_2ehpp_1531',['poseExtractorCaffe.hpp',['../pose_extractor_caffe_8hpp.html',1,'']]], + ['poseextractornet_2ehpp_1532',['poseExtractorNet.hpp',['../pose_extractor_net_8hpp.html',1,'']]], + ['posegpurenderer_2ehpp_1533',['poseGpuRenderer.hpp',['../pose_gpu_renderer_8hpp.html',1,'']]], + ['poseparameters_2ehpp_1534',['poseParameters.hpp',['../pose_parameters_8hpp.html',1,'']]], + ['poseparametersrender_2ehpp_1535',['poseParametersRender.hpp',['../pose_parameters_render_8hpp.html',1,'']]], + ['poserenderer_2ehpp_1536',['poseRenderer.hpp',['../pose_renderer_8hpp.html',1,'']]], + ['posetriangulation_2ehpp_1537',['poseTriangulation.hpp',['../pose_triangulation_8hpp.html',1,'']]], + ['priorityqueue_2ehpp_1538',['priorityQueue.hpp',['../priority_queue_8hpp.html',1,'']]], + ['producer_2ehpp_1539',['producer.hpp',['../producer_8hpp.html',1,'']]], + ['profiler_2ehpp_1540',['profiler.hpp',['../profiler_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_13.html b/web/html/doc/search/files_13.html new file mode 100644 index 000000000..7cbb5b01e --- /dev/null +++ b/web/html/doc/search/files_13.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_13.js b/web/html/doc/search/files_13.js new file mode 100644 index 000000000..538212aec --- /dev/null +++ b/web/html/doc/search/files_13.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['queue_2ehpp_1541',['queue.hpp',['../queue_8hpp.html',1,'']]], + ['queuebase_2ehpp_1542',['queueBase.hpp',['../queue_base_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_14.html b/web/html/doc/search/files_14.html new file mode 100644 index 000000000..c8da77bcc --- /dev/null +++ b/web/html/doc/search/files_14.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_14.js b/web/html/doc/search/files_14.js new file mode 100644 index 000000000..a99ab025f --- /dev/null +++ b/web/html/doc/search/files_14.js @@ -0,0 +1,11 @@ +var searchData= +[ + ['readme_2emd_1543',['README.md',['../_r_e_a_d_m_e_8md.html',1,'(Global Namespace)'],['../doc_2installation_2_r_e_a_d_m_e_8md.html',1,'(Global Namespace)'],['../doc_2_r_e_a_d_m_e_8md.html',1,'(Global Namespace)']]], + ['rectangle_2ehpp_1544',['rectangle.hpp',['../rectangle_8hpp.html',1,'']]], + ['renderer_2ehpp_1545',['renderer.hpp',['../renderer_8hpp.html',1,'']]], + ['renderface_2ehpp_1546',['renderFace.hpp',['../render_face_8hpp.html',1,'']]], + ['renderhand_2ehpp_1547',['renderHand.hpp',['../render_hand_8hpp.html',1,'']]], + ['renderpose_2ehpp_1548',['renderPose.hpp',['../render_pose_8hpp.html',1,'']]], + ['resizeandmergebase_2ehpp_1549',['resizeAndMergeBase.hpp',['../resize_and_merge_base_8hpp.html',1,'']]], + ['resizeandmergecaffe_2ehpp_1550',['resizeAndMergeCaffe.hpp',['../resize_and_merge_caffe_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_15.html b/web/html/doc/search/files_15.html new file mode 100644 index 000000000..2672868ce --- /dev/null +++ b/web/html/doc/search/files_15.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/files_15.js b/web/html/doc/search/files_15.js new file mode 100644 index 000000000..d9a26ccd4 --- /dev/null +++ b/web/html/doc/search/files_15.js @@ -0,0 +1,13 @@ +var searchData= +[ + ['scaleandsizeextractor_2ehpp_1551',['scaleAndSizeExtractor.hpp',['../scale_and_size_extractor_8hpp.html',1,'']]], + ['spinnakerwrapper_2ehpp_1552',['spinnakerWrapper.hpp',['../spinnaker_wrapper_8hpp.html',1,'']]], + ['standalone_5fface_5for_5fhand_5fkeypoint_5fdetector_2emd_1553',['standalone_face_or_hand_keypoint_detector.md',['../standalone__face__or__hand__keypoint__detector_8md.html',1,'']]], + ['standard_2ehpp_1554',['standard.hpp',['../standard_8hpp.html',1,'']]], + ['string_2ehpp_1555',['string.hpp',['../core_2string_8hpp.html',1,'(Global Namespace)'],['../utilities_2string_8hpp.html',1,'(Global Namespace)']]], + ['subthread_2ehpp_1556',['subThread.hpp',['../sub_thread_8hpp.html',1,'']]], + ['subthreadnoqueue_2ehpp_1557',['subThreadNoQueue.hpp',['../sub_thread_no_queue_8hpp.html',1,'']]], + ['subthreadqueuein_2ehpp_1558',['subThreadQueueIn.hpp',['../sub_thread_queue_in_8hpp.html',1,'']]], + ['subthreadqueueinout_2ehpp_1559',['subThreadQueueInOut.hpp',['../sub_thread_queue_in_out_8hpp.html',1,'']]], + ['subthreadqueueout_2ehpp_1560',['subThreadQueueOut.hpp',['../sub_thread_queue_out_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_16.html b/web/html/doc/search/files_16.html new file mode 100644 index 000000000..b48235e24 --- /dev/null +++ b/web/html/doc/search/files_16.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_16.js b/web/html/doc/search/files_16.js new file mode 100644 index 000000000..45b0534ce --- /dev/null +++ b/web/html/doc/search/files_16.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['thread_2ehpp_1561',['thread.hpp',['../thread_8hpp.html',1,'']]], + ['threadmanager_2ehpp_1562',['threadManager.hpp',['../thread_manager_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_17.html b/web/html/doc/search/files_17.html new file mode 100644 index 000000000..5380f40fe --- /dev/null +++ b/web/html/doc/search/files_17.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_17.js b/web/html/doc/search/files_17.js new file mode 100644 index 000000000..5005bcc84 --- /dev/null +++ b/web/html/doc/search/files_17.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['udpsender_2ehpp_1563',['udpSender.hpp',['../udp_sender_8hpp.html',1,'']]], + ['unitybinding_2ehpp_1564',['unityBinding.hpp',['../unity_binding_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_18.html b/web/html/doc/search/files_18.html new file mode 100644 index 000000000..e43020168 --- /dev/null +++ b/web/html/doc/search/files_18.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_18.js b/web/html/doc/search/files_18.js new file mode 100644 index 000000000..b8d011c69 --- /dev/null +++ b/web/html/doc/search/files_18.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['verboseprinter_2ehpp_1565',['verbosePrinter.hpp',['../verbose_printer_8hpp.html',1,'']]], + ['videocapturereader_2ehpp_1566',['videoCaptureReader.hpp',['../video_capture_reader_8hpp.html',1,'']]], + ['videoreader_2ehpp_1567',['videoReader.hpp',['../video_reader_8hpp.html',1,'']]], + ['videosaver_2ehpp_1568',['videoSaver.hpp',['../video_saver_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_19.html b/web/html/doc/search/files_19.html new file mode 100644 index 000000000..9dec161d4 --- /dev/null +++ b/web/html/doc/search/files_19.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_19.js b/web/html/doc/search/files_19.js new file mode 100644 index 000000000..353949382 --- /dev/null +++ b/web/html/doc/search/files_19.js @@ -0,0 +1,59 @@ +var searchData= +[ + ['wbvhsaver_2ehpp_1569',['wBvhSaver.hpp',['../w_bvh_saver_8hpp.html',1,'']]], + ['wcocojsonsaver_2ehpp_1570',['wCocoJsonSaver.hpp',['../w_coco_json_saver_8hpp.html',1,'']]], + ['wcvmattoopinput_2ehpp_1571',['wCvMatToOpInput.hpp',['../w_cv_mat_to_op_input_8hpp.html',1,'']]], + ['wcvmattoopoutput_2ehpp_1572',['wCvMatToOpOutput.hpp',['../w_cv_mat_to_op_output_8hpp.html',1,'']]], + ['wdatumproducer_2ehpp_1573',['wDatumProducer.hpp',['../w_datum_producer_8hpp.html',1,'']]], + ['webcamreader_2ehpp_1574',['webcamReader.hpp',['../webcam_reader_8hpp.html',1,'']]], + ['wfacedetector_2ehpp_1575',['wFaceDetector.hpp',['../w_face_detector_8hpp.html',1,'']]], + ['wfacedetectoropencv_2ehpp_1576',['wFaceDetectorOpenCV.hpp',['../w_face_detector_open_c_v_8hpp.html',1,'']]], + ['wfaceextractornet_2ehpp_1577',['wFaceExtractorNet.hpp',['../w_face_extractor_net_8hpp.html',1,'']]], + ['wfacerenderer_2ehpp_1578',['wFaceRenderer.hpp',['../w_face_renderer_8hpp.html',1,'']]], + ['wfacesaver_2ehpp_1579',['wFaceSaver.hpp',['../w_face_saver_8hpp.html',1,'']]], + ['wfpsmax_2ehpp_1580',['wFpsMax.hpp',['../w_fps_max_8hpp.html',1,'']]], + ['wgui_2ehpp_1581',['wGui.hpp',['../w_gui_8hpp.html',1,'']]], + ['wgui3d_2ehpp_1582',['wGui3D.hpp',['../w_gui3_d_8hpp.html',1,'']]], + ['wguiadam_2ehpp_1583',['wGuiAdam.hpp',['../w_gui_adam_8hpp.html',1,'']]], + ['wguiinfoadder_2ehpp_1584',['wGuiInfoAdder.hpp',['../w_gui_info_adder_8hpp.html',1,'']]], + ['whanddetector_2ehpp_1585',['wHandDetector.hpp',['../w_hand_detector_8hpp.html',1,'']]], + ['whanddetectorfromtxt_2ehpp_1586',['wHandDetectorFromTxt.hpp',['../w_hand_detector_from_txt_8hpp.html',1,'']]], + ['whanddetectortracking_2ehpp_1587',['wHandDetectorTracking.hpp',['../w_hand_detector_tracking_8hpp.html',1,'']]], + ['whanddetectorupdate_2ehpp_1588',['wHandDetectorUpdate.hpp',['../w_hand_detector_update_8hpp.html',1,'']]], + ['whandextractornet_2ehpp_1589',['wHandExtractorNet.hpp',['../w_hand_extractor_net_8hpp.html',1,'']]], + ['whandrenderer_2ehpp_1590',['wHandRenderer.hpp',['../w_hand_renderer_8hpp.html',1,'']]], + ['whandsaver_2ehpp_1591',['wHandSaver.hpp',['../w_hand_saver_8hpp.html',1,'']]], + ['wheatmapsaver_2ehpp_1592',['wHeatMapSaver.hpp',['../w_heat_map_saver_8hpp.html',1,'']]], + ['widgenerator_2ehpp_1593',['wIdGenerator.hpp',['../w_id_generator_8hpp.html',1,'']]], + ['wimagesaver_2ehpp_1594',['wImageSaver.hpp',['../w_image_saver_8hpp.html',1,'']]], + ['wjointangleestimation_2ehpp_1595',['wJointAngleEstimation.hpp',['../w_joint_angle_estimation_8hpp.html',1,'']]], + ['wkeeptopnpeople_2ehpp_1596',['wKeepTopNPeople.hpp',['../w_keep_top_n_people_8hpp.html',1,'']]], + ['wkeypointscaler_2ehpp_1597',['wKeypointScaler.hpp',['../w_keypoint_scaler_8hpp.html',1,'']]], + ['wopoutputtocvmat_2ehpp_1598',['wOpOutputToCvMat.hpp',['../w_op_output_to_cv_mat_8hpp.html',1,'']]], + ['worker_2ehpp_1599',['worker.hpp',['../worker_8hpp.html',1,'']]], + ['workerconsumer_2ehpp_1600',['workerConsumer.hpp',['../worker_consumer_8hpp.html',1,'']]], + ['workerproducer_2ehpp_1601',['workerProducer.hpp',['../worker_producer_8hpp.html',1,'']]], + ['wpeoplejsonsaver_2ehpp_1602',['wPeopleJsonSaver.hpp',['../w_people_json_saver_8hpp.html',1,'']]], + ['wpersonidextractor_2ehpp_1603',['wPersonIdExtractor.hpp',['../w_person_id_extractor_8hpp.html',1,'']]], + 
['wposeextractor_2ehpp_1604',['wPoseExtractor.hpp',['../w_pose_extractor_8hpp.html',1,'']]], + ['wposeextractornet_2ehpp_1605',['wPoseExtractorNet.hpp',['../w_pose_extractor_net_8hpp.html',1,'']]], + ['wposerenderer_2ehpp_1606',['wPoseRenderer.hpp',['../w_pose_renderer_8hpp.html',1,'']]], + ['wposesaver_2ehpp_1607',['wPoseSaver.hpp',['../w_pose_saver_8hpp.html',1,'']]], + ['wposetriangulation_2ehpp_1608',['wPoseTriangulation.hpp',['../w_pose_triangulation_8hpp.html',1,'']]], + ['wqueueassembler_2ehpp_1609',['wQueueAssembler.hpp',['../w_queue_assembler_8hpp.html',1,'']]], + ['wqueueorderer_2ehpp_1610',['wQueueOrderer.hpp',['../w_queue_orderer_8hpp.html',1,'']]], + ['wrapper_2ehpp_1611',['wrapper.hpp',['../wrapper_8hpp.html',1,'']]], + ['wrapperauxiliary_2ehpp_1612',['wrapperAuxiliary.hpp',['../wrapper_auxiliary_8hpp.html',1,'']]], + ['wrapperstructextra_2ehpp_1613',['wrapperStructExtra.hpp',['../wrapper_struct_extra_8hpp.html',1,'']]], + ['wrapperstructface_2ehpp_1614',['wrapperStructFace.hpp',['../wrapper_struct_face_8hpp.html',1,'']]], + ['wrapperstructgui_2ehpp_1615',['wrapperStructGui.hpp',['../wrapper_struct_gui_8hpp.html',1,'']]], + ['wrapperstructhand_2ehpp_1616',['wrapperStructHand.hpp',['../wrapper_struct_hand_8hpp.html',1,'']]], + ['wrapperstructinput_2ehpp_1617',['wrapperStructInput.hpp',['../wrapper_struct_input_8hpp.html',1,'']]], + ['wrapperstructoutput_2ehpp_1618',['wrapperStructOutput.hpp',['../wrapper_struct_output_8hpp.html',1,'']]], + ['wrapperstructpose_2ehpp_1619',['wrapperStructPose.hpp',['../wrapper_struct_pose_8hpp.html',1,'']]], + ['wscaleandsizeextractor_2ehpp_1620',['wScaleAndSizeExtractor.hpp',['../w_scale_and_size_extractor_8hpp.html',1,'']]], + ['wudpsender_2ehpp_1621',['wUdpSender.hpp',['../w_udp_sender_8hpp.html',1,'']]], + ['wverboseprinter_2ehpp_1622',['wVerbosePrinter.hpp',['../w_verbose_printer_8hpp.html',1,'']]], + ['wvideosaver_2ehpp_1623',['wVideoSaver.hpp',['../w_video_saver_8hpp.html',1,'']]], + ['wvideosaver3d_2ehpp_1624',['wVideoSaver3D.hpp',['../w_video_saver3_d_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_2.html b/web/html/doc/search/files_2.html new file mode 100644 index 000000000..497cdf5c7 --- /dev/null +++ b/web/html/doc/search/files_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_2.js b/web/html/doc/search/files_2.js new file mode 100644 index 000000000..04c8f052e --- /dev/null +++ b/web/html/doc/search/files_2.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['2_5fadditional_5fsettings_2emd_1443',['2_additional_settings.md',['../2__additional__settings_8md.html',1,'']]], + ['2_5flibrary_5fextend_5ffunctionality_2emd_1444',['2_library_extend_functionality.md',['../2__library__extend__functionality_8md.html',1,'']]] +]; diff --git a/web/html/doc/search/files_3.html b/web/html/doc/search/files_3.html new file mode 100644 index 000000000..1ba106b2d --- /dev/null +++ b/web/html/doc/search/files_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_3.js b/web/html/doc/search/files_3.js new file mode 100644 index 000000000..67aa9db12 --- /dev/null +++ b/web/html/doc/search/files_3.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['3_5flibrary_5fadd_5fnew_5fmodule_2emd_1445',['3_library_add_new_module.md',['../3__library__add__new__module_8md.html',1,'']]], + ['3d_5freconstruction_5fmodule_2emd_1446',['3d_reconstruction_module.md',['../3d__reconstruction__module_8md.html',1,'']]] +]; diff --git a/web/html/doc/search/files_4.html b/web/html/doc/search/files_4.html new file mode 100644 index 000000000..753b7b109 --- /dev/null +++ b/web/html/doc/search/files_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_4.js b/web/html/doc/search/files_4.js new file mode 100644 index 000000000..c5e48e79a --- /dev/null +++ b/web/html/doc/search/files_4.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['array_2ehpp_1447',['array.hpp',['../array_8hpp.html',1,'']]], + ['arraycpugpu_2ehpp_1448',['arrayCpuGpu.hpp',['../array_cpu_gpu_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_5.html b/web/html/doc/search/files_5.html new file mode 100644 index 000000000..7b6affd7f --- /dev/null +++ b/web/html/doc/search/files_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_5.js b/web/html/doc/search/files_5.js new file mode 100644 index 000000000..1ee90ea64 --- /dev/null +++ b/web/html/doc/search/files_5.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['bodypartconnectorbase_2ehpp_1449',['bodyPartConnectorBase.hpp',['../body_part_connector_base_8hpp.html',1,'']]], + ['bodypartconnectorcaffe_2ehpp_1450',['bodyPartConnectorCaffe.hpp',['../body_part_connector_caffe_8hpp.html',1,'']]], + ['bvhsaver_2ehpp_1451',['bvhSaver.hpp',['../bvh_saver_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_6.html b/web/html/doc/search/files_6.html new file mode 100644 index 000000000..802ebf715 --- /dev/null +++ b/web/html/doc/search/files_6.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_6.js b/web/html/doc/search/files_6.js new file mode 100644 index 000000000..bd4e4d1de --- /dev/null +++ b/web/html/doc/search/files_6.js @@ -0,0 +1,12 @@ +var searchData= +[ + ['calibration_5fmodule_2emd_1452',['calibration_module.md',['../calibration__module_8md.html',1,'']]], + ['cameraparameterestimation_2ehpp_1453',['cameraParameterEstimation.hpp',['../camera_parameter_estimation_8hpp.html',1,'']]], + ['cameraparameterreader_2ehpp_1454',['cameraParameterReader.hpp',['../camera_parameter_reader_8hpp.html',1,'']]], + ['check_2ehpp_1455',['check.hpp',['../check_8hpp.html',1,'']]], + ['cocojsonsaver_2ehpp_1456',['cocoJsonSaver.hpp',['../coco_json_saver_8hpp.html',1,'']]], + ['common_2ehpp_1457',['common.hpp',['../common_8hpp.html',1,'']]], + ['cuda_2ehpp_1458',['cuda.hpp',['../cuda_8hpp.html',1,'']]], + ['cvmattoopinput_2ehpp_1459',['cvMatToOpInput.hpp',['../cv_mat_to_op_input_8hpp.html',1,'']]], + ['cvmattoopoutput_2ehpp_1460',['cvMatToOpOutput.hpp',['../cv_mat_to_op_output_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_7.html b/web/html/doc/search/files_7.html new file mode 100644 index 000000000..365e6484f --- /dev/null +++ b/web/html/doc/search/files_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_7.js b/web/html/doc/search/files_7.js new file mode 100644 index 000000000..979d27912 --- /dev/null +++ b/web/html/doc/search/files_7.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['datum_2ehpp_1461',['datum.hpp',['../datum_8hpp.html',1,'']]], + ['datumproducer_2ehpp_1462',['datumProducer.hpp',['../datum_producer_8hpp.html',1,'']]], + ['demo_5fadvanced_2emd_1463',['demo_advanced.md',['../demo__advanced_8md.html',1,'']]], + ['deployment_2emd_1464',['deployment.md',['../deployment_8md.html',1,'']]] +]; diff --git a/web/html/doc/search/files_8.html b/web/html/doc/search/files_8.html new file mode 100644 index 000000000..3df0f2fae --- /dev/null +++ b/web/html/doc/search/files_8.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_8.js b/web/html/doc/search/files_8.js new file mode 100644 index 000000000..037146823 --- /dev/null +++ b/web/html/doc/search/files_8.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['enumclasses_2ehpp_1465',['enumClasses.hpp',['../core_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../filestream_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../gpu_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../gui_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../pose_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../producer_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../thread_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../utilities_2enum_classes_8hpp.html',1,'(Global Namespace)'],['../wrapper_2enum_classes_8hpp.html',1,'(Global Namespace)']]], + ['errorandlog_2ehpp_1466',['errorAndLog.hpp',['../error_and_log_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_9.html b/web/html/doc/search/files_9.html new file mode 100644 index 000000000..52f8b6c07 --- /dev/null +++ b/web/html/doc/search/files_9.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_9.js b/web/html/doc/search/files_9.js new file mode 100644 index 000000000..dbe2b7572 --- /dev/null +++ b/web/html/doc/search/files_9.js @@ -0,0 +1,19 @@ +var searchData= +[ + ['facecpurenderer_2ehpp_1467',['faceCpuRenderer.hpp',['../face_cpu_renderer_8hpp.html',1,'']]], + ['facedetector_2ehpp_1468',['faceDetector.hpp',['../face_detector_8hpp.html',1,'']]], + ['facedetectoropencv_2ehpp_1469',['faceDetectorOpenCV.hpp',['../face_detector_open_c_v_8hpp.html',1,'']]], + ['faceextractorcaffe_2ehpp_1470',['faceExtractorCaffe.hpp',['../face_extractor_caffe_8hpp.html',1,'']]], + ['faceextractornet_2ehpp_1471',['faceExtractorNet.hpp',['../face_extractor_net_8hpp.html',1,'']]], + ['facegpurenderer_2ehpp_1472',['faceGpuRenderer.hpp',['../face_gpu_renderer_8hpp.html',1,'']]], + ['faceparameters_2ehpp_1473',['faceParameters.hpp',['../face_parameters_8hpp.html',1,'']]], + ['facerenderer_2ehpp_1474',['faceRenderer.hpp',['../face_renderer_8hpp.html',1,'']]], + ['fastmath_2ehpp_1475',['fastMath.hpp',['../fast_math_8hpp.html',1,'']]], + ['filesaver_2ehpp_1476',['fileSaver.hpp',['../file_saver_8hpp.html',1,'']]], + ['filestream_2ehpp_1477',['fileStream.hpp',['../file_stream_8hpp.html',1,'']]], + ['filesystem_2ehpp_1478',['fileSystem.hpp',['../file_system_8hpp.html',1,'']]], + ['flags_2ehpp_1479',['flags.hpp',['../flags_8hpp.html',1,'']]], + ['flagstoopenpose_2ehpp_1480',['flagsToOpenPose.hpp',['../flags_to_open_pose_8hpp.html',1,'']]], + ['flirreader_2ehpp_1481',['flirReader.hpp',['../flir_reader_8hpp.html',1,'']]], + ['framedisplayer_2ehpp_1482',['frameDisplayer.hpp',['../frame_displayer_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_a.html b/web/html/doc/search/files_a.html new file mode 100644 index 000000000..11d4c117b --- /dev/null +++ b/web/html/doc/search/files_a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_a.js b/web/html/doc/search/files_a.js new file mode 100644 index 000000000..d304003df --- /dev/null +++ b/web/html/doc/search/files_a.js @@ -0,0 +1,9 @@ +var searchData= +[ + ['gpu_2ehpp_1483',['gpu.hpp',['../gpu_8hpp.html',1,'']]], + ['gpurenderer_2ehpp_1484',['gpuRenderer.hpp',['../gpu_renderer_8hpp.html',1,'']]], + ['gui_2ehpp_1485',['gui.hpp',['../gui_8hpp.html',1,'']]], + ['gui3d_2ehpp_1486',['gui3D.hpp',['../gui3_d_8hpp.html',1,'']]], + ['guiadam_2ehpp_1487',['guiAdam.hpp',['../gui_adam_8hpp.html',1,'']]], + ['guiinfoadder_2ehpp_1488',['guiInfoAdder.hpp',['../gui_info_adder_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_b.html b/web/html/doc/search/files_b.html new file mode 100644 index 000000000..9fc83436a --- /dev/null +++ b/web/html/doc/search/files_b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_b.js b/web/html/doc/search/files_b.js new file mode 100644 index 000000000..cfb26fb66 --- /dev/null +++ b/web/html/doc/search/files_b.js @@ -0,0 +1,14 @@ +var searchData= +[ + ['handcpurenderer_2ehpp_1489',['handCpuRenderer.hpp',['../hand_cpu_renderer_8hpp.html',1,'']]], + ['handdetector_2ehpp_1490',['handDetector.hpp',['../hand_detector_8hpp.html',1,'']]], + ['handdetectorfromtxt_2ehpp_1491',['handDetectorFromTxt.hpp',['../hand_detector_from_txt_8hpp.html',1,'']]], + ['handextractorcaffe_2ehpp_1492',['handExtractorCaffe.hpp',['../hand_extractor_caffe_8hpp.html',1,'']]], + ['handextractornet_2ehpp_1493',['handExtractorNet.hpp',['../hand_extractor_net_8hpp.html',1,'']]], + ['handgpurenderer_2ehpp_1494',['handGpuRenderer.hpp',['../hand_gpu_renderer_8hpp.html',1,'']]], + ['handparameters_2ehpp_1495',['handParameters.hpp',['../hand_parameters_8hpp.html',1,'']]], + ['handrenderer_2ehpp_1496',['handRenderer.hpp',['../hand_renderer_8hpp.html',1,'']]], + ['headers_2ehpp_1497',['headers.hpp',['../thread_2headers_8hpp.html',1,'(Global Namespace)'],['../wrapper_2headers_8hpp.html',1,'(Global Namespace)'],['../utilities_2headers_8hpp.html',1,'(Global Namespace)'],['../unity_2headers_8hpp.html',1,'(Global Namespace)'],['../tracking_2headers_8hpp.html',1,'(Global Namespace)'],['../producer_2headers_8hpp.html',1,'(Global Namespace)'],['../pose_2headers_8hpp.html',1,'(Global Namespace)'],['../net_2headers_8hpp.html',1,'(Global Namespace)'],['../headers_8hpp.html',1,'(Global Namespace)'],['../hand_2headers_8hpp.html',1,'(Global Namespace)'],['../gui_2headers_8hpp.html',1,'(Global Namespace)'],['../gpu_2headers_8hpp.html',1,'(Global Namespace)'],['../filestream_2headers_8hpp.html',1,'(Global Namespace)'],['../face_2headers_8hpp.html',1,'(Global Namespace)'],['../core_2headers_8hpp.html',1,'(Global Namespace)'],['../calibration_2headers_8hpp.html',1,'(Global Namespace)'],['../3d_2headers_8hpp.html',1,'(Global Namespace)']]], + ['heatmap_5foutput_2emd_1498',['heatmap_output.md',['../heatmap__output_8md.html',1,'']]], + ['heatmapsaver_2ehpp_1499',['heatMapSaver.hpp',['../heat_map_saver_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_c.html b/web/html/doc/search/files_c.html new file mode 100644 index 000000000..c266b4c25 --- /dev/null +++ b/web/html/doc/search/files_c.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_c.js b/web/html/doc/search/files_c.js new file mode 100644 index 000000000..20e011a07 --- /dev/null +++ b/web/html/doc/search/files_c.js @@ -0,0 +1,10 @@ +var searchData= +[ + ['imagedirectoryreader_2ehpp_1500',['imageDirectoryReader.hpp',['../image_directory_reader_8hpp.html',1,'']]], + ['imagesaver_2ehpp_1501',['imageSaver.hpp',['../image_saver_8hpp.html',1,'']]], + ['installation_5fdeprecated_2emd_1502',['installation_deprecated.md',['../installation__deprecated_8md.html',1,'']]], + ['installation_5fjetson_5ftx1_2emd_1503',['installation_jetson_tx1.md',['../installation__jetson__tx1_8md.html',1,'']]], + ['installation_5fjetson_5ftx2_5fjetpack3_2e1_2emd_1504',['installation_jetson_tx2_jetpack3.1.md',['../installation__jetson__tx2__jetpack3_81_8md.html',1,'']]], + ['installation_5fjetson_5ftx2_5fjetpack3_2e3_2emd_1505',['installation_jetson_tx2_jetpack3.3.md',['../installation__jetson__tx2__jetpack3_83_8md.html',1,'']]], + ['ipcamerareader_2ehpp_1506',['ipCameraReader.hpp',['../ip_camera_reader_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_d.html b/web/html/doc/search/files_d.html new file mode 100644 index 000000000..d2ca3c1ca --- /dev/null +++ b/web/html/doc/search/files_d.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_d.js b/web/html/doc/search/files_d.js new file mode 100644 index 000000000..ddabb4b58 --- /dev/null +++ b/web/html/doc/search/files_d.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['jointangleestimation_2ehpp_1507',['jointAngleEstimation.hpp',['../joint_angle_estimation_8hpp.html',1,'']]], + ['jsonofstream_2ehpp_1508',['jsonOfstream.hpp',['../json_ofstream_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_e.html b/web/html/doc/search/files_e.html new file mode 100644 index 000000000..9df411672 --- /dev/null +++ b/web/html/doc/search/files_e.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_e.js b/web/html/doc/search/files_e.js new file mode 100644 index 000000000..b95e0ff54 --- /dev/null +++ b/web/html/doc/search/files_e.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['keeptopnpeople_2ehpp_1509',['keepTopNPeople.hpp',['../keep_top_n_people_8hpp.html',1,'']]], + ['keypoint_2ehpp_1510',['keypoint.hpp',['../keypoint_8hpp.html',1,'']]], + ['keypointsaver_2ehpp_1511',['keypointSaver.hpp',['../keypoint_saver_8hpp.html',1,'']]], + ['keypointscaler_2ehpp_1512',['keypointScaler.hpp',['../keypoint_scaler_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/files_f.html b/web/html/doc/search/files_f.html new file mode 100644 index 000000000..f75258bb5 --- /dev/null +++ b/web/html/doc/search/files_f.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/files_f.js b/web/html/doc/search/files_f.js new file mode 100644 index 000000000..f776c2a82 --- /dev/null +++ b/web/html/doc/search/files_f.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['macros_2ehpp_1513',['macros.hpp',['../macros_8hpp.html',1,'']]], + ['matrix_2ehpp_1514',['matrix.hpp',['../matrix_8hpp.html',1,'']]], + ['maximumbase_2ehpp_1515',['maximumBase.hpp',['../maximum_base_8hpp.html',1,'']]], + ['maximumcaffe_2ehpp_1516',['maximumCaffe.hpp',['../maximum_caffe_8hpp.html',1,'']]] +]; diff --git a/web/html/doc/search/functions_0.html b/web/html/doc/search/functions_0.html new file mode 100644 index 000000000..eb4c5014c --- /dev/null +++ b/web/html/doc/search/functions_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/functions_0.js b/web/html/doc/search/functions_0.js new file mode 100644 index 000000000..e8552afdf --- /dev/null +++ b/web/html/doc/search/functions_0.js @@ -0,0 +1,17 @@ +var searchData= +[ + ['add_1625',['add',['../classop_1_1_thread.html#a820b9416b96c69cb1fc6773b9a53a47a',1,'op::Thread::add()'],['../classop_1_1_thread_manager.html#a8134abeaec65b5647ae92e34f3ad420b',1,'op::ThreadManager::add(const unsigned long long threadId, const std::vector< TWorker > &tWorkers, const unsigned long long queueInId, const unsigned long long queueOutId)'],['../classop_1_1_thread_manager.html#a762acc9eb60bd10857da1f416e169f3d',1,'op::ThreadManager::add(const unsigned long long threadId, const TWorker &tWorker, const unsigned long long queueInId, const unsigned long long queueOutId)'],['../classop_1_1_thread.html#a0617df4103c25bb04ee2c75f05ea2978',1,'op::Thread::add()']]], + ['addbkgchannel_1626',['addBkgChannel',['../namespaceop.html#a13b86d097fd5f36612e9858e9348ea57',1,'op']]], + ['addinfo_1627',['addInfo',['../classop_1_1_gui_info_adder.html#a6f2f2d449d48ca7e21729d03796a540c',1,'op::GuiInfoAdder']]], + ['addpopper_1628',['addPopper',['../classop_1_1_queue_base.html#adc5df8a039d360831db06e3c610bf015',1,'op::QueueBase']]], + ['addpusher_1629',['addPusher',['../classop_1_1_queue_base.html#a1ccdec39ea65a83edc54661acc283134',1,'op::QueueBase']]], + ['area_1630',['area',['../structop_1_1_point.html#ac85e32b9381abc2af106fe96dba81b08',1,'op::Point::area()'],['../structop_1_1_rectangle.html#a5b319240c995c81bfa1d73a2461d49fd',1,'op::Rectangle::area()']]], + ['array_1631',['Array',['../classop_1_1_array.html#afb4e1f55747898d29aa13606ded9991f',1,'op::Array::Array(const Array< T2 > &array)'],['../classop_1_1_array.html#a7a7d854d63815e10e158fe889d17a88e',1,'op::Array::Array(Array< T > &&array)'],['../classop_1_1_array.html#a5a68cca98a3ebaf565f1e546eebd9f01',1,'op::Array::Array(const Array< T > &array)'],['../classop_1_1_array.html#a416e95541761c557c50b79b5e1b33389',1,'op::Array::Array(const Array< T > &array, const int index, const bool noCopy=false)'],['../classop_1_1_array.html#a9cd386050e94c29b3c4ee40cafcacc46',1,'op::Array::Array(const int size, T *const dataPtr)'],['../classop_1_1_array.html#a959ede0df7e535d2d3ac40d098541c27',1,'op::Array::Array(const std::vector< int > &sizes, const T value)'],['../classop_1_1_array.html#ac833fdcb245fcc3135ce65227bb9e4b2',1,'op::Array::Array(const int size, const T value)'],['../classop_1_1_array.html#a48c1ba1f7017b5aa8e0451079dd3a6d3',1,'op::Array::Array(const std::vector< int > &sizes={})'],['../classop_1_1_array.html#a793b9851c7490bc98d4dd52020c0cd3c',1,'op::Array::Array(const int size)'],['../classop_1_1_array.html#a90895562def04a81db0b3e7eaa3722c7',1,'op::Array::Array(const std::vector< int > &sizes, T *const dataPtr)']]], + ['arrayclose_1632',['arrayClose',['../classop_1_1_json_ofstream.html#a3f940d3ad51d0acb7126d62a5617fd69',1,'op::JsonOfstream']]], + ['arraycpugpu_1633',['ArrayCpuGpu',['../classop_1_1_array_cpu_gpu.html#a0234d36fab318cc2c672134fd4009301',1,'op::ArrayCpuGpu::ArrayCpuGpu(const int num, const int channels, const int height, const int width)'],['../classop_1_1_array_cpu_gpu.html#ad5d631890ff61a2e70695b797f1a6f34',1,'op::ArrayCpuGpu::ArrayCpuGpu(const Array< T > &array, const bool copyFromGpu)'],['../classop_1_1_array_cpu_gpu.html#a4fb245f1557f61192ab54c24f4baf487',1,'op::ArrayCpuGpu::ArrayCpuGpu(const void 
*caffeBlobTPtr)'],['../classop_1_1_array_cpu_gpu.html#aee39459d54376c7ec98155b4add7f961',1,'op::ArrayCpuGpu::ArrayCpuGpu()']]], + ['arrayopen_1634',['arrayOpen',['../classop_1_1_json_ofstream.html#a4cc6d50256354b3dc7385e2db01aabc0',1,'op::JsonOfstream']]], + ['asum_5fdata_1635',['asum_data',['../classop_1_1_array_cpu_gpu.html#a7cf928cf41b4477c59c91a0f45c0443c',1,'op::ArrayCpuGpu']]], + ['asum_5fdiff_1636',['asum_diff',['../classop_1_1_array_cpu_gpu.html#a675012bf11b17cb7b7c41b5dc1beccf6',1,'op::ArrayCpuGpu']]], + ['at_1637',['at',['../classop_1_1_array.html#a6e0afd5f447efbfc29efbeac62716eff',1,'op::Array::at(const int index)'],['../classop_1_1_array.html#a4a24dfa0d0f1f3769bf3bfcea47e2220',1,'op::Array::at(const int index) const'],['../classop_1_1_array.html#a8eebb6c34642cdf19ac74c7ed38d128b',1,'op::Array::at(const std::vector< int > &indexes)'],['../classop_1_1_array.html#ae74dec8220582072f85fb3ec430238ce',1,'op::Array::at(const std::vector< int > &indexes) const']]], + ['averagekeypoints_1638',['averageKeypoints',['../namespaceop.html#a1f931e210eb575a084b8e6f462b0b382',1,'op']]] +]; diff --git a/web/html/doc/search/functions_1.html b/web/html/doc/search/functions_1.html new file mode 100644 index 000000000..ef4088b89 --- /dev/null +++ b/web/html/doc/search/functions_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/functions_1.js b/web/html/doc/search/functions_1.js new file mode 100644 index 000000000..7100d8bf9 --- /dev/null +++ b/web/html/doc/search/functions_1.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['backward_5fcpu_1639',['Backward_cpu',['../classop_1_1_body_part_connector_caffe.html#a8ad522722607c9ff6299337019f04175',1,'op::BodyPartConnectorCaffe::Backward_cpu()'],['../classop_1_1_maximum_caffe.html#ad21700230d1f1ac1139e8ce76574232c',1,'op::MaximumCaffe::Backward_cpu()'],['../classop_1_1_nms_caffe.html#abbaee841e5cb64f97c94da67ef4349c9',1,'op::NmsCaffe::Backward_cpu()'],['../classop_1_1_resize_and_merge_caffe.html#a4836b2f08273896f58c2d63a15c871e8',1,'op::ResizeAndMergeCaffe::Backward_cpu()']]], + ['backward_5fgpu_1640',['Backward_gpu',['../classop_1_1_body_part_connector_caffe.html#ace9cba081581a4c58fcfbef73b6dd11b',1,'op::BodyPartConnectorCaffe::Backward_gpu()'],['../classop_1_1_maximum_caffe.html#a91989f6e0a2c1349c33815a8cd659e52',1,'op::MaximumCaffe::Backward_gpu()'],['../classop_1_1_nms_caffe.html#a3d1d4cee2b93d5bc0d88c25019b17715',1,'op::NmsCaffe::Backward_gpu()'],['../classop_1_1_resize_and_merge_caffe.html#acfa7742f943fd741acf0bf383c572655',1,'op::ResizeAndMergeCaffe::Backward_gpu()']]], + ['bodypartconnectorcaffe_1641',['BodyPartConnectorCaffe',['../classop_1_1_body_part_connector_caffe.html#a94e2364fa13ea79b2d6fd72c5db34765',1,'op::BodyPartConnectorCaffe']]], + ['bottomright_1642',['bottomRight',['../structop_1_1_rectangle.html#ab4473fb43ab826ffb10c2be18cb96f24',1,'op::Rectangle']]] +]; diff --git a/web/html/doc/search/functions_10.html b/web/html/doc/search/functions_10.html new file mode 100644 index 000000000..1bdc12572 --- /dev/null +++ b/web/html/doc/search/functions_10.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/functions_10.js b/web/html/doc/search/functions_10.js new file mode 100644 index 000000000..df2c56950 --- /dev/null +++ b/web/html/doc/search/functions_10.js @@ -0,0 +1,5 @@ +var searchData= +[ + ['queue_1983',['Queue',['../classop_1_1_queue.html#ae2b845322940bfc89b6342137d8ac372',1,'op::Queue']]], + ['queuebase_1984',['QueueBase',['../classop_1_1_queue_base.html#aea7941746e2403a09356b9c6a208784c',1,'op::QueueBase']]] +]; diff --git a/web/html/doc/search/functions_11.html b/web/html/doc/search/functions_11.html new file mode 100644 index 000000000..188076ef2 --- /dev/null +++ b/web/html/doc/search/functions_11.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/functions_11.js b/web/html/doc/search/functions_11.js new file mode 100644 index 000000000..8e4a90f63 --- /dev/null +++ b/web/html/doc/search/functions_11.js @@ -0,0 +1,46 @@ +var searchData= +[ + ['readcvmat_1985',['readCvMat',['../classop_1_1_gui3_d.html#a04abf8036928d58daf9417c5b5a41693',1,'op::Gui3D']]], + ['readparameters_1986',['readParameters',['../classop_1_1_camera_parameter_reader.html#a906fd316f09d901280a5fe10a4a54541',1,'op::CameraParameterReader::readParameters(const std::string &cameraParameterPath, const std::vector< std::string > &serialNumbers={})'],['../classop_1_1_camera_parameter_reader.html#a2be8ff6d89e5f623f476c75afe3c5c3b',1,'op::CameraParameterReader::readParameters(const std::string &cameraParameterPath, const std::string &serialNumber)']]], + ['recenter_1987',['recenter',['../namespaceop.html#a7cd131c9ddd8f3987508e89e0881b9e0',1,'op::recenter()'],['../structop_1_1_rectangle.html#a1c9a572db2c17fb02a7d19e965c1d3dc',1,'op::Rectangle::recenter()']]], + ['reconstructarray_1988',['reconstructArray',['../classop_1_1_pose_triangulation.html#adc3cf7eb81cb9e7d7f72fda0602ed89b',1,'op::PoseTriangulation::reconstructArray(const std::vector< std::vector< Array< float >>> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) const'],['../classop_1_1_pose_triangulation.html#a519abdb2477c518a565803a5ef5bdc1e',1,'op::PoseTriangulation::reconstructArray(const std::vector< Array< float >> &keypointsVector, const std::vector< Matrix > &cameraMatrices, const std::vector< Point< int >> &imageSizes) const']]], + ['record_1989',['record',['../classop_1_1_coco_json_saver.html#a2c3777cb55d09ee3911d4ed69334b17f',1,'op::CocoJsonSaver']]], + ['rectangle_1990',['Rectangle',['../structop_1_1_rectangle.html#a0112ddaa9782f3ccbb76a319b05f030b',1,'op::Rectangle::Rectangle(const T x=0, const T y=0, const T width=0, const T height=0)'],['../structop_1_1_rectangle.html#afbb0da8956e35178d3f28d2b1d998175',1,'op::Rectangle::Rectangle(const Rectangle< T > &rectangle)'],['../structop_1_1_rectangle.html#a5a9a60fdfd9c88ab8ded6275d64333ea',1,'op::Rectangle::Rectangle(Rectangle< T > &&rectangle)']]], + ['refineandsaveextrinsics_1991',['refineAndSaveExtrinsics',['../namespaceop.html#a50526c188f2ba94b07e0945c0871fd2c',1,'op']]], + ['release_1992',['release',['../classop_1_1_spinnaker_wrapper.html#a6e66639ee75708486b3d9aa4598607c0',1,'op::SpinnakerWrapper::release()'],['../classop_1_1_video_capture_reader.html#a2f73e10efe7f9b24a6cb75af2167de58',1,'op::VideoCaptureReader::release()'],['../classop_1_1_producer.html#a7753ffb0daa486ab0f82873b3567f95e',1,'op::Producer::release()'],['../classop_1_1_image_directory_reader.html#af74e192f8cba5c10f8e252674a85185a',1,'op::ImageDirectoryReader::release()'],['../classop_1_1_flir_reader.html#ab28f40422c9edff8594d855bbef91f58',1,'op::FlirReader::release()']]], + ['remove0sfromstring_1993',['remove0sFromString',['../namespaceop.html#a2f610ba8a71cf16628df2f4d270b7d34',1,'op']]], + ['removeallocurrencesofsubstring_1994',['removeAllOcurrencesOfSubString',['../namespaceop.html#a82471a2af285bada830bac3c95a8440b',1,'op']]], + ['removepeoplebelowthresholdsandfillfaces_1995',['removePeopleBelowThresholdsAndFillFaces',['../namespaceop.html#ae01dd412590493f5f732594e8332d3f0',1,'op']]], + ['removespecialscharacters_1996',['removeSpecialsCharacters',['../namespaceop.html#a8664658afa7be03e173cec9eff2873ad',1,'op']]], + 
['renderer_1997',['Renderer',['../classop_1_1_renderer.html#a00caf604fad781dfcf3bc311ef6a6623',1,'op::Renderer']]], + ['renderface_1998',['renderFace',['../classop_1_1_face_renderer.html#acbbdaca16f4115a5a68d006f4f325397',1,'op::FaceRenderer']]], + ['renderfaceinherited_1999',['renderFaceInherited',['../classop_1_1_face_gpu_renderer.html#ae54b7538a6ed6a5eaedcbdc117a0d61c',1,'op::FaceGpuRenderer::renderFaceInherited()'],['../classop_1_1_face_cpu_renderer.html#aa2f7c7f0a5a8df2dbb94c8a01fa41e2a',1,'op::FaceCpuRenderer::renderFaceInherited()']]], + ['renderfacekeypointscpu_2000',['renderFaceKeypointsCpu',['../namespaceop.html#a5fc85e8500dbeda3b75c1b6ecfac91cd',1,'op']]], + ['renderfacekeypointsgpu_2001',['renderFaceKeypointsGpu',['../namespaceop.html#ab8b2748a5bcf823e59b66549e6a24cfe',1,'op']]], + ['renderhand_2002',['renderHand',['../classop_1_1_hand_renderer.html#a2693c02336fb373a42405ccc7cff29bd',1,'op::HandRenderer']]], + ['renderhandinherited_2003',['renderHandInherited',['../classop_1_1_hand_gpu_renderer.html#a8206b59519e8214e06af9994a6038dae',1,'op::HandGpuRenderer::renderHandInherited()'],['../classop_1_1_hand_cpu_renderer.html#ae9e43ff22b0aae81dd88df3a313b0b0f',1,'op::HandCpuRenderer::renderHandInherited()']]], + ['renderhandkeypointscpu_2004',['renderHandKeypointsCpu',['../namespaceop.html#afb5b711819f94b51f32460861d9cea38',1,'op']]], + ['renderhandkeypointsgpu_2005',['renderHandKeypointsGpu',['../namespaceop.html#a865db81a5bc4f81cf9fc7c7f3ce81be3',1,'op']]], + ['renderkeypointscpu_2006',['renderKeypointsCpu',['../namespaceop.html#aa7803aa62abc21471e7d966bd674a81a',1,'op']]], + ['renderpose_2007',['renderPose',['../classop_1_1_pose_renderer.html#ad1e96ceb47bf205b56b50c6b2792f9e3',1,'op::PoseRenderer::renderPose()'],['../classop_1_1_pose_gpu_renderer.html#a4705b3c47cd9ac8174e357999960a28f',1,'op::PoseGpuRenderer::renderPose()'],['../classop_1_1_pose_cpu_renderer.html#a98541b982847713472411402314efd96',1,'op::PoseCpuRenderer::renderPose()']]], + ['renderposedistancegpu_2008',['renderPoseDistanceGpu',['../namespaceop.html#a056c64afca17423e038590e4ef2f712b',1,'op']]], + ['renderposeheatmapgpu_2009',['renderPoseHeatMapGpu',['../namespaceop.html#a3ceb3476e4154a6e9e06b3613a12c040',1,'op']]], + ['renderposeheatmapsgpu_2010',['renderPoseHeatMapsGpu',['../namespaceop.html#aa1225091307f8d0bf07dd032389f8961',1,'op']]], + ['renderposekeypointscpu_2011',['renderPoseKeypointsCpu',['../namespaceop.html#a99a08148f440bd96546076e15f0de04c',1,'op']]], + ['renderposekeypointsgpu_2012',['renderPoseKeypointsGpu',['../namespaceop.html#ad0069d4c6204b35893f4158d04d615f1',1,'op']]], + ['renderposepafgpu_2013',['renderPosePAFGpu',['../namespaceop.html#a9275c58ba881ea94e054117392a67381',1,'op']]], + ['renderposepafsgpu_2014',['renderPosePAFsGpu',['../namespaceop.html#a3ba62b3d5cc275fc1700bf0c5e6bf578',1,'op']]], + ['reorderandnormalize_2015',['reorderAndNormalize',['../namespaceop.html#a8587bab6b02056384b7c424555cd50d8',1,'op']]], + ['replaceall_2016',['replaceAll',['../namespaceop.html#a5fe477200af87dadb07c8d6a75b4414b',1,'op']]], + ['reset_2017',['reset',['../classop_1_1_thread_manager.html#a5b7c5ea46c360496e261094c5e1397a7',1,'op::ThreadManager::reset()'],['../classop_1_1_array.html#a3252c38318d81a8b8fb6f71f4d4c2642',1,'op::Array::reset(const std::vector< int > &sizes, T *const dataPtr)'],['../classop_1_1_array.html#ae0c3d1a662f6c213da16ac87e53120fc',1,'op::Array::reset(const int size, T *const dataPtr)'],['../classop_1_1_array.html#add2eeccd967cdf0900449649cb6f5afb',1,'op::Array::reset(const std::vector< int 
> &sizes, const T value)'],['../classop_1_1_array.html#ac7183eb2f4e78a6941da3a2079b9ed32',1,'op::Array::reset(const int size, const T value)'],['../classop_1_1_array.html#a0ad0232daa69783cf2c8f7a0ff5b3b0c',1,'op::Array::reset(const std::vector< int > &sizes={})'],['../classop_1_1_array.html#a12e538b09e98bf0900163031602ed2ed',1,'op::Array::reset(const int size)']]], + ['resetwebcam_2018',['resetWebcam',['../classop_1_1_video_capture_reader.html#ab85b68c93854dd7c2ad437477e819506',1,'op::VideoCaptureReader']]], + ['reshape_2019',['Reshape',['../classop_1_1_array_cpu_gpu.html#a1cc1cc3226543f5a2eb4c8ddcb5ec8a5',1,'op::ArrayCpuGpu::Reshape()'],['../classop_1_1_resize_and_merge_caffe.html#abd4c8a363c569fbb4187cd928c481334',1,'op::ResizeAndMergeCaffe::Reshape()'],['../classop_1_1_nms_caffe.html#abe113059484596e82efd8b5f6d346ab5',1,'op::NmsCaffe::Reshape()'],['../classop_1_1_maximum_caffe.html#ab9fb5ce2358801ac4e85fa25f052baa4',1,'op::MaximumCaffe::Reshape()'],['../classop_1_1_body_part_connector_caffe.html#abf26360f5d25fab82705270dae5f5d86',1,'op::BodyPartConnectorCaffe::Reshape()'],['../classop_1_1_array_cpu_gpu.html#a9e3c6d812430d638187441e9d5cacfcc',1,'op::ArrayCpuGpu::Reshape()']]], + ['resizeandmergecaffe_2020',['ResizeAndMergeCaffe',['../classop_1_1_resize_and_merge_caffe.html#a30805a265fa887eff04b1200dbda91a5',1,'op::ResizeAndMergeCaffe']]], + ['resizeandmergecpu_2021',['resizeAndMergeCpu',['../namespaceop.html#adb8ffc1a6a2cc2949d80d8e8ad4e2190',1,'op']]], + ['resizeandmergegpu_2022',['resizeAndMergeGpu',['../namespaceop.html#a8982332c4263696d0e023997f0e4c753',1,'op']]], + ['resizeandmergeocl_2023',['resizeAndMergeOcl',['../namespaceop.html#a97b053019720782f2f81bc1b41f036d6',1,'op']]], + ['resizeandpadrbggpu_2024',['resizeAndPadRbgGpu',['../namespaceop.html#ad5495d8c6a65afbedef3af7a8844bfcc',1,'op::resizeAndPadRbgGpu(T *targetPtr, const T *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)'],['../namespaceop.html#a2f1ef915c8efc724c0bf40f0348f20a2',1,'op::resizeAndPadRbgGpu(T *targetPtr, const unsigned char *const srcPtr, const int sourceWidth, const int sourceHeight, const int targetWidth, const int targetHeight, const T scaleFactor)']]], + ['resizegetscalefactor_2025',['resizeGetScaleFactor',['../namespaceop.html#a24ebdcb8395dea0429f220de6a715d6e',1,'op']]], + ['rotateandflipframe_2026',['rotateAndFlipFrame',['../namespaceop.html#af65d1b7c5b708f30780e4b2bcfccedcb',1,'op']]], + ['rows_2027',['rows',['../classop_1_1_matrix.html#a3099b24c0ee295014d95c99a20615fdd',1,'op::Matrix']]] +]; diff --git a/web/html/doc/search/functions_12.html b/web/html/doc/search/functions_12.html new file mode 100644 index 000000000..eb29d8f9a --- /dev/null +++ b/web/html/doc/search/functions_12.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/functions_12.js b/web/html/doc/search/functions_12.js new file mode 100644 index 000000000..d307c0d5d --- /dev/null +++ b/web/html/doc/search/functions_12.js @@ -0,0 +1,76 @@ +var searchData= +[ + ['save_2028',['save',['../classop_1_1_people_json_saver.html#ac0c0609652f89a3de44bcc940a82e235',1,'op::PeopleJsonSaver']]], + ['savedata_2029',['saveData',['../namespaceop.html#aafac1158605748694e3c3ed4eb34b3b7',1,'op::saveData(const std::vector< Matrix > &opMats, const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)'],['../namespaceop.html#a7b9bcb57dd8488ade8ea288342eaed08',1,'op::saveData(const Matrix &opMat, const std::string cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)']]], + ['savefloatarray_2030',['saveFloatArray',['../namespaceop.html#ac1080e627185a65b88ec788184a95552',1,'op']]], + ['saveheatmaps_2031',['saveHeatMaps',['../classop_1_1_heat_map_saver.html#a48aeaad854bfff14fd2642dc13071137',1,'op::HeatMapSaver']]], + ['saveimage_2032',['saveImage',['../namespaceop.html#a8c9d3469086a12607b097731848b6dea',1,'op']]], + ['saveimages_2033',['saveImages',['../classop_1_1_image_saver.html#a8c1f4ae3799db276753707879e59bee1',1,'op::ImageSaver::saveImages(const std::vector< Matrix > &matOutputDatas, const std::string &fileName) const'],['../classop_1_1_image_saver.html#a0262916d0af8cc3be81b3375e0520e62',1,'op::ImageSaver::saveImages(const Matrix &cvOutputData, const std::string &fileName) const']]], + ['savekeypoints_2034',['saveKeypoints',['../classop_1_1_keypoint_saver.html#aad663949dc5f2262ebdc69ed0d0caa1b',1,'op::KeypointSaver']]], + ['savepeoplejson_2035',['savePeopleJson',['../namespaceop.html#a1e986a510a29bfd8c682f65a8b399551',1,'op::savePeopleJson(const std::vector< std::pair< Array< float >, std::string >> &keypointVector, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &fileName, const bool humanReadable)'],['../namespaceop.html#af9c189f7c80092570699c8b9d5686fea',1,'op::savePeopleJson(const Array< float > &keypoints, const std::vector< std::vector< std::array< float, 3 >>> &candidates, const std::string &keypointName, const std::string &fileName, const bool humanReadable)']]], + ['scale_2036',['scale',['../classop_1_1_keypoint_scaler.html#a687a64bbca93d54292f191762efe20d7',1,'op::KeypointScaler::scale(std::vector< Array< float >> &arraysToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const'],['../classop_1_1_keypoint_scaler.html#a9c2d575ce49bb6112b2a099cb92f07cc',1,'op::KeypointScaler::scale(Array< float > &arrayToScale, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const'],['../classop_1_1_keypoint_scaler.html#a42e46aea4d43fcf0886f06c9700148ea',1,'op::KeypointScaler::scale(std::vector< std::vector< std::array< float, 3 >>> &poseCandidates, const double scaleInputToOutput, const double scaleNetToOutput, const Point< int > &producerSize) const']]], + ['scale_5fdata_2037',['scale_data',['../classop_1_1_array_cpu_gpu.html#a16dc8c19cc0b0442c1be6c859fe7d33c',1,'op::ArrayCpuGpu']]], + ['scale_5fdiff_2038',['scale_diff',['../classop_1_1_array_cpu_gpu.html#a0a307838959472e8e8815d76305c1bf6',1,'op::ArrayCpuGpu']]], + ['scaleandsizeextractor_2039',['ScaleAndSizeExtractor',['../classop_1_1_scale_and_size_extractor.html#a4618beea6f87df0c4eac6c6a204bd269',1,'op::ScaleAndSizeExtractor']]], + 
['scalekeypoints_2040',['scaleKeypoints',['../namespaceop.html#ac5fc565b24e499e306ca170b9139eeb6',1,'op']]], + ['scalekeypoints2d_2041',['scaleKeypoints2d',['../namespaceop.html#aae9e38fa6c56e188b4f649732f0d4cd3',1,'op::scaleKeypoints2d(Array< T > &keypoints, const T scaleX, const T scaleY)'],['../namespaceop.html#a6b9adf8f7e698e566414c9f44f0c85f1',1,'op::scaleKeypoints2d(Array< T > &keypoints, const T scaleX, const T scaleY, const T offsetX, const T offsetY)']]], + ['sendjointangles_2042',['sendJointAngles',['../classop_1_1_udp_sender.html#a2e8b52e1fd5a3383ebc9063ce21f6f06',1,'op::UdpSender']]], + ['set_2043',['set',['../classop_1_1_image_directory_reader.html#a1965a4dca2ddb86b24e69e2da90b9dbf',1,'op::ImageDirectoryReader::set()'],['../classop_1_1_pose_extractor_net.html#a7e49f2339e21ff784689ec78c9d69b75',1,'op::PoseExtractorNet::set()'],['../classop_1_1_flir_reader.html#af14f63c79272781429341dc3a0720485',1,'op::FlirReader::set()'],['../classop_1_1_webcam_reader.html#ae66b26829cc2d6e3f02109d4431a7bc2',1,'op::WebcamReader::set()'],['../classop_1_1_video_reader.html#a0dd53334327642368d41ec860e64e756',1,'op::VideoReader::set()'],['../classop_1_1_video_capture_reader.html#ab2929b7d2d002b58ebaf7b9b56999cca',1,'op::VideoCaptureReader::set()'],['../classop_1_1_producer.html#af11f1bbfbd61b9534c02c3e4839e19b0',1,'op::Producer::set(const ProducerProperty property, const double value)'],['../classop_1_1_producer.html#ab30c7b3e34d962e0b17458d9a0947f6b',1,'op::Producer::set(const int capProperty, const double value)=0'],['../classop_1_1_ip_camera_reader.html#ac13cc7da97a31d8e69eaa64b2a7b31ba',1,'op::IpCameraReader::set()']]], + ['set_5fcpu_5fdata_2044',['set_cpu_data',['../classop_1_1_array_cpu_gpu.html#ad6e6a4da3987e9558d89b877f9ec7e82',1,'op::ArrayCpuGpu']]], + ['set_5fgpu_5fdata_2045',['set_gpu_data',['../classop_1_1_array_cpu_gpu.html#a4dd6f5e4d7f54e921c7c296078a594f0',1,'op::ArrayCpuGpu']]], + ['setalphaheatmap_2046',['setAlphaHeatMap',['../classop_1_1_renderer.html#abaea1725725ff775aed0c120b2ba3d1f',1,'op::Renderer']]], + ['setalphakeypoint_2047',['setAlphaKeypoint',['../classop_1_1_renderer.html#a039e88897ed844551cadb115ea98e9ef',1,'op::Renderer']]], + ['setblendoriginalframe_2048',['setBlendOriginalFrame',['../classop_1_1_renderer.html#aa8339054ed113d99ca70208d0cee5aa9',1,'op::Renderer']]], + ['setdefaultmaxsizequeues_2049',['setDefaultMaxSizeQueues',['../classop_1_1_thread_manager.html#ace408d1d281193a9f3d3d6561181ef56',1,'op::ThreadManager::setDefaultMaxSizeQueues()'],['../classop_1_1_wrapper_t.html#aa89055f5cf4e762071479f5fec8d2faf',1,'op::WrapperT::setDefaultMaxSizeQueues()']]], + ['setdefaultnmsthreshold_2050',['setDefaultNmsThreshold',['../classop_1_1_body_part_connector_caffe.html#af4520659b0cfb644a3c2d6ceb0e81a21',1,'op::BodyPartConnectorCaffe']]], + ['setdefaultx_2051',['setDefaultX',['../classop_1_1_profiler.html#a2dc5cfa5fd91d4cadaa7e66695f1ee51',1,'op::Profiler']]], + ['setelementtorender_2052',['setElementToRender',['../classop_1_1_renderer.html#a9d46c28d88225af94468c757ab1b26c1',1,'op::Renderer::setElementToRender(const ElementToRender elementToRender)'],['../classop_1_1_renderer.html#afd48a9cb0be184303dce2969fa2f8e02',1,'op::Renderer::setElementToRender(const int elementToRender)']]], + ['setenabled_2053',['setEnabled',['../classop_1_1_hand_extractor_net.html#ab59a77d051991734b0c74b122671f097',1,'op::HandExtractorNet::setEnabled()'],['../classop_1_1_face_extractor_net.html#a6c00e96ddf7465062d6f0b51a7a1348d',1,'op::FaceExtractorNet::setEnabled()']]], + 
['seterrormodes_2054',['setErrorModes',['../namespaceop_1_1_configure_error.html#a96e56b0ddbe2cb17443b93aaba05d672',1,'op::ConfigureError']]], + ['setfrom_2055',['setFrom',['../classop_1_1_array.html#a9f4b51216faaa967d81598a0cedcf78f',1,'op::Array']]], + ['setfullscreenmode_2056',['setFullScreenMode',['../classop_1_1_frame_displayer.html#a2df856e4cf7542c7cda2757553674fb8',1,'op::FrameDisplayer']]], + ['setimage_2057',['setImage',['../classop_1_1_gui.html#a8fc6182d0124dd24e26e0fc139074061',1,'op::Gui::setImage(const Matrix &cvMatOutput)'],['../classop_1_1_gui.html#abeff19fe8eceeacfb9115a059cdde4ad',1,'op::Gui::setImage(const std::vector< Matrix > &cvMatOutputs)']]], + ['setinterminabovethreshold_2058',['setInterMinAboveThreshold',['../classop_1_1_body_part_connector_caffe.html#a789c77e69e5590a78b22e1e5f5cc4efc',1,'op::BodyPartConnectorCaffe']]], + ['setinterthreshold_2059',['setInterThreshold',['../classop_1_1_body_part_connector_caffe.html#a75d0a3f3c8dca99c8a9e1b680098da16',1,'op::BodyPartConnectorCaffe']]], + ['setkeypoints_2060',['setKeypoints',['../classop_1_1_gui3_d.html#abd245c07a53d1d25c237aff22a2b6e6f',1,'op::Gui3D']]], + ['setlogmodes_2061',['setLogModes',['../namespaceop_1_1_configure_log.html#a2f41e9a74bbda434ef16189c32a13aba',1,'op::ConfigureLog']]], + ['setmainthread_2062',['setMainThread',['../namespaceop.html#a7eb0121791185c13a6c3dd88994e0eab',1,'op']]], + ['setmaximizepositives_2063',['setMaximizePositives',['../classop_1_1_body_part_connector_caffe.html#a6d859f2e218b1ea707fddcaf0911886d',1,'op::BodyPartConnectorCaffe']]], + ['setminsubsetcnt_2064',['setMinSubsetCnt',['../classop_1_1_body_part_connector_caffe.html#a6442721373481873ddeb9ffd7c6fdb7b',1,'op::BodyPartConnectorCaffe']]], + ['setminsubsetscore_2065',['setMinSubsetScore',['../classop_1_1_body_part_connector_caffe.html#a9b9fa9490fef0121a70c3d6d749272f7',1,'op::BodyPartConnectorCaffe']]], + ['setoffset_2066',['setOffset',['../classop_1_1_nms_caffe.html#a1dd658e4bc9e080867a99e9b57f1baa8',1,'op::NmsCaffe']]], + ['setposemodel_2067',['setPoseModel',['../classop_1_1_body_part_connector_caffe.html#a104744fdab14d4c1335eb8778edea21e',1,'op::BodyPartConnectorCaffe']]], + ['setprioritythreshold_2068',['setPriorityThreshold',['../namespaceop_1_1_configure_log.html#a149393c3c87c82a5cf14417c6b430d30',1,'op::ConfigureLog']]], + ['setproducerfpsmode_2069',['setProducerFpsMode',['../classop_1_1_producer.html#a024e55b4ec769cdbc40ee21613a6ef6f',1,'op::Producer']]], + ['setscalenettooutput_2070',['setScaleNetToOutput',['../classop_1_1_body_part_connector_caffe.html#a0bad959b2da005b62cab962327ccba01',1,'op::BodyPartConnectorCaffe']]], + ['setscaleratios_2071',['setScaleRatios',['../classop_1_1_resize_and_merge_caffe.html#aa16862bbc207fef227d53d37223512b8',1,'op::ResizeAndMergeCaffe']]], + ['setsharedparameters_2072',['setSharedParameters',['../classop_1_1_op_output_to_cv_mat.html#af150c89ff5edbe4f4bd727b7162e9b36',1,'op::OpOutputToCvMat::setSharedParameters()'],['../classop_1_1_gpu_renderer.html#acc83c7b857db7d35132febaebfcb84df',1,'op::GpuRenderer::setSharedParameters(const std::tuple< std::shared_ptr< float * >, std::shared_ptr< bool >, std::shared_ptr< unsigned long long >> &tuple)']]], + ['setsharedparametersandiflast_2073',['setSharedParametersAndIfLast',['../classop_1_1_gpu_renderer.html#afa58647bfd9efa02629e4b81bbe48c6e',1,'op::GpuRenderer']]], + ['setshowgooglyeyes_2074',['setShowGooglyEyes',['../classop_1_1_renderer.html#ab226d47f554735fa3e0372ce429747c3',1,'op::Renderer']]], + 
['setthreshold_2075',['setThreshold',['../classop_1_1_nms_caffe.html#a5f257eb561fc705c2b74489b12269b49',1,'op::NmsCaffe']]], + ['setto_2076',['setTo',['../classop_1_1_matrix.html#ad74c013aa1aaed93271275cce6c77972',1,'op::Matrix::setTo()'],['../classop_1_1_array.html#a28f09d11de753a741334ee8094296acb',1,'op::Array::setTo()']]], + ['setundistortimage_2077',['setUndistortImage',['../classop_1_1_camera_parameter_reader.html#ae33e8637012413719b389649d1e5448a',1,'op::CameraParameterReader']]], + ['setworker_2078',['setWorker',['../classop_1_1_wrapper_t.html#a0b502ef38ee46749733ae3dda7e5fd31',1,'op::WrapperT']]], + ['shape_2079',['shape',['../classop_1_1_array_cpu_gpu.html#af817bde68da318a8f9dd08feabe3c286',1,'op::ArrayCpuGpu::shape() const'],['../classop_1_1_array_cpu_gpu.html#a3e44f7ede3ff5ef0db6df30ecd4562c5',1,'op::ArrayCpuGpu::shape(const int index) const']]], + ['shape_5fstring_2080',['shape_string',['../classop_1_1_array_cpu_gpu.html#a425d12f8d702ac9a57fb9a5f48cea152',1,'op::ArrayCpuGpu']]], + ['size_2081',['size',['../classop_1_1_matrix.html#a93188dad84f0f0a20f7a631edd32a620',1,'op::Matrix::size()'],['../classop_1_1_queue_base.html#a8fd69ac0ffcda02d0d26102e2ebd2841',1,'op::QueueBase::size()']]], + ['spinnakerwrapper_2082',['SpinnakerWrapper',['../classop_1_1_spinnaker_wrapper.html#a5d1ba90b4d1987423b330de2fdcdb702',1,'op::SpinnakerWrapper']]], + ['splitcvmatintovectormatrix_2083',['splitCvMatIntoVectorMatrix',['../classop_1_1_matrix.html#a042f230ce1b1f56458f90e09d7b4e7e4',1,'op::Matrix']]], + ['splitstring_2084',['splitString',['../namespaceop.html#ae80a103d8a4308bc435342b3d31404c8',1,'op']]], + ['start_2085',['start',['../classop_1_1_wrapper_t.html#a8111d8cdb984e996410ace159a896992',1,'op::WrapperT::start()'],['../classop_1_1_thread_manager.html#a01c2d62e539896e36564457ab9cac25c',1,'op::ThreadManager::start()']]], + ['startinthread_2086',['startInThread',['../classop_1_1_thread.html#ac898abffd6ed18456b97ef1b72935ec6',1,'op::Thread']]], + ['step1_2087',['step1',['../classop_1_1_matrix.html#a41ec72e2d80f73025c4c0837364c8193',1,'op::Matrix']]], + ['stop_2088',['stop',['../classop_1_1_wrapper_t.html#a061ea09aac902a8a44438feffd18998f',1,'op::WrapperT::stop()'],['../classop_1_1_worker.html#ae45ac828f6e8f6055203c224e50f145b',1,'op::Worker::stop()'],['../classop_1_1_thread_manager.html#a472a1ebee700c3449bac4d6d2bb0c3a8',1,'op::ThreadManager::stop()'],['../classop_1_1_queue_base.html#a68b51dafaba93179fcef78731aaf1703',1,'op::QueueBase::stop()']]], + ['stopandjoin_2089',['stopAndJoin',['../classop_1_1_thread.html#a92e5dd0f60a0485e7d0fad3e82bb74f3',1,'op::Thread']]], + ['stoppusher_2090',['stopPusher',['../classop_1_1_queue_base.html#a32ac0e4b14a310aee62ce817e86c0356',1,'op::QueueBase']]], + ['string_2091',['String',['../classop_1_1_string.html#a5848aace0a849fafffb3a2ae78d05156',1,'op::String::String(const std::string &string)'],['../classop_1_1_string.html#ad8384eb6141b3fc53e5dc246be77cf6c',1,'op::String::String(const char *charPtr)'],['../classop_1_1_string.html#a5f1c9f53adedf082ee0cad43fa6140be',1,'op::String::String()']]], + ['stringtodataformat_2092',['stringToDataFormat',['../namespaceop.html#a46e815df32db67d78a94367b7f97df25',1,'op']]], + ['subthread_2093',['SubThread',['../classop_1_1_sub_thread.html#aa551df0d8f0b30aaf9e0840ecf29d749',1,'op::SubThread']]], + ['subthreadnoqueue_2094',['SubThreadNoQueue',['../classop_1_1_sub_thread_no_queue.html#afbaf89d7a8fb4d19f67064fb954a31eb',1,'op::SubThreadNoQueue']]], + 
['subthreadqueuein_2095',['SubThreadQueueIn',['../classop_1_1_sub_thread_queue_in.html#a11aa71a818430c4eb435a1626e54f29a',1,'op::SubThreadQueueIn']]], + ['subthreadqueueinout_2096',['SubThreadQueueInOut',['../classop_1_1_sub_thread_queue_in_out.html#aa5b9beea615b8b968c5da74dd66a6d78',1,'op::SubThreadQueueInOut']]], + ['subthreadqueueout_2097',['SubThreadQueueOut',['../classop_1_1_sub_thread_queue_out.html#aa4a827932f632f1f30b5650a4fcc77ff',1,'op::SubThreadQueueOut']]], + ['sumsq_5fdata_2098',['sumsq_data',['../classop_1_1_array_cpu_gpu.html#a6dd38e027220beada2f8f55f9d073d53',1,'op::ArrayCpuGpu']]], + ['sumsq_5fdiff_2099',['sumsq_diff',['../classop_1_1_array_cpu_gpu.html#a280202f2a968ea68795d31accf5072bb',1,'op::ArrayCpuGpu']]], + ['switchfullscreenmode_2100',['switchFullScreenMode',['../classop_1_1_frame_displayer.html#ad83a47005c52f066587f49d62c109802',1,'op::FrameDisplayer']]] +]; diff --git a/web/html/doc/search/functions_13.html b/web/html/doc/search/functions_13.html new file mode 100644 index 000000000..3da2ea69c --- /dev/null +++ b/web/html/doc/search/functions_13.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
Loading...
Searching...
No Matches
    + + diff --git a/web/html/doc/search/functions_13.js b/web/html/doc/search/functions_13.js new file mode 100644 index 000000000..6918aaa49 --- /dev/null +++ b/web/html/doc/search/functions_13.js @@ -0,0 +1,24 @@ +var searchData= +[ + ['thread_2101',['Thread',['../classop_1_1_thread.html#a6ae463dc996ca6941a303b0c41288063',1,'op::Thread::Thread(Thread &&t)'],['../classop_1_1_thread.html#a9d3408a329a475da22a8e2a0bdf5f68d',1,'op::Thread::Thread(const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr=nullptr)']]], + ['threadidpp_2102',['threadIdPP',['../namespaceop.html#af65a4564afcad06b72468679f6bee52b',1,'op']]], + ['threadmanager_2103',['ThreadManager',['../classop_1_1_thread_manager.html#a8b7d17f4a330495389e646bb21907303',1,'op::ThreadManager']]], + ['timerend_2104',['timerEnd',['../classop_1_1_profiler.html#affff5b0b992e05276b2a699f97ad525d',1,'op::Profiler']]], + ['timerinit_2105',['timerInit',['../classop_1_1_profiler.html#aea2c89bcd0a661a53f47e59b9bec2be7',1,'op::Profiler']]], + ['tofixedlengthstring_2106',['toFixedLengthString',['../namespaceop.html#a42292d44d10f55cb1d83a296183e9b31',1,'op']]], + ['tolower_2107',['toLower',['../namespaceop.html#a3290f48d24c9992dd00d339ce49cfac7',1,'op']]], + ['topleft_2108',['topLeft',['../structop_1_1_rectangle.html#a640050d6186148b425bedba8c33cf1ea',1,'op::Rectangle']]], + ['tostring_2109',['toString',['../classop_1_1_array.html#af488c66ddac6cb75f7690ba8207599ed',1,'op::Array::toString()'],['../structop_1_1_point.html#a73d1088b5d0f3370499ca5c6e80b544a',1,'op::Point::toString()'],['../structop_1_1_rectangle.html#af1c7f96c34132924fa9237248894e63d',1,'op::Rectangle::toString()']]], + ['total_2110',['total',['../classop_1_1_matrix.html#a09859c3f88b8c75c7d12f53667304450',1,'op::Matrix']]], + ['toupper_2111',['toUpper',['../namespaceop.html#a7a815e303884fb2b3346c8cc19d61b23',1,'op']]], + ['track_2112',['track',['../classop_1_1_pose_extractor.html#ab464ae97522336cf69dec1c1561c431d',1,'op::PoseExtractor::track()'],['../classop_1_1_person_tracker.html#a05eaf85bd389ad965f9960c9db31d873',1,'op::PersonTracker::track()']]], + ['trackhands_2113',['trackHands',['../classop_1_1_hand_detector.html#a963972f9ecb769786b5f60018da443e4',1,'op::HandDetector']]], + ['tracklockthread_2114',['trackLockThread',['../classop_1_1_pose_extractor.html#adab126d32216aa9a27cc78d7158d6616',1,'op::PoseExtractor::trackLockThread()'],['../classop_1_1_person_tracker.html#a35cd3cd6c946f560220c9459a5dd7ee7',1,'op::PersonTracker::trackLockThread()']]], + ['transpose_2115',['transpose',['../namespaceop.html#a75c4194e0eae0ef28c6829def462dad2',1,'op']]], + ['tryemplace_2116',['tryEmplace',['../classop_1_1_wrapper_t.html#a79fa1a518495e1e3684f05943d1c04f8',1,'op::WrapperT::tryEmplace()'],['../classop_1_1_thread_manager.html#a8d5ffd9473557ff0f90ac1c6a1bae3ad',1,'op::ThreadManager::tryEmplace()'],['../classop_1_1_queue_base.html#a7905841f953be7099847cc7b5b17ae0c',1,'op::QueueBase::tryEmplace(TDatums &tDatums)']]], + ['trypop_2117',['tryPop',['../classop_1_1_queue_base.html#a5e52b4ab7e310373e3d1f1d42cfe4549',1,'op::QueueBase::tryPop()'],['../classop_1_1_wrapper_t.html#a55af0ab1f0ea4329f2c0bb3feb92b835',1,'op::WrapperT::tryPop()'],['../classop_1_1_thread_manager.html#a59916fc3428aaf5c487e1dd373d437cd',1,'op::ThreadManager::tryPop()'],['../classop_1_1_queue_base.html#a80c6e2dda17afa82aae83aeadad1f7e0',1,'op::QueueBase::tryPop()']]], + 
['trypush_2118',['tryPush',['../classop_1_1_wrapper_t.html#a4d9396d4490b90f32a45d4a80d2cd5c7',1,'op::WrapperT::tryPush()'],['../classop_1_1_thread_manager.html#a7a24fd902ebd4b5fd81166547a5654d9',1,'op::ThreadManager::tryPush()'],['../classop_1_1_queue_base.html#a35f0547f6020f22e49835b147b7ec52e',1,'op::QueueBase::tryPush()']]], + ['trystop_2119',['tryStop',['../classop_1_1_worker.html#ad689b232d68f3b3e0b41f9e219b01134',1,'op::Worker::tryStop()'],['../classop_1_1_w_queue_orderer.html#a01bc7495ec992cc9c54a040534cb3634',1,'op::WQueueOrderer::tryStop()']]], + ['ttostring_2120',['tToString',['../namespaceop.html#af548fe1a2ad2b392a25afe9b0b87b8dd',1,'op']]], + ['type_2121',['type',['../classop_1_1_matrix.html#aac1572705e72a18198a8b2d32d1b5c24',1,'op::Matrix::type()'],['../classop_1_1_body_part_connector_caffe.html#aec0d6f32107a6222406e73ca9ae4942d',1,'op::BodyPartConnectorCaffe::type()'],['../classop_1_1_maximum_caffe.html#a8d047aa2e08e49199f9f422191e9bdd2',1,'op::MaximumCaffe::type()'],['../classop_1_1_nms_caffe.html#adc88733fceaefc359a95f067c62c3b07',1,'op::NmsCaffe::type()'],['../classop_1_1_resize_and_merge_caffe.html#a90e4af20eee1bfaf152937199f3ad068',1,'op::ResizeAndMergeCaffe::type()']]] +]; diff --git a/web/html/doc/search/functions_14.html b/web/html/doc/search/functions_14.html new file mode 100644 index 000000000..29237b44c --- /dev/null +++ b/web/html/doc/search/functions_14.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/functions_14.js b/web/html/doc/search/functions_14.js new file mode 100644 index 000000000..b5bc0b984 --- /dev/null +++ b/web/html/doc/search/functions_14.js @@ -0,0 +1,15 @@ +var searchData= +[ + ['ucharcvmattofloatptr_2122',['uCharCvMatToFloatPtr',['../namespaceop.html#a532d08cb2ef011f9cad29c01d3431d6e',1,'op']]], + ['ucharimagecast_2123',['uCharImageCast',['../namespaceop.html#a6aeab543a61ef23ed58a6e29401424ae',1,'op']]], + ['ucharround_2124',['uCharRound',['../namespaceop.html#a61240e5fbd4ea84a2cfdc89407bcb1ae',1,'op']]], + ['udpsender_2125',['UdpSender',['../classop_1_1_udp_sender.html#a80fb12e5d4357e5dbb37c8a7b660c67c',1,'op::UdpSender']]], + ['uintround_2126',['uIntRound',['../namespaceop.html#a8525e440d6ac1b558e72637dc4f4e3c4',1,'op']]], + ['ulonglonground_2127',['uLongLongRound',['../namespaceop.html#a757a5cc88734e7be9e910e7d8213c282',1,'op']]], + ['ulonground_2128',['ulongRound',['../namespaceop.html#aaafe2e235a1a3a146bb026b71c521c7b',1,'op']]], + ['undistort_2129',['undistort',['../classop_1_1_camera_parameter_reader.html#aee02b82d0c5fd51dd3ba5a2267f7b370',1,'op::CameraParameterReader']]], + ['unrollarraytoucharcvmat_2130',['unrollArrayToUCharCvMat',['../namespaceop.html#a1910d9f194831570be6ffe683209e7b3',1,'op']]], + ['update_2131',['update',['../classop_1_1_gui.html#a8e9a67dd507598654a5db06273d50c94',1,'op::Gui::update()'],['../classop_1_1_gui3_d.html#af10162684889706894f13a308970ba32',1,'op::Gui3D::update()']]], + ['update_2132',['Update',['../classop_1_1_array_cpu_gpu.html#af9f32307732772c708ff45c52b4e7dd0',1,'op::ArrayCpuGpu']]], + ['updatetracker_2133',['updateTracker',['../classop_1_1_hand_detector.html#a58513169f01ab7c705979f1f2a88b571',1,'op::HandDetector']]] +]; diff --git a/web/html/doc/search/functions_15.html b/web/html/doc/search/functions_15.html new file mode 100644 index 000000000..6d5decd70 --- /dev/null +++ b/web/html/doc/search/functions_15.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/functions_15.js b/web/html/doc/search/functions_15.js new file mode 100644 index 000000000..12f4ac3cb --- /dev/null +++ b/web/html/doc/search/functions_15.js @@ -0,0 +1,9 @@ +var searchData= +[ + ['vectorsareequal_2134',['vectorsAreEqual',['../namespaceop.html#af63e418966741f7efebacc9519174a0a',1,'op']]], + ['verboseprinter_2135',['VerbosePrinter',['../classop_1_1_verbose_printer.html#a79d2dc59b75a0164f60d875ef78523da',1,'op::VerbosePrinter']]], + ['version_2136',['version',['../classop_1_1_json_ofstream.html#a6a5e0e4f3cdf8f93fb1bdef8cb63b0a2',1,'op::JsonOfstream']]], + ['videocapturereader_2137',['VideoCaptureReader',['../classop_1_1_video_capture_reader.html#ae07295c083ce99b032ce219ea15405d9',1,'op::VideoCaptureReader::VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)'],['../classop_1_1_video_capture_reader.html#a3fe940326900ac6a2289de85664b14be',1,'op::VideoCaptureReader::VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)']]], + ['videoreader_2138',['VideoReader',['../classop_1_1_video_reader.html#a219e3901e489a293e85fe9a872e7fb78',1,'op::VideoReader']]], + ['videosaver_2139',['VideoSaver',['../classop_1_1_video_saver.html#a413aba00e90b40f6cd62144c98d7723c',1,'op::VideoSaver']]] +]; diff --git a/web/html/doc/search/functions_16.html b/web/html/doc/search/functions_16.html new file mode 100644 index 000000000..5aed7d093 --- /dev/null +++ b/web/html/doc/search/functions_16.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/functions_16.js b/web/html/doc/search/functions_16.js new file mode 100644 index 000000000..d77c4388e --- /dev/null +++ b/web/html/doc/search/functions_16.js @@ -0,0 +1,64 @@ +var searchData= +[ + ['waitandemplace_2140',['waitAndEmplace',['../classop_1_1_thread_manager.html#a36bd8060a4f7f449a8aa35d9a166270d',1,'op::ThreadManager::waitAndEmplace()'],['../classop_1_1_queue_base.html#a8a5d53c7b66fd0ef34b3e276f586e355',1,'op::QueueBase::waitAndEmplace()'],['../classop_1_1_wrapper_t.html#ae2c6cf519701c320ae53c597ae54a7aa',1,'op::WrapperT::waitAndEmplace(Matrix &matrix)'],['../classop_1_1_wrapper_t.html#a442ff1e4fec93ec28457f7c7c4b4bfbb',1,'op::WrapperT::waitAndEmplace(TDatumsSP &tDatums)']]], + ['waitandpop_2141',['waitAndPop',['../classop_1_1_queue_base.html#a2c7b3d0fa6502c644c3083dd68332542',1,'op::QueueBase::waitAndPop(TDatums &tDatums)'],['../classop_1_1_queue_base.html#a84da9e045acec02e3900153eea2bd92d',1,'op::QueueBase::waitAndPop()'],['../classop_1_1_thread_manager.html#a393a9f04c70a002f5ceb5e301eea5cff',1,'op::ThreadManager::waitAndPop()'],['../classop_1_1_wrapper_t.html#a3c3b605e0787b55ffd00725c09a1cd53',1,'op::WrapperT::waitAndPop()']]], + ['waitandpush_2142',['waitAndPush',['../classop_1_1_queue_base.html#ab28c5805dd23117c8d6d82d59617bb95',1,'op::QueueBase::waitAndPush()'],['../classop_1_1_thread_manager.html#abfa315257b3e8cd022573f439b4936ec',1,'op::ThreadManager::waitAndPush()'],['../classop_1_1_wrapper_t.html#a0e0aea3f8bf81458c0662c46f4d345d5',1,'op::WrapperT::waitAndPush(const TDatumsSP &tDatums)'],['../classop_1_1_wrapper_t.html#abcb907a2718260a14c0472279254df84',1,'op::WrapperT::waitAndPush(const Matrix &matrix)']]], + ['wcocojsonsaver_2143',['WCocoJsonSaver',['../classop_1_1_w_coco_json_saver.html#a508c1105406b3cc55dc6bd1b299f6ed3',1,'op::WCocoJsonSaver']]], + ['wcvmattoopinput_2144',['WCvMatToOpInput',['../classop_1_1_w_cv_mat_to_op_input.html#a82c13641d071fdb5db50afdee7cfa849',1,'op::WCvMatToOpInput']]], + ['wcvmattoopoutput_2145',['WCvMatToOpOutput',['../classop_1_1_w_cv_mat_to_op_output.html#a04cd3d8e91d731a36e3c7830631e47af',1,'op::WCvMatToOpOutput']]], + ['wdatumproducer_2146',['WDatumProducer',['../classop_1_1_w_datum_producer.html#a728efd416b307b5ffa25c44b0fbf7760',1,'op::WDatumProducer']]], + ['webcamreader_2147',['WebcamReader',['../classop_1_1_webcam_reader.html#a15fa1b26adfb75c0f072dcdb44c8fc44',1,'op::WebcamReader']]], + ['wfacedetector_2148',['WFaceDetector',['../classop_1_1_w_face_detector.html#a77355426bc59b212a8eb1730ff6289f3',1,'op::WFaceDetector']]], + ['wfacedetectoropencv_2149',['WFaceDetectorOpenCV',['../classop_1_1_w_face_detector_open_c_v.html#a8c765201f0cc9440f8d172c8d8c76a62',1,'op::WFaceDetectorOpenCV']]], + ['wfaceextractornet_2150',['WFaceExtractorNet',['../classop_1_1_w_face_extractor_net.html#a31bd32d4b9922ea456c97343c94501ac',1,'op::WFaceExtractorNet']]], + ['wfacerenderer_2151',['WFaceRenderer',['../classop_1_1_w_face_renderer.html#a51e2a661867adee200f5c4029a585e5d',1,'op::WFaceRenderer']]], + ['wfacesaver_2152',['WFaceSaver',['../classop_1_1_w_face_saver.html#a5dc60ede4b88594d59ece4ce3e4683d6',1,'op::WFaceSaver']]], + ['wfpsmax_2153',['WFpsMax',['../classop_1_1_w_fps_max.html#afc487c8404a9d4794bcccdd43f0368f6',1,'op::WFpsMax']]], + ['wgui_2154',['WGui',['../classop_1_1_w_gui.html#a4e7d3f5b3ddaf02109738b4348250611',1,'op::WGui']]], + ['wgui3d_2155',['WGui3D',['../classop_1_1_w_gui3_d.html#ab61a31574460ff87efa99ed7362474ed',1,'op::WGui3D']]], + 
['wguiinfoadder_2156',['WGuiInfoAdder',['../classop_1_1_w_gui_info_adder.html#a0b2c539b72ef09106ab0306dc88c5ac5',1,'op::WGuiInfoAdder']]], + ['whanddetector_2157',['WHandDetector',['../classop_1_1_w_hand_detector.html#ac44b474c7d8bd4876e32ceb9c9a322fe',1,'op::WHandDetector']]], + ['whanddetectorfromtxt_2158',['WHandDetectorFromTxt',['../classop_1_1_w_hand_detector_from_txt.html#a01a5f73b0a8a1b8998937e7ba3d747a3',1,'op::WHandDetectorFromTxt']]], + ['whanddetectortracking_2159',['WHandDetectorTracking',['../classop_1_1_w_hand_detector_tracking.html#ad2a5ac720f4ed651f4cf5e42d21c05dd',1,'op::WHandDetectorTracking']]], + ['whanddetectorupdate_2160',['WHandDetectorUpdate',['../classop_1_1_w_hand_detector_update.html#abd8b56fbfbd2a619a4f37d148592f61b',1,'op::WHandDetectorUpdate']]], + ['whandextractornet_2161',['WHandExtractorNet',['../classop_1_1_w_hand_extractor_net.html#a464a629c6ecd9727da53453af8266e1d',1,'op::WHandExtractorNet']]], + ['whandrenderer_2162',['WHandRenderer',['../classop_1_1_w_hand_renderer.html#a30121b55c601aed3644996d010b6bf8c',1,'op::WHandRenderer']]], + ['whandsaver_2163',['WHandSaver',['../classop_1_1_w_hand_saver.html#ab41ecc429abfe0a1424facd6ee4acd1f',1,'op::WHandSaver']]], + ['wheatmapsaver_2164',['WHeatMapSaver',['../classop_1_1_w_heat_map_saver.html#a5b72d5f3bcbdacb26ba440b80eef0109',1,'op::WHeatMapSaver']]], + ['widgenerator_2165',['WIdGenerator',['../classop_1_1_w_id_generator.html#a6112733ee0b537d4d91191f93f0a84f8',1,'op::WIdGenerator']]], + ['width_2166',['width',['../classop_1_1_array_cpu_gpu.html#a5011662a5cf4bc7f7c1a2d966dcc44cd',1,'op::ArrayCpuGpu']]], + ['wimagesaver_2167',['WImageSaver',['../classop_1_1_w_image_saver.html#a11add012ee88b64a4f36d3f63cb65ee0',1,'op::WImageSaver']]], + ['wkeeptopnpeople_2168',['WKeepTopNPeople',['../classop_1_1_w_keep_top_n_people.html#aebe939c354cfb62cb6d950f73d14731b',1,'op::WKeepTopNPeople']]], + ['wkeypointscaler_2169',['WKeypointScaler',['../classop_1_1_w_keypoint_scaler.html#a31624e262988b0840a8ddbf098e56e9b',1,'op::WKeypointScaler']]], + ['wopoutputtocvmat_2170',['WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html#a6f632a83de4cdc731c3f52d1541060f3',1,'op::WOpOutputToCvMat']]], + 
['work_2171',['work',['../classop_1_1_w_hand_renderer.html#ad178e8d413b3b15edc53625e1f5119d7',1,'op::WHandRenderer::work()'],['../classop_1_1_w_pose_extractor.html#ae0f02aaefccab05bbbd919dd7a9e0f61',1,'op::WPoseExtractor::work()'],['../classop_1_1_w_pose_extractor_net.html#a3d691e30c419c70e23a4d7b3c92adb4b',1,'op::WPoseExtractorNet::work()'],['../classop_1_1_w_pose_renderer.html#a10b1631d78d8270ed2a16e538b30eb76',1,'op::WPoseRenderer::work()'],['../classop_1_1_sub_thread.html#a14330cbc1117f32b6d69c1733ccdeb61',1,'op::SubThread::work()'],['../classop_1_1_sub_thread_no_queue.html#acb7edd02e1724e0fd131235666009f42',1,'op::SubThreadNoQueue::work()'],['../classop_1_1_sub_thread_queue_in.html#a7e9bd6ca09bb77a8de76ae8a02ee8ed4',1,'op::SubThreadQueueIn::work()'],['../classop_1_1_sub_thread_queue_in_out.html#abb65911e9d9b6d5efe782ca0e599be3b',1,'op::SubThreadQueueInOut::work()'],['../classop_1_1_sub_thread_queue_out.html#a0ff5f79e63038ffa5b4aca24cfea7e7c',1,'op::SubThreadQueueOut::work()'],['../classop_1_1_w_fps_max.html#a8b9f49fb22b18dbee786922af15ba939',1,'op::WFpsMax::work()'],['../classop_1_1_w_id_generator.html#a03bd005cf88749702fb8a29c20d4cb91',1,'op::WIdGenerator::work()'],['../classop_1_1_worker.html#a9acadd6df7af03b31b9e354ae815f781',1,'op::Worker::work()'],['../classop_1_1_worker_consumer.html#a7383747b3bdc6ac79e6f9afbf2c28d27',1,'op::WorkerConsumer::work()'],['../classop_1_1_worker_producer.html#a0259f0b387e2b868388ba0a6769f4691',1,'op::WorkerProducer::work()'],['../classop_1_1_w_queue_assembler.html#ad3b1ca56d18e1e234773ba15efea7158',1,'op::WQueueAssembler::work()'],['../classop_1_1_w_queue_orderer.html#a1ea314eeaa8d99fbf33885d9a4c6d044',1,'op::WQueueOrderer::work()'],['../classop_1_1_w_person_id_extractor.html#a4066bf1c8cad753c74de1ceabdd76505',1,'op::WPersonIdExtractor::work()'],['../classop_1_1_w_hand_detector_update.html#af9287dc0a3c67abd35974c1c74614f3c',1,'op::WHandDetectorUpdate::work()'],['../classop_1_1_w_hand_extractor_net.html#a21ffee48567b1c7c8994e4effef6cffe',1,'op::WHandExtractorNet::work()'],['../classop_1_1_w_pose_triangulation.html#a495b29e03933d750827acc0531c72c78',1,'op::WPoseTriangulation::work()'],['../classop_1_1_w_cv_mat_to_op_input.html#aa7faa9e2671a85d36aad3366a7958f58',1,'op::WCvMatToOpInput::work()'],['../classop_1_1_w_cv_mat_to_op_output.html#a0bf2e43d2586c83fdd5cb0b1b54aefca',1,'op::WCvMatToOpOutput::work()'],['../classop_1_1_w_keep_top_n_people.html#a5928a091e0990706ab2ea5e5e07629dd',1,'op::WKeepTopNPeople::work()'],['../classop_1_1_w_keypoint_scaler.html#aacad5116921e2ff746fbdf9f6c0cbb25',1,'op::WKeypointScaler::work()'],['../classop_1_1_w_op_output_to_cv_mat.html#ae3fc21569d56a648c606b23fcc016349',1,'op::WOpOutputToCvMat::work()'],['../classop_1_1_w_scale_and_size_extractor.html#afddf54d061dc5325e78252a3bba482b9',1,'op::WScaleAndSizeExtractor::work()'],['../classop_1_1_w_verbose_printer.html#af98586e3da7cedd902f70e6521c0ffc4',1,'op::WVerbosePrinter::work()'],['../classop_1_1_w_face_detector.html#a721ced99378516c04cb3cff296cc274a',1,'op::WFaceDetector::work()'],['../classop_1_1_w_face_detector_open_c_v.html#a4d3a4a29bcb7b8c141ae1917634ca4c9',1,'op::WFaceDetectorOpenCV::work()'],['../classop_1_1_w_face_extractor_net.html#aa47940fb2ed940a53c7a305ce45817a3',1,'op::WFaceExtractorNet::work()'],['../classop_1_1_w_face_renderer.html#aa52166ea2d5e0f201c94d5c4fe74216e',1,'op::WFaceRenderer::work()'],['../classop_1_1_w_gui_info_adder.html#ae90a68c6ef7b4f45595a020efd232612',1,'op::WGuiInfoAdder::work()'],['../classop_1_1_w_hand_detector.html#aa82ef40fad1d343b5856b41ec
4dbcd5c',1,'op::WHandDetector::work()'],['../classop_1_1_w_hand_detector_from_txt.html#a51ebff94734350463fcf507a84eeefdc',1,'op::WHandDetectorFromTxt::work()'],['../classop_1_1_w_hand_detector_tracking.html#a7c849c5a423ffc150c6a4aee9055d34e',1,'op::WHandDetectorTracking::work()']]], + ['workconsumer_2172',['workConsumer',['../classop_1_1_worker_consumer.html#a26cf5c40df363d94d603fce92a5b69eb',1,'op::WorkerConsumer::workConsumer()'],['../classop_1_1_w_gui3_d.html#afe019cff8fd5ed2f59f59d886de7473a',1,'op::WGui3D::workConsumer()'],['../classop_1_1_w_gui.html#a664e1f76211510e38b8d5f5bed37ffcb',1,'op::WGui::workConsumer()'],['../classop_1_1_w_video_saver3_d.html#adef743533fbab522d55c43768d28469e',1,'op::WVideoSaver3D::workConsumer()'],['../classop_1_1_w_video_saver.html#a40bcb8ccf137c6cbee3ca31e6cc3bfbf',1,'op::WVideoSaver::workConsumer()'],['../classop_1_1_w_udp_sender.html#a615fc6a537ca9f624022698391c11a54',1,'op::WUdpSender::workConsumer()'],['../classop_1_1_w_people_json_saver.html#af874a16a06a9a3452a0e3792ac15647e',1,'op::WPeopleJsonSaver::workConsumer()'],['../classop_1_1_w_pose_saver.html#a039027281498168b57df8dfeefd82cd8',1,'op::WPoseSaver::workConsumer()'],['../classop_1_1_w_coco_json_saver.html#af152a61abc9ab46da651c9d87e6775f0',1,'op::WCocoJsonSaver::workConsumer()'],['../classop_1_1_w_face_saver.html#a026bfad8cd9e0d1289a1db473cef34a0',1,'op::WFaceSaver::workConsumer()'],['../classop_1_1_w_hand_saver.html#afc3976b394070927b9396163137317e5',1,'op::WHandSaver::workConsumer()'],['../classop_1_1_w_heat_map_saver.html#a5fd729a47f0cdbe94001219f971f8f51',1,'op::WHeatMapSaver::workConsumer()'],['../classop_1_1_w_image_saver.html#a198bbfcf625354ddda419e0121d0cb33',1,'op::WImageSaver::workConsumer()']]], + ['worker_2173',['Worker',['../classop_1_1_worker.html#a5008fc4ef4e41366ba0022f5cd79edba',1,'op::Worker']]], + ['workproducer_2174',['workProducer',['../classop_1_1_w_datum_producer.html#aac2674f961492fa299da18d716a617b4',1,'op::WDatumProducer::workProducer()'],['../classop_1_1_worker_producer.html#a364992ef862fe84a78416e2b556daae7',1,'op::WorkerProducer::workProducer()']]], + ['worktworkers_2175',['workTWorkers',['../classop_1_1_sub_thread.html#ad9f2d3be9e05739b102fad350e1a1364',1,'op::SubThread']]], + ['wpeoplejsonsaver_2176',['WPeopleJsonSaver',['../classop_1_1_w_people_json_saver.html#ac12dfe8c1414ec36ace474ecbf148f67',1,'op::WPeopleJsonSaver']]], + ['wpersonidextractor_2177',['WPersonIdExtractor',['../classop_1_1_w_person_id_extractor.html#a14a6cc9c6c70acd4847482fd71e4972b',1,'op::WPersonIdExtractor']]], + ['wposeextractor_2178',['WPoseExtractor',['../classop_1_1_w_pose_extractor.html#ae85b1ec41bf47dcf1aed7bdae1d91915',1,'op::WPoseExtractor']]], + ['wposeextractornet_2179',['WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html#aa0f6b7ec6f36fe2a27649ac2c7490c09',1,'op::WPoseExtractorNet']]], + ['wposerenderer_2180',['WPoseRenderer',['../classop_1_1_w_pose_renderer.html#ae74189143175b89ccd36662cec4de72e',1,'op::WPoseRenderer']]], + ['wposesaver_2181',['WPoseSaver',['../classop_1_1_w_pose_saver.html#aa9dd0f4649c9e8efef10201caf9e4cfd',1,'op::WPoseSaver']]], + ['wposetriangulation_2182',['WPoseTriangulation',['../classop_1_1_w_pose_triangulation.html#a439c75d19eae34fdd20f2f1c4ee18e48',1,'op::WPoseTriangulation']]], + ['wqueueassembler_2183',['WQueueAssembler',['../classop_1_1_w_queue_assembler.html#ad4a4ec3e060ad6483331156a5a62af25',1,'op::WQueueAssembler']]], + 
['wqueueorderer_2184',['WQueueOrderer',['../classop_1_1_w_queue_orderer.html#a3303add5fa8cc36593d3d859ffdd8ae0',1,'op::WQueueOrderer']]], + ['wrapperconfiguresanitychecks_2185',['wrapperConfigureSanityChecks',['../namespaceop.html#acc4a5460e02ae510e854724513eea822',1,'op']]], + ['wrapperstructextra_2186',['WrapperStructExtra',['../structop_1_1_wrapper_struct_extra.html#a70cdc27c953962810333fafe011f86dd',1,'op::WrapperStructExtra']]], + ['wrapperstructface_2187',['WrapperStructFace',['../structop_1_1_wrapper_struct_face.html#a0fb08ed60a50f19713df6f62ee685593',1,'op::WrapperStructFace']]], + ['wrapperstructgui_2188',['WrapperStructGui',['../structop_1_1_wrapper_struct_gui.html#a41638659ae2237d4ebfac635f4cc7842',1,'op::WrapperStructGui']]], + ['wrapperstructhand_2189',['WrapperStructHand',['../structop_1_1_wrapper_struct_hand.html#a223b29ce9a234c3fb8a7864cfe2919fc',1,'op::WrapperStructHand']]], + ['wrapperstructinput_2190',['WrapperStructInput',['../structop_1_1_wrapper_struct_input.html#a2ee8db5c1fbade720719bb1464e59175',1,'op::WrapperStructInput']]], + ['wrapperstructoutput_2191',['WrapperStructOutput',['../structop_1_1_wrapper_struct_output.html#ae8975341f779a89d68a125cbfb17d940',1,'op::WrapperStructOutput']]], + ['wrapperstructpose_2192',['WrapperStructPose',['../structop_1_1_wrapper_struct_pose.html#af3c639dd4de2bfebe1376a0ab7666c86',1,'op::WrapperStructPose']]], + ['wrappert_2193',['WrapperT',['../classop_1_1_wrapper_t.html#a94151754dddc2a37044aea26b9dac6c7',1,'op::WrapperT']]], + ['write_2194',['write',['../classop_1_1_video_saver.html#a6f6914d16434cebc9a6c596472b212aa',1,'op::VideoSaver::write(const Matrix &matToSave)'],['../classop_1_1_video_saver.html#a4ecf895fc5cd7508ac139a7b69fc25e7',1,'op::VideoSaver::write(const std::vector< Matrix > &matsToSave)']]], + ['writeparameters_2195',['writeParameters',['../classop_1_1_camera_parameter_reader.html#a7210bc5ebfe6e90a52524b7f0f3f3991',1,'op::CameraParameterReader']]], + ['wscaleandsizeextractor_2196',['WScaleAndSizeExtractor',['../classop_1_1_w_scale_and_size_extractor.html#a8e6ef291bd809987f06fbb1cc2173b0f',1,'op::WScaleAndSizeExtractor']]], + ['wudpsender_2197',['WUdpSender',['../classop_1_1_w_udp_sender.html#a22a5ec90fe83ed654bd0aef112fac98b',1,'op::WUdpSender']]], + ['wverboseprinter_2198',['WVerbosePrinter',['../classop_1_1_w_verbose_printer.html#a6ece5acbf5f8a7a3e900c5029a56271d',1,'op::WVerbosePrinter']]], + ['wvideosaver_2199',['WVideoSaver',['../classop_1_1_w_video_saver.html#a04dc4e6f039d047a0da6f94283c145d9',1,'op::WVideoSaver']]], + ['wvideosaver3d_2200',['WVideoSaver3D',['../classop_1_1_w_video_saver3_d.html#a570d2b868a6c3d3932671d56b0dbb531',1,'op::WVideoSaver3D']]] +]; diff --git a/web/html/doc/search/functions_17.html b/web/html/doc/search/functions_17.html new file mode 100644 index 000000000..ad6d5a7af --- /dev/null +++ b/web/html/doc/search/functions_17.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/functions_17.js b/web/html/doc/search/functions_17.js new file mode 100644 index 000000000..f227ee32c --- /dev/null +++ b/web/html/doc/search/functions_17.js @@ -0,0 +1,120 @@ +var searchData= +[ + ['_7ebodypartconnectorcaffe_2201',['~BodyPartConnectorCaffe',['../classop_1_1_body_part_connector_caffe.html#ab0beade5f7d8e56e881231e46f9306ec',1,'op::BodyPartConnectorCaffe']]], + ['_7ecameraparameterreader_2202',['~CameraParameterReader',['../classop_1_1_camera_parameter_reader.html#acfa701389b1e566e1ea49cfd2605bbf8',1,'op::CameraParameterReader']]], + ['_7ecocojsonsaver_2203',['~CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a8bbfab84a7816cb0f189f243246f744b',1,'op::CocoJsonSaver']]], + ['_7ecvmattoopinput_2204',['~CvMatToOpInput',['../classop_1_1_cv_mat_to_op_input.html#adbe9ae80914d9c1d224c1fe753519090',1,'op::CvMatToOpInput']]], + ['_7ecvmattoopoutput_2205',['~CvMatToOpOutput',['../classop_1_1_cv_mat_to_op_output.html#abc8953e080adc30fa52345322ae8445a',1,'op::CvMatToOpOutput']]], + ['_7edatum_2206',['~Datum',['../structop_1_1_datum.html#a16b968aec06e9b904751216402972e74',1,'op::Datum']]], + ['_7edatumproducer_2207',['~DatumProducer',['../classop_1_1_datum_producer.html#ad12f3202a265c989430d15bf7476a326',1,'op::DatumProducer']]], + ['_7efacecpurenderer_2208',['~FaceCpuRenderer',['../classop_1_1_face_cpu_renderer.html#a5c5e1e9b016bd33b5740beb04fc0fb49',1,'op::FaceCpuRenderer']]], + ['_7efacedetector_2209',['~FaceDetector',['../classop_1_1_face_detector.html#a66ff3806053a5f86d01724f5029e0859',1,'op::FaceDetector']]], + ['_7efacedetectoropencv_2210',['~FaceDetectorOpenCV',['../classop_1_1_face_detector_open_c_v.html#a88eae893ff7f7664243cadf0f84500da',1,'op::FaceDetectorOpenCV']]], + ['_7efaceextractorcaffe_2211',['~FaceExtractorCaffe',['../classop_1_1_face_extractor_caffe.html#a4450e656f21a8cb7f1d9bf5f545012f1',1,'op::FaceExtractorCaffe']]], + ['_7efaceextractornet_2212',['~FaceExtractorNet',['../classop_1_1_face_extractor_net.html#a4cd488333e450cfbb19aab8910e7f138',1,'op::FaceExtractorNet']]], + ['_7efacegpurenderer_2213',['~FaceGpuRenderer',['../classop_1_1_face_gpu_renderer.html#a94758beab4bfbfed02cc8330a63abaeb',1,'op::FaceGpuRenderer']]], + ['_7efacerenderer_2214',['~FaceRenderer',['../classop_1_1_face_renderer.html#a8ba7bad616bd2cf673d8faa846bf95b5',1,'op::FaceRenderer']]], + ['_7efilesaver_2215',['~FileSaver',['../classop_1_1_file_saver.html#a080e6bb80adad7a3d534356cdfe40211',1,'op::FileSaver']]], + ['_7eflirreader_2216',['~FlirReader',['../classop_1_1_flir_reader.html#a66d6144c5dcb0dd3cbadcd6f8eefa9e0',1,'op::FlirReader']]], + ['_7eframedisplayer_2217',['~FrameDisplayer',['../classop_1_1_frame_displayer.html#ab3dea1eefac57cf129b4828ecd856fb4',1,'op::FrameDisplayer']]], + ['_7egpurenderer_2218',['~GpuRenderer',['../classop_1_1_gpu_renderer.html#a3ef06d85a62cd4049d5e8ac1e94d8fd8',1,'op::GpuRenderer']]], + ['_7egui_2219',['~Gui',['../classop_1_1_gui.html#a5e7e30073c0f7ee18904b25fc638b4e2',1,'op::Gui']]], + ['_7egui3d_2220',['~Gui3D',['../classop_1_1_gui3_d.html#a2fff0519028b406fe9ffc984ecd1dfa9',1,'op::Gui3D']]], + ['_7eguiinfoadder_2221',['~GuiInfoAdder',['../classop_1_1_gui_info_adder.html#a942af111d6bc41991db4bca3e573b8e9',1,'op::GuiInfoAdder']]], + ['_7ehandcpurenderer_2222',['~HandCpuRenderer',['../classop_1_1_hand_cpu_renderer.html#a8269f1879939d1b403787f982f10258d',1,'op::HandCpuRenderer']]], + ['_7ehanddetector_2223',['~HandDetector',['../classop_1_1_hand_detector.html#ae70826e6de6a8f26c240d0152578375e',1,'op::HandDetector']]], + 
['_7ehanddetectorfromtxt_2224',['~HandDetectorFromTxt',['../classop_1_1_hand_detector_from_txt.html#a8fb6eb6ef5d5689cfdb502b5bc43685f',1,'op::HandDetectorFromTxt']]], + ['_7ehandextractorcaffe_2225',['~HandExtractorCaffe',['../classop_1_1_hand_extractor_caffe.html#aee681b43b8691ac1f07e08616522f6af',1,'op::HandExtractorCaffe']]], + ['_7ehandextractornet_2226',['~HandExtractorNet',['../classop_1_1_hand_extractor_net.html#a3743bf97fd19ee7d52ffd1019baa0c46',1,'op::HandExtractorNet']]], + ['_7ehandgpurenderer_2227',['~HandGpuRenderer',['../classop_1_1_hand_gpu_renderer.html#ad6a87a582129d7ed18a520dc9cd6c3fc',1,'op::HandGpuRenderer']]], + ['_7ehandrenderer_2228',['~HandRenderer',['../classop_1_1_hand_renderer.html#a66ca52089ca021542816a085d39ee640',1,'op::HandRenderer']]], + ['_7eheatmapsaver_2229',['~HeatMapSaver',['../classop_1_1_heat_map_saver.html#a150c053182074a1cc846c3ced7a674fb',1,'op::HeatMapSaver']]], + ['_7eimagedirectoryreader_2230',['~ImageDirectoryReader',['../classop_1_1_image_directory_reader.html#a7551a8567f42f7cfb68020e149921438',1,'op::ImageDirectoryReader']]], + ['_7eimagesaver_2231',['~ImageSaver',['../classop_1_1_image_saver.html#ab11a6e42a910021fd072cdf287b796ed',1,'op::ImageSaver']]], + ['_7eipcamerareader_2232',['~IpCameraReader',['../classop_1_1_ip_camera_reader.html#ad90e52c898ddf32503ce94685977aae0',1,'op::IpCameraReader']]], + ['_7ejsonofstream_2233',['~JsonOfstream',['../classop_1_1_json_ofstream.html#a5c4b866df81cf36d8f6dcdfc8414de8f',1,'op::JsonOfstream']]], + ['_7ekeeptopnpeople_2234',['~KeepTopNPeople',['../classop_1_1_keep_top_n_people.html#a7675c9c3668a2610827da67818a67741',1,'op::KeepTopNPeople']]], + ['_7ekeypointsaver_2235',['~KeypointSaver',['../classop_1_1_keypoint_saver.html#a903a4fa8be0b0cb5008d015126ac0e59',1,'op::KeypointSaver']]], + ['_7ekeypointscaler_2236',['~KeypointScaler',['../classop_1_1_keypoint_scaler.html#a5797e76ffea7e3b6a4080b04f50f0c0f',1,'op::KeypointScaler']]], + ['_7emaximumcaffe_2237',['~MaximumCaffe',['../classop_1_1_maximum_caffe.html#a0b438980e5c2fce978e9de80f75afcd3',1,'op::MaximumCaffe']]], + ['_7enet_2238',['~Net',['../classop_1_1_net.html#ae20a74df1a401eb17d5b75b406574919',1,'op::Net']]], + ['_7enetcaffe_2239',['~NetCaffe',['../classop_1_1_net_caffe.html#a84007645c88de286e9d306461a044e8d',1,'op::NetCaffe']]], + ['_7enetopencv_2240',['~NetOpenCv',['../classop_1_1_net_open_cv.html#a30ec3c3ee2ffe0a95656f6b11151243f',1,'op::NetOpenCv']]], + ['_7enmscaffe_2241',['~NmsCaffe',['../classop_1_1_nms_caffe.html#a0702488e5d899a6610535f6741601978',1,'op::NmsCaffe']]], + ['_7eopoutputtocvmat_2242',['~OpOutputToCvMat',['../classop_1_1_op_output_to_cv_mat.html#afe99e538dfcca6396b0672db1ec2f17f',1,'op::OpOutputToCvMat']]], + ['_7epeoplejsonsaver_2243',['~PeopleJsonSaver',['../classop_1_1_people_json_saver.html#a4a84666529a0418ccf9256c9942ea3f8',1,'op::PeopleJsonSaver']]], + ['_7epersonidextractor_2244',['~PersonIdExtractor',['../classop_1_1_person_id_extractor.html#a7ff9f8faf42bff0dbd7207105c149a1e',1,'op::PersonIdExtractor']]], + ['_7epersontracker_2245',['~PersonTracker',['../classop_1_1_person_tracker.html#a840ed2e06c1cc4dfc89e6083b2a8bc37',1,'op::PersonTracker']]], + ['_7eposecpurenderer_2246',['~PoseCpuRenderer',['../classop_1_1_pose_cpu_renderer.html#ad4994dcc005a5e283abc012e8889c481',1,'op::PoseCpuRenderer']]], + ['_7eposeextractor_2247',['~PoseExtractor',['../classop_1_1_pose_extractor.html#a9f98eef4ac08cacefe74e002ac086582',1,'op::PoseExtractor']]], + 
['_7eposeextractorcaffe_2248',['~PoseExtractorCaffe',['../classop_1_1_pose_extractor_caffe.html#a3359641c1199c712a07859dcb76b7dcf',1,'op::PoseExtractorCaffe']]], + ['_7eposeextractornet_2249',['~PoseExtractorNet',['../classop_1_1_pose_extractor_net.html#a963c679df20b16d475aa3a7c0661135c',1,'op::PoseExtractorNet']]], + ['_7eposegpurenderer_2250',['~PoseGpuRenderer',['../classop_1_1_pose_gpu_renderer.html#afe3959a08624dd71cc5797eb3938e748',1,'op::PoseGpuRenderer']]], + ['_7eposerenderer_2251',['~PoseRenderer',['../classop_1_1_pose_renderer.html#a8ff2470d813201e992cd5e07bab23386',1,'op::PoseRenderer']]], + ['_7eposetriangulation_2252',['~PoseTriangulation',['../classop_1_1_pose_triangulation.html#a3f4764c7063d9849b75a354a6a92f062',1,'op::PoseTriangulation']]], + ['_7epriorityqueue_2253',['~PriorityQueue',['../classop_1_1_priority_queue.html#a469b458b035822f01b212c089d4245bc',1,'op::PriorityQueue']]], + ['_7eproducer_2254',['~Producer',['../classop_1_1_producer.html#a8b48342b2c4003a080b17ac411f3454f',1,'op::Producer']]], + ['_7equeue_2255',['~Queue',['../classop_1_1_queue.html#a056600a7cf4503235ba4e172cee63a7f',1,'op::Queue']]], + ['_7equeuebase_2256',['~QueueBase',['../classop_1_1_queue_base.html#aef098201d9084083adba5ceeb45b12fa',1,'op::QueueBase']]], + ['_7erenderer_2257',['~Renderer',['../classop_1_1_renderer.html#abd45555a9864e799309b72902b6cec30',1,'op::Renderer']]], + ['_7eresizeandmergecaffe_2258',['~ResizeAndMergeCaffe',['../classop_1_1_resize_and_merge_caffe.html#a5dc1aa7c462bd8df8b6a8377418e19d4',1,'op::ResizeAndMergeCaffe']]], + ['_7escaleandsizeextractor_2259',['~ScaleAndSizeExtractor',['../classop_1_1_scale_and_size_extractor.html#a90bc64fe3c8ee45cfe5f3bd73a8bb3c9',1,'op::ScaleAndSizeExtractor']]], + ['_7espinnakerwrapper_2260',['~SpinnakerWrapper',['../classop_1_1_spinnaker_wrapper.html#a8ae3e45fba6f9d0943cbd9038e98b066',1,'op::SpinnakerWrapper']]], + ['_7esubthread_2261',['~SubThread',['../classop_1_1_sub_thread.html#a6ee67e375611e8df2d09b3234dedf36c',1,'op::SubThread']]], + ['_7esubthreadnoqueue_2262',['~SubThreadNoQueue',['../classop_1_1_sub_thread_no_queue.html#ad88bbbe72f4777603d71e322b0fd20ed',1,'op::SubThreadNoQueue']]], + ['_7esubthreadqueuein_2263',['~SubThreadQueueIn',['../classop_1_1_sub_thread_queue_in.html#a8a479c4ddc5b42f1dbf329c4a0c235c3',1,'op::SubThreadQueueIn']]], + ['_7esubthreadqueueinout_2264',['~SubThreadQueueInOut',['../classop_1_1_sub_thread_queue_in_out.html#a87d122e11adc7363d9b24c7f796d3d33',1,'op::SubThreadQueueInOut']]], + ['_7esubthreadqueueout_2265',['~SubThreadQueueOut',['../classop_1_1_sub_thread_queue_out.html#ab61e068d6dddd2914b25638ebeff0f3b',1,'op::SubThreadQueueOut']]], + ['_7ethread_2266',['~Thread',['../classop_1_1_thread.html#a151e4e647917f2351cc05a8861588e2a',1,'op::Thread']]], + ['_7ethreadmanager_2267',['~ThreadManager',['../classop_1_1_thread_manager.html#a03c6587dbc60b266bee04b9714647fba',1,'op::ThreadManager']]], + ['_7eudpsender_2268',['~UdpSender',['../classop_1_1_udp_sender.html#ac85192d475d5e84b9dcc839d5e240585',1,'op::UdpSender']]], + ['_7everboseprinter_2269',['~VerbosePrinter',['../classop_1_1_verbose_printer.html#a5c4ef10db4aba13be43b92ab4e6c4d3e',1,'op::VerbosePrinter']]], + ['_7evideocapturereader_2270',['~VideoCaptureReader',['../classop_1_1_video_capture_reader.html#a7ea52eabf5133a1a01d38f95b1a4b601',1,'op::VideoCaptureReader']]], + ['_7evideoreader_2271',['~VideoReader',['../classop_1_1_video_reader.html#a26cee6225a62c4e120ae9ea2e4a9a41c',1,'op::VideoReader']]], + 
['_7evideosaver_2272',['~VideoSaver',['../classop_1_1_video_saver.html#acfb839eb14ac032055930932db966e84',1,'op::VideoSaver']]], + ['_7ewcocojsonsaver_2273',['~WCocoJsonSaver',['../classop_1_1_w_coco_json_saver.html#a49ba32973e43c176c88d17aa805f1ab5',1,'op::WCocoJsonSaver']]], + ['_7ewcvmattoopinput_2274',['~WCvMatToOpInput',['../classop_1_1_w_cv_mat_to_op_input.html#a8ae2eb423f1fe70f4154716b38b62719',1,'op::WCvMatToOpInput']]], + ['_7ewcvmattoopoutput_2275',['~WCvMatToOpOutput',['../classop_1_1_w_cv_mat_to_op_output.html#add97e472ab242fe72221cf0591801f81',1,'op::WCvMatToOpOutput']]], + ['_7ewdatumproducer_2276',['~WDatumProducer',['../classop_1_1_w_datum_producer.html#a858e64351ef6d3942bc7d53678badcc7',1,'op::WDatumProducer']]], + ['_7ewebcamreader_2277',['~WebcamReader',['../classop_1_1_webcam_reader.html#aea29bfce4df5493d662ed3a892f364d2',1,'op::WebcamReader']]], + ['_7ewfacedetector_2278',['~WFaceDetector',['../classop_1_1_w_face_detector.html#ac0aa45b289e6800bb76bfbfc8a216035',1,'op::WFaceDetector']]], + ['_7ewfacedetectoropencv_2279',['~WFaceDetectorOpenCV',['../classop_1_1_w_face_detector_open_c_v.html#a2942e145f9c4c720aad7c810a3d0f3f3',1,'op::WFaceDetectorOpenCV']]], + ['_7ewfaceextractornet_2280',['~WFaceExtractorNet',['../classop_1_1_w_face_extractor_net.html#ae781bd1a7d450983a9aa168860d4e96d',1,'op::WFaceExtractorNet']]], + ['_7ewfacerenderer_2281',['~WFaceRenderer',['../classop_1_1_w_face_renderer.html#a2f06bfea6521c7528fc7b07b9b067351',1,'op::WFaceRenderer']]], + ['_7ewfacesaver_2282',['~WFaceSaver',['../classop_1_1_w_face_saver.html#ae27f54e5aead73b6eb604d0a0a06e18f',1,'op::WFaceSaver']]], + ['_7ewfpsmax_2283',['~WFpsMax',['../classop_1_1_w_fps_max.html#af48214bbb4ed5c84efe1adf845aa9318',1,'op::WFpsMax']]], + ['_7ewgui_2284',['~WGui',['../classop_1_1_w_gui.html#a3c55ca3290f64181201890fae10e4002',1,'op::WGui']]], + ['_7ewgui3d_2285',['~WGui3D',['../classop_1_1_w_gui3_d.html#a62b93d2704634170339827ee1f93fa97',1,'op::WGui3D']]], + ['_7ewguiinfoadder_2286',['~WGuiInfoAdder',['../classop_1_1_w_gui_info_adder.html#ab369f542339af87ff652fc6e8e5408dd',1,'op::WGuiInfoAdder']]], + ['_7ewhanddetector_2287',['~WHandDetector',['../classop_1_1_w_hand_detector.html#a7a740a7f9275b7016013728dbed001d0',1,'op::WHandDetector']]], + ['_7ewhanddetectorfromtxt_2288',['~WHandDetectorFromTxt',['../classop_1_1_w_hand_detector_from_txt.html#ae51bcc36e790b298d3cd0c231d4b3640',1,'op::WHandDetectorFromTxt']]], + ['_7ewhanddetectortracking_2289',['~WHandDetectorTracking',['../classop_1_1_w_hand_detector_tracking.html#a7d884dfd00822de27742a2392fb210bb',1,'op::WHandDetectorTracking']]], + ['_7ewhanddetectorupdate_2290',['~WHandDetectorUpdate',['../classop_1_1_w_hand_detector_update.html#a29d71b3c1ee52f04bd52b932db350b59',1,'op::WHandDetectorUpdate']]], + ['_7ewhandextractornet_2291',['~WHandExtractorNet',['../classop_1_1_w_hand_extractor_net.html#ab46b680c14fb2a0cb171b040da484eda',1,'op::WHandExtractorNet']]], + ['_7ewhandrenderer_2292',['~WHandRenderer',['../classop_1_1_w_hand_renderer.html#ab18c8602c8bf65e3e762b2ff06def220',1,'op::WHandRenderer']]], + ['_7ewhandsaver_2293',['~WHandSaver',['../classop_1_1_w_hand_saver.html#abf4a45c6ebe82fca1e0f0db1d3e2af79',1,'op::WHandSaver']]], + ['_7ewheatmapsaver_2294',['~WHeatMapSaver',['../classop_1_1_w_heat_map_saver.html#aa651ec613c81cf9a19222428bd59feed',1,'op::WHeatMapSaver']]], + ['_7ewidgenerator_2295',['~WIdGenerator',['../classop_1_1_w_id_generator.html#ad9e160c5120aa850fbe2285f78e062e2',1,'op::WIdGenerator']]], + 
['_7ewimagesaver_2296',['~WImageSaver',['../classop_1_1_w_image_saver.html#ab8371a260e35cdea5010327240c9a53d',1,'op::WImageSaver']]], + ['_7ewkeeptopnpeople_2297',['~WKeepTopNPeople',['../classop_1_1_w_keep_top_n_people.html#ad23785b42b85c166e5080f47591cccaa',1,'op::WKeepTopNPeople']]], + ['_7ewkeypointscaler_2298',['~WKeypointScaler',['../classop_1_1_w_keypoint_scaler.html#af4e30e78dba64f2784a1757bc2eb9f8b',1,'op::WKeypointScaler']]], + ['_7ewopoutputtocvmat_2299',['~WOpOutputToCvMat',['../classop_1_1_w_op_output_to_cv_mat.html#a5a4e433aa0c7cc62a5f97cc63a67c3fa',1,'op::WOpOutputToCvMat']]], + ['_7eworker_2300',['~Worker',['../classop_1_1_worker.html#a6ebe180098e00ac062a1bb31d462df60',1,'op::Worker']]], + ['_7eworkerconsumer_2301',['~WorkerConsumer',['../classop_1_1_worker_consumer.html#a9aaa75f194df6b3ed4994c8e95aa0ab5',1,'op::WorkerConsumer']]], + ['_7eworkerproducer_2302',['~WorkerProducer',['../classop_1_1_worker_producer.html#a8f2592f70d723de4b818c97b25c5a476',1,'op::WorkerProducer']]], + ['_7ewpeoplejsonsaver_2303',['~WPeopleJsonSaver',['../classop_1_1_w_people_json_saver.html#a386b5b64f2eee08cb344b242f5adb122',1,'op::WPeopleJsonSaver']]], + ['_7ewpersonidextractor_2304',['~WPersonIdExtractor',['../classop_1_1_w_person_id_extractor.html#a15f33c528ac92d30be226e784256be01',1,'op::WPersonIdExtractor']]], + ['_7ewposeextractor_2305',['~WPoseExtractor',['../classop_1_1_w_pose_extractor.html#aedf9cc53f7dfdb1ec2aa77651ca37eef',1,'op::WPoseExtractor']]], + ['_7ewposeextractornet_2306',['~WPoseExtractorNet',['../classop_1_1_w_pose_extractor_net.html#aa085377f965ffc8385d34d77a2e65e5a',1,'op::WPoseExtractorNet']]], + ['_7ewposerenderer_2307',['~WPoseRenderer',['../classop_1_1_w_pose_renderer.html#ae748fc721246c2a3ad8ffd32adf5e9e7',1,'op::WPoseRenderer']]], + ['_7ewposesaver_2308',['~WPoseSaver',['../classop_1_1_w_pose_saver.html#a62394c885abe4d95bece4469ac3657e9',1,'op::WPoseSaver']]], + ['_7ewposetriangulation_2309',['~WPoseTriangulation',['../classop_1_1_w_pose_triangulation.html#ae88fe6766fbcca1a682306af99684fa3',1,'op::WPoseTriangulation']]], + ['_7ewqueueassembler_2310',['~WQueueAssembler',['../classop_1_1_w_queue_assembler.html#abe8d97c0749cd8d968c8df2727b643e6',1,'op::WQueueAssembler']]], + ['_7ewqueueorderer_2311',['~WQueueOrderer',['../classop_1_1_w_queue_orderer.html#a720b2cd490e2267258bf5d5776f00095',1,'op::WQueueOrderer']]], + ['_7ewrappert_2312',['~WrapperT',['../classop_1_1_wrapper_t.html#a65e310384f3b898c4c3621e0e1ee6883',1,'op::WrapperT']]], + ['_7ewscaleandsizeextractor_2313',['~WScaleAndSizeExtractor',['../classop_1_1_w_scale_and_size_extractor.html#afe30e073c4410146e2c8ba8f2752737f',1,'op::WScaleAndSizeExtractor']]], + ['_7ewudpsender_2314',['~WUdpSender',['../classop_1_1_w_udp_sender.html#a684854618fbd74bce420ed44d867f8cd',1,'op::WUdpSender']]], + ['_7ewverboseprinter_2315',['~WVerbosePrinter',['../classop_1_1_w_verbose_printer.html#a32ea8ffef9a255ee33d6d56a550706f8',1,'op::WVerbosePrinter']]], + ['_7ewvideosaver_2316',['~WVideoSaver',['../classop_1_1_w_video_saver.html#ac0057c1bbfb3e193c891f167d56fcbab',1,'op::WVideoSaver']]], + ['_7ewvideosaver3d_2317',['~WVideoSaver3D',['../classop_1_1_w_video_saver3_d.html#a39482b591eafa150fee3db7027ae093f',1,'op::WVideoSaver3D']]] +]; diff --git a/web/html/doc/search/functions_2.html b/web/html/doc/search/functions_2.html new file mode 100644 index 000000000..ca5aa10e6 --- /dev/null +++ b/web/html/doc/search/functions_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/functions_2.js b/web/html/doc/search/functions_2.js new file mode 100644 index 000000000..861b815aa --- /dev/null +++ b/web/html/doc/search/functions_2.js @@ -0,0 +1,44 @@ +var searchData= +[ + ['cameraparameterreader_1643',['CameraParameterReader',['../classop_1_1_camera_parameter_reader.html#aae3c60cbed99e1b5706c96577732ddae',1,'op::CameraParameterReader::CameraParameterReader(const std::string &serialNumber, const Matrix &cameraIntrinsics, const Matrix &cameraDistortion, const Matrix &cameraExtrinsics=Matrix(), const Matrix &cameraExtrinsicsInitial=Matrix())'],['../classop_1_1_camera_parameter_reader.html#ab7a4c3ef7ac8d8a41e5711ec85b7be4b',1,'op::CameraParameterReader::CameraParameterReader()']]], + ['canonicalaxisindex_1644',['CanonicalAxisIndex',['../classop_1_1_array_cpu_gpu.html#adeb393edfae4967cb510a8c7a2d07d80',1,'op::ArrayCpuGpu']]], + ['center_1645',['center',['../structop_1_1_rectangle.html#a0b0b8be8a0b300204a2afff4f219879b',1,'op::Rectangle']]], + ['channels_1646',['channels',['../classop_1_1_array_cpu_gpu.html#a2eb57d0cb4f902b275d126e4b6f706f2',1,'op::ArrayCpuGpu::channels()'],['../classop_1_1_matrix.html#a4555d0f39c54ad5f7adcb39fe06503cc',1,'op::Matrix::channels()']]], + ['checkandwork_1647',['checkAndWork',['../classop_1_1_worker.html#a6e4e84bd2052919bc48df1ec4b913ecf',1,'op::Worker']]], + ['checkbool_1648',['checkBool',['../namespaceop.html#a410201fcc46274e24726c5a601bc1721',1,'op']]], + ['checkequal_1649',['checkEqual',['../namespaceop.html#aaff52f436911aa17bebb999cd91a44fd',1,'op']]], + ['checkframeintegrity_1650',['checkFrameIntegrity',['../classop_1_1_producer.html#abbfbe53757f75e5e77266b04e9d0fea1',1,'op::Producer']]], + ['checkgreaterorequal_1651',['checkGreaterOrEqual',['../namespaceop.html#a92e8cd01741c90fbfdfaa33a13803f34',1,'op']]], + ['checkgreaterthan_1652',['checkGreaterThan',['../namespaceop.html#a3dd874d4341b99431819f9fa6b678ca9',1,'op']]], + ['checkifrunningandgetdatum_1653',['checkIfRunningAndGetDatum',['../classop_1_1_datum_producer.html#a39da4822705d23ca7e600b69f39e69be',1,'op::DatumProducer']]], + ['checklessorequal_1654',['checkLessOrEqual',['../namespaceop.html#a7ecfc02dca25534a071acf3136ff175e',1,'op']]], + ['checklessthan_1655',['checkLessThan',['../namespaceop.html#a1e71130dc8f280e4664c711128b18b42',1,'op']]], + ['checknonullnorempty_1656',['checkNoNullNorEmpty',['../namespaceop.html#a02164ca0af9e838190f584f5d1d8465e',1,'op']]], + ['checknotequal_1657',['checkNotEqual',['../namespaceop.html#aaada2594361f6f929af5b1f9d50387eb',1,'op']]], + ['checkthread_1658',['checkThread',['../classop_1_1_pose_extractor_net.html#a840c6fbdbf59d088d966ad26d45572a4',1,'op::PoseExtractorNet']]], + ['checkvector_1659',['checkVector',['../classop_1_1_matrix.html#a77cd25c8e23a4eec148039ab4832cde1',1,'op::Matrix']]], + ['checkworkererrors_1660',['checkWorkerErrors',['../namespaceop.html#a865a4cd0ba3b596667dc7242756837bd',1,'op']]], + ['clear_1661',['clear',['../classop_1_1_pose_extractor_net.html#a3fe7256d9860f4c624f5cf928556bc28',1,'op::PoseExtractorNet::clear()'],['../classop_1_1_queue_base.html#a247f435c95709f3246d352eee4f757af',1,'op::QueueBase::clear()']]], + ['clone_1662',['clone',['../classop_1_1_array.html#ab0b95bf5488cccad3bce7413251b04de',1,'op::Array::clone()'],['../structop_1_1_datum.html#ad137a102ef753734a9413762d72e6d46',1,'op::Datum::clone()'],['../classop_1_1_matrix.html#abc101fe6c039f6ef2311c5e9cef4c293',1,'op::Matrix::clone()']]], + 
['cocojsonsaver_1663',['CocoJsonSaver',['../classop_1_1_coco_json_saver.html#a6d596768658b4b32430d3686be557e33',1,'op::CocoJsonSaver']]], + ['cols_1664',['cols',['../classop_1_1_matrix.html#ac2171dc14ef5480496c05c115b6dd579',1,'op::Matrix']]], + ['comma_1665',['comma',['../classop_1_1_json_ofstream.html#ae4468279f789c8026d431b2ef62646f9',1,'op::JsonOfstream']]], + ['compile_5ftemplate_5fdatum_1666',['COMPILE_TEMPLATE_DATUM',['../namespaceop.html#a774871462f7fefb8cadea1e49f501e45',1,'op::COMPILE_TEMPLATE_DATUM(WPeopleJsonSaver)'],['../namespaceop.html#a020603e3ad6326cb1dce43485157f768',1,'op::COMPILE_TEMPLATE_DATUM(WPoseExtractor)'],['../namespaceop.html#a635579f5f8d20b8e65f4f94da4d3d2f2',1,'op::COMPILE_TEMPLATE_DATUM(WHandRenderer)'],['../namespaceop.html#ae5cc3e92ffd9696f01ce7824ebbd0759',1,'op::COMPILE_TEMPLATE_DATUM(WHandExtractorNet)'],['../namespaceop.html#a5cc3f625b2644b1aade85a9458b5503a',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetectorUpdate)'],['../namespaceop.html#a361310c59d16e88a4d2450a80f078f01',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetectorTracking)'],['../namespaceop.html#a767385c8d3ebe736e1752825ab4d4ea0',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetectorFromTxt)'],['../namespaceop.html#a0424a8e4dc8ceb5e8d8a2230c157a7fd',1,'op::COMPILE_TEMPLATE_DATUM(WHandDetector)'],['../namespaceop.html#ae88e9ced5d14fa221205b492ff76c56b',1,'op::COMPILE_TEMPLATE_DATUM(WGuiInfoAdder)'],['../namespaceop.html#a54b38240e45009f7e6a25d956ac96fe0',1,'op::COMPILE_TEMPLATE_DATUM(WGui3D)'],['../namespaceop.html#ade3b2e4b105242a3cf41def3def1691d',1,'op::COMPILE_TEMPLATE_DATUM(WGui)'],['../namespaceop.html#a0db530b6f607aa43e8f9154b308d207a',1,'op::COMPILE_TEMPLATE_DATUM(WVideoSaver3D)'],['../namespaceop.html#a49bd4106b0cd1cb81980329b06c0d2c8',1,'op::COMPILE_TEMPLATE_DATUM(WVideoSaver)'],['../namespaceop.html#af9e0d9e4028c0589b5eeeaed42a5088c',1,'op::COMPILE_TEMPLATE_DATUM(WUdpSender)'],['../namespaceop.html#a31ad937a2e52ea08ce925031d26616b9',1,'op::COMPILE_TEMPLATE_DATUM(WPoseSaver)'],['../namespaceop.html#aaca98fe6101cda512a43c513182ae5cc',1,'op::COMPILE_TEMPLATE_DATUM(WScaleAndSizeExtractor)'],['../namespaceop.html#ae5dac6cf1ccdf461838f9795be8fda03',1,'op::COMPILE_TEMPLATE_DATUM(Thread)'],['../namespaceop.html#a674a652ad38b355285417529fc050847',1,'op::COMPILE_TEMPLATE_DATUM(WPersonIdExtractor)'],['../namespaceop.html#add981a5f6a49d35cc316a54c613497f3',1,'op::COMPILE_TEMPLATE_DATUM(WQueueOrderer)'],['../namespaceop.html#a5660f0e72781ce6d7db9eb78b582e5c6',1,'op::COMPILE_TEMPLATE_DATUM(WorkerProducer)'],['../namespaceop.html#a01aa5c6e24026536367cf47a64e9bba5',1,'op::COMPILE_TEMPLATE_DATUM(WorkerConsumer)'],['../namespaceop.html#a5642545fda1c3bbaf60810cf0e2d2c1d',1,'op::COMPILE_TEMPLATE_DATUM(Worker)'],['../namespaceop.html#ad22c543a4376e943b728e657fab5ed9f',1,'op::COMPILE_TEMPLATE_DATUM(WIdGenerator)'],['../namespaceop.html#adfc12925650978828707c1c0dcbebd0e',1,'op::COMPILE_TEMPLATE_DATUM(WFpsMax)'],['../namespaceop.html#ac06eeab84c4861ef08834855b48750a6',1,'op::COMPILE_TEMPLATE_DATUM(ThreadManager)'],['../namespaceop.html#ab1e242b1ae7ff3300324fbfedebb52fc',1,'op::COMPILE_TEMPLATE_DATUM(WPoseExtractorNet)'],['../namespaceop.html#aee90a0429c2d14da0c3a85cd67a17821',1,'op::COMPILE_TEMPLATE_DATUM(SubThreadQueueOut)'],['../namespaceop.html#a63605cf0e6f4049beacf6094995272e8',1,'op::COMPILE_TEMPLATE_DATUM(SubThreadQueueInOut)'],['../namespaceop.html#a506578f3e723f992eabb627a371351ba',1,'op::COMPILE_TEMPLATE_DATUM(SubThreadQueueIn)'],['../namespaceop.html#a36492d15f864f7c813a573789ea554aa',1,'op::COMPILE_TEMPLATE_DATUM(SubThre
adNoQueue)'],['../namespaceop.html#af98c8e514e79d4718fb1fc64dc0e431b',1,'op::COMPILE_TEMPLATE_DATUM(SubThread)'],['../namespaceop.html#aa7f93261bd6d87f86c45e933607a0678',1,'op::COMPILE_TEMPLATE_DATUM(Queue)'],['../namespaceop.html#aa65c081c13e0d0453938a3c41d04dc49',1,'op::COMPILE_TEMPLATE_DATUM(PriorityQueue)'],['../namespaceop.html#ae76afeeeaedaebe6941f41a4bdf50e2a',1,'op::COMPILE_TEMPLATE_DATUM(WPoseRenderer)'],['../namespaceop.html#a47758c703fccdbb65c26dc7bc4022237',1,'op::COMPILE_TEMPLATE_DATUM(WKeypointScaler)'],['../namespaceop.html#a505ea16cc6c2c0068bbf4e7269dc8e0a',1,'op::COMPILE_TEMPLATE_DATUM(WImageSaver)'],['../namespaceop.html#a53f346232d0743f3dd0f547de1fc508f',1,'op::COMPILE_TEMPLATE_DATUM(WPoseTriangulation)'],['../namespaceop.html#a9076fc1719030c2a74f21682999d2315',1,'op::COMPILE_TEMPLATE_DATUM(WCvMatToOpInput)'],['../namespaceop.html#a6d12bd1e42cfb63d2f780bed55fa01fb',1,'op::COMPILE_TEMPLATE_DATUM(WCvMatToOpOutput)'],['../namespaceop.html#aaee32c4c68404e5086844bcb911b7a20',1,'op::COMPILE_TEMPLATE_DATUM(WKeepTopNPeople)'],['../namespaceop.html#a1d9f50688522ed7368acc33a09ae9ece',1,'op::COMPILE_TEMPLATE_DATUM(WOpOutputToCvMat)'],['../namespaceop.html#a89984557f6968584d1938afe7b9f32bd',1,'op::COMPILE_TEMPLATE_DATUM(WVerbosePrinter)'],['../namespaceop.html#a196f17357cd1c1bb02e24e4e8a0e6ec3',1,'op::COMPILE_TEMPLATE_DATUM(WFaceDetector)'],['../namespaceop.html#abf3a59fc4662f07e6ba19b95bd4da32f',1,'op::COMPILE_TEMPLATE_DATUM(WFaceDetectorOpenCV)'],['../namespaceop.html#ab5b47f0069e9f397ff891194b20d28f2',1,'op::COMPILE_TEMPLATE_DATUM(WFaceExtractorNet)'],['../namespaceop.html#af42afa53c725d556c14928b2603883e3',1,'op::COMPILE_TEMPLATE_DATUM(WFaceRenderer)'],['../namespaceop.html#af46e80e6bac0f815006759df4c9d00c3',1,'op::COMPILE_TEMPLATE_DATUM(WCocoJsonSaver)'],['../namespaceop.html#a57c4f3ada0db4882a4106d4dedf08012',1,'op::COMPILE_TEMPLATE_DATUM(WFaceSaver)'],['../namespaceop.html#a602d5d238fe0c7096698cf36b7dee9ab',1,'op::COMPILE_TEMPLATE_DATUM(WHandSaver)'],['../namespaceop.html#a7ac10b9f503668695643c366e25f3b68',1,'op::COMPILE_TEMPLATE_DATUM(WHeatMapSaver)']]], + ['configure_1667',['configure',['../classop_1_1_wrapper_t.html#a3ce073fb177c316aaeab406c1f4808db',1,'op::WrapperT::configure(const WrapperStructFace &wrapperStructFace)'],['../classop_1_1_wrapper_t.html#aaa18264f99da260efb8fa12dd293ee75',1,'op::WrapperT::configure(const WrapperStructHand &wrapperStructHand)'],['../classop_1_1_wrapper_t.html#ad9d83f0332c27aa64cde22c66755deec',1,'op::WrapperT::configure(const WrapperStructExtra &wrapperStructExtra)'],['../classop_1_1_wrapper_t.html#af3d5d56e63b8c6faee0d7954db95c69d',1,'op::WrapperT::configure(const WrapperStructInput &wrapperStructInput)'],['../classop_1_1_wrapper_t.html#a98a7310bc4062fb72f5d26e37d6d7c70',1,'op::WrapperT::configure(const WrapperStructOutput &wrapperStructOutput)'],['../classop_1_1_wrapper_t.html#a7a37b4a945171fd42d1ab16b0b7e8205',1,'op::WrapperT::configure(const WrapperStructGui &wrapperStructGui)'],['../classop_1_1_wrapper_t.html#a7508886116ccfbbb8567a1921591751e',1,'op::WrapperT::configure(const WrapperStructPose &wrapperStructPose)']]], + ['configurethreadmanager_1668',['configureThreadManager',['../namespaceop.html#a4adaee31db7ae1d3f963daa9e022e62f',1,'op']]], + ['connectbodypartscpu_1669',['connectBodyPartsCpu',['../namespaceop.html#a2ae13dae91c41b29063b48158ccbcc4e',1,'op']]], + ['connectbodypartsgpu_1670',['connectBodyPartsGpu',['../namespaceop.html#a927468f6931ddb1e7d1e6e6e59b8bd36',1,'op']]], + 
['connectbodypartsocl_1671',['connectBodyPartsOcl',['../namespaceop.html#a77a4d87bbee791dfba0667aa10bcca99',1,'op']]], + ['copyto_1672',['copyTo',['../classop_1_1_matrix.html#a6714cef92d6dce3089841ea124cd2b7d',1,'op::Matrix']]], + ['count_1673',['count',['../classop_1_1_array_cpu_gpu.html#a50f82490bab162626760d420f5f6779c',1,'op::ArrayCpuGpu::count(const int start_axis) const'],['../classop_1_1_array_cpu_gpu.html#acd9ea6e75dd2eb516d6a91bac91e43e4',1,'op::ArrayCpuGpu::count(const int start_axis, const int end_axis) const'],['../classop_1_1_array_cpu_gpu.html#aa3e701c15f11e563e0b442c28143188d',1,'op::ArrayCpuGpu::count() const']]], + ['cpu_5fdata_1674',['cpu_data',['../classop_1_1_array_cpu_gpu.html#a7e982b668191924e6665645790fa18a2',1,'op::ArrayCpuGpu']]], + ['cpu_5fdiff_1675',['cpu_diff',['../classop_1_1_array_cpu_gpu.html#ac5d005ccb8a3b8aba935e5276fcd20e4',1,'op::ArrayCpuGpu']]], + ['cputogpumemoryifnotcopiedyet_1676',['cpuToGpuMemoryIfNotCopiedYet',['../classop_1_1_gpu_renderer.html#ac7c1ab0eebf1d54b55cc65a5560bad7b',1,'op::GpuRenderer']]], + ['createarray_1677',['createArray',['../classop_1_1_cv_mat_to_op_input.html#ad7c70d7843d64ab0dce9a8a1d993e5b5',1,'op::CvMatToOpInput::createArray()'],['../classop_1_1_cv_mat_to_op_output.html#ad15a20bf40389e7dea888e982bd64e8b',1,'op::CvMatToOpOutput::createArray()']]], + ['createmultiviewtdatum_1678',['createMultiviewTDatum',['../namespaceop.html#a3da2a2a2f5ac58cfba53ea0d43ac6751',1,'op']]], + ['createpeoplevector_1679',['createPeopleVector',['../namespaceop.html#ae5d883da8c8f11356d5e1b61bc3a99b6',1,'op']]], + ['createproducer_1680',['createProducer',['../namespaceop.html#a6a34909c6c4d79a215f163291111d556',1,'op']]], + ['cudacheck_1681',['cudaCheck',['../namespaceop.html#a2af8422ada0de882cc222920ca15c6d2',1,'op']]], + ['cvmattoopinput_1682',['CvMatToOpInput',['../classop_1_1_cv_mat_to_op_input.html#a449eacb6cce9678c3ae399c68a45a2e5',1,'op::CvMatToOpInput']]], + ['cvmattoopoutput_1683',['CvMatToOpOutput',['../classop_1_1_cv_mat_to_op_output.html#a054c4dd7892ad540405413b071459b42',1,'op::CvMatToOpOutput']]] +]; diff --git a/web/html/doc/search/functions_3.html b/web/html/doc/search/functions_3.html new file mode 100644 index 000000000..d79f55b8e --- /dev/null +++ b/web/html/doc/search/functions_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    + + diff --git a/web/html/doc/search/functions_3.js b/web/html/doc/search/functions_3.js new file mode 100644 index 000000000..40a1024eb --- /dev/null +++ b/web/html/doc/search/functions_3.js @@ -0,0 +1,29 @@ +var searchData= +[ + ['data_1684',['data',['../classop_1_1_matrix.html#a69d3316b25c1fce55f067e92b31e4d57',1,'op::Matrix']]], + ['data_5fat_1685',['data_at',['../classop_1_1_array_cpu_gpu.html#a4836fabbedf7e1ef97bfbd4d33db3d96',1,'op::ArrayCpuGpu']]], + ['dataconst_1686',['dataConst',['../classop_1_1_matrix.html#a9af637b50e808c1d84e179cc6acb45b4',1,'op::Matrix']]], + ['dataformattostring_1687',['dataFormatToString',['../namespaceop.html#a9d121f33179e41075f4602eb6527e658',1,'op']]], + ['datapseudoconst_1688',['dataPseudoConst',['../classop_1_1_matrix.html#ab65ba706b58675da9a4512d448d44370',1,'op::Matrix']]], + ['datum_1689',['Datum',['../structop_1_1_datum.html#a72c75834671aebe44705738fb5efc3c5',1,'op::Datum::Datum()'],['../structop_1_1_datum.html#a42f9aef848c6335c5a81cad374319f0b',1,'op::Datum::Datum(const Datum &datum)'],['../structop_1_1_datum.html#a2d4940d8cb12d95b8588cd0280f6524c',1,'op::Datum::Datum(Datum &&datum)']]], + ['datumproducer_1690',['DatumProducer',['../classop_1_1_datum_producer.html#a4d52ee6961e2c5c9564f49d203a2865e',1,'op::DatumProducer']]], + ['datumproducerconstructor_1691',['datumProducerConstructor',['../namespaceop.html#ad72abbc7b2600f543e4ee8e28392711e',1,'op']]], + ['datumproducerconstructorrunningandgetdatumapplyplayercontrols_1692',['datumProducerConstructorRunningAndGetDatumApplyPlayerControls',['../namespaceop.html#a177ffd3101c7a1f5cf32e100474a1234',1,'op']]], + ['datumproducerconstructorrunningandgetdatumframeintegrity_1693',['datumProducerConstructorRunningAndGetDatumFrameIntegrity',['../namespaceop.html#a427c6244ee27171037bc201f401de16a',1,'op']]], + ['datumproducerconstructorrunningandgetdatumisdatumproducerrunning_1694',['datumProducerConstructorRunningAndGetDatumIsDatumProducerRunning',['../namespaceop.html#a71c68de51a3608e782854c298b91cd62',1,'op']]], + ['datumproducerconstructorrunningandgetnextframenumber_1695',['datumProducerConstructorRunningAndGetNextFrameNumber',['../namespaceop.html#a71cdc487bbec12ddbe4bac9123745494',1,'op']]], + ['datumproducerconstructortoomanyconsecutiveemptyframes_1696',['datumProducerConstructorTooManyConsecutiveEmptyFrames',['../namespaceop.html#a5001474237d31d72c9145a84ec5143da',1,'op']]], + ['define_5fbool_1697',['DEFINE_bool',['../flags_8hpp.html#a2c805a3cd1797a9f67783ed5c9b7c5b1',1,'DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `add_heatmaps_parts`, but adding the heatmap corresponding to" " background."): flags.hpp'],['../flags_8hpp.html#a4670eb3cdedb3f3bac3886e2c21a7750',1,'DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and" " analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps." " If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential" " memory order: body parts + bkg + PAFs. It will follow the order on" " POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will" " considerably decrease. Not required for OpenPose, enable it only if you intend to" " explicitly use this information later."): flags.hpp'],['../flags_8hpp.html#a4d830b4bcbd7998d8c6de665c0531ce9',1,'DEFINE_bool(maximize_positives, false, "It reduces the thresholds to accept a person candidate. It highly increases both false and" " true positives. 
I.e., it maximizes average recall but could harm average precision."): flags.hpp'],['../flags_8hpp.html#abada704ec59515bb12563262a3f21aeb',1,'DEFINE_bool(frame_undistort, false, "If false (default), it will not undistort the image, if true, it will undistortionate them" " based on the camera parameters found in `camera_parameter_path`"): flags.hpp'],['../flags_8hpp.html#adf7a6ca551fbd934deb6784bfe37f897',1,'DEFINE_bool(process_real_time, false, "Enable to keep the original source frame rate (e.g., for video). If the processing time is" " too long, it will skip frames. If it is too fast, it will slow it down."): flags.hpp'],['../flags_8hpp.html#ac0db13ec99b09bf9bd38582da33cff1d',1,'DEFINE_bool(frames_repeat, false, "Repeat frames when finished."): flags.hpp'],['../flags_8hpp.html#ae9ea69ae8e64ee99bb4516199f847980',1,'DEFINE_bool(frame_flip, false, "Flip/mirror each frame (e.g., for real time webcam demonstrations)."): flags.hpp'],['../flags_8hpp.html#a245491f4f0bb36e5e8655877402f50eb',1,'DEFINE_bool(flir_camera, false, "Whether to use FLIR (Point-Grey) stereo camera."): flags.hpp'],['../flags_8hpp.html#af4ef631dab577c13f59e10b626c580f6',1,'DEFINE_bool(disable_multi_thread, false, "It would slightly reduce the frame rate in order to highly reduce the lag. Mainly useful" " for 1) Cases where it is needed a low latency (e.g., webcam in real-time scenarios with" " low-range GPU devices); and 2) Debugging OpenPose when it is crashing to locate the" " error."): flags.hpp'],['../flags_8hpp.html#ad9114bc8e1fc8f306e5296eaae5d542f',1,'DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `add_heatmaps_parts`, but adding the PAFs."): flags.hpp'],['../flags_8hpp.html#a9973307b6bd2af114083ba1badf4c297',1,'DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the" " op::Datum::poseCandidates array with the body part candidates. Candidates refer to all" " the detected body parts, before being assembled into people. Note that the number of" " candidates is equal or higher than the number of final body parts (i.e., after being" " assembled into people). The empty body parts are filled with 0s. Program speed will" " slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly" " use this information."): flags.hpp'],['../flags_8hpp.html#a9b1025da93c8ab21c0fdfe4941f26ad4',1,'DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g." " `model_folder`. Note that this will considerable slow down the performance and increase" " the required GPU memory. In addition, the greater number of people on the image, the" " slower OpenPose will be."): flags.hpp'],['../flags_8hpp.html#a064e35f07a2835d7b4b5d31f0f625865',1,'DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g." " `model_folder`. Analogously to `--face`, it will also slow down the performance, increase" " the required GPU memory and its speed depends on the number of people."): flags.hpp'],['../flags_8hpp.html#af12ef9f66fbf74e05c08b69caf54821e',1,'DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system." " 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction" " results. Note that it will only display 1 person. 
If multiple people is present, it will" " fail."): flags.hpp'],['../flags_8hpp.html#aefe90773deaaa525b3b115d37b46e281',1,'DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames."): flags.hpp'],['../flags_8hpp.html#ad677c3dfae13a1ec9a3ee2fabe9e37a0',1,'DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black" " background, instead of being rendered into the original image. Related: `part_to_show`," " `alpha_pose`, and `alpha_pose`."): flags.hpp'],['../flags_8hpp.html#abd8fa8293ee3a05d4c5a2b6082460ab0',1,'DEFINE_bool(fullscreen, false, "Run in full-screen mode (press f during runtime to toggle)."): flags.hpp'],['../flags_8hpp.html#ab8ac5e6be119dc06f351810053ea8bcf',1,'DEFINE_bool(no_gui_verbose, false, "Do not write text on output images on GUI (e.g., number of current frame and people). It" " does not affect the pose rendering."): flags.hpp'],['../flags_8hpp.html#af50c82bf332c1a699f0615e7eb734c51',1,'DEFINE_bool(write_video_with_audio, false, "If the input is video and the output is so too, it will save the video with audio. It" " requires the output video file path finishing in `.mp4` format (see `write_video` for" " details)."): flags.hpp']]], + ['define_5fdouble_1698',['DEFINE_double',['../flags_8hpp.html#abecb5274ddd75ca51fb40064478b1ed3',1,'DEFINE_double(scale_gap, 0.25, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1." " If you want to change the initial scale, you actually want to multiply the" " `net_resolution` by your desired initial scale."): flags.hpp'],['../flags_8hpp.html#a0fe12ed9bee07b6120d595b3a1b85b15',1,'DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints."): flags.hpp'],['../flags_8hpp.html#ac2cc95296b63a048521a2c41dce82b45',1,'DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand."): flags.hpp'],['../flags_8hpp.html#af4490397ad693c3d51835699a5dcddf3',1,'DEFINE_double(write_video_fps, -1., "Frame rate for the recorded video. By default, it will try to get the input frames producer" " frame rate (e.g., input video or webcam frame rate). If the input frames producer does not" " have a set FPS (e.g., image_dir or webcam if OpenCV not compiled with its support), set" " this value accordingly (e.g., to the frame rate displayed by the OpenPose GUI)."): flags.hpp'],['../flags_8hpp.html#a87455dc2555757a087e99d8b52138835',1,'DEFINE_double(cli_verbose, -1.f, "If -1, it will be disabled (default). If it is a positive integer number, it will print on" " the command line every `verbose` frames. If number in the range (0,1), it will print the" " progress every `verbose` times the total of frames."): flags.hpp'],['../flags_8hpp.html#aea6dc9d3cb9ea69426d012d1f41fadf0',1,'DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand."): flags.hpp'],['../flags_8hpp.html#af437a0d8f293cd02b992a94b268571a4',1,'DEFINE_double(upsampling_ratio, 0., "Upsampling ratio between the `net_resolution` and the output net results. A value less" " or equal than 0 (default) will use the network default value (recommended)."): flags.hpp'],['../flags_8hpp.html#a349e235659cc7b31dcf5db0d3d468fce',1,'DEFINE_double(hand_scale_range, 0.4, "Analogous purpose than `scale_gap` but applied to the hand keypoint detector. Total range" " between smallest and biggest scale. The scales will be centered in ratio 1. 
E.g., if" " scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2."): flags.hpp'],['../flags_8hpp.html#a166e98128271506645ce14000faace73',1,'DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be" " rendered. Note: Rendered refers only to visual display in the OpenPose basic GUI, not in" " the saved results. Generally, a high threshold (> 0.5) will only render very clear body" " parts; while small thresholds (~0.1) will also output guessed and occluded keypoints," " but also more false positives (i.e., wrong detections)."): flags.hpp'],['../flags_8hpp.html#a33562cf43d115a4d26f9958aa04c15ff',1,'DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will" " hide it. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#af9d388afd71b21640a573e6e8cad4c1a',1,'DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the" " heatmap, 0 will only show the frame. Only valid for GPU rendering."): flags.hpp'],['../flags_8hpp.html#a8dfdbe0ff3a68f46e440f379621f8f0a',1,'DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints."): flags.hpp'],['../flags_8hpp.html#a83309bdfd7daadfb89be65edf399ac9a',1,'DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face."): flags.hpp'],['../flags_8hpp.html#a9b80aae5395b7d99c980198374bde9f2',1,'DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face."): flags.hpp'],['../flags_8hpp.html#ace91dac10649fcbe836a71459b2f2584',1,'DEFINE_double(fps_max, -1., "Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as" " possible. Example usage: If OpenPose is displaying images too quickly, this can reduce" " the speed so the user can analyze better each frame from the GUI."): flags.hpp'],['../flags_8hpp.html#ad90e61b31f6bd48c3514195da36ff31c',1,'DEFINE_double(net_resolution_dynamic, 1., "This flag only applies to images or custom inputs (not to video or webcam). If it is zero" " or a negative value, it means that using `-1` in `net_resolution` will behave as explained" " in its description. Otherwise, and to avoid out of memory errors, the `-1` in" " `net_resolution` will clip to this value times the default 16/9 aspect ratio value (which" " is 656 width for a 368 height). E.g., `net_resolution_dynamic 10 net_resolution -1x368`" " will clip to 6560x368 (10 x 656). Recommended 1 for small GPUs (to avoid out of memory" " errors but maximize speed) and 0 for big GPUs (for maximum accuracy and speed)."): flags.hpp']]], + ['define_5fint32_1699',['DEFINE_int32',['../flags_8hpp.html#a547efed657b6e562d8d5f071124fcf17',1,'DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e., the scale of the (x,y)" " coordinates that will be saved with the `write_json` & `write_keypoint` flags." " Select `0` to scale it to the original source resolution; `1`to scale it to the net output" " size (set with `net_resolution`); `2` to scale it to the final output size (set with" " `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left" " corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where" " (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. 
Non" " related with `scale_number` and `scale_gap`."): flags.hpp'],['../flags_8hpp.html#a61f245285b5a4b77b1d923276fe6f995',1,'DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering" " (slower but greater functionality, e.g., `alpha_X` flags). If -1, it will pick CPU if" " CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render" " both `outputData` and `cvOutputData` with the original image and desired body part to be" " shown (i.e., keypoints, heat maps or PAFs)."): flags.hpp'],['../flags_8hpp.html#ac5e8f82d85a3eb0ee72a64569395497c',1,'DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with" " top scores. The score is based in person area over the image, body part score, as well as" " joint score (between each pair of connected body parts). Useful if you know the exact" " number of people in the scene, so it can remove false positives (if all the people have" " been detected. However, it might also include false negatives by removing very small or" " highly occluded people. -1 will keep them all."): flags.hpp'],['../flags_8hpp.html#aa2bc11c618a37698d88f7ae100e1729f',1,'DEFINE_int32(body, 1, "Select 0 to disable body keypoint detection (e.g., for faster but less accurate face" " keypoint detection, custom hand detector, etc.), 1 (default) for body keypoint" " estimation, and 2 to disable its internal body pose estimation network but still" " still run the greedy association parsing algorithm"): flags.hpp'],['../flags_8hpp.html#a6561fc0841b80f5c19a1c4bc549175e9',1,'DEFINE_int32(scale_number, 1, "Number of scales to average."): flags.hpp'],['../flags_8hpp.html#aa3c62563ce9d99c25d4a2977f253c6c7',1,'DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer" " rounded [0,255]; and 3 for no scaling."): flags.hpp'],['../flags_8hpp.html#a71a0fc42dd98d1739571e4f7fed4873c',1,'DEFINE_int32(face_detector, 0, "Kind of face rectangle detector. Select 0 (default) to select OpenPose body detector (most" " accurate one and fastest one if body is enabled), 1 to select OpenCV face detector (not" " implemented for hands), 2 to indicate that it will be provided by the user, or 3 to" " also apply hand tracking (only for hand). Hand tracking might improve hand keypoint" " detection for webcam (if the frame rate is high enough, i.e., >7 FPS per GPU) and video." " This is not person ID tracking, it simply looks for hands in positions at which hands were" " located in previous frames, but it does not guarantee the same person ID among frames."): flags.hpp'],['../flags_8hpp.html#ae7ff5e3adea9c5f572455ec30dd3fbff',1,'DEFINE_int32(hand_detector, 0, "Kind of hand rectangle detector. Analogous to `--face_detector`."): flags.hpp'],['../flags_8hpp.html#ad696d262dc852c2f872470b90c25fafe',1,'DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results" " were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4."): flags.hpp'],['../flags_8hpp.html#ac9b1dcda85ac079222769931cad6bebc',1,'DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. 
By default (-1), it will" " require max(2, min(4, #cameras-1)) cameras to see the keypoint in order to reconstruct" " it."): flags.hpp'],['../flags_8hpp.html#a13dcbbdf12e9e72eb29ccf25d7a7cd42',1,'DEFINE_int32(3d_views, -1, "Complementary option for `--image_dir` or `--video`. OpenPose will read as many images per" " iteration, allowing tasks such as stereo camera processing (`--3d`). Note that" " `--camera_parameter_path` must be set. OpenPose must find as many `xml` files in the" " parameter folder as this number indicates."): flags.hpp'],['../flags_8hpp.html#a6d2331153c7051c742d11dcb0a4220ec',1,'DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The" " value indicates the number of frames where tracking is run between each OpenPose keypoint" " detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint" " detector and tracking for potentially higher accuracy than only OpenPose."): flags.hpp'],['../flags_8hpp.html#a2c213e3a0c01a36f52667d1707b49062',1,'DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D" " keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing" " the number of threads will increase the speed but also the global system latency."): flags.hpp'],['../flags_8hpp.html#add0ca9baf682a84f3236e7f5c001db06',1,'DEFINE_int32(part_to_show, 0, "Prediction channel to visualize: 0 (default) for all the body parts, 1 for the background" " heat map, 2 for the superposition of heatmaps, 3 for the superposition of PAFs," " 4-(4+#keypoints) for each body part heat map, the following ones for each body part pair" " PAF."): flags.hpp'],['../flags_8hpp.html#a796a3b14805d5e0b01b21b9bab844382',1,'DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same" " configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#a46b9edf947872e29ea5cbd7a95bee719',1,'DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same" " configuration that `render_pose` is using."): flags.hpp'],['../flags_8hpp.html#ab598c69ed7164089afcdd4149c24a5eb',1,'DEFINE_int32(display, -1, "Display mode: -1 for automatic selection; 0 for no display (useful if there is no X server" " and/or to slightly speed up the processing if visual output is not required); 2 for 2-D" " display; 3 for 3-D display (if `--3d` enabled); and 1 for both 2-D and 3-D display."): flags.hpp'],['../flags_8hpp.html#a80cdeb8d094d26ae5840a74ccff8613c',1,'DEFINE_int32(write_coco_json_variants, 1, "Add 1 for body, add 2 for foot, 4 for face, and/or 8 for hands. Use 0 to use all the" " possible candidates. E.g., 7 would mean body+foot+face COCO JSON."): flags.hpp'],['../flags_8hpp.html#a17550ab833803b7862beaab957642af6',1,'DEFINE_int32(write_coco_json_variant, 0, "Currently, this option is experimental and only makes effect on car JSON generation. It" " selects the COCO variant for cocoJsonSaver."): flags.hpp'],['../flags_8hpp.html#a8bd040787ac075ae4cf483be01fe2c5f',1,'DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your" " machine."): flags.hpp'],['../flags_8hpp.html#a844330d264f5648ae7d99b76f72f391a',1,'DEFINE_int32(num_gpu_start, 0, "GPU device start number."): flags.hpp'],['../flags_8hpp.html#aae4cfb31c1a5934475d8cbd6b2f8268e',1,'DEFINE_int32(logging_level, 3, "The logging level. 
Integer in the range [0, 255]. 0 will output any opLog() message," " while 255 will not output any. Current OpenPose library messages are in the range 0-4:" " 1 for low priority messages and 4 for important ones."): flags.hpp'],['../flags_8hpp.html#a2d8a3ae1f10dd657619e2a5d2dcb4b61',1,'DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some" " runtime statistics at this frame number."): flags.hpp'],['../flags_8hpp.html#a807c2eb873d9e727f14d6c7ee6e02e11',1,'DEFINE_int32(camera, -1, "The camera index for cv::VideoCapture. Integer in the range [0, 9]. Select a negative" " number (by default), to auto-detect and open the first available camera."): flags.hpp'],['../flags_8hpp.html#a513eca9e40da3a149e02c0c1fb6d10d3',1,'DEFINE_int32(flir_camera_index, -1, "Select -1 (default) to run on all detected flir cameras at once. Otherwise, select the flir" " camera index to run, where 0 corresponds to the detected flir camera with the lowest" " serial number, and `n` to the `n`-th lowest serial number camera."): flags.hpp'],['../flags_8hpp.html#adbd3e76b28ecc7415ccb782c3419a9de',1,'DEFINE_int32(frame_rotate, 0, "Rotate each frame, 4 possible values: 0, 90, 180, 270."): flags.hpp']]], + ['define_5fstring_1700',['DEFINE_string',['../flags_8hpp.html#a456ac1650914494fbdacd53b55420e2b',1,'DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located."): flags.hpp'],['../flags_8hpp.html#a4fff2a82464bb9e180e04f70f0d5cbad',1,'DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the" " input image resolution."): flags.hpp'],['../flags_8hpp.html#a8e45f79c948490e55be06e3541b3681f',1,'DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g., `BODY_25` (fastest for CUDA version, most accurate, and includes" " foot keypoints), `COCO` (18 keypoints), `MPI` (15 keypoints, least accurate model but" " fastest on CPU), `MPI_4_layers` (15 keypoints, even faster but less accurate)."): flags.hpp'],['../flags_8hpp.html#aa0cc9af40fd8fdee2d8c61da206913dc',1,'DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is" " decreased, the speed increases. For maximum speed-accuracy balance, it should keep the" " closest aspect ratio possible to the images or videos to be processed. Using `-1` in" " any of the dimensions, OP will choose the optimal aspect ratio depending on the user's" " input value. E.g., the default `-1x368` is equivalent to `656x368` in 16:9 resolutions," " e.g., full HD (1980x1080) and HD (1280x720) resolutions."): flags.hpp'],['../flags_8hpp.html#a1da3324efb2a917d0714100dcdb13aba',1,'DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint" " detector. 320x320 usually works fine while giving a substantial speed up when multiple" " faces on the image."): flags.hpp'],['../flags_8hpp.html#a81e3bebeb0cec269b90097fb5856c96f',1,'DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. 
Analogous to `net_resolution` but applied to the hand keypoint" " detector."): flags.hpp'],['../flags_8hpp.html#a4a4d36b7f90a4d53a0fa29f86bbbb9aa',1,'DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format."): flags.hpp'],['../flags_8hpp.html#a91ba99b997951b09eab545a40c019f85',1,'DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g., png, jpg or bmp. Check the OpenCV" " function cv::imwrite for all compatible extensions."): flags.hpp'],['../flags_8hpp.html#abd5499ff7014225c02e6149bde93e3a3',1,'DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the" " final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag" " `write_video_fps` controls FPS. Alternatively, the video extension can be `.mp4`," " resulting in a file with a much smaller size and allowing `--write_video_with_audio`." " However, that would require: 1) Ubuntu or Mac system, 2) FFmpeg library installed" " (`sudo apt-get install ffmpeg`), 3) the creation temporarily of a folder with the same" " file path than the final video (without the extension) to storage the intermediate frames" " that will later be used to generate the final MP4 video."): flags.hpp'],['../flags_8hpp.html#a85ed9ce7f145fad05a50344a6fdbee37',1,'DEFINE_string(write_video_3d, "", "Analogous to `--write_video`, but applied to the 3D output."): flags.hpp'],['../flags_8hpp.html#a08c988c91c179c16944f9f703c24324b',1,'DEFINE_string(write_video_adam, "", "Experimental, not available yet. Analogous to `--write_video`, but applied to Adam model."): flags.hpp'],['../flags_8hpp.html#a66ec3a67de281684d9ff60c7b80c9430',1,'DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose" " keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled)."): flags.hpp'],['../flags_8hpp.html#a4ebc35e01d48db77575a1cdd53ac0815',1,'DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format. If foot, face," " hands, etc. JSON is also desired (`--write_coco_json_variants`), they are saved with" " different file name suffix."): flags.hpp'],['../flags_8hpp.html#af1f0085881667603ed4e0404d7140bdc',1,'DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag" " must be enabled."): flags.hpp'],['../flags_8hpp.html#aac91c51c83200f18076e7354067ccbb0',1,'DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`." " For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for" " floating values. See `doc/02_output.md` for more details."): flags.hpp'],['../flags_8hpp.html#a8763644943c3413220cfe6bf8f385d44',1,'DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format" " with `write_keypoint_format`."): flags.hpp'],['../flags_8hpp.html#a488d04acd61a19fe00cd2e56844dd8c5',1,'DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml," " yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead."): flags.hpp'],['../flags_8hpp.html#a1f0ce14d63633af19e375d6fbcccc463',1,'DEFINE_string(write_bvh, "", "Experimental, not available yet. 
E.g., `~/Desktop/mocapResult.bvh`."): flags.hpp'],['../flags_8hpp.html#abd20da73260490fba6e09a17c235fc4a',1,'DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`."): flags.hpp'],['../flags_8hpp.html#a7ffa026d9b667e5551909aba895f0dfb',1,'DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication."): flags.hpp'],['../flags_8hpp.html#a81edc2bb181cd79c98bfae1520f8ab71',1,'DEFINE_string(prototxt_path, "", "The combination `--model_folder` + `--prototxt_path` represents the whole path to the" " prototxt file. If empty, it will use the default OpenPose ProtoTxt file."): flags.hpp'],['../flags_8hpp.html#abcc67acb9ca2d225394445eb6017bc4d',1,'DEFINE_string(camera_parameter_path, "models/cameraParameters/flir/", "String with the folder where the camera parameters are located. If there" " is only 1 XML file (for single video, webcam, or images from the same camera), you must" " specify the whole XML file path (ending in .xml)."): flags.hpp'],['../flags_8hpp.html#a02962b73af4084b90494b777ff1826c1',1,'DEFINE_string(ip_camera, "", "String with the IP camera URL. It supports protocols like RTSP and HTTP."): flags.hpp'],['../flags_8hpp.html#add5d5807feef88090f8c9d11bf904ba8',1,'DEFINE_string(image_dir, "", "Process a directory of images. Use `examples/media/` for our default example folder with 20" " images. Read all standard formats (jpg, png, bmp, etc.)."): flags.hpp'],['../flags_8hpp.html#a5690d1f0bce6904d9ccea011b0a0262f',1,'DEFINE_string(video, "", "Use a video file instead of the camera. Use `examples/media/video.avi` for our default" " example video."): flags.hpp'],['../flags_8hpp.html#ab1d4b66fac361d1f3f450cd6bc5311d4',1,'DEFINE_string(camera_resolution, "-1x-1", "Set the camera resolution (either `--camera` or `--flir_camera`). `-1x-1` will use the" " default 1280x720 for `--camera`, or the maximum flir camera resolution available for" " `--flir_camera`"): flags.hpp'],['../flags_8hpp.html#acd0c383a2043852c83e284b669a5cf7e',1,'DEFINE_string(caffemodel_path, "", "The combination `--model_folder` + `--caffemodel_path` represents the whole path to the" " caffemodel file. If empty, it will use the default OpenPose CaffeModel file."): flags.hpp']]], + ['define_5fuint64_1701',['DEFINE_uint64',['../flags_8hpp.html#a9fbfea8bf51a80ff2254f329366a19b8',1,'DEFINE_uint64(frame_last, -1, "Finish on desired frame number. Select -1 to disable. Indexes are 0-based, e.g., if set to" " 10, it will process 11 frames (0-10)."): flags.hpp'],['../flags_8hpp.html#ab7c61c5b25d4b3db1284761933c66aed',1,'DEFINE_uint64(frame_step, 1, "Step or gap between processed frames. E.g., `--frame_step 5` would read and process frames" " 0, 5, 10, etc.."): flags.hpp'],['../flags_8hpp.html#a99f9d7e0dcbf9f6ceddf589dc482d17a',1,'DEFINE_uint64(frame_first, 0, "Start on desired frame number. 
Indexes are 0-based, i.e., the first frame has index 0."): flags.hpp']]], + ['delete_5fcopy_1702',['DELETE_COPY',['../classop_1_1_sub_thread_no_queue.html#a43504502c36461305d656fb87b914749',1,'op::SubThreadNoQueue::DELETE_COPY()'],['../classop_1_1_hand_cpu_renderer.html#a66a7d318b240c73687320bf092363409',1,'op::HandCpuRenderer::DELETE_COPY()'],['../classop_1_1_face_cpu_renderer.html#a233f2a83930d07e4d420b43c8a660f32',1,'op::FaceCpuRenderer::DELETE_COPY()']]], + ['depth_1703',['depth',['../classop_1_1_matrix.html#ae33558724a713e9a36f8dc0062d267a8',1,'op::Matrix']]], + ['detectfaces_1704',['detectFaces',['../classop_1_1_face_detector_open_c_v.html#aba2826bad3f87ce3967e1f999f941fc5',1,'op::FaceDetectorOpenCV::detectFaces()'],['../classop_1_1_face_detector.html#a6db84197d64104da0c26f49ecf8facd1',1,'op::FaceDetector::detectFaces()']]], + ['detecthands_1705',['detectHands',['../classop_1_1_hand_detector_from_txt.html#a1e6ba23fa1486e92a3bdca36b2e86d22',1,'op::HandDetectorFromTxt::detectHands()'],['../classop_1_1_hand_detector.html#a731a19ff54389b1f56b0aae76af6debe',1,'op::HandDetector::detectHands()']]], + ['diff_5fat_1706',['diff_at',['../classop_1_1_array_cpu_gpu.html#a3f10532b10ec840aa9e1dac3ccc7ee25',1,'op::ArrayCpuGpu']]], + ['dims_1707',['dims',['../classop_1_1_matrix.html#aabfd2f25b2459aac510e1e31b207fcf3',1,'op::Matrix']]], + ['disablemultithreading_1708',['disableMultiThreading',['../classop_1_1_wrapper_t.html#a6ba81304df06fbec71103973ce0041c5',1,'op::WrapperT']]], + ['displayframe_1709',['displayFrame',['../classop_1_1_frame_displayer.html#aa99517efbef90cd8a6e171a713c37501',1,'op::FrameDisplayer::displayFrame(const Matrix &frame, const int waitKeyValue=-1)'],['../classop_1_1_frame_displayer.html#a23263864af418160f489072716ba9951',1,'op::FrameDisplayer::displayFrame(const std::vector< Matrix > &frames, const int waitKeyValue=-1)']]] +]; diff --git a/web/html/doc/search/functions_4.html b/web/html/doc/search/functions_4.html new file mode 100644 index 000000000..1657cad0d --- /dev/null +++ b/web/html/doc/search/functions_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
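The `DEFINE_bool` / `DEFINE_double` / `DEFINE_int32` / `DEFINE_string` / `DEFINE_uint64` entries indexed above come from the command-line flags declared in OpenPose's `flags.hpp` through the gflags macros. As a minimal, illustrative sketch of that pattern only (the two flag names mirror indexed entries, but the code below is not taken from the repository and the descriptions are shortened):

```cpp
// Sketch of the gflags pattern behind the DEFINE_* entries indexed above.
// Flag names mirror two indexed entries; help strings are shortened here.
#include <gflags/gflags.h>
#include <iostream>

DEFINE_bool(face, false, "Enables face keypoint detection.");
DEFINE_string(net_resolution, "-1x368", "Network input resolution, multiples of 16.");

int main(int argc, char* argv[])
{
    // Each DEFINE_X(name, default, help) becomes readable as FLAGS_name after parsing.
    gflags::ParseCommandLineFlags(&argc, &argv, true);
    std::cout << "face: " << FLAGS_face
              << ", net_resolution: " << FLAGS_net_resolution << std::endl;
    return 0;
}
```

A binary built this way accepts, e.g., `--face --net_resolution 656x368` on the command line, which is how the flags documented in these search entries are normally driven.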
+ [Doxygen search-results page boilerplate: Loading... / Searching... / No Matches]
    + + diff --git a/web/html/doc/search/functions_4.js b/web/html/doc/search/functions_4.js new file mode 100644 index 000000000..9b8746432 --- /dev/null +++ b/web/html/doc/search/functions_4.js @@ -0,0 +1,21 @@ +var searchData= +[ + ['elemsize_1710',['elemSize',['../classop_1_1_matrix.html#a401c028c88a65b69c0c029cfc990f631',1,'op::Matrix']]], + ['elemsize1_1711',['elemSize1',['../classop_1_1_matrix.html#ae459fb36ef45c1215a7db39af8a8e6cf',1,'op::Matrix']]], + ['emplaceandpop_1712',['emplaceAndPop',['../classop_1_1_wrapper_t.html#a3818c026e33cc573ba8b5722daa003a7',1,'op::WrapperT::emplaceAndPop(TDatumsSP &tDatums)'],['../classop_1_1_wrapper_t.html#aaca8a5dc6f342470c8241fda5cd6cdb9',1,'op::WrapperT::emplaceAndPop(const Matrix &matrix)']]], + ['empty_1713',['empty',['../classop_1_1_array.html#aa173085fa7ec7c7af3a443c617edd97a',1,'op::Array::empty()'],['../classop_1_1_matrix.html#a7f9df7fbdc9ef76e158f72d306f88ec2',1,'op::Matrix::empty()'],['../classop_1_1_string.html#aeae63b12cb517a5cdaf55b836a92a49c',1,'op::String::empty()'],['../classop_1_1_queue_base.html#a74d9b247804a226cf9a0758b25bd3ba9',1,'op::QueueBase::empty()']]], + ['enter_1714',['enter',['../classop_1_1_json_ofstream.html#a32f058b961605d418df1258a1dc5e6a0',1,'op::JsonOfstream']]], + ['error_1715',['error',['../namespaceop.html#a42d364d9fbd1a719341bd7187d97cf18',1,'op::error(const T &message, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#a5f092bd36c716a894cb035e1ead2aca3',1,'op::error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")']]], + ['errordestructor_1716',['errorDestructor',['../namespaceop.html#a758b08be140e27dd2642d286a383be54',1,'op::errorDestructor(const T &message, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#a825f15fdf9dc9cb7473c20f970f15b60',1,'op::errorDestructor(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")']]], + ['errorworker_1717',['errorWorker',['../namespaceop.html#a61af88aac41ef77ab4e8816023fe32f0',1,'op::errorWorker(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#a96d1720ea5d160cfd4c8404060a9bebd',1,'op::errorWorker(const T &message, const int line=-1, const std::string &function="", const std::string &file="")']]], + ['estimateandsaveextrinsics_1718',['estimateAndSaveExtrinsics',['../namespaceop.html#aed964859fbd282bd29f2b818a3bf10dd',1,'op']]], + ['estimateandsaveintrinsics_1719',['estimateAndSaveIntrinsics',['../namespaceop.html#a1fd317d44606181c63ef8a4e5676a09e',1,'op']]], + ['estimateandsavesiftfile_1720',['estimateAndSaveSiftFile',['../namespaceop.html#a37cdfa8dd466c3df9e7da5724a909143',1,'op']]], + ['exec_1721',['exec',['../classop_1_1_wrapper_t.html#a478b8bd7deb43322f220593552fe683d',1,'op::WrapperT::exec()'],['../classop_1_1_thread_manager.html#a67a2d7cecc749be414e6896a88ec268d',1,'op::ThreadManager::exec()'],['../classop_1_1_thread.html#ad6c3721793d0f65ffe755ab74534afed',1,'op::Thread::exec()']]], + ['existdirectory_1722',['existDirectory',['../namespaceop.html#a6fc2ee2d2c256695fb7b2b953ee7f762',1,'op']]], + ['existfile_1723',['existFile',['../namespaceop.html#ac1f4b95440d2fb57fc715558d039b947',1,'op']]], + ['extract_1724',['extract',['../classop_1_1_scale_and_size_extractor.html#aa05b7698ff8417072787009c85a14421',1,'op::ScaleAndSizeExtractor']]], + 
['extractids_1725',['extractIds',['../classop_1_1_person_id_extractor.html#a8d0b309bdf1ce96ed1aa2bd3df6f6dbc',1,'op::PersonIdExtractor::extractIds()'],['../classop_1_1_pose_extractor.html#a15d81f74033c643465864f8ab6e48bba',1,'op::PoseExtractor::extractIds()']]], + ['extractidslockthread_1726',['extractIdsLockThread',['../classop_1_1_person_id_extractor.html#a1aebf8006d814a02d7fa55f0609a7ab7',1,'op::PersonIdExtractor::extractIdsLockThread()'],['../classop_1_1_pose_extractor.html#aa7b59f4bfe89219e75995bc048efe4de',1,'op::PoseExtractor::extractIdsLockThread()']]], + ['eye_1727',['eye',['../classop_1_1_matrix.html#a78f16f08895693461fb20082260aec68',1,'op::Matrix']]] +]; diff --git a/web/html/doc/search/functions_5.html b/web/html/doc/search/functions_5.html new file mode 100644 index 000000000..9301d6b9c --- /dev/null +++ b/web/html/doc/search/functions_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
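The `emplaceAndPop` and `exec` entries indexed above belong to `op::WrapperT`, the high-level entry point of the C++ API. A minimal sketch following the pattern of the OpenPose C++ tutorials, assuming the default `models/` folder is available; the image path is illustrative and error handling is omitted:

```cpp
// Sketch: push one image through a default-configured WrapperT and read the result.
#include <opencv2/opencv.hpp>
#include <openpose/headers.hpp>

int main()
{
    // Asynchronous mode is required for emplaceAndPop().
    op::WrapperT<op::Datum> opWrapperT{op::ThreadManagerMode::Asynchronous};
    opWrapperT.start(); // default configuration; loads models from "models/"

    // Convert a cv::Mat into the op::Matrix wrapper, as in the tutorial examples.
    const cv::Mat cvImage = cv::imread("examples/media/COCO_val2014_000000000192.jpg");
    const op::Matrix opImage = OP_CV2OPCONSTMAT(cvImage);

    // emplaceAndPop(const Matrix&) queues the frame and returns the processed datums.
    const auto datumsPtr = opWrapperT.emplaceAndPop(opImage);
    if (datumsPtr != nullptr && !datumsPtr->empty())
        op::opLog("Body keypoints: " + datumsPtr->at(0)->poseKeypoints.toString());

    opWrapperT.stop();
    return 0;
}
```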
+ [Doxygen search-results page boilerplate: Loading... / Searching... / No Matches]
    + + diff --git a/web/html/doc/search/functions_5.js b/web/html/doc/search/functions_5.js new file mode 100644 index 000000000..c71787cc9 --- /dev/null +++ b/web/html/doc/search/functions_5.js @@ -0,0 +1,36 @@ +var searchData= +[ + ['facecpurenderer_1728',['FaceCpuRenderer',['../classop_1_1_face_cpu_renderer.html#afb0dcfff75c4a89d5971d7b0bbd0b51b',1,'op::FaceCpuRenderer']]], + ['facedetector_1729',['FaceDetector',['../classop_1_1_face_detector.html#adfeab6977c93b7bef66c1dfbcf6f8150',1,'op::FaceDetector']]], + ['facedetectoropencv_1730',['FaceDetectorOpenCV',['../classop_1_1_face_detector_open_c_v.html#a8c4d55863b726448762a142fa91bb69d',1,'op::FaceDetectorOpenCV']]], + ['faceextractorcaffe_1731',['FaceExtractorCaffe',['../classop_1_1_face_extractor_caffe.html#adedc0e50f2eacd8e02c5bd8b0563b2ee',1,'op::FaceExtractorCaffe']]], + ['faceextractornet_1732',['FaceExtractorNet',['../classop_1_1_face_extractor_net.html#a125b052c75a5e39890e140e962b37838',1,'op::FaceExtractorNet']]], + ['facegpurenderer_1733',['FaceGpuRenderer',['../classop_1_1_face_gpu_renderer.html#a344b4f1d256d6ad805273eb8ba29cde1',1,'op::FaceGpuRenderer']]], + ['fastmax_1734',['fastMax',['../namespaceop.html#a9f4b99449c0c73e2c89ee1a1eff007c7',1,'op']]], + ['fastmin_1735',['fastMin',['../namespaceop.html#a6e1d1f90ef06cc7af576fdaad4b4e320',1,'op']]], + ['fasttruncate_1736',['fastTruncate',['../namespaceop.html#a2dafd3db8f922405b38240345dd1dce5',1,'op']]], + ['filesaver_1737',['FileSaver',['../classop_1_1_file_saver.html#aa4632ae62ac77dbad85523845ce79999',1,'op::FileSaver']]], + ['flagstodetector_1738',['flagsToDetector',['../namespaceop.html#a9f585930a5246e4a9a70145fa8763447',1,'op']]], + ['flagstodisplaymode_1739',['flagsToDisplayMode',['../namespaceop.html#afdf2dd76cbae54789a139d9415790f82',1,'op']]], + ['flagstoheatmaps_1740',['flagsToHeatMaps',['../namespaceop.html#ad3b02ca66d11f4129372f4a9f98c6437',1,'op']]], + ['flagstoheatmapscalemode_1741',['flagsToHeatMapScaleMode',['../namespaceop.html#aed9ab5282e3e60f22dc11c301af897e6',1,'op']]], + ['flagstopoint_1742',['flagsToPoint',['../namespaceop.html#a0e1275fd8690a55200fcd193c94dcf08',1,'op']]], + ['flagstoposemode_1743',['flagsToPoseMode',['../namespaceop.html#af5ec8b7e6271798cbd09475766c64d2f',1,'op']]], + ['flagstoposemodel_1744',['flagsToPoseModel',['../namespaceop.html#a60ab295fba5d41b31d6ba5a4942889a9',1,'op']]], + ['flagstoproducer_1745',['flagsToProducer',['../namespaceop.html#a8264a6feec695adef80d40940863d511',1,'op']]], + ['flagstoproducertype_1746',['flagsToProducerType',['../namespaceop.html#a1ca09f1d0e1f01d95842e99ebeef0631',1,'op']]], + ['flagstorendermode_1747',['flagsToRenderMode',['../namespaceop.html#a70f65da8f70ebd07b093932927187c90',1,'op']]], + ['flagstoscalemode_1748',['flagsToScaleMode',['../namespaceop.html#abe3f4d783191416b8e62e54c953fe36b',1,'op']]], + ['flirreader_1749',['FlirReader',['../classop_1_1_flir_reader.html#a8fa5c03b6ce95372ce47013c01c782a5',1,'op::FlirReader']]], + ['forceemplace_1750',['forceEmplace',['../classop_1_1_queue_base.html#a8d218f599b84194909691c72ee0de8d0',1,'op::QueueBase']]], + ['forcepush_1751',['forcePush',['../classop_1_1_queue_base.html#ad124d414b7c2680e5312ee163d18410f',1,'op::QueueBase']]], + ['formatasdirectory_1752',['formatAsDirectory',['../namespaceop.html#ab38ea91ef7b7dad700d8e4a4654d48f5',1,'op']]], + ['formattocvmat_1753',['formatToCvMat',['../classop_1_1_op_output_to_cv_mat.html#aaee9dc07945e0857de33308b12c9bd09',1,'op::OpOutputToCvMat']]], + 
['forward_1754',['Forward',['../classop_1_1_body_part_connector_caffe.html#a52cc46828bc7720a62fbbe841022331e',1,'op::BodyPartConnectorCaffe::Forward()'],['../classop_1_1_maximum_caffe.html#a51604d40efcfa63c5a46dc257c72cf9c',1,'op::MaximumCaffe::Forward()'],['../classop_1_1_nms_caffe.html#a263d87a3282cbc03182e4d8759ca9f3a',1,'op::NmsCaffe::Forward()'],['../classop_1_1_resize_and_merge_caffe.html#a2f3f7903827e3abc3dab35ebdad002a6',1,'op::ResizeAndMergeCaffe::Forward()']]], + ['forward_5fcpu_1755',['Forward_cpu',['../classop_1_1_body_part_connector_caffe.html#a03364fbed0c71e76eb5fb1f61a397de8',1,'op::BodyPartConnectorCaffe::Forward_cpu()'],['../classop_1_1_maximum_caffe.html#ae88c10cadaef2e4e7347ef7f8c101b67',1,'op::MaximumCaffe::Forward_cpu()'],['../classop_1_1_nms_caffe.html#a8289f4e680cd16405555002a61de735b',1,'op::NmsCaffe::Forward_cpu()'],['../classop_1_1_resize_and_merge_caffe.html#a65e81f3ac60a58a29f302d818d5b0c8f',1,'op::ResizeAndMergeCaffe::Forward_cpu()']]], + ['forward_5fgpu_1756',['Forward_gpu',['../classop_1_1_body_part_connector_caffe.html#a9dbcac7de4a57a58733462f3ce1db10c',1,'op::BodyPartConnectorCaffe::Forward_gpu()'],['../classop_1_1_maximum_caffe.html#a6e44cdf4dc3fce4d1dcc75ce29bc051e',1,'op::MaximumCaffe::Forward_gpu()'],['../classop_1_1_nms_caffe.html#a8520f4df4fb2d26a1289b1bcaa814e93',1,'op::NmsCaffe::Forward_gpu()'],['../classop_1_1_resize_and_merge_caffe.html#a13d984c2ec4b5440a694b9a2dfa64521',1,'op::ResizeAndMergeCaffe::Forward_gpu()']]], + ['forward_5focl_1757',['Forward_ocl',['../classop_1_1_nms_caffe.html#ad1719736dc5e459a1d8b28837e94f989',1,'op::NmsCaffe::Forward_ocl()'],['../classop_1_1_body_part_connector_caffe.html#a51324177e60bf260f6c2def76e9e3d7d',1,'op::BodyPartConnectorCaffe::Forward_ocl()'],['../classop_1_1_resize_and_merge_caffe.html#aba74db20a0aca30b797f590548de4272',1,'op::ResizeAndMergeCaffe::Forward_ocl()']]], + ['forwardpass_1758',['forwardPass',['../classop_1_1_pose_extractor_net.html#a95c48a9fc5368af73a54aa66e44b4bc2',1,'op::PoseExtractorNet::forwardPass()'],['../classop_1_1_pose_extractor_caffe.html#a9f8677779c9c07c0fd4ac265cd8d2d8f',1,'op::PoseExtractorCaffe::forwardPass()'],['../classop_1_1_pose_extractor.html#a6c0abd998181d03d7890ec7abdee5efe',1,'op::PoseExtractor::forwardPass()'],['../classop_1_1_net_open_cv.html#aa62d557f44d2d44f08b8b1dd3efd54fb',1,'op::NetOpenCv::forwardPass()'],['../classop_1_1_net_caffe.html#a439b30ec5d10c68cb620130ff5e1812a',1,'op::NetCaffe::forwardPass()'],['../classop_1_1_net.html#a65193e857c721f2f606ea6b010953dbc',1,'op::Net::forwardPass()'],['../classop_1_1_hand_extractor_net.html#a0aa50449396fd075bec29e0393a1ff9e',1,'op::HandExtractorNet::forwardPass()'],['../classop_1_1_hand_extractor_caffe.html#a2f8e53c8d4f4d509b4a1842f042fa548',1,'op::HandExtractorCaffe::forwardPass()'],['../classop_1_1_face_extractor_net.html#a6c5d14660690396edb1a939b11962a68',1,'op::FaceExtractorNet::forwardPass()'],['../classop_1_1_face_extractor_caffe.html#ad78fc3e86428d89a513e8e3be10fc47f',1,'op::FaceExtractorCaffe::forwardPass()']]], + ['framedisplayer_1759',['FrameDisplayer',['../classop_1_1_frame_displayer.html#a21a746ef46172c6a18ea72da6e7b5721',1,'op::FrameDisplayer']]], + ['front_1760',['front',['../classop_1_1_queue_base.html#aad7a6a666dcf70834d9d18ae6d92cb2c',1,'op::QueueBase::front()'],['../classop_1_1_queue.html#a22f6d214fe4dfc743b3abf00e049c504',1,'op::Queue::front()'],['../classop_1_1_priority_queue.html#a8e468dfaed310e54987cbb8cb1cef909',1,'op::PriorityQueue::front()']]] +]; diff --git a/web/html/doc/search/functions_6.html 
b/web/html/doc/search/functions_6.html new file mode 100644 index 000000000..9c4f5fc65 --- /dev/null +++ b/web/html/doc/search/functions_6.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
+ [Doxygen search-results page boilerplate: Loading... / Searching... / No Matches]
    + + diff --git a/web/html/doc/search/functions_6.js b/web/html/doc/search/functions_6.js new file mode 100644 index 000000000..1d4a5e875 --- /dev/null +++ b/web/html/doc/search/functions_6.js @@ -0,0 +1,119 @@ +var searchData= +[ + ['get_1761',['get',['../classop_1_1_flir_reader.html#a5101cdbcd46e51bf7f35995a3d87e900',1,'op::FlirReader::get()'],['../classop_1_1_image_directory_reader.html#aa05bfd69272e81115ba23a3c0731b596',1,'op::ImageDirectoryReader::get()'],['../classop_1_1_ip_camera_reader.html#aa7ad6adac6e401193e03d279176dd889',1,'op::IpCameraReader::get()'],['../classop_1_1_producer.html#a366881a952ad34071cc719477f08b968',1,'op::Producer::get(const int capProperty)=0'],['../classop_1_1_producer.html#a94d561f95384dfa0cd91113882869d06',1,'op::Producer::get(const ProducerProperty property)'],['../classop_1_1_video_capture_reader.html#a64e5cbfb1c556d64cabcebc6eb94eaf1',1,'op::VideoCaptureReader::get()'],['../classop_1_1_video_reader.html#a057a7d0c498c48639b38c10ac7efc183',1,'op::VideoReader::get()'],['../classop_1_1_webcam_reader.html#a38ccbdf61f21fba0694362077cb6bdb1',1,'op::WebcamReader::get()'],['../classop_1_1_pose_extractor_net.html#aa9138224f4977da54517398ba044b7c3',1,'op::PoseExtractorNet::get()']]], + ['getalphaheatmap_1762',['getAlphaHeatMap',['../classop_1_1_renderer.html#ab776e07b5b2f3a3b0aca0ce95d67796b',1,'op::Renderer']]], + ['getalphakeypoint_1763',['getAlphaKeypoint',['../classop_1_1_renderer.html#a3cf2d07dc9df42db4648398367c72dbb',1,'op::Renderer']]], + ['getaveragescore_1764',['getAverageScore',['../namespaceop.html#a1110f4c0017c43ea1d0896a3225c55f8',1,'op']]], + ['getbiggestperson_1765',['getBiggestPerson',['../namespaceop.html#ace4af20d19066df9ec502c5a09097c24',1,'op']]], + ['getblendoriginalframe_1766',['getBlendOriginalFrame',['../classop_1_1_renderer.html#ad2ac64e018f2b925d0c8d45883928b68',1,'op::Renderer']]], + ['getcameradistortions_1767',['getCameraDistortions',['../classop_1_1_camera_parameter_reader.html#a8edb22b20d1ed044335ec0d2175eeabf',1,'op::CameraParameterReader']]], + ['getcameraextrinsics_1768',['getCameraExtrinsics',['../classop_1_1_camera_parameter_reader.html#a8122bb2a8a07555b5341141356fa37c7',1,'op::CameraParameterReader::getCameraExtrinsics()'],['../classop_1_1_producer.html#a2853a47b12ab1f32138b6d944c322ebd',1,'op::Producer::getCameraExtrinsics()'],['../classop_1_1_spinnaker_wrapper.html#a427bf92ca3fc9011b01c57833b078154',1,'op::SpinnakerWrapper::getCameraExtrinsics()'],['../classop_1_1_flir_reader.html#ad3b940d5ed672ef17406843b102e9715',1,'op::FlirReader::getCameraExtrinsics()']]], + ['getcameraextrinsicsinitial_1769',['getCameraExtrinsicsInitial',['../classop_1_1_camera_parameter_reader.html#a88c9f18f6b0f3e5d09240e65a5e04beb',1,'op::CameraParameterReader']]], + ['getcameraintrinsics_1770',['getCameraIntrinsics',['../classop_1_1_producer.html#a6c5be8c556b0a744e11a11de3f185049',1,'op::Producer::getCameraIntrinsics()'],['../classop_1_1_spinnaker_wrapper.html#aaf441c78eeb921886a09412d8af9ddbc',1,'op::SpinnakerWrapper::getCameraIntrinsics()'],['../classop_1_1_flir_reader.html#acb45c9a89ebc92c0a8ee69a0ec4d0476',1,'op::FlirReader::getCameraIntrinsics()'],['../classop_1_1_camera_parameter_reader.html#a6db1e0c2b4ed63407d12ff0de97cb098',1,'op::CameraParameterReader::getCameraIntrinsics() const']]], + 
['getcameramatrices_1771',['getCameraMatrices',['../classop_1_1_camera_parameter_reader.html#a975e5a340bd1b77d680007797ec9eeea',1,'op::CameraParameterReader::getCameraMatrices()'],['../classop_1_1_flir_reader.html#a7ddcdf533c778df342a50c24c280499b',1,'op::FlirReader::getCameraMatrices()'],['../classop_1_1_producer.html#a0d711ebc149dd71159ebc2902ccd8113',1,'op::Producer::getCameraMatrices()'],['../classop_1_1_spinnaker_wrapper.html#a76849430ae48ba14cbdd0b68bca133fb',1,'op::SpinnakerWrapper::getCameraMatrices()']]], + ['getcameraserialnumbers_1772',['getCameraSerialNumbers',['../classop_1_1_camera_parameter_reader.html#acf2b4d428d18f2663f4df640171b254d',1,'op::CameraParameterReader']]], + ['getcandidatescopy_1773',['getCandidatesCopy',['../classop_1_1_pose_extractor.html#adc430a6b1b2bf4df75ebf088f97be8af',1,'op::PoseExtractor::getCandidatesCopy()'],['../classop_1_1_pose_extractor_net.html#a56d7dd1157e70786850169897bcf6883',1,'op::PoseExtractorNet::getCandidatesCopy()']]], + ['getcandidatescpuconstptr_1774',['getCandidatesCpuConstPtr',['../classop_1_1_pose_extractor_caffe.html#a1444ad1ee245a5bcd9e0b5b55395d6d8',1,'op::PoseExtractorCaffe::getCandidatesCpuConstPtr()'],['../classop_1_1_pose_extractor_net.html#a3e73f27594e61bf451b8e9fff7695f62',1,'op::PoseExtractorNet::getCandidatesCpuConstPtr() const =0']]], + ['getcandidatesgpuconstptr_1775',['getCandidatesGpuConstPtr',['../classop_1_1_pose_extractor_net.html#abee987adbe411ca71b6b37ab9cd89a41',1,'op::PoseExtractorNet::getCandidatesGpuConstPtr()'],['../classop_1_1_pose_extractor_caffe.html#a499d975f7b6add768425271b2af19a2e',1,'op::PoseExtractorCaffe::getCandidatesGpuConstPtr()']]], + ['getconstcvmat_1776',['getConstCvMat',['../classop_1_1_matrix.html#a1beb13525ec86c9827a7116eb2d175b7',1,'op::Matrix::getConstCvMat()'],['../classop_1_1_array.html#a9b43d8d495a233c384a75a3f33eae75f',1,'op::Array::getConstCvMat() const']]], + ['getconstptr_1777',['getConstPtr',['../classop_1_1_array.html#ac5e77d6926d1d344cf54c88036fc8a9c',1,'op::Array']]], + ['getcudagpunumber_1778',['getCudaGpuNumber',['../namespaceop.html#ad9b7765a4396ee4470585ded07285563',1,'op']]], + ['getcvcappropframecount_1779',['getCvCapPropFrameCount',['../namespaceop.html#a0e60b0e4e89a7f08de54ad40c2d46a60',1,'op']]], + ['getcvcappropframefps_1780',['getCvCapPropFrameFps',['../namespaceop.html#aaf7199f3821a6f954cfae134ec8c7e19',1,'op']]], + ['getcvcappropframeheight_1781',['getCvCapPropFrameHeight',['../namespaceop.html#a264496927e7b331ad628d7dc4a683194',1,'op']]], + ['getcvcappropframewidth_1782',['getCvCapPropFrameWidth',['../namespaceop.html#a71866b00e7d1077137094f78ec83b62b',1,'op']]], + ['getcvfourcc_1783',['getCvFourcc',['../namespaceop.html#a4059a24a786c4f2def977715dd2e6747',1,'op']]], + ['getcvimwritejpegquality_1784',['getCvImwriteJpegQuality',['../namespaceop.html#ad86d86621b1f485f261d620373748ed1',1,'op']]], + ['getcvimwritepngcompression_1785',['getCvImwritePngCompression',['../namespaceop.html#a289d19386824250545f248a79aed283c',1,'op']]], + ['getcvloadimageanydepth_1786',['getCvLoadImageAnydepth',['../namespaceop.html#a84730c1ab201fe836fe87787589af88a',1,'op']]], + ['getcvloadimagegrayscale_1787',['getCvLoadImageGrayScale',['../namespaceop.html#ace6c48833ba117b7d036179bdaf31a7a',1,'op']]], + ['getcvmat_1788',['getCvMat',['../classop_1_1_array.html#a530010928025b3f64743505d732b1308',1,'op::Array::getCvMat()'],['../classop_1_1_matrix.html#a9326d59a12659563d123ea6587fd4415',1,'op::Matrix::getCvMat()']]], + 
['getdistance_1789',['getDistance',['../namespaceop.html#ac968b1c98c60b74be78225be27805706',1,'op']]], + ['getdistanceaverage_1790',['getDistanceAverage',['../namespaceop.html#acf638f00b0a825c05683f8e23942a9d5',1,'op::getDistanceAverage(const Array< T > &keypoints, const int personA, const int personB, const T threshold)'],['../namespaceop.html#aa053f4b0533d9e981aa171a1ef57fc30',1,'op::getDistanceAverage(const Array< T > &keypointsA, const int personA, const Array< T > &keypointsB, const int personB, const T threshold)']]], + ['getenabled_1791',['getEnabled',['../classop_1_1_hand_extractor_net.html#af064ccee582800f39ed3eac5d69a4134',1,'op::HandExtractorNet::getEnabled()'],['../classop_1_1_face_extractor_net.html#a18911596f5ba442d50718f54a3d64fe0',1,'op::FaceExtractorNet::getEnabled()']]], + ['geterrormodes_1792',['getErrorModes',['../namespaceop_1_1_configure_error.html#ae8dbbccc9a2ca8a4670716ac5fdd8d53',1,'op::ConfigureError']]], + ['getfacekeypoints_1793',['getFaceKeypoints',['../classop_1_1_face_extractor_net.html#aee0d7b760214c805466ae515938b5190',1,'op::FaceExtractorNet']]], + ['getfileextension_1794',['getFileExtension',['../namespaceop.html#a515273b013402d8c75780330588421bc',1,'op']]], + ['getfilenameandextension_1795',['getFileNameAndExtension',['../namespaceop.html#a573544858d0a9c29c9707eeda3a21c98',1,'op']]], + ['getfilenamenoextension_1796',['getFileNameNoExtension',['../namespaceop.html#a6f37638480139a4076eef4d0c7dc6cd1',1,'op']]], + ['getfileparentfolderpath_1797',['getFileParentFolderPath',['../namespaceop.html#a2e35510c95e5525aae7a398b03b32488',1,'op']]], + ['getfilesondirectory_1798',['getFilesOnDirectory',['../namespaceop.html#a858f70fa9d84ad85c60f19a2229ebbde',1,'op::getFilesOnDirectory(const std::string &directoryPath, const Extensions extensions)'],['../namespaceop.html#adb26da2c52486e926d98471b5387c7e1',1,'op::getFilesOnDirectory(const std::string &directoryPath, const std::string &extension)'],['../namespaceop.html#a3ff74a37eb4bf12e31bc5aa95b69f9e3',1,'op::getFilesOnDirectory(const std::string &directoryPath, const std::vector< std::string > &extensions={})']]], + ['getfirstnumberonstring_1799',['getFirstNumberOnString',['../namespaceop.html#a844c35ea57a8bc67f33f49deb5070652',1,'op']]], + ['getframe_1800',['getFrame',['../classop_1_1_producer.html#a07f416a256a3f7e906748701ad569030',1,'op::Producer']]], + ['getframes_1801',['getFrames',['../classop_1_1_producer.html#aad1f861eaea12a3590e1beb286d023b7',1,'op::Producer']]], + ['getfullfilepathnoextension_1802',['getFullFilePathNoExtension',['../namespaceop.html#ac1737c19228b83a5e93ae51e5d9556eb',1,'op']]], + ['getgpumode_1803',['getGpuMode',['../namespaceop.html#a971a7caa96be5b715b5c22f6e5dc6ad1',1,'op']]], + ['getgpunumber_1804',['getGpuNumber',['../namespaceop.html#aaad222b087dd041c35de2f3414c1a01f',1,'op']]], + ['gethandkeypoints_1805',['getHandKeypoints',['../classop_1_1_hand_extractor_net.html#ae9617434c4dc7e390c18d596b868297d',1,'op::HandExtractorNet']]], + ['getheatmapcpuconstptr_1806',['getHeatMapCpuConstPtr',['../classop_1_1_pose_extractor_net.html#a80cb59fa161a7ecd3d6a016354ab9002',1,'op::PoseExtractorNet::getHeatMapCpuConstPtr()'],['../classop_1_1_pose_extractor_caffe.html#a9e8056cd50ba679636c5d5055f5a563b',1,'op::PoseExtractorCaffe::getHeatMapCpuConstPtr() const']]], + 
['getheatmapgpuconstptr_1807',['getHeatMapGpuConstPtr',['../classop_1_1_pose_extractor_caffe.html#ac4737f29b467f6c0daad5f54aa20524b',1,'op::PoseExtractorCaffe::getHeatMapGpuConstPtr()'],['../classop_1_1_pose_extractor_net.html#ad1b526d42f690a8857c0ccdc88ff88ac',1,'op::PoseExtractorNet::getHeatMapGpuConstPtr()']]], + ['getheatmaps_1808',['getHeatMaps',['../classop_1_1_hand_extractor_net.html#a88a35f29d3c53c259756bc07b2bfb093',1,'op::HandExtractorNet::getHeatMaps()'],['../classop_1_1_face_extractor_net.html#a1ba97136b2cc006cd066e3e950f0c179',1,'op::FaceExtractorNet::getHeatMaps()']]], + ['getheatmapscopy_1809',['getHeatMapsCopy',['../classop_1_1_pose_extractor_net.html#ad6e1c91c60cf0041c196fd4347bbcdf5',1,'op::PoseExtractorNet::getHeatMapsCopy()'],['../classop_1_1_pose_extractor.html#a95f6235ab496ada0b8cbc4b614637ac0',1,'op::PoseExtractor::getHeatMapsCopy()']]], + ['getheatmapsize_1810',['getHeatMapSize',['../classop_1_1_pose_extractor_net.html#a49e1dcb9f9d049131df866b7538507cd',1,'op::PoseExtractorNet::getHeatMapSize()'],['../classop_1_1_pose_extractor_caffe.html#a350900a3b326f4ed7d3dcb9531055523',1,'op::PoseExtractorCaffe::getHeatMapSize()']]], + ['getifinmainthreadorempty_1811',['getIfInMainThreadOrEmpty',['../namespaceop.html#ad5e1c975a1b7dce9b02bc8cdf3d45a01',1,'op']]], + ['getifnotinmainthreadorempty_1812',['getIfNotInMainThreadOrEmpty',['../namespaceop.html#abdedc8f1fd2f723dae5bb8ff20b93a93',1,'op']]], + ['getisrunningsharedptr_1813',['getIsRunningSharedPtr',['../classop_1_1_thread_manager.html#a48ea53b3de4d09c84db18e2c31ce1be1',1,'op::ThreadManager']]], + ['getkeypointsarea_1814',['getKeypointsArea',['../namespaceop.html#a1dd5dde18458975a36bdbd6dd38720a2',1,'op']]], + ['getkeypointsperson_1815',['getKeypointsPerson',['../namespaceop.html#a75411d98f69051860379730e16103178',1,'op']]], + ['getkeypointsrectangle_1816',['getKeypointsRectangle',['../namespaceop.html#ac74cba4141f2bee2b9d94dc171029a73',1,'op']]], + ['getkeypointsroi_1817',['getKeypointsRoi',['../namespaceop.html#a6913c67141fcbbba84fc88ac8a45aa0f',1,'op::getKeypointsRoi(const Array< T > &keypoints, const int personA, const int personB, const T threshold)'],['../namespaceop.html#ac9af122ccd8dcdafb11e37b6633245b4',1,'op::getKeypointsRoi(const Array< T > &keypointsA, const int personA, const Array< T > &keypointsB, const int personB, const T threshold)'],['../namespaceop.html#a36296ff5a5945244c5131e3ae16057e1',1,'op::getKeypointsRoi(const Rectangle< T > &rectangleA, const Rectangle< T > &rectangleB)']]], + ['getlastnumber_1818',['getLastNumber',['../namespaceop.html#ab670c693d8e4a540cfe75ce8383b6d10',1,'op']]], + ['getlogmodes_1819',['getLogModes',['../namespaceop_1_1_configure_log.html#a5ab07ae8c026e4f7782a113778d9082d',1,'op::ConfigureLog']]], + ['getmaxsize_1820',['getMaxSize',['../classop_1_1_queue_base.html#a7b3f810bb6e729be3afe3313c4b2f31b',1,'op::QueueBase']]], + ['getmergeresults_1821',['getMergeResults',['../classop_1_1_person_tracker.html#a68f46367bd719196974aa5b1bd23cb7d',1,'op::PersonTracker']]], + ['getnextfilename_1822',['getNextFileName',['../classop_1_1_file_saver.html#a5940f007f3346580124cd1b6b27492e6',1,'op::FileSaver::getNextFileName(const std::string &fileNameNoExtension) const'],['../classop_1_1_file_saver.html#a52aab3187cefc2e878790aa440a842aa',1,'op::FileSaver::getNextFileName(const unsigned long long index) const']]], + 
['getnextframename_1823',['getNextFrameName',['../classop_1_1_flir_reader.html#a711db0919bd7516fde3e641c13259637',1,'op::FlirReader::getNextFrameName()'],['../classop_1_1_image_directory_reader.html#a46ce23209afe6d3ca90db545b69cd04a',1,'op::ImageDirectoryReader::getNextFrameName()'],['../classop_1_1_ip_camera_reader.html#a0c1582090cc7c54dd9cb752207b52986',1,'op::IpCameraReader::getNextFrameName()'],['../classop_1_1_producer.html#ab35d570dc35573433ec86e3fce25e545',1,'op::Producer::getNextFrameName()'],['../classop_1_1_video_capture_reader.html#a06348fd9a290fc2ece2f3c2e4dc9bc70',1,'op::VideoCaptureReader::getNextFrameName()'],['../classop_1_1_video_reader.html#a508eed918fbe3bfe3eff4c1ebacb3463',1,'op::VideoReader::getNextFrameName()'],['../classop_1_1_webcam_reader.html#a58c315e577c12486e5ab1b941d4cce04',1,'op::WebcamReader::getNextFrameName()']]], + ['getnonzerokeypoints_1824',['getNonZeroKeypoints',['../namespaceop.html#aa9366cf1b4ac3494965749eeb5537da1',1,'op']]], + ['getnumbercameras_1825',['getNumberCameras',['../classop_1_1_camera_parameter_reader.html#a8d97033970f3e71657da070cd87fd70c',1,'op::CameraParameterReader']]], + ['getnumbercudablocks_1826',['getNumberCudaBlocks',['../namespaceop.html#a4ba080c11cc9758051db97ce2a11c023',1,'op']]], + ['getnumbercudathreadsandblocks_1827',['getNumberCudaThreadsAndBlocks',['../namespaceop.html#a17da233ea322ae172ff5bda7caaf2124',1,'op']]], + ['getnumberdimensions_1828',['getNumberDimensions',['../classop_1_1_array.html#a5eff0723f0bbd192248e602bfbb6956f',1,'op::Array']]], + ['getnumberelementstorender_1829',['getNumberElementsToRender',['../namespaceop.html#aebff78a4cfbef1cf1b2e03066d88564c',1,'op']]], + ['getoutputblobarray_1830',['getOutputBlobArray',['../classop_1_1_net.html#a222cfe3d19800824b742b218b466586b',1,'op::Net::getOutputBlobArray()'],['../classop_1_1_net_caffe.html#a37648c14f06ee46ca395c9d38635fade',1,'op::NetCaffe::getOutputBlobArray()'],['../classop_1_1_net_open_cv.html#a9f4981ac196b094183c52caa6ce283db',1,'op::NetOpenCv::getOutputBlobArray()']]], + ['getposebodypartmapping_1831',['getPoseBodyPartMapping',['../namespaceop.html#aab3de911b04b96c1850cc05c6947e184',1,'op']]], + ['getposebodypartpairsrender_1832',['getPoseBodyPartPairsRender',['../namespaceop.html#a11bd7e53698eabe32b69b48708cf7b19',1,'op']]], + ['getposecolors_1833',['getPoseColors',['../namespaceop.html#abb49286241ba7a1d754b31dee333274a',1,'op']]], + ['getposedefaultconnectinterminabovethreshold_1834',['getPoseDefaultConnectInterMinAboveThreshold',['../namespaceop.html#a8e377d8da8f109cb8be8e4edbb2ea90a',1,'op']]], + ['getposedefaultconnectinterthreshold_1835',['getPoseDefaultConnectInterThreshold',['../namespaceop.html#aabfd35e57744b44481c09f56c90cc8b8',1,'op']]], + ['getposedefaultconnectminsubsetscore_1836',['getPoseDefaultConnectMinSubsetScore',['../namespaceop.html#ae7636f6e8974ecb2ed96d43dd5ec261d',1,'op']]], + ['getposedefaultminsubsetcnt_1837',['getPoseDefaultMinSubsetCnt',['../namespaceop.html#a863c96f1fb23d96c5d605867cfe5f99f',1,'op']]], + ['getposedefaultnmsthreshold_1838',['getPoseDefaultNmsThreshold',['../namespaceop.html#acd8cab258d7e98affa5c317a9a03e862',1,'op']]], + ['getposegpuconstptr_1839',['getPoseGpuConstPtr',['../classop_1_1_pose_extractor_net.html#a546f0d6e0c62c7c7e2d44de848f9a174',1,'op::PoseExtractorNet::getPoseGpuConstPtr()'],['../classop_1_1_pose_extractor_caffe.html#a6ffc941073b66868177c91cc9e025098',1,'op::PoseExtractorCaffe::getPoseGpuConstPtr()']]], + 
['getposekeypoints_1840',['getPoseKeypoints',['../classop_1_1_pose_extractor_net.html#a3e88bd2122835db768c123d1026ce30f',1,'op::PoseExtractorNet::getPoseKeypoints()'],['../classop_1_1_pose_extractor.html#a487be38105b0d3f310142d99e0ca6b12',1,'op::PoseExtractor::getPoseKeypoints()']]], + ['getposemapindex_1841',['getPoseMapIndex',['../namespaceop.html#a84d87ec0e4ed3cf75a37ce99d0d25ef7',1,'op']]], + ['getposemaxpeaks_1842',['getPoseMaxPeaks',['../namespaceop.html#a96a81e831f8c965825162dba09095477',1,'op']]], + ['getposenetdecreasefactor_1843',['getPoseNetDecreaseFactor',['../namespaceop.html#ad7ca8d89f9045481075902c8bd98b8f4',1,'op']]], + ['getposenumberbodyparts_1844',['getPoseNumberBodyParts',['../namespaceop.html#a54a6c42a42a0a7e539061f5e30abb4bc',1,'op']]], + ['getposepartpairs_1845',['getPosePartPairs',['../namespaceop.html#a307b2c7b1506415a4ba44590fe8a7258',1,'op']]], + ['getposeprototxt_1846',['getPoseProtoTxt',['../namespaceop.html#ae0730c6559abdb976423ecf81eac4620',1,'op']]], + ['getposescales_1847',['getPoseScales',['../namespaceop.html#a016abefba53293ed2ffe3a3c3bd88dd0',1,'op']]], + ['getposescores_1848',['getPoseScores',['../classop_1_1_pose_extractor_net.html#a43317a6868ffa7391586f2b8b599ecdf',1,'op::PoseExtractorNet::getPoseScores()'],['../classop_1_1_pose_extractor.html#aee77aa0ca773abe442a278d9e9e69376',1,'op::PoseExtractor::getPoseScores()']]], + ['getposetrainedmodel_1849',['getPoseTrainedModel',['../namespaceop.html#ade70b024ee461ae04e7233bf3937c5c6',1,'op']]], + ['getprioritythreshold_1850',['getPriorityThreshold',['../namespaceop_1_1_configure_log.html#a0e5c3fad2ace3eb129dd1d97afd59558',1,'op::ConfigureLog']]], + ['getpseudoconstptr_1851',['getPseudoConstPtr',['../classop_1_1_array.html#a85d749e637a7528325f86b80595a91d1',1,'op::Array']]], + ['getptr_1852',['getPtr',['../classop_1_1_array.html#af4715967fd2b028a97fd30257e697275',1,'op::Array']]], + ['getrawframe_1853',['getRawFrame',['../classop_1_1_video_capture_reader.html#a33aabaf7c82773f117f6842ff900fa18',1,'op::VideoCaptureReader::getRawFrame()'],['../classop_1_1_producer.html#ab23d9eeac2c1820be9191ab9f7bb1777',1,'op::Producer::getRawFrame()=0']]], + ['getrawframes_1854',['getRawFrames',['../classop_1_1_producer.html#ad6d701ad0867491736374d8ea753c00e',1,'op::Producer::getRawFrames()'],['../classop_1_1_spinnaker_wrapper.html#a2135a9d3c9dbab4c1e0ee6be6c31b93a',1,'op::SpinnakerWrapper::getRawFrames()'],['../classop_1_1_video_capture_reader.html#a9f58d9280a26d94ff4ba6cd93f4928a0',1,'op::VideoCaptureReader::getRawFrames()']]], + ['getresolution_1855',['getResolution',['../classop_1_1_spinnaker_wrapper.html#aad97f57040a953cbce0f20c6b3303202',1,'op::SpinnakerWrapper']]], + ['getscalenettooutput_1856',['getScaleNetToOutput',['../classop_1_1_pose_extractor.html#ae798335b1606804c87220d3c72423dad',1,'op::PoseExtractor::getScaleNetToOutput()'],['../classop_1_1_pose_extractor_net.html#ac67c1d8fcba15ccfb284f10776e9fd89',1,'op::PoseExtractorNet::getScaleNetToOutput()']]], + ['getsharedparameters_1857',['getSharedParameters',['../classop_1_1_cv_mat_to_op_output.html#ad0ac01a9866ea00c873da7e2552c5b08',1,'op::CvMatToOpOutput::getSharedParameters()'],['../classop_1_1_gpu_renderer.html#a63eb7ae0b440a5552ed9342043a8f369',1,'op::GpuRenderer::getSharedParameters()']]], + ['getshowgooglyeyes_1858',['getShowGooglyEyes',['../classop_1_1_renderer.html#a44e13a965a9b0fca119ea897ad1348e0',1,'op::Renderer']]], + ['getsize_1859',['getSize',['../classop_1_1_array.html#a4568f646a97fa8cea443b864d91a28df',1,'op::Array::getSize() 
const'],['../classop_1_1_array.html#ab4123b36e0816793e206365397dd8f79',1,'op::Array::getSize(const int index) const']]], + ['getstdstring_1860',['getStdString',['../classop_1_1_string.html#a82003e99b9f3e9bd0054873deac970da',1,'op::String']]], + ['getstride_1861',['getStride',['../classop_1_1_array.html#a38de9c4ba539b8134fcac91287722044',1,'op::Array::getStride() const'],['../classop_1_1_array.html#ab033fba3d9140020dd89edb10fe8b109',1,'op::Array::getStride(const int index) const']]], + ['getthreadid_1862',['getThreadId',['../namespaceop.html#a5a3db1a0d272d8fb5ea723845beee150',1,'op']]], + ['gettimerinit_1863',['getTimerInit',['../namespaceop.html#ae0e92a0d8867d1b02f1c43ae4c0c9e09',1,'op']]], + ['gettimeseconds_1864',['getTimeSeconds',['../namespaceop.html#a01dd208c992c8e07623579f77dcfb59b',1,'op']]], + ['gettworkerssize_1865',['getTWorkersSize',['../classop_1_1_sub_thread.html#a3e8e044b20842d15b1caedf8a78be622',1,'op::SubThread']]], + ['gettype_1866',['getType',['../classop_1_1_producer.html#a9a9424027e5bc8e0fba7c65eccc460e0',1,'op::Producer']]], + ['getundistortimage_1867',['getUndistortImage',['../classop_1_1_camera_parameter_reader.html#a4c819945b0df95bcfb6f8d79451290d5',1,'op::CameraParameterReader']]], + ['getvolume_1868',['getVolume',['../classop_1_1_array.html#aee364306687e39e754117c98ad844157',1,'op::Array::getVolume() const'],['../classop_1_1_array.html#a5ed838d2b9933b6a80906d0e0db39742',1,'op::Array::getVolume(const int indexA, const int indexB=-1) const']]], + ['gpu_5fdata_1869',['gpu_data',['../classop_1_1_array_cpu_gpu.html#a292b819460cbf56fd36e7435cd99c49a',1,'op::ArrayCpuGpu']]], + ['gpu_5fdiff_1870',['gpu_diff',['../classop_1_1_array_cpu_gpu.html#aa0717b11c87da804e6da0d7aca4a5414',1,'op::ArrayCpuGpu']]], + ['gpu_5fshape_1871',['gpu_shape',['../classop_1_1_array_cpu_gpu.html#a7c92a38509887af087eafd7522047429',1,'op::ArrayCpuGpu']]], + ['gpurenderer_1872',['GpuRenderer',['../classop_1_1_gpu_renderer.html#a9852b2017e972637b47250bb7fbc53ea',1,'op::GpuRenderer']]], + ['gputocpumemoryiflastrenderer_1873',['gpuToCpuMemoryIfLastRenderer',['../classop_1_1_gpu_renderer.html#a6355f70d16c6427b028fa4596ce5d985',1,'op::GpuRenderer']]], + ['gui_1874',['Gui',['../classop_1_1_gui.html#a1084d79f61d08f0551832de1ca337c70',1,'op::Gui']]], + ['gui3d_1875',['Gui3D',['../classop_1_1_gui3_d.html#a23ead7d9d09b3f0b3ba81b284d49b4a4',1,'op::Gui3D']]], + ['guiinfoadder_1876',['GuiInfoAdder',['../classop_1_1_gui_info_adder.html#af23e17f9eeb51c7473cd0940292efa61',1,'op::GuiInfoAdder']]] +]; diff --git a/web/html/doc/search/functions_7.html b/web/html/doc/search/functions_7.html new file mode 100644 index 000000000..46b5c0f61 --- /dev/null +++ b/web/html/doc/search/functions_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_7.js b/web/html/doc/search/functions_7.js new file mode 100644 index 000000000..99c1383cf --- /dev/null +++ b/web/html/doc/search/functions_7.js @@ -0,0 +1,11 @@ +var searchData= +[ + ['handcpurenderer_1877',['HandCpuRenderer',['../classop_1_1_hand_cpu_renderer.html#a3145d482c0378288e7ba3e42091a56c2',1,'op::HandCpuRenderer']]], + ['handdetector_1878',['HandDetector',['../classop_1_1_hand_detector.html#a20b127dd7b51afcd336d1f16b40ee0b1',1,'op::HandDetector']]], + ['handdetectorfromtxt_1879',['HandDetectorFromTxt',['../classop_1_1_hand_detector_from_txt.html#a94ef5e925c5d25b181c56ae79bb1eed2',1,'op::HandDetectorFromTxt']]], + ['handextractorcaffe_1880',['HandExtractorCaffe',['../classop_1_1_hand_extractor_caffe.html#a703c8b8d15de55bc2b6bbaee633a6384',1,'op::HandExtractorCaffe']]], + ['handextractornet_1881',['HandExtractorNet',['../classop_1_1_hand_extractor_net.html#a8bcb44ea2618dea01c00255021425637',1,'op::HandExtractorNet']]], + ['handgpurenderer_1882',['HandGpuRenderer',['../classop_1_1_hand_gpu_renderer.html#a0d2f742b995a6f34e414f9731db847d5',1,'op::HandGpuRenderer']]], + ['heatmapsaver_1883',['HeatMapSaver',['../classop_1_1_heat_map_saver.html#aa6a339b70a9535a018584b93c932b12d',1,'op::HeatMapSaver']]], + ['height_1884',['height',['../classop_1_1_array_cpu_gpu.html#a1ae24508718592592421f9491bcf50f0',1,'op::ArrayCpuGpu']]] +]; diff --git a/web/html/doc/search/functions_8.html b/web/html/doc/search/functions_8.html new file mode 100644 index 000000000..31a1d9503 --- /dev/null +++ b/web/html/doc/search/functions_8.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_8.js b/web/html/doc/search/functions_8.js new file mode 100644 index 000000000..72fa9b47b --- /dev/null +++ b/web/html/doc/search/functions_8.js @@ -0,0 +1,16 @@ +var searchData= +[ + ['ifendedresetorrelease_1885',['ifEndedResetOrRelease',['../classop_1_1_producer.html#ac72a751759ae8b5a0a99552580f7fbad',1,'op::Producer']]], + ['imagedirectoryreader_1886',['ImageDirectoryReader',['../classop_1_1_image_directory_reader.html#a10157e6234426dd809ffe83ebfbfd274',1,'op::ImageDirectoryReader']]], + ['imagesaver_1887',['ImageSaver',['../classop_1_1_image_saver.html#a723387e62a6b701202dd6cf35c57429f',1,'op::ImageSaver']]], + ['increase_1888',['increase',['../classop_1_1_pose_extractor_net.html#a4959a9c9d433d9297e5daef0e3a0eabc',1,'op::PoseExtractorNet']]], + ['increaseelementtorender_1889',['increaseElementToRender',['../classop_1_1_renderer.html#a298a5a58bab80b7252db7d3386a0ed8a',1,'op::Renderer']]], + ['initializationonthread_1890',['initializationOnThread',['../classop_1_1_net_open_cv.html#a932f2f53f61e05bc0fd164a707f692b9',1,'op::NetOpenCv::initializationOnThread()'],['../classop_1_1_w_gui3_d.html#a7da4f85892e0d7d9e105c6d471a706a3',1,'op::WGui3D::initializationOnThread()'],['../classop_1_1_net_caffe.html#a08b71387287339e68327dd6d4cb1e8b3',1,'op::NetCaffe::initializationOnThread()'],['../classop_1_1_net.html#a6e9e801f2c9950a798d0d2fa94a6c8f2',1,'op::Net::initializationOnThread()'],['../classop_1_1_w_hand_renderer.html#a2ee88145b38fea1a6a2bb7987a33bd40',1,'op::WHandRenderer::initializationOnThread()'],['../classop_1_1_w_hand_extractor_net.html#a7904f62b91d658a06ed89f0bfd307642',1,'op::WHandExtractorNet::initializationOnThread()'],['../classop_1_1_w_hand_detector_update.html#a729aaa628e4f4c24e7cb9afca1cdc761',1,'op::WHandDetectorUpdate::initializationOnThread()'],['../classop_1_1_w_hand_detector_tracking.html#a20ef6206194a873c2cfa7fe13d905d92',1,'op::WHandDetectorTracking::initializationOnThread()'],['../classop_1_1_w_hand_detector.html#a5c29c944205ee0727f76c282ef55ae52',1,'op::WHandDetector::initializationOnThread()'],['../classop_1_1_hand_renderer.html#adb91ae2a8ccf24671ad86e99e786b120',1,'op::HandRenderer::initializationOnThread()'],['../classop_1_1_hand_gpu_renderer.html#a0489f10ddc9e37e87084ebf9a5138f3a',1,'op::HandGpuRenderer::initializationOnThread()'],['../classop_1_1_hand_extractor_net.html#a37d86377da41c576c4d54027a9762733',1,'op::HandExtractorNet::initializationOnThread()'],['../classop_1_1_w_gui_info_adder.html#ae620275d6570fd5c74f33728cd340217',1,'op::WGuiInfoAdder::initializationOnThread()'],['../classop_1_1_w_hand_detector_from_txt.html#acd7d37555c09a58dc660811724930276',1,'op::WHandDetectorFromTxt::initializationOnThread()'],['../classop_1_1_pose_extractor.html#aab1cccc9ad99f6b007abaa14600ea6df',1,'op::PoseExtractor::initializationOnThread()'],['../classop_1_1_pose_extractor_net.html#a28923c846dc7c731d3571c72a50acd2f',1,'op::PoseExtractorNet::initializationOnThread()'],['../classop_1_1_pose_gpu_renderer.html#a9e94ab926baf360dd6b23e14fba09836',1,'op::PoseGpuRenderer::initializationOnThread()'],['../classop_1_1_pose_renderer.html#af861d8213f1444b3246402061cea1b33',1,'op::PoseRenderer::initializationOnThread()'],['../classop_1_1_w_pose_extractor.html#a9b621ed9915da9bf3cce49db547de9e6',1,'op::WPoseExtractor::initializationOnThread()'],['../classop_1_1_w_pose_extractor_net.html#a18d4a120314ec44d1722cc164aaba7a8',1,'op::WPoseExtractorNet::initializationOnThread()'],['../classop_1_1_w_pose_renderer.html#aba989a73cef9a807879ad2196725c61c',1,
'op::WPoseRenderer::initializationOnThread()'],['../classop_1_1_w_datum_producer.html#a4381eaec4625824ebaa2d23f0cf1be48',1,'op::WDatumProducer::initializationOnThread()'],['../classop_1_1_sub_thread.html#a8debc3b655463847fed2c547d13326f7',1,'op::SubThread::initializationOnThread()'],['../classop_1_1_w_fps_max.html#af8c5f74f0271d227b2c70b4415366332',1,'op::WFpsMax::initializationOnThread()'],['../classop_1_1_w_id_generator.html#a50a1b7929810daae87ee6443c659edad',1,'op::WIdGenerator::initializationOnThread()'],['../classop_1_1_worker.html#aa5be4df9d4d8302728c653870e7d2a23',1,'op::Worker::initializationOnThread()'],['../classop_1_1_w_queue_assembler.html#a02bb2d4e47689903434c05a911a5ba15',1,'op::WQueueAssembler::initializationOnThread()'],['../classop_1_1_w_queue_orderer.html#a85598f83f6f3a30b7ddce9bc7beddf33',1,'op::WQueueOrderer::initializationOnThread()'],['../classop_1_1_w_person_id_extractor.html#a72b888875be18eb3fc8d0a8c267630de',1,'op::WPersonIdExtractor::initializationOnThread()'],['../classop_1_1_gui3_d.html#a4247c56f90a535944b8aa14def754eaa',1,'op::Gui3D::initializationOnThread()'],['../classop_1_1_pose_triangulation.html#a90436697faa45a3676087426763014f4',1,'op::PoseTriangulation::initializationOnThread()'],['../classop_1_1_w_pose_triangulation.html#a5711329db1768eb77d2d96575c9fb668',1,'op::WPoseTriangulation::initializationOnThread()'],['../classop_1_1_w_cv_mat_to_op_input.html#ac03534bbe3b6c3c45efb61b5d78402da',1,'op::WCvMatToOpInput::initializationOnThread()'],['../classop_1_1_w_cv_mat_to_op_output.html#ad4c957d391e371b7ee56cdb5be6b1452',1,'op::WCvMatToOpOutput::initializationOnThread()'],['../classop_1_1_w_keep_top_n_people.html#a56371016b6fe1fbacdba8d558685719b',1,'op::WKeepTopNPeople::initializationOnThread()'],['../classop_1_1_w_keypoint_scaler.html#aba4fb004818f3adc22959e382a90cd2c',1,'op::WKeypointScaler::initializationOnThread()'],['../classop_1_1_w_op_output_to_cv_mat.html#adea2e8b1d33e6c091640c7d904dac7cd',1,'op::WOpOutputToCvMat::initializationOnThread()'],['../classop_1_1_w_scale_and_size_extractor.html#ac1203ef395a836b13f5586432f284c41',1,'op::WScaleAndSizeExtractor::initializationOnThread()'],['../classop_1_1_w_verbose_printer.html#a9d21f5db0e70ba4cad73cf2bdf6c9fe2',1,'op::WVerbosePrinter::initializationOnThread()'],['../classop_1_1_face_extractor_net.html#a6d6d5d6bd912bb940058a2b958aadf61',1,'op::FaceExtractorNet::initializationOnThread()'],['../classop_1_1_face_gpu_renderer.html#a6ebd9287927529ffaa4200890190896b',1,'op::FaceGpuRenderer::initializationOnThread()'],['../classop_1_1_face_renderer.html#aa34ce7a0602b0994cc3043b80627a31c',1,'op::FaceRenderer::initializationOnThread()'],['../classop_1_1_w_face_detector.html#afaca53a669f0cd43103f7317aded75d3',1,'op::WFaceDetector::initializationOnThread()'],['../classop_1_1_w_face_detector_open_c_v.html#ad7dce5824ba32bc07d2474c20b23e62d',1,'op::WFaceDetectorOpenCV::initializationOnThread()'],['../classop_1_1_w_udp_sender.html#a567d9fe2adc85ae086379696573112e3',1,'op::WUdpSender::initializationOnThread()'],['../classop_1_1_w_face_extractor_net.html#ac04b0bec061a6cbc6a6afacb3f8d15c7',1,'op::WFaceExtractorNet::initializationOnThread()'],['../classop_1_1_w_gui.html#a4e4db210b87f78cc1238dd3ab2bedaa4',1,'op::WGui::initializationOnThread()'],['../classop_1_1_gui.html#a07cf9b4e7757979666d097278df02c20',1,'op::Gui::initializationOnThread()'],['../classop_1_1_frame_displayer.html#af5d2e1c8bcd2012c66347252e8dbc543',1,'op::FrameDisplayer::initializationOnThread()'],['../classop_1_1_w_video_saver3_d.html#ad5a050f5646af36bf8d91909e8f47b
2f',1,'op::WVideoSaver3D::initializationOnThread()'],['../classop_1_1_w_video_saver.html#ada90f76b28e4bafe9c8ecbb9bcbb2d14',1,'op::WVideoSaver::initializationOnThread()'],['../classop_1_1_w_pose_saver.html#a4f0774832e12389593361186f1b83128',1,'op::WPoseSaver::initializationOnThread()'],['../classop_1_1_w_people_json_saver.html#a5d4239596a996723a20a1031d32c7446',1,'op::WPeopleJsonSaver::initializationOnThread()'],['../classop_1_1_w_image_saver.html#a78655ea3d4dac28bdf7e2e4a80b5a337',1,'op::WImageSaver::initializationOnThread()'],['../classop_1_1_w_heat_map_saver.html#a20e82b121a580c578f69cbb0401c4cb0',1,'op::WHeatMapSaver::initializationOnThread()'],['../classop_1_1_w_hand_saver.html#aa234a68d1cc7ec97fefbf30239149baa',1,'op::WHandSaver::initializationOnThread()'],['../classop_1_1_w_face_saver.html#ae8401789881462eb8438c65e9d2d3fb2',1,'op::WFaceSaver::initializationOnThread()'],['../classop_1_1_w_coco_json_saver.html#a5cca095ff23c3134ab0addc9a4feabaf',1,'op::WCocoJsonSaver::initializationOnThread()'],['../classop_1_1_w_face_renderer.html#a7b72c70dc02c9209d84992caad6ad7d0',1,'op::WFaceRenderer::initializationOnThread()']]], + ['initializationonthreadnoexception_1891',['initializationOnThreadNoException',['../classop_1_1_worker.html#a5df10dd8a245df1a6d8df18978490899',1,'op::Worker']]], + ['ipcamerareader_1892',['IpCameraReader',['../classop_1_1_ip_camera_reader.html#af3a67a2705107e04e79672fa087847c5',1,'op::IpCameraReader']]], + ['iscontinuous_1893',['isContinuous',['../classop_1_1_matrix.html#ae82b851dd176317d72df95461a4bad76',1,'op::Matrix']]], + ['isfull_1894',['isFull',['../classop_1_1_queue_base.html#a17a52df2e912a346c412418c62268425',1,'op::QueueBase']]], + ['isopened_1895',['isOpened',['../classop_1_1_video_saver.html#a0c5dadfa4f687283c370e7890ae5037f',1,'op::VideoSaver::isOpened()'],['../classop_1_1_flir_reader.html#a3d383e03a405dcbff566a86253db90af',1,'op::FlirReader::isOpened()'],['../classop_1_1_image_directory_reader.html#adbf9ff392cd52a585332dbdcd46ffb81',1,'op::ImageDirectoryReader::isOpened()'],['../classop_1_1_ip_camera_reader.html#ac26913b4ff841f56f43bb53b012a2401',1,'op::IpCameraReader::isOpened()'],['../classop_1_1_producer.html#a58590e4a409d31f839184b4bf030a68b',1,'op::Producer::isOpened()'],['../classop_1_1_spinnaker_wrapper.html#a51e869f56a6517bd55783ea039066d7d',1,'op::SpinnakerWrapper::isOpened()'],['../classop_1_1_video_capture_reader.html#ab0c6519396faae82ec1b49262ed454a2',1,'op::VideoCaptureReader::isOpened()'],['../classop_1_1_video_reader.html#a503e70039e2cfecfe2d31771df509733',1,'op::VideoReader::isOpened()'],['../classop_1_1_webcam_reader.html#a6a065fcf3d6dca624741adc0f77da11d',1,'op::WebcamReader::isOpened()']]], + ['isrunning_1896',['isRunning',['../classop_1_1_queue_base.html#a9f529f94ff3b98e3ac11d796caa31239',1,'op::QueueBase::isRunning()'],['../classop_1_1_thread.html#a3ed032f4c42ef1797873122aa96a055d',1,'op::Thread::isRunning()'],['../classop_1_1_thread_manager.html#a7bad63adddf7a35a436911ada2a1c519',1,'op::ThreadManager::isRunning()'],['../classop_1_1_worker.html#a567902b58e492421a6ad771e730ddf53',1,'op::Worker::isRunning()'],['../classop_1_1_wrapper_t.html#a0577721c5e714861b27ad4ff356980bc',1,'op::WrapperT::isRunning()']]], + ['issubmatrix_1897',['isSubmatrix',['../classop_1_1_matrix.html#aa0ab094e21bab6757f502866bce7e79c',1,'op::Matrix']]] +]; diff --git a/web/html/doc/search/functions_9.html b/web/html/doc/search/functions_9.html new file mode 100644 index 000000000..9a8e4290c --- /dev/null +++ b/web/html/doc/search/functions_9.html @@ -0,0 +1,37 @@ + + 
+ + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_9.js b/web/html/doc/search/functions_9.js new file mode 100644 index 000000000..4b0a66fdb --- /dev/null +++ b/web/html/doc/search/functions_9.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['jsonofstream_1898',['JsonOfstream',['../classop_1_1_json_ofstream.html#afa4b3e1dee27f5afd0017b95c0f5e364',1,'op::JsonOfstream::JsonOfstream(const std::string &filePath, const bool humanReadable=true)'],['../classop_1_1_json_ofstream.html#ab8220b4336ccc8998cc38f0fa5c36918',1,'op::JsonOfstream::JsonOfstream(JsonOfstream &&jsonOfstream)']]] +]; diff --git a/web/html/doc/search/functions_a.html b/web/html/doc/search/functions_a.html new file mode 100644 index 000000000..5ecc152ca --- /dev/null +++ b/web/html/doc/search/functions_a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_a.js b/web/html/doc/search/functions_a.js new file mode 100644 index 000000000..e39ad26ef --- /dev/null +++ b/web/html/doc/search/functions_a.js @@ -0,0 +1,10 @@ +var searchData= +[ + ['keepdesiredframerate_1899',['keepDesiredFrameRate',['../classop_1_1_producer.html#afad3eadd16cca0de2c2be8b083c0d56d',1,'op::Producer']]], + ['keeproiinside_1900',['keepRoiInside',['../namespaceop.html#a5f85de4dca2733d03470d42617f83d4e',1,'op']]], + ['keeptopnpeople_1901',['KeepTopNPeople',['../classop_1_1_keep_top_n_people.html#ae9419ae35da5b0547989f19795a26808',1,'op::KeepTopNPeople']]], + ['keeptoppeople_1902',['keepTopPeople',['../classop_1_1_keep_top_n_people.html#a556a0d8d97985e0b73fc78e372be6ea8',1,'op::KeepTopNPeople::keepTopPeople()'],['../classop_1_1_pose_extractor.html#a291521decad2465df13dc769fe9cc4e5',1,'op::PoseExtractor::keepTopPeople()']]], + ['key_1903',['key',['../classop_1_1_json_ofstream.html#af0c7f763e7e809810c00b394a260672e',1,'op::JsonOfstream']]], + ['keypointsaver_1904',['KeypointSaver',['../classop_1_1_keypoint_saver.html#aa6d9eb36cfd40c5cfa3995420cdf3dfa',1,'op::KeypointSaver']]], + ['keypointscaler_1905',['KeypointScaler',['../classop_1_1_keypoint_scaler.html#a0f556c1b0fad63c7c3551a5d4fd72219',1,'op::KeypointScaler']]] +]; diff --git a/web/html/doc/search/functions_b.html b/web/html/doc/search/functions_b.html new file mode 100644 index 000000000..e301fedd7 --- /dev/null +++ b/web/html/doc/search/functions_b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_b.js b/web/html/doc/search/functions_b.js new file mode 100644 index 000000000..b9e5ea2d8 --- /dev/null +++ b/web/html/doc/search/functions_b.js @@ -0,0 +1,8 @@ +var searchData= +[ + ['layersetup_1906',['LayerSetUp',['../classop_1_1_maximum_caffe.html#a47047083e35d2af5a969acbf7cb55674',1,'op::MaximumCaffe::LayerSetUp()'],['../classop_1_1_nms_caffe.html#a8c7e69c32f1fff92893284ed70278f48',1,'op::NmsCaffe::LayerSetUp()'],['../classop_1_1_resize_and_merge_caffe.html#ad7441a1f8db85f6239830603fb7a6325',1,'op::ResizeAndMergeCaffe::LayerSetUp()']]], + ['legacyshape_1907',['LegacyShape',['../classop_1_1_array_cpu_gpu.html#a9aa5001613f7199de933eef152db40b0',1,'op::ArrayCpuGpu']]], + ['loaddata_1908',['loadData',['../namespaceop.html#a9f14054fbf4e63fc85d10c83f2f9ecb7',1,'op::loadData(const std::vector< std::string > &cvMatNames, const std::string &fileNameNoExtension, const DataFormat dataFormat)'],['../namespaceop.html#a1c2921f841ab87033b535b5ae8a4d526',1,'op::loadData(const std::string &cvMatName, const std::string &fileNameNoExtension, const DataFormat dataFormat)']]], + ['loadhanddetectortxt_1909',['loadHandDetectorTxt',['../namespaceop.html#a0ce96f84c6e380b261802c7e2639dc7d',1,'op']]], + ['loadimage_1910',['loadImage',['../namespaceop.html#a871a61f08021460e0f24f51583546a75',1,'op']]] +]; diff --git a/web/html/doc/search/functions_c.html b/web/html/doc/search/functions_c.html new file mode 100644 index 000000000..c4f326877 --- /dev/null +++ b/web/html/doc/search/functions_c.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_c.js b/web/html/doc/search/functions_c.js new file mode 100644 index 000000000..7f4041e1d --- /dev/null +++ b/web/html/doc/search/functions_c.js @@ -0,0 +1,13 @@ +var searchData= +[ + ['makedirectory_1911',['makeDirectory',['../namespaceop.html#acc650faa23df88ca16a09a2d2a522960',1,'op']]], + ['matrix_1912',['Matrix',['../classop_1_1_matrix.html#adbdc98003dd0f666c845ac2acf592bd8',1,'op::Matrix::Matrix()'],['../classop_1_1_matrix.html#af9dc44c30ec3ae5f8e7ba8f76516985a',1,'op::Matrix::Matrix(const void *cvMatPtr)'],['../classop_1_1_matrix.html#a770bbf0242b96b2e746d7f1e30dbf8fc',1,'op::Matrix::Matrix(const int rows, const int cols, const int type)'],['../classop_1_1_matrix.html#a53786b5c97e1cded5accbcb3cd6b992d',1,'op::Matrix::Matrix(const int rows, const int cols, const int type, void *cvMatPtr)']]], + ['maximumcaffe_1913',['MaximumCaffe',['../classop_1_1_maximum_caffe.html#a66bd0aa863a97647ae6350d1f886ea51',1,'op::MaximumCaffe']]], + ['maximumcpu_1914',['maximumCpu',['../namespaceop.html#ae0fea41041a70ae8449a77f46ffe8100',1,'op']]], + ['maximumgpu_1915',['maximumGpu',['../namespaceop.html#a8ec109805adf02f9872a6af37d602caa',1,'op']]], + ['mergevectors_1916',['mergeVectors',['../namespaceop.html#aa3a3e2acfb27ecbd187d01c8dcd41899',1,'op']]], + ['mutable_5fcpu_5fdata_1917',['mutable_cpu_data',['../classop_1_1_array_cpu_gpu.html#a6eafc0638925b776bb78c68c1fef972a',1,'op::ArrayCpuGpu']]], + ['mutable_5fcpu_5fdiff_1918',['mutable_cpu_diff',['../classop_1_1_array_cpu_gpu.html#aba03b602ed1c745b3ba344d7ccedfd30',1,'op::ArrayCpuGpu']]], + ['mutable_5fgpu_5fdata_1919',['mutable_gpu_data',['../classop_1_1_array_cpu_gpu.html#ac0bb37052ae356e85d681f52a4716f3c',1,'op::ArrayCpuGpu']]], + ['mutable_5fgpu_5fdiff_1920',['mutable_gpu_diff',['../classop_1_1_array_cpu_gpu.html#a678e65cb71d2cc2e1070499465894892',1,'op::ArrayCpuGpu']]] +]; diff --git a/web/html/doc/search/functions_d.html b/web/html/doc/search/functions_d.html new file mode 100644 index 000000000..7a1ed065d --- /dev/null +++ b/web/html/doc/search/functions_d.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_d.js b/web/html/doc/search/functions_d.js new file mode 100644 index 000000000..237757e96 --- /dev/null +++ b/web/html/doc/search/functions_d.js @@ -0,0 +1,12 @@ +var searchData= +[ + ['netcaffe_1921',['NetCaffe',['../classop_1_1_net_caffe.html#af6d9ee03568d2783e0e4ed0b78a21c3d',1,'op::NetCaffe']]], + ['netinitializationonthread_1922',['netInitializationOnThread',['../classop_1_1_face_extractor_caffe.html#add2a24d9bd5e03ff90034239e90523c2',1,'op::FaceExtractorCaffe::netInitializationOnThread()'],['../classop_1_1_face_extractor_net.html#a6a9a02b46596283cab6f8a4640161081',1,'op::FaceExtractorNet::netInitializationOnThread()'],['../classop_1_1_hand_extractor_caffe.html#ace3ee9d717887ee9dc0f00ce69bd0c82',1,'op::HandExtractorCaffe::netInitializationOnThread()'],['../classop_1_1_hand_extractor_net.html#aad7c29237d50e77e606bb32c20c60d24',1,'op::HandExtractorNet::netInitializationOnThread()'],['../classop_1_1_pose_extractor_caffe.html#ae5d41065ea3eaf37d2c9663aa35554d6',1,'op::PoseExtractorCaffe::netInitializationOnThread()'],['../classop_1_1_pose_extractor_net.html#aa8bf8cdfdede22410e2dfcea5d3f0cdc',1,'op::PoseExtractorNet::netInitializationOnThread()']]], + ['netopencv_1923',['NetOpenCv',['../classop_1_1_net_open_cv.html#af46f57f8a4093c927dd39109ad0411e9',1,'op::NetOpenCv']]], + ['nmscaffe_1924',['NmsCaffe',['../classop_1_1_nms_caffe.html#afb808d9a264ce50664f8641e477d9e2d',1,'op::NmsCaffe']]], + ['nmscpu_1925',['nmsCpu',['../namespaceop.html#a6a97f255cc323f1c1babe4c598727196',1,'op']]], + ['nmsgpu_1926',['nmsGpu',['../namespaceop.html#a28c5ac530845231600fb93c0be44ad6d',1,'op']]], + ['nmsocl_1927',['nmsOcl',['../namespaceop.html#a37dce2abad2568d7664654e4598002af',1,'op']]], + ['num_1928',['num',['../classop_1_1_array_cpu_gpu.html#aa87f658e2ff9699908f5cb823e988188',1,'op::ArrayCpuGpu']]], + ['num_5faxes_1929',['num_axes',['../classop_1_1_array_cpu_gpu.html#ad79b3b3cf4180535211e20e086262837',1,'op::ArrayCpuGpu']]] +]; diff --git a/web/html/doc/search/functions_e.html b/web/html/doc/search/functions_e.html new file mode 100644 index 000000000..22d2a6bf5 --- /dev/null +++ b/web/html/doc/search/functions_e.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_e.js b/web/html/doc/search/functions_e.js new file mode 100644 index 000000000..dcd6c77b3 --- /dev/null +++ b/web/html/doc/search/functions_e.js @@ -0,0 +1,26 @@ +var searchData= +[ + ['objectclose_1930',['objectClose',['../classop_1_1_json_ofstream.html#a45eeb25d9413fc31786f315b46c341cc',1,'op::JsonOfstream']]], + ['objectopen_1931',['objectOpen',['../classop_1_1_json_ofstream.html#a5c38e36c1449d808dd4ab6558d65289c',1,'op::JsonOfstream']]], + ['offset_1932',['offset',['../classop_1_1_array_cpu_gpu.html#af24813492bad97de4e4c628fe356abe7',1,'op::ArrayCpuGpu']]], + ['operator_21_3d_1933',['operator!=',['../structop_1_1_datum.html#a8337f6ff81ba8231ceeabc840372bff9',1,'op::Datum::operator!=()'],['../structop_1_1_point.html#a8a82a0d663d9572fa28394f7562ebfb2',1,'op::Point::operator!=()']]], + ['operator_28_29_1934',['operator()',['../classop_1_1_pointer_container_greater.html#a7c571ddbcfd6eaaaf33bb6abe4b22aaa',1,'op::PointerContainerGreater::operator()()'],['../classop_1_1_pointer_container_less.html#af34bafbf659ff4768dbb33fe7454cb21',1,'op::PointerContainerLess::operator()()']]], + ['operator_2a_1935',['operator*',['../structop_1_1_point.html#ad66e33cf5d57e78c80220881406e41ce',1,'op::Point::operator*()'],['../structop_1_1_rectangle.html#a66e38889d2b413df95a9995e93103ff7',1,'op::Rectangle::operator*()']]], + ['operator_2a_3d_1936',['operator*=',['../structop_1_1_point.html#afb53230d0d884ca5432e948605b5c2e6',1,'op::Point::operator*=()'],['../structop_1_1_rectangle.html#a2d3d7951770da3954d5af9e365f5780c',1,'op::Rectangle::operator*=()']]], + ['operator_2b_1937',['operator+',['../structop_1_1_point.html#a0b362efa00fc5a0d35f743f3c01fa1d0',1,'op::Point::operator+(const Point< T > &point) const'],['../structop_1_1_point.html#af656ee43b596b5bb68139404a54c5a63',1,'op::Point::operator+(const T value) const']]], + ['operator_2b_3d_1938',['operator+=',['../structop_1_1_point.html#a60488ca743d82fe8dd574b01f992460c',1,'op::Point::operator+=(const T value)'],['../structop_1_1_point.html#ad5005ff994bfcd1846854d6af103a3a6',1,'op::Point::operator+=(const Point< T > &point)']]], + ['operator_2d_1939',['operator-',['../structop_1_1_point.html#a075741b8963b342bb068976afcf579af',1,'op::Point::operator-(const Point< T > &point) const'],['../structop_1_1_point.html#a8961164fe93cd91fcf55f56200730578',1,'op::Point::operator-(const T value) const']]], + ['operator_2d_3d_1940',['operator-=',['../structop_1_1_point.html#ad42deecd0077f7c962ca383cbc87e08f',1,'op::Point::operator-=(const Point< T > &point)'],['../structop_1_1_point.html#abf2bb2d2d5b3dba3424b489b99faa760',1,'op::Point::operator-=(const T value)']]], + ['operator_2f_1941',['operator/',['../structop_1_1_point.html#ad599eeba7a0137c3c138e5542bb2e9ed',1,'op::Point::operator/()'],['../structop_1_1_rectangle.html#adba48a35368d4a4d55896899b217d523',1,'op::Rectangle::operator/()']]], + ['operator_2f_3d_1942',['operator/=',['../structop_1_1_point.html#af8e49e33dad417d05ce8fb5f9dd68762',1,'op::Point::operator/=()'],['../structop_1_1_rectangle.html#a65620c7efbb3db95d85c90c2be3a851d',1,'op::Rectangle::operator/=()']]], + ['operator_3c_1943',['operator<',['../structop_1_1_datum.html#a9d67e55fbc26399e4efd2385c1899541',1,'op::Datum::operator<()'],['../structop_1_1_point.html#a6aaab75fe6d1c8b4c935c2da385fd7ee',1,'op::Point::operator<()']]], + 
['operator_3c_3d_1944',['operator<=',['../structop_1_1_datum.html#a32752199884dcb51b7157daa098063e1',1,'op::Datum::operator<=()'],['../structop_1_1_point.html#a872607032f6b4fa8982f88a74c88c6bd',1,'op::Point::operator<=()']]], + ['operator_3d_1945',['operator=',['../classop_1_1_array.html#a9c8e006e0eea472485f37971330ecbab',1,'op::Array::operator=(const Array< T > &array)'],['../classop_1_1_array.html#ae388368128afac05369172198911e05d',1,'op::Array::operator=(Array< T > &&array)'],['../structop_1_1_datum.html#a72ee10bf507aea368cfd3dba3dd38cb5',1,'op::Datum::operator=(const Datum &datum)'],['../structop_1_1_datum.html#a24f3bfcb0ffffeb5742eb1530bc9e367',1,'op::Datum::operator=(Datum &&datum)'],['../structop_1_1_point.html#aada0e9eecee2fb30fb903b32f9f33047',1,'op::Point::operator=(const Point< T > &point)'],['../structop_1_1_point.html#ac8596f2b3b50464b6c6eaa34b0a2c48b',1,'op::Point::operator=(Point< T > &&point)'],['../structop_1_1_rectangle.html#abd3476f9a32ad2058ea67c75c2a547a2',1,'op::Rectangle::operator=(const Rectangle< T > &rectangle)'],['../structop_1_1_rectangle.html#abea1a6760629dc4ed99875dae9d5ac36',1,'op::Rectangle::operator=(Rectangle< T > &&rectangle)'],['../classop_1_1_json_ofstream.html#aff76578c824c0314e33231884b40147e',1,'op::JsonOfstream::operator=()'],['../classop_1_1_thread.html#a16d1835e2bd7c5ae988f4bc225b3ca09',1,'op::Thread::operator=()']]], + ['operator_3d_3d_1946',['operator==',['../structop_1_1_point.html#a2f7900c0d58fb297b3b039cfb3c98a3e',1,'op::Point::operator==()'],['../structop_1_1_datum.html#ae740051202ca0db8358d5308143bb1b3',1,'op::Datum::operator==(const Datum &datum) const']]], + ['operator_3e_1947',['operator>',['../structop_1_1_datum.html#a79a05bec9871522cfab5d33cc7b63614',1,'op::Datum::operator>()'],['../structop_1_1_point.html#a0e94c712c194c0b317eef4d8995e52f3',1,'op::Point::operator>()']]], + ['operator_3e_3d_1948',['operator>=',['../structop_1_1_datum.html#ab97601a7628b46619f4a071cf1613ce6',1,'op::Datum::operator>=()'],['../structop_1_1_point.html#ae7afe35869eea79f72bd8b74fae4a2f1',1,'op::Point::operator>=()']]], + ['operator_5b_5d_1949',['operator[]',['../classop_1_1_array.html#aa40dc59e800d3c4cce623d560c0e0fad',1,'op::Array::operator[](const int index)'],['../classop_1_1_array.html#a0e1d5ce14d11caa3b92306ee677af4cc',1,'op::Array::operator[](const int index) const'],['../classop_1_1_array.html#aada0f1bd6e9eb73b4f977e62da536f58',1,'op::Array::operator[](const std::vector< int > &indexes)'],['../classop_1_1_array.html#ac4e9514cfee78a3a0236c1a6265376d8',1,'op::Array::operator[](const std::vector< int > &indexes) const']]], + ['oplog_1950',['opLog',['../namespaceop.html#aa72861fea0671209aca1ea5fa385891a',1,'op::opLog(const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")'],['../namespaceop.html#a838b69fead43c8a848d059b5f9d63baf',1,'op::opLog(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")']]], + ['oplogifdebug_1951',['opLogIfDebug',['../namespaceop.html#a91dd00cbb8fb646e6612455eb0f1b3e9',1,'op']]], + ['opoutputtocvmat_1952',['OpOutputToCvMat',['../classop_1_1_op_output_to_cv_mat.html#a60affeb41b26b1357cf8c797c7e16ecb',1,'op::OpOutputToCvMat']]] +]; diff --git a/web/html/doc/search/functions_f.html b/web/html/doc/search/functions_f.html new file mode 100644 index 000000000..54b7dee08 --- /dev/null +++ b/web/html/doc/search/functions_f.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/functions_f.js b/web/html/doc/search/functions_f.js new file mode 100644 index 000000000..c63eb6fa1 --- /dev/null +++ b/web/html/doc/search/functions_f.js @@ -0,0 +1,33 @@ +var searchData= +[ + ['pafptrintovector_1953',['pafPtrIntoVector',['../namespaceop.html#aaec4a34b015f898d28be2b9f2aba0d38',1,'op']]], + ['pafvectorintopeoplevector_1954',['pafVectorIntoPeopleVector',['../namespaceop.html#a36f0207c6263e7174f4c79eba7c4df3f',1,'op']]], + ['peoplejsonsaver_1955',['PeopleJsonSaver',['../classop_1_1_people_json_saver.html#aa6e2f479d959752c5c0f71cd8b4427c2',1,'op::PeopleJsonSaver']]], + ['peoplevectortopeoplearray_1956',['peopleVectorToPeopleArray',['../namespaceop.html#a3dbd17f2f656a2bc751441a42b5b9516',1,'op']]], + ['personidextractor_1957',['PersonIdExtractor',['../classop_1_1_person_id_extractor.html#a5916ec673bdbe127386b7f496b188828',1,'op::PersonIdExtractor']]], + ['persontracker_1958',['PersonTracker',['../classop_1_1_person_tracker.html#aa88f617ff9f1ff509c54b2cbf51e764a',1,'op::PersonTracker']]], + ['plaintext_1959',['plainText',['../classop_1_1_json_ofstream.html#aa432ff172e10bb9e3b6e2bfa6124c648',1,'op::JsonOfstream']]], + ['point_1960',['Point',['../structop_1_1_point.html#a9f80114d18ec8055360222d975bcd5a8',1,'op::Point::Point(const T x=0, const T y=0)'],['../structop_1_1_point.html#a44559988e3980e21568b5d9dd2897368',1,'op::Point::Point(const Point< T > &point)'],['../structop_1_1_point.html#ab3b92e4a40cd58d948647484f21dd9ef',1,'op::Point::Point(Point< T > &&point)']]], + ['pop_1961',['pop',['../classop_1_1_queue_base.html#a5b28915cc58e040aca673bdfdf7c8be3',1,'op::QueueBase']]], + ['posebodypartmapstringtokey_1962',['poseBodyPartMapStringToKey',['../namespaceop.html#aacf6e688031bb116e4878b811e8dbc23',1,'op::poseBodyPartMapStringToKey(const PoseModel poseModel, const std::vector< std::string > &strings)'],['../namespaceop.html#a3df938ef93037c534c5d342720d5fb70',1,'op::poseBodyPartMapStringToKey(const PoseModel poseModel, const std::string &string)']]], + ['posecpurenderer_1963',['PoseCpuRenderer',['../classop_1_1_pose_cpu_renderer.html#a5863733d560345d4890557b0f8c0d08e',1,'op::PoseCpuRenderer']]], + ['poseextractor_1964',['PoseExtractor',['../classop_1_1_pose_extractor.html#acd50fa337aef1d658b6fed3edc717ada',1,'op::PoseExtractor']]], + ['poseextractorcaffe_1965',['PoseExtractorCaffe',['../classop_1_1_pose_extractor_caffe.html#a682152a072d07b1b0764c2f7aab09ab7',1,'op::PoseExtractorCaffe']]], + ['poseextractornet_1966',['PoseExtractorNet',['../classop_1_1_pose_extractor_net.html#a5503fceecf280b6b1ed6e3251de46e26',1,'op::PoseExtractorNet']]], + ['posegpurenderer_1967',['PoseGpuRenderer',['../classop_1_1_pose_gpu_renderer.html#a1582e63e33192d79f80b5879ba04d448',1,'op::PoseGpuRenderer']]], + ['poserenderer_1968',['PoseRenderer',['../classop_1_1_pose_renderer.html#a1dfd34d42fa69913a9702e0a0ebcd04e',1,'op::PoseRenderer']]], + ['posetriangulation_1969',['PoseTriangulation',['../classop_1_1_pose_triangulation.html#a7858f0c4adf7845c2be072e0985af3ee',1,'op::PoseTriangulation']]], + ['positivecharround_1970',['positiveCharRound',['../namespaceop.html#ab5eb10c958f3f37fb82d29361ad81467',1,'op']]], + ['positiveintround_1971',['positiveIntRound',['../namespaceop.html#a699ef17b0f27b8bc2c4d4a03e46e6be1',1,'op']]], + ['positivelonglonground_1972',['positiveLongLongRound',['../namespaceop.html#a1b479fea39a56c041a8a51aecf024bed',1,'op']]], + ['positivelonground_1973',['positiveLongRound',['../namespaceop.html#a57eee48e4cefd583a81cfc907586c035',1,'op']]], + 
['positivescharround_1974',['positiveSCharRound',['../namespaceop.html#ab71596bc88b87ea5920f19f978d6d6ac',1,'op']]], + ['printaveragedtimemseveryxiterations_1975',['printAveragedTimeMsEveryXIterations',['../classop_1_1_profiler.html#a1192952d076f52b884b32fcd496df2ec',1,'op::Profiler']]], + ['printaveragedtimemsoniterationx_1976',['printAveragedTimeMsOnIterationX',['../classop_1_1_profiler.html#a58b930a54a98bbc91af074395852da76',1,'op::Profiler']]], + ['printsize_1977',['printSize',['../classop_1_1_array.html#a3f26a48c35cde008970078a66ff6e5c7',1,'op::Array']]], + ['printtime_1978',['printTime',['../namespaceop.html#ab0908bcc0abb00c49ecbe7fc373b58c9',1,'op']]], + ['printverbose_1979',['printVerbose',['../classop_1_1_verbose_printer.html#ab85c8d6555a52eb77042646dfe798fbf',1,'op::VerbosePrinter']]], + ['priorityqueue_1980',['PriorityQueue',['../classop_1_1_priority_queue.html#acecdd3c5789942777652b66d08578d93',1,'op::PriorityQueue']]], + ['producer_1981',['Producer',['../classop_1_1_producer.html#aaec98c35fe9f2695cd31be3e2d437a61',1,'op::Producer']]], + ['profilegpumemory_1982',['profileGpuMemory',['../classop_1_1_profiler.html#a6e828c0b4fef5671a094727b7919a948',1,'op::Profiler']]] +]; diff --git a/web/html/doc/search/mag_sel.svg b/web/html/doc/search/mag_sel.svg new file mode 100644 index 000000000..03626f64a --- /dev/null +++ b/web/html/doc/search/mag_sel.svg @@ -0,0 +1,74 @@ + + + + + + + + image/svg+xml + + + + + + + + + + + diff --git a/web/html/doc/search/namespaces_0.html b/web/html/doc/search/namespaces_0.html new file mode 100644 index 000000000..21db2c3a5 --- /dev/null +++ b/web/html/doc/search/namespaces_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/namespaces_0.js b/web/html/doc/search/namespaces_0.js new file mode 100644 index 000000000..1df955daf --- /dev/null +++ b/web/html/doc/search/namespaces_0.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['configureerror_1426',['ConfigureError',['../namespaceop_1_1_configure_error.html',1,'op']]], + ['configurelog_1427',['ConfigureLog',['../namespaceop_1_1_configure_log.html',1,'op']]], + ['op_1428',['op',['../namespaceop.html',1,'']]] +]; diff --git a/web/html/doc/search/nomatches.html b/web/html/doc/search/nomatches.html new file mode 100644 index 000000000..2b9360b6b --- /dev/null +++ b/web/html/doc/search/nomatches.html @@ -0,0 +1,13 @@ + + + + + + + + +
    +
    No Matches
    +
    + + diff --git a/web/html/doc/search/pages_0.html b/web/html/doc/search/pages_0.html new file mode 100644 index 000000000..8517b48f0 --- /dev/null +++ b/web/html/doc/search/pages_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/pages_0.js b/web/html/doc/search/pages_0.js new file mode 100644 index 000000000..1dc75e2c3 --- /dev/null +++ b/web/html/doc/search/pages_0.js @@ -0,0 +1,31 @@ +var searchData= +[ + ['openpose_20advanced_20doc_20_2d_203_2dd_20reconstruction_20module_20and_20demo_2673',['OpenPose Advanced Doc - 3-D Reconstruction Module and Demo',['../md_doc_advanced_3d_reconstruction_module.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20calibration_20module_20and_20demo_2674',['OpenPose Advanced Doc - Calibration Module and Demo',['../md_doc_advanced_calibration_module.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20demo_20_2d_20advanced_2675',['OpenPose Advanced Doc - Demo - Advanced',['../md_doc_advanced_demo_advanced.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20deploying_2fexporting_20openpose_20to_20other_20projects_2676',['OpenPose Advanced Doc - Deploying/Exporting OpenPose to Other Projects',['../md_doc_advanced_deployment.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20heatmap_20output_2677',['OpenPose Advanced Doc - Heatmap Output',['../md_doc_advanced_heatmap_output.html',1,'']]], + ['openpose_20advanced_20doc_20_2d_20standalone_20face_20or_20hand_20keypoint_20detector_2678',['OpenPose Advanced Doc - Standalone Face or Hand Keypoint Detector',['../md_doc_advanced_standalone_face_or_hand_keypoint_detector.html',1,'']]], + ['openpose_20doc_2679',['OpenPose Doc',['../md_doc_00_index.html',1,'']]], + ['openpose_20doc_20_2d_20authors_20and_20contributors_2680',['OpenPose Doc - Authors and Contributors',['../md_doc_09_authors_and_contributors.html',1,'']]], + ['openpose_20doc_20_2d_20c_2b_2b_20api_2681',['OpenPose Doc - C++ API',['../md_doc_04_cpp_api.html',1,'']]], + ['openpose_20doc_20_2d_20community_2dbased_20projects_2682',['OpenPose Doc - Community-based Projects',['../md_doc_10_community_projects.html',1,'']]], + ['openpose_20doc_20_2d_20demo_2683',['OpenPose Doc - Demo',['../md_doc_01_demo.html',1,'']]], + ['openpose_20doc_20_2d_20frequently_20asked_20questions_20_28faq_29_2684',['OpenPose Doc - Frequently Asked Questions (FAQ)',['../md_doc_05_faq.html',1,'']]], + ['openpose_20doc_20_2d_20installation_2685',['OpenPose Doc - Installation',['../md_doc_installation_0_index.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20_28deprecated_29_2686',['OpenPose Doc - Installation (deprecated)',['../md_doc_installation_deprecated_installation_deprecated.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20_2d_20additional_20settings_20_28optional_29_2687',['OpenPose Doc - Installation - Additional Settings (Optional)',['../md_doc_installation_2_additional_settings.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20_2d_20prerequisites_2688',['OpenPose Doc - Installation - Prerequisites',['../md_doc_installation_1_prerequisites.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20on_20nvidia_20jetson_20tx1_2689',['OpenPose Doc - Installation on Nvidia Jetson TX1',['../md_doc_installation_jetson_tx_installation_jetson_tx1.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20on_20nvidia_20jetson_20tx2_20jetpack_203_2e1_2690',['OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.1',['../md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_1.html',1,'']]], + ['openpose_20doc_20_2d_20installation_20on_20nvidia_20jetson_20tx2_20jetpack_203_2e3_2691',['OpenPose Doc - Installation on Nvidia Jetson TX2 JetPack 3.3',['../md_doc_installation_jetson_tx_installation_jetson_tx2_jetpack3_3.html',1,'']]], + 
['openpose_20doc_20_2d_20major_20released_20features_2692',['OpenPose Doc - Major Released Features',['../md_doc_07_major_released_features.html',1,'']]], + ['openpose_20doc_20_2d_20maximizing_20the_20openpose_20speed_2693',['OpenPose Doc - Maximizing the OpenPose Speed',['../md_doc_06_maximizing_openpose_speed.html',1,'']]], + ['openpose_20doc_20_2d_20output_2694',['OpenPose Doc - Output',['../md_doc_02_output.html',1,'']]], + ['openpose_20doc_20_2d_20python_20api_2695',['OpenPose Doc - Python API',['../md_doc_03_python_api.html',1,'']]], + ['openpose_20doc_20_2d_20release_20notes_2696',['OpenPose Doc - Release Notes',['../md_doc_08_release_notes.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_2697',['OpenPose Very Advanced Doc - Library Structure',['../md_doc_very_advanced_library_structure_0_index.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_20_2d_20deep_20overview_2698',['OpenPose Very Advanced Doc - Library Structure - Deep Overview',['../md_doc_very_advanced_library_structure_1_library_deep_overview.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_20_2d_20how_20to_20extend_20functionality_2699',['OpenPose Very Advanced Doc - Library Structure - How to Extend Functionality',['../md_doc_very_advanced_library_structure_2_library_extend_functionality.html',1,'']]], + ['openpose_20very_20advanced_20doc_20_2d_20library_20structure_20_2d_20steps_20to_20add_20a_20new_20module_2700',['OpenPose Very Advanced Doc - Library Structure - Steps to Add a New Module',['../md_doc_very_advanced_library_structure_3_library_add_new_module.html',1,'']]] +]; diff --git a/web/html/doc/search/search.css b/web/html/doc/search/search.css new file mode 100644 index 000000000..9074198f8 --- /dev/null +++ b/web/html/doc/search/search.css @@ -0,0 +1,257 @@ +/*---------------- Search Box */ + +#MSearchBox { + white-space : nowrap; + background: white; + border-radius: 0.65em; + box-shadow: inset 0.5px 0.5px 3px 0px #555; + z-index: 102; +} + +#MSearchBox .left { + display: inline-block; + vertical-align: middle; + height: 1.4em; +} + +#MSearchSelect { + display: inline-block; + vertical-align: middle; + height: 1.4em; + padding: 0 0 0 0.3em; + margin: 0; +} + +#MSearchField { + display: inline-block; + vertical-align: middle; + width: 7.5em; + height: 1.1em; + margin: 0 0.15em; + padding: 0; + line-height: 1em; + border:none; + color: #909090; + outline: none; + font-family: Arial, Verdana, sans-serif; + -webkit-border-radius: 0px; + border-radius: 0px; + background: none; +} + + +#MSearchBox .right { + display: inline-block; + vertical-align: middle; + width: 1.4em; + height: 1.4em; +} + +#MSearchClose { + display: none; + font-size: inherit; + background : none; + border: none; + margin: 0; + padding: 0; + outline: none; + +} + +#MSearchCloseImg { + height: 1.4em; + padding: 0.3em; + margin: 0; +} + +.MSearchBoxActive #MSearchField { + color: #000000; +} + +#main-menu > li:last-child { + /* This
  • object is the parent of the search bar */ + display: flex; + justify-content: center; + align-items: center; + height: 36px; + margin-right: 1em; +} + +/*---------------- Search filter selection */ + +#MSearchSelectWindow { + display: none; + position: absolute; + left: 0; top: 0; + border: 1px solid #90A5CE; + background-color: #F9FAFC; + z-index: 10001; + padding-top: 4px; + padding-bottom: 4px; + -moz-border-radius: 4px; + -webkit-border-top-left-radius: 4px; + -webkit-border-top-right-radius: 4px; + -webkit-border-bottom-left-radius: 4px; + -webkit-border-bottom-right-radius: 4px; + -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); +} + +.SelectItem { + font: 8pt Arial, Verdana, sans-serif; + padding-left: 2px; + padding-right: 12px; + border: 0px; +} + +span.SelectionMark { + margin-right: 4px; + font-family: monospace; + outline-style: none; + text-decoration: none; +} + +a.SelectItem { + display: block; + outline-style: none; + color: #000000; + text-decoration: none; + padding-left: 6px; + padding-right: 12px; +} + +a.SelectItem:focus, +a.SelectItem:active { + color: #000000; + outline-style: none; + text-decoration: none; +} + +a.SelectItem:hover { + color: #FFFFFF; + background-color: #3D578C; + outline-style: none; + text-decoration: none; + cursor: pointer; + display: block; +} + +/*---------------- Search results window */ + +iframe#MSearchResults { + width: 60ex; + height: 15em; +} + +#MSearchResultsWindow { + display: none; + position: absolute; + left: 0; top: 0; + border: 1px solid #000; + background-color: #EEF1F7; + z-index:10000; +} + +/* ----------------------------------- */ + + +#SRIndex { + clear:both; + padding-bottom: 15px; +} + +.SREntry { + font-size: 10pt; + padding-left: 1ex; +} + +.SRPage .SREntry { + font-size: 8pt; + padding: 1px 5px; +} + +body.SRPage { + margin: 5px 2px; +} + +.SRChildren { + padding-left: 3ex; padding-bottom: .5em +} + +.SRPage .SRChildren { + display: none; +} + +.SRSymbol { + font-weight: bold; + color: #425E97; + font-family: Arial, Verdana, sans-serif; + text-decoration: none; + outline: none; +} + +a.SRScope { + display: block; + color: #425E97; + font-family: Arial, Verdana, sans-serif; + text-decoration: none; + outline: none; +} + +a.SRSymbol:focus, a.SRSymbol:active, +a.SRScope:focus, a.SRScope:active { + text-decoration: underline; +} + +span.SRScope { + padding-left: 4px; + font-family: Arial, Verdana, sans-serif; +} + +.SRPage .SRStatus { + padding: 2px 5px; + font-size: 8pt; + font-style: italic; + font-family: Arial, Verdana, sans-serif; +} + +.SRResult { + display: none; +} + +div.searchresults { + margin-left: 10px; + margin-right: 10px; +} + +/*---------------- External search page results */ + +.searchresult { + background-color: #F0F3F8; +} + +.pages b { + color: white; + padding: 5px 5px 3px 5px; + background-image: url("../tab_a.png"); + background-repeat: repeat-x; + text-shadow: 0 1px 1px #000000; +} + +.pages { + line-height: 17px; + margin-left: 4px; + text-decoration: none; +} + +.hl { + font-weight: bold; +} + +#searchresults { + margin-bottom: 20px; +} + +.searchpages { + margin-top: 10px; +} + diff --git a/web/html/doc/search/search.js b/web/html/doc/search/search.js new file mode 100644 index 000000000..fb226f734 --- /dev/null +++ b/web/html/doc/search/search.js @@ -0,0 +1,816 @@ +/* + @licstart The following is the entire license notice for the JavaScript code in this file. 
+ + The MIT License (MIT) + + Copyright (C) 1997-2020 by Dimitri van Heesch + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software + and associated documentation files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, publish, distribute, + sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or + substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + @licend The above is the entire license notice for the JavaScript code in this file + */ +function convertToId(search) +{ + var result = ''; + for (i=0;i do a search + { + this.Search(); + } + } + + this.OnSearchSelectKey = function(evt) + { + var e = (evt) ? evt : window.event; // for IE + if (e.keyCode==40 && this.searchIndex0) // Up + { + this.searchIndex--; + this.OnSelectItem(this.searchIndex); + } + else if (e.keyCode==13 || e.keyCode==27) + { + this.OnSelectItem(this.searchIndex); + this.CloseSelectionWindow(); + this.DOMSearchField().focus(); + } + return false; + } + + // --------- Actions + + // Closes the results window. + this.CloseResultsWindow = function() + { + this.DOMPopupSearchResultsWindow().style.display = 'none'; + this.DOMSearchClose().style.display = 'none'; + this.Activate(false); + } + + this.CloseSelectionWindow = function() + { + this.DOMSearchSelectWindow().style.display = 'none'; + } + + // Performs a search. 
+ this.Search = function() + { + this.keyTimeout = 0; + + // strip leading whitespace + var searchValue = this.DOMSearchField().value.replace(/^ +/, ""); + + var code = searchValue.toLowerCase().charCodeAt(0); + var idxChar = searchValue.substr(0, 1).toLowerCase(); + if ( 0xD800 <= code && code <= 0xDBFF && searchValue > 1) // surrogate pair + { + idxChar = searchValue.substr(0, 2); + } + + var resultsPage; + var resultsPageWithSearch; + var hasResultsPage; + + var idx = indexSectionsWithContent[this.searchIndex].indexOf(idxChar); + if (idx!=-1) + { + var hexCode=idx.toString(16); + resultsPage = this.resultsPath + '/' + indexSectionNames[this.searchIndex] + '_' + hexCode + this.extension; + resultsPageWithSearch = resultsPage+'?'+escape(searchValue); + hasResultsPage = true; + } + else // nothing available for this search term + { + resultsPage = this.resultsPath + '/nomatches' + this.extension; + resultsPageWithSearch = resultsPage; + hasResultsPage = false; + } + + window.frames.MSearchResults.location = resultsPageWithSearch; + var domPopupSearchResultsWindow = this.DOMPopupSearchResultsWindow(); + + if (domPopupSearchResultsWindow.style.display!='block') + { + var domSearchBox = this.DOMSearchBox(); + this.DOMSearchClose().style.display = 'inline-block'; + if (this.insideFrame) + { + var domPopupSearchResults = this.DOMPopupSearchResults(); + domPopupSearchResultsWindow.style.position = 'relative'; + domPopupSearchResultsWindow.style.display = 'block'; + var width = document.body.clientWidth - 8; // the -8 is for IE :-( + domPopupSearchResultsWindow.style.width = width + 'px'; + domPopupSearchResults.style.width = width + 'px'; + } + else + { + var domPopupSearchResults = this.DOMPopupSearchResults(); + var left = getXPos(domSearchBox) + 150; // domSearchBox.offsetWidth; + var top = getYPos(domSearchBox) + 20; // domSearchBox.offsetHeight + 1; + domPopupSearchResultsWindow.style.display = 'block'; + left -= domPopupSearchResults.offsetWidth; + domPopupSearchResultsWindow.style.top = top + 'px'; + domPopupSearchResultsWindow.style.left = left + 'px'; + } + } + + this.lastSearchValue = searchValue; + this.lastResultsPage = resultsPage; + } + + // -------- Activation Functions + + // Activates or deactivates the search panel, resetting things to + // their default values if necessary. + this.Activate = function(isActive) + { + if (isActive || // open it + this.DOMPopupSearchResultsWindow().style.display == 'block' + ) + { + this.DOMSearchBox().className = 'MSearchBoxActive'; + + var searchField = this.DOMSearchField(); + + if (searchField.value == this.searchLabel) // clear "Search" term upon entry + { + searchField.value = ''; + this.searchActive = true; + } + } + else if (!isActive) // directly remove the panel + { + this.DOMSearchBox().className = 'MSearchBoxInactive'; + this.DOMSearchField().value = this.searchLabel; + this.searchActive = false; + this.lastSearchValue = '' + this.lastResultsPage = ''; + } + } +} + +// ----------------------------------------------------------------------- + +// The class that handles everything on the search results page. +function SearchResults(name) +{ + // The number of matches from the last run of . + this.lastMatchCount = 0; + this.lastKey = 0; + this.repeatOn = false; + + // Toggles the visibility of the passed element ID. 
+ this.FindChildElement = function(id) + { + var parentElement = document.getElementById(id); + var element = parentElement.firstChild; + + while (element && element!=parentElement) + { + if (element.nodeName.toLowerCase() == 'div' && element.className == 'SRChildren') + { + return element; + } + + if (element.nodeName.toLowerCase() == 'div' && element.hasChildNodes()) + { + element = element.firstChild; + } + else if (element.nextSibling) + { + element = element.nextSibling; + } + else + { + do + { + element = element.parentNode; + } + while (element && element!=parentElement && !element.nextSibling); + + if (element && element!=parentElement) + { + element = element.nextSibling; + } + } + } + } + + this.Toggle = function(id) + { + var element = this.FindChildElement(id); + if (element) + { + if (element.style.display == 'block') + { + element.style.display = 'none'; + } + else + { + element.style.display = 'block'; + } + } + } + + // Searches for the passed string. If there is no parameter, + // it takes it from the URL query. + // + // Always returns true, since other documents may try to call it + // and that may or may not be possible. + this.Search = function(search) + { + if (!search) // get search word from URL + { + search = window.location.search; + search = search.substring(1); // Remove the leading '?' + search = unescape(search); + } + + search = search.replace(/^ +/, ""); // strip leading spaces + search = search.replace(/ +$/, ""); // strip trailing spaces + search = search.toLowerCase(); + search = convertToId(search); + + var resultRows = document.getElementsByTagName("div"); + var matches = 0; + + var i = 0; + while (i < resultRows.length) + { + var row = resultRows.item(i); + if (row.className == "SRResult") + { + var rowMatchName = row.id.toLowerCase(); + rowMatchName = rowMatchName.replace(/^sr\d*_/, ''); // strip 'sr123_' + + if (search.length<=rowMatchName.length && + rowMatchName.substr(0, search.length)==search) + { + row.style.display = 'block'; + matches++; + } + else + { + row.style.display = 'none'; + } + } + i++; + } + document.getElementById("Searching").style.display='none'; + if (matches == 0) // no results + { + document.getElementById("NoMatches").style.display='block'; + } + else // at least one result + { + document.getElementById("NoMatches").style.display='none'; + } + this.lastMatchCount = matches; + return true; + } + + // return the first item with index index or higher that is visible + this.NavNext = function(index) + { + var focusItem; + while (1) + { + var focusName = 'Item'+index; + focusItem = document.getElementById(focusName); + if (focusItem && focusItem.parentNode.parentNode.style.display=='block') + { + break; + } + else if (!focusItem) // last element + { + break; + } + focusItem=null; + index++; + } + return focusItem; + } + + this.NavPrev = function(index) + { + var focusItem; + while (1) + { + var focusName = 'Item'+index; + focusItem = document.getElementById(focusName); + if (focusItem && focusItem.parentNode.parentNode.style.display=='block') + { + break; + } + else if (!focusItem) // last element + { + break; + } + focusItem=null; + index--; + } + return focusItem; + } + + this.ProcessKeys = function(e) + { + if (e.type == "keydown") + { + this.repeatOn = false; + this.lastKey = e.keyCode; + } + else if (e.type == "keypress") + { + if (!this.repeatOn) + { + if (this.lastKey) this.repeatOn = true; + return false; // ignore first keypress after keydown + } + } + else if (e.type == "keyup") + { + this.lastKey = 0; + this.repeatOn = 
false; + } + return this.lastKey!=0; + } + + this.Nav = function(evt,itemIndex) + { + var e = (evt) ? evt : window.event; // for IE + if (e.keyCode==13) return true; + if (!this.ProcessKeys(e)) return false; + + if (this.lastKey==38) // Up + { + var newIndex = itemIndex-1; + var focusItem = this.NavPrev(newIndex); + if (focusItem) + { + var child = this.FindChildElement(focusItem.parentNode.parentNode.id); + if (child && child.style.display == 'block') // children visible + { + var n=0; + var tmpElem; + while (1) // search for last child + { + tmpElem = document.getElementById('Item'+newIndex+'_c'+n); + if (tmpElem) + { + focusItem = tmpElem; + } + else // found it! + { + break; + } + n++; + } + } + } + if (focusItem) + { + focusItem.focus(); + } + else // return focus to search field + { + parent.document.getElementById("MSearchField").focus(); + } + } + else if (this.lastKey==40) // Down + { + var newIndex = itemIndex+1; + var focusItem; + var item = document.getElementById('Item'+itemIndex); + var elem = this.FindChildElement(item.parentNode.parentNode.id); + if (elem && elem.style.display == 'block') // children visible + { + focusItem = document.getElementById('Item'+itemIndex+'_c0'); + } + if (!focusItem) focusItem = this.NavNext(newIndex); + if (focusItem) focusItem.focus(); + } + else if (this.lastKey==39) // Right + { + var item = document.getElementById('Item'+itemIndex); + var elem = this.FindChildElement(item.parentNode.parentNode.id); + if (elem) elem.style.display = 'block'; + } + else if (this.lastKey==37) // Left + { + var item = document.getElementById('Item'+itemIndex); + var elem = this.FindChildElement(item.parentNode.parentNode.id); + if (elem) elem.style.display = 'none'; + } + else if (this.lastKey==27) // Escape + { + parent.searchBox.CloseResultsWindow(); + parent.document.getElementById("MSearchField").focus(); + } + else if (this.lastKey==13) // Enter + { + return true; + } + return false; + } + + this.NavChild = function(evt,itemIndex,childIndex) + { + var e = (evt) ? evt : window.event; // for IE + if (e.keyCode==13) return true; + if (!this.ProcessKeys(e)) return false; + + if (this.lastKey==38) // Up + { + if (childIndex>0) + { + var newIndex = childIndex-1; + document.getElementById('Item'+itemIndex+'_c'+newIndex).focus(); + } + else // already at first child, jump to parent + { + document.getElementById('Item'+itemIndex).focus(); + } + } + else if (this.lastKey==40) // Down + { + var newIndex = childIndex+1; + var elem = document.getElementById('Item'+itemIndex+'_c'+newIndex); + if (!elem) // last child, jump to parent next parent + { + elem = this.NavNext(itemIndex+1); + } + if (elem) + { + elem.focus(); + } + } + else if (this.lastKey==27) // Escape + { + parent.searchBox.CloseResultsWindow(); + parent.document.getElementById("MSearchField").focus(); + } + else if (this.lastKey==13) // Enter + { + return true; + } + return false; + } +} + +function setKeyActions(elem,action) +{ + elem.setAttribute('onkeydown',action); + elem.setAttribute('onkeypress',action); + elem.setAttribute('onkeyup',action); +} + +function setClassAttr(elem,attr) +{ + elem.setAttribute('class',attr); + elem.setAttribute('className',attr); +} + +function createResults() +{ + var results = document.getElementById("SRResults"); + for (var e=0; e + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/typedefs_0.js b/web/html/doc/search/typedefs_0.js new file mode 100644 index 000000000..abd3981ff --- /dev/null +++ b/web/html/doc/search/typedefs_0.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['wrapper_2488',['Wrapper',['../namespaceop.html#a790dea3c007bed742fbc8cdd5757d026',1,'op']]] +]; diff --git a/web/html/doc/search/variables_0.html b/web/html/doc/search/variables_0.html new file mode 100644 index 000000000..1e477c08c --- /dev/null +++ b/web/html/doc/search/variables_0.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_0.js b/web/html/doc/search/variables_0.js new file mode 100644 index 000000000..e9a55d4af --- /dev/null +++ b/web/html/doc/search/variables_0.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['addpartcandidates_2318',['addPartCandidates',['../structop_1_1_wrapper_struct_pose.html#ad73981c6ad9b23f511ef6f12136bf8e7',1,'op::WrapperStructPose']]], + ['alphaheatmap_2319',['alphaHeatMap',['../structop_1_1_wrapper_struct_face.html#a49f609ae1c075f272bbaf32e128cc3a9',1,'op::WrapperStructFace::alphaHeatMap()'],['../structop_1_1_wrapper_struct_hand.html#a716f9c98cbee1a4a70d5978875795c4d',1,'op::WrapperStructHand::alphaHeatMap()'],['../structop_1_1_wrapper_struct_pose.html#ac1233492c750fbd98df353bffa8f9b78',1,'op::WrapperStructPose::alphaHeatMap()']]], + ['alphakeypoint_2320',['alphaKeypoint',['../structop_1_1_wrapper_struct_face.html#a65a0244cbcea06621c6f8c41e519990f',1,'op::WrapperStructFace::alphaKeypoint()'],['../structop_1_1_wrapper_struct_hand.html#a8074cf22f8926d7f4d1d60cacae99c3e',1,'op::WrapperStructHand::alphaKeypoint()'],['../structop_1_1_wrapper_struct_pose.html#a4d3ad84b14697d5f1009fa29e2ff1998',1,'op::WrapperStructPose::alphaKeypoint()']]] +]; diff --git a/web/html/doc/search/variables_1.html b/web/html/doc/search/variables_1.html new file mode 100644 index 000000000..ea73d9a49 --- /dev/null +++ b/web/html/doc/search/variables_1.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_1.js b/web/html/doc/search/variables_1.js new file mode 100644 index 000000000..523a19670 --- /dev/null +++ b/web/html/doc/search/variables_1.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['blendoriginalframe_2321',['blendOriginalFrame',['../structop_1_1_wrapper_struct_pose.html#aa2cee9019b708d48cc18313615d0189e',1,'op::WrapperStructPose']]] +]; diff --git a/web/html/doc/search/variables_10.html b/web/html/doc/search/variables_10.html new file mode 100644 index 000000000..dc9920b6d --- /dev/null +++ b/web/html/doc/search/variables_10.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_10.js b/web/html/doc/search/variables_10.js new file mode 100644 index 000000000..9417bd0a3 --- /dev/null +++ b/web/html/doc/search/variables_10.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['tracking_2463',['tracking',['../structop_1_1_wrapper_struct_extra.html#a86ae9d1faa008aaeed4d6fa6ff03f0fb',1,'op::WrapperStructExtra']]] +]; diff --git a/web/html/doc/search/variables_11.html b/web/html/doc/search/variables_11.html new file mode 100644 index 000000000..704bcb187 --- /dev/null +++ b/web/html/doc/search/variables_11.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_11.js b/web/html/doc/search/variables_11.js new file mode 100644 index 000000000..422cfa1f0 --- /dev/null +++ b/web/html/doc/search/variables_11.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['udphost_2464',['udpHost',['../structop_1_1_wrapper_struct_output.html#a17c339a9c6c97e1dfdb29b3af0bdab77',1,'op::WrapperStructOutput']]], + ['udpport_2465',['udpPort',['../structop_1_1_wrapper_struct_output.html#a873bcabae9cf7039830cae565009ce2b',1,'op::WrapperStructOutput']]], + ['undistortimage_2466',['undistortImage',['../structop_1_1_wrapper_struct_input.html#ae7183e10862dbdbed422f042f1a71ed1',1,'op::WrapperStructInput']]], + ['upsamplingratio_2467',['upsamplingRatio',['../structop_1_1_wrapper_struct_pose.html#a25ee056d914f1cdc990a8a7956810313',1,'op::WrapperStructPose']]] +]; diff --git a/web/html/doc/search/variables_12.html b/web/html/doc/search/variables_12.html new file mode 100644 index 000000000..a3a32eb8e --- /dev/null +++ b/web/html/doc/search/variables_12.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_12.js b/web/html/doc/search/variables_12.js new file mode 100644 index 000000000..453d0febe --- /dev/null +++ b/web/html/doc/search/variables_12.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['verbose_2468',['verbose',['../structop_1_1_wrapper_struct_output.html#aec57f5b4f6920cd43c2f6e55a21c769b',1,'op::WrapperStructOutput']]] +]; diff --git a/web/html/doc/search/variables_13.html b/web/html/doc/search/variables_13.html new file mode 100644 index 000000000..7d05bd863 --- /dev/null +++ b/web/html/doc/search/variables_13.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_13.js b/web/html/doc/search/variables_13.js new file mode 100644 index 000000000..12794af49 --- /dev/null +++ b/web/html/doc/search/variables_13.js @@ -0,0 +1,20 @@ +var searchData= +[ + ['width_2469',['width',['../structop_1_1_rectangle.html#a0d0ae826039b0961fae8723708809cdf',1,'op::Rectangle']]], + ['writebvh_2470',['writeBvh',['../structop_1_1_wrapper_struct_output.html#abfa84cf0ae76a1c07f9d33b98e9f6d84',1,'op::WrapperStructOutput']]], + ['writecocojson_2471',['writeCocoJson',['../structop_1_1_wrapper_struct_output.html#a8658c8313ac1d8ddb177d83de2e1bfe7',1,'op::WrapperStructOutput']]], + ['writecocojsonvariant_2472',['writeCocoJsonVariant',['../structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de',1,'op::WrapperStructOutput']]], + ['writecocojsonvariants_2473',['writeCocoJsonVariants',['../structop_1_1_wrapper_struct_output.html#aca7b610f478c36b643fcbd73931c9278',1,'op::WrapperStructOutput']]], + ['writeheatmaps_2474',['writeHeatMaps',['../structop_1_1_wrapper_struct_output.html#a29583f73263bdffe1903ea64a9c09463',1,'op::WrapperStructOutput']]], + ['writeheatmapsformat_2475',['writeHeatMapsFormat',['../structop_1_1_wrapper_struct_output.html#a70278a7418053ced9de2447cc78f4240',1,'op::WrapperStructOutput']]], + ['writeimages_2476',['writeImages',['../structop_1_1_wrapper_struct_output.html#acb0e1a13713fd796c9452684bdb7cdaf',1,'op::WrapperStructOutput']]], + ['writeimagesformat_2477',['writeImagesFormat',['../structop_1_1_wrapper_struct_output.html#ae12454443c1c8b1c74f257eaac4927d3',1,'op::WrapperStructOutput']]], + ['writejson_2478',['writeJson',['../structop_1_1_wrapper_struct_output.html#a8899e8af7df7dad1215a09f61fc8198b',1,'op::WrapperStructOutput']]], + ['writekeypoint_2479',['writeKeypoint',['../structop_1_1_wrapper_struct_output.html#a183afe9fa45aa69a9d79b9434163ed3e',1,'op::WrapperStructOutput']]], + ['writekeypointformat_2480',['writeKeypointFormat',['../structop_1_1_wrapper_struct_output.html#ad338fd4719d6f243bb64bc67f68bc7c9',1,'op::WrapperStructOutput']]], + ['writevideo_2481',['writeVideo',['../structop_1_1_wrapper_struct_output.html#a49d8f54f546bfe6a6c644280c5e50458',1,'op::WrapperStructOutput']]], + ['writevideo3d_2482',['writeVideo3D',['../structop_1_1_wrapper_struct_output.html#ad996d177c4b84e2d38c105f637559094',1,'op::WrapperStructOutput']]], + ['writevideoadam_2483',['writeVideoAdam',['../structop_1_1_wrapper_struct_output.html#a4b829e1d007943cba3f4a23be25bc74d',1,'op::WrapperStructOutput']]], + ['writevideofps_2484',['writeVideoFps',['../structop_1_1_wrapper_struct_output.html#ad595edffced2bfd80c3bee183f32f505',1,'op::WrapperStructOutput']]], + ['writevideowithaudio_2485',['writeVideoWithAudio',['../structop_1_1_wrapper_struct_output.html#a3f6370fa1cb1f13922e36831c564588c',1,'op::WrapperStructOutput']]] +]; diff --git a/web/html/doc/search/variables_14.html b/web/html/doc/search/variables_14.html new file mode 100644 index 000000000..ab9e0fa9b --- /dev/null +++ b/web/html/doc/search/variables_14.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_14.js b/web/html/doc/search/variables_14.js new file mode 100644 index 000000000..ec63c39e2 --- /dev/null +++ b/web/html/doc/search/variables_14.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['x_2486',['x',['../structop_1_1_point.html#a812d4ef29d102f4ad18f32ae54eb17ec',1,'op::Point::x()'],['../structop_1_1_rectangle.html#ac4ae58fe6ffd2f811f5cbc48661c1856',1,'op::Rectangle::x()']]] +]; diff --git a/web/html/doc/search/variables_15.html b/web/html/doc/search/variables_15.html new file mode 100644 index 000000000..45b78e3d9 --- /dev/null +++ b/web/html/doc/search/variables_15.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_15.js b/web/html/doc/search/variables_15.js new file mode 100644 index 000000000..d48ad7232 --- /dev/null +++ b/web/html/doc/search/variables_15.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['y_2487',['y',['../structop_1_1_point.html#a5821bc77a416629916e671793df3ce3b',1,'op::Point::y()'],['../structop_1_1_rectangle.html#a64e6891af0088a4ad271a725601b8043',1,'op::Rectangle::y()']]] +]; diff --git a/web/html/doc/search/variables_2.html b/web/html/doc/search/variables_2.html new file mode 100644 index 000000000..0580462e9 --- /dev/null +++ b/web/html/doc/search/variables_2.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_2.js b/web/html/doc/search/variables_2.js new file mode 100644 index 000000000..22902f631 --- /dev/null +++ b/web/html/doc/search/variables_2.js @@ -0,0 +1,13 @@ +var searchData= +[ + ['caffemodelpath_2322',['caffeModelPath',['../structop_1_1_wrapper_struct_pose.html#a6de869a73fd338bd41e390fcb1a5bcf3',1,'op::WrapperStructPose']]], + ['cameraextrinsics_2323',['cameraExtrinsics',['../structop_1_1_datum.html#aa3e5b74f3d54bc880f47831c3932dfa9',1,'op::Datum']]], + ['cameraintrinsics_2324',['cameraIntrinsics',['../structop_1_1_datum.html#ae2aad08cc74ee43e1242b403d47be2ff',1,'op::Datum']]], + ['cameramatrix_2325',['cameraMatrix',['../structop_1_1_datum.html#aa27ee36fd2e1fb0dfc5c1e6869e2073e',1,'op::Datum']]], + ['cameraparameterpath_2326',['cameraParameterPath',['../structop_1_1_wrapper_struct_input.html#a4c77c6257dec58ac0a5e18cfe5b38a26',1,'op::WrapperStructInput']]], + ['cameraresolution_2327',['cameraResolution',['../structop_1_1_wrapper_struct_input.html#ae2078c540324a9cdc8500dce5d361bee',1,'op::WrapperStructInput']]], + ['cuda_5fnum_5fthreads_2328',['CUDA_NUM_THREADS',['../namespaceop.html#ac7bbf63b37bf6762c47557ad227e036d',1,'op']]], + ['cvinputdata_2329',['cvInputData',['../structop_1_1_datum.html#a1f9ba4bd5be779a911c8c8e7962ea727',1,'op::Datum']]], + ['cvoutputdata_2330',['cvOutputData',['../structop_1_1_datum.html#ad70b95f61637fe23092bca8f0a4fb088',1,'op::Datum']]], + ['cvoutputdata3d_2331',['cvOutputData3D',['../structop_1_1_datum.html#a0aa21ea7a3adea0126003b778509f2d2',1,'op::Datum']]] +]; diff --git a/web/html/doc/search/variables_3.html b/web/html/doc/search/variables_3.html new file mode 100644 index 000000000..0d69e7619 --- /dev/null +++ b/web/html/doc/search/variables_3.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_3.js b/web/html/doc/search/variables_3.js new file mode 100644 index 000000000..1508eac10 --- /dev/null +++ b/web/html/doc/search/variables_3.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['default_5fx_2332',['DEFAULT_X',['../classop_1_1_profiler.html#a13de5fe55b2599c0626d5071d3851dec',1,'op::Profiler']]], + ['defaultparttorender_2333',['defaultPartToRender',['../structop_1_1_wrapper_struct_pose.html#ab6810e97aa62a728aa09dbbe6b9b6c06',1,'op::WrapperStructPose']]], + ['detector_2334',['detector',['../structop_1_1_wrapper_struct_face.html#a30d45f383e5c9d72709b5281f24f1ed0',1,'op::WrapperStructFace::detector()'],['../structop_1_1_wrapper_struct_hand.html#a2759e92ee811d7a8eb69e1b7eba29d08',1,'op::WrapperStructHand::detector()']]], + ['displaymode_2335',['displayMode',['../structop_1_1_wrapper_struct_gui.html#a2a979a7daa368cc847ae6d9a168ff556',1,'op::WrapperStructGui']]] +]; diff --git a/web/html/doc/search/variables_4.html b/web/html/doc/search/variables_4.html new file mode 100644 index 000000000..a4b6506bb --- /dev/null +++ b/web/html/doc/search/variables_4.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_4.js b/web/html/doc/search/variables_4.js new file mode 100644 index 000000000..663f18b30 --- /dev/null +++ b/web/html/doc/search/variables_4.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['elementrendered_2336',['elementRendered',['../structop_1_1_datum.html#a35212700ef2a2ac290a6666e2993a192',1,'op::Datum']]], + ['enable_2337',['enable',['../structop_1_1_wrapper_struct_face.html#a8fadeca500dde0df2a35f864bf05ee05',1,'op::WrapperStructFace::enable()'],['../structop_1_1_wrapper_struct_hand.html#a667ad7eed9f4f96b460f331d25f3d87f',1,'op::WrapperStructHand::enable()']]], + ['enablegooglelogging_2338',['enableGoogleLogging',['../structop_1_1_wrapper_struct_pose.html#a782ba82c6aeabea8fa625042fdf09408',1,'op::WrapperStructPose']]] +]; diff --git a/web/html/doc/search/variables_5.html b/web/html/doc/search/variables_5.html new file mode 100644 index 000000000..7e345d16c --- /dev/null +++ b/web/html/doc/search/variables_5.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_5.js b/web/html/doc/search/variables_5.js new file mode 100644 index 000000000..837abb902 --- /dev/null +++ b/web/html/doc/search/variables_5.js @@ -0,0 +1,27 @@ +var searchData= +[ + ['f135_2339',['F135',['../namespaceop.html#a593bb53120d8db14cab814dfb5d9ed2c',1,'op']]], + ['face_5fccn_5fdecrease_5ffactor_2340',['FACE_CCN_DECREASE_FACTOR',['../namespaceop.html#aa6701cc08e1a8651798ef3bf8437375b',1,'op']]], + ['face_5fcolors_5frender_2341',['FACE_COLORS_RENDER',['../namespaceop.html#a3fbae1778780ae5bf4ffcc84cdef1870',1,'op']]], + ['face_5fdefault_5falpha_5fheat_5fmap_2342',['FACE_DEFAULT_ALPHA_HEAT_MAP',['../namespaceop.html#a4f191afed46fea5e3ce5b2a8756e1ddd',1,'op']]], + ['face_5fdefault_5falpha_5fkeypoint_2343',['FACE_DEFAULT_ALPHA_KEYPOINT',['../namespaceop.html#a15f6c39797cee87f6aa941d93f22b78b',1,'op']]], + ['face_5fmax_5ffaces_2344',['FACE_MAX_FACES',['../namespaceop.html#a9b4b92c621cc5962a72898899d2f2534',1,'op']]], + ['face_5fnumber_5fparts_2345',['FACE_NUMBER_PARTS',['../namespaceop.html#a8a05bdc38612c38e28b96bba5b4679b8',1,'op']]], + ['face_5fpairs_5frender_2346',['FACE_PAIRS_RENDER',['../namespaceop.html#a1245f62cf98c4ee7591dfc8807ef355d',1,'op']]], + ['face_5fprototxt_2347',['FACE_PROTOTXT',['../namespaceop.html#a4d07868d77fb11253b413ed579e04c22',1,'op']]], + ['face_5fscales_5frender_2348',['FACE_SCALES_RENDER',['../namespaceop.html#a00c56c20997f734b2bd44d6f85b86cf0',1,'op']]], + ['face_5ftrained_5fmodel_2349',['FACE_TRAINED_MODEL',['../namespaceop.html#abd0ef2306478c3295283e7f1b59e3aff',1,'op']]], + ['faceheatmaps_2350',['faceHeatMaps',['../structop_1_1_datum.html#ae0c0f33a6b75c7f47e11112dd33f23c1',1,'op::Datum']]], + ['facekeypoints_2351',['faceKeypoints',['../structop_1_1_datum.html#aebd19bf50725a5cd87de1efd96f6ebfe',1,'op::Datum']]], + ['facekeypoints3d_2352',['faceKeypoints3D',['../structop_1_1_datum.html#a9a44196a197d5c050e626efe8b016e84',1,'op::Datum']]], + ['facerectangles_2353',['faceRectangles',['../structop_1_1_datum.html#a0b2f6955a1751fc79b107789272effad',1,'op::Datum']]], + ['fpsmax_2354',['fpsMax',['../structop_1_1_wrapper_struct_pose.html#a16c4fb26e6ce76dfa577e0f4b5747733',1,'op::WrapperStructPose']]], + ['framefirst_2355',['frameFirst',['../structop_1_1_wrapper_struct_input.html#acc72b8efe09ec3888823ed5680a19fe4',1,'op::WrapperStructInput']]], + ['frameflip_2356',['frameFlip',['../structop_1_1_wrapper_struct_input.html#a5ee9722814fe2b5a695511cabd12b613',1,'op::WrapperStructInput']]], + ['framelast_2357',['frameLast',['../structop_1_1_wrapper_struct_input.html#a5cffb282052bdd812217e54f0b2ec7d5',1,'op::WrapperStructInput']]], + ['framenumber_2358',['frameNumber',['../structop_1_1_datum.html#a8b930d61467f98702ebea68f39fc762b',1,'op::Datum']]], + ['framerotate_2359',['frameRotate',['../structop_1_1_wrapper_struct_input.html#a86df98e50b680b30afe100d8b2b50685',1,'op::WrapperStructInput']]], + ['framesrepeat_2360',['framesRepeat',['../structop_1_1_wrapper_struct_input.html#a7c80f6a3687696ba30d3ce0902ac162f',1,'op::WrapperStructInput']]], + ['framestep_2361',['frameStep',['../structop_1_1_wrapper_struct_input.html#ac4349e123d359f436cc01d4068231dc2',1,'op::WrapperStructInput']]], + ['fullscreen_2362',['fullScreen',['../structop_1_1_wrapper_struct_gui.html#ac1d393d3ce6be9304017c1aa3afd8f13',1,'op::WrapperStructGui']]] +]; diff --git a/web/html/doc/search/variables_6.html b/web/html/doc/search/variables_6.html new file mode 100644 index 000000000..7d48e75e2 --- /dev/null +++ b/web/html/doc/search/variables_6.html @@ -0,0 
+1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_6.js b/web/html/doc/search/variables_6.js new file mode 100644 index 000000000..1d8dfdf28 --- /dev/null +++ b/web/html/doc/search/variables_6.js @@ -0,0 +1,6 @@ +var searchData= +[ + ['gpunumber_2363',['gpuNumber',['../structop_1_1_wrapper_struct_pose.html#a536ea76d50e94d513066e9e5767d0c03',1,'op::WrapperStructPose']]], + ['gpunumberstart_2364',['gpuNumberStart',['../structop_1_1_wrapper_struct_pose.html#a8be188d871061079432ead77b278fe0d',1,'op::WrapperStructPose']]], + ['guiverbose_2365',['guiVerbose',['../structop_1_1_wrapper_struct_gui.html#a9dbb0bfce2593b0a560ed738e11708ce',1,'op::WrapperStructGui']]] +]; diff --git a/web/html/doc/search/variables_7.html b/web/html/doc/search/variables_7.html new file mode 100644 index 000000000..5c2634092 --- /dev/null +++ b/web/html/doc/search/variables_7.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_7.js b/web/html/doc/search/variables_7.js new file mode 100644 index 000000000..689817f73 --- /dev/null +++ b/web/html/doc/search/variables_7.js @@ -0,0 +1,21 @@ +var searchData= +[ + ['h135_2366',['H135',['../namespaceop.html#ae37c577c1054c89da4a6736342d491aa',1,'op']]], + ['hand_5fccn_5fdecrease_5ffactor_2367',['HAND_CCN_DECREASE_FACTOR',['../namespaceop.html#aed0d108f5ada623eeb0ed41f896f8e97',1,'op']]], + ['hand_5fcolors_5frender_2368',['HAND_COLORS_RENDER',['../namespaceop.html#a450bb646e7573322d8f622bfdbab4833',1,'op']]], + ['hand_5fdefault_5falpha_5fheat_5fmap_2369',['HAND_DEFAULT_ALPHA_HEAT_MAP',['../namespaceop.html#a76c1f1ea90b73e13e93f72413b3cab0e',1,'op']]], + ['hand_5fdefault_5falpha_5fkeypoint_2370',['HAND_DEFAULT_ALPHA_KEYPOINT',['../namespaceop.html#aa8cc53d2fe5353f9d87d50c32a8c1a95',1,'op']]], + ['hand_5fmax_5fhands_2371',['HAND_MAX_HANDS',['../namespaceop.html#a182585e2e944cdb62f3dededdd85d1fc',1,'op']]], + ['hand_5fnumber_5fparts_2372',['HAND_NUMBER_PARTS',['../namespaceop.html#a41b6fb82924c5532cf10151e6ce497f2',1,'op']]], + ['hand_5fpairs_5frender_2373',['HAND_PAIRS_RENDER',['../namespaceop.html#a335d707e98d311d39d9a9dab0e325391',1,'op']]], + ['hand_5fprototxt_2374',['HAND_PROTOTXT',['../namespaceop.html#a3fe70bd1eacdd78aef3344c83533ffc7',1,'op']]], + ['hand_5fscales_5frender_2375',['HAND_SCALES_RENDER',['../namespaceop.html#a4e9bbc2167923763c5982d6d1f41f560',1,'op']]], + ['hand_5ftrained_5fmodel_2376',['HAND_TRAINED_MODEL',['../namespaceop.html#ac13af59538bcb8a1709f20010681d1c7',1,'op']]], + ['handheatmaps_2377',['handHeatMaps',['../structop_1_1_datum.html#aef6c478313691ab5101664c1df55aa58',1,'op::Datum']]], + ['handkeypoints_2378',['handKeypoints',['../structop_1_1_datum.html#a59d455dbddc50d700809c5e102c40d4e',1,'op::Datum']]], + ['handkeypoints3d_2379',['handKeypoints3D',['../structop_1_1_datum.html#a27bb38102b5ebecd9b13a3619e658316',1,'op::Datum']]], + ['handrectangles_2380',['handRectangles',['../structop_1_1_datum.html#a52d75e3273490624414f0602785bb608',1,'op::Datum']]], + ['heatmapscalemode_2381',['heatMapScaleMode',['../structop_1_1_wrapper_struct_pose.html#a2a5cceaf05cf228b47d2b001e05efeb8',1,'op::WrapperStructPose']]], + ['heatmaptypes_2382',['heatMapTypes',['../structop_1_1_wrapper_struct_pose.html#aa459f2f26c1f1a929af55b8c2d39ccf6',1,'op::WrapperStructPose']]], + ['height_2383',['height',['../structop_1_1_rectangle.html#a5db9f0e8c946d837a1d351cc0bc72811',1,'op::Rectangle']]] +]; diff --git a/web/html/doc/search/variables_8.html b/web/html/doc/search/variables_8.html new file mode 100644 index 000000000..dc9ec54a5 --- /dev/null +++ b/web/html/doc/search/variables_8.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_8.js b/web/html/doc/search/variables_8.js new file mode 100644 index 000000000..b835dcee4 --- /dev/null +++ b/web/html/doc/search/variables_8.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['id_2384',['id',['../structop_1_1_datum.html#a65deddd49d0fbca81f367198fc600015',1,'op::Datum']]], + ['identification_2385',['identification',['../structop_1_1_wrapper_struct_extra.html#a08578de8a074415df3e645d3ddb27b8b',1,'op::WrapperStructExtra']]], + ['ikthreads_2386',['ikThreads',['../structop_1_1_wrapper_struct_extra.html#ad41edf2717e5446a250efc05512ee07f',1,'op::WrapperStructExtra']]], + ['inputnetdata_2387',['inputNetData',['../structop_1_1_datum.html#a46ff336119fd0d67c8223b1a9371731d',1,'op::Datum']]] +]; diff --git a/web/html/doc/search/variables_9.html b/web/html/doc/search/variables_9.html new file mode 100644 index 000000000..7b0147509 --- /dev/null +++ b/web/html/doc/search/variables_9.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_9.js b/web/html/doc/search/variables_9.js new file mode 100644 index 000000000..5864561ee --- /dev/null +++ b/web/html/doc/search/variables_9.js @@ -0,0 +1,4 @@ +var searchData= +[ + ['keypointscalemode_2388',['keypointScaleMode',['../structop_1_1_wrapper_struct_pose.html#a054c88e977084707e80eb31dd0a658ab',1,'op::WrapperStructPose']]] +]; diff --git a/web/html/doc/search/variables_a.html b/web/html/doc/search/variables_a.html new file mode 100644 index 000000000..52a724d19 --- /dev/null +++ b/web/html/doc/search/variables_a.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_a.js b/web/html/doc/search/variables_a.js new file mode 100644 index 000000000..fdea3a1f9 --- /dev/null +++ b/web/html/doc/search/variables_a.js @@ -0,0 +1,34 @@ +var searchData= +[ + ['maximizepositives_2389',['maximizePositives',['../structop_1_1_wrapper_struct_pose.html#a84edcbf2237d579adc88badaa17c9795',1,'op::WrapperStructPose']]], + ['mblendoriginalframe_2390',['mBlendOriginalFrame',['../classop_1_1_renderer.html#a88449a7c29a48e157cd6b16089825be7',1,'op::Renderer']]], + ['mconditionvariable_2391',['mConditionVariable',['../classop_1_1_queue_base.html#a8b5e59161a0b175d12955b552a90a47f',1,'op::QueueBase']]], + ['mdisplaymode_2392',['mDisplayMode',['../classop_1_1_gui.html#a5b95cbfa7cd4018977f4eb1fc095823b',1,'op::Gui']]], + ['mdisplaymodeoriginal_2393',['mDisplayModeOriginal',['../classop_1_1_gui.html#a94cfbf759e88467bfcab18fcd2c987f2',1,'op::Gui']]], + ['menabled_2394',['mEnabled',['../classop_1_1_face_extractor_net.html#a637f9c4c19e110be435cd05052248f86',1,'op::FaceExtractorNet::mEnabled()'],['../classop_1_1_hand_extractor_net.html#a2ee9d38650ed3138fa74fae2cad4bd77',1,'op::HandExtractorNet::mEnabled()']]], + ['mfaceimagecrop_2395',['mFaceImageCrop',['../classop_1_1_face_extractor_net.html#ae18226cef1478a929df9061c7d699c6f',1,'op::FaceExtractorNet']]], + ['mfacekeypoints_2396',['mFaceKeypoints',['../classop_1_1_face_extractor_net.html#a5d3437e6a4a0fd834232b0afaab95a8a',1,'op::FaceExtractorNet']]], + ['mhandimagecrop_2397',['mHandImageCrop',['../classop_1_1_hand_extractor_net.html#a0981f4dfd15ce4a13de9d166cad9e1d4',1,'op::HandExtractorNet']]], + ['mhandkeypoints_2398',['mHandKeypoints',['../classop_1_1_hand_extractor_net.html#a7f97a5b842d20d3d37d3469418faac7b',1,'op::HandExtractorNet']]], + ['mheatmaps_2399',['mHeatMaps',['../classop_1_1_face_extractor_net.html#a43bd29f8c1fc0dbef051bd574df2deca',1,'op::FaceExtractorNet::mHeatMaps()'],['../classop_1_1_hand_extractor_net.html#a5c4174ed2c09ff7c15edfc5d971f4aef',1,'op::HandExtractorNet::mHeatMaps()']]], + ['mheatmapscalemode_2400',['mHeatMapScaleMode',['../classop_1_1_face_extractor_net.html#aa3f6566e8b857262f57e18a88c90b9be',1,'op::FaceExtractorNet::mHeatMapScaleMode()'],['../classop_1_1_hand_extractor_net.html#af03c8872258c644086bda26a3aaf95b5',1,'op::HandExtractorNet::mHeatMapScaleMode()']]], + ['mheatmaptypes_2401',['mHeatMapTypes',['../classop_1_1_face_extractor_net.html#a3bf177dbf1a3effbe6b15545e6102d6e',1,'op::FaceExtractorNet::mHeatMapTypes()'],['../classop_1_1_hand_extractor_net.html#aaf0386c8c15a37cf79e9f3f4b1ced2e8',1,'op::HandExtractorNet::mHeatMapTypes()']]], + ['minviews3d_2402',['minViews3d',['../structop_1_1_wrapper_struct_extra.html#ae8a3562b010c4fa31e6a9722947301c6',1,'op::WrapperStructExtra']]], + ['mmaxpopperspushers_2403',['mMaxPoppersPushers',['../classop_1_1_queue_base.html#a1d55f40e032cd5d43d63ba02040b3117',1,'op::QueueBase']]], + ['mmultiscalenumberandrange_2404',['mMultiScaleNumberAndRange',['../classop_1_1_hand_extractor_net.html#a270f22a05dbae6d156d79f0386cfbf4b',1,'op::HandExtractorNet']]], + ['mmutex_2405',['mMutex',['../classop_1_1_queue_base.html#a22c5e2964e9d9c18a9f02b8d2e0f30b4',1,'op::QueueBase']]], + 
['mnetoutputsize_2406',['mNetOutputSize',['../classop_1_1_face_extractor_net.html#acf72945f62375b6ac8939c463a616f4a',1,'op::FaceExtractorNet::mNetOutputSize()'],['../classop_1_1_hand_extractor_net.html#ac5e36cd33696a684a4447acccec28fdd',1,'op::HandExtractorNet::mNetOutputSize()'],['../classop_1_1_pose_extractor_net.html#aab49f9af9f5d7e4e64957dc0feb60ca7',1,'op::PoseExtractorNet::mNetOutputSize()']]], + ['modelfolder_2407',['modelFolder',['../structop_1_1_wrapper_struct_pose.html#a0f00648621ca97fde61287be23671523',1,'op::WrapperStructPose']]], + ['mpartindextoname_2408',['mPartIndexToName',['../classop_1_1_pose_renderer.html#aecc0a9296ca880ad6ceaf38ecd8c3c53',1,'op::PoseRenderer']]], + ['mpopisstopped_2409',['mPopIsStopped',['../classop_1_1_queue_base.html#a77bf3592bbb6ac586cd4c2b0aea98e62',1,'op::QueueBase']]], + ['mpoppers_2410',['mPoppers',['../classop_1_1_queue_base.html#a04f7160c199f90b8f8e91ddfd40e92fb',1,'op::QueueBase']]], + ['mposekeypoints_2411',['mPoseKeypoints',['../classop_1_1_pose_extractor_net.html#aaaa4c619868bbf6306a549280002a2c6',1,'op::PoseExtractorNet']]], + ['mposemodel_2412',['mPoseModel',['../classop_1_1_pose_extractor_net.html#a8595789b244399ecd9c4b2a774f2c74b',1,'op::PoseExtractorNet::mPoseModel()'],['../classop_1_1_pose_renderer.html#a9fea1f9ce47b4b5f1015cae13f4ddcb1',1,'op::PoseRenderer::mPoseModel()']]], + ['mposescores_2413',['mPoseScores',['../classop_1_1_pose_extractor_net.html#a528c3056546b0759fafb249a02edd1b6',1,'op::PoseExtractorNet']]], + ['mpushers_2414',['mPushers',['../classop_1_1_queue_base.html#a7c382bb98f5b769cde37b06d67cb0530',1,'op::QueueBase']]], + ['mpushisstopped_2415',['mPushIsStopped',['../classop_1_1_queue_base.html#af2c0f21c6b4f4639661b59aa247ae407',1,'op::QueueBase']]], + ['mrenderthreshold_2416',['mRenderThreshold',['../classop_1_1_renderer.html#adc4cd0a62008325c5c7df6df2f95a167',1,'op::Renderer']]], + ['mscalenettooutput_2417',['mScaleNetToOutput',['../classop_1_1_pose_extractor_net.html#a67ea32116dfaff15cc16e5a0a2bef822',1,'op::PoseExtractorNet']]], + ['mshowgooglyeyes_2418',['mShowGooglyEyes',['../classop_1_1_renderer.html#ace2490fa3c5a87443e4d1e64007cd1ff',1,'op::Renderer']]], + ['mtqueue_2419',['mTQueue',['../classop_1_1_queue_base.html#a49c1d6740f2ce7f26eae606f109b5738',1,'op::QueueBase']]] +]; diff --git a/web/html/doc/search/variables_b.html b/web/html/doc/search/variables_b.html new file mode 100644 index 000000000..f376b27af --- /dev/null +++ b/web/html/doc/search/variables_b.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_b.js b/web/html/doc/search/variables_b.js new file mode 100644 index 000000000..1e876a9dd --- /dev/null +++ b/web/html/doc/search/variables_b.js @@ -0,0 +1,10 @@ +var searchData= +[ + ['name_2420',['name',['../structop_1_1_datum.html#ae6adcdacea12a9cfa445cf0cac1985b0',1,'op::Datum']]], + ['netinputsize_2421',['netInputSize',['../structop_1_1_wrapper_struct_face.html#a9845712fd6ebb66fccb0c1647e3491a0',1,'op::WrapperStructFace::netInputSize()'],['../structop_1_1_wrapper_struct_hand.html#a6a54d5b5766d23412c87bd10c26cb291',1,'op::WrapperStructHand::netInputSize()'],['../structop_1_1_wrapper_struct_pose.html#acff912f14ba3c0ba706ea99e1cef790e',1,'op::WrapperStructPose::netInputSize()']]], + ['netinputsizedynamicbehavior_2422',['netInputSizeDynamicBehavior',['../structop_1_1_wrapper_struct_pose.html#a8bafec1b3ee2f2a6473fd604925e265a',1,'op::WrapperStructPose']]], + ['netinputsizes_2423',['netInputSizes',['../structop_1_1_datum.html#a32d164c01acf6b4f7eb1323d74edbdca',1,'op::Datum']]], + ['netoutputsize_2424',['netOutputSize',['../structop_1_1_datum.html#ac734d4262a5a7892c6d4094cdd2bcc7c',1,'op::Datum']]], + ['numberpeoplemax_2425',['numberPeopleMax',['../structop_1_1_wrapper_struct_pose.html#a02c4ab6b56e4da4b3ed0da4eae8ac0fc',1,'op::WrapperStructPose']]], + ['numberviews_2426',['numberViews',['../structop_1_1_wrapper_struct_input.html#adac2c3e58e1e75a96e52904762c37c42',1,'op::WrapperStructInput']]] +]; diff --git a/web/html/doc/search/variables_c.html b/web/html/doc/search/variables_c.html new file mode 100644 index 000000000..6019eba96 --- /dev/null +++ b/web/html/doc/search/variables_c.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_c.js b/web/html/doc/search/variables_c.js new file mode 100644 index 000000000..0d660a436 --- /dev/null +++ b/web/html/doc/search/variables_c.js @@ -0,0 +1,8 @@ +var searchData= +[ + ['open_5fpose_5fname_5fand_5fversion_2427',['OPEN_POSE_NAME_AND_VERSION',['../macros_8hpp.html#adcf24c45000a4f44f355f1cc3062ea49',1,'macros.hpp']]], + ['open_5fpose_5fname_5fstring_2428',['OPEN_POSE_NAME_STRING',['../macros_8hpp.html#afda430d83b9513af7270f1d680bf5471',1,'macros.hpp']]], + ['open_5fpose_5fversion_5fstring_2429',['OPEN_POSE_VERSION_STRING',['../macros_8hpp.html#a7de0b663a3aa8043a346ebf2c411bda3',1,'macros.hpp']]], + ['outputdata_2430',['outputData',['../structop_1_1_datum.html#a42b953c082f479eddc527da9a3a4cc75',1,'op::Datum']]], + ['outputsize_2431',['outputSize',['../structop_1_1_wrapper_struct_pose.html#a80ead0f411ddab86f643345e4effe805',1,'op::WrapperStructPose']]] +]; diff --git a/web/html/doc/search/variables_d.html b/web/html/doc/search/variables_d.html new file mode 100644 index 000000000..f61ae7511 --- /dev/null +++ b/web/html/doc/search/variables_d.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_d.js b/web/html/doc/search/variables_d.js new file mode 100644 index 000000000..405fe3bae --- /dev/null +++ b/web/html/doc/search/variables_d.js @@ -0,0 +1,18 @@ +var searchData= +[ + ['pose_5fdefault_5falpha_5fheat_5fmap_2432',['POSE_DEFAULT_ALPHA_HEAT_MAP',['../namespaceop.html#af45cddacd69fff73a4ea4acbbbac43e0',1,'op']]], + ['pose_5fdefault_5falpha_5fkeypoint_2433',['POSE_DEFAULT_ALPHA_KEYPOINT',['../namespaceop.html#a21fcb98366f6ea8895fc7f527f232db5',1,'op']]], + ['pose_5fmax_5fpeople_2434',['POSE_MAX_PEOPLE',['../namespaceop.html#a522d4552d2aeabe367f4d3bf371e6b3e',1,'op']]], + ['posecandidates_2435',['poseCandidates',['../structop_1_1_datum.html#a55dd5354e09696ed6896923755f1c85b',1,'op::Datum']]], + ['poseheatmaps_2436',['poseHeatMaps',['../structop_1_1_datum.html#a5429e97e0ab9b0e2209a3947af668381',1,'op::Datum']]], + ['poseids_2437',['poseIds',['../structop_1_1_datum.html#aba90dccffb5a830296231bd430c4766c',1,'op::Datum']]], + ['posekeypoints_2438',['poseKeypoints',['../structop_1_1_datum.html#a6d629b1f6f7b958fe4cf2ef4cdf57c5b',1,'op::Datum']]], + ['posekeypoints3d_2439',['poseKeypoints3D',['../structop_1_1_datum.html#a652ac1e7de13ec9a886dece75848cfea',1,'op::Datum']]], + ['posemode_2440',['poseMode',['../structop_1_1_wrapper_struct_pose.html#ad0f4992658b9d624184dcecf79e54e43',1,'op::WrapperStructPose']]], + ['posemodel_2441',['poseModel',['../structop_1_1_wrapper_struct_pose.html#a35147b6fb9e300d79b71637793053a1b',1,'op::WrapperStructPose']]], + ['posenetoutput_2442',['poseNetOutput',['../structop_1_1_datum.html#a8f6f5fd181abe3bdfd6f7bdf8a165782',1,'op::Datum']]], + ['posescores_2443',['poseScores',['../structop_1_1_datum.html#afb117821de7aff9ac3c219ef3bbc0c14',1,'op::Datum']]], + ['producerstring_2444',['producerString',['../structop_1_1_wrapper_struct_input.html#a6aec09a94fdf393d6ab3b23857c376da',1,'op::WrapperStructInput']]], + ['producertype_2445',['producerType',['../structop_1_1_wrapper_struct_input.html#acd6a460d6c0a64bc818539b67fcafea7',1,'op::WrapperStructInput']]], + ['prototxtpath_2446',['protoTxtPath',['../structop_1_1_wrapper_struct_pose.html#a8a6273145f5e2f2ccade81865cbdfecb',1,'op::WrapperStructPose']]] +]; diff --git a/web/html/doc/search/variables_e.html b/web/html/doc/search/variables_e.html new file mode 100644 index 000000000..7bfd37215 --- /dev/null +++ b/web/html/doc/search/variables_e.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_e.js b/web/html/doc/search/variables_e.js new file mode 100644 index 000000000..46ba04a7f --- /dev/null +++ b/web/html/doc/search/variables_e.js @@ -0,0 +1,7 @@ +var searchData= +[ + ['realtimeprocessing_2447',['realTimeProcessing',['../structop_1_1_wrapper_struct_input.html#a2eeea9ee711a1dcbec99c3dc871fbc47',1,'op::WrapperStructInput']]], + ['reconstruct3d_2448',['reconstruct3d',['../structop_1_1_wrapper_struct_extra.html#aa157c20ca959fd952a85866a119183ca',1,'op::WrapperStructExtra']]], + ['rendermode_2449',['renderMode',['../structop_1_1_wrapper_struct_face.html#accc6e564598130b9bf0a6d0ec9c304c4',1,'op::WrapperStructFace::renderMode()'],['../structop_1_1_wrapper_struct_hand.html#a90ddd24ee55b6aabd9a1728ccd91525e',1,'op::WrapperStructHand::renderMode()'],['../structop_1_1_wrapper_struct_pose.html#ad6b5ea0cef8eb81d20ab39099ba7716e',1,'op::WrapperStructPose::renderMode()']]], + ['renderthreshold_2450',['renderThreshold',['../structop_1_1_wrapper_struct_face.html#a982e3f1a13358a522e1882d17cb80d57',1,'op::WrapperStructFace::renderThreshold()'],['../structop_1_1_wrapper_struct_hand.html#a9655c0dfa83eefde174d09e622482089',1,'op::WrapperStructHand::renderThreshold()'],['../structop_1_1_wrapper_struct_pose.html#a322ff95b6a2838fe0d55afb28d2a4224',1,'op::WrapperStructPose::renderThreshold()']]] +]; diff --git a/web/html/doc/search/variables_f.html b/web/html/doc/search/variables_f.html new file mode 100644 index 000000000..d97920d08 --- /dev/null +++ b/web/html/doc/search/variables_f.html @@ -0,0 +1,37 @@ + + + + + + + + + + +
    +
    Loading...
    +
    + +
    Searching...
    +
    No Matches
    + +
    + + diff --git a/web/html/doc/search/variables_f.js b/web/html/doc/search/variables_f.js new file mode 100644 index 000000000..569bd6175 --- /dev/null +++ b/web/html/doc/search/variables_f.js @@ -0,0 +1,15 @@ +var searchData= +[ + ['scalegap_2451',['scaleGap',['../structop_1_1_wrapper_struct_pose.html#a646ae142f821411d22d772b76960d585',1,'op::WrapperStructPose']]], + ['scaleinputtonetinputs_2452',['scaleInputToNetInputs',['../structop_1_1_datum.html#a0e416771f275be98c83aaff01e482a71',1,'op::Datum']]], + ['scaleinputtooutput_2453',['scaleInputToOutput',['../structop_1_1_datum.html#a6cf96c250c236a03f13da69e1d4336d9',1,'op::Datum']]], + ['scalenettooutput_2454',['scaleNetToOutput',['../structop_1_1_datum.html#a44af7162e180c6856ce909057f43d8e1',1,'op::Datum']]], + ['scalerange_2455',['scaleRange',['../structop_1_1_wrapper_struct_hand.html#adaea15e182e5c75129293873cd94e35f',1,'op::WrapperStructHand']]], + ['scalesnumber_2456',['scalesNumber',['../structop_1_1_wrapper_struct_hand.html#aea6263dc96708b11fab72416d810b3d6',1,'op::WrapperStructHand::scalesNumber()'],['../structop_1_1_wrapper_struct_pose.html#ad23a9c103a60709eed9d7b7381828e5e',1,'op::WrapperStructPose::scalesNumber()']]], + ['spelementtorender_2457',['spElementToRender',['../classop_1_1_renderer.html#aca8ebf0c0a50b87f0be82afa090155a0',1,'op::Renderer']]], + ['spgpumemory_2458',['spGpuMemory',['../classop_1_1_gpu_renderer.html#a5d729aab549908c758953be742dd0115',1,'op::GpuRenderer']]], + ['spisrunning_2459',['spIsRunning',['../classop_1_1_gui.html#a0ad7be7018e634769da8d22d60e7edc0',1,'op::Gui']]], + ['spnumberelementstorender_2460',['spNumberElementsToRender',['../classop_1_1_renderer.html#a3e04644546dd9990a16d0b6861b60553',1,'op::Renderer']]], + ['subid_2461',['subId',['../structop_1_1_datum.html#aeb6d10e4fa40a20d38118bf1be3112d8',1,'op::Datum']]], + ['subidmax_2462',['subIdMax',['../structop_1_1_datum.html#ab87c493347456b592b616e9f656a5d60',1,'op::Datum']]] +]; diff --git a/web/html/doc/spinnaker_wrapper_8hpp.html b/web/html/doc/spinnaker_wrapper_8hpp.html new file mode 100644 index 000000000..6aedfe983 --- /dev/null +++ b/web/html/doc/spinnaker_wrapper_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/producer/spinnakerWrapper.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
OpenPose + 1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    spinnakerWrapper.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::SpinnakerWrapper
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/spinnaker_wrapper_8hpp_source.html b/web/html/doc/spinnaker_wrapper_8hpp_source.html new file mode 100644 index 000000000..ad393eb87 --- /dev/null +++ b/web/html/doc/spinnaker_wrapper_8hpp_source.html @@ -0,0 +1,156 @@ + + + + + + + +OpenPose: include/openpose/producer/spinnakerWrapper.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
OpenPose + 1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    spinnakerWrapper.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_PRODUCER_SPINNAKER_WRAPPER_HPP
    +
    2 #define OPENPOSE_PRODUCER_SPINNAKER_WRAPPER_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
    + +
    13  {
    +
    14  public:
    +
    19  explicit SpinnakerWrapper(const std::string& cameraParameterPath, const Point<int>& cameraResolution,
    +
    20  const bool undistortImage, const int cameraIndex = -1);
    +
    21 
    +
    22  virtual ~SpinnakerWrapper();
    +
    23 
    +
    24  std::vector<Matrix> getRawFrames();
    +
    25 
    +
    30  std::vector<Matrix> getCameraMatrices() const;
    +
    31 
    +
    32  std::vector<Matrix> getCameraExtrinsics() const;
    +
    33 
    +
    34  std::vector<Matrix> getCameraIntrinsics() const;
    +
    35 
    + +
    37 
    +
    38  bool isOpened() const;
    +
    39 
    +
    40  void release();
    +
    41 
    +
    42  private:
    +
    43  // PIMPL idiom
    +
    44  // http://www.cppsamples.com/common-tasks/pimpl.html
    +
    45  struct ImplSpinnakerWrapper;
    +
    46  std::shared_ptr<ImplSpinnakerWrapper> upImpl;
    +
    47 
    + +
    49  };
    +
    50 }
    +
    51 
    +
    52 #endif // OPENPOSE_PRODUCER_SPINNAKER_WRAPPER_HPP
    + +
    std::vector< Matrix > getRawFrames()
    +
    std::vector< Matrix > getCameraExtrinsics() const
    +
    bool isOpened() const
    +
    SpinnakerWrapper(const std::string &cameraParameterPath, const Point< int > &cameraResolution, const bool undistortImage, const int cameraIndex=-1)
    + +
    std::vector< Matrix > getCameraMatrices() const
    +
    virtual ~SpinnakerWrapper()
    +
    Point< int > getResolution() const
    +
    std::vector< Matrix > getCameraIntrinsics() const
    + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + + +
    +
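The header above only declares the interface, so the following is a minimal, hypothetical usage sketch based solely on those declarations. It assumes OpenPose was compiled with FLIR/Spinnaker support; the camera parameter path and resolution shown are illustrative placeholders, not documented defaults.

    // Hypothetical sketch using only the op::SpinnakerWrapper methods declared above.
    #include <openpose/producer/spinnakerWrapper.hpp>

    int main()
    {
        // Path and resolution are illustrative assumptions.
        op::SpinnakerWrapper spinnakerWrapper{
            "models/cameraParameters/flir/", op::Point<int>{1280, 1024},
            /*undistortImage*/ true, /*cameraIndex*/ -1};

        if (!spinnakerWrapper.isOpened())
            return -1;

        // One synchronized capture: one op::Matrix per connected camera.
        const auto rawFrames = spinnakerWrapper.getRawFrames();
        const auto cameraMatrices = spinnakerWrapper.getCameraMatrices();
        (void)rawFrames; (void)cameraMatrices;

        spinnakerWrapper.release();
        return 0;
    }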
    + + + + diff --git a/web/html/doc/splitbar.png b/web/html/doc/splitbar.png new file mode 100644 index 000000000..fe895f2c5 Binary files /dev/null and b/web/html/doc/splitbar.png differ diff --git a/web/html/doc/standalone__face__or__hand__keypoint__detector_8md.html b/web/html/doc/standalone__face__or__hand__keypoint__detector_8md.html new file mode 100644 index 000000000..0567cbd7e --- /dev/null +++ b/web/html/doc/standalone__face__or__hand__keypoint__detector_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/advanced/standalone_face_or_hand_keypoint_detector.md File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
OpenPose + 1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    doc/advanced/standalone_face_or_hand_keypoint_detector.md File Reference
    +
    +
    +
    +
    + + + + diff --git a/web/html/doc/standard_8hpp.html b/web/html/doc/standard_8hpp.html new file mode 100644 index 000000000..7080dcc7d --- /dev/null +++ b/web/html/doc/standard_8hpp.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/utilities/standard.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
OpenPose + 1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    standard.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + + + +

    +Functions

    template<typename T >
    bool op::vectorsAreEqual (const std::vector< T > &vectorA, const std::vector< T > &vectorB)
     
    template<typename T >
    std::vector< T > op::mergeVectors (const std::vector< T > &vectorA, const std::vector< T > &vectorB)
     
    +
    +
    + + + + diff --git a/web/html/doc/standard_8hpp.js b/web/html/doc/standard_8hpp.js new file mode 100644 index 000000000..0ca8bc1e2 --- /dev/null +++ b/web/html/doc/standard_8hpp.js @@ -0,0 +1,5 @@ +var standard_8hpp = +[ + [ "mergeVectors", "standard_8hpp.html#aa3a3e2acfb27ecbd187d01c8dcd41899", null ], + [ "vectorsAreEqual", "standard_8hpp.html#af63e418966741f7efebacc9519174a0a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/standard_8hpp_source.html b/web/html/doc/standard_8hpp_source.html new file mode 100644 index 000000000..fe23015bb --- /dev/null +++ b/web/html/doc/standard_8hpp_source.html @@ -0,0 +1,154 @@ + + + + + + + +OpenPose: include/openpose/utilities/standard.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
OpenPose + 1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    standard.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_UTILITIES_STANDARD_HPP
    +
    2 #define OPENPOSE_UTILITIES_STANDARD_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
    +
    8  template <typename T>
    +
    9  bool vectorsAreEqual(const std::vector<T>& vectorA, const std::vector<T>& vectorB)
    +
    10  {
    +
    11  try
    +
    12  {
    +
    13  if (vectorA.size() != vectorB.size())
    +
    14  return false;
    +
    15  else
    +
    16  {
    +
    17  for (auto i = 0u ; i < vectorA.size() ; i++)
    +
    18  if (vectorA[i] != vectorB[i])
    +
    19  return false;
    +
    20  return true;
    +
    21  }
    +
    22  }
    +
    23  catch (const std::exception& e)
    +
    24  {
    +
    25  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    26  return false;
    +
    27  }
    +
    28  }
    +
    29 
    +
    39  template <typename T>
    +
    40  std::vector<T> mergeVectors(const std::vector<T>& vectorA, const std::vector<T>& vectorB)
    +
    41  {
    +
    42  try
    +
    43  {
    +
    44  auto vectorToReturn(vectorA);
    +
    45  for (auto& tElement : vectorB)
    +
    46  vectorToReturn.emplace_back(tElement);
    +
    47  return vectorToReturn;
    +
    48  }
    +
    49  catch (const std::exception& e)
    +
    50  {
    +
    51  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    52  return std::vector<T>{};
    +
    53  }
    +
    54  }
    +
    55 }
    +
    56 
    +
    57 #endif // OPENPOSE_UTILITIES_STANDARD_HPP
    + + +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    std::vector< T > mergeVectors(const std::vector< T > &vectorA, const std::vector< T > &vectorB)
    Definition: standard.hpp:40
    +
    bool vectorsAreEqual(const std::vector< T > &vectorA, const std::vector< T > &vectorB)
    Definition: standard.hpp:9
    +
    +
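Since standard.hpp is fully listed above, here is a minimal sketch of how the two helpers behave; it assumes the translation unit links against the OpenPose library (needed for op::error).

    // Exercises op::vectorsAreEqual and op::mergeVectors as defined above.
    #include <cassert>
    #include <vector>
    #include <openpose/utilities/standard.hpp>

    int main()
    {
        const std::vector<int> vectorA{1, 2, 3};
        const std::vector<int> vectorB{4, 5};

        // mergeVectors concatenates the inputs: {1, 2, 3, 4, 5}.
        const auto merged = op::mergeVectors(vectorA, vectorB);
        assert(merged.size() == vectorA.size() + vectorB.size());

        // vectorsAreEqual compares sizes first, then element by element.
        assert(op::vectorsAreEqual(vectorA, vectorA));
        assert(!op::vectorsAreEqual(vectorA, vectorB));
        return 0;
    }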
    + + + + diff --git a/web/html/doc/structop_1_1_datum-members.html b/web/html/doc/structop_1_1_datum-members.html new file mode 100644 index 000000000..c37e27eea --- /dev/null +++ b/web/html/doc/structop_1_1_datum-members.html @@ -0,0 +1,150 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
OpenPose + 1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + + + + + + diff --git a/web/html/doc/structop_1_1_datum.html b/web/html/doc/structop_1_1_datum.html new file mode 100644 index 000000000..e5ec7c1ff --- /dev/null +++ b/web/html/doc/structop_1_1_datum.html @@ -0,0 +1,1174 @@ + + + + + + + +OpenPose: op::Datum Struct Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
OpenPose + 1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    op::Datum Struct Reference
    +
    +
    + +

    #include <datum.hpp>

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    +Public Member Functions

     Datum ()
     
     Datum (const Datum &datum)
     
    Datumoperator= (const Datum &datum)
     
     Datum (Datum &&datum)
     
    Datumoperator= (Datum &&datum)
     
    virtual ~Datum ()
     
    Datum clone () const
     
    bool operator< (const Datum &datum) const
     
    bool operator> (const Datum &datum) const
     
    bool operator<= (const Datum &datum) const
     
    bool operator>= (const Datum &datum) const
     
    bool operator== (const Datum &datum) const
     
    bool operator!= (const Datum &datum) const
     
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    +Public Attributes

    unsigned long long id
     
    unsigned long long subId
     
    unsigned long long subIdMax
     
    std::string name
     
    unsigned long long frameNumber
     
    Matrix cvInputData
     
    std::vector< Array< float > > inputNetData
     
    Array< float > outputData
     
    Matrix cvOutputData
     
    Matrix cvOutputData3D
     
    Array< float > poseKeypoints
     
    Array< long long > poseIds
     
    Array< float > poseScores
     
    Array< float > poseHeatMaps
     
    std::vector< std::vector< std::array< float, 3 > > > poseCandidates
     
    std::vector< Rectangle< float > > faceRectangles
     
    Array< float > faceKeypoints
     
    Array< float > faceHeatMaps
     
    std::vector< std::array< Rectangle< float >, 2 > > handRectangles
     
    std::array< Array< float >, 2 > handKeypoints
     
    std::array< Array< float >, 2 > handHeatMaps
     
    Array< float > poseKeypoints3D
     
    Array< float > faceKeypoints3D
     
    std::array< Array< float >, 2 > handKeypoints3D
     
    Matrix cameraMatrix
     
    Matrix cameraExtrinsics
     
    Matrix cameraIntrinsics
     
    Array< float > poseNetOutput
     
    std::vector< double > scaleInputToNetInputs
     
    std::vector< Point< int > > netInputSizes
     
    double scaleInputToOutput
     
    Point< int > netOutputSize
     
    double scaleNetToOutput
     
    std::pair< int, std::string > elementRendered
     
    +

    Detailed Description

    +

Datum: The OpenPose Basic Piece of Information Between Threads. Datum is one of the main OpenPose classes/structs. The workers and threads share by default a std::shared_ptr<std::vector<Datum>>. It contains all the parameters that the different workers and threads need to exchange.

    + +

    Definition at line 19 of file datum.hpp.

    +
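To make the shared-container convention described above concrete, here is a minimal sketch; the member values are purely illustrative and not part of the documented API.

    // Builds the std::shared_ptr<std::vector<op::Datum>> that workers exchange.
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    int main()
    {
        auto datumsPtr = std::make_shared<std::vector<op::Datum>>(1);
        op::Datum& datum = datumsPtr->at(0);
        datum.name = "frame_000000";  // std::string member (illustrative value)
        datum.frameNumber = 0ull;     // should match `id` for simple producers
        return 0;
    }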

    Constructor & Destructor Documentation

    + +

    ◆ Datum() [1/3]

    + +
    +
    + + + + + +
    + + + + + + + +
    op::Datum::Datum ()
    +
    +explicit
    +
    +

Default constructor. It simply initializes the struct: id is temporarily set to 0 and every other variable is assigned its default value.

    + +
    +
    + +

    ◆ Datum() [2/3]

    + +
    +
    + + + + + + + + +
    op::Datum::Datum (const Datumdatum)
    +
    +

Copy constructor. It performs a fast copy: for performance purposes, copying a Datum, Array<T>, or cv::Mat only copies the reference, so the copy still shares the same internal data. Modifying the copied element will modify the original one. Use clone() for a slower but real copy, similarly to cv::Mat and Array<T>.

    Parameters
    + + +
    datumDatum to be copied.
    +
    +
    + +
    +
    + +

    ◆ Datum() [3/3]

    + +
    +
    + + + + + + + + +
    op::Datum::Datum (Datum && datum)
    +
    +

    Move constructor. It destroys the original Datum to be moved.

    Parameters
    + + +
    datumDatum to be moved.
    +
    +
    + +
    +
    + +

    ◆ ~Datum()

    + +
    +
    + + + + + +
    + + + + + + + +
    virtual op::Datum::~Datum ()
    +
    +virtual
    +
    +

Destructor. Declared virtual so that Datum can be inherited from.

    + +
    +
    +

    Member Function Documentation

    + +

    ◆ clone()

    + +
    +
    + + + + + + + +
    Datum op::Datum::clone () const
    +
    +

    Clone function. Similar to cv::Mat::clone and Array<T>::clone. It performs a real but slow copy of the data, i.e., even if the copied element is modified, the original one is not.

    Returns
    The resulting Datum.
    + +
    +
    + +
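Because the distinction between the fast copy above and clone() matters in practice, here is a brief sketch of the difference (the helper function name is illustrative).

    // Shallow copy vs. deep copy of op::Datum, per the documentation above.
    #include <openpose/headers.hpp>

    void copySemanticsExample(const op::Datum& original)
    {
        // Copy constructor: fast copy that shares the internal data, so
        // modifying the copy would also modify `original`.
        op::Datum shallowCopy(original);

        // clone(): slower but real copy; later changes to `deepCopy` do not
        // affect `original`.
        op::Datum deepCopy = original.clone();

        (void)shallowCopy;
        (void)deepCopy;
    }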

    ◆ operator!=()

    + +
    +
    + + + + + +
    + + + + + + + + +
    bool op::Datum::operator!= (const Datumdatum) const
    +
    +inline
    +
    +

    Not equal comparison operator.

    Parameters
    + + +
    datumDatum to be compared.
    +
    +
    +
    Returns
    Whether the instance satisfies the condition with respect to datum.
    + +

    Definition at line 395 of file datum.hpp.

    + +
    +
    + +

    ◆ operator<()

    + +
    +
    + + + + + +
    + + + + + + + + +
    bool op::Datum::operator< (const Datumdatum) const
    +
    +inline
    +
    +

    Less comparison operator.

    Parameters
    + + +
    datumDatum to be compared.
    +
    +
    +
    Returns
    Whether the instance satisfies the condition with respect to datum.
    + +

    Definition at line 345 of file datum.hpp.

    + +
    +
    + +

    ◆ operator<=()

    + +
    +
    + + + + + +
    + + + + + + + + +
    bool op::Datum::operator<= (const Datumdatum) const
    +
    +inline
    +
    +

    Less or equal comparison operator.

    Parameters
    + + +
    datumDatum to be compared.
    +
    +
    +
    Returns
    Whether the instance satisfies the condition with respect to datum.
    + +

    Definition at line 365 of file datum.hpp.

    + +
    +
    + +

    ◆ operator=() [1/2]

    + +
    +
    + + + + + + + + +
    Datum& op::Datum::operator= (const Datumdatum)
    +
    +

    Copy assignment. Similar to Datum::Datum(const Datum& datum).

    Parameters
    + + +
    datumDatum to be copied.
    +
    +
    +
    Returns
    The resulting Datum.
    + +
    +
    + +

    ◆ operator=() [2/2]

    + +
    +
    + + + + + + + + +
    Datum& op::Datum::operator= (Datum && datum)
    +
    +

    Move assignment. Similar to Datum::Datum(Datum&& datum).

    Parameters
    + + +
    datumDatum to be moved.
    +
    +
    +
    Returns
    The resulting Datum.
    + +
    +
    + +

    ◆ operator==()

    + +
    +
    + + + + + +
    + + + + + + + + +
    bool op::Datum::operator== (const Datumdatum) const
    +
    +inline
    +
    +

    Equal comparison operator.

    Parameters
    + + +
    datumDatum to be compared.
    +
    +
    +
    Returns
    Whether the instance satisfies the condition with respect to datum.
    + +

    Definition at line 385 of file datum.hpp.

    + +
    +
    + +

    ◆ operator>()

    + +
    +
    + + + + + +
    + + + + + + + + +
    bool op::Datum::operator> (const Datumdatum) const
    +
    +inline
    +
    +

    Greater comparison operator.

    Parameters
    + + +
    datumDatum to be compared.
    +
    +
    +
    Returns
    Whether the instance satisfies the condition with respect to datum.
    + +

    Definition at line 355 of file datum.hpp.

    + +
    +
    + +

    ◆ operator>=()

    + +
    +
    + + + + + +
    + + + + + + + + +
    bool op::Datum::operator>= (const Datumdatum) const
    +
    +inline
    +
    +

    Greater or equal comparison operator.

    Parameters
    + + +
    datumDatum to be compared.
    +
    +
    +
    Returns
    Whether the instance satisfies the condition with respect to datum.
    + +

    Definition at line 375 of file datum.hpp.

    + +
    +
    +

    Member Data Documentation

    + +

    ◆ cameraExtrinsics

    Matrix op::Datum::cameraExtrinsics

    3x4 extrinsic parameters of the camera.

    Definition at line 204 of file datum.hpp.

    ◆ cameraIntrinsics

    Matrix op::Datum::cameraIntrinsics

    3x3 intrinsic parameters of the camera.

    Definition at line 209 of file datum.hpp.

    ◆ cameraMatrix

    Matrix op::Datum::cameraMatrix

    3x4 camera matrix of the camera (equivalent to cameraIntrinsics * cameraExtrinsics).

    Definition at line 199 of file datum.hpp.

    ◆ cvInputData

    Matrix op::Datum::cvInputData

    Original image to be processed in cv::Mat uchar format. Size: (input_width x input_height) x 3 channels.

    Definition at line 45 of file datum.hpp.
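
    For illustration, a hedged sketch of how an image loaded with OpenCV can be handed to a Datum through cvInputData; the OP_CV2OPCONSTMAT macro used below should be available when OpenPose is built with OpenCV support, and createDatum is a hypothetical helper.

        #include <memory>
        #include <opencv2/opencv.hpp>
        #include <openpose/headers.hpp>

        // Illustrative sketch: wrap an already-loaded OpenCV image into an op::Matrix
        // and hand it to a new Datum for processing.
        std::shared_ptr<op::Datum> createDatum(const cv::Mat& cvImage)
        {
            auto datumPtr = std::make_shared<op::Datum>();
            // OP_CV2OPCONSTMAT performs a shallow wrap of the cv::Mat (no pixel copy).
            datumPtr->cvInputData = OP_CV2OPCONSTMAT(cvImage);
            return datumPtr;
        }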

    ◆ cvOutputData

    Matrix op::Datum::cvOutputData

    Rendered image in cv::Mat uchar format. It has been resized to the desired output resolution (e.g., the resolution flag in the demo). If outputData is empty, cvOutputData will also be empty. Size: (output_height x output_width) x 3 channels.

    Definition at line 72 of file datum.hpp.

    ◆ cvOutputData3D

    Matrix op::Datum::cvOutputData3D

    Rendered 3D image in cv::Mat uchar format.

    Definition at line 77 of file datum.hpp.

    ◆ elementRendered

    std::pair<int, std::string> op::Datum::elementRendered

    Pair with the element key id of POSE_BODY_PART_MAPPING (see pose/poseParameters.hpp) and its mapped value (e.g., 1 and "Neck").

    Definition at line 250 of file datum.hpp.

    ◆ faceHeatMaps

    Array<float> op::Datum::faceHeatMaps

    Face pose heatmaps (face parts and/or background) for the whole image. Analogous to bodyHeatMaps, but applied to the face; however, there are no PAFs and the size is different. Size: #people x #face parts (70) x output_net_height x output_net_width.

    Definition at line 150 of file datum.hpp.

    ◆ faceKeypoints

    Array<float> op::Datum::faceKeypoints

    Face keypoints (x,y,score) locations for each person in the image. It has been resized to the same resolution as poseKeypoints. Size: #people x #face parts (70) x 3 ((x,y) coordinates + score).

    Definition at line 143 of file datum.hpp.

    ◆ faceKeypoints3D

    Array<float> op::Datum::faceKeypoints3D

    Face keypoints (x,y,z,score) locations for each person in the image. It has been resized to the same resolution as poseKeypoints3D. Size: #people x #face parts (70) x 4 ((x,y,z) coordinates + score).

    Definition at line 186 of file datum.hpp.

    ◆ faceRectangles

    std::vector<Rectangle<float> > op::Datum::faceRectangles

    Face detection locations (x,y,width,height) for each person in the image. It is resized to cvInputData.size(). Size: #people.

    Definition at line 136 of file datum.hpp.
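
    As a hedged sketch only: faceRectangles can also be filled by the user with pre-computed face locations, assuming the face detector was configured as op::Detector::Provided in op::WrapperStructFace (the mode for externally supplied rectangles). The helper name and the numeric values are purely illustrative.

        #include <openpose/headers.hpp>

        // Illustrative sketch: supply pre-computed face locations instead of relying
        // on the body-based face detector (assumes op::Detector::Provided was selected).
        void setProvidedFaces(op::Datum& datum)
        {
            datum.faceRectangles = {
                op::Rectangle<float>{330.f, 100.f, 290.f, 290.f},   // person 0: x, y, width, height
                op::Rectangle<float>{ 24.f, 330.f, 377.f, 377.f}    // person 1
            };
        }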


    ◆ frameNumber

    unsigned long long op::Datum::frameNumber

    Corresponding frame number. If the producer (e.g., video) starts from frame 0 and does not repeat any frame, then frameNumber should match the field id.

    Definition at line 38 of file datum.hpp.

    ◆ handHeatMaps

    std::array<Array<float>, 2> op::Datum::handHeatMaps

    Hand pose heatmaps (hand parts and/or background) for the whole image. Analogous to faceHeatMaps, but applied to the hands. Size of each Array: #people x #hand parts (21) x output_net_height x output_net_width.

    Definition at line 172 of file datum.hpp.

    ◆ handKeypoints

    std::array<Array<float>, 2> op::Datum::handKeypoints

    Hand keypoints (x,y,score) locations for each person in the image. It has been resized to the same resolution as poseKeypoints. handKeypoints[0] corresponds to the left hands and handKeypoints[1] to the right ones. Size of each Array: #people x #hand parts (21) x 3 ((x,y) coordinates + score).

    Definition at line 165 of file datum.hpp.
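
    For illustration only, a sketch of accessing the left/right split on a datum that has already been processed; getSize() and the vector-index operator[] are part of op::Array, and printLeftWrists is a hypothetical helper.

        #include <string>
        #include <openpose/headers.hpp>

        // Illustrative sketch: print hand part 0 (the wrist) of each detected left hand.
        void printLeftWrists(const op::Datum& datum)
        {
            const auto& leftHands = datum.handKeypoints[0];   // [1] would be the right hands
            for (auto person = 0; person < leftHands.getSize(0); person++)
            {
                const auto x = leftHands[{person, 0, 0}];
                const auto y = leftHands[{person, 0, 1}];
                const auto score = leftHands[{person, 0, 2}];
                op::opLog("Left wrist: " + std::to_string(x) + ", " + std::to_string(y)
                          + " (score " + std::to_string(score) + ")", op::Priority::High);
            }
        }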


    ◆ handKeypoints3D

    std::array<Array<float>, 2> op::Datum::handKeypoints3D

    Hand keypoints (x,y,z,score) locations for each person in the image. It has been resized to the same resolution as poseKeypoints3D. handKeypoints3D[0] corresponds to the left hands and handKeypoints3D[1] to the right ones. Size of each Array: #people x #hand parts (21) x 4 ((x,y,z) coordinates + score).

    Definition at line 194 of file datum.hpp.

    ◆ handRectangles

    std::vector<std::array<Rectangle<float>, 2> > op::Datum::handRectangles

    Hand detection locations (x,y,width,height) for each person in the image. It is resized to cvInputData.size(). Size: #people.

    Definition at line 157 of file datum.hpp.

    ◆ id

    unsigned long long op::Datum::id

    Datum ID. Internally used to sort the Datums if multi-threading is used.

    Definition at line 22 of file datum.hpp.

    ◆ inputNetData

    std::vector<Array<float> > op::Datum::inputNetData

    Original image to be processed in Array<float> format. It has been resized to the net input resolution and reformatted into Array<float> to be compatible with the net. If there is more than 1 scale, each scale is right- and bottom-padded to fill the greatest resolution, and the scales are sorted from bigger to smaller. Vector size: #scales. Each Array size: 3 x input_net_height x input_net_width.

    Definition at line 56 of file datum.hpp.

    ◆ name

    std::string op::Datum::name

    Name used when saving the data to disk (e.g., the write_images or write_keypoint flags in the demo).

    Definition at line 31 of file datum.hpp.

    ◆ netInputSizes

    std::vector<Point<int> > op::Datum::netInputSizes

    Size(s) (width x height) of the image(s) fed to the pose deep net. The size of the std::vector corresponds to the number of scales.

    Definition at line 229 of file datum.hpp.

    ◆ netOutputSize

    Point<int> op::Datum::netOutputSize

    Size (width x height) of the image returned by the deep net.

    Definition at line 239 of file datum.hpp.

    ◆ outputData

    Array<float> op::Datum::outputData

    Rendered image in Array<float> format. It consists of a blending of the cvInputData and the pose/body part(s) heatmap/PAF(s). If rendering is disabled (e.g., the no_render_pose flag in the demo), outputData will be empty. Size: 3 x output_net_height x output_net_width.

    Definition at line 64 of file datum.hpp.

    ◆ poseCandidates

    std::vector<std::vector<std::array<float,3> > > op::Datum::poseCandidates

    Body pose candidates for the whole image. This parameter is empty and disabled by default for performance; it can be enabled with candidates_body. Candidates refer to all the detected body parts, before being assembled into people. Note that the number of candidates is equal to or higher than the number of body parts after being assembled into people. Size: #body parts x min(part candidates, POSE_MAX_PEOPLE) x 3 (x,y,score). Rather than a vector, it should ideally be: std::array<std::vector<std::array<float,3>>, #BP> poseCandidates.

    Definition at line 129 of file datum.hpp.

    ◆ poseHeatMaps

    Array<float> op::Datum::poseHeatMaps

    Body pose heatmaps (body parts, background and/or PAFs) for the whole image. This parameter is empty and disabled by default for performance. Each group (body parts, background and PAFs) can be individually enabled. #heatmaps = #body parts (if enabled) + 1 (if background enabled) + 2 x #PAFs (if enabled). Each PAF has 2 consecutive channels, one for the x- and one for the y-coordinates. Heatmap order: body parts + background (as they appear in POSE_BODY_PART_MAPPING) + (x,y) channel of each PAF (sorted as they appear in POSE_BODY_PART_PAIRS). See pose/poseParameters.hpp. The user can choose the heatmap normalization: ranges [0, 1], [-1, 1] or [0, 255]. Check the heatmaps_scale flag in {OpenPose_path}/doc/advanced/demo_advanced.md for more details. Size: #heatmaps x output_net_height x output_net_width.

    Definition at line 118 of file datum.hpp.

    ◆ poseIds

    Array<long long> op::Datum::poseIds

    People IDs. It returns a person ID for each body pose, providing temporal consistency: the ID stays the same for a given person across frames, i.e., it allows the same person to be tracked over time. If person identification is disabled or poseKeypoints is empty, poseIds will also be empty. Size: #people.

    Definition at line 94 of file datum.hpp.

    ◆ poseKeypoints

    Array<float> op::Datum::poseKeypoints

    Body pose (x,y,score) locations for each person in the image. It has been resized to the desired output resolution (e.g., the resolution flag in the demo). Size: #people x #body parts (e.g., 18 for COCO or 15 for MPI) x 3 ((x,y) coordinates + score).

    Definition at line 85 of file datum.hpp.
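
    To make the layout concrete, a sketch (assuming a datum already processed by the Wrapper) that walks over this array; getSize() and the vector-index operator[] are part of op::Array, and printPoseKeypoints is a hypothetical helper.

        #include <string>
        #include <openpose/headers.hpp>

        // Illustrative sketch: log every body keypoint of every detected person as "x, y, score".
        void printPoseKeypoints(const op::Datum& datum)
        {
            const auto& poseKeypoints = datum.poseKeypoints;
            for (auto person = 0; person < poseKeypoints.getSize(0); person++)
                for (auto bodyPart = 0; bodyPart < poseKeypoints.getSize(1); bodyPart++)
                {
                    const auto x = poseKeypoints[{person, bodyPart, 0}];
                    const auto y = poseKeypoints[{person, bodyPart, 1}];
                    const auto score = poseKeypoints[{person, bodyPart, 2}];
                    op::opLog(std::to_string(x) + ", " + std::to_string(y) + ", " + std::to_string(score),
                              op::Priority::High);
                }
        }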


    ◆ poseKeypoints3D

    Array<float> op::Datum::poseKeypoints3D

    Body pose (x,y,z,score) locations for each person in the image. Size: #people x #body parts (e.g., 18 for COCO or 15 for MPI) x 4 ((x,y,z) coordinates + score).

    Definition at line 179 of file datum.hpp.

    ◆ poseNetOutput

    Array<float> op::Datum::poseNetOutput

    If it is not empty, OpenPose will not run its internal body pose estimation network and will instead use this data as a substitute for its network output. The size of this element must match the size of the output of its internal network, or it will lead to core dumped (segmentation) errors. The pose estimation flags (e.g., --net_resolution, --scale_number, etc.) can be modified to match the dimensions of both elements.

    Definition at line 217 of file datum.hpp.

    ◆ poseScores

    Array<float> op::Datum::poseScores

    Body pose global confidence/score for each person in the image. It considers not only the score of each body keypoint, but also the score of each PAF association. Optimized for the COCO evaluation metric, it heavily penalizes people with missing body parts (e.g., people cropped at the borders of the image). If poseKeypoints is empty, poseScores will also be empty. Size: #people.

    Definition at line 104 of file datum.hpp.

    ◆ scaleInputToNetInputs

    std::vector<double> op::Datum::scaleInputToNetInputs

    Scale ratio between the input Datum::cvInputData and the net input size.

    Definition at line 223 of file datum.hpp.

    ◆ scaleInputToOutput

    double op::Datum::scaleInputToOutput

    Scale ratio between the input Datum::cvInputData and the output Datum::cvOutputData.

    Definition at line 234 of file datum.hpp.

    ◆ scaleNetToOutput

    double op::Datum::scaleNetToOutput

    Scale ratio between the net output and the final output Datum::cvOutputData.

    Definition at line 244 of file datum.hpp.

    ◆ subId

    unsigned long long op::Datum::subId

    Datum sub-ID. Internally used to sort the Datums if multi-threading is used.

    Definition at line 24 of file datum.hpp.

    ◆ subIdMax

    unsigned long long op::Datum::subIdMax

    Datum maximum sub-ID. Used to sort the Datums if multi-threading is used.

    Definition at line 26 of file datum.hpp.

    The documentation for this struct was generated from the following file: datum.hpp
    + + + + diff --git a/web/html/doc/structop_1_1_datum.js b/web/html/doc/structop_1_1_datum.js new file mode 100644 index 000000000..fe779c872 --- /dev/null +++ b/web/html/doc/structop_1_1_datum.js @@ -0,0 +1,50 @@ +var structop_1_1_datum = +[ + [ "Datum", "structop_1_1_datum.html#a72c75834671aebe44705738fb5efc3c5", null ], + [ "Datum", "structop_1_1_datum.html#a42f9aef848c6335c5a81cad374319f0b", null ], + [ "Datum", "structop_1_1_datum.html#a2d4940d8cb12d95b8588cd0280f6524c", null ], + [ "~Datum", "structop_1_1_datum.html#a16b968aec06e9b904751216402972e74", null ], + [ "clone", "structop_1_1_datum.html#ad137a102ef753734a9413762d72e6d46", null ], + [ "operator!=", "structop_1_1_datum.html#a8337f6ff81ba8231ceeabc840372bff9", null ], + [ "operator<", "structop_1_1_datum.html#a9d67e55fbc26399e4efd2385c1899541", null ], + [ "operator<=", "structop_1_1_datum.html#a32752199884dcb51b7157daa098063e1", null ], + [ "operator=", "structop_1_1_datum.html#a72ee10bf507aea368cfd3dba3dd38cb5", null ], + [ "operator=", "structop_1_1_datum.html#a24f3bfcb0ffffeb5742eb1530bc9e367", null ], + [ "operator==", "structop_1_1_datum.html#ae740051202ca0db8358d5308143bb1b3", null ], + [ "operator>", "structop_1_1_datum.html#a79a05bec9871522cfab5d33cc7b63614", null ], + [ "operator>=", "structop_1_1_datum.html#ab97601a7628b46619f4a071cf1613ce6", null ], + [ "cameraExtrinsics", "structop_1_1_datum.html#aa3e5b74f3d54bc880f47831c3932dfa9", null ], + [ "cameraIntrinsics", "structop_1_1_datum.html#ae2aad08cc74ee43e1242b403d47be2ff", null ], + [ "cameraMatrix", "structop_1_1_datum.html#aa27ee36fd2e1fb0dfc5c1e6869e2073e", null ], + [ "cvInputData", "structop_1_1_datum.html#a1f9ba4bd5be779a911c8c8e7962ea727", null ], + [ "cvOutputData", "structop_1_1_datum.html#ad70b95f61637fe23092bca8f0a4fb088", null ], + [ "cvOutputData3D", "structop_1_1_datum.html#a0aa21ea7a3adea0126003b778509f2d2", null ], + [ "elementRendered", "structop_1_1_datum.html#a35212700ef2a2ac290a6666e2993a192", null ], + [ "faceHeatMaps", "structop_1_1_datum.html#ae0c0f33a6b75c7f47e11112dd33f23c1", null ], + [ "faceKeypoints", "structop_1_1_datum.html#aebd19bf50725a5cd87de1efd96f6ebfe", null ], + [ "faceKeypoints3D", "structop_1_1_datum.html#a9a44196a197d5c050e626efe8b016e84", null ], + [ "faceRectangles", "structop_1_1_datum.html#a0b2f6955a1751fc79b107789272effad", null ], + [ "frameNumber", "structop_1_1_datum.html#a8b930d61467f98702ebea68f39fc762b", null ], + [ "handHeatMaps", "structop_1_1_datum.html#aef6c478313691ab5101664c1df55aa58", null ], + [ "handKeypoints", "structop_1_1_datum.html#a59d455dbddc50d700809c5e102c40d4e", null ], + [ "handKeypoints3D", "structop_1_1_datum.html#a27bb38102b5ebecd9b13a3619e658316", null ], + [ "handRectangles", "structop_1_1_datum.html#a52d75e3273490624414f0602785bb608", null ], + [ "id", "structop_1_1_datum.html#a65deddd49d0fbca81f367198fc600015", null ], + [ "inputNetData", "structop_1_1_datum.html#a46ff336119fd0d67c8223b1a9371731d", null ], + [ "name", "structop_1_1_datum.html#ae6adcdacea12a9cfa445cf0cac1985b0", null ], + [ "netInputSizes", "structop_1_1_datum.html#a32d164c01acf6b4f7eb1323d74edbdca", null ], + [ "netOutputSize", "structop_1_1_datum.html#ac734d4262a5a7892c6d4094cdd2bcc7c", null ], + [ "outputData", "structop_1_1_datum.html#a42b953c082f479eddc527da9a3a4cc75", null ], + [ "poseCandidates", "structop_1_1_datum.html#a55dd5354e09696ed6896923755f1c85b", null ], + [ "poseHeatMaps", "structop_1_1_datum.html#a5429e97e0ab9b0e2209a3947af668381", null ], + [ "poseIds", 
"structop_1_1_datum.html#aba90dccffb5a830296231bd430c4766c", null ], + [ "poseKeypoints", "structop_1_1_datum.html#a6d629b1f6f7b958fe4cf2ef4cdf57c5b", null ], + [ "poseKeypoints3D", "structop_1_1_datum.html#a652ac1e7de13ec9a886dece75848cfea", null ], + [ "poseNetOutput", "structop_1_1_datum.html#a8f6f5fd181abe3bdfd6f7bdf8a165782", null ], + [ "poseScores", "structop_1_1_datum.html#afb117821de7aff9ac3c219ef3bbc0c14", null ], + [ "scaleInputToNetInputs", "structop_1_1_datum.html#a0e416771f275be98c83aaff01e482a71", null ], + [ "scaleInputToOutput", "structop_1_1_datum.html#a6cf96c250c236a03f13da69e1d4336d9", null ], + [ "scaleNetToOutput", "structop_1_1_datum.html#a44af7162e180c6856ce909057f43d8e1", null ], + [ "subId", "structop_1_1_datum.html#aeb6d10e4fa40a20d38118bf1be3112d8", null ], + [ "subIdMax", "structop_1_1_datum.html#ab87c493347456b592b616e9f656a5d60", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_point-members.html b/web/html/doc/structop_1_1_point-members.html new file mode 100644 index 000000000..e1afb4734 --- /dev/null +++ b/web/html/doc/structop_1_1_point-members.html @@ -0,0 +1,130 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    op::Point< T > Member List

    This is the complete list of members for op::Point< T >, including all inherited members.

    area() const : op::Point< T > [inline]
    operator!=(const Point< T > &point) const : op::Point< T > [inline]
    operator*(const T value) const : op::Point< T >
    operator*=(const T value) : op::Point< T >
    operator+(const Point< T > &point) const : op::Point< T >
    operator+(const T value) const : op::Point< T >
    operator+=(const Point< T > &point) : op::Point< T >
    operator+=(const T value) : op::Point< T >
    operator-(const Point< T > &point) const : op::Point< T >
    operator-(const T value) const : op::Point< T >
    operator-=(const Point< T > &point) : op::Point< T >
    operator-=(const T value) : op::Point< T >
    operator/(const T value) const : op::Point< T >
    operator/=(const T value) : op::Point< T >
    operator<(const Point< T > &point) const : op::Point< T > [inline]
    operator<=(const Point< T > &point) const : op::Point< T > [inline]
    operator=(const Point< T > &point) : op::Point< T >
    operator=(Point< T > &&point) : op::Point< T >
    operator==(const Point< T > &point) const : op::Point< T > [inline]
    operator>(const Point< T > &point) const : op::Point< T > [inline]
    operator>=(const Point< T > &point) const : op::Point< T > [inline]
    Point(const T x=0, const T y=0) : op::Point< T >
    Point(const Point< T > &point) : op::Point< T >
    Point(Point< T > &&point) : op::Point< T >
    toString() const : op::Point< T >
    x : op::Point< T >
    y : op::Point< T >
    + + + + diff --git a/web/html/doc/structop_1_1_point.html b/web/html/doc/structop_1_1_point.html new file mode 100644 index 000000000..4f687ab2f --- /dev/null +++ b/web/html/doc/structop_1_1_point.html @@ -0,0 +1,863 @@ + + + + + + + +OpenPose: op::Point< T > Struct Template Reference + + + + + + + + + + + + + +
    op::Point< T > Struct Template Reference

    #include <point.hpp>

    Public Member Functions

        Point (const T x=0, const T y=0)
        Point (const Point< T > &point)
        Point< T > & operator= (const Point< T > &point)
        Point (Point< T > &&point)
        Point< T > & operator= (Point< T > &&point)
        T area () const
        std::string toString () const
        bool operator< (const Point< T > &point) const
        bool operator> (const Point< T > &point) const
        bool operator<= (const Point< T > &point) const
        bool operator>= (const Point< T > &point) const
        bool operator== (const Point< T > &point) const
        bool operator!= (const Point< T > &point) const
        Point< T > & operator+= (const Point< T > &point)
        Point< T > operator+ (const Point< T > &point) const
        Point< T > & operator+= (const T value)
        Point< T > operator+ (const T value) const
        Point< T > & operator-= (const Point< T > &point)
        Point< T > operator- (const Point< T > &point) const
        Point< T > & operator-= (const T value)
        Point< T > operator- (const T value) const
        Point< T > & operator*= (const T value)
        Point< T > operator* (const T value) const
        Point< T > & operator/= (const T value)
        Point< T > operator/ (const T value) const

    Public Attributes

        T x
        T y

    Detailed Description

    template<typename T>
    struct op::Point< T >

    Definition at line 10 of file point.hpp.

    Constructor & Destructor Documentation

    ◆ Point() [1/3]

    template<typename T >
    op::Point< T >::Point (const T x = 0, const T y = 0)

    ◆ Point() [2/3]

    template<typename T >
    op::Point< T >::Point (const Point< T > & point)

    Copy constructor. It performs a fast copy: for performance purposes, copying a Point<T> or Datum or cv::Mat just copies the reference; it still shares the same internal data. Modifying the copied element will modify the original one. Use clone() for a slower but real copy, similarly to cv::Mat and Point<T>.

    Parameters
        point: Point to be copied.

    ◆ Point() [3/3]

    template<typename T >
    op::Point< T >::Point (Point< T > && point)

    Move constructor. It destroys the original Point to be moved.

    Parameters
        point: Point to be moved.

    Member Function Documentation

    ◆ area()

    template<typename T >
    T op::Point< T >::area () const        [inline]

    Definition at line 50 of file point.hpp.

    ◆ operator!=()

    template<typename T >
    bool op::Point< T >::operator!= (const Point< T > & point) const        [inline]

    Not equal comparison operator.

    Parameters
        point: Point<T> to be compared.
    Returns
        Whether the instance satisfies the condition with respect to point.

    Definition at line 122 of file point.hpp.

    ◆ operator*()

    template<typename T >
    Point<T> op::Point< T >::operator* (const T value) const

    ◆ operator*=()

    template<typename T >
    Point<T>& op::Point< T >::operator*= (const T value)

    ◆ operator+() [1/2]

    template<typename T >
    Point<T> op::Point< T >::operator+ (const Point< T > & point) const

    ◆ operator+() [2/2]

    template<typename T >
    Point<T> op::Point< T >::operator+ (const T value) const

    ◆ operator+=() [1/2]

    template<typename T >
    Point<T>& op::Point< T >::operator+= (const Point< T > & point)

    ◆ operator+=() [2/2]

    template<typename T >
    Point<T>& op::Point< T >::operator+= (const T value)

    ◆ operator-() [1/2]

    template<typename T >
    Point<T> op::Point< T >::operator- (const Point< T > & point) const

    ◆ operator-() [2/2]

    template<typename T >
    Point<T> op::Point< T >::operator- (const T value) const

    ◆ operator-=() [1/2]

    template<typename T >
    Point<T>& op::Point< T >::operator-= (const Point< T > & point)

    ◆ operator-=() [2/2]

    template<typename T >
    Point<T>& op::Point< T >::operator-= (const T value)

    ◆ operator/()

    template<typename T >
    Point<T> op::Point< T >::operator/ (const T value) const

    ◆ operator/=()

    template<typename T >
    Point<T>& op::Point< T >::operator/= (const T value)
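
    A short, illustrative sketch of how these operators compose, assuming the usual element-wise semantics suggested by their signatures (the expected values in the comments follow from that assumption):

        #include <iostream>
        #include <openpose/core/point.hpp>

        int main()
        {
            op::Point<int> a{10, 20};
            const op::Point<int> b{1, 2};

            const auto sum = a + b;       // expected to be [11, 22]
            const auto scaled = a * 2;    // expected to be [20, 40]
            a -= b;                       // a expected to become [9, 18]

            std::cout << sum.toString() << " " << scaled.toString() << " " << a.toString() << std::endl;
            return 0;
        }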

    ◆ operator<()

    template<typename T >
    bool op::Point< T >::operator< (const Point< T > & point) const        [inline]

    Less comparison operator.

    Parameters
        point: Point<T> to be compared.
    Returns
        Whether the instance satisfies the condition with respect to point.

    Definition at line 72 of file point.hpp.

    ◆ operator<=()

    template<typename T >
    bool op::Point< T >::operator<= (const Point< T > & point) const        [inline]

    Less or equal comparison operator.

    Parameters
        point: Point<T> to be compared.
    Returns
        Whether the instance satisfies the condition with respect to point.

    Definition at line 92 of file point.hpp.

    ◆ operator=() [1/2]

    template<typename T >
    Point<T>& op::Point< T >::operator= (const Point< T > & point)

    Copy assignment. Similar to Point<T>(const Point<T>& point).

    Parameters
        point: Point to be copied.
    Returns
        The resulting Point.

    ◆ operator=() [2/2]

    template<typename T >
    Point<T>& op::Point< T >::operator= (Point< T > && point)

    Move assignment. Similar to Point<T>(Point<T>&& point).

    Parameters
        point: Point to be moved.
    Returns
        The resulting Point.

    ◆ operator==()

    template<typename T >
    bool op::Point< T >::operator== (const Point< T > & point) const        [inline]

    Equal comparison operator.

    Parameters
        point: Point<T> to be compared.
    Returns
        Whether the instance satisfies the condition with respect to point.

    Definition at line 112 of file point.hpp.

    ◆ operator>()

    template<typename T >
    bool op::Point< T >::operator> (const Point< T > & point) const        [inline]

    Greater comparison operator.

    Parameters
        point: Point<T> to be compared.
    Returns
        Whether the instance satisfies the condition with respect to point.

    Definition at line 82 of file point.hpp.

    ◆ operator>=()

    template<typename T >
    bool op::Point< T >::operator>= (const Point< T > & point) const        [inline]

    Greater or equal comparison operator.

    Parameters
        point: Point<T> to be compared.
    Returns
        Whether the instance satisfies the condition with respect to point.

    Definition at line 102 of file point.hpp.

    ◆ toString()

    template<typename T >
    std::string op::Point< T >::toString () const

    It returns a string with the whole Point<T> data. Useful for debugging. The format is: [x, y]

    Returns
        A string with the Point<T> values in the above format.

    Member Data Documentation

    ◆ x

    template<typename T >
    T op::Point< T >::x

    Definition at line 12 of file point.hpp.

    ◆ y

    template<typename T >
    T op::Point< T >::y

    Definition at line 13 of file point.hpp.

    The documentation for this struct was generated from the following file: point.hpp
    + + + + diff --git a/web/html/doc/structop_1_1_point.js b/web/html/doc/structop_1_1_point.js new file mode 100644 index 000000000..db1bf4c38 --- /dev/null +++ b/web/html/doc/structop_1_1_point.js @@ -0,0 +1,30 @@ +var structop_1_1_point = +[ + [ "Point", "structop_1_1_point.html#a9f80114d18ec8055360222d975bcd5a8", null ], + [ "Point", "structop_1_1_point.html#a44559988e3980e21568b5d9dd2897368", null ], + [ "Point", "structop_1_1_point.html#ab3b92e4a40cd58d948647484f21dd9ef", null ], + [ "area", "structop_1_1_point.html#ac85e32b9381abc2af106fe96dba81b08", null ], + [ "operator!=", "structop_1_1_point.html#a8a82a0d663d9572fa28394f7562ebfb2", null ], + [ "operator*", "structop_1_1_point.html#ad66e33cf5d57e78c80220881406e41ce", null ], + [ "operator*=", "structop_1_1_point.html#afb53230d0d884ca5432e948605b5c2e6", null ], + [ "operator+", "structop_1_1_point.html#a0b362efa00fc5a0d35f743f3c01fa1d0", null ], + [ "operator+", "structop_1_1_point.html#af656ee43b596b5bb68139404a54c5a63", null ], + [ "operator+=", "structop_1_1_point.html#ad5005ff994bfcd1846854d6af103a3a6", null ], + [ "operator+=", "structop_1_1_point.html#a60488ca743d82fe8dd574b01f992460c", null ], + [ "operator-", "structop_1_1_point.html#a075741b8963b342bb068976afcf579af", null ], + [ "operator-", "structop_1_1_point.html#a8961164fe93cd91fcf55f56200730578", null ], + [ "operator-=", "structop_1_1_point.html#ad42deecd0077f7c962ca383cbc87e08f", null ], + [ "operator-=", "structop_1_1_point.html#abf2bb2d2d5b3dba3424b489b99faa760", null ], + [ "operator/", "structop_1_1_point.html#ad599eeba7a0137c3c138e5542bb2e9ed", null ], + [ "operator/=", "structop_1_1_point.html#af8e49e33dad417d05ce8fb5f9dd68762", null ], + [ "operator<", "structop_1_1_point.html#a6aaab75fe6d1c8b4c935c2da385fd7ee", null ], + [ "operator<=", "structop_1_1_point.html#a872607032f6b4fa8982f88a74c88c6bd", null ], + [ "operator=", "structop_1_1_point.html#aada0e9eecee2fb30fb903b32f9f33047", null ], + [ "operator=", "structop_1_1_point.html#ac8596f2b3b50464b6c6eaa34b0a2c48b", null ], + [ "operator==", "structop_1_1_point.html#a2f7900c0d58fb297b3b039cfb3c98a3e", null ], + [ "operator>", "structop_1_1_point.html#a0e94c712c194c0b317eef4d8995e52f3", null ], + [ "operator>=", "structop_1_1_point.html#ae7afe35869eea79f72bd8b74fae4a2f1", null ], + [ "toString", "structop_1_1_point.html#a73d1088b5d0f3370499ca5c6e80b544a", null ], + [ "x", "structop_1_1_point.html#a812d4ef29d102f4ad18f32ae54eb17ec", null ], + [ "y", "structop_1_1_point.html#a5821bc77a416629916e671793df3ce3b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_rectangle-members.html b/web/html/doc/structop_1_1_rectangle-members.html new file mode 100644 index 000000000..9897fb26b --- /dev/null +++ b/web/html/doc/structop_1_1_rectangle-members.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    op::Rectangle< T > Member List

    This is the complete list of members for op::Rectangle< T >, including all inherited members.

    area() const : op::Rectangle< T > [inline]
    bottomRight() const : op::Rectangle< T >
    center() const : op::Rectangle< T >
    height : op::Rectangle< T >
    operator*(const T value) const : op::Rectangle< T >
    operator*=(const T value) : op::Rectangle< T >
    operator/(const T value) const : op::Rectangle< T >
    operator/=(const T value) : op::Rectangle< T >
    operator=(const Rectangle< T > &rectangle) : op::Rectangle< T >
    operator=(Rectangle< T > &&rectangle) : op::Rectangle< T >
    recenter(const T newWidth, const T newHeight) : op::Rectangle< T >
    Rectangle(const T x=0, const T y=0, const T width=0, const T height=0) : op::Rectangle< T >
    Rectangle(const Rectangle< T > &rectangle) : op::Rectangle< T >
    Rectangle(Rectangle< T > &&rectangle) : op::Rectangle< T >
    topLeft() const : op::Rectangle< T > [inline]
    toString() const : op::Rectangle< T >
    width : op::Rectangle< T >
    x : op::Rectangle< T >
    y : op::Rectangle< T >
    + + + + diff --git a/web/html/doc/structop_1_1_rectangle.html b/web/html/doc/structop_1_1_rectangle.html new file mode 100644 index 000000000..7801aea80 --- /dev/null +++ b/web/html/doc/structop_1_1_rectangle.html @@ -0,0 +1,610 @@ + + + + + + + +OpenPose: op::Rectangle< T > Struct Template Reference + + + + + + + + + + + + + +
    op::Rectangle< T > Struct Template Reference

    #include <rectangle.hpp>

    Public Member Functions

        Rectangle (const T x=0, const T y=0, const T width=0, const T height=0)
        Rectangle (const Rectangle< T > &rectangle)
        Rectangle< T > & operator= (const Rectangle< T > &rectangle)
        Rectangle (Rectangle< T > &&rectangle)
        Rectangle< T > & operator= (Rectangle< T > &&rectangle)
        Point< T > center () const
        Point< T > topLeft () const
        Point< T > bottomRight () const
        T area () const
        void recenter (const T newWidth, const T newHeight)
        std::string toString () const
        Rectangle< T > & operator*= (const T value)
        Rectangle< T > operator* (const T value) const
        Rectangle< T > & operator/= (const T value)
        Rectangle< T > operator/ (const T value) const

    Public Attributes

        T x
        T y
        T width
        T height

    Detailed Description

    template<typename T>
    struct op::Rectangle< T >

    Definition at line 11 of file rectangle.hpp.

    Constructor & Destructor Documentation

    ◆ Rectangle() [1/3]

    template<typename T >
    op::Rectangle< T >::Rectangle (const T x = 0, const T y = 0, const T width = 0, const T height = 0)

    ◆ Rectangle() [2/3]

    template<typename T >
    op::Rectangle< T >::Rectangle (const Rectangle< T > & rectangle)

    Copy constructor. It performs a fast copy: for performance purposes, copying a Rectangle<T> or Datum or cv::Mat just copies the reference; it still shares the same internal data. Modifying the copied element will modify the original one. Use clone() for a slower but real copy, similarly to cv::Mat and Rectangle<T>.

    Parameters
        rectangle: Rectangle to be copied.

    ◆ Rectangle() [3/3]

    template<typename T >
    op::Rectangle< T >::Rectangle (Rectangle< T > && rectangle)

    Move constructor. It destroys the original Rectangle to be moved.

    Parameters
        rectangle: Rectangle to be moved.

    Member Function Documentation

    ◆ area()

    template<typename T >
    T op::Rectangle< T >::area () const        [inline]

    Definition at line 62 of file rectangle.hpp.

    ◆ bottomRight()

    template<typename T >
    Point<T> op::Rectangle< T >::bottomRight () const

    ◆ center()

    template<typename T >
    Point<T> op::Rectangle< T >::center () const

    ◆ operator*()

    template<typename T >
    Rectangle<T> op::Rectangle< T >::operator* (const T value) const

    ◆ operator*=()

    template<typename T >
    Rectangle<T>& op::Rectangle< T >::operator*= (const T value)

    ◆ operator/()

    template<typename T >
    Rectangle<T> op::Rectangle< T >::operator/ (const T value) const

    ◆ operator/=()

    template<typename T >
    Rectangle<T>& op::Rectangle< T >::operator/= (const T value)

    ◆ operator=() [1/2]

    template<typename T >
    Rectangle<T>& op::Rectangle< T >::operator= (const Rectangle< T > & rectangle)

    Copy assignment. Similar to Rectangle<T>(const Rectangle<T>& rectangle).

    Parameters
        rectangle: Rectangle to be copied.
    Returns
        The resulting Rectangle.

    ◆ operator=() [2/2]

    template<typename T >
    Rectangle<T>& op::Rectangle< T >::operator= (Rectangle< T > && rectangle)

    Move assignment. Similar to Rectangle<T>(Rectangle<T>&& rectangle).

    Parameters
        rectangle: Rectangle to be moved.
    Returns
        The resulting Rectangle.

    ◆ recenter()

    template<typename T >
    void op::Rectangle< T >::recenter (const T newWidth, const T newHeight)

    ◆ topLeft()

    template<typename T >
    Point<T> op::Rectangle< T >::topLeft () const        [inline]

    Definition at line 55 of file rectangle.hpp.

    ◆ toString()

    template<typename T >
    std::string op::Rectangle< T >::toString () const

    It returns a string with the whole Rectangle<T> data. Useful for debugging. The format is: [x, y, width, height]

    Returns
        A string with the Rectangle<T> values in the above format.
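
    A brief, illustrative sketch of the geometry helpers above; the behaviour of recenter is assumed from its name and parameters (keep the same centre while changing the size):

        #include <iostream>
        #include <openpose/core/rectangle.hpp>

        int main()
        {
            op::Rectangle<float> roi{100.f, 50.f, 200.f, 100.f};   // x, y, width, height

            std::cout << roi.toString() << std::endl;              // prints in [x, y, width, height] format
            std::cout << roi.center().toString() << std::endl;     // centre as a Point<float>
            std::cout << roi.area() << std::endl;                  // scalar area

            // Assumed behaviour: keep the same centre but resize to 300 x 150.
            roi.recenter(300.f, 150.f);
            std::cout << roi.toString() << std::endl;
            return 0;
        }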

    Member Data Documentation

    ◆ height

    template<typename T >
    T op::Rectangle< T >::height

    Definition at line 16 of file rectangle.hpp.

    ◆ width

    template<typename T >
    T op::Rectangle< T >::width

    Definition at line 15 of file rectangle.hpp.

    ◆ x

    template<typename T >
    T op::Rectangle< T >::x

    Definition at line 13 of file rectangle.hpp.

    ◆ y

    template<typename T >
    T op::Rectangle< T >::y

    Definition at line 14 of file rectangle.hpp.

    The documentation for this struct was generated from the following file: rectangle.hpp
    + + + + diff --git a/web/html/doc/structop_1_1_rectangle.js b/web/html/doc/structop_1_1_rectangle.js new file mode 100644 index 000000000..36a7921b9 --- /dev/null +++ b/web/html/doc/structop_1_1_rectangle.js @@ -0,0 +1,22 @@ +var structop_1_1_rectangle = +[ + [ "Rectangle", "structop_1_1_rectangle.html#a0112ddaa9782f3ccbb76a319b05f030b", null ], + [ "Rectangle", "structop_1_1_rectangle.html#afbb0da8956e35178d3f28d2b1d998175", null ], + [ "Rectangle", "structop_1_1_rectangle.html#a5a9a60fdfd9c88ab8ded6275d64333ea", null ], + [ "area", "structop_1_1_rectangle.html#a5b319240c995c81bfa1d73a2461d49fd", null ], + [ "bottomRight", "structop_1_1_rectangle.html#ab4473fb43ab826ffb10c2be18cb96f24", null ], + [ "center", "structop_1_1_rectangle.html#a0b0b8be8a0b300204a2afff4f219879b", null ], + [ "operator*", "structop_1_1_rectangle.html#a66e38889d2b413df95a9995e93103ff7", null ], + [ "operator*=", "structop_1_1_rectangle.html#a2d3d7951770da3954d5af9e365f5780c", null ], + [ "operator/", "structop_1_1_rectangle.html#adba48a35368d4a4d55896899b217d523", null ], + [ "operator/=", "structop_1_1_rectangle.html#a65620c7efbb3db95d85c90c2be3a851d", null ], + [ "operator=", "structop_1_1_rectangle.html#abd3476f9a32ad2058ea67c75c2a547a2", null ], + [ "operator=", "structop_1_1_rectangle.html#abea1a6760629dc4ed99875dae9d5ac36", null ], + [ "recenter", "structop_1_1_rectangle.html#a1c9a572db2c17fb02a7d19e965c1d3dc", null ], + [ "topLeft", "structop_1_1_rectangle.html#a640050d6186148b425bedba8c33cf1ea", null ], + [ "toString", "structop_1_1_rectangle.html#af1c7f96c34132924fa9237248894e63d", null ], + [ "height", "structop_1_1_rectangle.html#a5db9f0e8c946d837a1d351cc0bc72811", null ], + [ "width", "structop_1_1_rectangle.html#a0d0ae826039b0961fae8723708809cdf", null ], + [ "x", "structop_1_1_rectangle.html#ac4ae58fe6ffd2f811f5cbc48661c1856", null ], + [ "y", "structop_1_1_rectangle.html#a64e6891af0088a4ad271a725601b8043", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_wrapper_struct_extra-members.html b/web/html/doc/structop_1_1_wrapper_struct_extra-members.html new file mode 100644 index 000000000..8aa78c822 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_extra-members.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    op::WrapperStructExtra Member List

    This is the complete list of members for op::WrapperStructExtra, including all inherited members.

    identification : op::WrapperStructExtra
    ikThreads : op::WrapperStructExtra
    minViews3d : op::WrapperStructExtra
    reconstruct3d : op::WrapperStructExtra
    tracking : op::WrapperStructExtra
    WrapperStructExtra(const bool reconstruct3d=false, const int minViews3d=-1, const bool identification=false, const int tracking=-1, const int ikThreads=0) : op::WrapperStructExtra
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_extra.html b/web/html/doc/structop_1_1_wrapper_struct_extra.html new file mode 100644 index 000000000..89547e010 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_extra.html @@ -0,0 +1,267 @@ + + + + + + + +OpenPose: op::WrapperStructExtra Struct Reference + + + + + + + + + + + + + +
    op::WrapperStructExtra Struct Reference

    #include <wrapperStructExtra.hpp>

    Public Member Functions

        WrapperStructExtra (const bool reconstruct3d=false, const int minViews3d=-1, const bool identification=false, const int tracking=-1, const int ikThreads=0)

    Public Attributes

        bool reconstruct3d
        int minViews3d
        bool identification
        int tracking
        int ikThreads

    Detailed Description

    WrapperStructExtra: Pose estimation and rendering configuration struct. WrapperStructExtra allows the user to set up the pose estimation and rendering parameters that will be used for the OpenPose WrapperT template and Wrapper class.

    Definition at line 13 of file wrapperStructExtra.hpp.

    Constructor & Destructor Documentation

    ◆ WrapperStructExtra()

    op::WrapperStructExtra::WrapperStructExtra (const bool reconstruct3d = false, const int minViews3d = -1, const bool identification = false, const int tracking = -1, const int ikThreads = 0)

    Constructor of the struct. It provides the recommended default values for each element of the struct. Since all the elements of the struct are public, they can also be manually filled.
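
    For context, a hedged sketch of how this struct is typically passed to the Wrapper; the configure overload taking a WrapperStructExtra is assumed from the op::Wrapper API, and configureExtra is a hypothetical helper.

        #include <openpose/headers.hpp>

        // Illustrative sketch: enable 3-D reconstruction and person identification.
        void configureExtra(op::Wrapper& opWrapper)
        {
            const op::WrapperStructExtra wrapperStructExtra{
                true,   // reconstruct3d
                -1,     // minViews3d: default max(2, min(4, #cameras-1))
                true,   // identification
                -1,     // tracking disabled
                0};     // ikThreads: IK disabled
            opWrapper.configure(wrapperStructExtra);
        }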


    Member Data Documentation

    ◆ identification

    bool op::WrapperStructExtra::identification

    Whether to return a person ID for each body skeleton, providing temporal consistency.

    Definition at line 33 of file wrapperStructExtra.hpp.

    ◆ ikThreads

    int op::WrapperStructExtra::ikThreads

    Whether to enable inverse kinematics (IK) from 3-D keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing the number of threads will increase the speed but also the global system latency.

    Definition at line 47 of file wrapperStructExtra.hpp.

    ◆ minViews3d

    int op::WrapperStructExtra::minViews3d

    Minimum number of views required to reconstruct each keypoint. By default (-1), it requires max(2, min(4, #cameras-1)) cameras to see the keypoint in order to reconstruct it.

    Definition at line 28 of file wrapperStructExtra.hpp.

    ◆ reconstruct3d

    bool op::WrapperStructExtra::reconstruct3d

    Whether to run the 3-D reconstruction demo, i.e., 1) reading from a stereo camera system, 2) performing 3-D reconstruction from the multiple views, and 3) displaying the 3-D reconstruction results.

    Definition at line 21 of file wrapperStructExtra.hpp.

    ◆ tracking

    int op::WrapperStructExtra::tracking

    Whether to enable people tracking across frames. The value indicates the number of frames during which tracking is run between each OpenPose keypoint detection. Select -1 (default) to disable it, or 0 to run the OpenPose keypoint detector and the tracker simultaneously for potentially higher accuracy than OpenPose alone.

    Definition at line 40 of file wrapperStructExtra.hpp.

    The documentation for this struct was generated from the following file: wrapperStructExtra.hpp
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_extra.js b/web/html/doc/structop_1_1_wrapper_struct_extra.js new file mode 100644 index 000000000..af42b4ce6 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_extra.js @@ -0,0 +1,9 @@ +var structop_1_1_wrapper_struct_extra = +[ + [ "WrapperStructExtra", "structop_1_1_wrapper_struct_extra.html#a70cdc27c953962810333fafe011f86dd", null ], + [ "identification", "structop_1_1_wrapper_struct_extra.html#a08578de8a074415df3e645d3ddb27b8b", null ], + [ "ikThreads", "structop_1_1_wrapper_struct_extra.html#ad41edf2717e5446a250efc05512ee07f", null ], + [ "minViews3d", "structop_1_1_wrapper_struct_extra.html#ae8a3562b010c4fa31e6a9722947301c6", null ], + [ "reconstruct3d", "structop_1_1_wrapper_struct_extra.html#aa157c20ca959fd952a85866a119183ca", null ], + [ "tracking", "structop_1_1_wrapper_struct_extra.html#a86ae9d1faa008aaeed4d6fa6ff03f0fb", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_wrapper_struct_face-members.html b/web/html/doc/structop_1_1_wrapper_struct_face-members.html new file mode 100644 index 000000000..9ae6998bb --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_face-members.html @@ -0,0 +1,111 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    op::WrapperStructFace Member List

    This is the complete list of members for op::WrapperStructFace, including all inherited members.

    alphaHeatMap : op::WrapperStructFace
    alphaKeypoint : op::WrapperStructFace
    detector : op::WrapperStructFace
    enable : op::WrapperStructFace
    netInputSize : op::WrapperStructFace
    renderMode : op::WrapperStructFace
    renderThreshold : op::WrapperStructFace
    WrapperStructFace(const bool enable=false, const Detector detector=Detector::Body, const Point< int > &netInputSize=Point< int >{368, 368}, const RenderMode renderMode=RenderMode::Auto, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold=0.4f) : op::WrapperStructFace
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_face.html b/web/html/doc/structop_1_1_wrapper_struct_face.html new file mode 100644 index 000000000..70ae77e90 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_face.html @@ -0,0 +1,317 @@ + + + + + + + +OpenPose: op::WrapperStructFace Struct Reference + + + + + + + + + + + + + +
    op::WrapperStructFace Struct Reference

    #include <wrapperStructFace.hpp>

    Public Member Functions

        WrapperStructFace (const bool enable=false, const Detector detector=Detector::Body, const Point< int > &netInputSize=Point< int >{368, 368}, const RenderMode renderMode=RenderMode::Auto, const float alphaKeypoint=FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=FACE_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold=0.4f)

    Public Attributes

        bool enable
        Detector detector
        Point< int > netInputSize
        RenderMode renderMode
        float alphaKeypoint
        float alphaHeatMap
        float renderThreshold

    Detailed Description

    WrapperStructFace: Face estimation and rendering configuration struct. WrapperStructFace allows the user to set up the face estimation and rendering parameters that will be used for the OpenPose WrapperT template and Wrapper class.

    Definition at line 16 of file wrapperStructFace.hpp.

    Constructor & Destructor Documentation

    ◆ WrapperStructFace()

    op::WrapperStructFace::WrapperStructFace (const bool enable = false, const Detector detector = Detector::Body, const Point< int > & netInputSize = Point< int >{368, 368}, const RenderMode renderMode = RenderMode::Auto, const float alphaKeypoint = FACE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap = FACE_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold = 0.4f)

    Constructor of the struct. It provides the recommended default values for each element of the struct. Since all the elements of the struct are public, they can also be manually filled.
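
    For illustration, a hedged sketch that enables face keypoint estimation with the defaults above before starting the Wrapper (configure, start and stop are assumed from the op::Wrapper API; runWithFace is a hypothetical helper):

        #include <openpose/headers.hpp>

        // Illustrative sketch: run face estimation on top of body estimation.
        int runWithFace()
        {
            op::Wrapper opWrapper{op::ThreadManagerMode::Asynchronous};

            op::WrapperStructFace wrapperStructFace{};
            wrapperStructFace.enable = true;                    // extract face keypoints
            wrapperStructFace.detector = op::Detector::Body;    // reuse the body keypoint detector
            opWrapper.configure(wrapperStructFace);

            opWrapper.start();                                  // start the processing threads
            // ... emplace op::Datum elements and retrieve the results ...
            opWrapper.stop();
            return 0;
        }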


    Member Data Documentation

    ◆ alphaHeatMap

    float op::WrapperStructFace::alphaHeatMap

    Rendering blending alpha value of the heat maps (face part, background or PAF) with respect to the background image. Value in the range [0, 1]. 0 will only render the background, 1 will only render the heat map.

    Definition at line 53 of file wrapperStructFace.hpp.

    ◆ alphaKeypoint

    float op::WrapperStructFace::alphaKeypoint

    Rendering blending alpha value of the pose point locations with respect to the background image. Value in the range [0, 1]. 0 will only render the background, 1 will fully render the pose.

    Definition at line 46 of file wrapperStructFace.hpp.

    ◆ detector

    Detector op::WrapperStructFace::detector

    Kind of face rectangle detector. Detector::Body is recommended (the fastest one if body is enabled, and the most accurate one); it is based on the OpenPose body keypoint detector.

    Definition at line 27 of file wrapperStructFace.hpp.

    ◆ enable

    bool op::WrapperStructFace::enable

    Whether to extract face keypoints.

    Definition at line 21 of file wrapperStructFace.hpp.

    ◆ netInputSize

    Point<int> op::WrapperStructFace::netInputSize

    CNN (Conv Net) input size. The greater, the slower the processing and the more memory required, but it will potentially increase accuracy. Both width and height must be divisible by 16.

    Definition at line 34 of file wrapperStructFace.hpp.

    ◆ renderMode

    RenderMode op::WrapperStructFace::renderMode

    Whether to render the output (pose locations, body, background or PAF heat maps) with CPU or GPU. Select None for no rendering, or Cpu or Gpu for CPU and GPU rendering respectively.

    Definition at line 40 of file wrapperStructFace.hpp.

    ◆ renderThreshold

    float op::WrapperStructFace::renderThreshold

    Rendering threshold. Only estimated keypoints whose score confidences are higher than this value will be rendered. Note: rendered refers only to the visual display in the OpenPose basic GUI, not to the saved results. Generally, a high threshold (> 0.5) will only render very clear body parts, while a small threshold (~0.1) will also output guessed and occluded keypoints, but also more false positives (i.e., wrong detections).

    Definition at line 62 of file wrapperStructFace.hpp.

    The documentation for this struct was generated from the following file: wrapperStructFace.hpp
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_face.js b/web/html/doc/structop_1_1_wrapper_struct_face.js new file mode 100644 index 000000000..2dfb86e5c --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_face.js @@ -0,0 +1,11 @@ +var structop_1_1_wrapper_struct_face = +[ + [ "WrapperStructFace", "structop_1_1_wrapper_struct_face.html#a0fb08ed60a50f19713df6f62ee685593", null ], + [ "alphaHeatMap", "structop_1_1_wrapper_struct_face.html#a49f609ae1c075f272bbaf32e128cc3a9", null ], + [ "alphaKeypoint", "structop_1_1_wrapper_struct_face.html#a65a0244cbcea06621c6f8c41e519990f", null ], + [ "detector", "structop_1_1_wrapper_struct_face.html#a30d45f383e5c9d72709b5281f24f1ed0", null ], + [ "enable", "structop_1_1_wrapper_struct_face.html#a8fadeca500dde0df2a35f864bf05ee05", null ], + [ "netInputSize", "structop_1_1_wrapper_struct_face.html#a9845712fd6ebb66fccb0c1647e3491a0", null ], + [ "renderMode", "structop_1_1_wrapper_struct_face.html#accc6e564598130b9bf0a6d0ec9c304c4", null ], + [ "renderThreshold", "structop_1_1_wrapper_struct_face.html#a982e3f1a13358a522e1882d17cb80d57", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_wrapper_struct_gui-members.html b/web/html/doc/structop_1_1_wrapper_struct_gui-members.html new file mode 100644 index 000000000..84098432e --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_gui-members.html @@ -0,0 +1,107 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    op::WrapperStructGui Member List
    +
    +
    + +

    This is the complete list of members for op::WrapperStructGui, including all inherited members.

    + + + + + +
    displayModeop::WrapperStructGui
    fullScreenop::WrapperStructGui
    guiVerboseop::WrapperStructGui
    WrapperStructGui(const DisplayMode displayMode=DisplayMode::NoDisplay, const bool guiVerbose=false, const bool fullScreen=false)op::WrapperStructGui
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_gui.html b/web/html/doc/structop_1_1_wrapper_struct_gui.html new file mode 100644 index 000000000..084f26109 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_gui.html @@ -0,0 +1,217 @@ + + + + + + + +OpenPose: op::WrapperStructGui Struct Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    op::WrapperStructGui Struct Reference
    +
    +
    + +

    #include <wrapperStructGui.hpp>

    + + + + +

    +Public Member Functions

     WrapperStructGui (const DisplayMode displayMode=DisplayMode::NoDisplay, const bool guiVerbose=false, const bool fullScreen=false)
     
    + + + + + + + +

    +Public Attributes

    DisplayMode displayMode
     
    bool guiVerbose
     
    bool fullScreen
     
    +

    Detailed Description

    +

    WrapperStructGui: It controls a small GUI for quick visualization.

    + +

    Definition at line 12 of file wrapperStructGui.hpp.

    +

    Constructor & Destructor Documentation

    + +

    ◆ WrapperStructGui()

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + +
    op::WrapperStructGui::WrapperStructGui (const DisplayMode displayMode = DisplayMode::NoDisplay,
    const bool guiVerbose = false,
    const bool fullScreen = false 
    )
    +
    +

    Constructor of the struct. It has the recommended and default values we recommend for each element of the struct. Since all the elements of the struct are public, they can also be manually filled.
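Since the members are public, a typical configuration simply assigns them before calling op::Wrapper::configure(), as in the OpenPose C++ API examples. The sketch below is only illustrative; the DisplayMode value names other than DisplayMode::NoDisplay are assumed from the OpenPose GUI enum header and should be checked against your version.

```cpp
#include <openpose/headers.hpp>

// Illustrative helper (not an OpenPose function): turn on the small
// integrated GUI with the 2-D view and the verbose overlay.
void configureGui(op::Wrapper& opWrapper)
{
    op::WrapperStructGui guiConfig;                      // defaults: NoDisplay, no verbose, windowed
    guiConfig.displayMode = op::DisplayMode::Display2D;  // assumed enum value: 2-D view in the GUI
    guiConfig.guiVerbose = true;                         // overlay frame number, people count, etc.
    guiConfig.fullScreen = false;                        // can also be toggled from the GUI itself
    opWrapper.configure(guiConfig);
}
```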

    + +
    +
    +

    Member Data Documentation

    + +

    ◆ displayMode

    + +
    +
    + + + + +
    DisplayMode op::WrapperStructGui::displayMode
    +
    +

Display mode: a) -1 for automatic selection; b) 0 for no display (useful if there is no X server and/or to slightly speed up the processing if visual output is not required); c) 2 for 2-D display in the OpenPose small integrated GUI; d) 3 for 3-D display, if --3d was enabled; e) 1 for both 2-D and 3-D display.

    + +

    Definition at line 23 of file wrapperStructGui.hpp.

    + +
    +
    + +

    ◆ fullScreen

    + +
    +
    + + + + +
    bool op::WrapperStructGui::fullScreen
    +
    +

    Whether to display the OpenPose small integrated GUI on fullscreen mode. It can be changed by interacting with the GUI itself.

    + +

    Definition at line 35 of file wrapperStructGui.hpp.

    + +
    +
    + +

    ◆ guiVerbose

    + +
    +
    + + + + +
    bool op::WrapperStructGui::guiVerbose
    +
    +

    Whether to add some information to the frame (number of frame, number people detected, etc.) after it is saved on disk and before it is displayed and/or returned to the user.

    + +

    Definition at line 29 of file wrapperStructGui.hpp.

    + +
    +
    +
    The documentation for this struct was generated from the following file: +
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_gui.js b/web/html/doc/structop_1_1_wrapper_struct_gui.js new file mode 100644 index 000000000..3b29426a2 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_gui.js @@ -0,0 +1,7 @@ +var structop_1_1_wrapper_struct_gui = +[ + [ "WrapperStructGui", "structop_1_1_wrapper_struct_gui.html#a41638659ae2237d4ebfac635f4cc7842", null ], + [ "displayMode", "structop_1_1_wrapper_struct_gui.html#a2a979a7daa368cc847ae6d9a168ff556", null ], + [ "fullScreen", "structop_1_1_wrapper_struct_gui.html#ac1d393d3ce6be9304017c1aa3afd8f13", null ], + [ "guiVerbose", "structop_1_1_wrapper_struct_gui.html#a9dbb0bfce2593b0a560ed738e11708ce", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_wrapper_struct_hand-members.html b/web/html/doc/structop_1_1_wrapper_struct_hand-members.html new file mode 100644 index 000000000..2c99a6d68 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_hand-members.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    op::WrapperStructHand Member List
    +
    +
    + +

    This is the complete list of members for op::WrapperStructHand, including all inherited members.

    + + + + + + + + + + + +
    alphaHeatMapop::WrapperStructHand
    alphaKeypointop::WrapperStructHand
    detectorop::WrapperStructHand
    enableop::WrapperStructHand
    netInputSizeop::WrapperStructHand
    renderModeop::WrapperStructHand
    renderThresholdop::WrapperStructHand
    scaleRangeop::WrapperStructHand
    scalesNumberop::WrapperStructHand
    WrapperStructHand(const bool enable=false, const Detector detector=Detector::Body, const Point< int > &netInputSize=Point< int >{368, 368}, const int scalesNumber=1, const float scaleRange=0.4f, const RenderMode renderMode=RenderMode::Auto, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold=0.2f)op::WrapperStructHand
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_hand.html b/web/html/doc/structop_1_1_wrapper_struct_hand.html new file mode 100644 index 000000000..87295dc71 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_hand.html @@ -0,0 +1,367 @@ + + + + + + + +OpenPose: op::WrapperStructHand Struct Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    op::WrapperStructHand Struct Reference
    +
    +
    + +

    #include <wrapperStructHand.hpp>

    + + + + +

    +Public Member Functions

     WrapperStructHand (const bool enable=false, const Detector detector=Detector::Body, const Point< int > &netInputSize=Point< int >{368, 368}, const int scalesNumber=1, const float scaleRange=0.4f, const RenderMode renderMode=RenderMode::Auto, const float alphaKeypoint=HAND_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=HAND_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold=0.2f)
     
    + + + + + + + + + + + + + + + + + + + +

    +Public Attributes

    bool enable
     
    Detector detector
     
    Point< int > netInputSize
     
    int scalesNumber
     
    float scaleRange
     
    RenderMode renderMode
     
    float alphaKeypoint
     
    float alphaHeatMap
     
    float renderThreshold
     
    +

    Detailed Description

    +

    WrapperStructHand: Hand estimation and rendering configuration struct. WrapperStructHand allows the user to set up the hand estimation and rendering parameters that will be used for the OpenPose WrapperT template and Wrapper class.

    + +

    Definition at line 16 of file wrapperStructHand.hpp.

    +

    Constructor & Destructor Documentation

    + +

    ◆ WrapperStructHand()

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    op::WrapperStructHand::WrapperStructHand (const bool enable = false,
    const Detector detector = Detector::Body,
    const Point< int > & netInputSize = Point< int >{368, 368},
    const int scalesNumber = 1,
    const float scaleRange = 0.4f,
    const RenderMode renderMode = RenderMode::Auto,
    const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT,
    const float alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP,
    const float renderThreshold = 0.2f 
    )
    +
    +

    Constructor of the struct. It has the recommended and default values we recommend for each element of the struct. Since all the elements of the struct are public, they can also be manually filled.
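Analogously to the other wrapper structs, the members can be filled directly and passed to op::Wrapper::configure(). The sketch below is illustrative only; it uses the Detector::BodyWithTracking option described under the detector member below, and values chosen purely as an example.

```cpp
#include <openpose/headers.hpp>

// Illustrative helper (not an OpenPose function): enable hand keypoints with
// 2 scales and the tracking-assisted body detector.
void configureHand(op::Wrapper& opWrapper)
{
    op::WrapperStructHand handConfig;                      // constructor sets the recommended defaults
    handConfig.enable = true;                              // extract hand keypoints
    handConfig.detector = op::Detector::BodyWithTracking;  // may help for webcam/video (> 7 FPS per GPU)
    handConfig.netInputSize = op::Point<int>{368, 368};    // width and height must be divisible by 16
    handConfig.scalesNumber = 2;                           // with scaleRange = 0.4f -> scales 0.8 and 1.2
    handConfig.scaleRange = 0.4f;
    opWrapper.configure(handConfig);
}
```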

    + +
    +
    +

    Member Data Documentation

    + +

    ◆ alphaHeatMap

    + +
    +
    + + + + +
    float op::WrapperStructHand::alphaHeatMap
    +
    +

    Rendering blending alpha value of the heat maps (hand part, background or PAF) with respect to the background image. Value in the range [0, 1]. 0 will only render the background, 1 will only render the heat map.

    + +

    Definition at line 72 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ alphaKeypoint

    + +
    +
    + + + + +
    float op::WrapperStructHand::alphaKeypoint
    +
    +

    Rendering blending alpha value of the pose point locations with respect to the background image. Value in the range [0, 1]. 0 will only render the background, 1 will fully render the pose.

    + +

    Definition at line 65 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ detector

    + +
    +
    + + + + +
    Detector op::WrapperStructHand::detector
    +
    +

Kind of hand rectangle detector. Detector::Body is recommended (the fastest option when body keypoints are enabled, and the most accurate one); it is based on the OpenPose body keypoint detector. For hands, there is also the alternative Detector::BodyWithTracking, which adds tracking between frames. Hand tracking might improve hand keypoint detection for webcam (if the frame rate is high enough, i.e., > 7 FPS per GPU) and video input. This is not person ID tracking: it simply looks for hands in the positions where hands were located in previous frames, and it does not guarantee the same person ID across frames.

    + +

    Definition at line 32 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ enable

    + +
    +
    + + + + +
    bool op::WrapperStructHand::enable
    +
    +

    Whether to extract hand.

    + +

    Definition at line 21 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ netInputSize

    + +
    +
    + + + + +
    Point<int> op::WrapperStructHand::netInputSize
    +
    +

CNN (Conv Net) input size. The larger the size, the slower the processing and the more memory needed, but it potentially increases accuracy. Both width and height must be divisible by 16.

    + +

    Definition at line 39 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ renderMode

    + +
    +
    + + + + +
    RenderMode op::WrapperStructHand::renderMode
    +
    +

Whether to render the output (pose locations, body, background or PAF heat maps) with CPU or GPU. Select None for no rendering, or Cpu/Gpu for CPU and GPU rendering, respectively.

    + +

    Definition at line 59 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ renderThreshold

    + +
    +
    + + + + +
    float op::WrapperStructHand::renderThreshold
    +
    +

Rendering threshold. Only estimated keypoints whose score confidences are higher than this value will be rendered. Note: rendering refers only to the visual display in the OpenPose basic GUI, not to the saved results. Generally, a high threshold (> 0.5) will only render very clear body parts, while a low threshold (~0.1) will also render guessed and occluded keypoints, at the cost of more false positives (i.e., wrong detections).

    + +

    Definition at line 81 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ scaleRange

    + +
    +
    + + + + +
    float op::WrapperStructHand::scaleRange
    +
    +

    Total range between smallest and biggest scale. The scales will be centered in ratio 1. E.g., if scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.

    + +

    Definition at line 53 of file wrapperStructHand.hpp.

    + +
    +
    + +

    ◆ scalesNumber

    + +
    +
    + + + + +
    int op::WrapperStructHand::scalesNumber
    +
    +

Number of scales to process. The greater the number, the slower the processing and the more memory needed, but it potentially increases accuracy. This parameter is related to scaleRange, such that the final pose estimation will be an average of the predicted results for each scale.

    + +

    Definition at line 47 of file wrapperStructHand.hpp.
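To make the interaction between scalesNumber and scaleRange concrete, here is a small stand-alone helper (purely hypothetical, not part of OpenPose) that reproduces the spacing described above: the scales are centered on ratio 1 and span scaleRange in total.

```cpp
#include <vector>

// Hypothetical helper, not part of OpenPose: lists the scales implied by
// scalesNumber and scaleRange as documented above (centered on ratio 1).
std::vector<float> handScales(const int scalesNumber, const float scaleRange)
{
    std::vector<float> scales;
    for (int i = 0; i < scalesNumber; ++i)
    {
        // E.g., scalesNumber = 2, scaleRange = 0.4f yields {0.8f, 1.2f}.
        const float offset = (scalesNumber > 1)
            ? scaleRange * (float(i) / float(scalesNumber - 1) - 0.5f)
            : 0.f;
        scales.push_back(1.f + offset);
    }
    return scales;
}
```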

    + +
    +
    +
    The documentation for this struct was generated from the following file: +
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_hand.js b/web/html/doc/structop_1_1_wrapper_struct_hand.js new file mode 100644 index 000000000..77d4d1919 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_hand.js @@ -0,0 +1,13 @@ +var structop_1_1_wrapper_struct_hand = +[ + [ "WrapperStructHand", "structop_1_1_wrapper_struct_hand.html#a223b29ce9a234c3fb8a7864cfe2919fc", null ], + [ "alphaHeatMap", "structop_1_1_wrapper_struct_hand.html#a716f9c98cbee1a4a70d5978875795c4d", null ], + [ "alphaKeypoint", "structop_1_1_wrapper_struct_hand.html#a8074cf22f8926d7f4d1d60cacae99c3e", null ], + [ "detector", "structop_1_1_wrapper_struct_hand.html#a2759e92ee811d7a8eb69e1b7eba29d08", null ], + [ "enable", "structop_1_1_wrapper_struct_hand.html#a667ad7eed9f4f96b460f331d25f3d87f", null ], + [ "netInputSize", "structop_1_1_wrapper_struct_hand.html#a6a54d5b5766d23412c87bd10c26cb291", null ], + [ "renderMode", "structop_1_1_wrapper_struct_hand.html#a90ddd24ee55b6aabd9a1728ccd91525e", null ], + [ "renderThreshold", "structop_1_1_wrapper_struct_hand.html#a9655c0dfa83eefde174d09e622482089", null ], + [ "scaleRange", "structop_1_1_wrapper_struct_hand.html#adaea15e182e5c75129293873cd94e35f", null ], + [ "scalesNumber", "structop_1_1_wrapper_struct_hand.html#aea6263dc96708b11fab72416d810b3d6", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_wrapper_struct_input-members.html b/web/html/doc/structop_1_1_wrapper_struct_input-members.html new file mode 100644 index 000000000..bdb9f081d --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_input-members.html @@ -0,0 +1,117 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    op::WrapperStructInput Member List
    +
    +
    + +

    This is the complete list of members for op::WrapperStructInput, including all inherited members.

    + + + + + + + + + + + + + + + +
    cameraParameterPathop::WrapperStructInput
    cameraResolutionop::WrapperStructInput
    frameFirstop::WrapperStructInput
    frameFlipop::WrapperStructInput
    frameLastop::WrapperStructInput
    frameRotateop::WrapperStructInput
    framesRepeatop::WrapperStructInput
    frameStepop::WrapperStructInput
    numberViewsop::WrapperStructInput
    producerStringop::WrapperStructInput
    producerTypeop::WrapperStructInput
    realTimeProcessingop::WrapperStructInput
    undistortImageop::WrapperStructInput
    WrapperStructInput(const ProducerType producerType=ProducerType::None, const String &producerString="", const unsigned long long frameFirst=0, const unsigned long long frameStep=1, const unsigned long long frameLast=std::numeric_limits< unsigned long long >::max(), const bool realTimeProcessing=false, const bool frameFlip=false, const int frameRotate=0, const bool framesRepeat=false, const Point< int > &cameraResolution=Point< int >{-1,-1}, const String &cameraParameterPath="models/cameraParameters/", const bool undistortImage=false, const int numberViews=-1)op::WrapperStructInput
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_input.html b/web/html/doc/structop_1_1_wrapper_struct_input.html new file mode 100644 index 000000000..16a147684 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_input.html @@ -0,0 +1,467 @@ + + + + + + + +OpenPose: op::WrapperStructInput Struct Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    op::WrapperStructInput Struct Reference
    +
    +
    + +

    #include <wrapperStructInput.hpp>

    + + + + +

    +Public Member Functions

     WrapperStructInput (const ProducerType producerType=ProducerType::None, const String &producerString="", const unsigned long long frameFirst=0, const unsigned long long frameStep=1, const unsigned long long frameLast=std::numeric_limits< unsigned long long >::max(), const bool realTimeProcessing=false, const bool frameFlip=false, const int frameRotate=0, const bool framesRepeat=false, const Point< int > &cameraResolution=Point< int >{-1,-1}, const String &cameraParameterPath="models/cameraParameters/", const bool undistortImage=false, const int numberViews=-1)
     
    + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    +Public Attributes

    ProducerType producerType
     
    String producerString
     
    unsigned long long frameFirst
     
    unsigned long long frameStep
     
    unsigned long long frameLast
     
    bool realTimeProcessing
     
    bool frameFlip
     
    int frameRotate
     
    bool framesRepeat
     
    Point< int > cameraResolution
     
    String cameraParameterPath
     
    bool undistortImage
     
    int numberViews
     
    +

    Detailed Description

    +

    WrapperStructInput: Input (images, video, webcam, etc.) configuration struct. WrapperStructInput allows the user to set up the input frames generator.

    + +

    Definition at line 14 of file wrapperStructInput.hpp.

    +

    Constructor & Destructor Documentation

    + +

    ◆ WrapperStructInput()

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    op::WrapperStructInput::WrapperStructInput (const ProducerType producerType = ProducerType::None,
    const StringproducerString = "",
    const unsigned long long frameFirst = 0,
    const unsigned long long frameStep = 1,
    const unsigned long long frameLast = std::numeric_limits< unsigned long long >::max(),
    const bool realTimeProcessing = false,
    const bool frameFlip = false,
    const int frameRotate = 0,
    const bool framesRepeat = false,
    const Point< int > & cameraResolution = Point< int >{-1,-1},
    const StringcameraParameterPath = "models/cameraParameters/",
    const bool undistortImage = false,
    const int numberViews = -1 
    )
    +
    +

    Constructor of the struct. It has the recommended and default values we recommend for each element of the struct. Since all the elements of the struct are public, they can also be manually filled.
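A typical configuration fills only the members it needs and leaves the rest at their defaults, as in the sketch below (illustrative only; the video path is a placeholder, and the op::Wrapper::configure() pattern follows the OpenPose C++ API examples).

```cpp
#include <openpose/headers.hpp>

// Illustrative helper (not an OpenPose function): read a video file,
// processing every 2nd frame among the first 1000 frames.
void configureInput(op::Wrapper& opWrapper)
{
    op::WrapperStructInput inputConfig;                  // defaults: ProducerType::None, all frames
    inputConfig.producerType = op::ProducerType::Video;  // read frames from a video file
    inputConfig.producerString = op::String{"examples/media/video.avi"}; // placeholder path
    inputConfig.frameFirst = 0;
    inputConfig.frameStep = 2;                           // process frames 0, 2, 4, ...
    inputConfig.frameLast = 1000;
    opWrapper.configure(inputConfig);
}
```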

    + +
    +
    +

    Member Data Documentation

    + +

    ◆ cameraParameterPath

    + +
    +
    + + + + +
    String op::WrapperStructInput::cameraParameterPath
    +
    +

    Directory path for the camera parameters (intrinsic and extrinsic parameters) or optionally XML file full path (if only 1 view).

    + +

    Definition at line 78 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ cameraResolution

    + +
    +
    + + + + +
    Point<int> op::WrapperStructInput::cameraResolution
    +
    +

    Camera resolution (only for Webcam and FlirCamera).

    + +

    Definition at line 72 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ frameFirst

    + +
    +
    + + + + +
    unsigned long long op::WrapperStructInput::frameFirst
    +
    +

    First image to process. Default: 0.

    + +

    Definition at line 33 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ frameFlip

    + +
    +
    + + + + +
    bool op::WrapperStructInput::frameFlip
    +
    +

    Whether to flip (mirror) the image.

    + +

    Definition at line 56 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ frameLast

    + +
    +
    + + + + +
    unsigned long long op::WrapperStructInput::frameLast
    +
    +

    Last image to process. Default: -1 (i.e., process all frames).

    + +

    Definition at line 46 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ frameRotate

    + +
    +
    + + + + +
    int op::WrapperStructInput::frameRotate
    +
    +

Image rotation. Only 4 possible values: 0 (default, no rotation), 90, 180 or 270 degrees.

    + +

    Definition at line 62 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ framesRepeat

    + +
    +
    + + + + +
    bool op::WrapperStructInput::framesRepeat
    +
    +

    Whether to re-open the producer if it reaches the end (e.g., video or image directory after the last frame).

    + +

    Definition at line 67 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ frameStep

    + +
    +
    + + + + +
    unsigned long long op::WrapperStructInput::frameStep
    +
    +

    Step or gap across processed frames. Default: 1 (i.e., process all frames). Example: A value of 5 would mean to process frames 0, 5, 10, etc.

    + +

    Definition at line 40 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ numberViews

    + +
    +
    + + + + +
    int op::WrapperStructInput::numberViews
    +
    +

Number of camera views. Complementary option for --image_dir or --video. It is -1 for Flir cameras (the number of cameras is detected at runtime), as well as for any other frame source (-1 and 1 are equivalent for those). It can be greater than 1 only for prerecorded sources, such as video and image directory.

    + +

    Definition at line 92 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ producerString

    + +
    +
    + + + + +
    String op::WrapperStructInput::producerString
    +
    +

    Path of the producer (image directory path for ImageDirectory, video path for Video, camera index for Webcam and FlirCamera, URL for IPCamera, etc.). Default: "".

    + +

    Definition at line 27 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ producerType

    + +
    +
    + + + + +
    ProducerType op::WrapperStructInput::producerType
    +
    +

    Desired type of producer (FlirCamera, ImageDirectory, IPCamera, Video, Webcam, None, etc.). Default: ProducerType::None.

    + +

    Definition at line 20 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ realTimeProcessing

    + +
    +
    + + + + +
    bool op::WrapperStructInput::realTimeProcessing
    +
    +

    Whether to skip or sleep in order to keep the same FPS as the frames producer.

    + +

    Definition at line 51 of file wrapperStructInput.hpp.

    + +
    +
    + +

    ◆ undistortImage

    + +
    +
    + + + + +
    bool op::WrapperStructInput::undistortImage
    +
    +

    Whether to undistort the image given the camera parameters.

    + +

    Definition at line 83 of file wrapperStructInput.hpp.

    + +
    +
    +
    The documentation for this struct was generated from the following file: +
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_input.js b/web/html/doc/structop_1_1_wrapper_struct_input.js new file mode 100644 index 000000000..f57822090 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_input.js @@ -0,0 +1,17 @@ +var structop_1_1_wrapper_struct_input = +[ + [ "WrapperStructInput", "structop_1_1_wrapper_struct_input.html#a2ee8db5c1fbade720719bb1464e59175", null ], + [ "cameraParameterPath", "structop_1_1_wrapper_struct_input.html#a4c77c6257dec58ac0a5e18cfe5b38a26", null ], + [ "cameraResolution", "structop_1_1_wrapper_struct_input.html#ae2078c540324a9cdc8500dce5d361bee", null ], + [ "frameFirst", "structop_1_1_wrapper_struct_input.html#acc72b8efe09ec3888823ed5680a19fe4", null ], + [ "frameFlip", "structop_1_1_wrapper_struct_input.html#a5ee9722814fe2b5a695511cabd12b613", null ], + [ "frameLast", "structop_1_1_wrapper_struct_input.html#a5cffb282052bdd812217e54f0b2ec7d5", null ], + [ "frameRotate", "structop_1_1_wrapper_struct_input.html#a86df98e50b680b30afe100d8b2b50685", null ], + [ "framesRepeat", "structop_1_1_wrapper_struct_input.html#a7c80f6a3687696ba30d3ce0902ac162f", null ], + [ "frameStep", "structop_1_1_wrapper_struct_input.html#ac4349e123d359f436cc01d4068231dc2", null ], + [ "numberViews", "structop_1_1_wrapper_struct_input.html#adac2c3e58e1e75a96e52904762c37c42", null ], + [ "producerString", "structop_1_1_wrapper_struct_input.html#a6aec09a94fdf393d6ab3b23857c376da", null ], + [ "producerType", "structop_1_1_wrapper_struct_input.html#acd6a460d6c0a64bc818539b67fcafea7", null ], + [ "realTimeProcessing", "structop_1_1_wrapper_struct_input.html#a2eeea9ee711a1dcbec99c3dc871fbc47", null ], + [ "undistortImage", "structop_1_1_wrapper_struct_input.html#ae7183e10862dbdbed422f042f1a71ed1", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_wrapper_struct_output-members.html b/web/html/doc/structop_1_1_wrapper_struct_output-members.html new file mode 100644 index 000000000..4847158c1 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_output-members.html @@ -0,0 +1,123 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    op::WrapperStructOutput Member List
    +
    +
    + +

    This is the complete list of members for op::WrapperStructOutput, including all inherited members.

    + + + + + + + + + + + + + + + + + + + + + +
    udpHostop::WrapperStructOutput
    udpPortop::WrapperStructOutput
    verboseop::WrapperStructOutput
    WrapperStructOutput(const double verbose=-1, const String &writeKeypoint="", const DataFormat writeKeypointFormat=DataFormat::Xml, const String &writeJson="", const String &writeCocoJson="", const int writeCocoJsonVariants=1, const int writeCocoJsonVariant=1, const String &writeImages="", const String &writeImagesFormat="png", const String &writeVideo="", const double writeVideoFps=-1., const bool writeVideoWithAudio=false, const String &writeHeatMaps="", const String &writeHeatMapsFormat="png", const String &writeVideo3D="", const String &writeVideoAdam="", const String &writeBvh="", const String &udpHost="", const String &udpPort="8051")op::WrapperStructOutput
    writeBvhop::WrapperStructOutput
    writeCocoJsonop::WrapperStructOutput
    writeCocoJsonVariantop::WrapperStructOutput
    writeCocoJsonVariantsop::WrapperStructOutput
    writeHeatMapsop::WrapperStructOutput
    writeHeatMapsFormatop::WrapperStructOutput
    writeImagesop::WrapperStructOutput
    writeImagesFormatop::WrapperStructOutput
    writeJsonop::WrapperStructOutput
    writeKeypointop::WrapperStructOutput
    writeKeypointFormatop::WrapperStructOutput
    writeVideoop::WrapperStructOutput
    writeVideo3Dop::WrapperStructOutput
    writeVideoAdamop::WrapperStructOutput
    writeVideoFpsop::WrapperStructOutput
    writeVideoWithAudioop::WrapperStructOutput
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_output.html b/web/html/doc/structop_1_1_wrapper_struct_output.html new file mode 100644 index 000000000..4e33376dc --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_output.html @@ -0,0 +1,621 @@ + + + + + + + +OpenPose: op::WrapperStructOutput Struct Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    op::WrapperStructOutput Struct Reference
    +
    +
    + +

    #include <wrapperStructOutput.hpp>

    + + + + +

    +Public Member Functions

     WrapperStructOutput (const double verbose=-1, const String &writeKeypoint="", const DataFormat writeKeypointFormat=DataFormat::Xml, const String &writeJson="", const String &writeCocoJson="", const int writeCocoJsonVariants=1, const int writeCocoJsonVariant=1, const String &writeImages="", const String &writeImagesFormat="png", const String &writeVideo="", const double writeVideoFps=-1., const bool writeVideoWithAudio=false, const String &writeHeatMaps="", const String &writeHeatMapsFormat="png", const String &writeVideo3D="", const String &writeVideoAdam="", const String &writeBvh="", const String &udpHost="", const String &udpPort="8051")
     
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    +Public Attributes

    double verbose
     
    String writeKeypoint
     
    DataFormat writeKeypointFormat
     
    String writeJson
     
    String writeCocoJson
     
    int writeCocoJsonVariants
     
    int writeCocoJsonVariant
     
    String writeImages
     
    String writeImagesFormat
     
    String writeVideo
     
    double writeVideoFps
     
    bool writeVideoWithAudio
     
    String writeHeatMaps
     
    String writeHeatMapsFormat
     
    String writeVideo3D
     
    String writeVideoAdam
     
    String writeBvh
     
    String udpHost
     
    String udpPort
     
    +

    Detailed Description

    +

WrapperStructOutput: Output (writing rendered results and/or pose data, etc.) configuration struct.

    + +

    Definition at line 13 of file wrapperStructOutput.hpp.

    +

    Constructor & Destructor Documentation

    + +

    ◆ WrapperStructOutput()

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    op::WrapperStructOutput::WrapperStructOutput (const double verbose = -1,
    const StringwriteKeypoint = "",
    const DataFormat writeKeypointFormat = DataFormat::Xml,
    const StringwriteJson = "",
    const StringwriteCocoJson = "",
    const int writeCocoJsonVariants = 1,
    const int writeCocoJsonVariant = 1,
    const StringwriteImages = "",
    const StringwriteImagesFormat = "png",
    const StringwriteVideo = "",
    const double writeVideoFps = -1.,
    const bool writeVideoWithAudio = false,
    const StringwriteHeatMaps = "",
    const StringwriteHeatMapsFormat = "png",
    const StringwriteVideo3D = "",
    const StringwriteVideoAdam = "",
    const StringwriteBvh = "",
    const StringudpHost = "",
    const StringudpPort = "8051" 
    )
    +
    +

    Constructor of the struct. It has the recommended and default values we recommend for each element of the struct. Since all the elements of the struct are public, they can also be manually filled.
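In practice, most users only set the write* members they care about and pass the struct to op::Wrapper::configure(), as in the OpenPose C++ API examples. The sketch below is illustrative only; the output paths are placeholders.

```cpp
#include <openpose/headers.hpp>

// Illustrative helper (not an OpenPose function): save keypoints as JSON and
// the rendered frames as an *.avi video.
void configureOutput(op::Wrapper& opWrapper)
{
    op::WrapperStructOutput outputConfig;                       // defaults: everything disabled
    outputConfig.writeJson = op::String{"output/json/"};        // one JSON file per processed frame
    outputConfig.writeVideo = op::String{"output/result.avi"};  // *.avi format expected
    outputConfig.writeVideoFps = 30.;                           // set explicitly for producers without native FPS
    outputConfig.verbose = 100.;                                // progress message every 100 frames
    opWrapper.configure(outputConfig);
}
```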

    + +
    +
    +

    Member Data Documentation

    + +

    ◆ udpHost

    + +
    +
    + + + + +
    String op::WrapperStructOutput::udpHost
    +
    +

    Target server IP address for UDP client-server communication.

    + +

    Definition at line 138 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ udpPort

    + +
    +
    + + + + +
    String op::WrapperStructOutput::udpPort
    +
    +

    Target server IP port for UDP client-server communication.

    + +

    Definition at line 143 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ verbose

    + +
    +
    + + + + +
    double op::WrapperStructOutput::verbose
    +
    +

Output verbose in the command line. If -1, it will be disabled (default). If it is a positive integer number, it will print on the command line every `verbose` frames. If it is a number in the range (0,1), it will print the progress every `verbose` times the total number of frames.

    + +

    Definition at line 21 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeBvh

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeBvh
    +
    +

    Path to save a 3-D joint angle BVH file. Please, use *.bvh format. If it is empty (default), it is disabled.

    + +

    Definition at line 133 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeCocoJson

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeCocoJson
    +
    +

    Pose (x, y, score) locations saving folder location in JSON COCO validation format. If it is empty (default), it is disabled.

    + +

    Definition at line 51 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeCocoJsonVariant

    + +
    +
    + + + + +
    int op::WrapperStructOutput::writeCocoJsonVariant
    +
    +

Experimental option (it only takes effect on car JSON generation). It selects the COCO variant for cocoJsonSaver.

    + +

    Definition at line 64 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeCocoJsonVariants

    + +
    +
    + + + + +
    int op::WrapperStructOutput::writeCocoJsonVariants
    +
    +

It selects the COCO variants for cocoJsonSaver. Add 1 for body, 2 for foot, 4 for face, and/or 8 for hands. Use 0 to use all the possible candidates. E.g., 7 would mean body + foot + face COCO JSON.

    + +

    Definition at line 58 of file wrapperStructOutput.hpp.
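These bits simply add up (or can be combined with bitwise OR). Extending the illustrative configureOutput() sketch above, requesting body + foot + face COCO JSON could look like this (the path is a placeholder):

```cpp
// Illustrative snippet: writeCocoJsonVariants is a bit mask, so
// body (1) + foot (2) + face (4) == 7, matching the example above.
outputConfig.writeCocoJson = op::String{"output/coco_results.json"};  // required for the variants to matter
outputConfig.writeCocoJsonVariants = 1 | 2 | 4;                       // body + foot + face
```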

    + +
    +
    + +

    ◆ writeHeatMaps

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeHeatMaps
    +
    +

    Rendered heat maps saving folder. In order to save the heatmaps, WrapperStructPose.heatMapTypes must also be filled. If it is empty (default), it is disabled.

    + +

    Definition at line 106 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeHeatMapsFormat

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeHeatMapsFormat
    +
    +

    Heat maps image saving format. Analogous to writeImagesFormat.

    + +

    Definition at line 112 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeImages

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeImages
    +
    +

    Rendered image saving folder. If it is empty (default), it is disabled.

    + +

    Definition at line 70 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeImagesFormat

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeImagesFormat
    +
    +

Rendered image saving format. Check your OpenCV version documentation for a list of compatible formats, e.g., png, jpg, etc. If writeImages is empty (default), it has no effect.

    + +

    Definition at line 78 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeJson

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeJson
    +
    +

    Directory to write OpenPose output in JSON format. If it is empty (default), it is disabled. It includes:

• people field with body, hand, and face pose keypoints in (x, y, score) format.
• part_candidates field with body part candidates in (x, y, score) format (if enabled with --part_candidates).

    Definition at line 45 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeKeypoint

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeKeypoint
    +
    +

    Pose (x, y, score) locations saving folder location. If it is empty (default), it is disabled. Select format with writeKeypointFormat.

    + +

    Definition at line 28 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeKeypointFormat

    + +
    +
    + + + + +
    DataFormat op::WrapperStructOutput::writeKeypointFormat
    +
    +

Data format used to save the pose (x, y, score) locations. Options: DataFormat::Json (default), DataFormat::Xml, and DataFormat::Yml (equivalent to DataFormat::Yaml). The JSON option is only available for OpenCV >= 3.0.

    + +

    Definition at line 35 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeVideo

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeVideo
    +
    +

    Rendered images saving video path. Please, use *.avi format. If it is empty (default), it is disabled.

    + +

    Definition at line 85 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeVideo3D

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeVideo3D
    +
    +

    Rendered 3D images saving video path. Please, use *.avi format. If it is empty (default), it is disabled.

    + +

    Definition at line 119 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeVideoAdam

    + +
    +
    + + + + +
    String op::WrapperStructOutput::writeVideoAdam
    +
    +

    Rendered Adam images saving video path. Please, use *.avi format. If it is empty (default), it is disabled.

    + +

    Definition at line 126 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeVideoFps

    + +
    +
    + + + + +
    double op::WrapperStructOutput::writeVideoFps
    +
    +

    Frame rate of the recorded video. By default (-1.), it will try to get the input frames producer frame rate (e.g., input video or webcam frame rate). If the input frames producer does not have a set FPS (e.g., image_dir or webcam if OpenCV not compiled with its support), set this value accordingly (e.g., to the frame rate displayed by the OpenPose GUI).

    + +

    Definition at line 94 of file wrapperStructOutput.hpp.

    + +
    +
    + +

    ◆ writeVideoWithAudio

    + +
    +
    + + + + +
    bool op::WrapperStructOutput::writeVideoWithAudio
    +
    +

    Whether to save the output video with audio. The input producer must be a video too.

    + +

    Definition at line 99 of file wrapperStructOutput.hpp.

    + +
    +
    +
    The documentation for this struct was generated from the following file: +
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_output.js b/web/html/doc/structop_1_1_wrapper_struct_output.js new file mode 100644 index 000000000..f4f4fe39b --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_output.js @@ -0,0 +1,23 @@ +var structop_1_1_wrapper_struct_output = +[ + [ "WrapperStructOutput", "structop_1_1_wrapper_struct_output.html#ae8975341f779a89d68a125cbfb17d940", null ], + [ "udpHost", "structop_1_1_wrapper_struct_output.html#a17c339a9c6c97e1dfdb29b3af0bdab77", null ], + [ "udpPort", "structop_1_1_wrapper_struct_output.html#a873bcabae9cf7039830cae565009ce2b", null ], + [ "verbose", "structop_1_1_wrapper_struct_output.html#aec57f5b4f6920cd43c2f6e55a21c769b", null ], + [ "writeBvh", "structop_1_1_wrapper_struct_output.html#abfa84cf0ae76a1c07f9d33b98e9f6d84", null ], + [ "writeCocoJson", "structop_1_1_wrapper_struct_output.html#a8658c8313ac1d8ddb177d83de2e1bfe7", null ], + [ "writeCocoJsonVariant", "structop_1_1_wrapper_struct_output.html#a0119bb7429483928c587ffaf607919de", null ], + [ "writeCocoJsonVariants", "structop_1_1_wrapper_struct_output.html#aca7b610f478c36b643fcbd73931c9278", null ], + [ "writeHeatMaps", "structop_1_1_wrapper_struct_output.html#a29583f73263bdffe1903ea64a9c09463", null ], + [ "writeHeatMapsFormat", "structop_1_1_wrapper_struct_output.html#a70278a7418053ced9de2447cc78f4240", null ], + [ "writeImages", "structop_1_1_wrapper_struct_output.html#acb0e1a13713fd796c9452684bdb7cdaf", null ], + [ "writeImagesFormat", "structop_1_1_wrapper_struct_output.html#ae12454443c1c8b1c74f257eaac4927d3", null ], + [ "writeJson", "structop_1_1_wrapper_struct_output.html#a8899e8af7df7dad1215a09f61fc8198b", null ], + [ "writeKeypoint", "structop_1_1_wrapper_struct_output.html#a183afe9fa45aa69a9d79b9434163ed3e", null ], + [ "writeKeypointFormat", "structop_1_1_wrapper_struct_output.html#ad338fd4719d6f243bb64bc67f68bc7c9", null ], + [ "writeVideo", "structop_1_1_wrapper_struct_output.html#a49d8f54f546bfe6a6c644280c5e50458", null ], + [ "writeVideo3D", "structop_1_1_wrapper_struct_output.html#ad996d177c4b84e2d38c105f637559094", null ], + [ "writeVideoAdam", "structop_1_1_wrapper_struct_output.html#a4b829e1d007943cba3f4a23be25bc74d", null ], + [ "writeVideoFps", "structop_1_1_wrapper_struct_output.html#ad595edffced2bfd80c3bee183f32f505", null ], + [ "writeVideoWithAudio", "structop_1_1_wrapper_struct_output.html#a3f6370fa1cb1f13922e36831c564588c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/structop_1_1_wrapper_struct_pose-members.html b/web/html/doc/structop_1_1_wrapper_struct_pose-members.html new file mode 100644 index 000000000..d4b6f763b --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_pose-members.html @@ -0,0 +1,131 @@ + + + + + + + +OpenPose: Member List + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    op::WrapperStructPose Member List
    +
    +
    + +

    This is the complete list of members for op::WrapperStructPose, including all inherited members.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    addPartCandidatesop::WrapperStructPose
    alphaHeatMapop::WrapperStructPose
    alphaKeypointop::WrapperStructPose
    blendOriginalFrameop::WrapperStructPose
    caffeModelPathop::WrapperStructPose
    defaultPartToRenderop::WrapperStructPose
    enableGoogleLoggingop::WrapperStructPose
    fpsMaxop::WrapperStructPose
    gpuNumberop::WrapperStructPose
    gpuNumberStartop::WrapperStructPose
    heatMapScaleModeop::WrapperStructPose
    heatMapTypesop::WrapperStructPose
    keypointScaleModeop::WrapperStructPose
    maximizePositivesop::WrapperStructPose
    modelFolderop::WrapperStructPose
    netInputSizeop::WrapperStructPose
    netInputSizeDynamicBehaviorop::WrapperStructPose
    numberPeopleMaxop::WrapperStructPose
    outputSizeop::WrapperStructPose
    poseModeop::WrapperStructPose
    poseModelop::WrapperStructPose
    protoTxtPathop::WrapperStructPose
    renderModeop::WrapperStructPose
    renderThresholdop::WrapperStructPose
    scaleGapop::WrapperStructPose
    scalesNumberop::WrapperStructPose
    upsamplingRatioop::WrapperStructPose
    WrapperStructPose(const PoseMode poseMode=PoseMode::Enabled, const Point< int > &netInputSize=Point< int >{-1, 368}, const double netInputSizeDynamicBehavior=1., const Point< int > &outputSize=Point< int >{-1, -1}, const ScaleMode keypointScaleMode=ScaleMode::InputResolution, const int gpuNumber=-1, const int gpuNumberStart=0, const int scalesNumber=1, const float scaleGap=0.25f, const RenderMode renderMode=RenderMode::Auto, const PoseModel poseModel=PoseModel::BODY_25, const bool blendOriginalFrame=true, const float alphaKeypoint=POSE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=POSE_DEFAULT_ALPHA_HEAT_MAP, const int defaultPartToRender=0, const String &modelFolder="models/", const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::UnsignedChar, const bool addPartCandidates=false, const float renderThreshold=0.05f, const int numberPeopleMax=-1, const bool maximizePositives=false, const double fpsMax=-1., const String &protoTxtPath="", const String &caffeModelPath="", const float upsamplingRatio=0.f, const bool enableGoogleLogging=true)op::WrapperStructPose
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_pose.html b/web/html/doc/structop_1_1_wrapper_struct_pose.html new file mode 100644 index 000000000..80a893e51 --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_pose.html @@ -0,0 +1,817 @@ + + + + + + + +OpenPose: op::WrapperStructPose Struct Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    op::WrapperStructPose Struct Reference
    +
    +
    + +

    #include <wrapperStructPose.hpp>

    + + + + +

    +Public Member Functions

     WrapperStructPose (const PoseMode poseMode=PoseMode::Enabled, const Point< int > &netInputSize=Point< int >{-1, 368}, const double netInputSizeDynamicBehavior=1., const Point< int > &outputSize=Point< int >{-1, -1}, const ScaleMode keypointScaleMode=ScaleMode::InputResolution, const int gpuNumber=-1, const int gpuNumberStart=0, const int scalesNumber=1, const float scaleGap=0.25f, const RenderMode renderMode=RenderMode::Auto, const PoseModel poseModel=PoseModel::BODY_25, const bool blendOriginalFrame=true, const float alphaKeypoint=POSE_DEFAULT_ALPHA_KEYPOINT, const float alphaHeatMap=POSE_DEFAULT_ALPHA_HEAT_MAP, const int defaultPartToRender=0, const String &modelFolder="models/", const std::vector< HeatMapType > &heatMapTypes={}, const ScaleMode heatMapScaleMode=ScaleMode::UnsignedChar, const bool addPartCandidates=false, const float renderThreshold=0.05f, const int numberPeopleMax=-1, const bool maximizePositives=false, const double fpsMax=-1., const String &protoTxtPath="", const String &caffeModelPath="", const float upsamplingRatio=0.f, const bool enableGoogleLogging=true)
     
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    +Public Attributes

    PoseMode poseMode
     
    Point< int > netInputSize
     
    double netInputSizeDynamicBehavior
     
    Point< int > outputSize
     
    ScaleMode keypointScaleMode
     
    int gpuNumber
     
    int gpuNumberStart
     
    int scalesNumber
     
    float scaleGap
     
    RenderMode renderMode
     
    PoseModel poseModel
     
    bool blendOriginalFrame
     
    float alphaKeypoint
     
    float alphaHeatMap
     
    int defaultPartToRender
     
    String modelFolder
     
    std::vector< HeatMapTypeheatMapTypes
     
    ScaleMode heatMapScaleMode
     
    bool addPartCandidates
     
    float renderThreshold
     
    int numberPeopleMax
     
    bool maximizePositives
     
    double fpsMax
     
    String protoTxtPath
     
    String caffeModelPath
     
    float upsamplingRatio
     
    bool enableGoogleLogging
     
    +

    Detailed Description

    +

    WrapperStructPose: Pose estimation and rendering configuration struct. WrapperStructPose allows the user to set up the pose estimation and rendering parameters that will be used for the OpenPose WrapperT template and Wrapper class.

    + +

    Definition at line 18 of file wrapperStructPose.hpp.

    +

    Constructor & Destructor Documentation

    + +

    ◆ WrapperStructPose()

    + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    op::WrapperStructPose::WrapperStructPose (const PoseMode poseMode = PoseMode::Enabled,
    const Point< int > & netInputSize = Point< int >{-1, 368},
    const double netInputSizeDynamicBehavior = 1.,
    const Point< int > & outputSize = Point< int >{-1, -1},
    const ScaleMode keypointScaleMode = ScaleMode::InputResolution,
    const int gpuNumber = -1,
    const int gpuNumberStart = 0,
    const int scalesNumber = 1,
    const float scaleGap = 0.25f,
    const RenderMode renderMode = RenderMode::Auto,
    const PoseModel poseModel = PoseModel::BODY_25,
    const bool blendOriginalFrame = true,
    const float alphaKeypoint = POSE_DEFAULT_ALPHA_KEYPOINT,
    const float alphaHeatMap = POSE_DEFAULT_ALPHA_HEAT_MAP,
    const int defaultPartToRender = 0,
    const StringmodelFolder = "models/",
    const std::vector< HeatMapType > & heatMapTypes = {},
    const ScaleMode heatMapScaleMode = ScaleMode::UnsignedChar,
    const bool addPartCandidates = false,
    const float renderThreshold = 0.05f,
    const int numberPeopleMax = -1,
    const bool maximizePositives = false,
    const double fpsMax = -1.,
    const StringprotoTxtPath = "",
    const StringcaffeModelPath = "",
    const float upsamplingRatio = 0.f,
    const bool enableGoogleLogging = true 
    )
    +
    +

    Constructor of the struct. It has the recommended and default values we recommend for each element of the struct. Since all the elements of the struct are public, they can also be manually filled.
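Given that all members are public, a pose configuration usually assigns a handful of them and passes the struct to op::Wrapper::configure(), as in the OpenPose C++ API examples. The sketch below is illustrative only; the values are example choices, not recommendations beyond those already documented.

```cpp
#include <openpose/headers.hpp>

// Illustrative helper (not an OpenPose function): body estimation with the
// BODY_25 model and at most 2 people per frame.
void configurePose(op::Wrapper& opWrapper)
{
    op::WrapperStructPose poseConfig;                   // constructor sets the recommended defaults
    poseConfig.poseMode = op::PoseMode::Enabled;
    poseConfig.poseModel = op::PoseModel::BODY_25;      // 25 body-part COCO + foot model
    poseConfig.netInputSize = op::Point<int>{-1, 368};  // -1 width: keep the input aspect ratio
    poseConfig.modelFolder = op::String{"models/"};     // folder containing the Caffe models
    poseConfig.numberPeopleMax = 2;                     // keep only the 2 top-scoring people
    poseConfig.renderThreshold = 0.05f;
    opWrapper.configure(poseConfig);
}
```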

    + +
    +
    +

    Member Data Documentation

    + +

    ◆ addPartCandidates

    + +
    +
    + + + + +
    bool op::WrapperStructPose::addPartCandidates
    +
    +

    Whether to add the body part candidates. Candidates refer to all the detected body parts, before being assembled into people.

    + +

    Definition at line 151 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ alphaHeatMap

    + +
    +
    + + + + +
    float op::WrapperStructPose::alphaHeatMap
    +
    +

    Rendering blending alpha value of the heat maps (body part, background or PAF) with respect to the background image. Value in the range [0, 1]. 0 will only render the background, 1 will only render the heat map.

    + +

    Definition at line 116 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ alphaKeypoint

    + +
    +
    + + + + +
    float op::WrapperStructPose::alphaKeypoint
    +
    +

    Rendering blending alpha value of the pose point locations with respect to the background image. Value in the range [0, 1]. 0 will only render the background, 1 will fully render the pose.

    + +

    Definition at line 109 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ blendOriginalFrame

    + +
    +
    + + + + +
    bool op::WrapperStructPose::blendOriginalFrame
    +
    +

    Whether to blend the final results on top of the original image, or just render them on a flat background.

    + +

    Definition at line 103 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ caffeModelPath

    + +
    +
    + + + + +
    String op::WrapperStructPose::caffeModelPath
    +
    +

    Final path where the pose Caffe CaffeModel is located. The combination modelFolder + caffeModelPath represents the whole path to the caffemodel file. If empty, it will use the default OpenPose CaffeModel file.

    + +

    Definition at line 199 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ defaultPartToRender

    + +
    +
    + + + + +
    int op::WrapperStructPose::defaultPartToRender
    +
    +

    Element to initially render. Set 0 for pose, [1, #body parts] for each body part following the order on POSE_BODY_PART_MAPPING on include/pose/poseParameters.hpp, #body parts+1 for background, #body parts+2 for all body parts overlapped, #body parts+3 for all PAFs, and [#body parts+4, #body parts+4+#pair pairs] for each PAF following the order on POSE_BODY_PART_PAIRS.

    + +

    Definition at line 125 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ enableGoogleLogging

    + +
    +
    + + + + +
    bool op::WrapperStructPose::enableGoogleLogging
    +
    +

Whether to internally enable Google Logging. This option is only applicable if Caffe is used. Only disable it if the user is already calling google::InitGoogleLogging() in their own code. If the user disables Google Logging but does not call it themselves, Caffe will start to pop up all its verbose messages.

    + +

    Definition at line 214 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ fpsMax

    + +
    +
    + + + + +
    double op::WrapperStructPose::fpsMax
    +
    +

Maximum processing frame rate. By default (-1), OpenPose will process frames as fast as possible. Example usage: if OpenPose is displaying images too quickly, this can reduce the speed so the user can better analyze each frame from the GUI.

    + +

    Definition at line 185 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ gpuNumber

    + +
    +
    + + + + +
    int op::WrapperStructPose::gpuNumber
    +
    +

Number of GPUs processing in parallel. The greater the number, the faster the algorithm will run, but potentially with higher lag (which only matters in real-time webcam scenarios).

    + +

    Definition at line 64 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ gpuNumberStart

    + +
    +
    + + + + +
    int op::WrapperStructPose::gpuNumberStart
    +
    +

First GPU device, i.e., the GPUs used will be the ones in the range [gpuNumberStart, gpuNumberStart + gpuNumber].

    + +

    Definition at line 70 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ heatMapScaleMode

    + +
    +
    + + + + +
    ScaleMode op::WrapperStructPose::heatMapScaleMode
    +
    +

Scale of the Datum.heatmaps. Select ScaleMode::ZeroToOne(FixedAspect) for the range [0,1], ScaleMode::PlusMinusOne(FixedAspect) for [-1,1], and ScaleMode::UnsignedChar for [0, 255]. If heatMapTypes.empty(), then this parameter has no effect.

    + +

    Definition at line 145 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ heatMapTypes

    + +
    +
    + + + + +
    std::vector<HeatMapType> op::WrapperStructPose::heatMapTypes
    +
    +

    Whether and which heat maps to save on the Array<float> Datum.heatmaps. Use HeatMapType::Parts for body parts, HeatMapType::Background for the background, and HeatMapType::PAFs for the Part Affinity Fields.

    + +

    Definition at line 137 of file wrapperStructPose.hpp.
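For instance, extending the illustrative configurePose() sketch above, the heat maps documented here could be requested together with the [0,1] scaling described under heatMapScaleMode (snippet is illustrative only):

```cpp
// Illustrative snippet: request part, background and PAF heat maps,
// scaled to the [0, 1] range, so they appear in Datum.heatMaps.
poseConfig.heatMapTypes = {op::HeatMapType::Parts,
                           op::HeatMapType::Background,
                           op::HeatMapType::PAFs};
poseConfig.heatMapScaleMode = op::ScaleMode::ZeroToOne;  // values in [0, 1]
```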

    + +
    +
    + +

    ◆ keypointScaleMode

    + +
    +
    + + + + +
    ScaleMode op::WrapperStructPose::keypointScaleMode
    +
    +

    Final scale of the Array<float> Datum.poseKeypoints and the written pose data. The final Datum.poseKeypoints can be scaled with respect to input size (ScaleMode::InputResolution), net output size (ScaleMode::NetOutputResolution), output rendering size (ScaleMode::OutputResolution), from 0 to 1 (ScaleMode::ZeroToOne(FixedAspect)), and -1 to 1 (ScaleMode::PlusMinusOne(FixedAspect)).

    + +

    Definition at line 57 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ maximizePositives

    + +
    +
    + + + + +
    bool op::WrapperStructPose::maximizePositives
    +
    +

    Whether to maximize the number of positives. It reduces the thresholds to accept a person candidate. It highly increases both false and true positives. I.e., it maximizes average recall but could harm average precision.

    + +

    Definition at line 177 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ modelFolder

    + +
    +
    + + + + +
    String op::WrapperStructPose::modelFolder
    +
    +

    Folder where the pose Caffe models are located.

    + +

    Definition at line 130 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ netInputSize

    + +
    +
    + + + + +
    Point<int> op::WrapperStructPose::netInputSize
    +
    +

CNN (Conv Net) input size. The larger the size, the slower the processing and the more memory needed, but it potentially increases accuracy. Both width and height must be divisible by 16.

    + +

    Definition at line 32 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ netInputSizeDynamicBehavior

    + +
    +
    + + + + +
    double op::WrapperStructPose::netInputSizeDynamicBehavior
    +
    +

    Zero or negative means that using -1 in netInputSize will behave as explained in its flag description. Otherwise, and to avoid out of memory errors, the -1 in netInputSize will clip to this value times the default 16/9 aspect ratio value (i.e., 656 width for a 368 height). E.g., netInputSizeDynamicBehavior = 10 and netInputSize = {-1x368} will clip to 6560x368 (10 x 656). Recommended 1 for small GPUs (to avoid out of memory errors but maximize speed) and 0 for big GPUs (for maximum accuracy and speed).

    + +

    Definition at line 41 of file wrapperStructPose.hpp.
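    A sketch of the dynamic-width behavior described above, continuing the wrapperStructPose instance from the earlier sketches (the {-1x368} flag notation is assumed to correspond to op::Point<int>{-1, 368}):

        // Sketch: width chosen per image for a fixed height of 368, capped to avoid out-of-memory errors.
        wrapperStructPose.netInputSize = op::Point<int>{-1, 368};
        wrapperStructPose.netInputSizeDynamicBehavior = 1.; // clip the dynamic width to ~656 (1 x 656); use 0 on big GPUs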

    + +
    +
    + +

    ◆ numberPeopleMax

    + +
    +
    + + + + +
    int op::WrapperStructPose::numberPeopleMax
    +
    +

    Maximum number of people to be detected. This parameter limits the maximum number of people detected by keeping only the numberPeopleMax people with the top scores. Useful if you know the exact number of people in the scene, so it can remove false positives (assuming all the real people have been detected). However, it might also introduce false negatives by removing very small or highly occluded people.

    + +

    Definition at line 170 of file wrapperStructPose.hpp.
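    For instance, a sketch for a scene known to contain a single person, continuing the wrapperStructPose instance above (maximizePositives is documented earlier on this page):

        // Sketch: keep only the highest-scoring person candidate and the default acceptance thresholds.
        wrapperStructPose.numberPeopleMax = 1;
        wrapperStructPose.maximizePositives = false;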

    + +
    +
    + +

    ◆ outputSize

    + +
    +
    + + + + +
    Point<int> op::WrapperStructPose::outputSize
    +
    +

    Output size of the final rendered image. It barely affects performance compared to netInputSize. The final Datum.poseKeypoints can be scaled with respect to outputSize if keypointScaleMode is set to ScaleMode::OutputResolution, even if the rendering is disabled.

    + +

    Definition at line 49 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ poseMode

    + +
    +
    + + + + +
    PoseMode op::WrapperStructPose::poseMode
    +
    +

    Whether to extract the body. It might optionally be disabled in a very few cases (e.g., if only face keypoint detection is desired, trading accuracy for speed). Otherwise, it must always be enabled.

    + +

    Definition at line 25 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ poseModel

    + +
    +
    + + + + +
    PoseModel op::WrapperStructPose::poseModel
    +
    +

    Pose model. It affects the number of body parts to render. Select PoseModel::BODY_25 for the 25 body-part COCO + foot model; PoseModel::COCO_18 for the 18 body-part COCO model; PoseModel::MPI_15 for the 15 body-part MPI model; PoseModel::MPI_15_4 for a faster version of MPI; etc.

    + +

    Definition at line 98 of file wrapperStructPose.hpp.
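    A sketch selecting body extraction with the BODY_25 model, continuing the wrapperStructPose instance above (PoseMode::Enabled is assumed to be the enumerator for regular body extraction):

        // Sketch: enable body keypoint extraction with the 25 body-part COCO + foot model.
        wrapperStructPose.poseMode = op::PoseMode::Enabled;
        wrapperStructPose.poseModel = op::PoseModel::BODY_25;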

    + +
    +
    + +

    ◆ protoTxtPath

    + +
    +
    + + + + +
    String op::WrapperStructPose::protoTxtPath
    +
    +

    Final path where the pose Caffe ProtoTxt file is located. The combination modelFolder + protoTxtPath represents the whole path to the prototxt file. If empty, it will use the default OpenPose ProtoTxt file.

    + +

    Definition at line 192 of file wrapperStructPose.hpp.
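    As a sketch with placeholder paths, pointing OpenPose at a custom ProtoTxt file (op::String is assumed to be constructible from a string literal; modelFolder is documented earlier on this page):

        // Sketch: modelFolder + protoTxtPath form the full path to the prototxt file.
        wrapperStructPose.modelFolder = op::String("models/");
        wrapperStructPose.protoTxtPath = op::String("pose/body_25/custom_deploy.prototxt"); // placeholder name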

    + +
    +
    + +

    ◆ renderMode

    + +
    +
    + + + + +
    RenderMode op::WrapperStructPose::renderMode
    +
    +

    Whether to render the output (pose locations, body, background, or PAF heat maps) with the CPU or GPU. Select None for no rendering, or Cpu / Gpu for CPU and GPU rendering respectively.

    + +

    Definition at line 91 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ renderThreshold

    + +
    +
    + + + + +
    float op::WrapperStructPose::renderThreshold
    +
    +

    Rendering threshold. Only estimated keypoints whose score confidences are higher than this value will be rendered. Note: rendered refers only to the visual display in the OpenPose basic GUI, not to the saved results. Generally, a high threshold (> 0.5) will only render very clear body parts, while small thresholds (~0.1) will also output guessed and occluded keypoints, but with more false positives (i.e., wrong detections).

    + +

    Definition at line 160 of file wrapperStructPose.hpp.
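    A sketch combining the two rendering parameters above, continuing the wrapperStructPose instance from the earlier sketches:

        // Sketch: GPU rendering, drawing only keypoints whose confidence exceeds 0.1.
        wrapperStructPose.renderMode = op::RenderMode::Gpu;
        wrapperStructPose.renderThreshold = 0.1f; // lower values also render guessed/occluded keypoints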

    + +
    +
    + +

    ◆ scaleGap

    + +
    +
    + + + + +
    float op::WrapperStructPose::scaleGap
    +
    +

    Gap between successive scales. The pose will be estimated for the scales in the range [1, 1-scaleGap*scalesNumber], with a gap of scaleGap between consecutive scales.

    + +

    Definition at line 85 of file wrapperStructPose.hpp.

    + +
    +
    + +

    ◆ scalesNumber

    + +
    +
    + + + + +
    int op::WrapperStructPose::scalesNumber
    +
    +

    Number of scales to process. The greater it is, the slower the processing and the more memory needed, but accuracy potentially increases. This parameter is related to scaleGap: the final pose estimation will be an average of the results predicted at each scale.

    + +

    Definition at line 78 of file wrapperStructPose.hpp.
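    For example, a sketch of a 4-scale configuration (scales 1.0, 0.75, 0.5 and 0.25), which trades speed and memory for accuracy, continuing the wrapperStructPose instance above:

        // Sketch: average the predictions over 4 scales separated by 0.25. Slower but potentially more accurate.
        wrapperStructPose.scalesNumber = 4;
        wrapperStructPose.scaleGap = 0.25f;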

    + +
    +
    + +

    ◆ upsamplingRatio

    + +
    +
    + + + + +
    float op::WrapperStructPose::upsamplingRatio
    +
    +

    The image upsampling scale. 8 is the stride of the network, so it is the ideal value for maximizing the speed/accuracy trade-off.

    + +

    Definition at line 205 of file wrapperStructPose.hpp.

    + +
    +
    +
    The documentation for this struct was generated from the following file: +
    +
    + + + + diff --git a/web/html/doc/structop_1_1_wrapper_struct_pose.js b/web/html/doc/structop_1_1_wrapper_struct_pose.js new file mode 100644 index 000000000..84aecf14d --- /dev/null +++ b/web/html/doc/structop_1_1_wrapper_struct_pose.js @@ -0,0 +1,31 @@ +var structop_1_1_wrapper_struct_pose = +[ + [ "WrapperStructPose", "structop_1_1_wrapper_struct_pose.html#af3c639dd4de2bfebe1376a0ab7666c86", null ], + [ "addPartCandidates", "structop_1_1_wrapper_struct_pose.html#ad73981c6ad9b23f511ef6f12136bf8e7", null ], + [ "alphaHeatMap", "structop_1_1_wrapper_struct_pose.html#ac1233492c750fbd98df353bffa8f9b78", null ], + [ "alphaKeypoint", "structop_1_1_wrapper_struct_pose.html#a4d3ad84b14697d5f1009fa29e2ff1998", null ], + [ "blendOriginalFrame", "structop_1_1_wrapper_struct_pose.html#aa2cee9019b708d48cc18313615d0189e", null ], + [ "caffeModelPath", "structop_1_1_wrapper_struct_pose.html#a6de869a73fd338bd41e390fcb1a5bcf3", null ], + [ "defaultPartToRender", "structop_1_1_wrapper_struct_pose.html#ab6810e97aa62a728aa09dbbe6b9b6c06", null ], + [ "enableGoogleLogging", "structop_1_1_wrapper_struct_pose.html#a782ba82c6aeabea8fa625042fdf09408", null ], + [ "fpsMax", "structop_1_1_wrapper_struct_pose.html#a16c4fb26e6ce76dfa577e0f4b5747733", null ], + [ "gpuNumber", "structop_1_1_wrapper_struct_pose.html#a536ea76d50e94d513066e9e5767d0c03", null ], + [ "gpuNumberStart", "structop_1_1_wrapper_struct_pose.html#a8be188d871061079432ead77b278fe0d", null ], + [ "heatMapScaleMode", "structop_1_1_wrapper_struct_pose.html#a2a5cceaf05cf228b47d2b001e05efeb8", null ], + [ "heatMapTypes", "structop_1_1_wrapper_struct_pose.html#aa459f2f26c1f1a929af55b8c2d39ccf6", null ], + [ "keypointScaleMode", "structop_1_1_wrapper_struct_pose.html#a054c88e977084707e80eb31dd0a658ab", null ], + [ "maximizePositives", "structop_1_1_wrapper_struct_pose.html#a84edcbf2237d579adc88badaa17c9795", null ], + [ "modelFolder", "structop_1_1_wrapper_struct_pose.html#a0f00648621ca97fde61287be23671523", null ], + [ "netInputSize", "structop_1_1_wrapper_struct_pose.html#acff912f14ba3c0ba706ea99e1cef790e", null ], + [ "netInputSizeDynamicBehavior", "structop_1_1_wrapper_struct_pose.html#a8bafec1b3ee2f2a6473fd604925e265a", null ], + [ "numberPeopleMax", "structop_1_1_wrapper_struct_pose.html#a02c4ab6b56e4da4b3ed0da4eae8ac0fc", null ], + [ "outputSize", "structop_1_1_wrapper_struct_pose.html#a80ead0f411ddab86f643345e4effe805", null ], + [ "poseMode", "structop_1_1_wrapper_struct_pose.html#ad0f4992658b9d624184dcecf79e54e43", null ], + [ "poseModel", "structop_1_1_wrapper_struct_pose.html#a35147b6fb9e300d79b71637793053a1b", null ], + [ "protoTxtPath", "structop_1_1_wrapper_struct_pose.html#a8a6273145f5e2f2ccade81865cbdfecb", null ], + [ "renderMode", "structop_1_1_wrapper_struct_pose.html#ad6b5ea0cef8eb81d20ab39099ba7716e", null ], + [ "renderThreshold", "structop_1_1_wrapper_struct_pose.html#a322ff95b6a2838fe0d55afb28d2a4224", null ], + [ "scaleGap", "structop_1_1_wrapper_struct_pose.html#a646ae142f821411d22d772b76960d585", null ], + [ "scalesNumber", "structop_1_1_wrapper_struct_pose.html#ad23a9c103a60709eed9d7b7381828e5e", null ], + [ "upsamplingRatio", "structop_1_1_wrapper_struct_pose.html#a25ee056d914f1cdc990a8a7956810313", null ] +]; \ No newline at end of file diff --git a/web/html/doc/sub_thread_8hpp.html b/web/html/doc/sub_thread_8hpp.html new file mode 100644 index 000000000..3bdf2556e --- /dev/null +++ b/web/html/doc/sub_thread_8hpp.html @@ -0,0 +1,125 @@ + + + + + + + +OpenPose: include/openpose/thread/subThread.hpp File Reference + + + + 
+ + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    subThread.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::SubThread< TDatums, TWorker >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (SubThread)
     
    +
    +
    + + + + diff --git a/web/html/doc/sub_thread_8hpp.js b/web/html/doc/sub_thread_8hpp.js new file mode 100644 index 000000000..7671ef8d1 --- /dev/null +++ b/web/html/doc/sub_thread_8hpp.js @@ -0,0 +1,5 @@ +var sub_thread_8hpp = +[ + [ "SubThread", "classop_1_1_sub_thread.html", "classop_1_1_sub_thread" ], + [ "COMPILE_TEMPLATE_DATUM", "sub_thread_8hpp.html#af98c8e514e79d4718fb1fc64dc0e431b", null ] +]; \ No newline at end of file diff --git a/web/html/doc/sub_thread_8hpp_source.html b/web/html/doc/sub_thread_8hpp_source.html new file mode 100644 index 000000000..186c84f1c --- /dev/null +++ b/web/html/doc/sub_thread_8hpp_source.html @@ -0,0 +1,270 @@ + + + + + + + +OpenPose: include/openpose/thread/subThread.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    subThread.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_SUB_THREAD_HPP
    +
    2 #define OPENPOSE_THREAD_SUB_THREAD_HPP
    +
    3 
    + + +
    6 
    +
    7 namespace op
    +
    8 {
    +
    9  template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>>
    +
    10  class SubThread
    +
    11  {
    +
    12  public:
    +
    13  explicit SubThread(const std::vector<TWorker>& tWorkers);
    +
    14 
    +
    15  // Destructor
    +
    16  virtual ~SubThread();
    +
    17 
    + +
    19 
    +
    20  virtual bool work() = 0;
    +
    21 
    +
    22  protected:
    +
    23  inline size_t getTWorkersSize() const
    +
    24  {
    +
    25  return mTWorkers.size();
    +
    26  }
    +
    27 
    +
    28  bool workTWorkers(TDatums& tDatums, const bool inputIsRunning);
    +
    29 
    +
    30  private:
    +
    31  std::vector<TWorker> mTWorkers;
    +
    32 
    +
    33  DELETE_COPY(SubThread);
    +
    34  };
    +
    35 }
    +
    36 
    +
    37 
    +
    38 
    +
    39 
    +
    40 
    +
    41 // Implementation
    +
    42 namespace op
    +
    43 {
    +
    44  template<typename TDatums, typename TWorker>
    +
    45  SubThread<TDatums, TWorker>::SubThread(const std::vector<TWorker>& tWorkers) :
    +
    46  mTWorkers{tWorkers}
    +
    47  {
    +
    48  }
    +
    49 
    +
    50  template<typename TDatums, typename TWorker>
    + +
    52  {
    +
    53  }
    +
    54 
    +
    55  template<typename TDatums, typename TWorker>
    +
    56  bool SubThread<TDatums, TWorker>::workTWorkers(TDatums& tDatums, const bool inputIsRunning)
    +
    57  {
    +
    58  try
    +
    59  {
    +
    60  // If !inputIsRunning -> try to close TWorkers
    +
    61  if (!inputIsRunning)
    +
    62  {
    +
    63  for (auto& tWorkers : mTWorkers)
    +
    64  {
    +
    65  tWorkers->tryStop();
    +
    66  if (tWorkers->isRunning())
    +
    67  break;
    +
    68  }
    +
    69  }
    +
    70 
    +
    71  // If (at least) last TWorker still working -> make TWorkers work
    +
    72  if ((*mTWorkers.crbegin())->isRunning())
    +
    73  {
    +
    74  // Iterate over all workers and check whether some of them stopped
    +
    75  auto allRunning = true;
    +
    76  auto lastOneStopped = false;
    +
    77  for (auto& worker : mTWorkers)
    +
    78  {
    +
    79  if (lastOneStopped)
    +
    80  worker->tryStop();
    +
    81 
    +
    82  if (!worker->checkAndWork(tDatums))
    +
    83  {
    +
    84  allRunning = false;
    +
    85  lastOneStopped = true;
    +
    86  }
    +
    87  else
    +
    88  lastOneStopped = false;
    +
    89  }
    +
    90 
    +
    91  if (allRunning)
    +
    92  return true;
    +
    93  else
    +
    94  {
    +
    95  // If last one still running -> try to stop workers
    +
    96  // If last one stopped -> return false
    +
    97  auto lastRunning = (*mTWorkers.crbegin())->isRunning();
    +
    98  if (lastRunning)
    +
    99  {
    +
    100  // Check last one that stopped
    +
    101  auto lastIndexNotRunning = 0ull;
    +
    102  for (auto i = mTWorkers.size() - 1 ; i > 0 ; i--)
    +
    103  {
    +
    104  if (!mTWorkers[i]->checkAndWork(tDatums))
    +
    105  {
    +
    106  lastIndexNotRunning = i;
    +
    107  break;
    +
    108  }
    +
    109  }
    +
    110 
    +
    111  // Stop workers before last index stopped
    +
    112  for (auto i = 0ull; i < lastIndexNotRunning ; i++)
    +
    113  mTWorkers[i]->stop();
    +
    114 
    +
    115  // Try stopping workers after last index stopped
    +
    116  lastRunning = false;
    +
    117  for (auto i = lastIndexNotRunning+1; i < mTWorkers.size() ; i++)
    +
    118  {
    +
    119  mTWorkers[i]->tryStop();
    +
    120  if (mTWorkers[i]->isRunning())
    +
    121  {
    +
    122  lastRunning = true;
    +
    123  break;
    +
    124  }
    +
    125  }
    +
    126  }
    +
    127  return lastRunning;
    +
    128  }
    +
    129  }
    +
    130  else
    +
    131  return false;
    +
    132  }
    +
    133  catch (const std::exception& e)
    +
    134  {
    +
    135  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    136  return false;
    +
    137  }
    +
    138  }
    +
    139 
    +
    140  template<typename TDatums, typename TWorker>
    + +
    142  {
    +
    143  try
    +
    144  {
    +
    145  for (auto& tWorker : mTWorkers)
    +
    146  tWorker->initializationOnThreadNoException();
    +
    147  }
    +
    148  catch (const std::exception& e)
    +
    149  {
    +
    150  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    151  }
    +
    152  }
    +
    153 
    + +
    155 }
    +
    156 
    +
    157 #endif // OPENPOSE_THREAD_SUB_THREAD_HPP
    + +
    virtual bool work()=0
    +
    size_t getTWorkersSize() const
    Definition: subThread.hpp:23
    +
    virtual ~SubThread()
    Definition: subThread.hpp:51
    +
    void initializationOnThread()
    Definition: subThread.hpp:141
    +
    SubThread(const std::vector< TWorker > &tWorkers)
    Definition: subThread.hpp:45
    +
    bool workTWorkers(TDatums &tDatums, const bool inputIsRunning)
    Definition: subThread.hpp:56
    + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    + +
    +
    + + + + diff --git a/web/html/doc/sub_thread_no_queue_8hpp.html b/web/html/doc/sub_thread_no_queue_8hpp.html new file mode 100644 index 000000000..2da694571 --- /dev/null +++ b/web/html/doc/sub_thread_no_queue_8hpp.html @@ -0,0 +1,126 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadNoQueue.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    subThreadNoQueue.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::SubThreadNoQueue< TDatums, TWorker >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (SubThreadNoQueue)
     
    +
    +
    + + + + diff --git a/web/html/doc/sub_thread_no_queue_8hpp.js b/web/html/doc/sub_thread_no_queue_8hpp.js new file mode 100644 index 000000000..3eda1ba0f --- /dev/null +++ b/web/html/doc/sub_thread_no_queue_8hpp.js @@ -0,0 +1,5 @@ +var sub_thread_no_queue_8hpp = +[ + [ "SubThreadNoQueue", "classop_1_1_sub_thread_no_queue.html", "classop_1_1_sub_thread_no_queue" ], + [ "COMPILE_TEMPLATE_DATUM", "sub_thread_no_queue_8hpp.html#a36492d15f864f7c813a573789ea554aa", null ] +]; \ No newline at end of file diff --git a/web/html/doc/sub_thread_no_queue_8hpp_source.html b/web/html/doc/sub_thread_no_queue_8hpp_source.html new file mode 100644 index 000000000..40a465ae9 --- /dev/null +++ b/web/html/doc/sub_thread_no_queue_8hpp_source.html @@ -0,0 +1,173 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadNoQueue.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    subThreadNoQueue.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_THREAD_NO_QUEUE_HPP
    +
    2 #define OPENPOSE_THREAD_THREAD_NO_QUEUE_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>>
    +
    11  class SubThreadNoQueue : public SubThread<TDatums, TWorker>
    +
    12  {
    +
    13  public:
    +
    14  explicit SubThreadNoQueue(const std::vector<TWorker>& tWorkers);
    +
    15 
    +
    16  virtual ~SubThreadNoQueue();
    +
    17 
    +
    18  bool work();
    +
    19 
    + +
    21  };
    +
    22 }
    +
    23 
    +
    24 
    +
    25 
    +
    26 
    +
    27 
    +
    28 // Implementation
    +
    29 namespace op
    +
    30 {
    +
    31  template<typename TDatums, typename TWorker>
    +
    32  SubThreadNoQueue<TDatums, TWorker>::SubThreadNoQueue(const std::vector<TWorker>& tWorkers) :
    +
    33  SubThread<TDatums, TWorker>{tWorkers}
    +
    34  {
    +
    35  }
    +
    36 
    +
    37  template<typename TDatums, typename TWorker>
    + +
    39  {
    +
    40  }
    +
    41 
    +
    42  template<typename TDatums, typename TWorker>
    + +
    44  {
    +
    45  try
    +
    46  {
    +
    47  TDatums tDatums;
    +
    48  return this->workTWorkers(tDatums, true);
    +
    49  }
    +
    50  catch (const std::exception& e)
    +
    51  {
    +
    52  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    53  return false;
    +
    54  }
    +
    55  }
    +
    56 
    + +
    58 }
    +
    59 
    +
    60 #endif // OPENPOSE_THREAD_THREAD_NO_QUEUE_HPP
    + + +
    DELETE_COPY(SubThreadNoQueue)
    + + +
    SubThreadNoQueue(const std::vector< TWorker > &tWorkers)
    + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    + + +
    +
    + + + + diff --git a/web/html/doc/sub_thread_queue_in_8hpp.html b/web/html/doc/sub_thread_queue_in_8hpp.html new file mode 100644 index 000000000..47745641d --- /dev/null +++ b/web/html/doc/sub_thread_queue_in_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadQueueIn.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    subThreadQueueIn.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::SubThreadQueueIn< TDatums, TWorker, TQueue >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (SubThreadQueueIn)
     
    +
    +
    + + + + diff --git a/web/html/doc/sub_thread_queue_in_8hpp.js b/web/html/doc/sub_thread_queue_in_8hpp.js new file mode 100644 index 000000000..1e9302fc0 --- /dev/null +++ b/web/html/doc/sub_thread_queue_in_8hpp.js @@ -0,0 +1,5 @@ +var sub_thread_queue_in_8hpp = +[ + [ "SubThreadQueueIn", "classop_1_1_sub_thread_queue_in.html", "classop_1_1_sub_thread_queue_in" ], + [ "COMPILE_TEMPLATE_DATUM", "sub_thread_queue_in_8hpp.html#a506578f3e723f992eabb627a371351ba", null ] +]; \ No newline at end of file diff --git a/web/html/doc/sub_thread_queue_in_8hpp_source.html b/web/html/doc/sub_thread_queue_in_8hpp_source.html new file mode 100644 index 000000000..e5f2f3e4c --- /dev/null +++ b/web/html/doc/sub_thread_queue_in_8hpp_source.html @@ -0,0 +1,193 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadQueueIn.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    subThreadQueueIn.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_THREAD_QUEUE_IN_HPP
    +
    2 #define OPENPOSE_THREAD_THREAD_QUEUE_IN_HPP
    +
    3 
    + + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
    +
    12  class SubThreadQueueIn : public SubThread<TDatums, TWorker>
    +
    13  {
    +
    14  public:
    +
    15  SubThreadQueueIn(const std::vector<TWorker>& tWorkers, const std::shared_ptr<TQueue>& tQueueIn);
    +
    16 
    +
    17  virtual ~SubThreadQueueIn();
    +
    18 
    +
    19  bool work();
    +
    20 
    +
    21  private:
    +
    22  std::shared_ptr<TQueue> spTQueueIn;
    +
    23 
    +
    24  DELETE_COPY(SubThreadQueueIn);
    +
    25  };
    +
    26 }
    +
    27 
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 // Implementation
    +
    33 namespace op
    +
    34 {
    +
    35  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    37  const std::shared_ptr<TQueue>& tQueueIn) :
    +
    38  SubThread<TDatums, TWorker>{tWorkers},
    +
    39  spTQueueIn{tQueueIn}
    +
    40  {
    +
    41  // spTQueueIn->addPopper();
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    51  {
    +
    52  try
    +
    53  {
    +
    54  // Pop TDatums
    +
    55  if (spTQueueIn->empty())
    +
    56  std::this_thread::sleep_for(std::chrono::microseconds{100});
    +
    57  TDatums tDatums;
    +
    58  bool queueIsRunning = spTQueueIn->tryPop(tDatums);
    +
    59  // Check queue not empty
    +
    60  if (!queueIsRunning)
    +
    61  queueIsRunning = spTQueueIn->isRunning();
    +
    62  // Process TDatums
    +
    63  const auto workersAreRunning = this->workTWorkers(tDatums, queueIsRunning);
    +
    64  // Close queue input if all workers closed
    +
    65  if (!workersAreRunning)
    +
    66  spTQueueIn->stop();
    +
    67  return workersAreRunning;
    +
    68  }
    +
    69  catch (const std::exception& e)
    +
    70  {
    +
    71  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    72  spTQueueIn->stop();
    +
    73  return false;
    +
    74  }
    +
    75  }
    +
    76 
    + +
    78 }
    +
    79 
    +
    80 #endif // OPENPOSE_THREAD_THREAD_QUEUE_IN_HPP
    + + +
    SubThreadQueueIn(const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueIn)
    + + + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    + + + +
    +
    + + + + diff --git a/web/html/doc/sub_thread_queue_in_out_8hpp.html b/web/html/doc/sub_thread_queue_in_out_8hpp.html new file mode 100644 index 000000000..ec0b02900 --- /dev/null +++ b/web/html/doc/sub_thread_queue_in_out_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadQueueInOut.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    subThreadQueueInOut.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::SubThreadQueueInOut< TDatums, TWorker, TQueue >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (SubThreadQueueInOut)
     
    +
    +
    + + + + diff --git a/web/html/doc/sub_thread_queue_in_out_8hpp.js b/web/html/doc/sub_thread_queue_in_out_8hpp.js new file mode 100644 index 000000000..541a1bf04 --- /dev/null +++ b/web/html/doc/sub_thread_queue_in_out_8hpp.js @@ -0,0 +1,5 @@ +var sub_thread_queue_in_out_8hpp = +[ + [ "SubThreadQueueInOut", "classop_1_1_sub_thread_queue_in_out.html", "classop_1_1_sub_thread_queue_in_out" ], + [ "COMPILE_TEMPLATE_DATUM", "sub_thread_queue_in_out_8hpp.html#a63605cf0e6f4049beacf6094995272e8", null ] +]; \ No newline at end of file diff --git a/web/html/doc/sub_thread_queue_in_out_8hpp_source.html b/web/html/doc/sub_thread_queue_in_out_8hpp_source.html new file mode 100644 index 000000000..e35f9eecf --- /dev/null +++ b/web/html/doc/sub_thread_queue_in_out_8hpp_source.html @@ -0,0 +1,228 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadQueueInOut.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    subThreadQueueInOut.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_THREAD_QUEUE_IN_OUT_HPP
    +
    2 #define OPENPOSE_THREAD_THREAD_QUEUE_IN_OUT_HPP
    +
    3 
    + + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
    +
    12  class SubThreadQueueInOut : public SubThread<TDatums, TWorker>
    +
    13  {
    +
    14  public:
    +
    15  SubThreadQueueInOut(const std::vector<TWorker>& tWorkers, const std::shared_ptr<TQueue>& tQueueIn,
    +
    16  const std::shared_ptr<TQueue>& tQueueOut);
    +
    17 
    +
    18  virtual ~SubThreadQueueInOut();
    +
    19 
    +
    20  bool work();
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<TQueue> spTQueueIn;
    +
    24  std::shared_ptr<TQueue> spTQueueOut;
    +
    25 
    +
    26  DELETE_COPY(SubThreadQueueInOut);
    +
    27  };
    +
    28 }
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 
    +
    34 // Implementation
    +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    39  const std::shared_ptr<TQueue>& tQueueIn,
    +
    40  const std::shared_ptr<TQueue>& tQueueOut) :
    +
    41  SubThread<TDatums, TWorker>{tWorkers},
    +
    42  spTQueueIn{tQueueIn},
    +
    43  spTQueueOut{tQueueOut}
    +
    44  {
    +
    45  // spTQueueIn->addPopper();
    +
    46  spTQueueOut->addPusher();
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    51  {
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    56  {
    +
    57  try
    +
    58  {
    +
    59  // If output queue is closed -> close input queue
    +
    60  if (!spTQueueOut->isRunning())
    +
    61  {
    +
    62  spTQueueIn->stop();
    +
    63  return false;
    +
    64  }
    +
    65  // If output queue running -> normal operation
    +
    66  else
    +
    67  {
    +
    68  // Don't work until next queue is not full
    +
    69  // This reduces latency to half
    +
    70  if (!spTQueueOut->isFull())
    +
    71  {
    +
    72  // Pop TDatums
    +
    73  if (spTQueueIn->empty())
    +
    74  std::this_thread::sleep_for(std::chrono::microseconds{100});
    +
    75  TDatums tDatums;
    +
    76  bool workersAreRunning = spTQueueIn->tryPop(tDatums);
    +
    77  // Check queue not stopped
    +
    78  if (!workersAreRunning)
    +
    79  workersAreRunning = spTQueueIn->isRunning();
    +
    80  // Process TDatums
    +
    81  workersAreRunning = this->workTWorkers(tDatums, workersAreRunning);
    +
    82  // Push/emplace tDatums if successfully processed
    +
    83  if (workersAreRunning)
    +
    84  {
    +
    85  if (tDatums != nullptr)
    +
    86  spTQueueOut->waitAndEmplace(tDatums);
    +
    87  }
    +
    88  // Close both queues otherwise
    +
    89  else
    +
    90  {
    +
    91  spTQueueIn->stop();
    +
    92  spTQueueOut->stopPusher();
    +
    93  }
    +
    94  return workersAreRunning;
    +
    95  }
    +
    96  else
    +
    97  {
    +
    98  std::this_thread::sleep_for(std::chrono::microseconds{100});
    +
    99  return true;
    +
    100  }
    +
    101  }
    +
    102  }
    +
    103  catch (const std::exception& e)
    +
    104  {
    +
    105  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    106  spTQueueIn->stop();
    +
    107  spTQueueOut->stop();
    +
    108  return false;
    +
    109  }
    +
    110  }
    +
    111 
    + +
    113 }
    +
    114 
    +
    115 #endif // OPENPOSE_THREAD_THREAD_QUEUE_IN_OUT_HPP
    + + + +
    SubThreadQueueInOut(const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueIn, const std::shared_ptr< TQueue > &tQueueOut)
    + + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    + + + +
    +
    + + + + diff --git a/web/html/doc/sub_thread_queue_out_8hpp.html b/web/html/doc/sub_thread_queue_out_8hpp.html new file mode 100644 index 000000000..7498e3add --- /dev/null +++ b/web/html/doc/sub_thread_queue_out_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadQueueOut.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    subThreadQueueOut.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::SubThreadQueueOut< TDatums, TWorker, TQueue >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (SubThreadQueueOut)
     
    +
    +
    + + + + diff --git a/web/html/doc/sub_thread_queue_out_8hpp.js b/web/html/doc/sub_thread_queue_out_8hpp.js new file mode 100644 index 000000000..f9600d164 --- /dev/null +++ b/web/html/doc/sub_thread_queue_out_8hpp.js @@ -0,0 +1,5 @@ +var sub_thread_queue_out_8hpp = +[ + [ "SubThreadQueueOut", "classop_1_1_sub_thread_queue_out.html", "classop_1_1_sub_thread_queue_out" ], + [ "COMPILE_TEMPLATE_DATUM", "sub_thread_queue_out_8hpp.html#aee90a0429c2d14da0c3a85cd67a17821", null ] +]; \ No newline at end of file diff --git a/web/html/doc/sub_thread_queue_out_8hpp_source.html b/web/html/doc/sub_thread_queue_out_8hpp_source.html new file mode 100644 index 000000000..d86ff087d --- /dev/null +++ b/web/html/doc/sub_thread_queue_out_8hpp_source.html @@ -0,0 +1,208 @@ + + + + + + + +OpenPose: include/openpose/thread/subThreadQueueOut.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    subThreadQueueOut.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_THREAD_QUEUE_OUT_HPP
    +
    2 #define OPENPOSE_THREAD_THREAD_QUEUE_OUT_HPP
    +
    3 
    + + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
    +
    12  class SubThreadQueueOut : public SubThread<TDatums, TWorker>
    +
    13  {
    +
    14  public:
    +
    15  SubThreadQueueOut(const std::vector<TWorker>& tWorkers, const std::shared_ptr<TQueue>& tQueueOut);
    +
    16 
    +
    17  virtual ~SubThreadQueueOut();
    +
    18 
    +
    19  bool work();
    +
    20 
    +
    21  private:
    +
    22  std::shared_ptr<TQueue> spTQueueOut;
    +
    23 
    +
    24  DELETE_COPY(SubThreadQueueOut);
    +
    25  };
    +
    26 }
    +
    27 
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 // Implementation
    +
    33 namespace op
    +
    34 {
    +
    35  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    37  const std::shared_ptr<TQueue>& tQueueOut) :
    +
    38  SubThread<TDatums, TWorker>{tWorkers},
    +
    39  spTQueueOut{tQueueOut}
    +
    40  {
    +
    41  spTQueueOut->addPusher();
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums, typename TWorker, typename TQueue>
    + +
    51  {
    +
    52  try
    +
    53  {
    +
    54  // If output queue is closed -> close input queue
    +
    55  if (!spTQueueOut->isRunning())
    +
    56  return false;
    +
    57  else
    +
    58  {
    +
    59  // Don't work until next queue is not full
    +
    60  // This reduces latency to half
    +
    61  if (!spTQueueOut->isFull())
    +
    62  {
    +
    63  // Process TDatums
    +
    64  TDatums tDatums;
    +
    65  const auto workersAreRunning = this->workTWorkers(tDatums, true);
    +
    66  // Push/emplace tDatums if successfully processed
    +
    67  if (workersAreRunning)
    +
    68  {
    +
    69  if (tDatums != nullptr)
    +
    70  spTQueueOut->waitAndEmplace(tDatums);
    +
    71  }
    +
    72  // Close queue otherwise
    +
    73  else
    +
    74  spTQueueOut->stopPusher();
    +
    75  return workersAreRunning;
    +
    76  }
    +
    77  else
    +
    78  {
    +
    79  std::this_thread::sleep_for(std::chrono::microseconds{100});
    +
    80  return true;
    +
    81  }
    +
    82  }
    +
    83  }
    +
    84  catch (const std::exception& e)
    +
    85  {
    +
    86  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    87  spTQueueOut->stop();
    +
    88  return false;
    +
    89  }
    +
    90  }
    +
    91 
    + +
    93 }
    +
    94 
    +
    95 #endif // OPENPOSE_THREAD_THREAD_QUEUE_OUT_HPP
    + + + +
    SubThreadQueueOut(const std::vector< TWorker > &tWorkers, const std::shared_ptr< TQueue > &tQueueOut)
    + + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    + + + +
    +
    + + + + diff --git a/web/html/doc/sync_off.png b/web/html/doc/sync_off.png new file mode 100644 index 000000000..3b443fc62 Binary files /dev/null and b/web/html/doc/sync_off.png differ diff --git a/web/html/doc/sync_on.png b/web/html/doc/sync_on.png new file mode 100644 index 000000000..e08320fb6 Binary files /dev/null and b/web/html/doc/sync_on.png differ diff --git a/web/html/doc/tab_a.png b/web/html/doc/tab_a.png new file mode 100644 index 000000000..3b725c41c Binary files /dev/null and b/web/html/doc/tab_a.png differ diff --git a/web/html/doc/tab_b.png b/web/html/doc/tab_b.png new file mode 100644 index 000000000..e2b4a8638 Binary files /dev/null and b/web/html/doc/tab_b.png differ diff --git a/web/html/doc/tab_h.png b/web/html/doc/tab_h.png new file mode 100644 index 000000000..fd5cb7054 Binary files /dev/null and b/web/html/doc/tab_h.png differ diff --git a/web/html/doc/tab_s.png b/web/html/doc/tab_s.png new file mode 100644 index 000000000..ab478c95b Binary files /dev/null and b/web/html/doc/tab_s.png differ diff --git a/web/html/doc/tabs.css b/web/html/doc/tabs.css new file mode 100644 index 000000000..85a0cd5b5 --- /dev/null +++ b/web/html/doc/tabs.css @@ -0,0 +1 @@ +.sm{position:relative;z-index:9999}.sm,.sm ul,.sm li{display:block;list-style:none;margin:0;padding:0;line-height:normal;direction:ltr;text-align:left;-webkit-tap-highlight-color:rgba(0,0,0,0)}.sm-rtl,.sm-rtl ul,.sm-rtl li{direction:rtl;text-align:right}.sm>li>h1,.sm>li>h2,.sm>li>h3,.sm>li>h4,.sm>li>h5,.sm>li>h6{margin:0;padding:0}.sm ul{display:none}.sm li,.sm a{position:relative}.sm a{display:block}.sm a.disabled{cursor:not-allowed}.sm:after{content:"\00a0";display:block;height:0;font:0/0 serif;clear:both;visibility:hidden;overflow:hidden}.sm,.sm *,.sm *:before,.sm *:after{-moz-box-sizing:border-box;-webkit-box-sizing:border-box;box-sizing:border-box}.sm-dox{background-image:url("tab_b.png")}.sm-dox a,.sm-dox a:focus,.sm-dox a:hover,.sm-dox a:active{padding:0 12px;padding-right:43px;font-family:"Lucida Grande","Geneva","Helvetica",Arial,sans-serif;font-size:13px;font-weight:bold;line-height:36px;text-decoration:none;text-shadow:0 1px 1px rgba(255,255,255,0.9);color:#283a5d;outline:0}.sm-dox a:hover{background-image:url("tab_a.png");background-repeat:repeat-x;color:white;text-shadow:0 1px 1px black}.sm-dox a.current{color:#d23600}.sm-dox a.disabled{color:#bbb}.sm-dox a span.sub-arrow{position:absolute;top:50%;margin-top:-14px;left:auto;right:3px;width:28px;height:28px;overflow:hidden;font:bold 12px/28px monospace!important;text-align:center;text-shadow:none;background:rgba(255,255,255,0.5);-moz-border-radius:5px;-webkit-border-radius:5px;border-radius:5px}.sm-dox a.highlighted span.sub-arrow:before{display:block;content:'-'}.sm-dox>li:first-child>a,.sm-dox>li:first-child>:not(ul) a{-moz-border-radius:5px 5px 0 0;-webkit-border-radius:5px;border-radius:5px 5px 0 0}.sm-dox>li:last-child>a,.sm-dox>li:last-child>*:not(ul) a,.sm-dox>li:last-child>ul,.sm-dox>li:last-child>ul>li:last-child>a,.sm-dox>li:last-child>ul>li:last-child>*:not(ul) a,.sm-dox>li:last-child>ul>li:last-child>ul,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>a,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>*:not(ul) a,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>a,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>*:not(ul) 
a,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>ul,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>a,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>*:not(ul) a,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>ul{-moz-border-radius:0 0 5px 5px;-webkit-border-radius:0;border-radius:0 0 5px 5px}.sm-dox>li:last-child>a.highlighted,.sm-dox>li:last-child>*:not(ul) a.highlighted,.sm-dox>li:last-child>ul>li:last-child>a.highlighted,.sm-dox>li:last-child>ul>li:last-child>*:not(ul) a.highlighted,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>a.highlighted,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>*:not(ul) a.highlighted,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>a.highlighted,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>*:not(ul) a.highlighted,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>a.highlighted,.sm-dox>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>ul>li:last-child>*:not(ul) a.highlighted{-moz-border-radius:0;-webkit-border-radius:0;border-radius:0}.sm-dox ul{background:rgba(162,162,162,0.1)}.sm-dox ul a,.sm-dox ul a:focus,.sm-dox ul a:hover,.sm-dox ul a:active{font-size:12px;border-left:8px solid transparent;line-height:36px;text-shadow:none;background-color:white;background-image:none}.sm-dox ul a:hover{background-image:url("tab_a.png");background-repeat:repeat-x;color:white;text-shadow:0 1px 1px black}.sm-dox ul ul a,.sm-dox ul ul a:hover,.sm-dox ul ul a:focus,.sm-dox ul ul a:active{border-left:16px solid transparent}.sm-dox ul ul ul a,.sm-dox ul ul ul a:hover,.sm-dox ul ul ul a:focus,.sm-dox ul ul ul a:active{border-left:24px solid transparent}.sm-dox ul ul ul ul a,.sm-dox ul ul ul ul a:hover,.sm-dox ul ul ul ul a:focus,.sm-dox ul ul ul ul a:active{border-left:32px solid transparent}.sm-dox ul ul ul ul ul a,.sm-dox ul ul ul ul ul a:hover,.sm-dox ul ul ul ul ul a:focus,.sm-dox ul ul ul ul ul a:active{border-left:40px solid transparent}@media(min-width:768px){.sm-dox ul{position:absolute;width:12em}.sm-dox li{float:left}.sm-dox.sm-rtl li{float:right}.sm-dox ul li,.sm-dox.sm-rtl ul li,.sm-dox.sm-vertical li{float:none}.sm-dox a{white-space:nowrap}.sm-dox ul a,.sm-dox.sm-vertical a{white-space:normal}.sm-dox .sm-nowrap>li>a,.sm-dox .sm-nowrap>li>:not(ul) a{white-space:nowrap}.sm-dox{padding:0 10px;background-image:url("tab_b.png");line-height:36px}.sm-dox a span.sub-arrow{top:50%;margin-top:-2px;right:12px;width:0;height:0;border-width:4px;border-style:solid dashed dashed dashed;border-color:#283a5d transparent transparent transparent;background:transparent;-moz-border-radius:0;-webkit-border-radius:0;border-radius:0}.sm-dox a,.sm-dox a:focus,.sm-dox a:active,.sm-dox a:hover,.sm-dox a.highlighted{padding:0 12px;background-image:url("tab_s.png");background-repeat:no-repeat;background-position:right;-moz-border-radius:0!important;-webkit-border-radius:0;border-radius:0!important}.sm-dox a:hover{background-image:url("tab_a.png");background-repeat:repeat-x;color:white;text-shadow:0 1px 1px black}.sm-dox a:hover span.sub-arrow{border-color:white transparent transparent transparent}.sm-dox a.has-submenu{padding-right:24px}.sm-dox li{border-top:0}.sm-dox>li>ul:before,.sm-dox>li>ul:after{content:'';position:absolute;top:-18px;left:30px;width:0;height:0;overflow:hidden;border-width:9px;border-style:dashed 
dashed solid dashed;border-color:transparent transparent #bbb transparent}.sm-dox>li>ul:after{top:-16px;left:31px;border-width:8px;border-color:transparent transparent #fff transparent}.sm-dox ul{border:1px solid #bbb;padding:5px 0;background:#fff;-moz-border-radius:5px!important;-webkit-border-radius:5px;border-radius:5px!important;-moz-box-shadow:0 5px 9px rgba(0,0,0,0.2);-webkit-box-shadow:0 5px 9px rgba(0,0,0,0.2);box-shadow:0 5px 9px rgba(0,0,0,0.2)}.sm-dox ul a span.sub-arrow{right:8px;top:50%;margin-top:-5px;border-width:5px;border-color:transparent transparent transparent #555;border-style:dashed dashed dashed solid}.sm-dox ul a,.sm-dox ul a:hover,.sm-dox ul a:focus,.sm-dox ul a:active,.sm-dox ul a.highlighted{color:#555;background-image:none;border:0!important;color:#555;background-image:none}.sm-dox ul a:hover{background-image:url("tab_a.png");background-repeat:repeat-x;color:white;text-shadow:0 1px 1px black}.sm-dox ul a:hover span.sub-arrow{border-color:transparent transparent transparent white}.sm-dox span.scroll-up,.sm-dox span.scroll-down{position:absolute;display:none;visibility:hidden;overflow:hidden;background:#fff;height:36px}.sm-dox span.scroll-up:hover,.sm-dox span.scroll-down:hover{background:#eee}.sm-dox span.scroll-up:hover span.scroll-up-arrow,.sm-dox span.scroll-up:hover span.scroll-down-arrow{border-color:transparent transparent #d23600 transparent}.sm-dox span.scroll-down:hover span.scroll-down-arrow{border-color:#d23600 transparent transparent transparent}.sm-dox span.scroll-up-arrow,.sm-dox span.scroll-down-arrow{position:absolute;top:0;left:50%;margin-left:-6px;width:0;height:0;overflow:hidden;border-width:6px;border-style:dashed dashed solid dashed;border-color:transparent transparent #555 transparent}.sm-dox span.scroll-down-arrow{top:8px;border-style:solid dashed dashed dashed;border-color:#555 transparent transparent transparent}.sm-dox.sm-rtl a.has-submenu{padding-right:12px;padding-left:24px}.sm-dox.sm-rtl a span.sub-arrow{right:auto;left:12px}.sm-dox.sm-rtl.sm-vertical a.has-submenu{padding:10px 20px}.sm-dox.sm-rtl.sm-vertical a span.sub-arrow{right:auto;left:8px;border-style:dashed solid dashed dashed;border-color:transparent #555 transparent transparent}.sm-dox.sm-rtl>li>ul:before{left:auto;right:30px}.sm-dox.sm-rtl>li>ul:after{left:auto;right:31px}.sm-dox.sm-rtl ul a.has-submenu{padding:10px 20px!important}.sm-dox.sm-rtl ul a span.sub-arrow{right:auto;left:8px;border-style:dashed solid dashed dashed;border-color:transparent #555 transparent transparent}.sm-dox.sm-vertical{padding:10px 0;-moz-border-radius:5px;-webkit-border-radius:5px;border-radius:5px}.sm-dox.sm-vertical a{padding:10px 20px}.sm-dox.sm-vertical a:hover,.sm-dox.sm-vertical a:focus,.sm-dox.sm-vertical a:active,.sm-dox.sm-vertical a.highlighted{background:#fff}.sm-dox.sm-vertical a.disabled{background-image:url("tab_b.png")}.sm-dox.sm-vertical a span.sub-arrow{right:8px;top:50%;margin-top:-5px;border-width:5px;border-style:dashed dashed dashed solid;border-color:transparent transparent transparent #555}.sm-dox.sm-vertical>li>ul:before,.sm-dox.sm-vertical>li>ul:after{display:none}.sm-dox.sm-vertical ul a{padding:10px 20px}.sm-dox.sm-vertical ul a:hover,.sm-dox.sm-vertical ul a:focus,.sm-dox.sm-vertical ul a:active,.sm-dox.sm-vertical ul a.highlighted{background:#eee}.sm-dox.sm-vertical ul a.disabled{background:#fff}} \ No newline at end of file diff --git a/web/html/doc/thread_2enum_classes_8hpp.html b/web/html/doc/thread_2enum_classes_8hpp.html new file mode 100644 index 
000000000..5b3d9b337 --- /dev/null +++ b/web/html/doc/thread_2enum_classes_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/thread/enumClasses.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    enumClasses.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + +

    +Enumerations

    enum class  op::ThreadManagerMode : unsigned char { op::Asynchronous +, op::AsynchronousIn +, op::AsynchronousOut +, op::Synchronous + }
     
    +
    +
    + + + + diff --git a/web/html/doc/thread_2enum_classes_8hpp.js b/web/html/doc/thread_2enum_classes_8hpp.js new file mode 100644 index 000000000..af98568df --- /dev/null +++ b/web/html/doc/thread_2enum_classes_8hpp.js @@ -0,0 +1,9 @@ +var thread_2enum_classes_8hpp = +[ + [ "ThreadManagerMode", "thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36", [ + [ "Asynchronous", "thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36a288aae25bc408055f50c21c991903a44", null ], + [ "AsynchronousIn", "thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36a435b3ab344c03bfc0e4530a2e75f5e44", null ], + [ "AsynchronousOut", "thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36ac68f8680ccf3a65dfcfc63356112c9f9", null ], + [ "Synchronous", "thread_2enum_classes_8hpp.html#a3593e2d53bec533f0048ef3973eebd36a2fe4167817733fec8e6ba1afddf78f1b", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/thread_2enum_classes_8hpp_source.html b/web/html/doc/thread_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..2fba6dd15 --- /dev/null +++ b/web/html/doc/thread_2enum_classes_8hpp_source.html @@ -0,0 +1,122 @@ + + + + + + + +OpenPose: include/openpose/thread/enumClasses.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    enumClasses.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_ENUM_CLASSES_HPP
    +
    2 #define OPENPOSE_THREAD_ENUM_CLASSES_HPP
    +
    3 
    +
    4 namespace op
    +
    5 {
    +
    9  enum class ThreadManagerMode : unsigned char
    +
    10  {
    + + + + +
    24  };
    +
    25 }
    +
    26 
    +
    27 #endif // OPENPOSE_THREAD_ENUM_CLASSES_HPP
    + +
    ThreadManagerMode
    Definition: enumClasses.hpp:10
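    For illustration, a hedged sketch of how ThreadManagerMode is typically passed to the wrapper (it assumes op::Wrapper accepts the mode in its constructor and is configured with an op::WrapperStructPose, as in the OpenPose C++ examples; wrapperStructPose is a placeholder for such a configuration):

        #include <openpose/headers.hpp>

        // Sketch: asynchronous mode, i.e. the user pushes input and pops output manually.
        op::Wrapper opWrapper{op::ThreadManagerMode::Asynchronous};
        opWrapper.configure(wrapperStructPose); // placeholder WrapperStructPose instance
        opWrapper.start();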
    + + + + +
    +
    + + + + diff --git a/web/html/doc/thread_2headers_8hpp.html b/web/html/doc/thread_2headers_8hpp.html new file mode 100644 index 000000000..9464d555a --- /dev/null +++ b/web/html/doc/thread_2headers_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/thread/headers.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + + + + + + diff --git a/web/html/doc/thread_2headers_8hpp_source.html b/web/html/doc/thread_2headers_8hpp_source.html new file mode 100644 index 000000000..44b6c9a7a --- /dev/null +++ b/web/html/doc/thread_2headers_8hpp_source.html @@ -0,0 +1,143 @@ + + + + + + + +OpenPose: include/openpose/thread/headers.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + + + + + + diff --git a/web/html/doc/thread_8hpp.html b/web/html/doc/thread_8hpp.html new file mode 100644 index 000000000..54c6bf636 --- /dev/null +++ b/web/html/doc/thread_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/thread.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    thread.hpp File Reference
    +
    +
    +
    #include <atomic>
    +#include <openpose/core/common.hpp>
    +#include <openpose/thread/subThread.hpp>
    +#include <openpose/thread/worker.hpp>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::Thread< TDatums, TWorker >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (Thread)
     
    +
    +
    + + + + diff --git a/web/html/doc/thread_8hpp.js b/web/html/doc/thread_8hpp.js new file mode 100644 index 000000000..e7e0f2a8b --- /dev/null +++ b/web/html/doc/thread_8hpp.js @@ -0,0 +1,5 @@ +var thread_8hpp = +[ + [ "Thread", "classop_1_1_thread.html", "classop_1_1_thread" ], + [ "COMPILE_TEMPLATE_DATUM", "thread_8hpp.html#ae5dac6cf1ccdf461838f9795be8fda03", null ] +]; \ No newline at end of file diff --git a/web/html/doc/thread_8hpp_source.html b/web/html/doc/thread_8hpp_source.html new file mode 100644 index 000000000..557261335 --- /dev/null +++ b/web/html/doc/thread_8hpp_source.html @@ -0,0 +1,357 @@ + + + + + + + +OpenPose: include/openpose/thread/thread.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    thread.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_THREAD_HPP
    +
    2 #define OPENPOSE_THREAD_THREAD_HPP
    +
    3 
    +
    4 #include <atomic>
    + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>>
    +
    12  class Thread
    +
    13  {
    +
    14  public:
    +
    15  explicit Thread(const std::shared_ptr<std::atomic<bool>>& isRunningSharedPtr = nullptr);
    +
    16 
    +
    17  // Move constructor
    +
    18  Thread(Thread&& t);
    +
    19 
    +
    20  // Move assignment
    +
    21  Thread& operator=(Thread&& t);
    +
    22 
    +
    23  // Destructor
    +
    24  virtual ~Thread();
    +
    25 
    +
    26  void add(const std::vector<std::shared_ptr<SubThread<TDatums, TWorker>>>& subThreads);
    +
    27 
    +
    28  void add(const std::shared_ptr<SubThread<TDatums, TWorker>>& subThread);
    +
    29 
    +
    30  void exec(const std::shared_ptr<std::atomic<bool>>& isRunningSharedPtr);
    +
    31 
    +
    32  void startInThread();
    +
    33 
    +
    34  void stopAndJoin();
    +
    35 
    +
    36  inline bool isRunning() const
    +
    37  {
    +
    38  return *spIsRunning;
    +
    39  }
    +
    40 
    +
    41  private:
    +
    42  std::shared_ptr<std::atomic<bool>> spIsRunning;
    +
    43  std::vector<std::shared_ptr<SubThread<TDatums, TWorker>>> mSubThreads;
    +
    44  std::thread mThread;
    +
    45 
    +
    46  void initializationOnThread();
    +
    47 
    +
    48  void threadFunction();
    +
    49 
    +
    50  void stop();
    +
    51 
    +
    52  void join();
    +
    53 
    +
    54  DELETE_COPY(Thread);
    +
    55  };
    +
    56 }
    +
    57 
    +
    58 
    +
    59 
    +
    60 
    +
    61 
    +
    62 // Implementation
    +
    63 namespace op
    +
    64 {
    +
    65  template<typename TDatums, typename TWorker>
    +
    66  Thread<TDatums, TWorker>::Thread(const std::shared_ptr<std::atomic<bool>>& isRunningSharedPtr) :
    +
    67  spIsRunning{(isRunningSharedPtr != nullptr ? isRunningSharedPtr : std::make_shared<std::atomic<bool>>(false))}
    +
    68  {
    +
    69  }
    +
    70 
    +
    71  template<typename TDatums, typename TWorker>
    + +
    73  spIsRunning{std::make_shared<std::atomic<bool>>(t.spIsRunning->load())}
    +
    74  {
    +
    75  std::swap(mSubThreads, t.mSubThreads);
    +
    76  std::swap(mThread, t.mThread);
    +
    77  }
    +
    78 
    +
    79  template<typename TDatums, typename TWorker>
    + +
    81  {
    +
    82  std::swap(mSubThreads, t.mSubThreads);
    +
    83  std::swap(mThread, t.mThread);
    +
    84  spIsRunning = {std::make_shared<std::atomic<bool>>(t.spIsRunning->load())};
    +
    85  return *this;
    +
    86  }
    +
    87 
    +
    88  template<typename TDatums, typename TWorker>
    + +
    90  {
    +
    91  try
    +
    92  {
    +
    93  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    94  stopAndJoin();
    +
    95  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    96  }
    +
    97  catch (const std::exception& e)
    +
    98  {
    +
    99  errorDestructor(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    100  }
    +
    101  }
    +
    102 
    +
    103  template<typename TDatums, typename TWorker>
    +
    104  void Thread<TDatums, TWorker>::add(const std::vector<std::shared_ptr<SubThread<TDatums, TWorker>>>& subThreads)
    +
    105  {
    +
    106  for (const auto& subThread : subThreads)
    +
    107  mSubThreads.emplace_back(subThread);
    +
    108  }
    +
    109 
    +
    110  template<typename TDatums, typename TWorker>
    +
    111  void Thread<TDatums, TWorker>::add(const std::shared_ptr<SubThread<TDatums, TWorker>>& subThread)
    +
    112  {
    +
    113  add(std::vector<std::shared_ptr<SubThread<TDatums, TWorker>>>{subThread});
    +
    114  }
    +
    115 
    +
    116  template<typename TDatums, typename TWorker>
    +
    117  void Thread<TDatums, TWorker>::exec(const std::shared_ptr<std::atomic<bool>>& isRunningSharedPtr)
    +
    118  {
    +
    119  try
    +
    120  {
    +
    121  stopAndJoin();
    +
    122  spIsRunning = isRunningSharedPtr;
    +
    123  *spIsRunning = true;
    +
    124  threadFunction();
    +
    125  }
    +
    126  catch (const std::exception& e)
    +
    127  {
    +
    128  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    129  }
    +
    130  }
    +
    131 
    +
    132  template<typename TDatums, typename TWorker>
+
133  void Thread<TDatums, TWorker>::startInThread()
+
    134  {
    +
    135  try
    +
    136  {
    +
    137  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    138  stopAndJoin();
    +
    139  *spIsRunning = true;
    +
    140  mThread = {std::thread{&Thread::threadFunction, this}};
    +
    141  }
    +
    142  catch (const std::exception& e)
    +
    143  {
    +
    144  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    145  }
    +
    146  }
    +
    147 
    +
    148  template<typename TDatums, typename TWorker>
+
149  void Thread<TDatums, TWorker>::stopAndJoin()
+
    150  {
    +
    151  try
    +
    152  {
    +
    153  stop();
    +
    154  join();
    +
    155  }
    +
    156  catch (const std::exception& e)
    +
    157  {
    +
    158  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    159  }
    +
    160  }
    +
    161 
    +
    162  template<typename TDatums, typename TWorker>
+
163  void Thread<TDatums, TWorker>::initializationOnThread()
+
    164  {
    +
    165  try
    +
    166  {
    +
    167  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    168  for (auto& subThread : mSubThreads)
    +
    169  subThread->initializationOnThread();
    +
    170  }
    +
    171  catch (const std::exception& e)
    +
    172  {
    +
    173  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    174  }
    +
    175  }
    +
    176 
    +
    177  template<typename TDatums, typename TWorker>
    +
    178  void Thread<TDatums, TWorker>::threadFunction()
    +
    179  {
    +
    180  try
    +
    181  {
    +
    182  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    183  initializationOnThread();
    +
    184 
    +
    185  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    186  while (isRunning())
    +
    187  {
    +
    188  bool allSubThreadsClosed = true;
    +
    189  for (auto& subThread : mSubThreads)
    +
    190  allSubThreadsClosed &= !subThread->work();
    +
    191 
    +
    192  if (allSubThreadsClosed)
    +
    193  {
    +
    194  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    195  stop();
    +
    196  break;
    +
    197  }
    +
    198  }
    +
    199  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    200  }
    +
    201  catch (const std::exception& e)
    +
    202  {
    +
    203  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    204  }
    +
    205  }
    +
    206 
    +
    207  template<typename TDatums, typename TWorker>
    +
    208  void Thread<TDatums, TWorker>::stop()
    +
    209  {
    +
    210  try
    +
    211  {
    +
    212  *spIsRunning = false;
    +
    213  }
    +
    214  catch (const std::exception& e)
    +
    215  {
    +
    216  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    217  }
    +
    218  }
    +
    219 
    +
    220  template<typename TDatums, typename TWorker>
    +
    221  void Thread<TDatums, TWorker>::join()
    +
    222  {
    +
    223  try
    +
    224  {
    +
    225  if (mThread.joinable())
    +
    226  mThread.join();
    +
    227  }
    +
    228  catch (const std::exception& e)
    +
    229  {
    +
    230  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    231  }
    +
    232  }
    +
    233 
+
234  COMPILE_TEMPLATE_DATUM(Thread);
+
    235 }
    +
    236 
    +
    237 #endif // OPENPOSE_THREAD_THREAD_HPP
    + + +
    void add(const std::vector< std::shared_ptr< SubThread< TDatums, TWorker >>> &subThreads)
    Definition: thread.hpp:104
    +
    virtual ~Thread()
    Definition: thread.hpp:89
    +
    Thread & operator=(Thread &&t)
    Definition: thread.hpp:80
    +
    bool isRunning() const
    Definition: thread.hpp:36
    +
    void stopAndJoin()
    Definition: thread.hpp:149
    +
    Thread(const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr=nullptr)
    Definition: thread.hpp:66
    +
    void startInThread()
    Definition: thread.hpp:133
    +
    void exec(const std::shared_ptr< std::atomic< bool >> &isRunningSharedPtr)
    Definition: thread.hpp:117
    + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    OP_API void errorDestructor(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    OP_API void opLog(const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    + + + +
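The op::Thread template listed above is normally driven by op::ThreadManager (next file), but it can also be used directly. Below is a minimal lifecycle sketch; the TDatums alias is the usual OpenPose datum container and is an assumption here, and no SubThread is attached, so the spawned thread stops after its first pass through threadFunction().

    // Minimal lifecycle sketch for op::Thread (TDatums type is an assumption; real
    // pipelines attach SubThread instances via add() before starting).
    #include <atomic>
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    void threadLifecycleSketch()
    {
        using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;
        // Shared running flag, typically also handed to the other threads of a pipeline
        const auto isRunning = std::make_shared<std::atomic<bool>>(false);
        op::Thread<TDatums> thread{isRunning};
        // thread.add(subThreads); // SubThreadQueueIn/Out/NoQueue instances would go here
        thread.startInThread();    // sets the running flag and spawns the std::thread
        // With no SubThreads, threadFunction() sees allSubThreadsClosed == true and stops.
        thread.stopAndJoin();      // safe to call even after the thread stopped itself
    }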
    +
    + + + + diff --git a/web/html/doc/thread_manager_8hpp.html b/web/html/doc/thread_manager_8hpp.html new file mode 100644 index 000000000..3104e9425 --- /dev/null +++ b/web/html/doc/thread_manager_8hpp.html @@ -0,0 +1,138 @@ + + + + + + + +OpenPose: include/openpose/thread/threadManager.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    threadManager.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::ThreadManager< TDatums, TWorker, TQueue >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (ThreadManager)
     
    +
    +
    + + + + diff --git a/web/html/doc/thread_manager_8hpp.js b/web/html/doc/thread_manager_8hpp.js new file mode 100644 index 000000000..5701e75c6 --- /dev/null +++ b/web/html/doc/thread_manager_8hpp.js @@ -0,0 +1,5 @@ +var thread_manager_8hpp = +[ + [ "ThreadManager", "classop_1_1_thread_manager.html", "classop_1_1_thread_manager" ], + [ "COMPILE_TEMPLATE_DATUM", "thread_manager_8hpp.html#ac06eeab84c4861ef08834855b48750a6", null ] +]; \ No newline at end of file diff --git a/web/html/doc/thread_manager_8hpp_source.html b/web/html/doc/thread_manager_8hpp_source.html new file mode 100644 index 000000000..5e476d0d0 --- /dev/null +++ b/web/html/doc/thread_manager_8hpp_source.html @@ -0,0 +1,687 @@ + + + + + + + +OpenPose: include/openpose/thread/threadManager.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    threadManager.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_THREAD_MANAGER_HPP
    +
    2 #define OPENPOSE_THREAD_THREAD_MANAGER_HPP
    +
    3 
    +
    4 #include <atomic>
    +
    5 #include <set> // std::multiset
    +
    6 #include <tuple>
    + + + + + +
    12 
    +
    13 namespace op
    +
    14 {
    +
    15  template<typename TDatums, typename TWorker = std::shared_ptr<Worker<TDatums>>, typename TQueue = Queue<TDatums>>
+
16  class ThreadManager
+
    17  {
    +
    18  public:
    +
    19  // Completely customizable case
    +
    20  explicit ThreadManager(const ThreadManagerMode threadManagerMode = ThreadManagerMode::Synchronous);
    +
    21 
    +
    22  virtual ~ThreadManager();
    +
    23 
    +
    33  void setDefaultMaxSizeQueues(const long long defaultMaxSizeQueues = -1);
    +
    34 
    +
    35  void add(const unsigned long long threadId, const std::vector<TWorker>& tWorkers,
    +
    36  const unsigned long long queueInId, const unsigned long long queueOutId);
    +
    37 
    +
    38  void add(const unsigned long long threadId, const TWorker& tWorker, const unsigned long long queueInId,
    +
    39  const unsigned long long queueOutId);
    +
    40 
    +
    41  void reset();
    +
    42 
    +
    43  void exec();
    +
    44 
    +
    45  void start();
    +
    46 
    +
    47  void stop();
    +
    48 
    +
    49  inline std::shared_ptr<std::atomic<bool>> getIsRunningSharedPtr()
    +
    50  {
    +
    51  return spIsRunning;
    +
    52  }
    +
    53 
    +
    54  inline bool isRunning() const
    +
    55  {
    +
    56  return *spIsRunning;
    +
    57  }
    +
    58 
    +
    59  bool tryEmplace(TDatums& tDatums);
    +
    60 
    +
    61  bool waitAndEmplace(TDatums& tDatums);
    +
    62 
    +
    63  bool tryPush(const TDatums& tDatums);
    +
    64 
    +
    65  bool waitAndPush(const TDatums& tDatums);
    +
    66 
    +
    67  bool tryPop(TDatums& tDatums);
    +
    68 
    +
    69  bool waitAndPop(TDatums& tDatums);
    +
    70 
    +
    71  private:
    +
    72  const ThreadManagerMode mThreadManagerMode;
    +
    73  std::shared_ptr<std::atomic<bool>> spIsRunning;
    +
    74  long long mDefaultMaxSizeQueues;
    +
    75  std::multiset<std::tuple<unsigned long long, std::vector<TWorker>, unsigned long long, unsigned long long>> mThreadWorkerQueues;
    +
    76  std::vector<std::shared_ptr<Thread<TDatums, TWorker>>> mThreads;
    +
    77  std::vector<std::shared_ptr<TQueue>> mTQueues;
    +
    78 
    +
    79  void add(const std::vector<std::tuple<unsigned long long, std::vector<TWorker>, unsigned long long, unsigned long long>>& threadWorkerQueues);
    +
    80 
    +
    81  void add(const std::vector<std::tuple<unsigned long long, TWorker, unsigned long long, unsigned long long>>& threadWorkerQueues);
    +
    82 
    +
    83  void multisetToThreads();
    +
    84 
    +
    85  void checkAndCreateEmptyThreads();
    +
    86 
    +
    87  void checkAndCreateQueues();
    +
    88 
    +
    89  DELETE_COPY(ThreadManager);
    +
    90  };
    +
    91 }
    +
    92 
    +
    93 
    +
    94 
    +
    95 
    +
    96 
    +
    97 // Implementation
    +
    98 #include <utility> // std::pair
    + + + + + + +
    105 namespace op
    +
    106 {
    +
    107  template<typename TDatums, typename TWorker, typename TQueue>
+
108  ThreadManager<TDatums, TWorker, TQueue>::ThreadManager(const ThreadManagerMode threadManagerMode) :
+
    109  mThreadManagerMode{threadManagerMode},
    +
    110  spIsRunning{std::make_shared<std::atomic<bool>>(false)},
    +
    111  mDefaultMaxSizeQueues{-1ll}
    +
    112  {
    +
    113  }
    +
    114 
    +
    115  template<typename TDatums, typename TWorker, typename TQueue>
+
116  ThreadManager<TDatums, TWorker, TQueue>::~ThreadManager()
+
    117  {
    +
    118  }
    +
    119 
    +
    120  template<typename TDatums, typename TWorker, typename TQueue>
    +
    121  void ThreadManager<TDatums, TWorker, TQueue>::setDefaultMaxSizeQueues(const long long defaultMaxSizeQueues)
    +
    122  {
    +
    123  try
    +
    124  {
    +
    125  mDefaultMaxSizeQueues = {defaultMaxSizeQueues};
    +
    126  }
    +
    127  catch (const std::exception& e)
    +
    128  {
    +
    129  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    130  }
    +
    131  }
    +
    132 
    +
    133  template<typename TDatums, typename TWorker, typename TQueue>
    +
    134  void ThreadManager<TDatums, TWorker, TQueue>::add(const unsigned long long threadId,
    +
    135  const std::vector<TWorker>& tWorkers,
    +
    136  const unsigned long long queueInId,
    +
    137  const unsigned long long queueOutId)
    +
    138  {
    +
    139  try
    +
    140  {
    +
    141  add({std::make_tuple(threadId, tWorkers, queueInId, queueOutId)});
    +
    142  }
    +
    143  catch (const std::exception& e)
    +
    144  {
    +
    145  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    146  }
    +
    147  }
    +
    148 
    +
    149  template<typename TDatums, typename TWorker, typename TQueue>
    +
    150  void ThreadManager<TDatums, TWorker, TQueue>::add(const unsigned long long threadId,
    +
    151  const TWorker& tWorker,
    +
    152  const unsigned long long queueInId,
    +
    153  const unsigned long long queueOutId)
    +
    154  {
    +
    155  try
    +
    156  {
    +
    157  add({std::make_tuple(threadId, std::vector<TWorker>{tWorker}, queueInId, queueOutId)});
    +
    158  }
    +
    159  catch (const std::exception& e)
    +
    160  {
    +
    161  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    162  }
    +
    163  }
    +
    164 
    +
    165  template<typename TDatums, typename TWorker, typename TQueue>
+
166  void ThreadManager<TDatums, TWorker, TQueue>::reset()
+
    167  {
    +
    168  try
    +
    169  {
    +
    170  mThreadWorkerQueues.clear();
    +
    171  mThreads.clear();
    +
    172  mTQueues.clear();
    +
    173  }
    +
    174  catch (const std::exception& e)
    +
    175  {
    +
    176  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    177  }
    +
    178  }
    +
    179 
    +
    180  template<typename TDatums, typename TWorker, typename TQueue>
+
181  void ThreadManager<TDatums, TWorker, TQueue>::exec()
+
    182  {
    +
    183  try
    +
    184  {
    +
    185  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    186  // Set threads
    +
    187  multisetToThreads();
    +
    188  if (!mThreads.empty())
    +
    189  {
    +
    190  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    191  // Start threads
    +
    192  for (auto i = 0u; i < mThreads.size() - 1; i++)
    +
    193  mThreads.at(i)->startInThread();
    +
    194  (*mThreads.rbegin())->exec(spIsRunning);
    +
    195  // Stop threads - It will arrive here when the exec() command has finished
    +
    196  stop();
    +
    197  }
    +
    198  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    199  }
    +
    200  catch (const std::exception& e)
    +
    201  {
    +
    202  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    203  }
    +
    204  }
    +
    205 
    +
    206  template<typename TDatums, typename TWorker, typename TQueue>
+
207  void ThreadManager<TDatums, TWorker, TQueue>::start()
+
    208  {
    +
    209  try
    +
    210  {
    +
    211  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    212  // Set threads
    +
    213  multisetToThreads();
    +
    214  // Start threads
    +
    215  for (auto& thread : mThreads)
    +
    216  thread->startInThread();
    +
    217  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    218  }
    +
    219  catch (const std::exception& e)
    +
    220  {
    +
    221  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    222  }
    +
    223  }
    +
    224 
    +
    225  template<typename TDatums, typename TWorker, typename TQueue>
+
226  void ThreadManager<TDatums, TWorker, TQueue>::stop()
+
    227  {
    +
    228  try
    +
    229  {
    +
    230  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    231  for (auto& tQueue : mTQueues)
    +
    232  tQueue->stop();
    +
    233  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    234  *spIsRunning = false;
    +
    235  for (auto& thread : mThreads)
    +
    236  thread->stopAndJoin();
    +
    237  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
+
238  checkWorkerErrors();
+
    239  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    240  }
    +
    241  catch (const std::exception& e)
    +
    242  {
    +
    243  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    244  }
    +
    245  }
    +
    246 
    +
    247  template<typename TDatums, typename TWorker, typename TQueue>
+
248  bool ThreadManager<TDatums, TWorker, TQueue>::tryEmplace(TDatums& tDatums)
+
    249  {
    +
    250  try
    +
    251  {
    +
    252  if (mThreadManagerMode != ThreadManagerMode::Asynchronous
    +
    253  && mThreadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    254  error("Not available for this ThreadManagerMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    255  if (mTQueues.empty())
    +
    256  error("ThreadManager already stopped or not started yet.", __LINE__, __FUNCTION__, __FILE__);
    +
    257  return mTQueues[0]->tryEmplace(tDatums);
    +
    258  }
    +
    259  catch (const std::exception& e)
    +
    260  {
    +
    261  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    262  return false;
    +
    263  }
    +
    264  }
    +
    265 
    +
    266  template<typename TDatums, typename TWorker, typename TQueue>
+
267  bool ThreadManager<TDatums, TWorker, TQueue>::waitAndEmplace(TDatums& tDatums)
+
    268  {
    +
    269  try
    +
    270  {
    +
    271  if (mThreadManagerMode != ThreadManagerMode::Asynchronous
    +
    272  && mThreadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    273  error("Not available for this ThreadManagerMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    274  if (mTQueues.empty())
    +
    275  error("ThreadManager already stopped or not started yet.", __LINE__, __FUNCTION__, __FILE__);
    +
    276  return mTQueues[0]->waitAndEmplace(tDatums);
    +
    277  }
    +
    278  catch (const std::exception& e)
    +
    279  {
    +
    280  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    281  return false;
    +
    282  }
    +
    283  }
    +
    284 
    +
    285  template<typename TDatums, typename TWorker, typename TQueue>
+
286  bool ThreadManager<TDatums, TWorker, TQueue>::tryPush(const TDatums& tDatums)
+
    287  {
    +
    288  try
    +
    289  {
    +
    290  if (mThreadManagerMode != ThreadManagerMode::Asynchronous
    +
    291  && mThreadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    292  error("Not available for this ThreadManagerMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    293  if (mTQueues.empty())
    +
    294  error("ThreadManager already stopped or not started yet.", __LINE__, __FUNCTION__, __FILE__);
    +
    295  return mTQueues[0]->tryPush(tDatums);
    +
    296  }
    +
    297  catch (const std::exception& e)
    +
    298  {
    +
    299  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    300  return false;
    +
    301  }
    +
    302  }
    +
    303 
    +
    304  template<typename TDatums, typename TWorker, typename TQueue>
+
305  bool ThreadManager<TDatums, TWorker, TQueue>::waitAndPush(const TDatums& tDatums)
+
    306  {
    +
    307  try
    +
    308  {
    +
    309  if (mThreadManagerMode != ThreadManagerMode::Asynchronous
    +
    310  && mThreadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    311  error("Not available for this ThreadManagerMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    312  if (mTQueues.empty())
    +
    313  error("ThreadManager already stopped or not started yet.", __LINE__, __FUNCTION__, __FILE__);
    +
    314  return mTQueues[0]->waitAndPush(tDatums);
    +
    315  }
    +
    316  catch (const std::exception& e)
    +
    317  {
    +
    318  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    319  return false;
    +
    320  }
    +
    321  }
    +
    322 
    +
    323  template<typename TDatums, typename TWorker, typename TQueue>
+
324  bool ThreadManager<TDatums, TWorker, TQueue>::tryPop(TDatums& tDatums)
+
    325  {
    +
    326  try
    +
    327  {
    +
    328  if (mThreadManagerMode != ThreadManagerMode::Asynchronous
    +
    329  && mThreadManagerMode != ThreadManagerMode::AsynchronousOut)
    +
    330  error("Not available for this ThreadManagerMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    331  if (mTQueues.empty())
    +
    332  error("ThreadManager already stopped or not started yet.", __LINE__, __FUNCTION__, __FILE__);
    +
    333  return (*mTQueues.rbegin())->tryPop(tDatums);
    +
    334  }
    +
    335  catch (const std::exception& e)
    +
    336  {
    +
    337  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    338  return false;
    +
    339  }
    +
    340  }
    +
    341 
    +
    342  template<typename TDatums, typename TWorker, typename TQueue>
+
343  bool ThreadManager<TDatums, TWorker, TQueue>::waitAndPop(TDatums& tDatums)
+
    344  {
    +
    345  try
    +
    346  {
    +
    347  if (mThreadManagerMode != ThreadManagerMode::Asynchronous
    +
    348  && mThreadManagerMode != ThreadManagerMode::AsynchronousOut)
    +
    349  error("Not available for this ThreadManagerMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    350  if (mTQueues.empty())
    +
    351  error("ThreadManager already stopped or not started yet.", __LINE__, __FUNCTION__, __FILE__);
    +
    352  return (*mTQueues.rbegin())->waitAndPop(tDatums);
    +
    353  }
    +
    354  catch (const std::exception& e)
    +
    355  {
    +
    356  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    357  return false;
    +
    358  }
    +
    359  }
    +
    360 
    +
    361  template<typename TDatums, typename TWorker, typename TQueue>
    +
    362  void ThreadManager<TDatums, TWorker, TQueue>::add(const std::vector<std::tuple<unsigned long long, std::vector<TWorker>,
    +
    363  unsigned long long, unsigned long long>>& threadWorkerQueues)
    +
    364  {
    +
    365  try
    +
    366  {
    +
    367  for (const auto& threadWorkerQueue : threadWorkerQueues)
    +
    368  mThreadWorkerQueues.insert(threadWorkerQueue);
    +
    369  }
    +
    370  catch (const std::exception& e)
    +
    371  {
    +
    372  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    373  }
    +
    374  }
    +
    375 
    +
    376  template<typename TDatums, typename TWorker, typename TQueue>
    +
    377  void ThreadManager<TDatums, TWorker, TQueue>::add(const std::vector<std::tuple<unsigned long long, TWorker, unsigned long long,
    +
    378  unsigned long long>>& threadWorkerQueues)
    +
    379  {
    +
    380  try
    +
    381  {
    +
    382  for (const auto& threadWorkerQueue : threadWorkerQueues)
    +
    383  add({std::make_tuple(std::get<0>(threadWorkerQueue),
    +
    384  std::vector<TWorker>{std::get<1>(threadWorkerQueue)},
    +
    385  std::get<2>(threadWorkerQueue),
    +
    386  std::get<3>(threadWorkerQueue))});
    +
    387  }
    +
    388  catch (const std::exception& e)
    +
    389  {
    +
    390  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    391  }
    +
    392  }
    +
    393 
    +
    394  template<typename TDatums, typename TWorker, typename TQueue>
    +
    395  void ThreadManager<TDatums, TWorker, TQueue>::multisetToThreads()
    +
    396  {
    +
    397  try
    +
    398  {
    +
    399  if (!mThreadWorkerQueues.empty())
    +
    400  {
    +
    401  // This avoids extra std::cout if errors occur on different threads
    +
    402  setMainThread();
    +
    403 
    +
    404  // Check threads
    +
    405  checkAndCreateEmptyThreads();
    +
    406 
    +
    407  // Check and create queues
    +
    408  checkAndCreateQueues();
    +
    409 
    +
    410  // Data
    +
    411  const auto maxQueueIdSynchronous = mTQueues.size()+1;
    +
    412 
    +
    413  // Set up threads
    +
    414  for (const auto& threadWorkerQueue : mThreadWorkerQueues)
    +
    415  {
    +
    416  auto& thread = mThreads[std::get<0>(threadWorkerQueue)];
    +
    417  const auto& tWorkers = std::get<1>(threadWorkerQueue);
    +
    418  const auto queueIn = std::get<2>(threadWorkerQueue);
    +
    419  const auto queueOut = std::get<3>(threadWorkerQueue);
    +
    420  std::shared_ptr<SubThread<TDatums, TWorker>> subThread;
    +
    421  // If AsynchronousIn -> queue indexes are OK
    +
    422  if (mThreadManagerMode == ThreadManagerMode::Asynchronous
    +
    423  || mThreadManagerMode == ThreadManagerMode::AsynchronousIn)
    +
    424  {
    +
    425  if (mThreadManagerMode == ThreadManagerMode::AsynchronousIn
    +
    426  && queueOut == mTQueues.size())
    +
    427  subThread = {std::make_shared<SubThreadQueueIn<TDatums, TWorker, TQueue>>(
    +
    428  tWorkers, mTQueues.at(queueIn))};
    +
    429  else
    +
    430  subThread = {std::make_shared<SubThreadQueueInOut<TDatums, TWorker, TQueue>>(
    +
    431  tWorkers, mTQueues.at(queueIn), mTQueues.at(queueOut))};
    +
    432  }
    +
    433  // If !AsynchronousIn -> queue indexes - 1
    +
    434  else if (queueOut != maxQueueIdSynchronous
    +
    435  || mThreadManagerMode == ThreadManagerMode::AsynchronousOut)
    +
    436  {
    +
    437  // Queue in + out
    +
    438  if (queueIn != 0)
    +
    439  subThread = {std::make_shared<SubThreadQueueInOut<TDatums, TWorker, TQueue>>(
    +
    440  tWorkers, mTQueues.at(queueIn-1), mTQueues.at(queueOut-1))};
    +
    441  // Case queue out (first TWorker(s))
    +
    442  else
    +
    443  subThread = {std::make_shared<SubThreadQueueOut<TDatums, TWorker, TQueue>>(
    +
    444  tWorkers, mTQueues.at(queueOut-1))};
    +
    445  }
    +
    446  // Case queue in (last TWorker(s))
    +
    447  else if (queueIn != 0) // && queueOut == maxQueueIdSynchronous
    +
    448  subThread = {std::make_shared<SubThreadQueueIn<TDatums, TWorker, TQueue>>(
    +
    449  tWorkers, mTQueues.at(queueIn-1))};
    +
    450  // Case no queue
    +
    451  else // if (queueIn == 0 && queueOut == maxQueueIdSynchronous)
    +
    452  subThread = {std::make_shared<SubThreadNoQueue<TDatums, TWorker>>(tWorkers)};
    +
    453  thread->add(subThread);
    +
    454  }
    +
    455  }
    +
    456  else
    +
    457  error("Empty, no TWorker(s) added.", __LINE__);
    +
    458  }
    +
    459  catch (const std::exception& e)
    +
    460  {
    +
    461  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    462  }
    +
    463  }
    +
    464 
    +
    465  template<typename TDatums, typename TWorker, typename TQueue>
    +
    466  void ThreadManager<TDatums, TWorker, TQueue>::checkAndCreateEmptyThreads()
    +
    467  {
    +
    468  try
    +
    469  {
    +
    470  // Check all thread ids from 0-maxThreadId are present
    +
    471  const auto maxThreadId = std::get<0>(*mThreadWorkerQueues.crbegin());
    +
    472  auto previousThreadId = std::get<0>(*mThreadWorkerQueues.cbegin());
    +
    473  for (const auto& threadWorkerQueue : mThreadWorkerQueues)
    +
    474  {
    +
    475  const auto currentThreadId = std::get<0>(threadWorkerQueue);
    +
    476  if (currentThreadId - previousThreadId > 1)
    +
    477  error("Missing thread id " + std::to_string(currentThreadId) + " of "
    +
    478  + std::to_string(maxThreadId) + ".", __LINE__, __FUNCTION__, __FILE__);
    +
    479  previousThreadId = currentThreadId;
    +
    480  }
    +
    481 
    +
    482  // Create Threads
    +
    483  // #threads = maxThreadId+1
    +
    484  mThreads.resize(maxThreadId);
    +
    485  for (auto& thread : mThreads)
    +
    486  thread = std::make_shared<Thread<TDatums, TWorker>>();
    +
    487  mThreads.emplace_back(std::make_shared<Thread<TDatums, TWorker>>(spIsRunning));
    +
    488  }
    +
    489  catch (const std::exception& e)
    +
    490  {
    +
    491  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    492  }
    +
    493  }
    +
    494 
    +
    495  template<typename TDatums, typename TWorker, typename TQueue>
    +
    496  void ThreadManager<TDatums, TWorker, TQueue>::checkAndCreateQueues()
    +
    497  {
    +
    498  try
    +
    499  {
    +
    500  if (!mThreadWorkerQueues.empty())
    +
    501  {
    +
    502  // Get max queue id to get queue size
    +
    503  auto maxQueueId = std::get<3>(*mThreadWorkerQueues.cbegin());
    +
    504  for (const auto& threadWorkerQueue : mThreadWorkerQueues)
    +
    505  maxQueueId = fastMax(
    +
    506  maxQueueId, fastMax(std::get<2>(threadWorkerQueue), std::get<3>(threadWorkerQueue)));
    +
    507 
    +
    508  // Check each queue id has at least a worker that uses it as input and another one as output.
    +
    509  // Special cases:
    +
    510  std::vector<std::pair<bool, bool>> usedQueueIds(maxQueueId+1, {false, false});
    +
    511  for (const auto& threadWorkerQueue : mThreadWorkerQueues)
    +
    512  {
    +
    513  usedQueueIds.at(std::get<2>(threadWorkerQueue)).first = true;
    +
    514  usedQueueIds.at(std::get<3>(threadWorkerQueue)).second = true;
    +
    515  }
    +
516  // Id 0 only needs a worker using it as input.
    +
    517  usedQueueIds.begin()->second = true;
    +
    518  // Id maxQueueId only needs a worker using it as output.
    +
    519  usedQueueIds.rbegin()->first = true;
    +
    520  // Error if missing queue id
    +
    521  for (auto i = 0ull ; i < usedQueueIds.size() ; i++)
    +
    522  {
    +
    523  if (!usedQueueIds[i].first)
    +
    524  error("Missing queue id " + std::to_string(i) + " (of "
    +
    525  + std::to_string(maxQueueId) + ") as input.", __LINE__, __FUNCTION__, __FILE__);
    +
    526  if (!usedQueueIds[i].second)
    +
    527  error("Missing queue id " + std::to_string(i) + " (of "
    +
    528  + std::to_string(maxQueueId) + ") as output.", __LINE__, __FUNCTION__, __FILE__);
    +
    529  }
    +
    530 
    +
    531  // Create Queues
    +
    532  if (mThreadManagerMode == ThreadManagerMode::Asynchronous)
    +
    533  mTQueues.resize(maxQueueId+1); // First and last one are queues
    +
    534  else if (mThreadManagerMode == ThreadManagerMode::Synchronous)
    +
    535  mTQueues.resize(maxQueueId-1); // First and last one are not actually queues
    +
    536  else if (mThreadManagerMode == ThreadManagerMode::AsynchronousIn
    +
    537  || mThreadManagerMode == ThreadManagerMode::AsynchronousOut)
    +
    538  mTQueues.resize(maxQueueId); // First or last one is queue
    +
    539  else
    +
    540  error("Unknown ThreadManagerMode", __LINE__, __FUNCTION__, __FILE__);
    +
    541  for (auto& tQueue : mTQueues)
    +
    542  tQueue = std::make_shared<TQueue>(mDefaultMaxSizeQueues);
    +
    543  }
    +
    544  }
    +
    545  catch (const std::exception& e)
    +
    546  {
    +
    547  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    548  }
    +
    549  }
    +
    550 
+
551  COMPILE_TEMPLATE_DATUM(ThreadManager);
+
    552 }
    +
    553 
    +
    554 #endif // OPENPOSE_THREAD_THREAD_MANAGER_HPP
    + + +
    virtual ~ThreadManager()
    +
    bool waitAndEmplace(TDatums &tDatums)
    +
    bool waitAndPop(TDatums &tDatums)
    + +
    std::shared_ptr< std::atomic< bool > > getIsRunningSharedPtr()
    +
    bool tryPop(TDatums &tDatums)
    + + +
    bool tryPush(const TDatums &tDatums)
    +
    bool isRunning() const
    +
    void add(const unsigned long long threadId, const std::vector< TWorker > &tWorkers, const unsigned long long queueInId, const unsigned long long queueOutId)
    +
    ThreadManager(const ThreadManagerMode threadManagerMode=ThreadManagerMode::Synchronous)
    +
    bool tryEmplace(TDatums &tDatums)
    +
    bool waitAndPush(const TDatums &tDatums)
    +
    void setDefaultMaxSizeQueues(const long long defaultMaxSizeQueues=-1)
    + + + +
    ThreadManagerMode
    Definition: enumClasses.hpp:10
    + + + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    OP_API void setMainThread()
    +
    OP_API void checkWorkerErrors()
    +
    T fastMax(const T a, const T b)
    Definition: fastMath.hpp:73
    +
    OP_API void opLog(const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    + + + + + + + + + + +
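ThreadManager is the intended entry point for custom OpenPose pipelines, so a short usage sketch may help. The worker objects are placeholders for any op::Worker-derived classes, and the TDatums alias is the usual OpenPose container; both are assumptions rather than part of this header.

    // Usage sketch: three workers chained through queues 0 -> 1 -> 2 -> 3 in Synchronous mode.
    #include <memory>
    #include <vector>
    #include <openpose/headers.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;
    using TWorker = std::shared_ptr<op::Worker<TDatums>>;

    void runPipelineSketch(const TWorker& wInput, const TWorker& wProcess, const TWorker& wOutput)
    {
        op::ThreadManager<TDatums> threadManager{op::ThreadManagerMode::Synchronous};
        threadManager.setDefaultMaxSizeQueues(-1);   // -1: queues of unlimited size
        // add(threadId, worker, queueInId, queueOutId)
        threadManager.add(0, wInput,   0, 1);        // thread 0 produces into queue 1
        threadManager.add(1, wProcess, 1, 2);        // thread 1 maps queue 1 -> queue 2
        threadManager.add(2, wOutput,  2, 3);        // thread 2 consumes queue 2
        threadManager.exec();                        // blocks until all workers have finished
    }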
    +
    + + + + diff --git a/web/html/doc/tracking_2headers_8hpp.html b/web/html/doc/tracking_2headers_8hpp.html new file mode 100644 index 000000000..d7d8b060c --- /dev/null +++ b/web/html/doc/tracking_2headers_8hpp.html @@ -0,0 +1,106 @@ + + + + + + + +OpenPose: include/openpose/tracking/headers.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    headers.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/tracking_2headers_8hpp_source.html b/web/html/doc/tracking_2headers_8hpp_source.html new file mode 100644 index 000000000..5a20c1a04 --- /dev/null +++ b/web/html/doc/tracking_2headers_8hpp_source.html @@ -0,0 +1,113 @@ + + + + + + + +OpenPose: include/openpose/tracking/headers.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    headers.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_TRACKING_HEADERS_HPP
    +
    2 #define OPENPOSE_TRACKING_HEADERS_HPP
    +
    3 
    +
    4 // tracking module
    + + + +
    8 
    +
    9 #endif // OPENPOSE_TRACKING_HEADERS_HPP
    + + + +
    +
    + + + + diff --git a/web/html/doc/udp_sender_8hpp.html b/web/html/doc/udp_sender_8hpp.html new file mode 100644 index 000000000..262a65919 --- /dev/null +++ b/web/html/doc/udp_sender_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/filestream/udpSender.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    udpSender.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::UdpSender
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/udp_sender_8hpp_source.html b/web/html/doc/udp_sender_8hpp_source.html new file mode 100644 index 000000000..dc11e30b7 --- /dev/null +++ b/web/html/doc/udp_sender_8hpp_source.html @@ -0,0 +1,140 @@ + + + + + + + +OpenPose: include/openpose/filestream/udpSender.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    udpSender.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_UDP_SENDER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_UDP_SENDER_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
+
8  class OP_API UdpSender
+
    9  {
    +
    10  public:
    +
    11  UdpSender(const std::string& udpHost, const std::string& udpPort);
    +
    12 
    +
    13  virtual ~UdpSender();
    +
    14 
    +
    15  void sendJointAngles(const double* const adamPosePtr, const int adamPoseRows,
    +
    16  const double* const adamTranslationPtr,
    +
    17  const double* const adamFaceCoeffsExpPtr, const int faceCoeffRows);
    +
    18 
    +
    19  private:
    +
    20  // PIMPL idiom
    +
    21  // http://www.cppsamples.com/common-tasks/pimpl.html
    +
    22  struct ImplUdpSender;
    +
    23  std::shared_ptr<ImplUdpSender> spImpl;
    +
    24 
    +
25  // PIMPL requires DELETE_COPY & destructor, or extra code
    +
    26  // http://oliora.github.io/2015/12/29/pimpl-and-rule-of-zero.html
+
27  DELETE_COPY(UdpSender);
+
    28  };
    +
    29 }
    +
    30 
    +
    31 #endif // OPENPOSE_FILESTREAM_UDP_SENDER_HPP
    + +
    void sendJointAngles(const double *const adamPosePtr, const int adamPoseRows, const double *const adamTranslationPtr, const double *const adamFaceCoeffsExpPtr, const int faceCoeffRows)
    +
    UdpSender(const std::string &udpHost, const std::string &udpPort)
    +
    virtual ~UdpSender()
    + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + +
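A construction sketch for op::UdpSender follows; the host, port, and Adam buffer sizes are illustrative assumptions, since the class is normally fed by OpenPose's 3-D Adam fitting output rather than hand-built arrays.

    // Sketch only: buffer sizes and layout are assumptions for illustration.
    #include <vector>
    #include <openpose/filestream/udpSender.hpp>

    void udpSenderSketch()
    {
        op::UdpSender udpSender{"127.0.0.1", "8051"};        // example host and port

        const int adamPoseRows = 62;                         // assumed number of Adam joints
        std::vector<double> adamPose(3 * adamPoseRows, 0.);  // assumed axis-angle per joint
        std::vector<double> adamTranslation(3, 0.);          // global translation
        const int faceCoeffRows = 200;                       // assumed expression basis size
        std::vector<double> adamFaceCoeffsExp(faceCoeffRows, 0.);

        udpSender.sendJointAngles(adamPose.data(), adamPoseRows,
                                  adamTranslation.data(),
                                  adamFaceCoeffsExp.data(), faceCoeffRows);
    }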
    +
    + + + + diff --git a/web/html/doc/unity_2headers_8hpp.html b/web/html/doc/unity_2headers_8hpp.html new file mode 100644 index 000000000..c39147da3 --- /dev/null +++ b/web/html/doc/unity_2headers_8hpp.html @@ -0,0 +1,104 @@ + + + + + + + +OpenPose: include/openpose/unity/headers.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    headers.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/unity_2headers_8hpp_source.html b/web/html/doc/unity_2headers_8hpp_source.html new file mode 100644 index 000000000..719ae36ac --- /dev/null +++ b/web/html/doc/unity_2headers_8hpp_source.html @@ -0,0 +1,109 @@ + + + + + + + +OpenPose: include/openpose/unity/headers.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    headers.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_UNITY_HEADERS_HPP
    +
    2 #define OPENPOSE_UNITY_HEADERS_HPP
    +
    3 
    +
    4 // unity module
    + +
    6 
    +
    7 #endif // OPENPOSE_UNITY_HEADERS_HPP
    + +
    +
    + + + + diff --git a/web/html/doc/unity_binding_8hpp.html b/web/html/doc/unity_binding_8hpp.html new file mode 100644 index 000000000..b7733605a --- /dev/null +++ b/web/html/doc/unity_binding_8hpp.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: include/openpose/unity/unityBinding.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    unityBinding.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/unity_binding_8hpp_source.html b/web/html/doc/unity_binding_8hpp_source.html new file mode 100644 index 000000000..f88f88714 --- /dev/null +++ b/web/html/doc/unity_binding_8hpp_source.html @@ -0,0 +1,104 @@ + + + + + + + +OpenPose: include/openpose/unity/unityBinding.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    unityBinding.hpp
    +
    +
    +Go to the documentation of this file.
    1 // Temporarily, all the code is located in
    +
    2 // src/openpose/unity/unityBinding.cpp
    +
    3 // TODO: Move functionality from unityBinding.cpp to this class
    +
    +
    + + + + diff --git a/web/html/doc/utilities_2enum_classes_8hpp.html b/web/html/doc/utilities_2enum_classes_8hpp.html new file mode 100644 index 000000000..382d810b1 --- /dev/null +++ b/web/html/doc/utilities_2enum_classes_8hpp.html @@ -0,0 +1,141 @@ + + + + + + + +OpenPose: include/openpose/utilities/enumClasses.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    enumClasses.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + + + + + +

    +Enumerations

    enum class  op::ErrorMode : unsigned char { op::StdRuntimeError +, op::FileLogging +, op::StdCerr +, op::All + }
     
    enum class  op::LogMode : unsigned char { op::FileLogging +, op::StdCout +, op::All + }
     
    enum class  op::Priority : unsigned char {
    +  op::None = 0 +, op::Low = 1 +, op::Normal = 2 +, op::High = 3 +,
    +  op::Max = 4 +, op::NoOutput = 255 +
    + }
     
    enum class  op::Extensions : unsigned char { op::Images +, op::Size + }
     
    +
    +
    + + + + diff --git a/web/html/doc/utilities_2enum_classes_8hpp.js b/web/html/doc/utilities_2enum_classes_8hpp.js new file mode 100644 index 000000000..1215fa084 --- /dev/null +++ b/web/html/doc/utilities_2enum_classes_8hpp.js @@ -0,0 +1,26 @@ +var utilities_2enum_classes_8hpp = +[ + [ "ErrorMode", "utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400c", [ + [ "StdRuntimeError", "utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400cafe50b062b9b9100a72e68b48fe26fc50", null ], + [ "FileLogging", "utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400ca68ec2bf5b1662d1d27a523dcfc3c702a", null ], + [ "StdCerr", "utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400ca002f2100f8870e7c823894f492e4d337", null ], + [ "All", "utilities_2enum_classes_8hpp.html#a5f5a4cee9809deaf7201fb9caf5e400cab1c94ca2fbc3e78fc30069c8d0f01680", null ] + ] ], + [ "Extensions", "utilities_2enum_classes_8hpp.html#a553bd31855c20a0d14e4c44a20bd91da", [ + [ "Images", "utilities_2enum_classes_8hpp.html#a553bd31855c20a0d14e4c44a20bd91daafff0d600f8a0b5e19e88bfb821dd1157", null ], + [ "Size", "utilities_2enum_classes_8hpp.html#a553bd31855c20a0d14e4c44a20bd91daa6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "LogMode", "utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94", [ + [ "FileLogging", "utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94a68ec2bf5b1662d1d27a523dcfc3c702a", null ], + [ "StdCout", "utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94aa544d56d9492a20da20018000b5043b6", null ], + [ "All", "utilities_2enum_classes_8hpp.html#a5fa46d7c4b25c823d1cdcc8e9d460f94ab1c94ca2fbc3e78fc30069c8d0f01680", null ] + ] ], + [ "Priority", "utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14", [ + [ "None", "utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a6adf97f83acf6453d4a6a4b1070f3754", null ], + [ "Low", "utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a28d0edd045e05cf5af64e35ae0c4c6ef", null ], + [ "Normal", "utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a960b44c579bc2f6818d2daaf9e4c16f0", null ], + [ "High", "utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a655d20c1ca69519ca647684edbb2db35", null ], + [ "Max", "utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a6a061313d22e51e0f25b7cd4dc065233", null ], + [ "NoOutput", "utilities_2enum_classes_8hpp.html#adc43fb9031418e7f8112816a3b535d14a828d496739024f4af00df1e277d96ebd", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/utilities_2enum_classes_8hpp_source.html b/web/html/doc/utilities_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..1f6b110d0 --- /dev/null +++ b/web/html/doc/utilities_2enum_classes_8hpp_source.html @@ -0,0 +1,159 @@ + + + + + + + +OpenPose: include/openpose/utilities/enumClasses.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    enumClasses.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_UTILITIES_ENUM_CLASSES_HPP
    +
    2 #define OPENPOSE_UTILITIES_ENUM_CLASSES_HPP
    +
    3 
    +
    4 namespace op
    +
    5 {
    +
    6  enum class ErrorMode : unsigned char
    +
    7  {
+
8  StdRuntimeError,
+
9  FileLogging,
+
    10  StdCerr,
    +
    11  All,
    +
    12  };
    +
    13 
    +
    14  enum class LogMode : unsigned char
    +
    15  {
+
16  FileLogging,
+
    17  StdCout,
    +
    18  All,
    +
    19  };
    +
    20 
    +
    21  enum class Priority : unsigned char
    +
    22  {
    +
    23  None = 0,
    +
    24  Low = 1,
    +
    25  Normal = 2,
    +
    26  High = 3,
    +
    27  Max = 4,
    +
    28  NoOutput = 255,
    +
    29  };
    +
    30 
    +
    31  enum class Extensions : unsigned char
    +
    32  {
    +
    33  Images, // jpg, png, ...
    +
    34  Size
    +
    35  };
    +
    36 }
    +
    37 
    +
    38 #endif // OPENPOSE_UTILITIES_ENUM_CLASSES_HPP
    + +
    Extensions
    Definition: enumClasses.hpp:32
    + + +
    ErrorMode
    Definition: enumClasses.hpp:7
    + + + + + + + + +
    Priority
    Definition: enumClasses.hpp:22
    + + + + + + +
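These enums are consumed mainly by op::opLog and op::error (declared in utilities/errorAndLog.hpp). The sketch below shows Priority in practice; the note about the threshold being configurable (e.g. via the demo's --logging_level flag) reflects the usual OpenPose behaviour and is stated as an assumption here.

    // A message is printed only when its Priority reaches the configured logging threshold.
    #include <openpose/utilities/errorAndLog.hpp>

    void loggingSketch()
    {
        op::opLog("Fine-grained trace message", op::Priority::Low,
                  __LINE__, __FUNCTION__, __FILE__);
        op::opLog("Important message, shown unless logging is fully disabled",
                  op::Priority::Max);
        // op::error("...") would log and/or throw according to the active ErrorMode.
    }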
    +
    + + + + diff --git a/web/html/doc/utilities_2headers_8hpp.html b/web/html/doc/utilities_2headers_8hpp.html new file mode 100644 index 000000000..cb669bf4e --- /dev/null +++ b/web/html/doc/utilities_2headers_8hpp.html @@ -0,0 +1,115 @@ + + + + + + + +OpenPose: include/openpose/utilities/headers.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + + + + + + diff --git a/web/html/doc/utilities_2headers_8hpp_source.html b/web/html/doc/utilities_2headers_8hpp_source.html new file mode 100644 index 000000000..b4adb4725 --- /dev/null +++ b/web/html/doc/utilities_2headers_8hpp_source.html @@ -0,0 +1,131 @@ + + + + + + + +OpenPose: include/openpose/utilities/headers.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    headers.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_UTILITIES_HEADERS_HPP
    +
    2 #define OPENPOSE_UTILITIES_HEADERS_HPP
    +
    3 
    +
    4 // utilities module
    + + + + + + + + + + + + +
    17 
    +
    18 #endif // OPENPOSE_UTILITIES_HEADERS_HPP
    + + + + + + + + + + + + +
    +
    + + + + diff --git a/web/html/doc/utilities_2string_8hpp.html b/web/html/doc/utilities_2string_8hpp.html new file mode 100644 index 000000000..ae06f8768 --- /dev/null +++ b/web/html/doc/utilities_2string_8hpp.html @@ -0,0 +1,131 @@ + + + + + + + +OpenPose: include/openpose/utilities/string.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    string.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Namespaces

     op
     
    + + + + + + + + + + + + + + + + +

    +Functions

    OP_API unsigned long long op::getLastNumber (const std::string &string)
     
    template<typename T >
    std::string op::toFixedLengthString (const T number, const unsigned long long stringLength=0)
     
    OP_API std::vector< std::string > op::splitString (const std::string &stringToSplit, const std::string &delimiter)
     
    OP_API std::string op::toLower (const std::string &string)
     
    OP_API std::string op::toUpper (const std::string &string)
     
    OP_API std::string op::remove0sFromString (const std::string &string)
     
    OP_API std::string op::getFirstNumberOnString (const std::string &string)
     
    +
    +
    + + + + diff --git a/web/html/doc/utilities_2string_8hpp.js b/web/html/doc/utilities_2string_8hpp.js new file mode 100644 index 000000000..81e2f4c75 --- /dev/null +++ b/web/html/doc/utilities_2string_8hpp.js @@ -0,0 +1,10 @@ +var utilities_2string_8hpp = +[ + [ "getFirstNumberOnString", "utilities_2string_8hpp.html#a844c35ea57a8bc67f33f49deb5070652", null ], + [ "getLastNumber", "utilities_2string_8hpp.html#ab670c693d8e4a540cfe75ce8383b6d10", null ], + [ "remove0sFromString", "utilities_2string_8hpp.html#a2f610ba8a71cf16628df2f4d270b7d34", null ], + [ "splitString", "utilities_2string_8hpp.html#ae80a103d8a4308bc435342b3d31404c8", null ], + [ "toFixedLengthString", "utilities_2string_8hpp.html#a42292d44d10f55cb1d83a296183e9b31", null ], + [ "toLower", "utilities_2string_8hpp.html#a3290f48d24c9992dd00d339ce49cfac7", null ], + [ "toUpper", "utilities_2string_8hpp.html#a7a815e303884fb2b3346c8cc19d61b23", null ] +]; \ No newline at end of file diff --git a/web/html/doc/utilities_2string_8hpp_source.html b/web/html/doc/utilities_2string_8hpp_source.html new file mode 100644 index 000000000..13aaaca73 --- /dev/null +++ b/web/html/doc/utilities_2string_8hpp_source.html @@ -0,0 +1,135 @@ + + + + + + + +OpenPose: include/openpose/utilities/string.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    string.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_UTILITIES_STRING_HPP
    +
    2 #define OPENPOSE_UTILITIES_STRING_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
    +
    8  OP_API unsigned long long getLastNumber(const std::string& string);
    +
    9 
    +
    17  template<typename T>
    +
    18  std::string toFixedLengthString(const T number, const unsigned long long stringLength = 0);
    +
    19 
    +
    20  OP_API std::vector<std::string> splitString(const std::string& stringToSplit, const std::string& delimiter);
    +
    21 
    +
    22  OP_API std::string toLower(const std::string& string);
    +
    23 
    +
    24  OP_API std::string toUpper(const std::string& string);
    +
    25 
    +
    26  OP_API std::string remove0sFromString(const std::string& string);
    +
    27 
    +
    28  OP_API std::string getFirstNumberOnString(const std::string& string);
    +
    29 }
    +
    30 
    +
    31 #endif // OPENPOSE_UTILITIES_STRING_HPP
    + +
    #define OP_API
    Definition: macros.hpp:18
    + +
    OP_API std::string remove0sFromString(const std::string &string)
    +
    OP_API std::string toLower(const std::string &string)
    +
    std::string toFixedLengthString(const T number, const unsigned long long stringLength=0)
    +
    OP_API std::string toUpper(const std::string &string)
    +
    OP_API std::string getFirstNumberOnString(const std::string &string)
    +
    OP_API unsigned long long getLastNumber(const std::string &string)
    +
    OP_API std::vector< std::string > splitString(const std::string &stringToSplit, const std::string &delimiter)
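A quick sketch of the string helpers declared above; the values in the comments are the results one would expect from these signatures and are illustrative, not taken from the documentation.

    #include <string>
    #include <vector>
    #include <openpose/utilities/string.hpp>

    void stringHelpersSketch()
    {
        const auto parts  = op::splitString("body,hand,face", ",");    // {"body","hand","face"}
        const auto padded = op::toFixedLengthString(7, 4ull);          // "0007" (zero-padded)
        const auto upper  = op::toUpper("openpose");                   // "OPENPOSE"
        const auto lower  = op::toLower("COCO");                       // "coco"
        const auto number = op::getLastNumber("frame_000123.jpg");     // 123
    }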
    +
    +
    + + + + diff --git a/web/html/doc/verbose_printer_8hpp.html b/web/html/doc/verbose_printer_8hpp.html new file mode 100644 index 000000000..59efbe18b --- /dev/null +++ b/web/html/doc/verbose_printer_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/core/verbosePrinter.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    verbosePrinter.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::VerbosePrinter
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/verbose_printer_8hpp_source.html b/web/html/doc/verbose_printer_8hpp_source.html new file mode 100644 index 000000000..2ec8d4ce2 --- /dev/null +++ b/web/html/doc/verbose_printer_8hpp_source.html @@ -0,0 +1,132 @@ + + + + + + + +OpenPose: include/openpose/core/verbosePrinter.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    verbosePrinter.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_VERBOSE_PRINTER_HPP
    +
    2 #define OPENPOSE_CORE_VERBOSE_PRINTER_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
+
8  class OP_API VerbosePrinter
+
    9  {
    +
    10  public:
    +
    11  VerbosePrinter(const double verbose, const unsigned long long numberFrames);
    +
    12 
    +
    13  virtual ~VerbosePrinter();
    +
    14 
    +
    15  void printVerbose(const unsigned long long frameNumber) const;
    +
    16 
    +
    17  private:
    +
    18  const unsigned long long mNumberFrames;
    +
    19  const std::string mNumberFramesString;
    +
    20  const double mVerbose;
    +
    21  };
    +
    22 }
    +
    23 
    +
    24 #endif // OPENPOSE_CORE_VERBOSE_PRINTER_HPP
    + +
    virtual ~VerbosePrinter()
    +
    VerbosePrinter(const double verbose, const unsigned long long numberFrames)
    +
    void printVerbose(const unsigned long long frameNumber) const
    + +
    #define OP_API
    Definition: macros.hpp:18
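A sketch of how VerbosePrinter is typically driven from a frame loop; interpreting verbose < 1 as a fraction of the total frames mirrors the demo's --cli_verbose flag and is an assumption here, not something stated by this header.

    #include <openpose/core/verbosePrinter.hpp>

    void progressSketch()
    {
        const unsigned long long numberFrames = 300ull;
        // verbose = 0.1 is assumed to mean "report roughly every 10% of the frames".
        const op::VerbosePrinter verbosePrinter{0.1, numberFrames};
        for (auto frame = 0ull; frame < numberFrames; frame++)
        {
            // ... process the frame ...
            verbosePrinter.printVerbose(frame);
        }
    }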
    + +
    +
    + + + + diff --git a/web/html/doc/very__advanced_2library__structure_20__index_8md.html b/web/html/doc/very__advanced_2library__structure_20__index_8md.html new file mode 100644 index 000000000..04d361edb --- /dev/null +++ b/web/html/doc/very__advanced_2library__structure_20__index_8md.html @@ -0,0 +1,101 @@ + + + + + + + +OpenPose: doc/very_advanced/library_structure/0_index.md File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    doc/very_advanced/library_structure/0_index.md File Reference
    +
    +
    +
    +
    + + + + diff --git a/web/html/doc/video_capture_reader_8hpp.html b/web/html/doc/video_capture_reader_8hpp.html new file mode 100644 index 000000000..38a37995a --- /dev/null +++ b/web/html/doc/video_capture_reader_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/producer/videoCaptureReader.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    videoCaptureReader.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::VideoCaptureReader
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/video_capture_reader_8hpp_source.html b/web/html/doc/video_capture_reader_8hpp_source.html new file mode 100644 index 000000000..e92bd2eca --- /dev/null +++ b/web/html/doc/video_capture_reader_8hpp_source.html @@ -0,0 +1,170 @@ + + + + + + + +OpenPose: include/openpose/producer/videoCaptureReader.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    videoCaptureReader.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_PRODUCER_VIDEO_CAPTURE_READER_HPP
    +
    2 #define OPENPOSE_PRODUCER_VIDEO_CAPTURE_READER_HPP
    +
    3 
    + + +
    6 
    +
    7 namespace op
    +
    8 {
    + +
    14  {
    +
    15  public:
    +
    20  explicit VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened,
    +
    21  const std::string& cameraParameterPath, const bool undistortImage,
    +
    22  const int numberViews);
    +
    23 
    +
    29  explicit VideoCaptureReader(const std::string& path, const ProducerType producerType,
    +
    30  const std::string& cameraParameterPath, const bool undistortImage,
    +
    31  const int numberViews);
    +
    32 
    + +
    38 
    +
    39  virtual std::string getNextFrameName() = 0;
    +
    40 
    +
    41  virtual bool isOpened() const;
    +
    42 
    +
    43  void release();
    +
    44 
    +
    45  virtual double get(const int capProperty) = 0;
    +
    46 
    +
    47  virtual void set(const int capProperty, const double value) = 0;
    +
    48 
    +
    49  protected:
    +
    50  virtual Matrix getRawFrame() = 0;
    +
    51 
    +
    52  virtual std::vector<Matrix> getRawFrames() = 0;
    +
    53 
    +
    54  void resetWebcam(const int index, const bool throwExceptionIfNoOpened);
    +
    55 
    +
    56  private:
    +
    57  // PIMPL idiom
    +
    58  // http://www.cppsamples.com/common-tasks/pimpl.html
    +
    59  struct ImplVideoCaptureReader;
    +
    60  std::unique_ptr<ImplVideoCaptureReader> upImpl;
    +
    61 
    + +
    63  };
    +
    64 }
    +
    65 
    +
    66 #endif // OPENPOSE_PRODUCER_VIDEO_CAPTURE_READER_HPP
    + + + +
    virtual std::string getNextFrameName()=0
    + +
    virtual Matrix getRawFrame()=0
    +
    VideoCaptureReader(const std::string &path, const ProducerType producerType, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
    +
    virtual double get(const int capProperty)=0
    +
    virtual ~VideoCaptureReader()
    +
    virtual std::vector< Matrix > getRawFrames()=0
    +
    virtual bool isOpened() const
    +
    virtual void set(const int capProperty, const double value)=0
    +
    void resetWebcam(const int index, const bool throwExceptionIfNoOpened)
    +
    VideoCaptureReader(const int index, const bool throwExceptionIfNoOpened, const std::string &cameraParameterPath, const bool undistortImage, const int numberViews)
    + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + +
    ProducerType
    Definition: enumClasses.hpp:30
    + +
    +
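op::VideoCaptureReader is abstract (getNextFrameName, get, set and the raw-frame getters are pure virtual), so it is only used through concrete subclasses such as op::VideoReader. A hedged sketch of code written against the generic interface, assuming the integer capProperty ids mirror OpenCV's cv::VideoCaptureProperties (suggested by the class name, not guaranteed by this header):

    #include <opencv2/videoio.hpp> // cv::CAP_PROP_* ids (assumed mapping)
    #include <openpose/producer/videoCaptureReader.hpp>

    // Works with any concrete reader derived from op::VideoCaptureReader.
    double readerFps(op::VideoCaptureReader& reader)
    {
        if (!reader.isOpened())
            return -1.;
        return reader.get(cv::CAP_PROP_FPS);
    }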
    + + + + diff --git a/web/html/doc/video_reader_8hpp.html b/web/html/doc/video_reader_8hpp.html new file mode 100644 index 000000000..6161cd487 --- /dev/null +++ b/web/html/doc/video_reader_8hpp.html @@ -0,0 +1,119 @@ + + + + + + + +OpenPose: include/openpose/producer/videoReader.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    videoReader.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::VideoReader
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/video_reader_8hpp_source.html b/web/html/doc/video_reader_8hpp_source.html new file mode 100644 index 000000000..2ebf1f04b --- /dev/null +++ b/web/html/doc/video_reader_8hpp_source.html @@ -0,0 +1,156 @@ + + + + + + + +OpenPose: include/openpose/producer/videoReader.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    videoReader.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_PRODUCER_VIDEO_READER_HPP
    +
    2 #define OPENPOSE_PRODUCER_VIDEO_READER_HPP
    +
    3 
    + + +
    6 
    +
    7 namespace op
    +
    8 {
    + +
    14  {
    +
    15  public:
    +
    25  explicit VideoReader(
    +
    26  const std::string& videoPath, const std::string& cameraParameterPath = "",
    +
    27  const bool undistortImage = false, const int numberViews = -1);
    +
    28 
    +
    29  virtual ~VideoReader();
    +
    30 
    +
    31  std::string getNextFrameName();
    +
    32 
    +
    33  inline bool isOpened() const
    +
    34  {
    + +
    36  }
    +
    37 
    +
    38  double get(const int capProperty);
    +
    39 
    +
    40  void set(const int capProperty, const double value);
    +
    41 
    +
    42  private:
    +
    43  const std::string mPathName;
    +
    44 
    +
    45  Matrix getRawFrame();
    +
    46 
    +
    47  std::vector<Matrix> getRawFrames();
    +
    48 
    + +
    50  };
    +
    51 }
    +
    52 
    +
    53 #endif // OPENPOSE_PRODUCER_VIDEO_READER_HPP
    + + +
    virtual bool isOpened() const
    + +
    double get(const int capProperty)
    +
    void set(const int capProperty, const double value)
    +
    VideoReader(const std::string &videoPath, const std::string &cameraParameterPath="", const bool undistortImage=false, const int numberViews=-1)
    +
    virtual ~VideoReader()
    +
    bool isOpened() const
    Definition: videoReader.hpp:33
    +
    std::string getNextFrameName()
    + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + + +
    +
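op::VideoReader is the concrete file-based producer declared above; only the video path is mandatory, while the camera-parameter and undistortion arguments keep their defaults. A minimal sketch using only the members declared in this header (the path is hypothetical):

    #include <iostream>
    #include <openpose/producer/videoReader.hpp>

    int main()
    {
        op::VideoReader videoReader{"examples/media/video.avi"}; // hypothetical path
        if (!videoReader.isOpened())
        {
            std::cout << "Could not open the video file." << std::endl;
            return -1;
        }
        // Frames themselves are pulled through the Producer base-class interface;
        // here we only query the name of the frame that would be read next.
        std::cout << videoReader.getNextFrameName() << std::endl;
        return 0;
    }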
    + + + + diff --git a/web/html/doc/video_saver_8hpp.html b/web/html/doc/video_saver_8hpp.html new file mode 100644 index 000000000..8c90ffd5f --- /dev/null +++ b/web/html/doc/video_saver_8hpp.html @@ -0,0 +1,118 @@ + + + + + + + +OpenPose: include/openpose/filestream/videoSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    videoSaver.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::VideoSaver
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/video_saver_8hpp_source.html b/web/html/doc/video_saver_8hpp_source.html new file mode 100644 index 000000000..11a5bc94a --- /dev/null +++ b/web/html/doc/video_saver_8hpp_source.html @@ -0,0 +1,145 @@ + + + + + + + +OpenPose: include/openpose/filestream/videoSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    videoSaver.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_VIDEO_SAVER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_VIDEO_SAVER_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
    + +
    9  {
    +
    10  public:
    + +
    12  const std::string& videoSaverPath, const int cvFourcc, const double fps,
    +
    13  const std::string& addAudioFromThisVideo = "");
    +
    14 
    +
    15  virtual ~VideoSaver();
    +
    16 
    +
    17  bool isOpened();
    +
    18 
    +
    19  void write(const Matrix& matToSave);
    +
    20 
    +
    21  void write(const std::vector<Matrix>& matsToSave);
    +
    22 
    +
    23  private:
    +
    24  // PIMPL idiom
    +
    25  // http://www.cppsamples.com/common-tasks/pimpl.html
    +
    26  struct ImplVideoSaver;
    +
    27  std::unique_ptr<ImplVideoSaver> upImpl;
    +
    28 
    + +
    30  };
    +
    31 }
    +
    32 
    +
    33 #endif // OPENPOSE_FILESTREAM_VIDEO_SAVER_HPP
    + + + +
    VideoSaver(const std::string &videoSaverPath, const int cvFourcc, const double fps, const std::string &addAudioFromThisVideo="")
    +
    void write(const std::vector< Matrix > &matsToSave)
    +
    void write(const Matrix &matToSave)
    +
    virtual ~VideoSaver()
    + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + +
    +
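op::VideoSaver hides the actual encoder behind a PIMPL; its constructor takes the output path, an OpenCV fourcc code and the frame rate. A hedged sketch that writes a single dummy frame; OP_CV2OPMAT is assumed to be the cv::Mat-to-op::Matrix helper provided by openpose/core/matrix.hpp:

    #include <opencv2/opencv.hpp>
    #include <openpose/core/matrix.hpp>
    #include <openpose/filestream/videoSaver.hpp>

    int main()
    {
        op::VideoSaver videoSaver{"output.avi", cv::VideoWriter::fourcc('M','J','P','G'), 30.};
        if (!videoSaver.isOpened())
            return -1;
        cv::Mat cvFrame{480, 640, CV_8UC3, cv::Scalar{0, 0, 0}}; // dummy black frame
        const op::Matrix opFrame = OP_CV2OPMAT(cvFrame);         // assumed conversion macro
        videoSaver.write(opFrame);
        return 0;
    }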
    + + + + diff --git a/web/html/doc/w_bvh_saver_8hpp.html b/web/html/doc/w_bvh_saver_8hpp.html new file mode 100644 index 000000000..81c3682ba --- /dev/null +++ b/web/html/doc/w_bvh_saver_8hpp.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: include/openpose/filestream/wBvhSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wBvhSaver.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/w_bvh_saver_8hpp_source.html b/web/html/doc/w_bvh_saver_8hpp_source.html new file mode 100644 index 000000000..a4b5db356 --- /dev/null +++ b/web/html/doc/w_bvh_saver_8hpp_source.html @@ -0,0 +1,198 @@ + + + + + + + +OpenPose: include/openpose/filestream/wBvhSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wBvhSaver.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifdef USE_3D_ADAM_MODEL
    +
    2 #ifndef OPENPOSE_FILESTREAM_W_BVH_SAVER_HPP
    +
    3 #define OPENPOSE_FILESTREAM_W_BVH_SAVER_HPP
    +
    4 
    + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums>
    +
    12  class WBvhSaver : public WorkerConsumer<TDatums>
    +
    13  {
    +
    14  public:
    +
    15  explicit WBvhSaver(const std::shared_ptr<BvhSaver>& bvhSaver);
    +
    16 
    +
    17  virtual ~WBvhSaver();
    +
    18 
    +
    19  void initializationOnThread();
    +
    20 
    +
    21  void workConsumer(const TDatums& tDatums);
    +
    22 
    +
    23  private:
    +
    24  std::shared_ptr<BvhSaver> spBvhSaver;
    +
    25 
    +
    26  DELETE_COPY(WBvhSaver);
    +
    27  };
    +
    28 }
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 
    +
    34 // Implementation
    + +
    36 namespace op
    +
    37 {
    +
    38  template<typename TDatums>
    +
    39  WBvhSaver<TDatums>::WBvhSaver(const std::shared_ptr<BvhSaver>& bvhSaver) :
    +
    40  spBvhSaver{bvhSaver}
    +
    41  {
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums>
    +
    45  WBvhSaver<TDatums>::~WBvhSaver()
    +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums>
    +
    50  void WBvhSaver<TDatums>::initializationOnThread()
    +
    51  {
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WBvhSaver<TDatums>::workConsumer(const TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    65  // Record BVH file
    +
    66  const auto& tDatumPtr = (*tDatums)[0];
    +
    67  if (!tDatumPtr->poseKeypoints3D.empty())
    +
    68  spBvhSaver->updateBvh(tDatumPtr->adamPose, tDatumPtr->adamTranslation, tDatumPtr->j0Vec);
    +
    69  // Profiling speed
    +
    70  Profiler::timerEnd(profilerKey);
    +
    71  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    72  // Debugging log
    +
    73  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    74  }
    +
    75  }
    +
    76  catch (const std::exception& e)
    +
    77  {
    +
    78  this->stop();
    +
    79  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    80  }
    +
    81  }
    +
    82 
    +
    83  COMPILE_TEMPLATE_DATUM(WBvhSaver);
    +
    84 }
    +
    85 
    +
    86 #endif // OPENPOSE_FILESTREAM_W_BVH_SAVER_HPP
    +
    87 #endif
    + + +
    #define COMPILE_TEMPLATE_DATUM(templateName)
    Definition: datum.hpp:407
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + +
    +
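op::WBvhSaver only exists when OpenPose is built with USE_3D_ADAM_MODEL; it is a WorkerConsumer that forwards each datum's Adam pose to an op::BvhSaver. A hedged wiring sketch: the BvhSaver instance is taken as a parameter because its constructor is not part of this listing, and TDatums is spelled out as the usual OpenPose datums container:

    #ifdef USE_3D_ADAM_MODEL
    #include <memory>
    #include <vector>
    #include <openpose/core/datum.hpp>
    #include <openpose/filestream/wBvhSaver.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    // Wrap an already-constructed BvhSaver in its worker so it can be added to the pipeline.
    std::shared_ptr<op::WBvhSaver<TDatums>> makeBvhSaverWorker(
        const std::shared_ptr<op::BvhSaver>& bvhSaver)
    {
        return std::make_shared<op::WBvhSaver<TDatums>>(bvhSaver);
    }
    #endif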
    + + + + diff --git a/web/html/doc/w_coco_json_saver_8hpp.html b/web/html/doc/w_coco_json_saver_8hpp.html new file mode 100644 index 000000000..31d04517b --- /dev/null +++ b/web/html/doc/w_coco_json_saver_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/filestream/wCocoJsonSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wCocoJsonSaver.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WCocoJsonSaver< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WCocoJsonSaver)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_coco_json_saver_8hpp.js b/web/html/doc/w_coco_json_saver_8hpp.js new file mode 100644 index 000000000..f2edc3b7e --- /dev/null +++ b/web/html/doc/w_coco_json_saver_8hpp.js @@ -0,0 +1,5 @@ +var w_coco_json_saver_8hpp = +[ + [ "WCocoJsonSaver", "classop_1_1_w_coco_json_saver.html", "classop_1_1_w_coco_json_saver" ], + [ "COMPILE_TEMPLATE_DATUM", "w_coco_json_saver_8hpp.html#af46e80e6bac0f815006759df4c9d00c3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_coco_json_saver_8hpp_source.html b/web/html/doc/w_coco_json_saver_8hpp_source.html new file mode 100644 index 000000000..8828cf4f4 --- /dev/null +++ b/web/html/doc/w_coco_json_saver_8hpp_source.html @@ -0,0 +1,209 @@ + + + + + + + +OpenPose: include/openpose/filestream/wCocoJsonSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wCocoJsonSaver.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_W_COCO_JSON_SAVER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_W_COCO_JSON_SAVER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WCocoJsonSaver : public WorkerConsumer<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WCocoJsonSaver(const std::shared_ptr<CocoJsonSaver>& cocoJsonSaver);
    +
    15 
    +
    16  virtual ~WCocoJsonSaver();
    +
    17 
    + +
    19 
    +
    20  void workConsumer(const TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<CocoJsonSaver> spCocoJsonSaver;
    +
    24 
    +
    25  DELETE_COPY(WCocoJsonSaver);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WCocoJsonSaver<TDatums>::WCocoJsonSaver(const std::shared_ptr<CocoJsonSaver>& cocoJsonSaver) :
    +
    39  spCocoJsonSaver{cocoJsonSaver}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WCocoJsonSaver<TDatums>::workConsumer(const TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Check tDatums->size() == 1
    +
    61  if (tDatums->size() > 1)
    +
    62  error("Function only ready for tDatums->size() == 1", __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Debugging log
    +
    64  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    65  // Profiling speed
    +
    66  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    67  // T* to T
    +
    68  const auto& tDatumPtr = tDatums->at(0);
    +
    69  // Record json in COCO format
    +
    70  spCocoJsonSaver->record(
    +
    71  tDatumPtr->poseKeypoints, tDatumPtr->poseScores, tDatumPtr->name, tDatumPtr->frameNumber);
    +
    72  // Profiling speed
    +
    73  Profiler::timerEnd(profilerKey);
    +
    74  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    75  // Debugging log
    +
    76  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    77  }
    +
    78  }
    +
    79  catch (const std::exception& e)
    +
    80  {
    +
    81  this->stop();
    +
    82  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    83  }
    +
    84  }
    +
    85 
    + +
    87 }
    +
    88 
    +
    89 #endif // OPENPOSE_FILESTREAM_W_COCO_JSON_SAVER_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + +
    WCocoJsonSaver(const std::shared_ptr< CocoJsonSaver > &cocoJsonSaver)
    + +
    void workConsumer(const TDatums &tDatums)
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
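op::WCocoJsonSaver consumes datums and records poseKeypoints and poseScores in COCO JSON format; note from workConsumer() above that it errors out unless the datums vector holds exactly one element. A hedged wiring sketch, with the CocoJsonSaver taken as a parameter because its constructor is not shown here and TDatums spelled out as the usual OpenPose datums container:

    #include <memory>
    #include <vector>
    #include <openpose/core/datum.hpp>
    #include <openpose/filestream/wCocoJsonSaver.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    std::shared_ptr<op::WCocoJsonSaver<TDatums>> makeCocoJsonWorker(
        const std::shared_ptr<op::CocoJsonSaver>& cocoJsonSaver)
    {
        // The resulting worker expects tDatums->size() == 1 per iteration.
        return std::make_shared<op::WCocoJsonSaver<TDatums>>(cocoJsonSaver);
    }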
    + + + + diff --git a/web/html/doc/w_cv_mat_to_op_input_8hpp.html b/web/html/doc/w_cv_mat_to_op_input_8hpp.html new file mode 100644 index 000000000..a817302dc --- /dev/null +++ b/web/html/doc/w_cv_mat_to_op_input_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/core/wCvMatToOpInput.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wCvMatToOpInput.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WCvMatToOpInput< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WCvMatToOpInput)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_cv_mat_to_op_input_8hpp.js b/web/html/doc/w_cv_mat_to_op_input_8hpp.js new file mode 100644 index 000000000..8301a4d9c --- /dev/null +++ b/web/html/doc/w_cv_mat_to_op_input_8hpp.js @@ -0,0 +1,5 @@ +var w_cv_mat_to_op_input_8hpp = +[ + [ "WCvMatToOpInput", "classop_1_1_w_cv_mat_to_op_input.html", "classop_1_1_w_cv_mat_to_op_input" ], + [ "COMPILE_TEMPLATE_DATUM", "w_cv_mat_to_op_input_8hpp.html#a9076fc1719030c2a74f21682999d2315", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_cv_mat_to_op_input_8hpp_source.html b/web/html/doc/w_cv_mat_to_op_input_8hpp_source.html new file mode 100644 index 000000000..5c81bdc2c --- /dev/null +++ b/web/html/doc/w_cv_mat_to_op_input_8hpp_source.html @@ -0,0 +1,206 @@ + + + + + + + +OpenPose: include/openpose/core/wCvMatToOpInput.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wCvMatToOpInput.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_W_CV_MAT_TO_OP_INPUT_HPP
    +
    2 #define OPENPOSE_CORE_W_CV_MAT_TO_OP_INPUT_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WCvMatToOpInput : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WCvMatToOpInput(const std::shared_ptr<CvMatToOpInput>& cvMatToOpInput);
    +
    15 
    +
    16  virtual ~WCvMatToOpInput();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<CvMatToOpInput> spCvMatToOpInput;
    +
    24 
    +
    25  DELETE_COPY(WCvMatToOpInput);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WCvMatToOpInput<TDatums>::WCvMatToOpInput(const std::shared_ptr<CvMatToOpInput>& cvMatToOpInput) :
    +
    39  spCvMatToOpInput{cvMatToOpInput}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WCvMatToOpInput<TDatums>::work(TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // cv::Mat -> float*
    +
    65  for (auto& tDatumPtr : *tDatums)
    +
    66  tDatumPtr->inputNetData = spCvMatToOpInput->createArray(
    +
    67  tDatumPtr->cvInputData, tDatumPtr->scaleInputToNetInputs, tDatumPtr->netInputSizes);
    +
    68  // Profiling speed
    +
    69  Profiler::timerEnd(profilerKey);
    +
    70  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    71  // Debugging log
    +
    72  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    73  }
    +
    74  }
    +
    75  catch (const std::exception& e)
    +
    76  {
    +
    77  this->stop();
    +
    78  tDatums = nullptr;
    +
    79  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    80  }
    +
    81  }
    +
    82 
    + +
    84 }
    +
    85 
    +
    86 #endif // OPENPOSE_CORE_W_CV_MAT_TO_OP_INPUT_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WCvMatToOpInput(const std::shared_ptr< CvMatToOpInput > &cvMatToOpInput)
    + +
    void work(TDatums &tDatums)
    + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
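The wCvMatToOpInput.hpp listing above also doubles as a template for user-defined workers: derive from op::Worker<TDatums>, implement initializationOnThread() and work(), loop over the datums, and on error stop the worker and null the datums. A hedged sketch of a minimal custom worker following that pattern; the class name and its action are invented purely for illustration:

    #include <iostream>
    #include <memory>
    #include <vector>
    #include <openpose/core/common.hpp>
    #include <openpose/core/datum.hpp>
    #include <openpose/thread/worker.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    // Hypothetical worker: prints the frame number of every datum passing through.
    class WFrameNumberPrinter : public op::Worker<TDatums>
    {
    public:
        void initializationOnThread() {}
        void work(TDatums& tDatums)
        {
            try
            {
                if (tDatums != nullptr && !tDatums->empty())
                    for (const auto& tDatumPtr : *tDatums)
                        std::cout << "frame " << tDatumPtr->frameNumber << std::endl;
            }
            catch (const std::exception& e)
            {
                this->stop();       // same error handling as the workers in this library
                tDatums = nullptr;
                op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
            }
        }
    };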
    + + + + diff --git a/web/html/doc/w_cv_mat_to_op_output_8hpp.html b/web/html/doc/w_cv_mat_to_op_output_8hpp.html new file mode 100644 index 000000000..fd5d20eb0 --- /dev/null +++ b/web/html/doc/w_cv_mat_to_op_output_8hpp.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: include/openpose/core/wCvMatToOpOutput.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wCvMatToOpOutput.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WCvMatToOpOutput< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WCvMatToOpOutput)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_cv_mat_to_op_output_8hpp.js b/web/html/doc/w_cv_mat_to_op_output_8hpp.js new file mode 100644 index 000000000..42e5fdfb2 --- /dev/null +++ b/web/html/doc/w_cv_mat_to_op_output_8hpp.js @@ -0,0 +1,5 @@ +var w_cv_mat_to_op_output_8hpp = +[ + [ "WCvMatToOpOutput", "classop_1_1_w_cv_mat_to_op_output.html", "classop_1_1_w_cv_mat_to_op_output" ], + [ "COMPILE_TEMPLATE_DATUM", "w_cv_mat_to_op_output_8hpp.html#a6d12bd1e42cfb63d2f780bed55fa01fb", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_cv_mat_to_op_output_8hpp_source.html b/web/html/doc/w_cv_mat_to_op_output_8hpp_source.html new file mode 100644 index 000000000..361666e94 --- /dev/null +++ b/web/html/doc/w_cv_mat_to_op_output_8hpp_source.html @@ -0,0 +1,210 @@ + + + + + + + +OpenPose: include/openpose/core/wCvMatToOpOutput.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wCvMatToOpOutput.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_W_CV_MAT_TO_OP_OUTPUT_HPP
    +
    2 #define OPENPOSE_CORE_W_CV_MAT_TO_OP_OUTPUT_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WCvMatToOpOutput : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WCvMatToOpOutput(const std::shared_ptr<CvMatToOpOutput>& cvMatToOpOutput);
    +
    15 
    +
    16  virtual ~WCvMatToOpOutput();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<CvMatToOpOutput> spCvMatToOpOutput;
    +
    24 
    +
    25  DELETE_COPY(WCvMatToOpOutput);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + + +
    36 namespace op
    +
    37 {
    +
    38  template<typename TDatums>
    +
    39  WCvMatToOpOutput<TDatums>::WCvMatToOpOutput(const std::shared_ptr<CvMatToOpOutput>& cvMatToOpOutput) :
    +
    40  spCvMatToOpOutput{cvMatToOpOutput}
    +
    41  {
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums>
    + +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums>
    + +
    51  {
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WCvMatToOpOutput<TDatums>::work(TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // T* to T
    +
    64  auto& tDatumsNoPtr = *tDatums;
    +
    65  // Profiling speed
    +
    66  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    67  // cv::Mat -> float*
    +
    68  for (auto& tDatumPtr : tDatumsNoPtr)
    +
    69  tDatumPtr->outputData = spCvMatToOpOutput->createArray(
    +
    70  tDatumPtr->cvInputData, tDatumPtr->scaleInputToOutput, tDatumPtr->netOutputSize);
    +
    71  // Profiling speed
    +
    72  Profiler::timerEnd(profilerKey);
    +
    73  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    74  // Debugging log
    +
    75  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    76  }
    +
    77  }
    +
    78  catch (const std::exception& e)
    +
    79  {
    +
    80  this->stop();
    +
    81  tDatums = nullptr;
    +
    82  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    83  }
    +
    84  }
    +
    85 
    + +
    87 }
    +
    88 
    +
    89 #endif // OPENPOSE_CORE_W_CV_MAT_TO_OP_OUTPUT_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WCvMatToOpOutput(const std::shared_ptr< CvMatToOpOutput > &cvMatToOpOutput)
    +
    void work(TDatums &tDatums)
    + + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + + +
    +
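op::WCvMatToOpOutput mirrors WCvMatToOpInput but fills tDatumPtr->outputData (the buffer used for rendering) instead of the net input. Its per-datum work reduces to a single call, sketched here outside the threading framework; the CvMatToOpOutput instance and the datum are taken as parameters because their construction is not part of this listing:

    #include <openpose/core/cvMatToOpOutput.hpp>
    #include <openpose/core/datum.hpp>

    // Equivalent of one loop iteration of WCvMatToOpOutput::work().
    void fillOutputData(op::CvMatToOpOutput& cvMatToOpOutput, op::Datum& datum)
    {
        datum.outputData = cvMatToOpOutput.createArray(
            datum.cvInputData, datum.scaleInputToOutput, datum.netOutputSize);
    }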
    + + + + diff --git a/web/html/doc/w_datum_producer_8hpp.html b/web/html/doc/w_datum_producer_8hpp.html new file mode 100644 index 000000000..ba32761f1 --- /dev/null +++ b/web/html/doc/w_datum_producer_8hpp.html @@ -0,0 +1,123 @@ + + + + + + + +OpenPose: include/openpose/producer/wDatumProducer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wDatumProducer.hpp File Reference
    +
    +
    +
    #include <limits>
    +#include <queue>
    +#include <openpose/core/common.hpp>
    +#include <openpose/producer/datumProducer.hpp>
    +#include <openpose/thread/workerProducer.hpp>
    +#include <openpose/core/datum.hpp>
    +
    +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WDatumProducer< TDatum >
     
    + + + +

    +Namespaces

     op
     
    +
    +
    + + + + diff --git a/web/html/doc/w_datum_producer_8hpp_source.html b/web/html/doc/w_datum_producer_8hpp_source.html new file mode 100644 index 000000000..94023da38 --- /dev/null +++ b/web/html/doc/w_datum_producer_8hpp_source.html @@ -0,0 +1,237 @@ + + + + + + + +OpenPose: include/openpose/producer/wDatumProducer.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wDatumProducer.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_PRODUCER_W_DATUM_PRODUCER_HPP
    +
    2 #define OPENPOSE_PRODUCER_W_DATUM_PRODUCER_HPP
    +
    3 
    +
    4 #include <limits> // std::numeric_limits
    +
    5 #include <queue> // std::queue
    + + + +
    9 
    +
    10 namespace op
    +
    11 {
    +
    12  template<typename TDatum>
    +
    13  class WDatumProducer : public WorkerProducer<std::shared_ptr<std::vector<std::shared_ptr<TDatum>>>>
    +
    14  {
    +
    15  public:
    +
    16  explicit WDatumProducer(const std::shared_ptr<DatumProducer<TDatum>>& datumProducer);
    +
    17 
    +
    18  virtual ~WDatumProducer();
    +
    19 
    + +
    21 
    +
    22  std::shared_ptr<std::vector<std::shared_ptr<TDatum>>> workProducer();
    +
    23 
    +
    24  private:
    +
    25  std::shared_ptr<DatumProducer<TDatum>> spDatumProducer;
    +
    26  std::queue<std::shared_ptr<std::vector<std::shared_ptr<TDatum>>>> mQueuedElements;
    +
    27 
    +
    28  DELETE_COPY(WDatumProducer);
    +
    29  };
    +
    30 }
    +
    31 
    +
    32 
    +
    33 
    +
    34 
    +
    35 
    +
    36 // Implementation
    +
    37 #include <openpose/core/datum.hpp>
    +
    38 namespace op
    +
    39 {
    +
    40  template<typename TDatum>
    + +
    42  const std::shared_ptr<DatumProducer<TDatum>>& datumProducer) :
    +
    43  spDatumProducer{datumProducer}
    +
    44  {
    +
    45  }
    +
    46 
    +
    47  template<typename TDatum>
    + +
    49  {
    +
    50  }
    +
    51 
    +
    52 
    +
    53  template<typename TDatum>
    + +
    55  {
    +
    56  }
    +
    57 
    +
    58  template<typename TDatum>
    +
    59  std::shared_ptr<std::vector<std::shared_ptr<TDatum>>> WDatumProducer<TDatum>::workProducer()
    +
    60  {
    +
    61  try
    +
    62  {
    +
    63  // Debugging log
    +
    64  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    65  // Profiling speed
    +
    66  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    67  // Create and fill final shared pointer
    +
    68  std::shared_ptr<std::vector<std::shared_ptr<TDatum>>> tDatums;
    +
    69  // Producer
    +
    70  if (mQueuedElements.empty())
    +
    71  {
    +
    72  bool isRunning;
    +
    73  std::tie(isRunning, tDatums) = spDatumProducer->checkIfRunningAndGetDatum();
    +
    74  // Stop Worker if producer finished
    +
    75  if (!isRunning)
    +
    76  this->stop();
    +
    77  // Profiling speed
    +
    78  Profiler::timerEnd(profilerKey);
    +
    79  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    80  // Debugging log
    +
    81  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    82  }
    +
    83  // Equivalent to WQueueSplitter
    +
    84  // Queued elements - Multiple views --> Split views into different shared pointers
    +
    85  if (tDatums != nullptr && tDatums->size() > 1)
    +
    86  {
    +
    87  // Add tDatums to mQueuedElements
    +
    88  for (auto i = 0u ; i < tDatums->size() ; i++)
    +
    89  {
    +
    90  auto& tDatumPtr = (*tDatums)[i];
    +
    91  tDatumPtr->subId = i;
    +
    92  tDatumPtr->subIdMax = tDatums->size()-1;
    +
    93  mQueuedElements.emplace(
    +
    94  std::make_shared<std::vector<std::shared_ptr<TDatum>>>(
    +
    95  std::vector<std::shared_ptr<TDatum>>{tDatumPtr}));
    +
    96  }
    +
    97  }
    +
    98  // Queued elements - Multiple views --> Return oldest view
    +
    99  if (!mQueuedElements.empty())
    +
    100  {
    +
    101  tDatums = mQueuedElements.front();
    +
    102  mQueuedElements.pop();
    +
    103  }
    +
    104  // Return result
    +
    105  return tDatums;
    +
    106  }
    +
    107  catch (const std::exception& e)
    +
    108  {
    +
    109  this->stop();
    +
    110  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    111  return nullptr;
    +
    112  }
    +
    113  }
    +
    114 
    +
    115  extern template class WDatumProducer<BASE_DATUM>;
    +
    116 }
    +
    117 
    +
    118 #endif // OPENPOSE_PRODUCER_W_DATUM_PRODUCER_HPP
    + +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + +
    WDatumProducer(const std::shared_ptr< DatumProducer< TDatum >> &datumProducer)
    + +
    std::shared_ptr< std::vector< std::shared_ptr< TDatum > > > workProducer()
    + + + + + +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + +
    +
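op::WDatumProducer is the WorkerProducer that feeds the pipeline: each call to workProducer() asks the wrapped DatumProducer for new datums and, for multi-view input, splits them into one packet per view via its internal queue. A hedged wiring sketch; the DatumProducer is taken as a parameter because its constructor is not shown here:

    #include <memory>
    #include <vector>
    #include <openpose/core/datum.hpp>
    #include <openpose/producer/wDatumProducer.hpp>

    // Note: WDatumProducer is templated on the datum type itself, not on the datums container.
    std::shared_ptr<op::WDatumProducer<op::Datum>> makeDatumProducerWorker(
        const std::shared_ptr<op::DatumProducer<op::Datum>>& datumProducer)
    {
        return std::make_shared<op::WDatumProducer<op::Datum>>(datumProducer);
    }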
    + + + + diff --git a/web/html/doc/w_face_detector_8hpp.html b/web/html/doc/w_face_detector_8hpp.html new file mode 100644 index 000000000..b2345d984 --- /dev/null +++ b/web/html/doc/w_face_detector_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceDetector.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wFaceDetector.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WFaceDetector< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WFaceDetector)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_face_detector_8hpp.js b/web/html/doc/w_face_detector_8hpp.js new file mode 100644 index 000000000..74340186b --- /dev/null +++ b/web/html/doc/w_face_detector_8hpp.js @@ -0,0 +1,5 @@ +var w_face_detector_8hpp = +[ + [ "WFaceDetector", "classop_1_1_w_face_detector.html", "classop_1_1_w_face_detector" ], + [ "COMPILE_TEMPLATE_DATUM", "w_face_detector_8hpp.html#a196f17357cd1c1bb02e24e4e8a0e6ec3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_face_detector_8hpp_source.html b/web/html/doc/w_face_detector_8hpp_source.html new file mode 100644 index 000000000..312487cfb --- /dev/null +++ b/web/html/doc/w_face_detector_8hpp_source.html @@ -0,0 +1,205 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceDetector.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wFaceDetector.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FACE_W_FACE_EXTRACTOR_HPP
    +
    2 #define OPENPOSE_FACE_W_FACE_EXTRACTOR_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WFaceDetector : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WFaceDetector(const std::shared_ptr<FaceDetector>& faceDetector);
    +
    15 
    +
    16  virtual ~WFaceDetector();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<FaceDetector> spFaceDetector;
    +
    24 
    +
    25  DELETE_COPY(WFaceDetector);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WFaceDetector<TDatums>::WFaceDetector(const std::shared_ptr<FaceDetector>& faceDetector) :
    +
    39  spFaceDetector{faceDetector}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WFaceDetector<TDatums>::work(TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // Detect people face
    +
    65  for (auto& tDatumPtr : *tDatums)
    +
    66  tDatumPtr->faceRectangles = spFaceDetector->detectFaces(tDatumPtr->poseKeypoints);
    +
    67  // Profiling speed
    +
    68  Profiler::timerEnd(profilerKey);
    +
    69  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    70  // Debugging log
    +
    71  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    72  }
    +
    73  }
    +
    74  catch (const std::exception& e)
    +
    75  {
    +
    76  this->stop();
    +
    77  tDatums = nullptr;
    +
    78  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    79  }
    +
    80  }
    +
    81 
    + +
    83 }
    +
    84 
    +
    85 #endif // OPENPOSE_FACE_W_FACE_EXTRACTOR_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    void work(TDatums &tDatums)
    +
    WFaceDetector(const std::shared_ptr< FaceDetector > &faceDetector)
    +
    virtual ~WFaceDetector()
    +
    void initializationOnThread()
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
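op::WFaceDetector derives the face bounding boxes from poseKeypoints, so in a pipeline it has to run after pose estimation has filled those keypoints. A hedged wiring sketch, with the FaceDetector taken as a parameter because its constructor is not part of this listing and TDatums spelled out as the usual OpenPose datums container:

    #include <memory>
    #include <vector>
    #include <openpose/core/datum.hpp>
    #include <openpose/face/wFaceDetector.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    std::shared_ptr<op::WFaceDetector<TDatums>> makeFaceDetectorWorker(
        const std::shared_ptr<op::FaceDetector>& faceDetector)
    {
        // Place this worker after the pose extractor so tDatumPtr->poseKeypoints is populated.
        return std::make_shared<op::WFaceDetector<TDatums>>(faceDetector);
    }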
    + + + + diff --git a/web/html/doc/w_face_detector_open_c_v_8hpp.html b/web/html/doc/w_face_detector_open_c_v_8hpp.html new file mode 100644 index 000000000..2ed85133e --- /dev/null +++ b/web/html/doc/w_face_detector_open_c_v_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceDetectorOpenCV.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wFaceDetectorOpenCV.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WFaceDetectorOpenCV< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WFaceDetectorOpenCV)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_face_detector_open_c_v_8hpp.js b/web/html/doc/w_face_detector_open_c_v_8hpp.js new file mode 100644 index 000000000..308930cad --- /dev/null +++ b/web/html/doc/w_face_detector_open_c_v_8hpp.js @@ -0,0 +1,5 @@ +var w_face_detector_open_c_v_8hpp = +[ + [ "WFaceDetectorOpenCV", "classop_1_1_w_face_detector_open_c_v.html", "classop_1_1_w_face_detector_open_c_v" ], + [ "COMPILE_TEMPLATE_DATUM", "w_face_detector_open_c_v_8hpp.html#abf3a59fc4662f07e6ba19b95bd4da32f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_face_detector_open_c_v_8hpp_source.html b/web/html/doc/w_face_detector_open_c_v_8hpp_source.html new file mode 100644 index 000000000..14a33edb5 --- /dev/null +++ b/web/html/doc/w_face_detector_open_c_v_8hpp_source.html @@ -0,0 +1,205 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceDetectorOpenCV.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wFaceDetectorOpenCV.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FACE_W_FACE_EXTRACTOR_OPENCV_HPP
    +
    2 #define OPENPOSE_FACE_W_FACE_EXTRACTOR_OPENCV_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WFaceDetectorOpenCV : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WFaceDetectorOpenCV(const std::shared_ptr<FaceDetectorOpenCV>& faceDetectorOpenCV);
    +
    15 
    +
    16  virtual ~WFaceDetectorOpenCV();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<FaceDetectorOpenCV> spFaceDetectorOpenCV;
    +
    24 
    +
    25  DELETE_COPY(WFaceDetectorOpenCV);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WFaceDetectorOpenCV<TDatums>::WFaceDetectorOpenCV(const std::shared_ptr<FaceDetectorOpenCV>& faceDetectorOpenCV) :
    +
    39  spFaceDetectorOpenCV{faceDetectorOpenCV}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WFaceDetectorOpenCV<TDatums>::work(TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // Detect people face
    +
    65  for (auto& tDatumPtr : *tDatums)
    +
    66  tDatumPtr->faceRectangles = spFaceDetectorOpenCV->detectFaces(tDatumPtr->cvInputData);
    +
    67  // Profiling speed
    +
    68  Profiler::timerEnd(profilerKey);
    +
    69  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    70  // Debugging log
    +
    71  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    72  }
    +
    73  }
    +
    74  catch (const std::exception& e)
    +
    75  {
    +
    76  this->stop();
    +
    77  tDatums = nullptr;
    +
    78  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    79  }
    +
    80  }
    +
    81 
    + +
    83 }
    +
    84 
    +
    85 #endif // OPENPOSE_FACE_W_FACE_EXTRACTOR_OPENCV_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + +
    void work(TDatums &tDatums)
    +
    WFaceDetectorOpenCV(const std::shared_ptr< FaceDetectorOpenCV > &faceDetectorOpenCV)
    + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
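op::WFaceDetectorOpenCV differs from WFaceDetector in its input: it runs the OpenCV-based detector directly on the image (cvInputData) rather than on pose keypoints, so it also works when body estimation is disabled. Its per-datum work reduces to the call sketched below; the detector instance is taken as a parameter because its constructor is not shown here, and the include path for it is assumed:

    #include <openpose/core/datum.hpp>
    #include <openpose/face/faceDetectorOpenCV.hpp> // assumed header for op::FaceDetectorOpenCV

    // Equivalent of one loop iteration of WFaceDetectorOpenCV::work().
    void detectFacesFromImage(op::FaceDetectorOpenCV& faceDetectorOpenCV, op::Datum& datum)
    {
        datum.faceRectangles = faceDetectorOpenCV.detectFaces(datum.cvInputData);
    }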
    + + + + diff --git a/web/html/doc/w_face_extractor_net_8hpp.html b/web/html/doc/w_face_extractor_net_8hpp.html new file mode 100644 index 000000000..2200bea20 --- /dev/null +++ b/web/html/doc/w_face_extractor_net_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceExtractorNet.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wFaceExtractorNet.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WFaceExtractorNet< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WFaceExtractorNet)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_face_extractor_net_8hpp.js b/web/html/doc/w_face_extractor_net_8hpp.js new file mode 100644 index 000000000..57723c4a9 --- /dev/null +++ b/web/html/doc/w_face_extractor_net_8hpp.js @@ -0,0 +1,5 @@ +var w_face_extractor_net_8hpp = +[ + [ "WFaceExtractorNet", "classop_1_1_w_face_extractor_net.html", "classop_1_1_w_face_extractor_net" ], + [ "COMPILE_TEMPLATE_DATUM", "w_face_extractor_net_8hpp.html#ab5b47f0069e9f397ff891194b20d28f2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_face_extractor_net_8hpp_source.html b/web/html/doc/w_face_extractor_net_8hpp_source.html new file mode 100644 index 000000000..bcc74c8bb --- /dev/null +++ b/web/html/doc/w_face_extractor_net_8hpp_source.html @@ -0,0 +1,210 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceExtractorNet.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wFaceExtractorNet.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FACE_W_FACE_DETECTOR_NET_HPP
    +
    2 #define OPENPOSE_FACE_W_FACE_DETECTOR_NET_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WFaceExtractorNet : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WFaceExtractorNet(const std::shared_ptr<FaceExtractorNet>& faceExtractorNet);
    +
    15 
    +
    16  virtual ~WFaceExtractorNet();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<FaceExtractorNet> spFaceExtractorNet;
    +
    24 
    +
    25  DELETE_COPY(WFaceExtractorNet);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WFaceExtractorNet<TDatums>::WFaceExtractorNet(const std::shared_ptr<FaceExtractorNet>& faceExtractorNet) :
    +
    39  spFaceExtractorNet{faceExtractorNet}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  spFaceExtractorNet->initializationOnThread();
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WFaceExtractorNet<TDatums>::work(TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    65  // Extract people face
    +
    66  for (auto& tDatumPtr : *tDatums)
    +
    67  {
    +
    68  spFaceExtractorNet->forwardPass(tDatumPtr->faceRectangles, tDatumPtr->cvInputData);
    +
    69  tDatumPtr->faceHeatMaps = spFaceExtractorNet->getHeatMaps().clone();
    +
    70  tDatumPtr->faceKeypoints = spFaceExtractorNet->getFaceKeypoints().clone();
    +
    71  }
    +
    72  // Profiling speed
    +
    73  Profiler::timerEnd(profilerKey);
    +
    74  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    75  // Debugging log
    +
    76  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    77  }
    +
    78  }
    +
    79  catch (const std::exception& e)
    +
    80  {
    +
    81  this->stop();
    +
    82  tDatums = nullptr;
    +
    83  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    84  }
    +
    85  }
    +
    86 
    + +
    88 }
    +
    89 
    +
    90 #endif // OPENPOSE_FACE_W_FACE_DETECTOR_NET_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WFaceExtractorNet(const std::shared_ptr< FaceExtractorNet > &faceExtractorNet)
    +
    void work(TDatums &tDatums)
    + + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
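op::WFaceExtractorNet runs the face keypoint network on the previously detected faceRectangles; note that its initializationOnThread() forwards to the extractor's initializationOnThread(), so the wrapped network is set up on the worker's own thread. The per-datum body of work() reduces to the calls sketched here; the extractor is taken as a parameter because its constructor is not part of this listing, and the include path for it is assumed:

    #include <openpose/core/datum.hpp>
    #include <openpose/face/faceExtractorNet.hpp> // assumed header for op::FaceExtractorNet

    // Equivalent of one loop iteration of WFaceExtractorNet::work(); assumes
    // initializationOnThread() has already been called on the extractor.
    void extractFaceKeypoints(op::FaceExtractorNet& faceExtractorNet, op::Datum& datum)
    {
        faceExtractorNet.forwardPass(datum.faceRectangles, datum.cvInputData);
        datum.faceHeatMaps = faceExtractorNet.getHeatMaps().clone();
        datum.faceKeypoints = faceExtractorNet.getFaceKeypoints().clone();
    }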
    + + + + diff --git a/web/html/doc/w_face_renderer_8hpp.html b/web/html/doc/w_face_renderer_8hpp.html new file mode 100644 index 000000000..ffd67ed48 --- /dev/null +++ b/web/html/doc/w_face_renderer_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceRenderer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wFaceRenderer.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WFaceRenderer< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WFaceRenderer)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_face_renderer_8hpp.js b/web/html/doc/w_face_renderer_8hpp.js new file mode 100644 index 000000000..e0050dda5 --- /dev/null +++ b/web/html/doc/w_face_renderer_8hpp.js @@ -0,0 +1,5 @@ +var w_face_renderer_8hpp = +[ + [ "WFaceRenderer", "classop_1_1_w_face_renderer.html", "classop_1_1_w_face_renderer" ], + [ "COMPILE_TEMPLATE_DATUM", "w_face_renderer_8hpp.html#af42afa53c725d556c14928b2603883e3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_face_renderer_8hpp_source.html b/web/html/doc/w_face_renderer_8hpp_source.html new file mode 100644 index 000000000..44d1b808f --- /dev/null +++ b/web/html/doc/w_face_renderer_8hpp_source.html @@ -0,0 +1,207 @@ + + + + + + + +OpenPose: include/openpose/face/wFaceRenderer.hpp Source File + + + + + + + + + + + + + +
wFaceRenderer.hpp

#ifndef OPENPOSE_FACE_W_FACE_RENDERER_HPP
#define OPENPOSE_FACE_W_FACE_RENDERER_HPP

namespace op
{
    template<typename TDatums>
    class WFaceRenderer : public Worker<TDatums>
    {
    public:
        explicit WFaceRenderer(const std::shared_ptr<FaceRenderer>& faceRenderer);
        virtual ~WFaceRenderer();
        void initializationOnThread();
        void work(TDatums& tDatums);
    private:
        std::shared_ptr<FaceRenderer> spFaceRenderer;
        DELETE_COPY(WFaceRenderer);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WFaceRenderer<TDatums>::WFaceRenderer(const std::shared_ptr<FaceRenderer>& faceRenderer) :
        spFaceRenderer{faceRenderer}
    {
    }

    template<typename TDatums>
    WFaceRenderer<TDatums>::~WFaceRenderer()
    {
    }

    template<typename TDatums>
    void WFaceRenderer<TDatums>::initializationOnThread()
    {
        spFaceRenderer->initializationOnThread();
    }

    template<typename TDatums>
    void WFaceRenderer<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Render people face
                for (auto& tDatumPtr : *tDatums)
                    spFaceRenderer->renderFace(
                        tDatumPtr->outputData, tDatumPtr->faceKeypoints, (float)tDatumPtr->scaleInputToOutput);
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WFaceRenderer);
}

#endif // OPENPOSE_FACE_W_FACE_RENDERER_HPP
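The listing above is the complete worker: construction stores the shared FaceRenderer, initializationOnThread() forwards to it, and work() draws every datum's face keypoints onto its outputData. A minimal sketch of driving it by hand outside the usual ThreadManager pipeline; the helper name renderFacesOnce and the already-configured faceRenderer/tDatums arguments are assumptions for illustration, not part of the generated docs:

    #include <openpose/core/datum.hpp>
    #include <openpose/face/wFaceRenderer.hpp>

    using TDatums = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

    // Hypothetical helper: faceRenderer and tDatums come from the surrounding pipeline.
    void renderFacesOnce(const std::shared_ptr<op::FaceRenderer>& faceRenderer, TDatums& tDatums)
    {
        op::WFaceRenderer<TDatums> wFaceRenderer{faceRenderer};
        wFaceRenderer.initializationOnThread(); // must run on the thread that will call work()
        wFaceRenderer.work(tDatums);            // overlays faceKeypoints on each datum's outputData
    }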
diff --git a/web/html/doc/w_face_saver_8hpp.html b/web/html/doc/w_face_saver_8hpp.html
new file mode 100644
index 000000000..6273ca84e
--- /dev/null
+++ b/web/html/doc/w_face_saver_8hpp.html
@@ -0,0 +1,128 @@
OpenPose: include/openpose/filestream/wFaceSaver.hpp File Reference
wFaceSaver.hpp File Reference
diff --git a/web/html/doc/w_face_saver_8hpp.js b/web/html/doc/w_face_saver_8hpp.js
new file mode 100644
index 000000000..c51383a50
--- /dev/null
+++ b/web/html/doc/w_face_saver_8hpp.js
@@ -0,0 +1,5 @@
+var w_face_saver_8hpp =
+[
+  [ "WFaceSaver", "classop_1_1_w_face_saver.html", "classop_1_1_w_face_saver" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_face_saver_8hpp.html#a57c4f3ada0db4882a4106d4dedf08012", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_face_saver_8hpp_source.html b/web/html/doc/w_face_saver_8hpp_source.html
new file mode 100644
index 000000000..64d3e42e7
--- /dev/null
+++ b/web/html/doc/w_face_saver_8hpp_source.html
@@ -0,0 +1,212 @@
OpenPose: include/openpose/filestream/wFaceSaver.hpp Source File
wFaceSaver.hpp

#ifndef OPENPOSE_FILESTREAM_W_FACE_SAVER_HPP
#define OPENPOSE_FILESTREAM_W_FACE_SAVER_HPP

namespace op
{
    template<typename TDatums>
    class WFaceSaver : public WorkerConsumer<TDatums>
    {
    public:
        explicit WFaceSaver(const std::shared_ptr<KeypointSaver>& keypointSaver);
        virtual ~WFaceSaver();
        void initializationOnThread();
        void workConsumer(const TDatums& tDatums);
    private:
        const std::shared_ptr<KeypointSaver> spKeypointSaver;
        DELETE_COPY(WFaceSaver);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WFaceSaver<TDatums>::WFaceSaver(const std::shared_ptr<KeypointSaver>& keypointSaver) :
        spKeypointSaver{keypointSaver}
    {
    }

    template<typename TDatums>
    WFaceSaver<TDatums>::~WFaceSaver()
    {
    }

    template<typename TDatums>
    void WFaceSaver<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WFaceSaver<TDatums>::workConsumer(const TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // T* to T
                auto& tDatumsNoPtr = *tDatums;
                // Record people face keypoint data
                std::vector<Array<float>> keypointVector(tDatumsNoPtr.size());
                for (auto i = 0u; i < tDatumsNoPtr.size(); i++)
                    keypointVector[i] = tDatumsNoPtr[i]->faceKeypoints;
                const auto fileName = (!tDatumsNoPtr[0]->name.empty()
                    ? tDatumsNoPtr[0]->name : std::to_string(tDatumsNoPtr[0]->id));
                spKeypointSaver->saveKeypoints(keypointVector, fileName, "face");
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WFaceSaver);
}

#endif // OPENPOSE_FILESTREAM_W_FACE_SAVER_HPP
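workConsumer() gathers every datum's faceKeypoints into one vector and saves them under the first datum's name, falling back to its numeric id, tagged with the "face" keyword. A usage sketch; the KeypointSaver constructor arguments and the output directory are assumptions for illustration, not taken from the listing:

    // Sketch only: construct a saver and feed it one batch. TDatums as in the earlier sketch.
    const auto keypointSaver = std::make_shared<op::KeypointSaver>("output/", op::DataFormat::Json);
    op::WFaceSaver<TDatums> wFaceSaver{keypointSaver};
    wFaceSaver.workConsumer(tDatums); // file name = datum name, or std::to_string(id) if the name is empty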
diff --git a/web/html/doc/w_fps_max_8hpp.html b/web/html/doc/w_fps_max_8hpp.html
new file mode 100644
index 000000000..16db0b26d
--- /dev/null
+++ b/web/html/doc/w_fps_max_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/thread/wFpsMax.hpp File Reference
wFpsMax.hpp File Reference

#include <thread>
#include <openpose/core/common.hpp>
#include <openpose/thread/worker.hpp>
#include <openpose/utilities/fastMath.hpp>

Classes
    class op::WFpsMax< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WFpsMax)
diff --git a/web/html/doc/w_fps_max_8hpp.js b/web/html/doc/w_fps_max_8hpp.js
new file mode 100644
index 000000000..9b6202818
--- /dev/null
+++ b/web/html/doc/w_fps_max_8hpp.js
@@ -0,0 +1,5 @@
+var w_fps_max_8hpp =
+[
+  [ "WFpsMax", "classop_1_1_w_fps_max.html", "classop_1_1_w_fps_max" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_fps_max_8hpp.html#adfc12925650978828707c1c0dcbebd0e", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_fps_max_8hpp_source.html b/web/html/doc/w_fps_max_8hpp_source.html
new file mode 100644
index 000000000..fe40ca3c6
--- /dev/null
+++ b/web/html/doc/w_fps_max_8hpp_source.html
@@ -0,0 +1,203 @@
OpenPose: include/openpose/thread/wFpsMax.hpp Source File
wFpsMax.hpp

#ifndef OPENPOSE_THREAD_W_FPS_MAX_HPP
#define OPENPOSE_THREAD_W_FPS_MAX_HPP

#include <thread>
#include <openpose/core/common.hpp>
#include <openpose/thread/worker.hpp>
#include <openpose/utilities/fastMath.hpp>

namespace op
{
    template<typename TDatums>
    class WFpsMax : public Worker<TDatums>
    {
    public:
        explicit WFpsMax(const double fpsMax);
        virtual ~WFpsMax();
        void initializationOnThread();
        void work(TDatums& tDatums);
    private:
        const unsigned long long mNanosecondsToSleep;
        DELETE_COPY(WFpsMax);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WFpsMax<TDatums>::WFpsMax(const double fpsMax) :
        mNanosecondsToSleep{uLongLongRound(1e9/fpsMax)}
    {
    }

    template<typename TDatums>
    WFpsMax<TDatums>::~WFpsMax()
    {
    }

    template<typename TDatums>
    void WFpsMax<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WFpsMax<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            // Debugging log
            opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            // Profiling speed
            const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
            // tDatums not used --> Avoid warning
            UNUSED(tDatums);
            // Sleep the desired time
            std::this_thread::sleep_for(std::chrono::nanoseconds{mNanosecondsToSleep});
            // Profiling speed
            Profiler::timerEnd(profilerKey);
            Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
            // Debugging log
            opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WFpsMax);
}

#endif // OPENPOSE_THREAD_W_FPS_MAX_HPP
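The only state is mNanosecondsToSleep, fixed at construction as uLongLongRound(1e9 / fpsMax); every call to work() just sleeps for that long, which caps the pipeline throughput. A worked example for a 30 FPS cap (TDatums as in the earlier sketch):

    // 1e9 / 30 = 33,333,333.3 ns -> uLongLongRound(...) = 33,333,333 ns (~33.3 ms per frame)
    op::WFpsMax<TDatums> wFpsMax{30.};
    wFpsMax.work(tDatums); // sleeps ~33.3 ms regardless of the batch content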
diff --git a/web/html/doc/w_gui3_d_8hpp.html b/web/html/doc/w_gui3_d_8hpp.html
new file mode 100644
index 000000000..dffccfd7b
--- /dev/null
+++ b/web/html/doc/w_gui3_d_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/gui/wGui3D.hpp File Reference
wGui3D.hpp File Reference

Classes
    class op::WGui3D< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WGui3D)
diff --git a/web/html/doc/w_gui3_d_8hpp.js b/web/html/doc/w_gui3_d_8hpp.js
new file mode 100644
index 000000000..66f5ccdb6
--- /dev/null
+++ b/web/html/doc/w_gui3_d_8hpp.js
@@ -0,0 +1,5 @@
+var w_gui3_d_8hpp =
+[
+  [ "WGui3D", "classop_1_1_w_gui3_d.html", "classop_1_1_w_gui3_d" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_gui3_d_8hpp.html#a54b38240e45009f7e6a25d956ac96fe0", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_gui3_d_8hpp_source.html b/web/html/doc/w_gui3_d_8hpp_source.html
new file mode 100644
index 000000000..237f1a6f2
--- /dev/null
+++ b/web/html/doc/w_gui3_d_8hpp_source.html
@@ -0,0 +1,235 @@
OpenPose: include/openpose/gui/wGui3D.hpp Source File
wGui3D.hpp

#ifndef OPENPOSE_GUI_W_GUI_3D_HPP
#define OPENPOSE_GUI_W_GUI_3D_HPP

#include <openpose/gui/gui3D.hpp>

namespace op
{
    // This worker will do 3-D rendering
    template<typename TDatums>
    class WGui3D : public WorkerConsumer<TDatums>
    {
    public:
        explicit WGui3D(const std::shared_ptr<Gui3D>& gui3D);
        virtual ~WGui3D();
        void initializationOnThread();
        void workConsumer(const TDatums& tDatums);
    private:
        std::shared_ptr<Gui3D> spGui3D;
        DELETE_COPY(WGui3D);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WGui3D<TDatums>::WGui3D(const std::shared_ptr<Gui3D>& gui3D) :
        spGui3D{gui3D}
    {
    }

    template<typename TDatums>
    WGui3D<TDatums>::~WGui3D()
    {
    }

    template<typename TDatums>
    void WGui3D<TDatums>::initializationOnThread()
    {
        try
        {
            spGui3D->initializationOnThread();
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    template<typename TDatums>
    void WGui3D<TDatums>::workConsumer(const TDatums& tDatums)
    {
        try
        {
            // tDatums might be empty but we still wanna update the GUI
            if (tDatums != nullptr)
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Update cvMat & keypoints
                if (!tDatums->empty())
                {
                    // Update cvMat
                    std::vector<Matrix> cvOutputDatas;
                    for (auto& tDatumPtr : *tDatums)
                        cvOutputDatas.emplace_back(tDatumPtr->cvOutputData);
                    spGui3D->setImage(cvOutputDatas);
                    // Update keypoints
                    auto& tDatumPtr = (*tDatums)[0];
                    spGui3D->setKeypoints(
                        tDatumPtr->poseKeypoints3D, tDatumPtr->faceKeypoints3D, tDatumPtr->handKeypoints3D[0],
                        tDatumPtr->handKeypoints3D[1]);
                }
                // Refresh/update GUI
                spGui3D->update();
                // Read OpenCV mat equivalent
                if (!tDatums->empty())
                {
                    auto& tDatumPtr = (*tDatums)[0];
                    tDatumPtr->cvOutputData3D = spGui3D->readCvMat();
                }
                // Profiling speed
                if (!tDatums->empty())
                {
                    Profiler::timerEnd(profilerKey);
                    Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                }
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WGui3D);
}

#endif // OPENPOSE_GUI_W_GUI_3D_HPP
diff --git a/web/html/doc/w_gui_8hpp.html b/web/html/doc/w_gui_8hpp.html
new file mode 100644
index 000000000..7edd8f497
--- /dev/null
+++ b/web/html/doc/w_gui_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/gui/wGui.hpp File Reference
wGui.hpp File Reference

Classes
    class op::WGui< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WGui)
diff --git a/web/html/doc/w_gui_8hpp.js b/web/html/doc/w_gui_8hpp.js
new file mode 100644
index 000000000..967823fc3
--- /dev/null
+++ b/web/html/doc/w_gui_8hpp.js
@@ -0,0 +1,5 @@
+var w_gui_8hpp =
+[
+  [ "WGui", "classop_1_1_w_gui.html", "classop_1_1_w_gui" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_gui_8hpp.html#ade3b2e4b105242a3cf41def3def1691d", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_gui_8hpp_source.html b/web/html/doc/w_gui_8hpp_source.html
new file mode 100644
index 000000000..1a7788e67
--- /dev/null
+++ b/web/html/doc/w_gui_8hpp_source.html
@@ -0,0 +1,222 @@
OpenPose: include/openpose/gui/wGui.hpp Source File
wGui.hpp

#ifndef OPENPOSE_GUI_W_GUI_HPP
#define OPENPOSE_GUI_W_GUI_HPP

#include <openpose/gui/gui.hpp>

namespace op
{
    template<typename TDatums>
    class WGui : public WorkerConsumer<TDatums>
    {
    public:
        explicit WGui(const std::shared_ptr<Gui>& gui);
        virtual ~WGui();
        void initializationOnThread();
        void workConsumer(const TDatums& tDatums);
    private:
        std::shared_ptr<Gui> spGui;
        DELETE_COPY(WGui);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WGui<TDatums>::WGui(const std::shared_ptr<Gui>& gui) :
        spGui{gui}
    {
    }

    template<typename TDatums>
    WGui<TDatums>::~WGui()
    {
    }

    template<typename TDatums>
    void WGui<TDatums>::initializationOnThread()
    {
        try
        {
            spGui->initializationOnThread();
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    template<typename TDatums>
    void WGui<TDatums>::workConsumer(const TDatums& tDatums)
    {
        try
        {
            // tDatums might be empty but we still wanna update the GUI
            if (tDatums != nullptr)
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Update cvMat
                if (!tDatums->empty())
                {
                    std::vector<Matrix> cvOutputDatas;
                    for (auto& tDatumPtr : *tDatums)
                        cvOutputDatas.emplace_back(tDatumPtr->cvOutputData);
                    spGui->setImage(cvOutputDatas);
                }
                // Refresh/update GUI
                spGui->update();
                // Profiling speed
                if (!tDatums->empty())
                {
                    Profiler::timerEnd(profilerKey);
                    Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                }
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WGui);
}

#endif // OPENPOSE_GUI_W_GUI_HPP
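Note the weaker precondition compared with the regular workers: the GUI consumers only require tDatums to be non-null, so the window keeps refreshing even when a batch carries no datums, whereas workers such as WFaceRenderer skip their work unless checkNoNullNorEmpty() passes. Schematically (no new API, just the two checks used in these headers):

    if (tDatums != nullptr)               // WGui / WGui3D: refresh the GUI even for an empty batch
        spGui->update();
    if (checkNoNullNorEmpty(tDatums))     // WFaceRenderer, WGuiInfoAdder, ...: need at least one datum
    {
        // per-datum processing
    }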
diff --git a/web/html/doc/w_gui_adam_8hpp.html b/web/html/doc/w_gui_adam_8hpp.html
new file mode 100644
index 000000000..e35efc289
--- /dev/null
+++ b/web/html/doc/w_gui_adam_8hpp.html
@@ -0,0 +1,103 @@
OpenPose: include/openpose/gui/wGuiAdam.hpp File Reference
wGuiAdam.hpp File Reference
diff --git a/web/html/doc/w_gui_adam_8hpp_source.html b/web/html/doc/w_gui_adam_8hpp_source.html
new file mode 100644
index 000000000..1cb7c2004
--- /dev/null
+++ b/web/html/doc/w_gui_adam_8hpp_source.html
@@ -0,0 +1,224 @@
OpenPose: include/openpose/gui/wGuiAdam.hpp Source File
wGuiAdam.hpp

#ifdef USE_3D_ADAM_MODEL
#ifndef OPENPOSE_GUI_W_GUI_ADAM_HPP
#define OPENPOSE_GUI_W_GUI_ADAM_HPP

namespace op
{
    template<typename TDatums>
    class WGuiAdam : public WorkerConsumer<TDatums>
    {
    public:
        explicit WGuiAdam(const std::shared_ptr<GuiAdam>& guiAdam);
        virtual ~WGuiAdam();
        void initializationOnThread();
        void workConsumer(const TDatums& tDatums);
    private:
        std::shared_ptr<GuiAdam> spGuiAdam;
        DELETE_COPY(WGuiAdam);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WGuiAdam<TDatums>::WGuiAdam(const std::shared_ptr<GuiAdam>& guiAdam) :
        spGuiAdam{guiAdam}
    {
    }

    template<typename TDatums>
    WGuiAdam<TDatums>::~WGuiAdam()
    {
    }

    template<typename TDatums>
    void WGuiAdam<TDatums>::initializationOnThread()
    {
        try
        {
            spGuiAdam->initializationOnThread();
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    template<typename TDatums>
    void WGuiAdam<TDatums>::workConsumer(const TDatums& tDatums)
    {
        try
        {
            // tDatums might be empty but we still wanna update the GUI
            if (tDatums != nullptr)
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Update cvMat & keypoints
                if (!tDatums->empty())
                {
                    // Update cvMat
                    std::vector<Mat> cvOutputDatas;
                    for (auto& tDatumPtr : *tDatums)
                        cvOutputDatas.emplace_back(tDatumPtr->cvOutputData);
                    spGuiAdam->setImage(cvOutputDatas);
                    // Update keypoints
                    const auto& tDatumPtr = (*tDatums)[0];
                    if (!tDatumPtr->poseKeypoints3D.empty())
                        spGuiAdam->generateMesh(
                            tDatumPtr->poseKeypoints3D, tDatumPtr->faceKeypoints3D, tDatumPtr->handKeypoints3D,
                            tDatumPtr->adamPose.data(), tDatumPtr->adamTranslation.data(), tDatumPtr->vtVec.data(),
                            tDatumPtr->vtVec.rows(), tDatumPtr->j0Vec.data(), tDatumPtr->j0Vec.rows(),
                            tDatumPtr->adamFaceCoeffsExp.data());
                }
                // Refresh/update GUI
                spGuiAdam->update();
                // Profiling speed
                if (!tDatums->empty())
                {
                    Profiler::timerEnd(profilerKey);
                    Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                }
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WGuiAdam);
}

#endif // OPENPOSE_GUI_W_GUI_ADAM_HPP
#endif
diff --git a/web/html/doc/w_gui_info_adder_8hpp.html b/web/html/doc/w_gui_info_adder_8hpp.html
new file mode 100644
index 000000000..cc214b484
--- /dev/null
+++ b/web/html/doc/w_gui_info_adder_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/gui/wGuiInfoAdder.hpp File Reference
wGuiInfoAdder.hpp File Reference

Classes
    class op::WGuiInfoAdder< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WGuiInfoAdder)
diff --git a/web/html/doc/w_gui_info_adder_8hpp.js b/web/html/doc/w_gui_info_adder_8hpp.js
new file mode 100644
index 000000000..a30b7a3a7
--- /dev/null
+++ b/web/html/doc/w_gui_info_adder_8hpp.js
@@ -0,0 +1,5 @@
+var w_gui_info_adder_8hpp =
+[
+  [ "WGuiInfoAdder", "classop_1_1_w_gui_info_adder.html", "classop_1_1_w_gui_info_adder" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_gui_info_adder_8hpp.html#ae88e9ced5d14fa221205b492ff76c56b", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_gui_info_adder_8hpp_source.html b/web/html/doc/w_gui_info_adder_8hpp_source.html
new file mode 100644
index 000000000..c9c591f8f
--- /dev/null
+++ b/web/html/doc/w_gui_info_adder_8hpp_source.html
@@ -0,0 +1,209 @@
OpenPose: include/openpose/gui/wGuiInfoAdder.hpp Source File
wGuiInfoAdder.hpp

#ifndef OPENPOSE_GUI_W_ADD_GUI_INFO_HPP
#define OPENPOSE_GUI_W_ADD_GUI_INFO_HPP

namespace op
{
    template<typename TDatums>
    class WGuiInfoAdder : public Worker<TDatums>
    {
    public:
        explicit WGuiInfoAdder(const std::shared_ptr<GuiInfoAdder>& guiInfoAdder);
        virtual ~WGuiInfoAdder();
        void initializationOnThread();
        void work(TDatums& tDatums);
    private:
        std::shared_ptr<GuiInfoAdder> spGuiInfoAdder;
        DELETE_COPY(WGuiInfoAdder);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WGuiInfoAdder<TDatums>::WGuiInfoAdder(const std::shared_ptr<GuiInfoAdder>& guiInfoAdder) :
        spGuiInfoAdder{guiInfoAdder}
    {
    }

    template<typename TDatums>
    WGuiInfoAdder<TDatums>::~WGuiInfoAdder()
    {
    }

    template<typename TDatums>
    void WGuiInfoAdder<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WGuiInfoAdder<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Add GUI components to frame
                for (auto& tDatumPtr : *tDatums)
                    spGuiInfoAdder->addInfo(
                        tDatumPtr->cvOutputData,
                        std::max(tDatumPtr->poseKeypoints.getSize(0), tDatumPtr->faceKeypoints.getSize(0)),
                        tDatumPtr->id, tDatumPtr->elementRendered.second, tDatumPtr->frameNumber,
                        tDatumPtr->poseIds, tDatumPtr->poseKeypoints);
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WGuiInfoAdder);
}

#endif // OPENPOSE_GUI_W_ADD_GUI_INFO_HPP
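work() forwards one addInfo() call per datum; the people counter it passes is the larger of the pose and face keypoint counts, so the overlay stays correct when only one of the two extractors is enabled. A manual-drive sketch, assuming guiInfoAdder is an already-constructed std::shared_ptr<op::GuiInfoAdder> and TDatums as in the earlier sketch:

    // Sketch only: run the worker once outside the usual pipeline.
    op::WGuiInfoAdder<TDatums> wGuiInfoAdder{guiInfoAdder};
    wGuiInfoAdder.initializationOnThread();
    wGuiInfoAdder.work(tDatums); // draws id, frame number, rendered-element name and people count on cvOutputData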
diff --git a/web/html/doc/w_hand_detector_8hpp.html b/web/html/doc/w_hand_detector_8hpp.html
new file mode 100644
index 000000000..ce5d88cfa
--- /dev/null
+++ b/web/html/doc/w_hand_detector_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/hand/wHandDetector.hpp File Reference
wHandDetector.hpp File Reference

Classes
    class op::WHandDetector< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WHandDetector)
diff --git a/web/html/doc/w_hand_detector_8hpp.js b/web/html/doc/w_hand_detector_8hpp.js
new file mode 100644
index 000000000..6b26010f1
--- /dev/null
+++ b/web/html/doc/w_hand_detector_8hpp.js
@@ -0,0 +1,5 @@
+var w_hand_detector_8hpp =
+[
+  [ "WHandDetector", "classop_1_1_w_hand_detector.html", "classop_1_1_w_hand_detector" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_hand_detector_8hpp.html#a0424a8e4dc8ceb5e8d8a2230c157a7fd", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_hand_detector_8hpp_source.html b/web/html/doc/w_hand_detector_8hpp_source.html
new file mode 100644
index 000000000..f6572cdd0
--- /dev/null
+++ b/web/html/doc/w_hand_detector_8hpp_source.html
@@ -0,0 +1,205 @@
OpenPose: include/openpose/hand/wHandDetector.hpp Source File
wHandDetector.hpp

#ifndef OPENPOSE_HAND_W_HAND_DETECTOR_HPP
#define OPENPOSE_HAND_W_HAND_DETECTOR_HPP

namespace op
{
    template<typename TDatums>
    class WHandDetector : public Worker<TDatums>
    {
    public:
        explicit WHandDetector(const std::shared_ptr<HandDetector>& handDetector);
        virtual ~WHandDetector();
        void initializationOnThread();
        void work(TDatums& tDatums);
    private:
        std::shared_ptr<HandDetector> spHandDetector;
        DELETE_COPY(WHandDetector);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WHandDetector<TDatums>::WHandDetector(const std::shared_ptr<HandDetector>& handDetector) :
        spHandDetector{handDetector}
    {
    }

    template<typename TDatums>
    WHandDetector<TDatums>::~WHandDetector()
    {
    }

    template<typename TDatums>
    void WHandDetector<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WHandDetector<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Detect people hand
                for (auto& tDatumPtr : *tDatums)
                    tDatumPtr->handRectangles = spHandDetector->detectHands(tDatumPtr->poseKeypoints);
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WHandDetector);
}

#endif // OPENPOSE_HAND_W_HAND_DETECTOR_HPP
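This worker only fills tDatumPtr->handRectangles from the body pose; the hand keypoint extractor that typically follows in the pipeline consumes those rectangles. A minimal manual-drive sketch, assuming handDetector is an already-built std::shared_ptr<op::HandDetector> and TDatums as in the earlier sketch:

    // Sketch only: detection step in isolation; downstream a hand keypoint extractor
    // would read the handRectangles written here.
    op::WHandDetector<TDatums> wHandDetector{handDetector};
    wHandDetector.initializationOnThread();
    wHandDetector.work(tDatums); // handRectangles <- detectHands(poseKeypoints) for every datum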
diff --git a/web/html/doc/w_hand_detector_from_txt_8hpp.html b/web/html/doc/w_hand_detector_from_txt_8hpp.html
new file mode 100644
index 000000000..d37a13f8c
--- /dev/null
+++ b/web/html/doc/w_hand_detector_from_txt_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/hand/wHandDetectorFromTxt.hpp File Reference
wHandDetectorFromTxt.hpp File Reference

Classes
    class op::WHandDetectorFromTxt< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WHandDetectorFromTxt)
diff --git a/web/html/doc/w_hand_detector_from_txt_8hpp.js b/web/html/doc/w_hand_detector_from_txt_8hpp.js
new file mode 100644
index 000000000..b80daec2e
--- /dev/null
+++ b/web/html/doc/w_hand_detector_from_txt_8hpp.js
@@ -0,0 +1,5 @@
+var w_hand_detector_from_txt_8hpp =
+[
+  [ "WHandDetectorFromTxt", "classop_1_1_w_hand_detector_from_txt.html", "classop_1_1_w_hand_detector_from_txt" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_hand_detector_from_txt_8hpp.html#a767385c8d3ebe736e1752825ab4d4ea0", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_hand_detector_from_txt_8hpp_source.html b/web/html/doc/w_hand_detector_from_txt_8hpp_source.html
new file mode 100644
index 000000000..201a48aed
--- /dev/null
+++ b/web/html/doc/w_hand_detector_from_txt_8hpp_source.html
@@ -0,0 +1,205 @@
OpenPose: include/openpose/hand/wHandDetectorFromTxt.hpp Source File
wHandDetectorFromTxt.hpp

#ifndef OPENPOSE_HAND_W_HAND_DETECTOR_FROM_JSON_HPP
#define OPENPOSE_HAND_W_HAND_DETECTOR_FROM_JSON_HPP

namespace op
{
    template<typename TDatums>
    class WHandDetectorFromTxt : public Worker<TDatums>
    {
    public:
        explicit WHandDetectorFromTxt(const std::shared_ptr<HandDetectorFromTxt>& handDetectorFromTxt);
        virtual ~WHandDetectorFromTxt();
        void initializationOnThread();
        void work(TDatums& tDatums);
    private:
        std::shared_ptr<HandDetectorFromTxt> spHandDetectorFromTxt;
        DELETE_COPY(WHandDetectorFromTxt);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WHandDetectorFromTxt<TDatums>::WHandDetectorFromTxt(const std::shared_ptr<HandDetectorFromTxt>& handDetectorFromTxt) :
        spHandDetectorFromTxt{handDetectorFromTxt}
    {
    }

    template<typename TDatums>
    WHandDetectorFromTxt<TDatums>::~WHandDetectorFromTxt()
    {
    }

    template<typename TDatums>
    void WHandDetectorFromTxt<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WHandDetectorFromTxt<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Detect people hand
                for (auto& tDatumPtr : *tDatums)
                    tDatumPtr->handRectangles = spHandDetectorFromTxt->detectHands();
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WHandDetectorFromTxt);
}

#endif // OPENPOSE_HAND_W_HAND_DETECTOR_FROM_JSON_HPP
diff --git a/web/html/doc/w_hand_detector_tracking_8hpp.html b/web/html/doc/w_hand_detector_tracking_8hpp.html
new file mode 100644
index 000000000..1935a2155
--- /dev/null
+++ b/web/html/doc/w_hand_detector_tracking_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/hand/wHandDetectorTracking.hpp File Reference
wHandDetectorTracking.hpp File Reference

Classes
    class op::WHandDetectorTracking< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WHandDetectorTracking)
diff --git a/web/html/doc/w_hand_detector_tracking_8hpp.js b/web/html/doc/w_hand_detector_tracking_8hpp.js
new file mode 100644
index 000000000..f5b3bad80
--- /dev/null
+++ b/web/html/doc/w_hand_detector_tracking_8hpp.js
@@ -0,0 +1,5 @@
+var w_hand_detector_tracking_8hpp =
+[
+  [ "WHandDetectorTracking", "classop_1_1_w_hand_detector_tracking.html", "classop_1_1_w_hand_detector_tracking" ],
+  [ "COMPILE_TEMPLATE_DATUM", "w_hand_detector_tracking_8hpp.html#a361310c59d16e88a4d2450a80f078f01", null ]
+];
\ No newline at end of file
diff --git a/web/html/doc/w_hand_detector_tracking_8hpp_source.html b/web/html/doc/w_hand_detector_tracking_8hpp_source.html
new file mode 100644
index 000000000..4f6e3769b
--- /dev/null
+++ b/web/html/doc/w_hand_detector_tracking_8hpp_source.html
@@ -0,0 +1,205 @@
OpenPose: include/openpose/hand/wHandDetectorTracking.hpp Source File
wHandDetectorTracking.hpp

#ifndef OPENPOSE_HAND_W_HAND_DETECTOR_TRACKING_HPP
#define OPENPOSE_HAND_W_HAND_DETECTOR_TRACKING_HPP

namespace op
{
    template<typename TDatums>
    class WHandDetectorTracking : public Worker<TDatums>
    {
    public:
        explicit WHandDetectorTracking(const std::shared_ptr<HandDetector>& handDetector);
        virtual ~WHandDetectorTracking();
        void initializationOnThread();
        void work(TDatums& tDatums);
    private:
        std::shared_ptr<HandDetector> spHandDetector;
        DELETE_COPY(WHandDetectorTracking);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WHandDetectorTracking<TDatums>::WHandDetectorTracking(const std::shared_ptr<HandDetector>& handDetector) :
        spHandDetector{handDetector}
    {
    }

    template<typename TDatums>
    WHandDetectorTracking<TDatums>::~WHandDetectorTracking()
    {
    }

    template<typename TDatums>
    void WHandDetectorTracking<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WHandDetectorTracking<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Detect people hand
                for (auto& tDatumPtr : *tDatums)
                    tDatumPtr->handRectangles = spHandDetector->trackHands(tDatumPtr->poseKeypoints);
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WHandDetectorTracking);
}

#endif // OPENPOSE_HAND_W_HAND_DETECTOR_TRACKING_HPP
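WHandDetector, WHandDetectorTracking and WHandDetectorUpdate (next file) all wrap the same HandDetector type; the tracking variant calls trackHands() where the plain detector calls detectHands(). A sketch of sharing one detector instance across the three workers; the HandDetector constructor argument and the poseModel value are assumptions for illustration:

    // Sketch only: one HandDetector shared by the detect/track/update workers.
    const auto handDetector = std::make_shared<op::HandDetector>(poseModel); // poseModel: assumed op::PoseModel value
    auto wTrack  = std::make_shared<op::WHandDetectorTracking<TDatums>>(handDetector);
    auto wDetect = std::make_shared<op::WHandDetector<TDatums>>(handDetector);
    auto wUpdate = std::make_shared<op::WHandDetectorUpdate<TDatums>>(handDetector);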
    + + + + diff --git a/web/html/doc/w_hand_detector_update_8hpp.html b/web/html/doc/w_hand_detector_update_8hpp.html new file mode 100644 index 000000000..aa6a44ec3 --- /dev/null +++ b/web/html/doc/w_hand_detector_update_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/hand/wHandDetectorUpdate.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wHandDetectorUpdate.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WHandDetectorUpdate< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WHandDetectorUpdate)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_hand_detector_update_8hpp.js b/web/html/doc/w_hand_detector_update_8hpp.js new file mode 100644 index 000000000..6ab3eff2d --- /dev/null +++ b/web/html/doc/w_hand_detector_update_8hpp.js @@ -0,0 +1,5 @@ +var w_hand_detector_update_8hpp = +[ + [ "WHandDetectorUpdate", "classop_1_1_w_hand_detector_update.html", "classop_1_1_w_hand_detector_update" ], + [ "COMPILE_TEMPLATE_DATUM", "w_hand_detector_update_8hpp.html#a5cc3f625b2644b1aade85a9458b5503a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_hand_detector_update_8hpp_source.html b/web/html/doc/w_hand_detector_update_8hpp_source.html new file mode 100644 index 000000000..aeb653edd --- /dev/null +++ b/web/html/doc/w_hand_detector_update_8hpp_source.html @@ -0,0 +1,205 @@ + + + + + + + +OpenPose: include/openpose/hand/wHandDetectorUpdate.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wHandDetectorUpdate.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_HAND_W_HAND_DETECTOR_UPDATE_HPP
    +
    2 #define OPENPOSE_HAND_W_HAND_DETECTOR_UPDATE_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WHandDetectorUpdate : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WHandDetectorUpdate(const std::shared_ptr<HandDetector>& handDetector);
    +
    15 
    +
    16  virtual ~WHandDetectorUpdate();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<HandDetector> spHandDetector;
    +
    24 
    +
    25  DELETE_COPY(WHandDetectorUpdate);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WHandDetectorUpdate<TDatums>::WHandDetectorUpdate(const std::shared_ptr<HandDetector>& handDetector) :
    +
    39  spHandDetector{handDetector}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WHandDetectorUpdate<TDatums>::work(TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
64  // Update the hand tracker with the latest hand keypoints
    +
    65  for (auto& tDatumPtr : *tDatums)
    +
    66  spHandDetector->updateTracker(tDatumPtr->handKeypoints, tDatumPtr->id);
    +
    67  // Profiling speed
    +
    68  Profiler::timerEnd(profilerKey);
    +
    69  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    70  // Debugging log
    +
    71  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    72  }
    +
    73  }
    +
    74  catch (const std::exception& e)
    +
    75  {
    +
    76  this->stop();
    +
    77  tDatums = nullptr;
    +
    78  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    79  }
    +
    80  }
    +
    81 
    + +
    83 }
    +
    84 
    +
    85 #endif // OPENPOSE_HAND_W_HAND_DETECTOR_UPDATE_HPP
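`WHandDetectorUpdate` is the second half of the tracking mechanism: after the hand network has produced keypoints, it feeds them back into the same `HandDetector` via `updateTracker()`, so the next call to `trackHands()` starts from fresh state. A sketch of one plausible ordering, under the assumption that tracking runs before the extractor and the update runs after it, and that both workers share a single `HandDetector` instance; the factory function is hypothetical.

```cpp
// Sketch: one plausible worker ordering for tracking-based hand detection.
// Both tracking workers must share the same op::HandDetector so that
// updateTracker() feeds the state trackHands() reads on the next frame.
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

std::vector<std::shared_ptr<op::Worker<TDatumsSP>>> makeHandTrackingWorkers(
    const std::shared_ptr<op::HandDetector>& handDetector,
    const std::shared_ptr<op::HandExtractorNet>& handExtractorNet)
{
    return {
        std::make_shared<op::WHandDetectorTracking<TDatumsSP>>(handDetector), // proposes handRectangles
        std::make_shared<op::WHandExtractorNet<TDatumsSP>>(handExtractorNet), // fills handKeypoints
        std::make_shared<op::WHandDetectorUpdate<TDatumsSP>>(handDetector)    // feeds keypoints back
    };
}
```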
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + + +
    WHandDetectorUpdate(const std::shared_ptr< HandDetector > &handDetector)
    +
    void work(TDatums &tDatums)
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_hand_extractor_net_8hpp.html b/web/html/doc/w_hand_extractor_net_8hpp.html new file mode 100644 index 000000000..4187c3131 --- /dev/null +++ b/web/html/doc/w_hand_extractor_net_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/hand/wHandExtractorNet.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wHandExtractorNet.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WHandExtractorNet< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WHandExtractorNet)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_hand_extractor_net_8hpp.js b/web/html/doc/w_hand_extractor_net_8hpp.js new file mode 100644 index 000000000..109a91909 --- /dev/null +++ b/web/html/doc/w_hand_extractor_net_8hpp.js @@ -0,0 +1,5 @@ +var w_hand_extractor_net_8hpp = +[ + [ "WHandExtractorNet", "classop_1_1_w_hand_extractor_net.html", "classop_1_1_w_hand_extractor_net" ], + [ "COMPILE_TEMPLATE_DATUM", "w_hand_extractor_net_8hpp.html#ae5cc3e92ffd9696f01ce7824ebbd0759", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_hand_extractor_net_8hpp_source.html b/web/html/doc/w_hand_extractor_net_8hpp_source.html new file mode 100644 index 000000000..d273a0a38 --- /dev/null +++ b/web/html/doc/w_hand_extractor_net_8hpp_source.html @@ -0,0 +1,213 @@ + + + + + + + +OpenPose: include/openpose/hand/wHandExtractorNet.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wHandExtractorNet.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_HAND_W_HAND_EXTRACTOR_NET_HPP
    +
    2 #define OPENPOSE_HAND_W_HAND_EXTRACTOR_NET_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WHandExtractorNet : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WHandExtractorNet(const std::shared_ptr<HandExtractorNet>& handExtractorNet);
    +
    15 
    +
    16  virtual ~WHandExtractorNet();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<HandExtractorNet> spHandExtractorNet;
    +
    24 
    +
    25  DELETE_COPY(WHandExtractorNet);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WHandExtractorNet<TDatums>::WHandExtractorNet(const std::shared_ptr<HandExtractorNet>& handExtractorNet) :
    +
    39  spHandExtractorNet{handExtractorNet}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  spHandExtractorNet->initializationOnThread();
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WHandExtractorNet<TDatums>::work(TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
65  // Extract people's hands
    +
    66  for (auto& tDatumPtr : *tDatums)
    +
    67  {
    +
    68  spHandExtractorNet->forwardPass(tDatumPtr->handRectangles, tDatumPtr->cvInputData);
    +
    69  for (auto hand = 0 ; hand < 2 ; hand++)
    +
    70  {
    +
    71  tDatumPtr->handHeatMaps[hand] = spHandExtractorNet->getHeatMaps()[hand].clone();
    +
    72  tDatumPtr->handKeypoints[hand] = spHandExtractorNet->getHandKeypoints()[hand].clone();
    +
    73  }
    +
    74  }
    +
    75  // Profiling speed
    +
    76  Profiler::timerEnd(profilerKey);
    +
    77  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    78  // Debugging log
    +
    79  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    80  }
    +
    81  }
    +
    82  catch (const std::exception& e)
    +
    83  {
    +
    84  this->stop();
    +
    85  tDatums = nullptr;
    +
    86  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    87  }
    +
    88  }
    +
    89 
    + +
    91 }
    +
    92 
    +
    93 #endif // OPENPOSE_HAND_W_HAND_EXTRACTOR_NET_HPP
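`work()` runs one forward pass per datum and then copies both hands out of the extractor; index 0 is the left hand and index 1 the right hand (matching the `hand_left`/`hand_right` keys used by `WHandSaver` later in this diff), and the `clone()` calls give each datum a deep copy so the extractor can reuse its internal buffers. A small sketch of reading those results afterwards, assuming the default datum container and `op::Array::getSize(0)` as the number of detected people.

```cpp
// Sketch: inspecting the per-hand output that WHandExtractorNet::work() leaves
// on each datum. Index 0 = left hand, 1 = right hand.
#include <cstdio>
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

void printHandCounts(const std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>& tDatums)
{
    for (const auto& tDatumPtr : *tDatums)
        for (auto hand = 0; hand < 2; hand++)
        {
            const auto& keypoints = tDatumPtr->handKeypoints[hand]; // deep copy made by work() via clone()
            const auto people = keypoints.empty() ? 0 : keypoints.getSize(0);
            std::printf("%s hand: %d people\n", hand == 0 ? "left" : "right", people);
        }
}
```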
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    void work(TDatums &tDatums)
    +
    WHandExtractorNet(const std::shared_ptr< HandExtractorNet > &handExtractorNet)
    + + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_hand_renderer_8hpp.html b/web/html/doc/w_hand_renderer_8hpp.html new file mode 100644 index 000000000..fe15b6258 --- /dev/null +++ b/web/html/doc/w_hand_renderer_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/hand/wHandRenderer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wHandRenderer.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WHandRenderer< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WHandRenderer)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_hand_renderer_8hpp.js b/web/html/doc/w_hand_renderer_8hpp.js new file mode 100644 index 000000000..a2b7d714a --- /dev/null +++ b/web/html/doc/w_hand_renderer_8hpp.js @@ -0,0 +1,5 @@ +var w_hand_renderer_8hpp = +[ + [ "WHandRenderer", "classop_1_1_w_hand_renderer.html", "classop_1_1_w_hand_renderer" ], + [ "COMPILE_TEMPLATE_DATUM", "w_hand_renderer_8hpp.html#a635579f5f8d20b8e65f4f94da4d3d2f2", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_hand_renderer_8hpp_source.html b/web/html/doc/w_hand_renderer_8hpp_source.html new file mode 100644 index 000000000..3e5490302 --- /dev/null +++ b/web/html/doc/w_hand_renderer_8hpp_source.html @@ -0,0 +1,207 @@ + + + + + + + +OpenPose: include/openpose/hand/wHandRenderer.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wHandRenderer.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_HAND_W_HAND_RENDERER_HPP
    +
    2 #define OPENPOSE_HAND_W_HAND_RENDERER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WHandRenderer : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WHandRenderer(const std::shared_ptr<HandRenderer>& handRenderer);
    +
    15 
    +
    16  virtual ~WHandRenderer();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<HandRenderer> spHandRenderer;
    +
    24 
    +
    25  DELETE_COPY(WHandRenderer);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WHandRenderer<TDatums>::WHandRenderer(const std::shared_ptr<HandRenderer>& handRenderer) :
    +
    39  spHandRenderer{handRenderer}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  spHandRenderer->initializationOnThread();
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WHandRenderer<TDatums>::work(TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
65  // Render people's hands
    +
    66  for (auto& tDatumPtr : *tDatums)
    +
    67  spHandRenderer->renderHand(
    +
    68  tDatumPtr->outputData, tDatumPtr->handKeypoints, (float)tDatumPtr->scaleInputToOutput);
    +
    69  // Profiling speed
    +
    70  Profiler::timerEnd(profilerKey);
    +
    71  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    72  // Debugging log
    +
    73  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    74  }
    +
    75  }
    +
    76  catch (const std::exception& e)
    +
    77  {
    +
    78  this->stop();
    +
    79  tDatums = nullptr;
    +
    80  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    81  }
    +
    82  }
    +
    83 
    + +
    85 }
    +
    86 
    +
    87 #endif // OPENPOSE_HAND_W_HAND_RENDERER_HPP
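`WHandRenderer`, like every worker in this batch, follows the same skeleton: guard with `checkNoNullNorEmpty`, time the body with `Profiler`, do the per-datum job, and on failure stop the worker, null the batch and report through `error()`. A sketch of a user-defined worker that copies that pattern; the class name and the per-datum job are placeholders.

```cpp
// Sketch: a user-defined Worker following the same pattern as WHandRenderer
// (guard, profile, per-datum job, stop-and-null on failure).
#include <exception>
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

template<typename TDatums>
class WMyPostProcessing : public op::Worker<TDatums>
{
public:
    void initializationOnThread() {}
    void work(TDatums& tDatums)
    {
        try
        {
            if (op::checkNoNullNorEmpty(tDatums))
            {
                const auto key = op::Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                for (auto& tDatumPtr : *tDatums)
                {
                    // ... per-datum processing on tDatumPtr goes here ...
                }
                op::Profiler::timerEnd(key);
                op::Profiler::printAveragedTimeMsOnIterationX(key, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
};
```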
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    void initializationOnThread()
    +
    WHandRenderer(const std::shared_ptr< HandRenderer > &handRenderer)
    +
    virtual ~WHandRenderer()
    +
    void work(TDatums &tDatums)
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_hand_saver_8hpp.html b/web/html/doc/w_hand_saver_8hpp.html new file mode 100644 index 000000000..1de77fda3 --- /dev/null +++ b/web/html/doc/w_hand_saver_8hpp.html @@ -0,0 +1,128 @@ + + + + + + + +OpenPose: include/openpose/filestream/wHandSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wHandSaver.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/w_hand_saver_8hpp.js b/web/html/doc/w_hand_saver_8hpp.js new file mode 100644 index 000000000..cfaa107c8 --- /dev/null +++ b/web/html/doc/w_hand_saver_8hpp.js @@ -0,0 +1,5 @@ +var w_hand_saver_8hpp = +[ + [ "WHandSaver", "classop_1_1_w_hand_saver.html", "classop_1_1_w_hand_saver" ], + [ "COMPILE_TEMPLATE_DATUM", "w_hand_saver_8hpp.html#a602d5d238fe0c7096698cf36b7dee9ab", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_hand_saver_8hpp_source.html b/web/html/doc/w_hand_saver_8hpp_source.html new file mode 100644 index 000000000..f4906c122 --- /dev/null +++ b/web/html/doc/w_hand_saver_8hpp_source.html @@ -0,0 +1,217 @@ + + + + + + + +OpenPose: include/openpose/filestream/wHandSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wHandSaver.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_W_HAND_SAVER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_W_HAND_SAVER_HPP
    +
    3 
    + + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums>
    +
    12  class WHandSaver : public WorkerConsumer<TDatums>
    +
    13  {
    +
    14  public:
    +
    15  explicit WHandSaver(const std::shared_ptr<KeypointSaver>& keypointSaver);
    +
    16 
    +
    17  virtual ~WHandSaver();
    +
    18 
    + +
    20 
    +
    21  void workConsumer(const TDatums& tDatums);
    +
    22 
    +
    23  private:
    +
    24  const std::shared_ptr<KeypointSaver> spKeypointSaver;
    +
    25 
    +
    26  DELETE_COPY(WHandSaver);
    +
    27  };
    +
    28 }
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 
    +
    34 // Implementation
    + +
    36 namespace op
    +
    37 {
    +
    38  template<typename TDatums>
    +
    39  WHandSaver<TDatums>::WHandSaver(const std::shared_ptr<KeypointSaver>& keypointSaver) :
    +
    40  spKeypointSaver{keypointSaver}
    +
    41  {
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums>
    + +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums>
    + +
    51  {
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WHandSaver<TDatums>::workConsumer(const TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    65  // T* to T
    +
    66  auto& tDatumsNoPtr = *tDatums;
    +
67  // Record people's hand keypoint data
    +
    68  const auto fileName = (!tDatumsNoPtr[0]->name.empty()
    +
    69  ? tDatumsNoPtr[0]->name : std::to_string(tDatumsNoPtr[0]->id));
    +
    70  std::vector<Array<float>> keypointVector(tDatumsNoPtr.size());
    +
    71  // Left hand
    +
    72  for (auto i = 0u; i < tDatumsNoPtr.size(); i++)
    +
    73  keypointVector[i] = tDatumsNoPtr[i]->handKeypoints[0];
    +
    74  spKeypointSaver->saveKeypoints(keypointVector, fileName, "hand_left");
    +
    75  // Right hand
    +
    76  for (auto i = 0u; i < tDatumsNoPtr.size(); i++)
    +
    77  keypointVector[i] = tDatumsNoPtr[i]->handKeypoints[1];
    +
    78  spKeypointSaver->saveKeypoints(keypointVector, fileName, "hand_right");
    +
    79  // Profiling speed
    +
    80  Profiler::timerEnd(profilerKey);
    +
    81  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    82  // Debugging log
    +
    83  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    84  }
    +
    85  }
    +
    86  catch (const std::exception& e)
    +
    87  {
    +
    88  this->stop();
    +
    89  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    90  }
    +
    91  }
    +
    92 
    + +
    94 }
    +
    95 
    +
    96 #endif // OPENPOSE_FILESTREAM_W_HAND_SAVER_HPP
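`WHandSaver` is a `WorkerConsumer`, so it sits at the end of the pipeline and only reads the batch (`workConsumer` takes a `const TDatums&`). The record is keyed by the first datum's `name` when set, otherwise by its `id`, and the two hands are written under the `hand_left` and `hand_right` keys. A sketch of wiring the consumer from an already-built `KeypointSaver`; its constructor arguments live in `filestream/keypointSaver.hpp`, not in this header, so it is taken as a parameter.

```cpp
// Sketch: attaching the hand-keypoint consumer at the end of a worker chain.
// The KeypointSaver is taken ready-made; files are named after datum.name when
// set, otherwise after datum.id, under the "hand_left"/"hand_right" keys.
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

std::shared_ptr<op::WorkerConsumer<TDatumsSP>> makeHandSaverWorker(
    const std::shared_ptr<op::KeypointSaver>& keypointSaver)
{
    return std::make_shared<op::WHandSaver<TDatumsSP>>(keypointSaver);
}
```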
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    void initializationOnThread()
    Definition: wHandSaver.hpp:50
    +
    WHandSaver(const std::shared_ptr< KeypointSaver > &keypointSaver)
    Definition: wHandSaver.hpp:39
    +
    virtual ~WHandSaver()
    Definition: wHandSaver.hpp:45
    +
    void workConsumer(const TDatums &tDatums)
    Definition: wHandSaver.hpp:55
    + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_heat_map_saver_8hpp.html b/web/html/doc/w_heat_map_saver_8hpp.html new file mode 100644 index 000000000..81afe9e04 --- /dev/null +++ b/web/html/doc/w_heat_map_saver_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/filestream/wHeatMapSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wHeatMapSaver.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WHeatMapSaver< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WHeatMapSaver)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_heat_map_saver_8hpp.js b/web/html/doc/w_heat_map_saver_8hpp.js new file mode 100644 index 000000000..9977294a7 --- /dev/null +++ b/web/html/doc/w_heat_map_saver_8hpp.js @@ -0,0 +1,5 @@ +var w_heat_map_saver_8hpp = +[ + [ "WHeatMapSaver", "classop_1_1_w_heat_map_saver.html", "classop_1_1_w_heat_map_saver" ], + [ "COMPILE_TEMPLATE_DATUM", "w_heat_map_saver_8hpp.html#a7ac10b9f503668695643c366e25f3b68", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_heat_map_saver_8hpp_source.html b/web/html/doc/w_heat_map_saver_8hpp_source.html new file mode 100644 index 000000000..1a48aaa2a --- /dev/null +++ b/web/html/doc/w_heat_map_saver_8hpp_source.html @@ -0,0 +1,211 @@ + + + + + + + +OpenPose: include/openpose/filestream/wHeatMapSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wHeatMapSaver.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_W_HEAT_MAP_SAVER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_W_HEAT_MAP_SAVER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WHeatMapSaver : public WorkerConsumer<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WHeatMapSaver(const std::shared_ptr<HeatMapSaver>& heatMapSaver);
    +
    15 
    +
    16  virtual ~WHeatMapSaver();
    +
    17 
    + +
    19 
    +
    20  void workConsumer(const TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<HeatMapSaver> spHeatMapSaver;
    +
    24 
    +
    25  DELETE_COPY(WHeatMapSaver);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WHeatMapSaver<TDatums>::WHeatMapSaver(const std::shared_ptr<HeatMapSaver>& heatMapSaver) :
    +
    39  spHeatMapSaver{heatMapSaver}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WHeatMapSaver<TDatums>::workConsumer(const TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // T* to T
    +
    65  auto& tDatumsNoPtr = *tDatums;
    +
    66  // Record pose heatmap image(s) on disk
    +
    67  std::vector<Array<float>> poseHeatMaps(tDatumsNoPtr.size());
    +
    68  for (auto i = 0u; i < tDatumsNoPtr.size(); i++)
    +
    69  poseHeatMaps[i] = tDatumsNoPtr[i]->poseHeatMaps;
    +
    70  const auto fileName = (!tDatumsNoPtr[0]->name.empty()
    +
    71  ? tDatumsNoPtr[0]->name : std::to_string(tDatumsNoPtr[0]->id)) + "_pose_heatmaps";
    +
    72  spHeatMapSaver->saveHeatMaps(poseHeatMaps, fileName);
    +
    73  // Profiling speed
    +
    74  Profiler::timerEnd(profilerKey);
    + +
    76  __LINE__, __FUNCTION__, __FILE__);
    +
    77  // Debugging log
    +
    78  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    79  }
    +
    80  }
    +
    81  catch (const std::exception& e)
    +
    82  {
    +
    83  this->stop();
    +
    84  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    85  }
    +
    86  }
    +
    87 
    + +
    89 }
    +
    90 
    +
    91 #endif // OPENPOSE_FILESTREAM_W_HEAT_MAP_SAVER_HPP
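The heat-map consumer saves one `Array<float>` of pose heat maps per datum and derives the base file name from the first datum (name or id) plus the `_pose_heatmaps` suffix; the heat maps are only non-empty when the pose extractor was configured to produce them. The same name-or-id fallback, as a tiny standalone helper mirroring the listing (illustration only, not OpenPose code):

```cpp
// Sketch: the file-name fallback used by WHeatMapSaver (and the other savers):
// prefer the datum's name, otherwise fall back to its numeric id, then append
// the saver-specific suffix.
#include <string>

std::string heatMapBaseName(const std::string& datumName, const unsigned long long datumId)
{
    return (!datumName.empty() ? datumName : std::to_string(datumId)) + "_pose_heatmaps";
}
```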
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    void initializationOnThread()
    +
    WHeatMapSaver(const std::shared_ptr< HeatMapSaver > &heatMapSaver)
    +
    void workConsumer(const TDatums &tDatums)
    +
    virtual ~WHeatMapSaver()
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_id_generator_8hpp.html b/web/html/doc/w_id_generator_8hpp.html new file mode 100644 index 000000000..b219eec62 --- /dev/null +++ b/web/html/doc/w_id_generator_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/wIdGenerator.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wIdGenerator.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WIdGenerator< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WIdGenerator)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_id_generator_8hpp.js b/web/html/doc/w_id_generator_8hpp.js new file mode 100644 index 000000000..0ae4f278e --- /dev/null +++ b/web/html/doc/w_id_generator_8hpp.js @@ -0,0 +1,5 @@ +var w_id_generator_8hpp = +[ + [ "WIdGenerator", "classop_1_1_w_id_generator.html", "classop_1_1_w_id_generator" ], + [ "COMPILE_TEMPLATE_DATUM", "w_id_generator_8hpp.html#ad22c543a4376e943b728e657fab5ed9f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_id_generator_8hpp_source.html b/web/html/doc/w_id_generator_8hpp_source.html new file mode 100644 index 000000000..aa4f3a3bf --- /dev/null +++ b/web/html/doc/w_id_generator_8hpp_source.html @@ -0,0 +1,211 @@ + + + + + + + +OpenPose: include/openpose/thread/wIdGenerator.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wIdGenerator.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_W_ID_GENERATOR_HPP
    +
    2 #define OPENPOSE_THREAD_W_ID_GENERATOR_HPP
    +
    3 
    +
    4 #include <queue> // std::priority_queue
    + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums>
    +
    12  class WIdGenerator : public Worker<TDatums>
    +
    13  {
    +
    14  public:
    +
    15  explicit WIdGenerator();
    +
    16 
    +
    17  virtual ~WIdGenerator();
    +
    18 
    + +
    20 
    +
    21  void work(TDatums& tDatums);
    +
    22 
    +
    23  private:
    +
    24  unsigned long long mGlobalCounter;
    +
    25 
    +
    26  DELETE_COPY(WIdGenerator);
    +
    27  };
    +
    28 }
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 
    +
    34 // Implementation
    + +
    36 namespace op
    +
    37 {
    +
    38  template<typename TDatums>
    + +
    40  mGlobalCounter{0ull}
    +
    41  {
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums>
    + +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums>
    + +
    51  {
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WIdGenerator<TDatums>::work(TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    65  // Add ID
    +
    66  for (auto& tDatumPtr : *tDatums)
    +
    67  // To avoid overwriting ID if e.g., custom input has already filled it
    +
    68  if (tDatumPtr->id == std::numeric_limits<unsigned long long>::max())
    +
    69  tDatumPtr->id = mGlobalCounter;
    +
    70  // Increase ID
    +
    71  const auto& tDatumPtr = (*tDatums)[0];
    +
    72  if (tDatumPtr->subId == tDatumPtr->subIdMax)
    +
    73  mGlobalCounter++;
    +
    74  // Profiling speed
    +
    75  Profiler::timerEnd(profilerKey);
    +
    76  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    77  // Debugging log
    +
    78  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    79  }
    +
    80  }
    +
    81  catch (const std::exception& e)
    +
    82  {
    +
    83  this->stop();
    +
    84  tDatums = nullptr;
    +
    85  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    86  }
    +
    87  }
    +
    88 
    + +
    90 }
    +
    91 
    +
    92 #endif // OPENPOSE_THREAD_W_ID_GENERATOR_HPP
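`WIdGenerator::work()` only assigns an id when the datum still carries the unset sentinel (`std::numeric_limits<unsigned long long>::max()`), so producers that already numbered their datums are left alone, and the global counter advances only when the first datum's `subId` equals `subIdMax`, i.e. once per completed group (for example, once per multi-camera frame set). A simplified standalone mimic of that rule on a plain struct, to make the behaviour concrete (not the OpenPose types):

```cpp
// Sketch: simplified mimic of WIdGenerator's counting rule, using a plain
// struct instead of op::Datum. Ids are only filled when still "unset", and the
// counter advances once per completed sub-group (subId == subIdMax).
#include <limits>
#include <vector>

struct MiniDatum
{
    unsigned long long id = std::numeric_limits<unsigned long long>::max(); // "unset"
    unsigned long long subId = 0ull;     // e.g., camera index within a frame
    unsigned long long subIdMax = 0ull;  // last camera index of that frame
};

void assignIds(std::vector<MiniDatum>& batch, unsigned long long& globalCounter)
{
    for (auto& datum : batch)
        if (datum.id == std::numeric_limits<unsigned long long>::max())
            datum.id = globalCounter;
    if (!batch.empty() && batch.front().subId == batch.front().subIdMax)
        globalCounter++; // one increment per completed frame group
}
```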
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    void work(TDatums &tDatums)
    +
    void initializationOnThread()
    + +
    virtual ~WIdGenerator()
    + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_image_saver_8hpp.html b/web/html/doc/w_image_saver_8hpp.html new file mode 100644 index 000000000..b5bdd37d1 --- /dev/null +++ b/web/html/doc/w_image_saver_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/filestream/wImageSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wImageSaver.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WImageSaver< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WImageSaver)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_image_saver_8hpp.js b/web/html/doc/w_image_saver_8hpp.js new file mode 100644 index 000000000..800cfa5fd --- /dev/null +++ b/web/html/doc/w_image_saver_8hpp.js @@ -0,0 +1,5 @@ +var w_image_saver_8hpp = +[ + [ "WImageSaver", "classop_1_1_w_image_saver.html", "classop_1_1_w_image_saver" ], + [ "COMPILE_TEMPLATE_DATUM", "w_image_saver_8hpp.html#a505ea16cc6c2c0068bbf4e7269dc8e0a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_image_saver_8hpp_source.html b/web/html/doc/w_image_saver_8hpp_source.html new file mode 100644 index 000000000..97ed54c05 --- /dev/null +++ b/web/html/doc/w_image_saver_8hpp_source.html @@ -0,0 +1,210 @@ + + + + + + + +OpenPose: include/openpose/filestream/wImageSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wImageSaver.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_W_IMAGE_SAVER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_W_IMAGE_SAVER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WImageSaver : public WorkerConsumer<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WImageSaver(const std::shared_ptr<ImageSaver>& imageSaver);
    +
    15 
    +
    16  virtual ~WImageSaver();
    +
    17 
    + +
    19 
    +
    20  void workConsumer(const TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<ImageSaver> spImageSaver;
    +
    24 
    +
    25  DELETE_COPY(WImageSaver);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WImageSaver<TDatums>::WImageSaver(const std::shared_ptr<ImageSaver>& imageSaver) :
    +
    39  spImageSaver{imageSaver}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WImageSaver<TDatums>::workConsumer(const TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // T* to T
    +
    65  auto& tDatumsNoPtr = *tDatums;
    +
    66  // Record image(s) on disk
    +
    67  std::vector<Matrix> opOutputDatas(tDatumsNoPtr.size());
    +
    68  for (auto i = 0u; i < tDatumsNoPtr.size(); i++)
    +
    69  opOutputDatas[i] = tDatumsNoPtr[i]->cvOutputData;
    +
    70  const auto fileName = (!tDatumsNoPtr[0]->name.empty()
    +
    71  ? tDatumsNoPtr[0]->name : std::to_string(tDatumsNoPtr[0]->id));
    +
    72  spImageSaver->saveImages(opOutputDatas, fileName);
    +
    73  // Profiling speed
    +
    74  Profiler::timerEnd(profilerKey);
    +
    75  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    76  // Debugging log
    +
    77  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    78  }
    +
    79  }
    +
    80  catch (const std::exception& e)
    +
    81  {
    +
    82  this->stop();
    +
    83  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    84  }
    +
    85  }
    +
    86 
    + +
    88 }
    +
    89 
    +
    90 #endif // OPENPOSE_FILESTREAM_W_IMAGE_SAVER_HPP
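`WImageSaver` gathers every datum's `cvOutputData` into a vector of `Matrix` and hands the whole batch to `ImageSaver::saveImages` under the usual name-or-id base name. Note one difference from the `Worker` classes above: on an exception a consumer stops itself and reports the error but does not null the batch, since `workConsumer` only receives a const reference. A minimal consumer skeleton following that pattern (class name and per-batch job are placeholders):

```cpp
// Sketch: a user-defined WorkerConsumer following the same pattern as
// WImageSaver (read-only access, stop-and-report on failure, no nulling of the batch).
#include <exception>
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

template<typename TDatums>
class WMySink : public op::WorkerConsumer<TDatums>
{
public:
    void initializationOnThread() {}
    void workConsumer(const TDatums& tDatums)
    {
        try
        {
            if (op::checkNoNullNorEmpty(tDatums))
                for (const auto& tDatumPtr : *tDatums)
                {
                    // ... read-only use of tDatumPtr (e.g., tDatumPtr->cvOutputData) ...
                }
        }
        catch (const std::exception& e)
        {
            this->stop();
            op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
};
```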
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WImageSaver(const std::shared_ptr< ImageSaver > &imageSaver)
    Definition: wImageSaver.hpp:38
    +
    void workConsumer(const TDatums &tDatums)
    Definition: wImageSaver.hpp:54
    +
    void initializationOnThread()
    Definition: wImageSaver.hpp:49
    +
    virtual ~WImageSaver()
    Definition: wImageSaver.hpp:44
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_joint_angle_estimation_8hpp.html b/web/html/doc/w_joint_angle_estimation_8hpp.html new file mode 100644 index 000000000..9c3323a26 --- /dev/null +++ b/web/html/doc/w_joint_angle_estimation_8hpp.html @@ -0,0 +1,103 @@ + + + + + + + +OpenPose: include/openpose/3d/wJointAngleEstimation.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wJointAngleEstimation.hpp File Reference
    +
    + +
    + + + + diff --git a/web/html/doc/w_joint_angle_estimation_8hpp_source.html b/web/html/doc/w_joint_angle_estimation_8hpp_source.html new file mode 100644 index 000000000..30cc0596a --- /dev/null +++ b/web/html/doc/w_joint_angle_estimation_8hpp_source.html @@ -0,0 +1,212 @@ + + + + + + + +OpenPose: include/openpose/3d/wJointAngleEstimation.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wJointAngleEstimation.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifdef USE_3D_ADAM_MODEL
    +
    2 #ifndef OPENPOSE_3D_W_JOINT_ANGLE_ESTIMATION_HPP
    +
    3 #define OPENPOSE_3D_W_JOINT_ANGLE_ESTIMATION_HPP
    +
    4 
    + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums>
    +
    12  class WJointAngleEstimation : public Worker<TDatums>
    +
    13  {
    +
    14  public:
    +
    15  explicit WJointAngleEstimation(const std::shared_ptr<JointAngleEstimation>& jointAngleEstimation);
    +
    16 
    +
    17  virtual ~WJointAngleEstimation();
    +
    18 
    +
    19  void initializationOnThread();
    +
    20 
    +
    21  void work(TDatums& tDatums);
    +
    22 
    +
    23  private:
    +
    24  const std::shared_ptr<JointAngleEstimation> spJointAngleEstimation;
    +
    25 
    +
    26  DELETE_COPY(WJointAngleEstimation);
    +
    27  };
    +
    28 }
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 
    +
    34 // Implementation
    + +
    36 namespace op
    +
    37 {
    +
    38  template<typename TDatums>
    +
    39  WJointAngleEstimation<TDatums>::WJointAngleEstimation(const std::shared_ptr<JointAngleEstimation>& jointAngleEstimation) :
    +
    40  spJointAngleEstimation{jointAngleEstimation}
    +
    41  {
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums>
    +
    45  WJointAngleEstimation<TDatums>::~WJointAngleEstimation()
    +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums>
    +
    50  void WJointAngleEstimation<TDatums>::initializationOnThread()
    +
    51  {
    +
    52  try
    +
    53  {
    +
    54  spJointAngleEstimation->initializationOnThread();
    +
    55  }
    +
    56  catch (const std::exception& e)
    +
    57  {
    +
    58  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    59  }
    +
    60  }
    +
    61 
    +
    62  template<typename TDatums>
    +
    63  void WJointAngleEstimation<TDatums>::work(TDatums& tDatums)
    +
    64  {
    +
    65  try
    +
    66  {
    +
    67  if (checkNoNullNorEmpty(tDatums))
    +
    68  {
    +
    69  // Debugging log
    +
    70  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    71  // Profiling speed
    +
    72  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    73  // Input
    +
    74  auto& tDatumPtr = tDatums->at(0);
    +
    75  const auto& poseKeypoints3D = tDatumPtr->poseKeypoints3D;
    +
    76  const auto& faceKeypoints3D = tDatumPtr->faceKeypoints3D;
    +
    77  const auto& handKeypoints3D = tDatumPtr->handKeypoints3D;
    +
    78  // Running Adam model
    +
    79  spJointAngleEstimation->adamFastFit(
    +
    80  tDatumPtr->adamPose, tDatumPtr->adamTranslation, tDatumPtr->vtVec, tDatumPtr->j0Vec,
    +
    81  tDatumPtr->adamFaceCoeffsExp, poseKeypoints3D, faceKeypoints3D, handKeypoints3D);
    +
    82  // Profiling speed
    +
    83  Profiler::timerEnd(profilerKey);
    +
    84  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    85  // Debugging log
    +
    86  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    87  }
    +
    88  }
    +
    89  catch (const std::exception& e)
    +
    90  {
    +
    91  this->stop();
    +
    92  tDatums = nullptr;
    +
    93  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    94  }
    +
    95  }
    +
    96 
    +
    97  COMPILE_TEMPLATE_DATUM(WJointAngleEstimation);
    +
    98 }
    +
    99 
    +
    100 #endif // OPENPOSE_3D_W_JOINT_ANGLE_ESTIMATION_HPP
    +
    101 #endif
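Unlike the other headers in this diff, this one is wrapped in `#ifdef USE_3D_ADAM_MODEL`, so `WJointAngleEstimation` only exists when OpenPose was built with the Adam 3-D model enabled. User code that refers to it should carry the same guard; a sketch, assuming the macro is defined by the build system when that option is on:

```cpp
// Sketch: guarding user code on the same build flag as the header itself, so
// it still compiles when OpenPose was built without the Adam 3-D model.
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

#ifdef USE_3D_ADAM_MODEL
std::shared_ptr<op::Worker<TDatumsSP>> makeJointAngleWorker(
    const std::shared_ptr<op::JointAngleEstimation>& jointAngleEstimation)
{
    return std::make_shared<op::WJointAngleEstimation<TDatumsSP>>(jointAngleEstimation);
}
#endif
```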
    + +
    #define COMPILE_TEMPLATE_DATUM(templateName)
    Definition: datum.hpp:407
    + +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + +
    +
    + + + + diff --git a/web/html/doc/w_keep_top_n_people_8hpp.html b/web/html/doc/w_keep_top_n_people_8hpp.html new file mode 100644 index 000000000..0f885c91b --- /dev/null +++ b/web/html/doc/w_keep_top_n_people_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/core/wKeepTopNPeople.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wKeepTopNPeople.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WKeepTopNPeople< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WKeepTopNPeople)
     
    +
    +
    + + + + diff --git a/web/html/doc/w_keep_top_n_people_8hpp.js b/web/html/doc/w_keep_top_n_people_8hpp.js new file mode 100644 index 000000000..a3c131590 --- /dev/null +++ b/web/html/doc/w_keep_top_n_people_8hpp.js @@ -0,0 +1,5 @@ +var w_keep_top_n_people_8hpp = +[ + [ "WKeepTopNPeople", "classop_1_1_w_keep_top_n_people.html", "classop_1_1_w_keep_top_n_people" ], + [ "COMPILE_TEMPLATE_DATUM", "w_keep_top_n_people_8hpp.html#aaee32c4c68404e5086844bcb911b7a20", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_keep_top_n_people_8hpp_source.html b/web/html/doc/w_keep_top_n_people_8hpp_source.html new file mode 100644 index 000000000..983ea9ef3 --- /dev/null +++ b/web/html/doc/w_keep_top_n_people_8hpp_source.html @@ -0,0 +1,212 @@ + + + + + + + +OpenPose: include/openpose/core/wKeepTopNPeople.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wKeepTopNPeople.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_W_KEEP_TOP_N_PEOPLE_HPP
    +
    2 #define OPENPOSE_CORE_W_KEEP_TOP_N_PEOPLE_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WKeepTopNPeople : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WKeepTopNPeople(const std::shared_ptr<KeepTopNPeople>& keepTopNPeople);
    +
    15 
    +
    16  virtual ~WKeepTopNPeople();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<KeepTopNPeople> spKeepTopNPeople;
    +
    24  };
    +
    25 }
    +
    26 
    +
    27 
    +
    28 
    +
    29 
    +
    30 
    +
    31 // Implementation
    + +
    33 namespace op
    +
    34 {
    +
    35  template<typename TDatums>
    +
    36  WKeepTopNPeople<TDatums>::WKeepTopNPeople(const std::shared_ptr<KeepTopNPeople>& keepTopNPeople) :
    +
    37  spKeepTopNPeople{keepTopNPeople}
    +
    38  {
    +
    39  }
    +
    40 
    +
    41  template<typename TDatums>
    + +
    43  {
    +
    44  }
    +
    45 
    +
    46  template<typename TDatums>
    + +
    48  {
    +
    49  }
    +
    50 
    +
    51  template<typename TDatums>
    +
    52  void WKeepTopNPeople<TDatums>::work(TDatums& tDatums)
    +
    53  {
    +
    54  try
    +
    55  {
    +
    56  if (checkNoNullNorEmpty(tDatums))
    +
    57  {
    +
    58  // Debugging log
    +
    59  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    60  // Profiling speed
    +
    61  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
62  // Keep only the top-N people (by pose score)
    +
    63  for (auto& tDatumPtr : *tDatums)
    +
    64  {
    +
    65  tDatumPtr->poseKeypoints = spKeepTopNPeople->keepTopPeople(
    +
    66  tDatumPtr->poseKeypoints, tDatumPtr->poseScores);
    +
    67  tDatumPtr->faceKeypoints = spKeepTopNPeople->keepTopPeople(
    +
    68  tDatumPtr->faceKeypoints, tDatumPtr->poseScores);
    +
    69  tDatumPtr->handKeypoints[0] = spKeepTopNPeople->keepTopPeople(
    +
    70  tDatumPtr->handKeypoints[0], tDatumPtr->poseScores);
    +
    71  tDatumPtr->handKeypoints[1] = spKeepTopNPeople->keepTopPeople(
    +
    72  tDatumPtr->handKeypoints[1], tDatumPtr->poseScores);
    +
    73  }
    +
    74  // Profiling speed
    +
    75  Profiler::timerEnd(profilerKey);
    +
    76  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    77  // Debugging log
    +
    78  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    79  }
    +
    80  }
    +
    81  catch (const std::exception& e)
    +
    82  {
    +
    83  this->stop();
    +
    84  tDatums = nullptr;
    +
    85  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    86  }
    +
    87  }
    +
    88 
    + +
    90 }
    +
    91 
    +
    92 #endif // OPENPOSE_CORE_W_KEEP_TOP_N_PEOPLE_HPP
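`WKeepTopNPeople` filters the face and both hand arrays with the pose scores, so the same subset of people survives in every channel; the value of N is fixed when the underlying `KeepTopNPeople` is constructed, outside this header. A standalone illustration of the underlying idea, selecting the indices of the N highest-scoring people (illustration only, not the OpenPose implementation):

```cpp
// Sketch: "keep top-N people" on a plain score vector. The real selection
// lives in op::KeepTopNPeople and is applied consistently to pose, face and
// both hands using poseScores.
#include <algorithm>
#include <cstddef>
#include <numeric>
#include <vector>

std::vector<std::size_t> topNPeopleIndices(const std::vector<float>& poseScores, std::size_t n)
{
    std::vector<std::size_t> indices(poseScores.size());
    std::iota(indices.begin(), indices.end(), std::size_t{0});
    n = std::min(n, indices.size());
    std::partial_sort(indices.begin(), indices.begin() + n, indices.end(),
                      [&](std::size_t a, std::size_t b) { return poseScores[a] > poseScores[b]; });
    indices.resize(n);
    return indices;
}
```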
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + +
    void work(TDatums &tDatums)
    + +
    WKeepTopNPeople(const std::shared_ptr< KeepTopNPeople > &keepTopNPeople)
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
    + + + + diff --git a/web/html/doc/w_keypoint_scaler_8hpp.html b/web/html/doc/w_keypoint_scaler_8hpp.html new file mode 100644 index 000000000..6566eebec --- /dev/null +++ b/web/html/doc/w_keypoint_scaler_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/core/wKeypointScaler.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wKeypointScaler.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    class  op::WKeypointScaler< TDatums >
     
    + + + +

    +Namespaces

     op
     
    + + + +

    +Functions

     op::COMPILE_TEMPLATE_DATUM (WKeypointScaler)
     
    +
    +
diff --git a/web/html/doc/w_keypoint_scaler_8hpp.js b/web/html/doc/w_keypoint_scaler_8hpp.js
new file mode 100644
index 000000000..43f2a788f
--- /dev/null
+++ b/web/html/doc/w_keypoint_scaler_8hpp.js
@@ -0,0 +1,5 @@
var w_keypoint_scaler_8hpp =
[
    [ "WKeypointScaler", "classop_1_1_w_keypoint_scaler.html", "classop_1_1_w_keypoint_scaler" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_keypoint_scaler_8hpp.html#a47758c703fccdbb65c26dc7bc4022237", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_keypoint_scaler_8hpp_source.html b/web/html/doc/w_keypoint_scaler_8hpp_source.html
new file mode 100644
index 000000000..dbb56431f
--- /dev/null
+++ b/web/html/doc/w_keypoint_scaler_8hpp_source.html
@@ -0,0 +1,215 @@
OpenPose: include/openpose/core/wKeypointScaler.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_CORE_W_KEYPOINT_SCALER_HPP
#define OPENPOSE_CORE_W_KEYPOINT_SCALER_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WKeypointScaler : public Worker<TDatums>
    {
    public:
        explicit WKeypointScaler(const std::shared_ptr<KeypointScaler>& keypointScaler);

        virtual ~WKeypointScaler();

        void initializationOnThread();

        void work(TDatums& tDatums);

    private:
        std::shared_ptr<KeypointScaler> spKeypointScaler;
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WKeypointScaler<TDatums>::WKeypointScaler(const std::shared_ptr<KeypointScaler>& keypointScaler) :
        spKeypointScaler{keypointScaler}
    {
    }

    template<typename TDatums>
    WKeypointScaler<TDatums>::~WKeypointScaler()
    {
    }

    template<typename TDatums>
    void WKeypointScaler<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WKeypointScaler<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Rescale pose data
                for (auto& tDatumPtr : *tDatums)
                {
                    std::vector<Array<float>> arraysToScale{
                        tDatumPtr->poseKeypoints, tDatumPtr->handKeypoints[0],
                        tDatumPtr->handKeypoints[1], tDatumPtr->faceKeypoints};
                    spKeypointScaler->scale(
                        arraysToScale, tDatumPtr->scaleInputToOutput, tDatumPtr->scaleNetToOutput,
                        Point<int>{tDatumPtr->cvInputData.cols(), tDatumPtr->cvInputData.rows()});
                    // Rescale part candidates
                    spKeypointScaler->scale(
                        tDatumPtr->poseCandidates, tDatumPtr->scaleInputToOutput, tDatumPtr->scaleNetToOutput,
                        Point<int>{tDatumPtr->cvInputData.cols(), tDatumPtr->cvInputData.rows()});
                }
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WKeypointScaler)
}

#endif // OPENPOSE_CORE_W_KEYPOINT_SCALER_HPP
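The listing above is representative of every worker header in this set: a thin op::Worker<TDatums> wrapper that forwards each datum in the queue to an underlying processing class (here op::KeypointScaler). The sketch below drives such a worker by hand instead of through op::ThreadManager or op::Wrapper; it is illustrative only. The TDatumsSP alias follows the official C++ examples, and the op::ScaleMode passed to KeypointScaler is an assumed configuration, not something taken from these generated pages.

// Hand-driven worker sketch (illustrative only; in a real pipeline op::ThreadManager
// or op::Wrapper owns and schedules the workers).
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

// Datum-container alias used by the official OpenPose C++ examples.
using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

int main()
{
    // Assumption: rescale keypoints to the input-image coordinate system.
    const auto keypointScaler = std::make_shared<op::KeypointScaler>(op::ScaleMode::InputResolution);
    op::WKeypointScaler<TDatumsSP> wKeypointScaler{keypointScaler};
    wKeypointScaler.initializationOnThread();

    // One empty datum; upstream workers would normally have filled poseKeypoints,
    // cvInputData and the scale members before this point.
    auto datums = std::make_shared<std::vector<std::shared_ptr<op::Datum>>>();
    datums->emplace_back(std::make_shared<op::Datum>());
    wKeypointScaler.work(datums);   // rescales pose/face/hand keypoints in place
    return 0;
}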
diff --git a/web/html/doc/w_op_output_to_cv_mat_8hpp.html b/web/html/doc/w_op_output_to_cv_mat_8hpp.html
new file mode 100644
index 000000000..f37099ecc
--- /dev/null
+++ b/web/html/doc/w_op_output_to_cv_mat_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/core/wOpOutputToCvMat.hpp File Reference
Go to the source code of this file.
Classes:     class op::WOpOutputToCvMat< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WOpOutputToCvMat)
diff --git a/web/html/doc/w_op_output_to_cv_mat_8hpp.js b/web/html/doc/w_op_output_to_cv_mat_8hpp.js
new file mode 100644
index 000000000..cfaf27449
--- /dev/null
+++ b/web/html/doc/w_op_output_to_cv_mat_8hpp.js
@@ -0,0 +1,5 @@
var w_op_output_to_cv_mat_8hpp =
[
    [ "WOpOutputToCvMat", "classop_1_1_w_op_output_to_cv_mat.html", "classop_1_1_w_op_output_to_cv_mat" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_op_output_to_cv_mat_8hpp.html#a1d9f50688522ed7368acc33a09ae9ece", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_op_output_to_cv_mat_8hpp_source.html b/web/html/doc/w_op_output_to_cv_mat_8hpp_source.html
new file mode 100644
index 000000000..1b64c18d2
--- /dev/null
+++ b/web/html/doc/w_op_output_to_cv_mat_8hpp_source.html
@@ -0,0 +1,205 @@
OpenPose: include/openpose/core/wOpOutputToCvMat.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_CORE_W_OP_OUTPUT_TO_CV_MAT_HPP
#define OPENPOSE_CORE_W_OP_OUTPUT_TO_CV_MAT_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WOpOutputToCvMat : public Worker<TDatums>
    {
    public:
        explicit WOpOutputToCvMat(const std::shared_ptr<OpOutputToCvMat>& opOutputToCvMat);

        virtual ~WOpOutputToCvMat();

        void initializationOnThread();

        void work(TDatums& tDatums);

    private:
        const std::shared_ptr<OpOutputToCvMat> spOpOutputToCvMat;

        DELETE_COPY(WOpOutputToCvMat);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WOpOutputToCvMat<TDatums>::WOpOutputToCvMat(const std::shared_ptr<OpOutputToCvMat>& opOutputToCvMat) :
        spOpOutputToCvMat{opOutputToCvMat}
    {
    }

    template<typename TDatums>
    WOpOutputToCvMat<TDatums>::~WOpOutputToCvMat()
    {
    }

    template<typename TDatums>
    void WOpOutputToCvMat<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WOpOutputToCvMat<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // float* -> cv::Mat
                for (auto& tDatumPtr : *tDatums)
                    tDatumPtr->cvOutputData = spOpOutputToCvMat->formatToCvMat(tDatumPtr->outputData);
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WOpOutputToCvMat)
}

#endif // OPENPOSE_CORE_W_OP_OUTPUT_TO_CV_MAT_HPP
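WOpOutputToCvMat is the step that converts the float output buffer (Datum::outputData) back into an image stored in Datum::cvOutputData. A minimal display sketch follows, assuming the OP_OP2CVCONSTMAT helper macro from openpose/core/matrix.hpp and a datum that has already been through such a worker; treat the macro and the empty() check as assumptions if your OpenPose version differs.

// Display sketch: read the frame that a pipeline containing WOpOutputToCvMat has
// left in Datum::cvOutputData. Assumes OpenPose and OpenCV are available.
#include <memory>
#include <opencv2/highgui.hpp>
#include <openpose/headers.hpp>

void showRenderedFrame(const std::shared_ptr<op::Datum>& datumPtr)
{
    if (datumPtr != nullptr && !datumPtr->cvOutputData.empty())
    {
        // op::Matrix -> cv::Mat; OP_OP2CVCONSTMAT is the helper used by the official
        // examples (assumed to exist in this OpenPose version).
        const cv::Mat cvMat = OP_OP2CVCONSTMAT(datumPtr->cvOutputData);
        cv::imshow("OpenPose output", cvMat);
        cv::waitKey(0);
    }
}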
diff --git a/web/html/doc/w_people_json_saver_8hpp.html b/web/html/doc/w_people_json_saver_8hpp.html
new file mode 100644
index 000000000..aeb4be469
--- /dev/null
+++ b/web/html/doc/w_people_json_saver_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/filestream/wPeopleJsonSaver.hpp File Reference
Go to the source code of this file.
Classes:     class op::WPeopleJsonSaver< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WPeopleJsonSaver)
diff --git a/web/html/doc/w_people_json_saver_8hpp.js b/web/html/doc/w_people_json_saver_8hpp.js
new file mode 100644
index 000000000..51318882f
--- /dev/null
+++ b/web/html/doc/w_people_json_saver_8hpp.js
@@ -0,0 +1,5 @@
var w_people_json_saver_8hpp =
[
    [ "WPeopleJsonSaver", "classop_1_1_w_people_json_saver.html", "classop_1_1_w_people_json_saver" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_people_json_saver_8hpp.html#a774871462f7fefb8cadea1e49f501e45", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_people_json_saver_8hpp_source.html b/web/html/doc/w_people_json_saver_8hpp_source.html
new file mode 100644
index 000000000..195c58989
--- /dev/null
+++ b/web/html/doc/w_people_json_saver_8hpp_source.html
@@ -0,0 +1,234 @@
OpenPose: include/openpose/filestream/wPeopleJsonSaver.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_FILESTREAM_W_PEOPLE_JSON_SAVER_HPP
#define OPENPOSE_FILESTREAM_W_PEOPLE_JSON_SAVER_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WPeopleJsonSaver : public WorkerConsumer<TDatums>
    {
    public:
        explicit WPeopleJsonSaver(const std::shared_ptr<PeopleJsonSaver>& peopleJsonSaver);

        virtual ~WPeopleJsonSaver();

        void initializationOnThread();

        void workConsumer(const TDatums& tDatums);

    private:
        const std::shared_ptr<PeopleJsonSaver> spPeopleJsonSaver;

        DELETE_COPY(WPeopleJsonSaver);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WPeopleJsonSaver<TDatums>::WPeopleJsonSaver(const std::shared_ptr<PeopleJsonSaver>& peopleJsonSaver) :
        spPeopleJsonSaver{peopleJsonSaver}
    {
    }

    template<typename TDatums>
    WPeopleJsonSaver<TDatums>::~WPeopleJsonSaver()
    {
    }

    template<typename TDatums>
    void WPeopleJsonSaver<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WPeopleJsonSaver<TDatums>::workConsumer(const TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Save body/face/hand keypoints to JSON file
                const auto& tDatumFirstPtr = (*tDatums)[0];
                const auto baseFileName = (!tDatumFirstPtr->name.empty() ? tDatumFirstPtr->name
                                               : std::to_string(tDatumFirstPtr->id)) + "_keypoints";
                const bool humanReadable = false;
                for (auto i = 0u ; i < tDatums->size() ; i++)
                {
                    const auto& tDatumPtr = (*tDatums)[i];
                    // const auto fileName = baseFileName;
                    const auto fileName = baseFileName + (i != 0 ? "_" + std::to_string(i) : "");

                    // Pose IDs from long long to float
                    Array<float> poseIds{tDatumPtr->poseIds};

                    const std::vector<std::pair<Array<float>, std::string>> keypointVector{
                        // Pose IDs
                        std::make_pair(poseIds, "person_id"),
                        // 2D
                        std::make_pair(tDatumPtr->poseKeypoints, "pose_keypoints_2d"),
                        std::make_pair(tDatumPtr->faceKeypoints, "face_keypoints_2d"),
                        std::make_pair(tDatumPtr->handKeypoints[0], "hand_left_keypoints_2d"),
                        std::make_pair(tDatumPtr->handKeypoints[1], "hand_right_keypoints_2d"),
                        // 3D
                        std::make_pair(tDatumPtr->poseKeypoints3D, "pose_keypoints_3d"),
                        std::make_pair(tDatumPtr->faceKeypoints3D, "face_keypoints_3d"),
                        std::make_pair(tDatumPtr->handKeypoints3D[0], "hand_left_keypoints_3d"),
                        std::make_pair(tDatumPtr->handKeypoints3D[1], "hand_right_keypoints_3d")
                    };
                    // Save keypoints
                    spPeopleJsonSaver->save(
                        keypointVector, tDatumPtr->poseCandidates, fileName, humanReadable);
                }
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WPeopleJsonSaver)
}

#endif // OPENPOSE_FILESTREAM_W_PEOPLE_JSON_SAVER_HPP
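The file-naming rule in workConsumer above is easy to miss: the first datum's name (or, if empty, its id) plus a "_keypoints" suffix, with "_1", "_2", and so on appended for the additional views of a multi-camera datum group. A stand-alone restatement using only the standard library (the ".json" extension is presumably appended inside PeopleJsonSaver::save):

// Restatement of the naming rule used in WPeopleJsonSaver::workConsumer (illustrative).
#include <cstddef>
#include <string>
#include <vector>

std::vector<std::string> peopleJsonBaseNames(const std::string& datumName,
                                             const unsigned long long datumId,
                                             const std::size_t numberViews)
{
    const auto baseFileName =
        (!datumName.empty() ? datumName : std::to_string(datumId)) + "_keypoints";
    std::vector<std::string> names;
    for (std::size_t i = 0; i < numberViews; i++)
        names.push_back(baseFileName + (i != 0 ? "_" + std::to_string(i) : ""));
    return names;   // e.g. {"frame12_keypoints", "frame12_keypoints_1", ...}
}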
diff --git a/web/html/doc/w_person_id_extractor_8hpp.html b/web/html/doc/w_person_id_extractor_8hpp.html
new file mode 100644
index 000000000..75ff4f11e
--- /dev/null
+++ b/web/html/doc/w_person_id_extractor_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/tracking/wPersonIdExtractor.hpp File Reference
Go to the source code of this file.
Classes:     class op::WPersonIdExtractor< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WPersonIdExtractor)
diff --git a/web/html/doc/w_person_id_extractor_8hpp.js b/web/html/doc/w_person_id_extractor_8hpp.js
new file mode 100644
index 000000000..7d884acfd
--- /dev/null
+++ b/web/html/doc/w_person_id_extractor_8hpp.js
@@ -0,0 +1,5 @@
var w_person_id_extractor_8hpp =
[
    [ "WPersonIdExtractor", "classop_1_1_w_person_id_extractor.html", "classop_1_1_w_person_id_extractor" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_person_id_extractor_8hpp.html#a674a652ad38b355285417529fc050847", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_person_id_extractor_8hpp_source.html b/web/html/doc/w_person_id_extractor_8hpp_source.html
new file mode 100644
index 000000000..a16bb87e0
--- /dev/null
+++ b/web/html/doc/w_person_id_extractor_8hpp_source.html
@@ -0,0 +1,206 @@
OpenPose: include/openpose/tracking/wPersonIdExtractor.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_TRACKING_W_PERSON_ID_EXTRACTOR_HPP
#define OPENPOSE_TRACKING_W_PERSON_ID_EXTRACTOR_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WPersonIdExtractor : public Worker<TDatums>
    {
    public:
        explicit WPersonIdExtractor(const std::shared_ptr<PersonIdExtractor>& personIdExtractor);

        virtual ~WPersonIdExtractor();

        void initializationOnThread();

        void work(TDatums& tDatums);

    private:
        std::shared_ptr<PersonIdExtractor> spPersonIdExtractor;

        DELETE_COPY(WPersonIdExtractor);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WPersonIdExtractor<TDatums>::WPersonIdExtractor(const std::shared_ptr<PersonIdExtractor>& personIdExtractor) :
        spPersonIdExtractor{personIdExtractor}
    {
    }

    template<typename TDatums>
    WPersonIdExtractor<TDatums>::~WPersonIdExtractor()
    {
    }

    template<typename TDatums>
    void WPersonIdExtractor<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WPersonIdExtractor<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Render people pose
                for (auto& tDatumPtr : *tDatums)
                    tDatumPtr->poseIds = spPersonIdExtractor->extractIds(
                        tDatumPtr->poseKeypoints, tDatumPtr->cvInputData);
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WPersonIdExtractor)
}

#endif // OPENPOSE_TRACKING_W_PERSON_ID_EXTRACTOR_HPP
diff --git a/web/html/doc/w_pose_extractor_8hpp.html b/web/html/doc/w_pose_extractor_8hpp.html
new file mode 100644
index 000000000..84a49347a
--- /dev/null
+++ b/web/html/doc/w_pose_extractor_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/pose/wPoseExtractor.hpp File Reference
Go to the source code of this file.
Classes:     class op::WPoseExtractor< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WPoseExtractor)
diff --git a/web/html/doc/w_pose_extractor_8hpp.js b/web/html/doc/w_pose_extractor_8hpp.js
new file mode 100644
index 000000000..e7dfc5329
--- /dev/null
+++ b/web/html/doc/w_pose_extractor_8hpp.js
@@ -0,0 +1,5 @@
var w_pose_extractor_8hpp =
[
    [ "WPoseExtractor", "classop_1_1_w_pose_extractor.html", "classop_1_1_w_pose_extractor" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_pose_extractor_8hpp.html#a020603e3ad6326cb1dce43485157f768", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_pose_extractor_8hpp_source.html b/web/html/doc/w_pose_extractor_8hpp_source.html
new file mode 100644
index 000000000..a4e9aa9d4
--- /dev/null
+++ b/web/html/doc/w_pose_extractor_8hpp_source.html
@@ -0,0 +1,235 @@
OpenPose: include/openpose/pose/wPoseExtractor.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_POSE_W_POSE_EXTRACTOR_HPP
#define OPENPOSE_POSE_W_POSE_EXTRACTOR_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WPoseExtractor : public Worker<TDatums>
    {
    public:
        explicit WPoseExtractor(const std::shared_ptr<PoseExtractor>& poseExtractorSharedPtr);

        virtual ~WPoseExtractor();

        void initializationOnThread();

        void work(TDatums& tDatums);

    private:
        std::shared_ptr<PoseExtractor> spPoseExtractor;

        DELETE_COPY(WPoseExtractor);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WPoseExtractor<TDatums>::WPoseExtractor(const std::shared_ptr<PoseExtractor>& poseExtractorSharedPtr) :
        spPoseExtractor{poseExtractorSharedPtr}
    {
    }

    template<typename TDatums>
    WPoseExtractor<TDatums>::~WPoseExtractor()
    {
    }

    template<typename TDatums>
    void WPoseExtractor<TDatums>::initializationOnThread()
    {
        try
        {
            spPoseExtractor->initializationOnThread();
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    template<typename TDatums>
    void WPoseExtractor<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Extract people pose
                for (auto i = 0u ; i < tDatums->size() ; i++)
                // for (auto& tDatum : *tDatums)
                {
                    auto& tDatumPtr = (*tDatums)[i];
                    // OpenPose net forward pass
                    spPoseExtractor->forwardPass(
                        tDatumPtr->inputNetData, Point<int>{tDatumPtr->cvInputData.cols(), tDatumPtr->cvInputData.rows()},
                        tDatumPtr->scaleInputToNetInputs, tDatumPtr->poseNetOutput, tDatumPtr->id);
                    // OpenPose keypoint detector
                    tDatumPtr->poseCandidates = spPoseExtractor->getCandidatesCopy();
                    tDatumPtr->poseHeatMaps = spPoseExtractor->getHeatMapsCopy();
                    tDatumPtr->poseKeypoints = spPoseExtractor->getPoseKeypoints().clone();
                    tDatumPtr->poseScores = spPoseExtractor->getPoseScores().clone();
                    tDatumPtr->scaleNetToOutput = spPoseExtractor->getScaleNetToOutput();
                    // Keep desired top N people
                    spPoseExtractor->keepTopPeople(tDatumPtr->poseKeypoints, tDatumPtr->poseScores);
                    // ID extractor (experimental)
                    tDatumPtr->poseIds = spPoseExtractor->extractIdsLockThread(
                        tDatumPtr->poseKeypoints, tDatumPtr->cvInputData, i, tDatumPtr->id);
                    // Tracking (experimental)
                    spPoseExtractor->trackLockThread(
                        tDatumPtr->poseKeypoints, tDatumPtr->poseIds, tDatumPtr->cvInputData, i, tDatumPtr->id);
                }
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WPoseExtractor)
}

#endif // OPENPOSE_POSE_W_POSE_EXTRACTOR_HPP
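After a WPoseExtractor-style worker has run, Datum::poseKeypoints is an op::Array<float> indexed as {person, body part, x/y/score}; that layout is what downstream workers such as WKeypointScaler and WPoseRenderer rely on. A short reading sketch in the style of the official C++ API examples:

// Reading the keypoints a pose-extraction worker stores in a datum.
#include <memory>
#include <string>
#include <openpose/headers.hpp>

void printKeypoints(const std::shared_ptr<op::Datum>& datumPtr)
{
    const auto& poseKeypoints = datumPtr->poseKeypoints;   // op::Array<float>
    for (auto person = 0; person < poseKeypoints.getSize(0); person++)
        for (auto part = 0; part < poseKeypoints.getSize(1); part++)
        {
            const auto x     = poseKeypoints[{person, part, 0}];
            const auto y     = poseKeypoints[{person, part, 1}];
            const auto score = poseKeypoints[{person, part, 2}];
            op::opLog("Person " + std::to_string(person) + ", part " + std::to_string(part)
                          + ": (" + std::to_string(x) + ", " + std::to_string(y) + "), score "
                          + std::to_string(score), op::Priority::High);
        }
}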
diff --git a/web/html/doc/w_pose_extractor_net_8hpp.html b/web/html/doc/w_pose_extractor_net_8hpp.html
new file mode 100644
index 000000000..a8b8e0c05
--- /dev/null
+++ b/web/html/doc/w_pose_extractor_net_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/pose/wPoseExtractorNet.hpp File Reference
Go to the source code of this file.
Classes:     class op::WPoseExtractorNet< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WPoseExtractorNet)
diff --git a/web/html/doc/w_pose_extractor_net_8hpp.js b/web/html/doc/w_pose_extractor_net_8hpp.js
new file mode 100644
index 000000000..7d6f8ffeb
--- /dev/null
+++ b/web/html/doc/w_pose_extractor_net_8hpp.js
@@ -0,0 +1,5 @@
var w_pose_extractor_net_8hpp =
[
    [ "WPoseExtractorNet", "classop_1_1_w_pose_extractor_net.html", "classop_1_1_w_pose_extractor_net" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_pose_extractor_net_8hpp.html#ab1e242b1ae7ff3300324fbfedebb52fc", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_pose_extractor_net_8hpp_source.html b/web/html/doc/w_pose_extractor_net_8hpp_source.html
new file mode 100644
index 000000000..36933ed48
--- /dev/null
+++ b/web/html/doc/w_pose_extractor_net_8hpp_source.html
@@ -0,0 +1,223 @@
OpenPose: include/openpose/pose/wPoseExtractorNet.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_POSE_W_POSE_EXTRACTOR_NET_HPP
#define OPENPOSE_POSE_W_POSE_EXTRACTOR_NET_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WPoseExtractorNet : public Worker<TDatums>
    {
    public:
        explicit WPoseExtractorNet(const std::shared_ptr<PoseExtractorNet>& poseExtractorSharedPtr);

        virtual ~WPoseExtractorNet();

        void initializationOnThread();

        void work(TDatums& tDatums);

    private:
        std::shared_ptr<PoseExtractorNet> spPoseExtractorNet;

        DELETE_COPY(WPoseExtractorNet);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WPoseExtractorNet<TDatums>::WPoseExtractorNet(const std::shared_ptr<PoseExtractorNet>& poseExtractorSharedPtr) :
        spPoseExtractorNet{poseExtractorSharedPtr}
    {
    }

    template<typename TDatums>
    WPoseExtractorNet<TDatums>::~WPoseExtractorNet()
    {
    }

    template<typename TDatums>
    void WPoseExtractorNet<TDatums>::initializationOnThread()
    {
        try
        {
            spPoseExtractorNet->initializationOnThread();
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    template<typename TDatums>
    void WPoseExtractorNet<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Extract people pose
                for (auto& tDatumPtr : *tDatums)
                {
                    spPoseExtractorNet->forwardPass(
                        tDatumPtr->inputNetData, Point<int>{tDatumPtr->cvInputData.cols(), tDatumPtr->cvInputData.rows()},
                        tDatumPtr->scaleInputToNetInputs, tDatumPtr->poseNetOutput);
                    tDatumPtr->poseCandidates = spPoseExtractorNet->getCandidatesCopy();
                    tDatumPtr->poseHeatMaps = spPoseExtractorNet->getHeatMapsCopy();
                    tDatumPtr->poseKeypoints = spPoseExtractorNet->getPoseKeypoints().clone();
                    tDatumPtr->poseScores = spPoseExtractorNet->getPoseScores().clone();
                    tDatumPtr->scaleNetToOutput = spPoseExtractorNet->getScaleNetToOutput();
                }
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WPoseExtractorNet)
}

#endif // OPENPOSE_POSE_W_POSE_EXTRACTOR_NET_HPP
diff --git a/web/html/doc/w_pose_renderer_8hpp.html b/web/html/doc/w_pose_renderer_8hpp.html
new file mode 100644
index 000000000..46d30d540
--- /dev/null
+++ b/web/html/doc/w_pose_renderer_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/pose/wPoseRenderer.hpp File Reference
Go to the source code of this file.
Classes:     class op::WPoseRenderer< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WPoseRenderer)
diff --git a/web/html/doc/w_pose_renderer_8hpp.js b/web/html/doc/w_pose_renderer_8hpp.js
new file mode 100644
index 000000000..ff360567a
--- /dev/null
+++ b/web/html/doc/w_pose_renderer_8hpp.js
@@ -0,0 +1,5 @@
var w_pose_renderer_8hpp =
[
    [ "WPoseRenderer", "classop_1_1_w_pose_renderer.html", "classop_1_1_w_pose_renderer" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_pose_renderer_8hpp.html#ae76afeeeaedaebe6941f41a4bdf50e2a", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_pose_renderer_8hpp_source.html b/web/html/doc/w_pose_renderer_8hpp_source.html
new file mode 100644
index 000000000..2588e92e5
--- /dev/null
+++ b/web/html/doc/w_pose_renderer_8hpp_source.html
@@ -0,0 +1,215 @@
OpenPose: include/openpose/pose/wPoseRenderer.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_POSE_W_POSE_RENDERER_HPP
#define OPENPOSE_POSE_W_POSE_RENDERER_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WPoseRenderer : public Worker<TDatums>
    {
    public:
        explicit WPoseRenderer(const std::shared_ptr<PoseRenderer>& poseRendererSharedPtr);

        virtual ~WPoseRenderer();

        void initializationOnThread();

        void work(TDatums& tDatums);

    private:
        std::shared_ptr<PoseRenderer> spPoseRenderer;

        DELETE_COPY(WPoseRenderer);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WPoseRenderer<TDatums>::WPoseRenderer(const std::shared_ptr<PoseRenderer>& poseRendererSharedPtr) :
        spPoseRenderer{poseRendererSharedPtr}
    {
    }

    template<typename TDatums>
    WPoseRenderer<TDatums>::~WPoseRenderer()
    {
    }

    template<typename TDatums>
    void WPoseRenderer<TDatums>::initializationOnThread()
    {
        try
        {
            spPoseRenderer->initializationOnThread();
        }
        catch (const std::exception& e)
        {
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    template<typename TDatums>
    void WPoseRenderer<TDatums>::work(TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // Render people pose
                for (auto& tDatumPtr : *tDatums)
                    tDatumPtr->elementRendered = spPoseRenderer->renderPose(
                        tDatumPtr->outputData, tDatumPtr->poseKeypoints, (float)tDatumPtr->scaleInputToOutput,
                        (float)tDatumPtr->scaleNetToOutput);
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            tDatums = nullptr;
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WPoseRenderer)
}

#endif // OPENPOSE_POSE_W_POSE_RENDERER_HPP
diff --git a/web/html/doc/w_pose_saver_8hpp.html b/web/html/doc/w_pose_saver_8hpp.html
new file mode 100644
index 000000000..a5d820b9d
--- /dev/null
+++ b/web/html/doc/w_pose_saver_8hpp.html
@@ -0,0 +1,128 @@
OpenPose: include/openpose/filestream/wPoseSaver.hpp File Reference
Go to the source code of this file.
Classes:     class op::WPoseSaver< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WPoseSaver)
diff --git a/web/html/doc/w_pose_saver_8hpp.js b/web/html/doc/w_pose_saver_8hpp.js
new file mode 100644
index 000000000..2945ed92c
--- /dev/null
+++ b/web/html/doc/w_pose_saver_8hpp.js
@@ -0,0 +1,5 @@
var w_pose_saver_8hpp =
[
    [ "WPoseSaver", "classop_1_1_w_pose_saver.html", "classop_1_1_w_pose_saver" ],
    [ "COMPILE_TEMPLATE_DATUM", "w_pose_saver_8hpp.html#a31ad937a2e52ea08ce925031d26616b9", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/w_pose_saver_8hpp_source.html b/web/html/doc/w_pose_saver_8hpp_source.html
new file mode 100644
index 000000000..bf053ed1c
--- /dev/null
+++ b/web/html/doc/w_pose_saver_8hpp_source.html
@@ -0,0 +1,212 @@
OpenPose: include/openpose/filestream/wPoseSaver.hpp Source File
Go to the documentation of this file.

#ifndef OPENPOSE_FILESTREAM_W_POSE_SAVER_HPP
#define OPENPOSE_FILESTREAM_W_POSE_SAVER_HPP

// (#include directives not captured in the extracted listing)

namespace op
{
    template<typename TDatums>
    class WPoseSaver : public WorkerConsumer<TDatums>
    {
    public:
        explicit WPoseSaver(const std::shared_ptr<KeypointSaver>& keypointSaver);

        virtual ~WPoseSaver();

        void initializationOnThread();

        void workConsumer(const TDatums& tDatums);

    private:
        const std::shared_ptr<KeypointSaver> spKeypointSaver;

        DELETE_COPY(WPoseSaver);
    };
}

// Implementation
namespace op
{
    template<typename TDatums>
    WPoseSaver<TDatums>::WPoseSaver(const std::shared_ptr<KeypointSaver>& keypointSaver) :
        spKeypointSaver{keypointSaver}
    {
    }

    template<typename TDatums>
    WPoseSaver<TDatums>::~WPoseSaver()
    {
    }

    template<typename TDatums>
    void WPoseSaver<TDatums>::initializationOnThread()
    {
    }

    template<typename TDatums>
    void WPoseSaver<TDatums>::workConsumer(const TDatums& tDatums)
    {
        try
        {
            if (checkNoNullNorEmpty(tDatums))
            {
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
                // Profiling speed
                const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
                // T* to T
                auto& tDatumsNoPtr = *tDatums;
                // Record people pose keypoint data
                std::vector<Array<float>> keypointVector(tDatumsNoPtr.size());
                for (auto i = 0u; i < tDatumsNoPtr.size(); i++)
                    keypointVector[i] = tDatumsNoPtr[i]->poseKeypoints;
                const auto fileName = (!tDatumsNoPtr[0]->name.empty()
                                           ? tDatumsNoPtr[0]->name : std::to_string(tDatumsNoPtr[0]->id));
                spKeypointSaver->saveKeypoints(keypointVector, fileName, "pose");
                // Profiling speed
                Profiler::timerEnd(profilerKey);
                Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
                // Debugging log
                opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
            }
        }
        catch (const std::exception& e)
        {
            this->stop();
            error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }

    COMPILE_TEMPLATE_DATUM(WPoseSaver)
}

#endif // OPENPOSE_FILESTREAM_W_POSE_SAVER_HPP
diff --git a/web/html/doc/w_pose_triangulation_8hpp.html b/web/html/doc/w_pose_triangulation_8hpp.html
new file mode 100644
index 000000000..e1f122279
--- /dev/null
+++ b/web/html/doc/w_pose_triangulation_8hpp.html
@@ -0,0 +1,127 @@
OpenPose: include/openpose/3d/wPoseTriangulation.hpp File Reference
Go to the source code of this file.
Classes:     class op::WPoseTriangulation< TDatums >
Namespaces:  op
Functions:   op::COMPILE_TEMPLATE_DATUM (WPoseTriangulation)
    + + + + diff --git a/web/html/doc/w_pose_triangulation_8hpp.js b/web/html/doc/w_pose_triangulation_8hpp.js new file mode 100644 index 000000000..d96daa959 --- /dev/null +++ b/web/html/doc/w_pose_triangulation_8hpp.js @@ -0,0 +1,5 @@ +var w_pose_triangulation_8hpp = +[ + [ "WPoseTriangulation", "classop_1_1_w_pose_triangulation.html", "classop_1_1_w_pose_triangulation" ], + [ "COMPILE_TEMPLATE_DATUM", "w_pose_triangulation_8hpp.html#a53f346232d0743f3dd0f547de1fc508f", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_pose_triangulation_8hpp_source.html b/web/html/doc/w_pose_triangulation_8hpp_source.html new file mode 100644 index 000000000..02b028fd9 --- /dev/null +++ b/web/html/doc/w_pose_triangulation_8hpp_source.html @@ -0,0 +1,240 @@ + + + + + + + +OpenPose: include/openpose/3d/wPoseTriangulation.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wPoseTriangulation.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_3D_W_POSE_TRIANGULATION_HPP
    +
    2 #define OPENPOSE_3D_W_POSE_TRIANGULATION_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WPoseTriangulation : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WPoseTriangulation(const std::shared_ptr<PoseTriangulation>& poseTriangulation);
    +
    15 
    +
    16  virtual ~WPoseTriangulation();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<PoseTriangulation> spPoseTriangulation;
    +
    24 
    +
    25  DELETE_COPY(WPoseTriangulation);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WPoseTriangulation<TDatums>::WPoseTriangulation(const std::shared_ptr<PoseTriangulation>& poseTriangulation) :
    +
    39  spPoseTriangulation{poseTriangulation}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  try
    +
    52  {
    +
    53  spPoseTriangulation->initializationOnThread();
    +
    54  }
    +
    55  catch (const std::exception& e)
    +
    56  {
    +
    57  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    58  }
    +
    59  }
    +
    60 
    +
    61  template<typename TDatums>
    +
    62  void WPoseTriangulation<TDatums>::work(TDatums& tDatums)
    +
    63  {
    +
    64  try
    +
    65  {
    +
    66  if (checkNoNullNorEmpty(tDatums))
    +
    67  {
    +
    68  // Debugging log
    +
    69  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    70  // Profiling speed
    +
    71  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    72  // 3-D triangulation and reconstruction
    +
    73  std::vector<Matrix> cameraMatrices;
    +
    74  std::vector<Array<float>> poseKeypointVector;
    +
    75  std::vector<Array<float>> faceKeypointVector;
    +
    76  std::vector<Array<float>> leftHandKeypointVector;
    +
    77  std::vector<Array<float>> rightHandKeypointVector;
    +
    78  std::vector<Point<int>> imageSizes;
    +
    79  for (auto& tDatumPtr : *tDatums)
    +
    80  {
    +
    81  poseKeypointVector.emplace_back(tDatumPtr->poseKeypoints);
    +
    82  faceKeypointVector.emplace_back(tDatumPtr->faceKeypoints);
    +
    83  leftHandKeypointVector.emplace_back(tDatumPtr->handKeypoints[0]);
    +
    84  rightHandKeypointVector.emplace_back(tDatumPtr->handKeypoints[1]);
    +
    85  cameraMatrices.emplace_back(tDatumPtr->cameraMatrix);
    +
    86  imageSizes.emplace_back(
    +
    87  Point<int>{tDatumPtr->cvInputData.cols(), tDatumPtr->cvInputData.rows()});
    +
    88  }
    +
    89  // Pose 3-D reconstruction
    +
    90  auto poseKeypoints3Ds = spPoseTriangulation->reconstructArray(
    +
    91  {poseKeypointVector, faceKeypointVector, leftHandKeypointVector, rightHandKeypointVector},
    +
    92  cameraMatrices, imageSizes);
    +
    93  // Assign to all tDatums
    +
    94  for (auto& tDatumPtr : *tDatums)
    +
    95  {
    +
    96  tDatumPtr->poseKeypoints3D = poseKeypoints3Ds[0];
    +
    97  tDatumPtr->faceKeypoints3D = poseKeypoints3Ds[1];
    +
    98  tDatumPtr->handKeypoints3D[0] = poseKeypoints3Ds[2];
    +
    99  tDatumPtr->handKeypoints3D[1] = poseKeypoints3Ds[3];
    +
    100  }
    +
    101  // Profiling speed
    +
    102  Profiler::timerEnd(profilerKey);
    +
    103  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    104  // Debugging log
    +
    105  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    106  }
    +
    107  }
    +
    108  catch (const std::exception& e)
    +
    109  {
    +
    110  this->stop();
    +
    111  tDatums = nullptr;
    +
    112  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    113  }
    +
    114  }
    +
    115 
    + +
    117 }
    +
    118 
    +
    119 #endif // OPENPOSE_3D_W_POSE_TRIANGULATION_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WPoseTriangulation(const std::shared_ptr< PoseTriangulation > &poseTriangulation)
    +
    void work(TDatums &tDatums)
    + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    +
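The listing above shows the full flow of WPoseTriangulation::work: gather the per-view keypoints, camera matrices and image sizes, call PoseTriangulation::reconstructArray once, and write the four returned 3-D arrays back into every view. The sketch below mirrors that same gather/reconstruct/scatter pattern outside the worker pipeline; the function name triangulateViews and the assumption that a PoseTriangulation instance and the per-view op::Datum pointers already exist are illustrative, not part of the documented API.

// Illustrative sketch only, not part of the generated documentation above.
#include <memory>
#include <vector>
#include <openpose/headers.hpp>

void triangulateViews(const std::shared_ptr<op::PoseTriangulation>& poseTriangulation,
                      const std::vector<std::shared_ptr<op::Datum>>& views)
{
    std::vector<op::Matrix> cameraMatrices;
    std::vector<op::Array<float>> poseKeypointVector, faceKeypointVector,
                                  leftHandKeypointVector, rightHandKeypointVector;
    std::vector<op::Point<int>> imageSizes;
    // Gather one entry per camera view, as WPoseTriangulation::work does.
    for (const auto& view : views)
    {
        poseKeypointVector.emplace_back(view->poseKeypoints);
        faceKeypointVector.emplace_back(view->faceKeypoints);
        leftHandKeypointVector.emplace_back(view->handKeypoints[0]);
        rightHandKeypointVector.emplace_back(view->handKeypoints[1]);
        cameraMatrices.emplace_back(view->cameraMatrix);
        imageSizes.emplace_back(op::Point<int>{view->cvInputData.cols(), view->cvInputData.rows()});
    }
    // Same call as in the worker above: one reconstruction for all keypoint types at once.
    const auto keypoints3Ds = poseTriangulation->reconstructArray(
        {poseKeypointVector, faceKeypointVector, leftHandKeypointVector, rightHandKeypointVector},
        cameraMatrices, imageSizes);
    // Scatter the shared 3-D result back into every view.
    for (const auto& view : views)
    {
        view->poseKeypoints3D = keypoints3Ds[0];
        view->faceKeypoints3D = keypoints3Ds[1];
        view->handKeypoints3D[0] = keypoints3Ds[2];
        view->handKeypoints3D[1] = keypoints3Ds[3];
    }
}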
    + + + + diff --git a/web/html/doc/w_queue_assembler_8hpp.html b/web/html/doc/w_queue_assembler_8hpp.html new file mode 100644 index 000000000..d7f4aadc8 --- /dev/null +++ b/web/html/doc/w_queue_assembler_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/thread/wQueueAssembler.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wQueueAssembler.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WQueueAssembler< TDatums >

Namespaces
    op
    +
    +
    + + + + diff --git a/web/html/doc/w_queue_assembler_8hpp_source.html b/web/html/doc/w_queue_assembler_8hpp_source.html new file mode 100644 index 000000000..7bc8c62a2 --- /dev/null +++ b/web/html/doc/w_queue_assembler_8hpp_source.html @@ -0,0 +1,230 @@ + + + + + + + +OpenPose: include/openpose/thread/wQueueAssembler.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wQueueAssembler.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_W_QUEUE_ASSEMBLER_HPP
    +
    2 #define OPENPOSE_THREAD_W_QUEUE_ASSEMBLER_HPP
    +
    3 
    +
    4 #include <queue> // std::queue
    + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
11  // Note: The goal of WQueueAssembler and WQueueSplitter (integrated in wDatumProducer) is to reduce the latency
12  // of OpenPose. E.g., with 4 cameras in stereo mode, OpenPose would otherwise have to process all 4 views
13  // on the same GPU. With these 2 classes, the work is parallelized over the GPUs (1 view per GPU).
14  // Pros: Latency highly reduced, same overall speed
15  // Cons: Requires these 2 extra classes and dedicated threads for them
    +
    16  template<typename TDatums>
    +
    17  class WQueueAssembler : public Worker<std::shared_ptr<TDatums>>
    +
    18  {
    +
    19  public:
    +
    20  explicit WQueueAssembler();
    +
    21 
    +
    22  virtual ~WQueueAssembler();
    +
    23 
    + +
    25 
    +
    26  void work(std::shared_ptr<TDatums>& tDatums);
    +
    27 
    +
    28  private:
    +
    29  std::shared_ptr<TDatums> mNextTDatums;
    +
    30 
    +
    31  DELETE_COPY(WQueueAssembler);
    +
    32  };
    +
    33 }
    +
    34 
    +
    35 
    +
    36 
    +
    37 
    +
    38 
    +
    39 // Implementation
    +
    40 namespace op
    +
    41 {
    +
    42  template<typename TDatums>
    + +
    44  {
    +
    45  }
    +
    46 
    +
    47  template<typename TDatums>
    + +
    49  {
    +
    50  }
    +
    51 
    +
    52  template<typename TDatums>
    + +
    54  {
    +
    55  }
    +
    56 
    +
    57  template<typename TDatums>
    +
    58  void WQueueAssembler<TDatums>::work(std::shared_ptr<TDatums>& tDatums)
    +
    59  {
    +
    60  try
    +
    61  {
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // Input TDatums -> enqueue it
    +
    65  if (checkNoNullNorEmpty(tDatums))
    +
    66  {
    +
    67  // Sanity check
    +
    68  if (tDatums->size() > 1)
    +
    69  error("This function assumes that WQueueSplitter (inside WDatumProducer)"
    +
    70  " was applied in the first place, i.e., that there is only 1 element"
    +
    71  " per TDatums (size = " + std::to_string(tDatums->size()) + ").",
    +
    72  __LINE__, __FUNCTION__, __FILE__);
    +
    73  auto tDatumPtr = (*tDatums)[0];
    +
    74  // Single view --> Return
    +
    75  if (tDatumPtr->subIdMax == 0)
    +
    76  return;
    +
77  // Multiple views --> Merge the views of the same frame into a single TDatums (1st view: create it)
    +
    78  if (mNextTDatums == nullptr)
    +
    79  mNextTDatums = std::make_shared<TDatums>();
    +
80  // Multiple views --> Append this view to the accumulated TDatums
    +
    81  mNextTDatums->emplace_back(tDatumPtr);
    +
    82  // Last view - Return frame
    +
    83  if (mNextTDatums->back()->subId == mNextTDatums->back()->subIdMax)
    +
    84  {
    +
    85  tDatums = mNextTDatums;
    +
    86  mNextTDatums = nullptr;
    +
    87  // Profiling speed
    +
    88  Profiler::timerEnd(profilerKey);
    +
    89  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    90  // Debugging log
    +
    91  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    92  }
    +
    93  // Non-last view - Return nothing
    +
    94  else
    +
    95  tDatums = nullptr;
    +
    96  }
    +
    97  // Sleep if no new tDatums to either pop or push
    +
    98  else
    +
    99  std::this_thread::sleep_for(std::chrono::milliseconds{1});
    +
    100  }
    +
    101  catch (const std::exception& e)
    +
    102  {
    +
    103  this->stop();
    +
    104  tDatums = nullptr;
    +
    105  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    106  }
    +
    107  }
    +
    108 
    +
    109  extern template class WQueueAssembler<BASE_DATUMS>;
    +
    110 }
    +
    111 
    +
    112 #endif // OPENPOSE_THREAD_W_QUEUE_ASSEMBLER_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + + +
    void work(std::shared_ptr< TDatums > &tDatums)
    + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
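The note at the top of wQueueAssembler.hpp describes the design: WQueueSplitter (inside wDatumProducer) splits a multi-camera frame into one single-view TDatums per GPU, and WQueueAssembler merges them back once the last view (subId == subIdMax) arrives. Below is a stripped-down sketch of that merge rule, deliberately independent of the OpenPose types; View, Packet and assemble are made-up names for illustration only.

// Standalone illustration of the subId/subIdMax merge rule used by WQueueAssembler.
#include <memory>
#include <vector>

struct View { unsigned long long id; unsigned long long subId; unsigned long long subIdMax; };
using Packet = std::vector<std::shared_ptr<View>>;

// Returns the complete packet once the last view (subId == subIdMax) arrives, nullptr otherwise.
std::shared_ptr<Packet> assemble(const std::shared_ptr<View>& view, std::shared_ptr<Packet>& buffer)
{
    if (buffer == nullptr)
        buffer = std::make_shared<Packet>();
    buffer->emplace_back(view);
    if (view->subId == view->subIdMax)
    {
        auto complete = buffer;   // all views of this frame gathered
        buffer = nullptr;         // start a fresh packet for the next frame
        return complete;
    }
    return nullptr;               // frame still incomplete, keep buffering
}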
    + + + + diff --git a/web/html/doc/w_queue_orderer_8hpp.html b/web/html/doc/w_queue_orderer_8hpp.html new file mode 100644 index 000000000..2a3c832ef --- /dev/null +++ b/web/html/doc/w_queue_orderer_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/thread/wQueueOrderer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wQueueOrderer.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WQueueOrderer< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WQueueOrderer)
    +
    +
    + + + + diff --git a/web/html/doc/w_queue_orderer_8hpp.js b/web/html/doc/w_queue_orderer_8hpp.js new file mode 100644 index 000000000..b476c8e0f --- /dev/null +++ b/web/html/doc/w_queue_orderer_8hpp.js @@ -0,0 +1,5 @@ +var w_queue_orderer_8hpp = +[ + [ "WQueueOrderer", "classop_1_1_w_queue_orderer.html", "classop_1_1_w_queue_orderer" ], + [ "COMPILE_TEMPLATE_DATUM", "w_queue_orderer_8hpp.html#add981a5f6a49d35cc316a54c613497f3", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_queue_orderer_8hpp_source.html b/web/html/doc/w_queue_orderer_8hpp_source.html new file mode 100644 index 000000000..597ce334b --- /dev/null +++ b/web/html/doc/w_queue_orderer_8hpp_source.html @@ -0,0 +1,299 @@ + + + + + + + +OpenPose: include/openpose/thread/wQueueOrderer.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wQueueOrderer.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_W_QUEUE_ORDERER_HPP
    +
    2 #define OPENPOSE_THREAD_W_QUEUE_ORDERER_HPP
    +
    3 
    +
    4 #include <queue> // std::priority_queue
    + + + +
    8 
    +
    9 namespace op
    +
    10 {
    +
    11  template<typename TDatums>
    +
    12  class WQueueOrderer : public Worker<TDatums>
    +
    13  {
    +
    14  public:
    +
    15  explicit WQueueOrderer(const unsigned int maxBufferSize = 64u);
    +
    16 
    +
    17  virtual ~WQueueOrderer();
    +
    18 
    + +
    20 
    +
    21  void work(TDatums& tDatums);
    +
    22 
    +
    23  void tryStop();
    +
    24 
    +
    25  private:
    +
    26  const unsigned int mMaxBufferSize;
    +
    27  bool mStopWhenEmpty;
    +
    28  unsigned long long mNextExpectedId;
    +
    29  unsigned long long mNextExpectedSubId;
    +
    30  std::priority_queue<TDatums, std::vector<TDatums>, PointerContainerGreater<TDatums>> mPriorityQueueBuffer;
    +
    31 
    +
    32  DELETE_COPY(WQueueOrderer);
    +
    33  };
    +
    34 }
    +
    35 
    +
    36 
    +
    37 
    +
    38 
    +
    39 
    +
    40 // Implementation
    +
    41 namespace op
    +
    42 {
    +
    43  template<typename TDatums>
    +
    44  WQueueOrderer<TDatums>::WQueueOrderer(const unsigned int maxBufferSize) :
    +
    45  mMaxBufferSize{maxBufferSize},
    +
    46  mStopWhenEmpty{false},
    +
    47  mNextExpectedId{0},
    +
    48  mNextExpectedSubId{0}
    +
    49  {
    +
    50  }
    +
    51 
    +
    52  template<typename TDatums>
    + +
    54  {
    +
    55  }
    +
    56 
    +
    57  template<typename TDatums>
    + +
    59  {
    +
    60  }
    +
    61 
    +
    62  template<typename TDatums>
    +
    63  void WQueueOrderer<TDatums>::work(TDatums& tDatums)
    +
    64  {
    +
    65  try
    +
    66  {
    +
    67  // Profiling speed
    +
    68  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    69  bool profileSpeed = (tDatums != nullptr);
    +
70  // Input TDatums -> enqueue it or return it directly
    +
    71  if (checkNoNullNorEmpty(tDatums))
    +
    72  {
    +
    73  // T* to T
    +
    74  auto& tDatumsNoPtr = *tDatums;
    +
    75  // tDatums is the next expected, update counter
    +
    76  if (tDatumsNoPtr[0]->id == mNextExpectedId && tDatumsNoPtr[0]->subId == mNextExpectedSubId)
    +
    77  {
    +
    78  // If single-view
    +
    79  if (tDatumsNoPtr[0]->subIdMax == 0)
    +
    80  mNextExpectedId++;
    +
81  // If multi-view system
    +
    82  else
    +
    83  {
    +
    84  mNextExpectedSubId++;
    +
    85  if (mNextExpectedSubId > tDatumsNoPtr[0]->subIdMax)
    +
    86  {
    +
    87  mNextExpectedSubId = 0;
    +
    88  mNextExpectedId++;
    +
    89  }
    +
    90  }
    +
    91  }
    +
    92  // Else push it to our buffered queue
    +
    93  else
    +
    94  {
    +
    95  // Enqueue current tDatums
    +
    96  mPriorityQueueBuffer.emplace(tDatums);
    +
    97  tDatums = nullptr;
    +
98  // If the buffer is now full -> pop one tDatums to return it
    +
    99  if (mPriorityQueueBuffer.size() > mMaxBufferSize)
    +
    100  {
    +
    101  tDatums = mPriorityQueueBuffer.top();
    +
    102  mPriorityQueueBuffer.pop();
    +
    103  }
    +
    104  }
    +
    105  }
    +
106  // If the input TDatums was enqueued -> check whether the next desired frame was previously buffered and pop it
    +
    107  if (!checkNoNullNorEmpty(tDatums))
    +
    108  {
    +
    109  // Retrieve frame if next is desired frame or if we want to stop this worker
    +
    110  if (!mPriorityQueueBuffer.empty()
    +
    111  && (mStopWhenEmpty ||
    +
    112  ((*mPriorityQueueBuffer.top())[0]->id == mNextExpectedId
    +
    113  && (*mPriorityQueueBuffer.top())[0]->subId == mNextExpectedSubId)))
    +
    114  {
    +
    115  tDatums = { mPriorityQueueBuffer.top() };
    +
    116  mPriorityQueueBuffer.pop();
    +
    117  }
    +
    118  }
    +
119  // If a TDatums is ready to be returned -> update the next expected id
    +
    120  if (checkNoNullNorEmpty(tDatums))
    +
    121  {
    +
    122  const auto& tDatumsNoPtr = *tDatums;
    +
    123  // If single-view
    +
    124  if (tDatumsNoPtr[0]->subIdMax == 0)
    +
    125  mNextExpectedId = tDatumsNoPtr[0]->id + 1;
    +
126  // If multi-view system
    +
    127  else
    +
    128  {
    +
    129  mNextExpectedSubId = tDatumsNoPtr[0]->subId + 1;
    +
    130  if (mNextExpectedSubId > tDatumsNoPtr[0]->subIdMax)
    +
    131  {
    +
    132  mNextExpectedSubId = 0;
    +
    133  mNextExpectedId = tDatumsNoPtr[0]->id + 1;
    +
    134  }
    +
    135  }
    +
    136  }
    +
    137  // Sleep if no new tDatums to either pop or push
    +
    138  if (!checkNoNullNorEmpty(tDatums) && mPriorityQueueBuffer.size() < mMaxBufferSize / 2u)
    +
    139  std::this_thread::sleep_for(std::chrono::milliseconds{1});
    +
    140  // If TDatum popped and/or pushed
    +
    141  if (profileSpeed || tDatums != nullptr)
    +
    142  {
    +
    143  // Profiling speed
    +
    144  Profiler::timerEnd(profilerKey);
    +
    145  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    146  // Debugging log
    +
    147  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    148  }
    +
    149  }
    +
    150  catch (const std::exception& e)
    +
    151  {
    +
    152  this->stop();
    +
    153  tDatums = nullptr;
    +
    154  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    155  }
    +
    156  }
    +
    157 
    +
    158  template<typename TDatums>
    + +
    160  {
    +
    161  try
    +
    162  {
    +
    163  // Close if all frames were retrieved from the queue
    +
    164  if (mPriorityQueueBuffer.empty())
    +
    165  this->stop();
    +
    166  mStopWhenEmpty = true;
    +
    167 
    +
    168  }
    +
    169  catch (const std::exception& e)
    +
    170  {
    +
    171  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    172  }
    +
    173  }
    +
    174 
    + +
    176 }
    +
    177 
    +
    178 #endif // OPENPOSE_THREAD_W_QUEUE_ORDERER_HPP
    + +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + +
    void work(TDatums &tDatums)
    +
    WQueueOrderer(const unsigned int maxBufferSize=64u)
    +
    virtual ~WQueueOrderer()
    +
    void initializationOnThread()
    + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + +
    +
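WQueueOrderer::work above restores frame order with a min-priority queue keyed on (id, subId): in-order frames pass straight through, out-of-order ones are buffered until the expected id shows up or the buffer overflows. The same idea in a self-contained form is sketched below; FrameOrderer is an illustrative name, not an OpenPose class, and the subId handling and max-buffer-size fallback are omitted for brevity.

// Standalone illustration of reordering out-of-order frame ids with a min-priority queue.
#include <cstdint>
#include <functional>
#include <optional>
#include <queue>
#include <vector>

class FrameOrderer
{
public:
    // Push a frame id; return the next in-order id if it is now available (at most one per call).
    std::optional<std::uint64_t> push(const std::uint64_t id)
    {
        mBuffer.push(id);
        if (mBuffer.top() == mNextExpected)
        {
            const auto next = mBuffer.top();
            mBuffer.pop();
            ++mNextExpected;
            return next;
        }
        return std::nullopt;   // keep buffering until the expected id arrives
    }

private:
    std::uint64_t mNextExpected = 0;
    std::priority_queue<std::uint64_t, std::vector<std::uint64_t>,
                        std::greater<std::uint64_t>> mBuffer;   // smallest id on top
};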
    + + + + diff --git a/web/html/doc/w_scale_and_size_extractor_8hpp.html b/web/html/doc/w_scale_and_size_extractor_8hpp.html new file mode 100644 index 000000000..527206d19 --- /dev/null +++ b/web/html/doc/w_scale_and_size_extractor_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/core/wScaleAndSizeExtractor.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wScaleAndSizeExtractor.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WScaleAndSizeExtractor< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WScaleAndSizeExtractor)
    +
    +
    + + + + diff --git a/web/html/doc/w_scale_and_size_extractor_8hpp.js b/web/html/doc/w_scale_and_size_extractor_8hpp.js new file mode 100644 index 000000000..93605309e --- /dev/null +++ b/web/html/doc/w_scale_and_size_extractor_8hpp.js @@ -0,0 +1,5 @@ +var w_scale_and_size_extractor_8hpp = +[ + [ "WScaleAndSizeExtractor", "classop_1_1_w_scale_and_size_extractor.html", "classop_1_1_w_scale_and_size_extractor" ], + [ "COMPILE_TEMPLATE_DATUM", "w_scale_and_size_extractor_8hpp.html#aaca98fe6101cda512a43c513182ae5cc", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_scale_and_size_extractor_8hpp_source.html b/web/html/doc/w_scale_and_size_extractor_8hpp_source.html new file mode 100644 index 000000000..0468ef960 --- /dev/null +++ b/web/html/doc/w_scale_and_size_extractor_8hpp_source.html @@ -0,0 +1,211 @@ + + + + + + + +OpenPose: include/openpose/core/wScaleAndSizeExtractor.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wScaleAndSizeExtractor.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_W_SCALE_AND_SIZE_EXTRACTOR_HPP
    +
    2 #define OPENPOSE_CORE_W_SCALE_AND_SIZE_EXTRACTOR_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WScaleAndSizeExtractor : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WScaleAndSizeExtractor(const std::shared_ptr<ScaleAndSizeExtractor>& scaleAndSizeExtractor);
    +
    15 
    +
    16  virtual ~WScaleAndSizeExtractor();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<ScaleAndSizeExtractor> spScaleAndSizeExtractor;
    +
    24 
    +
    25  DELETE_COPY(WScaleAndSizeExtractor);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    + +
    39  const std::shared_ptr<ScaleAndSizeExtractor>& scaleAndSizeExtractor) :
    +
    40  spScaleAndSizeExtractor{scaleAndSizeExtractor}
    +
    41  {
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums>
    + +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums>
    + +
    51  {
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    + +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    65  // cv::Mat -> float*
    +
    66  for (auto& tDatumPtr : *tDatums)
    +
    67  {
    +
    68  const Point<int> inputSize{tDatumPtr->cvInputData.cols(), tDatumPtr->cvInputData.rows()};
    +
    69  std::tie(tDatumPtr->scaleInputToNetInputs, tDatumPtr->netInputSizes, tDatumPtr->scaleInputToOutput,
    +
    70  tDatumPtr->netOutputSize) = spScaleAndSizeExtractor->extract(inputSize);
    +
    71  }
    +
    72  // Profiling speed
    +
    73  Profiler::timerEnd(profilerKey);
    +
    74  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    75  // Debugging log
    +
    76  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    77  }
    +
    78  }
    +
    79  catch (const std::exception& e)
    +
    80  {
    +
    81  this->stop();
    +
    82  tDatums = nullptr;
    +
    83  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    84  }
    +
    85  }
    +
    86 
    + +
    88 }
    +
    89 
    +
    90 #endif // OPENPOSE_CORE_W_SCALE_AND_SIZE_EXTRACTOR_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WScaleAndSizeExtractor(const std::shared_ptr< ScaleAndSizeExtractor > &scaleAndSizeExtractor)
    + + + + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + + + +
    +
    + + + + diff --git a/web/html/doc/w_udp_sender_8hpp.html b/web/html/doc/w_udp_sender_8hpp.html new file mode 100644 index 000000000..2f1ac9521 --- /dev/null +++ b/web/html/doc/w_udp_sender_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/filestream/wUdpSender.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wUdpSender.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WUdpSender< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WUdpSender)
    +
    +
    + + + + diff --git a/web/html/doc/w_udp_sender_8hpp.js b/web/html/doc/w_udp_sender_8hpp.js new file mode 100644 index 000000000..aec37b9f6 --- /dev/null +++ b/web/html/doc/w_udp_sender_8hpp.js @@ -0,0 +1,5 @@ +var w_udp_sender_8hpp = +[ + [ "WUdpSender", "classop_1_1_w_udp_sender.html", "classop_1_1_w_udp_sender" ], + [ "COMPILE_TEMPLATE_DATUM", "w_udp_sender_8hpp.html#af9e0d9e4028c0589b5eeeaed42a5088c", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_udp_sender_8hpp_source.html b/web/html/doc/w_udp_sender_8hpp_source.html new file mode 100644 index 000000000..34c0a9d5b --- /dev/null +++ b/web/html/doc/w_udp_sender_8hpp_source.html @@ -0,0 +1,223 @@ + + + + + + + +OpenPose: include/openpose/filestream/wUdpSender.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wUdpSender.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_W_UDP_SENDER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_W_UDP_SENDER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WUdpSender : public WorkerConsumer<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WUdpSender(const std::shared_ptr<UdpSender>& udpSender);
    +
    15 
    +
    16  virtual ~WUdpSender();
    +
    17 
    + +
    19 
    +
    20  void workConsumer(const TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<UdpSender> spUdpSender;
    +
    24 
    +
    25  DELETE_COPY(WUdpSender);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WUdpSender<TDatums>::WUdpSender(const std::shared_ptr<UdpSender>& udpSender) :
    +
    39  spUdpSender{udpSender}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WUdpSender<TDatums>::workConsumer(const TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
64  // Send through UDP communication
    +
    65 #ifdef USE_3D_ADAM_MODEL
    +
    66  const auto& tDatumPtr = (*tDatums)[0];
    +
    67  if (!tDatumPtr->poseKeypoints3D.empty())
    +
    68  {
    +
    69  const auto& adamPose = tDatumPtr->adamPose; // Eigen::Matrix<double, 62, 3, Eigen::RowMajor>
    +
    70  const auto& adamTranslation = tDatumPtr->adamTranslation; // Eigen::Vector3d(3, 1)
    +
    71  const auto adamFaceCoeffsExp = tDatumPtr->adamFaceCoeffsExp; // Eigen::VectorXd resized to (200, 1)
    +
    72  //const float mouth_open = tDatumPtr->mouthOpening; // tDatumPtr->mouth_open;
    +
    73  //const float leye_open = tDatumPtr->rightEyeOpening; // tDatumPtr->leye_open;
    +
    74  //const float reye_open = tDatumPtr->leftEyeOpening; // tDatumPtr->reye_open;
    +
    75  //const float dist_root_foot = Datum.distanceRootFoot; // tDatumPtr->dist_root_foot;
    +
    76  // m_adam_t:
    +
    77  // 1. Total translation (centimeters) of the root in camera/global coordinate representation.
    +
    78  // m_adam_pose:
    +
    79  // 1. First row is global rotation, in AngleAxis representation. Radians (not degrees!)
    +
    80  // 2. Rest are joint-angles in Euler-Angle representation. Degrees.
    +
    81  spUdpSender->sendJointAngles(
    +
    82  adamPose.data(), adamPose.rows(), adamTranslation.data(), adamFaceCoeffsExp.data(),
    +
    83  adamFaceCoeffsExp.rows());
    +
    84  }
    +
    85 #endif
    +
    86  // Profiling speed
    +
    87  Profiler::timerEnd(profilerKey);
    +
    88  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    89  // Debugging log
    +
    90  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    91  }
    +
    92  }
    +
    93  catch (const std::exception& e)
    +
    94  {
    +
    95  this->stop();
    +
    96  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    97  }
    +
    98  }
    +
    99 
    + +
    101 }
    +
    102 
    +
    103 #endif // OPENPOSE_FILESTREAM_W_UDP_SENDER_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WUdpSender(const std::shared_ptr< UdpSender > &udpSender)
    Definition: wUdpSender.hpp:38
    +
    void initializationOnThread()
    Definition: wUdpSender.hpp:49
    +
    void workConsumer(const TDatums &tDatums)
    Definition: wUdpSender.hpp:54
    +
    virtual ~WUdpSender()
    Definition: wUdpSender.hpp:44
    + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + + +
    +
    + + + + diff --git a/web/html/doc/w_verbose_printer_8hpp.html b/web/html/doc/w_verbose_printer_8hpp.html new file mode 100644 index 000000000..62ea3c80d --- /dev/null +++ b/web/html/doc/w_verbose_printer_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/core/wVerbosePrinter.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wVerbosePrinter.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WVerbosePrinter< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WVerbosePrinter)
    +
    +
    + + + + diff --git a/web/html/doc/w_verbose_printer_8hpp.js b/web/html/doc/w_verbose_printer_8hpp.js new file mode 100644 index 000000000..58bfd1285 --- /dev/null +++ b/web/html/doc/w_verbose_printer_8hpp.js @@ -0,0 +1,5 @@ +var w_verbose_printer_8hpp = +[ + [ "WVerbosePrinter", "classop_1_1_w_verbose_printer.html", "classop_1_1_w_verbose_printer" ], + [ "COMPILE_TEMPLATE_DATUM", "w_verbose_printer_8hpp.html#a89984557f6968584d1938afe7b9f32bd", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_verbose_printer_8hpp_source.html b/web/html/doc/w_verbose_printer_8hpp_source.html new file mode 100644 index 000000000..1987f4442 --- /dev/null +++ b/web/html/doc/w_verbose_printer_8hpp_source.html @@ -0,0 +1,209 @@ + + + + + + + +OpenPose: include/openpose/core/wVerbosePrinter.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wVerbosePrinter.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_CORE_W_VERBOSE_PRINTER_HPP
    +
    2 #define OPENPOSE_CORE_W_VERBOSE_PRINTER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WVerbosePrinter : public Worker<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WVerbosePrinter(const std::shared_ptr<VerbosePrinter>& verbosePrinter);
    +
    15 
    +
    16  virtual ~WVerbosePrinter();
    +
    17 
    + +
    19 
    +
    20  void work(TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  const std::shared_ptr<VerbosePrinter> spVerbosePrinter;
    +
    24 
    +
    25  DELETE_COPY(WVerbosePrinter);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    + +
    39  const std::shared_ptr<VerbosePrinter>& verbosePrinter) :
    +
    40  spVerbosePrinter{verbosePrinter}
    +
    41  {
    +
    42  }
    +
    43 
    +
    44  template<typename TDatums>
    + +
    46  {
    +
    47  }
    +
    48 
    +
    49  template<typename TDatums>
    + +
    51  {
    +
    52  }
    +
    53 
    +
    54  template<typename TDatums>
    +
    55  void WVerbosePrinter<TDatums>::work(TDatums& tDatums)
    +
    56  {
    +
    57  try
    +
    58  {
    +
    59  if (checkNoNullNorEmpty(tDatums))
    +
    60  {
    +
    61  // Debugging log
    +
    62  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    63  // Profiling speed
    +
    64  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    65  // Print verbose
    +
    66  if (checkNoNullNorEmpty(tDatums))
    +
    67  {
    +
    68  const auto tDatumPtr = (*tDatums)[0];
    +
    69  spVerbosePrinter->printVerbose(tDatumPtr->frameNumber);
    +
    70  }
    +
    71  // Profiling speed
    +
    72  Profiler::timerEnd(profilerKey);
    +
    73  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    74  // Debugging log
    +
    75  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    76  }
    +
    77  }
    +
    78  catch (const std::exception& e)
    +
    79  {
    +
    80  this->stop();
    +
    81  tDatums = nullptr;
    +
    82  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    83  }
    +
    84  }
    +
    85 
    + +
    87 }
    +
    88 
    +
    89 #endif // OPENPOSE_CORE_W_VERBOSE_PRINTER_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + + +
    WVerbosePrinter(const std::shared_ptr< VerbosePrinter > &verbosePrinter)
    + +
    void work(TDatums &tDatums)
    + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + + +
    +
    + + + + diff --git a/web/html/doc/w_video_saver3_d_8hpp.html b/web/html/doc/w_video_saver3_d_8hpp.html new file mode 100644 index 000000000..774f4d81b --- /dev/null +++ b/web/html/doc/w_video_saver3_d_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/filestream/wVideoSaver3D.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wVideoSaver3D.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WVideoSaver3D< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WVideoSaver3D)
    +
    +
    + + + + diff --git a/web/html/doc/w_video_saver3_d_8hpp.js b/web/html/doc/w_video_saver3_d_8hpp.js new file mode 100644 index 000000000..7ae9de428 --- /dev/null +++ b/web/html/doc/w_video_saver3_d_8hpp.js @@ -0,0 +1,5 @@ +var w_video_saver3_d_8hpp = +[ + [ "WVideoSaver3D", "classop_1_1_w_video_saver3_d.html", "classop_1_1_w_video_saver3_d" ], + [ "COMPILE_TEMPLATE_DATUM", "w_video_saver3_d_8hpp.html#a0db530b6f607aa43e8f9154b308d207a", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_video_saver3_d_8hpp_source.html b/web/html/doc/w_video_saver3_d_8hpp_source.html new file mode 100644 index 000000000..20539d04c --- /dev/null +++ b/web/html/doc/w_video_saver3_d_8hpp_source.html @@ -0,0 +1,206 @@ + + + + + + + +OpenPose: include/openpose/filestream/wVideoSaver3D.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wVideoSaver3D.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_W_VIDEO_SAVER_3D_HPP
    +
    2 #define OPENPOSE_FILESTREAM_W_VIDEO_SAVER_3D_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WVideoSaver3D : public WorkerConsumer<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WVideoSaver3D(const std::shared_ptr<VideoSaver>& videoSaver);
    +
    15 
    +
    16  virtual ~WVideoSaver3D();
    +
    17 
    + +
    19 
    +
    20  void workConsumer(const TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<VideoSaver> spVideoSaver;
    +
    24 
    +
    25  DELETE_COPY(WVideoSaver3D);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WVideoSaver3D<TDatums>::WVideoSaver3D(const std::shared_ptr<VideoSaver>& videoSaver) :
    +
    39  spVideoSaver{videoSaver}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WVideoSaver3D<TDatums>::workConsumer(const TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // T* to T
    +
    65  auto& tDatumsNoPtr = *tDatums;
    +
    66  // Record video(s)
    +
    67  if (!tDatumsNoPtr.empty())
    +
    68  spVideoSaver->write(tDatumsNoPtr[0]->cvOutputData3D);
    +
    69  // Profiling speed
    +
    70  Profiler::timerEnd(profilerKey);
    +
    71  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    72  // Debugging log
    +
    73  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    74  }
    +
    75  }
    +
    76  catch (const std::exception& e)
    +
    77  {
    +
    78  this->stop();
    +
    79  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    80  }
    +
    81  }
    +
    82 
    + +
    84 }
    +
    85 
    +
    86 #endif // OPENPOSE_FILESTREAM_W_VIDEO_SAVER_3D_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    virtual ~WVideoSaver3D()
    +
    WVideoSaver3D(const std::shared_ptr< VideoSaver > &videoSaver)
    +
    void initializationOnThread()
    +
    void workConsumer(const TDatums &tDatums)
    + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + + +
    +
    + + + + diff --git a/web/html/doc/w_video_saver_8hpp.html b/web/html/doc/w_video_saver_8hpp.html new file mode 100644 index 000000000..f1e846c57 --- /dev/null +++ b/web/html/doc/w_video_saver_8hpp.html @@ -0,0 +1,127 @@ + + + + + + + +OpenPose: include/openpose/filestream/wVideoSaver.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wVideoSaver.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WVideoSaver< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WVideoSaver)
    +
    +
    + + + + diff --git a/web/html/doc/w_video_saver_8hpp.js b/web/html/doc/w_video_saver_8hpp.js new file mode 100644 index 000000000..818144f94 --- /dev/null +++ b/web/html/doc/w_video_saver_8hpp.js @@ -0,0 +1,5 @@ +var w_video_saver_8hpp = +[ + [ "WVideoSaver", "classop_1_1_w_video_saver.html", "classop_1_1_w_video_saver" ], + [ "COMPILE_TEMPLATE_DATUM", "w_video_saver_8hpp.html#a49bd4106b0cd1cb81980329b06c0d2c8", null ] +]; \ No newline at end of file diff --git a/web/html/doc/w_video_saver_8hpp_source.html b/web/html/doc/w_video_saver_8hpp_source.html new file mode 100644 index 000000000..feeb6d4fb --- /dev/null +++ b/web/html/doc/w_video_saver_8hpp_source.html @@ -0,0 +1,208 @@ + + + + + + + +OpenPose: include/openpose/filestream/wVideoSaver.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    wVideoSaver.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_FILESTREAM_W_VIDEO_SAVER_HPP
    +
    2 #define OPENPOSE_FILESTREAM_W_VIDEO_SAVER_HPP
    +
    3 
    + + + +
    7 
    +
    8 namespace op
    +
    9 {
    +
    10  template<typename TDatums>
    +
    11  class WVideoSaver : public WorkerConsumer<TDatums>
    +
    12  {
    +
    13  public:
    +
    14  explicit WVideoSaver(const std::shared_ptr<VideoSaver>& videoSaver);
    +
    15 
    +
    16  virtual ~WVideoSaver();
    +
    17 
    + +
    19 
    +
    20  void workConsumer(const TDatums& tDatums);
    +
    21 
    +
    22  private:
    +
    23  std::shared_ptr<VideoSaver> spVideoSaver;
    +
    24 
    +
    25  DELETE_COPY(WVideoSaver);
    +
    26  };
    +
    27 }
    +
    28 
    +
    29 
    +
    30 
    +
    31 
    +
    32 
    +
    33 // Implementation
    + +
    35 namespace op
    +
    36 {
    +
    37  template<typename TDatums>
    +
    38  WVideoSaver<TDatums>::WVideoSaver(const std::shared_ptr<VideoSaver>& videoSaver) :
    +
    39  spVideoSaver{videoSaver}
    +
    40  {
    +
    41  }
    +
    42 
    +
    43  template<typename TDatums>
    + +
    45  {
    +
    46  }
    +
    47 
    +
    48  template<typename TDatums>
    + +
    50  {
    +
    51  }
    +
    52 
    +
    53  template<typename TDatums>
    +
    54  void WVideoSaver<TDatums>::workConsumer(const TDatums& tDatums)
    +
    55  {
    +
    56  try
    +
    57  {
    +
    58  if (checkNoNullNorEmpty(tDatums))
    +
    59  {
    +
    60  // Debugging log
    +
    61  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    62  // Profiling speed
    +
    63  const auto profilerKey = Profiler::timerInit(__LINE__, __FUNCTION__, __FILE__);
    +
    64  // T* to T
    +
    65  auto& tDatumsNoPtr = *tDatums;
    +
    66  // Record video(s)
    +
    67  std::vector<Matrix> opOutputDatas(tDatumsNoPtr.size());
    +
    68  for (auto i = 0u ; i < opOutputDatas.size() ; i++)
    +
    69  opOutputDatas[i] = tDatumsNoPtr[i]->cvOutputData;
    +
    70  spVideoSaver->write(opOutputDatas);
    +
    71  // Profiling speed
    +
    72  Profiler::timerEnd(profilerKey);
    +
    73  Profiler::printAveragedTimeMsOnIterationX(profilerKey, __LINE__, __FUNCTION__, __FILE__);
    +
    74  // Debugging log
    +
    75  opLogIfDebug("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    76  }
    +
    77  }
    +
    78  catch (const std::exception& e)
    +
    79  {
    +
    80  this->stop();
    +
    81  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    82  }
    +
    83  }
    +
    84 
    + +
    86 }
    +
    87 
    +
    88 #endif // OPENPOSE_FILESTREAM_W_VIDEO_SAVER_HPP
    +
    static void printAveragedTimeMsOnIterationX(const std::string &key, const int line, const std::string &function, const std::string &file, const unsigned long long x=DEFAULT_X)
    +
    static const std::string timerInit(const int line, const std::string &function, const std::string &file)
    +
    static void timerEnd(const std::string &key)
    + +
    WVideoSaver(const std::shared_ptr< VideoSaver > &videoSaver)
    Definition: wVideoSaver.hpp:38
    +
    void workConsumer(const TDatums &tDatums)
    Definition: wVideoSaver.hpp:54
    +
    virtual ~WVideoSaver()
    Definition: wVideoSaver.hpp:44
    +
    void initializationOnThread()
    Definition: wVideoSaver.hpp:49
    + + + +
    bool checkNoNullNorEmpty(const TPointerContainer &tPointerContainer)
    +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    void opLogIfDebug(const T &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    Definition: errorAndLog.hpp:97
    + + + + +
    +
    + + + + diff --git a/web/html/doc/webcam_reader_8hpp.html b/web/html/doc/webcam_reader_8hpp.html new file mode 100644 index 000000000..4a6946d52 --- /dev/null +++ b/web/html/doc/webcam_reader_8hpp.html @@ -0,0 +1,121 @@ + + + + + + + +OpenPose: include/openpose/producer/webcamReader.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    webcamReader.hpp File Reference
    +
    +
#include <atomic>
#include <mutex>
#include <openpose/core/common.hpp>
#include <openpose/producer/videoCaptureReader.hpp>

Go to the source code of this file.

Classes
    class op::WebcamReader

Namespaces
    op
    +
    +
    + + + + diff --git a/web/html/doc/webcam_reader_8hpp_source.html b/web/html/doc/webcam_reader_8hpp_source.html new file mode 100644 index 000000000..5373419ec --- /dev/null +++ b/web/html/doc/webcam_reader_8hpp_source.html @@ -0,0 +1,170 @@ + + + + + + + +OpenPose: include/openpose/producer/webcamReader.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    webcamReader.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_PRODUCER_WEBCAM_READER_HPP
    +
    2 #define OPENPOSE_PRODUCER_WEBCAM_READER_HPP
    +
    3 
    +
    4 #include <atomic>
    +
    5 #include <mutex>
    + + +
    8 
    +
    9 namespace op
    +
    10 {
    + +
    16  {
    +
    17  public:
    +
    27  explicit WebcamReader(const int webcamIndex = 0, const Point<int>& webcamResolution = Point<int>{},
    +
    28  const bool throwExceptionIfNoOpened = true, const std::string& cameraParameterPath = "",
    +
    29  const bool undistortImage = false);
    +
    30 
    +
    31  virtual ~WebcamReader();
    +
    32 
    +
    33  std::string getNextFrameName();
    +
    34 
    +
    35  bool isOpened() const;
    +
    36 
    +
    37  double get(const int capProperty);
    +
    38 
    +
    39  void set(const int capProperty, const double value);
    +
    40 
    +
    41  private:
    +
    42  const int mIndex;
    +
    43  const bool mWebcamStarted;
    +
    44  long long mFrameNameCounter;
    +
    45  bool mThreadOpened;
    +
    46  Matrix mBuffer;
    +
    47  std::mutex mBufferMutex;
    +
    48  std::atomic<bool> mCloseThread;
    +
    49  std::thread mThread;
    +
    50  // Detect camera unplugged
    +
    51  double mLastNorm;
    +
    52  std::atomic<int> mDisconnectedCounter;
    +
    53  Point<int> mResolution;
    +
    54 
    +
    55  Matrix getRawFrame();
    +
    56 
    +
    57  std::vector<Matrix> getRawFrames();
    +
    58 
    +
    59  void bufferingThread();
    +
    60 
    +
    61  bool reset();
    +
    62 
    + +
    64  };
    +
    65 }
    +
    66 
    +
    67 #endif // OPENPOSE_PRODUCER_WEBCAM_READER_HPP
    + + + +
    WebcamReader(const int webcamIndex=0, const Point< int > &webcamResolution=Point< int >{}, const bool throwExceptionIfNoOpened=true, const std::string &cameraParameterPath="", const bool undistortImage=false)
    +
    double get(const int capProperty)
    +
    std::string getNextFrameName()
    +
    bool isOpened() const
    +
    void set(const int capProperty, const double value)
    +
    virtual ~WebcamReader()
    + +
    #define OP_API
    Definition: macros.hpp:18
    +
    #define DELETE_COPY(className)
    Definition: macros.hpp:32
    + + + +
    +
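webcamReader.hpp above is a plain producer class rather than a worker: the constructor takes the device index, the requested resolution and, optionally, camera parameters for undistortion. The following is a hedged usage sketch with exactly that constructor; the getFrame() call is assumed to come from the Producer/VideoCaptureReader base classes and is not declared in this header.

// Hedged usage sketch; getFrame() is an assumed base-class API, not part of webcamReader.hpp.
#include <iostream>
#include <openpose/headers.hpp>

int main()
{
    // Webcam 0 at 640x480; throw if the device cannot be opened; no undistortion.
    op::WebcamReader webcamReader{0, op::Point<int>{640, 480}, true};
    if (!webcamReader.isOpened())
        return 1;
    const op::Matrix frame = webcamReader.getFrame();   // assumed Producer base-class call
    std::cout << "Grabbed frame " << webcamReader.getNextFrameName()
              << " (" << frame.cols() << "x" << frame.rows() << ")" << std::endl;
    return 0;
}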
    + + + + diff --git a/web/html/doc/worker_8hpp.html b/web/html/doc/worker_8hpp.html new file mode 100644 index 000000000..499abc6e9 --- /dev/null +++ b/web/html/doc/worker_8hpp.html @@ -0,0 +1,124 @@ + + + + + + + +OpenPose: include/openpose/thread/worker.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    worker.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::Worker< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (Worker)
    +
    +
    + + + + diff --git a/web/html/doc/worker_8hpp.js b/web/html/doc/worker_8hpp.js new file mode 100644 index 000000000..3fe14c0a3 --- /dev/null +++ b/web/html/doc/worker_8hpp.js @@ -0,0 +1,5 @@ +var worker_8hpp = +[ + [ "Worker", "classop_1_1_worker.html", "classop_1_1_worker" ], + [ "COMPILE_TEMPLATE_DATUM", "worker_8hpp.html#a5642545fda1c3bbaf60810cf0e2d2c1d", null ] +]; \ No newline at end of file diff --git a/web/html/doc/worker_8hpp_source.html b/web/html/doc/worker_8hpp_source.html new file mode 100644 index 000000000..43cacd814 --- /dev/null +++ b/web/html/doc/worker_8hpp_source.html @@ -0,0 +1,216 @@ + + + + + + + +OpenPose: include/openpose/thread/worker.hpp Source File + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    +
    +
    worker.hpp
    +
    +
    +Go to the documentation of this file.
    1 #ifndef OPENPOSE_THREAD_WORKER_HPP
    +
    2 #define OPENPOSE_THREAD_WORKER_HPP
    +
    3 
    + +
    5 
    +
    6 namespace op
    +
    7 {
    +
    8  template<typename TDatums>
    +
    9  class Worker
    +
    10  {
    +
    11  public:
    +
    12  Worker();
    +
    13 
    +
    14  virtual ~Worker();
    +
    15 
    + +
    17 
    +
    18  bool checkAndWork(TDatums& tDatums);
    +
    19 
    +
    20  inline bool isRunning() const
    +
    21  {
    +
    22  return mIsRunning;
    +
    23  }
    +
    24 
    +
    25  inline void stop()
    +
    26  {
    +
    27  mIsRunning = false;
    +
    28  }
    +
    29 
    +
    30  // Virtual in case some function needs special stopping (e.g., buffers might not stop immediately and need a
    +
    31  // few iterations)
    +
    32  inline virtual void tryStop()
    +
    33  {
    +
    34  stop();
    +
    35  }
    +
    36 
    +
    37  protected:
    +
    38  virtual void initializationOnThread() = 0;
    +
    39 
    +
    40  virtual void work(TDatums& tDatums) = 0;
    +
    41 
    +
    42  private:
    +
    43  bool mIsRunning;
    +
    44 
    +
    45  DELETE_COPY(Worker);
    +
    46  };
    +
    47 }
    +
    48 
    +
    49 
    +
    50 
    +
    51 
    +
    52 
    +
    53 // Implementation
    +
    54 namespace op
    +
    55 {
    +
    56  template<typename TDatums>
    + +
    58  mIsRunning{true}
    +
    59  {
    +
    60  }
    +
    61 
    +
    62  template<typename TDatums>
    + +
    64  {
    +
    65  }
    +
    66 
    +
    67  template<typename TDatums>
    + +
    69  {
    +
    70  try
    +
    71  {
    +
    72  this->initializationOnThread();
    +
    73  }
    +
    74  catch (const std::exception& e)
    +
    75  {
    +
    76  this->stop();
    +
    77  errorWorker(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    78  }
    +
    79  }
    +
    80 
    +
    81  template<typename TDatums>
    +
    82  bool Worker<TDatums>::checkAndWork(TDatums& tDatums)
    +
    83  {
    +
    84  try
    +
    85  {
    +
    86  if (mIsRunning)
    +
    87  work(tDatums);
    +
    88  return mIsRunning;
    +
    89  }
    +
    90  catch (const std::exception& e)
    +
    91  {
    +
    92  this->stop();
    +
    93  errorWorker(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    94  return false;
    +
    95  }
    +
    96  }
    +
    97 
    + +
    99 }
    +
    100 
    +
    101 #endif // OPENPOSE_THREAD_WORKER_HPP
    + + +
    bool isRunning() const
    Definition: worker.hpp:20
    +
    void initializationOnThreadNoException()
    Definition: worker.hpp:68
    +
    bool checkAndWork(TDatums &tDatums)
    Definition: worker.hpp:82
    +
    virtual ~Worker()
    Definition: worker.hpp:63
    +
    virtual void work(TDatums &tDatums)=0
    +
    virtual void initializationOnThread()=0
    +
    virtual void tryStop()
    Definition: worker.hpp:32
    +
    void stop()
    Definition: worker.hpp:25
    + + +
    COMPILE_TEMPLATE_DATUM(WPoseTriangulation)
    +
    OP_API void errorWorker(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    +
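worker.hpp above is the base interface of every OpenPose thread worker: subclasses implement initializationOnThread() and work(TDatums&), while checkAndWork(), stop() and tryStop() are driven by the threading machinery. A minimal sketch of a custom worker following the same error-handling pattern as the built-in workers shown earlier; WExampleLogger and its logging body are illustrative only, not part of OpenPose.

// Minimal sketch of a user-defined worker built on the Worker<TDatums> interface above.
#include <iostream>
#include <openpose/headers.hpp>

template<typename TDatums>
class WExampleLogger : public op::Worker<TDatums>
{
public:
    void initializationOnThread() override {}   // per-thread setup hook; nothing needed here

    void work(TDatums& tDatums) override
    {
        try
        {
            if (op::checkNoNullNorEmpty(tDatums))
                std::cout << "Processing " << tDatums->size() << " datum(s)" << std::endl;
        }
        catch (const std::exception& e)
        {
            this->stop();                        // same recovery pattern as the built-in workers
            tDatums = nullptr;
            op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
        }
    }
};

Such a worker would normally be handed to the OpenPose Wrapper/ThreadManager rather than called directly; that wiring lives outside these headers.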
    + + + + diff --git a/web/html/doc/worker_consumer_8hpp.html b/web/html/doc/worker_consumer_8hpp.html new file mode 100644 index 000000000..25e795117 --- /dev/null +++ b/web/html/doc/worker_consumer_8hpp.html @@ -0,0 +1,125 @@ + + + + + + + +OpenPose: include/openpose/thread/workerConsumer.hpp File Reference + + + + + + + + + + + + + +
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    workerConsumer.hpp File Reference
    +
    +
Go to the source code of this file.

Classes
    class op::WorkerConsumer< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WorkerConsumer)
    +
    +
diff --git a/web/html/doc/worker_consumer_8hpp.js b/web/html/doc/worker_consumer_8hpp.js
new file mode 100644
index 000000000..d1c72bb7c
--- /dev/null
+++ b/web/html/doc/worker_consumer_8hpp.js
@@ -0,0 +1,5 @@
var worker_consumer_8hpp =
[
    [ "WorkerConsumer", "classop_1_1_worker_consumer.html", "classop_1_1_worker_consumer" ],
    [ "COMPILE_TEMPLATE_DATUM", "worker_consumer_8hpp.html#a01aa5c6e24026536367cf47a64e9bba5", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/worker_consumer_8hpp_source.html b/web/html/doc/worker_consumer_8hpp_source.html
new file mode 100644
index 000000000..a5a9a699a
--- /dev/null
+++ b/web/html/doc/worker_consumer_8hpp_source.html
@@ -0,0 +1,162 @@
OpenPose: include/openpose/thread/workerConsumer.hpp Source File
    workerConsumer.hpp
Go to the documentation of this file.
  1 #ifndef OPENPOSE_THREAD_WORKER_CONSUMER_HPP
  2 #define OPENPOSE_THREAD_WORKER_CONSUMER_HPP
  3 
  4 #include <openpose/core/common.hpp>
  5 #include <openpose/thread/worker.hpp>
  6 
  7 namespace op
  8 {
  9     template<typename TDatums>
 10     class WorkerConsumer : public Worker<TDatums>
 11     {
 12     public:
 13         virtual ~WorkerConsumer();
 14 
 15         void work(TDatums& tDatums);
 16 
 17     protected:
 18         virtual void workConsumer(const TDatums& tDatums) = 0;
 19     };
 20 }
 21 
 22 
 23 
 24 
 25 
 26 // Implementation
 27 namespace op
 28 {
 29     template<typename TDatums>
 30     WorkerConsumer<TDatums>::~WorkerConsumer()
 31     {
 32     }
 33 
 34     template<typename TDatums>
 35     void WorkerConsumer<TDatums>::work(TDatums& tDatums)
 36     {
 37         try
 38         {
 39             workConsumer(tDatums);
 40         }
 41         catch (const std::exception& e)
 42         {
 43             this->stop();
 44             errorWorker(e.what(), __LINE__, __FUNCTION__, __FILE__);
 45         }
 46     }
 47 
 48     COMPILE_TEMPLATE_DATUM(WorkerConsumer);
 49 }
 50 
 51 #endif // OPENPOSE_THREAD_WORKER_CONSUMER_HPP
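As the listing shows, WorkerConsumer sits at the end of a pipeline: work() forwards each batch to the pure virtual workConsumer() and stops the worker if that throws. Below is a minimal illustrative subclass (hypothetical class name, assuming the OpenPose headers and the default op::Datum type); it is not part of the generated documentation.

// Illustrative sketch of a user-defined output (consumer) worker (not from the OpenPose sources).
#include <memory>
#include <string>
#include <vector>
#include <openpose/headers.hpp>

using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

class WExampleOutput : public op::WorkerConsumer<TDatumsSP>
{
public:
    void initializationOnThread() override
    {
    }

protected:
    // Only workConsumer() needs to be implemented; the base class handles errors and stopping.
    void workConsumer(const TDatumsSP& tDatums) override
    {
        if (tDatums != nullptr && !tDatums->empty())
            op::opLog("Consumed frame with "
                          + std::to_string(tDatums->at(0)->poseKeypoints.getSize(0)) + " people",
                      op::Priority::High);
    }
};

Such a worker would typically be registered with WrapperT::setWorker(WorkerType::Output, ...), replacing or complementing the default GUI and file writers.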
diff --git a/web/html/doc/worker_producer_8hpp.html b/web/html/doc/worker_producer_8hpp.html
new file mode 100644
index 000000000..6b3364ab8
--- /dev/null
+++ b/web/html/doc/worker_producer_8hpp.html
@@ -0,0 +1,125 @@
OpenPose: include/openpose/thread/workerProducer.hpp File Reference
workerProducer.hpp File Reference

    Go to the source code of this file.

Classes
    class op::WorkerProducer< TDatums >

Namespaces
    op

Functions
    op::COMPILE_TEMPLATE_DATUM (WorkerProducer)
diff --git a/web/html/doc/worker_producer_8hpp.js b/web/html/doc/worker_producer_8hpp.js
new file mode 100644
index 000000000..6a81ff89f
--- /dev/null
+++ b/web/html/doc/worker_producer_8hpp.js
@@ -0,0 +1,5 @@
var worker_producer_8hpp =
[
    [ "WorkerProducer", "classop_1_1_worker_producer.html", "classop_1_1_worker_producer" ],
    [ "COMPILE_TEMPLATE_DATUM", "worker_producer_8hpp.html#a5660f0e72781ce6d7db9eb78b582e5c6", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/worker_producer_8hpp_source.html b/web/html/doc/worker_producer_8hpp_source.html
new file mode 100644
index 000000000..bb0e6fba9
--- /dev/null
+++ b/web/html/doc/worker_producer_8hpp_source.html
@@ -0,0 +1,162 @@
OpenPose: include/openpose/thread/workerProducer.hpp Source File
    workerProducer.hpp
Go to the documentation of this file.
  1 #ifndef OPENPOSE_THREAD_WORKER_PRODUCER_HPP
  2 #define OPENPOSE_THREAD_WORKER_PRODUCER_HPP
  3 
  4 #include <openpose/core/common.hpp>
  5 #include <openpose/thread/worker.hpp>
  6 
  7 namespace op
  8 {
  9     template<typename TDatums>
 10     class WorkerProducer : public Worker<TDatums>
 11     {
 12     public:
 13         virtual ~WorkerProducer();
 14 
 15         void work(TDatums& tDatums);
 16 
 17     protected:
 18         virtual TDatums workProducer() = 0;
 19     };
 20 }
 21 
 22 
 23 
 24 
 25 
 26 // Implementation
 27 namespace op
 28 {
 29     template<typename TDatums>
 30     WorkerProducer<TDatums>::~WorkerProducer()
 31     {
 32     }
 33 
 34     template<typename TDatums>
 35     void WorkerProducer<TDatums>::work(TDatums& tDatums)
 36     {
 37         try
 38         {
 39             tDatums = std::move(workProducer());
 40         }
 41         catch (const std::exception& e)
 42         {
 43             this->stop();
 44             errorWorker(e.what(), __LINE__, __FUNCTION__, __FILE__);
 45         }
 46     }
 47 
 48     COMPILE_TEMPLATE_DATUM(WorkerProducer);
 49 }
 50 
 51 #endif // OPENPOSE_THREAD_WORKER_PRODUCER_HPP
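WorkerProducer is the mirror image of WorkerConsumer: work() moves whatever workProducer() returns into the pipeline, so a subclass only has to build and return datums. The sketch below is illustrative only (hypothetical class name and file path); it assumes OpenPose and OpenCV are installed, and the OP_CV2OPCONSTMAT cv::Mat-to-op::Matrix conversion macro is an assumption about the installed OpenPose version.

// Illustrative sketch of a user-defined input (producer) worker (not from the OpenPose sources).
#include <memory>
#include <vector>
#include <opencv2/opencv.hpp>
#include <openpose/headers.hpp>

using TDatumsSP = std::shared_ptr<std::vector<std::shared_ptr<op::Datum>>>;

class WExampleInput : public op::WorkerProducer<TDatumsSP>
{
public:
    void initializationOnThread() override
    {
    }

protected:
    TDatumsSP workProducer() override
    {
        // One-shot example: produce a single frame, then stop this worker.
        if (mFrameProduced)
        {
            this->stop();
            return nullptr;
        }
        const cv::Mat cvImage = cv::imread("examples/media/COCO_val2014_000000000192.jpg");
        if (cvImage.empty())
        {
            this->stop();
            return nullptr;
        }
        mFrameProduced = true;
        // Wrap the image into the datum structure the rest of the pipeline expects.
        auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<op::Datum>>>();
        datumsPtr->emplace_back(std::make_shared<op::Datum>());
        datumsPtr->at(0)->cvInputData = OP_CV2OPCONSTMAT(cvImage);
        return datumsPtr;
    }

private:
    bool mFrameProduced = false;
};

A producer like this replaces the default op::Producer-based input when registered with WrapperT::setWorker(WorkerType::Input, ...).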
diff --git a/web/html/doc/wrapper_2enum_classes_8hpp.html b/web/html/doc/wrapper_2enum_classes_8hpp.html
new file mode 100644
index 000000000..672fcb74c
--- /dev/null
+++ b/web/html/doc/wrapper_2enum_classes_8hpp.html
@@ -0,0 +1,141 @@
OpenPose: include/openpose/wrapper/enumClasses.hpp File Reference
enumClasses.hpp File Reference

    Go to the source code of this file.

Namespaces
    op

Enumerations
    enum class op::PoseMode : unsigned char { op::Disabled = 0, op::Enabled, op::NoNetwork, op::Size }
    enum class op::Detector : unsigned char { op::Body = 0, op::OpenCV, op::Provided, op::BodyWithTracking, op::Size }
    enum class op::WorkerType : unsigned char { op::Input = 0, op::PreProcessing, op::PostProcessing, op::Output, op::Size }
    + + + + diff --git a/web/html/doc/wrapper_2enum_classes_8hpp.js b/web/html/doc/wrapper_2enum_classes_8hpp.js new file mode 100644 index 000000000..030f51749 --- /dev/null +++ b/web/html/doc/wrapper_2enum_classes_8hpp.js @@ -0,0 +1,23 @@ +var wrapper_2enum_classes_8hpp = +[ + [ "Detector", "wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fb", [ + [ "Body", "wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fbaac101b32dda4448cf13a93fe283dddd8", null ], + [ "OpenCV", "wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba5bd4c87976f48e6a53919d53e14025e9", null ], + [ "Provided", "wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba900b06e1ae224594f075e0c882c73532", null ], + [ "BodyWithTracking", "wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba65c691a85367d21881220b7a3d923747", null ], + [ "Size", "wrapper_2enum_classes_8hpp.html#a1070db47220e17cf37df40411350f6fba6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "PoseMode", "wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09", [ + [ "Disabled", "wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09ab9f5c797ebbf55adccdd8539a65a0241", null ], + [ "Enabled", "wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09a00d23a76e43b46dae9ec7aa9dcbebb32", null ], + [ "NoNetwork", "wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09aa6e20e86de146a7b524d32c9b1fea7f4", null ], + [ "Size", "wrapper_2enum_classes_8hpp.html#a53e7c7ac399de4698e1e609ec0474a09a6f6cb72d544962fa333e2e34ce64f719", null ] + ] ], + [ "WorkerType", "wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18", [ + [ "Input", "wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a324118a6721dd6b8a9b9f4e327df2bf5", null ], + [ "PreProcessing", "wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a05318bd0215d16e009798570b53755d2", null ], + [ "PostProcessing", "wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18aa52d6088cbae537944827c8f8c69c570", null ], + [ "Output", "wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a29c2c02a361c9d7028472e5d92cd4a54", null ], + [ "Size", "wrapper_2enum_classes_8hpp.html#a970a2a768a2ace81605b1558c9fdec18a6f6cb72d544962fa333e2e34ce64f719", null ] + ] ] +]; \ No newline at end of file diff --git a/web/html/doc/wrapper_2enum_classes_8hpp_source.html b/web/html/doc/wrapper_2enum_classes_8hpp_source.html new file mode 100644 index 000000000..ad97f4d10 --- /dev/null +++ b/web/html/doc/wrapper_2enum_classes_8hpp_source.html @@ -0,0 +1,152 @@ + + + + + + + +OpenPose: include/openpose/wrapper/enumClasses.hpp Source File + + + + + + + + + + + + + +
    enumClasses.hpp
Go to the documentation of this file.
  1 #ifndef OPENPOSE_WRAPPER_ENUM_CLASSES_HPP
  2 #define OPENPOSE_WRAPPER_ENUM_CLASSES_HPP
  3 
  4 namespace op
  5 {
  6     enum class PoseMode : unsigned char
  7     {
  8         Disabled = 0,
  9         Enabled,
 10         NoNetwork,
 11         Size,
 12     };
 13 
 14     enum class Detector : unsigned char
 15     {
 16         Body = 0,
 17         OpenCV,
 18         Provided,
 19         BodyWithTracking,
 20         Size,
 21     };
 22 
 23     enum class WorkerType : unsigned char
 24     {
 25         Input = 0,
 26         PreProcessing,
 27         PostProcessing,
 28         Output,
 29         Size,
 30     };
 31 }
 32 
 33 #endif // OPENPOSE_WRAPPER_ENUM_CLASSES_HPP
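Each of these scoped enums ends with a Size enumerator that is used as a count rather than as a real value; wrapper.hpp (later in this diff) relies on it to size its per-WorkerType arrays, e.g. std::array<std::vector<TWorker>, int(WorkerType::Size)>. A small self-contained sketch of that idiom follows (illustrative code, not from the OpenPose sources):

// "Size as count" idiom used by these enum classes (illustrative, self-contained).
#include <array>
#include <iostream>
#include <string>
#include <vector>

enum class WorkerType : unsigned char { Input = 0, PreProcessing, PostProcessing, Output, Size };

int main()
{
    // One slot per worker type, mirroring std::array<std::vector<TWorker>, int(WorkerType::Size)>
    // in wrapper.hpp.
    std::array<std::vector<std::string>, int(WorkerType::Size)> userWorkers;
    userWorkers[int(WorkerType::Output)].emplace_back("WExampleOutput");

    for (int i = 0; i < int(WorkerType::Size); ++i)
        std::cout << "Worker type " << i << ": " << userWorkers[i].size() << " user worker(s)\n";
    return 0;
}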
diff --git a/web/html/doc/wrapper_2headers_8hpp.html b/web/html/doc/wrapper_2headers_8hpp.html
new file mode 100644
index 000000000..e00d377aa
--- /dev/null
+++ b/web/html/doc/wrapper_2headers_8hpp.html
@@ -0,0 +1,112 @@
OpenPose: include/openpose/wrapper/headers.hpp File Reference
diff --git a/web/html/doc/wrapper_2headers_8hpp_source.html b/web/html/doc/wrapper_2headers_8hpp_source.html
new file mode 100644
index 000000000..022d14266
--- /dev/null
+++ b/web/html/doc/wrapper_2headers_8hpp_source.html
@@ -0,0 +1,125 @@
OpenPose: include/openpose/wrapper/headers.hpp Source File
    headers.hpp
diff --git a/web/html/doc/wrapper_8hpp.html b/web/html/doc/wrapper_8hpp.html
new file mode 100644
index 000000000..ef612a8fd
--- /dev/null
+++ b/web/html/doc/wrapper_8hpp.html
@@ -0,0 +1,134 @@
OpenPose: include/openpose/wrapper/wrapper.hpp File Reference
diff --git a/web/html/doc/wrapper_8hpp.js b/web/html/doc/wrapper_8hpp.js
new file mode 100644
index 000000000..cc796973b
--- /dev/null
+++ b/web/html/doc/wrapper_8hpp.js
@@ -0,0 +1,5 @@
var wrapper_8hpp =
[
    [ "WrapperT", "classop_1_1_wrapper_t.html", "classop_1_1_wrapper_t" ],
    [ "Wrapper", "wrapper_8hpp.html#a790dea3c007bed742fbc8cdd5757d026", null ]
];
\ No newline at end of file
diff --git a/web/html/doc/wrapper_8hpp_source.html b/web/html/doc/wrapper_8hpp_source.html
new file mode 100644
index 000000000..30affdc92
--- /dev/null
+++ b/web/html/doc/wrapper_8hpp_source.html
@@ -0,0 +1,717 @@
OpenPose: include/openpose/wrapper/wrapper.hpp Source File
    wrapper.hpp
Go to the documentation of this file.
    1 #ifndef OPENPOSE_WRAPPER_WRAPPER_HPP
    +
    2 #define OPENPOSE_WRAPPER_WRAPPER_HPP
    +
    3 
    + + + + + + + + + + +
    14 
    +
    15 namespace op
    +
    16 {
    +
    32  template<typename TDatum = BASE_DATUM,
    +
    33  typename TDatums = std::vector<std::shared_ptr<TDatum>>,
    +
    34  typename TDatumsSP = std::shared_ptr<TDatums>,
    +
    35  typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
    +
    36  class WrapperT
    +
    37  {
    +
    38  public:
    +
    46  explicit WrapperT(const ThreadManagerMode threadManagerMode = ThreadManagerMode::Synchronous);
    +
    47 
    +
    52  virtual ~WrapperT();
    +
    53 
    +
    59  void disableMultiThreading();
    +
    60 
    +
    68  void setWorker(const WorkerType workerType, const TWorker& worker, const bool workerOnNewThread = true);
    +
    69 
    +
    73  void configure(const WrapperStructPose& wrapperStructPose);
    +
    74 
    +
    78  void configure(const WrapperStructFace& wrapperStructFace);
    +
    79 
    +
    83  void configure(const WrapperStructHand& wrapperStructHand);
    +
    84 
    +
    88  void configure(const WrapperStructExtra& wrapperStructExtra);
    +
    89 
    +
    93  void configure(const WrapperStructInput& wrapperStructInput);
    +
    94 
    +
    98  void configure(const WrapperStructOutput& wrapperStructOutput);
    +
    99 
    +
    103  void configure(const WrapperStructGui& wrapperStructGui);
    +
    104 
    +
    110  void exec();
    +
    111 
    +
    121  void start();
    +
    122 
    +
    127  void stop();
    +
    128 
    +
    134  bool isRunning() const;
    +
    135 
    +
    145  void setDefaultMaxSizeQueues(const long long defaultMaxSizeQueues = -1);
    +
    146 
    +
    154  bool tryEmplace(TDatumsSP& tDatums);
    +
    155 
    +
    164  bool waitAndEmplace(TDatumsSP& tDatums);
    +
    165 
    +
    171  bool waitAndEmplace(Matrix& matrix);
    +
    172 
    +
    179  bool tryPush(const TDatumsSP& tDatums);
    +
    180 
    +
    187  bool waitAndPush(const TDatumsSP& tDatums);
    +
    188 
    +
    194  bool waitAndPush(const Matrix& matrix);
    +
    195 
    +
    203  bool tryPop(TDatumsSP& tDatums);
    +
    204 
    +
    213  bool waitAndPop(TDatumsSP& tDatums);
    +
    214 
    +
    220  bool emplaceAndPop(TDatumsSP& tDatums);
    +
    221 
    +
    227  TDatumsSP emplaceAndPop(const Matrix& matrix);
    +
    228 
    +
    229  private:
    +
    230  const ThreadManagerMode mThreadManagerMode;
    +
    231  ThreadManager<TDatumsSP> mThreadManager;
    +
    232  bool mMultiThreadEnabled;
    +
    233  // Configuration
    +
    234  WrapperStructPose mWrapperStructPose;
    +
    235  WrapperStructFace mWrapperStructFace;
    +
    236  WrapperStructHand mWrapperStructHand;
    +
    237  WrapperStructExtra mWrapperStructExtra;
    +
    238  WrapperStructInput mWrapperStructInput;
    +
    239  WrapperStructOutput mWrapperStructOutput;
    +
    240  WrapperStructGui mWrapperStructGui;
    +
    241  // User configurable workers
    +
    242  std::array<bool, int(WorkerType::Size)> mUserWsOnNewThread;
    +
    243  std::array<std::vector<TWorker>, int(WorkerType::Size)> mUserWs;
    +
    244 
    +
    245  DELETE_COPY(WrapperT);
    +
    246  };
    +
    247 
    +
    248  // Type
    + +
    250 }
    +
    251 
    +
    252 
    +
    253 
    +
    254 
    +
    255 
    +
    256 // Implementation
    + +
    258 namespace op
    +
    259 {
    +
    260  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    262  mThreadManagerMode{threadManagerMode},
    +
    263  mThreadManager{threadManagerMode},
    +
    264  mMultiThreadEnabled{true}
    +
    265  {
    +
    266  }
    +
    267 
    +
    268  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    270  {
    +
    271  try
    +
    272  {
    +
    273  stop();
    +
    274  // Reset mThreadManager
    +
    275  mThreadManager.reset();
    +
    276  // Reset user workers
    +
    277  for (auto& userW : mUserWs)
    +
    278  userW.clear();
    +
    279  }
    +
    280  catch (const std::exception& e)
    +
    281  {
    +
    282  errorDestructor(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    283  }
    +
    284  }
    +
    285 
    +
    286  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    288  {
    +
    289  try
    +
    290  {
    +
    291  mMultiThreadEnabled = false;
    +
    292  }
    +
    293  catch (const std::exception& e)
    +
    294  {
    +
    295  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    296  }
    +
    297  }
    +
    298 
    +
    299  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    301  const WorkerType workerType, const TWorker& worker, const bool workerOnNewThread)
    +
    302  {
    +
    303  try
    +
    304  {
    +
    305  // Sanity check
    +
    306  if (worker == nullptr)
    +
    307  error("Your worker is a nullptr.", __LINE__, __FILE__, __FUNCTION__);
    +
    308  // Add worker
    +
    309  mUserWs[int(workerType)].clear();
    +
    310  mUserWs[int(workerType)].emplace_back(worker);
    +
    311  mUserWsOnNewThread[int(workerType)] = workerOnNewThread;
    +
    312  }
    +
    313  catch (const std::exception& e)
    +
    314  {
    +
    315  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    316  }
    +
    317  }
    +
    318 
    +
    319  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    321  {
    +
    322  try
    +
    323  {
    +
    324  mWrapperStructPose = wrapperStructPose;
    +
    325  }
    +
    326  catch (const std::exception& e)
    +
    327  {
    +
    328  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    329  }
    +
    330  }
    +
    331 
    +
    332  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    334  {
    +
    335  try
    +
    336  {
    +
    337  mWrapperStructFace = wrapperStructFace;
    +
    338  }
    +
    339  catch (const std::exception& e)
    +
    340  {
    +
    341  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    342  }
    +
    343  }
    +
    344 
    +
    345  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    347  {
    +
    348  try
    +
    349  {
    +
    350  mWrapperStructHand = wrapperStructHand;
    +
    351  }
    +
    352  catch (const std::exception& e)
    +
    353  {
    +
    354  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    355  }
    +
    356  }
    +
    357 
    +
    358  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    360  {
    +
    361  try
    +
    362  {
    +
    363  mWrapperStructExtra = wrapperStructExtra;
    +
    364  }
    +
    365  catch (const std::exception& e)
    +
    366  {
    +
    367  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    368  }
    +
    369  }
    +
    370 
    +
    371  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    373  {
    +
    374  try
    +
    375  {
    +
    376  mWrapperStructInput = wrapperStructInput;
    +
    377  }
    +
    378  catch (const std::exception& e)
    +
    379  {
    +
    380  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    381  }
    +
    382  }
    +
    383 
    +
    384  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    386  {
    +
    387  try
    +
    388  {
    +
    389  mWrapperStructOutput = wrapperStructOutput;
    +
    390  }
    +
    391  catch (const std::exception& e)
    +
    392  {
    +
    393  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    394  }
    +
    395  }
    +
    396 
    +
    397  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    399  {
    +
    400  try
    +
    401  {
    +
    402  mWrapperStructGui = wrapperStructGui;
    +
    403  }
    +
    404  catch (const std::exception& e)
    +
    405  {
    +
    406  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    407  }
    +
    408  }
    +
    409 
    +
    410  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    412  {
    +
    413  try
    +
    414  {
    +
    415  configureThreadManager<TDatum, TDatums, TDatumsSP, TWorker>(
    +
    416  mThreadManager, mMultiThreadEnabled, mThreadManagerMode, mWrapperStructPose, mWrapperStructFace,
    +
    417  mWrapperStructHand, mWrapperStructExtra, mWrapperStructInput, mWrapperStructOutput, mWrapperStructGui,
    +
    418  mUserWs, mUserWsOnNewThread);
    +
    419  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    420  mThreadManager.exec();
    +
    421  }
    +
    422  catch (const std::exception& e)
    +
    423  {
    +
    424  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    425  }
    +
    426  }
    +
    427 
    +
    428  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    430  {
    +
    431  try
    +
    432  {
    +
    433  configureThreadManager<TDatum, TDatums, TDatumsSP, TWorker>(
    +
    434  mThreadManager, mMultiThreadEnabled, mThreadManagerMode, mWrapperStructPose, mWrapperStructFace,
    +
    435  mWrapperStructHand, mWrapperStructExtra, mWrapperStructInput, mWrapperStructOutput, mWrapperStructGui,
    +
    436  mUserWs, mUserWsOnNewThread);
    +
    437  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    438  mThreadManager.start();
    +
    439  }
    +
    440  catch (const std::exception& e)
    +
    441  {
    +
    442  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    443  }
    +
    444  }
    +
    445 
    +
    446  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    448  {
    +
    449  try
    +
    450  {
    +
    451  mThreadManager.stop();
    +
    452  }
    +
    453  catch (const std::exception& e)
    +
    454  {
    +
    455  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    456  }
    +
    457  }
    +
    458 
    +
    459  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    461  {
    +
    462  try
    +
    463  {
    +
    464  return mThreadManager.isRunning();
    +
    465  }
    +
    466  catch (const std::exception& e)
    +
    467  {
    +
    468  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    469  return false;
    +
    470  }
    +
    471  }
    +
    472 
    +
    473  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    475  {
    +
    476  try
    +
    477  {
    +
    478  mThreadManager.setDefaultMaxSizeQueues(defaultMaxSizeQueues);
    +
    479  }
    +
    480  catch (const std::exception& e)
    +
    481  {
    +
    482  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    483  }
    +
    484  }
    +
    485 
    +
    486  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    488  {
    +
    489  try
    +
    490  {
    +
    491  if (!mUserWs[int(WorkerType::Input)].empty())
    +
    492  error("Emplace cannot be called if an input worker was already selected.",
    +
    493  __LINE__, __FUNCTION__, __FILE__);
    +
    494  // tryEmplace for 1 camera
    +
    495  if (tDatums->size() < 2)
    +
    496  {
    +
    497  return mThreadManager.tryEmplace(tDatums);
    +
    498  }
    +
    499  // tryEmplace for multiview
    +
    500  else
    +
    501  {
    +
    502  bool successfulEmplace = true;
    +
    503  for (auto datumIndex = 0u; datumIndex < tDatums->size(); ++datumIndex)
    +
    504  {
    +
    505  auto tDatumsSingle = std::make_shared<TDatums>(TDatums({ tDatums->at(datumIndex) }));
    +
    506  if (!tryEmplace(tDatumsSingle))
    +
    507  {
    +
    508  successfulEmplace = false;
    +
    509  break;
    +
    510  }
    +
    511  }
    +
    512  return successfulEmplace;
    +
    513  }
    +
    514  }
    +
    515  catch (const std::exception& e)
    +
    516  {
    +
    517  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    518  return false;
    +
    519  }
    +
    520  }
    +
    521 
    +
    522  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    524  {
    +
    525  try
    +
    526  {
    +
    527  if (!mUserWs[int(WorkerType::Input)].empty())
    +
    528  error("Emplace cannot be called if an input worker was already selected.",
    +
    529  __LINE__, __FUNCTION__, __FILE__);
    +
    530  // waitAndEmplace for 1 camera
    +
    531  if (tDatums->size() < 2)
    +
    532  {
    +
    533  return mThreadManager.waitAndEmplace(tDatums);
    +
    534  }
    +
    535  // waitAndEmplace for multiview
    +
    536  else
    +
    537  {
    +
    538  bool successfulEmplace = true;
    +
    539  for (auto datumIndex = 0u ; datumIndex < tDatums->size() ; ++datumIndex)
    +
    540  {
    +
    541  auto tDatumsSingle = std::make_shared<TDatums>(TDatums({tDatums->at(datumIndex)}));
    +
    542  if (!waitAndEmplace(tDatumsSingle))
    +
    543  {
    +
    544  successfulEmplace = false;
    +
    545  opLog("Waiting to emplace for multi-camera failed.",
    +
    546  Priority::High, __LINE__, __FUNCTION__, __FILE__);
    +
    547  break;
    +
    548  }
    +
    549  }
    +
    550  return successfulEmplace;
    +
    551  }
    +
    552  }
    +
    553  catch (const std::exception& e)
    +
    554  {
    +
    555  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    556  return false;
    +
    557  }
    +
    558  }
    +
    559 
    +
    560  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    562  {
    +
    563  try
    +
    564  {
    +
    565  // Create new datum
    +
    566  auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<TDatum>>>();
    +
    567  datumsPtr->emplace_back();
    +
    568  auto& tDatumPtr = datumsPtr->at(0);
    +
    569  tDatumPtr = std::make_shared<TDatum>();
    +
    570  // Fill datum
    +
    571  std::swap(tDatumPtr->cvInputData, matrix);
    +
    572  // Return result
    +
    573  return waitAndEmplace(datumsPtr);
    +
    574  }
    +
    575  catch (const std::exception& e)
    +
    576  {
    +
    577  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    578  return false;
    +
    579  }
    +
    580  }
    +
    581 
    +
    582  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    584  {
    +
    585  try
    +
    586  {
    +
    587  if (!mUserWs[int(WorkerType::Input)].empty())
    +
    588  error("Push cannot be called if an input worker was already selected.",
    +
    589  __LINE__, __FUNCTION__, __FILE__);
    +
    590  return mThreadManager.tryPush(tDatums);
    +
    591  }
    +
    592  catch (const std::exception& e)
    +
    593  {
    +
    594  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    595  return false;
    +
    596  }
    +
    597  }
    +
    598 
    +
    599  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    601  {
    +
    602  try
    +
    603  {
    +
    604  if (!mUserWs[int(WorkerType::Input)].empty())
    +
    605  error("Push cannot be called if an input worker was already selected.",
    +
    606  __LINE__, __FUNCTION__, __FILE__);
    +
    607  return mThreadManager.waitAndPush(tDatums);
    +
    608  }
    +
    609  catch (const std::exception& e)
    +
    610  {
    +
    611  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    612  return false;
    +
    613  }
    +
    614  }
    +
    615 
    +
    616  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    618  {
    +
    619  try
    +
    620  {
    +
    621  // Create new datum
    +
    622  auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<TDatum>>>();
    +
    623  datumsPtr->emplace_back();
    +
    624  auto& tDatumPtr = datumsPtr->at(0);
    +
    625  tDatumPtr = std::make_shared<TDatum>();
    +
    626  // Fill datum
    +
    627  tDatumPtr->cvInputData = matrix.clone();
    +
    628  // Return result
    +
    629  return waitAndEmplace(datumsPtr);
    +
    630  }
    +
    631  catch (const std::exception& e)
    +
    632  {
    +
    633  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    634  return false;
    +
    635  }
    +
    636  }
    +
    637 
    +
    638  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    640  {
    +
    641  try
    +
    642  {
    +
    643  if (!mUserWs[int(WorkerType::Output)].empty())
    +
    644  error("Pop cannot be called if an output worker was already selected.",
    +
    645  __LINE__, __FUNCTION__, __FILE__);
    +
    646  return mThreadManager.tryPop(tDatums);
    +
    647  }
    +
    648  catch (const std::exception& e)
    +
    649  {
    +
    650  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    651  return false;
    +
    652  }
    +
    653  }
    +
    654 
    +
    655  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    657  {
    +
    658  try
    +
    659  {
    +
    660  if (!mUserWs[int(WorkerType::Output)].empty())
    +
    661  error("Pop cannot be called if an output worker was already selected.",
    +
    662  __LINE__, __FUNCTION__, __FILE__);
    +
    663  return mThreadManager.waitAndPop(tDatums);
    +
    664  }
    +
    665  catch (const std::exception& e)
    +
    666  {
    +
    667  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    668  return false;
    +
    669  }
    +
    670  }
    +
    671 
    +
    672  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    674  {
    +
    675  try
    +
    676  {
    +
    677  // Run waitAndEmplace + waitAndPop
    +
    678  if (waitAndEmplace(tDatums))
    +
    679  return waitAndPop(tDatums);
    +
    680  return false;
    +
    681  }
    +
    682  catch (const std::exception& e)
    +
    683  {
    +
    684  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    685  return false;
    +
    686  }
    +
    687  }
    +
    688 
    +
    689  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    691  {
    +
    692  try
    +
    693  {
    +
    694  // Create new datum
    +
    695  auto datumsPtr = std::make_shared<std::vector<std::shared_ptr<TDatum>>>();
    +
    696  datumsPtr->emplace_back();
    +
    697  auto& tDatumPtr = datumsPtr->at(0);
    +
    698  tDatumPtr = std::make_shared<TDatum>();
    +
    699  // Fill datum
    +
    700  tDatumPtr->cvInputData = matrix;
    +
    701  // Emplace and pop
    +
    702  emplaceAndPop(datumsPtr);
    +
    703  // Return result
    +
    704  return datumsPtr;
    +
    705  }
    +
    706  catch (const std::exception& e)
    +
    707  {
    +
    708  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    709  return TDatumsSP{};
    +
    710  }
    +
    711  }
    +
    712 
    +
    713  extern template class WrapperT<BASE_DATUM>;
    +
    714 }
    +
    715 
    +
    716 #endif // OPENPOSE_WRAPPER_WRAPPER_HPP
    + +
    Matrix clone() const
    + + +
    bool isRunning() const
    Definition: wrapper.hpp:460
    +
    void stop()
    Definition: wrapper.hpp:447
    +
    void setWorker(const WorkerType workerType, const TWorker &worker, const bool workerOnNewThread=true)
    Definition: wrapper.hpp:300
    +
    bool waitAndPush(const TDatumsSP &tDatums)
    Definition: wrapper.hpp:600
    +
    bool emplaceAndPop(TDatumsSP &tDatums)
    Definition: wrapper.hpp:673
    +
    bool waitAndPop(TDatumsSP &tDatums)
    Definition: wrapper.hpp:656
    +
    bool waitAndEmplace(TDatumsSP &tDatums)
    Definition: wrapper.hpp:523
    +
    void exec()
    Definition: wrapper.hpp:411
    +
    bool tryPush(const TDatumsSP &tDatums)
    Definition: wrapper.hpp:583
    +
    bool tryPop(TDatumsSP &tDatums)
    Definition: wrapper.hpp:639
    +
    virtual ~WrapperT()
    Definition: wrapper.hpp:269
    +
    void disableMultiThreading()
    Definition: wrapper.hpp:287
    +
    void configure(const WrapperStructPose &wrapperStructPose)
    Definition: wrapper.hpp:320
    +
    bool tryEmplace(TDatumsSP &tDatums)
    Definition: wrapper.hpp:487
    +
    void start()
    Definition: wrapper.hpp:429
    +
    WrapperT(const ThreadManagerMode threadManagerMode=ThreadManagerMode::Synchronous)
    Definition: wrapper.hpp:261
    +
    void setDefaultMaxSizeQueues(const long long defaultMaxSizeQueues=-1)
    Definition: wrapper.hpp:474
    + +
    #define BASE_DATUM
    Definition: datum.hpp:403
    + +
    ThreadManagerMode
    Definition: enumClasses.hpp:10
    + +
    OP_API void error(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    WrapperT< BASE_DATUM > Wrapper
    Definition: wrapper.hpp:249
    +
    OP_API void errorDestructor(const std::string &message, const int line=-1, const std::string &function="", const std::string &file="")
    +
    WorkerType
    Definition: enumClasses.hpp:24
    + + + +
    OP_API void opLog(const std::string &message, const Priority priority=Priority::Max, const int line=-1, const std::string &function="", const std::string &file="")
    + + + + + + + + + + + + + + + + + + + +
    +
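To put the class above in context: exec() runs the whole configured pipeline (producer, pose estimation, output) and blocks, while start() plus the tryEmplace/waitAndEmplace/tryPop/waitAndPop/emplaceAndPop family lets the caller feed images and collect results itself. Below is a minimal illustrative sketch of that asynchronous image path, not taken from the generated docs; it assumes OpenPose and OpenCV are installed, the file path and the OP_CV2OPCONSTMAT conversion macro are assumptions, and ThreadManagerMode::Asynchronous is the mode the OpenPose C++ examples use for this pattern.

// Illustrative sketch of driving op::Wrapper asynchronously (not from the generated docs).
#include <string>
#include <opencv2/opencv.hpp>
#include <openpose/headers.hpp>

int main()
{
    // Asynchronous mode: no built-in producer/consumer, the caller pushes and pops datums.
    op::Wrapper opWrapper{op::ThreadManagerMode::Asynchronous};
    opWrapper.configure(op::WrapperStructPose{});   // default body-pose configuration
    opWrapper.start();                              // starts the threads; does not block like exec()

    const cv::Mat cvImage = cv::imread("examples/media/COCO_val2014_000000000192.jpg");
    const op::Matrix opImage = OP_CV2OPCONSTMAT(cvImage);

    // emplaceAndPop(const Matrix&) wraps the image into a Datum, runs it through the pipeline
    // and returns the processed datums (see its implementation near the end of the listing above).
    const auto datumsPtr = opWrapper.emplaceAndPop(opImage);
    if (datumsPtr != nullptr && !datumsPtr->empty())
        op::opLog("People detected: "
                      + std::to_string(datumsPtr->at(0)->poseKeypoints.getSize(0)),
                  op::Priority::High);

    opWrapper.stop();
    return 0;
}

The same WrapperT object is also where the custom Worker, WorkerConsumer and WorkerProducer sketches shown earlier in this diff would be attached via setWorker() before start() is called.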
diff --git a/web/html/doc/wrapper_auxiliary_8hpp.html b/web/html/doc/wrapper_auxiliary_8hpp.html
new file mode 100644
index 000000000..a5956f5e0
--- /dev/null
+++ b/web/html/doc/wrapper_auxiliary_8hpp.html
@@ -0,0 +1,146 @@
OpenPose: include/openpose/wrapper/wrapperAuxiliary.hpp File Reference
wrapperAuxiliary.hpp File Reference

    Go to the source code of this file.

Namespaces

     op
     
Functions

    OP_API void op::wrapperConfigureSanityChecks (WrapperStructPose &wrapperStructPose, const WrapperStructFace &wrapperStructFace, const WrapperStructHand &wrapperStructHand, const WrapperStructExtra &wrapperStructExtra, const WrapperStructInput &wrapperStructInput, const WrapperStructOutput &wrapperStructOutput, const WrapperStructGui &wrapperStructGui, const bool renderOutput, const bool userInputAndPreprocessingWsEmpty, const bool userOutputWsEmpty, const std::shared_ptr< Producer > &producerSharedPtr, const ThreadManagerMode threadManagerMode)
     
    OP_API void op::threadIdPP (unsigned long long &threadId, const bool multiThreadEnabled)
     
    template<typename TDatum , typename TDatums = std::vector<std::shared_ptr<TDatum>>, typename TDatumsSP = std::shared_ptr<TDatums>, typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
    void op::configureThreadManager (ThreadManager< TDatumsSP > &threadManager, const bool multiThreadEnabled, const ThreadManagerMode threadManagerMode, const WrapperStructPose &wrapperStructPose, const WrapperStructFace &wrapperStructFace, const WrapperStructHand &wrapperStructHand, const WrapperStructExtra &wrapperStructExtra, const WrapperStructInput &wrapperStructInput, const WrapperStructOutput &wrapperStructOutput, const WrapperStructGui &wrapperStructGui, const std::array< std::vector< TWorker >, int(WorkerType::Size)> &userWs, const std::array< bool, int(WorkerType::Size)> &userWsOnNewThread)
     
    template<typename TDatum , typename TDatums = std::vector<std::shared_ptr<TDatum>>, typename TDatumsSP = std::shared_ptr<TDatums>>
    void op::createMultiviewTDatum (TDatumsSP &tDatumsSP, unsigned long long &frameCounter, const CameraParameterReader &cameraParameterReader, const void *const cvMatPtr)
     
    + + + + diff --git a/web/html/doc/wrapper_auxiliary_8hpp.js b/web/html/doc/wrapper_auxiliary_8hpp.js new file mode 100644 index 000000000..8251cc64c --- /dev/null +++ b/web/html/doc/wrapper_auxiliary_8hpp.js @@ -0,0 +1,7 @@ +var wrapper_auxiliary_8hpp = +[ + [ "configureThreadManager", "wrapper_auxiliary_8hpp.html#a4adaee31db7ae1d3f963daa9e022e62f", null ], + [ "createMultiviewTDatum", "wrapper_auxiliary_8hpp.html#a3da2a2a2f5ac58cfba53ea0d43ac6751", null ], + [ "threadIdPP", "wrapper_auxiliary_8hpp.html#af65a4564afcad06b72468679f6bee52b", null ], + [ "wrapperConfigureSanityChecks", "wrapper_auxiliary_8hpp.html#acc4a5460e02ae510e854724513eea822", null ] +]; \ No newline at end of file diff --git a/web/html/doc/wrapper_auxiliary_8hpp_source.html b/web/html/doc/wrapper_auxiliary_8hpp_source.html new file mode 100644 index 000000000..d8b1b29a2 --- /dev/null +++ b/web/html/doc/wrapper_auxiliary_8hpp_source.html @@ -0,0 +1,1560 @@ + + + + + + + +OpenPose: include/openpose/wrapper/wrapperAuxiliary.hpp Source File + + + + + + + + + + + + + +
    wrapperAuxiliary.hpp
Go to the documentation of this file.
    1 #ifndef OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
    +
    2 #define OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
    +
    3 
    + + + + + + + + + +
    13 
    +
    14 namespace op
    +
    15 {
    + +
    30  WrapperStructPose& wrapperStructPose, const WrapperStructFace& wrapperStructFace,
    +
    31  const WrapperStructHand& wrapperStructHand, const WrapperStructExtra& wrapperStructExtra,
    +
    32  const WrapperStructInput& wrapperStructInput, const WrapperStructOutput& wrapperStructOutput,
    +
    33  const WrapperStructGui& wrapperStructGui, const bool renderOutput, const bool userInputAndPreprocessingWsEmpty,
    +
    34  const bool userOutputWsEmpty, const std::shared_ptr<Producer>& producerSharedPtr,
    +
    35  const ThreadManagerMode threadManagerMode);
    +
    36 
    +
    45  OP_API void threadIdPP(unsigned long long& threadId, const bool multiThreadEnabled);
    +
    46 
    +
    53  template<typename TDatum,
    +
    54  typename TDatums = std::vector<std::shared_ptr<TDatum>>,
    +
    55  typename TDatumsSP = std::shared_ptr<TDatums>,
    +
    56  typename TWorker = std::shared_ptr<Worker<TDatumsSP>>>
    + +
    58  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabled,
    +
    59  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPose,
    +
    60  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
    +
    61  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
    +
    62  const WrapperStructOutput& wrapperStructOutput, const WrapperStructGui& wrapperStructGui,
    +
    63  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
    +
    64  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread);
    +
    65 
    +
    70  template<typename TDatum,
    +
    71  typename TDatums = std::vector<std::shared_ptr<TDatum>>,
    +
    72  typename TDatumsSP = std::shared_ptr<TDatums>>
    + +
    74  TDatumsSP& tDatumsSP, unsigned long long& frameCounter,
    +
    75  const CameraParameterReader& cameraParameterReader, const void* const cvMatPtr);
    +
    76 }
    +
    77 
    +
    78 
    +
    79 
    +
    80 
    +
    81 
    +
    82 // Implementation
    +
    83 #include <openpose/3d/headers.hpp>
    + + + +
    87 #include <openpose/gpu/gpu.hpp>
    +
    88 #include <openpose/gui/headers.hpp>
    + + + + + + +
    95 namespace op
    +
    96 {
    +
    97  template<typename TDatum, typename TDatums, typename TDatumsSP, typename TWorker>
    + +
    99  ThreadManager<TDatumsSP>& threadManager, const bool multiThreadEnabledTemp,
    +
    100  const ThreadManagerMode threadManagerMode, const WrapperStructPose& wrapperStructPoseTemp,
    +
    101  const WrapperStructFace& wrapperStructFace, const WrapperStructHand& wrapperStructHand,
    +
    102  const WrapperStructExtra& wrapperStructExtra, const WrapperStructInput& wrapperStructInput,
    +
    103  const WrapperStructOutput& wrapperStructOutput, const WrapperStructGui& wrapperStructGui,
    +
    104  const std::array<std::vector<TWorker>, int(WorkerType::Size)>& userWs,
    +
    105  const std::array<bool, int(WorkerType::Size)>& userWsOnNewThread)
    +
    106  {
    +
    107  try
    +
    108  {
    +
    109  opLog("Running configureThreadManager...", Priority::Normal);
    +
    110 
    +
    111  // Create producer
    +
    112  auto producerSharedPtr = createProducer(
    +
    113  wrapperStructInput.producerType, wrapperStructInput.producerString.getStdString(),
    +
    114  wrapperStructInput.cameraResolution, wrapperStructInput.cameraParameterPath.getStdString(),
    +
    115  wrapperStructInput.undistortImage, wrapperStructInput.numberViews);
    +
    116 
    +
    117  // Editable arguments
    +
    118  auto wrapperStructPose = wrapperStructPoseTemp;
    +
    119  auto multiThreadEnabled = multiThreadEnabledTemp;
    +
    120 
    +
    121  // User custom workers
    +
    122  const auto& userInputWs = userWs[int(WorkerType::Input)];
    +
    123  const auto& userPreProcessingWs = userWs[int(WorkerType::PreProcessing)];
    +
    124  const auto& userPostProcessingWs = userWs[int(WorkerType::PostProcessing)];
    +
    125  const auto& userOutputWs = userWs[int(WorkerType::Output)];
    +
    126  const auto userInputWsOnNewThread = userWsOnNewThread[int(WorkerType::Input)];
    +
    127  const auto userPreProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PreProcessing)];
    +
    128  const auto userPostProcessingWsOnNewThread = userWsOnNewThread[int(WorkerType::PostProcessing)];
    +
    129  const auto userOutputWsOnNewThread = userWsOnNewThread[int(WorkerType::Output)];
    +
    130 
    +
    131  // Video seek
    +
    132  const auto spVideoSeek = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
    +
    133  // It cannot be directly included in the constructor (compiler error for copying std::atomic)
    +
    134  spVideoSeek->first = false;
    +
    135  spVideoSeek->second = 0;
    +
    136 
    +
    137  // Required parameters
    +
    138  const auto gpuMode = getGpuMode();
    +
    139  const auto renderModePose = (
    +
    140  wrapperStructPose.renderMode != RenderMode::Auto
    +
    141  ? wrapperStructPose.renderMode
    +
    142  : (gpuMode == GpuMode::Cuda ? RenderMode::Gpu : RenderMode::Cpu));
    +
    143  const auto renderModeFace = (
    +
    144  wrapperStructFace.renderMode != RenderMode::Auto
    +
    145  ? wrapperStructFace.renderMode
    +
    146  : (gpuMode == GpuMode::Cuda ? RenderMode::Gpu : RenderMode::Cpu));
    +
    147  const auto renderModeHand = (
    +
    148  wrapperStructHand.renderMode != RenderMode::Auto
    +
    149  ? wrapperStructHand.renderMode
    +
    150  : (gpuMode == GpuMode::Cuda ? RenderMode::Gpu : RenderMode::Cpu));
    +
    151  const auto renderOutput = renderModePose != RenderMode::None
    +
    152  || renderModeFace != RenderMode::None
    +
    153  || renderModeHand != RenderMode::None;
    +
    154  const bool renderOutputGpu = renderModePose == RenderMode::Gpu
    +
    155  || (wrapperStructFace.enable && renderModeFace == RenderMode::Gpu)
    +
    156  || (wrapperStructHand.enable && renderModeHand == RenderMode::Gpu);
    +
    157  const bool renderFace = wrapperStructFace.enable && renderModeFace != RenderMode::None;
    +
    158  const bool renderHand = wrapperStructHand.enable && renderModeHand != RenderMode::None;
    +
    159  const bool renderHandGpu = wrapperStructHand.enable && renderModeHand == RenderMode::Gpu;
    +
    160  opLog("renderModePose = " + std::to_string(int(renderModePose)), Priority::Normal);
    +
    161  opLog("renderModeFace = " + std::to_string(int(renderModeFace)), Priority::Normal);
    +
    162  opLog("renderModeHand = " + std::to_string(int(renderModeHand)), Priority::Normal);
    +
    163  opLog("renderOutput = " + std::to_string(int(renderOutput)), Priority::Normal);
    +
    164  opLog("renderOutputGpu = " + std::to_string(int(renderOutput)), Priority::Normal);
    +
    165  opLog("renderFace = " + std::to_string(int(renderFace)), Priority::Normal);
    +
    166  opLog("renderHand = " + std::to_string(int(renderHand)), Priority::Normal);
    +
    167  opLog("renderHandGpu = " + std::to_string(int(renderHandGpu)), Priority::Normal);
    +
    168 
    +
    169  // Check no wrong/contradictory flags enabled
    +
    170  const bool userInputAndPreprocessingWsEmpty = userInputWs.empty() && userPreProcessingWs.empty();
    +
    171  const bool userOutputWsEmpty = userOutputWs.empty();
    + +
    173  wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra, wrapperStructInput,
    +
    174  wrapperStructOutput, wrapperStructGui, renderOutput, userInputAndPreprocessingWsEmpty,
    +
    175  userOutputWsEmpty, producerSharedPtr, threadManagerMode);
    +
    176  opLog("userInputAndPreprocessingWsEmpty = " + std::to_string(int(userInputAndPreprocessingWsEmpty)),
    + +
    178  opLog("userOutputWsEmpty = " + std::to_string(int(userOutputWsEmpty)), Priority::Normal);
    +
    179 
    +
    180  // Get number threads
    +
    181  auto numberGpuThreads = wrapperStructPose.gpuNumber;
    +
    182  auto gpuNumberStart = wrapperStructPose.gpuNumberStart;
    +
    183  opLog("numberGpuThreads = " + std::to_string(numberGpuThreads), Priority::Normal);
    +
    184  opLog("gpuNumberStart = " + std::to_string(gpuNumberStart), Priority::Normal);
    +
    185  // CPU --> 1 thread or no pose extraction
    +
    186  if (gpuMode == GpuMode::NoGpu)
    +
    187  {
    +
    188  numberGpuThreads = (wrapperStructPose.gpuNumber == 0 ? 0 : 1);
    +
    189  gpuNumberStart = 0;
    +
    190  // Disabling multi-thread makes the code 400 ms faster (2.3 sec vs. 2.7 in i7-6850K)
    +
    191  // and fixes the bug that the screen was not properly displayed and only refreshed sometimes
    +
    192  // Note: The screen bug could be also fixed by using waitKey(30) rather than waitKey(1)
    +
    193  multiThreadEnabled = false;
    +
    194  }
    +
    195  // GPU --> user picks (<= #GPUs)
    +
    196  else
    +
    197  {
    +
    198  // Get total number GPUs
    +
    199  const auto totalGpuNumber = getGpuNumber();
    +
    200  // If number GPU < 0 --> set it to all the available GPUs
    +
    201  if (numberGpuThreads < 0)
    +
    202  {
    +
    203  if (totalGpuNumber <= gpuNumberStart)
    +
    204  error("Number of initial GPU (`--number_gpu_start`) must be lower than the total number of"
    +
    205  " used GPUs (`--number_gpu`)", __LINE__, __FUNCTION__, __FILE__);
    +
    206  numberGpuThreads = totalGpuNumber - gpuNumberStart;
    +
    207  // Reset initial GPU to 0 (we want them all)
    +
    208  // Logging message
    +
    209  opLog("Auto-detecting all available GPUs... Detected " + std::to_string(totalGpuNumber)
    +
    210  + " GPU(s), using " + std::to_string(numberGpuThreads) + " of them starting at GPU "
    +
    211  + std::to_string(gpuNumberStart) + ".", Priority::High);
    +
    212  }
    +
    213  // Sanity check
    +
    214  if (gpuNumberStart + numberGpuThreads > totalGpuNumber)
    +
    215  error("Initial GPU selected (`--number_gpu_start`) + number GPUs to use (`--number_gpu`) must"
    +
    216  " be lower or equal than the total number of GPUs in your machine ("
    +
    217  + std::to_string(gpuNumberStart) + " + "
    +
    218  + std::to_string(numberGpuThreads) + " vs. "
    +
    219  + std::to_string(totalGpuNumber) + ").",
    +
    220  __LINE__, __FUNCTION__, __FILE__);
    +
    221  }
    +
    222 
    +
    223  // Proper format
    +
    224  const auto writeImagesCleaned = formatAsDirectory(wrapperStructOutput.writeImages.getStdString());
    +
    225  const auto writeKeypointCleaned = formatAsDirectory(wrapperStructOutput.writeKeypoint.getStdString());
    +
    226  const auto writeJsonCleaned = formatAsDirectory(wrapperStructOutput.writeJson.getStdString());
    +
    227  const auto writeHeatMapsCleaned = formatAsDirectory(wrapperStructOutput.writeHeatMaps.getStdString());
    +
    228  const auto modelFolder = formatAsDirectory(wrapperStructPose.modelFolder.getStdString());
    +
    229  opLog("writeImagesCleaned = " + writeImagesCleaned, Priority::Normal);
    +
    230  opLog("writeKeypointCleaned = " + writeKeypointCleaned, Priority::Normal);
    +
    231  opLog("writeJsonCleaned = " + writeJsonCleaned, Priority::Normal);
    +
    232  opLog("writeHeatMapsCleaned = " + writeHeatMapsCleaned, Priority::Normal);
    +
    233  opLog("modelFolder = " + modelFolder, Priority::Normal);
    +
    234 
    +
    235  // Common parameters
    +
    236  auto finalOutputSize = wrapperStructPose.outputSize;
    +
    237  Point<int> producerSize{-1,-1};
    +
    238  const auto oPProducer = (producerSharedPtr != nullptr);
    +
    239  if (oPProducer)
    +
    240  {
    +
    241  // 1. Set producer properties
    +
    242  const auto displayProducerFpsMode = (wrapperStructInput.realTimeProcessing
    + +
    244  producerSharedPtr->setProducerFpsMode(displayProducerFpsMode);
    +
    245  producerSharedPtr->set(ProducerProperty::Flip, wrapperStructInput.frameFlip);
    +
    246  producerSharedPtr->set(ProducerProperty::Rotation, wrapperStructInput.frameRotate);
    +
    247  producerSharedPtr->set(ProducerProperty::AutoRepeat, wrapperStructInput.framesRepeat);
    +
    248  // 2. Set finalOutputSize
    +
    249  producerSize = Point<int>{(int)producerSharedPtr->get(getCvCapPropFrameWidth()),
    +
    250  (int)producerSharedPtr->get(getCvCapPropFrameHeight())};
    +
    251  // Set finalOutputSize to input size if desired
    +
    252  if (finalOutputSize.x == -1 || finalOutputSize.y == -1)
    +
    253  finalOutputSize = producerSize;
    +
    254  }
    +
    255  opLog("finalOutputSize = [" + std::to_string(finalOutputSize.x) + "," + std::to_string(finalOutputSize.y)
    +
    256  + "]", Priority::Normal);
    +
    257 
    +
    258  // Producer
    +
    259  TWorker datumProducerW;
    +
    260  if (oPProducer)
    +
    261  {
    +
    262  const auto datumProducer = std::make_shared<DatumProducer<TDatum>>(
    +
    263  producerSharedPtr, wrapperStructInput.frameFirst, wrapperStructInput.frameStep,
    +
    264  wrapperStructInput.frameLast, spVideoSeek
    +
    265  );
    +
    266  datumProducerW = std::make_shared<WDatumProducer<TDatum>>(datumProducer);
    +
    267  }
    +
    268  else
    +
    269  datumProducerW = nullptr;
    +
    270 
    +
    271  std::vector<std::shared_ptr<PoseExtractorNet>> poseExtractorNets;
    +
    272  std::vector<std::shared_ptr<FaceExtractorNet>> faceExtractorNets;
    +
    273  std::vector<std::shared_ptr<HandExtractorNet>> handExtractorNets;
    +
    274  std::vector<std::shared_ptr<PoseGpuRenderer>> poseGpuRenderers;
    +
    275  // CUDA vs. CPU resize
    +
    276  std::vector<std::shared_ptr<CvMatToOpOutput>> cvMatToOpOutputs;
    +
    277  std::vector<std::shared_ptr<OpOutputToCvMat>> opOutputToCvMats;
    +
    278  std::shared_ptr<PoseCpuRenderer> poseCpuRenderer;
    +
    279  // Workers
    +
    280  TWorker scaleAndSizeExtractorW;
    +
    281  TWorker cvMatToOpInputW;
    +
    282  TWorker cvMatToOpOutputW;
    +
    283  bool addCvMatToOpOutput = renderOutput;
    +
    284  bool addCvMatToOpOutputInCpu = addCvMatToOpOutput;
    +
    285  std::vector<std::vector<TWorker>> poseExtractorsWs;
    +
    286  std::vector<std::vector<TWorker>> poseTriangulationsWs;
    +
    287  std::vector<std::vector<TWorker>> jointAngleEstimationsWs;
    +
    288  std::vector<TWorker> postProcessingWs;
    +
    289  if (numberGpuThreads > 0)
    +
    290  {
    +
    291  // Get input scales and sizes
    +
    292  const auto scaleAndSizeExtractor = std::make_shared<ScaleAndSizeExtractor>(
    +
    293  wrapperStructPose.netInputSize, (float)wrapperStructPose.netInputSizeDynamicBehavior, finalOutputSize,
    +
    294  wrapperStructPose.scalesNumber, wrapperStructPose.scaleGap);
    +
    295  scaleAndSizeExtractorW = std::make_shared<WScaleAndSizeExtractor<TDatumsSP>>(scaleAndSizeExtractor);
    +
    296 
    +
    297  // Input cvMat to OpenPose input & output format
    +
    298  // Note: resize on GPU reduces accuracy about 0.1%
    +
    299  bool resizeOnCpu = true;
    +
    300  // const auto resizeOnCpu = (wrapperStructPose.poseMode != PoseMode::Enabled);
    +
    301  if (resizeOnCpu)
    +
    302  {
    +
    303  const auto gpuResize = false;
    +
    304  const auto cvMatToOpInput = std::make_shared<CvMatToOpInput>(
    +
    305  wrapperStructPose.poseModel, gpuResize);
    +
    306  cvMatToOpInputW = std::make_shared<WCvMatToOpInput<TDatumsSP>>(cvMatToOpInput);
    +
    307  }
    +
    308  // Note: We realized that somehow doing it on GPU for any number of GPUs does speedup the whole OP
    +
    309  resizeOnCpu = false;
    +
    310  addCvMatToOpOutputInCpu = addCvMatToOpOutput
    +
    311  && (resizeOnCpu || !renderOutputGpu || wrapperStructPose.poseMode != PoseMode::Enabled
    +
    312  // Resize in GPU causing bug
    +
    313  || wrapperStructPose.outputSize.x != -1 || wrapperStructPose.outputSize.y != -1);
    +
    314  if (addCvMatToOpOutputInCpu)
    +
    315  {
    +
    316  const auto gpuResize = false;
    +
    317  const auto cvMatToOpOutput = std::make_shared<CvMatToOpOutput>(gpuResize);
    +
    318  cvMatToOpOutputW = std::make_shared<WCvMatToOpOutput<TDatumsSP>>(cvMatToOpOutput);
    +
    319  }
    +
    320 
    +
    321  // Pose estimators & renderers
    +
    322  std::vector<TWorker> cpuRenderers;
    +
    323  poseExtractorsWs.clear();
    +
    324  poseExtractorsWs.resize(numberGpuThreads);
    +
    325  if (wrapperStructPose.poseMode != PoseMode::Disabled)
    +
    326  {
    +
    327  // Pose estimators
    +
    328  for (auto gpuId = 0; gpuId < numberGpuThreads; gpuId++)
    +
    329  poseExtractorNets.emplace_back(std::make_shared<PoseExtractorCaffe>(
    +
    330  wrapperStructPose.poseModel, modelFolder, gpuId + gpuNumberStart,
    +
    331  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScaleMode,
    +
    332  wrapperStructPose.addPartCandidates, wrapperStructPose.maximizePositives,
    +
    333  wrapperStructPose.protoTxtPath.getStdString(),
    +
    334  wrapperStructPose.caffeModelPath.getStdString(),
    +
    335  wrapperStructPose.upsamplingRatio, wrapperStructPose.poseMode == PoseMode::Enabled,
    +
    336  wrapperStructPose.enableGoogleLogging
    +
    337  ));
    +
    338 
    +
    339  // Pose renderers
    +
    340  if (renderOutputGpu || renderModePose == RenderMode::Cpu)
    +
    341  {
    +
    342  // If renderModePose != RenderMode::Gpu but renderOutput, then we create an
    +
    343  // alpha = 0 pose renderer in order to keep the removing background option
    +
    344  const auto alphaKeypoint = (renderModePose != RenderMode::None
    +
    345  ? wrapperStructPose.alphaKeypoint : 0.f);
    +
    346  const auto alphaHeatMap = (renderModePose != RenderMode::None
    +
    347  ? wrapperStructPose.alphaHeatMap : 0.f);
    +
    348  // GPU rendering
    +
    349  if (renderOutputGpu)
    +
    350  {
    +
    351  for (const auto& poseExtractorNet : poseExtractorNets)
    +
    352  {
    +
    353  poseGpuRenderers.emplace_back(std::make_shared<PoseGpuRenderer>(
    +
    354  wrapperStructPose.poseModel, poseExtractorNet, wrapperStructPose.renderThreshold,
    +
    355  wrapperStructPose.blendOriginalFrame, alphaKeypoint,
    +
    356  alphaHeatMap, wrapperStructPose.defaultPartToRender
    +
    357  ));
    +
    358  }
    +
    359  }
    +
    360  // CPU rendering
    +
    361  if (renderModePose == RenderMode::Cpu)
    +
    362  {
    +
    363  poseCpuRenderer = std::make_shared<PoseCpuRenderer>(
    +
    364  wrapperStructPose.poseModel, wrapperStructPose.renderThreshold,
    +
    365  wrapperStructPose.blendOriginalFrame, alphaKeypoint, alphaHeatMap,
    +
    366  wrapperStructPose.defaultPartToRender);
    +
    367  cpuRenderers.emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(poseCpuRenderer));
    +
    368  }
    +
    369  }
    +
    370  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    371 
    +
    372  // Pose extractor(s)
    +
    373  poseExtractorsWs.resize(poseExtractorNets.size());
    +
    374  const auto personIdExtractor = (wrapperStructExtra.identification
    +
    375  ? std::make_shared<PersonIdExtractor>() : nullptr);
    +
    376  // Keep top N people
    +
    377  // Added right after PoseExtractorNet to avoid:
    +
    378  // 1) Rendering people that are later deleted (wrong visualization).
    +
    379  // 2) Processing faces and hands on people that will be deleted (speed up).
    +
    380  // 3) Running tracking before deleting the people.
    +
    381  // Add KeepTopNPeople for each PoseExtractorNet
    +
    382  const auto keepTopNPeople = (wrapperStructPose.numberPeopleMax > 0 ?
    +
    383  std::make_shared<KeepTopNPeople>(wrapperStructPose.numberPeopleMax)
    +
    384  : nullptr);
    +
    385  // Person tracker
    +
    386  auto personTrackers = std::make_shared<std::vector<std::shared_ptr<PersonTracker>>>();
    +
    387  if (wrapperStructExtra.tracking > -1)
    +
    388  personTrackers->emplace_back(
    +
    389  std::make_shared<PersonTracker>(wrapperStructExtra.tracking == 0));
    +
    390  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
    +
    391  {
    +
    392  // OpenPose keypoint detector + keepTopNPeople
    +
    393  // + ID extractor (experimental) + tracking (experimental)
    +
    394  const auto poseExtractor = std::make_shared<PoseExtractor>(
    +
    395  poseExtractorNets.at(i), keepTopNPeople, personIdExtractor, personTrackers,
    +
    396  wrapperStructPose.numberPeopleMax, wrapperStructExtra.tracking);
    +
    397  // If we want the initial image resize on GPU
    +
    398  if (cvMatToOpInputW == nullptr)
    +
    399  {
    +
    400  const auto gpuResize = true;
    +
    401  const auto cvMatToOpInput = std::make_shared<CvMatToOpInput>(
    +
    402  wrapperStructPose.poseModel, gpuResize);
    +
    403  poseExtractorsWs.at(i).emplace_back(
    +
    404  std::make_shared<WCvMatToOpInput<TDatumsSP>>(cvMatToOpInput));
    +
    405  }
    +
    406  // If we want the final image resize on GPU
    +
    407  if (addCvMatToOpOutput && cvMatToOpOutputW == nullptr)
    +
    408  {
    +
    409  const auto gpuResize = true;
    +
    410  cvMatToOpOutputs.emplace_back(std::make_shared<CvMatToOpOutput>(gpuResize));
    +
    411  poseExtractorsWs.at(i).emplace_back(
    +
    412  std::make_shared<WCvMatToOpOutput<TDatumsSP>>(cvMatToOpOutputs.back()));
    +
    413  }
    +
    414  poseExtractorsWs.at(i).emplace_back(
    +
    415  std::make_shared<WPoseExtractor<TDatumsSP>>(poseExtractor));
    +
    416  // poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractor<TDatumsSP>>(poseExtractor)};
    +
    417  // // Just OpenPose keypoint detector
    +
    418  // poseExtractorsWs.at(i) = {std::make_shared<WPoseExtractorNet<TDatumsSP>>(
    +
    419  // poseExtractorNets.at(i))};
    +
    420  }
    +
    421 
    +
    422  // // (Before tracking / id extractor)
    +
    423  // // Added right after PoseExtractorNet to avoid:
    +
    424  // // 1) Rendering people that are later deleted (wrong visualization).
    +
    425  // // 2) Processing faces and hands on people that will be deleted (speed up).
    +
    426  // if (wrapperStructPose.numberPeopleMax > 0)
    +
    427  // {
    +
    428  // // Add KeepTopNPeople for each PoseExtractorNet
    +
    429  // const auto keepTopNPeople = std::make_shared<KeepTopNPeople>(
    +
    430  // wrapperStructPose.numberPeopleMax);
    +
    431  // for (auto& wPose : poseExtractorsWs)
    +
    432  // wPose.emplace_back(std::make_shared<WKeepTopNPeople<TDatumsSP>>(keepTopNPeople));
    +
    433  // }
    +
    434  }
    +
    435  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    436 
    +
    437  // Pose renderer(s)
    +
    438  if (!poseGpuRenderers.empty())
    +
    439  {
    +
    440  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    441  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
    +
    442  {
    +
    443  poseExtractorsWs.at(i).emplace_back(std::make_shared<WPoseRenderer<TDatumsSP>>(
    +
    444  poseGpuRenderers.at(i)));
    +
    445  // Get shared params
    +
    446  if (!cvMatToOpOutputs.empty())
    +
    447  poseGpuRenderers.at(i)->setSharedParameters(
    +
    448  cvMatToOpOutputs.at(i)->getSharedParameters());
    +
    449  }
    +
    450  }
    +
    451  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    452 
    +
    453  // Face extractor(s)
    +
    454  if (wrapperStructFace.enable)
    +
    455  {
    +
    456  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    457  // Face detector
    +
    458  // OpenPose body-based face detector
    +
    459  if (wrapperStructFace.detector == Detector::Body)
    +
    460  {
    +
    461  // Sanity check
    +
    462  if (wrapperStructPose.poseMode == PoseMode::Disabled)
    +
    463  error("Body keypoint detection is disabled but face Detector is set to Body. Either"
    +
    464  " re-enable OpenPose body or select a different face Detector (`--face_detector`).",
    +
    465  __LINE__, __FUNCTION__, __FILE__);
    +
    466  // Constructors
    +
    467  const auto faceDetector = std::make_shared<FaceDetector>(wrapperStructPose.poseModel);
    +
    468  for (auto& wPose : poseExtractorsWs)
    +
    469  wPose.emplace_back(std::make_shared<WFaceDetector<TDatumsSP>>(faceDetector));
    +
    470  }
    +
    471  // OpenCV face detector
    +
    472  else if (wrapperStructFace.detector == Detector::OpenCV)
    +
    473  {
    +
    474  opLog("Body keypoint detection is disabled. Hence, using OpenCV face detector (much less"
    +
    475  " accurate but faster).", Priority::High);
    +
    476  for (auto& wPose : poseExtractorsWs)
    +
    477  {
    +
    478  // 1 FaceDetectorOpenCV per thread, OpenCV face detector is not thread-safe
    +
    479  const auto faceDetectorOpenCV = std::make_shared<FaceDetectorOpenCV>(modelFolder);
    +
    480  wPose.emplace_back(
    +
    481  std::make_shared<WFaceDetectorOpenCV<TDatumsSP>>(faceDetectorOpenCV)
    +
    482  );
    +
    483  }
    +
    484  }
    +
    485  // If provided by user: We do not need to create a FaceDetector
    +
    486  // Unknown face Detector
    +
    487  else if (wrapperStructFace.detector != Detector::Provided)
    +
    488  error("Unknown face Detector. Select a valid face Detector (`--face_detector`).",
    +
    489  __LINE__, __FUNCTION__, __FILE__);
    +
    490  // Face keypoint extractor
    +
    491  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
    +
    492  {
    +
    493  // Face keypoint extractor
    +
    494  const auto netOutputSize = wrapperStructFace.netInputSize;
    +
    495  const auto faceExtractorNet = std::make_shared<FaceExtractorCaffe>(
    +
    496  wrapperStructFace.netInputSize, netOutputSize, modelFolder,
    +
    497  gpu + gpuNumberStart, wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScaleMode,
    +
    498  wrapperStructPose.enableGoogleLogging
    +
    499  );
    +
    500  faceExtractorNets.emplace_back(faceExtractorNet);
    +
    501  poseExtractorsWs.at(gpu).emplace_back(
    +
    502  std::make_shared<WFaceExtractorNet<TDatumsSP>>(faceExtractorNet));
    +
    503  }
    +
    504  }
    +
    505  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    506 
    +
    507  // Hand extractor(s)
    +
    508  if (wrapperStructHand.enable)
    +
    509  {
    +
    510  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    511  const auto handDetector = std::make_shared<HandDetector>(wrapperStructPose.poseModel);
    +
    512  for (auto gpu = 0u; gpu < poseExtractorsWs.size(); gpu++)
    +
    513  {
    +
    514  // Sanity check
    +
    515  if ((wrapperStructHand.detector == Detector::BodyWithTracking
    +
    516  || wrapperStructHand.detector == Detector::Body)
    +
    517  && wrapperStructPose.poseMode == PoseMode::Disabled)
    +
    518  error("Body keypoint detection is disabled but hand Detector is set to Body. Either"
    +
    519  " re-enable OpenPose body or select a different hand Detector (`--hand_detector`).",
    +
    520  __LINE__, __FUNCTION__, __FILE__);
    +
    521  // Hand detector
    +
    522  // OpenPose body-based hand detector with tracking
    +
    523  if (wrapperStructHand.detector == Detector::BodyWithTracking)
    +
    524  {
    +
    525  poseExtractorsWs.at(gpu).emplace_back(
    +
    526  std::make_shared<WHandDetectorTracking<TDatumsSP>>(handDetector));
    +
    527  }
    +
    528  // OpenPose body-based hand detector
    +
    529  else if (wrapperStructHand.detector == Detector::Body)
    +
    530  {
    +
    531  poseExtractorsWs.at(gpu).emplace_back(
    +
    532  std::make_shared<WHandDetector<TDatumsSP>>(handDetector));
    +
    533  }
    +
    534  // If provided by user: We do not need to create a HandDetector
    +
    535  // Unknown hand Detector
    +
    536  else if (wrapperStructHand.detector != Detector::Provided)
    +
    537  error("Unknown hand Detector. Select a valid hand Detector (`--hand_detector`).",
    +
    538  __LINE__, __FUNCTION__, __FILE__);
    +
    539  // Hand keypoint extractor
    +
    540  const auto netOutputSize = wrapperStructHand.netInputSize;
    +
    541  const auto handExtractorNet = std::make_shared<HandExtractorCaffe>(
    +
    542  wrapperStructHand.netInputSize, netOutputSize, modelFolder,
    +
    543  gpu + gpuNumberStart, wrapperStructHand.scalesNumber, wrapperStructHand.scaleRange,
    +
    544  wrapperStructPose.heatMapTypes, wrapperStructPose.heatMapScaleMode,
    +
    545  wrapperStructPose.enableGoogleLogging
    +
    546  );
    +
    547  handExtractorNets.emplace_back(handExtractorNet);
    +
    548  poseExtractorsWs.at(gpu).emplace_back(
    +
    549  std::make_shared<WHandExtractorNet<TDatumsSP>>(handExtractorNet)
    +
    550  );
    +
    551  // If OpenPose body-based hand detector with tracking
    +
    552  if (wrapperStructHand.detector == Detector::BodyWithTracking)
    +
    553  poseExtractorsWs.at(gpu).emplace_back(
    +
    554  std::make_shared<WHandDetectorUpdate<TDatumsSP>>(handDetector));
    +
    555  }
    +
    556  }
    +
    557  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    558 
    +
    559  // Face renderer(s)
    +
    560  if (renderFace)
    +
    561  {
    +
    562  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    563  // CPU rendering
    +
    564  if (renderModeFace == RenderMode::Cpu)
    +
    565  {
    +
    566  // Construct face renderer
    +
    567  const auto faceRenderer = std::make_shared<FaceCpuRenderer>(
    +
    568  wrapperStructFace.renderThreshold, wrapperStructFace.alphaKeypoint,
    +
    569  wrapperStructFace.alphaHeatMap);
    +
    570  // Add worker
    +
    571  cpuRenderers.emplace_back(std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
    +
    572  }
    +
    573  // GPU rendering
    +
    574  else if (renderModeFace == RenderMode::Gpu)
    +
    575  {
    +
    576  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
    +
    577  {
    +
    578  // Construct face renderer
    +
    579  const auto faceRenderer = std::make_shared<FaceGpuRenderer>(
    +
    580  wrapperStructFace.renderThreshold, wrapperStructFace.alphaKeypoint,
    +
    581  wrapperStructFace.alphaHeatMap
    +
    582  );
    +
    583  // Performance boost -> share spGpuMemory for all renderers
    +
    584  if (!poseGpuRenderers.empty())
    +
    585  {
    +
    586  // const bool isLastRenderer = !renderHandGpu;
    +
    587  const bool isLastRenderer = !renderHandGpu && !(addCvMatToOpOutput && !addCvMatToOpOutputInCpu);
    +
    588  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
    +
    589  poseGpuRenderers.at(i));
    +
    590  faceRenderer->setSharedParametersAndIfLast(
    +
    591  renderer->getSharedParameters(), isLastRenderer);
    +
    592  }
    +
    593  // Add worker
    +
    594  poseExtractorsWs.at(i).emplace_back(
    +
    595  std::make_shared<WFaceRenderer<TDatumsSP>>(faceRenderer));
    +
    596  }
    +
    597  }
    +
    598  else
    +
    599  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    600  }
    +
    601  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    602 
    +
    603  // Hand renderer(s)
    +
    604  if (renderHand)
    +
    605  {
    +
    606  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    607  // CPU rendering
    +
    608  if (renderModeHand == RenderMode::Cpu)
    +
    609  {
    +
    610  // Construct hand renderer
    +
    611  const auto handRenderer = std::make_shared<HandCpuRenderer>(
    +
    612  wrapperStructHand.renderThreshold, wrapperStructHand.alphaKeypoint,
    +
    613  wrapperStructHand.alphaHeatMap);
    +
    614  // Add worker
    +
    615  cpuRenderers.emplace_back(std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
    +
    616  }
    +
    617  // GPU rendering
    +
    618  else if (renderModeHand == RenderMode::Gpu)
    +
    619  {
    +
    620  for (auto i = 0u; i < poseExtractorsWs.size(); i++)
    +
    621  {
    +
    622  // Construct hands renderer
    +
    623  const auto handRenderer = std::make_shared<HandGpuRenderer>(
    +
    624  wrapperStructHand.renderThreshold, wrapperStructHand.alphaKeypoint,
    +
    625  wrapperStructHand.alphaHeatMap
    +
    626  );
    +
    627  // Performance boost -> share spGpuMemory for all renderers
    +
    628  if (!poseGpuRenderers.empty())
    +
    629  {
    +
    630  // const bool isLastRenderer = true;
    +
    631  const bool isLastRenderer = !(addCvMatToOpOutput && !addCvMatToOpOutputInCpu);
    +
    632  const auto renderer = std::static_pointer_cast<PoseGpuRenderer>(
    +
    633  poseGpuRenderers.at(i));
    +
    634  handRenderer->setSharedParametersAndIfLast(
    +
    635  renderer->getSharedParameters(), isLastRenderer);
    +
    636  }
    +
    637  // Add worker
    +
    638  poseExtractorsWs.at(i).emplace_back(
    +
    639  std::make_shared<WHandRenderer<TDatumsSP>>(handRenderer));
    +
    640  }
    +
    641  }
    +
    642  else
    +
    643  error("Unknown RenderMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    644  }
    +
    645  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    646 
    +
    647  // Frames processor (OpenPose format -> cv::Mat format)
    +
    648  if (addCvMatToOpOutput && !addCvMatToOpOutputInCpu)
    +
    649  {
    +
    650  // for (auto& poseExtractorsW : poseExtractorsWs)
    +
    651  for (auto i = 0u ; i < poseExtractorsWs.size() ; ++i)
    +
    652  {
    +
    653  const auto gpuResize = true;
    +
    654  opOutputToCvMats.emplace_back(std::make_shared<OpOutputToCvMat>(gpuResize));
    +
    655  poseExtractorsWs.at(i).emplace_back(
    +
    656  std::make_shared<WOpOutputToCvMat<TDatumsSP>>(opOutputToCvMats.back()));
    +
    657  // Assign shared parameters
    +
    658  opOutputToCvMats.back()->setSharedParameters(
    +
    659  cvMatToOpOutputs.at(i)->getSharedParameters());
    +
    660  }
    +
    661  }
    +
    662  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    663 
    +
    664  // 3-D reconstruction
    +
    665  poseTriangulationsWs.clear();
    +
    666  if (wrapperStructExtra.reconstruct3d)
    +
    667  {
    +
    668  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    669  // For all (body/face/hands): PoseTriangulations ~30 msec, 8 GPUS ~30 msec for keypoint estimation
    +
    670  poseTriangulationsWs.resize(fastMax(1, int(poseExtractorsWs.size() / 4)));
    +
    671  for (auto i = 0u ; i < poseTriangulationsWs.size() ; i++)
    +
    672  {
    +
    673  const auto poseTriangulation = std::make_shared<PoseTriangulation>(
    +
    674  wrapperStructExtra.minViews3d);
    +
    675  poseTriangulationsWs.at(i) = {std::make_shared<WPoseTriangulation<TDatumsSP>>(
    +
    676  poseTriangulation)};
    +
    677  }
    +
    678  }
    +
    679  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    680  // Itermediate workers (e.g., OpenPose format to cv::Mat, json & frames recorder, ...)
    +
    681  postProcessingWs.clear();
    +
    682  // // Person ID identification (when no multi-thread and no dependency on tracking)
    +
    683  // if (wrapperStructExtra.identification)
    +
    684  // {
    +
    685  // const auto personIdExtractor = std::make_shared<PersonIdExtractor>();
    +
    686  // postProcessingWs.emplace_back(
    +
    687  // std::make_shared<WPersonIdExtractor<TDatumsSP>>(personIdExtractor)
    +
    688  // );
    +
    689  // }
    +
    690  // Frames processor (OpenPose format -> cv::Mat format)
    +
    691  if (addCvMatToOpOutputInCpu)
    +
    692  {
    +
    693  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    694  postProcessingWs = mergeVectors(postProcessingWs, cpuRenderers);
    +
    695  const auto opOutputToCvMat = std::make_shared<OpOutputToCvMat>();
    +
    696  postProcessingWs.emplace_back(std::make_shared<WOpOutputToCvMat<TDatumsSP>>(opOutputToCvMat));
    +
    697  }
    +
    698  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    699  // Re-scale pose if desired
    +
    700  // If desired scale is not the current input
    +
    701  if (wrapperStructPose.keypointScaleMode != ScaleMode::InputResolution
    +
    702  // and desired scale is not output when size(input) = size(output)
    +
    703  && !(wrapperStructPose.keypointScaleMode == ScaleMode::OutputResolution &&
    +
    704  (finalOutputSize == producerSize || finalOutputSize.x <= 0 || finalOutputSize.y <= 0))
    +
    705  // and desired scale is not net output when size(input) = size(net output)
    +
    706  && !(wrapperStructPose.keypointScaleMode == ScaleMode::NetOutputResolution
    +
    707  && producerSize == wrapperStructPose.netInputSize))
    +
    708  {
    +
    709  // Then we must rescale the keypoints
    +
    710  auto keypointScaler = std::make_shared<KeypointScaler>(wrapperStructPose.keypointScaleMode);
    +
    711  postProcessingWs.emplace_back(std::make_shared<WKeypointScaler<TDatumsSP>>(keypointScaler));
    +
    712  }
    +
    713  }
    +
    714  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    715 
    +
    716  // IK/Adam
    +
    717  const auto displayAdam = wrapperStructGui.displayMode == DisplayMode::DisplayAdam
    +
    718  || (wrapperStructGui.displayMode == DisplayMode::DisplayAll
    +
    719  && wrapperStructExtra.ikThreads > 0);
    +
    720  jointAngleEstimationsWs.clear();
    +
    721 #ifdef USE_3D_ADAM_MODEL
    +
    722  if (wrapperStructExtra.ikThreads > 0)
    +
    723  {
    +
    724  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    725  jointAngleEstimationsWs.resize(wrapperStructExtra.ikThreads);
    +
    726  // Pose extractor(s)
    +
    727  for (auto i = 0u; i < jointAngleEstimationsWs.size(); i++)
    +
    728  {
    +
    729  const auto jointAngleEstimation = std::make_shared<JointAngleEstimation>(displayAdam);
    +
    730  jointAngleEstimationsWs.at(i) = {std::make_shared<WJointAngleEstimation<TDatumsSP>>(
    +
    731  jointAngleEstimation)};
    +
    732  }
    +
    733  }
    +
    734  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    735 #endif
    +
    736 
    +
    737  // Output workers
    +
    738  std::vector<TWorker> outputWs;
    +
    739  // Print verbose
    +
    740  if (wrapperStructOutput.verbose > 0.)
    +
    741  {
    +
    742  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    743  const auto verbosePrinter = std::make_shared<VerbosePrinter>(
    +
    744  wrapperStructOutput.verbose, uLongLongRound(producerSharedPtr->get(getCvCapPropFrameCount())));
    +
    745  outputWs.emplace_back(std::make_shared<WVerbosePrinter<TDatumsSP>>(verbosePrinter));
    +
    746  }
    +
    747  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    748  // Send information (e.g., to Unity) through UDP client-server communication
    +
    749 
    +
    750 #ifdef USE_3D_ADAM_MODEL
    +
    751  if (!wrapperStructOutput.udpHost.empty() && !wrapperStructOutput.udpPort.empty())
    +
    752  {
    +
    753  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    754  const auto udpSender = std::make_shared<UdpSender>(wrapperStructOutput.udpHost,
    +
    755  wrapperStructOutput.udpPort);
    +
    756  outputWs.emplace_back(std::make_shared<WUdpSender<TDatumsSP>>(udpSender));
    +
    757  }
    +
    758  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    759 #endif
    +
    760  // Write people pose data on disk (json for OpenCV >= 3, xml, yml...)
    +
    761  if (!writeKeypointCleaned.empty())
    +
    762  {
    +
    763  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    764  const auto keypointSaver = std::make_shared<KeypointSaver>(writeKeypointCleaned,
    +
    765  wrapperStructOutput.writeKeypointFormat);
    +
    766  outputWs.emplace_back(std::make_shared<WPoseSaver<TDatumsSP>>(keypointSaver));
    +
    767  if (wrapperStructFace.enable)
    +
    768  outputWs.emplace_back(std::make_shared<WFaceSaver<TDatumsSP>>(keypointSaver));
    +
    769  if (wrapperStructHand.enable)
    +
    770  outputWs.emplace_back(std::make_shared<WHandSaver<TDatumsSP>>(keypointSaver));
    +
    771  }
    +
    772  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    773  // Write OpenPose output data on disk in JSON format (body/hand/face keypoints, body part locations if
    +
    774  // enabled, etc.)
    +
    775  if (!writeJsonCleaned.empty())
    +
    776  {
    +
    777  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    778  const auto peopleJsonSaver = std::make_shared<PeopleJsonSaver>(writeJsonCleaned);
    +
    779  outputWs.emplace_back(std::make_shared<WPeopleJsonSaver<TDatumsSP>>(peopleJsonSaver));
    +
    780  }
    +
    781  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    782  // Write people pose/foot/face/hand/etc. data on disk (COCO validation JSON format)
    +
    783  if (!wrapperStructOutput.writeCocoJson.empty())
    +
    784  {
    +
    785  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    786  // If humanFormat: bigger size (& maybe slower to process), but easier for user to read it
    +
    787  const auto humanFormat = true;
    +
    788  const auto cocoJsonSaver = std::make_shared<CocoJsonSaver>(
    +
    789  wrapperStructOutput.writeCocoJson.getStdString(), wrapperStructPose.poseModel, humanFormat,
    +
    790  wrapperStructOutput.writeCocoJsonVariants,
    +
    791  (wrapperStructPose.poseModel != PoseModel::CAR_22
    +
    792  && wrapperStructPose.poseModel != PoseModel::CAR_12
    +
    793  ? CocoJsonFormat::Body : CocoJsonFormat::Car),
    +
    794  wrapperStructOutput.writeCocoJsonVariant);
    +
    795  outputWs.emplace_back(std::make_shared<WCocoJsonSaver<TDatumsSP>>(cocoJsonSaver));
    +
    796  }
    +
    797  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    798  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    799  // Write frames as desired image format on hard disk
    +
    800  if (!writeImagesCleaned.empty())
    +
    801  {
    +
    802  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    803  const auto imageSaver = std::make_shared<ImageSaver>(
    +
    804  writeImagesCleaned, wrapperStructOutput.writeImagesFormat.getStdString());
    +
    805  outputWs.emplace_back(std::make_shared<WImageSaver<TDatumsSP>>(imageSaver));
    +
    806  }
    +
    807  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    808  auto originalVideoFps = 0.;
    +
    809  if (!wrapperStructOutput.writeVideo.empty() || !wrapperStructOutput.writeVideo3D.empty()
    +
    810  || !wrapperStructOutput.writeBvh.empty())
    +
    811  {
    +
    812  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    813  if (wrapperStructOutput.writeVideoFps <= 0
    +
    814  && (!oPProducer || producerSharedPtr->get(getCvCapPropFrameFps()) <= 0))
    +
    815  error("The frame rate of the frames producer is unknown. Set `--write_video_fps` to your desired"
    +
    816  " FPS if you wanna record video (`--write_video`). E.g., if it is a folder of images, you"
    +
    817  " will have to know or guess the frame rate; if it is a webcam, you should use the OpenPose"
    +
    818  " displayed FPS as desired value. If you do not care, simply add `--write_video_fps 30`.",
    +
    819  __LINE__, __FUNCTION__, __FILE__);
    +
    820  originalVideoFps = (
    +
    821  wrapperStructOutput.writeVideoFps > 0 ?
    +
    822  wrapperStructOutput.writeVideoFps : producerSharedPtr->get(getCvCapPropFrameFps()));
    +
    823  }
    +
    824  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    825  // Write frames as *.avi video on hard disk
    +
    826  if (!wrapperStructOutput.writeVideo.empty())
    +
    827  {
    +
    828  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    829  // Sanity checks
    +
    830  if (!oPProducer)
    +
    831  error("Video file can only be recorded inside `wrapper/wrapper.hpp` if the producer"
    +
    832  " is one of the default ones (e.g., video, webcam, ...).",
    +
    833  __LINE__, __FUNCTION__, __FILE__);
    +
    834  if (wrapperStructOutput.writeVideoWithAudio && producerSharedPtr->getType() != ProducerType::Video)
    +
    835  error("Audio can only be added to the output saved video if the input is also a video (either"
    +
    836  " disable `--write_video_with_audio` or use a video as input with `--video`).",
    +
    837  __LINE__, __FUNCTION__, __FILE__);
    +
    838  // Create video saver worker
    +
    839  const auto videoSaver = std::make_shared<VideoSaver>(
    +
    840  wrapperStructOutput.writeVideo.getStdString(), getCvFourcc('M','J','P','G'), originalVideoFps,
    +
    841  (wrapperStructOutput.writeVideoWithAudio ? wrapperStructInput.producerString.getStdString() : ""));
    +
    842  outputWs.emplace_back(std::make_shared<WVideoSaver<TDatumsSP>>(videoSaver));
    +
    843  }
    +
    844  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    845  // Write joint angles as *.bvh file on hard disk
    +
    846 #ifdef USE_3D_ADAM_MODEL
    +
    847  if (!wrapperStructOutput.writeBvh.empty())
    +
    848  {
    +
    849  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    850  const auto bvhSaver = std::make_shared<BvhSaver>(
    +
    851  wrapperStructOutput.writeBvh, JointAngleEstimation::getTotalModel(), originalVideoFps
    +
    852  );
    +
    853  outputWs.emplace_back(std::make_shared<WBvhSaver<TDatumsSP>>(bvhSaver));
    +
    854  }
    +
    855  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    856 #endif
    +
    857  // Write heat maps as desired image format on hard disk
    +
    858  if (!writeHeatMapsCleaned.empty())
    +
    859  {
    +
    860  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    861  const auto heatMapSaver = std::make_shared<HeatMapSaver>(
    +
    862  writeHeatMapsCleaned, wrapperStructOutput.writeHeatMapsFormat.getStdString());
    +
    863  outputWs.emplace_back(std::make_shared<WHeatMapSaver<TDatumsSP>>(heatMapSaver));
    +
    864  }
    +
    865  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    866  // Add frame information for GUI
    +
    867  const bool guiEnabled = (wrapperStructGui.displayMode != DisplayMode::NoDisplay);
    +
    868  // If this WGuiInfoAdder instance is placed before the WImageSaver or WVideoSaver, then the resulting
    +
    869  // recorded frames will look exactly as the final displayed image by the GUI
    +
    870  if (wrapperStructGui.guiVerbose && (guiEnabled || !userOutputWs.empty()
    +
    871  || threadManagerMode == ThreadManagerMode::Asynchronous
    +
    872  || threadManagerMode == ThreadManagerMode::AsynchronousOut))
    +
    873  {
    +
    874  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    875  const auto guiInfoAdder = std::make_shared<GuiInfoAdder>(numberGpuThreads, guiEnabled);
    +
    876  outputWs.emplace_back(std::make_shared<WGuiInfoAdder<TDatumsSP>>(guiInfoAdder));
    +
    877  }
    +
    878  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    879  // Minimal graphical user interface (GUI)
    +
    880  TWorker guiW;
    +
    881  TWorker videoSaver3DW;
    +
    882  if (guiEnabled)
    +
    883  {
    +
    884  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    885  // PoseRenderers to Renderers
    +
    886  std::vector<std::shared_ptr<Renderer>> renderers;
    +
    887  if (renderModePose == RenderMode::Cpu)
    +
    888  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseCpuRenderer));
    +
    889  else
    +
    890  for (const auto& poseGpuRenderer : poseGpuRenderers)
    +
    891  renderers.emplace_back(std::static_pointer_cast<Renderer>(poseGpuRenderer));
    +
    892  // Display
    +
    893  const auto numberViews = (producerSharedPtr != nullptr
    +
    894  ? positiveIntRound(producerSharedPtr->get(ProducerProperty::NumberViews)) : 1);
    +
    895  auto finalOutputSizeGui = finalOutputSize;
    +
    896  if (numberViews > 1 && finalOutputSizeGui.x > 0)
    +
    897  finalOutputSizeGui.x *= numberViews;
    +
    898  // Adam (+3-D/2-D) display
    +
    899  if (displayAdam)
    +
    900  {
    +
    901 #ifdef USE_3D_ADAM_MODEL
    +
    902  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    903  // Gui
    +
    904  const auto gui = std::make_shared<GuiAdam>(
    +
    905  finalOutputSizeGui, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
    +
    906  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
    +
    907  wrapperStructGui.displayMode, JointAngleEstimation::getTotalModel(),
    +
    908  wrapperStructOutput.writeVideoAdam
    +
    909  );
    +
    910  // WGui
    +
    911  guiW = {std::make_shared<WGuiAdam<TDatumsSP>>(gui)};
    +
    912  // Write 3D frames as *.avi video on hard disk
    +
    913  if (!wrapperStructOutput.writeVideo3D.empty())
    +
    914  error("3D video can only be recorded if 3D render is enabled.",
    +
    915  __LINE__, __FUNCTION__, __FILE__);
    +
    916 #endif
    +
    917  }
    +
    918  // 3-D (+2-D) display
    +
    919  else if (wrapperStructGui.displayMode == DisplayMode::Display3D
    +
    920  || wrapperStructGui.displayMode == DisplayMode::DisplayAll)
    +
    921  {
    +
    922  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    923  // Gui
    +
    924  const auto gui = std::make_shared<Gui3D>(
    +
    925  finalOutputSizeGui, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
    +
    926  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers,
    +
    927  wrapperStructPose.poseModel, wrapperStructGui.displayMode,
    +
    928  !wrapperStructOutput.writeVideo3D.empty()
    +
    929  );
    +
    930  // WGui
    +
    931  guiW = {std::make_shared<WGui3D<TDatumsSP>>(gui)};
    +
    932  // Write 3D frames as *.avi video on hard disk
    +
    933  if (!wrapperStructOutput.writeVideo3D.empty())
    +
    934  {
    +
    935  const auto videoSaver = std::make_shared<VideoSaver>(
    +
    936  wrapperStructOutput.writeVideo3D.getStdString(), getCvFourcc('M','J','P','G'), originalVideoFps, "");
    +
    937  videoSaver3DW = std::make_shared<WVideoSaver3D<TDatumsSP>>(videoSaver);
    +
    938  }
    +
    939  }
    +
    940  // 2-D display
    +
    941  else if (wrapperStructGui.displayMode == DisplayMode::Display2D)
    +
    942  {
    +
    943  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    944  // Gui
    +
    945  const auto gui = std::make_shared<Gui>(
    +
    946  finalOutputSizeGui, wrapperStructGui.fullScreen, threadManager.getIsRunningSharedPtr(),
    +
    947  spVideoSeek, poseExtractorNets, faceExtractorNets, handExtractorNets, renderers
    +
    948  );
    +
    949  // WGui
    +
    950  guiW = {std::make_shared<WGui<TDatumsSP>>(gui)};
    +
    951  // Write 3D frames as *.avi video on hard disk
    +
    952  if (!wrapperStructOutput.writeVideo3D.empty())
    +
    953  error("3D video can only be recorded if 3D render is enabled.",
    +
    954  __LINE__, __FUNCTION__, __FILE__);
    +
    955  }
    +
    956  else
    +
    957  error("Unknown DisplayMode.", __LINE__, __FUNCTION__, __FILE__);
    +
    958  }
    +
    959  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    960  // Set FpsMax
    +
    961  TWorker wFpsMax;
    +
    962  if (wrapperStructPose.fpsMax > 0.)
    +
    963  wFpsMax = std::make_shared<WFpsMax<TDatumsSP>>(wrapperStructPose.fpsMax);
    +
    964  // Set wrapper as configured
    +
    965  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    966 
    +
    967 
    +
    968 
    +
    969 
    +
    970 
    +
    971  // The less number of queues -> the less threads opened, and potentially the less lag
    +
    972 
    +
    973  // Sanity checks
    +
    974  if ((datumProducerW == nullptr) == (userInputWs.empty())
    +
    975  && threadManagerMode != ThreadManagerMode::Asynchronous
    +
    976  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    977  {
    +
    978  const auto message = "You need to have 1 and only 1 producer selected. You can introduce your own"
    +
    979  " producer by using setWorker(WorkerType::Input, ...) or use the OpenPose"
    +
    980  " default producer by configuring it in the configure function) or use the"
    +
    981  " ThreadManagerMode::Asynchronous(In) mode.";
    +
    982  error(message, __LINE__, __FUNCTION__, __FILE__);
    +
    983  }
    +
    984  if (outputWs.empty() && userOutputWs.empty() && guiW == nullptr
    +
    985  && threadManagerMode != ThreadManagerMode::Asynchronous
    +
    986  && threadManagerMode != ThreadManagerMode::AsynchronousOut)
    +
    987  {
    +
    988  error("No output selected.", __LINE__, __FUNCTION__, __FILE__);
    +
    989  }
    +
    990 
    +
    991  // Thread Manager
    +
    992  // Clean previous thread manager (avoid configure to crash the program if used more than once)
    +
    993  threadManager.reset();
    +
    994  unsigned long long threadId = 0ull;
    +
    995  auto queueIn = 0ull;
    +
    996  auto queueOut = 1ull;
    +
    997  // After producer
    +
    998  // ID generator (before any multi-threading or any function that requires the ID)
    +
    999  const auto wIdGenerator = std::make_shared<WIdGenerator<TDatumsSP>>();
    +
    1000  // If custom user Worker and uses its own thread
    +
    1001  std::vector<TWorker> workersAux;
    +
    1002  if (!userPreProcessingWs.empty())
    +
    1003  {
    +
    1004  // If custom user Worker in its own thread
    +
    1005  if (userPreProcessingWsOnNewThread)
    +
    1006  opLog("You chose to add your pre-processing function in a new thread. However, OpenPose will"
    +
    1007  " add it in the same thread than the input frame producer.",
    +
    1008  Priority::High, __LINE__, __FUNCTION__, __FILE__);
    +
    1009  workersAux = mergeVectors(workersAux, {userPreProcessingWs});
    +
    1010  }
    +
    1011  workersAux = mergeVectors(workersAux, {wIdGenerator});
    +
    1012  // Scale & cv::Mat to OP format
    +
    1013  if (scaleAndSizeExtractorW != nullptr)
    +
    1014  workersAux = mergeVectors(workersAux, {scaleAndSizeExtractorW});
    +
    1015  if (cvMatToOpInputW != nullptr)
    +
    1016  workersAux = mergeVectors(workersAux, {cvMatToOpInputW});
    +
    1017  // cv::Mat to output format
    +
    1018  if (cvMatToOpOutputW != nullptr)
    +
    1019  workersAux = mergeVectors(workersAux, {cvMatToOpOutputW});
    +
    1020 
    +
    1021  // Producer
    +
    1022  // If custom user Worker and uses its own thread
    +
    1023  if (!userInputWs.empty() && userInputWsOnNewThread)
    +
    1024  {
    +
    1025  // Thread 0, queues 0 -> 1
    +
    1026  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1027  threadManager.add(threadId, userInputWs, queueIn++, queueOut++);
    +
    1028  threadIdPP(threadId, multiThreadEnabled);
    +
    1029  }
    +
    1030  // If custom user Worker in same thread
    +
    1031  else if (!userInputWs.empty())
    +
    1032  workersAux = mergeVectors(userInputWs, workersAux);
    +
    1033  // If OpenPose producer (same thread)
    +
    1034  else if (datumProducerW != nullptr)
    +
    1035  workersAux = mergeVectors({datumProducerW}, workersAux);
    +
    1036  // Otherwise
    +
    1037  else if (threadManagerMode != ThreadManagerMode::Asynchronous
    +
    1038  && threadManagerMode != ThreadManagerMode::AsynchronousIn)
    +
    1039  error("No input selected.", __LINE__, __FUNCTION__, __FILE__);
    +
    1040  // Thread 0 or 1, queues 0 -> 1
    +
    1041  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1042  threadManager.add(threadId, workersAux, queueIn++, queueOut++);
    +
    1043  // Increase thread
    +
    1044  threadIdPP(threadId, multiThreadEnabled);
    +
    1045 
    +
    1046  // Pose estimation & rendering
    +
    1047  // Thread 1 or 2...X, queues 1 -> 2, X = 2 + #GPUs
    +
    1048  if (!poseExtractorsWs.empty())
    +
    1049  {
    +
    1050  if (multiThreadEnabled)
    +
    1051  {
    +
    1052  for (auto& wPose : poseExtractorsWs)
    +
    1053  {
    +
    1054  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1055  threadManager.add(threadId, wPose, queueIn, queueOut);
    +
    1056  threadIdPP(threadId, multiThreadEnabled);
    +
    1057  }
    +
    1058  queueIn++;
    +
    1059  queueOut++;
    +
    1060  // Sort frames - Required own thread
    +
    1061  if (poseExtractorsWs.size() > 1u)
    +
    1062  {
    +
    1063  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    +
    1064  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1065  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    +
    1066  threadIdPP(threadId, multiThreadEnabled);
    +
    1067  }
    +
    1068  }
    +
    1069  else
    +
    1070  {
    +
    1071  if (poseExtractorsWs.size() > 1)
    +
    1072  opLog("Multi-threading disabled, only 1 thread running. All GPUs have been disabled but the"
    +
    1073  " first one, which is defined by gpuNumberStart (e.g., in the OpenPose demo, it is set"
    +
    1074  " with the `--num_gpu_start` flag).", Priority::High);
    +
    1075  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1076  threadManager.add(threadId, poseExtractorsWs.at(0), queueIn++, queueOut++);
    +
    1077  }
    +
    1078  }
    +
    1079  // Assemble all frames from same time instant (3-D module)
    +
    1080  const auto wQueueAssembler = std::make_shared<WQueueAssembler<TDatums>>();
    +
    1081  // 3-D reconstruction
    +
    1082  if (!poseTriangulationsWs.empty())
    +
    1083  {
    +
    1084  // Assemble frames
    +
    1085  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1086  threadManager.add(threadId, wQueueAssembler, queueIn++, queueOut++);
    +
    1087  threadIdPP(threadId, multiThreadEnabled);
    +
    1088  // 3-D reconstruction
    +
    1089  if (multiThreadEnabled)
    +
    1090  {
    +
    1091  for (auto& wPoseTriangulations : poseTriangulationsWs)
    +
    1092  {
    +
    1093  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1094  threadManager.add(threadId, wPoseTriangulations, queueIn, queueOut);
    +
    1095  threadIdPP(threadId, multiThreadEnabled);
    +
    1096  }
    +
    1097  queueIn++;
    +
    1098  queueOut++;
    +
    1099  // Sort frames
    +
    1100  if (poseTriangulationsWs.size() > 1u)
    +
    1101  {
    +
    1102  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    +
    1103  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1104  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    +
    1105  threadIdPP(threadId, multiThreadEnabled);
    +
    1106  }
    +
    1107  }
    +
    1108  else
    +
    1109  {
    +
    1110  if (poseTriangulationsWs.size() > 1)
    +
    1111  opLog("Multi-threading disabled, only 1 thread running for 3-D triangulation.",
    +
    1112  Priority::High);
    +
    1113  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1114  threadManager.add(threadId, poseTriangulationsWs.at(0), queueIn++, queueOut++);
    +
    1115  }
    +
    1116  }
    +
    1117  else
    +
    1118  postProcessingWs = mergeVectors({wQueueAssembler}, postProcessingWs);
    +
    1119  // Adam/IK step
    +
    1120  if (!jointAngleEstimationsWs.empty())
    +
    1121  {
    +
    1122  if (multiThreadEnabled)
    +
    1123  {
    +
    1124  for (auto& wJointAngleEstimator : jointAngleEstimationsWs)
    +
    1125  {
    +
    1126  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1127  threadManager.add(threadId, wJointAngleEstimator, queueIn, queueOut);
    +
    1128  threadIdPP(threadId, multiThreadEnabled);
    +
    1129  }
    +
    1130  queueIn++;
    +
    1131  queueOut++;
    +
    1132  // Sort frames
    +
    1133  if (jointAngleEstimationsWs.size() > 1)
    +
    1134  {
    +
    1135  const auto wQueueOrderer = std::make_shared<WQueueOrderer<TDatumsSP>>();
    +
    1136  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1137  threadManager.add(threadId, wQueueOrderer, queueIn++, queueOut++);
    +
    1138  threadIdPP(threadId, multiThreadEnabled);
    +
    1139  }
    +
    1140  }
    +
    1141  else
    +
    1142  {
    +
    1143  if (jointAngleEstimationsWs.size() > 1)
    +
    1144  opLog("Multi-threading disabled, only 1 thread running for joint angle estimation.",
    +
    1145  Priority::High);
    +
    1146  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1147  threadManager.add(threadId, jointAngleEstimationsWs.at(0), queueIn++, queueOut++);
    +
    1148  }
    +
    1149  }
    +
    1150  // Post processing workers
    +
    1151  if (!postProcessingWs.empty())
    +
    1152  {
    +
    1153  // Combining postProcessingWs and outputWs
    +
    1154  outputWs = mergeVectors(postProcessingWs, outputWs);
    +
    1155  // // If I wanna split them
    +
    1156  // opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1157  // threadManager.add(threadId, postProcessingWs, queueIn++, queueOut++);
    +
    1158  // threadIdPP(threadId, multiThreadEnabled);
    +
    1159  }
    +
    1160  // If custom user Worker and uses its own thread
    +
    1161  if (!userPostProcessingWs.empty())
    +
    1162  {
    +
    1163  // If custom user Worker in its own thread
    +
    1164  if (userPostProcessingWsOnNewThread)
    +
    1165  {
    +
    1166  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1167  threadManager.add(threadId, userPostProcessingWs, queueIn++, queueOut++);
    +
    1168  threadIdPP(threadId, multiThreadEnabled);
    +
    1169  }
    +
    1170  // If custom user Worker in same thread
    +
    1171  // Merge with outputWs
    +
    1172  else
    +
    1173  outputWs = mergeVectors(outputWs, userPostProcessingWs);
    +
    1174  }
    +
    1175  // Output workers
    +
    1176  if (!outputWs.empty())
    +
    1177  {
    +
    1178  // Thread 4 or 5, queues 4 -> 5
    +
    1179  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1180  threadManager.add(threadId, outputWs, queueIn++, queueOut++);
    +
    1181  threadIdPP(threadId, multiThreadEnabled);
    +
    1182  }
    +
    1183  // User output worker
    +
    1184  // Thread Y, queues Q -> Q+1
    +
    1185  if (!userOutputWs.empty())
    +
    1186  {
    +
    1187  if (userOutputWsOnNewThread)
    +
    1188  {
    +
    1189  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1190  threadManager.add(threadId, userOutputWs, queueIn++, queueOut++);
    +
    1191  threadIdPP(threadId, multiThreadEnabled);
    +
    1192  }
    +
    1193  else
    +
    1194  {
    +
    1195  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1196  threadManager.add(threadId-1, userOutputWs, queueIn++, queueOut++);
    +
    1197  }
    +
    1198  }
    +
    1199  // OpenPose GUI
    +
    1200  if (guiW != nullptr)
    +
    1201  {
    +
    1202  // Thread Y+1, queues Q+1 -> Q+2
    +
    1203  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1204  threadManager.add(threadId, guiW, queueIn++, queueOut++);
    +
    1205  // Saving 3D output
    +
    1206  if (videoSaver3DW != nullptr)
    +
    1207  threadManager.add(threadId, videoSaver3DW, queueIn++, queueOut++);
    +
    1208  threadIdPP(threadId, multiThreadEnabled);
    +
    1209  }
    +
    1210  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1211  // Setting maximum speed
    +
    1212  if (wFpsMax != nullptr)
    +
    1213  {
    +
    1214  opLog("", Priority::Low, __LINE__, __FUNCTION__, __FILE__);
    +
    1215  threadManager.add(threadId, wFpsMax, queueIn++, queueOut++);
    +
    1216  threadIdPP(threadId, multiThreadEnabled);
    +
    1217  }
    +
    1218  }
    +
    1219  catch (const std::exception& e)
    +
    1220  {
    +
    1221  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    1222  }
    +
    1223  }
    +
    1224 
    +
    1225  template<typename TDatum, typename TDatums, typename TDatumsSP>
    +
    1226  void createMultiviewTDatum(
    +
    1227  TDatumsSP& tDatumsSP, unsigned long long& frameCounter,
    +
    1228  const CameraParameterReader& cameraParameterReader, const void* const cvMatPtr)
    +
    1229  {
    +
    1230  try
    +
    1231  {
    +
    1232  // Sanity check
    +
    1233  if (tDatumsSP == nullptr)
    +
    1234  op::error("tDatumsSP was nullptr, it must be initialized.", __LINE__, __FUNCTION__, __FILE__);
    +
    1235  // Camera parameters
    +
    1236  const std::vector<op::Matrix>& cameraMatrices = cameraParameterReader.getCameraMatrices();
    +
    1237  const std::vector<op::Matrix>& cameraIntrinsics = cameraParameterReader.getCameraIntrinsics();
    +
    1238  const std::vector<op::Matrix>& cameraExtrinsics = cameraParameterReader.getCameraExtrinsics();
    +
    1239  const auto matrixesSize = cameraMatrices.size();
    +
    1240  // More sanity checks
    +
    1241  if (cameraMatrices.size() < 2)
    +
    1242  op::error("There is less than 2 camera parameter matrices.",
    +
    1243  __LINE__, __FUNCTION__, __FILE__);
    +
    1244  if (cameraMatrices.size() != cameraIntrinsics.size() || cameraMatrices.size() != cameraExtrinsics.size())
    +
    1245  op::error("Camera parameters must have the same size.", __LINE__, __FUNCTION__, __FILE__);
    +
    1246  // Split image to process
    +
    1247  std::vector<op::Matrix> imagesToProcess(matrixesSize);
    +
    1248  op::Matrix::splitCvMatIntoVectorMatrix(imagesToProcess, cvMatPtr);
    +
    1249  // Fill tDatumsSP
    +
    1250  tDatumsSP->resize(cameraMatrices.size());
    +
    1251  for (auto datumIndex = 0 ; datumIndex < matrixesSize ; ++datumIndex)
    +
    1252  {
    +
    1253  auto& datumPtr = tDatumsSP->at(datumIndex);
    +
    1254  datumPtr = std::make_shared<op::Datum>();
    +
    1255  datumPtr->frameNumber = frameCounter;
    +
    1256  datumPtr->cvInputData = imagesToProcess[datumIndex];
    +
    1257  if (matrixesSize > 1)
    +
    1258  {
    +
    1259  datumPtr->subId = datumIndex;
    +
    1260  datumPtr->subIdMax = matrixesSize-1;
    +
    1261  datumPtr->cameraMatrix = cameraMatrices[datumIndex];
    +
    1262  datumPtr->cameraExtrinsics = cameraExtrinsics[datumIndex];
    +
    1263  datumPtr->cameraIntrinsics = cameraIntrinsics[datumIndex];
    +
    1264  }
    +
    1265  }
    +
    1266  ++frameCounter;
    +
    1267  }
    +
    1268  catch (const std::exception& e)
    +
    1269  {
    +
    1270  error(e.what(), __LINE__, __FUNCTION__, __FILE__);
    +
    1271  }
    +
    1272  }
    +
    1273 }
    +
    1274 
    +
    1275 #endif // OPENPOSE_WRAPPER_WRAPPER_AUXILIARY_HPP
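The worker graph assembled above is normally driven through the high-level op::Wrapper API rather than by calling configureThreadManager() directly. A minimal sketch, assuming the standard openpose/headers.hpp entry point, default models under models/, and that WrapperStructInput takes the producer type as its first constructor argument:

    #include <openpose/headers.hpp>

    int main()
    {
        // exec() configures the thread manager (the function documented above) and then runs it.
        op::Wrapper opWrapper{op::ThreadManagerMode::Synchronous};
        opWrapper.configure(op::WrapperStructPose{});                           // body pose, default settings
        opWrapper.configure(op::WrapperStructInput{op::ProducerType::Webcam});  // frames from the webcam (assumed argument order)
        opWrapper.exec();                                                       // blocks while the thread/queue graph runs
        return 0;
    }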
diff --git a/web/html/doc/wrapper_struct_extra_8hpp.html b/web/html/doc/wrapper_struct_extra_8hpp.html
new file mode 100644
index 000000000..50b79f369
--- /dev/null
+++ b/web/html/doc/wrapper_struct_extra_8hpp.html
@@ -0,0 +1,118 @@
+OpenPose: include/openpose/wrapper/wrapperStructExtra.hpp File Reference
    +
    + + + + + + + +
    +
    OpenPose +  1.7.0 +
    +
    The first real-time multi-person system to jointly detect human body, hand, facial, and foot keypoints
    +
    +
    + + + + + + + +
    +
    + +
    +
    +
    + +
    + +
    +
    + + +
    + +
    + +
    + +
    +
    wrapperStructExtra.hpp File Reference
    +
    +
    + +

    Go to the source code of this file.

    + + + + +

    +Classes

    struct  op::WrapperStructExtra
     
    + + + +

    +Namespaces

     op
     
    +
    +
diff --git a/web/html/doc/wrapper_struct_extra_8hpp_source.html b/web/html/doc/wrapper_struct_extra_8hpp_source.html new file mode 100644 index 000000000..82d76dcb9 --- /dev/null +++ b/web/html/doc/wrapper_struct_extra_8hpp_source.html @@ -0,0 +1,137 @@

OpenPose: include/openpose/wrapper/wrapperStructExtra.hpp Source File

wrapperStructExtra.hpp

Go to the documentation of this file. The listing below is reconstructed from the declarations that survive in the generated page; the original include directives were lost and are omitted:

    #ifndef OPENPOSE_WRAPPER_WRAPPER_STRUCT_EXTRA_HPP
    #define OPENPOSE_WRAPPER_WRAPPER_STRUCT_EXTRA_HPP

    namespace op
    {
        // Extra (3-D reconstruction and tracking) settings consumed by the OpenPose wrapper.
        struct OP_API WrapperStructExtra
        {
            // Members mirror the constructor parameters below.
            bool reconstruct3d;
            int minViews3d;
            bool identification;
            int tracking;
            int ikThreads;

            WrapperStructExtra(
                const bool reconstruct3d = false, const int minViews3d = -1, const bool identification = false,
                const int tracking = -1, const int ikThreads = 0);
        };
    }

    #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_EXTRA_HPP

References: OP_API (Definition: macros.hpp:18).
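As a usage sketch (not part of the generated page), the struct above could be filled in and handed to the high-level wrapper; op::Wrapper and its configure()/exec() calls are assumed from the OpenPose C++ API and are not documented on this page:

    #include <openpose/headers.hpp> // assumed umbrella header for op::Wrapper and the WrapperStruct types

    int main()
    {
        // Enable 3-D reconstruction from at least two camera views; keep tracking and IK off.
        const op::WrapperStructExtra extra{
            true /*reconstruct3d*/, 2 /*minViews3d*/, false /*identification*/,
            -1 /*tracking*/, 0 /*ikThreads*/};

        op::Wrapper opWrapper;          // assumed default-constructible
        opWrapper.configure(extra);     // assumed overload taking WrapperStructExtra
        opWrapper.exec();               // runs with default pose/input/output settings
        return 0;
    }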
diff --git a/web/html/doc/wrapper_struct_face_8hpp.html b/web/html/doc/wrapper_struct_face_8hpp.html new file mode 100644 index 000000000..dcb1d3828 --- /dev/null +++ b/web/html/doc/wrapper_struct_face_8hpp.html @@ -0,0 +1,121 @@

OpenPose: include/openpose/wrapper/wrapperStructFace.hpp File Reference

wrapperStructFace.hpp File Reference
diff --git a/web/html/doc/wrapper_struct_face_8hpp_source.html b/web/html/doc/wrapper_struct_face_8hpp_source.html new file mode 100644 index 000000000..a29a2c914 --- /dev/null +++ b/web/html/doc/wrapper_struct_face_8hpp_source.html @@ -0,0 +1,158 @@

OpenPose: include/openpose/wrapper/wrapperStructFace.hpp Source File

wrapperStructFace.hpp

Go to the documentation of this file. The listing below is reconstructed from the surviving declarations; member names and types are inferred from the constructor, and the original include directives are not recoverable:

    #ifndef OPENPOSE_WRAPPER_WRAPPER_STRUCT_FACE_HPP
    #define OPENPOSE_WRAPPER_WRAPPER_STRUCT_FACE_HPP

    namespace op
    {
        // Face-keypoint settings consumed by the OpenPose wrapper.
        struct OP_API WrapperStructFace
        {
            // Members mirror the constructor parameters below.
            bool enable;
            Detector detector;
            Point<int> netInputSize;
            RenderMode renderMode;
            float alphaKeypoint;
            float alphaHeatMap;
            float renderThreshold;

            WrapperStructFace(
                const bool enable = false, const Detector detector = Detector::Body,
                const Point<int>& netInputSize = Point<int>{368, 368}, const RenderMode renderMode = RenderMode::Auto,
                const float alphaKeypoint = FACE_DEFAULT_ALPHA_KEYPOINT,
                const float alphaHeatMap = FACE_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold = 0.4f);
        };
    }

    #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_FACE_HPP

References: OP_API (Definition: macros.hpp:18), Detector (Definition: enumClasses.hpp:15), RenderMode (Definition: enumClasses.hpp:27), FACE_DEFAULT_ALPHA_KEYPOINT, FACE_DEFAULT_ALPHA_HEAT_MAP.
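A usage sketch (not from the generated page): enabling face keypoints with a smaller network input to save GPU memory; op::Wrapper and its configure() overloads are assumed from the OpenPose C++ API:

    #include <openpose/headers.hpp> // assumed umbrella header

    int main()
    {
        op::WrapperStructFace face;
        face.enable = true;
        face.netInputSize = op::Point<int>{256, 256}; // default is 368x368
        face.renderThreshold = 0.5f;                  // hide low-confidence face keypoints

        op::Wrapper opWrapper;
        opWrapper.configure(face); // assumed overload taking WrapperStructFace
        opWrapper.exec();
        return 0;
    }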
diff --git a/web/html/doc/wrapper_struct_gui_8hpp.html b/web/html/doc/wrapper_struct_gui_8hpp.html new file mode 100644 index 000000000..0864dc68b --- /dev/null +++ b/web/html/doc/wrapper_struct_gui_8hpp.html @@ -0,0 +1,119 @@

OpenPose: include/openpose/wrapper/wrapperStructGui.hpp File Reference

wrapperStructGui.hpp File Reference

Go to the source code of this file.

Classes
    struct op::WrapperStructGui

Namespaces
    op
diff --git a/web/html/doc/wrapper_struct_gui_8hpp_source.html b/web/html/doc/wrapper_struct_gui_8hpp_source.html new file mode 100644 index 000000000..d6b1903bc --- /dev/null +++ b/web/html/doc/wrapper_struct_gui_8hpp_source.html @@ -0,0 +1,135 @@

OpenPose: include/openpose/wrapper/wrapperStructGui.hpp Source File

wrapperStructGui.hpp

Go to the documentation of this file. Listing reconstructed from the surviving declarations (members inferred from the constructor; include directives not recoverable):

    #ifndef OPENPOSE_WRAPPER_WRAPPER_STRUCT_GUI_HPP
    #define OPENPOSE_WRAPPER_WRAPPER_STRUCT_GUI_HPP

    namespace op
    {
        // GUI/display settings consumed by the OpenPose wrapper.
        struct OP_API WrapperStructGui
        {
            // Members mirror the constructor parameters below.
            DisplayMode displayMode;
            bool guiVerbose;
            bool fullScreen;

            WrapperStructGui(
                const DisplayMode displayMode = DisplayMode::NoDisplay, const bool guiVerbose = false,
                const bool fullScreen = false);
        };
    }

    #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_GUI_HPP

References: OP_API (Definition: macros.hpp:18), DisplayMode (Definition: enumClasses.hpp:11).
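A usage sketch (not from the generated page): turning the preview window on. DisplayMode::DisplayAll is an assumed value of OpenPose's DisplayMode enumeration (only NoDisplay appears on this page), and the op::Wrapper API is likewise assumed:

    #include <openpose/headers.hpp> // assumed umbrella header

    int main()
    {
        // Show the rendered output in a window, with verbose on-screen text, not full screen.
        const op::WrapperStructGui gui{
            op::DisplayMode::DisplayAll /*assumed enum value*/, true /*guiVerbose*/, false /*fullScreen*/};

        op::Wrapper opWrapper;
        opWrapper.configure(gui); // assumed overload taking WrapperStructGui
        opWrapper.exec();
        return 0;
    }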
diff --git a/web/html/doc/wrapper_struct_hand_8hpp.html b/web/html/doc/wrapper_struct_hand_8hpp.html new file mode 100644 index 000000000..17f341982 --- /dev/null +++ b/web/html/doc/wrapper_struct_hand_8hpp.html @@ -0,0 +1,121 @@

OpenPose: include/openpose/wrapper/wrapperStructHand.hpp File Reference

wrapperStructHand.hpp File Reference
diff --git a/web/html/doc/wrapper_struct_hand_8hpp_source.html b/web/html/doc/wrapper_struct_hand_8hpp_source.html new file mode 100644 index 000000000..19d681927 --- /dev/null +++ b/web/html/doc/wrapper_struct_hand_8hpp_source.html @@ -0,0 +1,165 @@

OpenPose: include/openpose/wrapper/wrapperStructHand.hpp Source File

wrapperStructHand.hpp

Go to the documentation of this file. Listing reconstructed from the surviving declarations (members inferred from the constructor; include directives not recoverable):

    #ifndef OPENPOSE_WRAPPER_WRAPPER_STRUCT_HAND_HPP
    #define OPENPOSE_WRAPPER_WRAPPER_STRUCT_HAND_HPP

    namespace op
    {
        // Hand-keypoint settings consumed by the OpenPose wrapper.
        struct OP_API WrapperStructHand
        {
            // Members mirror the constructor parameters below.
            bool enable;
            Detector detector;
            Point<int> netInputSize;
            int scalesNumber;
            float scaleRange;
            RenderMode renderMode;
            float alphaKeypoint;
            float alphaHeatMap;
            float renderThreshold;

            WrapperStructHand(
                const bool enable = false, const Detector detector = Detector::Body,
                const Point<int>& netInputSize = Point<int>{368, 368}, const int scalesNumber = 1,
                const float scaleRange = 0.4f, const RenderMode renderMode = RenderMode::Auto,
                const float alphaKeypoint = HAND_DEFAULT_ALPHA_KEYPOINT,
                const float alphaHeatMap = HAND_DEFAULT_ALPHA_HEAT_MAP, const float renderThreshold = 0.2f);
        };
    }

    #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_HAND_HPP

References: OP_API (Definition: macros.hpp:18), Detector (Definition: enumClasses.hpp:15), RenderMode (Definition: enumClasses.hpp:27), HAND_DEFAULT_ALPHA_KEYPOINT, HAND_DEFAULT_ALPHA_HEAT_MAP.
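A usage sketch (not from the generated page): enabling hand keypoints with multi-scale evaluation, which trades speed for accuracy; the op::Wrapper API is assumed from the OpenPose C++ tutorials:

    #include <openpose/headers.hpp> // assumed umbrella header

    int main()
    {
        op::WrapperStructHand hand;
        hand.enable = true;
        hand.scalesNumber = 6;    // evaluate 6 scales instead of the default 1
        hand.scaleRange = 0.4f;   // total scale range spread around the base scale

        op::Wrapper opWrapper;
        opWrapper.configure(hand); // assumed overload taking WrapperStructHand
        opWrapper.exec();
        return 0;
    }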
diff --git a/web/html/doc/wrapper_struct_input_8hpp.html b/web/html/doc/wrapper_struct_input_8hpp.html new file mode 100644 index 000000000..1cf4f8081 --- /dev/null +++ b/web/html/doc/wrapper_struct_input_8hpp.html @@ -0,0 +1,120 @@

OpenPose: include/openpose/wrapper/wrapperStructInput.hpp File Reference

wrapperStructInput.hpp File Reference

#include <limits>
#include <openpose/core/common.hpp>
#include <openpose/producer/producer.hpp>

Go to the source code of this file.

Classes
    struct op::WrapperStructInput

Namespaces
    op
diff --git a/web/html/doc/wrapper_struct_input_8hpp_source.html b/web/html/doc/wrapper_struct_input_8hpp_source.html new file mode 100644 index 000000000..436a53bd0 --- /dev/null +++ b/web/html/doc/wrapper_struct_input_8hpp_source.html @@ -0,0 +1,173 @@

OpenPose: include/openpose/wrapper/wrapperStructInput.hpp Source File

wrapperStructInput.hpp

Go to the documentation of this file. Listing reconstructed from the surviving declarations (members inferred from the constructor; the include list matches the File Reference page above):

    #ifndef OPENPOSE_WRAPPER_WRAPPER_STRUCT_INPUT_HPP
    #define OPENPOSE_WRAPPER_WRAPPER_STRUCT_INPUT_HPP

    #include <limits> // std::numeric_limits
    #include <openpose/core/common.hpp>
    #include <openpose/producer/producer.hpp>

    namespace op
    {
        // Input (producer) settings consumed by the OpenPose wrapper.
        struct OP_API WrapperStructInput
        {
            // Members mirror the constructor parameters below.
            ProducerType producerType;
            String producerString;
            unsigned long long frameFirst;
            unsigned long long frameStep;
            unsigned long long frameLast;
            bool realTimeProcessing;
            bool frameFlip;
            int frameRotate;
            bool framesRepeat;
            Point<int> cameraResolution;
            String cameraParameterPath;
            bool undistortImage;
            int numberViews;

            WrapperStructInput(
                const ProducerType producerType = ProducerType::None, const String& producerString = "",
                const unsigned long long frameFirst = 0, const unsigned long long frameStep = 1,
                const unsigned long long frameLast = std::numeric_limits<unsigned long long>::max(),
                const bool realTimeProcessing = false, const bool frameFlip = false, const int frameRotate = 0,
                const bool framesRepeat = false, const Point<int>& cameraResolution = Point<int>{-1,-1},
                const String& cameraParameterPath = "models/cameraParameters/",
                const bool undistortImage = false, const int numberViews = -1);
        };
    }

    #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_INPUT_HPP

References: OP_API (Definition: macros.hpp:18), ProducerType (Definition: enumClasses.hpp:30).
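A usage sketch (not from the generated page): reading frames 100 to 500 of a video file, every second frame. ProducerType::Video is an assumed value of the ProducerType enumeration (only None appears on this page), op::String is assumed constructible from a string literal, and the op::Wrapper API is assumed from the OpenPose C++ tutorials:

    #include <openpose/headers.hpp> // assumed umbrella header

    int main()
    {
        op::WrapperStructInput input;
        input.producerType = op::ProducerType::Video;      // assumed enum value
        input.producerString = "examples/media/video.avi"; // placeholder path
        input.frameFirst = 100;
        input.frameStep = 2;
        input.frameLast = 500;

        op::Wrapper opWrapper;
        opWrapper.configure(input); // assumed overload taking WrapperStructInput
        opWrapper.exec();
        return 0;
    }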
diff --git a/web/html/doc/wrapper_struct_output_8hpp.html b/web/html/doc/wrapper_struct_output_8hpp.html new file mode 100644 index 000000000..c09fd39a6 --- /dev/null +++ b/web/html/doc/wrapper_struct_output_8hpp.html @@ -0,0 +1,120 @@

OpenPose: include/openpose/wrapper/wrapperStructOutput.hpp File Reference

wrapperStructOutput.hpp File Reference

Go to the source code of this file.

Classes
    struct op::WrapperStructOutput

Namespaces
    op
diff --git a/web/html/doc/wrapper_struct_output_8hpp_source.html b/web/html/doc/wrapper_struct_output_8hpp_source.html new file mode 100644 index 000000000..c5264014e --- /dev/null +++ b/web/html/doc/wrapper_struct_output_8hpp_source.html @@ -0,0 +1,194 @@

OpenPose: include/openpose/wrapper/wrapperStructOutput.hpp Source File

wrapperStructOutput.hpp

Go to the documentation of this file. Listing reconstructed from the surviving declarations (members inferred from the constructor; include directives not recoverable):

    #ifndef OPENPOSE_WRAPPER_WRAPPER_STRUCT_OUTPUT_HPP
    #define OPENPOSE_WRAPPER_WRAPPER_STRUCT_OUTPUT_HPP

    namespace op
    {
        // File/UDP output settings consumed by the OpenPose wrapper.
        struct OP_API WrapperStructOutput
        {
            // Members mirror the constructor parameters below.
            double verbose;
            String writeKeypoint;
            DataFormat writeKeypointFormat;
            String writeJson;
            String writeCocoJson;
            int writeCocoJsonVariants;
            int writeCocoJsonVariant;
            String writeImages;
            String writeImagesFormat;
            String writeVideo;
            double writeVideoFps;
            bool writeVideoWithAudio;
            String writeHeatMaps;
            String writeHeatMapsFormat;
            String writeVideo3D;
            String writeVideoAdam;
            String writeBvh;
            String udpHost;
            String udpPort;

            WrapperStructOutput(
                const double verbose = -1, const String& writeKeypoint = "",
                const DataFormat writeKeypointFormat = DataFormat::Xml, const String& writeJson = "",
                const String& writeCocoJson = "", const int writeCocoJsonVariants = 1,
                const int writeCocoJsonVariant = 1, const String& writeImages = "",
                const String& writeImagesFormat = "png", const String& writeVideo = "",
                const double writeVideoFps = -1., const bool writeVideoWithAudio = false,
                const String& writeHeatMaps = "", const String& writeHeatMapsFormat = "png",
                const String& writeVideo3D = "", const String& writeVideoAdam = "",
                const String& writeBvh = "", const String& udpHost = "",
                const String& udpPort = "8051");
        };
    }

    #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_OUTPUT_HPP

References: OP_API (Definition: macros.hpp:18), DataFormat (Definition: enumClasses.hpp:7).
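A usage sketch (not from the generated page): saving per-frame keypoints as JSON plus a rendered video. The output paths are placeholders, op::String is assumed constructible from a string literal, and the op::Wrapper API is assumed from the OpenPose C++ tutorials:

    #include <openpose/headers.hpp> // assumed umbrella header

    int main()
    {
        op::WrapperStructOutput output;
        output.writeJson = "output/json/";        // one JSON file per processed frame
        output.writeVideo = "output/result.avi";  // rendered output video
        output.writeVideoFps = 30.;               // explicit FPS instead of the -1 auto value

        op::Wrapper opWrapper;
        opWrapper.configure(output); // assumed overload taking WrapperStructOutput
        opWrapper.exec();
        return 0;
    }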
diff --git a/web/html/doc/wrapper_struct_pose_8hpp.html b/web/html/doc/wrapper_struct_pose_8hpp.html new file mode 100644 index 000000000..2f65fbe28 --- /dev/null +++ b/web/html/doc/wrapper_struct_pose_8hpp.html @@ -0,0 +1,123 @@

OpenPose: include/openpose/wrapper/wrapperStructPose.hpp File Reference

wrapperStructPose.hpp File Reference
diff --git a/web/html/doc/wrapper_struct_pose_8hpp_source.html b/web/html/doc/wrapper_struct_pose_8hpp_source.html new file mode 100644 index 000000000..9f3d473ff --- /dev/null +++ b/web/html/doc/wrapper_struct_pose_8hpp_source.html @@ -0,0 +1,237 @@

OpenPose: include/openpose/wrapper/wrapperStructPose.hpp Source File

wrapperStructPose.hpp

Go to the documentation of this file. Listing reconstructed from the surviving declarations (members inferred from the constructor; include directives not recoverable):

    #ifndef OPENPOSE_WRAPPER_WRAPPER_STRUCT_POSE_HPP
    #define OPENPOSE_WRAPPER_WRAPPER_STRUCT_POSE_HPP

    namespace op
    {
        // Body-pose settings consumed by the OpenPose wrapper.
        struct OP_API WrapperStructPose
        {
            // Members mirror the constructor parameters below.
            PoseMode poseMode;
            Point<int> netInputSize;
            double netInputSizeDynamicBehavior;
            Point<int> outputSize;
            ScaleMode keypointScaleMode;
            int gpuNumber;
            int gpuNumberStart;
            int scalesNumber;
            float scaleGap;
            RenderMode renderMode;
            PoseModel poseModel;
            bool blendOriginalFrame;
            float alphaKeypoint;
            float alphaHeatMap;
            int defaultPartToRender;
            String modelFolder;
            std::vector<HeatMapType> heatMapTypes;
            ScaleMode heatMapScaleMode;
            bool addPartCandidates;
            float renderThreshold;
            int numberPeopleMax;
            bool maximizePositives;
            double fpsMax;
            String protoTxtPath;
            String caffeModelPath;
            float upsamplingRatio;
            bool enableGoogleLogging;

            WrapperStructPose(
                const PoseMode poseMode = PoseMode::Enabled, const Point<int>& netInputSize = Point<int>{-1, 368},
                const double netInputSizeDynamicBehavior = 1.,
                const Point<int>& outputSize = Point<int>{-1, -1},
                const ScaleMode keypointScaleMode = ScaleMode::InputResolution, const int gpuNumber = -1,
                const int gpuNumberStart = 0, const int scalesNumber = 1, const float scaleGap = 0.25f,
                const RenderMode renderMode = RenderMode::Auto, const PoseModel poseModel = PoseModel::BODY_25,
                const bool blendOriginalFrame = true, const float alphaKeypoint = POSE_DEFAULT_ALPHA_KEYPOINT,
                const float alphaHeatMap = POSE_DEFAULT_ALPHA_HEAT_MAP, const int defaultPartToRender = 0,
                const String& modelFolder = "models/", const std::vector<HeatMapType>& heatMapTypes = {},
                const ScaleMode heatMapScaleMode = ScaleMode::UnsignedChar, const bool addPartCandidates = false,
                const float renderThreshold = 0.05f, const int numberPeopleMax = -1, const bool maximizePositives = false,
                const double fpsMax = -1., const String& protoTxtPath = "", const String& caffeModelPath = "",
                const float upsamplingRatio = 0.f, const bool enableGoogleLogging = true);
        };
    }

    #endif // OPENPOSE_WRAPPER_WRAPPER_STRUCT_POSE_HPP

References: OP_API (Definition: macros.hpp:18), PoseMode (Definition: enumClasses.hpp:7), PoseModel (Definition: enumClasses.hpp:10), ScaleMode (Definition: enumClasses.hpp:7), RenderMode (Definition: enumClasses.hpp:27), POSE_DEFAULT_ALPHA_KEYPOINT, POSE_DEFAULT_ALPHA_HEAT_MAP.
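A final usage sketch (not from the generated pages), combining several of the structs documented above into one pipeline; the op::Wrapper API and its per-struct configure() overloads are assumed from the OpenPose C++ tutorials:

    #include <openpose/headers.hpp> // assumed umbrella header

    int main()
    {
        op::Wrapper opWrapper;

        // Body pose: lower net resolution and cap the number of detected people.
        op::WrapperStructPose pose;
        pose.netInputSize = op::Point<int>{-1, 256}; // default is -1x368
        pose.numberPeopleMax = 2;
        opWrapper.configure(pose);

        // Face and hands with their default settings, just switched on.
        op::WrapperStructFace face;
        face.enable = true;
        opWrapper.configure(face);

        op::WrapperStructHand hand;
        hand.enable = true;
        opWrapper.configure(hand);

        opWrapper.exec(); // blocks until the input producer is exhausted
        return 0;
    }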