Update InspireFace to 1.1.6

This commit is contained in:
tunm
2024-07-17 08:53:50 +08:00
parent b9c1d8bdba
commit 3a8dd7710f
30 changed files with 1278 additions and 426 deletions

View File

@@ -0,0 +1,112 @@
---
Language: Cpp
# BasedOnStyle: Google
AccessModifierOffset: -4
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlines: Left
AlignOperands: true
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: true
BinPackArguments: true
BinPackParameters: true
BraceWrapping:
AfterClass: false
AfterControlStatement: false
AfterEnum: false
AfterFunction: false
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: false
AfterUnion: false
AfterExternBlock: false
BeforeCatch: false
BeforeElse: false
IndentBraces: false
SplitEmptyFunction: true
SplitEmptyRecord: true
SplitEmptyNamespace: true
BreakBeforeBinaryOperators: None
BreakBeforeBraces: Attach
BreakBeforeInheritanceComma: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
BreakConstructorInitializers: BeforeColon
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: true
ColumnLimit: 100
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 0
ContinuationIndentWidth: 2
Cpp11BracedListStyle: true
DerivePointerAlignment: true
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
- foreach
- Q_FOREACH
- BOOST_FOREACH
IncludeBlocks: Preserve
IncludeCategories:
- Regex: '^<ext/.*\.h>'
Priority: 2
- Regex: '^<.*\.h>'
Priority: 1
- Regex: '^<.*'
Priority: 2
- Regex: '.*'
Priority: 3
IncludeIsMainRegex: '([-_](test|unittest))?$'
IndentCaseLabels: true
IndentPPDirectives: None
IndentWidth: 4
IndentWrappedFunctionNames: false
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtTheStartOfBlocks: false
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBlockIndentWidth: 2
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: false
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 1
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PointerAlignment: Left
ReflowComments: true
SortIncludes: false
SortUsingDeclarations: true
SpaceAfterCStyleCast: false
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Auto
TabWidth: 4
UseTab: Never
...

View File

@@ -13,3 +13,4 @@ pack/*
.cache/*
.vscode/*
build_local/*
local_build/*

View File

@@ -1,15 +1,16 @@
cmake_minimum_required(VERSION 3.10)
project(InspireFace)
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3")
# Current version
set(INSPIRE_FACE_VERSION_MAJOR 1)
set(INSPIRE_FACE_VERSION_MINOR 1)
set(INSPIRE_FACE_VERSION_PATCH 4)
set(INSPIRE_FACE_VERSION_PATCH 6)
# Converts the version number to a string
string(CONCAT INSPIRE_FACE_VERSION_MAJOR_STR ${INSPIRE_FACE_VERSION_MAJOR})

View File

@@ -13,6 +13,8 @@ Please contact [contact@insightface.ai](mailto:contact@insightface.ai?subject=In
## Change Logs
**`2024-07-07`** Add some face action detection to the face interaction module.
**`2024-07-05`** Fixed some bugs in the python ctypes interface.
**`2024-07-03`** Add the blink detection algorithm of face interaction module.
@@ -64,7 +66,7 @@ The '**3rdparty**' directory already includes the MNN library and specifies a pa
### 1.5. Requirements
- CMake (version 3.10 or higher)
- OpenCV (version 4.20 or higher)
- OpenCV (version 3.5 or higher)
- Use the specific OpenCV-SDK supported by each target platform such as Android, iOS, and Linux.
- NDK (version 16 or higher, only required for Android)
- MNN (version 1.4.0 or higher)

View File

@@ -41,8 +41,9 @@ cmake -DCMAKE_SYSTEM_NAME=Linux \
-DISF_BUILD_WITH_SAMPLE=ON \
-DISF_BUILD_WITH_TEST=ON \
-DISF_ENABLE_BENCHMARK=ON \
-DISF_ENABLE_USE_LFW_DATA=ON \
-DISF_ENABLE_TEST_EVALUATION=ON \
-DISF_ENABLE_USE_LFW_DATA=OFF \
-DISF_ENABLE_TEST_EVALUATION=OFF \
-DISF_ENABLE_TRACKING_BY_DETECTION=ON \
-DMNN_CUDA=ON \
-DISF_GLOBAL_INFERENCE_BACKEND_USE_MNN_CUDA=ON \
-DISF_LINUX_MNN_CUDA=/home/tunm/softwate/MNN-2.7.2/build_cuda ${SCRIPT_DIR}

View File

@@ -43,4 +43,16 @@ bool Launch::isMLoad() const {
return m_load_;
}
void Launch::Unload() {
    // Serialize unload against any concurrent Load()/Unload() calls.
    std::lock_guard<std::mutex> guard(mutex_);
    if (!m_load_) {
        INSPIRE_LOGW("Unload called but system was not loaded.");
        return;
    }
    // Release the archive's resources and flag the launcher as unloaded
    // so a subsequent Load() can re-initialize from scratch.
    m_archive_.Release();
    m_load_ = false;
    INSPIRE_LOGI("All resources have been successfully unloaded and system is reset.");
}
} // namespace inspire

View File

@@ -33,6 +33,9 @@ public:
// Checks if the resources have been successfully loaded.
bool isMLoad() const;
// Unloads the resources and resets the system to its initial state.
void Unload();
private:
Launch() : m_load_(false) {} ///< Private constructor for the singleton pattern.

View File

@@ -0,0 +1,4 @@
// Out-of-class definitions for ResourceManager's static data members
// (declared in resource_manage.h). Exactly one translation unit must
// provide these so the singleton storage and its mutex have a single
// definition across the program.
#include "resource_manage.h"
// Lazily populated by ResourceManager::getInstance() on first use.
std::unique_ptr<inspire::ResourceManager> inspire::ResourceManager::instance;
// Guards singleton creation and every handle-map access in the class.
std::mutex inspire::ResourceManager::mutex;

View File

@@ -0,0 +1,121 @@
// Created by tunm on 2024/07/16.
#pragma once
#ifndef INSPIREFACE_RESOURCE_MANAGE_H
#define INSPIREFACE_RESOURCE_MANAGE_H

#include <iostream>
#include <mutex>
#include <unordered_map>
#include <memory>
#include <iomanip>  // For std::setw and std::left

#ifndef INSPIRE_API
#define INSPIRE_API
#endif

#define RESOURCE_MANAGE inspire::ResourceManager::getInstance()

namespace inspire {

/**
 * @brief Thread-safe singleton that tracks the lifetime of C-API handles
 *        (sessions and image streams).
 *
 * Handles are recorded on creation and flagged on release; entries are
 * intentionally never erased so that creation/release totals remain
 * available for leak diagnostics via printResourceStatistics().
 * All public methods lock the class-wide mutex.
 */
class ResourceManager {
private:
    // Map value: false = still alive, true = already released.
    typedef std::unordered_map<long, bool> HandleMap;

    // Singleton storage; the definitions live in resource_manage.cpp.
    static std::unique_ptr<ResourceManager> instance;
    static std::mutex mutex;

    HandleMap sessionMap;  // Session handles keyed by their pointer value.
    HandleMap streamMap;   // Image-stream handles keyed by their pointer value.

    // Private constructor guarantees the singleton pattern.
    ResourceManager() {}

    // Records a newly created handle as "not released".
    // Caller must hold `mutex`.
    static void registerHandle(HandleMap &map, long handle) {
        map[handle] = false;
    }

    // Marks a handle as released exactly once. Returns false for unknown
    // or already-released handles (double-release detection).
    // Caller must hold `mutex`.
    static bool markReleased(HandleMap &map, long handle) {
        HandleMap::iterator it = map.find(handle);
        if (it == map.end() || it->second) {
            return false;
        }
        it->second = true;
        return true;
    }

    // Prints one statistics row (created / released / outstanding) for the
    // given resource kind. Caller must hold `mutex`.
    static void printRow(const char *name, const HandleMap &map) {
        std::size_t released = 0;
        for (HandleMap::const_iterator it = map.begin(); it != map.end(); ++it) {
            if (it->second) {
                ++released;
            }
        }
        std::cout << std::left << std::setw(15) << name << std::setw(15) << map.size()
                  << std::setw(15) << released << std::setw(15) << (map.size() - released)
                  << std::endl;
    }

public:
    // Remove copy constructor and assignment operator.
    ResourceManager(const ResourceManager &) = delete;
    ResourceManager &operator=(const ResourceManager &) = delete;

    // Returns the process-wide instance, creating it on first use.
    static ResourceManager *getInstance() {
        std::lock_guard<std::mutex> lock(mutex);
        if (!instance) {
            instance.reset(new ResourceManager());
        }
        return instance.get();
    }

    // Records the creation of a session handle.
    void createSession(long handle) {
        std::lock_guard<std::mutex> lock(mutex);
        registerHandle(sessionMap, handle);
    }

    // Releases a session handle; false if unknown or already released.
    bool releaseSession(long handle) {
        std::lock_guard<std::mutex> lock(mutex);
        return markReleased(sessionMap, handle);
    }

    // Records the creation of an image-stream handle.
    void createStream(long handle) {
        std::lock_guard<std::mutex> lock(mutex);
        registerHandle(streamMap, handle);
    }

    // Releases an image-stream handle; false if unknown or already released.
    bool releaseStream(long handle) {
        std::lock_guard<std::mutex> lock(mutex);
        return markReleased(streamMap, handle);
    }

    // Prints a table of created/released/outstanding counts per resource kind.
    void printResourceStatistics() {
        std::lock_guard<std::mutex> lock(mutex);
        std::cout << std::left << std::setw(15) << "Resource Name" << std::setw(15)
                  << "Total Created" << std::setw(15) << "Total Released" << std::setw(15)
                  << "Not Released" << std::endl;
        printRow("Session", sessionMap);
        printRow("Stream", streamMap);
    }
};

}  // namespace inspire

#endif  // INSPIREFACE_RESOURCE_MANAGE_H

View File

@@ -8,57 +8,86 @@
#include "information.h"
#include "feature_hub/feature_hub.h"
#include "Initialization_module/launch.h"
#include "Initialization_module/resource_manage.h"
using namespace inspire;
HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageStream* handle) {
HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageStream *handle) {
if (data == nullptr || handle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
auto stream = new HF_CameraStream();
switch (data->rotation) {
case HF_CAMERA_ROTATION_90: stream->impl.SetRotationMode(ROTATION_90); break;
case HF_CAMERA_ROTATION_180: stream->impl.SetRotationMode(ROTATION_180); break;
case HF_CAMERA_ROTATION_270: stream->impl.SetRotationMode(ROTATION_270); break;
default: stream->impl.SetRotationMode(ROTATION_0); break;
case HF_CAMERA_ROTATION_90:
stream->impl.SetRotationMode(ROTATION_90);
break;
case HF_CAMERA_ROTATION_180:
stream->impl.SetRotationMode(ROTATION_180);
break;
case HF_CAMERA_ROTATION_270:
stream->impl.SetRotationMode(ROTATION_270);
break;
default:
stream->impl.SetRotationMode(ROTATION_0);
break;
}
switch (data->format) {
case HF_STREAM_RGB: stream->impl.SetDataFormat(RGB); break;
case HF_STREAM_BGR: stream->impl.SetDataFormat(BGR); break;
case HF_STREAM_RGBA: stream->impl.SetDataFormat(RGBA); break;
case HF_STREAM_BGRA: stream->impl.SetDataFormat(BGRA); break;
case HF_STREAM_YUV_NV12: stream->impl.SetDataFormat(NV12); break;
case HF_STREAM_YUV_NV21: stream->impl.SetDataFormat(NV21); break;
default: return HERR_INVALID_IMAGE_STREAM_PARAM; // Assume there's a return code for unsupported formats
case HF_STREAM_RGB:
stream->impl.SetDataFormat(RGB);
break;
case HF_STREAM_BGR:
stream->impl.SetDataFormat(BGR);
break;
case HF_STREAM_RGBA:
stream->impl.SetDataFormat(RGBA);
break;
case HF_STREAM_BGRA:
stream->impl.SetDataFormat(BGRA);
break;
case HF_STREAM_YUV_NV12:
stream->impl.SetDataFormat(NV12);
break;
case HF_STREAM_YUV_NV21:
stream->impl.SetDataFormat(NV21);
break;
default:
return HERR_INVALID_IMAGE_STREAM_PARAM; // Assume there's a return code for unsupported
// formats
}
stream->impl.SetDataBuffer(data->data, data->height, data->width);
*handle = (HFImageStream)stream;
// Record the creation of this stream in the ResourceManager
RESOURCE_MANAGE->createStream((long)*handle);
return HSUCCEED;
}
HYPER_CAPI_EXPORT extern HResult HFReleaseImageStream(HFImageStream streamHandle) {
if (streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
delete (HF_CameraStream*)streamHandle;
// Check and mark this stream as released in the ResourceManager
if (!RESOURCE_MANAGE->releaseStream((long)streamHandle)) {
return HERR_INVALID_IMAGE_STREAM_HANDLE; // or other appropriate error code
}
delete (HF_CameraStream *)streamHandle;
return HSUCCEED;
}
void HFDeBugImageStreamImShow(HFImageStream streamHandle) {
if (streamHandle == nullptr) {
INSPIRE_LOGE("Handle error");
}
HF_CameraStream *stream = (HF_CameraStream* ) streamHandle;
HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
if (stream == nullptr) {
INSPIRE_LOGE("Image error");
return;
}
auto image = stream->impl.GetScaledImage(1.0f, true);
# ifdef DISABLE_GUI
#ifdef DISABLE_GUI
cv::imwrite("tmp.jpg", image);
#else
cv::imshow("Debug", image);
@@ -71,7 +100,7 @@ HResult HFDeBugImageStreamDecodeSave(HFImageStream streamHandle, HPath savePath)
INSPIRE_LOGE("Handle error");
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
HF_CameraStream *stream = (HF_CameraStream* ) streamHandle;
HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
if (stream == nullptr) {
INSPIRE_LOGE("Image error");
return HERR_INVALID_IMAGE_STREAM_HANDLE;
@@ -87,17 +116,21 @@ HResult HFDeBugImageStreamDecodeSave(HFImageStream streamHandle, HPath savePath)
}
}
HResult HFReleaseInspireFaceSession(HFSession handle) {
if (handle == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
delete (HF_FaceAlgorithmSession*)handle;
// Check and mark this session as released in the ResourceManager
if (!RESOURCE_MANAGE->releaseSession((long)handle)) {
return HERR_INVALID_CONTEXT_HANDLE; // or other appropriate error code
}
delete (HF_FaceAlgorithmSession *)handle;
return HSUCCEED;
}
HResult HFCreateInspireFaceSession(HFSessionCustomParameter parameter, HFDetectMode detectMode, HInt32 maxDetectFaceNum, HInt32 detectPixelLevel, HInt32 trackByDetectModeFPS, HFSession *handle) {
HResult HFCreateInspireFaceSession(HFSessionCustomParameter parameter, HFDetectMode detectMode,
HInt32 maxDetectFaceNum, HInt32 detectPixelLevel,
HInt32 trackByDetectModeFPS, HFSession *handle) {
inspire::ContextCustomParameter param;
param.enable_mask_detect = parameter.enable_mask_detect;
param.enable_face_attribute = parameter.enable_face_quality;
@@ -115,19 +148,23 @@ HResult HFCreateInspireFaceSession(HFSessionCustomParameter parameter, HFDetectM
}
HF_FaceAlgorithmSession *ctx = new HF_FaceAlgorithmSession();
auto ret = ctx->impl.Configuration(detMode, maxDetectFaceNum, param, detectPixelLevel, trackByDetectModeFPS);
auto ret = ctx->impl.Configuration(detMode, maxDetectFaceNum, param, detectPixelLevel,
trackByDetectModeFPS);
if (ret != HSUCCEED) {
delete ctx;
*handle = nullptr;
} else {
*handle = ctx;
// Record the creation of this session in the ResourceManager
RESOURCE_MANAGE->createSession((long)*handle);
}
return ret;
}
HResult HFCreateInspireFaceSessionOptional(HOption customOption, HFDetectMode detectMode, HInt32 maxDetectFaceNum, HInt32 detectPixelLevel, HInt32 trackByDetectModeFPS, HFSession *handle) {
HResult HFCreateInspireFaceSessionOptional(HOption customOption, HFDetectMode detectMode,
HInt32 maxDetectFaceNum, HInt32 detectPixelLevel,
HInt32 trackByDetectModeFPS, HFSession *handle) {
inspire::ContextCustomParameter param;
if (customOption & HF_ENABLE_FACE_RECOGNITION) {
param.enable_recognition = true;
@@ -156,14 +193,17 @@ HResult HFCreateInspireFaceSessionOptional(HOption customOption, HFDetectMode de
} else if (detectMode == HF_DETECT_MODE_TRACK_BY_DETECTION) {
detMode = inspire::DETECT_MODE_TRACK_BY_DETECT;
}
HF_FaceAlgorithmSession *ctx = new HF_FaceAlgorithmSession();
auto ret = ctx->impl.Configuration(detMode, maxDetectFaceNum, param, detectPixelLevel, trackByDetectModeFPS);
auto ret = ctx->impl.Configuration(detMode, maxDetectFaceNum, param, detectPixelLevel,
trackByDetectModeFPS);
if (ret != HSUCCEED) {
delete ctx;
*handle = nullptr;
} else {
*handle = ctx;
// Record the creation of this session in the ResourceManager
RESOURCE_MANAGE->createSession((long)*handle);
}
return ret;
@@ -174,17 +214,23 @@ HResult HFLaunchInspireFace(HPath resourcePath) {
return INSPIRE_LAUNCH->Load(resourcePath);
}
// Releases the globally loaded resources (the launcher's model archive) and
// resets the launch state so InspireFace can be initialized again later.
HResult HFTerminateInspireFace() {
    // Delegates to the Launch singleton; Unload() itself logs whether
    // anything was actually loaded, so this always reports success.
    INSPIRE_LAUNCH->Unload();
    return HSUCCEED;
}
// Disables the global feature hub, forwarding DisableHub()'s status code
// to the caller unchanged.
HResult HFFeatureHubDataDisable() {
    return FEATURE_HUB->DisableHub();
}
HResult HFFeatureHubDataEnable(HFFeatureHubConfiguration configuration) {
inspire::DatabaseConfiguration param = {0};
param.db_path = (configuration.dbPath != nullptr) ? std::string(configuration.dbPath) : std::string();
inspire::DatabaseConfiguration param;
param.db_path =
(configuration.dbPath != nullptr) ? std::string(configuration.dbPath) : std::string();
param.enable_use_db = configuration.enablePersistence;
param.feature_block_num = configuration.featureBlockNum;
param.recognition_threshold = configuration.searchThreshold;
param.search_mode = (SearchMode )configuration.searchMode;
param.search_mode = (SearchMode)configuration.searchMode;
auto ret = FEATURE_HUB->EnableHub(param);
return ret;
@@ -194,7 +240,7 @@ HResult HFSessionSetTrackPreviewSize(HFSession session, HInt32 previewSize) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
@@ -205,7 +251,7 @@ HResult HFSessionSetFilterMinimumFacePixelSize(HFSession session, HInt32 minSize
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
@@ -216,7 +262,7 @@ HResult HFSessionSetFaceTrackMode(HFSession session, HFDetectMode detectMode) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
@@ -231,36 +277,37 @@ HResult HFSessionSetFaceDetectThreshold(HFSession session, HFloat threshold) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
return ctx->impl.SetFaceDetectThreshold(threshold);
}
HResult HFExecuteFaceTrack(HFSession session, HFImageStream streamHandle, PHFMultipleFaceData results) {
HResult HFExecuteFaceTrack(HFSession session, HFImageStream streamHandle,
PHFMultipleFaceData results) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
if (streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_CameraStream *stream = (HF_CameraStream* ) streamHandle;
HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
if (stream == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
auto ret = ctx->impl.FaceDetectAndTrack(stream->impl);
results->detectedNum = ctx->impl.GetNumberOfFacesCurrentlyDetected();
results->rects = (HFaceRect *) ctx->impl.GetFaceRectsCache().data();
results->trackIds = (HInt32 *) ctx->impl.GetTrackIDCache().data();
results->angles.pitch = (HFloat *) ctx->impl.GetPitchResultsCache().data();
results->angles.roll = (HFloat *) ctx->impl.GetRollResultsCache().data();
results->angles.yaw = (HFloat *) ctx->impl.GetYawResultsCache().data();
results->tokens = (HFFaceBasicToken *) ctx->impl.GetFaceBasicDataCache().data();
results->rects = (HFaceRect *)ctx->impl.GetFaceRectsCache().data();
results->trackIds = (HInt32 *)ctx->impl.GetTrackIDCache().data();
results->angles.pitch = (HFloat *)ctx->impl.GetPitchResultsCache().data();
results->angles.roll = (HFloat *)ctx->impl.GetRollResultsCache().data();
results->angles.yaw = (HFloat *)ctx->impl.GetYawResultsCache().data();
results->tokens = (HFFaceBasicToken *)ctx->impl.GetFaceBasicDataCache().data();
return ret;
}
@@ -283,7 +330,8 @@ HResult HFGetNumOfFaceDenseLandmark(HPInt32 num) {
return HSUCCEED;
}
HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasicToken singleFace, HPoint2f* landmarks, HInt32 num) {
HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasicToken singleFace, HPoint2f *landmarks,
HInt32 num) {
if (num != 106) {
return HERR_SESS_LANDMARK_NUM_NOT_MATCH;
}
@@ -292,16 +340,15 @@ HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasicToken singleFace, HPoint2
data.data = singleFace.data;
HyperFaceData face = {0};
HInt32 ret;
ret = DeserializeHyperFaceData((char* )data.data, data.dataSize, face);
ret = DeserializeHyperFaceData((char *)data.data, data.dataSize, face);
if (ret != HSUCCEED) {
return ret;
}
for (size_t i = 0; i < num; i++)
{
for (size_t i = 0; i < num; i++) {
landmarks[i].x = face.densityLandmark[i].x;
landmarks[i].y = face.densityLandmark[i].y;
}
return HSUCCEED;
}
@@ -310,18 +357,19 @@ HResult HFFeatureHubFaceSearchThresholdSetting(float threshold) {
return HSUCCEED;
}
HResult HFFaceFeatureExtract(HFSession session, HFImageStream streamHandle, HFFaceBasicToken singleFace, PHFFaceFeature feature) {
HResult HFFaceFeatureExtract(HFSession session, HFImageStream streamHandle,
HFFaceBasicToken singleFace, PHFFaceFeature feature) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
if (streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_CameraStream *stream = (HF_CameraStream* ) streamHandle;
HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
if (stream == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
@@ -338,19 +386,19 @@ HResult HFFaceFeatureExtract(HFSession session, HFImageStream streamHandle, HFFa
return ret;
}
HResult HFFaceFeatureExtractCpy(HFSession session, HFImageStream streamHandle, HFFaceBasicToken singleFace, HPFloat feature) {
HResult HFFaceFeatureExtractCpy(HFSession session, HFImageStream streamHandle,
HFFaceBasicToken singleFace, HPFloat feature) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
if (streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_CameraStream *stream = (HF_CameraStream* ) streamHandle;
HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
if (stream == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
@@ -368,8 +416,6 @@ HResult HFFaceFeatureExtractCpy(HFSession session, HFImageStream streamHandle, H
return ret;
}
HResult HFFaceComparison(HFFaceFeature feature1, HFFaceFeature feature2, HPFloat result) {
if (feature1.data == nullptr || feature2.data == nullptr) {
return HERR_INVALID_FACE_FEATURE;
@@ -392,7 +438,6 @@ HResult HFGetFeatureLength(HPInt32 num) {
return HSUCCEED;
}
HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity featureIdentity) {
if (featureIdentity.feature->data == nullptr) {
return HERR_INVALID_FACE_FEATURE;
@@ -408,8 +453,8 @@ HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity featureIdentity) {
return ret;
}
HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeature, HPFloat confidence, PHFFaceFeatureIdentity mostSimilar) {
HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeature, HPFloat confidence,
PHFFaceFeatureIdentity mostSimilar) {
if (searchFeature.data == nullptr) {
return HERR_INVALID_FACE_FEATURE;
}
@@ -420,8 +465,8 @@ HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeature, HPFloat confidence,
}
inspire::SearchResult result;
HInt32 ret = FEATURE_HUB->SearchFaceFeature(feat, result);
mostSimilar->feature = (HFFaceFeature* ) FEATURE_HUB->GetFaceFeaturePtrCache().get();
mostSimilar->feature->data = (HFloat* ) FEATURE_HUB->GetSearchFaceFeatureCache().data();
mostSimilar->feature = (HFFaceFeature *)FEATURE_HUB->GetFaceFeaturePtrCache().get();
mostSimilar->feature->data = (HFloat *)FEATURE_HUB->GetSearchFaceFeatureCache().data();
mostSimilar->feature->size = FEATURE_HUB->GetSearchFaceFeatureCache().size();
mostSimilar->tag = FEATURE_HUB->GetStringCache();
mostSimilar->customId = result.customId;
@@ -430,7 +475,8 @@ HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeature, HPFloat confidence,
return ret;
}
HResult HFFeatureHubFaceSearchTopK(HFFaceFeature searchFeature, HInt32 topK, PHFSearchTopKResults results) {
HResult HFFeatureHubFaceSearchTopK(HFFaceFeature searchFeature, HInt32 topK,
PHFSearchTopKResults results) {
if (searchFeature.data == nullptr) {
return HERR_INVALID_FACE_FEATURE;
}
@@ -475,8 +521,8 @@ HResult HFFeatureHubGetFaceIdentity(HInt32 customId, PHFFaceFeatureIdentity iden
if (ret == HSUCCEED) {
identity->tag = FEATURE_HUB->GetStringCache();
identity->customId = customId;
identity->feature = (HFFaceFeature* ) FEATURE_HUB->GetFaceFeaturePtrCache().get();
identity->feature->data = (HFloat* ) FEATURE_HUB->GetFaceFeaturePtrCache()->data;
identity->feature = (HFFaceFeature *)FEATURE_HUB->GetFaceFeaturePtrCache().get();
identity->feature->data = (HFloat *)FEATURE_HUB->GetFaceFeaturePtrCache()->data;
identity->feature->size = FEATURE_HUB->GetFaceFeaturePtrCache()->dataSize;
} else {
identity->customId = -1;
@@ -485,24 +531,30 @@ HResult HFFeatureHubGetFaceIdentity(HInt32 customId, PHFFaceFeatureIdentity iden
return ret;
}
HResult HFMultipleFacePipelineProcess(HFSession session, HFImageStream streamHandle, PHFMultipleFaceData faces, HFSessionCustomParameter parameter) {
HResult HFMultipleFacePipelineProcess(HFSession session, HFImageStream streamHandle,
PHFMultipleFaceData faces,
HFSessionCustomParameter parameter) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
if (streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_CameraStream *stream = (HF_CameraStream* ) streamHandle;
if (faces->detectedNum == 0) {
return HSUCCEED;
}
HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
if (stream == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
if (faces->detectedNum <= 0 || faces->tokens->data == nullptr) {
if (faces == nullptr || faces->tokens == nullptr || faces->tokens->data == nullptr) {
return HERR_INVALID_FACE_LIST;
}
inspire::ContextCustomParameter param;
param.enable_mask_detect = parameter.enable_mask_detect;
param.enable_face_attribute = parameter.enable_face_quality;
@@ -518,7 +570,7 @@ HResult HFMultipleFacePipelineProcess(HFSession session, HFImageStream streamHan
data.resize(faces->detectedNum);
for (int i = 0; i < faces->detectedNum; ++i) {
auto &face = data[i];
ret = DeserializeHyperFaceData((char* )faces->tokens[i].data, faces->tokens[i].size, face);
ret = DeserializeHyperFaceData((char *)faces->tokens[i].data, faces->tokens[i].size, face);
if (ret != HSUCCEED) {
return HERR_INVALID_FACE_TOKEN;
}
@@ -527,25 +579,28 @@ HResult HFMultipleFacePipelineProcess(HFSession session, HFImageStream streamHan
ret = ctx->impl.FacesProcess(stream->impl, data, param);
return ret;
}
HResult HFMultipleFacePipelineProcessOptional(HFSession session, HFImageStream streamHandle, PHFMultipleFaceData faces, HInt32 customOption) {
HResult HFMultipleFacePipelineProcessOptional(HFSession session, HFImageStream streamHandle,
PHFMultipleFaceData faces, HInt32 customOption) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
if (streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (faces->detectedNum == 0) {
return HSUCCEED;
}
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_CameraStream *stream = (HF_CameraStream* ) streamHandle;
HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
if (stream == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
if (faces->detectedNum <= 0 || faces->tokens->data == nullptr) {
if (faces == nullptr || faces->tokens == nullptr || faces->tokens->data == nullptr) {
return HERR_INVALID_FACE_LIST;
}
@@ -570,15 +625,14 @@ HResult HFMultipleFacePipelineProcessOptional(HFSession session, HFImageStream s
}
if (customOption & HF_ENABLE_INTERACTION) {
param.enable_interaction_liveness = true;
}
}
HResult ret;
std::vector<inspire::HyperFaceData> data;
data.resize(faces->detectedNum);
for (int i = 0; i < faces->detectedNum; ++i) {
auto &face = data[i];
ret = DeserializeHyperFaceData((char* )faces->tokens[i].data, faces->tokens[i].size, face);
ret = DeserializeHyperFaceData((char *)faces->tokens[i].data, faces->tokens[i].size, face);
if (ret != HSUCCEED) {
return HERR_INVALID_FACE_TOKEN;
}
@@ -587,20 +641,19 @@ HResult HFMultipleFacePipelineProcessOptional(HFSession session, HFImageStream s
ret = ctx->impl.FacesProcess(stream->impl, data, param);
return ret;
}
HResult HFGetRGBLivenessConfidence(HFSession session, PHFRGBLivenessConfidence confidence) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
confidence->num = ctx->impl.GetRgbLivenessResultsCache().size();
confidence->confidence = (HFloat* )ctx->impl.GetRgbLivenessResultsCache().data();
confidence->confidence = (HFloat *)ctx->impl.GetRgbLivenessResultsCache().data();
return HSUCCEED;
}
@@ -609,13 +662,13 @@ HResult HFGetFaceMaskConfidence(HFSession session, PHFFaceMaskConfidence confide
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
confidence->num = ctx->impl.GetMaskResultsCache().size();
confidence->confidence = (HFloat* )ctx->impl.GetMaskResultsCache().data();
confidence->confidence = (HFloat *)ctx->impl.GetMaskResultsCache().data();
return HSUCCEED;
}
@@ -624,13 +677,13 @@ HResult HFGetFaceQualityConfidence(HFSession session, PHFFaceQualityConfidence c
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
confidence->num = ctx->impl.GetFaceQualityScoresResultsCache().size();
confidence->confidence = (HFloat* )ctx->impl.GetFaceQualityScoresResultsCache().data();
confidence->confidence = (HFloat *)ctx->impl.GetFaceQualityScoresResultsCache().data();
return HSUCCEED;
}
@@ -639,7 +692,7 @@ HResult HFFaceQualityDetect(HFSession session, HFFaceBasicToken singleFace, HFlo
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
@@ -651,20 +704,39 @@ HResult HFFaceQualityDetect(HFSession session, HFFaceBasicToken singleFace, HFlo
auto ret = inspire::FaceContext::FaceQualityDetect(data, *confidence);
return ret;
}
HResult HFGetFaceIntereactionResult(HFSession session, PHFFaceIntereactionResult result) {
if (session == nullptr) {
HResult HFGetFaceIntereactionStateResult(HFSession session, PHFFaceIntereactionState result) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
result->num = ctx->impl.GetFaceInteractionLeftEyeStatusCache().size();
result->leftEyeStatusConfidence = (HFloat* )ctx->impl.GetFaceInteractionLeftEyeStatusCache().data();
result->rightEyeStatusConfidence = (HFloat* )ctx->impl.GetFaceInteractionRightEyeStatusCache().data();
result->leftEyeStatusConfidence =
(HFloat *)ctx->impl.GetFaceInteractionLeftEyeStatusCache().data();
result->rightEyeStatusConfidence =
(HFloat *)ctx->impl.GetFaceInteractionRightEyeStatusCache().data();
return HSUCCEED;
}
HResult HFGetFaceIntereactionActionsResult(HFSession session, PHFFaceIntereactionsActions actions) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
actions->num = ctx->impl.GetFaceNormalAactionsResultCache().size();
actions->normal = (HInt32 *)ctx->impl.GetFaceNormalAactionsResultCache().data();
actions->blink = (HInt32 *)ctx->impl.GetFaceBlinkAactionsResultCache().data();
actions->shake = (HInt32 *)ctx->impl.GetFaceShakeAactionsResultCache().data();
actions->headRiase = (HInt32 *)ctx->impl.GetFaceRaiseHeadAactionsResultCache().data();
actions->jawOpen = (HInt32 *)ctx->impl.GetFaceJawOpenAactionsResultCache().data();
return HSUCCEED;
}
@@ -673,20 +745,20 @@ HResult HFGetFaceAttributeResult(HFSession session, PHFFaceAttributeResult resul
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession* ) session;
HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
results->num = ctx->impl.GetFaceAgeBracketResultsCache().size();
results->race = (HPInt32 )ctx->impl.GetFaceRaceResultsCache().data();
results->gender = (HPInt32 )ctx->impl.GetFaceGenderResultsCache().data();
results->ageBracket = (HPInt32 )ctx->impl.GetFaceAgeBracketResultsCache().data();
results->race = (HPInt32)ctx->impl.GetFaceRaceResultsCache().data();
results->gender = (HPInt32)ctx->impl.GetFaceGenderResultsCache().data();
results->ageBracket = (HPInt32)ctx->impl.GetFaceAgeBracketResultsCache().data();
return HSUCCEED;
}
HResult HFFeatureHubGetFaceCount(HInt32* count) {
HResult HFFeatureHubGetFaceCount(HInt32 *count) {
*count = FEATURE_HUB->GetFaceFeatureCount();
return HSUCCEED;
}
@@ -695,7 +767,6 @@ HResult HFFeatureHubViewDBTable() {
return FEATURE_HUB->ViewDBTable();
}
HResult HFQueryInspireFaceVersion(PHFInspireFaceVersion version) {
version->major = std::stoi(INSPIRE_FACE_VERSION_MAJOR_STR);
version->minor = std::stoi(INSPIRE_FACE_VERSION_MINOR_STR);
@@ -712,4 +783,9 @@ HResult HFSetLogLevel(HFLogLevel level) {
HResult HFLogDisable() {
INSPIRE_SET_LOG_LEVEL(inspire::ISF_LOG_NONE);
return HSUCCEED;
}
HResult HFDeBugShowResourceStatistics() {
RESOURCE_MANAGE->printResourceStatistics();
return HSUCCEED;
}

View File

@@ -17,63 +17,59 @@
#endif
#else
#define HYPER_CAPI_EXPORT __attribute__((visibility("default")))
#endif // _WIN32
#endif // _WIN32
#ifdef __cplusplus
extern "C" {
#endif
#define HF_ENABLE_NONE 0x00000000 ///< Flag to enable no features.
#define HF_ENABLE_FACE_RECOGNITION 0x00000002 ///< Flag to enable face recognition feature.
#define HF_ENABLE_LIVENESS 0x00000004 ///< Flag to enable RGB liveness detection feature.
#define HF_ENABLE_IR_LIVENESS 0x00000008 ///< Flag to enable IR (Infrared) liveness detection feature.
#define HF_ENABLE_MASK_DETECT 0x00000010 ///< Flag to enable mask detection feature.
#define HF_ENABLE_FACE_ATTRIBUTE 0x00000020 ///< Flag to enable face attribute prediction feature.
#define HF_ENABLE_PLACEHOLDER_ 0x00000040 ///< -
#define HF_ENABLE_QUALITY 0x00000080 ///< Flag to enable face quality assessment feature.
#define HF_ENABLE_INTERACTION 0x00000100 ///< Flag to enable interaction feature.
#define HF_ENABLE_NONE 0x00000000 ///< Flag to enable no features.
#define HF_ENABLE_FACE_RECOGNITION 0x00000002 ///< Flag to enable face recognition feature.
#define HF_ENABLE_LIVENESS 0x00000004 ///< Flag to enable RGB liveness detection feature.
#define HF_ENABLE_IR_LIVENESS \
0x00000008 ///< Flag to enable IR (Infrared) liveness detection feature.
#define HF_ENABLE_MASK_DETECT 0x00000010 ///< Flag to enable mask detection feature.
#define HF_ENABLE_FACE_ATTRIBUTE 0x00000020 ///< Flag to enable face attribute prediction feature.
#define HF_ENABLE_PLACEHOLDER_ 0x00000040 ///< -
#define HF_ENABLE_QUALITY 0x00000080 ///< Flag to enable face quality assessment feature.
#define HF_ENABLE_INTERACTION 0x00000100 ///< Flag to enable interaction feature.
/**
* Camera stream format.
* Contains several common camera stream formats available in the market.
*/
typedef enum HFImageFormat {
HF_STREAM_RGB = 0, ///< Image in RGB format.
HF_STREAM_BGR = 1, ///< Image in BGR format (Opencv Mat default).
HF_STREAM_RGBA = 2, ///< Image in RGB format with alpha channel.
HF_STREAM_BGRA = 3, ///< Image in BGR format with alpha channel.
HF_STREAM_YUV_NV12 = 4, ///< Image in YUV NV12 format.
HF_STREAM_YUV_NV21 = 5, ///< Image in YUV NV21 format.
HF_STREAM_RGB = 0, ///< Image in RGB format.
HF_STREAM_BGR = 1, ///< Image in BGR format (Opencv Mat default).
HF_STREAM_RGBA = 2, ///< Image in RGB format with alpha channel.
HF_STREAM_BGRA = 3, ///< Image in BGR format with alpha channel.
HF_STREAM_YUV_NV12 = 4, ///< Image in YUV NV12 format.
HF_STREAM_YUV_NV21 = 5, ///< Image in YUV NV21 format.
} HFImageFormat;
/**
* Camera picture rotation mode.
* To accommodate the rotation of certain devices, four image rotation modes are provided.
*/
typedef enum HFRotation {
HF_CAMERA_ROTATION_0 = 0, ///< 0 degree rotation.
HF_CAMERA_ROTATION_90 = 1, ///< 90 degree rotation.
HF_CAMERA_ROTATION_180 = 2, ///< 180 degree rotation.
HF_CAMERA_ROTATION_270 = 3, ///< 270 degree rotation.
HF_CAMERA_ROTATION_0 = 0, ///< 0 degree rotation.
HF_CAMERA_ROTATION_90 = 1, ///< 90 degree rotation.
HF_CAMERA_ROTATION_180 = 2, ///< 180 degree rotation.
HF_CAMERA_ROTATION_270 = 3, ///< 270 degree rotation.
} HFRotation;
/**
* Image Buffer Data structure.
* Defines the structure for image data stream.
*/
typedef struct HFImageData {
uint8_t *data; ///< Pointer to the image data stream.
HInt32 width; ///< Width of the image.
HInt32 height; ///< Height of the image.
HFImageFormat format; ///< Format of the image, indicating the data stream format to be parsed.
HFRotation rotation; ///< Rotation angle of the image.
uint8_t *data; ///< Pointer to the image data stream.
HInt32 width; ///< Width of the image.
HInt32 height; ///< Height of the image.
HFImageFormat format; ///< Format of the image, indicating the data stream format to be parsed.
HFRotation rotation; ///< Rotation angle of the image.
} HFImageData, *PHFImageData;
/**
* @brief Create a data buffer stream instantiation object.
*
@@ -85,7 +81,6 @@ typedef struct HFImageData {
*/
HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageStream *handle);
/**
* @brief Release the instantiated DataBuffer object.
*
@@ -97,21 +92,30 @@ HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageS
HYPER_CAPI_EXPORT extern HResult HFReleaseImageStream(HFImageStream streamHandle);
/************************************************************************
* Resource Function
************************************************************************/
* Resource Function
************************************************************************/
/**
* @brief Launch InspireFace SDK
* Start the InspireFace SDK at the initialization stage of your program, as it is global and designed to be used only once.
* It serves as a prerequisite for other function interfaces, so it is essential to ensure it is initialized before calling any other APIs.
* Start the InspireFace SDK at the initialization stage of your program, as it is global and
* designed to be used only once. It serves as a prerequisite for other function interfaces, so it
* is essential to ensure it is initialized before calling any other APIs.
* @param resourcePath Initializes the path to the resource file that needs to be loaded
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFLaunchInspireFace(HPath resourcePath);
/**
* @brief Terminate InspireFace SDK
* Terminate the InspireFace SDK, releasing all allocated resources.
* This should be called at the end of your program to ensure proper cleanup.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFTerminateInspireFace();
/************************************************************************
* FaceContext
************************************************************************/
* FaceContext
************************************************************************/
/**
* @brief Struct for custom parameters in face recognition context.
@@ -121,24 +125,26 @@ HYPER_CAPI_EXPORT extern HResult HFLaunchInspireFace(HPath resourcePath);
* mask detection, age and gender prediction, etc.
*/
typedef struct HFSessionCustomParameter {
HInt32 enable_recognition; ///< Enable face recognition feature.
HInt32 enable_liveness; ///< Enable RGB liveness detection feature.
HInt32 enable_ir_liveness; ///< Enable IR liveness detection feature.
HInt32 enable_mask_detect; ///< Enable mask detection feature.
HInt32 enable_face_quality; ///< Enable face quality detection feature.
HInt32 enable_face_attribute; ///< Enable face attribute prediction feature.
HInt32 enable_interaction_liveness; ///< Enable interaction for liveness detection feature.
HInt32 enable_recognition; ///< Enable face recognition feature.
HInt32 enable_liveness; ///< Enable RGB liveness detection feature.
HInt32 enable_ir_liveness; ///< Enable IR liveness detection feature.
HInt32 enable_mask_detect; ///< Enable mask detection feature.
HInt32 enable_face_quality; ///< Enable face quality detection feature.
HInt32 enable_face_attribute; ///< Enable face attribute prediction feature.
HInt32 enable_interaction_liveness; ///< Enable interaction for liveness detection feature.
} HFSessionCustomParameter, *PHFSessionCustomParameter;
/**
* @brief Enumeration for face detection modes.
*/
typedef enum HFDetectMode {
HF_DETECT_MODE_ALWAYS_DETECT, ///< Image detection mode, always detect, applicable to images.
HF_DETECT_MODE_LIGHT_TRACK, ///< Video detection mode, face tracking, applicable to video streaming, front camera.
HF_DETECT_MODE_TRACK_BY_DETECTION, ///< Video detection mode, face tracking, applicable to high resolution, monitoring, capturing
// (You need a specific option turned on at compile time to use it).
HF_DETECT_MODE_ALWAYS_DETECT, ///< Image detection mode, always detect, applicable to images.
HF_DETECT_MODE_LIGHT_TRACK, ///< Video detection mode, face tracking, applicable to video
///< streaming, front camera.
HF_DETECT_MODE_TRACK_BY_DETECTION, ///< Video detection mode, face tracking, applicable to high
///< resolution, monitoring, capturing
// (You need a specific option turned on at compile time
// to use it).
} HFDetectMode;
/**
@@ -147,21 +153,17 @@ typedef enum HFDetectMode {
* @param parameter Custom parameters for session.
* @param detectMode Detection mode to be used.
* @param maxDetectFaceNum Maximum number of faces to detect.
* @param detectPixelLevel Modify the input resolution level of the detector, the larger the better,
* @param detectPixelLevel Modify the input resolution level of the detector, the larger the better,
* the need to input a multiple of 160, such as 160, 320, 640, the default value -1 is 320.
* @param trackByDetectModeFPS If you are using the MODE_TRACK_BY_DETECTION tracking mode,
* this value is used to set the fps frame rate of your current incoming video stream, which defaults to -1 at 30fps.
* @param trackByDetectModeFPS If you are using the MODE_TRACK_BY_DETECTION tracking mode,
* this value is used to set the fps frame rate of your current incoming video stream,
* which defaults to -1 at 30fps.
* @param handle Pointer to the context handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCreateInspireFaceSession(
HFSessionCustomParameter parameter,
HFDetectMode detectMode,
HInt32 maxDetectFaceNum,
HInt32 detectPixelLevel,
HInt32 trackByDetectModeFPS,
HFSession *handle
);
HFSessionCustomParameter parameter, HFDetectMode detectMode, HInt32 maxDetectFaceNum,
HInt32 detectPixelLevel, HInt32 trackByDetectModeFPS, HFSession *handle);
/**
* @brief Create a session from a resource file with additional options.
@@ -169,21 +171,17 @@ HYPER_CAPI_EXPORT extern HResult HFCreateInspireFaceSession(
* @param customOption Custom option for additional configuration.
* @param detectMode Detection mode to be used.
* @param maxDetectFaceNum Maximum number of faces to detect.
* @param detectPixelLevel Modify the input resolution level of the detector, the larger the better,
* @param detectPixelLevel Modify the input resolution level of the detector, the larger the better,
* the need to input a multiple of 160, such as 160, 320, 640, the default value -1 is 320.
* @param trackByDetectModeFPS If you are using the MODE_TRACK_BY_DETECTION tracking mode,
* this value is used to set the fps frame rate of your current incoming video stream, which defaults to -1 at 30fps.
* @param trackByDetectModeFPS If you are using the MODE_TRACK_BY_DETECTION tracking mode,
* this value is used to set the fps frame rate of your current incoming video stream,
* which defaults to -1 at 30fps.
* @param handle Pointer to the context handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCreateInspireFaceSessionOptional(
HOption customOption,
HFDetectMode detectMode,
HInt32 maxDetectFaceNum,
HInt32 detectPixelLevel,
HInt32 trackByDetectModeFPS,
HFSession *handle
);
HOption customOption, HFDetectMode detectMode, HInt32 maxDetectFaceNum, HInt32 detectPixelLevel,
HInt32 trackByDetectModeFPS, HFSession *handle);
/**
* @brief Release the session.
@@ -199,8 +197,8 @@ HYPER_CAPI_EXPORT extern HResult HFReleaseInspireFaceSession(HFSession handle);
* This struct holds the size and data pointer for a basic token associated with face data.
*/
typedef struct HFFaceBasicToken {
HInt32 size; ///< Size of the token.
HPVoid data; ///< Pointer to the token data.
HInt32 size; ///< Size of the token.
HPVoid data; ///< Pointer to the token data.
} HFFaceBasicToken, *PHFFaceBasicToken;
/**
@@ -209,9 +207,9 @@ typedef struct HFFaceBasicToken {
* This struct represents the Euler angles (roll, yaw, pitch) for face orientation.
*/
typedef struct HFFaceEulerAngle {
HFloat *roll; ///< Roll angle of the face.
HFloat *yaw; ///< Yaw angle of the face.
HFloat *pitch; ///< Pitch angle of the face.
HFloat *roll; ///< Roll angle of the face.
HFloat *yaw; ///< Yaw angle of the face.
HFloat *pitch; ///< Pitch angle of the face.
} HFFaceEulerAngle;
/**
@@ -221,31 +219,34 @@ typedef struct HFFaceEulerAngle {
* their bounding rectangles, track IDs, angles, and tokens.
*/
typedef struct HFMultipleFaceData {
HInt32 detectedNum; ///< Number of faces detected.
HFaceRect *rects; ///< Array of bounding rectangles for each face.
HInt32 *trackIds; ///< Array of track IDs for each face.
HFFaceEulerAngle angles; ///< Euler angles for each face.
PHFFaceBasicToken tokens; ///< Tokens associated with each face.
HInt32 detectedNum; ///< Number of faces detected.
HFaceRect *rects; ///< Array of bounding rectangles for each face.
HInt32 *trackIds; ///< Array of track IDs for each face.
HFFaceEulerAngle angles; ///< Euler angles for each face.
PHFFaceBasicToken tokens; ///< Tokens associated with each face.
} HFMultipleFaceData, *PHFMultipleFaceData;
/**
* @brief Set the track preview size in the session, it works with face detection and tracking algorithms.
* Default preview size is 192(px).
* @brief Set the track preview size in the session, it works with face detection and tracking
* algorithms. Default preview size is 192(px).
*
* @param session Handle to the session.
* @param previewSize The size of the preview for tracking.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFSessionSetTrackPreviewSize(HFSession session, HInt32 previewSize);
HYPER_CAPI_EXPORT extern HResult HFSessionSetTrackPreviewSize(HFSession session,
HInt32 previewSize);
/**
* @brief Set the minimum number of face pixels that the face detector can capture, and people below this number will be filtered.
* @brief Set the minimum number of face pixels that the face detector can capture, and people below
* this number will be filtered.
*
* @param session Handle to the session.
* @param minSize The minimum pixel value, default value is 24.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFSessionSetFilterMinimumFacePixelSize(HFSession session, HInt32 minSize);
HYPER_CAPI_EXPORT extern HResult HFSessionSetFilterMinimumFacePixelSize(HFSession session,
HInt32 minSize);
/**
* @brief Set the face detect threshold in the session.
@@ -254,7 +255,8 @@ HYPER_CAPI_EXPORT extern HResult HFSessionSetFilterMinimumFacePixelSize(HFSessio
* @param detectMode The mode of the detection mode for tracking.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFSessionSetFaceDetectThreshold(HFSession session, HFloat threshold);
HYPER_CAPI_EXPORT extern HResult HFSessionSetFaceDetectThreshold(HFSession session,
HFloat threshold);
/**
* @brief Run face tracking in the session.
@@ -264,7 +266,8 @@ HYPER_CAPI_EXPORT extern HResult HFSessionSetFaceDetectThreshold(HFSession sessi
* @param results Pointer to the structure where the results will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFExecuteFaceTrack(HFSession session, HFImageStream streamHandle, PHFMultipleFaceData results);
HYPER_CAPI_EXPORT extern HResult HFExecuteFaceTrack(HFSession session, HFImageStream streamHandle,
PHFMultipleFaceData results);
/**
* @brief Copies the data from a HF_FaceBasicToken to a specified buffer.
@@ -281,7 +284,8 @@ HYPER_CAPI_EXPORT extern HResult HFExecuteFaceTrack(HFSession session, HFImageSt
* if the operation was successful, or an error code if the buffer was too small
* or if any other error occurred.
*/
HYPER_CAPI_EXPORT extern HResult HFCopyFaceBasicToken(HFFaceBasicToken token, HPBuffer buffer, HInt32 bufferSize);
HYPER_CAPI_EXPORT extern HResult HFCopyFaceBasicToken(HFFaceBasicToken token, HPBuffer buffer,
HInt32 bufferSize);
/**
* @brief Retrieves the size of the data contained in a HF_FaceBasicToken.
@@ -305,18 +309,20 @@ HYPER_CAPI_EXPORT extern HResult HFGetFaceBasicTokenSize(HPInt32 bufferSize);
HYPER_CAPI_EXPORT extern HResult HFGetNumOfFaceDenseLandmark(HPInt32 num);
/**
* @brief When you pass in a valid facial token, you can retrieve a set of dense facial landmarks.
* @brief When you pass in a valid facial token, you can retrieve a set of dense facial landmarks.
* The memory for the dense landmarks must be allocated by you.
* @param singleFace Basic token representing a single face.
* @param landmarks Pre-allocated memory address of the array for 2D floating-point coordinates.
* @param num Number of landmark points
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasicToken singleFace, HPoint2f* landmarks, HInt32 num);
HYPER_CAPI_EXPORT extern HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasicToken singleFace,
HPoint2f *landmarks,
HInt32 num);
/************************************************************************
* Face Recognition
************************************************************************/
* Face Recognition
************************************************************************/
/**
* @brief Struct representing a face feature.
@@ -324,8 +330,8 @@ HYPER_CAPI_EXPORT extern HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasic
* This struct holds the data related to a face feature, including size and actual feature data.
*/
typedef struct HFFaceFeature {
HInt32 size; ///< Size of the feature data.
HPFloat data; ///< Pointer to the feature data.
HInt32 size; ///< Size of the feature data.
HPFloat data; ///< Pointer to the feature data.
} HFFaceFeature, *PHFFaceFeature;
/**
@@ -337,8 +343,9 @@ typedef struct HFFaceFeature {
* @param feature Pointer to the extracted face feature.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult
HFFaceFeatureExtract(HFSession session, HFImageStream streamHandle, HFFaceBasicToken singleFace, PHFFaceFeature feature);
HYPER_CAPI_EXPORT extern HResult HFFaceFeatureExtract(HFSession session, HFImageStream streamHandle,
HFFaceBasicToken singleFace,
PHFFaceFeature feature);
/**
* @brief Extract a face feature from a given face and copy it to the provided feature buffer.
@@ -349,41 +356,45 @@ HFFaceFeatureExtract(HFSession session, HFImageStream streamHandle, HFFaceBasicT
* @param feature Pointer to the buffer where the extracted feature will be copied.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult
HFFaceFeatureExtractCpy(HFSession session, HFImageStream streamHandle, HFFaceBasicToken singleFace, HPFloat feature);
HYPER_CAPI_EXPORT extern HResult HFFaceFeatureExtractCpy(HFSession session,
HFImageStream streamHandle,
HFFaceBasicToken singleFace,
HPFloat feature);
/************************************************************************
* Feature Hub
************************************************************************/
* Feature Hub
************************************************************************/
/**
* @brief Select the search mode in the process of face recognition search,
* and different modes will affect the execution efficiency and results
* */
typedef enum HFSearchMode {
HF_SEARCH_MODE_EAGER = 0, // Eager mode: Stops when a vector meets the threshold.
HF_SEARCH_MODE_EXHAUSTIVE, // Exhaustive mode: Searches until the best match is found.
HF_SEARCH_MODE_EAGER = 0, // Eager mode: Stops when a vector meets the threshold.
HF_SEARCH_MODE_EXHAUSTIVE, // Exhaustive mode: Searches until the best match is found.
} HFSearchMode;
/**
* @brief Struct for database configuration.
*
* This struct holds the configuration settings for using a database in the face recognition context.
* This struct holds the configuration settings for using a database in the face recognition
* context.
*/
typedef struct HFFeatureHubConfiguration {
HInt32 featureBlockNum; ///< The order of magnitude of face feature database is N * 512, and 20 is recommended by default
HInt32 enablePersistence; ///< Flag to enable or disable the use of the database.
HString dbPath; ///< Path to the database file.
float searchThreshold; ///< Threshold for face search
HFSearchMode searchMode; ///< Mode of face search
HInt32 featureBlockNum; ///< The order of magnitude of face feature database is N * 512, and 20
///< is recommended by default
HInt32 enablePersistence; ///< Flag to enable or disable the use of the database.
HString dbPath; ///< Path to the database file.
float searchThreshold; ///< Threshold for face search
HFSearchMode searchMode; ///< Mode of face search
} HFFeatureHubConfiguration;
/**
* @brief A lightweight face feature vector management.
* @details FeatureHub is a built-in global lightweight face feature vector management functionality provided in the InspireFace-SDK.
* It supports basic face feature search, deletion, and modification functions, and offers two optional data storage modes:
* an in-memory model and a persistence model. If you have simple storage needs, you can enable it.
* @details FeatureHub is a built-in global lightweight face feature vector management functionality
* provided in the InspireFace-SDK. It supports basic face feature search, deletion, and
* modification functions, and offers two optional data storage modes: an in-memory model and a
* persistence model. If you have simple storage needs, you can enable it.
*
* @param configuration FeatureHub configuration details.
* @return HResult indicating the success or failure of the operation.
@@ -396,25 +407,24 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubDataEnable(HFFeatureHubConfiguratio
* */
HYPER_CAPI_EXPORT extern HResult HFFeatureHubDataDisable();
/**
* @brief Struct representing the identity of a face feature.
*
* This struct associates a custom identifier and a tag with a specific face feature.
*/
typedef struct HFFaceFeatureIdentity {
HInt32 customId; ///< Custom identifier for the face feature.
HString tag; ///< Tag associated with the face feature.
PHFFaceFeature feature; ///< Pointer to the face feature.
HInt32 customId; ///< Custom identifier for the face feature.
HString tag; ///< Tag associated with the face feature.
PHFFaceFeature feature; ///< Pointer to the face feature.
} HFFaceFeatureIdentity, *PHFFaceFeatureIdentity;
/**
* Search structure for top-k mode
* */
typedef struct HFSearchTopKResults {
HInt32 size; ///< The number of faces searched
HPFloat confidence; ///< Search confidence(it has already been filtered once by the threshold)
HPInt32 customIds; ///< fACE customIds
HInt32 size; ///< The number of faces searched
HPFloat confidence; ///< Search confidence(it has already been filtered once by the threshold)
HPInt32 customIds; ///< fACE customIds
} HFSearchTopKResults, *PHFSearchTopKResults;
/**
@@ -423,7 +433,8 @@ typedef struct HFSearchTopKResults {
* This function sets the threshold for face recognition, which determines the sensitivity
* of the recognition process. A lower threshold may yield more matches but with less confidence.
*
* @param threshold The threshold value to set for face recognition (default is 0.48, suitable for access control scenarios).
* @param threshold The threshold value to set for face recognition (default is 0.48, suitable for
* access control scenarios).
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearchThresholdSetting(float threshold);
@@ -437,7 +448,8 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearchThresholdSetting(float th
* @param result Pointer to the floating-point value where the comparison result will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFaceComparison(HFFaceFeature feature1, HFFaceFeature feature2, HPFloat result);
HYPER_CAPI_EXPORT extern HResult HFFaceComparison(HFFaceFeature feature1, HFFaceFeature feature2,
HPFloat result);
/**
* @brief Get the length of the face feature.
@@ -447,7 +459,6 @@ HYPER_CAPI_EXPORT extern HResult HFFaceComparison(HFFaceFeature feature1, HFFace
*/
HYPER_CAPI_EXPORT extern HResult HFGetFeatureLength(HPInt32 num);
/**
* @brief Insert a face feature identity into the features group.
*
@@ -460,11 +471,14 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity
* @brief Search for the most similar face feature in the features group.
*
* @param searchFeature The face feature to be searched.
* @param confidence Pointer to a floating-point value where the confidence level of the match will be stored.
* @param confidence Pointer to a floating-point value where the confidence level of the match will
* be stored.
* @param mostSimilar Pointer to the most similar face feature identity found.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeature, HPFloat confidence, PHFFaceFeatureIdentity mostSimilar);
HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeature,
HPFloat confidence,
PHFFaceFeatureIdentity mostSimilar);
/**
* @brief Search for the most similar k facial features in the feature group
@@ -474,7 +488,9 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeat
* @param PHFSearchTopKResults Output search result
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearchTopK(HFFaceFeature searchFeature, HInt32 topK, PHFSearchTopKResults results);
HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearchTopK(HFFaceFeature searchFeature,
HInt32 topK,
PHFSearchTopKResults results);
/**
* @brief Remove a face feature from the features group based on custom ID.
@@ -499,7 +515,8 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceUpdate(HFFaceFeatureIdentity fe
* @param identity Pointer to the face feature identity to be retrieved.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubGetFaceIdentity(HInt32 customId, PHFFaceFeatureIdentity identity);
HYPER_CAPI_EXPORT extern HResult HFFeatureHubGetFaceIdentity(HInt32 customId,
PHFFaceFeatureIdentity identity);
/**
* @brief Get the count of face features in the features group.
@@ -517,8 +534,8 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubGetFaceCount(HInt32 *count);
HYPER_CAPI_EXPORT extern HResult HFFeatureHubViewDBTable();
/************************************************************************
* Face Pipeline
************************************************************************/
* Face Pipeline
************************************************************************/
/**
* @brief Process multiple faces in a pipeline.
@@ -532,9 +549,10 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubViewDBTable();
* @param parameter Custom parameters for processing the faces.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult
HFMultipleFacePipelineProcess(HFSession session, HFImageStream streamHandle, PHFMultipleFaceData faces,
HFSessionCustomParameter parameter);
HYPER_CAPI_EXPORT extern HResult HFMultipleFacePipelineProcess(HFSession session,
HFImageStream streamHandle,
PHFMultipleFaceData faces,
HFSessionCustomParameter parameter);
/**
* @brief Process multiple faces in a pipeline with an optional custom option.
@@ -548,9 +566,10 @@ HFMultipleFacePipelineProcess(HFSession session, HFImageStream streamHandle, PHF
* @param customOption An integer representing a custom option for processing.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult
HFMultipleFacePipelineProcessOptional(HFSession session, HFImageStream streamHandle,
PHFMultipleFaceData faces, HInt32 customOption);
HYPER_CAPI_EXPORT extern HResult HFMultipleFacePipelineProcessOptional(HFSession session,
HFImageStream streamHandle,
PHFMultipleFaceData faces,
HInt32 customOption);
/**
* @brief Struct representing RGB liveness confidence.
@@ -559,8 +578,8 @@ HFMultipleFacePipelineProcessOptional(HFSession session, HFImageStream streamHan
* for each face, using RGB analysis.
*/
typedef struct HFRGBLivenessConfidence {
HInt32 num; ///< Number of faces detected.
HPFloat confidence; ///< Confidence level of RGB liveness detection for each face.
HInt32 num; ///< Number of faces detected.
HPFloat confidence; ///< Confidence level of RGB liveness detection for each face.
} HFRGBLivenessConfidence, *PHFRGBLivenessConfidence;
/**
@@ -573,8 +592,8 @@ typedef struct HFRGBLivenessConfidence {
* @param confidence Pointer to the structure where RGB liveness confidence data will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult
HFGetRGBLivenessConfidence(HFSession session, PHFRGBLivenessConfidence confidence);
HYPER_CAPI_EXPORT extern HResult HFGetRGBLivenessConfidence(HFSession session,
PHFRGBLivenessConfidence confidence);
/**
* @brief Struct representing face mask confidence.
@@ -583,8 +602,8 @@ HFGetRGBLivenessConfidence(HFSession session, PHFRGBLivenessConfidence confidenc
* for each face.
*/
typedef struct HFFaceMaskConfidence {
HInt32 num; ///< Number of faces detected.
HPFloat confidence; ///< Confidence level of mask detection for each face.
HInt32 num; ///< Number of faces detected.
HPFloat confidence; ///< Confidence level of mask detection for each face.
} HFFaceMaskConfidence, *PHFFaceMaskConfidence;
/**
@@ -597,7 +616,8 @@ typedef struct HFFaceMaskConfidence {
* @param confidence Pointer to the structure where face mask confidence data will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceMaskConfidence(HFSession session, PHFFaceMaskConfidence confidence);
HYPER_CAPI_EXPORT extern HResult HFGetFaceMaskConfidence(HFSession session,
PHFFaceMaskConfidence confidence);
/**
* @brief Struct representing face quality predict confidence.
@@ -606,8 +626,8 @@ HYPER_CAPI_EXPORT extern HResult HFGetFaceMaskConfidence(HFSession session, PHFF
* for each face.
*/
typedef struct HFFaceQualityConfidence {
HInt32 num; ///< Number of faces detected.
HPFloat confidence; ///< Confidence level of face quality predict for each face.
HInt32 num; ///< Number of faces detected.
HPFloat confidence; ///< Confidence level of face quality predict for each face.
} HFFaceQualityConfidence, *PHFFaceQualityConfidence;
/**
@@ -620,7 +640,8 @@ typedef struct HFFaceQualityConfidence {
* @param confidence Pointer to the structure where face mask confidence data will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceQualityConfidence(HFSession session, PHFFaceQualityConfidence confidence);
HYPER_CAPI_EXPORT extern HResult HFGetFaceQualityConfidence(HFSession session,
PHFFaceQualityConfidence confidence);
/**
* @brief Detect the quality of a face in an image.
@@ -632,51 +653,74 @@ HYPER_CAPI_EXPORT extern HResult HFGetFaceQualityConfidence(HFSession session, P
* @param confidence Pointer to a floating-point value where the quality confidence will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFaceQualityDetect(HFSession session, HFFaceBasicToken singleFace, HFloat *confidence);
HYPER_CAPI_EXPORT extern HResult HFFaceQualityDetect(HFSession session, HFFaceBasicToken singleFace,
HFloat *confidence);
/**
* @brief Facial states in the face interaction module.
*/
typedef struct HFFaceIntereactionResult {
HInt32 num; ///< Number of faces detected.
HPFloat leftEyeStatusConfidence; ///< Left eye state: confidence close to 1 means open, close to 0 means closed.
HPFloat rightEyeStatusConfidence; ///< Right eye state: confidence close to 1 means open, close to 0 means closed.
} HFFaceIntereactionResult, *PHFFaceIntereactionResult;
typedef struct HFFaceIntereactionState {
HInt32 num; ///< Number of faces detected.
HPFloat leftEyeStatusConfidence; ///< Left eye state: confidence close to 1 means open, close
///< to 0 means closed.
HPFloat rightEyeStatusConfidence; ///< Right eye state: confidence close to 1 means open, close
///< to 0 means closed.
} HFFaceIntereactionState, *PHFFaceIntereactionState;
/**
* @brief Get the prediction results of face interaction.
* @param session Handle to the session.
* @param result Facial state prediction results in the face interaction module.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceIntereactionResult(HFSession session, PHFFaceIntereactionResult result);
HYPER_CAPI_EXPORT extern HResult HFGetFaceIntereactionStateResult(HFSession session,
PHFFaceIntereactionState result);
/**
* @brief Actions detected in the face interaction module.
*/
/**
 * @brief Per-face action flags produced by the face interaction module.
 *
 * Each HPInt32 member points to an array of `num` entries, one per detected
 * face; entry i is presumably 1 when the action was observed for face i and
 * 0 otherwise (populated from the per-face action result caches — confirm
 * against the implementation).
 */
typedef struct HFFaceIntereactionsActions {
    HInt32 num;        ///< Number of actions detected (one slot per detected face).
    HPInt32 normal;    ///< Normal actions (no specific action detected / window not full).
    HPInt32 shake;     ///< Head-shake actions.
    HPInt32 jawOpen;   ///< Jaw-open actions.
    HPInt32 headRiase; ///< Head-raise actions. NOTE(review): "Riase" typo is part of the public C API; renaming would break callers.
    HPInt32 blink;     ///< Blink actions.
} HFFaceIntereactionsActions, *PHFFaceIntereactionsActions;
/**
* @brief Get the prediction results of face interaction actions.
* @param session Handle to the session.
* @param actions Facial action prediction results in the face interaction module.
* @return HResult indicating success or failure of the function call.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceIntereactionActionsResult(
HFSession session, PHFFaceIntereactionsActions actions);
/**
* @brief Struct representing face attribute results.
*
* This struct holds the race, gender, and age bracket attributes for a detected face.
*/
typedef struct HFFaceAttributeResult {
HInt32 num; ///< Number of faces detected.
HPInt32 race; ///< Race of the detected face.
///< 0: Black;
///< 1: Asian;
///< 2: Latino/Hispanic;
///< 3: Middle Eastern;
///< 4: White;
HPInt32 gender; ///< Gender of the detected face.
///< 0: Female;
///< 1: Male;
HPInt32 ageBracket; ///< Age bracket of the detected face.
///< 0: 0-2 years old;
///< 1: 3-9 years old;
///< 2: 10-19 years old;
///< 3: 20-29 years old;
///< 4: 30-39 years old;
///< 5: 40-49 years old;
///< 6: 50-59 years old;
///< 7: 60-69 years old;
///< 8: more than 70 years old;
HInt32 num; ///< Number of faces detected.
HPInt32 race; ///< Race of the detected face.
///< 0: Black;
///< 1: Asian;
///< 2: Latino/Hispanic;
///< 3: Middle Eastern;
///< 4: White;
HPInt32 gender; ///< Gender of the detected face.
///< 0: Female;
///< 1: Male;
HPInt32 ageBracket; ///< Age bracket of the detected face.
///< 0: 0-2 years old;
///< 1: 3-9 years old;
///< 2: 10-19 years old;
///< 3: 20-29 years old;
///< 4: 30-39 years old;
///< 5: 40-49 years old;
///< 6: 50-59 years old;
///< 7: 60-69 years old;
///< 8: more than 70 years old;
} HFFaceAttributeResult, *PHFFaceAttributeResult;
/**
@@ -689,20 +733,20 @@ typedef struct HFFaceAttributeResult {
* @param results Pointer to the structure where face attribute results will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceAttributeResult(HFSession session, PHFFaceAttributeResult results);
HYPER_CAPI_EXPORT extern HResult HFGetFaceAttributeResult(HFSession session,
PHFFaceAttributeResult results);
/************************************************************************
* System Function
************************************************************************/
* System Function
************************************************************************/
/**
* @brief Structure representing the version information of the InspireFace library.
*/
typedef struct HFInspireFaceVersion {
int major; ///< Major version number.
int minor; ///< Minor version number.
int patch; ///< Patch version number.
int major; ///< Major version number.
int minor; ///< Minor version number.
int patch; ///< Patch version number.
} HFInspireFaceVersion, *PHFInspireFaceVersion;
/**
@@ -719,12 +763,14 @@ HYPER_CAPI_EXPORT extern HResult HFQueryInspireFaceVersion(PHFInspireFaceVersion
* @brief SDK built-in log level mode
* */
typedef enum HFLogLevel {
HF_LOG_NONE = 0, // No logging, disables all log output
HF_LOG_DEBUG, // Debug level for detailed system information mostly useful for developers
HF_LOG_INFO, // Information level for general system information about operational status
HF_LOG_WARN, // Warning level for non-critical issues that might need attention
HF_LOG_ERROR, // Error level for error events that might still allow the application to continue running
HF_LOG_FATAL // Fatal level for severe error events that will presumably lead the application to abort
HF_LOG_NONE = 0, // No logging, disables all log output
HF_LOG_DEBUG, // Debug level for detailed system information mostly useful for developers
HF_LOG_INFO, // Information level for general system information about operational status
HF_LOG_WARN, // Warning level for non-critical issues that might need attention
HF_LOG_ERROR, // Error level for error events that might still allow the application to
// continue running
HF_LOG_FATAL // Fatal level for severe error events that will presumably lead the application
// to abort
} HFLogLevel;
/**
@@ -753,20 +799,33 @@ HYPER_CAPI_EXPORT extern void HFDeBugImageStreamImShow(HFImageStream streamHandl
/**
* @brief Decode the image from ImageStream and store it to a disk path.
*
* It is used to verify whether there is a problem with image codec, and can quickly perform bug analysis.
* It is used to verify whether there is a problem with image codec, and can quickly perform bug
* analysis.
*
* @param streamHandle Handle to the data buffer representing the camera stream component.
* @param savePath The path to which the image is written.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFDeBugImageStreamDecodeSave(HFImageStream streamHandle, HPath savePath);
HYPER_CAPI_EXPORT extern HResult HFDeBugImageStreamDecodeSave(HFImageStream streamHandle,
HPath savePath);
/**
* @brief Display current resource management statistics.
*
* This function prints statistics about the resources managed by the ResourceManager,
* including the total number of created and released sessions and image streams, as well as
* the count of those that have not been released yet. This can be used for debugging purposes
* to ensure that resources are being properly managed and to identify potential resource leaks.
*
* @return HResult indicating the success or failure of the operation.
* Returns HSUCCEED if the statistics were successfully displayed,
* otherwise, it may return an error code if there is an issue accessing the resource
* manager.
*/
HYPER_CAPI_EXPORT extern HResult HFDeBugShowResourceStatistics();
#ifdef __cplusplus
}
#endif
#endif //HYPERFACEREPO_INSPIREFACE_H
#endif // HYPERFACEREPO_INSPIREFACE_H

View File

@@ -0,0 +1,156 @@
#ifndef INSPIRSE_FACE_FACE_ACTION_H
#define INSPIRSE_FACE_FACE_ACTION_H
#include <iostream>
#include "opencv2/opencv.hpp"
#include "middleware/utils.h"
#include "data_type.h"
#include "track_module/landmark/face_landmark.h"
namespace inspire {
/**
 * @brief Facial action categories recognised by FaceActionAnalyse.
 *
 * NORMAL is reported when the analysis window is not yet full; the remaining
 * values mark concrete detected actions. Every enumerator must carry a
 * distinct value so that entries returned by FaceActionAnalyse::GetActions()
 * are unambiguous.
 */
enum FACE_ACTION {
    NORMAL = 0,      ///< No specific action / not enough recorded frames to analyse.
    BLINK = 1,       ///< Eyes blinked.
    JAW_OPEN = 2,    ///< Mouth opened wide.
    RAISE_HEAD = 3,  ///< Head raised (pitch up).
    SHAKE = 4        ///< Head shaken left-right. Was erroneously 0, colliding with NORMAL.
};
/**
 * @brief Flags for facial actions detected within one analysis window.
 *
 * Each member is set to 1 by FaceActionAnalyse::AnalysisFaceAction() when the
 * corresponding action was observed in the recorded frames, and stays 0
 * otherwise. `normal` is set when too few frames have been recorded to run
 * the analysis.
 */
struct FaceActions {
    int normal = 0;     ///< Window not yet full; no analysis performed.
    int shake = 0;      ///< Head shaken both left and right.
    int blink = 0;      ///< Eyes blinked.
    int jawOpen = 0;    ///< Mouth opened wide.
    int raiseHead = 0;  ///< Head raised (pitch up).
};
/**
 * @brief Sliding-window analyser that infers facial actions from per-frame data.
 *
 * Keeps the most recent N frames of landmarks, head euler angles and eye-state
 * confidences (newest at index 0) and derives action flags (blink, shake,
 * jaw-open, head-raise) from them. Not thread-safe: all state is plain
 * members with no synchronization.
 */
class INSPIRE_API FaceActionAnalyse {
public:
    /**
     * @brief Construct an analyser with a fixed history window.
     * @param record_list_length Number of frames kept in the window.
     */
    FaceActionAnalyse(int record_list_length) {
        record_list.resize(record_list_length);
        record_list_euler.resize(record_list_length);
        record_list_eyes.resize(record_list_length);
        record_size = record_list_length;
        index = 0;  // counts frames recorded since construction/Reset
    }

    /**
     * @brief Push one frame of data into the window (newest-first).
     * @param landmark    Dense 2D face landmarks for the frame.
     * @param euler_angle Head pose; indexed as [0]=pitch-like, [1]=yaw-like below
     *                    (caller passes (pitch, yaw, roll) — confirm ordering with caller).
     * @param eyes_status Left/right eye open confidences, [0]=left, [1]=right.
     */
    void RecordActionFrame(const std::vector<cv::Point2f> &landmark,
                           const cv::Vec3f &euler_angle,
                           const cv::Vec2f &eyes_status) {
        MoveRecordList();  // shift history so slot 0 is free for the newest frame
        record_list[0] = landmark;
        record_list_euler[0] = euler_angle;
        record_list_eyes[0] = eyes_status;
        index += 1;
    }

    /**
     * @brief Clear all recorded history and restart the frame counter.
     *
     * Re-sizes the buffers back to the window length with default-constructed
     * (zeroed) entries.
     */
    void Reset() {
        record_list.clear();
        record_list.resize(record_size);
        record_list_euler.clear();
        record_list_euler.resize(record_size);
        record_list_eyes.clear();
        record_list_eyes.resize(record_size);
        index = 0;
    }

    /**
     * @brief Analyse the current window and report detected actions.
     *
     * Until the window has been filled (fewer frames recorded than its length)
     * only NORMAL is reported. Otherwise blink / jaw-open / shake / head-raise
     * are evaluated from the recorded frames. Also refreshes the internal
     * `actions` list returned by GetActions().
     *
     * @return FaceActions with a 1 flag for each action detected this call.
     */
    FaceActions AnalysisFaceAction() {
        FaceActions actionRecord;
        actions.clear();
        eye_state_list.clear();
        if (index < record_list.size()) {
            // Window not yet full: report "normal" and skip all detectors.
            actions.push_back(NORMAL);
            actionRecord.normal = 1;
        } else {
            // Collect per-frame (left, right) eye-open confidences.
            for (int i = 0; i < record_list_eyes.size(); i++) {
                const auto &eye = record_list_eyes[i];
                std::pair<float, float> eye_state(eye[0], eye[1]);
                eye_state_list.push_back(eye_state);
            }
            // count mouth aspect ratio (height/width of the mouth in the newest frame)
            float mouth_widthwise_d =
              PointDistance(record_list[0][FaceLandmark::MOUTH_LEFT_CORNER],
                            record_list[0][FaceLandmark::MOUTH_RIGHT_CORNER]);
            float mouth_heightwise_d =
              PointDistance(record_list[0][FaceLandmark::MOUTH_UPPER],
                            record_list[0][FaceLandmark::MOUTH_LOWER]);
            float mouth_aspect_ratio = mouth_heightwise_d / mouth_widthwise_d;
            // Jaw-open: mouth noticeably taller relative to its width (threshold 0.3).
            if (mouth_aspect_ratio > 0.3) {
                actions.push_back(JAW_OPEN);
                actionRecord.jawOpen = 1;
            }
            // Blink: at least one closed-eye frame plus several open-eye frames,
            // gated on the newest head pose being near-frontal (euler bounds).
            int counter_eye_open = 0;
            int counter_eye_close = 0;
            for (auto &e : eye_state_list) {
                if (e.first < 0.5 || e.second < 0.5) {
                    counter_eye_close += 1;
                }
                if (e.first > 0.5 || e.second > 0.5) {
                    counter_eye_open += 1;
                }
            }
            if (counter_eye_close > 0 && counter_eye_open > 2 &&
                record_list_euler[0][1] > -6 && record_list_euler[0][0] < 6) {
                actions.push_back(BLINK);
                actionRecord.blink = 1;
                // NOTE(review): Reset() here zeroes the euler history, so the
                // shake/raise checks below run on cleared data in a blink frame —
                // confirm this is intentional.
                Reset();
            }
            // Shake: the yaw-like component must have swung past both -6 and +6
            // somewhere in the window.
            bool counter_head_shake_left = false;
            bool counter_head_shake_right = false;
            for (auto &e : record_list_euler) {
                if (e[1] < -6) {
                    counter_head_shake_left = true;
                }
                if (e[1] > 6) {
                    counter_head_shake_right = true;
                }
            }
            if (counter_head_shake_left && counter_head_shake_right) {
                actions.push_back(SHAKE);
                actionRecord.shake = 1;
            }
            // Head-raise: newest pitch-like component above 10.
            if (record_list_euler[0][0] > 10) {
                actions.push_back(RAISE_HEAD);
                actionRecord.raiseHead = 1;
            }
        }
        return actionRecord;
    }

    /**
     * @brief Actions detected by the most recent AnalysisFaceAction() call.
     * @return Copy of the internal action list.
     */
    std::vector<FACE_ACTION> GetActions() const {
        return actions;
    }

private:
    /// Shift every history buffer one slot toward the back (oldest entry is
    /// dropped), freeing slot 0 for the next recorded frame.
    void MoveRecordList() {
        for (int i = record_list.size() - 1; i > 0; i--) {
            record_list[i] = record_list[i - 1];
            record_list_euler[i] = record_list_euler[i - 1];
            record_list_eyes[i] = record_list_eyes[i - 1];
        }
    }

    std::vector<std::vector<cv::Point2f>> record_list;       // landmark history, newest first
    std::vector<cv::Vec3f> record_list_euler;                // head-pose history, newest first
    std::vector<cv::Vec2f> record_list_eyes;                 // eye-state history, newest first
    std::vector<std::pair<float, float>> eye_state_list;     // pair left right
    std::vector<float> mouth_state_list;                     // NOTE(review): never written or read in this class
    std::vector<FACE_ACTION> actions;                        // result of the last analysis
    int record_size;                                         // window length
    int index;                                               // frames recorded since last Reset
};
} // namespace inspire
#endif

View File

@@ -10,6 +10,7 @@
#include "data_type.h"
#include "face_process.h"
#include "track_module/quality/face_pose_quality.h"
#include "face_action.h"
namespace inspire {
@@ -28,7 +29,7 @@ public:
tracking_count_ = 0;
pose_euler_angle_.resize(3);
keyPointFive.resize(5);
// face_action_ = std::make_shared<FaceAction>(10);
face_action_ = std::make_shared<FaceActionAnalyse>(10);
}
void UpdateMatrix(const cv::Mat &matrix) {
@@ -137,9 +138,11 @@ public:
return box_square;
}
void UpdateFaceAction() {
// face_action_->RecordActionFrame(landmark_, euler_angle_);
// face_action_->AnalysisFaceAction();
FaceActions UpdateFaceAction() {
cv::Vec3f euler(high_result.pitch, high_result.yaw, high_result.roll);
cv::Vec2f eyes(left_eye_status_.back(), right_eye_status_.back());
face_action_->RecordActionFrame(landmark_, euler, eyes);
return face_action_->AnalysisFaceAction();
}
void DisableTracking() { tracking_state_ = UNTRACKING; }
@@ -318,7 +321,7 @@ public:
private:
TRACK_STATE tracking_state_;
// std::shared_ptr<FaceAction> face_action_;
std::shared_ptr<FaceActionAnalyse> face_action_;
int face_id_;
};

View File

@@ -63,6 +63,13 @@ int32_t FaceContext::FaceDetectAndTrack(CameraStream &image) {
m_quality_score_results_cache_.clear();
m_react_left_eye_results_cache_.clear();
m_react_right_eye_results_cache_.clear();
m_action_normal_results_cache_.clear();
m_action_shake_results_cache_.clear();
m_action_blink_results_cache_.clear();
m_action_jaw_open_results_cache_.clear();
m_action_raise_head_results_cache_.clear();
m_quality_score_results_cache_.clear();
m_attribute_race_results_cache_.clear();
m_attribute_gender_results_cache_.clear();
@@ -138,6 +145,11 @@ int32_t FaceContext::FacesProcess(CameraStream &image, const std::vector<HyperFa
m_attribute_race_results_cache_.resize(faces.size(), -1);
m_attribute_gender_results_cache_.resize(faces.size(), -1);
m_attribute_age_results_cache_.resize(faces.size(), -1);
m_action_normal_results_cache_.resize(faces.size(), -1);
m_action_jaw_open_results_cache_.resize(faces.size(), -1);
m_action_blink_results_cache_.resize(faces.size(), -1);
m_action_raise_head_results_cache_.resize(faces.size(), -1);
m_action_shake_results_cache_.resize(faces.size(), -1);
for (int i = 0; i < faces.size(); ++i) {
const auto &face = faces[i];
// RGB Liveness Detect
@@ -190,7 +202,12 @@ int32_t FaceContext::FacesProcess(CameraStream &image, const std::vector<HyperFa
m_react_left_eye_results_cache_[i] = new_eye_left;
m_react_right_eye_results_cache_[i] = new_eye_right;
}
const auto actions = target.UpdateFaceAction();
m_action_normal_results_cache_[i] = actions.normal;
m_action_jaw_open_results_cache_[i] = actions.jawOpen;
m_action_blink_results_cache_[i] = actions.blink;
m_action_raise_head_results_cache_[i] = actions.raiseHead;
m_action_shake_results_cache_[i] = actions.shake;
} else {
INSPIRE_LOGD("Serialized objects cannot connect to trace objects in memory, and there may be some problems");
}
@@ -274,6 +291,26 @@ const std::vector<int>& FaceContext::GetFaceAgeBracketResultsCache() const {
return m_attribute_age_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceNormalAactionsResultCache() const {
return m_action_normal_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceJawOpenAactionsResultCache() const {
return m_action_jaw_open_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceBlinkAactionsResultCache() const {
return m_action_blink_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceShakeAactionsResultCache() const {
return m_action_shake_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceRaiseHeadAactionsResultCache() const {
return m_action_raise_head_results_cache_;
}
int32_t FaceContext::FaceFeatureExtract(CameraStream &image, FaceBasicData& data) {
std::lock_guard<std::mutex> lock(m_mtx_);
int32_t ret;

View File

@@ -2,6 +2,7 @@
// Created by Tunm-Air13 on 2023/9/7.
//
#pragma once
#include <vector>
#ifndef HYPERFACEREPO_FACE_CONTEXT_H
#define HYPERFACEREPO_FACE_CONTEXT_H
@@ -261,6 +262,36 @@ public:
*/
const std::vector<int>& GetFaceAgeBracketResultsCache() const;
/**
* @brief Gets the cache of face action normal results.
* @return A const reference to a vector containing face action normal results.
*/
const std::vector<int>& GetFaceNormalAactionsResultCache() const;
/**
* @brief Gets the cache of face action jaw open results.
* @return A const reference to a vector containing face action jaw open results.
*/
const std::vector<int>& GetFaceJawOpenAactionsResultCache() const;
/**
* @brief Gets the cache of face action blink results.
* @return A const reference to a vector containing face action blink results.
*/
const std::vector<int>& GetFaceBlinkAactionsResultCache() const;
/**
* @brief Gets the cache of face action shake results.
* @return A const reference to a vector containing face action shake results.
*/
const std::vector<int>& GetFaceShakeAactionsResultCache() const;
/**
* @brief Gets the cache of face action raise head results.
* @return A const reference to a vector containing face action raise head results.
*/
const std::vector<int>& GetFaceRaiseHeadAactionsResultCache() const;
/**
* @brief Gets the cache of the current face features.
* @return A const reference to the Embedded object containing current face feature data.
@@ -294,6 +325,13 @@ private:
std::vector<float> m_quality_score_results_cache_; ///< Cache for RGB face quality score results
std::vector<float> m_react_left_eye_results_cache_; ///< Cache for Left eye state in face interaction
std::vector<float> m_react_right_eye_results_cache_; ///< Cache for Right eye state in face interaction
std::vector<int> m_action_normal_results_cache_; ///< Cache for normal action in face interaction
std::vector<int> m_action_shake_results_cache_; ///< Cache for shake action in face interaction
std::vector<int> m_action_blink_results_cache_; ///< Cache for blink action in face interaction
std::vector<int> m_action_jaw_open_results_cache_; ///< Cache for jaw open action in face interaction
std::vector<int> m_action_raise_head_results_cache_; ///< Cache for raise head action in face interaction
std::vector<int> m_attribute_race_results_cache_;
std::vector<int> m_attribute_gender_results_cache_;
std::vector<int> m_attribute_age_results_cache_;

View File

@@ -7,6 +7,6 @@
#define INSPIRE_FACE_VERSION_MAJOR_STR "1"
#define INSPIRE_FACE_VERSION_MINOR_STR "1"
#define INSPIRE_FACE_VERSION_PATCH_STR "4"
#define INSPIRE_FACE_VERSION_PATCH_STR "6"
#endif //HYPERFACEREPO_INFORMATION_H

View File

@@ -4,7 +4,7 @@
#ifndef MODELLOADERTAR_INSPIREARCHIVE_H
#define MODELLOADERTAR_INSPIREARCHIVE_H
#include "sample_archive.h"
#include "simple_archive.h"
#include "inspire_model/inspire_model.h"
#include "yaml-cpp/yaml.h"
#include "fstream"
@@ -66,6 +66,11 @@ public:
PrintSubFiles();
}
void Release() {
m_status_ = NOT_READ;
Close();
}
private:
int32_t loadManifestFile() {

View File

@@ -89,13 +89,20 @@ public:
return m_subfiles_names_;
}
~SimpleArchive() {
~SimpleArchive() {
Close();
}
void Close() {
if (m_tar_.get() != nullptr) {
mtar_close(m_tar_.get());
}
m_tar_.reset();
m_load_file_status_ = SARC_NOT_LOAD;
m_subfiles_names_.clear();
}
void PrintSubFiles() {
std::cout << "Subfiles: " << m_subfiles_names_.size() << std::endl;

View File

@@ -230,7 +230,6 @@ bool FaceTrack::TrackFace(CameraStream &image, FaceObject &face) {
}
face.SetConfidence(score);
face.UpdateFaceAction();
return true;
}
@@ -308,8 +307,17 @@ void FaceTrack::nms(float th) {
float inter = w * h;
float ovr = inter / (area[i] + area[j] - inter);
if (ovr >= th) {
trackingFace.erase(trackingFace.begin() + j);
area.erase(area.begin() + j);
// Compare tracking IDs to decide which to keep
if (trackingFace[i].GetTrackingId() < trackingFace[j].GetTrackingId()) {
trackingFace.erase(trackingFace.begin() + j);
area.erase(area.begin() + j);
} else {
trackingFace.erase(trackingFace.begin() + i);
area.erase(area.begin() + i);
// If we erase i, we need to break the inner loop
// and start over with a new i, because the indexes shifted.
break;
}
} else {
j++;
}
@@ -317,6 +325,7 @@ void FaceTrack::nms(float th) {
}
}
void FaceTrack::BlackingTrackingRegion(cv::Mat &image, cv::Rect &rect_mask) {
int height = image.rows;
int width = image.cols;

View File

@@ -42,6 +42,8 @@ public:
const static int NOSE_CORNER = 69; ///< Landmark index for the tip of the nose.
const static int MOUTH_LEFT_CORNER = 45; ///< Landmark index for the left corner of the mouth.
const static int MOUTH_RIGHT_CORNER = 50; ///< Landmark index for the right corner of the mouth.
const static int MOUTH_LOWER = 37 - 1;
const static int MOUTH_UPPER = 104 - 1;
const static int NUM_OF_LANDMARK = 106; ///< Total number of landmarks detected.

View File

@@ -1 +1 @@
InspireFace Version: 1.1.4
InspireFace Version: 1.1.6

View File

@@ -2,6 +2,8 @@
#include "c_api/intypedef.h"
#include "opencv2/opencv.hpp"
#include "inspireface/c_api/inspireface.h"
#include <unordered_map>
#include <functional>
void drawMode(cv::Mat& frame, HFDetectMode mode) {
std::string modeText;
@@ -19,9 +21,26 @@ void drawMode(cv::Mat& frame, HFDetectMode mode) {
modeText = "Mode: Unknown";
break;
}
cv::putText(frame, modeText, cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(255, 255, 255), 2);
cv::putText(frame, modeText, cv::Point(10, 30), cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(90, 100, 255), 2);
}
/**
 * @brief Produce a stable, distinct drawing color for a tracking id.
 *
 * Maps the id onto a repeating palette of 100 evenly spaced hues and converts
 * the HSV color to BGR via a 1x1 OpenCV image, so the same id always gets the
 * same color.
 *
 * @param id Tracking identifier (any integer; wrapped modulo the palette size).
 * @return BGR color as a cv::Scalar.
 */
cv::Scalar generateColor(int id) {
    const int paletteSize = 100;
    const int slot = id % paletteSize;
    const int hue = (slot * 360 / paletteSize) % 360;
    // Fixed saturation/value keep colors vivid but not glaring.
    const int saturation = 255;
    const int value = 200;
    cv::Mat hsvPixel(1, 1, CV_8UC3, cv::Scalar(hue, saturation, value));
    cv::Mat bgrPixel;
    cv::cvtColor(hsvPixel, bgrPixel, cv::COLOR_HSV2BGR);
    const cv::Vec3b bgr = bgrPixel.at<cv::Vec3b>(0, 0);
    return cv::Scalar(bgr[0], bgr[1], bgr[2]);
}
int main(int argc, char* argv[]) {
// Check whether the number of parameters is correct
if (argc != 3) {
@@ -46,11 +65,11 @@ int main(int argc, char* argv[]) {
// Enable the functions in the pipeline: mask detection, live detection, and face quality detection
HOption option = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_INTERACTION;
// Video or frame sequence mode uses VIDEO-MODE, which is face detection with tracking
HFDetectMode detMode = HF_DETECT_MODE_LIGHT_TRACK;
HFDetectMode detMode = HF_DETECT_MODE_TRACK_BY_DETECTION;
// Maximum number of faces detected
HInt32 maxDetectNum = 20;
// Face detection image input level
HInt32 detectPixelLevel = 160;
HInt32 detectPixelLevel = 640;
// fps in tracking-by-detection mode
HInt32 trackByDetectFps = 20;
HFSession session = {0};
@@ -129,11 +148,11 @@ int main(int argc, char* argv[]) {
std::cout << "HFMultipleFacePipelineProcessOptional error: " << ret << std::endl;
return ret;
}
HFFaceIntereactionResult result;
ret = HFGetFaceIntereactionResult(session, &result);
HFFaceIntereactionState result;
ret = HFGetFaceIntereactionStateResult(session, &result);
if (ret != HSUCCEED)
{
std::cout << "HFGetFaceIntereactionResult error: " << ret << std::endl;
std::cout << "HFGetFaceIntereactionStateResult error: " << ret << std::endl;
return ret;
}
std::cout << "Left eye status: " << result.leftEyeStatusConfidence[0] << std::endl;
@@ -144,13 +163,14 @@ int main(int argc, char* argv[]) {
for (int index = 0; index < faceNum; ++index) {
// std::cout << "========================================" << std::endl;
// std::cout << "Process face index: " << index << std::endl;
// Print FaceID, In VIDEO-MODE it is fixed, but it may be lost
auto trackId = multipleFaceData.trackIds[index];
// Use OpenCV's Rect to receive face bounding boxes
auto rect = cv::Rect(multipleFaceData.rects[index].x, multipleFaceData.rects[index].y,
multipleFaceData.rects[index].width, multipleFaceData.rects[index].height);
cv::rectangle(draw, rect, cv::Scalar(0, 100, 255), 5);
cv::rectangle(draw, rect, generateColor(trackId), 3);
// Print FaceID, In VIDEO-MODE it is fixed, but it may be lost
auto trackId = multipleFaceData.trackIds[index];
// std::cout << "FaceID: " << trackId << std::endl;
// Print Head euler angle, It can often be used to judge the quality of a face by the Angle of the head
@@ -160,7 +180,7 @@ int main(int argc, char* argv[]) {
// Add TrackID to the drawing
cv::putText(draw, "ID: " + std::to_string(trackId), cv::Point(rect.x, rect.y - 10),
cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 255, 0), 2);
cv::FONT_HERSHEY_SIMPLEX, 0.5, generateColor(trackId), 2);
HInt32 numOfLmk;
HFGetNumOfFaceDenseLandmark(&numOfLmk);
@@ -172,7 +192,7 @@ int main(int argc, char* argv[]) {
}
for (size_t i = 0; i < numOfLmk; i++) {
cv::Point2f p(denseLandmarkPoints[i].x, denseLandmarkPoints[i].y);
cv::circle(draw, p, 0, (0, 0, 255), 2);
cv::circle(draw, p, 0, generateColor(trackId), 2);
}
}

View File

@@ -25,11 +25,17 @@ public:
void setTestResDir(const std::string &dir) { Enviro::testResDir = dir; }
const std::string &getTestRuntimeFullPath() const { return runtimeFullPath; }
void setTestRuntimeFullPath(const std::string &path) { Enviro::runtimeFullPath = path; }
private:
Enviro() {}
std::string packName = "Pikachu";
std::string testResDir = "test_res";
std::string runtimeFullPath = "";
};
#endif //INSPIREFACE_ENVIRO_H

View File

@@ -13,6 +13,8 @@
#define TEST_MODEL_FILE Enviro::getInstance().getPackName() // Optional model file
#define SET_PACK_NAME(name) Enviro::getInstance().setPackName(name)
#define SET_TEST_DIR(dir) Enviro::getInstance().setTestResDir(dir)
#define SET_RUNTIME_FULLPATH_NAME(name) Enviro::getInstance().setTestRuntimeFullPath(name)
#define GET_RUNTIME_FULLPATH_NAME Enviro::getInstance().getTestRuntimeFullPath()
#define TEST_LFW_FUNNELED_TXT "valid_lfw_funneled.txt" // LFW Index txt file
#define LFW_FUNNELED_DIR "" // LFW funneled data dir

View File

@@ -58,17 +58,17 @@ int main(int argc, char* argv[]) {
std::string packPath;
// Add command line options
auto cli = session.cli()
| Catch::clara::Opt(pack, "value")["--pack"]("Resource pack filename")
| Catch::clara::Opt(testDir, "value")["--test_dir"]("Test dir resource")
| Catch::clara::Opt(packPath, "value")["--pack_path"]("The specified path to the pack file");
auto cli =
session.cli() | Catch::clara::Opt(pack, "value")["--pack"]("Resource pack filename") |
Catch::clara::Opt(testDir, "value")["--test_dir"]("Test dir resource") |
Catch::clara::Opt(packPath, "value")["--pack_path"]("The specified path to the pack file");
// Set combined CLI to the session
session.cli(cli);
// Parse command line arguments
int returnCode = session.applyCommandLine(argc, argv);
if (returnCode != 0) // Indicate an error
if (returnCode != 0) // Indicate an error
return returnCode;
if (!testDir.empty()) {
@@ -84,12 +84,15 @@ int main(int argc, char* argv[]) {
SET_PACK_NAME(pack);
fullPath = GET_MODEL_FILE();
TEST_PRINT("Updated global Pack to: {}", TEST_MODEL_FILE);
SET_RUNTIME_FULLPATH_NAME(fullPath);
} else if (!packPath.empty()) {
fullPath = packPath;
TEST_PRINT("Updated global Pack File to: {}", packPath);
SET_RUNTIME_FULLPATH_NAME(packPath);
} else {
fullPath = GET_MODEL_FILE();
TEST_PRINT("Using default global Pack: {}", TEST_MODEL_FILE);
SET_RUNTIME_FULLPATH_NAME(fullPath);
}
std::cout << fullPath << std::endl;
@@ -102,5 +105,8 @@ int main(int argc, char* argv[]) {
// Set log level
HFSetLogLevel(HF_LOG_INFO);
return session.run();
ret = session.run();
HFTerminateInspireFace();
HFDeBugShowResourceStatistics();
return ret;
}

View File

@@ -7,32 +7,31 @@
#include "inspireface/c_api/inspireface.h"
#include "../test_helper/test_tools.h"
TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
enum AGE_BRACKED {
AGE_0_2 = 0, ///< Age 0-2 years old
AGE_3_9, ///< Age 3-9 years old
AGE_10_19, ///< Age 10-19 years old
AGE_20_29, ///< Age 20-29 years old
AGE_30_39, ///< Age 30-39 years old
AGE_40_49, ///< Age 40-49 years old
AGE_50_59, ///< Age 50-59 years old
AGE_60_69, ///< Age 60-69 years old
MORE_THAN_70, ///< Age more than 70 years old
AGE_0_2 = 0, ///< Age 0-2 years old
AGE_3_9, ///< Age 3-9 years old
AGE_10_19, ///< Age 10-19 years old
AGE_20_29, ///< Age 20-29 years old
AGE_30_39, ///< Age 30-39 years old
AGE_40_49, ///< Age 40-49 years old
AGE_50_59, ///< Age 50-59 years old
AGE_60_69, ///< Age 60-69 years old
MORE_THAN_70, ///< Age more than 70 years old
};
enum GENDER {
FEMALE = 0, ///< Female
MALE, ///< Male
FEMALE = 0, ///< Female
MALE, ///< Male
};
enum RACE {
BLACK = 0, ///< Black
ASIAN, ///< Asian
LATINO_HISPANIC, ///< Latino/Hispanic
MIDDLE_EASTERN, ///< Middle Eastern
WHITE, ///< White
BLACK = 0, ///< Black
ASIAN, ///< Asian
LATINO_HISPANIC, ///< Latino/Hispanic
MIDDLE_EASTERN, ///< Middle Eastern
WHITE, ///< White
};
HResult ret;
@@ -57,15 +56,16 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
REQUIRE(multipleFaceData.detectedNum == 1);
// Run pipeline
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData, HF_ENABLE_FACE_ATTRIBUTE);
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData,
HF_ENABLE_FACE_ATTRIBUTE);
REQUIRE(ret == HSUCCEED);
HFFaceAttributeResult result = {0};
ret = HFGetFaceAttributeResult(session, &result);
REQUIRE(ret == HSUCCEED);
REQUIRE(result.num == 1);
// Check attribute
// Check attribute
CHECK(result.race[0] == BLACK);
CHECK(result.ageBracket[0] == AGE_10_19);
CHECK(result.gender[0] == FEMALE);
@@ -88,7 +88,8 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
REQUIRE(multipleFaceData.detectedNum == 2);
// Run pipeline
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData, HF_ENABLE_FACE_ATTRIBUTE);
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData,
HF_ENABLE_FACE_ATTRIBUTE);
REQUIRE(ret == HSUCCEED);
HFFaceAttributeResult result = {0};
@@ -96,14 +97,12 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
REQUIRE(ret == HSUCCEED);
REQUIRE(result.num == 2);
// Check attribute
for (size_t i = 0; i < result.num; i++)
{
// Check attribute
for (size_t i = 0; i < result.num; i++) {
CHECK(result.race[i] == WHITE);
CHECK(result.ageBracket[i] == AGE_20_29);
CHECK(result.gender[i] == FEMALE);
}
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -113,7 +112,49 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
ret = HFReleaseInspireFaceSession(session);
session = nullptr;
REQUIRE(ret == HSUCCEED);
}
TEST_CASE("test_FacePipelineRobustness", "[robustness]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("Exception") {
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
// Input exception data
HFImageStream nullHandle = {0};
HFMultipleFaceData nullfaces = {0};
ret =
HFMultipleFacePipelineProcessOptional(session, nullHandle, &nullfaces, HF_ENABLE_NONE);
REQUIRE(ret == HERR_INVALID_IMAGE_STREAM_HANDLE);
// Get a face picture
HFImageStream img1Handle;
auto img1 = cv::imread(GET_DATA("data/bulk/image_T1.jpeg"));
ret = CVImageToImageStream(img1, img1Handle);
REQUIRE(ret == HSUCCEED);
// Input correct Image and exception faces struct
ret =
HFMultipleFacePipelineProcessOptional(session, img1Handle, &nullfaces, HF_ENABLE_NONE);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageStream(img1Handle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
// Multiple release
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HERR_INVALID_CONTEXT_HANDLE);
HFDeBugShowResourceStatistics();
}
}
TEST_CASE("test_FacePipeline", "[face_pipeline]") {
@@ -172,14 +213,11 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
REQUIRE(ret == HSUCCEED);
img2Handle = nullptr;
ret = HFReleaseInspireFaceSession(session);
session = nullptr;
REQUIRE(ret == HSUCCEED);
}
SECTION("face mask detect") {
HResult ret;
HFSessionCustomParameter parameter = {0};
@@ -213,7 +251,6 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
REQUIRE(ret == HSUCCEED);
img1Handle = nullptr;
// no mask face
HFImageStream img2Handle;
auto img2 = cv::imread(GET_DATA("data/bulk/face_sample.png"));
@@ -225,7 +262,7 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
REQUIRE(ret == HSUCCEED);
ret = HFGetFaceMaskConfidence(session, &confidence);
REQUIRE(ret == HSUCCEED);
// spdlog::info("mask {}", confidence.confidence[0]);
// spdlog::info("mask {}", confidence.confidence[0]);
CHECK(confidence.num > 0);
CHECK(confidence.confidence[0] < 0.1);
@@ -233,7 +270,6 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
REQUIRE(ret == HSUCCEED);
img2Handle = nullptr;
ret = HFReleaseInspireFaceSession(session);
session = nullptr;
REQUIRE(ret == HSUCCEED);
@@ -259,7 +295,8 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum > 0);
ret = HFMultipleFacePipelineProcessOptional(session, superiorHandle, &multipleFaceData, option);
ret =
HFMultipleFacePipelineProcessOptional(session, superiorHandle, &multipleFaceData, option);
REQUIRE(ret == HSUCCEED);
HFloat quality;
@@ -294,7 +331,6 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
}
TEST_CASE("test_FaceReaction", "[face_reaction]") {
@@ -326,20 +362,19 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
// Predict eyes status
ret = HFMultipleFacePipelineProcess(session, imgHandle, &multipleFaceData, parameter);
REQUIRE(ret == HSUCCEED);
// Get results
HFFaceIntereactionResult result;
ret = HFGetFaceIntereactionResult(session, &result);
HFFaceIntereactionState result;
ret = HFGetFaceIntereactionStateResult(session, &result);
REQUIRE(multipleFaceData.detectedNum == result.num);
REQUIRE(ret == HSUCCEED);
// Check
// Check
CHECK(result.leftEyeStatusConfidence[0] > 0.5f);
CHECK(result.rightEyeStatusConfidence[0] > 0.5f);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
}
SECTION("close eyes") {
@@ -358,20 +393,19 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
// Predict eyes status
ret = HFMultipleFacePipelineProcess(session, imgHandle, &multipleFaceData, parameter);
REQUIRE(ret == HSUCCEED);
// Get results
HFFaceIntereactionResult result;
ret = HFGetFaceIntereactionResult(session, &result);
HFFaceIntereactionState result;
ret = HFGetFaceIntereactionStateResult(session, &result);
REQUIRE(multipleFaceData.detectedNum == result.num);
REQUIRE(ret == HSUCCEED);
// Check
// Check
CHECK(result.leftEyeStatusConfidence[0] < 0.5f);
CHECK(result.rightEyeStatusConfidence[0] < 0.5f);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
}
SECTION("Close one eye and open the other") {
@@ -390,23 +424,21 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
// Predict eyes status
ret = HFMultipleFacePipelineProcess(session, imgHandle, &multipleFaceData, parameter);
REQUIRE(ret == HSUCCEED);
// Get results
HFFaceIntereactionResult result;
ret = HFGetFaceIntereactionResult(session, &result);
HFFaceIntereactionState result;
ret = HFGetFaceIntereactionStateResult(session, &result);
REQUIRE(multipleFaceData.detectedNum == result.num);
REQUIRE(ret == HSUCCEED);
// Check
// Check
CHECK(result.leftEyeStatusConfidence[0] < 0.5f);
CHECK(result.rightEyeStatusConfidence[0] > 0.5f);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
}
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
}

View File

@@ -0,0 +1,40 @@
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "inspireface/herror.h"
#include <cstdio>
TEST_CASE("test_System", "[system]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
// The global TEST environment has been started, so this side needs to be temporarily closed
// before testing
HFTerminateInspireFace();
SECTION("Create a session test when it is not loaded") {
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HERR_ARCHIVE_NOT_LOAD);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HERR_INVALID_CONTEXT_HANDLE);
}
// Restart and start InspireFace
auto ret = HFLaunchInspireFace(GET_RUNTIME_FULLPATH_NAME.c_str());
REQUIRE(ret == HSUCCEED);
SECTION("Create a session test when it is reloaded") {
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
}

View File

@@ -1424,31 +1424,62 @@ if _libs[_LIBRARY_FILENAME].has("HFFaceQualityDetect", "cdecl"):
HFFaceQualityDetect.restype = HResult
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 645
class struct_HFFaceIntereactionResult(Structure):
class struct_HFFaceIntereactionState(Structure):
pass
struct_HFFaceIntereactionResult.__slots__ = [
struct_HFFaceIntereactionState.__slots__ = [
'num',
'leftEyeStatusConfidence',
'rightEyeStatusConfidence',
]
struct_HFFaceIntereactionResult._fields_ = [
struct_HFFaceIntereactionState._fields_ = [
('num', HInt32),
('leftEyeStatusConfidence', HPFloat),
('rightEyeStatusConfidence', HPFloat),
]
HFFaceIntereactionResult = struct_HFFaceIntereactionResult# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 645
HFFaceIntereactionState = struct_HFFaceIntereactionState# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 645
PHFFaceIntereactionResult = POINTER(struct_HFFaceIntereactionResult)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 645
PHFFaceIntereactionState = POINTER(struct_HFFaceIntereactionState)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 645
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 647
if _libs[_LIBRARY_FILENAME].has("HFGetFaceIntereactionResult", "cdecl"):
HFGetFaceIntereactionResult = _libs[_LIBRARY_FILENAME].get("HFGetFaceIntereactionResult", "cdecl")
HFGetFaceIntereactionResult.argtypes = [HFSession, PHFFaceIntereactionResult]
HFGetFaceIntereactionResult.restype = HResult
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 652
if _libs[_LIBRARY_FILENAME].has("HFGetFaceIntereactionStateResult", "cdecl"):
HFGetFaceIntereactionStateResult = _libs[_LIBRARY_FILENAME].get("HFGetFaceIntereactionStateResult", "cdecl")
HFGetFaceIntereactionStateResult.argtypes = [HFSession, PHFFaceIntereactionState]
HFGetFaceIntereactionStateResult.restype = HResult
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 675
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 661
class struct_HFFaceIntereactionsActions(Structure):
pass
struct_HFFaceIntereactionsActions.__slots__ = [
'num',
'normal',
'shake',
'jawOpen',
'headRiase',
'blink',
]
struct_HFFaceIntereactionsActions._fields_ = [
('num', HInt32),
('normal', HPInt32),
('shake', HPInt32),
('jawOpen', HPInt32),
('headRiase', HPInt32),
('blink', HPInt32),
]
HFFaceIntereactionsActions = struct_HFFaceIntereactionsActions# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 661
PHFFaceIntereactionsActions = POINTER(struct_HFFaceIntereactionsActions)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 661
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 663
if _libs[_LIBRARY_FILENAME].has("HFGetFaceIntereactionActionsResult", "cdecl"):
HFGetFaceIntereactionActionsResult = _libs[_LIBRARY_FILENAME].get("HFGetFaceIntereactionActionsResult", "cdecl")
HFGetFaceIntereactionActionsResult.argtypes = [HFSession, PHFFaceIntereactionsActions]
HFGetFaceIntereactionActionsResult.restype = HResult
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 691
class struct_HFFaceAttributeResult(Structure):
pass
@@ -1465,17 +1496,17 @@ struct_HFFaceAttributeResult._fields_ = [
('ageBracket', HPInt32),
]
HFFaceAttributeResult = struct_HFFaceAttributeResult# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 675
HFFaceAttributeResult = struct_HFFaceAttributeResult# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 691
PHFFaceAttributeResult = POINTER(struct_HFFaceAttributeResult)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 675
PHFFaceAttributeResult = POINTER(struct_HFFaceAttributeResult)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 691
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 687
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 703
if _libs[_LIBRARY_FILENAME].has("HFGetFaceAttributeResult", "cdecl"):
HFGetFaceAttributeResult = _libs[_LIBRARY_FILENAME].get("HFGetFaceAttributeResult", "cdecl")
HFGetFaceAttributeResult.argtypes = [HFSession, PHFFaceAttributeResult]
HFGetFaceAttributeResult.restype = HResult
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 701
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 717
class struct_HFInspireFaceVersion(Structure):
pass
@@ -1490,51 +1521,51 @@ struct_HFInspireFaceVersion._fields_ = [
('patch', c_int),
]
HFInspireFaceVersion = struct_HFInspireFaceVersion# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 701
HFInspireFaceVersion = struct_HFInspireFaceVersion# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 717
PHFInspireFaceVersion = POINTER(struct_HFInspireFaceVersion)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 701
PHFInspireFaceVersion = POINTER(struct_HFInspireFaceVersion)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 717
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 711
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 727
if _libs[_LIBRARY_FILENAME].has("HFQueryInspireFaceVersion", "cdecl"):
HFQueryInspireFaceVersion = _libs[_LIBRARY_FILENAME].get("HFQueryInspireFaceVersion", "cdecl")
HFQueryInspireFaceVersion.argtypes = [PHFInspireFaceVersion]
HFQueryInspireFaceVersion.restype = HResult
enum_HFLogLevel = c_int# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
enum_HFLogLevel = c_int# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
HF_LOG_NONE = 0# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
HF_LOG_NONE = 0# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
HF_LOG_DEBUG = (HF_LOG_NONE + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
HF_LOG_DEBUG = (HF_LOG_NONE + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
HF_LOG_INFO = (HF_LOG_DEBUG + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
HF_LOG_INFO = (HF_LOG_DEBUG + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
HF_LOG_WARN = (HF_LOG_INFO + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
HF_LOG_WARN = (HF_LOG_INFO + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
HF_LOG_ERROR = (HF_LOG_WARN + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
HF_LOG_ERROR = (HF_LOG_WARN + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
HF_LOG_FATAL = (HF_LOG_ERROR + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
HF_LOG_FATAL = (HF_LOG_ERROR + 1)# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
HFLogLevel = enum_HFLogLevel# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 723
HFLogLevel = enum_HFLogLevel# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 739
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 728
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 744
if _libs[_LIBRARY_FILENAME].has("HFSetLogLevel", "cdecl"):
HFSetLogLevel = _libs[_LIBRARY_FILENAME].get("HFSetLogLevel", "cdecl")
HFSetLogLevel.argtypes = [HFLogLevel]
HFSetLogLevel.restype = HResult
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 733
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 749
if _libs[_LIBRARY_FILENAME].has("HFLogDisable", "cdecl"):
HFLogDisable = _libs[_LIBRARY_FILENAME].get("HFLogDisable", "cdecl")
HFLogDisable.argtypes = []
HFLogDisable.restype = HResult
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 746
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 762
if _libs[_LIBRARY_FILENAME].has("HFDeBugImageStreamImShow", "cdecl"):
HFDeBugImageStreamImShow = _libs[_LIBRARY_FILENAME].get("HFDeBugImageStreamImShow", "cdecl")
HFDeBugImageStreamImShow.argtypes = [HFImageStream]
HFDeBugImageStreamImShow.restype = None
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 757
# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 773
if _libs[_LIBRARY_FILENAME].has("HFDeBugImageStreamDecodeSave", "cdecl"):
HFDeBugImageStreamDecodeSave = _libs[_LIBRARY_FILENAME].get("HFDeBugImageStreamDecodeSave", "cdecl")
HFDeBugImageStreamDecodeSave.argtypes = [HFImageStream, HPath]
@@ -1618,11 +1649,13 @@ HFFaceMaskConfidence = struct_HFFaceMaskConfidence# /Users/tunm/work/InspireFace
HFFaceQualityConfidence = struct_HFFaceQualityConfidence# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 611
HFFaceIntereactionResult = struct_HFFaceIntereactionResult# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 645
HFFaceIntereactionState = struct_HFFaceIntereactionState# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 645
HFFaceAttributeResult = struct_HFFaceAttributeResult# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 675
HFFaceIntereactionsActions = struct_HFFaceIntereactionsActions# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 661
HFInspireFaceVersion = struct_HFInspireFaceVersion# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 701
HFFaceAttributeResult = struct_HFFaceAttributeResult# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 691
HFInspireFaceVersion = struct_HFInspireFaceVersion# /Users/tunm/work/InspireFace/cpp/inspireface/c_api/inspireface.h: 717
# No inserted files

View File

@@ -150,6 +150,11 @@ class FaceExtended:
quality_confidence: float
left_eye_status_confidence: float
right_eye_status_confidence: float
action_normal: int
action_jaw_open: int
action_shake: int
action_blink: int
action_head_raise: int
race: int
gender: int
age_bracket: int
@@ -356,7 +361,6 @@ class InspireFaceSession(object):
if ret != 0:
logger.error(f"Set filter minimum face pixel size error: {ret}")
def face_pipeline(self, image, faces: List[FaceInformation], exec_param) -> List[FaceExtended]:
"""
Processes detected faces to extract additional attributes based on the provided execution parameters.
@@ -387,7 +391,7 @@ class InspireFaceSession(object):
logger.error(f"Face pipeline error: {ret}")
return []
extends = [FaceExtended(-1.0, -1.0, -1.0, -1.0, -1.0, -1, -1, -1) for _ in range(len(faces))]
extends = [FaceExtended(-1.0, -1.0, -1.0, -1.0, -1.0, 0, 0, 0, 0, 0, -1, -1, -1) for _ in range(len(faces))]
self._update_mask_confidence(exec_param, flag, extends)
self._update_rgb_liveness_confidence(exec_param, flag, extends)
self._update_face_quality_confidence(exec_param, flag, extends)
@@ -456,14 +460,25 @@ class InspireFaceSession(object):
def _update_face_interact_confidence(self, exec_param, flag, extends):
if (flag == "object" and exec_param.enable_interaction_liveness) or (
flag == "bitmask" and exec_param & HF_ENABLE_INTERACTION):
results = HFFaceIntereactionResult()
ret = HFGetFaceIntereactionResult(self._sess, PHFFaceIntereactionResult(results))
results = HFFaceIntereactionState()
ret = HFGetFaceIntereactionStateResult(self._sess, PHFFaceIntereactionState(results))
if ret == 0:
for i in range(results.num):
extends[i].left_eye_status_confidence = results.leftEyeStatusConfidence[i]
extends[i].right_eye_status_confidence = results.rightEyeStatusConfidence[i]
else:
logger.error(f"Get face interact result error: {ret}")
actions = HFFaceIntereactionsActions()
ret = HFGetFaceIntereactionActionsResult(self._sess, PHFFaceIntereactionsActions(actions))
if ret == 0:
for i in range(results.num):
extends[i].action_normal = actions.normal[i]
extends[i].action_shake = actions.shake[i]
extends[i].action_jaw_open = actions.jawOpen[i]
extends[i].action_head_raise = actions.headRiase[i]
extends[i].action_blink = actions.blink[i]
else:
logger.error(f"Get face action result error: {ret}")
def _update_rgb_liveness_confidence(self, exec_param, flag, extends: List[FaceExtended]):
if (flag == "object" and exec_param.enable_liveness) or (

View File

@@ -1,14 +1,41 @@
import time
import click
import cv2
import inspireface as ifac
from inspireface.param import *
import numpy as np
def generate_color(id):
"""
Generate a bright color based on the given integer ID. Ensures 50 unique colors.
Args:
id (int): The ID for which to generate a color.
Returns:
tuple: A tuple representing the color in BGR format.
"""
max_id = 50 # Number of unique colors
id = id % max_id
# Generate HSV color
hue = int((id * 360 / max_id) % 360) # Distribute hue values equally
saturation = 200 + (55 * id) % 55 # High saturation for bright colors
value = 200 + (55 * id) % 55 # High value for bright colors
hsv_color = np.uint8([[[hue, saturation, value]]])
rgb_color = cv2.cvtColor(hsv_color, cv2.COLOR_HSV2BGR)[0][0]
return (int(rgb_color[0]), int(rgb_color[1]), int(rgb_color[2]))
@click.command()
@click.argument("resource_path")
@click.argument('source')
@click.option('--show', is_flag=True, help='Display the video stream or video file in a window.')
def case_face_tracker_from_video(resource_path, source, show):
@click.option('--out', type=str, default=None, help='Path to save the processed video.')
def case_face_tracker_from_video(resource_path, source, show, out):
"""
Launch a face tracking process from a video source. The 'source' can either be a webcam index (0, 1, ...)
or a path to a video file. Use the --show option to display the video.
@@ -17,6 +44,7 @@ def case_face_tracker_from_video(resource_path, source, show):
resource_path (str): Path to the resource directory for face tracking algorithms.
source (str): Webcam index or path to the video file.
show (bool): If set, the video will be displayed in a window.
out (str): Path to save the processed video.
"""
# Initialize the face tracker or other resources.
print(f"Initializing with resources from: {resource_path}")
@@ -25,9 +53,9 @@ def case_face_tracker_from_video(resource_path, source, show):
assert ret, "Launch failure. Please ensure the resource path is correct."
# Optional features, loaded during session creation based on the modules specified.
opt = HF_ENABLE_NONE
session = ifac.InspireFaceSession(opt, HF_DETECT_MODE_LIGHT_TRACK) # Use video mode
opt = HF_ENABLE_NONE | HF_ENABLE_INTERACTION
session = ifac.InspireFaceSession(opt, HF_DETECT_MODE_ALWAYS_DETECT, max_detect_num=25, detect_pixel_level=320) # Use video mode
session.set_filter_minimum_face_pixel_size(0)
# Determine if the source is a digital webcam index or a video file path.
try:
source_index = int(source) # Try to convert source to an integer.
@@ -42,6 +70,15 @@ def case_face_tracker_from_video(resource_path, source, show):
print("Error: Could not open video source.")
return
# VideoWriter to save the processed video if out is provided.
if out:
fourcc = cv2.VideoWriter_fourcc(*'XVID')
fps = cap.get(cv2.CAP_PROP_FPS) if cap.get(cv2.CAP_PROP_FPS) > 0 else 30
frame_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
frame_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
out_video = cv2.VideoWriter(out, fourcc, fps, (frame_width, frame_height))
print(f"Saving video to: {out}")
# Main loop to process video frames.
while True:
ret, frame = cap.read()
@@ -50,43 +87,55 @@ def case_face_tracker_from_video(resource_path, source, show):
# Process frame here (e.g., face detection/tracking).
faces = session.face_detection(frame)
for idx, face in enumerate(faces):
print(f"{'==' * 20}")
print(f"idx: {idx}")
# Print Euler angles of the face.
print(f"roll: {face.roll}, yaw: {face.yaw}, pitch: {face.pitch}")
exts = session.face_pipeline(frame, faces, HF_ENABLE_INTERACTION)
print(exts)
for idx, face in enumerate(faces):
# Get face bounding box
x1, y1, x2, y2 = face.location
# Calculate center, size, and angle
center = ((x1 + x2) / 2, (y1 + y2) / 2)
size = (x2 - x1, y2 - y1)
angle = face.roll
# Get rotation matrix
rotation_matrix = cv2.getRotationMatrix2D(center, angle, 1.0)
angle = face.roll
# Apply rotation to the bounding box corners
rect = ((center[0], center[1]), (size[0], size[1]), angle)
box = cv2.boxPoints(rect)
box = box.astype(int)
color = generate_color(face.track_id)
# Draw the rotated bounding box
cv2.drawContours(frame, [box], 0, (100, 180, 29), 2)
cv2.drawContours(frame, [box], 0, color, 4)
# Draw landmarks
lmk = session.get_face_dense_landmark(face)
for x, y in lmk.astype(int):
cv2.circle(frame, (x, y), 0, (220, 100, 0), 2)
cv2.circle(frame, (x, y), 0, color, 4)
# Draw track ID at the top of the bounding box
text = f"ID: {face.track_id}"
text_size, _ = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, 0.6, 2)
text_x = min(box[:, 0])
text_y = min(box[:, 1]) - 10
if text_y < 0:
text_y = min(box[:, 1]) + text_size[1] + 10
cv2.putText(frame, text, (text_x, text_y), cv2.FONT_HERSHEY_SIMPLEX, 0.6, color, 2)
if show:
cv2.imshow("Face Tracker", frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break # Exit loop if 'q' is pressed.
if out:
out_video.write(frame)
# Cleanup: release video capture and close any open windows.
cap.release()
if out:
out_video.release()
cv2.destroyAllWindows()
print("Released all resources and closed windows.")