Update inspireface to 1.2.0

This commit is contained in:
Jingyu
2025-03-25 00:51:26 +08:00
parent 977ea6795b
commit ca64996b84
388 changed files with 28584 additions and 13036 deletions

View File

@@ -44,7 +44,7 @@ BreakConstructorInitializersBeforeComma: false
BreakConstructorInitializers: BeforeColon
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: true
ColumnLimit: 100
ColumnLimit: 150
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: true

View File

@@ -3,7 +3,12 @@ cmake-build-debug/*
build/*
test_*.rknn
test_zip_*
test_res/*
test_res/pack/*
test_res/save/*
!test_res/save/.gitkeep
# !test_res/save/video_frames/
test_res/save/video_frames/*
!test_res/save/video_frames/.gitkeep
resource/*
pack/*
*.zip
@@ -14,3 +19,12 @@ pack/*
.vscode/*
build_local/*
local_build/*
cpp/inspireface/information.h
cpp/inspireface/version.txt
.DS_Store
._.DS_Store
**/.DS_Store
**/._.DS_Store
.rknpu2_cache/
test_res/data/video_frames/
*.mp4

View File

@@ -1,16 +1,16 @@
cmake_minimum_required(VERSION 3.10)
project(InspireFace)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3")
# Current version
set(INSPIRE_FACE_VERSION_MAJOR 1)
set(INSPIRE_FACE_VERSION_MINOR 1)
set(INSPIRE_FACE_VERSION_PATCH 7)
set(INSPIRE_FACE_VERSION_MINOR 2)
set(INSPIRE_FACE_VERSION_PATCH 0)
# Converts the version number to a string
string(CONCAT INSPIRE_FACE_VERSION_MAJOR_STR ${INSPIRE_FACE_VERSION_MAJOR})
@@ -18,9 +18,27 @@ string(CONCAT INSPIRE_FACE_VERSION_MINOR_STR ${INSPIRE_FACE_VERSION_MINOR})
string(CONCAT INSPIRE_FACE_VERSION_PATCH_STR ${INSPIRE_FACE_VERSION_PATCH})
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cpp/inspireface/information.h.in ${CMAKE_CURRENT_SOURCE_DIR}/cpp/inspireface/information.h)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cpp/inspireface/version.txt.in ${CMAKE_CURRENT_SOURCE_DIR}/cpp/inspireface/version.txt)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/python/version.txt.in ${CMAKE_CURRENT_SOURCE_DIR}/python/version.txt)
# Creates a package config file
configure_file("${InspireFace_SOURCE_DIR}/cpp/inspireface/cmake/templates/InspireFaceConfig.cmake.in" "${CMAKE_BINARY_DIR}/install/InspireFaceConfig.cmake" @ONLY)
# Check that the 3rdparty folder exists
if(NOT EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/3rdparty")
message(STATUS "3rdparty directory not found, cloning from github...")
# Execute git clone and directly specify it in the current directory
execute_process(
COMMAND git clone --recurse-submodules https://github.com/tunmx/inspireface-3rdparty.git "${CMAKE_CURRENT_SOURCE_DIR}/3rdparty"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE GIT_RESULT
)
if(NOT GIT_RESULT EQUAL "0")
message(FATAL_ERROR "Failed to clone 3rdparty repository")
else()
message(STATUS "Successfully cloned 3rdparty repository")
endif()
else()
message(STATUS "3rdparty directory already exists")
endif()
# Set the ISF_THIRD_PARTY_DIR variable to allow it to be set externally from the command line, or use the default path if it is not set
set(ISF_THIRD_PARTY_DIR "${CMAKE_CURRENT_SOURCE_DIR}/3rdparty" CACHE PATH "Path to the third-party libraries directory")
@@ -49,10 +67,20 @@ endif()
set(TARGET_PLATFORM "drawin-x86" CACHE STRING "Target platform")
# TensorRT dependency configuration
set(ISF_ENABLE_TENSORRT OFF CACHE BOOL "Enable TensorRT Inference")
set(TENSORRT_ROOT "/usr/local/TensorRT" CACHE PATH "Path to TensorRT installation")
if(ISF_ENABLE_TENSORRT)
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/toolchain)
include(FindTensorRT)
add_definitions("-DISF_ENABLE_TENSORRT")
add_definitions("-DINFERENCE_WRAPPER_ENABLE_TENSORRT")
endif()
# RKNN dependency configuration
option(ISF_ENABLE_RKNN "Use RKNPU." OFF)
set(ISF_RK_DEVICE_TYPE "RV1109RV1126" CACHE STRING "Type of the device")
set(ISF_RK_COMPILER_TYPE "armhf" CACHE STRING "Type of the compiler")
if (ISF_ENABLE_RKNN)
add_definitions("-DISF_ENABLE_RKNN")
# Device list
@@ -70,8 +98,46 @@ if (ISF_ENABLE_RKNN)
endif()
# Result
message(STATUS "Use ${ISF_RKNPU_MAJOR}")
if(ISF_RK_DEVICE_TYPE STREQUAL "RV1106")
set(ISF_RKNPU_RV1106 ON)
add_definitions("-DISF_RKNPU_RV1106")
endif()
endif ()
# Enable Rockchip RGA
option(ISF_ENABLE_RGA "Use Rockchip RGA." OFF)
if(ISF_ENABLE_RGA)
if(NOT ISF_ENABLE_RKNN)
message(FATAL_ERROR "RGA requires RKNN to be enabled. Please enable ISF_ENABLE_RKNN first.")
endif()
if(NOT ISF_RKNPU_MAJOR STREQUAL "rknpu2")
message(FATAL_ERROR "RGA requires RKNPU2. Please use a device that supports RKNPU2.")
endif()
add_definitions("-DISF_ENABLE_RGA")
if(ANDROID)
set(PLAT Android)
else()
set(PLAT Linux)
endif()
set(RGA_LIBS ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/librga/${PLAT}/${ISF_RK_COMPILER_TYPE}/librga.a)
set(RGA_INCLUDE_DIRS ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/librga/include)
endif()
option(ISF_ENABLE_COST_TIME "Enable cost time." OFF)
if(ISF_ENABLE_COST_TIME)
add_definitions("-DISF_ENABLE_COST_TIME")
endif()
# Apple device dedicated expansion services, support for some models coreml reasoning and so on
set(ISF_ENABLE_APPLE_EXTENSION OFF CACHE BOOL "Enable Apple device extension services, such as coreml, etc.")
if(ISF_ENABLE_APPLE_EXTENSION)
add_definitions("-DISF_ENABLE_APPLE_EXTENSION")
add_definitions("-DINFERENCE_WRAPPER_ENABLE_COREML")
endif()
# Platform configuration
option(ISF_BUILD_LINUX_ARM7 "Platform Armv7." OFF)
option(ISF_BUILD_LINUX_AARCH64 "Platform Armv8." OFF)
@@ -93,62 +159,100 @@ option(ISF_BUILD_WITH_SAMPLE "Open Build Sample Exec." ON)
set(SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/cpp/)
# OpenCV dependency configuration
if (APPLE)
if (IOS)
option(ISF_NEVER_USE_OPENCV "Never use opencv." ON)
if(ISF_NEVER_USE_OPENCV)
set(INSPIRECV_BACKEND_OPENCV OFF)
set(INSPIRECV_BACKEND_OKCV_USE_OPENCV OFF)
set(INSPIRECV_BACKEND_OKCV_USE_OPENCV_IO OFF)
set(INSPIRECV_BACKEND_OKCV_USE_OPENCV_GUI OFF)
endif()
option(ISF_ENABLE_OPENCV "Depends on opencv." OFF)
if(INSPIRECV_BACKEND_OPENCV OR INSPIRECV_BACKEND_OKCV_USE_OPENCV OR INSPIRECV_BACKEND_OKCV_USE_OPENCV_IO OR INSPIRECV_BACKEND_OKCV_USE_OPENCV_GUI)
set(ISF_ENABLE_OPENCV ON)
endif()
include_directories(${ISF_THIRD_PARTY_DIR}/InspireCV/3rdparty/Eigen-3.4.0-Headers)
if(APPLE)
if(IOS)
add_definitions(-DTARGET_OS_IOS)
message(IOS_3RDPARTY=${IOS_3RDPARTY})
set(CMAKE_XCODE_ATTRIBUTE_ONLY_ACTIVE_ARCH NO)
set(CMAKE_XCODE_ATTRIBUTE_ENABLE_BITCODE NO)
add_definitions(-DTARGET_OS_IOS)
message(IOS_3RDPARTY=${IOS_3RDPARTY})
set(CMAKE_XCODE_ATTRIBUTE_ONLY_ACTIVE_ARCH NO)
set(CMAKE_XCODE_ATTRIBUTE_ENABLE_BITCODE NO)
link_directories(${IOS_3RDPARTY})
include_directories(${IOS_3RDPARTY}/opencv2.framework)
set(MNN_FRAMEWORK_PATH "${IOS_3RDPARTY}/MNN.framework")
include_directories("${MNN_FRAMEWORK_PATH}/")
set(OpenCV_DIR "${IOS_3RDPARTY}/opencv2.framework")
# find_package(OpenCV REQUIRED)
link_directories(${IOS_3RDPARTY})
else()
message("Use apple device")
set(PLAT darwin)
find_package(OpenCV REQUIRED)
endif ()
endif()
else()
if (ISF_BUILD_LINUX_ARM7 OR ISF_BUILD_LINUX_AARCH64)
set(DISABLE_GUI ON)
add_definitions("-DDISABLE_GUI")
# set(OpenCV_DIR ${ISF_THIRD_PARTY_DIR}/opencv/opencv-linux-armhf/share/OpenCV)
# set(OpenCV_STATIC_INCLUDE_DIR ${PATH_3RDPARTY}/opencv/opencv-linux-armhf/include/)
if (ISF_RK_DEVICE_TYPE STREQUAL "RV1109RV1126" AND ISF_ENABLE_RKNN)
# In special cases, specialize for that version
message("The OpenCV that builds the RV1109RV1126 version depends on is specialized!")
set(OpenCV_DIR ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/opencv/3.4.5/opencv-linux-armhf/share/OpenCV)
set(OpenCV_STATIC_INCLUDE_DIR ${PATH_3RDPARTY}/inspireface-precompile/opencv/3.4.5/opencv-linux-armhf/include/)
set(PLAT linux-arm7)
message("Use linux device")
set(PLAT linux)
endif()
# OpenCV dependency configuration
if(ISF_ENABLE_OPENCV)
if (APPLE)
if (IOS)
link_directories(${IOS_3RDPARTY})
include_directories(${IOS_3RDPARTY}/opencv2.framework)
set(OpenCV_DIR "${IOS_3RDPARTY}/opencv2.framework")
else()
if (VERSION_MAJOR STREQUAL "3")
set(CV_CMAKE_FOLDER share/OpenCV)
elseif(VERSION_MAJOR STREQUAL "4")
set(CV_CMAKE_FOLDER lib/cmake/opencv4)
endif ()
if(ISF_BUILD_LINUX_ARM7)
set(PLAT linux-arm7)
message("The OpenCV that builds the gnueabihf version depends on is specialized!")
message("Use apple device")
set(PLAT darwin)
find_package(OpenCV REQUIRED)
endif ()
else()
if (ISF_BUILD_LINUX_ARM7 OR ISF_BUILD_LINUX_AARCH64)
set(DISABLE_GUI ON)
add_definitions("-DDISABLE_GUI")
# set(OpenCV_DIR ${ISF_THIRD_PARTY_DIR}/opencv/opencv-linux-armhf/share/OpenCV)
# set(OpenCV_STATIC_INCLUDE_DIR ${PATH_3RDPARTY}/opencv/opencv-linux-armhf/include/)
if (ISF_RK_DEVICE_TYPE STREQUAL "RV1109RV1126" AND ISF_ENABLE_RKNN)
# In special cases, specialize for that version
message("The OpenCV that builds the RV1109RV1126 version depends on is specialized!")
set(OpenCV_DIR ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/opencv/3.4.5/opencv-linux-armhf/share/OpenCV)
set(OpenCV_STATIC_INCLUDE_DIR ${PATH_3RDPARTY}/inspireface-precompile/opencv/3.4.5/opencv-linux-armhf/include/)
elseif(ISF_BUILD_LINUX_AARCH64)
set(PLAT linux-aarch64)
message("The OpenCV that builds the aarch64 version depends on is specialized!")
set(OpenCV_DIR ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/opencv/3.4.5/opencv-linux-aarch64/share/OpenCV)
set(OpenCV_STATIC_INCLUDE_DIR ${PATH_3RDPARTY}/inspireface-precompile/opencv/3.4.5/opencv-linux-aarch64/include/)
set(PLAT linux-arm7)
else()
if (VERSION_MAJOR STREQUAL "3")
set(CV_CMAKE_FOLDER share/OpenCV)
elseif(VERSION_MAJOR STREQUAL "4")
set(CV_CMAKE_FOLDER lib/cmake/opencv4)
endif ()
if(ISF_BUILD_LINUX_ARM7)
set(PLAT linux-arm7)
message("The OpenCV that builds the gnueabihf version depends on is specialized!")
set(OpenCV_DIR ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/opencv/3.4.5/opencv-linux-armhf/share/OpenCV)
set(OpenCV_STATIC_INCLUDE_DIR ${PATH_3RDPARTY}/inspireface-precompile/opencv/3.4.5/opencv-linux-armhf/include/)
elseif(ISF_BUILD_LINUX_AARCH64)
set(PLAT linux-aarch64)
message("The OpenCV that builds the aarch64 version depends on is specialized!")
set(OpenCV_DIR ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/opencv/3.4.5/opencv-linux-aarch64/share/OpenCV)
set(OpenCV_STATIC_INCLUDE_DIR ${PATH_3RDPARTY}/inspireface-precompile/opencv/3.4.5/opencv-linux-aarch64/include/)
endif()
endif()
endif()
else ()
set(PLAT linux)
else ()
set(PLAT linux)
endif ()
find_package(OpenCV REQUIRED)
endif ()
find_package(OpenCV REQUIRED)
endif ()
# InspireCV dependency configuration
set(INSPIRECV_BUILD_OBJECT_LIBS OFF CACHE BOOL "Build InspireCV as object library")
add_subdirectory(${ISF_THIRD_PARTY_DIR}/InspireCV EXCLUDE_FROM_ALL)
set(INSPIRECV_INCLUDE_PATH "${ISF_THIRD_PARTY_DIR}/InspireCV/include")
set(INSPIRECV_LIBS InspireCV)
# If you need using CUDA-enabled MNN, you need to manually configure the pre-compiled CUDA-enabled MNN library path
set(ISF_LINUX_MNN_CUDA "" CACHE STRING "Path to CUDA directory")
@@ -175,6 +279,15 @@ elseif(DEFINED MNN_STATIC_PATH)
set(MNN_LIBS "${MNN_STATIC_PATH}/lib/libMNN.a")
elseif(IOS)
message(Build iOS)
elseif(DEFINED ISF_MNN_CUSTOM_SOURCE)
message("Using custom external MNN source path: ${ISF_MNN_CUSTOM_SOURCE}")
# In particular, rknpu2 uses a lower version of mnn
set(MNN_BUILD_SHARED_LIBS OFF CACHE BOOL "Build MNN as a shared library")
add_subdirectory(${ISF_MNN_CUSTOM_SOURCE} EXCLUDE_FROM_ALL)
set(MNN_INCLUDE_DIRS "${ISF_MNN_CUSTOM_SOURCE}/include")
set(MNN_LIBS MNN)
else ()
# Default or fallback case for MNN setup
message("Default or fallback case for MNN setup")
@@ -206,6 +319,7 @@ if (ISF_BUILD_WITH_TEST)
endif ()
# Print Message
message(STATUS ">>>>>>>>>>>>>")
message(STATUS "InspireFace Project Global:")
message(STATUS "\t CMAKE_SYSTEM_NAME: ${CMAKE_SYSTEM_NAME}")
message(STATUS "\t CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
@@ -216,10 +330,24 @@ message(STATUS "\t ISF_ENABLE_RKNN: ${ISF_ENABLE_RKNN}")
if (ISF_ENABLE_RKNN)
message(STATUS "\t ISF_RKNPU_MAJOR: ${ISF_RKNPU_MAJOR}")
message(STATUS "\t ISF_RK_DEVICE_TYPE: ${ISF_RK_DEVICE_TYPE}")
message(STATUS "\t ISF_RK_COMPILER_TYPE: ${ISF_RK_COMPILER_TYPE}")
if(ISF_RKNPU_RV1106)
message(STATUS "\t ISF_RKNPU_RV1106: ${ISF_RKNPU_RV1106}")
endif()
endif ()
message(STATUS "\t ISF_BUILD_LINUX_ARM7: ${ISF_BUILD_LINUX_ARM7}")
message(STATUS "\t ISF_BUILD_LINUX_AARCH64: ${ISF_BUILD_LINUX_AARCH64}")
message(STATUS "\t ISF_BUILD_WITH_TEST: ${ISF_BUILD_WITH_TEST}")
message(STATUS "\t ISF_BUILD_WITH_SAMPLE: ${ISF_BUILD_WITH_SAMPLE}")
message(STATUS "\t OpenCV_DIR: ${OpenCV_DIR}")
if(ISF_ENABLE_OPENCV)
message(STATUS "\t OpenCV: Enabled")
else()
message(STATUS "\t OpenCV: Disabled")
endif()
if(ISF_ENABLE_APPLE_EXTENSION)
message(STATUS "\t ISF_ENABLE_APPLE_EXTENSION: ${ISF_ENABLE_APPLE_EXTENSION}")
endif()
message(STATUS "\t CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX}")

View File

@@ -1,7 +1,11 @@
# InspireFace
[![GitHub release](https://img.shields.io/github/v/release/HyperInspire/InspireFace.svg?style=for-the-badge&color=blue)](https://github.com/HyperInspire/InspireFace/releases/latest)
[![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)
[![test](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=test)](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/test_ubuntu_x86_Pikachu.yaml?&style=for-the-badge&label=test)
[![GitHub release](https://img.shields.io/github/v/release/HyperInspire/InspireFace.svg?style=for-the-badge&color=blue&label=Github+release&logo=github)](https://github.com/HyperInspire/InspireFace/releases/latest)
[![Model](https://img.shields.io/github/v/release/HyperInspire/InspireFace.svg?style=for-the-badge&color=blue&label=Model+Zoo&logo=github)](https://github.com/HyperInspire/InspireFace/releases/tag/v1.x)
[![pypi](https://img.shields.io/pypi/v/inspireface.svg?style=for-the-badge&color=orange&label=PYPI+release&logo=python)](https://pypi.org/project/inspireface/)
[![JitPack](https://img.shields.io/jitpack/v/github/HyperInspire/inspireface-android-sdk?style=for-the-badge&color=green&label=JitPack&logo=android)](https://jitpack.io/#HyperInspire/inspireface-android-sdk)
[![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=building&logo=cmake)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml)
[![test](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=testing&logo=c)](https://github.com/HyperInspire/InspireFace/actions/workflows/test_ubuntu_x86_Pikachu.yaml)
InspireFace is a cross-platform face recognition SDK developed in C/C++, supporting multiple operating systems and various backend types for inference, such as CPU, GPU, and NPU.
@@ -13,7 +17,23 @@ Please contact [contact@insightface.ai](mailto:contact@insightface.ai?subject=In
## Change Logs
**`2024-10-09`** Enhanced system-level resource monitoring, added statistical information for session and image stream creation.
**`2025-03-16`** Acceleration using NVIDIA-GPU (**CUDA**) devices is already supported.
**`2025-03-09`** Release of android sdk in JitPack.
**`2025-02-20`** Upgrade the face landmark model.
**`2025-01-21`** Update all models to t3 and add tool to convert cosine similarity to percentage.
**`2025-01-08`** Support inference on Rockchip devices **RK3566/RK3568** NPU.
**`2024-12-25`** Add support for optional **RKRGA** image acceleration processing on Rockchip devices.
**`2024-12-22`** Started adapting for multiple Rockchip devices with NPU support, beginning with **RV1103/RV1106** support.
**`2024-12-10`** Added support for quick installation via Python package manager.
**`2024-10-09`** Added system resource monitoring and session statistics.
**`2024-09-30`** Fixed some bugs in the feature hub.
@@ -33,10 +53,66 @@ Please contact [contact@insightface.ai](mailto:contact@insightface.ai?subject=In
**`2024-06-18`** Added face detection feature with tracking-by-detection mode.
**`2024-06-01`** Adapted for accelerated inference on CUDA-enabled devices.
## Quick Start
## 1. Preparation
### 1.1. Clone 3rdparty
For Python users on Linux and MacOS, InspireFace can be quickly installed via pip:
```bash
pip install inspireface
```
_Windows support is **not available yet**, but will be coming soon!_
After installation, you can use inspireface like this:
```Python
import cv2
import inspireface as isf
# Create a session with optional features
opt = isf.HF_ENABLE_NONE
session = isf.InspireFaceSession(opt, isf.HF_DETECT_MODE_ALWAYS_DETECT)
# Load the image using OpenCV.
image = cv2.imread(image_path)
# Perform face detection on the image.
faces = session.face_detection(image)
for face in faces:
x1, y1, x2, y2 = face.location
rect = ((x1, y1), (x2, y2), face.roll)
# Calculate center, size, and angle
center = ((x1 + x2) / 2, (y1 + y2) / 2)
size = (x2 - x1, y2 - y1)
angle = face.roll
# Apply rotation to the bounding box corners
rect = ((center[0], center[1]), (size[0], size[1]), angle)
box = cv2.boxPoints(rect)
box = box.astype(int)
# Draw the rotated bounding box
cv2.drawContours(image, [box], 0, (100, 180, 29), 2)
cv2.imshow("face detection", image)
cv2.waitKey(0)
cv2.destroyAllWindows()
```
The project is currently in a rapid iteration phase; **before each update**, please pull the latest model from the remote repository!
```python
import inspireface
for model in ["Pikachu", "Megatron"]:
inspireface.pull_latest_model(model)
```
More examples can be found in the [python](python/) directory.
## Preparation
### Clone 3rdparty
Clone the `3rdparty` repository from the remote repository into the root directory of the project. Note that this repository contains some submodules. When cloning, you should use the `--recurse-submodules` parameter, or after entering the directory, use `git submodule update --init --recursive` to fetch and synchronize the latest submodules:
@@ -44,7 +120,7 @@ Clone the `3rdparty` repository from the remote repository into the root directo
# Must enter this directory
cd InspireFace
# Clone the repository and pull submodules
git clone --recurse-submodules https://github.com/HyperInspire/3rdparty.git
git clone --recurse-submodules https://github.com/tunmx/inspireface-3rdparty.git 3rdparty
```
If you need to update the `3rdparty` repository to ensure it is current, or if you didn't use the `--recursive` parameter during the initial pull, you can run `git submodule update --init --recursive`:
@@ -53,7 +129,7 @@ If you need to update the `3rdparty` repository to ensure it is current, or if y
# Must enter this directory
cd InspireFace
# If you're not using recursive pull
git clone https://github.com/HyperInspire/3rdparty.git
git clone https://github.com/tunmx/inspireface-3rdparty.git 3rdparty
cd 3rdparty
git pull
@@ -61,22 +137,41 @@ git pull
git submodule update --init --recursive
```
### 1.2. Downloading Model Package Files
### Downloading Model Package Files
You can download the model package files containing models and configurations needed for compilation from [Google Drive](https://drive.google.com/drive/folders/1krmv9Pj0XEZXR1GRPHjW_Sl7t4l0dNSS?usp=sharing) and extract them to any location.
You can download the model package files containing models and configurations needed for compilation from [Release Page](https://github.com/HyperInspire/InspireFace/releases/tag/v1.x) and extract them to any location.
### 1.3. Installing OpenCV
If you intend to use the SDK locally or on a server, ensure that OpenCV is installed on the host device beforehand to enable successful linking during the compilation process. For cross-compilation targets like Android or ARM embedded boards, you can use the pre-compiled OpenCV libraries provided by **3rdparty/inspireface-precompile/opencv/**.
You can use the **command/download_models_general.sh** command to download resource files, which will be downloaded to the **test_res/pack** directory. This way, when running the Test program, it can access and read the resource files from this path by default.
### 1.4. Installing MNN
The project is currently in a rapid iteration phase, **before each update**, please pull the latest model from the remote side!
```bash
# Download lightweight resource files for mobile device
bash command/download_models_general.sh Pikachu
# Download resource files for mobile device or PC/server
bash command/download_models_general.sh Megatron
# Download resource files for RV1109
bash command/download_models_general.sh Gundam_RV1109
# Download resource files for RV1106
bash command/download_models_general.sh Gundam_RV1106
# Download resource files for RK356X
bash command/download_models_general.sh Gundam_RK356X
# Download resource files for RK3588
bash command/download_models_general.sh Gundam_RK3588
# Download resource files for NVIDIA-GPU Device(TensorRT)
bash command/download_models_general.sh Megatron_TRT
# Download all model files
bash command/download_models_general.sh
```
### Installing MNN
The '**3rdparty**' directory already includes the MNN library and specifies a particular version as the stable version. If you need to enable or disable additional configuration options during compilation, you can refer to the CMake Options provided by MNN. If you need to use your own precompiled version, feel free to replace it.
### 1.5. Requirements
### Requirements
- CMake (version 3.10 or higher)
- OpenCV (version 3.5 or higher)
- Use the specific OpenCV-SDK supported by each target platform such as Android, iOS, and Linux.
- NDK (version 16 or higher, only required for Android)
- NDK (version 16 or higher, only required for Android) [**Optional**]
- MNN (version 1.4.0 or higher)
- C++ Compiler
- Either GCC or Clang can be used (macOS does not require additional installation as Xcode is included)
@@ -84,21 +179,21 @@ The '**3rdparty**' directory already includes the MNN library and specifies a pa
- Note that in some distributions, GCC (GNU C Compiler) and G++ (GNU C++ Compiler) are installed separately.
- For instance, on Ubuntu, you need to install both gcc and g++
- Recommended Clang version is 3.9 or higher
- arm-linux-gnueabihf (for RV1109/RV1126)
- arm-linux-gnueabihf (for RV1109/RV1126) [**Optional**]
- Prepare the cross-compilation toolchain in advance, such as gcc-arm-8.3-2019.03-x86_64-arm-linux-gnueabihf
- CUDA (version 10.1 or higher)
- CUDA (version 11.x or higher) [**Optional**]
- GPU-based inference requires installing NVIDIA's CUDA dependencies on the device.
- TensorRT (version 10 or higher) [**Optional**]
- Eigen3
- If you need to use the tracking-by-detection feature, you must have Eigen3 installed in advance.
- RKNN
- RKNN [**Optional**]
- Adjust and select versions currently supported for specific requirements.
## 2. Compilation
## Compilation
CMake options are used to control the various details of the compilation phase. Please select according to your actual requirements. [CMake Option](doc/CMake-Option.md).
### 2.1. Local Compilation
Make sure OpenCV is installed, you can begin the compilation process. If you are using macOS or Linux, you can quickly compile using the shell scripts provided in the `command` folder at the project root:
### Local Compilation
If you are using macOS or Linux, you can quickly compile using the shell scripts provided in the `command` folder at the project root:
```bash
cd InspireFace/
# Execute the local compilation script
@@ -117,17 +212,17 @@ inspireface-linux
- **libInspireFace.so**: Compiled dynamic linking library.
- **inspireface.h**: Header file definition.
- **herror.h**: Reference error number definition.
### 2.2. Cross Compilation
Cross compilation requires you to prepare the target platform's cross-compilation toolchain on the host machine in advance. Here, compiling for Rockchip's embedded devices RV1109/RV1126 is used as an example:
### Cross Compilation
Cross compilation requires you to prepare the target platform's cross-compilation toolchain on the host machine in advance. Here, compiling for Rockchip's embedded devices RV1106 is used as an example:
```bash
# Set the path for the cross-compilation toolchain
export ARM_CROSS_COMPILE_TOOLCHAIN=YOUR_DIR/gcc-arm-8.3-2019.03-x86_64-arm-linux-gnueabihf
# Execute the cross-compilation script for RV1109/RV1126
bash command/build_cross_rv1109rv1126_armhf.sh
export ARM_CROSS_COMPILE_TOOLCHAIN=YOUR_DIR/arm-rockchip830-linux-uclibcgnueabihf
# Execute the cross-compilation script for RV1106
bash command/build_cross_rv1106_armhf_uclibc.sh
```
After the compilation is complete, you can find the compiled results in the `build/inspireface-linux-armv7-rv1109rv1126-armhf` directory.
After the compilation is complete, you can find the compiled results in the `build/inspireface-linux-armv7-rv1106-armhf-uclibc` directory.
### 2.3. iOS Compilation
### iOS Compilation
To compile for iOS, ensure you are using a Mac device. The script will automatically download third-party dependencies into the `.macos_cache` directory.
@@ -137,27 +232,71 @@ bash command/build_ios.sh
After the compilation is complete, `inspireface.framework` will be placed in the `build/inspireface-ios` directory.
### 2.4. Supported Platforms and Architectures
### Android Compilation
You can compile for Android using the following command, but first you need to set your Android NDK path:
```
export ANDROID_NDK=YOUR_ANDROID_NDK_PATH
bash command/build_android.sh
```
After the compilation is complete, arm64-v8a and armeabi-v7a libraries will be placed in the `build/inspireface-android` directory.
### Linux-based NVIDIA GPU Acceleration with TensorRT Compilation
If you want to use NVIDIA GPU devices for accelerated inference on Linux, you need to install **CUDA**, **cuDNN**, and **TensorRT-10** on your device, and configure the relevant environment variables.
```bash
# Example, Change to your TensorRT-10 path
export TENSORRT_ROOT=/user/tunm/software/TensorRT-10
```
Before compiling, please ensure that your related environments such as CUDA and TensorRT-10 are available. If you encounter issues with finding CUDA libraries during the compilation process, you may need to check whether the relevant environment variables have been configured: `CUDA_TOOLKIT_ROOT_DIR`, `CUDA_CUDART_LIBRARY`.
```bash
bash command/build_linux_tensorrt.sh
```
Additionally, you can use **NVIDIA's Docker images** for compilation. For example, to compile using a **CUDA 12** and **TensorRT-10** image on Ubuntu 22.04, you can execute the following commands:
```bash
docker-compose up build-tensorrt-cuda12-ubuntu22
```
If you want to use pre-compiled libraries, you can use **[FindTensorRT.cmake](toolchain/FindTensorRT.cmake)** to create links to CUDA and TensorRT.
### Supported Platforms and Architectures
We have completed the adaptation and testing of the software across various operating systems and CPU architectures. This includes compatibility verification for platforms such as Linux, macOS, iOS, and Android, as well as testing for specific hardware support to ensure stable operation in diverse environments.
| **No.** | **Operating System** | **CPU Architecture** | **Special Device Support** | **Adapted** | **Passed Tests** |
| ------- | -------------------- | --------------------- | -------------------------- | ----------- | ---------------- |
| 1 | **Linux** | ARMv7 | - | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) | ![test](https://img.shields.io/badge/OFFLINE-PASSING-blue?style=for-the-badge) |
| 2 | | ARMv8 | - | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) | ![test](https://img.shields.io/badge/OFFLINE-PASSING-blue?style=for-the-badge) |
| 3 | | x86/x86_64 | - | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) | [![test](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/test_ubuntu_x86_Pikachu.yaml?style=for-the-badge&label=Test&color=blue)](https://github.com/HyperInspire/InspireFace/actions/workflows/test_ubuntu_x86_Pikachu.yaml) |
| 4 | | ARMv7 | RV1109RV1126 | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) | ![test](https://img.shields.io/badge/OFFLINE-PASSING-blue?style=for-the-badge) |
| 5 | | x86/x86_64 | CUDA | ![build](https://img.shields.io/badge/OFFLINE-PASSING-green?style=for-the-badge) | ![test](https://img.shields.io/badge/OFFLINE-PASSING-blue?style=for-the-badge) |
| 6 | **macOS** | Intel x86 | - | ![build](https://img.shields.io/badge/OFFLINE-PASSING-green?style=for-the-badge) | ![test](https://img.shields.io/badge/OFFLINE-PASSING-blue?style=for-the-badge) |
| 7 | | Apple Silicon | - | ![build](https://img.shields.io/badge/OFFLINE-PASSING-green?style=for-the-badge) | ![test](https://img.shields.io/badge/OFFLINE-PASSING-blue?style=for-the-badge) |
| 8 | **iOS** | ARM | - | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) | ![test](https://img.shields.io/badge/OFFLINE-PASSING-blue?style=for-the-badge) |
| 9 | **Android** | ARMv7 | - | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) | |
| 10 | | ARMv8 | - | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?&style=for-the-badge&label=build)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) | |
| No. | Platform | Architecture<sup><br/>(CPU) | Device<sup><br/>(Special) | **Supported** | Passed Tests | Release<sup><br/>(Online) |
| ------- | -------------------- | --------------------- | -------------------------- | :-----------: | :----------------: | :----------------: |
| 1 | **Linux**<sup><br/>(CPU) | ARMv7 | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 2 | | ARMv8 | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 3 | | x86/x86_64 | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 4 | **Linux**<sup><br/>(Rockchip) | ARMv7 | RV1109/RV1126 | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 5 | | ARMv7 | RV1103/RV1106 | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 6 | | ARMv8 | RK3566/RK3568 | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 7 | | ARMv8 | RK3588 | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 8 | **Linux**<sup><br/>(MNN_CUDA) | x86/x86_64 | NVIDIA-GPU | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - |
| 9 | **Linux**<sup><br/>(CUDA) | x86/x86_64 | NVIDIA-GPU | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 10 | **MacOS** | Intel | CPU/Metal/**ANE** | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 11 | | Apple Silicon | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 12 | **iOS** | ARM | CPU/Metal/**ANE** | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 13 | **Android** | ARMv7 | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 14 | | ARMv8 | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 15 | **Android**<sup><br/>(Rockchip) | ARMv8 | RK3566/RK3568 | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 16 | | ARMv8 | RK3588 | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![build](https://img.shields.io/github/actions/workflow/status/HyperInspire/InspireFace/release-sdks.yaml?label=✓&labelColor=success&color=success&failedLabel=✗&failedColor=critical&logo=github&logoColor=white)](https://github.com/HyperInspire/InspireFace/actions/workflows/release-sdks.yaml) |
| 17 | **HarmonyOS** | ARMv8 | - | - | - | - |
| 18 | **Linux**<sup><br/>(Jetson series) | ARMv8 | Jetson series | - | - | - |
- Complete compilation scripts and successful compilation.
- Pass unit tests on physical devices.
- Meet all performance benchmarks in tests.
- **Device**: Some special device support, primarily focused on computing power devices.
- **Supported**: The solution has been fully developed and successfully verified on offline devices.
- **Passed Tests**: The feature has at least **passed unit tests** on offline devices.
- **Release**: The solution is already supported and has been successfully compiled and released through **[GitHub Actions](https://github.com/HyperInspire/InspireFace/actions/workflows/built_release_from_docker.yaml)**.
### 2.5. Multi-platform compilation using Docker
### Multi-platform compilation using Docker
We offer a method for rapid multi-platform compilation using Docker, provided that Docker is installed beforehand, and the appropriate commands are executed:
```Bash
@@ -165,23 +304,32 @@ We offer a method for rapid multi-platform compilation using Docker, provided th
docker-compose up build-ubuntu18
# Build armv7 cross-compile
build-cross-armv7-armhf
docker-compose up build-cross-armv7-armhf
# Build armv7 with support for RV1109/RV1126 device NPU cross-compile
docker-compose up build-cross-rv1109rv1126-armhf
# Build armv7 with support for RV1106 device NPU cross-compile
docker-compose up build-cross-rv1106-armhf-uclibc
# Build armv8 with support for RK356x device NPU cross-compile
docker-compose up build-cross-rk356x-aarch64
# Build Android with support arm64-v8a and armeabi-v7a
docker-compose up build-cross-android
# Build the TensorRT backend based on CUDA 12 and Ubuntu 22.04
docker-compose up build-tensorrt-cuda12-ubuntu22
# Build all
docker-compose up
```
## 3. Example
### 3.1. C/C++ Sample
## Example
### C/C++ Sample
To integrate InspireFace into a C/C++ project, you simply need to link the InspireFace library and include the appropriate header files. Below is a basic example demonstrating face detection:
```cpp
```c
HResult ret;
// The resource file must be loaded before it can be used
ret = HFLaunchInspireFace(packPath);
@@ -190,37 +338,37 @@ if (ret != HSUCCEED) {
return ret;
}
// Enable the functions in the pipeline: mask detection, live detection, and face quality detection
// Enable the functions in the pipeline: mask detection, live detection, and face quality
// detection
HOption option = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_LIVENESS;
// Non-video or frame sequence mode uses IMAGE-MODE, which is always face detection without tracking
HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
// Non-video or frame sequence mode uses IMAGE-MODE, which is always face detection without
// tracking
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
// Maximum number of faces detected
HInt32 maxDetectNum = 5;
HInt32 maxDetectNum = 20;
// Face detection image input level
HInt32 detectPixelLevel = 160;
// Handle of the current face SDK algorithm context
HFSession session = {0};
ret = HFCreateInspireFaceSessionOptional(option, detMode, maxDetectNum, -1, -1, &session);
ret = HFCreateInspireFaceSessionOptional(option, detMode, maxDetectNum, detectPixelLevel, -1, &session);
if (ret != HSUCCEED) {
std::cout << "Create FaceContext error: " << ret << std::endl;
return ret;
}
HFSessionSetTrackPreviewSize(session, detectPixelLevel);
HFSessionSetFilterMinimumFacePixelSize(session, 4);
// Load a image
cv::Mat image = cv::imread(sourcePath);
if (image.empty()) {
HFImageBitmap image;
ret = HFCreateImageBitmapFromFilePath(sourcePath, 3, &image);
if (ret != HSUCCEED) {
std::cout << "The source entered is not a picture or read error." << std::endl;
return 1;
return ret;
}
// Prepare an image parameter structure for configuration
HFImageData imageParam = {0};
imageParam.data = image.data; // Data buffer
imageParam.width = image.cols; // Target view width
imageParam.height = image.rows; // Target view width
imageParam.rotation = HF_CAMERA_ROTATION_0; // Data source rotate
imageParam.format = HF_STREAM_BGR; // Data source format
// Create an image data stream
HFImageStream imageHandle = {0};
ret = HFCreateImageStream(&imageParam, &imageHandle);
ret = HFCreateImageStreamFromImageBitmap(image, rotation_enum, &imageHandle);
if (ret != HSUCCEED) {
std::cout << "Create ImageStream error: " << ret << std::endl;
return ret;
@@ -237,11 +385,17 @@ if (ret != HSUCCEED) {
auto faceNum = multipleFaceData.detectedNum;
std::cout << "Num of face: " << faceNum << std::endl;
// The memory must be freed at the end of the program
ret = HFReleaseImageBitmap(image);
if (ret != HSUCCEED) {
printf("Release image bitmap error: %lu\n", ret);
return ret;
}
ret = HFReleaseImageStream(imageHandle);
if (ret != HSUCCEED) {
printf("Release image stream error: %lu\n", ret);
}
// The memory must be freed at the end of the program
ret = HFReleaseInspireFaceSession(session);
if (ret != HSUCCEED) {
printf("Release session error: %lu\n", ret);
@@ -250,10 +404,25 @@ if (ret != HSUCCEED) {
```
For more examples, you can refer to the `cpp/sample` sub-project located in the root directory. You can compile these sample executables by enabling the `ISF_BUILD_WITH_SAMPLE` option during the compilation process.
- **More detailed cases**: [C/C++ Sample](cpp/sample/api/)
**Note**: For each error code feedback, you can click on this [link](doc/Error-Feedback-Codes.md) to view detailed explanations.
### 3.2. Python Native Sample
We provide a Python API that allows for more efficient use of the InspireFace library. After compiling the dynamic link library, you need to either symlink or copy it to the `python/inspireface/modules/core` directory within the root directory. You can then start testing by navigating to the **[python/](python/)** directory. Your Python environment will need to have some dependencies installed:
### Python Native Sample
The Python implementation is compiled based on InspireFace source code, and is integrated using a native interface approach.
#### Use pip to install InspireFace
You can use pip to install the InspireFace Python package:
```bash
pip install inspireface
```
#### Python Native Sample
We provide a Python API that allows for more efficient use of the InspireFace library. After compiling the dynamic link library, you need to either symlink or copy it to the `python/inspireface/modules/core` directory within the root directory. You can then start testing by navigating to the **[python](python/)** directory. Your Python environment will need to have some dependencies installed:
- python >= 3.7
- opencv-python
@@ -263,26 +432,23 @@ We provide a Python API that allows for more efficient use of the InspireFace li
- ctypes
```bash
# Use a symbolic link
ln -s YOUR_BUILD_DIR/install/InspireFace/lib/libInspireFace.so python/inspireface/modules/core
ln -s YOUR_BUILD_DIR/install/InspireFace/lib/libInspireFace.so python/inspireface/modules/core/PLATFORM/ARCH/
# Navigate to the sub-project directory
cd python
```
Import inspireface for a quick facial detection example:
```python
import cv2
import inspireface as ifac
from inspireface.param import *
import inspireface as isf
# Step 1: Initialize the SDK and load the algorithm resource files.
resource_path = "pack/Pikachu"
ret = ifac.launch(resource_path)
# Step 1: Initialize the SDK globally (only needs to be called once per application)
ret = isf.reload()
assert ret, "Launch failure. Please ensure the resource path is correct."
# Optional features, loaded during session creation based on the modules specified.
opt = HF_ENABLE_NONE
session = ifac.InspireFaceSession(opt, HF_DETECT_MODE_IMAGE)
opt = isf.HF_ENABLE_NONE
session = isf.InspireFaceSession(opt, isf.HF_DETECT_MODE_ALWAYS_DETECT)
# Load the image using OpenCV.
image = cv2.imread(image_path)
@@ -305,17 +471,111 @@ for idx, face in enumerate(faces):
```
In the project, more usage examples are provided:
- sample_face_detection.py: Facial detection example
- sample_face_recognition.py: Facial recognition example
- sample_face_track_from_video.py: Facial tracking from video stream example
- `sample_face_detection.py`: Facial detection example
- `sample_face_recognition.py`: Facial recognition example
- `sample_face_track_from_video.py`: Facial tracking from video stream example
## 4. Test
In the project, there is a subproject called cpp/test. To compile it, you need to enable the ISF_BUILD_WITH_TEST switch, which will allow you to compile executable programs for testing.
### Java and Android platform API
We have an [Android SDK project](https://github.com/HyperInspire/inspireface-android-sdk) that integrates pre-compiled dynamic libraries, and you can use it directly.
Precompiled library support:
- arm64-v8a
- armeabi-v7a
#### a. Quick to use in Android
We released InspireFace's Android SDK on JitPack, which you can incorporate into your android projects in the following ways.
- Step 1. Add the JitPack repository to your build file add it in your root **build.gradle** at the end of repositories:
```groovy
allprojects {
repositories {
...
maven { url 'https://jitpack.io' }
}
}
```
- Step 2. Add the dependency
```groovy
dependencies {
implementation 'com.github.HyperInspire:inspireface-android-sdk:1.2.0'
}
```
#### b. Use the Android example project
We have prepared an [Android SDK project](https://github.com/HyperInspire/inspireface-android-sdk). You can download library from the [Release Page](https://github.com/HyperInspire/InspireFace/releases) or compile the Android library yourself and place it in the `inspireface/libs` directory of the Android sample project. You can compile and run this project using Android Studio.
```bash
inspireface-android-sdk/inspireface/libs
├── arm64-v8a
│   └── libInspireFace.so
└── armeabi-v7a
└── libInspireFace.so
```
You need to get the resource file from the [Release Page](https://github.com/HyperInspire/InspireFace/releases) and place it in the `asset/inspireface` directory in your Android project:
```
asset/
└── inspireface/
└── Pikachu
```
#### How to use the Android/Java API
We provide a Java API for Android devices, which is implemented using the Java Native Interface (JNI).
```java
// Launch InspireFace, only need to call once
boolean launchStatus = InspireFace.GlobalLaunch(this, InspireFace.PIKACHU);
if (!launchStatus) {
Log.e(TAG, "Failed to launch InspireFace");
}
// Create a ImageStream
ImageStream stream = InspireFace.CreateImageStreamFromBitmap(img, InspireFace.CAMERA_ROTATION_0);
// Create a session
CustomParameter parameter = InspireFace.CreateCustomParameter()
.enableRecognition(true)
.enableFaceQuality(true)
.enableFaceAttribute(true)
.enableInteractionLiveness(true)
.enableLiveness(true)
.enableMaskDetect(true);
Session session = InspireFace.CreateSession(parameter, InspireFace.DETECT_MODE_ALWAYS_DETECT, 10, -1, -1);
// Execute face detection
MultipleFaceData multipleFaceData = InspireFace.ExecuteFaceTrack(session, stream);
if (multipleFaceData.detectedNum > 0) {
// Get face feature
FaceFeature feature = InspireFace.ExtractFaceFeature(session, stream, multipleFaceData.tokens[0]);
// ....
}
// ....
// Release resource
InspireFace.ReleaseSession(session);
InspireFace.ReleaseImageStream(stream);
// Global release
InspireFace.GlobalRelease();
```
## Test
In the project, there is a subproject called `cpp/test`. To compile it, you need to enable the `ISF_BUILD_WITH_TEST` switch, which will allow you to compile executable programs for testing.
```bash
cmake -DISF_BUILD_WITH_TEST=ON ..
```
If you need to run test cases, you will need to download the required [resource files](https://drive.google.com/drive/folders/1krmv9Pj0XEZXR1GRPHjW_Sl7t4l0dNSS?usp=sharing): **test_res**. Unzip the test_res folder. The directory structure of test_res should be prepared as follows before testing:
To run the test modules in the project, first check if the resource files exist in the test_res/pack directory. If they don't exist, you can either execute **command/download_models_general.sh** to download the required files, or download the files from the [Release Page](https://github.com/HyperInspire/InspireFace/releases/tag/v1.x) and manually place them in this directory.
```bash
@@ -356,30 +616,60 @@ bash ci/quick_test_local.sh
Every time code is committed, tests are run on GitHub Actions.
## 5. Function Support
The following functionalities and technologies are currently supported.
## Features
The following Features and technologies are currently supported.
| Index | Function | Adaptation | Note |
| -- | --- | --- | --- |
| 1 | Face Detection | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | SCRFD |
| 2 | Facial Landmark Detection | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | HyperLandmark |
| 3 | Face Recognition | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | ArcFace |
| 4 | Face Tracking | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | |
| 5 | Mask Detection | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | |
| 6 | Silent Liveness Detection | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | MiniVision |
| 7 | Face Quality Detection | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | |
| 8 | Face Pose Estimation | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | |
| 9 | Face Attribute Prediction | ![Static Badge](https://img.shields.io/badge/STABLE-blue?style=for-the-badge) | Age, Race, Gender |
| 10 | Cooperative Liveness Detection | ![Static Badge](https://img.shields.io/badge/DEVELOP-green?style=for-the-badge) | Blink |
| Feature | CPU | RKNPU<sup><br/>(RV1109/1126) | RKNPU<sup><br/>(RV1103/1106) | RKNPU<sup><br/>(RK3566/3568/3588) | ANE<sup><br/>(MacOS/iOS) | GPU<sup><br/>(TensorRT) |
| :---: | :---: | :---: | :---: | :---: | :---: | :---: |
| Face Detection | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Landmark | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Face Embeddings | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Face Comparison | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | - | - | - | - |
| Face Recognition | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Alignment | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | - | - | - | - |
| Tracking | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Mask Detection | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | - |
| Silent Liveness | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | - |
| Face Quality | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Pose Estimation | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Face Attribute | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Cooperative Liveness | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) |
| Embedding Management | [![](https://img.shields.io/badge/%E2%9C%93-green)](#) | - | - | - | - | - |
- Some models and features that do **not support** NPU or GPU will **automatically use CPU** for computation when running the program.
## 6. Models Package List
## Resource Package List
For different scenarios, we currently provide several Packs, each containing multiple models and configurations. The package file is placed in the **pack** subdirectory under the **test_res** directory.
For different scenarios, we currently provide several Packs, each containing multiple models and configurations.
| Name | Supported Devices | Note | Link |
| --- | --- | --- | --- |
| Pikachu | CPU | Lightweight edge-side models | [GDrive](https://drive.google.com/drive/folders/1krmv9Pj0XEZXR1GRPHjW_Sl7t4l0dNSS?usp=sharing) |
| Megatron | CPU, GPU | Mobile and server models | [GDrive](https://drive.google.com/drive/folders/1krmv9Pj0XEZXR1GRPHjW_Sl7t4l0dNSS?usp=sharing) |
| Gundam-RV1109 | RKNPU | Supports RK1109 and RK1126 | [GDrive](https://drive.google.com/drive/folders/1krmv9Pj0XEZXR1GRPHjW_Sl7t4l0dNSS?usp=sharing) |
| Name | Supported Devices | Note | Last Update | Link |
| --- | --- | --- | --- | --- |
| Pikachu | CPU | Lightweight edge-side models | Feb 20, 2025 | [Download](https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Pikachu) |
| Megatron | CPU, GPU | Mobile and server models | Feb 20, 2025 | [Download](https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Megatron) |
| Megatron_TRT | GPU | Cuda-based server models | Mar 16, 2025 | [Download](https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Megatron_TRT) |
| Gundam-RV1109 | RKNPU | Supports RK1109 and RK1126 | Feb 20, 2025 | [Download](https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RV1109) |
| Gundam-RV1106 | RKNPU | Supports RV1103 and RV1106 | Feb 20, 2025 | [Download](https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RV1106) |
| Gundam-RK356X | RKNPU | Supports RK3566 and RK3568 | Feb 20, 2025 | [Download](https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RK356X) |
| Gundam-RK3588 | RKNPU | Supports RK3588 | Mar 16, 2025 | [Download](https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RK3588) |
## Short-Term Plan
- [x] Add TensorRT backend support.
- [ ] Add RKNPU backend support for Android.
- [ ] Example app project for Android and iOS samples.
- [ ] Add the batch forward feature.
## Acknowledgement
InspireFace is built on the following libraries:
- [MNN](https://github.com/alibaba/MNN)
- [RKNN](https://github.com/rockchip-linux/rknn-toolkit)
- [RKNN2](https://github.com/airockchip/rknn-toolkit2.git)
- [librga](https://github.com/airockchip/librga.git)
- [Eigen](https://eigen.tuxfamily.org/index.php?title=Main_Page)
- [sqlite](https://www.sqlite.org/index.html)
- [sqlite-vec](https://github.com/asg017/sqlite-vec)
- [Catch2](https://github.com/catchorg/Catch2)
- [yaml-cpp](https://github.com/jbeder/yaml-cpp)
- [TensorRT](https://github.com/NVIDIA/TensorRT)

View File

@@ -0,0 +1,16 @@
.idea/
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties

View File

@@ -0,0 +1 @@
/build

View File

@@ -0,0 +1,41 @@
// Gradle build script for the InspireFace Android example application.
plugins {
// Android application plugin, resolved from the version catalog (libs.versions.toml).
alias(libs.plugins.android.application)
}
android {
// Application namespace; also used as the default package for generated R classes.
namespace 'com.example.inspireface_example'
// SDK version the app is compiled against.
compileSdk 34
defaultConfig {
applicationId "com.example.inspireface_example"
// Minimum supported Android version (API 24 = Android 7.0).
minSdk 24
targetSdk 34
versionCode 1
versionName "1.0"
// Runner used for the on-device instrumented tests under androidTest/.
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
// Code shrinking disabled; ProGuard rules are still listed for when it is enabled.
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
// Java 8 source/target compatibility for the app's Java sources.
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation libs.appcompat
implementation libs.material
implementation libs.activity
implementation libs.constraintlayout
// Unit and instrumented test dependencies.
testImplementation libs.junit
androidTestImplementation libs.ext.junit
androidTestImplementation libs.espresso.core
// InspireFace Android SDK, resolved from the version catalog.
implementation libs.inspireface.android.sdk
}

View File

@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@@ -0,0 +1,26 @@
package com.example.inspireface_example;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
 * On-device instrumented test, executed under the Android instrumentation runner.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    /** Verifies that the instrumentation target reports this app's package name. */
    @Test
    public void useAppContext() {
        // Context of the app under test, obtained from the instrumentation registry.
        final Context targetContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        assertEquals("com.example.inspireface_example", targetContext.getPackageName());
    }
}

View File

@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
<!-- Phone information -->
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<!-- ************************************* -->
<!-- Write access to external storage (used by the sample to save output files). -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<!-- Create and delete file permissions in SD card -->
<uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS"
tools:ignore="ProtectedPermissions" />
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.InspireFaceExample"
tools:targetApi="31">
<!-- MainActivity is the launcher entry point of the example app. -->
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@@ -0,0 +1,196 @@
package com.example.inspireface_example;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.util.Log;
import androidx.activity.EdgeToEdge;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.graphics.Insets;
import androidx.core.view.ViewCompat;
import androidx.core.view.WindowInsetsCompat;
import com.insightface.sdk.inspireface.InspireFace;
import com.insightface.sdk.inspireface.base.*;
import com.insightface.sdk.inspireface.utils.SDKUtils;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
    private final String TAG = "InspireFace";

    /**
     * End-to-end smoke test of the InspireFace SDK: queries the SDK version,
     * enables the FeatureHub, launches the engine, runs face tracking on a
     * bundled test image, extracts and searches features, exercises the
     * analysis pipeline (liveness / quality / mask / interaction / attribute),
     * and saves an aligned face crop. All results are written to logcat.
     */
    void test() {
        InspireFaceVersion version = InspireFace.QueryInspireFaceVersion();
        Log.i(TAG, "InspireFace Version: " + version.major + "." + version.minor + "." + version.patch + " " + version.information);
        String dbPath = "/storage/emulated/0/Android/data/com.example.inspireface_example/files/f.db";
        // FeatureHub: in-memory store (persistence off), search threshold 0.42,
        // exhaustive search mode, auto-increment primary keys.
        FeatureHubConfiguration configuration = InspireFace.CreateFeatureHubConfiguration()
                .setEnablePersistence(false)
                .setPersistenceDbPath(dbPath)
                .setSearchThreshold(0.42f)
                .setSearchMode(InspireFace.SEARCH_MODE_EXHAUSTIVE)
                .setPrimaryKeyMode(InspireFace.PK_AUTO_INCREMENT);
        boolean enableStatus = InspireFace.FeatureHubDataEnable(configuration);
        Log.d(TAG, "Enable feature hub data status: " + enableStatus);
        InspireFace.FeatureHubFaceSearchThresholdSetting(0.42f);
        boolean launchStatus = InspireFace.GlobalLaunch(this, InspireFace.PIKACHU);
        Log.d(TAG, "Launch status: " + launchStatus);
        if (!launchStatus) {
            Log.e(TAG, "Failed to launch InspireFace");
            return;
        }
        // Session with every analysis capability enabled, always-detect mode,
        // tracking at most 10 faces.
        CustomParameter parameter = InspireFace.CreateCustomParameter()
                .enableRecognition(true)
                .enableFaceQuality(true)
                .enableFaceAttribute(true)
                .enableInteractionLiveness(true)
                .enableLiveness(true)
                .enableMaskDetect(true);
        Session session = InspireFace.CreateSession(parameter, InspireFace.DETECT_MODE_ALWAYS_DETECT, 10, -1, -1);
        Log.i(TAG, "session handle: " + session.handle);
        InspireFace.SetTrackPreviewSize(session, 320);
        InspireFace.SetFaceDetectThreshold(session, 0.5f);
        InspireFace.SetFilterMinimumFacePixelSize(session, 0);
        Bitmap img = SDKUtils.getImageFromAssetsFile(this, "inspireface/kun.jpg");
        ImageStream stream = InspireFace.CreateImageStreamFromBitmap(img, InspireFace.CAMERA_ROTATION_0);
        Log.i(TAG, "stream handle: " + stream.handle);
        InspireFace.WriteImageStreamToFile(stream, "/storage/emulated/0/Android/data/com.example.inspireface_example/files/out.jpg");
        MultipleFaceData multipleFaceData = InspireFace.ExecuteFaceTrack(session, stream);
        Log.i(TAG, "Face num: " + multipleFaceData.detectedNum);
        if (multipleFaceData.detectedNum > 0) {
            // Dense landmarks of the first detected face.
            Point2f[] lmk = InspireFace.GetFaceDenseLandmarkFromFaceToken(multipleFaceData.tokens[0]);
            for (Point2f p : lmk) {
                Log.i(TAG, p.x + ", " + p.y);
            }
            FaceFeature feature = InspireFace.ExtractFaceFeature(session, stream, multipleFaceData.tokens[0]);
            Log.i(TAG, "Feature size: " + feature.data.length);
            String strFt = "";
            for (int i = 0; i < feature.data.length; i++) {
                strFt = strFt + feature.data[i] + ", ";
            }
            Log.i(TAG, strFt);
            // Insert the same feature 10 times; FeatureHub allocates the IDs.
            for (int i = 0; i < 10; i++) {
                FaceFeatureIdentity identity = FaceFeatureIdentity.create(-1, feature);
                boolean succ = InspireFace.FeatureHubInsertFeature(identity);
                if (succ) {
                    Log.i(TAG, "Allocation ID: " + identity.id);
                }
            }
            FaceFeatureIdentity searched = InspireFace.FeatureHubFaceSearch(feature);
            Log.i(TAG, "Searched id: " + searched.id + ", Confidence: " + searched.searchConfidence);
            SearchTopKResults topKResults = InspireFace.FeatureHubFaceSearchTopK(feature, 10);
            for (int i = 0; i < topKResults.num; i++) {
                Log.i(TAG, "TopK id: " + topKResults.ids[i] + ", Confidence: " + topKResults.confidence[i]);
            }
            // Overwrite the record stored under id 8 with an all-zero vector.
            FaceFeature newFeature = new FaceFeature();
            Log.i(TAG, "Feature length: " + InspireFace.GetFeatureLength());
            newFeature.data = new float[InspireFace.GetFeatureLength()];
            FaceFeatureIdentity identity = FaceFeatureIdentity.create(8, newFeature);
            boolean updateSucc = InspireFace.FeatureHubFaceUpdate(identity);
            if (updateSucc) {
                Log.i(TAG, "Update feature success: " + 8);
            }
            boolean removeSucc = InspireFace.FeatureHubFaceRemove(4);
            if (removeSucc) {
                Log.i(TAG, "Remove feature success: " + 4);
            }
            SearchTopKResults topkAgn = InspireFace.FeatureHubFaceSearchTopK(feature, 10);
            for (int i = 0; i < topkAgn.num; i++) {
                // Bug fix: log this result set's own confidence values
                // (previously read topKResults.confidence by mistake).
                Log.i(TAG, "Agn TopK id: " + topkAgn.ids[i] + ", Confidence: " + topkAgn.confidence[i]);
            }
            // Id 4 was removed above, so this lookup is expected to yield null.
            FaceFeatureIdentity queryIdentity = InspireFace.FeatureHubGetFaceIdentity(4);
            if (queryIdentity != null) {
                Log.e(TAG, "query id: " + queryIdentity.id);
            }
            queryIdentity = InspireFace.FeatureHubGetFaceIdentity(2);
            if (queryIdentity != null) {
                strFt = "";
                for (int i = 0; i < queryIdentity.feature.data.length; i++) {
                    strFt = strFt + queryIdentity.feature.data[i] + ", ";
                }
                Log.i(TAG, "query id: " + queryIdentity.id);
                Log.i(TAG, strFt);
                float comp = InspireFace.FaceComparison(queryIdentity.feature, feature);
                Log.i(TAG, "Comparison: " + comp);
            }
            // Run the analysis pipeline on the tracked faces and dump each result.
            CustomParameter pipelineNeedParam = InspireFace.CreateCustomParameter()
                    .enableFaceQuality(true)
                    .enableLiveness(true)
                    .enableMaskDetect(true)
                    .enableFaceAttribute(true)
                    .enableInteractionLiveness(true);
            boolean succPipe = InspireFace.MultipleFacePipelineProcess(session, stream, multipleFaceData, pipelineNeedParam);
            if (succPipe) {
                Log.i(TAG, "Exec pipeline success");
                RGBLivenessConfidence rgbLivenessConfidence = InspireFace.GetRGBLivenessConfidence(session);
                Log.i(TAG, "rgbLivenessConfidence: " + rgbLivenessConfidence.confidence[0]);
                FaceQualityConfidence faceQualityConfidence = InspireFace.GetFaceQualityConfidence(session);
                Log.i(TAG, "faceQualityConfidence: " + faceQualityConfidence.confidence[0]);
                FaceMaskConfidence faceMaskConfidence = InspireFace.GetFaceMaskConfidence(session);
                Log.i(TAG, "faceMaskConfidence: " + faceMaskConfidence.confidence[0]);
                FaceInteractionState faceInteractionState = InspireFace.GetFaceInteractionStateResult(session);
                Log.i(TAG, "Left eye status confidence: " + faceInteractionState.leftEyeStatusConfidence[0]);
                Log.i(TAG, "Right eye status confidence: " + faceInteractionState.rightEyeStatusConfidence[0]);
                FaceInteractionsActions faceInteractionsActions = InspireFace.GetFaceInteractionActionsResult(session);
                Log.i(TAG, "Normal: " + faceInteractionsActions.normal[0]);
                Log.i(TAG, "Shake: " + faceInteractionsActions.shake[0]);
                Log.i(TAG, "Jaw open: " + faceInteractionsActions.jawOpen[0]);
                Log.i(TAG, "Head raise: " + faceInteractionsActions.headRaise[0]);
                Log.i(TAG, "Blink: " + faceInteractionsActions.blink[0]);
                FaceAttributeResult faceAttributeResult = InspireFace.GetFaceAttributeResult(session);
                Log.i(TAG, "Race: " + faceAttributeResult.race[0]);
                Log.i(TAG, "Gender: " + faceAttributeResult.gender[0]);
                Log.i(TAG, "Age bracket: " + faceAttributeResult.ageBracket[0]);
            } else {
                Log.e(TAG, "Exec pipeline fail");
            }
        }
        int count = InspireFace.FeatureHubGetFaceCount();
        Log.i(TAG, "Face count: " + count);
        // Bug fix: tokens[0] is only valid when at least one face was detected;
        // the original code dereferenced it unconditionally and would throw
        // ArrayIndexOutOfBoundsException on an image with no faces.
        if (multipleFaceData.detectedNum > 0) {
            Bitmap crop = InspireFace.GetFaceAlignmentImage(session, stream, multipleFaceData.tokens[0]);
            try {
                SDKUtils.saveBitmap("/storage/emulated/0/Android/data/com.example.inspireface_example/files/", "crop", crop);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        // Release native resources and shut the FeatureHub down.
        InspireFace.ReleaseImageStream(stream);
        InspireFace.ReleaseSession(session);
        InspireFace.FeatureHubDataDisable();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        EdgeToEdge.enable(this);
        setContentView(R.layout.activity_main);
        // Apply system-bar insets as padding so content is not drawn under them.
        ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main), (v, insets) -> {
            Insets systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars());
            v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom);
            return insets;
        });
        // Run the SDK smoke test once the view hierarchy is ready.
        test();
    }
}

View File

@@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

View File

@@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/main"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 982 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.6 KiB

View File

@@ -0,0 +1,7 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Base.Theme.InspireFaceExample" parent="Theme.Material3.DayNight.NoActionBar">
<!-- Customize your dark theme here. -->
<!-- <item name="colorPrimary">@color/my_dark_primary</item> -->
</style>
</resources>

View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>

View File

@@ -0,0 +1,3 @@
<resources>
<string name="app_name">InspireFace-Example</string>
</resources>

View File

@@ -0,0 +1,9 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Base.Theme.InspireFaceExample" parent="Theme.Material3.DayNight.NoActionBar">
<!-- Customize your light theme here. -->
<!-- <item name="colorPrimary">@color/my_light_primary</item> -->
</style>
<style name="Theme.InspireFaceExample" parent="Base.Theme.InspireFaceExample" />
</resources>

View File

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?><!--
Sample backup rules file; uncomment and customize as necessary.
See https://developer.android.com/guide/topics/data/autobackup
for details.
Note: This file is ignored for devices older than API 31
See https://developer.android.com/about/versions/12/backup-restore
-->
<full-backup-content>
<!--
<include domain="sharedpref" path="."/>
<exclude domain="sharedpref" path="device.xml"/>
-->
</full-backup-content>

View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?><!--
Sample data extraction rules file; uncomment and customize as necessary.
See https://developer.android.com/about/versions/12/backup-restore#xml-changes
for details.
-->
<data-extraction-rules>
<cloud-backup>
<!-- TODO: Use <include> and <exclude> to control what is backed up.
<include .../>
<exclude .../>
-->
</cloud-backup>
<!--
<device-transfer>
<include .../>
<exclude .../>
</device-transfer>
-->
</data-extraction-rules>

View File

@@ -0,0 +1,17 @@
package com.example.inspireface_example;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
    /** Sanity check: integer addition behaves as expected on the host JVM. */
    @Test
    public void addition_isCorrect() {
        final int expected = 4;
        final int actual = 2 + 2;
        assertEquals(expected, actual);
    }
}

View File

@@ -0,0 +1,5 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
plugins {
alias(libs.plugins.android.application) apply false
alias(libs.plugins.android.library) apply false
}

View File

@@ -0,0 +1,21 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. For more details, visit
# https://developer.android.com/r/tools/gradle-multi-project-decoupled-projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true

View File

@@ -0,0 +1,25 @@
[versions]
agp = "8.5.1"
inspirefaceAndroidSdk = "1.2.0"
junit = "4.13.2"
junitVersion = "1.1.5"
espressoCore = "3.5.1"
appcompat = "1.6.1"
material = "1.10.0"
activity = "1.8.0"
constraintlayout = "2.1.4"
[libraries]
inspireface-android-sdk = { module = "com.github.HyperInspire:inspireface-android-sdk", version.ref = "inspirefaceAndroidSdk" }
junit = { group = "junit", name = "junit", version.ref = "junit" }
ext-junit = { group = "androidx.test.ext", name = "junit", version.ref = "junitVersion" }
espresso-core = { group = "androidx.test.espresso", name = "espresso-core", version.ref = "espressoCore" }
appcompat = { group = "androidx.appcompat", name = "appcompat", version.ref = "appcompat" }
material = { group = "com.google.android.material", name = "material", version.ref = "material" }
activity = { group = "androidx.activity", name = "activity", version.ref = "activity" }
constraintlayout = { group = "androidx.constraintlayout", name = "constraintlayout", version.ref = "constraintlayout" }
[plugins]
android-application = { id = "com.android.application", version.ref = "agp" }
android-library = { id = "com.android.library", version.ref = "agp" }

View File

@@ -0,0 +1,6 @@
#Tue Nov 26 15:12:19 CST 2024
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@@ -0,0 +1,185 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# APP_HOME is the physical directory containing this script (symlinks resolved).
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a warning message.
warn () {
echo "$*"
}
# Print an error message and abort with exit code 1.
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper jar is resolved relative to APP_HOME.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
# No JAVA_HOME: fall back to 'java' on the PATH, or fail with a clear message.
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
# Rebuild the positional parameters from the converted argsN variables
# (supports up to 9 arguments under plain /bin/sh).
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# Replace this shell with the JVM running the Gradle wrapper main class.
exec "$JAVACMD" "$@"

View File

@@ -0,0 +1,89 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem No JAVA_HOME: probe for java.exe on the PATH; success jumps to :execute.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip surrounding quotes from JAVA_HOME before composing the java.exe path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@@ -0,0 +1,24 @@
// Gradle settings script: declares plugin/dependency repositories and the
// projects that make up this build.
pluginManagement {
repositories {
// Limit the Google repository to Android / Google / AndroidX artifact groups.
google {
content {
includeGroupByRegex("com\\.android.*")
includeGroupByRegex("com\\.google.*")
includeGroupByRegex("androidx.*")
}
}
mavenCentral()
gradlePluginPortal()
}
}
// All project dependencies must resolve from the repositories listed here;
// FAIL_ON_PROJECT_REPOS makes per-project repository declarations an error.
dependencyResolutionManagement {
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
google()
mavenCentral()
// JitPack serves com.github.* artifacts (e.g. the InspireFace Android SDK).
maven { url 'https://jitpack.io' }
}
}
rootProject.name = "InspireFace-Example"
include ':app'

View File

@@ -4,31 +4,15 @@
set -e
TARGET_DIR="test_res"
DOWNLOAD_URL="https://github.com/tunmx/inspireface-store/raw/main/resource/test_res-lite.zip"
ZIP_FILE="test_res-lite.zip"
BUILD_DIRNAME="ci_ubuntu18"
TEST_DIR="./build/${BUILD_DIRNAME}/test"
TEST_EXECUTABLE="./test/Test"
# Check if the target directory already exists
if [ ! -d "$TARGET_DIR" ]; then
echo "Directory '$TARGET_DIR' does not exist. Downloading..."
# Make dir
mkdir -p ${TARGET_DIR}/save/video_frames
# Download the dataset zip file
wget -q "$DOWNLOAD_URL" -O "$ZIP_FILE"
echo "Extracting '$ZIP_FILE' to '$TARGET_DIR'..."
# Unzip the downloaded file
unzip "$ZIP_FILE"
# Remove the downloaded zip file and unnecessary folders
rm "$ZIP_FILE"
rm -rf "__MACOSX"
echo "Download and extraction complete."
else
echo "Directory '$TARGET_DIR' already exists. Skipping download."
fi
# Download models
bash command/download_models_general.sh Pikachu
# Get the absolute path of the target directory
FULL_TEST_DIR="$(realpath ${TARGET_DIR})"
@@ -48,7 +32,6 @@ cmake -DCMAKE_BUILD_TYPE=Release \
-DISF_ENABLE_BENCHMARK=ON \
-DISF_ENABLE_USE_LFW_DATA=OFF \
-DISF_ENABLE_TEST_EVALUATION=OFF \
-DOpenCV_DIR=3rdparty/inspireface-precompile/opencv/4.5.1/opencv-ubuntu18-x86/lib/cmake/opencv4 \
-DISF_BUILD_SHARED_LIBS=OFF ../../
# Compile the project using 4 parallel jobs

View File

@@ -4,34 +4,8 @@
set -e
ROOT_DIR="$(pwd)"
TARGET_DIR="test_res"
DOWNLOAD_URL="https://github.com/tunmx/inspireface-store/raw/main/resource/test_res-lite.zip"
ZIP_FILE="test_res-lite.zip"
BUILD_DIRNAME="ubuntu18_shared"
# Check if the target directory already exists
if [ ! -d "$TARGET_DIR" ]; then
echo "Directory '$TARGET_DIR' does not exist. Downloading..."
# Download the dataset zip file
wget -q "$DOWNLOAD_URL" -O "$ZIP_FILE"
echo "Extracting '$ZIP_FILE' to '$TARGET_DIR'..."
# Unzip the downloaded file
unzip "$ZIP_FILE"
# Remove the downloaded zip file and unnecessary folders
rm "$ZIP_FILE"
rm -rf "__MACOSX"
echo "Download and extraction complete."
else
echo "Directory '$TARGET_DIR' already exists. Skipping download."
fi
# Get the absolute path of the target directory
FULL_TEST_DIR="$(realpath ${TARGET_DIR})"
# Create the build directory if it doesn't exist
mkdir -p build/${BUILD_DIRNAME}/
@@ -47,7 +21,6 @@ cmake -DCMAKE_BUILD_TYPE=Release \
-DISF_ENABLE_BENCHMARK=OFF \
-DISF_ENABLE_USE_LFW_DATA=OFF \
-DISF_ENABLE_TEST_EVALUATION=OFF \
-DOpenCV_DIR=3rdparty/inspireface-precompile/opencv/4.5.1/opencv-ubuntu18-x86/lib/cmake/opencv4 \
-DISF_BUILD_SHARED_LIBS=ON ../../
# Compile the project using 4 parallel jobs
@@ -57,7 +30,8 @@ make -j4
cd ${ROOT_DIR}
# Important: You must copy the compiled dynamic library to this path!
cp build/${BUILD_DIRNAME}/lib/libInspireFace.so python/inspireface/modules/core/
mkdir -p python/inspireface/modules/core/libs/linux/x64/
cp build/${BUILD_DIRNAME}/lib/libInspireFace.so python/inspireface/modules/core/libs/linux/x64/
# Install dependency
pip install opencv-python
@@ -67,5 +41,5 @@ pip install loguru
cd python/
# Run sample
python sample_face_detection.py ../test_res/pack/Pikachu ../test_res/data/bulk/woman.png
python sample_face_detection.py ../test_res/data/bulk/woman.png

View File

@@ -4,8 +4,8 @@
set -e
TARGET_DIR="test_res"
DOWNLOAD_URL="https://github.com/tunmx/inspireface-store/raw/main/resource/test_res-lite.zip"
ZIP_FILE="test_res-lite.zip"
DOWNLOAD_URL="https://github.com/tunmx/inspireface-store/raw/main/resource/test_res-lite2.zip"
ZIP_FILE="test_res-lite2.zip"
BUILD_DIRNAME="quick_test_build"
TEST_DIR="./build/${BUILD_DIRNAME}/test"
TEST_EXECUTABLE="./test/Test"

View File

@@ -98,6 +98,8 @@ build() {
cmake ${SCRIPT_DIR} \
-G "Unix Makefiles" \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_C_FLAGS="-g0 ${CMAKE_C_FLAGS}" \
-DCMAKE_CXX_FLAGS="-g0 ${CMAKE_CXX_FLAGS}" \
-DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK}/build/cmake/android.toolchain.cmake \
-DANDROID_TOOLCHAIN=clang \
-DANDROID_ABI=${arch} \
@@ -109,8 +111,7 @@ build() {
-DISF_ENABLE_BENCHMARK=OFF \
-DISF_ENABLE_USE_LFW_DATA=OFF \
-DISF_ENABLE_TEST_EVALUATION=OFF \
-DISF_BUILD_SHARED_LIBS=ON \
-DOpenCV_DIR=${OPENCV_DIR}
-DISF_BUILD_SHARED_LIBS=ON
make -j4
make install
popd

View File

@@ -0,0 +1,140 @@
#!/bin/bash
# Reshape the per-ABI Android output layout
# (<base>/<arch>/InspireFace/{lib,sample,test}) into a per-category layout
# (<base>/{lib,sample,test}/<arch>), then delete the original arch dirs.
#   $1 - base output path produced by the per-ABI builds
# Returns 1 (without exiting) if the base path does not exist.
reorganize_structure() {
    local base_path=$1
    # Define the new main directories
    local main_dirs=("lib" "sample" "test")
    # Check if the base path exists
    if [[ ! -d "$base_path" ]]; then
        echo "Error: The path '$base_path' does not exist."
        return 1
    fi
    # Create new main directories at the base path
    for dir in "${main_dirs[@]}"; do
        mkdir -p "$base_path/$dir"
    done
    # Find all architecture directories (e.g., arm64-v8a, armeabi-v7a)
    # NOTE(review): unquoted $(find ...) word-splits; assumes paths contain no spaces
    local arch_dirs=($(find "$base_path" -maxdepth 1 -type d -name "arm*"))
    for arch_dir in "${arch_dirs[@]}"; do
        # Get the architecture name (e.g., arm64-v8a)
        local arch=$(basename "$arch_dir")
        # Operate on each main directory
        for main_dir in "${main_dirs[@]}"; do
            # Create a specific directory for each architecture under the main directory
            mkdir -p "$base_path/$main_dir/$arch"
            # Selectively copy content based on the directory type
            case "$main_dir" in
                lib)
                    # Copy the lib directory
                    if [ -d "$arch_dir/InspireFace/lib" ]; then
                        cp -r "$arch_dir/InspireFace/lib/"* "$base_path/$main_dir/$arch/"
                    fi
                    ;;
                sample)
                    # Copy the sample directory
                    if [ -d "$arch_dir/InspireFace/sample" ]; then
                        cp -r "$arch_dir/InspireFace/sample/"* "$base_path/$main_dir/$arch/"
                    fi
                    ;;
                test)
                    # Copy the test directory
                    if [ -d "$arch_dir/InspireFace/test" ]; then
                        cp -r "$arch_dir/InspireFace/test/"* "$base_path/$main_dir/$arch/"
                    fi
                    ;;
            esac
        done
        # Copy version.txt file to the base path; each arch overwrites the
        # previous one, so the last arch built wins (identical across ABIs)
        if [ -f "$arch_dir/version.txt" ]; then
            cp -f "$arch_dir/version.txt" "$base_path/version.txt"
        fi
    done
    # Delete the original architecture directories
    for arch_dir in "${arch_dirs[@]}"; do
        rm -rf "$arch_dir"
    done
    echo "Reorganization complete."
}
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Configure, build and install InspireFace for one Android ABI.
#   $1 - Android ABI (e.g. arm64-v8a, armeabi-v7a)
#   $2 - Android NDK API level
# Requires ANDROID_NDK, SCRIPT_DIR and BUILD_FOLDER_PATH to be set by the caller.
build() {
    arch=$1
    NDK_API_LEVEL=$2
    mkdir -p ${BUILD_FOLDER_PATH}/${arch}
    pushd ${BUILD_FOLDER_PATH}/${arch}
    # -g0 strips debug info to keep the shared library small
    cmake ${SCRIPT_DIR} \
        -G "Unix Makefiles" \
        -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_C_FLAGS="-g0 ${CMAKE_C_FLAGS}" \
        -DCMAKE_CXX_FLAGS="-g0 ${CMAKE_CXX_FLAGS}" \
        -DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK}/build/cmake/android.toolchain.cmake \
        -DANDROID_TOOLCHAIN=clang \
        -DANDROID_ABI=${arch} \
        -DANDROID_NATIVE_API_LEVEL=${NDK_API_LEVEL} \
        -DANDROID_STL=c++_static \
        -DMNN_BUILD_FOR_ANDROID_COMMAND=true \
        -DISF_ENABLE_RKNN=ON \
        -DISF_ENABLE_RGA=OFF \
        -DISF_RK_DEVICE_TYPE=RK356X \
        -DISF_RKNPU_MAJOR=rknpu2 \
        -DISF_RK_COMPILER_TYPE=${arch} \
        -DISF_BUILD_WITH_SAMPLE=OFF \
        -DISF_BUILD_WITH_TEST=OFF \
        -DISF_ENABLE_BENCHMARK=OFF \
        -DISF_ENABLE_USE_LFW_DATA=OFF \
        -DISF_ENABLE_TEST_EVALUATION=OFF \
        -DISF_BUILD_SHARED_LIBS=ON
    make -j4
    make install
    popd
    # Flatten <build>/<arch>/install into <build>/<arch>
    move_install_files "${BUILD_FOLDER_PATH}/${arch}"
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
SCRIPT_DIR=$(pwd) # Project dir
BUILD_FOLDER_PATH="build/inspireface-android-rk356x-rk3588${TAG}"
# Build both Android ABIs against NDK API level 24, then merge the
# per-ABI output trees into one lib/sample/test layout
build arm64-v8a 24
build armeabi-v7a 24
reorganize_structure "${BUILD_FOLDER_PATH}"

View File

@@ -0,0 +1,74 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
SCRIPT_DIR=$(pwd) # Project dir
cd ${SCRIPT_DIR}
BUILD_FOLDER_PATH="build/inspireface-linux-aarch64-rk356x-rk3588${TAG}"
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# Cross-compile a static library for aarch64 Linux with RK356X/RK3588 NPU
# (rknpu2) and RGA support. Requires ARM_CROSS_COMPILE_TOOLCHAIN to point
# at the aarch64 GCC toolchain, e.g.:
# export ARM_CROSS_COMPILE_TOOLCHAIN=/host/software/gcc-linaro-6.3.1-2017.05-x86_64_aarch64-linux-gnu
cmake -DCMAKE_SYSTEM_NAME=Linux \
  -DCMAKE_BUILD_TYPE=Release \
  -DCMAKE_SYSTEM_VERSION=1 \
  -DCMAKE_SYSTEM_PROCESSOR=aarch64 \
  -DCMAKE_C_COMPILER=$ARM_CROSS_COMPILE_TOOLCHAIN/bin/aarch64-linux-gnu-gcc \
  -DCMAKE_CXX_COMPILER=$ARM_CROSS_COMPILE_TOOLCHAIN/bin/aarch64-linux-gnu-g++ \
  -DCMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -flax-vector-conversions" \
  -DTARGET_PLATFORM=armlinux \
  -DISF_BUILD_LINUX_AARCH64=ON \
  -DISF_BUILD_LINUX_ARM7=OFF \
  -DMNN_SEP_BUILD=off \
  -DISF_ENABLE_RKNN=ON \
  -DISF_RK_DEVICE_TYPE=RK356X \
  -DISF_RKNPU_MAJOR=rknpu2 \
  -DISF_RK_COMPILER_TYPE=aarch64 \
  -DISF_ENABLE_RGA=ON \
  -DISF_ENABLE_COST_TIME=OFF \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=OFF ${SCRIPT_DIR}
make -j4
make install
# Flatten the 'install' output into the build folder
move_install_files "$(pwd)"

View File

@@ -0,0 +1,106 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
SCRIPT_DIR=$(pwd) # Project dir
# Create .rknpu2_cache directory if it doesn't exist; the MNN source
# download is cached there across runs
CACHE_DIR="$(pwd)/.rknpu2_cache"
mkdir -p "$CACHE_DIR"
# Check if MNN-2.3.0 directory already exists
if [ ! -d "$CACHE_DIR/MNN-2.3.0" ]; then
    echo "Downloading MNN 2.3.0..."
    # Download MNN 2.3.0
    if ! wget -P "$CACHE_DIR" https://github.com/alibaba/MNN/archive/refs/tags/2.3.0.zip; then
        echo "Error: Failed to download MNN 2.3.0"
        exit 1
    fi
    # Extract the zip file
    cd "$CACHE_DIR"
    if ! unzip 2.3.0.zip; then
        echo "Error: Failed to extract MNN 2.3.0"
        exit 1
    fi
    # Remove the zip file
    rm 2.3.0.zip
    echo "MNN 2.3.0 downloaded and extracted"
else
    echo "MNN-2.3.0 already exists in cache"
fi
# Set absolute path to MNN source so the build uses this MNN instead of
# the bundled one
export ISF_MNN_CUSTOM_SOURCE="$CACHE_DIR/MNN-2.3.0"
echo "ISF_MNN_CUSTOM_SOURCE: ${ISF_MNN_CUSTOM_SOURCE}"
cd ${SCRIPT_DIR}
# Requires the RV1106 uClibc toolchain, e.g.:
# export ARM_CROSS_COMPILE_TOOLCHAIN=/root/arm-rockchip830-linux-uclibcgnueabihf/
BUILD_FOLDER_PATH="build/inspireface-linux-armv7-rv1106-armhf-uclibc${TAG}"
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# export cross_compile_toolchain=/home/jingyuyan/software/arm-rockchip830-linux-uclibcgnueabihf
cmake -DCMAKE_SYSTEM_NAME=Linux \
  -DCMAKE_BUILD_TYPE=Release \
  -DCMAKE_SYSTEM_VERSION=1 \
  -DCMAKE_SYSTEM_PROCESSOR=armv7 \
  -DCMAKE_C_COMPILER=$ARM_CROSS_COMPILE_TOOLCHAIN/bin/arm-rockchip830-linux-uclibcgnueabihf-gcc \
  -DCMAKE_CXX_COMPILER=$ARM_CROSS_COMPILE_TOOLCHAIN/bin/arm-rockchip830-linux-uclibcgnueabihf-g++ \
  -DCMAKE_CXX_FLAGS="${CMAKE_CXX_FLAGS} -flax-vector-conversions" \
  -DTARGET_PLATFORM=armlinux \
  -DISF_BUILD_LINUX_ARM7=ON \
  -DISF_MNN_CUSTOM_SOURCE=${ISF_MNN_CUSTOM_SOURCE} \
  -DMNN_SEP_BUILD=off \
  -DISF_ENABLE_RKNN=ON \
  -DISF_RK_DEVICE_TYPE=RV1106 \
  -DISF_RKNPU_MAJOR=rknpu2 \
  -DISF_RK_COMPILER_TYPE=armhf-uclibc \
  -DISF_ENABLE_RGA=ON \
  -DISF_ENABLE_COST_TIME=OFF \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=OFF ${SCRIPT_DIR}
make -j4
# NOTE(review): install/flatten steps are intentionally disabled here,
# unlike the sibling scripts — confirm whether this is still desired
# make install
# move_install_files "$(pwd)"

View File

@@ -25,7 +25,6 @@ move_install_files() {
# Define download URLs
MNN_IOS_URL="https://github.com/alibaba/MNN/releases/download/2.8.1/mnn_2.8.1_ios_armv82_cpu_metal_coreml.zip"
OPENCV_IOS_URL="https://github.com/opencv/opencv/releases/download/4.5.1/opencv-4.5.1-ios-framework.zip"
# Set the cache directory
MACOS_CACHE="$PWD/.macos_cache/"
@@ -74,7 +73,6 @@ download_and_unzip() {
download_and_unzip "$MNN_IOS_URL" "$MACOS_CACHE" "MNN.framework"
# Download and unzip OpenCV iOS package
download_and_unzip "$OPENCV_IOS_URL" "$MACOS_CACHE" "opencv2.framework"
if [ -n "$VERSION" ]; then
TAG="-$VERSION"
@@ -154,3 +152,5 @@ cat <<EOF >$FRAMEWORK_DIR/Resources/Info.plist
EOF
echo "Framework $FRAMEWORK_NAME.framework has been created at $FRAMEWORK_DIR"
cp -r $MACOS_CACHE/MNN.framework $BUILD_DIR/

View File

@@ -43,7 +43,6 @@ cmake -DCMAKE_SYSTEM_NAME=Linux \
-DISF_ENABLE_BENCHMARK=ON \
-DISF_ENABLE_USE_LFW_DATA=OFF \
-DISF_ENABLE_TEST_EVALUATION=OFF \
-DISF_ENABLE_TRACKING_BY_DETECTION=ON \
-DMNN_CUDA=ON \
-DISF_GLOBAL_INFERENCE_BACKEND_USE_MNN_CUDA=ON \
-DISF_LINUX_MNN_CUDA=/home/tunm/softwate/MNN-2.7.2/build_cuda ${SCRIPT_DIR}

View File

@@ -45,8 +45,7 @@ cmake -DCMAKE_SYSTEM_NAME=Linux \
-DISF_ENABLE_TEST_EVALUATION=ON \
-DMNN_CUDA=ON \
-DISF_GLOBAL_INFERENCE_BACKEND_USE_MNN_CUDA=ON \
-DISF_LINUX_MNN_CUDA=/host/softwate/MNN-2.7.2/build_cuda \
-DOpenCV_DIR=3rdparty/inspireface-precompile/opencv/4.5.1/opencv-ubuntu18-x86/lib/cmake/opencv4 ${SCRIPT_DIR}
-DISF_LINUX_MNN_CUDA=/host/softwate/MNN-2.7.2/build_cuda ${SCRIPT_DIR}
make -j4

View File

@@ -0,0 +1,51 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
# Build folder path
BUILD_FOLDER_PATH="build/inspireface-linux-x86-manylinux2014${TAG}/"
SCRIPT_DIR=$(pwd) # Project dir
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# Host build of the shared library only; samples/tests/benchmarks disabled
cmake -DCMAKE_BUILD_TYPE=Release \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=ON ${SCRIPT_DIR}
make -j4
make install
# Flatten the 'install' output into the build folder
move_install_files "$(pwd)"

View File

@@ -0,0 +1,118 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Produce a "cuda<ver>_ubuntu<ver>" tag for naming build artifacts.
# Honors a pre-set CUDA_TAG environment variable; otherwise probes the
# system for the CUDA version (nvcc, version.txt, /usr/local/cuda symlink)
# and the Ubuntu release. Missing components are rendered as "_none".
get_cuda_ubuntu_tag() {
    # If CUDA_TAG is set, use it
    if [ -n "${CUDA_TAG}" ]; then
        echo "${CUDA_TAG}"
        return 0
    fi
    # Get CUDA version
    CUDA_VERSION="_none"
    if command -v nvcc &> /dev/null; then
        # Try to get version from nvcc, e.g. "release 11.8" -> "118"
        CUDA_VERSION=$(nvcc --version 2>/dev/null | grep "release" | awk '{print $6}' | cut -d',' -f1 | tr -d '.')
        if [ -z "${CUDA_VERSION}" ]; then
            CUDA_VERSION="_none"
        fi
    elif [ -f "/usr/local/cuda/version.txt" ]; then
        # Get version from CUDA installation directory
        CUDA_VERSION=$(cat /usr/local/cuda/version.txt 2>/dev/null | grep "CUDA Version" | awk '{print $3}' | tr -d '.')
        if [ -z "${CUDA_VERSION}" ]; then
            CUDA_VERSION="_none"
        fi
    elif [ -d "/usr/local/cuda" ] && ls -l /usr/local/cuda 2>/dev/null | grep -q "cuda-"; then
        # Get version from the /usr/local/cuda symbolic link target (e.g. cuda-11.8)
        CUDA_LINK=$(ls -l /usr/local/cuda 2>/dev/null | grep -o "cuda-[0-9.]*" | head -n 1)
        CUDA_VERSION=$(echo "${CUDA_LINK}" | cut -d'-' -f2 | tr -d '.')
        if [ -z "${CUDA_VERSION}" ]; then
            CUDA_VERSION="_none"
        fi
    fi
    # Get Ubuntu version
    UBUNTU_VERSION="_none"
    if [ -f "/etc/os-release" ]; then
        # Check if it is Ubuntu
        if grep -q "Ubuntu" /etc/os-release 2>/dev/null; then
            UBUNTU_VERSION=$(grep "VERSION_ID" /etc/os-release 2>/dev/null | cut -d'"' -f2)
            if [ -z "${UBUNTU_VERSION}" ]; then
                UBUNTU_VERSION="_none"
            fi
        fi
    elif [ -f "/etc/lsb-release" ]; then
        # Fall back to lsb-release
        if grep -q "Ubuntu" /etc/lsb-release 2>/dev/null; then
            UBUNTU_VERSION=$(grep "DISTRIB_RELEASE" /etc/lsb-release 2>/dev/null | cut -d'=' -f2)
            if [ -z "${UBUNTU_VERSION}" ]; then
                UBUNTU_VERSION="_none"
            fi
        fi
    fi
    # Generate and return tag
    echo "cuda${CUDA_VERSION}_ubuntu${UBUNTU_VERSION}"
}
# Tag the build folder with the detected CUDA/Ubuntu versions
CUDA_TAG=$(get_cuda_ubuntu_tag)
echo "Cuda Tag: ${CUDA_TAG}"
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
SCRIPT_DIR=$(pwd)
BUILD_FOLDER_NAME="inspireface-linux-tensorrt-${CUDA_TAG}${TAG}"
mkdir -p build/${BUILD_FOLDER_NAME}
cd build/${BUILD_FOLDER_NAME}
# TENSORRT_ROOT must point at the TensorRT SDK installation
echo "TENSORRT_ROOT: ${TENSORRT_ROOT}"
cmake \
  -DCMAKE_BUILD_TYPE=Release \
  -DISF_BUILD_WITH_SAMPLE=ON \
  -DISF_BUILD_WITH_TEST=ON \
  -DISF_ENABLE_BENCHMARK=ON \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DTENSORRT_ROOT=${TENSORRT_ROOT} \
  -DISF_ENABLE_TENSORRT=ON ../..
make -j4
make install
# Only flatten the output when the build and install both succeeded
if [ $? -eq 0 ] && [ -d "$(pwd)/install" ]; then
    move_install_files "$(pwd)"
else
    echo "Build failed or the installation directory does not exist"
    exit 1
fi

View File

@@ -37,12 +37,11 @@ mkdir -p ${BUILD_FOLDER_PATH}
cd ${BUILD_FOLDER_PATH}
cmake -DCMAKE_BUILD_TYPE=Release \
-DISF_BUILD_WITH_SAMPLE=ON \
-DISF_BUILD_WITH_SAMPLE=OFF \
-DISF_BUILD_WITH_TEST=OFF \
-DISF_ENABLE_BENCHMARK=OFF \
-DISF_ENABLE_USE_LFW_DATA=OFF \
-DISF_ENABLE_TEST_EVALUATION=OFF \
-DOpenCV_DIR=3rdparty/inspireface-precompile/opencv/4.5.1/opencv-ubuntu18-x86/lib/cmake/opencv4 \
-DISF_BUILD_SHARED_LIBS=ON ${SCRIPT_DIR}
make -j4

View File

@@ -0,0 +1,50 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
BUILD_FOLDER_PATH="build/inspireface-macos-apple-silicon-arm64${TAG}/"
SCRIPT_DIR=$(pwd) # Project dir
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# Host build of the shared library only; samples/tests/benchmarks disabled
cmake -DCMAKE_BUILD_TYPE=Release \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=ON ${SCRIPT_DIR}
make -j4
make install
# Flatten the 'install' output into the build folder
move_install_files "$(pwd)"

View File

@@ -0,0 +1,50 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
BUILD_FOLDER_PATH="build/inspireface-macos-intel-x86-64${TAG}/"
SCRIPT_DIR=$(pwd) # Project dir
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# Host build of the shared library only; samples/tests/benchmarks disabled
cmake -DCMAKE_BUILD_TYPE=Release \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=ON ${SCRIPT_DIR}
make -j4
make install
# Flatten the 'install' output into the build folder
move_install_files "$(pwd)"

View File

@@ -0,0 +1,64 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
BUILD_FOLDER_PATH="build/inspireface-macos-apple-silicon-arm64${TAG}/"
SCRIPT_DIR=$(pwd) # Project dir
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# Build the shared library for Apple Silicon; samples/tests disabled
cmake -DCMAKE_BUILD_TYPE=Release \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=ON ${SCRIPT_DIR}
make -j4
make install
move_install_files "$(pwd)"
BUILD_DYLIB_PATH="$(pwd)/InspireFace/lib/libInspireFace.dylib"
# Copy the library into the python package so the wheel bundles it
DYLIB_DEST_PATH="${SCRIPT_DIR}/python/inspireface/modules/core/libs/darwin/arm64/"
mkdir -p ${DYLIB_DEST_PATH}
cp -r ${BUILD_DYLIB_PATH} ${DYLIB_DEST_PATH}
# Packaging toolchain
pip3 install setuptools wheel twine
PYTHON_PRJ_PATH=${SCRIPT_DIR}/python
cd ${PYTHON_PRJ_PATH}/
# Build the wheel
python3 setup.py bdist_wheel
echo "Build wheel for MacOS Arm64, Well Done!"

View File

@@ -0,0 +1,64 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
BUILD_FOLDER_PATH="build/inspireface-macos-intel-x86-64${TAG}/"
SCRIPT_DIR=$(pwd) # Project dir
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# Build the shared library for Intel macOS; samples/tests disabled
cmake -DCMAKE_BUILD_TYPE=Release \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=ON ${SCRIPT_DIR}
make -j4
make install
move_install_files "$(pwd)"
BUILD_DYLIB_PATH="$(pwd)/InspireFace/lib/libInspireFace.dylib"
# Copy the library into the python package so the wheel bundles it
DYLIB_DEST_PATH="${SCRIPT_DIR}/python/inspireface/modules/core/libs/darwin/x64/"
mkdir -p ${DYLIB_DEST_PATH}
cp -r ${BUILD_DYLIB_PATH} ${DYLIB_DEST_PATH}
# Packaging toolchain
pip3 install setuptools wheel twine
PYTHON_PRJ_PATH=${SCRIPT_DIR}/python
cd ${PYTHON_PRJ_PATH}/
# Build the wheel
python3 setup.py bdist_wheel
echo "Build wheel for MacOS x86_64, Well Done!"

View File

@@ -0,0 +1,69 @@
#!/bin/bash
# Reusable function to handle 'install' directory operations:
# flatten <root>/install into <root> and prune everything else.
#   $1 - build root directory that contains an 'install' subdirectory
# Exits the script with status 1 when the 'install' directory is missing.
move_install_files() {
    local root_dir="$1"
    local install_dir="$root_dir/install"
    # Step 1: Check if the 'install' directory exists
    if [ ! -d "$install_dir" ]; then
        echo "Error: 'install' directory does not exist in $root_dir"
        exit 1
    fi
    # Step 2: Delete all other files/folders except 'install'
    find "$root_dir" -mindepth 1 -maxdepth 1 -not -name "install" -exec rm -rf {} +
    # Step 3: Move all files from 'install' to the root directory.
    # Enumerate entries explicitly (including dot-files, which a bare '*'
    # glob would skip) so hidden files are moved too and the 'rmdir'
    # below cannot fail on a non-empty directory.
    local entry
    for entry in "$install_dir"/* "$install_dir"/.[!.]* "$install_dir"/..?*; do
        [ -e "$entry" ] || continue
        mv "$entry" "$root_dir"
    done
    # Step 4: Remove the empty 'install' directory
    rmdir "$install_dir"
    echo "Files from 'install' moved to $root_dir, and 'install' directory deleted."
}
# Derive an optional "-<version>" suffix for the output folder from $VERSION
if [ -n "$VERSION" ]; then
    TAG="-$VERSION"
else
    TAG=""
fi
BUILD_FOLDER_PATH="build/inspireface-linux-x86-manylinux2014${TAG}/"
SCRIPT_DIR=$(pwd) # Project dir
mkdir -p ${BUILD_FOLDER_PATH}
# shellcheck disable=SC2164
cd ${BUILD_FOLDER_PATH}
# Build the shared library; samples/tests disabled
cmake -DCMAKE_BUILD_TYPE=Release \
  -DISF_BUILD_WITH_SAMPLE=OFF \
  -DISF_BUILD_WITH_TEST=OFF \
  -DISF_ENABLE_BENCHMARK=OFF \
  -DISF_ENABLE_USE_LFW_DATA=OFF \
  -DISF_ENABLE_TEST_EVALUATION=OFF \
  -DISF_BUILD_SHARED_LIBS=ON ${SCRIPT_DIR}
make -j4
make install
move_install_files "$(pwd)"
BUILD_DYLIB_PATH="$(pwd)/InspireFace/lib/libInspireFace.so"
# Copy the library into the python package so the wheels bundle it
DYLIB_DEST_PATH="${SCRIPT_DIR}/python/inspireface/modules/core/libs/linux/x64/"
mkdir -p ${DYLIB_DEST_PATH}
cp -r ${BUILD_DYLIB_PATH} ${DYLIB_DEST_PATH}
PYTHON_PRJ_PATH=${SCRIPT_DIR}/python
cd ${PYTHON_PRJ_PATH}/
# Build wheels for Python 3.7-3.12
# (assumes all interpreters are on PATH, e.g. a manylinux2014 image)
for PYTHON_VERSION in python3.7 python3.8 python3.9 python3.10 python3.11 python3.12; do
    # NOTE(review): packaging tools are installed only for 3.12 here —
    # presumably the others ship with them in the build image; verify
    if [[ "${PYTHON_VERSION}" == "python3.12" ]]; then
        ${PYTHON_VERSION} -m pip install setuptools wheel twine
    fi
    ${PYTHON_VERSION} setup.py bdist_wheel
done
echo "Build wheel for Linux x86_64, Well Done!"

View File

@@ -0,0 +1,97 @@
#!/bin/bash
# Target download folder for model packs
DOWNLOAD_DIR="test_res/pack"
# Release URLs, one per model pack (name after the last '/' is the file name)
URL1="https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Megatron"
URL2="https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Pikachu"
URL3="https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RV1109"
URL4="https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RV1106"
URL5="https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RK356X"
URL6="https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Megatron_TRT"
URL7="https://github.com/HyperInspire/InspireFace/releases/download/v1.x/Gundam_RK3588"
# ANSI color codes for highlighted output
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Create download folder
mkdir -p "$DOWNLOAD_DIR"
# Download one file into $DOWNLOAD_DIR, preferring wget over curl.
#   $1 - URL to fetch
# NOTE: the curl branch changes directory and restores it with 'cd -'.
download_file() {
    local url=$1
    if command -v wget > /dev/null 2>&1; then
        echo "Using wget for download..."
        wget --no-check-certificate -L -P "$DOWNLOAD_DIR" "$url"
    else
        echo "wget not found, using curl instead..."
        cd "$DOWNLOAD_DIR"
        curl -L -O "$url"
        cd - > /dev/null
    fi
}
# Print the absolute path of a downloaded file inside $DOWNLOAD_DIR,
# highlighted with the script's color codes.
#   $1 - file name within $DOWNLOAD_DIR
print_file_path() {
    local filename=$1
    # Use ${filename}; the previous "$(unknown)" ran a nonexistent command
    # and printed a path without the file name
    echo -e "File downloaded to: ${YELLOW}$(cd "$DOWNLOAD_DIR" && pwd)/${filename}${NC}"
}
# No argument: fetch every model pack; one argument: fetch just that pack.
if [ $# -eq 0 ]; then
    echo "No argument provided, downloading all files..."
    download_file "$URL1"
    download_file "$URL2"
    download_file "$URL3"
    download_file "$URL4"
    download_file "$URL5"
    download_file "$URL6"
    download_file "$URL7"
    # Check all files landed in $DOWNLOAD_DIR
    if [ -f "$DOWNLOAD_DIR/Megatron" ] && [ -f "$DOWNLOAD_DIR/Pikachu" ] && \
       [ -f "$DOWNLOAD_DIR/Gundam_RV1109" ] && [ -f "$DOWNLOAD_DIR/Gundam_RV1106" ] && \
       [ -f "$DOWNLOAD_DIR/Gundam_RK356X" ] && [ -f "$DOWNLOAD_DIR/Megatron_TRT" ] && \
       [ -f "$DOWNLOAD_DIR/Gundam_RK3588" ]; then
        echo "All downloads completed successfully!"
        print_file_path "Megatron"
        print_file_path "Pikachu"
        print_file_path "Gundam_RV1109"
        print_file_path "Gundam_RV1106"
        print_file_path "Gundam_RK356X"
        print_file_path "Megatron_TRT"
        print_file_path "Gundam_RK3588"
    else
        echo "Download failed!"
        exit 1
    fi
else
    case "$1" in
        # Validate the requested pack name, then map it to its URL
        "Megatron"|"Pikachu"|"Gundam_RV1109"|"Gundam_RV1106"|"Gundam_RK356X"|"Megatron_TRT"|"Gundam_RK3588")
            echo "Downloading $1..."
            case "$1" in
                "Megatron") url="$URL1" ;;
                "Pikachu") url="$URL2" ;;
                "Gundam_RV1109") url="$URL3" ;;
                "Gundam_RV1106") url="$URL4" ;;
                "Gundam_RK356X") url="$URL5" ;;
                "Megatron_TRT") url="$URL6" ;;
                "Gundam_RK3588") url="$URL7" ;;
            esac
            download_file "$url"
            # Check the file landed in $DOWNLOAD_DIR
            if [ -f "$DOWNLOAD_DIR/$1" ]; then
                echo "$1 download completed successfully!"
                print_file_path "$1"
            else
                echo "$1 download failed!"
                exit 1
            fi
            ;;
        *)
            echo "Invalid argument. Please use 'Megatron', 'Pikachu', 'Gundam_RV1109', 'Gundam_RV1106', 'Gundam_RK356X', 'Megatron_TRT' or 'Gundam_RK3588'"
            exit 1
            ;;
    esac
fi

View File

@@ -0,0 +1,66 @@
#!/bin/bash
# Produce a "cudaXX_ubuntuXX.XX" tag for naming build artifacts.
# If the CUDA_TAG environment variable is already set, it is returned as-is;
# otherwise the CUDA and Ubuntu versions are auto-detected.
# Any component that cannot be detected is rendered as "_none".
get_cuda_ubuntu_tag() {
    # If CUDA_TAG is already set, return it directly
    if [ -n "${CUDA_TAG}" ]; then
        echo "${CUDA_TAG}"
        return 0
    fi
    # Get CUDA version
    CUDA_VERSION="_none"
    if command -v nvcc &> /dev/null; then
        # Try to get the version from nvcc, e.g. "release 11.8" -> "118"
        CUDA_VERSION=$(nvcc --version 2>/dev/null | grep "release" | awk '{print $6}' | cut -d',' -f1 | tr -d '.')
        if [ -z "${CUDA_VERSION}" ]; then
            CUDA_VERSION="_none"
        fi
    elif [ -f "/usr/local/cuda/version.txt" ]; then
        # Try to get the version from the CUDA installation directory
        CUDA_VERSION=$(cat /usr/local/cuda/version.txt 2>/dev/null | grep "CUDA Version" | awk '{print $3}' | tr -d '.')
        if [ -z "${CUDA_VERSION}" ]; then
            CUDA_VERSION="_none"
        fi
    elif [ -d "/usr/local/cuda" ] && ls -l /usr/local/cuda 2>/dev/null | grep -q "cuda-"; then
        # Try to get the version from the /usr/local/cuda symbolic link
        CUDA_LINK=$(ls -l /usr/local/cuda 2>/dev/null | grep -o "cuda-[0-9.]*" | head -n 1)
        CUDA_VERSION=$(echo "${CUDA_LINK}" | cut -d'-' -f2 | tr -d '.')
        if [ -z "${CUDA_VERSION}" ]; then
            CUDA_VERSION="_none"
        fi
    fi
    # Get Ubuntu version
    UBUNTU_VERSION="_none"
    if [ -f "/etc/os-release" ]; then
        # Check whether this is Ubuntu
        if grep -q "Ubuntu" /etc/os-release 2>/dev/null; then
            UBUNTU_VERSION=$(grep "VERSION_ID" /etc/os-release 2>/dev/null | cut -d'"' -f2)
            if [ -z "${UBUNTU_VERSION}" ]; then
                UBUNTU_VERSION="_none"
            fi
        fi
    elif [ -f "/etc/lsb-release" ]; then
        # Fall back to lsb-release
        if grep -q "Ubuntu" /etc/lsb-release 2>/dev/null; then
            UBUNTU_VERSION=$(grep "DISTRIB_RELEASE" /etc/lsb-release 2>/dev/null | cut -d'=' -f2)
            if [ -z "${UBUNTU_VERSION}" ]; then
                UBUNTU_VERSION="_none"
            fi
        fi
    fi
    # Generate and return the tag
    echo "cuda${CUDA_VERSION}_ubuntu${UBUNTU_VERSION}"
}
# Example usage: emit the detected tag
CUDA_TAG=$(get_cuda_ubuntu_tag)
echo "Generated tag: ${CUDA_TAG}"

View File

@@ -6,41 +6,79 @@ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3")
option(ISF_BUILD_SHARED_LIBS "Build shared libraries (DLLs)." ON)
option(ISF_ENABLE_TRACKING_BY_DETECTION "Use the tracking-by-detection mode." OFF)
if(ISF_ENABLE_TRACKING_BY_DETECTION)
add_definitions("-DISF_ENABLE_TRACKING_BY_DETECTION")
find_package(Eigen3 REQUIRED)
include_directories(${EIGEN3_INCLUDE_DIRS})
string(TIMESTAMP BUILD_TIMESTAMP "%Y-%m-%d")
set(EXTENDED_INFORMATION "InspireFace[Community Edition]")
if(INSPIRECV_BACKEND_OPENCV)
set(EXTENDED_INFORMATION "${EXTENDED_INFORMATION}@OpenCV Backend")
else()
set(EXTENDED_INFORMATION "${EXTENDED_INFORMATION}@General")
endif()
set(EXTENDED_INFORMATION "${EXTENDED_INFORMATION} - Build Time: ${BUILD_TIMESTAMP}")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/information.h.in ${CMAKE_CURRENT_SOURCE_DIR}/information.h)
file(GLOB_RECURSE SOURCE_FILES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
set(SOURCE_FILES ${SOURCE_FILES} ${CMAKE_CURRENT_SOURCE_DIR}/c_api/inspireface.cc) # Add C_API file
if (ISF_ENABLE_RKNN)
set(ISF_RKNN_API_INCLUDE_DIRS ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/rknn/${ISF_RKNPU_MAJOR}/runtime/${ISF_RK_DEVICE_TYPE}/Linux/librknn_api/include)
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/rknn/${ISF_RKNPU_MAJOR}/runtime/${ISF_RK_DEVICE_TYPE}/Linux/librknn_api/${CPU_ARCH}/)
link_directories(${ISF_RKNN_API_LIB})
if (ISF_RKNPU_MAJOR STREQUAL "rknpu1")
set(ISF_RKNN_API_INCLUDE_DIRS ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${ISF_RK_DEVICE_TYPE}/Linux/librknn_api/include)
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${ISF_RK_DEVICE_TYPE}/Linux/librknn_api/${CPU_ARCH}/)
link_directories(${ISF_RKNN_API_LIB})
set(RKNN_LINKED rknn_api)
else()
if(ANDROID)
set(RK_PLATFORM "Android")
set(ISF_RKNN_API_INCLUDE_DIRS ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${RK_PLATFORM}/librknn_api/include)
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${RK_PLATFORM}/librknn_api/${ANDROID_ABI}/)
link_directories(${ISF_RKNN_API_LIB})
set(RKNN_LINKED rknnrt)
else()
set(RK_PLATFORM "Linux")
set(ISF_RKNN_API_INCLUDE_DIRS ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${RK_PLATFORM}/librknn_api/include)
if (ISF_RK_COMPILER_TYPE STREQUAL "aarch64")
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${RK_PLATFORM}/librknn_api/${ISF_RK_COMPILER_TYPE}/)
link_directories(${ISF_RKNN_API_LIB})
else()
# For rknpu2 with armv7, we recommend linking static libraries by default
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${RK_PLATFORM}/librknn_api/${ISF_RK_COMPILER_TYPE}/librknnmrt.a)
set(RKNN_LINKED ${ISF_RKNN_API_LIB})
set(RKNN_USE_STATIC_LIBS TRUE)
endif()
endif()
endif()
endif()
set(LINK_THIRD_LIBS ${MNN_LIBS})
# OpenCV
set(LINK_THIRD_LIBS ${OpenCV_LIBS} ${MNN_LIBS})
if(ISF_ENABLE_TRACKING_BY_DETECTION)
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} Eigen3::Eigen)
if(ISF_ENABLE_OPENCV)
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} ${OpenCV_LIBS})
endif()
# SQLite3
set(SOURCE_FILES ${SOURCE_FILES} ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/sqlite/sqlite3.c) # Add SQLite3 C_API file
set(SQLITE_INCLUDE ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/sqlite/)
set(SOURCE_FILES ${SOURCE_FILES}
${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/sqlite/sqlite3.c
${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/sqlite/sqlite-vec.c
) # Add SQLite3 C_API file
set(SQLITE_INCLUDE ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/sqlite/)
if (ISF_ENABLE_RKNN)
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} rknn_api dl)
# InferenceHelp use RkNN
add_definitions("-DINFERENCE_HELPER_ENABLE_RKNN")
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} ${RKNN_LINKED} dl)
# InferenceWrapper use RkNN
if (ISF_RKNPU_MAJOR STREQUAL "rknpu1")
add_definitions("-DINFERENCE_WRAPPER_ENABLE_RKNN")
elseif(ISF_RKNPU_MAJOR STREQUAL "rknpu2")
add_definitions("-DINFERENCE_WRAPPER_ENABLE_RKNN2")
endif()
if (ISF_RK_COMPILER_TYPE STREQUAL "aarch64")
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} rknnrt)
endif()
endif()
# cpp yaml
@@ -49,25 +87,45 @@ set(SOURCE_FILES ${SOURCE_FILES} ${CPP_YAML_SRC})
set(CPP_YAML_INCLUDE ${ISF_THIRD_PARTY_DIR}/yaml-cpp/include)
# archive
set(SOURCE_FILES ${SOURCE_FILES} ${CMAKE_CURRENT_SOURCE_DIR}/middleware/model_archive/microtar/microtar.c)
set(SOURCE_FILES ${SOURCE_FILES} ${CMAKE_CURRENT_SOURCE_DIR}/middleware/model_archive/core_archive/microtar/microtar.c)
# CoreArchive source file
set(CORE_ARCHIVE_SOURCE_FILE ${CMAKE_CURRENT_SOURCE_DIR}/middleware/model_archive/core_archive/core_archive.cc CACHE PATH "")
set(SOURCE_FILES ${SOURCE_FILES} ${CORE_ARCHIVE_SOURCE_FILE})
if(ISF_ENABLE_APPLE_EXTENSION)
# link apple libs
find_library(FOUNDATION_LIBRARY Foundation)
find_library(COREML_LIBRARY CoreML)
find_library(ACCELERATE_LIBRARY Accelerate)
find_package(OpenCV REQUIRED)
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} ${FOUNDATION_LIBRARY} ${COREML_LIBRARY} ${ACCELERATE_LIBRARY})
# Add objective-c files
set(SOURCE_FILES ${SOURCE_FILES} ${CMAKE_CURRENT_SOURCE_DIR}/middleware/inference_wrapper/coreml/CoreMLAdapter.mm)
endif()
# MNN
link_directories(${MNN_LIBS})
if(ISF_BUILD_SHARED_LIBS)
add_definitions("-DISF_BUILD_SHARED_LIBS")
add_library(InspireFace SHARED ${SOURCE_FILES})
add_library(InspireFace SHARED ${SOURCE_FILES} $<TARGET_OBJECTS:inspirecv>)
else()
add_library(InspireFace STATIC ${SOURCE_FILES})
add_library(InspireFace STATIC ${SOURCE_FILES} $<TARGET_OBJECTS:inspirecv>)
endif()
target_compile_definitions(InspireFace PUBLIC INFERENCE_HELPER_ENABLE_MNN)
target_compile_definitions(InspireFace PUBLIC INFERENCE_WRAPPER_ENABLE_MNN)
target_compile_definitions(InspireFace PUBLIC FEATURE_BLOCK_ENABLE_OPENCV)
# Include files
set(NEED_INCLUDE . ${MNN_INCLUDE_DIRS})
if (ISF_ENABLE_RKNN)
set(NEED_INCLUDE ${NEED_INCLUDE} ${ISF_RKNN_API_INCLUDE_DIRS})
if(ISF_ENABLE_RGA)
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} ${RGA_LIBS})
set(NEED_INCLUDE ${NEED_INCLUDE} ${RGA_INCLUDE_DIRS})
endif()
endif ()
if (ISF_BUILD_LINUX_ARM7 OR ANDROID)
@@ -78,6 +136,12 @@ if (ISF_BUILD_LINUX_ARM7 OR ISF_BUILD_LINUX_AARCH64)
set(NEED_INCLUDE ${NEED_INCLUDE} ${OpenCV_STATIC_INCLUDE_DIR})
endif ()
if (ISF_ENABLE_TENSORRT)
set(NEED_INCLUDE ${NEED_INCLUDE} ${ISF_TENSORRT_INCLUDE_DIRS})
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} ${ISF_TENSORRT_LIBRARIES})
endif()
# add cpp yaml header
set(NEED_INCLUDE ${NEED_INCLUDE} ${CPP_YAML_INCLUDE} ${SQLITE_INCLUDE})
@@ -86,6 +150,9 @@ if(PLAT STREQUAL "linux")
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} ${CMAKE_THREAD_LIBS_INIT} dl)
endif()
# set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} inspirecv)
set(NEED_INCLUDE ${NEED_INCLUDE} ${INSPIRECV_INCLUDE_PATH})
target_include_directories(InspireFace PUBLIC
${NEED_INCLUDE}
)
@@ -101,49 +168,65 @@ elseif(IOS)
# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ObjC")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ObjC")
target_link_libraries(InspireFace
"-fobjc-arc"
"-framework opencv2"
"-framework Metal"
"-framework CoreML"
"-framework Foundation"
"-framework CoreVideo"
"-framework CoreMedia"
${MNN_FRAMEWORK_PATH}
)
"-fobjc-arc"
"-framework Metal"
"-framework CoreML"
"-framework Foundation"
"-framework CoreVideo"
"-framework CoreMedia"
${MNN_FRAMEWORK_PATH}
)
if(ISF_ENABLE_APPLE_EXTENSION)
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} ${ACCELERATE_LIBRARY})
endif()
if(ISF_ENABLE_OPENCV)
set(LINK_THIRD_LIBS ${LINK_THIRD_LIBS} "-framework opencv2")
endif()
else()
target_link_libraries(InspireFace PUBLIC ${LINK_THIRD_LIBS})
set_target_properties(InspireFace PROPERTIES
LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib/
ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib/
)
)
endif()
# Print Message
message(STATUS ">>>>>>>>>>>>>")
message(STATUS "InspireFace Core:")
message(STATUS "\t Version: ${INSPIRE_FACE_VERSION_MAJOR}.${INSPIRE_FACE_VERSION_MINOR}.${INSPIRE_FACE_VERSION_PATCH}")
message(STATUS "\t MNN_INCLUDE_DIRS: ${MNN_INCLUDE_DIRS}")
message(STATUS "\t MNN_LIBS: ${MNN_LIBS}")
message(STATUS "\t ENABLE_TRACKING_BY_DETECTION: ${ISF_ENABLE_TRACKING_BY_DETECTION}")
if(ISF_ENABLE_TRACKING_BY_DETECTION)
message(STATUS "\t EIGEN3_PATH: ${EIGEN3_INCLUDE_DIRS}")
endif()
message(STATUS "\t ISF_BUILD_SHARED_LIBS: ${ISF_BUILD_SHARED_LIBS}")
message(STATUS "\t ISF_ENABLE_RKNN: ${ISF_ENABLE_RKNN}")
if (ISF_ENABLE_RKNN)
message(STATUS "\t ISF_RKNN_API_INCLUDE_DIRS: ${ISF_RKNN_API_INCLUDE_DIRS}")
message(STATUS "\t ISF_RKNN_API_LIB: ${ISF_RKNN_API_LIB}")
endif ()
if (ISF_ENABLE_RGA)
message(STATUS "\t ISF_ENABLE_RGA: ${ISF_ENABLE_RGA}")
message(STATUS "\t RGA_LIBS: ${RGA_LIBS}")
message(STATUS "\t RGA_INCLUDE_DIRS: ${RGA_INCLUDE_DIRS}")
endif ()
if (ISF_GLOBAL_INFERENCE_BACKEND_USE_MNN_CUDA)
message(STATUS "\t ISF_GLOBAL_INFERENCE_BACKEND_USE_MNN_CUDA: ${ISF_GLOBAL_INFERENCE_BACKEND_USE_MNN_CUDA}")
endif ()
message(STATUS "\t ISF_ENABLE_TENSORRT: ${ISF_ENABLE_TENSORRT}")
if (ISF_ENABLE_TENSORRT)
message(STATUS "\t TENSORRT_INCLUDE_DIR: ${TENSORRT_INCLUDE_DIR}")
message(STATUS "\t TENSORRT_LIBRARY_INFER: ${TENSORRT_LIBRARY_INFER}")
message(STATUS "\t TENSORRT_LIBRARY_RUNTIME: ${TENSORRT_LIBRARY_RUNTIME}")
message(STATUS "\t CUDA_RUNTIME_LIBRARY: ${CUDA_RUNTIME_LIBRARY}")
endif()
# Install lib
install(TARGETS InspireFace
LIBRARY DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib
ARCHIVE DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib
)
)
# Install header file
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/c_api/inspireface.h DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/include)
@@ -154,8 +237,32 @@ install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/herror.h DESTINATION ${CMAKE_INSTALL_P
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/version.txt DESTINATION ${CMAKE_INSTALL_PREFIX}/)
if (ISF_ENABLE_RKNN)
if (ISF_ENABLE_RKNN AND ISF_RKNPU_MAJOR STREQUAL "rknpu1")
# Install rknn 3rd lib
install(FILES ${ISF_RKNN_API_LIB}/librknn_api.so DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib)
endif ()
if(ISF_ENABLE_RKNN AND ISF_RKNPU_MAJOR STREQUAL "rknpu2" AND RK_PLATFORM STREQUAL "Android")
install(FILES ${ISF_RKNN_API_LIB}/librknnrt.so DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib)
endif()
if (ISF_RK_COMPILER_TYPE STREQUAL "aarch64")
install(FILES ${ISF_RKNN_API_LIB}/librknnrt.so DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib)
endif()
if (NOT IOS)
if (NOT ISF_BUILD_SHARED_LIBS)
if(MNN_BUILD_SHARED_LIBS)
install(FILES ${CMAKE_BINARY_DIR}/3rdparty/MNN/libMNN.so DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib)
else()
install(FILES ${CMAKE_BINARY_DIR}/3rdparty/MNN/libMNN.a DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib)
endif()
if(RKNN_USE_STATIC_LIBS)
# To be added: The compilation of the RK series needs to be added
install(FILES ${ISF_RKNN_API_LIB} DESTINATION ${CMAKE_INSTALL_PREFIX}/InspireFace/lib)
endif()
endif()
endif()

View File

@@ -1,10 +1,18 @@
//
// Created by tunm on 2024/4/17.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include "launch.h"
#include "log.h"
#include "herror.h"
#include "isf_check.h"
#include "middleware/cuda_toolkit.h"
#if defined(ISF_ENABLE_TENSORRT)
#include "middleware/cuda_toolkit.h"
#endif
#define APPLE_EXTENSION_SUFFIX ".bundle"
namespace inspire {
@@ -12,7 +20,11 @@ std::mutex Launch::mutex_;
std::shared_ptr<Launch> Launch::instance_ = nullptr;
InspireArchive& Launch::getMArchive() {
return m_archive_;
std::lock_guard<std::mutex> lock(mutex_);
if (!m_archive_) {
throw std::runtime_error("Archive not initialized");
}
return *m_archive_;
}
std::shared_ptr<Launch> Launch::GetInstance() {
@@ -24,21 +36,81 @@ std::shared_ptr<Launch> Launch::GetInstance() {
}
int32_t Launch::Load(const std::string& path) {
std::lock_guard<std::mutex> lock(mutex_);
#if defined(ISF_ENABLE_TENSORRT)
int32_t support_cuda;
auto ret = CheckCudaUsability(&support_cuda);
if (ret != HSUCCEED) {
INSPIRE_LOGE("An error occurred while checking CUDA device support. Please ensure that your environment supports CUDA!");
return ret;
}
if (!support_cuda) {
INSPIRE_LOGE("Your environment does not support CUDA! Please ensure that your environment supports CUDA!");
return HERR_DEVICE_CUDA_NOT_SUPPORT;
}
#endif
INSPIREFACE_CHECK_MSG(os::IsExists(path), "The package path does not exist because the launch failed.");
#if defined(ISF_ENABLE_APPLE_EXTENSION)
BuildAppleExtensionPath(path);
#endif
if (!m_load_) {
m_archive_.ReLoad(path);
if (m_archive_.QueryStatus() == SARC_SUCCESS) {
m_load_ = true;
return HSUCCEED;
} else {
try {
m_archive_ = std::make_unique<InspireArchive>();
m_archive_->ReLoad(path);
if (m_archive_->QueryStatus() == SARC_SUCCESS) {
m_load_ = true;
INSPIRE_LOGI("Successfully loaded resources");
return HSUCCEED;
} else {
m_archive_.reset();
INSPIRE_LOGE("Failed to load resources");
return HERR_ARCHIVE_LOAD_MODEL_FAILURE;
}
} catch (const std::exception& e) {
m_archive_.reset();
INSPIRE_LOGE("Exception during resource loading: %s", e.what());
return HERR_ARCHIVE_LOAD_MODEL_FAILURE;
}
} else {
INSPIRE_LOGW(
"There is no need to call launch more than once, as subsequent calls will not affect the initialization.");
INSPIRE_LOGW("There is no need to call launch more than once, as subsequent calls will not affect the initialization.");
return HSUCCEED;
}
}
// Replaces any currently loaded resource archive with the one at `path`.
// Serialized by the class-wide mutex_. Returns HSUCCEED on success, or
// HERR_ARCHIVE_LOAD_MODEL_FAILURE when loading fails or throws.
int32_t Launch::Reload(const std::string& path) {
std::lock_guard<std::mutex> lock(mutex_);
// Hard-fails via INSPIREFACE_CHECK_MSG when the package file is missing.
INSPIREFACE_CHECK_MSG(os::IsExists(path), "The package path does not exist because the launch failed.");
#if defined(ISF_ENABLE_APPLE_EXTENSION)
// Derives the companion ".bundle" extension directory next to the package.
BuildAppleExtensionPath(path);
#endif
try {
// Clean up existing archive if it exists
if (m_archive_) {
m_archive_.reset();
m_load_ = false;
}
// Create and load new archive
m_archive_ = std::make_unique<InspireArchive>();
m_archive_->ReLoad(path);
if (m_archive_->QueryStatus() == SARC_SUCCESS) {
m_load_ = true;
INSPIRE_LOGI("Successfully reloaded resources");
return HSUCCEED;
} else {
// Drop the partially constructed archive so the object stays "unloaded".
m_archive_.reset();
INSPIRE_LOGE("Failed to reload resources");
return HERR_ARCHIVE_LOAD_MODEL_FAILURE;
}
} catch (const std::exception& e) {
// Any exception from construction/ReLoad leaves the launcher unloaded.
m_archive_.reset();
INSPIRE_LOGE("Exception during resource reloading: %s", e.what());
return HERR_ARCHIVE_LOAD_MODEL_FAILURE;
}
}
// Returns whether a resource archive is currently loaded.
// NOTE(review): reads m_load_ without taking mutex_, unlike Reload()/Unload();
// presumably acceptable as a racy status probe — confirm intent.
bool Launch::isMLoad() const {
return m_load_;
}
@@ -46,8 +118,7 @@ bool Launch::isMLoad() const {
void Launch::Unload() {
std::lock_guard<std::mutex> lock(mutex_);
if (m_load_) {
// Assuming InspireArchive has a method to clear its resources
m_archive_.Release();
m_archive_.reset();
m_load_ = false;
INSPIRE_LOGI("All resources have been successfully unloaded and system is reset.");
} else {
@@ -55,4 +126,54 @@ void Launch::Unload() {
}
}
// Overrides the Rockchip DMA heap device path used for buffer allocation.
void Launch::SetRockchipDmaHeapPath(const std::string& path) {
m_rockchip_dma_heap_path_ = path;
}
// Returns the currently configured Rockchip DMA heap device path.
std::string Launch::GetRockchipDmaHeapPath() const {
return m_rockchip_dma_heap_path_;
}
// Sets the extension directory. The path must exist (hard check); on Apple
// extension builds it must additionally be a directory.
void Launch::ConfigurationExtensionPath(const std::string& path) {
#if defined(ISF_ENABLE_APPLE_EXTENSION)
INSPIREFACE_CHECK_MSG(os::IsDir(path), "The apple extension path is not a directory, please check.");
#endif
INSPIREFACE_CHECK_MSG(os::IsExists(path), "The extension path is not exists, please check.");
m_extension_path_ = path;
}
// Returns the configured extension directory (empty if never set).
std::string Launch::GetExtensionPath() const {
return m_extension_path_;
}
// Stores the global CoreML compute-unit preference and logs the chosen mode.
// Note: only the CPU/GPU/ANE values produce a log line; any other value is
// stored silently.
void Launch::SetGlobalCoreMLInferenceMode(InferenceWrapper::SpecialBackend mode) {
m_global_coreml_inference_mode_ = mode;
if (m_global_coreml_inference_mode_ == InferenceWrapper::COREML_CPU) {
INSPIRE_LOGW("Global CoreML Compute Units set to CPU Only.");
} else if (m_global_coreml_inference_mode_ == InferenceWrapper::COREML_GPU) {
INSPIRE_LOGW("Global CoreML Compute Units set to CPU and GPU.");
} else if (m_global_coreml_inference_mode_ == InferenceWrapper::COREML_ANE) {
INSPIRE_LOGW("Global CoreML Compute Units set to Auto Switch (ANE, GPU, CPU).");
}
}
// Returns the global CoreML compute-unit preference.
InferenceWrapper::SpecialBackend Launch::GetGlobalCoreMLInferenceMode() const {
return m_global_coreml_inference_mode_;
}
// Derives the Apple extension bundle path as "<dir>/<basename>.bundle" next to
// the resource package, then hard-checks that it exists and is a directory.
void Launch::BuildAppleExtensionPath(const std::string& resource_path) {
std::string basename = os::Basename(resource_path);
m_extension_path_ = os::PathJoin(os::Dirname(resource_path), basename + APPLE_EXTENSION_SUFFIX);
INSPIREFACE_CHECK_MSG(os::IsExists(m_extension_path_), "The apple extension path is not exists, please check.");
INSPIREFACE_CHECK_MSG(os::IsDir(m_extension_path_), "The apple extension path is not a directory, please check.");
}
// Sets the CUDA device index used by TensorRT-backed inference.
void Launch::SetCudaDeviceId(int32_t device_id) {
m_cuda_device_id_ = device_id;
}
// Returns the configured CUDA device index (member default is 0).
int32_t Launch::GetCudaDeviceId() const {
return m_cuda_device_id_;
}
} // namespace inspire

View File

@@ -1,9 +1,17 @@
// Created by tunm on 2024/04/17.
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#ifndef INSPIREFACE_LAUNCH_H
#define INSPIREFACE_LAUNCH_H
#include "middleware/model_archive/inspire_archive.h"
#if defined(ISF_ENABLE_RGA)
#include "middleware/nexus_processor/rga/dma_alloc.h"
#endif
#include <mutex>
#include "middleware/inference_wrapper/inference_wrapper.h"
#include "middleware/system.h"
#ifndef INSPIRE_API
#define INSPIRE_API
@@ -25,7 +33,11 @@ public:
// Loads the necessary resources from a specified path.
// Returns an integer status code: 0 on success, non-zero on failure.
int32_t Load(const std::string &path);
int32_t Load(const std::string& path);
// Reloads the resources from a specified path.
// Returns an integer status code: 0 on success, non-zero on failure.
int32_t Reload(const std::string& path);
// Provides access to the loaded InspireArchive instance.
InspireArchive& getMArchive();
@@ -36,17 +48,62 @@ public:
// Unloads the resources and resets the system to its initial state.
void Unload();
// Set the rockchip dma heap path
void SetRockchipDmaHeapPath(const std::string& path);
// Get the rockchip dma heap path
std::string GetRockchipDmaHeapPath() const;
// Set the extension path
void ConfigurationExtensionPath(const std::string& path);
// Get the extension path
std::string GetExtensionPath() const;
// Set the global coreml inference mode
void SetGlobalCoreMLInferenceMode(InferenceWrapper::SpecialBackend mode);
// Get the global coreml inference mode
InferenceWrapper::SpecialBackend GetGlobalCoreMLInferenceMode() const;
// Build the extension path
void BuildAppleExtensionPath(const std::string& resource_path);
// Set the cuda device id
void SetCudaDeviceId(int32_t device_id);
// Get the cuda device id
int32_t GetCudaDeviceId() const;
private:
Launch() : m_load_(false) {} ///< Private constructor for the singleton pattern.
// Parameters
std::string m_rockchip_dma_heap_path_;
static std::mutex mutex_; ///< Mutex for synchronizing access to the singleton instance.
static std::shared_ptr<Launch> instance_; ///< The singleton instance of Launch.
// Constructor
Launch() : m_load_(false), m_archive_(nullptr) {
#if defined(ISF_ENABLE_RGA)
#if defined(ISF_RKNPU_RV1106)
m_rockchip_dma_heap_path_ = RV1106_CMA_HEAP_PATH;
#else
m_rockchip_dma_heap_path_ = DMA_HEAP_DMA32_UNCACHE_PATCH;
#endif
INSPIRE_LOGW("Rockchip dma heap configured path: %s", m_rockchip_dma_heap_path_.c_str());
#endif
} ///< Private constructor for the singleton pattern.
InspireArchive m_archive_; ///< The archive containing all necessary resources.
bool m_load_; ///< Flag indicating whether the resources have been successfully loaded.
static std::mutex mutex_; ///< Mutex for synchronizing access to the singleton instance.
static std::shared_ptr<Launch> instance_; ///< The singleton instance of Launch.
std::string m_extension_path_;
std::unique_ptr<InspireArchive> m_archive_; ///< The archive containing all necessary resources.
bool m_load_; ///< Flag indicating whether the resources have been successfully loaded.
int32_t m_cuda_device_id_{0};
InferenceWrapper::SpecialBackend m_global_coreml_inference_mode_{InferenceWrapper::COREML_ANE}; ///< The global coreml inference mode
};
} // namespace inspire
} // namespace inspire
#endif //INSPIREFACE_LAUNCH_H
#endif // INSPIREFACE_LAUNCH_H

View File

@@ -1,4 +1,7 @@
// Created by tunm on 2024/07/16.
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#ifndef INSPIREFACE_RESOURCE_MANAGE_H
#define INSPIREFACE_RESOURCE_MANAGE_H
@@ -8,6 +11,7 @@
#include <memory>
#include <iomanip> // For std::setw and std::left
#include <vector>
#include "log.h"
#ifndef INSPIRE_API
#define INSPIRE_API
#endif
@@ -30,6 +34,7 @@ private:
// Use hash tables to store session and image stream handles
std::unordered_map<long, bool> sessionMap;
std::unordered_map<long, bool> streamMap;
std::unordered_map<long, bool> imageBitmapMap;
// The private constructor guarantees singletons
ResourceManager() {}
@@ -84,6 +89,23 @@ public:
// released
}
// Registers a newly created image-bitmap handle for leak tracking.
// The stored flag stays false while the bitmap is alive; releaseImageBitmap()
// flips it to true.
void createImageBitmap(long handle) {
std::lock_guard<std::mutex> lock(mutex);
imageBitmapMap[handle] = false;  // false indicates that it is not released
}
// Marks the image bitmap `handle` as released.
// Returns false when the handle is unknown or was already released.
bool releaseImageBitmap(long handle) {
    std::lock_guard<std::mutex> lock(mutex);
    auto entry = imageBitmapMap.find(handle);
    if (entry == imageBitmapMap.end() || entry->second) {
        return false;  // Unknown handle, or double release.
    }
    entry->second = true;  // Flag as released.
    return true;
}
// Gets a list of unreleased session handles
std::vector<long> getUnreleasedSessions() {
std::lock_guard<std::mutex> lock(mutex);
@@ -108,11 +130,22 @@ public:
return unreleasedStreams;
}
// Collects the handles of all image bitmaps that have not been released yet.
std::vector<long> getUnreleasedImageBitmaps() {
    std::lock_guard<std::mutex> lock(mutex);
    std::vector<long> pending;
    for (auto it = imageBitmapMap.begin(); it != imageBitmapMap.end(); ++it) {
        if (!it->second) {
            pending.push_back(it->first);
        }
    }
    return pending;
}
// Method to print resource management statistics
void printResourceStatistics() {
std::lock_guard<std::mutex> lock(mutex);
std::cout << std::left << std::setw(15) << "Resource Name" << std::setw(15) << "Total Created" << std::setw(15) << "Total Released"
<< std::setw(15) << "Not Released" << std::endl;
INSPIRE_LOGI("%-15s%-15s%-15s%-15s", "Resource Name", "Total Created", "Total Released", "Not Released");
// Print session statistics
int totalSessionsCreated = sessionMap.size();
@@ -124,8 +157,7 @@ public:
if (!entry.second)
++sessionsNotReleased;
}
std::cout << std::left << std::setw(15) << "Session" << std::setw(15) << totalSessionsCreated << std::setw(15) << totalSessionsReleased
<< std::setw(15) << sessionsNotReleased << std::endl;
INSPIRE_LOGI("%-15s%-15d%-15d%-15d", "Session", totalSessionsCreated, totalSessionsReleased, sessionsNotReleased);
// Print stream statistics
int totalStreamsCreated = streamMap.size();
@@ -137,8 +169,19 @@ public:
if (!entry.second)
++streamsNotReleased;
}
std::cout << std::left << std::setw(15) << "Stream" << std::setw(15) << totalStreamsCreated << std::setw(15) << totalStreamsReleased
<< std::setw(15) << streamsNotReleased << std::endl;
INSPIRE_LOGI("%-15s%-15d%-15d%-15d", "Stream", totalStreamsCreated, totalStreamsReleased, streamsNotReleased);
// Print bitmap statistics
int totalBitmapsCreated = imageBitmapMap.size();
int totalBitmapsReleased = 0;
int bitmapsNotReleased = 0;
for (const auto& entry : imageBitmapMap) {
if (entry.second)
++totalBitmapsReleased;
if (!entry.second)
++bitmapsNotReleased;
}
INSPIRE_LOGI("%-15s%-15d%-15d%-15d", "Bitmap", totalBitmapsCreated, totalBitmapsReleased, bitmapsNotReleased);
}
};

View File

@@ -1,14 +1,20 @@
//
// Created by tunm on 2023/10/3.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include "inspireface.h"
#include "intypedef.h"
#include "inspireface_internal.h"
#include "information.h"
#include "feature_hub/feature_hub.h"
#include "Initialization_module/launch.h"
#include "Initialization_module/resource_manage.h"
#include "feature_hub/feature_hub_db.h"
#include "initialization_module/launch.h"
#include "initialization_module/resource_manage.h"
#include "recognition_module/similarity_converter.h"
#include "middleware/inference_wrapper/inference_wrapper.h"
#if defined(ISF_ENABLE_TENSORRT)
#include "middleware/cuda_toolkit.h"
#endif
using namespace inspire;
@@ -20,36 +26,36 @@ HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageS
auto stream = new HF_CameraStream();
switch (data->rotation) {
case HF_CAMERA_ROTATION_90:
stream->impl.SetRotationMode(ROTATION_90);
stream->impl.SetRotationMode(inspirecv::ROTATION_90);
break;
case HF_CAMERA_ROTATION_180:
stream->impl.SetRotationMode(ROTATION_180);
stream->impl.SetRotationMode(inspirecv::ROTATION_180);
break;
case HF_CAMERA_ROTATION_270:
stream->impl.SetRotationMode(ROTATION_270);
stream->impl.SetRotationMode(inspirecv::ROTATION_270);
break;
default:
stream->impl.SetRotationMode(ROTATION_0);
stream->impl.SetRotationMode(inspirecv::ROTATION_0);
break;
}
switch (data->format) {
case HF_STREAM_RGB:
stream->impl.SetDataFormat(RGB);
stream->impl.SetDataFormat(inspirecv::RGB);
break;
case HF_STREAM_BGR:
stream->impl.SetDataFormat(BGR);
stream->impl.SetDataFormat(inspirecv::BGR);
break;
case HF_STREAM_RGBA:
stream->impl.SetDataFormat(RGBA);
stream->impl.SetDataFormat(inspirecv::RGBA);
break;
case HF_STREAM_BGRA:
stream->impl.SetDataFormat(BGRA);
stream->impl.SetDataFormat(inspirecv::BGRA);
break;
case HF_STREAM_YUV_NV12:
stream->impl.SetDataFormat(NV12);
stream->impl.SetDataFormat(inspirecv::NV12);
break;
case HF_STREAM_YUV_NV21:
stream->impl.SetDataFormat(NV21);
stream->impl.SetDataFormat(inspirecv::NV21);
break;
default:
return HERR_INVALID_IMAGE_STREAM_PARAM; // Assume there's a return code for unsupported
@@ -65,6 +71,74 @@ HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageS
return HSUCCEED;
}
// Creates an empty image stream; buffer/format/rotation are supplied later via
// the HFImageStreamSet* functions. Returns HSUCCEED, or
// HERR_INVALID_IMAGE_STREAM_HANDLE when `handle` is null.
HYPER_CAPI_EXPORT extern HResult HFCreateImageStreamEmpty(HFImageStream *handle) {
    if (handle == nullptr) {
        return HERR_INVALID_IMAGE_STREAM_HANDLE;
    }
    auto stream = new HF_CameraStream();
    *handle = (HFImageStream)stream;
    // Register the stream with the resource manager, consistent with the other
    // stream constructors (HFCreateImageStream, HFCreateImageStreamFromImageBitmap);
    // without this the handle is invisible to release tracking and statistics.
    RESOURCE_MANAGE->createStream((long)*handle);
    return HSUCCEED;
}
// Assigns a raw pixel buffer and its dimensions to an existing image stream.
// The buffer pointer is handed to SetDataBuffer as-is; ownership/lifetime
// requirements are not visible here — assumed caller-owned, TODO confirm.
// NOTE(review): this passes (buffer, width, height) while
// HFCreateImageStreamFromImageBitmap passes (data, Height(), Width()) to the
// same SetDataBuffer — one of the two argument orders is likely wrong; verify
// against the inspirecv signature.
HYPER_CAPI_EXPORT extern HResult HFImageStreamSetBuffer(HFImageStream handle, HPUInt8 buffer, HInt32 width, HInt32 height) {
if (handle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
((HF_CameraStream *)handle)->impl.SetDataBuffer(buffer, width, height);
return HSUCCEED;
}
// Updates the rotation hint of an existing image stream.
// Unrecognized rotation values fall back to no rotation (ROTATION_0).
HYPER_CAPI_EXPORT extern HResult HFImageStreamSetRotation(HFImageStream handle, HFRotation rotation) {
    if (handle == nullptr) {
        return HERR_INVALID_IMAGE_STREAM_HANDLE;
    }
    auto &impl = ((HF_CameraStream *)handle)->impl;
    switch (rotation) {
        case HF_CAMERA_ROTATION_90:
            impl.SetRotationMode(inspirecv::ROTATION_90);
            break;
        case HF_CAMERA_ROTATION_180:
            impl.SetRotationMode(inspirecv::ROTATION_180);
            break;
        case HF_CAMERA_ROTATION_270:
            impl.SetRotationMode(inspirecv::ROTATION_270);
            break;
        default:
            impl.SetRotationMode(inspirecv::ROTATION_0);
            break;
    }
    return HSUCCEED;
}
// Updates the pixel format of an existing image stream.
// Returns HERR_INVALID_IMAGE_STREAM_PARAM for formats not mapped below.
HYPER_CAPI_EXPORT extern HResult HFImageStreamSetFormat(HFImageStream handle, HFImageFormat format) {
    if (handle == nullptr) {
        return HERR_INVALID_IMAGE_STREAM_HANDLE;
    }
    auto &impl = ((HF_CameraStream *)handle)->impl;
    switch (format) {
        case HF_STREAM_RGB:
            impl.SetDataFormat(inspirecv::RGB);
            break;
        case HF_STREAM_BGR:
            impl.SetDataFormat(inspirecv::BGR);
            break;
        case HF_STREAM_RGBA:
            impl.SetDataFormat(inspirecv::RGBA);
            break;
        case HF_STREAM_BGRA:
            impl.SetDataFormat(inspirecv::BGRA);
            break;
        case HF_STREAM_YUV_NV12:
            impl.SetDataFormat(inspirecv::NV12);
            break;
        case HF_STREAM_YUV_NV21:
            impl.SetDataFormat(inspirecv::NV21);
            break;
        default:
            return HERR_INVALID_IMAGE_STREAM_PARAM;  // Unsupported format.
    }
    return HSUCCEED;
}
HYPER_CAPI_EXPORT extern HResult HFReleaseImageStream(HFImageStream streamHandle) {
if (streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
@@ -77,6 +151,150 @@ HYPER_CAPI_EXPORT extern HResult HFReleaseImageStream(HFImageStream streamHandle
return HSUCCEED;
}
// Creates an image bitmap from caller-provided pixel data and registers the
// new handle with the resource manager for leak tracking.
// NOTE(review): presumably impl.Reset() copies `data->data`, since nothing
// here retains the caller's buffer — confirm against the inspirecv Image API.
HYPER_CAPI_EXPORT extern HResult HFCreateImageBitmap(PHFImageBitmapData data, HFImageBitmap *handle) {
if (data == nullptr || handle == nullptr) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
auto bitmap = new HF_ImageBitmap();
bitmap->impl.Reset(data->width, data->height, data->channels, data->data);
*handle = (HFImageBitmap)bitmap;
// Record the creation of this image bitmap in the ResourceManager
RESOURCE_MANAGE->createImageBitmap((long)*handle);
return HSUCCEED;
}
// Loads an image file into a new bitmap with the requested channel count and
// registers the handle with the resource manager.
// NOTE(review): there is no check that the file decoded successfully — a bad
// path presumably yields an empty image rather than an error; consider
// validating the loaded image before returning HSUCCEED.
HYPER_CAPI_EXPORT extern HResult HFCreateImageBitmapFromFilePath(HPath filePath, HInt32 channels, HFImageBitmap *handle) {
if (handle == nullptr) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
auto image = inspirecv::Image::Create(filePath, channels);
auto bitmap = new HF_ImageBitmap();
bitmap->impl.Reset(image.Width(), image.Height(), image.Channels(), image.Data());
*handle = (HFImageBitmap)bitmap;
// Record the creation of this image bitmap in the ResourceManager
RESOURCE_MANAGE->createImageBitmap((long)*handle);
return HSUCCEED;
}
// Deep-copies an image bitmap into a new handle, which is registered with the
// resource manager like any other bitmap creation.
HYPER_CAPI_EXPORT extern HResult HFImageBitmapCopy(HFImageBitmap handle, HFImageBitmap *copyHandle) {
    if (handle == nullptr || copyHandle == nullptr) {
        return HERR_INVALID_IMAGE_BITMAP_HANDLE;
    }
    auto &src = ((HF_ImageBitmap *)handle)->impl;
    auto duplicate = new HF_ImageBitmap();
    duplicate->impl.Reset(src.Width(), src.Height(), src.Channels(), src.Data());
    *copyHandle = (HFImageBitmap)duplicate;
    // Record the creation of this image bitmap in the ResourceManager
    RESOURCE_MANAGE->createImageBitmap((long)*copyHandle);
    return HSUCCEED;
}
// Destroys a bitmap created by one of the HFCreateImageBitmap* functions.
// The resource manager must know the handle and it must not already have been
// released; otherwise the call fails and nothing is freed (protects against
// double-free of the same handle).
HYPER_CAPI_EXPORT extern HResult HFReleaseImageBitmap(HFImageBitmap handle) {
if (handle == nullptr) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
// Check and mark this image bitmap as released in the ResourceManager
if (!RESOURCE_MANAGE->releaseImageBitmap((long)handle)) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;  // or other appropriate error code
}
delete (HF_ImageBitmap *)handle;
return HSUCCEED;
}
// Wraps an existing bitmap's pixel buffer in a new image stream with the given
// rotation hint. The stream is registered with the resource manager.
// The format is hardcoded to BGR regardless of the bitmap's channel count —
// NOTE(review): presumably bitmaps are always 3-channel BGR here; confirm.
// NOTE(review): SetDataBuffer is called as (data, Height(), Width()), while
// HFImageStreamSetBuffer calls it as (buffer, width, height) — the two call
// sites disagree on argument order; verify against the inspirecv signature.
HYPER_CAPI_EXPORT extern HResult HFCreateImageStreamFromImageBitmap(HFImageBitmap handle, HFRotation rotation, HFImageStream *streamHandle) {
if (handle == nullptr || streamHandle == nullptr) {
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
auto stream = new HF_CameraStream();
switch (rotation) {
case HF_CAMERA_ROTATION_90:
stream->impl.SetRotationMode(inspirecv::ROTATION_90);
break;
case HF_CAMERA_ROTATION_180:
stream->impl.SetRotationMode(inspirecv::ROTATION_180);
break;
case HF_CAMERA_ROTATION_270:
stream->impl.SetRotationMode(inspirecv::ROTATION_270);
break;
default:
stream->impl.SetRotationMode(inspirecv::ROTATION_0);
break;
}
stream->impl.SetDataFormat(inspirecv::BGR);
stream->impl.SetDataBuffer(((HF_ImageBitmap *)handle)->impl.Data(), ((HF_ImageBitmap *)handle)->impl.Height(),
((HF_ImageBitmap *)handle)->impl.Width());
*streamHandle = (HFImageStream)stream;
// Record the creation of this stream in the ResourceManager
RESOURCE_MANAGE->createStream((long)*streamHandle);
return HSUCCEED;
}
// Materializes a bitmap from a stream after applying the stream's scale
// processing (scaled by `scale`; rotation applied when `is_rotate` is
// non-zero). The new bitmap handle is registered with the resource manager.
HYPER_CAPI_EXPORT extern HResult HFCreateImageBitmapFromImageStreamProcess(HFImageStream streamHandle, HFImageBitmap *handle, int is_rotate,
float scale) {
if (streamHandle == nullptr || handle == nullptr) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
auto bitmap = new HF_ImageBitmap();
auto img = ((HF_CameraStream *)streamHandle)->impl.ExecuteImageScaleProcessing(scale, is_rotate);
bitmap->impl.Reset(img.Width(), img.Height(), img.Channels(), img.Data());
*handle = (HFImageBitmap)bitmap;
// Record the creation of this image bitmap in the ResourceManager
RESOURCE_MANAGE->createImageBitmap((long)*handle);
return HSUCCEED;
}
// Writes the bitmap to an image file at `filePath`.
// Returns HSUCCEED on success, HERR_INVALID_IMAGE_BITMAP_HANDLE on a null
// handle or a failed write (no dedicated write-failure code is visible here).
HYPER_CAPI_EXPORT extern HResult HFImageBitmapWriteToFile(HFImageBitmap handle, HPath filePath) {
    if (handle == nullptr) {
        return HERR_INVALID_IMAGE_BITMAP_HANDLE;
    }
    // Write() reports success as a truthy value (see how HFDeBugImageStreamDecodeSave
    // interprets it), so it must be mapped to an HResult instead of being returned
    // directly — otherwise a successful write would yield a non-HSUCCEED code.
    auto ok = ((HF_ImageBitmap *)handle)->impl.Write(filePath);
    if (ok) {
        return HSUCCEED;
    }
    INSPIRE_LOGE("Failed to write image to %s", filePath);
    return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
// Draws a rectangle outline onto the bitmap using the given RGB color and
// line thickness.
HYPER_CAPI_EXPORT extern HResult HFImageBitmapDrawRect(HFImageBitmap handle, HFaceRect rect, HColor color, HInt32 thickness) {
    if (handle == nullptr) {
        return HERR_INVALID_IMAGE_BITMAP_HANDLE;
    }
    auto *bitmap = (HF_ImageBitmap *)handle;
    inspirecv::Rect<int> region(rect.x, rect.y, rect.width, rect.height);
    bitmap->impl.DrawRect(region, {color.r, color.g, color.b}, thickness);
    return HSUCCEED;
}
// Draws a circle at an integer point on the bitmap with the given radius,
// RGB color and thickness.
HYPER_CAPI_EXPORT extern HResult HFImageBitmapDrawCircle(HFImageBitmap handle, HPoint2i point, HInt32 radius, HColor color, HInt32 thickness) {
if (handle == nullptr) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
((HF_ImageBitmap *)handle)->impl.DrawCircle({point.x, point.y}, radius, {color.r, color.g, color.b}, thickness);
return HSUCCEED;
}
// Float-point variant of HFImageBitmapDrawCircle. The coordinates are
// truncated (not rounded) to integers before drawing.
HYPER_CAPI_EXPORT extern HResult HFImageBitmapDrawCircleF(HFImageBitmap handle, HPoint2f point, HInt32 radius, HColor color, HInt32 thickness) {
if (handle == nullptr) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
((HF_ImageBitmap *)handle)->impl.DrawCircle({(int)point.x, (int)point.y}, radius, {color.r, color.g, color.b}, thickness);
return HSUCCEED;
}
// Exposes the bitmap's dimensions and a non-owning pointer to its pixel data.
// The returned pointer stays valid only while the bitmap handle is alive.
HYPER_CAPI_EXPORT extern HResult HFImageBitmapGetData(HFImageBitmap handle, PHFImageBitmapData data) {
    if (handle == nullptr || data == nullptr) {
        return HERR_INVALID_IMAGE_BITMAP_HANDLE;
    }
    auto &impl = ((HF_ImageBitmap *)handle)->impl;
    data->width = impl.Width();
    data->height = impl.Height();
    data->channels = impl.Channels();
    data->data = (uint8_t *)impl.Data();
    return HSUCCEED;
}
// Displays the bitmap in a window titled `title`.
// `delay` is forwarded to inspirecv's Show — presumably a waitKey-style delay
// in milliseconds (0 = block until key press); confirm against inspirecv docs.
HYPER_CAPI_EXPORT extern HResult HFImageBitmapShow(HFImageBitmap handle, HString title, HInt32 delay) {
if (handle == nullptr) {
return HERR_INVALID_IMAGE_BITMAP_HANDLE;
}
((HF_ImageBitmap *)handle)->impl.Show(title, delay);
return HSUCCEED;
}
void HFDeBugImageStreamImShow(HFImageStream streamHandle) {
if (streamHandle == nullptr) {
INSPIRE_LOGE("Handle error");
@@ -86,12 +304,11 @@ void HFDeBugImageStreamImShow(HFImageStream streamHandle) {
INSPIRE_LOGE("Image error");
return;
}
auto image = stream->impl.GetScaledImage(1.0f, true);
auto image = stream->impl.ExecuteImageScaleProcessing(1.0f, true);
#ifdef DISABLE_GUI
cv::imwrite("tmp.jpg", image);
image.Write("tmp.jpg");
#else
cv::imshow("Debug", image);
cv::waitKey(0);
image.Show();
#endif
}
@@ -105,10 +322,10 @@ HResult HFDeBugImageStreamDecodeSave(HFImageStream streamHandle, HPath savePath)
INSPIRE_LOGE("Image error");
return HERR_INVALID_IMAGE_STREAM_HANDLE;
}
auto image = stream->impl.GetScaledImage(1.0f, true);
auto ret = cv::imwrite(savePath, image);
auto image = stream->impl.ExecuteImageScaleProcessing(1.0f, true);
auto ret = image.Write(savePath);
if (ret) {
INSPIRE_LOGE("Image saved successfully to %s", savePath);
INSPIRE_LOGI("Image saved successfully to %s", savePath);
return HSUCCEED;
} else {
INSPIRE_LOGE("Failed to save image to %s", savePath);
@@ -138,7 +355,8 @@ HResult HFCreateInspireFaceSession(HFSessionCustomParameter parameter, HFDetectM
param.enable_ir_liveness = parameter.enable_ir_liveness;
param.enable_recognition = parameter.enable_recognition;
param.enable_face_attribute = parameter.enable_face_attribute;
inspire::DetectMode detMode = inspire::DETECT_MODE_ALWAYS_DETECT;
param.enable_detect_mode_landmark = parameter.enable_detect_mode_landmark;
inspire::DetectModuleMode detMode = inspire::DETECT_MODE_ALWAYS_DETECT;
if (detectMode == HF_DETECT_MODE_LIGHT_TRACK) {
detMode = inspire::DETECT_MODE_LIGHT_TRACK;
} else if (detectMode == HF_DETECT_MODE_TRACK_BY_DETECTION) {
@@ -183,7 +401,10 @@ HResult HFCreateInspireFaceSessionOptional(HOption customOption, HFDetectMode de
if (customOption & HF_ENABLE_INTERACTION) {
param.enable_interaction_liveness = true;
}
inspire::DetectMode detMode = inspire::DETECT_MODE_ALWAYS_DETECT;
if (customOption & HF_ENABLE_DETECT_MODE_LANDMARK) {
param.enable_detect_mode_landmark = true;
}
inspire::DetectModuleMode detMode = inspire::DETECT_MODE_ALWAYS_DETECT;
if (detectMode == HF_DETECT_MODE_LIGHT_TRACK) {
detMode = inspire::DETECT_MODE_LIGHT_TRACK;
} else if (detectMode == HF_DETECT_MODE_TRACK_BY_DETECTION) {
@@ -209,24 +430,107 @@ HResult HFLaunchInspireFace(HPath resourcePath) {
return INSPIRE_LAUNCH->Load(resourcePath);
}
/**
 * Reload all SDK resources from the given archive path.
 * Delegates to the global launcher's Reload, which releases the currently
 * loaded resources and loads them again from resourcePath.
 */
HResult HFReloadInspireFace(HPath resourcePath) {
    // The previous implementation built a std::string copy of resourcePath
    // and never used it; the raw path is passed straight through instead.
    return INSPIRE_LAUNCH->Reload(resourcePath);
}
/**
 * Release every resource held by the global launcher.
 * Always reports success; after this call a fresh HFLaunchInspireFace is
 * required before the SDK can be used again.
 */
HResult HFTerminateInspireFace() {
    INSPIRE_LAUNCH->Unload();
    return HSUCCEED;
}
/**
 * Write the launcher's load flag into *status (non-zero when launched).
 * NOTE(review): status is dereferenced without a null check, consistent with
 * the other query functions in this file — callers must pass a valid pointer.
 */
HResult HFQueryInspireFaceLaunchStatus(HInt32 *status) {
    *status = INSPIRE_LAUNCH->isMLoad();
    return HSUCCEED;
}
HResult HFFeatureHubDataDisable() {
return FEATURE_HUB->DisableHub();
return FEATURE_HUB_DB->DisableHub();
}
/**
 * Override the default Rockchip DMA heap device path stored in the launcher.
 * Takes effect for RGA-accelerated image operations created afterwards.
 */
HResult HFSetExpansiveHardwareRockchipDmaHeapPath(HPath path) {
    INSPIRE_LAUNCH->SetRockchipDmaHeapPath(path);
    return HSUCCEED;
}
/**
 * Copy the currently configured Rockchip DMA heap path into the caller's buffer.
 * NOTE(review): strcpy performs no bounds check — the header documents that the
 * buffer must be at least 256 bytes; a longer configured path would overflow.
 */
HResult HFQueryExpansiveHardwareRockchipDmaHeapPath(HString path) {
    strcpy(path, INSPIRE_LAUNCH->GetRockchipDmaHeapPath().c_str());
    return HSUCCEED;
}
/**
 * Select the Apple CoreML compute backend (CPU / GPU / ANE) that sessions
 * created afterwards will use. An unrecognized mode leaves the current
 * backend untouched; the call still reports success.
 */
HResult HFSetAppleCoreMLInferenceMode(HFAppleCoreMLInferenceMode mode) {
    switch (mode) {
        case HF_APPLE_COREML_INFERENCE_MODE_CPU:
            INSPIRE_LAUNCH->SetGlobalCoreMLInferenceMode(InferenceWrapper::COREML_CPU);
            break;
        case HF_APPLE_COREML_INFERENCE_MODE_GPU:
            INSPIRE_LAUNCH->SetGlobalCoreMLInferenceMode(InferenceWrapper::COREML_GPU);
            break;
        case HF_APPLE_COREML_INFERENCE_MODE_ANE:
            INSPIRE_LAUNCH->SetGlobalCoreMLInferenceMode(InferenceWrapper::COREML_ANE);
            break;
        default:
            // No matching mode: deliberately a no-op, mirroring the if/else chain.
            break;
    }
    return HSUCCEED;
}
/**
 * Record the CUDA device id on the global launcher; sessions created
 * afterwards run on that device.
 */
HResult HFSetCudaDeviceId(int32_t device_id) {
    INSPIRE_LAUNCH->SetCudaDeviceId(device_id);
    return HSUCCEED;
}
/**
 * Read back the CUDA device id currently stored on the global launcher.
 * device_id must be a valid pointer (no null check, matching file convention).
 */
HResult HFGetCudaDeviceId(int32_t *device_id) {
    *device_id = INSPIRE_LAUNCH->GetCudaDeviceId();
    return HSUCCEED;
}
/**
 * Print CUDA device information via the TensorRT-backed helper.
 * Returns HERR_DEVICE_CUDA_DISABLE when the SDK was built without
 * ISF_ENABLE_TENSORRT, after logging a warning.
 */
HResult HFPrintCudaDeviceInfo() {
#if defined(ISF_ENABLE_TENSORRT)
    return inspire::PrintCudaDeviceInfo();
#else
    INSPIRE_LOGW("CUDA is not supported, you need to enable the compile option that supports TensorRT");
    return HERR_DEVICE_CUDA_DISABLE;
#endif
}
/**
 * Query the number of visible CUDA devices into *num_devices.
 * Returns HERR_DEVICE_CUDA_DISABLE when built without ISF_ENABLE_TENSORRT.
 */
HResult HFGetNumCudaDevices(int32_t *num_devices) {
#if defined(ISF_ENABLE_TENSORRT)
    return inspire::GetCudaDeviceCount(num_devices);
#else
    INSPIRE_LOGW("CUDA is not supported, you need to enable the compile option that supports TensorRT");
    return HERR_DEVICE_CUDA_DISABLE;
#endif
}
/**
 * Probe whether a usable CUDA device exists; result written into *is_support.
 * Returns HERR_DEVICE_CUDA_DISABLE when built without ISF_ENABLE_TENSORRT.
 */
HResult HFCheckCudaDeviceSupport(int32_t *is_support) {
#if defined(ISF_ENABLE_TENSORRT)
    return inspire::CheckCudaUsability(is_support);
#else
    INSPIRE_LOGW("CUDA is not supported, you need to enable the compile option that supports TensorRT");
    return HERR_DEVICE_CUDA_DISABLE;
#endif
}
HResult HFFeatureHubDataEnable(HFFeatureHubConfiguration configuration) {
inspire::DatabaseConfiguration param;
param.db_path = (configuration.dbPath != nullptr) ? std::string(configuration.dbPath) : std::string();
param.enable_use_db = configuration.enablePersistence;
param.feature_block_num = configuration.featureBlockNum;
if (configuration.primaryKeyMode != HF_PK_AUTO_INCREMENT && configuration.primaryKeyMode != HF_PK_MANUAL_INPUT) {
param.primary_key_mode = inspire::PrimaryKeyMode::AUTO_INCREMENT;
} else {
param.primary_key_mode = inspire::PrimaryKeyMode(configuration.primaryKeyMode);
}
if (configuration.persistenceDbPath == nullptr) {
INSPIRE_LOGE("persistenceDbPath is null, use default path");
}
// Add validation for persistenceDbPath
if (configuration.enablePersistence) {
if (configuration.persistenceDbPath == nullptr) {
param.persistence_db_path = std::string("");
} else {
param.persistence_db_path = std::string(configuration.persistenceDbPath);
}
} else {
param.persistence_db_path = std::string(""); // Empty string for in-memory mode
}
param.enable_persistence = configuration.enablePersistence;
param.recognition_threshold = configuration.searchThreshold;
param.search_mode = (SearchMode)configuration.searchMode;
auto ret = FEATURE_HUB->EnableHub(param);
param.search_mode = (inspire::SearchMode)configuration.searchMode;
auto ret = FEATURE_HUB_DB->EnableHub(param);
return ret;
}
@@ -260,7 +564,7 @@ HResult HFSessionSetFaceTrackMode(HFSession session, HFDetectMode detectMode) {
if (ctx == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
inspire::DetectMode detMode = inspire::DETECT_MODE_ALWAYS_DETECT;
inspire::DetectModuleMode detMode = inspire::DETECT_MODE_ALWAYS_DETECT;
if (detectMode == HF_DETECT_MODE_LIGHT_TRACK) {
detMode = inspire::DETECT_MODE_LIGHT_TRACK;
}
@@ -278,6 +582,39 @@ HResult HFSessionSetFaceDetectThreshold(HFSession session, HFloat threshold) {
return ctx->impl.SetFaceDetectThreshold(threshold);
}
/**
 * Set the landmark smoothing ratio used in track mode (default 0.05).
 * Returns HERR_INVALID_CONTEXT_HANDLE for a null session handle.
 */
HResult HFSessionSetTrackModeSmoothRatio(HFSession session, HFloat ratio) {
    // Casting a null handle yields a null context, so a single check covers
    // both the null-handle and null-context cases of the original.
    auto *ctx = (HF_FaceAlgorithmSession *)session;
    if (ctx == nullptr) {
        return HERR_INVALID_CONTEXT_HANDLE;
    }
    return ctx->impl.SetTrackModeSmoothRatio(ratio);
}
/**
 * Set how many frames the track-mode smoothing cache keeps (default 5).
 * Returns HERR_INVALID_CONTEXT_HANDLE for a null session handle.
 */
HResult HFSessionSetTrackModeNumSmoothCacheFrame(HFSession session, HInt32 num) {
    // A null handle casts to a null context; one check handles both.
    auto *ctx = (HF_FaceAlgorithmSession *)session;
    if (ctx == nullptr) {
        return HERR_INVALID_CONTEXT_HANDLE;
    }
    return ctx->impl.SetTrackModeNumSmoothCacheFrame(num);
}
/**
 * Set how many tracked frames elapse between full detections (default 20).
 * Returns HERR_INVALID_CONTEXT_HANDLE for a null session handle.
 */
HResult HFSessionSetTrackModeDetectInterval(HFSession session, HInt32 num) {
    // A null handle casts to a null context; one check handles both.
    auto *ctx = (HF_FaceAlgorithmSession *)session;
    if (ctx == nullptr) {
        return HERR_INVALID_CONTEXT_HANDLE;
    }
    return ctx->impl.SetTrackModeDetectInterval(num);
}
HResult HFExecuteFaceTrack(HFSession session, HFImageStream streamHandle, PHFMultipleFaceData results) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
@@ -333,20 +670,67 @@ HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasicToken singleFace, HPoint2
data.data = singleFace.data;
HyperFaceData face = {0};
HInt32 ret;
ret = DeserializeHyperFaceData((char *)data.data, data.dataSize, face);
ret = RunDeserializeHyperFaceData((char *)data.data, data.dataSize, face);
if (ret != HSUCCEED) {
return ret;
}
if (face.densityLandmarkEnable == 0) {
INSPIRE_LOGW("To get dense landmarks in always-detect mode, you need to enable HF_ENABLE_DETECT_MODE_LANDMARK");
return HERR_SESS_LANDMARK_NOT_ENABLE;
}
for (size_t i = 0; i < num; i++) {
landmarks[i].x = face.densityLandmark[i].x;
landmarks[i].y = face.densityLandmark[i].y;
}
return HSUCCEED;
}
/**
 * Deserialize a face token and copy its five key points into landmarks.
 * num must be exactly 5; any deserialization failure code is passed through.
 * NOTE(review): singleFace.data is not null-checked before deserialization,
 * unlike HFFaceGetFaceAlignmentImage — confirm tokens are always validated upstream.
 */
HResult HFGetFaceFiveKeyPointsFromFaceToken(HFFaceBasicToken singleFace, HPoint2f *landmarks, HInt32 num) {
    if (num != 5) {
        return HERR_SESS_KEY_POINT_NUM_NOT_MATCH;
    }
    inspire::FaceBasicData data;
    data.dataSize = singleFace.size;
    data.data = singleFace.data;
    HyperFaceData face = {0};
    HInt32 ret;
    // Rebuild the HyperFaceData struct from the serialized token bytes.
    ret = RunDeserializeHyperFaceData((char *)data.data, data.dataSize, face);
    if (ret != HSUCCEED) {
        return ret;
    }
    for (size_t i = 0; i < num; i++) {
        landmarks[i].x = face.keyPoints[i].x;
        landmarks[i].y = face.keyPoints[i].y;
    }
    return HSUCCEED;
}
/**
 * Enable or disable per-frame tracking cost-time statistics for the session.
 * Returns HERR_INVALID_CONTEXT_HANDLE for a null session handle.
 */
HResult HFSessionSetEnableTrackCostSpend(HFSession session, int value) {
    // A null handle casts to a null context; one check handles both.
    auto *ctx = (HF_FaceAlgorithmSession *)session;
    if (ctx == nullptr) {
        return HERR_INVALID_CONTEXT_HANDLE;
    }
    ctx->impl.SetEnableTrackCostSpend(value);
    return HSUCCEED;
}
/**
 * Print the accumulated tracking cost-time statistics of the session.
 * Returns HERR_INVALID_CONTEXT_HANDLE for a null session handle.
 */
HResult HFSessionPrintTrackCostSpend(HFSession session) {
    // A null handle casts to a null context; one check handles both.
    auto *ctx = (HF_FaceAlgorithmSession *)session;
    if (ctx == nullptr) {
        return HERR_INVALID_CONTEXT_HANDLE;
    }
    ctx->impl.PrintTrackCostSpend();
    return HSUCCEED;
}
HResult HFFeatureHubFaceSearchThresholdSetting(float threshold) {
FEATURE_HUB->SetRecognitionThreshold(threshold);
FEATURE_HUB_DB->SetRecognitionThreshold(threshold);
return HSUCCEED;
}
@@ -407,6 +791,39 @@ HResult HFFaceFeatureExtractCpy(HFSession session, HFImageStream streamHandle, H
return ret;
}
/**
 * Produce the aligned (cropped + warped) face image for one face token.
 * On success *handle receives a newly allocated HF_ImageBitmap that the caller
 * must release with HFReleaseImageBitmap; the allocation is also registered
 * with the ResourceManager for leak tracking. On failure the bitmap is freed
 * here and the error code from the session implementation is returned.
 */
HResult HFFaceGetFaceAlignmentImage(HFSession session, HFImageStream streamHandle, HFFaceBasicToken singleFace, HFImageBitmap *handle) {
    if (session == nullptr) {
        return HERR_INVALID_CONTEXT_HANDLE;
    }
    if (streamHandle == nullptr) {
        return HERR_INVALID_IMAGE_STREAM_HANDLE;
    }
    HF_FaceAlgorithmSession *ctx = (HF_FaceAlgorithmSession *)session;
    if (ctx == nullptr) {
        return HERR_INVALID_CONTEXT_HANDLE;
    }
    HF_CameraStream *stream = (HF_CameraStream *)streamHandle;
    if (stream == nullptr) {
        return HERR_INVALID_IMAGE_STREAM_HANDLE;
    }
    // A token with no payload cannot be deserialized into face geometry.
    if (singleFace.data == nullptr || singleFace.size <= 0) {
        return HERR_INVALID_FACE_TOKEN;
    }
    inspire::FaceBasicData data;
    data.dataSize = singleFace.size;
    data.data = singleFace.data;
    auto bitmap = new HF_ImageBitmap();
    auto ret = ctx->impl.FaceGetFaceAlignmentImage(stream->impl, data, bitmap->impl);
    if (ret != HSUCCEED) {
        // Avoid leaking the wrapper when alignment fails.
        delete bitmap;
        return ret;
    }
    *handle = bitmap;
    // Record the creation of this image bitmap in the ResourceManager
    RESOURCE_MANAGE->createImageBitmap((long)*handle);
    return HSUCCEED;
}
HResult HFFaceComparison(HFFaceFeature feature1, HFFaceFeature feature2, HPFloat result) {
if (feature1.data == nullptr || feature2.data == nullptr) {
return HERR_INVALID_FACE_FEATURE;
@@ -417,19 +834,62 @@ HResult HFFaceComparison(HFFaceFeature feature1, HFFaceFeature feature2, HPFloat
}
*result = 0.0f;
float res = -1.0f;
auto ret = FEATURE_HUB->CosineSimilarity(feature1.data, feature2.data, feature1.size, res);
auto ret = FEATURE_HUB_DB->CosineSimilarity(feature1.data, feature2.data, feature1.size, res);
*result = res;
return ret;
}
/**
 * Fetch the recommended cosine-similarity threshold from the converter.
 * Only warns (does not fail) when the SDK is not launched; in that case the
 * converter's built-in default is returned.
 */
HResult HFGetRecommendedCosineThreshold(HPFloat threshold) {
    if (!INSPIRE_LAUNCH->isMLoad()) {
        INSPIRE_LOGW("Inspireface is not launched, using default threshold 0.48");
    }
    *threshold = SIMILARITY_CONVERTER_GET_RECOMMENDED_COSINE_THRESHOLD();
    return HSUCCEED;
}
/**
 * Map a raw cosine similarity to the converter's percentage-style score.
 * Only warns when the SDK is not launched; conversion still runs with the
 * converter's current (default) configuration.
 */
HResult HFCosineSimilarityConvertToPercentage(HFloat similarity, HPFloat result) {
    if (!INSPIRE_LAUNCH->isMLoad()) {
        INSPIRE_LOGW("Inspireface is not launched.");
    }
    *result = SIMILARITY_CONVERTER_RUN(similarity);
    return HSUCCEED;
}
/**
 * Replace the global similarity-converter configuration with the values in
 * config (threshold, middle score, steepness, output range). Only warns when
 * the SDK is not launched; the update is applied regardless.
 */
HResult HFUpdateCosineSimilarityConverter(HFSimilarityConverterConfig config) {
    if (!INSPIRE_LAUNCH->isMLoad()) {
        INSPIRE_LOGW("Inspireface is not launched.");
    }
    // Mirror the C struct field-by-field into the C++ config type.
    inspire::SimilarityConverterConfig cfg;
    cfg.threshold = config.threshold;
    cfg.middleScore = config.middleScore;
    cfg.steepness = config.steepness;
    cfg.outputMin = config.outputMin;
    cfg.outputMax = config.outputMax;
    SIMILARITY_CONVERTER_UPDATE_CONFIG(cfg);
    return HSUCCEED;
}
/**
 * Copy the global similarity-converter configuration into *config.
 * Only warns when the SDK is not launched; the converter's current values
 * (possibly defaults) are still returned.
 */
HResult HFGetCosineSimilarityConverter(PHFSimilarityConverterConfig config) {
    if (!INSPIRE_LAUNCH->isMLoad()) {
        INSPIRE_LOGW("Inspireface is not launched.");
    }
    // Mirror the C++ config type field-by-field back into the C struct.
    inspire::SimilarityConverterConfig cfg = SIMILARITY_CONVERTER_GET_CONFIG();
    config->threshold = cfg.threshold;
    config->middleScore = cfg.middleScore;
    config->steepness = cfg.steepness;
    config->outputMin = cfg.outputMin;
    config->outputMax = cfg.outputMax;
    return HSUCCEED;
}
HResult HFGetFeatureLength(HPInt32 num) {
*num = FEATURE_HUB->GetFeatureNum();
*num = 512;
return HSUCCEED;
}
HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity featureIdentity) {
HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity featureIdentity, HPFaceId allocId) {
if (featureIdentity.feature->data == nullptr) {
return HERR_INVALID_FACE_FEATURE;
}
@@ -438,8 +898,7 @@ HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity featureIdentity) {
for (int i = 0; i < featureIdentity.feature->size; ++i) {
feat.push_back(featureIdentity.feature->data[i]);
}
std::string tag(featureIdentity.tag);
HInt32 ret = FEATURE_HUB->FaceFeatureInsertFromCustomId(feat, tag, featureIdentity.customId);
HInt32 ret = FEATURE_HUB_DB->FaceFeatureInsert(feat, featureIdentity.id, *allocId);
return ret;
}
@@ -453,14 +912,16 @@ HResult HFFeatureHubFaceSearch(HFFaceFeature searchFeature, HPFloat confidence,
for (int i = 0; i < searchFeature.size; ++i) {
feat.push_back(searchFeature.data[i]);
}
inspire::SearchResult result;
HInt32 ret = FEATURE_HUB->SearchFaceFeature(feat, result);
mostSimilar->feature = (HFFaceFeature *)FEATURE_HUB->GetFaceFeaturePtrCache().get();
mostSimilar->feature->data = (HFloat *)FEATURE_HUB->GetSearchFaceFeatureCache().data();
mostSimilar->feature->size = FEATURE_HUB->GetSearchFaceFeatureCache().size();
mostSimilar->tag = FEATURE_HUB->GetStringCache();
mostSimilar->customId = result.customId;
*confidence = result.score;
*confidence = -1.0f;
inspire::FaceSearchResult result;
HInt32 ret = FEATURE_HUB_DB->SearchFaceFeature(feat, result);
mostSimilar->feature = (HFFaceFeature *)FEATURE_HUB_DB->GetFaceFeaturePtrCache().get();
mostSimilar->feature->data = (HFloat *)FEATURE_HUB_DB->GetSearchFaceFeatureCache().data();
mostSimilar->feature->size = FEATURE_HUB_DB->GetSearchFaceFeatureCache().size();
mostSimilar->id = result.id;
if (mostSimilar->id != -1) {
*confidence = result.similarity;
}
return ret;
}
@@ -474,18 +935,18 @@ HResult HFFeatureHubFaceSearchTopK(HFFaceFeature searchFeature, HInt32 topK, PHF
for (int i = 0; i < searchFeature.size; ++i) {
feat.push_back(searchFeature.data[i]);
}
HInt32 ret = FEATURE_HUB->SearchFaceFeatureTopK(feat, topK);
HInt32 ret = FEATURE_HUB_DB->SearchFaceFeatureTopKCache(feat, topK);
if (ret == HSUCCEED) {
results->size = FEATURE_HUB->GetTopKConfidence().size();
results->confidence = FEATURE_HUB->GetTopKConfidence().data();
results->customIds = FEATURE_HUB->GetTopKCustomIdsCache().data();
results->size = FEATURE_HUB_DB->GetTopKConfidence().size();
results->confidence = FEATURE_HUB_DB->GetTopKConfidence().data();
results->ids = FEATURE_HUB_DB->GetTopKCustomIdsCache().data();
}
return ret;
}
HResult HFFeatureHubFaceRemove(HInt32 customId) {
auto ret = FEATURE_HUB->FaceFeatureRemoveFromCustomId(customId);
HResult HFFeatureHubFaceRemove(HFaceId id) {
auto ret = FEATURE_HUB_DB->FaceFeatureRemove(id);
return ret;
}
@@ -498,23 +959,21 @@ HResult HFFeatureHubFaceUpdate(HFFaceFeatureIdentity featureIdentity) {
for (int i = 0; i < featureIdentity.feature->size; ++i) {
feat.push_back(featureIdentity.feature->data[i]);
}
std::string tag(featureIdentity.tag);
auto ret = FEATURE_HUB->FaceFeatureUpdateFromCustomId(feat, tag, featureIdentity.customId);
auto ret = FEATURE_HUB_DB->FaceFeatureUpdate(feat, featureIdentity.id);
return ret;
}
HResult HFFeatureHubGetFaceIdentity(HInt32 customId, PHFFaceFeatureIdentity identity) {
auto ret = FEATURE_HUB->GetFaceFeatureFromCustomId(customId);
HResult HFFeatureHubGetFaceIdentity(HFaceId id, PHFFaceFeatureIdentity identity) {
auto ret = FEATURE_HUB_DB->GetFaceFeature(id);
if (ret == HSUCCEED) {
identity->tag = FEATURE_HUB->GetStringCache();
identity->customId = customId;
identity->feature = (HFFaceFeature *)FEATURE_HUB->GetFaceFeaturePtrCache().get();
identity->feature->data = (HFloat *)FEATURE_HUB->GetFaceFeaturePtrCache()->data;
identity->feature->size = FEATURE_HUB->GetFaceFeaturePtrCache()->dataSize;
identity->id = id;
identity->feature = (HFFaceFeature *)FEATURE_HUB_DB->GetFaceFeaturePtrCache().get();
identity->feature->data = (HFloat *)FEATURE_HUB_DB->GetFaceFeaturePtrCache()->data;
identity->feature->size = FEATURE_HUB_DB->GetFaceFeaturePtrCache()->dataSize;
} else {
identity->customId = -1;
identity->id = -1;
}
return ret;
@@ -551,13 +1010,14 @@ HResult HFMultipleFacePipelineProcess(HFSession session, HFImageStream streamHan
param.enable_ir_liveness = parameter.enable_ir_liveness;
param.enable_recognition = parameter.enable_recognition;
param.enable_face_attribute = parameter.enable_face_attribute;
param.enable_detect_mode_landmark = parameter.enable_detect_mode_landmark;
HResult ret;
std::vector<inspire::HyperFaceData> data;
data.resize(faces->detectedNum);
for (int i = 0; i < faces->detectedNum; ++i) {
auto &face = data[i];
ret = DeserializeHyperFaceData((char *)faces->tokens[i].data, faces->tokens[i].size, face);
ret = RunDeserializeHyperFaceData((char *)faces->tokens[i].data, faces->tokens[i].size, face);
if (ret != HSUCCEED) {
return HERR_INVALID_FACE_TOKEN;
}
@@ -612,13 +1072,16 @@ HResult HFMultipleFacePipelineProcessOptional(HFSession session, HFImageStream s
if (customOption & HF_ENABLE_INTERACTION) {
param.enable_interaction_liveness = true;
}
if (customOption & HF_ENABLE_DETECT_MODE_LANDMARK) {
param.enable_detect_mode_landmark = true;
}
HResult ret;
std::vector<inspire::HyperFaceData> data;
data.resize(faces->detectedNum);
for (int i = 0; i < faces->detectedNum; ++i) {
auto &face = data[i];
ret = DeserializeHyperFaceData((char *)faces->tokens[i].data, faces->tokens[i].size, face);
ret = RunDeserializeHyperFaceData((char *)faces->tokens[i].data, faces->tokens[i].size, face);
if (ret != HSUCCEED) {
return HERR_INVALID_FACE_TOKEN;
}
@@ -687,12 +1150,12 @@ HResult HFFaceQualityDetect(HFSession session, HFFaceBasicToken singleFace, HFlo
data.dataSize = singleFace.size;
data.data = singleFace.data;
auto ret = inspire::FaceContext::FaceQualityDetect(data, *confidence);
auto ret = inspire::FaceSession::FaceQualityDetect(data, *confidence);
return ret;
}
HResult HFGetFaceIntereactionStateResult(HFSession session, PHFFaceIntereactionState result) {
HResult HFGetFaceInteractionStateResult(HFSession session, PHFFaceInteractionState result) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
@@ -707,7 +1170,7 @@ HResult HFGetFaceIntereactionStateResult(HFSession session, PHFFaceIntereactionS
return HSUCCEED;
}
HResult HFGetFaceIntereactionActionsResult(HFSession session, PHFFaceIntereactionsActions actions) {
HResult HFGetFaceInteractionActionsResult(HFSession session, PHFFaceInteractionsActions actions) {
if (session == nullptr) {
return HERR_INVALID_CONTEXT_HANDLE;
}
@@ -719,7 +1182,7 @@ HResult HFGetFaceIntereactionActionsResult(HFSession session, PHFFaceIntereactio
actions->normal = (HInt32 *)ctx->impl.GetFaceNormalAactionsResultCache().data();
actions->blink = (HInt32 *)ctx->impl.GetFaceBlinkAactionsResultCache().data();
actions->shake = (HInt32 *)ctx->impl.GetFaceShakeAactionsResultCache().data();
actions->headRiase = (HInt32 *)ctx->impl.GetFaceRaiseHeadAactionsResultCache().data();
actions->headRaise = (HInt32 *)ctx->impl.GetFaceRaiseHeadAactionsResultCache().data();
actions->jawOpen = (HInt32 *)ctx->impl.GetFaceJawOpenAactionsResultCache().data();
return HSUCCEED;
@@ -743,12 +1206,22 @@ HResult HFGetFaceAttributeResult(HFSession session, PHFFaceAttributeResult resul
}
HResult HFFeatureHubGetFaceCount(HInt32 *count) {
*count = FEATURE_HUB->GetFaceFeatureCount();
*count = FEATURE_HUB_DB->GetFaceFeatureCount();
return HSUCCEED;
}
HResult HFFeatureHubViewDBTable() {
return FEATURE_HUB->ViewDBTable();
FEATURE_HUB_DB->ViewDBTable();
return HSUCCEED;
}
/**
 * Fetch the list of face ids currently stored in the feature hub.
 * ids->ids points into the hub's internal cache — presumably invalidated by
 * the next hub query; callers should copy the data if they keep it (confirm
 * against FEATURE_HUB_DB cache semantics). On failure ids is left untouched.
 */
HResult HFFeatureHubGetExistingIds(PHFFeatureHubExistingIds ids) {
    auto ret = FEATURE_HUB_DB->GetAllIds();
    if (ret == HSUCCEED) {
        ids->size = FEATURE_HUB_DB->GetExistingIds().size();
        ids->ids = FEATURE_HUB_DB->GetExistingIds().data();
    }
    return ret;
}
HResult HFQueryInspireFaceVersion(PHFInspireFaceVersion version) {
@@ -759,6 +1232,11 @@ HResult HFQueryInspireFaceVersion(PHFInspireFaceVersion version) {
return HSUCCEED;
}
/**
 * Copy the compile-time extended information string into the caller's struct.
 * Fix: the previous strncpy used a count of strlen(src), which copies the
 * characters but never the terminating '\0', leaving the destination
 * unterminated; the terminator is now written explicitly.
 */
HResult HFQueryInspireFaceExtendedInformation(PHFInspireFaceExtendedInformation information) {
    const size_t len = strlen(INSPIRE_FACE_EXTENDED_INFORMATION);
    memcpy(information->information, INSPIRE_FACE_EXTENDED_INFORMATION, len);
    // Assumes information->information holds at least len + 1 bytes — TODO confirm field size.
    information->information[len] = '\0';
    return HSUCCEED;
}
HResult HFSetLogLevel(HFLogLevel level) {
INSPIRE_SET_LOG_LEVEL(LogLevel(level));
return HSUCCEED;
@@ -766,6 +1244,41 @@ HResult HFSetLogLevel(HFLogLevel level) {
HResult HFLogDisable() {
INSPIRE_SET_LOG_LEVEL(inspire::ISF_LOG_NONE);
return HSUCCEED;
}
/**
 * printf-style logging entry point for C callers.
 * Formats the variadic arguments into a fixed 1 KiB buffer (longer messages
 * are truncated by vsnprintf) and forwards the result to the level-matching
 * INSPIRE_LOG* macro. Messages below the manager's current level, or any
 * message when the level is ISF_LOG_NONE, are dropped before formatting.
 */
HResult HFLogPrint(HFLogLevel level, HFormat format, ...) {
    inspire::LogLevel logLevel = static_cast<inspire::LogLevel>(level);
    // Early-out before paying the formatting cost when the message is filtered.
    if (inspire::LogManager::getInstance()->getLogLevel() == inspire::ISF_LOG_NONE || logLevel < inspire::LogManager::getInstance()->getLogLevel()) {
        return HSUCCEED;
    }
    char buffer[1024];
    va_list args;
    va_start(args, format);
    vsnprintf(buffer, sizeof(buffer), format, args);
    va_end(args);
    // Dispatch the formatted text to the macro that matches the level.
    switch (logLevel) {
        case inspire::ISF_LOG_DEBUG:
            INSPIRE_LOGD("%s", buffer);
            break;
        case inspire::ISF_LOG_INFO:
            INSPIRE_LOGI("%s", buffer);
            break;
        case inspire::ISF_LOG_WARN:
            INSPIRE_LOGW("%s", buffer);
            break;
        case inspire::ISF_LOG_ERROR:
            INSPIRE_LOGE("%s", buffer);
            break;
        case inspire::ISF_LOG_FATAL:
            INSPIRE_LOGF("%s", buffer);
            break;
        default:
            // Unknown level: silently drop (matches original behavior).
            break;
    }
    return HSUCCEED;
}

View File

@@ -1,9 +1,10 @@
//
// Created by tunm on 2023/10/3.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#ifndef HYPERFACEREPO_INSPIREFACE_H
#define HYPERFACEREPO_INSPIREFACE_H
#ifndef INSPIREFACE_H
#define INSPIREFACE_H
#include <stdint.h>
#include "intypedef.h"
@@ -23,15 +24,16 @@
extern "C" {
#endif
#define HF_ENABLE_NONE 0x00000000 ///< Flag to enable no features.
#define HF_ENABLE_FACE_RECOGNITION 0x00000002 ///< Flag to enable face recognition feature.
#define HF_ENABLE_LIVENESS 0x00000004 ///< Flag to enable RGB liveness detection feature.
#define HF_ENABLE_IR_LIVENESS 0x00000008 ///< Flag to enable IR (Infrared) liveness detection feature.
#define HF_ENABLE_MASK_DETECT 0x00000010 ///< Flag to enable mask detection feature.
#define HF_ENABLE_FACE_ATTRIBUTE 0x00000020 ///< Flag to enable face attribute prediction feature.
#define HF_ENABLE_PLACEHOLDER_ 0x00000040 ///< -
#define HF_ENABLE_QUALITY 0x00000080 ///< Flag to enable face quality assessment feature.
#define HF_ENABLE_INTERACTION 0x00000100 ///< Flag to enable interaction feature.
#define HF_ENABLE_NONE 0x00000000 ///< Flag to enable no features.
#define HF_ENABLE_FACE_RECOGNITION 0x00000002 ///< Flag to enable face recognition feature.
#define HF_ENABLE_LIVENESS 0x00000004 ///< Flag to enable RGB liveness detection feature.
#define HF_ENABLE_IR_LIVENESS 0x00000008 ///< Flag to enable IR (Infrared) liveness detection feature.
#define HF_ENABLE_MASK_DETECT 0x00000010 ///< Flag to enable mask detection feature.
#define HF_ENABLE_FACE_ATTRIBUTE 0x00000020 ///< Flag to enable face attribute prediction feature.
#define HF_ENABLE_PLACEHOLDER_ 0x00000040 ///< -
#define HF_ENABLE_QUALITY 0x00000080 ///< Flag to enable face quality assessment feature.
#define HF_ENABLE_INTERACTION 0x00000100 ///< Flag to enable interaction feature.
#define HF_ENABLE_DETECT_MODE_LANDMARK 0x00000200 ///< Flag to enable landmark detection in detection mode
/**
* Camera stream format.
@@ -62,7 +64,7 @@ typedef enum HFRotation {
* Defines the structure for image data stream.
*/
typedef struct HFImageData {
uint8_t *data; ///< Pointer to the image data stream.
HPUInt8 data; ///< Pointer to the image data stream.
HInt32 width; ///< Width of the image.
HInt32 height; ///< Height of the image.
HFImageFormat format; ///< Format of the image, indicating the data stream format to be parsed.
@@ -80,6 +82,45 @@ typedef struct HFImageData {
*/
HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageStream *handle);
/**
* @brief Create an empty image stream instance.
*
* This function is used to create an instance of a data buffer stream with the given image data.
*
* @param handle Pointer to the stream handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCreateImageStreamEmpty(HFImageStream *handle);
/**
* @brief Set the buffer of the image stream.
*
* @param handle Pointer to the stream handle.
* @param buffer Pointer to the buffer.
* @param width Width of the image.
* @param height Height of the image.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageStreamSetBuffer(HFImageStream handle, HPUInt8 buffer, HInt32 width, HInt32 height);
/**
* @brief Set the rotation of the image stream.
*
* @param handle Pointer to the stream handle.
* @param rotation Rotation angle of the image.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageStreamSetRotation(HFImageStream handle, HFRotation rotation);
/**
* @brief Set the format of the image stream.
*
* @param handle Pointer to the stream handle.
* @param format Format of the image.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageStreamSetFormat(HFImageStream handle, HFImageFormat format);
/**
* @brief Release the instantiated DataBuffer object.
*
@@ -90,6 +131,126 @@ HYPER_CAPI_EXPORT extern HResult HFCreateImageStream(PHFImageData data, HFImageS
*/
HYPER_CAPI_EXPORT extern HResult HFReleaseImageStream(HFImageStream streamHandle);
/**
* @brief Struct for image bitmap data.
*/
typedef struct HFImageBitmapData {
uint8_t *data; ///< Pointer to the image data.
HInt32 width; ///< Width of the image.
HInt32 height; ///< Height of the image.
HInt32 channels; ///< Number of channels in the image, only support 3 channels or 1 channel.
} HFImageBitmapData, *PHFImageBitmapData;
/**
* @brief Create a image bitmap from data, default pixel format is BGR.
*
* @param data Pointer to the image bitmap data structure.
* @param handle Pointer to the image bitmap handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCreateImageBitmap(PHFImageBitmapData data, HFImageBitmap *handle);
/**
* @brief Create a image bitmap from file path, default pixel format is BGR.
*
* @param filePath The path to the image file.
* @param channels The number of channels in the image, only support 3 channels or 1 channel.
* @param handle Pointer to the image bitmap handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCreateImageBitmapFromFilePath(HPath filePath, HInt32 channels, HFImageBitmap *handle);
/**
* @brief Copy an image bitmap.
*
* @param handle Pointer to the image bitmap handle.
* @param copyHandle Pointer to the image bitmap handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageBitmapCopy(HFImageBitmap handle, HFImageBitmap *copyHandle);
/**
* @brief Release the image bitmap.
*
* @param handle Pointer to the image bitmap handle.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFReleaseImageBitmap(HFImageBitmap handle);
/**
* @brief Create a image stream from image bitmap.
*
* @param handle Pointer to the image bitmap handle.
* @param rotation The rotation angle of the image.
* @param streamHandle Pointer to the image stream handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCreateImageStreamFromImageBitmap(HFImageBitmap handle, HFRotation rotation, HFImageStream *streamHandle);
/**
* @brief Create a image bitmap from image stream.
*
* @param streamHandle Pointer to the image stream handle.
* @param handle Pointer to the image bitmap handle that will be returned.
* @param is_rotate Whether to rotate the image.
* @param scale The scale of the image.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCreateImageBitmapFromImageStreamProcess(HFImageStream streamHandle, HFImageBitmap *handle, int is_rotate,
float scale);
/**
* @brief Write the image bitmap to a file.
*
* @param handle Pointer to the image bitmap handle.
* @param filePath The path to the image file.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageBitmapWriteToFile(HFImageBitmap handle, HPath filePath);
/**
* @brief Draw a rectangle on the image bitmap.
*
* @param handle Pointer to the image bitmap handle.
* @param rect The rectangle to be drawn.
* @param color The color of the rectangle.
* @param thickness The thickness of the rectangle.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageBitmapDrawRect(HFImageBitmap handle, HFaceRect rect, HColor color, HInt32 thickness);
/**
* @brief Draw a circle on the image bitmap.
*
* @param handle Pointer to the image bitmap handle.
* @param point The center point of the circle.
* @param radius The radius of the circle.
* @param color The color of the circle.
* @param thickness The thickness of the circle.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageBitmapDrawCircleF(HFImageBitmap handle, HPoint2f point, HInt32 radius, HColor color, HInt32 thickness);
HYPER_CAPI_EXPORT extern HResult HFImageBitmapDrawCircle(HFImageBitmap handle, HPoint2i point, HInt32 radius, HColor color, HInt32 thickness);
/**
* @brief Get the data of the image bitmap.
*
* @param handle Pointer to the image bitmap handle.
* @param data Pointer to the image bitmap data structure.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageBitmapGetData(HFImageBitmap handle, PHFImageBitmapData data);
/**
* @brief Show the image bitmap.
*
* @param handle Pointer to the image bitmap handle, must rely on opencv's gui functionality
* @param title The title of the image.
* @param delay The delay time of the image.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFImageBitmapShow(HFImageBitmap handle, HString title, HInt32 delay);
/************************************************************************
* Resource Function
************************************************************************/
@@ -104,6 +265,14 @@ HYPER_CAPI_EXPORT extern HResult HFReleaseImageStream(HFImageStream streamHandle
* */
HYPER_CAPI_EXPORT extern HResult HFLaunchInspireFace(HPath resourcePath);
/**
* @brief Reload InspireFace SDK
 * Reload the InspireFace SDK: releases all currently allocated resources and loads them again from the given resource file.
* @param resourcePath Initializes the path to the resource file that needs to be loaded
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFReloadInspireFace(HPath resourcePath);
/**
* @brief Terminate InspireFace SDK
* Terminate the InspireFace SDK, releasing all allocated resources.
@@ -112,8 +281,92 @@ HYPER_CAPI_EXPORT extern HResult HFLaunchInspireFace(HPath resourcePath);
* */
HYPER_CAPI_EXPORT extern HResult HFTerminateInspireFace();
/**
* @brief Query InspireFace SDK launch status
* Query the launch status of the InspireFace SDK.
* @param status Pointer to the status variable that will be returned.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFQueryInspireFaceLaunchStatus(HInt32 *status);
/************************************************************************
* FaceContext
* Extended Interface Based on Third-party Hardware Devices
*
* According to different manufacturers' devices, manufacturers typically perform deep customization and optimization, such as neural network
* inference computation, geometric image acceleration computation, and deeply customized device interfaces, etc. These types of functionalities are
* usually difficult to abstract, so they are placed in extension module APIs, involving hybrid computing, heterogeneous computing, multi-device
* computing, and other features.
************************************************************************/
/**
* @brief Set the rockchip dma heap path
* By default, we have already configured the DMA Heap address used by RGA on RK devices.
* If you wish to customize this address, you can modify it through this API.
* @param path The path to the rockchip dma heap
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFSetExpansiveHardwareRockchipDmaHeapPath(HPath path);
/**
* @brief Query the rockchip dma heap path
* @param path Pointer to a pre-allocated character array that will store the returned path.
* The array should be at least 256 bytes in size.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFQueryExpansiveHardwareRockchipDmaHeapPath(HString path);
/**
* @brief Enum for Apple CoreML inference mode.
*/
typedef enum HFAppleCoreMLInferenceMode {
HF_APPLE_COREML_INFERENCE_MODE_CPU = 0, ///< CPU Only.
HF_APPLE_COREML_INFERENCE_MODE_GPU = 1, ///< GPU first.
HF_APPLE_COREML_INFERENCE_MODE_ANE = 2, ///< Automatic selection, ANE first.
} HFAppleCoreMLInferenceMode;
/**
* @brief Set the Apple CoreML inference mode, must be called before HFCreateInspireFaceSession.
* @param mode The inference mode to be set.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFSetAppleCoreMLInferenceMode(HFAppleCoreMLInferenceMode mode);
/**
* @brief Set the CUDA device id, must be called before HFCreateInspireFaceSession.
* @param device_id The device id to be set.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFSetCudaDeviceId(int32_t device_id);
/**
* @brief Get the CUDA device id, must be called after HFCreateInspireFaceSession.
* @param device_id Pointer to the device id to be returned.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFGetCudaDeviceId(int32_t *device_id);
/**
* @brief Print the CUDA device information.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFPrintCudaDeviceInfo();
/**
* @brief Get the number of CUDA devices.
* @param num_devices Pointer to the number of CUDA devices to be returned.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFGetNumCudaDevices(int32_t *num_devices);
/**
* @brief Check if the CUDA device is supported.
* @param support The support flag to be checked.
* @return HResult indicating the success or failure of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFCheckCudaDeviceSupport(int32_t *is_support);
/************************************************************************
* FaceSession
************************************************************************/
/**
@@ -131,6 +384,7 @@ typedef struct HFSessionCustomParameter {
HInt32 enable_face_quality; ///< Enable face quality detection feature.
HInt32 enable_face_attribute; ///< Enable face attribute prediction feature.
HInt32 enable_interaction_liveness; ///< Enable interaction for liveness detection feature.
HInt32 enable_detect_mode_landmark; ///< Enable landmark detection in detection mode
} HFSessionCustomParameter, *PHFSessionCustomParameter;
/**
@@ -253,6 +507,33 @@ HYPER_CAPI_EXPORT extern HResult HFSessionSetFilterMinimumFacePixelSize(HFSessio
*/
HYPER_CAPI_EXPORT extern HResult HFSessionSetFaceDetectThreshold(HFSession session, HFloat threshold);
/**
* @brief Set the track mode smooth ratio in the session. default value is 0.05
*
* @param session Handle to the session.
* @param ratio The smooth ratio value.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFSessionSetTrackModeSmoothRatio(HFSession session, HFloat ratio);
/**
* @brief Set the track mode num smooth cache frame in the session. default value is 5
*
* @param session Handle to the session.
* @param num The num smooth cache frame value.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFSessionSetTrackModeNumSmoothCacheFrame(HFSession session, HInt32 num);
/**
* @brief Set the track model detect interval in the session. default value is 20
*
* @param session Handle to the session.
* @param num The detect interval value.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFSessionSetTrackModeDetectInterval(HFSession session, HInt32 num);
/**
* @brief Run face tracking in the session.
*
@@ -311,6 +592,29 @@ HYPER_CAPI_EXPORT extern HResult HFGetNumOfFaceDenseLandmark(HPInt32 num);
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceDenseLandmarkFromFaceToken(HFFaceBasicToken singleFace, HPoint2f *landmarks, HInt32 num);
/**
* @brief Get the five key points from the face token.
* @param singleFace Basic token representing a single face.
* @param landmarks Pre-allocated memory address of the array for 2D floating-point coordinates.
* @param num Number of landmark points
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceFiveKeyPointsFromFaceToken(HFFaceBasicToken singleFace, HPoint2f *landmarks, HInt32 num);
/**
* @brief Set the enable cost spend
* @param value The enable cost spend value
* @return int32_t Status code of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFSessionSetEnableTrackCostSpend(HFSession session, int value);
/**
* @brief Print the cost spend
* @param session The session handle
* @return int32_t Status code of the operation.
* */
HYPER_CAPI_EXPORT extern HResult HFSessionPrintTrackCostSpend(HFSession session);
/************************************************************************
* Face Recognition
************************************************************************/
@@ -348,6 +652,17 @@ HYPER_CAPI_EXPORT extern HResult HFFaceFeatureExtract(HFSession session, HFImage
*/
HYPER_CAPI_EXPORT extern HResult HFFaceFeatureExtractCpy(HFSession session, HFImageStream streamHandle, HFFaceBasicToken singleFace, HPFloat feature);
/**
* @brief Get the face alignment image.
* @param session Handle to the session.
* @param streamHandle Handle to the data buffer representing the camera stream component.
* @param singleFace Basic token representing a single face.
* @param handle Pointer to the handle that will be returned.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFaceGetFaceAlignmentImage(HFSession session, HFImageStream streamHandle, HFFaceBasicToken singleFace,
HFImageBitmap *handle);
/************************************************************************
* Feature Hub
************************************************************************/
@@ -361,19 +676,25 @@ typedef enum HFSearchMode {
HF_SEARCH_MODE_EXHAUSTIVE, // Exhaustive mode: Searches until the best match is found.
} HFSearchMode;
/**
* @brief Primary key mode for face feature management.
*/
typedef enum HFPKMode {
HF_PK_AUTO_INCREMENT = 0, ///< Auto-increment mode for primary key.
HF_PK_MANUAL_INPUT, ///< Manual input mode for primary key.
} HFPKMode;
/**
* @brief Struct for database configuration.
*
* This struct holds the configuration settings for using a database in the face recognition
* context.
* This struct holds the configuration settings for using a database in the face recognition context.
*/
typedef struct HFFeatureHubConfiguration {
HInt32 featureBlockNum; ///< The order of magnitude of face feature database is N * 512, and 20
///< is recommended by default
HInt32 enablePersistence; ///< Flag to enable or disable the use of the database.
HString dbPath; ///< Path to the database file.
float searchThreshold; ///< Threshold for face search
HFSearchMode searchMode; ///< Mode of face search
HFPKMode primaryKeyMode; ///< Primary key mode(The id increment mode is recommended)
HInt32 enablePersistence; ///< Flag to enable or disable the use of the database.
HString persistenceDbPath; ///< Path to the database file.
float searchThreshold; ///< Threshold for face search
HFSearchMode searchMode; ///< Mode of face search
} HFFeatureHubConfiguration;
/**
@@ -400,9 +721,9 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubDataDisable();
* This struct associates a custom identifier and a tag with a specific face feature.
*/
typedef struct HFFaceFeatureIdentity {
HInt32 customId; ///< Custom identifier for the face feature.
HString tag; ///< Tag associated with the face feature.
HFaceId id; ///< If you use automatic assignment id mode when inserting, ignore it.
PHFFaceFeature feature; ///< Pointer to the face feature.
// HString tag; ///< Not supported yet
} HFFaceFeatureIdentity, *PHFFaceFeatureIdentity;
/**
@@ -411,7 +732,7 @@ typedef struct HFFaceFeatureIdentity {
typedef struct HFSearchTopKResults {
HInt32 size; ///< The number of faces searched
HPFloat confidence; ///< Search confidence(it has already been filtered once by the threshold)
HPInt32 customIds; ///< fACE customIds
HPFaceId ids; ///< Searched face ids
} HFSearchTopKResults, *PHFSearchTopKResults;
/**
@@ -428,15 +749,68 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearchThresholdSetting(float th
/**
* @brief Perform a one-to-one comparison of two face features.
* Result is a cosine similarity score, not a percentage similarity.
*
* @param session Handle to the session.
* @param feature1 The first face feature for comparison.
* @param feature2 The second face feature for comparison.
* @param result Pointer to the floating-point value where the comparison result will be stored.
* The result is a cosine similarity score, not a percentage similarity.
* The score ranges from -1 to 1, where 1 indicates identical features,
* 0 indicates orthogonal features, and -1 indicates opposite features.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFaceComparison(HFFaceFeature feature1, HFFaceFeature feature2, HPFloat result);
/**
* @brief Get recommended cosine threshold from loaded resource.
* Use it to determine face similarity. Note: it's just a reference and may not be optimal for your task.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFGetRecommendedCosineThreshold(HPFloat threshold);
/**
* @brief Convert cosine similarity to percentage similarity.
* This is a nonlinear transformation function. You can adjust curve parameters to map the similarity distribution you need.
* @note The conversion parameters are primarily read from the Resource file configuration, as different models
* have different conversion parameters. The parameters provided in the Resource file are only reference
* values. If they do not meet your specific use case requirements, you can implement your own conversion
* function.
* @param similarity The cosine similarity score.
* @param result Pointer to the floating-point value where the percentage similarity will be stored.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFCosineSimilarityConvertToPercentage(HFloat similarity, HPFloat result);
/**
* @brief Similarity converter configuration.
*/
typedef struct HFSimilarityConverterConfig {
HFloat threshold; ///< If you think that the threshold for judging the same person using cosine is some value such as 0.42,
// you need to convert him to a percentage of 0.6(pass), you can modify it.
HFloat middleScore; ///< Cosine threshold converted to a percentage reference value,
// usually set 0.6 or 0.5, greater than it indicates similar, pass
HFloat steepness; ///< Steepness of the curve, usually set 8.0
HFloat outputMin; ///< Minimum value of output range, usually set 0.01
HFloat outputMax; ///< Maximum value of output range, usually set 1.0
} HFSimilarityConverterConfig, *PHFSimilarityConverterConfig;
/**
* @brief Update the similarity converter configuration.
* @note The default configuration is loaded from the resource file during initialization.
* This function allows you to override those default settings if needed.
* @param config The new similarity converter configuration to apply.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFUpdateCosineSimilarityConverter(HFSimilarityConverterConfig config);
/**
* @brief Get the similarity converter configuration.
* @param config Pointer to the similarity converter configuration to be filled.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFGetCosineSimilarityConverter(PHFSimilarityConverterConfig config);
/**
* @brief Get the length of the face feature.
*
@@ -451,7 +825,7 @@ HYPER_CAPI_EXPORT extern HResult HFGetFeatureLength(HPInt32 num);
* @param featureIdentity The face feature identity to be inserted.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity featureIdentity);
HYPER_CAPI_EXPORT extern HResult HFFeatureHubInsertFeature(HFFaceFeatureIdentity featureIdentity, HPFaceId allocId);
/**
* @brief Search for the most similar face feature in the features group.
@@ -480,7 +854,7 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceSearchTopK(HFFaceFeature search
* @param customId The custom ID of the feature to be removed.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceRemove(HInt32 customId);
HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceRemove(HFaceId id);
/**
* @brief Update a face feature identity in the features group.
@@ -497,7 +871,7 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubFaceUpdate(HFFaceFeatureIdentity fe
* @param identity Pointer to the face feature identity to be retrieved.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubGetFaceIdentity(HInt32 customId, PHFFaceFeatureIdentity identity);
HYPER_CAPI_EXPORT extern HResult HFFeatureHubGetFaceIdentity(HFaceId customId, PHFFaceFeatureIdentity identity);
/**
* @brief Get the count of face features in the features group.
@@ -514,6 +888,21 @@ HYPER_CAPI_EXPORT extern HResult HFFeatureHubGetFaceCount(HInt32 *count);
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubViewDBTable();
/**
* @brief Struct representing the existing ids in the database.
*/
typedef struct HFFeatureHubExistingIds {
HInt32 size; ///< The number of ids
HPFaceId ids; ///< The ids
} HFFeatureHubExistingIds, *PHFFeatureHubExistingIds;
/**
* @brief Get all ids in the database.
* @param ids Output parameter to store the ids.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFFeatureHubGetExistingIds(PHFFeatureHubExistingIds ids);
/************************************************************************
* Face Pipeline
************************************************************************/
@@ -632,32 +1021,32 @@ HYPER_CAPI_EXPORT extern HResult HFFaceQualityDetect(HFSession session, HFFaceBa
/**
* @brief Facial states in the face interaction module.
*/
typedef struct HFFaceIntereactionState {
typedef struct HFFaceInteractionState {
HInt32 num; ///< Number of faces detected.
HPFloat leftEyeStatusConfidence; ///< Left eye state: confidence close to 1 means open, close
///< to 0 means closed.
HPFloat rightEyeStatusConfidence; ///< Right eye state: confidence close to 1 means open, close
///< to 0 means closed.
} HFFaceIntereactionState, *PHFFaceIntereactionState;
} HFFaceInteractionState, *PHFFaceInteractionState;
/**
* @brief Get the prediction results of face interaction.
* @param session Handle to the session.
* @param result Facial state prediction results in the face interaction module.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceIntereactionStateResult(HFSession session, PHFFaceIntereactionState result);
HYPER_CAPI_EXPORT extern HResult HFGetFaceInteractionStateResult(HFSession session, PHFFaceInteractionState result);
/**
* @brief Actions detected in the face interaction module.
*/
typedef struct HFFaceIntereactionsActions {
typedef struct HFFaceInteractionsActions {
HInt32 num; ///< Number of actions detected.
HPInt32 normal; ///< Normal actions.
HPInt32 shake; ///< Shake actions.
HPInt32 jawOpen; ///< Jaw open actions.
HPInt32 headRiase; ///< Head raise actions.
HPInt32 headRaise; ///< Head raise actions.
HPInt32 blink; ///< Blink actions.
} HFFaceIntereactionsActions, *PHFFaceIntereactionsActions;
} HFFaceInteractionsActions, *PHFFaceInteractionsActions;
/**
* @brief Get the prediction results of face interaction actions.
@@ -665,7 +1054,7 @@ typedef struct HFFaceIntereactionsActions {
* @param actions Facial action prediction results in the face interaction module.
* @return HResult indicating success or failure of the function call.
*/
HYPER_CAPI_EXPORT extern HResult HFGetFaceIntereactionActionsResult(HFSession session, PHFFaceIntereactionsActions actions);
HYPER_CAPI_EXPORT extern HResult HFGetFaceInteractionActionsResult(HFSession session, PHFFaceInteractionsActions actions);
/**
* @brief Struct representing face attribute results.
*
@@ -729,6 +1118,21 @@ typedef struct HFInspireFaceVersion {
*/
HYPER_CAPI_EXPORT extern HResult HFQueryInspireFaceVersion(PHFInspireFaceVersion version);
/**
* @brief Struct representing the extended information of the InspireFace library.
*/
typedef struct HFInspireFaceExtendedInformation {
HChar information[256];
// TODO: Add more information
} HFInspireFaceExtendedInformation, *PHFInspireFaceExtendedInformation;
/**
* @brief Get the extended information of the InspireFace library.
*
* This function retrieves the extended information of the InspireFace library.
*/
HYPER_CAPI_EXPORT extern HResult HFQueryInspireFaceExtendedInformation(PHFInspireFaceExtendedInformation information);
/**
* @brief SDK built-in log level mode
* */
@@ -737,10 +1141,8 @@ typedef enum HFLogLevel {
HF_LOG_DEBUG, // Debug level for detailed system information mostly useful for developers
HF_LOG_INFO, // Information level for general system information about operational status
HF_LOG_WARN, // Warning level for non-critical issues that might need attention
HF_LOG_ERROR, // Error level for error events that might still allow the application to
// continue running
HF_LOG_FATAL // Fatal level for severe error events that will presumably lead the application
// to abort
HF_LOG_ERROR, // Error level for error events that might still allow the application to continue running
HF_LOG_FATAL // Fatal level for severe error events that will presumably lead the application to abort
} HFLogLevel;
/**
@@ -753,6 +1155,16 @@ HYPER_CAPI_EXPORT extern HResult HFSetLogLevel(HFLogLevel level);
* */
HYPER_CAPI_EXPORT extern HResult HFLogDisable();
/**
* @brief Print the log.
* @param level The log level.
* @param format The log format.
* @param ... The log arguments.
* @warning The maximum buffer size for log messages is 1024 bytes. Messages longer than this will be truncated.
* @return HResult indicating the success or failure of the operation.
*/
HYPER_CAPI_EXPORT extern HResult HFLogPrint(HFLogLevel level, HFormat format, ...);
/********************************DEBUG Utils****************************************/
/**
@@ -839,4 +1251,4 @@ HYPER_CAPI_EXPORT extern HResult HFDeBugGetUnreleasedStreams(HFImageStream *stre
}
#endif
#endif // HYPERFACEREPO_INSPIREFACE_H
#endif // INSPIREFACE_H

View File

@@ -1,19 +1,23 @@
//
// Created by tunm on 2023/10/3.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#ifndef HYPERFACEREPO_INSPIREFACE_INTERNAL_H
#define HYPERFACEREPO_INSPIREFACE_INTERNAL_H
#ifndef INSPIREFACE_INTERNAL_H
#define INSPIREFACE_INTERNAL_H
#include "face_context.h"
#include "face_session.h"
typedef struct HF_FaceAlgorithmSession {
inspire::FaceContext impl; ///< Implementation of the face context.
} HF_FaceAlgorithmSession; ///< Handle for managing face context.
inspire::FaceSession impl; ///< Implementation of the face context.
} HF_FaceAlgorithmSession; ///< Handle for managing face context.
typedef struct HF_CameraStream {
inspire::CameraStream impl; ///< Implementation of the camera stream.
} HF_CameraStream; ///< Handle for managing camera stream.
inspirecv::InspireImageProcess impl; ///< Implementation of the camera stream.
} HF_CameraStream; ///< Handle for managing camera stream.
typedef struct HF_ImageBitmap {
inspirecv::Image impl; ///< Implementation of the image bitmap.
} HF_ImageBitmap; ///< Handle for managing image bitmap.
#endif //HYPERFACEREPO_INSPIREFACE_INTERNAL_H
#endif // INSPIREFACE_INTERNAL_H

View File

@@ -1,28 +1,38 @@
//
// Created by tunm on 2023/10/3.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#ifndef HYPERFACEREPO_INTYPEDEF_H
#define HYPERFACEREPO_INTYPEDEF_H
#ifndef INSPIREFACE_INTYPEDEF_H
#define INSPIREFACE_INTYPEDEF_H
#include <stdint.h>
// clang-format off
typedef void* HPVoid; ///< Pointer to Void.
typedef void* HFImageStream; ///< Handle for image.
typedef void* HFSession; ///< Handle for context.
typedef void* HFImageBitmap; ///< Handle for image bitmap.
typedef long HLong; ///< Long integer.
typedef float HFloat; ///< Single-precision floating point.
typedef float* HPFloat; ///< Pointer to Single-precision floating point.
typedef float HFloat; ///< Single-precision floating point.
typedef float* HPFloat; ///< Pointer to Single-precision floating point.
typedef double HDouble; ///< Double-precision floating point.
typedef unsigned char HUInt8; ///< Unsigned 8-bit integer.
typedef unsigned char* HPUInt8; ///< Pointer to unsigned 8-bit integer.
typedef signed int HInt32; ///< Signed 32-bit integer.
typedef signed int HOption; ///< Signed 32-bit integer option.
typedef signed int* HPInt32; ///< Pointer to signed 32-bit integer.
typedef int64_t HFaceId; ///< Face ID type for non-Windows platforms
typedef int64_t* HPFaceId; ///< Pointer to Face ID type for non-Windows platforms
typedef long HResult; ///< Result code.
typedef char* HString; ///< String.
typedef const char* HPath; ///< Const String.
typedef const char* HFormat; ///< Const String.
typedef char HBuffer; ///< Character.
typedef char HChar; ///< Character.
typedef char* HPBuffer; ///< Pointer Character.
typedef long HSize; ///< Size
typedef long* HPSize; ///< Pointer Size
// clang-format on
typedef struct HFaceRect {
HInt32 x; ///< X-coordinate of the top-left corner of the rectangle.
@@ -36,4 +46,15 @@ typedef struct HPoint2f{
HFloat y; ///< Y-coordinate
} HPoint2f;
#endif //HYPERFACEREPO_INTYPEDEF_H
typedef struct HPoint2i{
HInt32 x; ///< X-coordinate
HInt32 y; ///< Y-coordinate
} HPoint2i;
typedef struct HColor {
HFloat r; ///< Red component
HFloat g; ///< Green component
HFloat b; ///< Blue component
} HColor;
#endif //INSPIREFACE_INTYPEDEF_H

View File

@@ -1,25 +0,0 @@
# ===================================================================================
# The InspireFace CMake configuration file
#
# ** File generated automatically, do not modify **
# Usage from an external project:
# In your CMakeLists.txt, add these lines:
#
# find_package(InspireFace REQUIRED)
# include_directories(${InspireFace_INCLUDE_DIRS}) # Not needed for CMake >= 2.8.11
# target_link_libraries(MY_TARGET_NAME ${InspireFace_LIBS})
#
#
#
# This file will define the following variables:
# - InspireFace_LIBS : The list of all imported targets for InspireFace modules.
# - InspireFace_INCLUDE_DIRS : The InspireFace include directories.
#
#
@PACKAGE_INIT@
set(InspireFace_LIBS "")
file(GLOB LIBS "@CMAKE_BINARY_DIR@/InspireFace/lib/*.*")
list(APPEND InspireFace_LIBS ${LIBS})
set(InspireFace_INCLUDE_DIRS "@CMAKE_BINARY_DIR@/InspireFace/include")

View File

@@ -1,15 +1,16 @@
//
// Created by tunm on 2023/9/17.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
// Include guard to prevent double inclusion of this header file
#pragma once
#ifndef HYPERFACEREPO_FACEDATATYPE_H
#define HYPERFACEREPO_FACEDATATYPE_H
#ifndef INSPIRE_FACE_FACEDATATYPE_H
#define INSPIRE_FACE_FACEDATATYPE_H
// Include the necessary header files
#include "../../data_type.h"
#include "../face_info/face_object.h"
#include "../face_info/face_object_internal.h"
// Define the namespace "inspire" for encapsulation
namespace inspire {
@@ -24,8 +25,8 @@ typedef struct Face3DAngle {
} Face3DAngle;
/**
* Struct to represent the rectangle coordinates of a face.
*/
* Struct to represent the rectangle coordinates of a face.
*/
typedef struct FaceRect {
int x; ///< X-coordinate of the top-left corner
int y; ///< Y-coordinate of the top-left corner
@@ -34,16 +35,16 @@ typedef struct FaceRect {
} FaceRect;
/**
* Struct to represent 2D point coordinates.
*/
* Struct to represent 2D point coordinates.
*/
typedef struct Point2F {
float x; ///< X-coordinate
float y; ///< Y-coordinate
} HPoint;
/**
* Struct to represent a 2D transformation matrix.
*/
* Struct to represent a 2D transformation matrix.
*/
typedef struct TransMatrix {
double m00; ///< Element (0,0) of the matrix
double m01; ///< Element (0,1) of the matrix
@@ -54,21 +55,22 @@ typedef struct TransMatrix {
} TransMatrix;
/**
* Struct to represent hyper face data.
*/
* Struct to represent hyper face data.
*/
typedef struct HyperFaceData {
int trackState; ///< Track state
int inGroupIndex; ///< Index within a group
int trackId; ///< Track ID
int trackCount; ///< Track count
FaceRect rect; ///< Face rectangle
TransMatrix trans; ///< Transformation matrix
Point2F keyPoints[5]; ///< Key points (e.g., landmarks)
Face3DAngle face3DAngle; ///< 3D face angles
float quality[5]; ///< Quality values for key points
Point2F densityLandmark[106]; ///< Face density landmark
int trackState; ///< Track state
int inGroupIndex; ///< Index within a group
int trackId; ///< Track ID
int trackCount; ///< Track count
FaceRect rect; ///< Face rectangle
TransMatrix trans; ///< Transformation matrix
Point2F keyPoints[5]; ///< Key points (e.g., landmarks)
Face3DAngle face3DAngle; ///< 3D face angles
float quality[5]; ///< Quality values for key points
Point2F densityLandmark[106]; ///< Face density landmark
int densityLandmarkEnable; ///< Density landmark enable
} HyperFaceData;
} // namespace inspire
} // namespace inspire
#endif //HYPERFACEREPO_FACEDATATYPE_H
#endif // INSPIRE_FACE_FACEDATATYPE_H

View File

@@ -1,14 +1,17 @@
//
// Created by tunm on 2023/9/17.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#ifndef INSPIRE_FACE_SERIALIZE_TOOLS_H
#define INSPIRE_FACE_SERIALIZE_TOOLS_H
#ifndef HYPERFACEREPO_DATATOOLS_H
#define HYPERFACEREPO_DATATOOLS_H
#include "opencv2/opencv.hpp"
#include "face_data_type.h"
#include "../face_info/face_object.h"
#include "../face_info/face_object_internal.h"
#include "herror.h"
#include "data_type.h"
#include "track_module/landmark/all.h"
#include <log.h>
// Define the namespace "inspire" for encapsulation
namespace inspire {
@@ -17,35 +20,26 @@ namespace inspire {
* @brief Print the transformation matrix.
* @param matrix The transformation matrix to print.
*/
inline void PrintTransMatrix(const TransMatrix& matrix) {
std::cout << "Transformation Matrix:" << std::endl;
std::cout << "m00: " << matrix.m00 << "\t";
std::cout << "m01: " << matrix.m01 << "\t";
std::cout << "tx: " << matrix.tx << std::endl;
std::cout << "m10: " << matrix.m10 << "\t";
std::cout << "m11: " << matrix.m11 << "\t";
std::cout << "ty: " << matrix.ty << std::endl;
inline void PrintTransformMatrix(const TransMatrix& matrix) {
INSPIRE_LOGI("Transformation Matrix:");
INSPIRE_LOGI("a: %f\tb: %f\ttx: %f", matrix.m00, matrix.m01, matrix.tx);
INSPIRE_LOGI("c: %f\td: %f\tty: %f", matrix.m10, matrix.m11, matrix.ty);
}
/**
* @brief Print HyperFaceData structure.
* @param data The HyperFaceData structure to print.
*/
inline void INSPIRE_API PrintHyperFaceData(const HyperFaceData& data) {
std::cout << "Track State: " << data.trackState << std::endl;
std::cout << "In Group Index: " << data.inGroupIndex << std::endl;
std::cout << "Track ID: " << data.trackId << std::endl;
std::cout << "Track Count: " << data.trackCount << std::endl;
inline void INSPIRE_API PrintHyperFaceDataDetail(const HyperFaceData& data) {
INSPIRE_LOGI("Track State: %d", data.trackState);
INSPIRE_LOGI("In Group Index: %d", data.inGroupIndex);
INSPIRE_LOGI("Track ID: %d", data.trackId);
INSPIRE_LOGI("Track Count: %d", data.trackCount);
std::cout << "Face Rectangle:" << std::endl;
std::cout << "x: " << data.rect.x << "\t";
std::cout << "y: " << data.rect.y << "\t";
std::cout << "width: " << data.rect.width << "\t";
std::cout << "height: " << data.rect.height << std::endl;
PrintTransMatrix(data.trans);
INSPIRE_LOGI("Face Rectangle:");
INSPIRE_LOGI("x: %f\ty: %f\twidth: %f\theight: %f", data.rect.x, data.rect.y, data.rect.width, data.rect.height);
PrintTransformMatrix(data.trans);
}
/**
@@ -54,34 +48,34 @@ inline void INSPIRE_API PrintHyperFaceData(const HyperFaceData& data) {
* @param group_index The group index.
* @return The converted HyperFaceData structure.
*/
inline HyperFaceData INSPIRE_API FaceObjectToHyperFaceData(const FaceObject& obj, int group_index = -1) {
inline HyperFaceData INSPIRE_API FaceObjectInternalToHyperFaceData(const FaceObjectInternal& obj, int group_index = -1) {
HyperFaceData data;
// Face rect
data.rect.x = obj.bbox_.x;
data.rect.y = obj.bbox_.y;
data.rect.width = obj.bbox_.width;
data.rect.height = obj.bbox_.height;
data.rect.x = obj.bbox_.GetX();
data.rect.y = obj.bbox_.GetY();
data.rect.width = obj.bbox_.GetWidth();
data.rect.height = obj.bbox_.GetHeight();
// Trans matrix
data.trans.m00 = obj.getTransMatrix().at<double>(0, 0);
data.trans.m01 = obj.getTransMatrix().at<double>(0, 1);
data.trans.m10 = obj.getTransMatrix().at<double>(1, 0);
data.trans.m11 = obj.getTransMatrix().at<double>(1, 1);
data.trans.tx = obj.getTransMatrix().at<double>(0, 2);
data.trans.ty = obj.getTransMatrix().at<double>(1, 2);
data.trans.m00 = obj.getTransMatrix().Get(0, 0);
data.trans.m01 = obj.getTransMatrix().Get(0, 1);
data.trans.m10 = obj.getTransMatrix().Get(1, 0);
data.trans.m11 = obj.getTransMatrix().Get(1, 1);
data.trans.tx = obj.getTransMatrix().Get(0, 2);
data.trans.ty = obj.getTransMatrix().Get(1, 2);
// KetPoints five
if (!obj.high_result.lmk.empty()) {
for (int i = 0; i < obj.high_result.lmk.size(); ++i) {
data.keyPoints[i].x = obj.high_result.lmk[i].x;
data.keyPoints[i].y = obj.high_result.lmk[i].y;
data.keyPoints[i].x = obj.high_result.lmk[i].GetX();
data.keyPoints[i].y = obj.high_result.lmk[i].GetY();
}
for (int i = 0; i < 5; ++i) {
data.quality[i] = obj.high_result.lmk_quality[i];
}
// LOGD("HIGHT");
// LOGD("HIGHT");
} else {
for (int i = 0; i < obj.keyPointFive.size(); ++i) {
data.keyPoints[i].x = obj.keyPointFive[i].x;
data.keyPoints[i].y = obj.keyPointFive[i].y;
data.keyPoints[i].x = obj.keyPointFive[i].GetX();
data.keyPoints[i].y = obj.keyPointFive[i].GetY();
}
for (int i = 0; i < 5; ++i) {
data.quality[i] = -1.0f;
@@ -96,15 +90,17 @@ inline HyperFaceData INSPIRE_API FaceObjectToHyperFaceData(const FaceObject& obj
data.face3DAngle.pitch = obj.high_result.pitch;
data.face3DAngle.roll = obj.high_result.roll;
data.face3DAngle.yaw = obj.high_result.yaw;
const auto &lmk = obj.landmark_smooth_aux_.back();
for (size_t i = 0; i < lmk.size(); i++)
{
data.densityLandmark[i].x = lmk[i].x;
data.densityLandmark[i].y = lmk[i].y;
// Density Landmark
if (!obj.landmark_smooth_aux_.empty()) {
data.densityLandmarkEnable = 1;
const auto& lmk = obj.landmark_smooth_aux_.back();
for (size_t i = 0; i < FaceLandmarkAdapt::NUM_OF_LANDMARK; i++) {
data.densityLandmark[i].x = lmk[i].GetX();
data.densityLandmark[i].y = lmk[i].GetY();
}
} else {
data.densityLandmarkEnable = 0;
}
return data;
}
@@ -114,14 +110,8 @@ inline HyperFaceData INSPIRE_API FaceObjectToHyperFaceData(const FaceObject& obj
* @param trans The TransMatrix to convert.
* @return The converted cv::Mat.
*/
inline cv::Mat INSPIRE_API TransMatrixToMat(const TransMatrix& trans) {
cv::Mat mat(2, 3, CV_64F);
mat.at<double>(0, 0) = trans.m00;
mat.at<double>(0, 1) = trans.m01;
mat.at<double>(1, 0) = trans.m10;
mat.at<double>(1, 1) = trans.m11;
mat.at<double>(0, 2) = trans.tx;
mat.at<double>(1, 2) = trans.ty;
inline inspirecv::TransformMatrix INSPIRE_API TransformMatrixToInternalMatrix(const TransMatrix& trans) {
inspirecv::TransformMatrix mat = inspirecv::TransformMatrix::Create(trans.m00, trans.m01, trans.tx, trans.m10, trans.m11, trans.ty);
return mat;
}
@@ -130,8 +120,8 @@ inline cv::Mat INSPIRE_API TransMatrixToMat(const TransMatrix& trans) {
* @param faceRect The FaceRect to convert.
* @return The converted cv::Rect.
*/
inline cv::Rect INSPIRE_API FaceRectToRect(const FaceRect& faceRect) {
return {faceRect.x, faceRect.y, faceRect.width, faceRect.height};
inline inspirecv::Rect2i INSPIRE_API FaceRectToInternalRect(const FaceRect& faceRect) {
return inspirecv::Rect2i(faceRect.x, faceRect.y, faceRect.width, faceRect.height);
}
/**
@@ -139,8 +129,8 @@ inline cv::Rect INSPIRE_API FaceRectToRect(const FaceRect& faceRect) {
* @param point The Point2F to convert.
* @return The converted cv::Point2f.
*/
inline cv::Point2f INSPIRE_API HPointToPoint2f(const Point2F& point) {
return {point.x, point.y};
inline inspirecv::Point2f INSPIRE_API HPointToInternalPoint2f(const Point2F& point) {
return inspirecv::Point2f(point.x, point.y);
}
/**
@@ -149,7 +139,7 @@ inline cv::Point2f INSPIRE_API HPointToPoint2f(const Point2F& point) {
* @param byteArray The output byte stream.
* @return The result code.
*/
inline int32_t INSPIRE_API SerializeHyperFaceData(const HyperFaceData& data, ByteArray& byteArray) {
inline int32_t INSPIRE_API RunSerializeHyperFaceData(const HyperFaceData& data, ByteArray& byteArray) {
byteArray.reserve(sizeof(data));
// Serialize the HyperFaceData structure itself
@@ -165,7 +155,7 @@ inline int32_t INSPIRE_API SerializeHyperFaceData(const HyperFaceData& data, Byt
* @param data The output HyperFaceData structure.
* @return The result code.
*/
inline int32_t INSPIRE_API DeserializeHyperFaceData(const ByteArray& byteArray, HyperFaceData &data) {
inline int32_t INSPIRE_API RunDeserializeHyperFaceData(const ByteArray& byteArray, HyperFaceData& data) {
// Check if the byte stream size is sufficient
if (byteArray.size() >= sizeof(data)) {
// Copy data from the byte stream to the HyperFaceData structure
@@ -185,7 +175,7 @@ inline int32_t INSPIRE_API DeserializeHyperFaceData(const ByteArray& byteArray,
* @param data The output HyperFaceData structure.
* @return The result code.
*/
inline int32_t INSPIRE_API DeserializeHyperFaceData(const char* byteArray, size_t byteCount, HyperFaceData& data) {
inline int32_t INSPIRE_API RunDeserializeHyperFaceData(const char* byteArray, size_t byteCount, HyperFaceData& data) {
// Check if the byte stream size is sufficient
if (byteCount >= sizeof(data)) {
// Copy data from the byte stream to the HyperFaceData structure
@@ -198,5 +188,5 @@ inline int32_t INSPIRE_API DeserializeHyperFaceData(const char* byteArray, size_
return HSUCCEED;
}
} // namespace hyper
#endif //HYPERFACEREPO_DATATOOLS_H
} // namespace inspire
#endif // INSPIRE_FACE_SERIALIZE_TOOLS_H

View File

@@ -1,10 +1,13 @@
//
// Created by tunm on 2023/8/29.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#ifndef HYPERFACEREPO_FO_ALL_H
#define HYPERFACEREPO_FO_ALL_H
#ifndef INSPIRE_FACE_FO_ALL_H
#define INSPIRE_FACE_FO_ALL_H
#include "face_object.h"
#include "face_object_internal.h"
#include "face_action_data.h"
#include "face_process.h"
#endif //HYPERFACEREPO_FO_ALL_H
#endif // INSPIRE_FACE_FO_ALL_H

View File

@@ -1,33 +1,27 @@
#ifndef INSPIRSE_FACE_FACE_ACTION_H
#define INSPIRSE_FACE_FACE_ACTION_H
#ifndef INSPIRSE_FACE_FACE_ACTION_DATA_H
#define INSPIRSE_FACE_FACE_ACTION_DATA_H
#include <iostream>
#include "opencv2/opencv.hpp"
#include <inspirecv/inspirecv.h>
#include "middleware/utils.h"
#include "data_type.h"
#include "track_module/landmark/face_landmark.h"
#include "track_module/landmark/face_landmark_adapt.h"
namespace inspire {
enum FACE_ACTION {
NORMAL = 0,
SHAKE = 0,
BLINK = 1,
JAW_OPEN = 2,
RAISE_HEAD = 3
};
enum FACE_ACTIONS { ACT_NORMAL = 0, ACT_SHAKE = 0, ACT_BLINK = 1, ACT_JAW_OPEN = 2, ACT_RAISE_HEAD = 3 };
typedef struct FaceActions{
typedef struct FaceActionList {
int normal = 0;
int shake = 0;
int blink = 0;
int jawOpen = 0;
int raiseHead = 0;
} FaceActions;
} FaceActionList;
class INSPIRE_API FaceActionAnalyse {
class INSPIRE_API FaceActionPredictor {
public:
FaceActionAnalyse(int record_list_length) {
FaceActionPredictor(int record_list_length) {
record_list.resize(record_list_length);
record_list_euler.resize(record_list_length);
record_list_eyes.resize(record_list_length);
@@ -35,9 +29,8 @@ public:
index = 0;
}
void RecordActionFrame(const std::vector<cv::Point2f> &landmark,
const cv::Vec3f &euler_angle,
const cv::Vec2f &eyes_status) {
void RecordActionFrame(const std::vector<inspirecv::Point2f> &landmark, const inspirecv::Vec3f &euler_angle,
const inspirecv::Vec2f &eyes_status) {
MoveRecordList();
record_list[0] = landmark;
record_list_euler[0] = euler_angle;
@@ -53,14 +46,14 @@ public:
record_list_eyes.clear();
record_list_eyes.resize(record_size);
index = 0;
}
}
FaceActions AnalysisFaceAction() {
FaceActions actionRecord;
FaceActionList AnalysisFaceAction() {
FaceActionList actionRecord;
actions.clear();
eye_state_list.clear();
if (index < record_list.size()) {
actions.push_back(NORMAL);
actions.push_back(ACT_NORMAL);
actionRecord.normal = 1;
} else {
for (int i = 0; i < record_list_eyes.size(); i++) {
@@ -70,15 +63,12 @@ public:
}
// count mouth aspect ratio
float mouth_widthwise_d =
PointDistance(record_list[0][FaceLandmark::MOUTH_LEFT_CORNER],
record_list[0][FaceLandmark::MOUTH_RIGHT_CORNER]);
float mouth_heightwise_d =
PointDistance(record_list[0][FaceLandmark::MOUTH_UPPER],
record_list[0][FaceLandmark::MOUTH_LOWER]);
float mouth_widthwise_d = record_list[0][FaceLandmarkAdapt::MOUTH_LEFT_CORNER].Distance(record_list[0][FaceLandmarkAdapt::MOUTH_RIGHT_CORNER]);
float mouth_heightwise_d = record_list[0][FaceLandmarkAdapt::MOUTH_UPPER].Distance(record_list[0][FaceLandmarkAdapt::MOUTH_LOWER]);
float mouth_aspect_ratio = mouth_heightwise_d / mouth_widthwise_d;
if (mouth_aspect_ratio > 0.3) {
actions.push_back(JAW_OPEN);
actions.push_back(ACT_JAW_OPEN);
actionRecord.jawOpen = 1;
}
@@ -92,9 +82,8 @@ public:
counter_eye_open += 1;
}
}
if (counter_eye_close > 0 && counter_eye_open > 2 &&
record_list_euler[0][1] > -6 && record_list_euler[0][0] < 6) {
actions.push_back(BLINK);
if (counter_eye_close > 0 && counter_eye_open > 2 && record_list_euler[0][1] > -6 && record_list_euler[0][0] < 6) {
actions.push_back(ACT_BLINK);
actionRecord.blink = 1;
Reset();
}
@@ -110,20 +99,19 @@ public:
}
}
if (counter_head_shake_left && counter_head_shake_right) {
actions.push_back(SHAKE);
actions.push_back(ACT_SHAKE);
actionRecord.shake = 1;
}
if (record_list_euler[0][0] > 10) {
actions.push_back(RAISE_HEAD);
actions.push_back(ACT_RAISE_HEAD);
actionRecord.raiseHead = 1;
}
}
return actionRecord;
}
std::vector<FACE_ACTION> GetActions() const {
std::vector<FACE_ACTIONS> GetActions() const {
return actions;
}
@@ -140,17 +128,16 @@ private:
}
}
std::vector<std::vector<cv::Point2f>> record_list;
std::vector<cv::Vec3f> record_list_euler;
std::vector<cv::Vec2f> record_list_eyes;
std::vector<std::pair<float, float>> eye_state_list; // pair left right
std::vector<std::vector<inspirecv::Point2f>> record_list;
std::vector<inspirecv::Vec3f> record_list_euler;
std::vector<inspirecv::Vec2f> record_list_eyes;
std::vector<std::pair<float, float>> eye_state_list; // pair left right
std::vector<float> mouth_state_list;
std::vector<FACE_ACTION> actions;
std::vector<FACE_ACTIONS> actions;
int record_size;
int index;
};
} // namespace inspire
} // namespace inspire
#endif
#endif // INSPIRSE_FACE_FACE_ACTION_DATA_H

View File

@@ -1,332 +0,0 @@
#ifndef FACE_INFO_H
#define FACE_INFO_H
#include <memory>
#include <utility>
//#include "face_action.h"
#include "opencv2/opencv.hpp"
#include "middleware/utils.h"
#include "data_type.h"
#include "face_process.h"
#include "track_module/quality/face_pose_quality.h"
#include "face_action.h"
namespace inspire {
enum TRACK_STATE {
UNTRACKING = -1, DETECT = 0, READY = 1, TRACKING = 2
};
class INSPIRE_API FaceObject {
public:
FaceObject(int instance_id, cv::Rect bbox, int num_landmark = 106) {
face_id_ = instance_id;
landmark_.resize(num_landmark);
bbox_ = std::move(bbox);
tracking_state_ = DETECT;
confidence_ = 1.0;
tracking_count_ = 0;
pose_euler_angle_.resize(3);
keyPointFive.resize(5);
face_action_ = std::make_shared<FaceActionAnalyse>(10);
}
void UpdateMatrix(const cv::Mat &matrix) {
assert(trans_matrix_.rows == 2 && trans_matrix_.cols == 3);
double a00 = matrix.at<double>(0, 0);
double a01 = matrix.at<double>(0, 1);
double a10 = matrix.at<double>(1, 0);
double a11 = matrix.at<double>(1, 1);
double t1x = matrix.at<double>(0, 2);
double t1y = matrix.at<double>(1, 2);
double m00 = trans_matrix_.at<double>(0, 0);
double m01 = trans_matrix_.at<double>(0, 1);
double m10 = trans_matrix_.at<double>(1, 0);
double m11 = trans_matrix_.at<double>(1, 1);
double t0x = trans_matrix_.at<double>(0, 2);
double t0y = trans_matrix_.at<double>(1, 2);
double n_m00 = a00 * m00 + a01 * m10;
double n_m01 = a00 * m01 + a01 * m11;
double n_m02 = a00 * t0x + a01 * t0y + t1x;
double n_m10 = a10 * m00 + a11 * m10;
double n_m11 = a10 * m01 + a11 * m11;
double n_m12 = a10 * t0x + a11 * t0y + t1y;
trans_matrix_.at<double>(0, 0) = n_m00;
trans_matrix_.at<double>(0, 1) = n_m01;
trans_matrix_.at<double>(0, 2) = n_m02;
trans_matrix_.at<double>(1, 0) = n_m10;
trans_matrix_.at<double>(1, 1) = n_m11;
trans_matrix_.at<double>(1, 2) = n_m12;
}
void SetLandmark(const std::vector<cv::Point2f> &lmk, bool update_rect = true,
bool update_matrix = true) {
if (lmk.size() != landmark_.size()) {
INSPIRE_LOGW("The SetLandmark function displays an exception indicating that the lmk number does not match");
return;
}
std::copy(lmk.begin(), lmk.end(), landmark_.begin());
DynamicSmoothParamUpdate(landmark_, landmark_smooth_aux_, 106 * 2, 0.06);
// cv::Vec3d euler_angle;
EstimateHeadPose(landmark_, euler_angle_);
// DynamicSmoothParamUpdate(landmark_, landmark_smooth_aux_, 106 * 2, 0.06);
if (update_rect)
bbox_ = cv::boundingRect(lmk);
if (update_matrix && tracking_state_ == TRACKING) {
// pass
}
keyPointFive[0] = landmark_[55];
keyPointFive[1] = landmark_[105];
keyPointFive[2] = landmark_[69];
keyPointFive[3] = landmark_[45];
keyPointFive[4] = landmark_[50];
}
void setAlignMeanSquareError(const std::vector<cv::Point2f> &lmk_5) {
float src_pts[] = {30.2946, 51.6963, 65.5318, 51.5014, 48.0252,
71.7366, 33.5493, 92.3655, 62.7299, 92.2041};
for (int i = 0; i < 5; i++) {
*(src_pts + 2 * i) += 8.0;
}
float sum = 0;
for (int i = 0; i < lmk_5.size(); i++) {
float l2 = L2norm(src_pts[i * 2 + 0], src_pts[i * 2 + 1], lmk_5[i].x, lmk_5[i].y);
sum += l2;
}
align_mse_ = sum / 5.0f;
}
// 增加跟踪次数
void IncrementTrackingCount() {
tracking_count_++;
}
// 获取跟踪次数
int GetTrackingCount() const {
return tracking_count_;
}
float GetAlignMSE() const { return align_mse_; }
std::vector<cv::Point2f> GetLanmdark() const { return landmark_; }
cv::Rect GetRect() const { return bbox_; }
cv::Rect GetRectSquare(float padding_ratio = 0.0) const {
int cx = bbox_.x + bbox_.width / 2;
int cy = bbox_.y + bbox_.height / 2;
int R = std::max(bbox_.width, bbox_.height) / 2;
int R_padding = static_cast<int>(R * (1 + padding_ratio));
int x1 = cx - R_padding;
int y1 = cy - R_padding;
int x2 = cx + R_padding;
int y2 = cy + R_padding;
int width = x2 - x1;
int height = y2 - y1;
assert(width > 0);
assert(height > 0);
assert(height == width);
cv::Rect box_square(x1, y1, width, height);
return box_square;
}
FaceActions UpdateFaceAction() {
cv::Vec3f euler(high_result.pitch, high_result.yaw, high_result.roll);
cv::Vec2f eyes(left_eye_status_.back(), right_eye_status_.back());
face_action_->RecordActionFrame(landmark_, euler, eyes);
return face_action_->AnalysisFaceAction();
}
void DisableTracking() { tracking_state_ = UNTRACKING; }
void EnableTracking() { tracking_state_ = TRACKING; }
void ReadyTracking() { tracking_state_ = READY; }
TRACK_STATE TrackingState() const { return tracking_state_; }
float GetConfidence() const { return confidence_; }
void SetConfidence(float confidence) { confidence_ = confidence; }
int GetTrackingId() const { return face_id_; }
const cv::Mat &getTransMatrix() const { return trans_matrix_; }
void setTransMatrix(const cv::Mat &transMatrix) {
transMatrix.copyTo(trans_matrix_);
}
static float L2norm(float x0, float y0, float x1, float y1) {
return sqrt((x0 - x1) * (x0 - x1) + (y0 - y1) * (y0 - y1));
}
void RequestFaceAction(
std::vector<cv::Point2f> &landmarks,
std::vector<std::vector<cv::Point2f>> &landmarks_lastNframes,
int lm_length, float h) {
int n = 5;
std::vector<cv::Point2f> landmarks_temp;
landmarks_temp.assign(landmarks.begin(), landmarks.end());
if (landmarks_lastNframes.size() == n) {
for (int i = 0; i < lm_length / 2; i++) {
float sum_d = 1;
float max_d = 0;
for (int j = 0; j < n; j++) {
float d = L2norm(landmarks_temp[i].x, landmarks_temp[i].y,
landmarks_lastNframes[j][i].x,
landmarks_lastNframes[j][i].y);
if (d > max_d)
max_d = d;
}
for (int j = 0; j < n; j++) {
float d = exp(-max_d * (n - j) * h);
sum_d += d;
landmarks[i].x = landmarks[i].x + d * landmarks_lastNframes[j][i].x;
landmarks[i].y = landmarks[i].y + d * landmarks_lastNframes[j][i].y;
}
landmarks[i].x = landmarks[i].x / sum_d;
landmarks[i].y = landmarks[i].y / sum_d;
}
}
std::vector<cv::Point2f> landmarks_frame;
for (int i = 0; i < lm_length / 2; i++) {
landmarks_frame.push_back(cv::Point2f(landmarks[i].x, landmarks[i].y));
}
landmarks_lastNframes.push_back(landmarks_frame);
if (landmarks_lastNframes.size() > 5)
landmarks_lastNframes.erase(landmarks_lastNframes.begin());
}
void DynamicSmoothParamUpdate(
std::vector<cv::Point2f> &landmarks,
std::vector<std::vector<cv::Point2f>> &landmarks_lastNframes,
int lm_length, float h) {
int n = 5;
std::vector<cv::Point2f> landmarks_temp;
landmarks_temp.assign(landmarks.begin(), landmarks.end());
if (landmarks_lastNframes.size() == n) {
for (int i = 0; i < lm_length / 2; i++) {
float sum_d = 1;
float max_d = 0;
for (int j = 0; j < n; j++) {
float d = L2norm(landmarks_temp[i].x, landmarks_temp[i].y,
landmarks_lastNframes[j][i].x,
landmarks_lastNframes[j][i].y);
if (d > max_d)
max_d = d;
}
for (int j = 0; j < n; j++) {
float d = exp(-max_d * (n - j) * h);
sum_d += d;
landmarks[i].x = landmarks[i].x + d * landmarks_lastNframes[j][i].x;
landmarks[i].y = landmarks[i].y + d * landmarks_lastNframes[j][i].y;
}
landmarks[i].x = landmarks[i].x / sum_d;
landmarks[i].y = landmarks[i].y / sum_d;
}
}
std::vector<cv::Point2f> landmarks_frame;
for (int i = 0; i < lm_length / 2; i++) {
landmarks_frame.push_back(cv::Point2f(landmarks[i].x, landmarks[i].y));
}
landmarks_lastNframes.push_back(landmarks_frame);
if (landmarks_lastNframes.size() > 5)
landmarks_lastNframes.erase(landmarks_lastNframes.begin());
}
public:
std::vector<cv::Point2f> landmark_;
std::vector<std::vector<cv::Point2f>> landmark_smooth_aux_;
cv::Rect bbox_;
cv::Vec3f euler_angle_;
std::vector<float> pose_euler_angle_;
float align_mse_{};
const cv::Vec3f &getEulerAngle() const { return euler_angle_; }
const std::vector<float> &getPoseEulerAngle() const { return pose_euler_angle_; }
void setPoseEulerAngle(const std::vector<float> &poseEulerAngle) {
pose_euler_angle_[0] = poseEulerAngle[0];
pose_euler_angle_[1] = poseEulerAngle[1];
pose_euler_angle_[2] = poseEulerAngle[2];
if (abs(pose_euler_angle_[0]) < 0.5 && abs(pose_euler_angle_[1]) < 0.48) {
is_standard_ = true;
}
}
bool isStandard() const {
return is_standard_;
}
const cv::Rect &getBbox() const { return bbox_; }
std::vector<cv::Point2f> getRotateLandmark(int height, int width, int rotate = 0) {
if (rotate != 0) {
std::vector<cv::Point2f> result = RotatePoints(landmark_, rotate, cv::Size(height, width));
return result;
} else {
return GetLanmdark();
}
}
cv::Rect getRotateBbox(int height, int width, int rotate = 0, bool use_flip = false) {
if (rotate != 0) {
cv::Rect src_bbox = bbox_;
std::vector<cv::Point2f> points;
cv::Rect trans_rect;
RotateRect(src_bbox, points, trans_rect, rotate, cv::Size(height, width));
if (use_flip)
trans_rect = flipRectWidth(trans_rect, cv::Size(width, height));
return trans_rect;
} else {
return getBbox();
}
}
void setBbox(const cv::Rect &bbox) { bbox_ = bbox; }
cv::Mat trans_matrix_;
float confidence_;
cv::Rect detect_bbox_;
int tracking_count_; // 跟踪次数
bool is_standard_;
FacePoseQualityResult high_result;
FaceProcess faceProcess;
std::vector<Point2f> keyPointFive;
void setId(int id) {
face_id_ = id;
}
std::vector<float> left_eye_status_;
std::vector<float> right_eye_status_;
private:
TRACK_STATE tracking_state_;
std::shared_ptr<FaceActionAnalyse> face_action_;
int face_id_;
};
typedef std::vector<FaceObject> FaceObjectList;
} // namespace hyper
#endif // FACE_INFO_H

View File

@@ -0,0 +1,273 @@
#ifndef INSPIRE_FACE_FACE_INFO_INTERNAL_H
#define INSPIRE_FACE_FACE_INFO_INTERNAL_H
#include <memory>
#include <utility>
#include <inspirecv/inspirecv.h>
#include "middleware/utils.h"
#include "data_type.h"
#include "face_process.h"
#include "face_action_data.h"
#include "track_module/quality/face_pose_quality_adapt.h"
namespace inspire {
enum ISF_TRACK_STATE { ISF_UNTRACKING = -1, ISF_DETECT = 0, ISF_READY = 1, ISF_TRACKING = 2 };
class INSPIRE_API FaceObjectInternal {
public:
FaceObjectInternal(int instance_id, inspirecv::Rect2i bbox, int num_landmark = 106) {
face_id_ = instance_id;
landmark_.resize(num_landmark);
bbox_ = std::move(bbox);
tracking_state_ = ISF_DETECT;
confidence_ = 1.0;
tracking_count_ = 0;
pose_euler_angle_.resize(3);
keyPointFive.resize(5);
face_action_ = std::make_shared<FaceActionPredictor>(10);
num_of_dense_landmark_ = num_landmark;
}
void SetLandmark(const std::vector<inspirecv::Point2f> &lmk, bool update_rect = true, bool update_matrix = true, float h = 0.06f, int n = 5,
int num_of_lmk = 106 * 2) {
// if (lmk.size() != landmark_.size()) {
// INSPIRE_LOGW("The SetLandmark function displays an exception indicating that the lmk number does not match");
// return;
// }
std::copy(lmk.begin(), lmk.end(), landmark_.begin());
DynamicSmoothParamUpdate(landmark_, landmark_smooth_aux_, num_of_lmk, h, n);
// std::cout << "smooth ratio: " << h << " num smooth cache frame: " << n << std::endl;
// cv::Vec3d euler_angle;
// EstimateHeadPose(landmark_, euler_angle_);
// DynamicSmoothParamUpdate(landmark_, landmark_smooth_aux_, 106 * 2, 0.06);
if (update_rect)
bbox_ = inspirecv::MinBoundingRect(lmk).As<int>();
if (update_matrix && tracking_state_ == ISF_TRACKING) {
// pass
}
keyPointFive[0] = landmark_[55];
keyPointFive[1] = landmark_[105];
keyPointFive[2] = landmark_[69];
keyPointFive[3] = landmark_[45];
keyPointFive[4] = landmark_[50];
}
void setAlignMeanSquareError(const std::vector<inspirecv::Point2f> &lmk_5) {
float src_pts[] = {30.2946, 51.6963, 65.5318, 51.5014, 48.0252, 71.7366, 33.5493, 92.3655, 62.7299, 92.2041};
for (int i = 0; i < 5; i++) {
*(src_pts + 2 * i) += 8.0;
}
float sum = 0;
for (int i = 0; i < lmk_5.size(); i++) {
float l2 = L2norm(src_pts[i * 2 + 0], src_pts[i * 2 + 1], lmk_5[i].GetX(), lmk_5[i].GetY());
sum += l2;
}
align_mse_ = sum / 5.0f;
}
// Increment tracking count
void IncrementTrackingCount() {
tracking_count_++;
}
// Get tracking count
int GetTrackingCount() const {
return tracking_count_;
}
float GetAlignMSE() const {
return align_mse_;
}
std::vector<inspirecv::Point2f> GetLanmdark() const {
return landmark_;
}
inspirecv::Rect2i GetRect() const {
return bbox_;
}
inspirecv::Rect2i GetRectSquare(float padding_ratio = 0.0) const {
int cx = bbox_.GetX() + bbox_.GetWidth() / 2;
int cy = bbox_.GetY() + bbox_.GetHeight() / 2;
int R = std::max(bbox_.GetWidth(), bbox_.GetHeight()) / 2;
int R_padding = static_cast<int>(R * (1 + padding_ratio));
int x1 = cx - R_padding;
int y1 = cy - R_padding;
int x2 = cx + R_padding;
int y2 = cy + R_padding;
int width = x2 - x1;
int height = y2 - y1;
assert(width > 0);
assert(height > 0);
assert(height == width);
inspirecv::Rect2i box_square(x1, y1, width, height);
return box_square;
}
FaceActionList UpdateFaceAction() {
inspirecv::Vec3f euler{high_result.pitch, high_result.yaw, high_result.roll};
inspirecv::Vec2f eyes{left_eye_status_.back(), right_eye_status_.back()};
face_action_->RecordActionFrame(landmark_, euler, eyes);
return face_action_->AnalysisFaceAction();
}
void DisableTracking() {
tracking_state_ = ISF_UNTRACKING;
}
void EnableTracking() {
tracking_state_ = ISF_TRACKING;
}
void ReadyTracking() {
tracking_state_ = ISF_READY;
}
ISF_TRACK_STATE TrackingState() const {
return tracking_state_;
}
float GetConfidence() const {
return confidence_;
}
void SetConfidence(float confidence) {
confidence_ = confidence;
}
int GetTrackingId() const {
return face_id_;
}
const inspirecv::TransformMatrix &getTransMatrix() const {
return trans_matrix_;
}
const inspirecv::TransformMatrix &getTransMatrixExtensive() const {
return trans_matrix_extensive_;
}
void setTransMatrix(const inspirecv::TransformMatrix &transMatrix) {
trans_matrix_ = transMatrix.Clone();
}
void setTransMatrixExtensive(const inspirecv::TransformMatrix &transMatrixExtensive) {
trans_matrix_extensive_ = transMatrixExtensive.Clone();
}
static float L2norm(float x0, float y0, float x1, float y1) {
return sqrt((x0 - x1) * (x0 - x1) + (y0 - y1) * (y0 - y1));
}
void DynamicSmoothParamUpdate(std::vector<inspirecv::Point2f> &landmarks, std::vector<std::vector<inspirecv::Point2f>> &landmarks_lastNframes,
int lm_length, float h = 0.06f, int n = 5) {
std::vector<inspirecv::Point2f> landmarks_temp;
landmarks_temp.assign(landmarks.begin(), landmarks.end());
if (landmarks_lastNframes.size() == n) {
for (int i = 0; i < lm_length / 2; i++) {
float sum_d = 1;
float max_d = 0;
for (int j = 0; j < n; j++) {
float d = L2norm(landmarks_temp[i].GetX(), landmarks_temp[i].GetY(), landmarks_lastNframes[j][i].GetX(),
landmarks_lastNframes[j][i].GetY());
if (d > max_d)
max_d = d;
}
for (int j = 0; j < n; j++) {
float d = exp(-max_d * (n - j) * h);
sum_d += d;
landmarks[i].SetX(landmarks[i].GetX() + d * landmarks_lastNframes[j][i].GetX());
landmarks[i].SetY(landmarks[i].GetY() + d * landmarks_lastNframes[j][i].GetY());
}
landmarks[i].SetX(landmarks[i].GetX() / sum_d);
landmarks[i].SetY(landmarks[i].GetY() / sum_d);
}
}
std::vector<inspirecv::Point2f> landmarks_frame;
for (int i = 0; i < lm_length / 2; i++) {
landmarks_frame.push_back(inspirecv::Point2f(landmarks[i].GetX(), landmarks[i].GetY()));
}
landmarks_lastNframes.push_back(landmarks_frame);
if (landmarks_lastNframes.size() > n)
landmarks_lastNframes.erase(landmarks_lastNframes.begin());
}
public:
std::vector<inspirecv::Point2f> landmark_;
std::vector<std::vector<inspirecv::Point2f>> landmark_smooth_aux_;
inspirecv::Rect2i bbox_;
inspirecv::Vec3f euler_angle_;
std::vector<float> pose_euler_angle_;
int num_of_dense_landmark_;
float align_mse_{};
const inspirecv::Vec3f &getEulerAngle() const {
return euler_angle_;
}
const std::vector<float> &getPoseEulerAngle() const {
return pose_euler_angle_;
}
void setPoseEulerAngle(const std::vector<float> &poseEulerAngle) {
pose_euler_angle_[0] = poseEulerAngle[0];
pose_euler_angle_[1] = poseEulerAngle[1];
pose_euler_angle_[2] = poseEulerAngle[2];
if (abs(pose_euler_angle_[0]) < 0.5 && abs(pose_euler_angle_[1]) < 0.48) {
is_standard_ = true;
}
}
bool isStandard() const {
return is_standard_;
}
const inspirecv::Rect2i &getBbox() const {
return bbox_;
}
void setBbox(const inspirecv::Rect2i &bbox) {
bbox_ = bbox;
}
inspirecv::TransformMatrix trans_matrix_;
inspirecv::TransformMatrix trans_matrix_extensive_;
float confidence_;
inspirecv::Rect2i detect_bbox_;
int tracking_count_; // Tracking count
bool is_standard_;
FacePoseQualityAdaptResult high_result;
FaceProcess faceProcess;
std::vector<inspirecv::Point2f> keyPointFive;
void setId(int id) {
face_id_ = id;
}
std::vector<float> left_eye_status_;
std::vector<float> right_eye_status_;
private:
ISF_TRACK_STATE tracking_state_;
std::shared_ptr<FaceActionPredictor> face_action_;
int face_id_;
};
typedef std::vector<FaceObjectInternal> FaceObjectInternalList;
} // namespace inspire
#endif // INSPIRE_FACE_FACE_INFO_INTERNAL_H

View File

@@ -1,11 +1,12 @@
//
// Created by tunm on 2023/9/12.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
// Include guard to prevent double inclusion of this header file
#pragma once
#ifndef HYPERFACEREPO_FACEPROCESS_H
#define HYPERFACEREPO_FACEPROCESS_H
#ifndef INSPIRE_FACE_FACEPROCESS_H
#define INSPIRE_FACE_FACEPROCESS_H
// Include the necessary header file "data_type.h"
#include "data_type.h"
@@ -23,8 +24,8 @@ typedef enum MaskInfo {
} MaskInfo;
/**
* Enumeration to represent different RGB liveness information.
*/
* Enumeration to represent different RGB liveness information.
*/
typedef enum RGBLivenessInfo {
UNKNOWN_RGB_LIVENESS = -1, ///< Unknown RGB liveness status
LIVENESS_FAKE = 0, ///< Fake liveness
@@ -32,8 +33,8 @@ typedef enum RGBLivenessInfo {
} RGBLivenessInfo;
/**
* Class definition for FaceProcess.
*/
* Class definition for FaceProcess.
*/
class INSPIRE_API FaceProcess {
public:
/**
@@ -45,9 +46,8 @@ public:
* Member variable to store RGB liveness information, initialized to UNKNOWN_RGB_LIVENESS.
*/
RGBLivenessInfo rgbLivenessInfo = UNKNOWN_RGB_LIVENESS;
};
} // namespace hyper
} // namespace inspire
#endif //HYPERFACEREPO_FACEPROCESS_H
#endif // INSPIRE_FACE_FACEPROCESS_H

View File

@@ -1,9 +1,10 @@
//
// Created by tunm on 2023/5/5.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#ifndef HYPERFACE_DATATYPE_H
#define HYPERFACE_DATATYPE_H
#ifndef INSPIRE_FACE_DATATYPE_H
#define INSPIRE_FACE_DATATYPE_H
#include <cstdint>
#if defined(_WIN32) && (defined(_DEBUG) || defined(DEBUG))
@@ -15,7 +16,7 @@
#define INSPIRE_API
#endif
#include <opencv2/opencv.hpp>
#include <inspirecv/inspirecv.h>
#ifndef M_PI
#define M_PI 3.14159265358979323846264338327950288
@@ -24,138 +25,138 @@
namespace inspire {
/**
* @defgroup DataType Definitions
* @brief Defines various data types used in the HyperFace project.
* @{
*/
* @defgroup DataType Definitions
* @brief Defines various data types used in the HyperFace project.
* @{
*/
#if !defined(int64)
/** @typedef int64
* @brief 64-bit integer type.
*/
* @brief 64-bit integer type.
*/
typedef int64_t int64;
#endif
#if !defined(uint64)
/** @typedef uint64
* @brief 64-bit unsigned integer type.
*/
* @brief 64-bit unsigned integer type.
*/
typedef uint64_t uint64;
#endif
#if !defined(int32)
/** @typedef int32
* @brief 32-bit integer type.
*/
* @brief 32-bit integer type.
*/
typedef int32_t int32;
#endif
#if !defined(uint32)
/** @typedef uint32
* @brief 32-bit unsigned integer type.
*/
* @brief 32-bit unsigned integer type.
*/
typedef uint32_t uint32;
#endif
#if !defined(int8)
/** @typedef int8
* @brief 8-bit integer type.
*/
* @brief 8-bit integer type.
*/
typedef int8_t int8;
#endif
#if !defined(uint8)
/** @typedef uint8
* @brief 8-bit unsigned integer type.
*/
* @brief 8-bit unsigned integer type.
*/
typedef uint8_t uint8;
#endif
/** @typedef ByteArray
* @brief Type definition for a byte array (vector of chars).
*/
* @brief Type definition for a byte array (vector of chars).
*/
typedef std::vector<char> ByteArray;
/** @typedef Point2i
* @brief 2D coordinate point with integer precision.
*/
typedef cv::Point Point2i;
* @brief 2D coordinate point with integer precision.
*/
typedef inspirecv::Point2i Point2i;
/** @typedef Point2f
* @brief 2D coordinate point with float precision.
*/
typedef cv::Point2f Point2f;
* @brief 2D coordinate point with float precision.
*/
typedef inspirecv::Point2f Point2f;
/** @typedef PointsList2i
* @brief List of 2D coordinate points with integer precision.
*/
* @brief List of 2D coordinate points with integer precision.
*/
typedef std::vector<Point2i> PointsList2i;
/** @typedef PointsList2f
* @brief List of 2D coordinate points with float precision.
*/
* @brief List of 2D coordinate points with float precision.
*/
typedef std::vector<Point2f> PointsList2f;
/** @typedef Contours2i
* @brief Contours represented as a list of 2D integer points.
*/
* @brief Contours represented as a list of 2D integer points.
*/
typedef std::vector<PointsList2i> Contours2i;
/** @typedef Contours2f
* @brief Contours represented as a list of 2D float points.
*/
* @brief Contours represented as a list of 2D float points.
*/
typedef std::vector<PointsList2f> Contours2f;
/** @typedef Textures2i
* @brief Texture lines represented as integer contours.
*/
* @brief Texture lines represented as integer contours.
*/
typedef Contours2i Textures2i;
/** @typedef AnyTensorFp32
* @brief Generic tensor representation using a vector of floats.
*/
* @brief Generic tensor representation using a vector of floats.
*/
typedef std::vector<float> AnyTensorFp32;
/** @typedef Matrix
* @brief Generic matrix representation.
*/
typedef cv::Mat Matrix;
/** @typedef ImageBitmap
* @brief Image bitmap representation.
*/
typedef inspirecv::Image ImageBitmap;
/** @typedef Rectangle
* @brief Rectangle representation using integer values.
*/
typedef cv::Rect_<int> Rectangle;
* @brief Rectangle representation using integer values.
*/
typedef inspirecv::Rect<int> Rectangle;
/** @typedef Size
* @brief Size representation using integer values.
*/
typedef cv::Size_<int> Size;
* @brief Size representation using integer values.
*/
typedef inspirecv::Size<int> Size;
/** @typedef Embedded
* @brief Dense vector for feature embedding.
*/
* @brief Dense vector for feature embedding.
*/
typedef std::vector<float> Embedded;
/** @typedef EmbeddedList
* @brief List of dense vectors for feature embedding.
*/
* @brief List of dense vectors for feature embedding.
*/
typedef std::vector<Embedded> EmbeddedList;
/** @typedef String
* @brief String type definition.
*/
* @brief String type definition.
*/
typedef std::string String;
/** @typedef IndexList
* @brief List of indices.
*/
* @brief List of indices.
*/
typedef std::vector<int> IndexList;
/** @struct FaceLoc
* @brief Struct representing standardized face landmarks for detection.
*
* Contains coordinates for the face, detection score, and landmarks.
*/
* @brief Struct representing standardized face landmarks for detection.
*
* Contains coordinates for the face, detection score, and landmarks.
*/
typedef struct FaceLoc {
float x1;
float y1;
@@ -166,32 +167,32 @@ typedef struct FaceLoc {
} FaceLoc;
/** @typedef FaceLocList
* @brief List of FaceLoc structures.
*/
* @brief List of FaceLoc structures.
*/
typedef std::vector<FaceLoc> FaceLocList;
/** @struct FaceBasicData
* @brief Struct for basic face data.
*
* Contains the size of the data and a pointer to the data.
*/
* @brief Struct for basic face data.
*
* Contains the size of the data and a pointer to the data.
*/
typedef struct FaceBasicData {
int32_t dataSize;
void* data;
} FaceBasicData;
/** @struct FaceFeatureEntity
* @brief Struct for face feature data.
*
* Contains the size of the feature data and a pointer to the feature array.
*/
* @brief Struct for face feature data.
*
* Contains the size of the feature data and a pointer to the feature array.
*/
typedef struct FaceFeatureEntity {
int32_t dataSize;
float *data;
float* data;
} FaceFeaturePtr;
/** @} */
} // namespace inspire
#endif //HYPERFACE_DATATYPE_H
#endif // INSPIRE_FACE_DATATYPE_H

View File

@@ -1,20 +1,22 @@
//
// Created by Tunm-Air13 on 2023/9/7.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include "face_context.h"
#include "Initialization_module/launch.h"
#include "face_session.h"
#include "initialization_module/launch.h"
#include <utility>
#include "log.h"
#include "herror.h"
#include "middleware/utils.h"
#include "recognition_module/dest_const.h"
namespace inspire {
FaceContext::FaceContext() = default;
FaceSession::FaceSession() = default;
int32_t FaceContext::Configuration(DetectMode detect_mode, int32_t max_detect_face, CustomPipelineParameter param,
int32_t detect_level_px, int32_t track_by_detect_mode_fps) {
int32_t FaceSession::Configuration(DetectModuleMode detect_mode, int32_t max_detect_face, CustomPipelineParameter param, int32_t detect_level_px,
int32_t track_by_detect_mode_fps) {
m_detect_mode_ = detect_mode;
m_max_detect_face_ = max_detect_face;
m_parameter_ = param;
@@ -25,26 +27,32 @@ int32_t FaceContext::Configuration(DetectMode detect_mode, int32_t max_detect_fa
return HERR_ARCHIVE_LOAD_FAILURE;
}
m_face_track_ = std::make_shared<FaceTrack>(m_detect_mode_, m_max_detect_face_, 20, 192, detect_level_px,
track_by_detect_mode_fps);
if (m_parameter_.enable_interaction_liveness) {
m_parameter_.enable_detect_mode_landmark = true;
}
m_face_track_ = std::make_shared<FaceTrackModule>(m_detect_mode_, m_max_detect_face_, 20, 192, detect_level_px, track_by_detect_mode_fps,
m_parameter_.enable_detect_mode_landmark);
m_face_track_->Configuration(INSPIRE_LAUNCH->getMArchive());
// SetDetectMode(m_detect_mode_);
m_face_recognition_ =
std::make_shared<FeatureExtraction>(INSPIRE_LAUNCH->getMArchive(), m_parameter_.enable_recognition);
m_face_recognition_ = std::make_shared<FeatureExtractionModule>(INSPIRE_LAUNCH->getMArchive(), m_parameter_.enable_recognition);
if (m_face_recognition_->QueryStatus() != HSUCCEED) {
return m_face_recognition_->QueryStatus();
}
m_face_pipeline_ =
std::make_shared<FacePipeline>(INSPIRE_LAUNCH->getMArchive(), param.enable_liveness, param.enable_mask_detect,
param.enable_face_attribute, param.enable_interaction_liveness);
m_face_pipeline_ = std::make_shared<FacePipelineModule>(INSPIRE_LAUNCH->getMArchive(), param.enable_liveness, param.enable_mask_detect,
param.enable_face_attribute, param.enable_interaction_liveness);
m_face_track_cost_ = std::make_shared<inspirecv::TimeSpend>("FaceTrack");
return HSUCCEED;
}
int32_t FaceContext::FaceDetectAndTrack(CameraStream& image) {
int32_t FaceSession::FaceDetectAndTrack(inspirecv::InspireImageProcess& process) {
std::lock_guard<std::mutex> lock(m_mtx_);
if (m_enable_track_cost_spend_) {
m_face_track_cost_->Start();
}
m_detect_cache_.clear();
m_face_basic_data_cache_.clear();
m_face_rects_cache_.clear();
@@ -70,12 +78,12 @@ int32_t FaceContext::FaceDetectAndTrack(CameraStream& image) {
if (m_face_track_ == nullptr) {
return HERR_SESS_TRACKER_FAILURE;
}
m_face_track_->UpdateStream(image);
m_face_track_->UpdateStream(process);
for (int i = 0; i < m_face_track_->trackingFace.size(); ++i) {
auto& face = m_face_track_->trackingFace[i];
HyperFaceData data = FaceObjectToHyperFaceData(face, i);
HyperFaceData data = FaceObjectInternalToHyperFaceData(face, i);
ByteArray byteArray;
auto ret = SerializeHyperFaceData(data, byteArray);
auto ret = RunSerializeHyperFaceData(data, byteArray);
if (ret != HSUCCEED) {
return HERR_INVALID_SERIALIZATION_FAILED;
}
@@ -103,33 +111,35 @@ int32_t FaceContext::FaceDetectAndTrack(CameraStream& image) {
basic.dataSize = m_detect_cache_[i].size();
basic.data = m_detect_cache_[i].data();
}
if (m_enable_track_cost_spend_) {
m_face_track_cost_->Stop();
}
// LOGD("Track COST: %f", m_face_track_->GetTrackTotalUseTime());
return HSUCCEED;
}
int32_t FaceContext::SetFaceDetectThreshold(float value) {
int32_t FaceSession::SetFaceDetectThreshold(float value) {
m_face_track_->SetDetectThreshold(value);
return HSUCCEED;
}
FaceObjectList& FaceContext::GetTrackingFaceList() {
FaceObjectInternalList& FaceSession::GetTrackingFaceList() {
return m_face_track_->trackingFace;
}
const std::shared_ptr<FeatureExtraction>& FaceContext::FaceRecognitionModule() {
const std::shared_ptr<FeatureExtractionModule>& FaceSession::FaceRecognitionModule() {
return m_face_recognition_;
}
const std::shared_ptr<FacePipeline>& FaceContext::FacePipelineModule() {
const std::shared_ptr<FacePipelineModule>& FaceSession::PipelineModule() {
return m_face_pipeline_;
}
const int32_t FaceContext::GetNumberOfFacesCurrentlyDetected() const {
const int32_t FaceSession::GetNumberOfFacesCurrentlyDetected() const {
return m_face_track_->trackingFace.size();
}
int32_t FaceContext::FacesProcess(CameraStream& image, const std::vector<HyperFaceData>& faces,
int32_t FaceSession::FacesProcess(inspirecv::InspireImageProcess& process, const std::vector<HyperFaceData>& faces,
const CustomPipelineParameter& param) {
std::lock_guard<std::mutex> lock(m_mtx_);
m_mask_results_cache_.resize(faces.size(), -1.0f);
@@ -148,7 +158,7 @@ int32_t FaceContext::FacesProcess(CameraStream& image, const std::vector<HyperFa
const auto& face = faces[i];
// RGB Liveness Detect
if (param.enable_liveness) {
auto ret = m_face_pipeline_->Process(image, face, PROCESS_RGB_LIVENESS);
auto ret = m_face_pipeline_->Process(process, face, PROCESS_RGB_LIVENESS);
if (ret != HSUCCEED) {
return ret;
}
@@ -156,7 +166,7 @@ int32_t FaceContext::FacesProcess(CameraStream& image, const std::vector<HyperFa
}
// Mask detection
if (param.enable_mask_detect) {
auto ret = m_face_pipeline_->Process(image, face, PROCESS_MASK);
auto ret = m_face_pipeline_->Process(process, face, PROCESS_MASK);
if (ret != HSUCCEED) {
return ret;
}
@@ -164,7 +174,7 @@ int32_t FaceContext::FacesProcess(CameraStream& image, const std::vector<HyperFa
}
// Face attribute prediction
if (param.enable_face_attribute) {
auto ret = m_face_pipeline_->Process(image, face, PROCESS_ATTRIBUTE);
auto ret = m_face_pipeline_->Process(process, face, PROCESS_ATTRIBUTE);
if (ret != HSUCCEED) {
return ret;
}
@@ -175,7 +185,7 @@ int32_t FaceContext::FacesProcess(CameraStream& image, const std::vector<HyperFa
// Face interaction
if (param.enable_interaction_liveness) {
auto ret = m_face_pipeline_->Process(image, face, PROCESS_INTERACTION);
auto ret = m_face_pipeline_->Process(process, face, PROCESS_INTERACTION);
if (ret != HSUCCEED) {
return ret;
}
@@ -188,10 +198,8 @@ int32_t FaceContext::FacesProcess(CameraStream& image, const std::vector<HyperFa
if (idx < m_face_track_->trackingFace.size()) {
auto& target = m_face_track_->trackingFace[idx];
if (target.GetTrackingId() == face.trackId) {
auto new_eye_left =
EmaFilter(m_face_pipeline_->eyesStatusCache[0], target.left_eye_status_, 8, 0.2f);
auto new_eye_right =
EmaFilter(m_face_pipeline_->eyesStatusCache[1], target.right_eye_status_, 8, 0.2f);
auto new_eye_left = EmaFilter(m_face_pipeline_->eyesStatusCache[0], target.left_eye_status_, 8, 0.2f);
auto new_eye_right = EmaFilter(m_face_pipeline_->eyesStatusCache[1], target.right_eye_status_, 8, 0.2f);
if (face.trackState > 1) {
// The filtered value can be obtained only in the tracking state
m_react_left_eye_results_cache_[i] = new_eye_left;
@@ -220,120 +228,141 @@ int32_t FaceContext::FacesProcess(CameraStream& image, const std::vector<HyperFa
return 0;
}
const std::vector<ByteArray>& FaceContext::GetDetectCache() const {
const std::vector<ByteArray>& FaceSession::GetDetectCache() const {
return m_detect_cache_;
}
const std::vector<FaceBasicData>& FaceContext::GetFaceBasicDataCache() const {
const std::vector<FaceBasicData>& FaceSession::GetFaceBasicDataCache() const {
return m_face_basic_data_cache_;
}
const std::vector<FaceRect>& FaceContext::GetFaceRectsCache() const {
const std::vector<FaceRect>& FaceSession::GetFaceRectsCache() const {
return m_face_rects_cache_;
}
const std::vector<int32_t>& FaceContext::GetTrackIDCache() const {
const std::vector<int32_t>& FaceSession::GetTrackIDCache() const {
return m_track_id_cache_;
}
const std::vector<float>& FaceContext::GetRollResultsCache() const {
const std::vector<float>& FaceSession::GetRollResultsCache() const {
return m_roll_results_cache_;
}
const std::vector<float>& FaceContext::GetYawResultsCache() const {
const std::vector<float>& FaceSession::GetYawResultsCache() const {
return m_yaw_results_cache_;
}
const std::vector<float>& FaceContext::GetPitchResultsCache() const {
const std::vector<float>& FaceSession::GetPitchResultsCache() const {
return m_pitch_results_cache_;
}
const std::vector<FacePoseQualityResult>& FaceContext::GetQualityResultsCache() const {
const std::vector<FacePoseQualityAdaptResult>& FaceSession::GetQualityResultsCache() const {
return m_quality_results_cache_;
}
const std::vector<float>& FaceContext::GetMaskResultsCache() const {
const std::vector<float>& FaceSession::GetMaskResultsCache() const {
return m_mask_results_cache_;
}
const std::vector<float>& FaceContext::GetRgbLivenessResultsCache() const {
const std::vector<float>& FaceSession::GetRgbLivenessResultsCache() const {
return m_rgb_liveness_results_cache_;
}
const std::vector<float>& FaceContext::GetFaceQualityScoresResultsCache() const {
const std::vector<float>& FaceSession::GetFaceQualityScoresResultsCache() const {
return m_quality_score_results_cache_;
}
const std::vector<float>& FaceContext::GetFaceInteractionLeftEyeStatusCache() const {
const std::vector<float>& FaceSession::GetFaceInteractionLeftEyeStatusCache() const {
return m_react_left_eye_results_cache_;
}
const std::vector<float>& FaceContext::GetFaceInteractionRightEyeStatusCache() const {
const std::vector<float>& FaceSession::GetFaceInteractionRightEyeStatusCache() const {
return m_react_right_eye_results_cache_;
}
const Embedded& FaceContext::GetFaceFeatureCache() const {
const Embedded& FaceSession::GetFaceFeatureCache() const {
return m_face_feature_cache_;
}
const std::vector<float>& FaceContext::GetDetConfidenceCache() const {
const std::vector<float>& FaceSession::GetDetConfidenceCache() const {
return m_det_confidence_cache_;
}
const std::vector<int>& FaceContext::GetFaceRaceResultsCache() const {
const float FaceSession::GetFaceFeatureNormCache() const {
return m_face_feature_norm_;
}
const std::vector<int>& FaceSession::GetFaceRaceResultsCache() const {
return m_attribute_race_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceGenderResultsCache() const {
const std::vector<int>& FaceSession::GetFaceGenderResultsCache() const {
return m_attribute_gender_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceAgeBracketResultsCache() const {
const std::vector<int>& FaceSession::GetFaceAgeBracketResultsCache() const {
return m_attribute_age_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceNormalAactionsResultCache() const {
const std::vector<int>& FaceSession::GetFaceNormalAactionsResultCache() const {
return m_action_normal_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceJawOpenAactionsResultCache() const {
const std::vector<int>& FaceSession::GetFaceJawOpenAactionsResultCache() const {
return m_action_jaw_open_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceBlinkAactionsResultCache() const {
const std::vector<int>& FaceSession::GetFaceBlinkAactionsResultCache() const {
return m_action_blink_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceShakeAactionsResultCache() const {
const std::vector<int>& FaceSession::GetFaceShakeAactionsResultCache() const {
return m_action_shake_results_cache_;
}
const std::vector<int>& FaceContext::GetFaceRaiseHeadAactionsResultCache() const {
const std::vector<int>& FaceSession::GetFaceRaiseHeadAactionsResultCache() const {
return m_action_raise_head_results_cache_;
}
int32_t FaceContext::FaceFeatureExtract(CameraStream& image, FaceBasicData& data) {
int32_t FaceSession::FaceFeatureExtract(inspirecv::InspireImageProcess& process, FaceBasicData& data) {
std::lock_guard<std::mutex> lock(m_mtx_);
int32_t ret;
HyperFaceData face = {0};
ret = DeserializeHyperFaceData((char*)data.data, data.dataSize, face);
ret = RunDeserializeHyperFaceData((char*)data.data, data.dataSize, face);
if (ret != HSUCCEED) {
return ret;
}
m_face_feature_cache_.clear();
ret = m_face_recognition_->FaceExtract(image, face, m_face_feature_cache_);
ret = m_face_recognition_->FaceExtract(process, face, m_face_feature_cache_, m_face_feature_norm_);
return ret;
}
const CustomPipelineParameter& FaceContext::getMParameter() const {
// Produces the canonical aligned face crop for a serialized face record.
// Deserializes `data` into a HyperFaceData, estimates a similarity transform
// (Umeyama) from the record's key points to SIMILARITY_TRANSFORM_DEST, and
// warps the source frame into a FACE_CROP_SIZE x FACE_CROP_SIZE image.
// Returns HSUCCEED on success or the deserialization error code.
int32_t FaceSession::FaceGetFaceAlignmentImage(inspirecv::InspireImageProcess& process, FaceBasicData& data, inspirecv::Image& image) {
    std::lock_guard<std::mutex> lock(m_mtx_);
    int32_t ret;
    HyperFaceData face = {0};
    ret = RunDeserializeHyperFaceData((char*)data.data, data.dataSize, face);
    if (ret != HSUCCEED) {
        return ret;
    }
    // Gather the facial key points as the source landmarks for the transform.
    // NOTE(review): assumes face.keyPoints holds the 5-point layout expected by
    // SIMILARITY_TRANSFORM_DEST — confirm against the serializer.
    std::vector<inspirecv::Point2f> pointsFive;
    for (const auto& p : face.keyPoints) {
        pointsFive.push_back(inspirecv::Point2f(p.x, p.y));
    }
    auto trans = inspirecv::SimilarityTransformEstimateUmeyama(SIMILARITY_TRANSFORM_DEST, pointsFive);
    image = process.ExecuteImageAffineProcessing(trans, FACE_CROP_SIZE, FACE_CROP_SIZE);
    return ret;
}
const CustomPipelineParameter& FaceSession::getMParameter() const {
return m_parameter_;
}
int32_t FaceContext::FaceQualityDetect(FaceBasicData& data, float& result) {
int32_t FaceSession::FaceQualityDetect(FaceBasicData& data, float& result) {
int32_t ret;
HyperFaceData face = {0};
ret = DeserializeHyperFaceData((char*)data.data, data.dataSize, face);
ret = RunDeserializeHyperFaceData((char*)data.data, data.dataSize, face);
// PrintHyperFaceData(face);
if (ret != HSUCCEED) {
return ret;
@@ -348,9 +377,9 @@ int32_t FaceContext::FaceQualityDetect(FaceBasicData& data, float& result) {
return ret;
}
int32_t FaceContext::SetDetectMode(DetectMode mode) {
int32_t FaceSession::SetDetectMode(DetectModuleMode mode) {
m_detect_mode_ = mode;
if (m_detect_mode_ == DetectMode::DETECT_MODE_ALWAYS_DETECT) {
if (m_detect_mode_ == DetectModuleMode::DETECT_MODE_ALWAYS_DETECT) {
m_always_detect_ = true;
} else {
m_always_detect_ = false;
@@ -358,14 +387,45 @@ int32_t FaceContext::SetDetectMode(DetectMode mode) {
return HSUCCEED;
}
int32_t FaceContext::SetTrackPreviewSize(const int32_t preview_size) {
// Returns whether landmark detection is enabled in detection mode,
// as reported by the underlying tracker module.
bool FaceSession::IsDetectModeLandmark() const {
    return m_face_track_->IsDetectModeLandmark();
}
// Forwards the tracking preview size (the resolution faces are tracked at)
// to the tracker module. Always returns HSUCCEED.
int32_t FaceSession::SetTrackPreviewSize(const int32_t preview_size) {
    m_face_track_->SetTrackPreviewSize(preview_size);
    return HSUCCEED;
}
int32_t FaceContext::SetTrackFaceMinimumSize(int32_t minSize) {
int32_t FaceSession::SetTrackFaceMinimumSize(int32_t minSize) {
m_face_track_->SetMinimumFacePxSize(minSize);
return HSUCCEED;
}
} // namespace inspire
// Forwards the track-mode smoothing ratio to the tracker module.
// Always returns HSUCCEED.
int32_t FaceSession::SetTrackModeSmoothRatio(float value) {
    m_face_track_->SetTrackModeSmoothRatio(value);
    return HSUCCEED;
}
// Forwards the number of cached frames used for track-mode smoothing
// to the tracker module. Always returns HSUCCEED.
int32_t FaceSession::SetTrackModeNumSmoothCacheFrame(int value) {
    m_face_track_->SetTrackModeNumSmoothCacheFrame(value);
    return HSUCCEED;
}
// Forwards the detection interval (frames between full detections while
// tracking) to the tracker module. Always returns HSUCCEED.
int32_t FaceSession::SetTrackModeDetectInterval(int value) {
    m_face_track_->SetTrackModeDetectInterval(value);
    return HSUCCEED;
}
// Enables (non-zero) or disables (zero) per-frame tracking time measurement
// and resets the accumulated timer. Always returns HSUCCEED.
int32_t FaceSession::SetEnableTrackCostSpend(int value) {
    m_enable_track_cost_spend_ = value;
    m_face_track_cost_->Reset();
    return HSUCCEED;
}
// Logs the accumulated tracking-time report, but only when cost measurement
// has been enabled via SetEnableTrackCostSpend().
void FaceSession::PrintTrackCostSpend() {
    if (m_enable_track_cost_spend_) {
        INSPIRE_LOGI("%s", m_face_track_cost_->Report().c_str());
    }
}
} // namespace inspire

View File

@@ -1,29 +1,21 @@
//
// Created by Tunm-Air13 on 2023/9/7.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#include <vector>
#ifndef HYPERFACEREPO_FACE_CONTEXT_H
#define HYPERFACEREPO_FACE_CONTEXT_H
/**
* @file face_context.h
* @brief Face context handling for HyperFaceRepo project.
* Includes definitions for face detection, tracking, and feature extraction.
*/
#ifndef INSPIRE_FACE_CONTEXT_H
#define INSPIRE_FACE_CONTEXT_H
#include <memory>
#include "track_module/face_track.h"
#include <inspirecv/inspirecv.h>
#include "data_type.h"
#include "pipeline_module/face_pipeline.h"
#include "recognition_module/face_feature_extraction.h"
#include "track_module/face_track_module.h"
#include "pipeline_module/face_pipeline_module.h"
#include "middleware/model_archive/inspire_archive.h"
/**
* @def DB_FILE_NAME
* @brief Default database file name used in the FaceContext.
*/
#define DB_FILE_NAME ".E63520A95DD5B3892C56DA38C3B28E551D8173FD"
#include "recognition_module/face_feature_extraction_module.h"
#include "middleware/inspirecv_image_process.h"
#include "common/face_data/face_serialize_tools.h"
namespace inspire {
@@ -41,6 +33,7 @@ typedef struct CustomPipelineParameter {
bool enable_face_attribute = false; ///< Enable face attribute prediction feature
bool enable_face_quality = false; ///< Enable face quality assessment feature
bool enable_interaction_liveness = false; ///< Enable interactive liveness detection feature
bool enable_detect_mode_landmark = false; ///< Enable landmark detection in detection mode
} ContextCustomParameter;
@@ -52,12 +45,12 @@ typedef struct CustomPipelineParameter {
* and handle other face-related features. Integrates with various modules such as FaceTrack, FaceRecognition, and
* FacePipeline.
*/
class INSPIRE_API FaceContext {
class INSPIRE_API FaceSession {
public:
/**
* @brief Constructor for the FaceContext class.
*/
explicit FaceContext();
explicit FaceSession();
/**
* @brief Configures the face context with given parameters.
@@ -67,7 +60,7 @@ public:
* @param param Custom parameters for the face pipeline.
* @return int32_t Returns 0 on success, non-zero for any error.
*/
int32_t Configuration(DetectMode detect_mode, int32_t max_detect_face, CustomPipelineParameter param, int32_t detect_level_px = -1,
int32_t Configuration(DetectModuleMode detect_mode, int32_t max_detect_face, CustomPipelineParameter param, int32_t detect_level_px = -1,
int32_t track_by_detect_mode_fps = -1);
/**
@@ -75,7 +68,7 @@ public:
* @param image The camera stream to process for face detection and tracking.
* @return int32_t Returns the number of faces detected and tracked.
*/// Method for face detection and tracking
int32_t FaceDetectAndTrack(CameraStream& image);
int32_t FaceDetectAndTrack(inspirecv::InspireImageProcess& process);
/**
* @brief Set the threshold of face detection function, which only acts on the detection model
@@ -88,7 +81,7 @@ public:
* @brief Retrieves the list of currently tracked faces.
* @return FaceObjectList A list of face objects currently being tracked.
*/
FaceObjectList& GetTrackingFaceList();
FaceObjectInternalList& GetTrackingFaceList();
/**
* @brief Processes faces using the provided pipeline parameters.
@@ -97,19 +90,19 @@ public:
* @param param Custom pipeline parameters.
* @return int32_t Status code of the processing.
*/
int32_t FacesProcess(CameraStream& image, const std::vector<HyperFaceData>& faces, const CustomPipelineParameter& param);
int32_t FacesProcess(inspirecv::InspireImageProcess& process, const std::vector<HyperFaceData>& faces, const CustomPipelineParameter& param);
/**
* @brief Retrieves the face recognition module.
* @return std::shared_ptr<FaceRecognition> Shared pointer to the FaceRecognition module.
*/
const std::shared_ptr<FeatureExtraction>& FaceRecognitionModule();
const std::shared_ptr<FeatureExtractionModule>& FaceRecognitionModule();
/**
* @brief Retrieves the face pipeline module.
* @return std::shared_ptr<FacePipeline> Shared pointer to the FacePipeline module.
*/
const std::shared_ptr<FacePipeline>& FacePipelineModule();
const std::shared_ptr<FacePipelineModule>& PipelineModule();
/**
* @brief Gets the number of faces currently detected.
@@ -123,7 +116,16 @@ public:
* @param data FaceBasicData to store extracted features.
* @return int32_t Status code of the feature extraction.
*/
int32_t FaceFeatureExtract(CameraStream& image, FaceBasicData& data);
int32_t FaceFeatureExtract(inspirecv::InspireImageProcess& process, FaceBasicData& data);
/**
* @brief Gets the face alignment image.
* @param process The image process object.
* @param data The face basic data.
* @param image The output image.
* @return int32_t The status code of the operation.
*/
int32_t FaceGetFaceAlignmentImage(inspirecv::InspireImageProcess& process, FaceBasicData& data, inspirecv::Image& image);
/**
* @brief Retrieves the custom pipeline parameters.
@@ -158,7 +160,13 @@ public:
* @param mode You can select mode for track or detect.
* @return int32_t Status code of the operation.
* */
int32_t SetDetectMode(DetectMode mode);
int32_t SetDetectMode(DetectModuleMode mode);
/**
* @brief Check if landmark detection is enabled in detection mode.
* @return True if landmark detection is enabled, false otherwise.
*/
bool IsDetectModeLandmark() const;
public:
// Accessor methods for various cached data
@@ -208,7 +216,7 @@ public:
* @brief Gets the cache of face pose quality results.
* @return A const reference to a vector of FacePoseQualityResult objects.
*/
const std::vector<FacePoseQualityResult>& GetQualityResultsCache() const;
const std::vector<FacePoseQualityAdaptResult>& GetQualityResultsCache() const;
/**
* @brief Gets the cache of mask detection results.
@@ -300,33 +308,72 @@ public:
*/
const std::vector<float>& GetDetConfidenceCache() const;
/**
* @brief Gets the cache of face feature norm.
* @return A const reference to a float containing face feature norm.
*/
const float GetFaceFeatureNormCache() const;
/**
* @brief Set the track mode smooth ratio
* @param value The smooth ratio value
* @return int32_t Status code of the operation.
* */
int32_t SetTrackModeSmoothRatio(float value);
/**
* @brief Set the track mode num smooth cache frame
* @param value The num smooth cache frame value
* @return int32_t Status code of the operation.
* */
int32_t SetTrackModeNumSmoothCacheFrame(int value);
/**
* @brief Set the track model detect interval
* @param value The detect interval value
* @return int32_t Status code of the operation.
* */
int32_t SetTrackModeDetectInterval(int value);
/**
* @brief Set the enable cost spend
* @param value The enable cost spend value
* @return int32_t Status code of the operation.
* */
int32_t SetEnableTrackCostSpend(int value);
/**
* @brief Print the cost spend
* */
void PrintTrackCostSpend();
private:
// Private member variables
CustomPipelineParameter m_parameter_; ///< Stores custom parameters for the pipeline
int32_t m_max_detect_face_{}; ///< Maximum number of faces that can be detected
DetectMode m_detect_mode_; ///< Current detection mode (image or video)
DetectModuleMode m_detect_mode_; ///< Current detection mode (image or video)
bool m_always_detect_{}; ///< Flag to determine if detection should always occur
std::shared_ptr<FaceTrack> m_face_track_; ///< Shared pointer to the FaceTrack object
std::shared_ptr<FeatureExtraction> m_face_recognition_; ///< Shared pointer to the FaceRecognition object
std::shared_ptr<FacePipeline> m_face_pipeline_; ///< Shared pointer to the FacePipeline object
std::shared_ptr<FaceTrackModule> m_face_track_; ///< Shared pointer to the FaceTrack object
std::shared_ptr<FeatureExtractionModule> m_face_recognition_; ///< Shared pointer to the FaceRecognition object
std::shared_ptr<FacePipelineModule> m_face_pipeline_; ///< Shared pointer to the FacePipeline object
private:
// Cache data
std::vector<ByteArray> m_detect_cache_; ///< Cache for storing serialized detected face data
std::vector<FaceBasicData> m_face_basic_data_cache_; ///< Cache for basic face data extracted from detection
std::vector<FaceRect> m_face_rects_cache_; ///< Cache for face rectangle data from detection
std::vector<int32_t> m_track_id_cache_; ///< Cache for tracking IDs of detected faces
std::vector<float> m_det_confidence_cache_; ///< Cache for face detection confidence of detected faces
std::vector<float> m_roll_results_cache_; ///< Cache for storing roll results from face pose estimation
std::vector<float> m_yaw_results_cache_; ///< Cache for storing yaw results from face pose estimation
std::vector<float> m_pitch_results_cache_; ///< Cache for storing pitch results from face pose estimation
std::vector<FacePoseQualityResult> m_quality_results_cache_; ///< Cache for face pose quality results
std::vector<float> m_mask_results_cache_; ///< Cache for mask detection results
std::vector<float> m_rgb_liveness_results_cache_; ///< Cache for RGB liveness detection results
std::vector<float> m_quality_score_results_cache_; ///< Cache for RGB face quality score results
std::vector<float> m_react_left_eye_results_cache_; ///< Cache for Left eye state in face interaction
std::vector<float> m_react_right_eye_results_cache_; ///< Cache for Right eye state in face interaction
std::vector<ByteArray> m_detect_cache_; ///< Cache for storing serialized detected face data
std::vector<FaceBasicData> m_face_basic_data_cache_; ///< Cache for basic face data extracted from detection
std::vector<FaceRect> m_face_rects_cache_; ///< Cache for face rectangle data from detection
std::vector<int32_t> m_track_id_cache_; ///< Cache for tracking IDs of detected faces
std::vector<float> m_det_confidence_cache_; ///< Cache for face detection confidence of detected faces
std::vector<float> m_roll_results_cache_; ///< Cache for storing roll results from face pose estimation
std::vector<float> m_yaw_results_cache_; ///< Cache for storing yaw results from face pose estimation
std::vector<float> m_pitch_results_cache_; ///< Cache for storing pitch results from face pose estimation
std::vector<FacePoseQualityAdaptResult> m_quality_results_cache_; ///< Cache for face pose quality results
std::vector<float> m_mask_results_cache_; ///< Cache for mask detection results
std::vector<float> m_rgb_liveness_results_cache_; ///< Cache for RGB liveness detection results
std::vector<float> m_quality_score_results_cache_; ///< Cache for RGB face quality score results
std::vector<float> m_react_left_eye_results_cache_; ///< Cache for Left eye state in face interaction
std::vector<float> m_react_right_eye_results_cache_; ///< Cache for Right eye state in face interaction
std::vector<int> m_action_normal_results_cache_; ///< Cache for normal action in face interaction
std::vector<int> m_action_shake_results_cache_; ///< Cache for shake action in face interaction
@@ -334,14 +381,20 @@ private:
std::vector<int> m_action_jaw_open_results_cache_; ///< Cache for jaw open action in face interaction
std::vector<int> m_action_raise_head_results_cache_; ///< Cache for raise head action in face interaction
std::vector<int> m_attribute_race_results_cache_;
std::vector<int> m_attribute_gender_results_cache_;
std::vector<int> m_attribute_age_results_cache_;
Embedded m_face_feature_cache_; ///< Cache for current face feature data
std::vector<int> m_attribute_race_results_cache_; ///< Cache for face attribute race results
std::vector<int> m_attribute_gender_results_cache_; ///< Cache for face attribute gender results
std::vector<int> m_attribute_age_results_cache_; ///< Cache for face attribute age results
Embedded m_face_feature_cache_; ///< Cache for current face feature data
float m_face_feature_norm_; ///< Cache for face feature norm
std::mutex m_mtx_; ///< Mutex for thread safety.
// cost spend
std::shared_ptr<inspirecv::TimeSpend> m_face_track_cost_;
int m_enable_track_cost_spend_ = 0;
};
} // namespace inspire
#endif // HYPERFACEREPO_FACE_CONTEXT_H
#endif // INSPIRE_FACE_CONTEXT_H

View File

@@ -0,0 +1,368 @@
#include "embedding_db.h"
#include "sqlite-vec.h"
#include "isf_check.h"
#include <algorithm>
namespace inspire {
// Process-wide singleton storage and the mutex guarding its creation and
// lookup (see Init() / GetInstance()).
std::unique_ptr<EmbeddingDB> EmbeddingDB::instance_ = nullptr;
std::mutex EmbeddingDB::instanceMutex_;
// Returns the process-wide EmbeddingDB singleton.
// Fails hard (via INSPIREFACE_CHECK_MSG) if Init() has not been called yet.
EmbeddingDB &EmbeddingDB::GetInstance() {
    std::lock_guard<std::mutex> guard(instanceMutex_);
    INSPIREFACE_CHECK_MSG(instance_, "EmbeddingDB not initialized. Call Init() first.");
    return *instance_;
}
void EmbeddingDB::Init(const std::string &dbPath, size_t vectorDim, IdMode idMode) {
std::lock_guard<std::mutex> lock(instanceMutex_);
INSPIREFACE_CHECK_MSG(!instance_, "EmbeddingDB already initialized");
instance_.reset(new EmbeddingDB(dbPath, vectorDim, "cosine", idMode));
}
// Opens (or creates) the SQLite database at dbPath and ensures the vec0
// virtual table "vec_items" exists with embeddings of size vectorDim and the
// requested distance metric.
// NOTE(review): sqlite3_auto_extension registers sqlite-vec for every
// connection opened afterwards in this process, not only for this one.
EmbeddingDB::EmbeddingDB(const std::string &dbPath, size_t vectorDim, const std::string &distanceMetric, IdMode idMode)
    : vectorDim_(vectorDim), tableName_("vec_items"), idMode_(idMode) {
    // Register the sqlite-vec extension so the vec0 module is available below.
    int rc = sqlite3_auto_extension((void (*)())sqlite3_vec_init);
    CheckSQLiteError(rc, nullptr);

    // Open database
    rc = sqlite3_open(dbPath.c_str(), &db_);
    CheckSQLiteError(rc, db_);

    // Create vector table
    std::string createTableSQL = "CREATE VIRTUAL TABLE IF NOT EXISTS " + tableName_ + " USING vec0(embedding float[" + std::to_string(vectorDim_) +
                                 "] distance_metric=" + distanceMetric + ")";
    ExecuteSQL(createTableSQL);
    initialized_ = true;
}
// Closes the SQLite connection if one was successfully opened.
EmbeddingDB::~EmbeddingDB() {
    if (db_ != nullptr) {
        sqlite3_close(db_);
    }
}
// Inserts one embedding, letting the configured id mode decide the row id.
// On success allocId receives the effective row id.
// NOTE(review): this overload takes dbMutex_; the two-argument overload does
// not lock by itself. The placeholder id 0 is ignored in auto-increment mode.
bool EmbeddingDB::InsertVector(const std::vector<float> &vector, int64_t &allocId) {
    std::lock_guard<std::mutex> guard(dbMutex_);
    return InsertVector(0, vector, allocId);
}
// Inserts one embedding row. In AUTO_INCREMENT mode the id argument is ignored
// and SQLite assigns the rowid; otherwise the caller-provided id is bound as
// the rowid. On success allocId receives the effective row id; on failure the
// error is logged and false is returned.
// NOTE(review): this overload does not lock dbMutex_ itself — the 1-argument
// overload locks before delegating, while BatchInsertVectors calls it inside a
// transaction without the lock. Confirm the intended locking contract.
bool EmbeddingDB::InsertVector(int64_t id, const std::vector<float> &vector, int64_t &allocId) {
    CheckVectorDimension(vector);
    sqlite3_stmt *stmt;
    std::string sql;
    if (idMode_ == IdMode::AUTO_INCREMENT) {
        sql = "INSERT INTO " + tableName_ + "(embedding) VALUES (?)";
    } else {
        sql = "INSERT INTO " + tableName_ + "(rowid, embedding) VALUES (?, ?)";
    }

    int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
    // CheckSQLiteError(rc, db_);
    if (rc != SQLITE_OK) {
        INSPIRE_LOGE("Failed to prepare statement: %s", sqlite3_errmsg(db_));
        // Per the SQLite docs *stmt is NULL on prepare failure, and
        // sqlite3_finalize(NULL) is a harmless no-op.
        sqlite3_finalize(stmt);
        return false;
    }

    // SQLITE_STATIC: the caller's vector outlives the statement, so SQLite
    // does not need its own copy of the blob.
    if (idMode_ == IdMode::AUTO_INCREMENT) {
        sqlite3_bind_blob(stmt, 1, vector.data(), vector.size() * sizeof(float), SQLITE_STATIC);
    } else {
        sqlite3_bind_int64(stmt, 1, id);
        sqlite3_bind_blob(stmt, 2, vector.data(), vector.size() * sizeof(float), SQLITE_STATIC);
    }

    rc = sqlite3_step(stmt);
    sqlite3_finalize(stmt);
    if (rc != SQLITE_DONE) {
        INSPIRE_LOGE("Failed to insert vector: %s", sqlite3_errmsg(db_));
        return false;
    }
    // CheckSQLiteError(rc == SQLITE_DONE ? SQLITE_OK : rc, db_);

    allocId = idMode_ == IdMode::AUTO_INCREMENT ? GetLastInsertRowId() : id;
    return true;
}
// Loads the embedding stored under the given rowid.
// Returns an empty vector when no such row exists.
std::vector<float> EmbeddingDB::GetVector(int64_t id) const {
    std::lock_guard<std::mutex> guard(dbMutex_);

    const std::string sql = "SELECT embedding FROM " + tableName_ + " WHERE rowid = ?";
    sqlite3_stmt *stmt = nullptr;
    int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
    CheckSQLiteError(rc, db_);

    sqlite3_bind_int64(stmt, 1, id);

    std::vector<float> result;
    if (sqlite3_step(stmt) == SQLITE_ROW) {
        // Reinterpret the stored blob as a float array.
        const float *blob = static_cast<const float *>(sqlite3_column_blob(stmt, 0));
        const size_t count = sqlite3_column_bytes(stmt, 0) / sizeof(float);
        result.assign(blob, blob + count);
    }
    sqlite3_finalize(stmt);
    return result;
}
// Inserts a batch of (id, vector) records inside one transaction.
// On any failure the whole transaction is rolled back and the exception is
// re-thrown; on success the effective row ids are returned in input order.
std::vector<int64_t> EmbeddingDB::BatchInsertVectors(const std::vector<VectorData> &vectors) {
    ExecuteSQL("BEGIN");
    std::vector<int64_t> insertedIds;
    insertedIds.reserve(vectors.size());
    try {
        for (const auto &entry : vectors) {
            int64_t newId = 0;
            if (!InsertVector(entry.id, entry.vector, newId)) {
                throw std::runtime_error("Failed to insert vector");
            }
            insertedIds.push_back(newId);
        }
        ExecuteSQL("COMMIT");
    } catch (...) {
        ExecuteSQL("ROLLBACK");
        throw;
    }
    return insertedIds;
}
// Inserts a batch of raw embeddings inside one transaction, letting the
// id mode allocate ids (the placeholder id 0 is ignored in auto-increment
// mode). Rolls back and re-throws on any failure; returns the row ids
// in input order on success.
std::vector<int64_t> EmbeddingDB::BatchInsertVectors(const std::vector<std::vector<float>> &vectors) {
    ExecuteSQL("BEGIN");
    std::vector<int64_t> insertedIds;
    insertedIds.reserve(vectors.size());
    try {
        for (const auto &embedding : vectors) {
            int64_t newId = 0;
            if (!InsertVector(0, embedding, newId)) {
                throw std::runtime_error("Failed to insert vector");
            }
            insertedIds.push_back(newId);
        }
        ExecuteSQL("COMMIT");
    } catch (...) {
        ExecuteSQL("ROLLBACK");
        throw;
    }
    return insertedIds;
}
// Returns the rowid of the most recent successful INSERT on this connection.
int64_t EmbeddingDB::GetLastInsertRowId() const {
    return sqlite3_last_insert_rowid(db_);
}
void EmbeddingDB::UpdateVector(int64_t id, const std::vector<float> &newVector) {
CheckVectorDimension(newVector);
sqlite3_stmt *stmt;
std::string sql = "UPDATE " + tableName_ + " SET embedding = ? WHERE rowid = ?";
int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
CheckSQLiteError(rc, db_);
sqlite3_bind_blob(stmt, 1, newVector.data(), newVector.size() * sizeof(float), SQLITE_STATIC);
sqlite3_bind_int64(stmt, 2, id);
rc = sqlite3_step(stmt);
sqlite3_finalize(stmt);
INSPIREFACE_CHECK_MSG(rc == SQLITE_DONE, "Failed to update vector");
if (sqlite3_changes(db_) == 0) {
INSPIRE_LOGF("Vector with id %ld not found", id);
}
}
void EmbeddingDB::DeleteVector(int64_t id) {
sqlite3_stmt *stmt;
std::string sql = "DELETE FROM " + tableName_ + " WHERE rowid = ?";
int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
CheckSQLiteError(rc, db_);
sqlite3_bind_int64(stmt, 1, id);
rc = sqlite3_step(stmt);
sqlite3_finalize(stmt);
CheckSQLiteError(rc == SQLITE_DONE ? SQLITE_OK : rc, db_);
}
// KNN search over the vec0 table: returns up to top_k rows ordered by
// ascending distance, reported as similarity = 1.0 - distance. Results whose
// similarity is below keep_similar_threshold are filtered out afterwards.
// When return_feature is true, each result also carries a copy of the stored
// embedding blob.
std::vector<FaceSearchResult> EmbeddingDB::SearchSimilarVectors(const std::vector<float> &queryVector, size_t top_k, float keep_similar_threshold,
                                                                bool return_feature) {
    std::lock_guard<std::mutex> lock(dbMutex_);
    CheckVectorDimension(queryVector);
    sqlite3_stmt *stmt;
    std::string sql;
    // "embedding MATCH ?" with ORDER BY distance + LIMIT is the sqlite-vec
    // KNN query form; two SQL variants depending on whether the embedding
    // column itself must be returned.
    if (return_feature) {
        sql =
          "SELECT rowid, embedding, 1.0 - distance as similarity "
          "FROM " +
          tableName_ +
          " "
          "WHERE embedding MATCH ? "
          "ORDER BY distance "
          "LIMIT ?";
    } else {
        sql =
          "SELECT rowid, 1.0 - distance as similarity "
          "FROM " +
          tableName_ +
          " "
          "WHERE embedding MATCH ? "
          "ORDER BY distance "
          "LIMIT ?";
    }

    int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
    CheckSQLiteError(rc, db_);

    // SQLITE_STATIC: queryVector outlives the statement.
    sqlite3_bind_blob(stmt, 1, queryVector.data(), queryVector.size() * sizeof(float), SQLITE_STATIC);
    sqlite3_bind_int64(stmt, 2, top_k);

    std::vector<FaceSearchResult> results;
    while ((rc = sqlite3_step(stmt)) == SQLITE_ROW) {
        FaceSearchResult result;
        result.id = sqlite3_column_int64(stmt, 0);
        if (return_feature) {
            const float *blob_data = static_cast<const float *>(sqlite3_column_blob(stmt, 1));
            size_t blob_size = sqlite3_column_bytes(stmt, 1) / sizeof(float);
            result.feature.assign(blob_data, blob_data + blob_size);
            result.similarity = sqlite3_column_double(stmt, 2);
        } else {
            result.similarity = sqlite3_column_double(stmt, 1);
        }
        results.push_back(result);
    }

    sqlite3_finalize(stmt);
    // The loop exits with rc == SQLITE_DONE on normal completion; anything
    // else is surfaced as an error here.
    CheckSQLiteError(rc == SQLITE_DONE ? SQLITE_OK : rc, db_);

    // Filter results whose similarity is below the threshold
    results.erase(std::remove_if(results.begin(), results.end(),
                                 [keep_similar_threshold](const FaceSearchResult &result) { return result.similarity < keep_similar_threshold; }),
                  results.end());

    return results;
}
// Returns the total number of embeddings currently stored in the table.
int64_t EmbeddingDB::GetVectorCount() const {
    std::lock_guard<std::mutex> guard(dbMutex_);

    const std::string sql = "SELECT COUNT(*) FROM " + tableName_;
    sqlite3_stmt *stmt = nullptr;
    int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
    CheckSQLiteError(rc, db_);

    rc = sqlite3_step(stmt);
    // NOTE(review): if this check aborts/throws, stmt is not finalized — the
    // same error-path pattern used elsewhere in this file.
    CheckSQLiteError(rc == SQLITE_ROW ? SQLITE_OK : rc, db_);

    const int64_t count = sqlite3_column_int64(stmt, 0);
    sqlite3_finalize(stmt);
    return count;
}
// Rejects (via INSPIREFACE_CHECK_MSG) any embedding whose length differs from
// the dimension fixed at construction time.
void EmbeddingDB::CheckVectorDimension(const std::vector<float> &vector) const {
    const bool dimension_ok = (vector.size() == vectorDim_);
    INSPIREFACE_CHECK_MSG(dimension_ok,
                          ("Vector dimension mismatch. Expected: " + std::to_string(vectorDim_) + ", Got: " + std::to_string(vector.size())).c_str());
}
// Runs a statement that produces no result rows (DDL, BEGIN/COMMIT, ...).
// Any SQLite error message is copied, freed, and surfaced through
// INSPIREFACE_CHECK_MSG; the return code is verified afterwards as well.
void EmbeddingDB::ExecuteSQL(const std::string &sql) {
    std::lock_guard<std::mutex> guard(dbMutex_);
    char *errMsg = nullptr;
    const int rc = sqlite3_exec(db_, sql.c_str(), nullptr, nullptr, &errMsg);
    if (errMsg != nullptr) {
        // Copy the message before releasing the SQLite-owned buffer.
        const std::string error(errMsg);
        sqlite3_free(errMsg);
        INSPIREFACE_CHECK_MSG(false, ("SQL error: " + error).c_str());
    }
    CheckSQLiteError(rc, db_);
}
void EmbeddingDB::CheckSQLiteError(int rc, sqlite3 *db) {
std::string error = db ? sqlite3_errmsg(db) : "SQLite error";
INSPIREFACE_CHECK_MSG(rc == SQLITE_OK, error.c_str());
}
// Debug helper: dumps every rowid together with the first five elements of
// its embedding, to logcat on Android or stdout elsewhere.
// No-op (with an error log) when the singleton has not been initialized.
void EmbeddingDB::ShowTable() {
    if (!initialized_) {
        INSPIRE_LOGE("EmbeddingDB is not initialized");
        return;
    }
    std::lock_guard<std::mutex> lock(dbMutex_);
    sqlite3_stmt *stmt;
    std::string sql = "SELECT rowid, embedding FROM " + tableName_;
    int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
    CheckSQLiteError(rc, db_);
    // Print header
#ifdef __ANDROID__
    __android_log_print(ANDROID_LOG_INFO, "EmbeddingDB", "=== Table Content ===");
    __android_log_print(ANDROID_LOG_INFO, "EmbeddingDB", "ID | Vector (first 5 elements)");
    __android_log_print(ANDROID_LOG_INFO, "EmbeddingDB", "------------------------");
#else
    printf("=== Table Content ===\n");
    printf("ID | Vector (first 5 elements)\n");
    printf("------------------------\n");
#endif
    while (sqlite3_step(stmt) == SQLITE_ROW) {
        int64_t id = sqlite3_column_int64(stmt, 0);
        const float *vector_data = static_cast<const float *>(sqlite3_column_blob(stmt, 1));
        // Show at most five elements; the blob length is in bytes.
        size_t vector_size = std::min(size_t(5), sqlite3_column_bytes(stmt, 1) / sizeof(float));
        std::string vector_str;
        for (size_t i = 0; i < vector_size; ++i) {
            vector_str += std::to_string(vector_data[i]);
            if (i < vector_size - 1)
                vector_str += ", ";
        }
        vector_str += "...";
        // Fix: "%lld" requires a long long argument; passing int64_t directly
        // is undefined behavior on platforms where int64_t is `long` (LP64).
#ifdef __ANDROID__
        __android_log_print(ANDROID_LOG_INFO, "EmbeddingDB", "%lld | %s", static_cast<long long>(id), vector_str.c_str());
#else
        printf("%lld | %s\n", static_cast<long long>(id), vector_str.c_str());
#endif
    }
    sqlite3_finalize(stmt);
}
std::vector<int64_t> EmbeddingDB::GetAllIds() {
if (!initialized_) {
INSPIRE_LOGE("EmbeddingDB is not initialized");
return {};
}
std::lock_guard<std::mutex> lock(dbMutex_);
std::vector<int64_t> ids;
sqlite3_stmt *stmt;
std::string sql = "SELECT rowid FROM " + tableName_;
int rc = sqlite3_prepare_v2(db_, sql.c_str(), -1, &stmt, nullptr);
CheckSQLiteError(rc, db_);
while (sqlite3_step(stmt) == SQLITE_ROW) {
ids.push_back(sqlite3_column_int64(stmt, 0));
}
sqlite3_finalize(stmt);
return ids;
}
} // namespace inspire

View File

@@ -0,0 +1,131 @@
#ifndef INSPIRE_EMBEDDING_DB_H
#define INSPIRE_EMBEDDING_DB_H
#ifndef SQLITE_CORE
#define SQLITE_CORE
#endif
#ifndef SQLITE_VEC_STATIC
#define SQLITE_VEC_STATIC
#endif
#ifndef SQLITE_VEC_ENABLE_AVX
#define SQLITE_VEC_ENABLE_AVX
#endif
#include <sqlite3.h>
#include <vector>
#include <string>
#include <memory>
#include <stdexcept>
#include <mutex>
#define EMBEDDING_DB inspire::EmbeddingDB
namespace inspire {
// One hit returned by EmbeddingDB::SearchSimilarVectors.
struct FaceSearchResult {
    int64_t id;                  // rowid of the matched embedding
    double similarity;           // similarity score of this match
    std::vector<float> feature;  // matched embedding; filled only when return_feature is requested
};
// Input record for batch insertion.
struct VectorData {
    int64_t id;  // Desired rowid; ignored when the database runs in AUTO_INCREMENT mode
    std::vector<float> vector;  // Embedding to store; must match the configured dimension
};
// Controls how rowids are assigned on insertion.
enum class IdMode {
    AUTO_INCREMENT = 0,  // Database assigns the next available rowid
    MANUAL,              // Caller supplies the rowid explicitly
};
// Singleton wrapper around a SQLite database (with the sqlite-vec extension)
// that stores fixed-dimension float embeddings and answers similarity queries.
// All public operations are internally serialized on dbMutex_.
class EmbeddingDB {
public:
    ~EmbeddingDB();
    // Access the process-wide instance; Init() must be called first.
    static EmbeddingDB &GetInstance();
    // Create the singleton. dbPath ":memory:" keeps the database in RAM.
    static void Init(const std::string &dbPath = ":memory:", size_t vectorDim = 512, IdMode idMode = IdMode::AUTO_INCREMENT);
    // Delete copy and move operations
    EmbeddingDB(const EmbeddingDB &) = delete;
    EmbeddingDB &operator=(const EmbeddingDB &) = delete;
    EmbeddingDB(EmbeddingDB &&) = delete;
    EmbeddingDB &operator=(EmbeddingDB &&) = delete;
    // Insert a single vector with an explicit id (MANUAL mode).
    bool InsertVector(int64_t id, const std::vector<float> &vector, int64_t &allocId);
    bool InsertVector(const std::vector<float> &vector, int64_t &allocId);  // For auto-increment mode
    // Batch insert vectors; returns the assigned rowids.
    std::vector<int64_t> BatchInsertVectors(const std::vector<VectorData> &vectors);
    std::vector<int64_t> BatchInsertVectors(const std::vector<std::vector<float>> &vectors);  // For auto-increment mode
    // Replace the embedding stored under `id`.
    void UpdateVector(int64_t id, const std::vector<float> &newVector);
    // Remove the embedding stored under `id`.
    void DeleteVector(int64_t id);
    // K-nearest search; results below keep_similar_threshold are dropped.
    // Set return_feature to also copy each matched embedding into the result.
    std::vector<FaceSearchResult> SearchSimilarVectors(const std::vector<float> &queryVector, size_t top_k = 3, float keep_similar_threshold = 0.5f,
                                                       bool return_feature = false);
    // Get vector count
    int64_t GetVectorCount() const;
    // Get current ID mode
    IdMode GetIdMode() const {
        return idMode_;
    }
    // True once Init() has completed successfully.
    bool IsInitialized() const {
        return initialized_;
    }
    // De-initialize database (destroys the singleton; safe to call when uninitialized)
    static void Deinit() {
        std::lock_guard<std::mutex> lock(instanceMutex_);
        if (instance_) {
            instance_.reset();
        }
    }
    // Fetch the embedding stored under `id`.
    std::vector<float> GetVector(int64_t id) const;
    // Debug: dump ids and embedding prefixes to the log/stdout.
    void ShowTable();
    // All rowids currently in the table.
    std::vector<int64_t> GetAllIds();

private:
    // Constructor: add ID mode parameter
    explicit EmbeddingDB(const std::string &dbPath = ":memory:", size_t vectorDim = 4, const std::string &distanceMetric = "cosine",
                         IdMode idMode = IdMode::AUTO_INCREMENT);

private:
    sqlite3 *db_;            // Open SQLite connection handle
    size_t vectorDim_;       // Required length of every stored embedding
    std::string tableName_;  // Name of the vec virtual table
    IdMode idMode_;          // Rowid assignment policy
    bool initialized_ = false;  // Set after successful construction
    // Helper functions
    void CheckVectorDimension(const std::vector<float> &vector) const;
    void ExecuteSQL(const std::string &sql);
    static void CheckSQLiteError(int rc, sqlite3 *db);
    int64_t GetLastInsertRowId() const;

private:
    // Singleton related
    static std::unique_ptr<EmbeddingDB> instance_;
    static std::mutex instanceMutex_;
    // Database operation mutex
    mutable std::mutex dbMutex_;
};
} // namespace inspire
#endif // INSPIRE_EMBEDDING_DB_H

View File

@@ -1,527 +0,0 @@
//
// Created by tunm on 2023/9/8.
//
#include "feature_hub.h"
#include "simd.h"
#include "herror.h"
#include <thread>
namespace inspire {
std::mutex FeatureHub::mutex_;                            // Guards singleton creation and public API entry points
std::shared_ptr<FeatureHub> FeatureHub::instance_ = nullptr;  // Lazily created singleton instance

// Default constructor; state is set up later by EnableHub().
FeatureHub::FeatureHub() {}

// Returns the process-wide FeatureHub singleton, creating it on first use.
// Creation is serialized on mutex_.
std::shared_ptr<FeatureHub> FeatureHub::GetInstance() {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!instance_) {
        instance_ = std::shared_ptr<FeatureHub>(new FeatureHub());
    }
    return instance_;
}
// Shuts the hub down: closes the optional database, drops all in-memory
// feature blocks and caches, and resets configuration to defaults.
// Returns HERR_FT_HUB_DISABLE_REPETITION when already disabled.
int32_t FeatureHub::DisableHub() {
    if (!m_enable_) {
        INSPIRE_LOGW("FeatureHub is already disabled.");
        return HERR_FT_HUB_DISABLE_REPETITION;
    }
    // Close the database if it starts
    if (m_db_) {
        int ret = m_db_->CloseDatabase();
        if (ret != HSUCCEED) {
            INSPIRE_LOGE("Failed to close the database: %d", ret);
            return ret;  // leave state untouched so the caller may retry
        }
        m_db_.reset();
    }
    // Release in-memory storage and search caches.
    m_feature_matrix_list_.clear();
    m_search_face_feature_cache_.clear();
    m_db_configuration_ = DatabaseConfiguration();  // Reset using the default constructor
    m_recognition_threshold_ = 0.0f;
    m_search_mode_ = SEARCH_MODE_EAGER;
    m_face_feature_ptr_cache_.reset();
    m_enable_ = false;
    return HSUCCEED;
}
// Initializes the hub from `configuration`: validates the threshold and
// block count, allocates the in-memory feature blocks, optionally opens the
// SQLite store and reloads every persisted feature into memory.
// Returns HERR_FT_HUB_ENABLE_REPETITION when already enabled, or the first
// error encountered while opening/reading the database.
int32_t FeatureHub::EnableHub(const DatabaseConfiguration &configuration, MatrixCore core) {
    int32_t ret;
    if (m_enable_) {
        INSPIRE_LOGW("You have enabled the FeatureHub feature. It is not valid to do so again");
        return HERR_FT_HUB_ENABLE_REPETITION;
    }
    // Config
    m_db_configuration_ = configuration;
    m_recognition_threshold_ = m_db_configuration_.recognition_threshold;
    // Cosine similarity lives in [-1, 1]; out-of-range thresholds fall back to 0.5.
    if (m_recognition_threshold_ < -1.0f || m_recognition_threshold_ > 1.0f) {
        INSPIRE_LOGW("The search threshold entered does not fit the required range (-1.0f, 1.0f) and has been set to 0.5 by default");
        m_recognition_threshold_ = 0.5f;
    }
    m_search_mode_ = m_db_configuration_.search_mode;
    // Clamp the block count to [1, 25] (each block holds 512 features).
    if (m_db_configuration_.feature_block_num <= 0) {
        m_db_configuration_.feature_block_num = 10;
        INSPIRE_LOGW(
          "The number of feature blocks cannot be 0, but has been set to the default number of 10, that is, the maximum number of stored faces is "
          "supported: 5120");
    } else if (m_db_configuration_.feature_block_num > 25) {
        m_db_configuration_.feature_block_num = 25;
        INSPIRE_LOGW(
          "The number of feature blocks cannot exceed 25, which has been set to the maximum value, that is, the maximum number of stored faces "
          "supported: 12800");
    }
    // Allocate memory for the feature matrix
    for (int i = 0; i < m_db_configuration_.feature_block_num; ++i) {
        std::shared_ptr<FeatureBlock> block;
        block.reset(FeatureBlock::Create(core, 512, 512));
        m_feature_matrix_list_.push_back(block);
    }
    if (m_db_configuration_.enable_use_db) {
        m_db_ = std::make_shared<SQLiteFaceManage>();
        // A directory path gets the hidden default file name appended.
        if (IsDirectory(m_db_configuration_.db_path)) {
            std::string dbFile = m_db_configuration_.db_path + "/" + DB_FILE_NAME;
            ret = m_db_->OpenDatabase(dbFile);
        } else {
            ret = m_db_->OpenDatabase(m_db_configuration_.db_path);
        }
        if (ret != HSUCCEED) {
            INSPIRE_LOGE("An error occurred while opening the database: %d", ret);
            return ret;
        }
        // Warm the in-memory blocks with everything already persisted.
        std::vector<FaceFeatureInfo> infos;
        ret = m_db_->GetTotalFeatures(infos);
        if (ret == HSUCCEED) {
            if (!infos.empty()) {
                for (auto const &info : infos) {
                    ret = InsertFaceFeature(info.feature, info.tag, info.customId);
                    if (ret != HSUCCEED) {
                        INSPIRE_LOGE("ID: %d, Inserting error: %d", info.customId, ret);
                        return ret;
                    }
                }
            }
            m_enable_ = true;
        } else {
            INSPIRE_LOGE("Failed to get the vector from the database.");
            return ret;
        }
    } else {
        m_enable_ = true;
    }
    m_face_feature_ptr_cache_ = std::make_shared<FaceFeatureEntity>();
    return HSUCCEED;
}
// Computes the similarity of two equal-length vectors as a plain dot product
// via simd_dot. NOTE(review): this equals cosine similarity only when both
// inputs are L2-normalized — presumably guaranteed upstream; confirm.
// Returns HERR_SESS_REC_CONTRAST_FEAT_ERR for empty or mismatched lengths.
int32_t FeatureHub::CosineSimilarity(const std::vector<float> &v1, const std::vector<float> &v2, float &res) {
    if (v1.size() != v2.size() || v1.empty()) {
        return HERR_SESS_REC_CONTRAST_FEAT_ERR;  // The similarity cannot be calculated if the vector lengths are not equal
    }
    // Calculate the cosine similarity
    res = simd_dot(v1.data(), v2.data(), v1.size());
    return HSUCCEED;
}
int32_t FeatureHub::CosineSimilarity(const float *v1, const float *v2, int32_t size, float &res) {
res = simd_dot(v1, v2, size);
return HSUCCEED;
}
// Writes `feature` (with its tag and customId) into the slot addressed by
// the flat index `featureIndex`.
// Returns HERR_SESS_REC_INVALID_INDEX when the index is out of range.
int32_t FeatureHub::RegisterFaceFeature(const std::vector<float> &feature, int featureIndex, const std::string &tag, int32_t customId) {
    const bool indexInRange = featureIndex >= 0 && featureIndex < m_feature_matrix_list_.size() * NUM_OF_FEATURES_IN_BLOCK;
    if (!indexInRange) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    // Translate the flat index into (block, row) coordinates.
    const int block = featureIndex / NUM_OF_FEATURES_IN_BLOCK;
    const int row = featureIndex % NUM_OF_FEATURES_IN_BLOCK;
    return m_feature_matrix_list_[block]->RegisterFeature(row, feature, tag, customId);
}
// Appends `feature` to the first block that still has room: blocks are tried
// in order until one returns something other than HERR_SESS_REC_BLOCK_FULL.
int32_t FeatureHub::InsertFaceFeature(const std::vector<float> &feature, const std::string &tag, int32_t customId) {
    int32_t ret = HSUCCEED;
    for (auto &block : m_feature_matrix_list_) {
        ret = block->AddFeature(feature, tag, customId);
        if (ret != HERR_SESS_REC_BLOCK_FULL) {
            break;
        }
    }
    return ret;
}
// Linear scan over all blocks for the single best match to `queryFeature`.
// In eager mode (mostSimilar == false) the scan stops at the first block
// whose best candidate clears `threshold`; otherwise all blocks are visited.
// On no match, searchResult is filled with sentinel values (-1 / "None") and
// HSUCCEED is still returned.
// NOTE(review): the size check compares the feature dimension against
// NUM_OF_FEATURES_IN_BLOCK (block capacity). Both happen to be 512 here, but
// the constant conflates two distinct concepts — confirm intent.
int32_t FeatureHub::SearchFaceFeature(const std::vector<float> &queryFeature, SearchResult &searchResult, float threshold, bool mostSimilar) {
    if (queryFeature.size() != NUM_OF_FEATURES_IN_BLOCK) {
        return HERR_SESS_REC_FEAT_SIZE_ERR;  // Query feature size does not match expectations
    }
    bool found = false;       // Whether matching features are found
    float maxScore = -1.0f;   // The maximum score is initialized to a negative number
    int maxIndex = -1;        // The index corresponding to the maximum score
    std::string tag = "None";
    int maxCid = -1;
    for (int blockIndex = 0; blockIndex < m_feature_matrix_list_.size(); ++blockIndex) {
        if (m_feature_matrix_list_[blockIndex]->GetUsedCount() == 0) {
            // If the FeatureBlock has no used features, skip to the next block
            continue;
        }
        // Offset used to convert the block-local index into a flat index.
        int startIndex = blockIndex * NUM_OF_FEATURES_IN_BLOCK;
        SearchResult tempResult;
        // Call the appropriate FeatureBlock search function
        int32_t result = m_feature_matrix_list_[blockIndex]->SearchNearest(queryFeature, tempResult);
        if (result != HSUCCEED) {
            // Error
            return result;
        }
        // If you find a higher score feature
        if (tempResult.score > maxScore) {
            maxScore = tempResult.score;
            maxIndex = startIndex + tempResult.index;
            tag = tempResult.tag;
            maxCid = tempResult.customId;
            if (maxScore >= threshold) {
                found = true;
                if (!mostSimilar) {
                    // Use Eager-Mode: When the score is greater than or equal to the threshold, stop searching for the next FeatureBlock
                    break;
                }
            }
        }
    }
    if (found) {
        searchResult.score = maxScore;
        searchResult.index = maxIndex;
        searchResult.tag = tag;
        searchResult.customId = maxCid;
    } else {
        // Sentinel values signalling "no match above threshold".
        searchResult.score = -1.0f;
        searchResult.index = -1;
        searchResult.tag = "None";
        searchResult.customId = -1;
    }
    return HSUCCEED;  // No matching feature found but not an error
}
int32_t FeatureHub::SearchFaceFeatureTopK(const std::vector<float> &queryFeature, std::vector<SearchResult> &searchResultList, size_t maxTopK,
float threshold) {
if (queryFeature.size() != NUM_OF_FEATURES_IN_BLOCK) {
return HERR_SESS_REC_FEAT_SIZE_ERR;
}
std::vector<SearchResult> tempResultList;
searchResultList.clear();
for (int blockIndex = 0; blockIndex < m_feature_matrix_list_.size(); ++blockIndex) {
if (m_feature_matrix_list_[blockIndex]->GetUsedCount() == 0) {
continue;
}
tempResultList.clear();
int32_t result = m_feature_matrix_list_[blockIndex]->SearchTopKNearest(queryFeature, maxTopK, tempResultList);
if (result != HSUCCEED) {
return result;
}
for (const SearchResult &result : tempResultList) {
if (result.score >= threshold) {
searchResultList.push_back(result);
}
}
}
std::sort(searchResultList.begin(), searchResultList.end(), [](const SearchResult &a, const SearchResult &b) { return a.score > b.score; });
if (searchResultList.size() > maxTopK) {
searchResultList.resize(maxTopK);
}
return HSUCCEED;
}
// Removes the feature stored at flat index `featureIndex`.
// Returns HERR_SESS_REC_INVALID_INDEX when the index is out of range.
int32_t FeatureHub::DeleteFaceFeature(int featureIndex) {
    const bool indexInRange = featureIndex >= 0 && featureIndex < m_feature_matrix_list_.size() * NUM_OF_FEATURES_IN_BLOCK;
    if (!indexInRange) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    // Translate the flat index into (block, row) coordinates.
    const int block = featureIndex / NUM_OF_FEATURES_IN_BLOCK;
    const int row = featureIndex % NUM_OF_FEATURES_IN_BLOCK;
    return m_feature_matrix_list_[block]->DeleteFeature(row);
}
// Copies the feature stored at flat index `featureIndex` into `feature`.
// Returns HERR_SESS_REC_INVALID_INDEX when the index is out of range.
int32_t FeatureHub::GetFaceFeature(int featureIndex, Embedded &feature) {
    const bool indexInRange = featureIndex >= 0 && featureIndex < m_feature_matrix_list_.size() * NUM_OF_FEATURES_IN_BLOCK;
    if (!indexInRange) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    // Translate the flat index into (block, row) coordinates.
    const int block = featureIndex / NUM_OF_FEATURES_IN_BLOCK;
    const int row = featureIndex % NUM_OF_FEATURES_IN_BLOCK;
    return m_feature_matrix_list_[block]->GetFeature(row, feature);
}
// Retrieves the feature plus its tag and state at flat index `featureIndex`.
// Returns HERR_SESS_REC_INVALID_INDEX when the index is out of range.
int32_t FeatureHub::GetFaceEntity(int featureIndex, Embedded &feature, std::string &tag, FEATURE_STATE &status) {
    const bool indexInRange = featureIndex >= 0 && featureIndex < m_feature_matrix_list_.size() * NUM_OF_FEATURES_IN_BLOCK;
    if (!indexInRange) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    // Translate the flat index into (block, row) coordinates.
    const int block = featureIndex / NUM_OF_FEATURES_IN_BLOCK;
    const int row = featureIndex % NUM_OF_FEATURES_IN_BLOCK;
    const int32_t result = m_feature_matrix_list_[block]->GetFeature(row, feature);
    tag = m_feature_matrix_list_[block]->GetTagFromRow(row);
    status = m_feature_matrix_list_[block]->GetStateFromRow(row);
    return result;
}
// Returns the total number of features currently stored, summed across
// every feature block.
int32_t FeatureHub::GetFaceFeatureCount() {
    int usedTotal = 0;
    for (size_t i = 0; i < m_feature_matrix_list_.size(); ++i) {
        usedTotal += m_feature_matrix_list_[i]->GetUsedCount();
    }
    return usedTotal;
}
// Returns the per-block slot capacity (512). NOTE(review): the same constant
// is also used as the expected feature-vector length in the search paths.
int32_t FeatureHub::GetFeatureNum() const {
    return NUM_OF_FEATURES_IN_BLOCK;
}
// Overwrites the slot at flat index `featureIndex` with `feature`, `tag`
// and `customId`. Returns HERR_SESS_REC_INVALID_INDEX when out of range.
int32_t FeatureHub::UpdateFaceFeature(const std::vector<float> &feature, int featureIndex, const std::string &tag, int32_t customId) {
    const bool indexInRange = featureIndex >= 0 && featureIndex < m_feature_matrix_list_.size() * NUM_OF_FEATURES_IN_BLOCK;
    if (!indexInRange) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    // Translate the flat index into (block, row) coordinates.
    const int block = featureIndex / NUM_OF_FEATURES_IN_BLOCK;
    const int row = featureIndex % NUM_OF_FEATURES_IN_BLOCK;
    return m_feature_matrix_list_[block]->UpdateFeature(row, feature, tag, customId);
}
// Debug helper that prints the feature matrix.
// NOTE(review): only the first block is printed; remaining blocks in
// m_feature_matrix_list_ are ignored — confirm this is intentional.
// Also assumes at least one block exists (i.e. EnableHub was called).
void FeatureHub::PrintFeatureMatrixInfo() {
    m_feature_matrix_list_[0]->PrintMatrix();
}
// Scans every block for `customId`; returns the flat feature index on a hit,
// or -1 when no block contains that id.
int32_t FeatureHub::FindFeatureIndexByCustomId(int32_t customId) {
    for (int block = 0; block < m_feature_matrix_list_.size(); ++block) {
        // Ask the current block for the row holding this customId.
        const int row = m_feature_matrix_list_[block]->FindIndexByCustomId(customId);
        if (row != -1) {
            return block * NUM_OF_FEATURES_IN_BLOCK + row;  // flat index
        }
    }
    return -1;  // not present in any block
}
// Public single-match search entry point. Serialized on mutex_; requires
// EnableHub() to have been called. On success the matched feature and tag
// are copied into member caches so the C API can hand out stable pointers
// (m_face_feature_ptr_cache_ / m_string_cache_) after this call returns.
int32_t FeatureHub::SearchFaceFeature(const Embedded &queryFeature, SearchResult &searchResult) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    m_search_face_feature_cache_.clear();
    std::memset(m_string_cache_, 0, sizeof(m_string_cache_));  // Initial Zero
    // Exhaustive mode keeps scanning for the best match; eager mode stops early.
    auto ret = SearchFaceFeature(queryFeature, searchResult, m_recognition_threshold_, m_search_mode_ == SEARCH_MODE_EXHAUSTIVE);
    if (ret == HSUCCEED) {
        if (searchResult.index != -1) {
            ret = GetFaceFeature(searchResult.index, m_search_face_feature_cache_);
        }
        // Point the exported cache at the (possibly empty) copied feature.
        m_face_feature_ptr_cache_->data = m_search_face_feature_cache_.data();
        m_face_feature_ptr_cache_->dataSize = m_search_face_feature_cache_.size();
        // Ensure that buffer overflows do not occur
        size_t copy_length = std::min(searchResult.tag.size(), sizeof(m_string_cache_) - 1);
        std::strncpy(m_string_cache_, searchResult.tag.c_str(), copy_length);
        // Make sure the string ends with a null character
        m_string_cache_[copy_length] = '\0';
    }
    return ret;
}
// Public top-K search entry point. Serialized on mutex_; requires
// EnableHub() to have been called. On success the matched custom ids and
// scores are mirrored into member caches for later retrieval.
int32_t FeatureHub::SearchFaceFeatureTopK(const Embedded &queryFeature, size_t topK) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    m_top_k_confidence_.clear();
    m_top_k_custom_ids_cache_.clear();
    const auto ret = SearchFaceFeatureTopK(queryFeature, m_search_top_k_cache_, topK, m_recognition_threshold_);
    if (ret == HSUCCEED) {
        // Split the result structs into the parallel id/score caches.
        for (const auto &item : m_search_top_k_cache_) {
            m_top_k_custom_ids_cache_.push_back(item.customId);
            m_top_k_confidence_.push_back(item.score);
        }
    }
    return ret;
}
// Inserts a feature under a caller-chosen customId, rejecting duplicates
// with HERR_SESS_REC_ID_ALREADY_EXIST. When persistence is enabled the
// record is also written to the SQLite store. Serialized on mutex_.
int32_t FeatureHub::FaceFeatureInsertFromCustomId(const std::vector<float> &feature, const std::string &tag, int32_t customId) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    auto index = FindFeatureIndexByCustomId(customId);
    if (index != -1) {
        return HERR_SESS_REC_ID_ALREADY_EXIST;
    }
    auto ret = InsertFaceFeature(feature, tag, customId);
    if (ret == HSUCCEED && m_db_ != nullptr) {
        // operational database
        // NOTE(review): `{0}` relies on FaceFeatureInfo's first member being
        // scalar; remaining members are value-initialized — confirm layout.
        FaceFeatureInfo item = {0};
        item.customId = customId;
        item.tag = tag;
        item.feature = feature;
        ret = m_db_->InsertFeature(item);
    }
    return ret;
}
// Deletes the feature registered under `customId` from memory and, when
// persistence is enabled, from the SQLite store. Serialized on mutex_.
// Returns HERR_SESS_REC_INVALID_INDEX when the id is unknown.
int32_t FeatureHub::FaceFeatureRemoveFromCustomId(int32_t customId) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    const auto index = FindFeatureIndexByCustomId(customId);
    if (index == -1) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    int32_t ret = DeleteFaceFeature(index);
    if (ret == HSUCCEED && m_db_ != nullptr) {
        ret = m_db_->DeleteFeature(customId);
    }
    return ret;
}
// Replaces the feature registered under `customId` in memory and, when
// persistence is enabled, in the SQLite store. Serialized on mutex_.
// Returns HERR_SESS_REC_INVALID_INDEX when the id is unknown.
int32_t FeatureHub::FaceFeatureUpdateFromCustomId(const std::vector<float> &feature, const std::string &tag, int32_t customId) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    auto index = FindFeatureIndexByCustomId(customId);
    if (index == -1) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    auto ret = UpdateFaceFeature(feature, index, tag, customId);
    if (ret == HSUCCEED && m_db_ != nullptr) {
        // Mirror the update into the persistent store.
        FaceFeatureInfo item = {0};
        item.customId = customId;
        item.tag = tag;
        item.feature = feature;
        ret = m_db_->UpdateFeature(item);
    }
    return ret;
}
// Looks up the feature registered under `customId` and copies it (plus its
// tag) into the member caches exported through GetFaceFeaturePtrCache() and
// GetStringCache(). Serialized on mutex_.
// Returns HERR_SESS_REC_INVALID_INDEX when the id is unknown.
int32_t FeatureHub::GetFaceFeatureFromCustomId(int32_t customId) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    auto index = FindFeatureIndexByCustomId(customId);
    if (index == -1) {
        return HERR_SESS_REC_INVALID_INDEX;
    }
    m_getter_face_feature_cache_.clear();
    std::string tag;
    FEATURE_STATE status;
    auto ret = GetFaceEntity(index, m_getter_face_feature_cache_, tag, status);
    // Expose the copied feature through the stable pointer cache.
    m_face_feature_ptr_cache_->data = m_getter_face_feature_cache_.data();
    m_face_feature_ptr_cache_->dataSize = m_getter_face_feature_cache_.size();
    // Ensure that buffer overflows do not occur
    size_t copy_length = std::min(tag.size(), sizeof(m_string_cache_) - 1);
    std::strncpy(m_string_cache_, tag.c_str(), copy_length);
    // Make sure the string ends with a null character
    m_string_cache_[copy_length] = '\0';
    return ret;
}
// Prints the persistent face table via the SQLite manager.
// Returns HERR_FT_HUB_DISABLE when the hub is disabled or when persistence
// was never enabled.
int32_t FeatureHub::ViewDBTable() {
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    // Fix: the hub can be enabled with enable_use_db == false, in which case
    // m_db_ was never created and the previous code dereferenced a null
    // pointer here.
    if (m_db_ == nullptr) {
        INSPIRE_LOGE("Database persistence is not enabled; there is no table to view");
        return HERR_FT_HUB_DISABLE;
    }
    auto ret = m_db_->ViewTotal();
    return ret;
}
// Overrides the similarity threshold used by subsequent searches.
// Unlike EnableHub(), no range validation is applied here.
void FeatureHub::SetRecognitionThreshold(float threshold) {
    m_recognition_threshold_ = threshold;
}
// Switches between eager (stop at first hit above threshold) and exhaustive
// (always find the best match) search behavior.
void FeatureHub::SetRecognitionSearchMode(SearchMode mode) {
    m_search_mode_ = mode;
}
// =========== Getter ===========
// Feature copied by the most recent successful SearchFaceFeature call.
const Embedded &FeatureHub::GetSearchFaceFeatureCache() const {
    return m_search_face_feature_cache_;
}
// Null-terminated tag string filled by the most recent search/get call.
char *FeatureHub::GetStringCache() {
    return m_string_cache_;
}
// Pointer/size view over the most recently cached feature.
const std::shared_ptr<FaceFeaturePtr> &FeatureHub::GetFaceFeaturePtrCache() const {
    return m_face_feature_ptr_cache_;
}
// Scores produced by the most recent SearchFaceFeatureTopK call.
std::vector<float> &FeatureHub::GetTopKConfidence() {
    return m_top_k_confidence_;
}
// Custom ids produced by the most recent SearchFaceFeatureTopK call.
std::vector<int32_t> &FeatureHub::GetTopKCustomIdsCache() {
    return m_top_k_custom_ids_cache_;
}
} // namespace inspire

View File

@@ -1,352 +0,0 @@
//
// Created by tunm on 2023/9/8.
//
#pragma once
#ifndef HYPERFACEREPO_FACERECOGNITION_H
#define HYPERFACEREPO_FACERECOGNITION_H
#include <mutex>
#include "common/face_info/face_object.h"
#include "common/face_data/data_tools.h"
#include "middleware/camera_stream/camera_stream.h"
#include "feature_hub/features_block/feature_block.h"
#include "feature_hub/persistence/sqlite_faces_manage.h"
#include "middleware/model_archive/inspire_archive.h"
/**
* @def DB_FILE_NAME
* @brief Default database file name used in the FaceContext.
*/
#define DB_FILE_NAME ".E63520A95DD5B3892C56DA38C3B28E551D8173FD"
#define FEATURE_HUB FeatureHub::GetInstance()
namespace inspire {
// Comparator function object to sort SearchResult by score (descending order).
struct CompareByScore {
    bool operator()(const SearchResult& a, const SearchResult& b) const {
        return a.score > b.score;
    }
};
// Strategy used when scanning feature blocks for a match.
typedef enum SearchMode {
    SEARCH_MODE_EAGER = 0,  // Eager mode: Stops when a vector meets the threshold.
    SEARCH_MODE_EXHAUSTIVE, // Exhaustive mode: Searches until the best match is found.
} SearchMode;
/**
 * @struct DatabaseConfiguration
 * @brief Structure to configure database settings for FaceRecognition.
 */
using DatabaseConfiguration = struct DatabaseConfiguration {
    int feature_block_num = 20;                ///< Number of in-memory feature blocks (each holds 512 features).
    bool enable_use_db = false;                ///< Whether to enable data persistence.
    std::string db_path;                       ///< Path to the database file.
    float recognition_threshold = 0.48f;       ///< Face search threshold
    SearchMode search_mode = SEARCH_MODE_EAGER;  ///< Search mode
};
/**
 * @class FeatureHub
 * @brief Singleton service for in-memory face feature storage, with optional
 *        SQLite persistence.
 *
 * Provides feature registration, update, deletion, single-match and top-K
 * search. Public entry points are serialized on a static mutex.
 */
class INSPIRE_API FeatureHub {
private:
    static std::mutex mutex_;                      ///< Mutex lock
    static std::shared_ptr<FeatureHub> instance_;  ///< FeatureHub Instance
    const int32_t NUM_OF_FEATURES_IN_BLOCK = 512;  ///< Number of features in each feature block.

    FeatureHub(const FeatureHub&) = delete;
    FeatureHub& operator=(const FeatureHub&) = delete;

public:
    /**
     * @brief Enables the feature hub with the specified configuration and matrix core.
     * Sets up the in-memory blocks and, when configured, the database.
     * @param configuration The database configuration settings used to configure the hub.
     * @param core The matrix core used for processing, defaulting to OpenCV if not specified.
     * @return int32_t Status code (0 on success).
     */
    int32_t EnableHub(const DatabaseConfiguration& configuration, MatrixCore core = MC_OPENCV);

    /**
     * @brief Disables the feature hub, releasing the database connection and
     *        all in-memory feature storage.
     * @return int32_t Status code (0 on success).
     */
    int32_t DisableHub();

    /** @brief Returns the process-wide singleton, creating it on first use. */
    static std::shared_ptr<FeatureHub> GetInstance();

    /**
     * @brief Searches for the single best matching face feature.
     * @param queryFeature Embedded feature to search for.
     * @param searchResult SearchResult object to store search results.
     * @return int32_t Status code of the search operation.
     */
    int32_t SearchFaceFeature(const Embedded& queryFeature, SearchResult &searchResult);

    /**
     * @brief Searches the stored data for the top K most similar features;
     *        results are exposed via GetTopKConfidence()/GetTopKCustomIdsCache().
     * @param topK Maximum number of results.
     * @return int32_t Status code of the search operation.
     */
    int32_t SearchFaceFeatureTopK(const Embedded& queryFeature, size_t topK);

    /**
     * @brief Inserts a face feature with a custom ID (also persisted when the DB is enabled).
     * @param feature Vector of floats representing the face feature.
     * @param tag String tag associated with the feature.
     * @param customId Custom ID for the feature.
     * @return int32_t Status code of the insertion operation.
     */
    int32_t FaceFeatureInsertFromCustomId(const std::vector<float>& feature, const std::string &tag, int32_t customId);

    /**
     * @brief Removes a face feature by its custom ID.
     * @param customId Custom ID of the feature to remove.
     * @return int32_t Status code of the removal operation.
     */
    int32_t FaceFeatureRemoveFromCustomId(int32_t customId);

    /**
     * @brief Updates a face feature by its custom ID.
     * @param feature Vector of floats representing the new face feature.
     * @param tag String tag associated with the feature.
     * @param customId Custom ID of the feature to update.
     * @return int32_t Status code of the update operation.
     */
    int32_t FaceFeatureUpdateFromCustomId(const std::vector<float>& feature, const std::string &tag, int32_t customId);

    /**
     * @brief Retrieves a face feature by its custom ID into the internal caches.
     * @param customId Custom ID of the feature to retrieve.
     * @return int32_t Status code of the retrieval operation.
     */
    int32_t GetFaceFeatureFromCustomId(int32_t customId);

    /**
     * @brief Views the database table containing face data.
     * @return int32_t Status code of the operation.
     */
    int32_t ViewDBTable();

    /** @brief Sets the recognition threshold used by subsequent searches. */
    void SetRecognitionThreshold(float threshold);

    /** @brief Sets the search mode (eager vs exhaustive). */
    void SetRecognitionSearchMode(SearchMode mode);

    /**
     * @brief Computes the cosine similarity between two feature vectors.
     * @param v1 First feature vector.
     * @param v2 Second feature vector.
     * @param res Output parameter to store the cosine similarity result.
     * @return int32_t Status code indicating success (0) or failure.
     */
    static int32_t CosineSimilarity(const std::vector<float>& v1, const std::vector<float>& v2, float &res);

    /**
     * @brief Computes the cosine similarity between two raw feature buffers.
     * @param v1 Pointer to the first feature vector.
     * @param v2 Pointer to the second feature vector.
     * @param size Size of the feature vectors.
     * @param res Output parameter to store the cosine similarity result.
     * @return int32_t Status code indicating success (0) or failure.
     */
    static int32_t CosineSimilarity(const float* v1, const float *v2, int32_t size, float &res);

public:
    // Getter Function

    /** @brief Feature cached by the most recent successful search. */
    const Embedded& GetSearchFaceFeatureCache() const;

    /** @brief Pointer/size view over the most recently cached feature. */
    const std::shared_ptr<FaceFeaturePtr>& GetFaceFeaturePtrCache() const;

    /** @brief Null-terminated tag string from the most recent search/get call. */
    char* GetStringCache();

    /** @brief Number of feature slots per block. */
    int32_t GetFeatureNum() const;

    /** @brief Total number of features stored across all blocks. */
    int32_t GetFaceFeatureCount();

    /** @brief Scores from the most recent top-K search. */
    std::vector<float> &GetTopKConfidence();

    /** @brief Custom ids from the most recent top-K search. */
    std::vector<int32_t> &GetTopKCustomIdsCache();

public:
    /** @brief Constructor for FeatureHub class. */
    FeatureHub();

    /**
     * @brief Registers a feature at an explicit flat slot index.
     * @param feature Vector of floats representing the feature.
     * @param featureIndex Flat index of the slot.
     * @param tag String tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status code indicating success (0) or failure.
     */
    int32_t RegisterFaceFeature(const std::vector<float>& feature, int featureIndex, const std::string &tag, int32_t customId);

    /**
     * @brief Updates the feature stored at a flat slot index.
     * @param feature Vector of floats representing the updated feature.
     * @param featureIndex Flat index of the slot.
     * @param tag New string tag for the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status code indicating success (0) or failure.
     */
    int32_t UpdateFaceFeature(const std::vector<float>& feature, int featureIndex, const std::string &tag, int32_t customId);

    /**
     * @brief Searches all blocks for the nearest feature to the query.
     * @param queryFeature Query feature vector.
     * @param searchResult SearchResult structure to store the search results.
     * @param threshold Threshold for considering a match.
     * @param mostSimilar Whether to keep scanning for the most similar feature.
     * @return int32_t Status code indicating success (0) or failure.
     */
    int32_t SearchFaceFeature(const std::vector<float>& queryFeature, SearchResult &searchResult, float threshold, bool mostSimilar=true);

    /**
     * @brief Searches all blocks for the top K features above a threshold.
     * @param queryFeature A vector of floats representing the feature to query against.
     * @param searchResultList A reference to a vector where the top K search results will be stored.
     * @param maxTopK The maximum number of top results to return.
     * @param threshold A float representing the minimum similarity score threshold.
     * @return int32_t Returns a status code (0 for success, non-zero for any errors).
     */
    int32_t SearchFaceFeatureTopK(const std::vector<float>& queryFeature, std::vector<SearchResult> &searchResultList, size_t maxTopK, float threshold);

    /**
     * @brief Inserts a feature into the first block with free capacity.
     * @param feature Vector of floats representing the feature.
     * @param tag String tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status code indicating success (0) or failure.
     */
    int32_t InsertFaceFeature(const std::vector<float>& feature, const std::string &tag, int32_t customId);

    /**
     * @brief Deletes the feature at a flat slot index.
     * @param featureIndex Index of the feature to delete.
     * @return int32_t Status code indicating success (0) or failure.
     */
    int32_t DeleteFaceFeature(int featureIndex);

    /**
     * @brief Retrieves the feature at a flat slot index.
     * @param featureIndex Index of the feature to retrieve.
     * @param feature Output parameter to store the retrieved feature.
     * @return int32_t Status code indicating success (0) or failure.
     */
    int32_t GetFaceFeature(int featureIndex, Embedded &feature);

    /**
     * @brief Retrieves the feature plus its tag and state at a flat slot index.
     * @param featureIndex Index of the feature to retrieve.
     * @param feature Output parameter for the feature; tag/status likewise.
     * @return int32_t Status code indicating success (0) or failure.
     */
    int32_t GetFaceEntity(int featureIndex, Embedded &feature, std::string& tag, FEATURE_STATE& status);

    /**
     * @brief Finds the flat index of a feature by its custom ID.
     * @param customId Custom identifier to search for.
     * @return int32_t Index of the feature with the given custom ID, or -1 if not found.
     */
    int32_t FindFeatureIndexByCustomId(int32_t customId);

    /** @brief Prints information about the feature matrix (debug). */
    void PrintFeatureMatrixInfo();

private:
    Embedded m_search_face_feature_cache_;  ///< Cache for face feature data used in search operations
    Embedded m_getter_face_feature_cache_;  ///< Cache filled by GetFaceFeatureFromCustomId
    std::shared_ptr<FaceFeaturePtr> m_face_feature_ptr_cache_;  ///< Shared pointer to cache of face feature pointers
    char m_string_cache_[256];              ///< Cache for temporary string storage
    std::vector<SearchResult> m_search_top_k_cache_;  ///< Raw results of the most recent top-K search
    std::vector<float> m_top_k_confidence_;           ///< Scores of the most recent top-K search
    std::vector<int32_t> m_top_k_custom_ids_cache_;   ///< Custom ids of the most recent top-K search

private:
    std::vector<std::shared_ptr<FeatureBlock>> m_feature_matrix_list_;  ///< List of feature blocks.
    DatabaseConfiguration m_db_configuration_;   ///< Configuration settings for the database
    float m_recognition_threshold_{0.48f};       ///< Threshold value for face recognition
    SearchMode m_search_mode_{SEARCH_MODE_EAGER};  ///< Flag to determine if the search should find the most similar feature
    std::shared_ptr<SQLiteFaceManage> m_db_;     ///< Shared pointer to the SQLiteFaceManage object
    bool m_enable_{false};                       ///< Running status
    std::mutex m_res_mtx_;                       ///< Mutex for thread safety.
};
} // namespace inspire
#endif //HYPERFACEREPO_FACERECOGNITION_H

View File

@@ -0,0 +1,334 @@
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include "feature_hub_db.h"
#include "simd.h"
#include "herror.h"
#include <thread>
#include "middleware/utils.h"
#include "middleware/system.h"
namespace inspire {
std::mutex FeatureHubDB::mutex_;                                  ///< Guards singleton creation and all locked hub operations
std::shared_ptr<FeatureHubDB> FeatureHubDB::instance_ = nullptr;  ///< Lazily-created singleton instance
// Default constructor: members are initialized by their in-class defaults.
FeatureHubDB::FeatureHubDB() {}
// Returns the process-wide singleton, creating it on first use.
// The class-level mutex makes lazy construction thread-safe.
std::shared_ptr<FeatureHubDB> FeatureHubDB::GetInstance() {
    std::lock_guard<std::mutex> guard(mutex_);
    if (instance_ == nullptr) {
        instance_.reset(new FeatureHubDB());
    }
    return instance_;
}
/**
 * Disables the hub: shuts down the embedding database (if initialized) and resets
 * all cached state back to defaults. Safe to call when already disabled.
 * @return HSUCCEED always.
 */
int32_t FeatureHubDB::DisableHub() {
    if (!m_enable_) {
        INSPIRE_LOGW("FeatureHub is already disabled.");
        return HSUCCEED;
    }
    // Close the database if it was started. (Removed a stale block of commented-out
    // error handling: EMBEDDING_DB::Deinit() reports nothing to check here.)
    if (EMBEDDING_DB::GetInstance().IsInitialized()) {
        EMBEDDING_DB::Deinit();
    }
    m_search_face_feature_cache_.clear();
    m_db_configuration_ = DatabaseConfiguration();  // Reset using the default constructor
    m_recognition_threshold_ = 0.0f;
    m_search_mode_ = SEARCH_MODE_EAGER;
    m_face_feature_ptr_cache_.reset();
    m_enable_ = false;
    return HSUCCEED;
}
// Loads every id stored in the embedding database into m_all_ids_
// (read back via GetExistingIds()). Fails if the hub is not enabled.
int32_t FeatureHubDB::GetAllIds() {
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    auto &db = EMBEDDING_DB::GetInstance();
    m_all_ids_ = db.GetAllIds();
    return HSUCCEED;
}
/**
 * Enables the hub: applies the configuration, clamps an out-of-range recognition
 * threshold to 0.5, and initializes the embedding database (in-memory unless
 * persistence is enabled, in which case the configured path or directory is used).
 * @param configuration Database configuration to apply.
 * @return HSUCCEED on success (also when already enabled, with a warning).
 */
int32_t FeatureHubDB::EnableHub(const DatabaseConfiguration &configuration) {
    if (m_enable_) {
        INSPIRE_LOGW("You have enabled the FeatureHub feature. It is not valid to do so again");
        return HSUCCEED;
    }
    // Apply config (fix: removed unused local `int32_t ret;`)
    m_db_configuration_ = configuration;
    m_recognition_threshold_ = m_db_configuration_.recognition_threshold;
    if (m_recognition_threshold_ < -1.0f || m_recognition_threshold_ > 1.0f) {
        INSPIRE_LOGW("The search threshold entered does not fit the required range (-1.0f, 1.0f) and has been set to 0.5 by default");
        m_recognition_threshold_ = 0.5f;
    }
    // Use an in-memory database unless persistence is requested; a directory path
    // gets the default database file name appended.
    std::string dbFile = ":memory:";
    if (m_db_configuration_.enable_persistence) {
        if (IsDirectory(m_db_configuration_.persistence_db_path)) {
            dbFile = os::PathJoin(m_db_configuration_.persistence_db_path, DB_FILE_NAME);
        } else {
            dbFile = m_db_configuration_.persistence_db_path;
        }
    }
    EMBEDDING_DB::Init(dbFile, 512, IdMode(configuration.primary_key_mode));  // 512 = embedding dimension
    m_enable_ = true;
    m_face_feature_ptr_cache_ = std::make_shared<FaceFeatureEntity>();
    return HSUCCEED;
}
int32_t FeatureHubDB::CosineSimilarity(const std::vector<float> &v1, const std::vector<float> &v2, float &res, bool normalize) {
if (v1.size() != v2.size() || v1.empty()) {
return HERR_SESS_REC_CONTRAST_FEAT_ERR; // The similarity cannot be calculated if the vector lengths are not equal
}
if (normalize) {
std::vector<float> v1_norm = v1;
std::vector<float> v2_norm = v2;
float mse1 = 0.0f;
float mse2 = 0.0f;
for (const auto &one : v1_norm) {
mse1 += one * one;
}
mse1 = sqrt(mse1);
for (float &one : v1_norm) {
one /= mse1;
}
for (const auto &one : v2_norm) {
mse2 += one * one;
}
mse2 = sqrt(mse2);
for (float &one : v2_norm) {
one /= mse2;
}
res = simd_dot(v1_norm.data(), v2_norm.data(), v1_norm.size());
} else {
// Calculate the cosine similarity
res = simd_dot(v1.data(), v2.data(), v1.size());
}
return HSUCCEED;
}
// Cosine similarity over raw float arrays. When normalize is set, both inputs are
// copied and scaled to unit length first; otherwise the raw dot product is returned.
// NOTE: a zero-length vector yields a division by a zero norm, as in the original.
int32_t FeatureHubDB::CosineSimilarity(const float *v1, const float *v2, int32_t size, float &res, bool normalize) {
    if (!normalize) {
        res = simd_dot(v1, v2, size);
        return HSUCCEED;
    }
    // Shared normalization helper replaces the two copy-pasted loops.
    auto toUnitLength = [](std::vector<float> &vec) {
        float norm = 0.0f;
        for (float value : vec) {
            norm += value * value;
        }
        norm = sqrt(norm);
        for (float &value : vec) {
            value /= norm;
        }
    };
    std::vector<float> lhs(v1, v1 + size);
    std::vector<float> rhs(v2, v2 + size);
    toUnitLength(lhs);
    toUnitLength(rhs);
    res = simd_dot(lhs.data(), rhs.data(), lhs.size());
    return HSUCCEED;
}
// Returns the number of embeddings currently stored, or 0 when the hub is disabled.
int32_t FeatureHubDB::GetFaceFeatureCount() {
    if (!m_enable_) {
        INSPIRE_LOGW("FeatureHub is disabled, please enable it before it can be served");
        return 0;
    }
    return EMBEDDING_DB::GetInstance().GetVectorCount();
}
int32_t FeatureHubDB::SearchFaceFeature(const Embedded &queryFeature, FaceSearchResult &searchResult, bool returnFeature) {
std::lock_guard<std::mutex> lock(mutex_);
if (!m_enable_) {
INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
return HSUCCEED;
}
m_search_face_feature_cache_.clear();
auto results = EMBEDDING_DB::GetInstance().SearchSimilarVectors(queryFeature, 1, m_recognition_threshold_, returnFeature);
searchResult.id = -1;
if (!results.empty()) {
auto &searched = results[0];
searchResult.similarity = searched.similarity;
searchResult.id = searched.id;
if (returnFeature) {
searchResult.feature = searched.feature;
// copy feature to cache
m_search_face_feature_cache_ = searched.feature;
m_face_feature_ptr_cache_->data = m_search_face_feature_cache_.data();
m_face_feature_ptr_cache_->dataSize = m_search_face_feature_cache_.size();
}
}
return HSUCCEED;
}
// Top-K search whose results are split into the internal id / confidence caches
// (read back via GetTopKCustomIdsCache() and GetTopKConfidence()).
int32_t FeatureHubDB::SearchFaceFeatureTopKCache(const Embedded &queryFeature, size_t topK) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    m_top_k_confidence_.clear();
    m_top_k_custom_ids_cache_.clear();
    const auto matches = EMBEDDING_DB::GetInstance().SearchSimilarVectors(queryFeature, topK, m_recognition_threshold_, false);
    for (const auto &match : matches) {
        m_top_k_custom_ids_cache_.push_back(match.id);
        m_top_k_confidence_.push_back(match.similarity);
    }
    return HSUCCEED;
}
// Top-K search returning results directly; the database layer applies the
// threshold and (optionally) includes the stored embeddings.
int32_t FeatureHubDB::SearchFaceFeatureTopK(const Embedded &queryFeature, std::vector<FaceSearchResult> &searchResult, size_t topK,
                                            bool returnFeature) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGW("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    auto &db = EMBEDDING_DB::GetInstance();
    searchResult = db.SearchSimilarVectors(queryFeature, topK, m_recognition_threshold_, returnFeature);
    return HSUCCEED;
}
// Inserts an embedding. result_id receives the id actually assigned by the
// database (or echoes `id` in manual primary-key mode); -1 on failure.
int32_t FeatureHubDB::FaceFeatureInsert(const std::vector<float> &feature, int32_t id, int64_t &result_id) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    const bool inserted = EMBEDDING_DB::GetInstance().InsertVector(id, feature, result_id);
    if (!inserted) {
        result_id = -1;
        return HERR_FT_HUB_INSERT_FAILURE;
    }
    return HSUCCEED;
}
// Removes the embedding with the given id.
// NOTE(review): unlike FaceFeatureUpdate, DeleteVector is not wrapped in try/catch
// and its outcome is not checked — a missing id is silently reported as success;
// confirm this is intended.
int32_t FeatureHubDB::FaceFeatureRemove(int32_t id) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    EMBEDDING_DB::GetInstance().DeleteVector(id);
    return HSUCCEED;
}
// Replaces the embedding stored under customId; reports not-found when the
// database layer throws.
int32_t FeatureHubDB::FaceFeatureUpdate(const std::vector<float> &feature, int32_t customId) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    try {
        EMBEDDING_DB::GetInstance().UpdateVector(customId, feature);
        return HSUCCEED;
    } catch (const std::exception &e) {
        INSPIRE_LOGW("Failed to update face feature, id: %d", customId);
        return HERR_FT_HUB_NOT_FOUND_FEATURE;
    }
}
// Fetches the embedding for `id` into the internal getter cache and refreshes the
// raw-pointer view (GetFaceFeaturePtrCache()). An empty vector means "not found".
int32_t FeatureHubDB::GetFaceFeature(int32_t id) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    auto stored = EMBEDDING_DB::GetInstance().GetVector(id);
    if (stored.empty()) {
        return HERR_FT_HUB_NOT_FOUND_FEATURE;
    }
    // Keep the copy alive so the raw pointer stays valid for the caller.
    m_getter_face_feature_cache_ = std::move(stored);
    m_face_feature_ptr_cache_->data = m_getter_face_feature_cache_.data();
    m_face_feature_ptr_cache_->dataSize = m_getter_face_feature_cache_.size();
    return HSUCCEED;
}
/**
 * Retrieves the embedding stored under `id`.
 * @param id Id of the feature to retrieve.
 * @param feature Receives the embedding.
 * @return HSUCCEED, HERR_FT_HUB_DISABLE, or HERR_FT_HUB_NOT_FOUND_FEATURE.
 */
int32_t FeatureHubDB::GetFaceFeature(int32_t id, std::vector<float> &feature) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!m_enable_) {
        INSPIRE_LOGW("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    try {
        feature = EMBEDDING_DB::GetInstance().GetVector(id);
    } catch (const std::exception &e) {
        INSPIRE_LOGW("Failed to get face feature, id: %d", id);
        return HERR_FT_HUB_NOT_FOUND_FEATURE;
    }
    // Consistency fix: the id-only overload treats an empty vector from GetVector as
    // "not found"; report the same error here instead of success with empty output.
    if (feature.empty()) {
        return HERR_FT_HUB_NOT_FOUND_FEATURE;
    }
    return HSUCCEED;
}
// Dumps the embedding table to the log for debugging; requires the hub to be enabled.
int32_t FeatureHubDB::ViewDBTable() {
    if (!m_enable_) {
        INSPIRE_LOGE("FeatureHub is disabled, please enable it before it can be served");
        return HERR_FT_HUB_DISABLE;
    }
    EMBEDDING_DB::GetInstance().ShowTable();
    return HSUCCEED;
}
// Sets the similarity threshold used by all subsequent search calls.
// NOTE(review): not synchronized with mutex_ — a concurrent search may observe
// the old value; confirm this is acceptable.
void FeatureHubDB::SetRecognitionThreshold(float threshold) {
    m_recognition_threshold_ = threshold;
}
// Sets the search mode. NOTE(review): the value is stored but never read by the
// DB-backed search paths in this translation unit — confirm it is still needed.
void FeatureHubDB::SetRecognitionSearchMode(SearchMode mode) {
    m_search_mode_ = mode;
}
// =========== Getter ===========
// Feature cached by the most recent SearchFaceFeature call (populated only when
// that call passed returnFeature=true and found a match).
const Embedded &FeatureHubDB::GetSearchFaceFeatureCache() const {
    return m_search_face_feature_cache_;
}
// Raw-pointer view (data/dataSize) into the internal feature caches; refreshed by
// SearchFaceFeature and GetFaceFeature(id).
const std::shared_ptr<FaceFeaturePtr> &FeatureHubDB::GetFaceFeaturePtrCache() const {
    return m_face_feature_ptr_cache_;
}
// Confidences filled by the last SearchFaceFeatureTopKCache call (parallel to ids).
std::vector<float> &FeatureHubDB::GetTopKConfidence() {
    return m_top_k_confidence_;
}
// Ids filled by the last SearchFaceFeatureTopKCache call (parallel to confidences).
std::vector<int64_t> &FeatureHubDB::GetTopKCustomIdsCache() {
    return m_top_k_custom_ids_cache_;
}
// Ids filled by the last GetAllIds call.
std::vector<int64_t> &FeatureHubDB::GetExistingIds() {
    return m_all_ids_;
}
} // namespace inspire

View File

@@ -0,0 +1,276 @@
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#ifndef INSPIRE_FEATURE_HUB_DB_H
#define INSPIRE_FEATURE_HUB_DB_H
#include <mutex>
#include <vector>
#include <string>
#include <memory>
#include "data_type.h"
#include "feature_hub/embedding_db/embedding_db.h"
#include "log.h"
// Default database file name used in the FaceContext.
#define DB_FILE_NAME ".feature_hub_db_v0"
#define FEATURE_HUB_DB FeatureHubDB::GetInstance()
namespace inspire {
// Comparator function object to sort SearchResult by score (descending order)
struct CompareByScore {
bool operator()(const FaceSearchResult& a, const FaceSearchResult& b) const {
return a.similarity > b.similarity;
}
};
// Search strategies for face matching (see DatabaseConfiguration::search_mode).
typedef enum SearchMode {
    SEARCH_MODE_EAGER = 0,   // Eager mode: Stops when a vector meets the threshold.
    SEARCH_MODE_EXHAUSTIVE,  // Exhaustive mode: Searches until the best match is found.
} SearchMode;
// How feature ids are assigned on insertion.
typedef enum PrimaryKeyMode {
    AUTO_INCREMENT = 0,  // Primary key assigned automatically by the database
    MANUAL_INPUT,        // Primary key supplied by the caller
} PrimaryKeyMode;
/**
 * @struct DatabaseConfiguration
 * @brief Configuration for the FeatureHubDB embedding database.
 */
using DatabaseConfiguration = struct DatabaseConfiguration {
    PrimaryKeyMode primary_key_mode = PrimaryKeyMode::AUTO_INCREMENT;  ///< How primary keys are assigned on insertion
    bool enable_persistence = false;  ///< Whether to enable data persistence (otherwise an in-memory database is used).
    std::string persistence_db_path;  ///< Path to the database file, or a directory (the default file name is appended).
    float recognition_threshold = 0.48f;  ///< Face search threshold
    SearchMode search_mode = SEARCH_MODE_EAGER;  ///< Search mode (!!Temporarily unavailable!!)
};
/**
 * @class FeatureHubDB
 * @brief Singleton service for internal face feature (embedding) storage.
 *
 * Wraps EMBEDDING_DB with registration, update, removal, and similarity search of
 * face embeddings, plus small caches exposed through raw-pointer views.
 */
class INSPIRE_API FeatureHubDB {
private:
    static std::mutex mutex_;                       ///< Mutex guarding singleton creation and locked hub operations
    static std::shared_ptr<FeatureHubDB> instance_; ///< Singleton FeatureHubDB instance
    // Non-copyable: the hub is a process-wide singleton.
    FeatureHubDB(const FeatureHubDB&) = delete;
    FeatureHubDB& operator=(const FeatureHubDB&) = delete;
public:
    /**
     * @brief Enables the feature hub with the specified configuration.
     *
     * Initializes the embedding database — in-memory by default, or file-backed when
     * persistence is enabled — and applies the recognition threshold (values outside
     * (-1.0, 1.0) fall back to 0.5).
     *
     * @param configuration The database configuration settings used to configure the hub.
     * @return int32_t Returns a status code indicating success (0) or failure (non-zero).
     */
    int32_t EnableHub(const DatabaseConfiguration& configuration);
    /**
     * @brief Disables the feature hub, freeing all associated resources.
     *
     * Shuts down the embedding database if initialized and resets cached state to
     * defaults. Safe to call when already disabled (warns and returns success).
     *
     * @return int32_t Returns a status code indicating success (0) or failure (non-zero).
     */
    int32_t DisableHub();
    /**
     * @brief Loads all ids stored in the database into the internal cache.
     *        Read them back afterwards via GetExistingIds().
     * @return int32_t Status code of the operation.
     */
    int32_t GetAllIds();
    // Returns the process-wide singleton, creating it on first use (thread-safe).
    static std::shared_ptr<FeatureHubDB> GetInstance();
    /**
     * @brief Searches for the single most similar face feature in storage.
     * @param queryFeature Embedded feature to search for.
     * @param searchResult Receives the best match; id is -1 when nothing passes the threshold.
     * @param returnFeature When true, the matched embedding is copied into searchResult and the internal cache.
     * @return int32_t Status code of the search operation.
     */
    int32_t SearchFaceFeature(const Embedded& queryFeature, FaceSearchResult& searchResult, bool returnFeature = true);
    /**
     * @brief Searches for the top-K most similar features, caching ids and confidences.
     *        Read the results via GetTopKCustomIdsCache() and GetTopKConfidence().
     * @param queryFeature Embedded feature to search for.
     * @param topK Maximum number of matches to cache.
     * @return int32_t Status code of the search operation.
     */
    int32_t SearchFaceFeatureTopKCache(const Embedded& queryFeature, size_t topK);
    /**
     * @brief Searches for the top-K most similar features.
     * @param queryFeature Embedded feature to search for.
     * @param searchResult Receives up to topK matches.
     * @param topK Maximum number of matches to return.
     * @param returnFeature When true, matched embeddings are included in the results.
     * @return int32_t Status code of the search operation.
     */
    int32_t SearchFaceFeatureTopK(const Embedded& queryFeature, std::vector<FaceSearchResult>& searchResult, size_t topK, bool returnFeature = false);
    /**
     * @brief Inserts a face feature.
     * @param feature Vector of floats representing the face feature.
     * @param id Requested id (honored in MANUAL_INPUT primary-key mode).
     * @param result_id Receives the id actually assigned, or -1 on failure.
     * @return int32_t Status code of the insertion operation.
     */
    int32_t FaceFeatureInsert(const std::vector<float>& feature, int32_t id, int64_t& result_id);
    /**
     * @brief Removes a face feature by its id.
     * @param id Id of the feature to remove.
     * @return int32_t Status code of the removal operation.
     */
    int32_t FaceFeatureRemove(int32_t id);
    /**
     * @brief Updates a face feature by its id.
     * @param feature Vector of floats representing the new face feature.
     * @param customId Id of the feature to update.
     * @return int32_t Status code of the update operation.
     */
    int32_t FaceFeatureUpdate(const std::vector<float>& feature, int32_t customId);
    /**
     * @brief Retrieves a face feature by id into the internal cache.
     *        Access the data via GetFaceFeaturePtrCache().
     * @param id Id of the feature to retrieve.
     * @return int32_t Status code of the retrieval operation.
     */
    int32_t GetFaceFeature(int32_t id);
    /**
     * @brief Retrieves a face feature by id.
     * @param id Id of the feature to retrieve.
     * @param feature Receives the retrieved face feature.
     * @return int32_t Status code of the retrieval operation.
     */
    int32_t GetFaceFeature(int32_t id, std::vector<float>& feature);
    /**
     * @brief Views the database table containing face data (logged for debugging).
     * @return int32_t Status code of the operation.
     */
    int32_t ViewDBTable();
    /**
     * @brief Sets the recognition threshold for face recognition.
     * @param threshold Float value of the new threshold.
     */
    void SetRecognitionThreshold(float threshold);
    /**
     * @brief Sets the search mode for face recognition.
     * @param mode Search mode.
     */
    void SetRecognitionSearchMode(SearchMode mode);
    /**
     * @brief Computes the cosine similarity between two feature vectors.
     *
     * @param v1 First feature vector.
     * @param v2 Second feature vector (must match v1's length).
     * @param res Output parameter to store the cosine similarity result.
     * @param normalize When true, both vectors are scaled to unit length first.
     * @return int32_t Status code indicating success (0) or failure.
     */
    static int32_t CosineSimilarity(const std::vector<float>& v1, const std::vector<float>& v2, float& res, bool normalize = false);
    /**
     * @brief Computes the cosine similarity between two feature vectors.
     *
     * @param v1 Pointer to the first feature vector.
     * @param v2 Pointer to the second feature vector.
     * @param size Size of the feature vectors.
     * @param res Output parameter to store the cosine similarity result.
     * @param normalize When true, both vectors are scaled to unit length first.
     * @return int32_t Status code indicating success (0) or failure.
     */
    static int32_t CosineSimilarity(const float* v1, const float* v2, int32_t size, float& res, bool normalize = true);
public:
    // Getter Function
    /**
     * @brief Gets the feature cached by the most recent SearchFaceFeature call.
     * @return A const reference to the Embedded object containing face feature data for search.
     */
    const Embedded& GetSearchFaceFeatureCache() const;
    /**
     * @brief Gets the raw-pointer view into the internal feature caches.
     * @return A shared pointer to the cache of face feature pointers.
     */
    const std::shared_ptr<FaceFeaturePtr>& GetFaceFeaturePtrCache() const;
    /**
     * @brief Retrieves the total number of facial features stored.
     *
     * @return int32_t Total number of facial features (0 when the hub is disabled).
     */
    int32_t GetFaceFeatureCount();
    /**
     * @brief Retrieves the confidence scores cached by SearchFaceFeatureTopKCache.
     * @return A reference to the vector of confidence scores.
     */
    std::vector<float>& GetTopKConfidence();
    /**
     * @brief Retrieves the ids cached by SearchFaceFeatureTopKCache.
     * @return A reference to the vector of ids.
     */
    std::vector<int64_t>& GetTopKCustomIdsCache();
    /**
     * @brief Retrieves the existing ids in the database (filled by GetAllIds()).
     * @return A reference to the vector of existing ids.
     */
    std::vector<int64_t>& GetExistingIds();
    /**
     * @brief Constructor for FeatureHubDB class.
     */
    FeatureHubDB();
    /**
     * @brief Prints information about the feature matrix.
     *        NOTE(review): declared here but no definition appears in the matching
     *        source file of this change — confirm it is implemented elsewhere.
     */
    void PrintFeatureMatrixInfo();
private:
    Embedded m_search_face_feature_cache_;  ///< Feature copied by the last successful search with returnFeature=true
    Embedded m_getter_face_feature_cache_;  ///< Feature copied by the last GetFaceFeature(id) call
    std::shared_ptr<FaceFeaturePtr> m_face_feature_ptr_cache_;  ///< Raw-pointer view into one of the caches above
    std::vector<FaceSearchResult> m_search_top_k_cache_;  ///< Cache for top k search results
    std::vector<float> m_top_k_confidence_;               ///< Cache for top k confidence scores
    std::vector<int64_t> m_top_k_custom_ids_cache_;       ///< Cache for top k custom ids
    std::vector<int64_t> m_all_ids_;                      ///< Cache for all ids
private:
    DatabaseConfiguration m_db_configuration_;  ///< Configuration settings for the database
    float m_recognition_threshold_{0.48f};      ///< Threshold value for face recognition
    SearchMode m_search_mode_{SEARCH_MODE_EAGER};  ///< Flag to determine if the search should find the most similar feature
    bool m_enable_{false};                      ///< Running status
    std::mutex m_res_mtx_;                      ///< Mutex for thread safety.
};
} // namespace inspire
#endif // INSPIRE_FEATURE_HUB_DB_H

View File

@@ -1,48 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/11.
//
#include "feature_block.h"
#include "log.h"
#include "feature_hub/features_block/implement/feature_block_none.h"
#ifdef FEATURE_BLOCK_ENABLE_OPENCV
#include "feature_hub/features_block/implement/feature_block_opencv.h"
#endif
namespace inspire {
/**
 * Factory: builds the FeatureBlock subclass matching the requested matrix core.
 * @param crop_type Matrix core backend to use (availability depends on build flags).
 * @param features_max Capacity: number of feature slots.
 * @param feature_length Dimension of each feature vector (default 512).
 * @return Initialized instance, or nullptr when the backend is unavailable.
 */
FeatureBlock *FeatureBlock::Create(const MatrixCore crop_type, int32_t features_max, int32_t feature_length) {
    FeatureBlock* p = nullptr;
    switch (crop_type) {
#ifdef FEATURE_BLOCK_ENABLE_OPENCV
        case MC_OPENCV:
            p = new FeatureBlockOpenCV(features_max, feature_length);
            break;
#endif
#ifdef FEATURE_BLOCK_ENABLE_EIGEN
        case MC_EIGEN:
            INSPIRE_LOGD("Not Implement");  // fix: was bare LOGD, inconsistent with the rest of the file
            break;
#endif
        case MC_NONE:
            INSPIRE_LOGD("Not Implement");
            break;
        default:
            // Covers enumerators whose cases were compiled out by the #ifdef guards above.
            INSPIRE_LOGD("Not Implement");
            break;
    }
    if (p != nullptr) {
        p->m_matrix_core_ = crop_type;
        p->m_features_max_ = features_max;      // Number of facial features
        p->m_feature_length_ = feature_length;  // Face feature length (default: 512)
        p->m_feature_state_.resize(features_max, FEATURE_STATE::IDLE);
        p->m_tag_list_.resize(features_max, "None");
        p->m_custom_id_list_.resize(features_max, -1);
    } else {
        INSPIRE_LOGE("Create FeatureBlock error.");
    }
    return p;
}
} // namespace inspire

View File

@@ -1,290 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/11.
//
#pragma once
#ifndef HYPERFACEREPO_FEATUREBLOCK_H
#define HYPERFACEREPO_FEATUREBLOCK_H
#include <mutex>
#include <iostream>
#include <algorithm>
#include "data_type.h"
namespace inspire {
/**
 * @enum MatrixCore
 * @brief Enumeration for different types of matrix cores used in feature extraction.
 */
typedef enum {
    MC_NONE, ///< C/C++ Native matrix core.
    MC_OPENCV, ///< OpenCV Mat based matrix core.
    MC_EIGEN, ///< Eigen3 Mat based matrix core.
} MatrixCore;
/**
 * @enum FEATURE_STATE
 * @brief Enumeration for states of feature slots in the feature block.
 */
typedef enum {
    IDLE = 0, ///< Slot is idle (free for registration).
    USED, ///< Slot holds a registered feature.
} FEATURE_STATE;
/**
 * @struct SearchResult
 * @brief Structure to store the results of a feature search.
 */
typedef struct SearchResult {
    float score = -1.0f; ///< Similarity score of the search result (-1 = no match).
    int32_t index = -1; ///< Row index of the result in the feature block (-1 = no match).
    std::string tag = "None"; ///< Tag associated with the feature.
    int32_t customId = -1; ///< Custom identifier for the feature.
} SearchResult;
/**
* @class FeatureBlock
* @brief Class for managing and operating on a block of facial features.
*
* This class provides methods to add, delete, update, and search facial features
* in a feature block, with thread safety using mutexes.
*/
/**
 * @class FeatureBlock
 * @brief Class for managing and operating on a fixed-capacity block of facial features.
 *
 * Public mutators lock m_mtx_ and forward to Unsafe* hooks implemented by
 * matrix-core-specific subclasses.
 */
class INSPIRE_API FeatureBlock {
public:
    // Factory: builds the subclass matching crop_type (e.g. FeatureBlockOpenCV).
    static FeatureBlock* Create(const MatrixCore crop_type, int32_t features_max = 512, int32_t feature_length = 512);
public:
    /**
     * @brief Destructor for the FeatureBlock class.
     */
    virtual ~FeatureBlock() {}
    /**
     * @brief Adds a feature to the feature block (thread-safe wrapper).
     * @param feature Vector of floats representing the feature.
     * @param tag String tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status of the feature addition.
     */
    virtual int32_t AddFeature(const std::vector<float>& feature, const std::string &tag, int32_t customId) {
        std::lock_guard<std::mutex> lock(m_mtx_); // Use mutex to protect shared data
        return UnsafeAddFeature(feature, tag, customId);
    }
    /**
     * @brief Deletes a feature from the feature block (thread-safe wrapper).
     * @param rowToDelete Index of the feature to be deleted.
     * @return int32_t Status of the feature deletion.
     */
    virtual int32_t DeleteFeature(int rowToDelete) {
        std::lock_guard<std::mutex> lock(m_mtx_);
        return UnsafeDeleteFeature(rowToDelete);
    }
    /**
     * @brief Updates a feature in the feature block (thread-safe wrapper).
     * @param rowToUpdate Index of the feature to be updated.
     * @param newFeature New feature vector to replace the old one.
     * @param tag New tag for the updated feature.
     * @param customId Custom identifier for the updated feature.
     * @return int32_t Status of the feature update.
     */
    virtual int32_t UpdateFeature(int rowToUpdate, const std::vector<float>& newFeature, const std::string &tag, int32_t customId) {
        std::lock_guard<std::mutex> lock(m_mtx_);
        return UnsafeUpdateFeature(rowToUpdate, newFeature, tag, customId);
    }
    /**
     * @brief Registers a feature at a specific index in the feature block (thread-safe wrapper).
     * @param rowToUpdate Index at which to register the new feature.
     * @param feature Feature vector to be registered.
     * @param tag Tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status of the feature registration.
     */
    virtual int32_t RegisterFeature(int rowToUpdate, const std::vector<float>& feature, const std::string &tag, int32_t customId) {
        std::lock_guard<std::mutex> lock(m_mtx_);
        return UnsafeRegisterFeature(rowToUpdate, feature, tag, customId);
    }
    /**
     * @brief Searches for the nearest feature in the block to a given query feature.
     * @param queryFeature Query feature vector.
     * @param searchResult SearchResult structure to store the search results.
     * @return int32_t Status of the search operation.
     */
    virtual int32_t SearchNearest(const std::vector<float>& queryFeature, SearchResult &searchResult) = 0;
    /**
     * @brief Search the first k features in a block that are closest to a given query feature.
     * @param queryFeature Query feature vector.
     * @param topK Maximum number of similarities
     * @param searchResults Receives the matches.
     * */
    virtual int32_t SearchTopKNearest(const std::vector<float>& queryFeature, size_t topK, std::vector<SearchResult> &searchResults) = 0;
    /**
     * @brief Retrieves a feature from the feature block.
     * @param row Index of the feature to retrieve.
     * @param feature Vector to store the retrieved feature.
     * @return int32_t Status of the retrieval operation.
     */
    virtual int32_t GetFeature(int row, std::vector<float>& feature) = 0;
    /**
     * @brief Prints the size of the feature matrix.
     */
    virtual void PrintMatrixSize() = 0;
    /**
     * @brief Prints the entire feature matrix.
     */
    virtual void PrintMatrix() = 0;
public:
    /**
     * @brief Retrieves the tag associated with a feature at a given row index.
     * NOTE(review): locks m_mtx_ — calling this from code that already holds the
     * lock (e.g. inside an Unsafe* override) would deadlock; confirm call sites.
     * @param row Index of the feature to retrieve the tag for.
     * @return std::string Tag associated with the feature at the given row, or an empty string if the row is invalid.
     */
    std::string GetTagFromRow(int row) {
        std::lock_guard<std::mutex> lock(m_mtx_); // Ensure thread safety
        // NOTE(review): signed/unsigned comparison (int vs size()); safe for row >= 0 but worth silencing.
        if (row >= 0 && row < m_tag_list_.size() && m_feature_state_[row] == FEATURE_STATE::USED) {
            return m_tag_list_[row];
        } else {
            return ""; // Return an empty string for invalid row or unused slot
        }
    }
    /**
     * @brief Retrieves the state of a feature slot at a given row index.
     * @param row Index of the feature slot to retrieve the state for.
     * @return FEATURE_STATE State of the feature slot at the given row, or IDLE if the row is invalid.
     */
    FEATURE_STATE GetStateFromRow(int row) {
        std::lock_guard<std::mutex> lock(m_mtx_); // Ensure thread safety
        if (row >= 0 && row < m_feature_state_.size()) {
            return m_feature_state_[row];
        } else {
            return FEATURE_STATE::IDLE; // Treat invalid rows as IDLE
        }
    }
    /**
     * @brief Finds the index of the first idle (unused) feature slot.
     * NOTE(review): reads m_feature_state_ without taking m_mtx_ — callers must hold the lock.
     * @return int Index of the first idle slot, or -1 if no idle slot is found.
     */
    int FindFirstIdleIndex() const {
        for (int i = 0; i < m_feature_state_.size(); ++i) {
            if (m_feature_state_[i] == FEATURE_STATE::IDLE) {
                return i; // Find the first IDLE index
            }
        }
        return -1; // No IDLE found
    }
    /**
     * @brief Finds the index of the first used feature slot.
     * @return int Index of the first used slot, or -1 if no used slot is found.
     */
    int FindFirstUsedIndex() const {
        for (int i = 0; i < m_feature_state_.size(); ++i) {
            if (m_feature_state_[i] == FEATURE_STATE::USED) {
                return i; // Find the first USED index
            }
        }
        return -1; // No USED slot found
    }
    /**
     * @brief Counts the number of used feature slots.
     * @return int Count of used feature slots.
     */
    int GetUsedCount() const {
        int usedCount = 0;
        for (const FEATURE_STATE& state : m_feature_state_) {
            if (state == FEATURE_STATE::USED) {
                usedCount++;
            }
        }
        return usedCount;
    }
    /**
     * @brief Checks if all feature slots are used.
     * @return bool True if all slots are used, false otherwise.
     */
    bool IsUsedFull() const {
        int usedCount = GetUsedCount();
        return usedCount >= m_features_max_;
    }
    /**
     * @brief Finds the index of a feature slot by its custom ID.
     * NOTE(review): returns size_t, so the -1 "not found" value wraps to SIZE_MAX;
     * callers comparing against -1 rely on implicit conversion — consider int32_t.
     * @param customId The custom ID to search for.
     * @return size_t Index of the slot with the given custom ID, or -1 (wrapped) if not found.
     */
    size_t FindIndexByCustomId(int32_t customId) {
        auto it = std::find(m_custom_id_list_.begin(), m_custom_id_list_.end(), customId);
        if (it != m_custom_id_list_.end()) {
            return std::distance(m_custom_id_list_.begin(), it); // return index
        }
        return -1;
    }
protected:
    /**
     * @brief Adds a feature to the feature block without thread safety.
     * This method should be overridden in derived classes.
     * @param feature Vector of floats representing the feature.
     * @param tag String tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status of the feature addition.
     */
    virtual int32_t UnsafeAddFeature(const std::vector<float>& feature, const std::string &tag, int32_t customId) = 0;
    /**
     * @brief Registers a feature at a specific index in the feature block without thread safety.
     * This method should be overridden in derived classes.
     * @param rowToUpdate Index at which to register the new feature.
     * @param feature Feature vector to be registered.
     * @param tag Tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status of the feature registration.
     */
    virtual int32_t UnsafeRegisterFeature(int rowToUpdate, const std::vector<float>& feature, const std::string &tag, int32_t customId) = 0;
    /**
     * @brief Deletes a feature from the feature block without thread safety.
     * This method should be overridden in derived classes.
     * @param rowToDelete Index of the feature to be deleted.
     * @return int32_t Status of the feature deletion.
     */
    virtual int32_t UnsafeDeleteFeature(int rowToDelete) = 0;
    /**
     * @brief Updates a feature in the feature block without thread safety.
     * This method should be overridden in derived classes.
     * @param rowToUpdate Index of the feature to be updated.
     * @param newFeature New feature vector to replace the old one.
     * @param tag New tag for the updated feature.
     * @param customId Custom identifier for the updated feature.
     * @return int32_t Status of the feature update.
     */
    virtual int32_t UnsafeUpdateFeature(int rowToUpdate, const std::vector<float>& newFeature, const std::string &tag, int32_t customId) = 0;
protected:
    MatrixCore m_matrix_core_; ///< Type of matrix core used.
    int32_t m_features_max_; ///< Maximum number of features in the block.
    int32_t m_feature_length_; ///< Length of each feature vector.
    std::mutex m_mtx_; ///< Mutex for thread safety.
    std::vector<FEATURE_STATE> m_feature_state_; ///< State of each feature slot.
    std::vector<String> m_tag_list_; ///< List of tags associated with each feature.
    std::vector<int32_t> m_custom_id_list_; ///< List of custom IDs associated with each feature.
};
} // namespace inspire
#endif //HYPERFACEREPO_FEATUREBLOCK_H

View File

@@ -1,5 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/11.
//
#include "feature_block_none.h"

View File

@@ -1,24 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/11.
//
#pragma once
#ifndef HYPERFACEREPO_FEATUREBLOCKNONE_H
#define HYPERFACEREPO_FEATUREBLOCKNONE_H
#include "feature_hub/features_block/feature_block.h"
namespace inspire {
// Placeholder for the MC_NONE matrix core; intentionally empty (not implemented).
// NOTE(review): does not inherit FeatureBlock, so FeatureBlock::Create cannot
// instantiate it — confirm whether this stub is still needed.
class INSPIRE_API FeatureBlockNone {
public:
private:
};
} // namespace inspire
#endif //HYPERFACEREPO_FEATUREBLOCKNONE_H

View File

@@ -1,246 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/11.
//
#include "feature_block_opencv.h"
#include "herror.h"
#include "log.h"
namespace inspire {
// Pre-allocates the full (features_max x feature_length) float matrix,
// zero-initialized. Rows are later marked occupied/free via m_feature_state_
// rather than by resizing the matrix.
FeatureBlockOpenCV::FeatureBlockOpenCV(int32_t features_max, int32_t feature_length)
:m_feature_matrix_(features_max, feature_length, CV_32F, cv::Scalar(0.0f)){
}
int32_t FeatureBlockOpenCV::UnsafeAddFeature(const std::vector<float> &feature, const std::string &tag, int32_t customId) {
if (feature.empty()) {
return HERR_SESS_REC_ADD_FEAT_EMPTY; // If the feature is empty, it is not added
}
if (feature.size() != m_feature_length_) {
return HERR_SESS_REC_FEAT_SIZE_ERR;
}
if (IsUsedFull()) {
return HERR_SESS_REC_BLOCK_FULL;
}
cv::Mat newFeatureMat(1, feature.size(), CV_32FC1);
for (int i = 0; i < feature.size(); ++i) {
newFeatureMat.at<float>(0, i) = feature[i];
}
auto idx = FindFirstIdleIndex(); // Find the first free vector position
if (idx == -1) {
return HERR_SESS_REC_BLOCK_FULL;
}
cv::Mat rowToUpdate = m_feature_matrix_.row(idx);
newFeatureMat.copyTo(rowToUpdate);
m_feature_state_[idx] = FEATURE_STATE::USED; // Set feature vector used
m_tag_list_[idx] = tag;
m_custom_id_list_[idx] = customId;
return HSUCCEED;
}
/**
 * @brief Marks a row idle, logically deleting its feature (caller holds the lock).
 * The vector data itself is left in place; only the state and custom id are reset.
 * @param rowToDelete Row index to delete.
 * @return HSUCCEED on success, otherwise a HERR_SESS_REC_* error code.
 */
int32_t FeatureBlockOpenCV::UnsafeDeleteFeature(int rowToDelete) {
    if (m_feature_matrix_.empty() || rowToDelete < 0 || rowToDelete >= m_feature_matrix_.rows) {
        return HERR_SESS_REC_DEL_FAILURE;  // Invalid row number or empty matrix
    }
    // Removed: an unused `cv::Mat rowToUpdate` row view created here served no purpose.
    if (m_feature_state_[rowToDelete] == FEATURE_STATE::IDLE) {
        return HERR_SESS_REC_BLOCK_DEL_FAILURE;  // Already idle, nothing to delete
    }
    m_feature_state_[rowToDelete] = FEATURE_STATE::IDLE;
    m_custom_id_list_[rowToDelete] = -1;
    // NOTE(review): the tag in m_tag_list_ is intentionally left untouched,
    // matching the original behavior — confirm whether it should be cleared.
    return HSUCCEED;
}
int32_t FeatureBlockOpenCV::UnsafeRegisterFeature(int rowToUpdate, const std::vector<float> &feature, const std::string &tag, int32_t customId) {
if (rowToUpdate < 0 || rowToUpdate >= m_feature_matrix_.rows) {
return HERR_SESS_REC_FEAT_SIZE_ERR; // Invalid line number, not updated
}
if (feature.size() != m_feature_length_) {
return HERR_SESS_REC_FEAT_SIZE_ERR; // The new feature does not match the expected size and will not be updated
}
cv::Mat rowToUpdateMat = m_feature_matrix_.row(rowToUpdate);
// 将新特征拷贝到指定行
for (int i = 0; i < feature.size(); ++i) {
rowToUpdateMat.at<float>(0, i) = feature[i];
}
m_feature_state_[rowToUpdate] = USED;
m_tag_list_[rowToUpdate] = tag;
m_custom_id_list_[rowToUpdate] = customId;
return 0;
}
int32_t FeatureBlockOpenCV::UnsafeUpdateFeature(int rowToUpdate, const std::vector<float> &newFeature, const std::string &tag, int32_t customId) {
if (rowToUpdate < 0 || rowToUpdate >= m_feature_matrix_.rows) {
return HERR_SESS_REC_FEAT_SIZE_ERR; // Invalid line number, not updated
}
if (newFeature.size() != m_feature_length_) {
return HERR_SESS_REC_FEAT_SIZE_ERR; // The new feature does not match the expected size and will not be updated
}
cv::Mat rowToUpdateMat = m_feature_matrix_.row(rowToUpdate);
if (m_feature_state_[rowToUpdate] == FEATURE_STATE::IDLE) {
return HERR_SESS_REC_BLOCK_UPDATE_FAILURE; // Rows are idle and not updated
}
// Copies the new feature to the specified row
for (int i = 0; i < newFeature.size(); ++i) {
rowToUpdateMat.at<float>(0, i) = newFeature[i];
}
m_tag_list_[rowToUpdate] = tag;
m_custom_id_list_[rowToUpdate] = customId;
return HSUCCEED;
}
/**
 * @brief Finds the stored feature with the highest dot-product score against
 *        the query (thread-safe; takes m_mtx_).
 * Scores are dot products; they equal cosine similarity only if both the
 * stored and query vectors are L2-normalized — assumed here, TODO confirm.
 * On no match (all rows idle), score/index are set to -1 but tag/customId
 * in searchResult are left untouched.
 * @param queryFeature Query vector; must match the block's feature length.
 * @param searchResult Output: best score, row index, tag and custom id.
 * @return HSUCCEED (also when the block is empty), or HERR_SESS_REC_FEAT_SIZE_ERR.
 */
int32_t FeatureBlockOpenCV::SearchNearest(const std::vector<float>& queryFeature, SearchResult &searchResult) {
    std::lock_guard<std::mutex> lock(m_mtx_);
    if (queryFeature.size() != m_feature_length_) {
        return HERR_SESS_REC_FEAT_SIZE_ERR;
    }
    if (GetUsedCount() == 0) {
        return HSUCCEED;  // Nothing stored; searchResult is not modified
    }
    // Column-vector view over the caller's buffer (no copy).
    cv::Mat queryMat(queryFeature.size(), 1, CV_32FC1, (void*)queryFeature.data());
    // One matrix-vector product yields every row's score at once.
    cv::Mat cosineSimilarities;
    cv::gemm(m_feature_matrix_, queryMat, 1, cv::Mat(), 0, cosineSimilarities);
    // The result must be an m_features_max_ x 1 column vector.
    assert(cosineSimilarities.rows == m_features_max_ && cosineSimilarities.cols == 1);
    // Collect (score, row) pairs for occupied rows only.
    std::vector<std::pair<float, int>> similarityScores;
    for (int i = 0; i < m_features_max_; ++i) {
        // Skip free slots; their (zeroed) rows must not win the search.
        if (m_feature_state_[i] == FEATURE_STATE::IDLE) {
            continue;
        }
        float similarityScore = cosineSimilarities.at<float>(i, 0);
        similarityScores.push_back(std::make_pair(similarityScore, i));
    }
    // Pick the best pair; pair comparison orders by score first.
    if (!similarityScores.empty()) {
        auto maxScoreIter = std::max_element(similarityScores.begin(), similarityScores.end());
        float maxScore = maxScoreIter->first;
        int maxScoreIndex = maxScoreIter->second;
        searchResult.score = maxScore;
        searchResult.index = maxScoreIndex;
        searchResult.tag = m_tag_list_[maxScoreIndex];
        searchResult.customId = m_custom_id_list_[maxScoreIndex];
        return HSUCCEED;  // Best match found
    }
    // All rows idle: report "no match" sentinels.
    searchResult.score = -1.0f;
    searchResult.index = -1;
    return HSUCCEED;
}
/**
 * @brief Returns up to topK stored features with the highest dot-product
 *        scores against the query, best first (thread-safe; takes m_mtx_).
 * Scores equal cosine similarity only for L2-normalized vectors — assumed
 * here, TODO confirm. searchResults is cleared even when nothing matches.
 * @param queryFeature Query vector; must match the block's feature length.
 * @param topK Maximum number of results; clamped to the number of occupied rows.
 * @param searchResults Output list ordered by descending score.
 * @return HSUCCEED (also when the block is empty), or HERR_SESS_REC_FEAT_SIZE_ERR.
 */
int32_t FeatureBlockOpenCV::SearchTopKNearest(const std::vector<float> &queryFeature, size_t topK, std::vector<SearchResult> &searchResults) {
    std::lock_guard<std::mutex> lock(m_mtx_);
    if (queryFeature.size() != m_feature_length_) {
        return HERR_SESS_REC_FEAT_SIZE_ERR;
    }
    if (GetUsedCount() == 0) {
        return HSUCCEED;  // Nothing stored; searchResults is not touched here
    }
    // Column-vector view over the caller's buffer (no copy).
    cv::Mat queryMat(queryFeature.size(), 1, CV_32FC1, (void*)queryFeature.data());
    // One matrix-vector product yields every row's score at once.
    cv::Mat cosineSimilarities;
    cv::gemm(m_feature_matrix_, queryMat, 1, cv::Mat(), 0, cosineSimilarities);
    // The result must be an m_features_max_ x 1 column vector.
    assert(cosineSimilarities.rows == m_features_max_ && cosineSimilarities.cols == 1);
    // Collect (score, row) pairs for occupied rows only.
    std::vector<std::pair<float, int>> similarityScores;
    for (int i = 0; i < m_features_max_; ++i) {
        // Skip free slots; their (zeroed) rows must not appear in the results.
        if (m_feature_state_[i] == FEATURE_STATE::IDLE) {
            continue;
        }
        float similarityScore = cosineSimilarities.at<float>(i, 0);
        similarityScores.push_back(std::make_pair(similarityScore, i));
    }
    searchResults.clear();
    // Clamp topK to the number of candidates actually collected.
    if (similarityScores.size() < topK) {
        topK = similarityScores.size();
    }
    // Only the first topK entries need ordering; descending by score.
    std::partial_sort(similarityScores.begin(), similarityScores.begin() + topK, similarityScores.end(),
                      [](const std::pair<float, int>& a, const std::pair<float, int>& b) {
                          return a.first > b.first;
                      });
    for (size_t i = 0; i < topK; i++) {
        SearchResult result;
        result.score = similarityScores[i].first;
        result.index = similarityScores[i].second;
        result.tag = m_tag_list_[result.index];
        result.customId = m_custom_id_list_[result.index];
        searchResults.push_back(result);
    }
    return HSUCCEED;
}
/**
 * @brief Writes the storage matrix's cv::MatSize to standard output.
 */
void FeatureBlockOpenCV::PrintMatrixSize() {
    const auto &matrixSize = m_feature_matrix_.size;
    std::cout << matrixSize << std::endl;
}
/**
 * @brief Logs the dimensions of the feature storage matrix.
 * The matrix is constructed as (features_max rows) x (feature_length cols),
 * so the feature count is `rows` and the feature length is `cols` — the
 * original logged them swapped.
 */
void FeatureBlockOpenCV::PrintMatrix() {
    INSPIRE_LOGD("Num of Features: %d", m_feature_matrix_.rows);
    INSPIRE_LOGD("Feature length: %d", m_feature_matrix_.cols);
}
/**
 * @brief Appends the values of row `row` to the caller's vector.
 * Append semantics (no clear) are kept for backward compatibility.
 * NOTE(review): idle rows are returned too (zeros) — confirm whether callers
 * are expected to check the state first.
 * @param row Row index to read.
 * @param feature Output vector; the row's values are appended to it.
 * @return HSUCCEED, or HERR_SESS_REC_FEAT_SIZE_ERR for an invalid row.
 */
int32_t FeatureBlockOpenCV::GetFeature(int row, std::vector<float> &feature) {
    if (row < 0 || row >= m_feature_matrix_.rows) {
        return HERR_SESS_REC_FEAT_SIZE_ERR;  // Invalid row number
    }
    cv::Mat feat = m_feature_matrix_.row(row);
    // Reserve once to avoid repeated reallocation during the push_back loop.
    feature.reserve(feature.size() + m_feature_length_);
    for (int i = 0; i < m_feature_length_; ++i) {
        feature.push_back(feat.at<float>(0, i));
    }
    return HSUCCEED;
}
}  // namespace inspire

View File

@@ -1,113 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/11.
//
#pragma once
#ifndef HYPERFACEREPO_FEATUREBLOCKOPENCV_H
#define HYPERFACEREPO_FEATUREBLOCKOPENCV_H
#include "feature_hub/features_block/feature_block.h"
namespace inspire {
/**
* @class FeatureBlockOpenCV
* @brief Class derived from FeatureBlock for managing facial features using OpenCV.
*
* This class provides an implementation of FeatureBlock using OpenCV's Mat data structure
* for storing and manipulating facial features.
*/
class INSPIRE_API FeatureBlockOpenCV : public FeatureBlock{
public:
    /**
     * @brief Constructor for FeatureBlockOpenCV.
     * @param features_max Maximum number of features that can be stored.
     * @param feature_length Length of each feature vector.
     */
    explicit FeatureBlockOpenCV(int32_t features_max = 512, int32_t feature_length = 512);
    /**
     * @brief Searches for the nearest feature in the block to a given query feature.
     * @param queryFeature Query feature vector.
     * @param searchResult SearchResult structure to store the search results.
     * @return int32_t Status of the search operation.
     */
    int32_t SearchNearest(const std::vector<float>& queryFeature, SearchResult &searchResult) override;
    /**
     * @brief Searches for the top-K features in the block closest to a given query feature.
     * @param queryFeature Query feature vector.
     * @param topK Maximum number of results to return.
     * @param searchResults Output list of the best matches.
     * @return int32_t Status of the search operation.
     */
    int32_t SearchTopKNearest(const std::vector<float>& queryFeature, size_t topK, std::vector<SearchResult> &searchResults) override;
    /**
     * @brief Retrieves a feature from the feature block.
     * @param row Index of the feature to retrieve.
     * @param feature Vector to store the retrieved feature.
     * @return int32_t Status of the retrieval operation.
     */
    int32_t GetFeature(int row, std::vector<float> &feature) override;
protected:
    /**
     * @brief Adds a feature to the feature block without thread safety.
     * @param feature Vector of floats representing the feature.
     * @param tag String tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status of the feature addition.
     */
    int32_t UnsafeAddFeature(const std::vector<float> &feature, const std::string &tag, int32_t customId) override;
    /**
     * @brief Deletes a feature from the feature block without thread safety.
     * @param rowToDelete Index of the feature to be deleted.
     * @return int32_t Status of the feature deletion.
     */
    int32_t UnsafeDeleteFeature(int rowToDelete) override;
    /**
     * @brief Updates a feature in the feature block without thread safety.
     * @param rowToUpdate Index of the feature to be updated.
     * @param newFeature New feature vector to replace the old one.
     * @param tag New tag for the updated feature.
     * @param customId Custom identifier for the updated feature.
     * @return int32_t Status of the feature update.
     */
    int32_t UnsafeUpdateFeature(int rowToUpdate, const std::vector<float> &newFeature, const std::string &tag, int32_t customId) override;
    /**
     * @brief Registers a feature at a specific index in the feature block without thread safety.
     * @param rowToUpdate Index at which to register the new feature.
     * @param feature Feature vector to be registered.
     * @param tag Tag associated with the feature.
     * @param customId Custom identifier for the feature.
     * @return int32_t Status of the feature registration.
     */
    int32_t UnsafeRegisterFeature(int rowToUpdate, const std::vector<float> &feature, const std::string &tag, int32_t customId) override;
public:
    /**
     * @brief Prints the size of the feature matrix to standard output.
     */
    void PrintMatrixSize() override;
    /**
     * @brief Logs the dimensions of the feature matrix.
     */
    void PrintMatrix() override;
private:
    cv::Mat m_feature_matrix_; ///< Matrix for storing feature vectors, one per row.
};
}  // namespace inspire
#endif //HYPERFACEREPO_FEATUREBLOCKOPENCV_H

View File

@@ -1,346 +0,0 @@
//
// Created by Tunm-Air13 on 2023/10/11.
//
#include <iostream>
#include <iomanip> // for std::setw
#include "sqlite_faces_manage.h"
#include "herror.h"
namespace inspire {
// Default constructor: the connection is established later via OpenDatabase().
SQLiteFaceManage::SQLiteFaceManage() {
}
// Destructor: releases the database handle. The actual sqlite3_close() call
// happens in the shared_ptr's custom deleter once the last reference drops.
SQLiteFaceManage::~SQLiteFaceManage() {
    CloseDatabase();
}
// Custom deleter so a std::shared_ptr<sqlite3> closes the connection when the
// last reference is released.
struct SQLiteDeleter {
    void operator()(sqlite3* ptr) const {
        sqlite3_close(ptr);
    }
};
int32_t SQLiteFaceManage::OpenDatabase(const std::string &dbPath) {
sqlite3* rawDb;
if (sqlite3_open(dbPath.c_str(), &rawDb) != SQLITE_OK) {
// Handle error
return HERR_FT_HUB_OPEN_ERROR;
}
m_db_ = std::shared_ptr<sqlite3>(rawDb, SQLiteDeleter());
// Check if the table exists
const char* checkTableSQL = "SELECT name FROM sqlite_master WHERE type='table' AND name='FaceFeatures';";
sqlite3_stmt* stmt = nullptr;
if (sqlite3_prepare_v2(m_db_.get(), checkTableSQL, -1, &stmt, nullptr) != SQLITE_OK) {
INSPIRE_LOGE("Error checking for table existence: %s", sqlite3_errmsg(m_db_.get()));
return HERR_FT_HUB_CHECK_TABLE_ERROR; // Assuming you have this error code
}
int result = sqlite3_step(stmt);
sqlite3_finalize(stmt);
// If table doesn't exist, create it
if (result != SQLITE_ROW) {
return CreateTable();
}
return HSUCCEED;
}
/**
 * @brief Releases this object's reference to the database connection.
 * The custom deleter invokes sqlite3_close() once the last owner goes away.
 * @return HSUCCEED, or HERR_FT_HUB_NOT_OPENED when no database is open.
 */
int32_t SQLiteFaceManage::CloseDatabase() {
    if (m_db_) {
        m_db_.reset();
        return HSUCCEED;
    }
    return HERR_FT_HUB_NOT_OPENED;
}
int32_t SQLiteFaceManage::CreateTable() {
if (!m_db_) {
INSPIRE_LOGE("Database is not opened. Please open the database first.");
return HERR_FT_HUB_NOT_OPENED; // Example error code for unopened database
}
const char* createTableSQL = R"(
CREATE TABLE IF NOT EXISTS FaceFeatures (
customId INTEGER PRIMARY KEY,
tag TEXT,
feature BLOB
)
)";
char* errMsg = nullptr;
int result = sqlite3_exec(m_db_.get(), createTableSQL, 0, 0, &errMsg);
if (result != SQLITE_OK) {
INSPIRE_LOGE("Error creating table: %s" , errMsg);
sqlite3_free(errMsg);
return result;
}
// LOGD("Table successfully created or already exists.");
return SQLITE_OK; // or SUCCESS_CODE, based on your error code system
}
int32_t SQLiteFaceManage::InsertFeature(const FaceFeatureInfo& info) {
if (!m_db_) {
INSPIRE_LOGE("Database is not opened. Please open the database first.");
return HERR_FT_HUB_NOT_OPENED; // Example error code for unopened database
}
const char* insertSQL = "INSERT INTO FaceFeatures (customId, tag, feature) VALUES (?, ?, ?)";
sqlite3_stmt* stmt = nullptr;
int result = sqlite3_prepare_v2(m_db_.get(), insertSQL, -1, &stmt, nullptr);
if (result != SQLITE_OK) {
INSPIRE_LOGE("Error preparing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
return result;
}
// Binding values
sqlite3_bind_int(stmt, 1, info.customId);
sqlite3_bind_text(stmt, 2, info.tag.c_str(), -1, SQLITE_STATIC);
sqlite3_bind_blob(stmt, 3, info.feature.data(), info.feature.size() * sizeof(float), SQLITE_STATIC);
result = sqlite3_step(stmt);
if (result != SQLITE_DONE) {
INSPIRE_LOGE("Error inserting new feature: %s" , sqlite3_errmsg(m_db_.get()));
sqlite3_finalize(stmt);
return HERR_FT_HUB_INSERT_FAILURE;
}
// Clean up the statement
sqlite3_finalize(stmt);
// LOGD("Feature successfully inserted.");
return SQLITE_OK; // or SUCCESS_CODE, based on your error code system
}
/**
 * @brief Loads one face-feature record by its customId.
 * @param customId Primary key of the record to fetch.
 * @param outInfo Output record (id, tag, feature vector).
 * @return HSUCCEED, or a HERR_FT_HUB_* error code.
 */
int32_t SQLiteFaceManage::GetFeature(int32_t customId, FaceFeatureInfo& outInfo) {
    if (!m_db_) {
        INSPIRE_LOGE("Database is not opened. Please open the database first.");
        return HERR_FT_HUB_NOT_OPENED;
    }
    const char* selectSQL = "SELECT customId, tag, feature FROM FaceFeatures WHERE customId = ?";
    sqlite3_stmt* stmt = nullptr;
    int result = sqlite3_prepare_v2(m_db_.get(), selectSQL, -1, &stmt, nullptr);
    if (result != SQLITE_OK) {
        INSPIRE_LOGE("Error preparing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
        return HERR_FT_HUB_PREPARING_FAILURE;
    }
    sqlite3_bind_int(stmt, 1, customId);
    result = sqlite3_step(stmt);
    if (result == SQLITE_ROW) {
        outInfo.customId = sqlite3_column_int(stmt, 0);
        // sqlite3_column_text() returns NULL for a SQL NULL; constructing a
        // std::string from a null pointer is undefined behavior, so guard it.
        const unsigned char* tagText = sqlite3_column_text(stmt, 1);
        outInfo.tag = tagText ? reinterpret_cast<const char*>(tagText) : "";
        // Likewise sqlite3_column_blob() may return NULL for NULL/empty blobs.
        const void* blobData = sqlite3_column_blob(stmt, 2);
        int blobSize = sqlite3_column_bytes(stmt, 2) / static_cast<int>(sizeof(float));
        if (blobData != nullptr && blobSize > 0) {
            const float* begin = static_cast<const float*>(blobData);
            outInfo.feature = std::vector<float>(begin, begin + blobSize);
        } else {
            outInfo.feature.clear();
        }
    } else if (result == SQLITE_DONE) {
        INSPIRE_LOGE("No feature found with customId: %d", customId);
        sqlite3_finalize(stmt);
        return HERR_FT_HUB_NO_RECORD_FOUND;
    } else {
        INSPIRE_LOGE("Error executing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
        sqlite3_finalize(stmt);
        return HERR_FT_HUB_EXECUTING_FAILURE;
    }
    sqlite3_finalize(stmt);
    INSPIRE_LOGD("Feature successfully retrieved.");
    return HSUCCEED;
}
int32_t SQLiteFaceManage::DeleteFeature(int32_t customId) {
if (!m_db_) {
INSPIRE_LOGE("Database is not opened. Please open the database first.");
return HERR_FT_HUB_NOT_OPENED;
}
const char* deleteSQL = "DELETE FROM FaceFeatures WHERE customId = ?";
sqlite3_stmt* stmt = nullptr;
int result = sqlite3_prepare_v2(m_db_.get(), deleteSQL, -1, &stmt, nullptr);
if (result != SQLITE_OK) {
INSPIRE_LOGE("Error preparing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
return HERR_FT_HUB_PREPARING_FAILURE;
}
// Bind the customId to the prepared statement
sqlite3_bind_int(stmt, 1, customId);
result = sqlite3_step(stmt);
if (result != SQLITE_DONE) {
INSPIRE_LOGE("Error deleting feature with customId: %d, Error: %s", customId, sqlite3_errmsg(m_db_.get()));
sqlite3_finalize(stmt);
return HERR_FT_HUB_EXECUTING_FAILURE;
}
int changes = sqlite3_changes(m_db_.get());
if (changes == 0) {
INSPIRE_LOGE("No feature found with customId: %d. Nothing was deleted.", customId);
sqlite3_finalize(stmt);
return HERR_FT_HUB_NO_RECORD_FOUND; // Assuming you have an error code for record not found
}
// Clean up the statement
sqlite3_finalize(stmt);
// LOGD("Feature with customId: %d successfully deleted.", customId);
return HSUCCEED;
}
int32_t SQLiteFaceManage::UpdateFeature(const FaceFeatureInfo& info) {
if (!m_db_) {
INSPIRE_LOGE("Database is not opened. Please open the database first.");
return HERR_FT_HUB_NOT_OPENED;
}
const char* updateSQL = "UPDATE FaceFeatures SET tag = ?, feature = ? WHERE customId = ?";
sqlite3_stmt* stmt = nullptr;
int result = sqlite3_prepare_v2(m_db_.get(), updateSQL, -1, &stmt, nullptr);
if (result != SQLITE_OK) {
INSPIRE_LOGE("Error preparing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
return HERR_FT_HUB_PREPARING_FAILURE;
}
// Binding values
sqlite3_bind_text(stmt, 1, info.tag.c_str(), -1, SQLITE_STATIC);
sqlite3_bind_blob(stmt, 2, info.feature.data(), info.feature.size() * sizeof(float), SQLITE_STATIC);
sqlite3_bind_int(stmt, 3, info.customId);
result = sqlite3_step(stmt);
if (result != SQLITE_DONE) {
INSPIRE_LOGE("Error updating feature with customId: %d, Error: %s", info.customId, sqlite3_errmsg(m_db_.get()));
sqlite3_finalize(stmt);
return result;
}
int changes = sqlite3_changes(m_db_.get());
if (changes == 0) {
INSPIRE_LOGE("No feature found with customId: %d. Nothing was updated.", info.customId);
sqlite3_finalize(stmt);
return HERR_FT_HUB_NO_RECORD_FOUND; // Assuming you have an error code for record not found
}
// Clean up the statement
sqlite3_finalize(stmt);
// LOGD("Feature with customId: %d successfully updated.", info.customId);
return HSUCCEED;
}
int32_t SQLiteFaceManage::ViewTotal() {
if (!m_db_) {
INSPIRE_LOGE("Database is not opened. Please open the database first.");
return HERR_FT_HUB_NOT_OPENED;
}
const char* selectSQL = "SELECT customId, tag FROM FaceFeatures";
sqlite3_stmt* stmt = nullptr;
int result = sqlite3_prepare_v2(m_db_.get(), selectSQL, -1, &stmt, nullptr);
if (result != SQLITE_OK) {
INSPIRE_LOGE("Error preparing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
return result;
}
// Print table header
std::cout << "+----------+-----------------------+\n";
std::cout << "| customId | tag |\n";
std::cout << "+----------+-----------------------+\n";
while ((result = sqlite3_step(stmt)) == SQLITE_ROW) {
int32_t customId = sqlite3_column_int(stmt, 0);
const unsigned char* tag = sqlite3_column_text(stmt, 1);
std::cout << "| " << std::setw(8) << customId << " | " << std::setw(21) << tag << " |\n";
}
std::cout << "+----------+-----------------------+\n";
if (result != SQLITE_DONE) {
INSPIRE_LOGE("Error executing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
sqlite3_finalize(stmt);
return HERR_FT_HUB_PREPARING_FAILURE;
}
// Clean up the statement
sqlite3_finalize(stmt);
INSPIRE_LOGD("Successfully displayed all records.");
return HSUCCEED;
}
/**
 * @brief Appends every stored face-feature record to infoList.
 * @param infoList Output list; records are appended (not cleared).
 * @return HSUCCEED, or a HERR_FT_HUB_* error code.
 */
int32_t SQLiteFaceManage::GetTotalFeatures(std::vector<FaceFeatureInfo>& infoList) {
    if (!m_db_) {
        INSPIRE_LOGE("Database is not opened. Please open the database first.");
        return HERR_FT_HUB_NOT_OPENED;
    }
    const char* selectSQL = "SELECT customId, tag, feature FROM FaceFeatures";
    sqlite3_stmt* stmt = nullptr;
    int result = sqlite3_prepare_v2(m_db_.get(), selectSQL, -1, &stmt, nullptr);
    if (result != SQLITE_OK) {
        INSPIRE_LOGE("Error preparing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
        return HERR_FT_HUB_PREPARING_FAILURE;
    }
    while ((result = sqlite3_step(stmt)) == SQLITE_ROW) {
        FaceFeatureInfo featureInfo;
        featureInfo.customId = sqlite3_column_int(stmt, 0);
        // sqlite3_column_text() returns NULL for a SQL NULL; constructing a
        // std::string from a null pointer is undefined behavior, so guard it.
        const unsigned char* tagText = sqlite3_column_text(stmt, 1);
        featureInfo.tag = tagText ? reinterpret_cast<const char*>(tagText) : "";
        // Likewise guard a NULL/empty blob before building the float vector.
        const void* blobData = sqlite3_column_blob(stmt, 2);
        int blobSize = sqlite3_column_bytes(stmt, 2) / static_cast<int>(sizeof(float));
        if (blobData != nullptr && blobSize > 0) {
            const float* begin = static_cast<const float*>(blobData);
            featureInfo.feature = std::vector<float>(begin, begin + blobSize);
        }
        infoList.push_back(featureInfo);
    }
    if (result != SQLITE_DONE) {
        INSPIRE_LOGE("Error executing the SQL statement: %s", sqlite3_errmsg(m_db_.get()));
        sqlite3_finalize(stmt);
        return HERR_FT_HUB_EXECUTING_FAILURE;
    }
    sqlite3_finalize(stmt);
    return HSUCCEED;
}
}  // namespace inspire

View File

@@ -1,124 +0,0 @@
//
// Created by Tunm-Air13 on 2023/10/11.
//
#pragma once
#ifndef HYPERFACEREPO_SQLITEFACEMANAGE_H
#define HYPERFACEREPO_SQLITEFACEMANAGE_H
#include "data_type.h"
#include "log.h"
#include "sqlite3.h" // Include the SQLite3 header
#include <vector>
#include <string>
#include "memory"
namespace inspire {
/**
 * @struct FaceFeatureInfo
 * @brief One stored face-feature record: identifier, tag, and embedding.
 */
typedef struct {
    int32_t customId; ///< Custom identifier for the feature (primary key in storage).
    std::string tag; ///< Tag associated with the feature.
    std::vector<float> feature; ///< Raw float embedding representing the feature.
} FaceFeatureInfo;
/**
 * @class SQLiteFaceManage
 * @brief Class for managing facial features using an SQLite database.
 *
 * Provides methods to open, close, create tables, insert, retrieve, delete, and update
 * facial features in an SQLite database, and to display the stored records.
 * Success is reported as HSUCCEED (0); failures use HERR_FT_HUB_* codes.
 * NOTE(review): no internal locking is visible — presumably not thread-safe; confirm before sharing across threads.
 */
class INSPIRE_API SQLiteFaceManage {
public:
    /**
     * @brief Constructor; does not open a database.
     */
    SQLiteFaceManage();
    /**
     * @brief Destructor; closes the database connection if still open.
     */
    ~SQLiteFaceManage();
    /**
     * @brief Opens an SQLite database at the specified path, creating the
     *        FaceFeatures table when it does not yet exist.
     *
     * @param dbPath Path to the SQLite database file.
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t OpenDatabase(const std::string& dbPath);
    /**
     * @brief Closes the currently open SQLite database.
     *
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t CloseDatabase();
    /**
     * @brief Creates the FaceFeatures table if it doesn't exist (idempotent).
     *
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t CreateTable();
    /**
     * @brief Inserts a facial feature record into the database.
     *
     * @param info Information about the facial feature to be inserted.
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t InsertFeature(const FaceFeatureInfo& info);
    /**
     * @brief Retrieves a facial feature record by its custom ID.
     *
     * @param customId Custom identifier of the facial feature to retrieve.
     * @param outInfo Output parameter to store the retrieved feature information.
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t GetFeature(int32_t customId, FaceFeatureInfo& outInfo);
    /**
     * @brief Deletes a facial feature record by its custom ID.
     *
     * @param customId Custom identifier of the facial feature to delete.
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t DeleteFeature(int32_t customId);
    /**
     * @brief Updates the tag and feature data of an existing record.
     *
     * @param info Updated information about the facial feature.
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t UpdateFeature(const FaceFeatureInfo& info);
    /**
     * @brief Retrieves all facial feature records stored in the database.
     *
     * @param infoList Output parameter to store the list of facial feature information.
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t GetTotalFeatures(std::vector<FaceFeatureInfo>& infoList);
    /**
     * @brief Prints all stored (customId, tag) pairs to standard output.
     *
     * @return int32_t HSUCCEED on success, otherwise an error code.
     */
    int32_t ViewTotal();
private:
    std::shared_ptr<sqlite3> m_db_; ///< Owning handle; custom deleter calls sqlite3_close().
};
} // namespace inspire
#endif //HYPERFACEREPO_SQLITEFACEMANAGE_H

View File

@@ -9,52 +9,58 @@
#elif defined(__ARM_NEON__)
/* GCC-compatible compiler, targeting ARM with NEON */
#include <arm_neon.h>
#pragma message("USE SSE")
#endif
#if defined(__GNUC__) && \
(defined(__x86_64__) || defined(__i386__) || defined(_MSC_VER))
#if defined(__GNUC__) && (defined(__x86_64__) || defined(__i386__) || defined(_MSC_VER))
inline float simd_dot(const float *x, const float *y, const long &len) {
//#pragma message("USE SSE")
float inner_prod = 0.0f;
__m128 X, Y, Z; // 128-bit values
__m128 acc = _mm_setzero_ps(); // set to (0, 0, 0, 0)
float temp[4];
// #pragma message("USE SSE")
float inner_prod = 0.0f;
__m128 X, Y, Z; // 128-bit values
__m128 acc = _mm_setzero_ps(); // set to (0, 0, 0, 0)
float temp[4];
long i;
for (i = 0; i + 4 < len; i += 4) {
X = _mm_loadu_ps(x + i); // load chunk of 4 floats
Y = _mm_loadu_ps(y + i);
Z = _mm_mul_ps(X, Y);
acc = _mm_add_ps(acc, Z);
}
_mm_storeu_ps(&temp[0], acc); // store acc into an array
inner_prod = temp[0] + temp[1] + temp[2] + temp[3];
long i;
for (i = 0; i + 4 < len; i += 4) {
X = _mm_loadu_ps(x + i); // load chunk of 4 floats
Y = _mm_loadu_ps(y + i);
Z = _mm_mul_ps(X, Y);
acc = _mm_add_ps(acc, Z);
}
_mm_storeu_ps(&temp[0], acc); // store acc into an array
inner_prod = temp[0] + temp[1] + temp[2] + temp[3];
// add the remaining values
for (; i < len; ++i) {
inner_prod += x[i] * y[i];
}
return inner_prod;
// add the remaining values
for (; i < len; ++i) {
inner_prod += x[i] * y[i];
}
return inner_prod;
}
#elif defined(__ARM_NEON__)
inline float simd_dot(const float *x, const float *y, const long &len) {
// #pragma message("USE NEON")
float inner_prod = 0.0f;
float32x4_t X, Y, Z; // 128-bit values
float32x4_t acc = vdupq_n_f32(0.0f); // set to (0, 0, 0, 0)
long i;
for (i = 0; i + 4 < len; i += 4) {
X = vld1q_f32(x + i); // load chunk of 4 floats
Y = vld1q_f32(y + i);
Z = vmulq_f32(X, Y);
acc = vaddq_f32(acc, Z);
}
inner_prod = vgetq_lane_f32(acc, 0) + vgetq_lane_f32(acc, 1) + vgetq_lane_f32(acc, 2) + vgetq_lane_f32(acc, 3);
for (; i < len; ++i) {
inner_prod += x[i] * y[i];
}
return inner_prod;
}
#else
inline float simd_dot(const float *x, const float *y, const long &len) {
//#pragma message("USE NEON")
float inner_prod = 0.0f;
float32x4_t X, Y, Z; // 128-bit values
float32x4_t acc = vdupq_n_f32(0.0f); // set to (0, 0, 0, 0)
long i;
for (i = 0; i + 4 < len; i += 4) {
X = vld1q_f32(x + i); // load chunk of 4 floats
Y = vld1q_f32(y + i);
Z = vmulq_f32(X, Y);
acc = vaddq_f32(acc, Z);
}
inner_prod = vgetq_lane_f32(acc, 0) + vgetq_lane_f32(acc, 1) +
vgetq_lane_f32(acc, 2) + vgetq_lane_f32(acc, 3);
for (; i < len; ++i) {
inner_prod += x[i] * y[i];
}
return inner_prod;
// #pragma message("USE SCALAR")
float inner_prod = 0.0f;
for (long i = 0; i < len; ++i) {
inner_prod += x[i] * y[i];
}
return inner_prod;
}
#endif
#endif

View File

@@ -1,70 +1,79 @@
//
// Created by Tunm-Air13 on 2023/9/11.
//
#ifndef HYPERFACEREPO_HERROR_H
#define HYPERFACEREPO_HERROR_H
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#ifndef INSPIRE_FACE_HERROR_H
#define INSPIRE_FACE_HERROR_H
// [Anchor-Begin]
#define HSUCCEED (0) // Success
#define HERR_BASIC_BASE 0X0001 // Basic error types
#define HERR_UNKNOWN HERR_BASIC_BASE // Unknown error
#define HERR_INVALID_PARAM (HERR_BASIC_BASE+1) // Invalid parameter
#define HERR_INVALID_IMAGE_STREAM_HANDLE (HERR_BASIC_BASE+24) // Invalid image stream handle
#define HERR_INVALID_CONTEXT_HANDLE (HERR_BASIC_BASE+25) // Invalid context handle
#define HERR_INVALID_FACE_TOKEN (HERR_BASIC_BASE+30) // Invalid face token
#define HERR_INVALID_FACE_FEATURE (HERR_BASIC_BASE+31) // Invalid face feature
#define HERR_INVALID_FACE_LIST (HERR_BASIC_BASE+32) // Invalid face feature list
#define HERR_INVALID_BUFFER_SIZE (HERR_BASIC_BASE+33) // Invalid copy token
#define HERR_INVALID_IMAGE_STREAM_PARAM (HERR_BASIC_BASE+34) // Invalid image param
#define HERR_INVALID_SERIALIZATION_FAILED (HERR_BASIC_BASE+35) // Invalid face serialization failed
#define HERR_INVALID_DETECTION_INPUT (HERR_BASIC_BASE+36) // Failed to modify detector input size
#define HSUCCEED (0) // Success
#define HERR_BASIC_BASE 0X0001 // Basic error types
#define HERR_UNKNOWN HERR_BASIC_BASE // Unknown error
#define HERR_INVALID_PARAM (HERR_BASIC_BASE + 1) // Invalid parameter
#define HERR_INVALID_IMAGE_STREAM_HANDLE (HERR_BASIC_BASE + 24) // Invalid image stream handle
#define HERR_INVALID_CONTEXT_HANDLE (HERR_BASIC_BASE + 25) // Invalid context handle
#define HERR_INVALID_FACE_TOKEN (HERR_BASIC_BASE + 30) // Invalid face token
#define HERR_INVALID_FACE_FEATURE (HERR_BASIC_BASE + 31) // Invalid face feature
#define HERR_INVALID_FACE_LIST (HERR_BASIC_BASE + 32) // Invalid face feature list
// NOTE(review): this region is a diff render — pre-change and post-change
// versions of the same macros appear back to back (e.g. the two
// HERR_SESS_BASE lines). The repeated definitions are diff artifacts, not
// intended redefinitions; verify against the committed herror.h.
// ---- Basic / image errors (HERR_BASIC_BASE offsets 33..37) ----
#define HERR_INVALID_BUFFER_SIZE (HERR_BASIC_BASE + 33) // Invalid copy token
#define HERR_INVALID_IMAGE_STREAM_PARAM (HERR_BASIC_BASE + 34) // Invalid image param
#define HERR_INVALID_SERIALIZATION_FAILED (HERR_BASIC_BASE + 35) // Invalid face serialization failed
#define HERR_INVALID_DETECTION_INPUT (HERR_BASIC_BASE + 36) // Failed to modify detector input size
#define HERR_INVALID_IMAGE_BITMAP_HANDLE (HERR_BASIC_BASE + 37) // Invalid image bitmap handle
// ---- Session errors (base 0x500) — pre-change lines (no spaces around '+') ----
#define HERR_SESS_BASE 0X500 // Session error types
#define HERR_SESS_FUNCTION_UNUSABLE (HERR_SESS_BASE+2) // Function not usable
#define HERR_SESS_TRACKER_FAILURE (HERR_SESS_BASE+3) // Tracker module not initialized
#define HERR_SESS_INVALID_RESOURCE (HERR_SESS_BASE+10) // Invalid static resource
#define HERR_SESS_NUM_OF_MODELS_NOT_MATCH (HERR_SESS_BASE+11) // Number of models does not match
#define HERR_SESS_LANDMARK_NUM_NOT_MATCH (HERR_SESS_BASE+20) // The number of input landmark points does not match
// ---- Session errors — post-change lines ----
#define HERR_SESS_BASE 0X500 // Session error types
#define HERR_SESS_FUNCTION_UNUSABLE (HERR_SESS_BASE + 2) // Function not usable
#define HERR_SESS_TRACKER_FAILURE (HERR_SESS_BASE + 3) // Tracker module not initialized
#define HERR_SESS_INVALID_RESOURCE (HERR_SESS_BASE + 10) // Invalid static resource
#define HERR_SESS_NUM_OF_MODELS_NOT_MATCH (HERR_SESS_BASE + 11) // Number of models does not match
#define HERR_SESS_LANDMARK_NUM_NOT_MATCH (HERR_SESS_BASE + 20) // The number of input landmark points does not match
#define HERR_SESS_LANDMARK_NOT_ENABLE (HERR_SESS_BASE + 21) // Landmark module is not enabled — NOTE(review): original comment was copy-pasted from the line above; verify intended wording
#define HERR_SESS_KEY_POINT_NUM_NOT_MATCH (HERR_SESS_BASE + 22) // The number of input key points does not match
#define HERR_SESS_PIPELINE_FAILURE (HERR_SESS_BASE+8) // Pipeline module not initialized
#define HERR_SESS_PIPELINE_FAILURE (HERR_SESS_BASE + 8) // Pipeline module not initialized
// ---- Face recognition / feature management errors ----
#define HERR_SESS_REC_EXTRACT_FAILURE (HERR_SESS_BASE+15) // Face feature extraction not registered
#define HERR_SESS_REC_DEL_FAILURE (HERR_SESS_BASE+16) // Face feature deletion failed due to out of range index
#define HERR_SESS_REC_UPDATE_FAILURE (HERR_SESS_BASE+17) // Face feature update failed due to out of range index
#define HERR_SESS_REC_ADD_FEAT_EMPTY (HERR_SESS_BASE+18) // Feature vector for registration cannot be empty
#define HERR_SESS_REC_FEAT_SIZE_ERR (HERR_SESS_BASE+19) // Incorrect length of feature vector for registration
// NOTE(review): HERR_SESS_REC_INVALID_INDEX evaluates to HERR_SESS_BASE + 20,
// the same value as HERR_SESS_LANDMARK_NUM_NOT_MATCH above — the two error
// codes are numerically indistinguishable at runtime; confirm this is intended.
#define HERR_SESS_REC_INVALID_INDEX (HERR_SESS_BASE+20) // Invalid index number
#define HERR_SESS_REC_CONTRAST_FEAT_ERR (HERR_SESS_BASE+23) // Incorrect length of feature vector for comparison
#define HERR_SESS_REC_BLOCK_FULL (HERR_SESS_BASE+24) // Feature vector block full
#define HERR_SESS_REC_BLOCK_DEL_FAILURE (HERR_SESS_BASE+25) // Deletion failed
#define HERR_SESS_REC_BLOCK_UPDATE_FAILURE (HERR_SESS_BASE+26) // Update failed
#define HERR_SESS_REC_ID_ALREADY_EXIST (HERR_SESS_BASE+27) // ID already exists
#define HERR_SESS_REC_EXTRACT_FAILURE (HERR_SESS_BASE + 15) // Face feature extraction not registered
#define HERR_SESS_REC_DEL_FAILURE (HERR_SESS_BASE + 16) // Face feature deletion failed due to out of range index
#define HERR_SESS_REC_UPDATE_FAILURE (HERR_SESS_BASE + 17) // Face feature update failed due to out of range index
#define HERR_SESS_REC_ADD_FEAT_EMPTY (HERR_SESS_BASE + 18) // Feature vector for registration cannot be empty
#define HERR_SESS_REC_FEAT_SIZE_ERR (HERR_SESS_BASE + 19) // Incorrect length of feature vector for registration
#define HERR_SESS_REC_INVALID_INDEX (HERR_SESS_BASE + 20) // Invalid index number
#define HERR_SESS_REC_CONTRAST_FEAT_ERR (HERR_SESS_BASE + 23) // Incorrect length of feature vector for comparison
#define HERR_SESS_REC_BLOCK_FULL (HERR_SESS_BASE + 24) // Feature vector block full
#define HERR_SESS_REC_BLOCK_DEL_FAILURE (HERR_SESS_BASE + 25) // Deletion failed
#define HERR_SESS_REC_BLOCK_UPDATE_FAILURE (HERR_SESS_BASE + 26) // Update failed
#define HERR_SESS_REC_ID_ALREADY_EXIST (HERR_SESS_BASE + 27) // ID already exists
#define HERR_SESS_FACE_DATA_ERROR (HERR_SESS_BASE+30) // Face data parsing
#define HERR_SESS_FACE_DATA_ERROR (HERR_SESS_BASE + 30) // Face data parsing
#define HERR_SESS_FACE_REC_OPTION_ERROR (HERR_SESS_BASE+40) // An optional parameter is incorrect
#define HERR_SESS_FACE_REC_OPTION_ERROR (HERR_SESS_BASE + 40) // An optional parameter is incorrect
// ---- FeatureHub (embedded feature database) errors ----
#define HERR_FT_HUB_DISABLE (HERR_SESS_BASE+49) // FeatureHub is disabled
#define HERR_FT_HUB_OPEN_ERROR (HERR_SESS_BASE+50) // Database open error
#define HERR_FT_HUB_NOT_OPENED (HERR_SESS_BASE+51) // Database not opened
#define HERR_FT_HUB_NO_RECORD_FOUND (HERR_SESS_BASE+52) // No record found
#define HERR_FT_HUB_CHECK_TABLE_ERROR (HERR_SESS_BASE+53) // Data table check error
#define HERR_FT_HUB_INSERT_FAILURE (HERR_SESS_BASE+54) // Data insertion error
#define HERR_FT_HUB_PREPARING_FAILURE (HERR_SESS_BASE+55) // Data preparation error
#define HERR_FT_HUB_EXECUTING_FAILURE (HERR_SESS_BASE+56) // SQL execution error
#define HERR_FT_HUB_NOT_VALID_FOLDER_PATH (HERR_SESS_BASE+57) // Invalid folder path
#define HERR_FT_HUB_ENABLE_REPETITION (HERR_SESS_BASE+58) // Enable db function repeatedly
#define HERR_FT_HUB_DISABLE_REPETITION (HERR_SESS_BASE+59) // Disable db function repeatedly
#define HERR_FT_HUB_DISABLE (HERR_SESS_BASE + 49) // FeatureHub is disabled
#define HERR_FT_HUB_OPEN_ERROR (HERR_SESS_BASE + 50) // Database open error
#define HERR_FT_HUB_NOT_OPENED (HERR_SESS_BASE + 51) // Database not opened
#define HERR_FT_HUB_NO_RECORD_FOUND (HERR_SESS_BASE + 52) // No record found
#define HERR_FT_HUB_CHECK_TABLE_ERROR (HERR_SESS_BASE + 53) // Data table check error
#define HERR_FT_HUB_INSERT_FAILURE (HERR_SESS_BASE + 54) // Data insertion error
#define HERR_FT_HUB_PREPARING_FAILURE (HERR_SESS_BASE + 55) // Data preparation error
#define HERR_FT_HUB_EXECUTING_FAILURE (HERR_SESS_BASE + 56) // SQL execution error
#define HERR_FT_HUB_NOT_VALID_FOLDER_PATH (HERR_SESS_BASE + 57) // Invalid folder path
#define HERR_FT_HUB_ENABLE_REPETITION (HERR_SESS_BASE + 58) // Enable db function repeatedly
#define HERR_FT_HUB_DISABLE_REPETITION (HERR_SESS_BASE + 59) // Disable db function repeatedly
#define HERR_FT_HUB_NOT_FOUND_FEATURE (HERR_SESS_BASE + 60) // Get face feature error
// ---- Archive / model loading errors ----
#define HERR_ARCHIVE_LOAD_FAILURE (HERR_SESS_BASE+80) // Archive load failure
#define HERR_ARCHIVE_LOAD_MODEL_FAILURE (HERR_SESS_BASE+81) // Model load failure
#define HERR_ARCHIVE_FILE_FORMAT_ERROR (HERR_SESS_BASE+82) // The archive format is incorrect
#define HERR_ARCHIVE_REPETITION_LOAD (HERR_SESS_BASE+83) // Do not reload the model
#define HERR_ARCHIVE_NOT_LOAD (HERR_SESS_BASE+84) // Model not loaded
#define HERR_ARCHIVE_LOAD_FAILURE (HERR_SESS_BASE + 80) // Archive load failure
#define HERR_ARCHIVE_LOAD_MODEL_FAILURE (HERR_SESS_BASE + 81) // Model load failure
#define HERR_ARCHIVE_FILE_FORMAT_ERROR (HERR_SESS_BASE + 82) // The archive format is incorrect
#define HERR_ARCHIVE_REPETITION_LOAD (HERR_SESS_BASE + 83) // Do not reload the model
#define HERR_ARCHIVE_NOT_LOAD (HERR_SESS_BASE + 84) // Model not loaded
// ---- Hardware / device errors (base 0x900) ----
#define HERR_DEVICE_BASE 0X900 // hardware error
#define HERR_DEVICE_CUDA_NOT_SUPPORT (HERR_DEVICE_BASE + 1) // CUDA not supported
#define HERR_DEVICE_CUDA_TENSORRT_NOT_SUPPORT (HERR_DEVICE_BASE + 2) // CUDA TensorRT not supported
#define HERR_DEVICE_CUDA_UNKNOWN_ERROR (HERR_DEVICE_BASE + 20) // CUDA unknown error
#define HERR_DEVICE_CUDA_DISABLE (HERR_DEVICE_BASE + 21) // CUDA support is disabled
// [Anchor-End]
#endif //HYPERFACEREPO_HERROR_H
#endif // INSPIRE_FACE_HERROR_H

View File

@@ -1,12 +1,15 @@
// NOTE(review): diff render — pre-change and post-change images of the
// generated information.h are interleaved below. In a real header the
// duplicated include guards and the MINOR/PATCH defines with differing
// values ("1"/"7" vs "2"/"0") would trigger macro-redefinition warnings;
// here they are diff artifacts, not intended code.
//
// Created by tunm on 2024/1/31.
//
/**
 * Created by Jingyu Yan
 * @date 2024-10-01
 */
#ifndef HYPERFACEREPO_INFORMATION_H
#define HYPERFACEREPO_INFORMATION_H
#ifndef INSPIRE_FACE_INFORMATION_H
#define INSPIRE_FACE_INFORMATION_H
// Version strings generated from CMake (HEAD sets 1.2.0 in this commit).
#define INSPIRE_FACE_VERSION_MAJOR_STR "1"
#define INSPIRE_FACE_VERSION_MINOR_STR "1"
#define INSPIRE_FACE_VERSION_PATCH_STR "7"
#define INSPIRE_FACE_VERSION_MINOR_STR "2"
#define INSPIRE_FACE_VERSION_PATCH_STR "0"
#endif //HYPERFACEREPO_INFORMATION_H
#define INSPIRE_FACE_EXTENDED_INFORMATION "InspireFace[Community Edition]@General - Build Time: 2025-03-25"
#endif // INSPIRE_FACE_INFORMATION_H

View File

@@ -1,12 +1,15 @@
// NOTE(review): diff render of the configure_file template information.h.in —
// old and new include guards appear back to back; the @VAR@ placeholders are
// substituted by CMake's configure_file() (see the configure_file calls in
// the top-level CMakeLists.txt of this commit).
//
// Created by tunm on 2024/1/31.
//
/**
 * Created by Jingyu Yan
 * @date 2024-10-01
 */
#ifndef HYPERFACEREPO_INFORMATION_H
#define HYPERFACEREPO_INFORMATION_H
#ifndef INSPIRE_FACE_INFORMATION_H
#define INSPIRE_FACE_INFORMATION_H
// Version placeholders filled from INSPIRE_FACE_VERSION_* CMake variables.
#define INSPIRE_FACE_VERSION_MAJOR_STR "@INSPIRE_FACE_VERSION_MAJOR_STR@"
#define INSPIRE_FACE_VERSION_MINOR_STR "@INSPIRE_FACE_VERSION_MINOR_STR@"
#define INSPIRE_FACE_VERSION_PATCH_STR "@INSPIRE_FACE_VERSION_PATCH_STR@"
#endif //HYPERFACEREPO_INFORMATION_H
// @EXTENDED_INFORMATION@ is presumably set elsewhere in the CMake build — not
// visible in the HEAD excerpt; verify it is defined before configure_file runs.
#define INSPIRE_FACE_EXTENDED_INFORMATION "@EXTENDED_INFORMATION@"
#endif // INSPIRE_FACE_INFORMATION_H

View File

@@ -0,0 +1,42 @@
/**
 * Status- and condition-checking helper macros for InspireFace.
 * Relies on INSPIRE_LOGE / INSPIRE_LOGF from log.h and HSUCCEED from herror.h.
 */
#ifndef INSPIRE_FACE_CHECK_H
#define INSPIRE_FACE_CHECK_H
#include "log.h"
#include "herror.h"

/* Evaluates an expression yielding an int32_t status code; on any value other
 * than HSUCCEED, logs the code and returns it from the enclosing function. */
#define INSPIREFACE_RETURN_IF_ERROR(...)             \
    do {                                             \
        const int32_t _status = (__VA_ARGS__);       \
        if (_status != HSUCCEED) {                   \
            INSPIRE_LOGE("Error code: %d", _status); \
            return _status;                          \
        }                                            \
    } while (0)

/* Logs at the given severity suffix only when `condition` holds, e.g.
 * INSPIREFACE_LOG_IF(E, failed)("msg").
 * NOTE(review): expands to a bare `if` statement — wrap call sites in braces
 * to avoid dangling-else hazards; kept as-is to preserve existing usage. */
#define INSPIREFACE_LOG_IF(severity, condition) \
    if (condition)                              \
    INSPIRE_LOG##severity

/* Logs a fatal-severity check failure when `condition` is false. */
#define INSPIREFACE_CHECK(condition)                        \
    do {                                                    \
        if (!(condition)) {                                 \
            INSPIRE_LOGF("Check failed: (%s)", #condition); \
        }                                                   \
    } while (0)

/* Same as INSPIREFACE_CHECK, with an additional explanatory message. */
#define INSPIREFACE_CHECK_MSG(condition, message)                       \
    do {                                                                \
        if (!(condition)) {                                             \
            INSPIRE_LOGF("Check failed: (%s) %s", #condition, message); \
        }                                                               \
    } while (0)

/* Comparison checks.
 * Fix: the previous definitions appended a `<< "..."` stream to
 * INSPIREFACE_CHECK, which expands to a do/while(0) STATEMENT — streaming
 * into a statement is a syntax error at every use site. Rebuilt on
 * INSPIREFACE_CHECK_MSG using preprocessor stringification and string-literal
 * concatenation so the operand expressions still appear in the log message. */
#define INSPIREFACE_CHECK_EQ(a, b) INSPIREFACE_CHECK_MSG((a) == (b), "Expected equality of these values: " #a " vs " #b)
#define INSPIREFACE_CHECK_NE(a, b) INSPIREFACE_CHECK_MSG((a) != (b), "Expected inequality of these values: " #a " vs " #b)
#define INSPIREFACE_CHECK_LE(a, b) INSPIREFACE_CHECK_MSG((a) <= (b), "Expected " #a " <= " #b)
#define INSPIREFACE_CHECK_LT(a, b) INSPIREFACE_CHECK_MSG((a) < (b), "Expected " #a " < " #b)
#define INSPIREFACE_CHECK_GE(a, b) INSPIREFACE_CHECK_MSG((a) >= (b), "Expected " #a " >= " #b)
#define INSPIREFACE_CHECK_GT(a, b) INSPIREFACE_CHECK_MSG((a) > (b), "Expected " #a " > " #b)

/* Misspelled "INSPIREFAFECE" aliases retained for backward compatibility with
 * any existing call sites; prefer the correctly spelled macros above. */
#define INSPIREFAFECE_CHECK_LT(a, b) INSPIREFACE_CHECK_LT(a, b)
#define INSPIREFAFECE_CHECK_GE(a, b) INSPIREFACE_CHECK_GE(a, b)
#define INSPIREFAFECE_CHECK_GT(a, b) INSPIREFACE_CHECK_GT(a, b)
#endif  // INSPIRE_FACE_CHECK_H

View File

@@ -1,6 +1,7 @@
//
// Created by tunm on 2024/4/8.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include "log.h"
namespace inspire {
@@ -9,4 +10,4 @@ namespace inspire {
LogManager* LogManager::instance = nullptr;
std::mutex LogManager::mutex;
} // namespace inspire
} // namespace inspire

Some files were not shown because too many files have changed in this diff. Show More