Update inspireface to 1.2.0

This commit is contained in:
Jingyu
2025-03-25 00:51:26 +08:00
parent 977ea6795b
commit ca64996b84
388 changed files with 28584 additions and 13036 deletions

View File

@@ -1,5 +1,5 @@
cmake_minimum_required(VERSION 3.10)
project(HyperFaceTest)
project(InspireFaceTest)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3")
# If you want to test the benchmark, you need to set this to ON
@@ -22,11 +22,26 @@ if (ISF_ENABLE_TEST_EVALUATION)
endif ()
# Select the RKNN runtime dependency and library search path per NPU generation.
if (ISF_ENABLE_RKNN)
if (ISF_RKNPU_MAJOR STREQUAL "rknpu1")
# NOTE(review): the next four lines appear twice with different precompile dirs
# ("inspireface-precompile" vs "inspireface-precompile-lite") — this looks like the
# pre-/post-change lines of a diff rendered together; confirm only one set is intended.
set(DEPEND rknn_api dl)
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile/rknn/${ISF_RKNPU_MAJOR}/runtime/${ISF_RK_DEVICE_TYPE}/Linux/librknn_api/${CPU_ARCH}/)
message("Enable RKNN Inference")
link_directories(${ISF_RKNN_API_LIB})
set(DEPEND rknn_api dl)
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${ISF_RK_DEVICE_TYPE}/Linux/librknn_api/${CPU_ARCH}/)
message("Enable RKNN Inference")
link_directories(${ISF_RKNN_API_LIB})
set(DEPEND rknn_api dl)
elseif(ISF_RKNPU_MAJOR STREQUAL "rknpu2" AND ISF_RK_COMPILER_TYPE STREQUAL "aarch64")
set(DEPEND rknnrt dl)
# Android and Linux precompiled runtimes live under different platform directories
if(ANDROID)
set(RK_PLATFORM "Android")
else()
set(RK_PLATFORM "Linux")
endif()
set(ISF_RKNN_API_LIB ${ISF_THIRD_PARTY_DIR}/inspireface-precompile-lite/rknn/${ISF_RKNPU_MAJOR}/runtime/${RK_PLATFORM}/librknn_api/${ISF_RK_COMPILER_TYPE}/)
message("ISF_RKNN_API_LIB: ${ISF_RKNN_API_LIB}")
link_directories(${ISF_RKNN_API_LIB})
set(DEPEND rknnrt dl)
else()
# No NPU runtime selected: only libdl is required
set(DEPEND dl)
endif()
endif ()
include_directories(${SRC_DIR})
@@ -35,22 +50,24 @@ include_directories(${SRC_DIR})
set(TEST_COMMON_FILES ${CMAKE_CURRENT_SOURCE_DIR}/settings/test_settings.cpp)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/settings)
# =======================Internal Import Tests===========================
file(GLOB_RECURSE INTERNAL_TEST_INTERNAL_FILES unit/source/*.cpp)
add_executable(TestSource ${CMAKE_CURRENT_SOURCE_DIR}/test_source.cpp ${INTERNAL_TEST_INTERNAL_FILES} ${TEST_COMMON_FILES})
target_link_libraries(TestSource InspireFace ${DEPEND})
target_include_directories(TestSource PUBLIC
# =======================Internal Base Import Tests===========================
file(GLOB_RECURSE INTERNAL_TEST_INTERNAL_FILES unit/base/*.cpp)
add_executable(TestBase ${CMAKE_CURRENT_SOURCE_DIR}/test_base.cpp ${INTERNAL_TEST_INTERNAL_FILES} ${TEST_COMMON_FILES})
target_link_libraries(TestBase InspireFace ${DEPEND})
target_include_directories(TestBase PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}/
${ISF_THIRD_PARTY_DIR}/spdlog/include
${ISF_THIRD_PARTY_DIR}/Catch2/single_include/
${ISF_THIRD_PARTY_DIR}/indicators/include/
${SRC_DIR}
${INSPIRECV_INCLUDE_PATH}
)
set_target_properties(TestSource PROPERTIES
set_target_properties(TestBase PROPERTIES
RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/test/")
# =======================External API Testing===========================
file(GLOB_RECURSE TEST_INTERNAL_FILES unit/api/*.cpp)
add_executable(Test ${CMAKE_CURRENT_SOURCE_DIR}/test.cpp ${TEST_INTERNAL_FILES} ${TEST_COMMON_FILES}
@@ -69,6 +86,7 @@ set_target_properties(Test PROPERTIES
# Print Message
message(STATUS ">>>>>>>>>>>>>")
message(STATUS "InspireFace Test:")
message(STATUS "\t ISF_ENABLE_BENCHMARK: ${ISF_ENABLE_BENCHMARK}")
message(STATUS "\t ISF_ENABLE_USE_LFW_DATA: ${ISF_ENABLE_USE_LFW_DATA}")
@@ -76,4 +94,4 @@ message(STATUS "\t ISF_ENABLE_TEST_EVALUATION: ${ISF_ENABLE_TEST_EVALUATION}")
# Install bin
install(TARGETS Test RUNTIME DESTINATION ${CMAKE_INSTALL_PREFIX}/test)
# NOTE(review): the next two lines look like the old active line and its commented-out
# replacement rendered together from a diff; confirm only the commented form remains.
install(TARGETS TestSource RUNTIME DESTINATION ${CMAKE_INSTALL_PREFIX}/test)
# install(TARGETS TestSource RUNTIME DESTINATION ${CMAKE_INSTALL_PREFIX}/test)

View File

@@ -0,0 +1,56 @@
/**
* Created by Jingyu Yan
* @date 2025-03-14
*/
#pragma once
#ifndef INSPIREFACE_TEST_CHECK_
#define INSPIREFACE_TEST_CHECK_
#include <cstdint> // for uint8_t
#include <limits> // for std::numeric_limits
#include <sstream>
// Assert two 8-bit images are equal within the default MSE threshold (0.01).
// Delegates to REQUIRE_EQ_IMAGE_WITH_EPS so the diagnostic logic lives in one place.
#define REQUIRE_EQ_IMAGE(a, b, h, w, c) REQUIRE_EQ_IMAGE_WITH_EPS(a, b, h, w, c, 0.01)
// Assert two 8-bit images are equal within a caller-supplied MSE threshold.
// The diagnostic message is attached via INFO *before* the assertion: in Catch2,
// INFO scopes a message to the following assertion, and a failed REQUIRE aborts
// the test case — so the original pattern (INFO inside an `if` after REQUIRE)
// was dead code and the message was never reported.
#define REQUIRE_EQ_IMAGE_WITH_EPS(a, b, h, w, c, eps)                                                                                 \
    do {                                                                                                                              \
        double mse_ = CalculateImageMSE(a, b, h, w, c);                                                                               \
        std::stringstream ss_;                                                                                                        \
        ss_ << "Image comparison failed! MSE: " << mse_ << " (threshold: " << (eps) << "), dimensions: " << h << "x" << w << "x" << c; \
        INFO(ss_.str());                                                                                                              \
        REQUIRE(mse_ <= (eps));                                                                                                       \
    } while (0)
// Mean squared error between two 8-bit images, computed on values scaled to [0, 1].
// Returns +infinity when either buffer is null or any dimension is non-positive.
inline double CalculateImageMSE(const uint8_t* a, const uint8_t* b, int h, int w, int c) {
    const bool invalid = (a == nullptr) || (b == nullptr) || (h <= 0) || (w <= 0) || (c <= 0);
    if (invalid) {
        return std::numeric_limits<double>::infinity();
    }
    const size_t count = static_cast<size_t>(h) * static_cast<size_t>(w) * static_cast<size_t>(c);
    double accum = 0.0;
    for (size_t idx = 0; idx < count; ++idx) {
        // Normalize each channel value to [0, 1] before differencing
        const double delta = static_cast<double>(a[idx]) / 255.0 - static_cast<double>(b[idx]) / 255.0;
        accum += delta * delta;
    }
    return accum / static_cast<double>(count);
}
#endif // INSPIREFACE_TEST_CHECK_

View File

@@ -1,6 +1,7 @@
//
// Created by Tunm-Air13 on 2024/4/7.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#ifndef INSPIREFACE_ENVIRO_H
#define INSPIREFACE_ENVIRO_H

View File

@@ -1,10 +1,10 @@
//
// Created by Tunm-Air13 on 2023/5/24.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include "test_settings.h"
std::string getTestDataDir() {
return Enviro::getInstance().getTestResDir();
}

View File

@@ -1,6 +1,7 @@
//
// Created by Tunm-Air13 on 2023/5/24.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma once
#ifndef BIGGUYSMAIN_TEST_SETTINGS_H
#define BIGGUYSMAIN_TEST_SETTINGS_H
@@ -8,6 +9,8 @@
#include <spdlog/spdlog.h>
#include <iostream>
#include "enviro.h"
#include "check.h"
#include "inspireface/middleware/system.h"
// Define the test model file
#define TEST_MODEL_FILE Enviro::getInstance().getPackName()
@@ -41,6 +44,8 @@ using namespace Catch::Detail;
#define TEST_PRINT_OUTPUT(open) TestMessageBroadcast test_msg_broadcast_##open(open)
// Set the log output level
#define LOG_OUTPUT_LEVEL(level) LogLevelBroadcast log_level_broadcast_##level(level);
// Print test error message
#define TEST_ERROR_PRINT(...) SPDLOG_LOGGER_CALL(spdlog::get("TEST"), spdlog::level::err, __VA_ARGS__)
// Get the test data directory
#define GET_DIR getTestDataDir()

View File

@@ -1,6 +1,7 @@
//
// Created by tunm on 2023/10/11.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <string>
#define CATCH_CONFIG_RUNNER
@@ -57,6 +58,8 @@ int main(int argc, char* argv[]) {
std::string testDir;
std::string packPath;
HInt32 ret;
// Add command line options
auto cli = session.cli() | Catch::clara::Opt(pack, "value")["--pack"]("Resource pack filename") |
Catch::clara::Opt(testDir, "value")["--test_dir"]("Test dir resource") |
@@ -77,6 +80,21 @@ int main(int argc, char* argv[]) {
TEST_PRINT("Using default test dir: {}", getTestDataDir());
}
#if defined(ISF_ENABLE_TENSORRT)
HInt32 support_cuda;
ret = HFCheckCudaDeviceSupport(&support_cuda);
if (ret != HSUCCEED) {
TEST_ERROR_PRINT("An error occurred while checking CUDA device support: {}", ret);
return ret;
}
if (!support_cuda) {
TEST_ERROR_PRINT("CUDA device support is not available");
return HERR_DEVICE_CUDA_NOT_SUPPORT;
}
HFPrintCudaDeviceInfo();
#endif
std::string fullPath;
// Check whether custom parameters are set
if (!pack.empty()) {
@@ -94,10 +112,10 @@ int main(int argc, char* argv[]) {
SET_RUNTIME_FULLPATH_NAME(fullPath);
}
std::cout << fullPath << std::endl;
auto ret = HFLaunchInspireFace(fullPath.c_str());
TEST_PRINT("Launching InspireFace with path: {}", fullPath);
ret = HFLaunchInspireFace(fullPath.c_str());
if (ret != HSUCCEED) {
spdlog::error("An error occurred while starting InspireFace: {}", ret);
TEST_ERROR_PRINT("An error occurred while starting InspireFace: {}", ret);
return ret;
}

View File

@@ -1,18 +1,17 @@
//
// Created by Tunm-Air13 on 2023/9/12.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#define CATCH_CONFIG_RUNNER
#include <iostream>
#include "settings/test_settings.h"
#include <spdlog/sinks/stdout_color_sinks.h>
#include "spdlog/spdlog.h"
#include "Initialization_module/launch.h"
#include "initialization_module/launch.h"
#define ENABLE_DRAW_SPLIT_LINE 1 // Whether dividers are printed during the test
#define ENABLE_TEST_MSG 1 // TEST PRINT output
#define ENABLE_DRAW_SPLIT_LINE 1 // Whether dividers are printed during the test
#define ENABLE_TEST_MSG 1 // TEST PRINT output
int init_test_logger() {
std::string name("TEST");

View File

@@ -0,0 +1,670 @@
/**
* Created by Jingyu Yan
* @date 2025-01-12
*/
#include <cstdio>
#include <iostream>
#include <string>
#include <vector>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "unit/test_helper/simple_csv_writer.h"
#include "unit/test_helper/test_help.h"
#include "unit/test_helper/test_tools.h"
#include "middleware/costman.h"
#ifdef ISF_ENABLE_BENCHMARK
TEST_CASE("test_BenchmarkFaceDetect", "[benchmark]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
const int loop = 1000;
SECTION("Benchmark face detection@160") {
auto pixLevel = 160;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, pixLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
// Get a face picture
HFImageStream imgHandle;
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
inspirecv::TimeSpend timeSpend("Face Detect@160");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
SECTION("Benchmark face detection@320") {
auto pixLevel = 320;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, pixLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
// Get a face picture
HFImageStream imgHandle;
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
inspirecv::TimeSpend timeSpend("Face Detect@320");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
SECTION("Benchmark face detection@640") {
auto pixLevel = 640;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, pixLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
// Get a face picture
HFImageStream imgHandle;
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
inspirecv::TimeSpend timeSpend("Face Detect@640");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
}
// Benchmarks light-track mode: the same single-face image is tracked repeatedly
// and the per-iteration time is accumulated and printed.
TEST_CASE("test_BenchmarkFaceTrack", "[benchmark]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    const int iterations = 1000;
    auto detectLevel = 160;
    HFSessionCustomParameter sessionCfg = {0};
    HFSession trackSession;
    HResult status = HFCreateInspireFaceSession(sessionCfg, HF_DETECT_MODE_LIGHT_TRACK, 3, detectLevel, -1, &trackSession);
    REQUIRE(status == HSUCCEED);
    // Load one face image and wrap it in a reusable image stream
    auto facePicture = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
    HFImageStream streamHandle;
    status = CVImageToImageStream(facePicture, streamHandle);
    REQUIRE(status == HSUCCEED);
    inspirecv::TimeSpend tracker("Face Track");
    for (int iter = 0; iter < iterations; ++iter) {
        tracker.Start();
        // Track faces in the frame; exactly one face is expected in this picture
        HFMultipleFaceData faces = {0};
        status = HFExecuteFaceTrack(trackSession, streamHandle, &faces);
        REQUIRE(status == HSUCCEED);
        REQUIRE(faces.detectedNum == 1);
        tracker.Stop();
    }
    std::cout << tracker << std::endl;
    // Release the stream first, then the session
    status = HFReleaseImageStream(streamHandle);
    REQUIRE(status == HSUCCEED);
    status = HFReleaseInspireFaceSession(trackSession);
    REQUIRE(status == HSUCCEED);
}
// Benchmarks repeated tracking on a pre-detected face using a recognition-enabled
// light-track session. Removed unused locals (`pixLevel`, `parameter`) left over
// from copy-paste: the session is created via the Optional API with defaults (-1).
TEST_CASE("test_BenchmarkFaceExtractWithAlign", "[benchmark]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    const int loop = 1000;
    HResult ret;
    HFDetectMode detMode = HF_DETECT_MODE_LIGHT_TRACK;
    HFSession session;
    ret = HFCreateInspireFaceSessionOptional(HF_ENABLE_FACE_RECOGNITION, detMode, 3, -1, -1, &session);
    REQUIRE(ret == HSUCCEED);
    // Face track on a single-face picture; the stream is reused in the timed loop
    auto dstImage = inspirecv::Image::Create(GET_DATA("data/search/Teresa_Williams_0001_1k.jpg"));
    HFImageStream imgHandle;
    ret = CVImageToImageStream(dstImage, imgHandle);
    REQUIRE(ret == HSUCCEED);
    HFMultipleFaceData multipleFaceData = {0};
    ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
    REQUIRE(ret == HSUCCEED);
    // NOTE(review): despite the test name, the timed loop below only repeats
    // HFExecuteFaceTrack — no explicit extract/align call is visible here; confirm
    // that tracking with recognition enabled is what this benchmark intends to time.
    inspirecv::TimeSpend timeSpend("Face Extract With Align");
    for (int i = 0; i < loop; i++) {
        timeSpend.Start();
        HFMultipleFaceData loopFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &loopFaceData);
        REQUIRE(ret == HSUCCEED);
        timeSpend.Stop();
    }
    std::cout << timeSpend << std::endl;
    ret = HFReleaseImageStream(imgHandle);
    REQUIRE(ret == HSUCCEED);
    ret = HFReleaseInspireFaceSession(session);
    REQUIRE(ret == HSUCCEED);
}
// Benchmarks 1:1 feature comparison: extracts a feature from a registration image
// and a query image of the same person, then times HFFaceComparison in a loop.
// Feature buffers use std::vector instead of variable-length arrays — VLAs are a
// non-standard C++ extension (rejected by MSVC) and the size is only known at runtime.
TEST_CASE("test_BenchmarkFaceComparison", "[benchmark]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    const int loop = 1000;
    HResult ret;
    HFSessionCustomParameter parameter = {0};
    parameter.enable_recognition = 1;
    HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
    HFSession session;
    ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
    REQUIRE(ret == HSUCCEED);
    // Registration image: detect and extract the first face's feature
    auto image = inspirecv::Image::Create(GET_DATA("data/bulk/woman.png"));
    HFImageStream imgHandle;
    ret = CVImageToImageStream(image, imgHandle);
    REQUIRE(ret == HSUCCEED);
    HFMultipleFaceData multipleFaceDataZy = {0};
    ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceDataZy);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(multipleFaceDataZy.detectedNum > 0);
    HInt32 featureNum;
    HFGetFeatureLength(&featureNum);
    REQUIRE(featureNum > 0);
    std::vector<HFloat> featureCacheZy(featureNum);
    ret = HFFaceFeatureExtractCpy(session, imgHandle, multipleFaceDataZy.tokens[0], featureCacheZy.data());
    REQUIRE(ret == HSUCCEED);
    HFFaceFeature featureZy = {0};
    featureZy.size = featureNum;
    featureZy.data = featureCacheZy.data();
    // Query image of the same person: extract the second feature
    auto imageQuery = inspirecv::Image::Create(GET_DATA("data/bulk/woman_search.jpeg"));
    HFImageStream imgHandleQuery;
    ret = CVImageToImageStream(imageQuery, imgHandleQuery);
    REQUIRE(ret == HSUCCEED);
    HFMultipleFaceData multipleFaceDataQuery = {0};
    ret = HFExecuteFaceTrack(session, imgHandleQuery, &multipleFaceDataQuery);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(multipleFaceDataQuery.detectedNum > 0);
    std::vector<HFloat> featureCacheZyQuery(featureNum);
    ret = HFFaceFeatureExtractCpy(session, imgHandleQuery, multipleFaceDataQuery.tokens[0], featureCacheZyQuery.data());
    REQUIRE(ret == HSUCCEED);
    HFFaceFeature featureZyQuery = {0};
    featureZyQuery.size = featureNum;
    featureZyQuery.data = featureCacheZyQuery.data();
    // Time the comparison call only; extraction happened outside the loop
    inspirecv::TimeSpend timeSpend("Face Comparison");
    for (int i = 0; i < loop; ++i) {
        timeSpend.Start();
        HFloat compRes;
        ret = HFFaceComparison(featureZy, featureZyQuery, &compRes);
        REQUIRE(ret == HSUCCEED);
        timeSpend.Stop();
    }
    std::cout << timeSpend << std::endl;
    ret = HFReleaseImageStream(imgHandle);
    REQUIRE(ret == HSUCCEED);
    ret = HFReleaseImageStream(imgHandleQuery);
    REQUIRE(ret == HSUCCEED);
    // Finish
    ret = HFReleaseInspireFaceSession(session);
    REQUIRE(ret == HSUCCEED);
}
TEST_CASE("test_BenchmarkFaceHubSearchPersistence", "[benchmark]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("Benchmark search 1k@Persistence") {
const int loop = 1000;
HResult ret;
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
std::vector<std::vector<HFloat>> baseFeatures;
size_t genSizeOfBase = 1000;
HInt32 featureLength;
HFGetFeatureLength(&featureLength);
REQUIRE(featureLength > 0);
for (int i = 0; i < genSizeOfBase; ++i) {
auto feat = GenerateRandomFeature(featureLength);
baseFeatures.push_back(feat);
// Construct face feature
HFFaceFeature feature = {0};
feature.size = feat.size();
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
ret = HFFeatureHubGetFaceCount(&totalFace);
REQUIRE(ret == HSUCCEED);
REQUIRE(totalFace == genSizeOfBase);
HInt32 targetId = 800;
auto targetFeature = baseFeatures[targetId - 1];
auto searchFeat = SimulateSimilarVector(targetFeature);
HFFaceFeature searchFeature = {0};
searchFeature.size = searchFeat.size();
searchFeature.data = searchFeat.data();
HFloat confidence = 0.0f;
HFFaceFeatureIdentity mostSimilar = {0};
inspirecv::TimeSpend timeSpend("Face Search 1k@Persistence");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
ret = HFFeatureHubFaceSearch(searchFeature, &confidence, &mostSimilar);
REQUIRE(ret == HSUCCEED);
REQUIRE(mostSimilar.id == targetId);
REQUIRE(confidence > 0.88f);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
delete[] dbPathStr;
}
SECTION("Benchmark search 5k@Persistence") {
const int loop = 1000;
HResult ret;
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
std::vector<std::vector<HFloat>> baseFeatures;
size_t genSizeOfBase = 5000;
HInt32 featureLength;
HFGetFeatureLength(&featureLength);
REQUIRE(featureLength > 0);
for (int i = 0; i < genSizeOfBase; ++i) {
auto feat = GenerateRandomFeature(featureLength);
baseFeatures.push_back(feat);
// Construct face feature
HFFaceFeature feature = {0};
feature.size = feat.size();
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
ret = HFFeatureHubGetFaceCount(&totalFace);
REQUIRE(ret == HSUCCEED);
REQUIRE(totalFace == genSizeOfBase);
HInt32 targetId = 4800;
auto targetFeature = baseFeatures[targetId - 1];
auto searchFeat = SimulateSimilarVector(targetFeature);
HFFaceFeature searchFeature = {0};
searchFeature.size = searchFeat.size();
searchFeature.data = searchFeat.data();
HFloat confidence = 0.0f;
HFFaceFeatureIdentity mostSimilar = {0};
inspirecv::TimeSpend timeSpend("Face Search 5k@Persistence");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
ret = HFFeatureHubFaceSearch(searchFeature, &confidence, &mostSimilar);
REQUIRE(ret == HSUCCEED);
REQUIRE(mostSimilar.id == targetId);
REQUIRE(confidence > 0.88f);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
delete[] dbPathStr;
}
SECTION("Benchmark search 10k@Persistence") {
const int loop = 1000;
HResult ret;
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
std::vector<std::vector<HFloat>> baseFeatures;
size_t genSizeOfBase = 10000;
HInt32 featureLength;
HFGetFeatureLength(&featureLength);
REQUIRE(featureLength > 0);
for (int i = 0; i < genSizeOfBase; ++i) {
auto feat = GenerateRandomFeature(featureLength);
baseFeatures.push_back(feat);
// Construct face feature
HFFaceFeature feature = {0};
feature.size = feat.size();
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
ret = HFFeatureHubGetFaceCount(&totalFace);
REQUIRE(ret == HSUCCEED);
REQUIRE(totalFace == genSizeOfBase);
HInt32 targetId = 9800;
auto targetFeature = baseFeatures[targetId - 1];
auto searchFeat = SimulateSimilarVector(targetFeature);
HFFaceFeature searchFeature = {0};
searchFeature.size = searchFeat.size();
searchFeature.data = searchFeat.data();
HFloat confidence = 0.0f;
HFFaceFeatureIdentity mostSimilar = {0};
inspirecv::TimeSpend timeSpend("Face Search 10k@Persistence");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
ret = HFFeatureHubFaceSearch(searchFeature, &confidence, &mostSimilar);
REQUIRE(ret == HSUCCEED);
REQUIRE(mostSimilar.id == targetId);
REQUIRE(confidence > 0.88f);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
delete[] dbPathStr;
}
}
TEST_CASE("test_BenchmarkFaceHubSearchMemory", "[benchmark]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("Benchmark search 1k@Memory") {
const int loop = 1000;
HResult ret;
HFFeatureHubConfiguration configuration;
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 0;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
std::vector<std::vector<HFloat>> baseFeatures;
size_t genSizeOfBase = 1000;
HInt32 featureLength;
HFGetFeatureLength(&featureLength);
REQUIRE(featureLength > 0);
for (int i = 0; i < genSizeOfBase; ++i) {
auto feat = GenerateRandomFeature(featureLength);
baseFeatures.push_back(feat);
// Construct face feature
HFFaceFeature feature = {0};
feature.size = feat.size();
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
ret = HFFeatureHubGetFaceCount(&totalFace);
REQUIRE(ret == HSUCCEED);
REQUIRE(totalFace == genSizeOfBase);
HInt32 targetId = 800;
auto targetFeature = baseFeatures[targetId - 1];
auto searchFeat = SimulateSimilarVector(targetFeature);
HFFaceFeature searchFeature = {0};
searchFeature.size = searchFeat.size();
searchFeature.data = searchFeat.data();
HFloat confidence = 0.0f;
HFFaceFeatureIdentity mostSimilar = {0};
inspirecv::TimeSpend timeSpend("Face Search 1k@Memory");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
ret = HFFeatureHubFaceSearch(searchFeature, &confidence, &mostSimilar);
REQUIRE(ret == HSUCCEED);
REQUIRE(mostSimilar.id == targetId);
REQUIRE(confidence > 0.88f);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
}
SECTION("Benchmark search 5k@Persistence") {
const int loop = 1000;
HResult ret;
HFFeatureHubConfiguration configuration;
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 0;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
std::vector<std::vector<HFloat>> baseFeatures;
size_t genSizeOfBase = 5000;
HInt32 featureLength;
HFGetFeatureLength(&featureLength);
REQUIRE(featureLength > 0);
for (int i = 0; i < genSizeOfBase; ++i) {
auto feat = GenerateRandomFeature(featureLength);
baseFeatures.push_back(feat);
// Construct face feature
HFFaceFeature feature = {0};
feature.size = feat.size();
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
ret = HFFeatureHubGetFaceCount(&totalFace);
REQUIRE(ret == HSUCCEED);
REQUIRE(totalFace == genSizeOfBase);
HInt32 targetId = 4800;
auto targetFeature = baseFeatures[targetId - 1];
auto searchFeat = SimulateSimilarVector(targetFeature);
HFFaceFeature searchFeature = {0};
searchFeature.size = searchFeat.size();
searchFeature.data = searchFeat.data();
HFloat confidence = 0.0f;
HFFaceFeatureIdentity mostSimilar = {0};
inspirecv::TimeSpend timeSpend("Face Search 5k@Memory");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
ret = HFFeatureHubFaceSearch(searchFeature, &confidence, &mostSimilar);
REQUIRE(ret == HSUCCEED);
REQUIRE(mostSimilar.id == targetId);
REQUIRE(confidence > 0.88f);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
}
SECTION("Benchmark search 10k@Persistence") {
const int loop = 1000;
HResult ret;
HFFeatureHubConfiguration configuration;
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 0;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
std::vector<std::vector<HFloat>> baseFeatures;
size_t genSizeOfBase = 10000;
HInt32 featureLength;
HFGetFeatureLength(&featureLength);
REQUIRE(featureLength > 0);
for (int i = 0; i < genSizeOfBase; ++i) {
auto feat = GenerateRandomFeature(featureLength);
baseFeatures.push_back(feat);
// Construct face feature
HFFaceFeature feature = {0};
feature.size = feat.size();
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
ret = HFFeatureHubGetFaceCount(&totalFace);
REQUIRE(ret == HSUCCEED);
REQUIRE(totalFace == genSizeOfBase);
HInt32 targetId = 9800;
auto targetFeature = baseFeatures[targetId - 1];
auto searchFeat = SimulateSimilarVector(targetFeature);
HFFaceFeature searchFeature = {0};
searchFeature.size = searchFeat.size();
searchFeature.data = searchFeat.data();
HFloat confidence = 0.0f;
HFFaceFeatureIdentity mostSimilar = {0};
inspirecv::TimeSpend timeSpend("Face Search 10k@Memory");
for (size_t i = 0; i < loop; i++) {
timeSpend.Start();
ret = HFFeatureHubFaceSearch(searchFeature, &confidence, &mostSimilar);
REQUIRE(ret == HSUCCEED);
REQUIRE(mostSimilar.id == targetId);
REQUIRE(confidence > 0.88f);
timeSpend.Stop();
}
std::cout << timeSpend << std::endl;
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
}
}
#endif

View File

@@ -1,11 +1,10 @@
//
// Created by Tunm-Air13 on 2024/3/26.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "opencv2/opencv.hpp"
#include "unit/test_helper/simple_csv_writer.h"
#include "unit/test_helper/test_help.h"
#include "unit/test_helper/test_tools.h"
@@ -24,24 +23,15 @@ TEST_CASE("test_Evaluation", "[face_evaluation") {
REQUIRE(ret == HSUCCEED);
float mostSim = -1.0f;
auto succ = FindMostSimilarScoreFromTwoPic(session,
GET_DATA("data/bulk/jntm.jpg"),
GET_DATA("data/bulk/kun.jpg"),
mostSim);
auto succ = FindMostSimilarScoreFromTwoPic(session, GET_DATA("data/bulk/jntm.jpg"), GET_DATA("data/bulk/kun.jpg"), mostSim);
CHECK(succ);
TEST_PRINT("kun v kun :{}", mostSim);
succ = FindMostSimilarScoreFromTwoPic(session,
GET_DATA("data/bulk/jntm.jpg"),
GET_DATA("data/bulk/Rob_Lowe_0001.jpg"),
mostSim);
succ = FindMostSimilarScoreFromTwoPic(session, GET_DATA("data/bulk/jntm.jpg"), GET_DATA("data/bulk/Rob_Lowe_0001.jpg"), mostSim);
CHECK(succ);
TEST_PRINT("kun v other :{}", mostSim);
succ = FindMostSimilarScoreFromTwoPic(session,
GET_DATA("data/bulk/kun.jpg"),
GET_DATA("data/bulk/view.jpg"),
mostSim);
succ = FindMostSimilarScoreFromTwoPic(session, GET_DATA("data/bulk/kun.jpg"), GET_DATA("data/bulk/view.jpg"), mostSim);
CHECK(!succ);
TEST_PRINT("kun v other :{}", mostSim);
@@ -50,7 +40,6 @@ TEST_CASE("test_Evaluation", "[face_evaluation") {
REQUIRE(ret == HSUCCEED);
}
SECTION("Test LFW evaluation") {
#ifdef ISF_ENABLE_TEST_EVALUATION
HResult ret;
@@ -65,14 +54,12 @@ TEST_CASE("test_Evaluation", "[face_evaluation") {
auto pairs = ReadPairs(getTestLFWFunneledEvaTxt());
// Hide cursor
show_console_cursor(false);
BlockProgressBar bar{
option::BarWidth{60},
option::Start{"["},
option::End{"]"},
option::PostfixText{"Extracting face features"},
option::ForegroundColor{Color::white} ,
option::FontStyles{std::vector<FontStyle>{FontStyle::bold}}
};
BlockProgressBar bar{option::BarWidth{60},
option::Start{"["},
option::End{"]"},
option::PostfixText{"Extracting face features"},
option::ForegroundColor{Color::white},
option::FontStyles{std::vector<FontStyle>{FontStyle::bold}}};
auto progress = 0.0f;
for (int i = 0; i < pairs.size(); ++i) {
@@ -87,20 +74,16 @@ TEST_CASE("test_Evaluation", "[face_evaluation") {
person1 = pair[0];
imgNum1 = std::stoi(pair[1]);
imgNum2 = std::stoi(pair[2]);
imgPath1 = PathJoin(PathJoin(getLFWFunneledDir(), person1),
person1 + "_" + zfill(imgNum1, 4) + ".jpg");
imgPath2 = PathJoin(PathJoin(getLFWFunneledDir(), person1),
person1 + "_" + zfill(imgNum2, 4) + ".jpg");
imgPath1 = PathJoin(PathJoin(getLFWFunneledDir(), person1), person1 + "_" + zfill(imgNum1, 4) + ".jpg");
imgPath2 = PathJoin(PathJoin(getLFWFunneledDir(), person1), person1 + "_" + zfill(imgNum2, 4) + ".jpg");
match = 1;
} else {
person1 = pair[0];
imgNum1 = std::stoi(pair[1]);
person2 = pair[2];
imgNum2 = std::stoi(pair[3]);
imgPath1 = PathJoin(PathJoin(getLFWFunneledDir(), person1),
person1 + "_" + zfill(imgNum1, 4) + ".jpg");
imgPath2 = PathJoin(PathJoin(getLFWFunneledDir(), person2),
person2 + "_" + zfill(imgNum2, 4) + ".jpg");
imgPath1 = PathJoin(PathJoin(getLFWFunneledDir(), person1), person1 + "_" + zfill(imgNum1, 4) + ".jpg");
imgPath2 = PathJoin(PathJoin(getLFWFunneledDir(), person2), person2 + "_" + zfill(imgNum2, 4) + ".jpg");
match = 0;
}
@@ -132,5 +115,4 @@ TEST_CASE("test_Evaluation", "[face_evaluation") {
REQUIRE(ret == HSUCCEED);
#endif
}
}

View File

@@ -1,6 +1,7 @@
//
// Created by tunm on 2023/10/11.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include "settings/test_settings.h"
@@ -21,6 +22,4 @@ TEST_CASE("test_FeatureContext", "[face_context]") {
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
}

View File

@@ -1,11 +1,12 @@
//
// Created by tunm on 2023/10/12.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "../test_helper/test_tools.h"
#include "../test_helper/test_help.h"
TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
DRAW_SPLIT_LINE
@@ -39,14 +40,14 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
parameter.enable_face_attribute = 1;
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
HInt32 faceDetectPixelLevel = 160;
HInt32 faceDetectPixelLevel = 320;
ret = HFCreateInspireFaceSession(parameter, detMode, 5, faceDetectPixelLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
SECTION("a black girl") {
HFImageStream imgHandle;
auto img = cv::imread(GET_DATA("data/attribute/1423.jpg"));
REQUIRE(!img.empty());
auto img = inspirecv::Image::Create(GET_DATA("data/attribute/1423.jpg"));
REQUIRE(!img.Empty());
ret = CVImageToImageStream(img, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -56,8 +57,7 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
REQUIRE(multipleFaceData.detectedNum == 1);
// Run pipeline
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData,
HF_ENABLE_FACE_ATTRIBUTE);
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData, HF_ENABLE_FACE_ATTRIBUTE);
REQUIRE(ret == HSUCCEED);
HFFaceAttributeResult result = {0};
@@ -77,8 +77,8 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
SECTION("two young white women") {
HFImageStream imgHandle;
auto img = cv::imread(GET_DATA("data/attribute/7242.jpg"));
REQUIRE(!img.empty());
auto img = inspirecv::Image::Create(GET_DATA("data/attribute/7242.jpg"));
REQUIRE(!img.Empty());
ret = CVImageToImageStream(img, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -88,8 +88,7 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
REQUIRE(multipleFaceData.detectedNum == 2);
// Run pipeline
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData,
HF_ENABLE_FACE_ATTRIBUTE);
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData, HF_ENABLE_FACE_ATTRIBUTE);
REQUIRE(ret == HSUCCEED);
HFFaceAttributeResult result = {0};
@@ -114,65 +113,24 @@ TEST_CASE("test_FacePipelineAttribute", "[face_pipeline_attribute]") {
REQUIRE(ret == HSUCCEED);
}
TEST_CASE("test_FacePipelineRobustness", "[robustness]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("Exception") {
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
// Input exception data
HFImageStream nullHandle = {0};
HFMultipleFaceData nullfaces = {0};
ret =
HFMultipleFacePipelineProcessOptional(session, nullHandle, &nullfaces, HF_ENABLE_NONE);
REQUIRE(ret == HERR_INVALID_IMAGE_STREAM_HANDLE);
// Get a face picture
HFImageStream img1Handle;
auto img1 = cv::imread(GET_DATA("data/bulk/image_T1.jpeg"));
ret = CVImageToImageStream(img1, img1Handle);
REQUIRE(ret == HSUCCEED);
// Input correct Image and exception faces struct
ret =
HFMultipleFacePipelineProcessOptional(session, img1Handle, &nullfaces, HF_ENABLE_NONE);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageStream(img1Handle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
// Multiple release
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HERR_INVALID_CONTEXT_HANDLE);
HFDeBugShowResourceStatistics();
}
}
TEST_CASE("test_FacePipeline", "[face_pipeline]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("rgb liveness detect") {
#ifndef INFERENCE_WRAPPER_ENABLE_RKNN2
/** The anti spoofing model based on RGB faces seems to have some problems with quantization under RKNPU2, so it is not started yet */
HResult ret;
HFSessionCustomParameter parameter = {0};
parameter.enable_liveness = 1;
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
ret = HFCreateInspireFaceSession(parameter, detMode, 3, 320, -1, &session);
REQUIRE(ret == HSUCCEED);
// Get a face picture
HFImageStream img1Handle;
auto img1 = cv::imread(GET_DATA("data/bulk/image_T1.jpeg"));
auto img1 = inspirecv::Image::Create(GET_DATA("data/bulk/image_T1.jpeg"));
ret = CVImageToImageStream(img1, img1Handle);
REQUIRE(ret == HSUCCEED);
@@ -189,7 +147,7 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
TEST_PRINT("{}", confidence.confidence[0]);
REQUIRE(ret == HSUCCEED);
CHECK(confidence.num > 0);
CHECK(confidence.confidence[0] > 0.9);
CHECK(confidence.confidence[0] > 0.8);
ret = HFReleaseImageStream(img1Handle);
REQUIRE(ret == HSUCCEED);
@@ -197,7 +155,7 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
// fake face
HFImageStream img2Handle;
auto img2 = cv::imread(GET_DATA("data/bulk/rgb_fake.jpg"));
auto img2 = inspirecv::Image::Create(GET_DATA("data/bulk/rgb_fake.jpg"));
ret = CVImageToImageStream(img2, img2Handle);
REQUIRE(ret == HSUCCEED);
ret = HFExecuteFaceTrack(session, img2Handle, &multipleFaceData);
@@ -216,6 +174,9 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
ret = HFReleaseInspireFaceSession(session);
session = nullptr;
REQUIRE(ret == HSUCCEED);
#else
TEST_PRINT("The anti spoofing model based on RGB faces seems to have some problems with quantization under RKNPU2, so we skip this test.");
#endif
}
SECTION("face mask detect") {
@@ -229,7 +190,7 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
// Get a face picture
HFImageStream img1Handle;
auto img1 = cv::imread(GET_DATA("data/bulk/mask2.jpg"));
auto img1 = inspirecv::Image::Create(GET_DATA("data/bulk/mask2.jpg"));
ret = CVImageToImageStream(img1, img1Handle);
REQUIRE(ret == HSUCCEED);
@@ -253,7 +214,7 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
// no mask face
HFImageStream img2Handle;
auto img2 = cv::imread(GET_DATA("data/bulk/face_sample.png"));
auto img2 = inspirecv::Image::Create(GET_DATA("data/bulk/face_sample.png"));
ret = CVImageToImageStream(img2, img2Handle);
REQUIRE(ret == HSUCCEED);
ret = HFExecuteFaceTrack(session, img2Handle, &multipleFaceData);
@@ -280,12 +241,12 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HInt32 option = HF_ENABLE_QUALITY;
HFSession session;
ret = HFCreateInspireFaceSessionOptional(option, detMode, 3, -1, -1, &session);
ret = HFCreateInspireFaceSessionOptional(option, detMode, 3, 320, -1, &session);
REQUIRE(ret == HSUCCEED);
// Get a face picture
HFImageStream superiorHandle;
auto superior = cv::imread(GET_DATA("data/bulk/yifei.jpg"));
auto superior = inspirecv::Image::Create(GET_DATA("data/bulk/yifei.jpg"));
ret = CVImageToImageStream(superior, superiorHandle);
REQUIRE(ret == HSUCCEED);
@@ -295,18 +256,17 @@ TEST_CASE("test_FacePipeline", "[face_pipeline]") {
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum > 0);
ret =
HFMultipleFacePipelineProcessOptional(session, superiorHandle, &multipleFaceData, option);
ret = HFMultipleFacePipelineProcessOptional(session, superiorHandle, &multipleFaceData, option);
REQUIRE(ret == HSUCCEED);
HFloat quality;
ret = HFFaceQualityDetect(session, multipleFaceData.tokens[0], &quality);
REQUIRE(ret == HSUCCEED);
CHECK(quality > 0.85);
CHECK(quality > 0.8);
// blur image
HFImageStream blurHandle;
auto blur = cv::imread(GET_DATA("data/bulk/blur.jpg"));
auto blur = inspirecv::Image::Create(GET_DATA("data/bulk/blur.jpg"));
ret = CVImageToImageStream(blur, blurHandle);
REQUIRE(ret == HSUCCEED);
@@ -349,7 +309,7 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
SECTION("open eyes") {
// Get a face picture
HFImageStream imgHandle;
auto img = cv::imread(GET_DATA("data/reaction/open_eyes.png"));
auto img = inspirecv::Image::Create(GET_DATA("data/reaction/open_eyes.png"));
ret = CVImageToImageStream(img, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -364,8 +324,8 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
REQUIRE(ret == HSUCCEED);
// Get results
HFFaceIntereactionState result;
ret = HFGetFaceIntereactionStateResult(session, &result);
HFFaceInteractionState result;
ret = HFGetFaceInteractionStateResult(session, &result);
REQUIRE(multipleFaceData.detectedNum == result.num);
REQUIRE(ret == HSUCCEED);
@@ -380,7 +340,7 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
SECTION("close eyes") {
// Get a face picture
HFImageStream imgHandle;
auto img = cv::imread(GET_DATA("data/reaction/close_eyes.jpeg"));
auto img = inspirecv::Image::Create(GET_DATA("data/reaction/close_eyes.jpeg"));
ret = CVImageToImageStream(img, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -395,8 +355,8 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
REQUIRE(ret == HSUCCEED);
// Get results
HFFaceIntereactionState result;
ret = HFGetFaceIntereactionStateResult(session, &result);
HFFaceInteractionState result;
ret = HFGetFaceInteractionStateResult(session, &result);
REQUIRE(multipleFaceData.detectedNum == result.num);
REQUIRE(ret == HSUCCEED);
@@ -411,7 +371,7 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
SECTION("Close one eye and open the other") {
// Get a face picture
HFImageStream imgHandle;
auto img = cv::imread(GET_DATA("data/reaction/close_open_eyes.jpeg"));
auto img = inspirecv::Image::Create(GET_DATA("data/reaction/close_open_eyes.jpeg"));
ret = CVImageToImageStream(img, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -426,8 +386,8 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
REQUIRE(ret == HSUCCEED);
// Get results
HFFaceIntereactionState result;
ret = HFGetFaceIntereactionStateResult(session, &result);
HFFaceInteractionState result;
ret = HFGetFaceInteractionStateResult(session, &result);
REQUIRE(multipleFaceData.detectedNum == result.num);
REQUIRE(ret == HSUCCEED);
@@ -441,4 +401,87 @@ TEST_CASE("test_FaceReaction", "[face_reaction]") {
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
}
TEST_CASE("test_TrackModeFaceAction", "[face_action]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
HResult ret;
HFSessionCustomParameter parameter = {0};
parameter.enable_interaction_liveness = 1;
HFDetectMode detMode = HF_DETECT_MODE_LIGHT_TRACK;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
#if 0
SECTION("Action Blink") {
auto start = 130, end = 150;
std::vector<std::string> filenames = generateFilenames("frame-%04d.jpg", start, end);
int count = 0;
for (size_t i = 0; i < filenames.size(); i++) {
auto filename = filenames[i];
HFImageStream imgHandle;
auto image = inspirecv::Image::Create(GET_DATA("data/video_frames/" + filename));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum > 0);
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData, HF_ENABLE_INTERACTION);
REQUIRE(ret == HSUCCEED);
HFFaceInteractionsActions result;
ret = HFGetFaceInteractionActionsResult(session, &result);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == result.num);
count += result.blink[0];
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
}
// Blink at least once
REQUIRE(count > 0);
}
#endif
#if 0
SECTION("Action Jaw Open") {
auto start = 110, end = 150;
std::vector<std::string> filenames = generateFilenames("frame-%04d.jpg", start, end);
int count = 0;
for (size_t i = 0; i < filenames.size(); i++) {
auto filename = filenames[i];
HFImageStream imgHandle;
auto image = inspirecv::Image::Create(GET_DATA("data/video_frames/" + filename));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum > 0);
ret = HFMultipleFacePipelineProcessOptional(session, imgHandle, &multipleFaceData, HF_ENABLE_INTERACTION);
REQUIRE(ret == HSUCCEED);
HFFaceInteractionsActions result;
ret = HFGetFaceInteractionActionsResult(session, &result);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == result.num);
count += result.jawOpen[0];
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
}
// Jaw open at least once
REQUIRE(count > 0);
}
#endif
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}

View File

@@ -1,14 +1,15 @@
//
// Created by tunm on 2023/10/11.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "opencv2/opencv.hpp"
#include "unit/test_helper/simple_csv_writer.h"
#include "unit/test_helper/test_help.h"
#include "unit/test_helper/test_tools.h"
#include "middleware/costman.h"
TEST_CASE("test_FaceTrack", "[face_track]") {
DRAW_SPLIT_LINE
@@ -20,11 +21,12 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
TEST_ERROR_PRINT("error ret :{}", ret);
REQUIRE(ret == HSUCCEED);
// Get a face picture
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/kun.jpg"));
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -37,24 +39,24 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Detect face position
auto rect = multipleFaceData.rects[0];
HFaceRect expect = {0};
expect.x = 98;
expect.y = 146;
expect.width = 233 - expect.x;
expect.height = 272 - expect.y;
expect.x = 79;
expect.y = 104;
expect.width = 168;
expect.height = 167;
auto iou = CalculateOverlap(rect, expect);
cv::Rect cvRect(rect.x, rect.y, rect.width, rect.height);
cv::rectangle(image, cvRect, cv::Scalar(255, 0, 124), 2);
cv::imwrite("ww.jpg", image);
// The iou is allowed to have an error of 10%
CHECK(iou == Approx(1.0f).epsilon(0.3));
auto cvRect = inspirecv::Rect<int>::Create(rect.x, rect.y, rect.width, rect.height);
image.DrawRect(cvRect, {0, 0, 255}, 2);
image.Write("ww.jpg");
// The iou is allowed to have an error of 25%
CHECK(iou == Approx(1.0f).epsilon(0.25));
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
// Prepare non-face images
HFImageStream viewHandle;
auto view = cv::imread(GET_DATA("data/bulk/view.jpg"));
auto view = inspirecv::Image::Create(GET_DATA("data/bulk/view.jpg"));
ret = CVImageToImageStream(view, viewHandle);
REQUIRE(ret == HSUCCEED);
ret = HFExecuteFaceTrack(session, viewHandle, &multipleFaceData);
@@ -68,6 +70,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
REQUIRE(ret == HSUCCEED);
}
#if 0
SECTION("Face tracking stability from frames") {
HResult ret;
HFSessionCustomParameter parameter = {0};
@@ -83,7 +86,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
for (int i = 0; i < filenames.size(); ++i) {
auto filename = filenames[i];
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/video_frames/" + filename));
auto image = inspirecv::Image::Create(GET_DATA("data/video_frames/" + filename));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -96,10 +99,10 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
continue;
}
auto rect = multipleFaceData.rects[0];
cv::Rect cvRect(rect.x, rect.y, rect.width, rect.height);
cv::rectangle(image, cvRect, cv::Scalar(255, 0, 124), 2);
std::string save = GET_SAVE_DATA("data/video_frames") + "/" + std::to_string(i) + ".jpg";
cv::imwrite(save, image);
auto cvRect = inspirecv::Rect<int>::Create(rect.x, rect.y, rect.width, rect.height);
image.DrawRect(cvRect, {0, 0, 255}, 2);
std::string save = GET_SAVE_DATA("video_frames") + "/" + std::to_string(i) + ".jpg";
image.Write(save);
auto id = multipleFaceData.trackIds[0];
// TEST_PRINT("{}", id);
if (id != expectedId) {
@@ -116,6 +119,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
#endif
SECTION("Head pose estimation") {
HResult ret;
@@ -130,7 +134,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Left side face
HFImageStream leftHandle;
auto left = cv::imread(GET_DATA("data/pose/left_face.jpeg"));
auto left = inspirecv::Image::Create(GET_DATA("data/pose/left_face.jpeg"));
ret = CVImageToImageStream(left, leftHandle);
REQUIRE(ret == HSUCCEED);
@@ -150,7 +154,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Right-handed rotation
HFImageStream rightHandle;
auto right = cv::imread(GET_DATA("data/pose/right_face.png"));
auto right = inspirecv::Image::Create(GET_DATA("data/pose/right_face.png"));
ret = CVImageToImageStream(right, rightHandle);
REQUIRE(ret == HSUCCEED);
@@ -165,7 +169,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Rise head
HFImageStream riseHandle;
auto rise = cv::imread(GET_DATA("data/pose/rise_face.jpeg"));
auto rise = inspirecv::Image::Create(GET_DATA("data/pose/rise_face.jpeg"));
ret = CVImageToImageStream(rise, riseHandle);
REQUIRE(ret == HSUCCEED);
@@ -173,12 +177,12 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
pitch = multipleFaceData.angles.pitch[0];
CHECK(pitch > 5);
CHECK(pitch > 3);
HFReleaseImageStream(riseHandle);
// Lower head
HFImageStream lowerHandle;
auto lower = cv::imread(GET_DATA("data/pose/lower_face.jpeg"));
auto lower = inspirecv::Image::Create(GET_DATA("data/pose/lower_face.jpeg"));
ret = CVImageToImageStream(lower, lowerHandle);
REQUIRE(ret == HSUCCEED);
@@ -191,7 +195,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Roll head
HFImageStream leftWryneckHandle;
auto leftWryneck = cv::imread(GET_DATA("data/pose/left_wryneck.png"));
auto leftWryneck = inspirecv::Image::Create(GET_DATA("data/pose/left_wryneck.png"));
ret = CVImageToImageStream(leftWryneck, leftWryneckHandle);
REQUIRE(ret == HSUCCEED);
@@ -204,7 +208,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Roll head
HFImageStream rightWryneckHandle;
auto rightWryneck = cv::imread(GET_DATA("data/pose/right_wryneck.png"));
auto rightWryneck = inspirecv::Image::Create(GET_DATA("data/pose/right_wryneck.png"));
ret = CVImageToImageStream(rightWryneck, rightWryneckHandle);
REQUIRE(ret == HSUCCEED);
@@ -212,7 +216,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
roll = multipleFaceData.angles.roll[0];
CHECK(roll > 30);
CHECK(roll > 25);
HFReleaseImageStream(rightWryneckHandle);
// finish
@@ -220,117 +224,44 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
REQUIRE(ret == HSUCCEED);
}
SECTION("Face detection benchmark") {
#ifdef ISF_ENABLE_BENCHMARK
SECTION("Face detection benchmark@160") {
int loop = 1000;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
HInt32 pixLevel = 160;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, pixLevel, -1, &session);
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
// Prepare an image
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/kun.jpg"));
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
BenchmarkRecord record(getBenchmarkRecordFile());
REQUIRE(ret == HSUCCEED);
HFMultipleFaceData multipleFaceData = {0};
auto start = (double)cv::getTickCount();
auto timer = inspire::Timer();
for (int i = 0; i < loop; ++i) {
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
}
auto cost = ((double)cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
auto cost = timer.GetCostTime();
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
TEST_PRINT("<Benchmark> Face Detect@160 -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
record.insertBenchmarkData("Face Detect@160", loop, cost, cost / loop);
TEST_PRINT("<Benchmark> Face Detect -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
record.insertBenchmarkData("Face Detect", loop, cost, cost / loop);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
SECTION("Face detection benchmark@320") {
int loop = 1000;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
HInt32 pixLevel = 320;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, pixLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
// Prepare an image
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
BenchmarkRecord record(getBenchmarkRecordFile());
REQUIRE(ret == HSUCCEED);
HFMultipleFaceData multipleFaceData = {0};
auto start = (double)cv::getTickCount();
for (int i = 0; i < loop; ++i) {
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
}
auto cost = ((double)cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
TEST_PRINT("<Benchmark> Face Detect@320 -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
record.insertBenchmarkData("Face Detect@320", loop, cost, cost / loop);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
SECTION("Face detection benchmark@640") {
int loop = 1000;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
HInt32 pixLevel = 640;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, pixLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
// Prepare an image
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
BenchmarkRecord record(getBenchmarkRecordFile());
REQUIRE(ret == HSUCCEED);
HFMultipleFaceData multipleFaceData = {0};
auto start = (double)cv::getTickCount();
for (int i = 0; i < loop; ++i) {
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
}
auto cost = ((double)cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
TEST_PRINT("<Benchmark> Face Detect@640 -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
record.insertBenchmarkData("Face Detect@640", loop, cost, cost / loop);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
#else
TEST_PRINT("Skip the face detection benchmark test. To run it, you need to turn on the benchmark test.");
TEST_PRINT("Skip the face detection benchmark test. To run it, you need to turn on the benchmark test.");
#endif
}
SECTION("Face light track benchmark") {
#ifdef ISF_ENABLE_BENCHMARK
@@ -344,7 +275,7 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Prepare an image
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/kun.jpg"));
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
BenchmarkRecord record(getBenchmarkRecordFile());
@@ -352,13 +283,13 @@ TEST_CASE("test_FaceTrack", "[face_track]") {
// Case: Execute the benchmark using the VIDEO mode(Track)
REQUIRE(ret == HSUCCEED);
HFMultipleFaceData multipleFaceData = {0};
auto start = (double)cv::getTickCount();
auto timer = inspire::Timer();
for (int i = 0; i < loop; ++i) {
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
}
auto cost = ((double)cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
auto cost = timer.GetCostTime();
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum > 0);
REQUIRE(multipleFaceData.detectedNum >= 1);
TEST_PRINT("<Benchmark> Face Track -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
record.insertBenchmarkData("Face Track", loop, cost, cost / loop);
@@ -377,12 +308,12 @@ TEST_CASE("test_MultipleLevelFaceDetect", "[face_detect]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("Detect input 160px") {
SECTION("Detect input 192px") {
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
HInt32 detectPixelLevel = 160;
HInt32 detectPixelLevel = 192;
ret = HFCreateInspireFaceSession(parameter, detMode, 20, detectPixelLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
HFSessionSetTrackPreviewSize(session, detectPixelLevel);
@@ -390,7 +321,7 @@ TEST_CASE("test_MultipleLevelFaceDetect", "[face_detect]") {
// Get a face picture
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/pedestrian.png"));
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/pedestrian.png"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -422,7 +353,7 @@ TEST_CASE("test_MultipleLevelFaceDetect", "[face_detect]") {
// Get a face picture
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/pedestrian.png"));
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/pedestrian.png"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -432,7 +363,7 @@ TEST_CASE("test_MultipleLevelFaceDetect", "[face_detect]") {
REQUIRE(ret == HSUCCEED);
CHECK(multipleFaceData.detectedNum > 9);
CHECK(multipleFaceData.detectedNum < 15);
CHECK(multipleFaceData.detectedNum < 12);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -454,7 +385,7 @@ TEST_CASE("test_MultipleLevelFaceDetect", "[face_detect]") {
// Get a face picture
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/pedestrian.png"));
auto image = inspirecv::Image::Create(GET_DATA("data/bulk/pedestrian.png"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
@@ -464,167 +395,11 @@ TEST_CASE("test_MultipleLevelFaceDetect", "[face_detect]") {
REQUIRE(ret == HSUCCEED);
CHECK(multipleFaceData.detectedNum > 15);
CHECK(multipleFaceData.detectedNum < 25);
CHECK(multipleFaceData.detectedNum < 21);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
}
TEST_CASE("test_FaceShowLandmark", "[face_landmark]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
std::vector<std::string> images_path = {
GET_DATA("data/reaction/close_open_eyes.jpeg"),
GET_DATA("data/reaction/open_eyes.png"),
GET_DATA("data/reaction/close_eyes.jpeg"),
};
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
HInt32 detectPixelLevel = 160;
ret = HFCreateInspireFaceSession(parameter, detMode, 20, detectPixelLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
HFSessionSetTrackPreviewSize(session, detectPixelLevel);
HFSessionSetFilterMinimumFacePixelSize(session, 0);
for (size_t i = 0; i < images_path.size(); i++) {
HFImageStream imgHandle;
auto image = cv::imread(images_path[i]);
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum > 0);
HInt32 numOfLmk;
HFGetNumOfFaceDenseLandmark(&numOfLmk);
HPoint2f denseLandmarkPoints[numOfLmk];
ret = HFGetFaceDenseLandmarkFromFaceToken(multipleFaceData.tokens[0], denseLandmarkPoints, numOfLmk);
REQUIRE(ret == HSUCCEED);
for (size_t i = 0; i < numOfLmk; i++) {
cv::Point2f p(denseLandmarkPoints[i].x, denseLandmarkPoints[i].y);
cv::circle(image, p, 0, (0, 0, 255), 2);
}
cv::imwrite("lml_" + std::to_string(i) + ".jpg", image);
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
}
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
TEST_CASE("test_FaceDetectConfidence", "[face_track]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("DetectConfidenceSchedule-1") {
// Schedule 1:
HFloat threshold = 0.4f;
HInt32 detectPixelLevel = 160;
HInt32 maxDetectNum = 20;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, maxDetectNum, detectPixelLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
ret = HFSessionSetFaceDetectThreshold(session, threshold);
REQUIRE(ret == HSUCCEED);
// Prepare an image
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/pedestrian.png"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
for (int i = 0; i < multipleFaceData.detectedNum; i++) {
CHECK(multipleFaceData.detConfidence[i] >= threshold);
}
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
SECTION("DetectConfidenceSchedule-2") {
// Schedule 2:
HFloat threshold = 0.7f;
HInt32 detectPixelLevel = 320;
HInt32 maxDetectNum = 10;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, maxDetectNum, detectPixelLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
ret = HFSessionSetFaceDetectThreshold(session, threshold);
REQUIRE(ret == HSUCCEED);
// Prepare an image
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/pedestrian.png"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
for (int i = 0; i < multipleFaceData.detectedNum; i++) {
CHECK(multipleFaceData.detConfidence[i] >= threshold);
}
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
SECTION("DetectConfidenceSchedule-3") {
// Schedule 3:
HFloat threshold = 0.80f;
HInt32 detectPixelLevel = 640;
HInt32 maxDetectNum = 20;
HResult ret;
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, maxDetectNum, detectPixelLevel, -1, &session);
REQUIRE(ret == HSUCCEED);
ret = HFSessionSetFaceDetectThreshold(session, threshold);
REQUIRE(ret == HSUCCEED);
// Prepare an image
HFImageStream imgHandle;
auto image = cv::imread(GET_DATA("data/bulk/pedestrian.png"));
ret = CVImageToImageStream(image, imgHandle);
REQUIRE(ret == HSUCCEED);
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
for (int i = 0; i < multipleFaceData.detectedNum; i++) {
CHECK(multipleFaceData.detConfidence[i] >= threshold);
}
ret = HFReleaseImageStream(imgHandle);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}

View File

@@ -1,6 +1,7 @@
//
// Created by tunm on 2024/4/13.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
@@ -13,17 +14,17 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
SECTION("FeatureHub basic function") {
HResult ret;
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
@@ -37,17 +38,17 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
SECTION("FeatureHub search top-k") {
HResult ret;
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
@@ -61,19 +62,14 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
for (int i = 0; i < genSizeOfBase; ++i) {
auto feat = GenerateRandomFeature(featureLength);
baseFeatures.push_back(feat);
auto name = std::to_string(i);
// Establish a security buffer
std::vector<char> nameBuffer(name.begin(), name.end());
nameBuffer.push_back('\0');
// Construct face feature
HFFaceFeature feature = {0};
feature.size = feat.size();
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.customId = i;
identity.tag = nameBuffer.data();
ret = HFFeatureHubInsertFeature(identity);
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
@@ -82,8 +78,8 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
REQUIRE(totalFace == genSizeOfBase);
// 2000 data was imported
HInt32 targetId = 523;
auto targetFeature = baseFeatures[targetId];
HInt32 targetId = 524;
auto targetFeature = baseFeatures[targetId - 1];
std::vector<std::vector<HFloat>> similarVectors;
std::vector<HInt32> coverIds = {2, 300, 524, 789, 1024, 1995};
@@ -95,8 +91,7 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.customId = coverIds[i];
identity.tag = "HOLD";
identity.id = coverIds[i];
ret = HFFeatureHubFaceUpdate(identity);
REQUIRE(ret == HSUCCEED);
}
@@ -111,12 +106,9 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
ret = HFFeatureHubFaceSearchTopK(searchFeature, topK, &results);
REQUIRE(ret == HSUCCEED);
coverIds.push_back(targetId);
REQUIRE(coverIds.size() == results.size);
for (int i = 0; i < results.size; ++i) {
REQUIRE(std::find(coverIds.begin(), coverIds.end(), results.customIds[i]) !=
coverIds.end());
REQUIRE(std::find(coverIds.begin(), coverIds.end(), results.ids[i]) != coverIds.end());
}
ret = HFFeatureHubDataDisable();
@@ -129,10 +121,10 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
HResult ret;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 0;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
@@ -140,20 +132,20 @@ TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
REQUIRE(ret == HSUCCEED);
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HERR_FT_HUB_ENABLE_REPETITION);
REQUIRE(ret == HSUCCEED);
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HERR_FT_HUB_DISABLE_REPETITION);
REQUIRE(ret == HSUCCEED);
delete[] dbPathStr;
}
SECTION("Only memory storage is used") {
HResult ret;
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
configuration.enablePersistence = 0;
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
@@ -170,17 +162,17 @@ TEST_CASE("test_ConcurrencyInsertion", "[FeatureHub][Concurrency]") {
TEST_PRINT_OUTPUT(true);
HResult ret;
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
@@ -199,7 +191,7 @@ TEST_CASE("test_ConcurrencyInsertion", "[FeatureHub][Concurrency]") {
auto beginGenId = 2000;
for (int i = 0; i < numThreads; ++i) {
threads.emplace_back([=]() { // 使用值捕获以避免捕获引用后变量改变
threads.emplace_back([=]() {
for (int j = 0; j < insertsPerThread; ++j) {
auto feat = GenerateRandomFeature(featureLength);
auto name = std::to_string(beginGenId + j + i * insertsPerThread);
@@ -210,10 +202,10 @@ TEST_CASE("test_ConcurrencyInsertion", "[FeatureHub][Concurrency]") {
feature.data = feat.data();
HFFaceFeatureIdentity featureIdentity = {0};
featureIdentity.feature = &feature;
featureIdentity.customId =
beginGenId + j + i * insertsPerThread; // 确保 customId 唯一
featureIdentity.tag = nameBuffer.data();
auto ret = HFFeatureHubInsertFeature(featureIdentity);
// featureIdentity.customId = beginGenId + j + i * insertsPerThread;
// featureIdentity.tag = nameBuffer.data();
HFaceId allocId;
auto ret = HFFeatureHubInsertFeature(featureIdentity, &allocId);
REQUIRE(ret == HSUCCEED);
}
});
@@ -226,9 +218,7 @@ TEST_CASE("test_ConcurrencyInsertion", "[FeatureHub][Concurrency]") {
HInt32 count;
ret = HFFeatureHubGetFaceCount(&count);
REQUIRE(ret == HSUCCEED);
REQUIRE(count ==
baseNum + numThreads * insertsPerThread); // Ensure that the previous base data is
// added to the newly inserted data
REQUIRE(count == baseNum + numThreads * insertsPerThread); // Ensure that the previous base data is added to the newly inserted data
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
@@ -241,17 +231,17 @@ TEST_CASE("test_ConcurrencyRemove", "[FeatureHub][Concurrency]") {
TEST_PRINT_OUTPUT(true);
HResult ret;
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
@@ -276,9 +266,10 @@ TEST_CASE("test_ConcurrencyRemove", "[FeatureHub][Concurrency]") {
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.customId = i;
identity.tag = nameBuffer.data();
ret = HFFeatureHubInsertFeature(identity);
// identity.customId = i;
// identity.tag = nameBuffer.data();
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
@@ -305,7 +296,8 @@ TEST_CASE("test_ConcurrencyRemove", "[FeatureHub][Concurrency]") {
HInt32 remainingCount;
ret = HFFeatureHubGetFaceCount(&remainingCount);
REQUIRE(ret == HSUCCEED);
REQUIRE(remainingCount == genSizeOfBase - numThreads * removePerThread);
// need exclude id=0
REQUIRE(remainingCount - 1 == genSizeOfBase - numThreads * removePerThread);
TEST_PRINT("Remaining Count: {}", remainingCount);
ret = HFFeatureHubDataDisable();
@@ -319,17 +311,17 @@ TEST_CASE("test_ConcurrencySearch", "[FeatureHub][Concurrency]") {
TEST_PRINT_OUTPUT(true);
HResult ret;
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
@@ -353,9 +345,10 @@ TEST_CASE("test_ConcurrencySearch", "[FeatureHub][Concurrency]") {
feature.data = feat.data();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.customId = i;
identity.tag = nameBuffer.data();
ret = HFFeatureHubInsertFeature(identity);
// identity.customId = i;
// identity.tag = nameBuffer.data();
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
HInt32 totalFace;
@@ -372,10 +365,9 @@ TEST_CASE("test_ConcurrencySearch", "[FeatureHub][Concurrency]") {
for (int i = 0; i < numberOfSimilar; ++i) {
auto index = targetIds[i];
HFFaceFeatureIdentity identity = {0};
ret = HFFeatureHubGetFaceIdentity(index, &identity);
ret = HFFeatureHubGetFaceIdentity(index + 1, &identity);
REQUIRE(ret == HSUCCEED);
std::vector<HFloat> feature(identity.feature->data,
identity.feature->data + identity.feature->size);
std::vector<HFloat> feature(identity.feature->data, identity.feature->data + identity.feature->size);
auto simFeat = SimulateSimilarVector(feature);
HFFaceFeature simFeature = {0};
simFeature.data = simFeat.data();
@@ -427,7 +419,7 @@ TEST_CASE("test_ConcurrencySearch", "[FeatureHub][Concurrency]") {
HFloat score;
HFFaceFeatureIdentity identity = {0};
HFFeatureHubFaceSearch(feature, &score, &identity);
CHECK(identity.customId == targetId);
REQUIRE(identity.id == targetId + 1);
}
for (int j = 0; j < 50; ++j) {
int idx = dis(gen);
@@ -437,7 +429,7 @@ TEST_CASE("test_ConcurrencySearch", "[FeatureHub][Concurrency]") {
HFloat score;
HFFaceFeatureIdentity identity = {0};
HFFeatureHubFaceSearch(feature, &score, &identity);
CHECK(identity.customId == -1);
REQUIRE(identity.id == -1);
}
});
}
@@ -456,17 +448,17 @@ TEST_CASE("test_FeatureCache", "[FeatureHub][Concurrency]") {
TEST_PRINT_OUTPUT(true);
HResult ret;
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
@@ -478,10 +470,8 @@ TEST_CASE("test_FeatureCache", "[FeatureHub][Concurrency]") {
feature.size = randomVec.size();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.tag = "FK";
identity.customId = 12;
ret = HFFeatureHubInsertFeature(identity);
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
auto simVec = SimulateSimilarVector(randomVec);
@@ -491,7 +481,7 @@ TEST_CASE("test_FeatureCache", "[FeatureHub][Concurrency]") {
for (int i = 0; i < 10; ++i) {
HFFaceFeatureIdentity capture = {0};
ret = HFFeatureHubGetFaceIdentity(12, &capture);
ret = HFFeatureHubGetFaceIdentity(allocId, &capture);
REQUIRE(ret == HSUCCEED);
HFFaceFeature target = {0};
@@ -510,108 +500,53 @@ TEST_CASE("test_FeatureCache", "[FeatureHub][Concurrency]") {
delete[] dbPathStr;
}
TEST_CASE("test_DataPersistence", "[feature_manage]") {
TEST_CASE("test_FeatureHubManualInput", "[FeatureHub][ManualInput]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
HResult ret;
HFFeatureHubConfiguration configuration;
configuration.primaryKeyMode = HF_PK_MANUAL_INPUT;
configuration.enablePersistence = 0;
TEST_PRINT("Start enable feature hub");
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
TEST_PRINT("Enable feature hub success");
// Generate 10 random feature
std::vector<std::vector<HFloat>> features;
std::vector<std::string> identities;
for (int i = 0; i < 10; ++i) {
auto feat = GenerateRandomFeature(512);
features.push_back(feat);
identities.push_back("id_" + std::to_string(i));
std::vector<HFaceId> ids = {10086, 23541, 2124, 24, 204};
for (auto id : ids) {
auto randomVec = GenerateRandomFeature(512);
HFFaceFeature feature = {0};
feature.data = randomVec.data();
feature.size = randomVec.size();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.id = id;
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
REQUIRE(ret == HSUCCEED);
}
SECTION("Insert") {
HResult ret;
HFFeatureHubConfiguration configuration = {0};
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
if (std::remove(configuration.dbPath) != 0) {
spdlog::trace("Maybe the file does not exist");
}
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
for (size_t i = 0; i < features.size(); i++) {
HFFaceFeature feature = {0};
feature.data = features[i].data();
feature.size = features[i].size();
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.tag = const_cast<char *>(identities[i].c_str());
identity.customId = i;
ret = HFFeatureHubInsertFeature(identity);
REQUIRE(ret == HSUCCEED);
// Get the feature from the database
HFFaceFeatureIdentity capture = {0};
ret = HFFeatureHubGetFaceIdentity(i, &capture);
REQUIRE(ret == HSUCCEED);
// Check the feature
HFFaceFeature target = {0};
target.data = capture.feature->data;
target.size = capture.feature->size;
HFloat cosine;
ret = HFFaceComparison(target, feature, &cosine);
REQUIRE(ret == HSUCCEED);
REQUIRE(cosine > 0.99f);
}
// Check number of faces
HInt32 count;
ret = HFFeatureHubGetFaceCount(&count);
REQUIRE(ret == HSUCCEED);
REQUIRE(count == features.size());
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
delete[] dbPathStr;
HFFeatureHubExistingIds existingIds = {0};
ret = HFFeatureHubGetExistingIds(&existingIds);
REQUIRE(ret == HSUCCEED);
REQUIRE(existingIds.size == ids.size());
for (int i = 0; i < existingIds.size; ++i) {
TEST_PRINT("Existing ID: {}", existingIds.ids[i]);
REQUIRE(existingIds.ids[i] == ids[i]);
}
SECTION("Check") {
HResult ret;
HFFeatureHubConfiguration configuration = {0};
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
ret = HFFeatureHubViewDBTable();
REQUIRE(ret == HSUCCEED);
ret = HFFeatureHubDataEnable(configuration);
// query
for (auto id : ids) {
HFFaceFeatureIdentity query = {0};
ret = HFFeatureHubGetFaceIdentity(id, &query);
REQUIRE(ret == HSUCCEED);
// Check number of faces
HInt32 count;
ret = HFFeatureHubGetFaceCount(&count);
REQUIRE(ret == HSUCCEED);
REQUIRE(count == features.size());
// Check every face vector
for (size_t i = 0; i < features.size(); i++) {
HFFaceFeatureIdentity identity = {0};
ret = HFFeatureHubGetFaceIdentity(i, &identity);
REQUIRE(ret == HSUCCEED);
REQUIRE(identity.customId == i);
REQUIRE(std::string(identity.tag) == identities[i]);
REQUIRE(identity.feature->size == features[i].size());
}
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
delete[] dbPathStr;
REQUIRE(query.id == id);
}
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
}

View File

@@ -1,7 +1,8 @@
//
// Created by tunm on 2023/10/11.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#if 0
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
@@ -978,3 +979,4 @@ TEST_CASE("test_FeatureBenchmark", "[feature_benchmark]") {
#endif
}
}
#endif

View File

@@ -1,14 +1,15 @@
//
// Created by Tunm-Air13 on 2024/3/20.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include "settings/test_settings.h"
#include "../test_helper/test_help.h"
TEST_CASE("test_HelpTools", "[help_tools]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("Load lfw funneled data") {
#ifdef ISF_ENABLE_USE_LFW_DATA
@@ -19,17 +20,17 @@ TEST_CASE("test_HelpTools", "[help_tools]") {
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
HFFeatureHubConfiguration configuration = {0};
HFFeatureHubConfiguration configuration;
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.persistenceDbPath = dbPathStr;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
if (std::remove(configuration.persistenceDbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
@@ -46,8 +47,8 @@ TEST_CASE("test_HelpTools", "[help_tools]") {
REQUIRE(ret == HSUCCEED);
CHECK(count == numOfNeedImport);
// ret = HF_ViewFaceDBTable(session);
// REQUIRE(ret == HSUCCEED);
// ret = HF_ViewFaceDBTable(session);
// REQUIRE(ret == HSUCCEED);
// Finish
ret = HFReleaseInspireFaceSession(session);
@@ -56,7 +57,7 @@ TEST_CASE("test_HelpTools", "[help_tools]") {
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
delete []dbPathStr;
delete[] dbPathStr;
#else
TEST_PRINT("The test case that uses LFW is not enabled, so it will be skipped.");

View File

@@ -0,0 +1,45 @@
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include <cstdio>
TEST_CASE("test_ImageBitmap", "[image_bitmap]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
HFImageBitmap handle;
HResult ret = HFCreateImageBitmapFromFilePath(GET_DATA("data/bulk/r90.jpg").c_str(), 3, &handle);
REQUIRE(ret == HSUCCEED);
HFImageStream stream;
ret = HFCreateImageStreamFromImageBitmap(handle, HF_CAMERA_ROTATION_90, &stream);
REQUIRE(ret == HSUCCEED);
HFSessionCustomParameter parameter = {0};
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
TEST_ERROR_PRINT("error ret :{}", ret);
REQUIRE(ret == HSUCCEED);
// Extract basic face information from photos
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, stream, &multipleFaceData);
REQUIRE(ret == HSUCCEED);
REQUIRE(multipleFaceData.detectedNum == 1);
auto rect = multipleFaceData.rects[0];
HColor color = {0, 0, 255};
HFImageBitmapDrawRect(handle, rect, color, 2);
HFImageBitmapWriteToFile(handle, "bitmap_draw_test.jpg");
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageStream(stream);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageBitmap(handle);
REQUIRE(ret == HSUCCEED);
}

View File

@@ -0,0 +1,215 @@
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include <cstdio>
uint8_t* ReadNV21File(const char* filepath, size_t* fileSize) {
FILE* fp = fopen(filepath, "rb");
if (!fp) {
if (fileSize)
*fileSize = 0;
return nullptr;
}
fseek(fp, 0, SEEK_END);
size_t size = ftell(fp);
fseek(fp, 0, SEEK_SET);
uint8_t* data = new uint8_t[size];
size_t read_size = fread(data, 1, size, fp);
fclose(fp);
if (read_size != size) {
delete[] data;
if (fileSize)
*fileSize = 0;
return nullptr;
}
if (fileSize)
*fileSize = size;
return data;
}
TEST_CASE("test_ImageProcessRotateNV21", "[image_process]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
HFImageBitmap originBmp;
size_t fileSize;
uint8_t* data = ReadNV21File(GET_DATA("data/bulk/r0_w330_h409_c3.nv21").c_str(), &fileSize);
REQUIRE(data != nullptr);
HFImageData imageData;
imageData.data = data;
imageData.width = 330;
imageData.height = 409;
imageData.rotation = HF_CAMERA_ROTATION_0;
imageData.format = HF_STREAM_YUV_NV21;
HFImageStream stream;
HResult ret = HFCreateImageStream(&imageData, &stream);
REQUIRE(ret == HSUCCEED);
ret = HFCreateImageBitmapFromImageStreamProcess(stream, &originBmp, 1, 1.0f);
REQUIRE(ret == HSUCCEED);
HFImageBitmapData originData;
ret = HFImageBitmapGetData(originBmp, &originData);
REQUIRE(ret == HSUCCEED);
// compare with eps(0~1)
float eps = 0.01;
SECTION("rotate 90") {
size_t fileSize;
uint8_t* r90nv21 = ReadNV21File(GET_DATA("data/bulk/r90_w409_h330_c3.nv21").c_str(), &fileSize);
REQUIRE(r90nv21 != nullptr);
HFImageData imageData;
imageData.data = r90nv21;
imageData.width = 409;
imageData.height = 330;
imageData.rotation = HF_CAMERA_ROTATION_90;
imageData.format = HF_STREAM_YUV_NV21;
HFImageStream stream;
ret = HFCreateImageStream(&imageData, &stream);
REQUIRE(ret == HSUCCEED);
HFImageBitmap rot90;
ret = HFCreateImageBitmapFromImageStreamProcess(stream, &rot90, 1, 1.0f);
REQUIRE(ret == HSUCCEED);
// HFImageBitmapShow(rot90, "w", 0);
HFImageBitmapData rot90Data;
ret = HFImageBitmapGetData(rot90, &rot90Data);
REQUIRE(ret == HSUCCEED);
REQUIRE_EQ_IMAGE_WITH_EPS(originData.data, rot90Data.data, originData.height, originData.width, originData.channels, eps);
ret = HFReleaseImageBitmap(rot90);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageStream(stream);
REQUIRE(ret == HSUCCEED);
delete[] r90nv21;
}
ret = HFReleaseImageStream(stream);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageBitmap(originBmp);
REQUIRE(ret == HSUCCEED);
delete[] data;
}
TEST_CASE("test_ImageProcessRotate", "[image_process]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
HFImageBitmap originBmp;
HResult ret = HFCreateImageBitmapFromFilePath(GET_DATA("data/bulk/r0.jpg").c_str(), 3, &originBmp);
REQUIRE(ret == HSUCCEED);
HFImageBitmapData originData;
ret = HFImageBitmapGetData(originBmp, &originData);
REQUIRE(ret == HSUCCEED);
// compare with eps(0~1)
float eps = 0.001;
SECTION("rotate 90") {
HFImageBitmap bitmap;
HResult ret = HFCreateImageBitmapFromFilePath(GET_DATA("data/bulk/r90.jpg").c_str(), 3, &bitmap);
REQUIRE(ret == HSUCCEED);
HFImageStream stream;
ret = HFCreateImageStreamFromImageBitmap(bitmap, HF_CAMERA_ROTATION_90, &stream);
REQUIRE(ret == HSUCCEED);
HFImageBitmap rot90;
ret = HFCreateImageBitmapFromImageStreamProcess(stream, &rot90, 1, 1.0f);
REQUIRE(ret == HSUCCEED);
HFImageBitmapData rot90Data;
ret = HFImageBitmapGetData(rot90, &rot90Data);
REQUIRE(ret == HSUCCEED);
REQUIRE_EQ_IMAGE_WITH_EPS(originData.data, rot90Data.data, originData.height, originData.width, originData.channels, eps);
ret = HFReleaseImageBitmap(rot90);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageStream(stream);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageBitmap(bitmap);
REQUIRE(ret == HSUCCEED);
}
SECTION("rotate 180") {
HFImageBitmap bitmap;
HResult ret = HFCreateImageBitmapFromFilePath(GET_DATA("data/bulk/r180.jpg").c_str(), 3, &bitmap);
REQUIRE(ret == HSUCCEED);
HFImageStream stream;
ret = HFCreateImageStreamFromImageBitmap(bitmap, HF_CAMERA_ROTATION_180, &stream);
REQUIRE(ret == HSUCCEED);
HFImageBitmap rot180;
ret = HFCreateImageBitmapFromImageStreamProcess(stream, &rot180, 1, 1.0f);
REQUIRE(ret == HSUCCEED);
HFImageBitmapData rot180Data;
ret = HFImageBitmapGetData(rot180, &rot180Data);
REQUIRE(ret == HSUCCEED);
REQUIRE_EQ_IMAGE_WITH_EPS(originData.data, rot180Data.data, originData.height, originData.width, originData.channels, eps);
ret = HFReleaseImageBitmap(rot180);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageStream(stream);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageBitmap(bitmap);
REQUIRE(ret == HSUCCEED);
}
SECTION("rotate 270") {
HFImageBitmap bitmap;
HResult ret = HFCreateImageBitmapFromFilePath(GET_DATA("data/bulk/r270.jpg").c_str(), 3, &bitmap);
REQUIRE(ret == HSUCCEED);
HFImageStream stream;
ret = HFCreateImageStreamFromImageBitmap(bitmap, HF_CAMERA_ROTATION_270, &stream);
REQUIRE(ret == HSUCCEED);
HFImageBitmap rot270;
ret = HFCreateImageBitmapFromImageStreamProcess(stream, &rot270, 1, 1.0f);
REQUIRE(ret == HSUCCEED);
HFImageBitmapData rot270Data;
ret = HFImageBitmapGetData(rot270, &rot270Data);
REQUIRE(ret == HSUCCEED);
REQUIRE_EQ_IMAGE_WITH_EPS(originData.data, rot270Data.data, originData.height, originData.width, originData.channels, eps);
ret = HFReleaseImageBitmap(rot270);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageStream(stream);
REQUIRE(ret == HSUCCEED);
ret = HFReleaseImageBitmap(bitmap);
REQUIRE(ret == HSUCCEED);
}
ret = HFReleaseImageBitmap(originBmp);
REQUIRE(ret == HSUCCEED);
}

View File

@@ -0,0 +1,153 @@
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "unit/test_helper/test_help.h"
#include "inspireface/middleware/thread/resource_pool.h"
#include <thread>
TEST_CASE("test_SessionParallel", "[Session][Parallel]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
auto image1 = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
auto image2 = inspirecv::Image::Create(GET_DATA("data/bulk/jntm.jpg"));
int loop = 100;
// Run it once to make sure the similarity is stable
HFSessionCustomParameter parameter = {0};
parameter.enable_recognition = 1;
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
HResult ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
float expectedSimilarity = GenerateRandomNumbers(1, 0, 100)[0] / 100.0f;
ret = CompareTwoFaces(session, image1, image2, expectedSimilarity);
REQUIRE(ret);
TEST_PRINT("Expected similarity: {}", expectedSimilarity);
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
SECTION("Serial") {
HResult ret;
HFSessionCustomParameter parameter = {0};
parameter.enable_recognition = 1;
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
float similarity = 0.0f;
inspirecv::TimeSpend timeSpend("Serial loop: " + std::to_string(loop));
timeSpend.Start();
for (int i = 0; i < loop; ++i) {
ret = CompareTwoFaces(session, image1, image2, similarity);
REQUIRE(ret);
REQUIRE(similarity == Approx(expectedSimilarity).epsilon(0.01));
}
timeSpend.Stop();
std::cout << timeSpend << std::endl;
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
}
SECTION("Parallel") {
int N = 4; // Use 4 sessions in parallel
#ifdef ISF_RKNPU_RV1106
N = 1; // Use 1 session in parallel
#endif
inspire::parallel::ResourcePool<HFSession> sessionPool(N, [](HFSession& session) {
auto ret = HFReleaseInspireFaceSession(session);
if (ret != HSUCCEED) {
TEST_ERROR_PRINT("Failed to release session: {}", ret);
}
});
// Example Initialize N sessions to the resource pool
for (int i = 0; i < N; ++i) {
HFSessionCustomParameter parameter = {0};
parameter.enable_recognition = 1;
HFSession session;
HResult ret = HFCreateInspireFaceSession(parameter, HF_DETECT_MODE_ALWAYS_DETECT, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
sessionPool.AddResource(std::move(session));
}
// Create a thread pool to execute a task
std::vector<std::thread> threads;
std::atomic<int> completed(0);
float similaritySum = 0.0f;
std::mutex similarityMutex;
inspirecv::TimeSpend timeSpend("Parallel loop: " + std::to_string(loop) + ", thread: " + std::to_string(N));
timeSpend.Start();
// Start worker thread
int tasksPerThread = loop / N;
int remainingTasks = loop % N;
for (int i = 0; i < N; ++i) {
int taskCount = tasksPerThread + (i < remainingTasks ? 1 : 0);
threads.emplace_back([&, taskCount]() {
for (int j = 0; j < taskCount; ++j) {
auto sessionGuard = sessionPool.AcquireResource();
float similarity = 0.0f;
HResult ret = CompareTwoFaces(*sessionGuard, image1, image2, similarity);
REQUIRE(ret);
REQUIRE(similarity == Approx(expectedSimilarity).epsilon(0.01));
{
std::lock_guard<std::mutex> lock(similarityMutex);
similaritySum += similarity;
}
completed++;
}
});
}
// Wait for all threads to complete
for (auto& thread : threads) {
thread.join();
}
timeSpend.Stop();
std::cout << timeSpend << std::endl;
// Optional: Output average similarity(stability)
TEST_PRINT("Average similarity: {}", (similaritySum / loop));
}
}
TEST_CASE("test_SessionParallel_Memory", "[Session][Parallel][Memory]") {
size_t memoryUsage = getCurrentMemoryUsage();
TEST_PRINT("Current memory usage: {}MB", memoryUsage);
int loop = 4;
#ifdef ISF_RKNPU_RV1106
loop = 1;
#endif
std::vector<HFSession> sessions;
for (int i = 0; i < loop; ++i) {
HFSessionCustomParameter parameter = {0};
parameter.enable_recognition = 1;
HFSession session;
HResult ret = HFCreateInspireFaceSession(parameter, HF_DETECT_MODE_ALWAYS_DETECT, 3, -1, -1, &session);
REQUIRE(ret == HSUCCEED);
sessions.push_back(session);
size_t memoryUsage = getCurrentMemoryUsage();
TEST_PRINT("[alloc{}] Current memory usage: {}MB", i + 1, memoryUsage);
}
// Release all sessions
for (int i = 0; i < loop; ++i) {
auto ret = HFReleaseInspireFaceSession(sessions[i]);
REQUIRE(ret == HSUCCEED);
size_t memoryUsage = getCurrentMemoryUsage();
TEST_PRINT("[free{}] Current memory usage: {}MB", i + 1, memoryUsage);
}
}

View File

@@ -0,0 +1,51 @@
/**
* Created by Jingyu Yan
* @date 2025-01-20
*/
#include <iostream>
#include "settings/test_settings.h"
#include "unit/test_helper/test_help.h"
#include "inspireface/recognition_module/similarity_converter.h"
/**
 * SimilarityConverter check: cosine values mapped through the configured
 * sigmoid must land on precomputed reference scores within 1% tolerance.
 */
TEST_CASE("test_similarity_converter", "[similarity_converter]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    // Shared verification: convert every cosine and compare to its reference.
    auto verify = [](inspire::SimilarityConverter& converter, const std::vector<double>& cosines, const std::vector<double>& references) {
        REQUIRE(cosines.size() == references.size());
        for (size_t idx = 0; idx < cosines.size(); ++idx) {
            double mapped = converter.convert(cosines[idx]);
            REQUIRE(mapped == Approx(references[idx]).epsilon(0.01));
        }
    };
    SECTION("test_similarity_converter_0.42") {
        inspire::SimilarityConverterConfig config;
        config.threshold = 0.42;
        config.middleScore = 0.6;
        config.steepness = 8.0;
        config.outputMin = 0.01;
        config.outputMax = 1.0;
        inspire::SimilarityConverter similarity_converter(config);
        verify(similarity_converter, {-0.80, -0.20, 0.02, 0.10, 0.25, 0.30, 0.48, 0.70, 0.80, 0.90, 1.00},
               {0.0101, 0.0201, 0.0661, 0.1113, 0.2819, 0.3673, 0.7074, 0.9334, 0.9689, 0.9858, 0.9936});
    }
    SECTION("test_similarity_converter_0.32") {
        inspire::SimilarityConverterConfig config;
        config.threshold = 0.32;
        config.middleScore = 0.6;
        config.steepness = 10.0;
        config.outputMin = 0.02;
        config.outputMax = 1.0;
        inspire::SimilarityConverter similarity_converter(config);
        verify(similarity_converter, {-0.80, -0.20, 0.02, 0.10, 0.25, 0.32, 0.50, 0.70, 0.80, 0.90, 1.00},
               {0.0200, 0.0278, 0.0860, 0.1557, 0.4302, 0.6000, 0.8997, 0.9851, 0.9945, 0.9980, 0.9992});
    }
}

View File

@@ -1,3 +1,4 @@
#if 0
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
@@ -265,4 +266,6 @@ TEST_CASE("test_SystemStreamReleaseCase", "[system]") {
REQUIRE(ret == HSUCCEED);
REQUIRE(count == 0);
}
}
}
#endif

View File

@@ -0,0 +1,86 @@
#include <iostream>
#include "settings/test_settings.h"
#include "unit/test_helper/help.h"
#include "feature_hub/feature_hub_db.h"
#include "middleware/costman.h"
#include "inspireface/initialization_module/launch.h"
#include "middleware/inspirecv_image_process.h"
#include "inspireface/face_session.h"
#include "inspireface/feature_hub/feature_hub_db.h"
using namespace inspire;
/**
 * End-to-end FaceSession test: detect + track two photos of the same identity,
 * extract features, and require high cosine similarity; then extract a third,
 * different identity and require low similarity against both.
 * Fix: the Catch2 tag string was "[face_session" — missing its closing ']'.
 */
TEST_CASE("test_FaceSession", "[face_session]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    int32_t ret;
    CustomPipelineParameter param;
    param.enable_recognition = true;
    param.enable_liveness = true;
    param.enable_mask_detect = true;
    param.enable_face_attribute = true;
    param.enable_face_quality = true;
    FaceSession session;
    ret = session.Configuration(DetectModuleMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
    REQUIRE(ret == HSUCCEED);
    // Two different images of the same identity.
    inspirecv::Image kun1 = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
    inspirecv::Image kun2 = inspirecv::Image::Create(GET_DATA("data/bulk/jntm.jpg"));
    inspirecv::InspireImageProcess proc1 =
      inspirecv::InspireImageProcess::Create(kun1.Data(), kun1.Height(), kun1.Width(), inspirecv::BGR, inspirecv::ROTATION_0);
    inspirecv::InspireImageProcess proc2 =
      inspirecv::InspireImageProcess::Create(kun2.Data(), kun2.Height(), kun2.Width(), inspirecv::BGR, inspirecv::ROTATION_0);
    std::vector<std::vector<float>> features;
    std::vector<inspirecv::InspireImageProcess> processes = {proc1, proc2};
    for (auto &process : processes) {
        ret = session.FaceDetectAndTrack(process);
        REQUIRE(ret == HSUCCEED);
        if (session.GetDetectCache().size() > 0) {
            FaceBasicData data = session.GetFaceBasicDataCache()[0];
            ret = session.FaceFeatureExtract(process, data);
            REQUIRE(ret == HSUCCEED);
            const auto &faces = session.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            Embedded feature;
            HyperFaceData hyper_face_data = FaceObjectInternalToHyperFaceData(faces[0]);
            float norm;
            ret = session.FaceRecognitionModule()->FaceExtract(process, hyper_face_data, feature, norm);
            REQUIRE(ret == HSUCCEED);
            features.push_back(feature);
        }
    }
    // Same person => similarity above the acceptance threshold.
    REQUIRE(features.size() == 2);
    float res;
    ret = FeatureHubDB::CosineSimilarity(features[0].data(), features[1].data(), features[0].size(), res);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(res > 0.5f);
    // A different identity must score below the threshold against both.
    inspirecv::Image other = inspirecv::Image::Create(GET_DATA("data/bulk/woman.png"));
    inspirecv::InspireImageProcess proc3 =
      inspirecv::InspireImageProcess::Create(other.Data(), other.Height(), other.Width(), inspirecv::BGR, inspirecv::ROTATION_0);
    ret = session.FaceDetectAndTrack(proc3);
    REQUIRE(ret == HSUCCEED);
    if (session.GetDetectCache().size() > 0) {
        FaceBasicData data = session.GetFaceBasicDataCache()[0];
        ret = session.FaceFeatureExtract(proc3, data);
        auto faces = session.GetTrackingFaceList();
        REQUIRE(ret == HSUCCEED);
        Embedded feature;
        HyperFaceData hyper_face_data = FaceObjectInternalToHyperFaceData(faces[0]);
        float norm;
        ret = session.FaceRecognitionModule()->FaceExtract(proc3, hyper_face_data, feature, norm);
        REQUIRE(ret == HSUCCEED);
        features.push_back(feature);
    }
    REQUIRE(features.size() == 3);
    float other_v_kun1, other_v_kun2;
    ret = FeatureHubDB::CosineSimilarity(features[0].data(), features[2].data(), features[0].size(), other_v_kun1);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(other_v_kun1 < 0.5f);
    ret = FeatureHubDB::CosineSimilarity(features[1].data(), features[2].data(), features[0].size(), other_v_kun2);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(other_v_kun2 < 0.5f);
}

View File

@@ -0,0 +1,198 @@
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "unit/test_helper/help.h"
#include "feature_hub/feature_hub_db.h"
#include "middleware/costman.h"
using namespace inspire;
/**
 * FeatureHub (memory mode) basic CRUD: insert with auto-increment ids,
 * remove, query, cached lookup, update, and disable/re-enable semantics.
 * Fix: the Catch2 tag string was "[feature_hub" — missing its closing ']'.
 */
TEST_CASE("test_FeatureHubBasic", "[feature_hub]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    // Enable feature hub
    DatabaseConfiguration config;
    config.primary_key_mode = PrimaryKeyMode::AUTO_INCREMENT;
    config.enable_persistence = false;  // memory mode
    int32_t ret;
    ret = FEATURE_HUB_DB->EnableHub(config);
    REQUIRE(ret == HSUCCEED);
    // Enabling an already-enabled hub must still succeed (idempotent).
    ret = FEATURE_HUB_DB->EnableHub(config);
    REQUIRE(ret == HSUCCEED);
    // Insert 1000 random features; auto-increment ids must start at 1.
    int32_t count = 1000;
    std::vector<int64_t> ids;
    std::vector<int64_t> expected_ids;
    for (int32_t i = 0; i < count; i++) {
        auto vec = GenerateRandomFeature(512, false);
        int64_t alloc_id;
        ret = FEATURE_HUB_DB->FaceFeatureInsert(vec, -1, alloc_id);
        REQUIRE(ret == HSUCCEED);
        ids.push_back(alloc_id);
        expected_ids.push_back(i + 1);
    }
    REQUIRE(FEATURE_HUB_DB->GetFaceFeatureCount() == ids.size());
    REQUIRE(ids == expected_ids);
    // Delete a few ids; the count must shrink accordingly.
    std::vector<int64_t> delete_ids = {5, 20, 100};
    for (auto id : delete_ids) {
        FEATURE_HUB_DB->FaceFeatureRemove(id);
    }
    REQUIRE(FEATURE_HUB_DB->GetFaceFeatureCount() == ids.size() - delete_ids.size());
    // A removed id must no longer be retrievable.
    std::vector<float> feature;
    ret = FEATURE_HUB_DB->GetFaceFeature(5, feature);
    REQUIRE(ret == HERR_FT_HUB_NOT_FOUND_FEATURE);
    // A surviving id must still be retrievable with the expected dimension.
    ret = FEATURE_HUB_DB->GetFaceFeature(1, feature);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(feature.size() == 512);
    // The pointer-cache variant must return the same values as the copy.
    ret = FEATURE_HUB_DB->GetFaceFeature(1);
    REQUIRE(ret == HSUCCEED);
    auto cached_feature = FEATURE_HUB_DB->GetFaceFeaturePtrCache();
    for (size_t i = 0; i < cached_feature->dataSize; i++) {
        REQUIRE(feature[i] == cached_feature->data[i]);
    }
    // Update an existing id with fresh data.
    auto update_feature = GenerateRandomFeature(512, false);
    ret = FEATURE_HUB_DB->FaceFeatureUpdate(update_feature, 1);
    REQUIRE(ret == HSUCCEED);
    // Re-read and confirm the update took effect.
    ret = FEATURE_HUB_DB->GetFaceFeature(1, feature);
    REQUIRE(ret == HSUCCEED);
    for (size_t i = 0; i < feature.size(); i++) {
        REQUIRE(feature[i] == Approx(update_feature[i]).epsilon(0.0001));
    }
    // Updating a removed id must be rejected.
    ret = FEATURE_HUB_DB->FaceFeatureUpdate(update_feature, 5);
    REQUIRE(ret == HERR_FT_HUB_NOT_FOUND_FEATURE);
    // Disable feature hub
    FEATURE_HUB_DB->DisableHub();
    REQUIRE(FEATURE_HUB_DB->GetFaceFeatureCount() == 0);
    // Queries after disabling must report the hub as disabled.
    ret = FEATURE_HUB_DB->GetFaceFeature(1, feature);
    REQUIRE(ret == HERR_FT_HUB_DISABLE);
    ret = FEATURE_HUB_DB->EnableHub(config);
    REQUIRE(ret == HSUCCEED);
    // Because the memory mode is turned on, once the data is turned off, it goes back to empty
    REQUIRE(FEATURE_HUB_DB->GetFaceFeatureCount() == 0);
    ret = FEATURE_HUB_DB->DisableHub();
    REQUIRE(ret == HSUCCEED);
}
/**
 * Benchmarks hub operations in memory mode: bulk insert, id lookup, and
 * top-1 search with a vector simulated to be similar to a known entry.
 * Fix: the Catch2 tag string was "[feature_hub" — missing its closing ']'.
 */
TEST_CASE("test_PerformanceMemoryMode", "[feature_hub]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    DatabaseConfiguration config;
    config.primary_key_mode = PrimaryKeyMode::AUTO_INCREMENT;
    config.enable_persistence = false;  // memory mode
    int32_t ret;
    ret = FEATURE_HUB_DB->EnableHub(config);
    REQUIRE(ret == HSUCCEED);
    Timer t1;
    int num = 10000;
    for (int i = 0; i < num; i++) {
        auto vec = GenerateRandomFeature(512, false);
        int64_t alloc_id;
        ret = FEATURE_HUB_DB->FaceFeatureInsert(vec, -1, alloc_id);
        REQUIRE(ret == HSUCCEED);
    }
    TEST_PRINT("[Memory Mode]Insert 10000 features cost: {:.2f} ms", t1.GetCostTime());
    // Lookup near the front of the table.
    Timer t2;
    std::vector<float> feature;
    ret = FEATURE_HUB_DB->GetFaceFeature(1, feature);
    TEST_PRINT("[Memory Mode]Get feature from id cost: {:.2f} ms", t2.GetCostTime());
    REQUIRE(ret == HSUCCEED);
    // Lookup near the back of the table.
    Timer t3;
    ret = FEATURE_HUB_DB->GetFaceFeature(9998, feature);
    TEST_PRINT("[Memory Mode]Get feature from id cost: {:.2f} ms", t3.GetCostTime());
    REQUIRE(ret == HSUCCEED);
    // Search with a vector similar to id 9998: it must come back as top-1.
    auto sim_vec = SimulateSimilarVector(feature, false);
    FaceSearchResult search_result;
    Timer t4;
    FEATURE_HUB_DB->SearchFaceFeature(sim_vec, search_result, true);
    TEST_PRINT("[Memory Mode]Search feature cost: {:.2f} ms", t4.GetCostTime());
    REQUIRE(search_result.id == 9998);
    ret = FEATURE_HUB_DB->FaceFeatureRemove(9998);
    REQUIRE(ret == HSUCCEED);
    FEATURE_HUB_DB->DisableHub();
}
/**
 * Benchmarks hub operations in persistent (on-disk) mode and verifies that
 * features survive a hub restart.
 * Fix: the Catch2 tag string was "[feature_hub" — missing its closing ']'.
 */
TEST_CASE("test_PerformancePersistentMode", "[feature_hub]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    // Start from a clean database file.
    std::string db_path = ".test_db";
    std::remove(db_path.c_str());
    DatabaseConfiguration config;
    config.primary_key_mode = PrimaryKeyMode::AUTO_INCREMENT;
    config.enable_persistence = true;  // persistent mode
    config.persistence_db_path = db_path;
    int32_t ret;
    ret = FEATURE_HUB_DB->EnableHub(config);
    REQUIRE(ret == HSUCCEED);
    Timer t1;
    int num = 10000;
    for (int i = 0; i < num; i++) {
        auto vec = GenerateRandomFeature(512, false);
        int64_t alloc_id;
        ret = FEATURE_HUB_DB->FaceFeatureInsert(vec, -1, alloc_id);
        REQUIRE(ret == HSUCCEED);
    }
    TEST_PRINT("[Persistent Mode]Insert 10000 features cost: {:.2f} ms", t1.GetCostTime());
    // Lookup near the front of the table.
    Timer t2;
    std::vector<float> feature;
    ret = FEATURE_HUB_DB->GetFaceFeature(1, feature);
    TEST_PRINT("[Persistent Mode]Get feature from id cost: {:.2f} ms", t2.GetCostTime());
    REQUIRE(ret == HSUCCEED);
    // Lookup near the back of the table.
    Timer t3;
    ret = FEATURE_HUB_DB->GetFaceFeature(9998, feature);
    TEST_PRINT("[Persistent Mode]Get feature from id cost: {:.2f} ms", t3.GetCostTime());
    REQUIRE(ret == HSUCCEED);
    // Search with a vector similar to id 9998: it must come back as top-1.
    auto sim_vec = SimulateSimilarVector(feature, false);
    FaceSearchResult search_result;
    Timer t4;
    FEATURE_HUB_DB->SearchFaceFeature(sim_vec, search_result, true);
    TEST_PRINT("[Persistent Mode]Search feature cost: {:.2f} ms", t4.GetCostTime());
    REQUIRE(search_result.id == 9998);
    ret = FEATURE_HUB_DB->FaceFeatureRemove(9998);
    REQUIRE(ret == HSUCCEED);
    auto remark_num = FEATURE_HUB_DB->GetFaceFeatureCount();
    REQUIRE(remark_num == num - 1);
    // Re-enable the hub: the persisted features must survive the restart.
    ret = FEATURE_HUB_DB->EnableHub(config);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(FEATURE_HUB_DB->GetFaceFeatureCount() == remark_num);
    FEATURE_HUB_DB->DisableHub();
}

View File

@@ -0,0 +1,112 @@
#include <iostream>
#include "settings/test_settings.h"
#include "unit/test_helper/help.h"
#include "feature_hub/feature_hub_db.h"
#include "middleware/costman.h"
#include "track_module/face_detect/all.h"
#include "inspireface/initialization_module/launch.h"
#include "track_module/face_track_module.h"
#include "middleware/inspirecv_image_process.h"
using namespace inspire;
/**
 * Runs each supported detector input size (160/320/640) against a
 * single-face image and expects exactly one detection.
 * Fix: the Catch2 tag string was "[track_module" — missing its closing ']'.
 */
TEST_CASE("test_FaceDetect", "[track_module]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    auto archive = INSPIRE_LAUNCH->getMArchive();
    const std::vector<int32_t> supported_sizes = {160, 320, 640};
    const std::vector<std::string> scheme_names = {"face_detect_160", "face_detect_320", "face_detect_640"};
    for (size_t i = 0; i < scheme_names.size(); i++) {
        InspireModel model;
        auto ret = archive.LoadModel(scheme_names[i], model);
        REQUIRE(ret == 0);
        FaceDetectAdapt face_detector(supported_sizes[i]);
        face_detector.loadData(model, model.modelType, false);
        inspirecv::Image img = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
        auto result = face_detector(img);
        REQUIRE(result.size() == 1);
    }
}
/**
 * RNet face/non-face refinement: a cropped face must score above 0.5 and
 * a non-face crop below 0.5.
 * Fix: the Catch2 tag string was "[track_module" — missing its closing ']'.
 */
TEST_CASE("test_RefineNet", "[track_module]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    auto archive = INSPIRE_LAUNCH->getMArchive();
    InspireModel model;
    auto ret = archive.LoadModel("refine_net", model);
    REQUIRE(ret == 0);
    RNetAdapt rnet;
    rnet.loadData(model, model.modelType, false);
    inspirecv::Image face = inspirecv::Image::Create(GET_DATA("data/crop/crop.png"));
    auto result1 = rnet(face);
    REQUIRE(result1 > 0.5f);
    inspirecv::Image no_face = inspirecv::Image::Create(GET_DATA("data/crop/no_face.png"));
    auto result2 = rnet(no_face);
    REQUIRE(result2 < 0.5f);
}
/**
 * Landmark model on a 112px crop must return 106 (x, y) points.
 * Fix: the Catch2 tag string was "[track_module" — missing its closing ']'.
 */
TEST_CASE("test_Landmark", "[track_module]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    auto archive = INSPIRE_LAUNCH->getMArchive();
    InspireModel model;
    auto ret = archive.LoadModel("landmark", model);
    REQUIRE(ret == 0);
    FaceLandmarkAdapt face_landmark(112);
    face_landmark.loadData(model, model.modelType);
    inspirecv::Image img = inspirecv::Image::Create(GET_DATA("data/crop/crop.png"));
    auto result = face_landmark(img);
    // 106 landmarks, flattened as x/y pairs.
    REQUIRE(result.size() == 106 * 2);
}
/**
 * Pose-quality model must yield 5 key landmarks with a quality value each.
 * Fix: the Catch2 tag string was "[track_module" — missing its closing ']'.
 */
TEST_CASE("test_Quality", "[track_module]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    auto archive = INSPIRE_LAUNCH->getMArchive();
    InspireModel model;
    auto ret = archive.LoadModel("pose_quality", model);
    REQUIRE(ret == 0);
    FacePoseQualityAdapt quality;
    ret = quality.loadData(model, model.modelType);
    REQUIRE(ret == 0);
    inspirecv::Image img = inspirecv::Image::Create(GET_DATA("data/crop/crop.png"));
    auto result = quality(img);
    REQUIRE(result.lmk.size() == 5);
    REQUIRE(result.lmk_quality.size() == 5);
}
/**
 * Full track module: one face must be tracked from an upright image and
 * from a 90°-rotated image (stream-level rotation hint).
 * Fix: the Catch2 tag string was "[track_module" — missing its closing ']'.
 */
TEST_CASE("test_FaceTrackModule", "[track_module]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    auto archive = INSPIRE_LAUNCH->getMArchive();
    SECTION("Test face detect rotate 0") {
        auto mode = DetectModuleMode::DETECT_MODE_ALWAYS_DETECT;
        int max_detected_faces = 10;
        FaceTrackModule face_track(mode, max_detected_faces);
        face_track.Configuration(archive);
        inspirecv::Image img = inspirecv::Image::Create(GET_DATA("data/bulk/kun.jpg"));
        inspirecv::InspireImageProcess image = inspirecv::InspireImageProcess::Create(img.Data(), img.Height(), img.Width(), inspirecv::BGR);
        face_track.UpdateStream(image);
        REQUIRE(face_track.trackingFace.size() == 1);
    }
    SECTION("Test face detect rotate 90") {
        auto mode = DetectModuleMode::DETECT_MODE_ALWAYS_DETECT;
        int max_detected_faces = 10;
        FaceTrackModule face_track(mode, max_detected_faces);
        face_track.Configuration(archive);
        inspirecv::Image img = inspirecv::Image::Create(GET_DATA("data/bulk/r90.jpg"));
        inspirecv::InspireImageProcess image =
          inspirecv::InspireImageProcess::Create(img.Data(), img.Height(), img.Width(), inspirecv::BGR, inspirecv::ROTATION_90);
        face_track.UpdateStream(image);
        REQUIRE(face_track.trackingFace.size() == 1);
    }
}

View File

@@ -1,75 +0,0 @@
//
// Created by Tunm-Air13 on 2024/2/2.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "common/face_data/data_tools.h"
#include "../test_helper/test_tools.h"
#include "herror.h"
using namespace inspire;
/**
 * CameraStream decoding: rotated BGR images and an NV21 buffer must each
 * yield exactly one tracked face.
 * Fixes: the Catch2 tag string was "[camera_stream" — missing its closing
 * ']'; the rotation loop index is now size_t to avoid a signed/unsigned
 * comparison with rotate_list.size().
 */
TEST_CASE("test_CameraStream", "[camera_stream]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("DecodingRotatedImages") {
        FaceContext ctx;
        CustomPipelineParameter param;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
        REQUIRE(ret == HSUCCEED);
        std::vector<std::string> rotated_filename_list = {
          getTestData("images/rotate/rot_0.jpg"),
          getTestData("images/rotate/rot_90.jpg"),
          getTestData("images/rotate/rot_180.jpg"),
          getTestData("images/rotate/rot_270.jpg"),
        };
        std::vector<ROTATION_MODE> rotate_list = {ROTATION_0, ROTATION_90, ROTATION_180, ROTATION_270};
        CHECK(rotate_list.size() == rotated_filename_list.size());
        for (size_t i = 0; i < rotate_list.size(); ++i) {
            cv::Mat image = cv::imread(rotated_filename_list[i]);
            REQUIRE(!image.empty());
            auto rotated = rotate_list[i];
            CameraStream stream;
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(rotated);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            const auto &faces = ctx.GetTrackingFaceList();
            CHECK(faces.size() == 1);
        }
    }
    SECTION("DecodingNV21Image") {
        FaceContext ctx;
        CustomPipelineParameter param;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
        REQUIRE(ret == HSUCCEED);
        int32_t width = 402;
        int32_t height = 324;
        auto rotated = ROTATION_90;
        auto format = NV21;
        // NOTE(review): ReadNV21Data presumably heap-allocates the buffer and
        // it is never freed here — confirm ownership and free if applicable.
        auto nv21 = ReadNV21Data(getTestData("images/rotate/rot_90_324x402.nv21").c_str(), width, height);
        REQUIRE(nv21 != nullptr);
        CameraStream stream;
        stream.SetDataBuffer(nv21, height, width);
        stream.SetDataFormat(format);
        stream.SetRotationMode(rotated);
        ret = ctx.FaceDetectAndTrack(stream);
        REQUIRE(ret == HSUCCEED);
        const auto &faces = ctx.GetTrackingFaceList();
        CHECK(faces.size() == 1);
    }
}

View File

@@ -1,50 +0,0 @@
//
// Created by tunm on 2023/9/16.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
using namespace inspire;
/**
 * Tracking benchmark: run light-track detection 1000 times over the same
 * frame and report total/average latency.
 * Fix: inside the loop, the original discarded FaceDetectAndTrack's return
 * value, so REQUIRE(ret == HSUCCEED) re-checked the stale Configuration()
 * result; the status of each tracking call is now captured and checked.
 */
TEST_CASE("test_FaceDetectTrack", "[face_track]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("TrackBenchmark") {
        // Initialize
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_face_quality = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_LIGHT_TRACK, 1, param);
        REQUIRE(ret == HSUCCEED);
        // Prepare a picture of a face
        auto image = cv::imread(GET_DATA("images/face_sample.png"));
        REQUIRE(!image.empty());  // fail fast if the asset is missing
        CameraStream stream;
        stream.SetDataFormat(BGR);
        stream.SetRotationMode(ROTATION_0);
        stream.SetDataBuffer(image.data, image.rows, image.cols);
        const auto loop = 1000;
        double total = 0.0f;
        spdlog::info("begin {} times tracking: ", loop);
        auto out = (double)cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            auto timeStart = (double)cv::getTickCount();
            // Face detection — capture the status of THIS call (bug fix).
            ret = ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            double cost = ((double)cv::getTickCount() - timeStart) / cv::getTickFrequency() * 1000;
            REQUIRE(ret == HSUCCEED);
            REQUIRE(faces.size() > 0);
            total += cost;
        }
        auto end = ((double)cv::getTickCount() - out) / cv::getTickFrequency() * 1000;
        spdlog::info("[Face Tracking]{} times, Total cost: {}ms, Average cost: {}ms", loop, end, total / loop);
    }
}

View File

@@ -1,55 +0,0 @@
//
// Created by tunm on 2023/9/17.
//
#include "settings/test_settings.h"
#include "inspireface/common/face_data/data_tools.h"
#include "herror.h"
#include "inspireface/face_context.h"
using namespace inspire;
/**
 * HyperFaceData round-trip: detect a face, convert the tracked object to
 * HyperFaceData, serialize it, deserialize it, and print both sides.
 * Fix: the original discarded FaceDetectAndTrack's return value, so
 * REQUIRE(ret == HSUCCEED) re-checked the stale Configuration() result;
 * the status of the tracking call is now captured and checked.
 */
TEST_CASE("test_FaceData", "[face_data]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("DataConversion") {
        // Initialize
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_face_quality = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
        REQUIRE(ret == HSUCCEED);
        // Prepare a picture of a face
        auto image = cv::imread(GET_DATA("images/face_sample.png"));
        REQUIRE(!image.empty());  // fail fast if the asset is missing
        CameraStream stream;
        stream.SetDataFormat(BGR);
        stream.SetRotationMode(ROTATION_0);
        stream.SetDataBuffer(image.data, image.rows, image.cols);
        ret = ctx.FaceDetectAndTrack(stream);  // capture the status (bug fix)
        auto &faces = ctx.GetTrackingFaceList();
        REQUIRE(ret == HSUCCEED);
        REQUIRE(faces.size() > 0);
        HyperFaceData faceData = FaceObjectToHyperFaceData(faces[0], 0);
        std::cout << faces[0].getTransMatrix() << std::endl;
        PrintHyperFaceData(faceData);
        ByteArray byteArray;
        // NOTE(review): sizeof(byteArray) is the container's static size, not
        // the serialized payload length — both logs print the same value.
        INSPIRE_LOGD("sizeof: %lu", sizeof(byteArray));
        ret = SerializeHyperFaceData(faceData, byteArray);
        CHECK(ret == HSUCCEED);
        INSPIRE_LOGD("sizeof: %lu", sizeof(byteArray));
        HyperFaceData decode;
        ret = DeserializeHyperFaceData(byteArray, decode);
        CHECK(ret == HSUCCEED);
        PrintHyperFaceData(decode);
    }
}

View File

@@ -1,11 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/12.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
#include "../test_helper/test_help.h"
#include "feature_hub/feature_hub.h"
using namespace inspire;

View File

@@ -1,112 +0,0 @@
//
// Created by tunm on 2023/9/13.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
using namespace inspire;
/**
 * Pipeline extension tests: mask detection and RGB liveness on prepared
 * positive/negative images.
 * Fixes: the Catch2 tag string was "[face_pipe" — missing its closing ']';
 * SECTION("FaceContextLiveness") was accidentally nested INSIDE
 * SECTION("FaceContextMaskPredict") (misplaced brace), so it only ran as
 * part of the mask section — it is now a sibling section.
 */
TEST_CASE("test_FacePipeline", "[face_pipe]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("FaceContextInit") {
        FaceContext ctx;
        CustomPipelineParameter param;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
        REQUIRE(ret == HSUCCEED);
    }
    SECTION("FaceContextMaskPredict") {
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_mask_detect = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
        REQUIRE(ret == HSUCCEED);
        {
            // Prepare a photo of your face without a mask
            auto image = cv::imread(GET_DATA("images/kun.jpg"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.maskInfo == MaskInfo::UNMASKED);
        }
        {
            // Prepare a face picture with a mask in advance
            auto image = cv::imread(GET_DATA("images/mask.png"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.maskInfo == MaskInfo::MASKED);
        }
    }
    SECTION("FaceContextLiveness") {
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_liveness = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
        REQUIRE(ret == HSUCCEED);
        {
            // Prepare realistic face images
            auto image = cv::imread(GET_DATA("images/face_sample.png"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.rgbLivenessInfo == RGBLivenessInfo::LIVENESS_REAL);
        }
        {
            // Prepare a fake photo that wasn't actually taken
            auto image = cv::imread(GET_DATA("images/rgb_fake.jpg"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.rgbLivenessInfo == RGBLivenessInfo::LIVENESS_FAKE);
        }
    }
}

View File

@@ -1,87 +0,0 @@
//
// Created by Tunm-Air13 on 2023/9/12.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
#include "common/face_data/data_tools.h"
#include "feature_hub/feature_hub.h"
using namespace inspire;
// Recognition module tests: init, extraction rejection when disabled,
// and a 1v1 comparison of two images of the same identity.
TEST_CASE("test_FaceRecognition", "[face_rec]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
// Context configures successfully with recognition enabled.
SECTION("FaceContextInit") {
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = true;
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
REQUIRE(ret == HSUCCEED);
}
// With recognition disabled, FaceExtract must fail with
// HERR_SESS_REC_EXTRACT_FAILURE even though a face is tracked.
SECTION("FaceRecognitionOption") {
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = false; // Disable the face recognition function
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
REQUIRE(ret == HSUCCEED);
auto image = cv::imread(GET_DATA("images/cxk.jpg"));
CameraStream stream;
stream.SetDataFormat(BGR);
stream.SetRotationMode(ROTATION_0);
stream.SetDataBuffer(image.data, image.rows, image.cols);
ret = ctx.FaceDetectAndTrack(stream);
REQUIRE(ret == HSUCCEED);
// Second detection pass on the same frame (same pattern as the other
// context tests here) before reading the tracking list.
ctx.FaceDetectAndTrack(stream);
const auto &faces = ctx.GetTrackingFaceList();
REQUIRE(faces.size() > 0);
Embedded feature;
ret = ctx.FaceRecognitionModule()->FaceExtract(stream, faces[0], feature);
CHECK(ret == HERR_SESS_REC_EXTRACT_FAILURE);
}
// 1v1: extract features from two images of the same identity and expect
// a cosine similarity matching the recorded reference score.
SECTION("FaceRecognition1v1") {
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = true;
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
REQUIRE(ret == HSUCCEED);
std::vector<std::string> list = {
GET_DATA("images/kun.jpg"),
GET_DATA("images/Kunkun.jpg"),
};
EmbeddedList vectors;
for (int i = 0; i < 2; ++i) {
auto image = cv::imread(list[i]);
REQUIRE(!image.empty());
CameraStream stream;
stream.SetDataFormat(BGR);
stream.SetRotationMode(ROTATION_0);
stream.SetDataBuffer(image.data, image.rows, image.cols);
ret = ctx.FaceDetectAndTrack(stream);
REQUIRE(ret == HSUCCEED);
ctx.FaceDetectAndTrack(stream);
const auto &faces = ctx.GetTrackingFaceList();
REQUIRE(faces.size() > 0);
Embedded feature;
HyperFaceData data = FaceObjectToHyperFaceData(faces[0]);
ret = ctx.FaceRecognitionModule()->FaceExtract(stream, data, feature);
REQUIRE(ret == HSUCCEED);
vectors.push_back(feature);
}
float score;
ret = FEATURE_HUB->CosineSimilarity(vectors[1], vectors[0], score);
REQUIRE(ret == HSUCCEED);
// spdlog::info("score: {}", score);
// Reference similarity recorded for this image pair; 1e-2 tolerance.
CHECK(0.7623623013 == Approx(score).epsilon(1e-2));
}
}

View File

@@ -0,0 +1,3 @@
#include "test_tools.h"
#include "test_help.h"
#include "simple_csv_writer.h"

View File

@@ -1,6 +1,7 @@
//
// Created by Tunm-Air13 on 2024/3/22.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#ifndef INSPIREFACE_SIMPLE_CSV_WRITER_H
#define INSPIREFACE_SIMPLE_CSV_WRITER_H
@@ -9,7 +10,7 @@
#include <vector>
#include <string>
#include <iostream>
#include <iomanip> // Used to set the output format
#include <iomanip> // Used to set the output format
class SimpleCSVWriter {
public:
@@ -28,39 +29,39 @@ public:
this->filepath = filepath;
}
virtual ~SimpleCSVWriter() {} // Add a virtual destructor to ensure correct destructor behavior
virtual ~SimpleCSVWriter() {} // Add a virtual destructor to ensure correct destructor behavior
protected:
std::string filepath;
void insertData(const std::vector<std::string>& data) {
std::ofstream file(this->filepath, std::ios_base::app); // Open the file in append mode
std::ofstream file(this->filepath, std::ios_base::app); // Open the file in append mode
if (!file.is_open()) {
std::cerr << "Failed to open file: " << this->filepath << std::endl;
return;
}
for (const auto& datum : data) {
file << datum;
if (&datum != &data.back()) { // If it is not the last element, add a comma separation
if (&datum != &data.back()) { // If it is not the last element, add a comma separation
file << ",";
}
}
file << "\n"; // Add a newline character after each inserted row of data
file << "\n"; // Add a newline character after each inserted row of data
file.close();
}
};
class BenchmarkRecord : public SimpleCSVWriter {
public:
BenchmarkRecord(const std::string& filepath, const std::string &name = "Benchmark") : SimpleCSVWriter(filepath) {
BenchmarkRecord(const std::string& filepath, const std::string& name = "Benchmark") : SimpleCSVWriter(filepath) {
std::ifstream file(this->filepath);
if (file.peek() == std::ifstream::traits_type::eof()) { // If the file is empty, insert header data
if (file.peek() == std::ifstream::traits_type::eof()) { // If the file is empty, insert header data
std::vector<std::string> header = {name, "Loops", "Total Time(ms)", "Average Time(ms)"};
SimpleCSVWriter::insertData(header);
}
}
void insertBenchmarkData(const std::string &caseName, int loops, double totalCost, double avgCost) {
void insertBenchmarkData(const std::string& caseName, int loops, double totalCost, double avgCost) {
std::ofstream file(this->filepath, std::ios_base::app);
if (!file.is_open()) {
std::cerr << "Failed to open file: " << this->filepath << std::endl;
@@ -73,18 +74,17 @@ public:
}
};
class EvaluationRecord : public SimpleCSVWriter {
public:
EvaluationRecord(const std::string& filepath) : SimpleCSVWriter(filepath) {
std::ifstream file(this->filepath);
if (file.peek() == std::ifstream::traits_type::eof()) { // If the file is empty, insert header data
if (file.peek() == std::ifstream::traits_type::eof()) { // If the file is empty, insert header data
std::vector<std::string> header = {"Resource Version", "Dataset", "Accuracy", "Best Threshold"};
SimpleCSVWriter::insertData(header);
}
}
void insertEvaluationData(const std::string &modelName, const std::string &dataset, double accuracy, double bestThreshold) {
void insertEvaluationData(const std::string& modelName, const std::string& dataset, double accuracy, double bestThreshold) {
std::ofstream file(this->filepath, std::ios_base::app);
if (!file.is_open()) {
std::cerr << "Failed to open file: " << this->filepath << std::endl;
@@ -97,5 +97,4 @@ public:
}
};
#endif //INSPIREFACE_SIMPLE_CSV_WRITER_H
#endif // INSPIREFACE_SIMPLE_CSV_WRITER_H

View File

@@ -1,9 +1,10 @@
//
// Created by Tunm-Air13 on 2023/9/12.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#ifndef HYPERFACEREPO_TEST_HELP_H
#define HYPERFACEREPO_TEST_HELP_H
#ifndef INSPIREFACE_TEST_TEST_HELP_H
#define INSPIREFACE_TEST_TEST_HELP_H
#include <iostream>
#include <vector>
@@ -15,10 +16,17 @@
#include <indicators/block_progress_bar.hpp>
#include <indicators/cursor_control.hpp>
#include "inspireface/c_api/inspireface.h"
#include "opencv2/opencv.hpp"
#include <inspirecv/inspirecv.h>
#include <iomanip>
#include "test_tools.h"
#include <random>
#include <fstream>
#ifdef _WIN32
#include <windows.h>
#include <psapi.h>
#else
#include <sys/resource.h>
#endif
using namespace indicators;
@@ -29,9 +37,9 @@ inline void Split(const std::string& src, std::vector<std::string>& res, const s
size_t Start = 0;
size_t end = 0;
std::string sub;
while(Start < src.size()) {
while (Start < src.size()) {
end = src.find_first_of(pattern, Start);
if(std::string::npos == end || res.size() >= maxsplit) {
if (std::string::npos == end || res.size() >= maxsplit) {
sub = src.substr(Start);
res.push_back(sub);
return;
@@ -50,20 +58,20 @@ inline std::vector<std::string> Split(const std::string& src, const std::string&
}
inline bool EndsWith(const std::string& str, const std::string& suffix) {
if(suffix.length() > str.length()) {
if (suffix.length() > str.length()) {
return false;
}
return 0 == str.compare(str.length() - suffix.length(), suffix.length(), suffix);
return 0 == str.compare(str.length() - suffix.length(), suffix.length(), suffix);
}
inline std::string PathJoin(const std::string& path1, const std::string& path2) {
if(EndsWith(path1, "/")) {
if (EndsWith(path1, "/")) {
return path1 + path2;
}
return path1 + "/" + path2;
}
inline FaceImageDataList LoadLFWFunneledValidData(const std::string &dir, const std::string &txtPath){
inline FaceImageDataList LoadLFWFunneledValidData(const std::string& dir, const std::string& txtPath) {
FaceImageDataList list;
std::ifstream file(txtPath);
std::string line;
@@ -87,14 +95,12 @@ inline bool ImportLFWFunneledValidData(HFSession handle, FaceImageDataList& data
std::string title = "Import " + std::to_string(importNum) + " face data...";
// Hide cursor
show_console_cursor(false);
BlockProgressBar bar{
option::BarWidth{60},
option::Start{"["},
option::End{"]"},
option::PostfixText{title},
option::ForegroundColor{Color::white} ,
option::FontStyles{std::vector<FontStyle>{FontStyle::bold}}
};
BlockProgressBar bar{option::BarWidth{60},
option::Start{"["},
option::End{"]"},
option::PostfixText{title},
option::ForegroundColor{Color::white},
option::FontStyles{std::vector<FontStyle>{FontStyle::bold}}};
auto progress = 0.0f;
for (size_t i = 0; i < importNum; ++i) {
@@ -102,17 +108,17 @@ inline bool ImportLFWFunneledValidData(HFSession handle, FaceImageDataList& data
size_t index = i % dataSize;
// Data processing
auto item = data[index];
cv::Mat image = cv::imread(item.second);
inspirecv::Image image = inspirecv::Image::Create(item.second);
HFImageData imageData = {0};
imageData.data = image.data;
imageData.height = image.rows;
imageData.width = image.cols;
imageData.data = (uint8_t*)image.Data();
imageData.height = image.Height();
imageData.width = image.Width();
imageData.format = HF_STREAM_BGR;
imageData.rotation = HF_CAMERA_ROTATION_0;
HFImageStream imgHandle;
auto ret = HFCreateImageStream(&imageData, &imgHandle);
if (ret != HSUCCEED || image.empty()) {
std::cerr << "Error image: " << std::to_string(ret) << " , " << item.second << std::endl;
if (ret != HSUCCEED || image.Empty()) {
std::cerr << "Error image: " << std::to_string(ret) << " , " << item.second << std::endl;
return false;
}
// Face tracked
@@ -120,7 +126,7 @@ inline bool ImportLFWFunneledValidData(HFSession handle, FaceImageDataList& data
ret = HFExecuteFaceTrack(handle, imgHandle, &multipleFaceData);
if (ret != HSUCCEED) {
std::cerr << "Error Track: " << std::to_string(ret) << " , " << item.second << std::endl;
std::cerr << "Error Track: " << std::to_string(ret) << " , " << item.second << std::endl;
return false;
}
@@ -133,18 +139,19 @@ inline bool ImportLFWFunneledValidData(HFSession handle, FaceImageDataList& data
HFFaceFeature feature = {0};
ret = HFFaceFeatureExtract(handle, imgHandle, multipleFaceData.tokens[0], &feature);
if (ret != HSUCCEED) {
std::cerr << "Error extract: " << std::to_string(ret) << " , " << item.second << std::endl;
std::cerr << "Error extract: " << std::to_string(ret) << " , " << item.second << std::endl;
return false;
}
char *newTagName = new char[item.first.size() + 1];
char* newTagName = new char[item.first.size() + 1];
std::strcpy(newTagName, item.first.c_str());
HFFaceFeatureIdentity identity = {0};
identity.customId = i;
identity.tag = newTagName;
identity.id = i;
// identity.tag = newTagName;
identity.feature = &feature;
ret = HFFeatureHubInsertFeature(identity);
HFaceId allocId;
ret = HFFeatureHubInsertFeature(identity, &allocId);
if (ret != HSUCCEED) {
std::cerr << "Error insert feature: " << std::to_string(ret) << " , " << item.second << std::endl;
std::cerr << "Error insert feature: " << std::to_string(ret) << " , " << item.second << std::endl;
return false;
}
@@ -156,7 +163,7 @@ inline bool ImportLFWFunneledValidData(HFSession handle, FaceImageDataList& data
bar.set_progress(100.0f);
// Show cursor
show_console_cursor(true);
std::cout << "\033[0m\n"; // ANSI resets the color code
std::cout << "\033[0m\n"; // ANSI resets the color code
return true;
}
@@ -192,14 +199,14 @@ inline std::vector<std::string> generateFilenames(const std::string& templateStr
return filenames;
}
inline bool FindMostSimilarScoreFromTwoPic(HFSession handle, const std::string& img1, const std::string& img2, float& mostSimilar){
inline bool FindMostSimilarScoreFromTwoPic(HFSession handle, const std::string& img1, const std::string& img2, float& mostSimilar) {
mostSimilar = -1.0f;
std::vector<std::vector<std::vector<float>>> features(2);
std::vector<std::string> images = {img1, img2};
for (int i = 0; i < 2; ++i) {
HFImageStream img;
// auto ret = ReadImageToImageStream(images[i].c_str(), img);
auto cvMat = cv::imread(images[i]);
// auto ret = ReadImageToImageStream(images[i].c_str(), img);
auto cvMat = inspirecv::Image::Create(images[i]);
auto ret = CVImageToImageStream(cvMat, img);
if (ret != 0) {
std::cerr << "Image is not found: " << ret << std::endl;
@@ -223,18 +230,28 @@ inline bool FindMostSimilarScoreFromTwoPic(HFSession handle, const std::string&
return false;
}
features[i].push_back(feature);
float qu;
HFFaceQualityDetect(handle, multipleFaceData.tokens[j], &qu);
std::ofstream file("tem.csv", std::ios::app);
if (file.is_open()) {
file << qu << ",\n";
file.close();
} else {
std::cerr << "Failed to open file tem.csv" << std::endl;
}
}
HFReleaseImageStream(img);
}
if (features[0].empty() || features[1].empty()) {
// std::cerr << "Not detected " << std::endl;
// std::cerr << "Not detected " << std::endl;
return false;
}
auto &features1 = features[0];
auto &features2 = features[1];
for (auto &feat1: features1) {
for (auto &feat2: features2) {
auto& features1 = features[0];
auto& features2 = features[1];
for (auto& feat1 : features1) {
for (auto& feat2 : features2) {
float comp;
HFFaceFeature faceFeature1 = {0};
faceFeature1.size = feat1.size();
@@ -255,15 +272,15 @@ inline bool FindMostSimilarScoreFromTwoPic(HFSession handle, const std::string&
inline std::vector<std::vector<std::string>> ReadPairs(const std::string& pairs_filename) {
std::vector<std::vector<std::string>> pairs;
std::ifstream file(pairs_filename); // Open the file
std::ifstream file(pairs_filename); // Open the file
std::string line;
if (!file.is_open()) {
std::cerr << "Unable to open file: " << pairs_filename << std::endl;
return pairs; // If the file cannot be opened, an empty list is returned
return pairs; // If the file cannot be opened, an empty list is returned
}
std::getline(file, line); // Skip the first line
std::getline(file, line); // Skip the first line
while (std::getline(file, line)) {
std::istringstream iss(line);
std::vector<std::string> pair;
@@ -321,7 +338,7 @@ inline std::pair<float, float> FindBestThreshold(const std::vector<float>& simil
}
/** Generate random eigenvectors of the specified length */
inline std::vector<float> GenerateRandomFeature(size_t length) {
inline std::vector<float> GenerateRandomFeature(size_t length, bool normalize = true) {
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_real_distribution<> dis(-1.0, 1.0);
@@ -329,15 +346,15 @@ inline std::vector<float> GenerateRandomFeature(size_t length) {
std::vector<float> featureVector(length);
float norm = 0.0;
for (float &value : featureVector) {
for (float& value : featureVector) {
value = dis(gen);
norm += value * value;
}
norm = std::sqrt(norm);
if (norm > 0) {
for (float &value : featureVector) {
if (norm > 0 && normalize) {
for (float& value : featureVector) {
value /= norm;
}
}
@@ -345,7 +362,7 @@ inline std::vector<float> GenerateRandomFeature(size_t length) {
return featureVector;
}
inline std::vector<float> SimulateSimilarVector(const std::vector<float>& original) {
inline std::vector<float> SimulateSimilarVector(const std::vector<float>& original, bool normalize = true) {
std::vector<float> similar(original.size());
std::random_device rd;
std::mt19937 gen(rd());
@@ -360,7 +377,7 @@ inline std::vector<float> SimulateSimilarVector(const std::vector<float>& origin
norm += value * value;
}
norm = std::sqrt(norm);
if (norm > 0) {
if (norm > 0 && normalize) {
for (auto& value : similar) {
value /= norm;
}
@@ -384,5 +401,108 @@ inline std::vector<int> GenerateRandomNumbers(int n, int min, int max) {
return numbers;
}
// Replace everything from the last '.' onward with newExtension.
// newExtension is appended verbatim, so callers that want a dot in the
// result must include it (e.g. ".jpg"). If the path contains no dot,
// it is returned unchanged.
inline std::string ReplaceFileExtension(const std::string& filePath, const std::string& newExtension) {
    size_t lastDotPos = filePath.find_last_of(".");
    if (lastDotPos == std::string::npos) {
        // No dot found: nothing to replace, return the original path
        return filePath;
    }
    return filePath.substr(0, lastDotPos) + newExtension;
}
inline bool CompareTwoFaces(HFSession handle, const inspirecv::Image& img1, const inspirecv::Image& img2, float& similarity) {
HFImageStream img;
auto ret = CVImageToImageStream(img1, img);
if (ret != 0) {
std::cerr << "Image is not found: " << ret << std::endl;
return false;
}
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(handle, img, &multipleFaceData);
if (ret != 0) {
std::cerr << "Error track: " << ret << std::endl;
HFReleaseImageStream(img);
return false;
}
if (multipleFaceData.detectedNum == 0) {
std::cerr << "No face detected in first image" << std::endl;
HFReleaseImageStream(img);
return false;
}
// Extract features from first image
HInt32 featureLength;
HFGetFeatureLength(&featureLength);
std::vector<float> feature1(featureLength);
ret = HFFaceFeatureExtractCpy(handle, img, multipleFaceData.tokens[0], feature1.data());
if (ret != HSUCCEED) {
std::cerr << "Feature extraction failed for first image" << std::endl;
HFReleaseImageStream(img);
return false;
}
HFReleaseImageStream(img);
// Process second image
ret = CVImageToImageStream(img2, img);
if (ret != HSUCCEED) {
std::cerr << "Second image not found" << std::endl;
return false;
}
ret = HFExecuteFaceTrack(handle, img, &multipleFaceData);
if (ret != HSUCCEED) {
std::cerr << "Face tracking failed on second image" << std::endl;
HFReleaseImageStream(img);
return false;
}
if (multipleFaceData.detectedNum == 0) {
std::cerr << "No face detected in second image" << std::endl;
HFReleaseImageStream(img);
return false;
}
// Extract features from second image
std::vector<float> feature2(featureLength);
ret = HFFaceFeatureExtractCpy(handle, img, multipleFaceData.tokens[0], feature2.data());
if (ret != HSUCCEED) {
std::cerr << "Feature extraction failed for second image" << std::endl;
HFReleaseImageStream(img);
return false;
}
HFReleaseImageStream(img);
// Compare features
HFFaceFeature faceFeature1 = {0};
faceFeature1.data = feature1.data();
faceFeature1.size = feature1.size();
HFFaceFeature faceFeature2 = {0};
faceFeature2.data = feature2.data();
faceFeature2.size = feature2.size();
ret = HFFaceComparison(faceFeature1, faceFeature2, &similarity);
if (ret != HSUCCEED) {
std::cerr << "Face comparison failed" << std::endl;
return false;
}
return true;
}
// Get the current memory usage of this process in MB.
// NOTE(review): on Windows this is the instantaneous working set; on POSIX it
// is ru_maxrss, a peak (high-water mark) resident set size, not the current
// value — confirm callers only need an upper bound.
inline size_t getCurrentMemoryUsage() {
#ifdef _WIN32
    PROCESS_MEMORY_COUNTERS_EX pmc;
    GetProcessMemoryInfo(GetCurrentProcess(), (PROCESS_MEMORY_COUNTERS*)&pmc, sizeof(pmc));
    return pmc.WorkingSetSize / (1024 * 1024);  // Convert bytes to MB
#elif defined(__APPLE__)
    struct rusage rusage;
    getrusage(RUSAGE_SELF, &rusage);
    // macOS reports ru_maxrss in BYTES (see getrusage(2) on Darwin),
    // unlike Linux which reports KB — divide accordingly.
    return (size_t)rusage.ru_maxrss / (1024 * 1024);
#else
    struct rusage rusage;
    getrusage(RUSAGE_SELF, &rusage);
    return (size_t)rusage.ru_maxrss / 1024;  // Linux: ru_maxrss is in KB, convert to MB
#endif
}
#endif // INSPIREFACE_TEST_TEST_HELP_H

View File

@@ -1,43 +1,25 @@
//
// Created by tunm on 2023/10/12.
//
/**
* Created by Jingyu Yan
* @date 2024-10-01
*/
#pragma
#ifndef HYPERFACEREPO_TEST_TOOLS_H
#define HYPERFACEREPO_TEST_TOOLS_H
#ifndef INSPIREFACE_TEST_TOOLS_H
#define INSPIREFACE_TEST_TOOLS_H
#include "opencv2/opencv.hpp"
#include "inspireface/c_api/inspireface.h"
#include <fstream>
#include <cstdint> // For uint8_t
#include <cstdint> // For uint8_t
#include <inspirecv/inspirecv.h>
// Bad function
// Decode the image at `path` with OpenCV and wrap it in a new HFImageStream
// written to `handle`. Returns -1 when the file cannot be decoded, otherwise
// the status of HFCreateImageStream.
inline HResult ReadImageToImageStream(const char *path, HFImageStream &handle, HFImageFormat format = HF_STREAM_BGR,
                                      HFRotation rot = HF_CAMERA_ROTATION_0) {
    cv::Mat decoded = cv::imread(path);
    if (decoded.empty()) {
        return -1;
    }
    // Describe the pixel buffer for the C API; the stream borrows the data
    HFImageData raw = {0};
    raw.data = decoded.data;
    raw.width = decoded.cols;
    raw.height = decoded.rows;
    raw.format = format;
    raw.rotation = rot;
    return HFCreateImageStream(&raw, &handle);
}
inline HResult CVImageToImageStream(const cv::Mat& image, HFImageStream &handle, HFImageFormat format = HF_STREAM_BGR,
inline HResult CVImageToImageStream(const inspirecv::Image &image, HFImageStream &handle, HFImageFormat format = HF_STREAM_BGR,
HFRotation rot = HF_CAMERA_ROTATION_0) {
if (image.empty()) {
if (image.Empty()) {
return -1;
}
HFImageData imageData = {0};
imageData.data = image.data;
imageData.height = image.rows;
imageData.width = image.cols;
imageData.data = (uint8_t *)image.Data();
imageData.height = image.Height();
imageData.width = image.Width();
imageData.format = format;
imageData.rotation = rot;
@@ -46,9 +28,8 @@ inline HResult CVImageToImageStream(const cv::Mat& image, HFImageStream &handle,
return ret;
}
inline uint8_t *ReadNV21Data(const char *filePath, int width, int height) {
const int nv21Size = width * height * 3 / 2; // Calculate the NV21 data size
const int nv21Size = width * height * 3 / 2; // Calculate the NV21 data size
// Memory is allocated dynamically to store NV21 data
uint8_t *nv21Data = new uint8_t[nv21Size];
@@ -76,4 +57,4 @@ inline uint8_t *ReadNV21Data(const char *filePath, int width, int height) {
return nv21Data;
}
#endif //HYPERFACEREPO_TEST_TOOLS_H
#endif // INSPIREFACE_TEST_TOOLS_H