Add the inspireface project to cpp-package.

This commit is contained in:
JingyuYan
2024-05-02 01:27:29 +08:00
parent e90dacb3cf
commit 08d7e96f79
431 changed files with 370534 additions and 0 deletions

View File

@@ -0,0 +1,137 @@
//
// Created by Tunm-Air13 on 2024/3/26.
//
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "opencv2/opencv.hpp"
#include "unit/test_helper/simple_csv_writer.h"
#include "unit/test_helper/test_help.h"
#include "unit/test_helper/test_tools.h"
#include "limonp/StringUtil.hpp"
// Evaluation tests: pairwise comparison sanity checks plus an optional
// LFW-funneled accuracy evaluation (enabled via ENABLE_TEST_EVALUATION).
// FIX: the Catch2 tag was "[face_evaluation" (missing closing ']'), which is
// a malformed tag expression and breaks tag-based test filtering.
TEST_CASE("test_Evaluation", "[face_evaluation]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("Test compare tools") {
        // Create a recognition-enabled session used by the comparison helper.
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 5, &session);
        REQUIRE(ret == HSUCCEED);
        float mostSim = -1.0f;
        // Same identity in both images: the helper must succeed.
        auto succ = FindMostSimilarScoreFromTwoPic(session,
                                                   GET_DATA("data/bulk/jntm.jpg"),
                                                   GET_DATA("data/bulk/kun.jpg"),
                                                   mostSim);
        CHECK(succ);
        TEST_PRINT("kun v kun :{}", mostSim);
        // Different identities: comparison still succeeds, score is just lower.
        succ = FindMostSimilarScoreFromTwoPic(session,
                                              GET_DATA("data/bulk/jntm.jpg"),
                                              GET_DATA("data/bulk/Rob_Lowe_0001.jpg"),
                                              mostSim);
        CHECK(succ);
        TEST_PRINT("kun v other :{}", mostSim);
        // Second image contains no face, so the helper is expected to fail.
        succ = FindMostSimilarScoreFromTwoPic(session,
                                              GET_DATA("data/bulk/kun.jpg"),
                                              GET_DATA("data/bulk/view.jpg"),
                                              mostSim);
        CHECK(!succ);
        TEST_PRINT("kun v other :{}", mostSim);
        // finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
    }
    SECTION("Test LFW evaluation") {
#ifdef ENABLE_TEST_EVALUATION
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 5, &session);
        REQUIRE(ret == HSUCCEED);
        std::vector<int> labels;        // ground-truth: 1 = same person, 0 = different
        std::vector<float> confidences; // best similarity score per pair
        auto pairs = ReadPairs(getTestLFWFunneledEvaTxt());
        // Hide cursor while the progress bar renders
        show_console_cursor(false);
        BlockProgressBar bar{
                option::BarWidth{60},
                option::Start{"["},
                option::End{"]"},
                option::PostfixText{"Extracting face features"},
                option::ForegroundColor{Color::white},
                option::FontStyles{std::vector<FontStyle>{FontStyle::bold}}
        };
        auto progress = 0.0f;
        // Use size_t to avoid a signed/unsigned comparison with pairs.size().
        for (size_t i = 0; i < pairs.size(); ++i) {
            bar.set_progress(progress);
            auto &pair = pairs[i];
            std::string person1, person2;
            int imgNum1, imgNum2;
            std::string imgPath1, imgPath2;
            int match;
            if (pair.size() == 3) {
                // Positive pair: "<person> <idx1> <idx2>" (same identity).
                person1 = pair[0];
                imgNum1 = std::stoi(pair[1]);
                imgNum2 = std::stoi(pair[2]);
                imgPath1 = limonp::PathJoin(limonp::PathJoin(getLFWFunneledDir(), person1),
                                            person1 + "_" + zfill(imgNum1, 4) + ".jpg");
                imgPath2 = limonp::PathJoin(limonp::PathJoin(getLFWFunneledDir(), person1),
                                            person1 + "_" + zfill(imgNum2, 4) + ".jpg");
                match = 1;
            } else {
                // Negative pair: "<person1> <idx1> <person2> <idx2>".
                person1 = pair[0];
                imgNum1 = std::stoi(pair[1]);
                person2 = pair[2];
                imgNum2 = std::stoi(pair[3]);
                imgPath1 = limonp::PathJoin(limonp::PathJoin(getLFWFunneledDir(), person1),
                                            person1 + "_" + zfill(imgNum1, 4) + ".jpg");
                imgPath2 = limonp::PathJoin(limonp::PathJoin(getLFWFunneledDir(), person2),
                                            person2 + "_" + zfill(imgNum2, 4) + ".jpg");
                match = 0;
            }
            // Initialize so a skipped pair can never leave garbage behind.
            float mostSim = -1.0f;
            auto succ = FindMostSimilarScoreFromTwoPic(session, imgPath1, imgPath2, mostSim);
            if (!succ) {
                // Detection/extraction failed for this pair; skip it.
                continue;
            }
            labels.push_back(match);
            confidences.push_back(mostSim);
            // Update progress
            progress = 100.0f * (float)(i + 1) / pairs.size();
        }
        // Show cursor
        show_console_cursor(true);
        REQUIRE(labels.size() == confidences.size());
        TEST_PRINT("scan pair: {}", labels.size());
        bar.set_progress(100.0f);
        // Sweep thresholds to find the accuracy-maximizing operating point.
        auto result = FindBestThreshold(confidences, labels);
        TEST_PRINT("Best Threshold: {}, Best Accuracy: {}", result.first, result.second);
        EvaluationRecord record(getEvaluationRecordFile());
        record.insertEvaluationData(TEST_MODEL_FILE, "LFW", result.second, result.first);
        // finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
#endif
    }
}

View File

@@ -0,0 +1,26 @@
//
// Created by tunm on 2023/10/11.
//
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include <cstdio>
// Smoke test for session lifecycle: a session created with default
// (all-disabled) custom parameters must be created and released cleanly.
TEST_CASE("test_FeatureContext", "[face_context]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("Test the new context positive process") {
        HFSessionCustomParameter parameter = {0};
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        // Create with default parameters and a track capacity of 3.
        HResult ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Releasing the freshly created session must also succeed.
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
    }
}

View File

@@ -0,0 +1,191 @@
//
// Created by tunm on 2023/10/12.
//
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "../test_helper/test_tools.h"
// Pipeline tests: RGB liveness, mask detection, and face quality.
// FIXES in this revision:
//  - The liveness/mask confidence structs and `quality` were read while
//    potentially uninitialized (e.g. TEST_PRINT before checking `ret`);
//    outputs are now zero-initialized and only read after REQUIRE(ret).
TEST_CASE("test_FacePipeline", "[face_pipeline]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("rgb liveness detect") {
        // A genuine face should score high; a spoofed image should score low.
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_liveness = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Get a face picture
        HFImageStream img1Handle;
        auto img1 = cv::imread(GET_DATA("images/image_T1.jpeg"));
        ret = CVImageToImageStream(img1, img1Handle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, img1Handle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        ret = HFMultipleFacePipelineProcess(session, img1Handle, &multipleFaceData, parameter);
        REQUIRE(ret == HSUCCEED);
        // Zero-initialize so a failed query cannot expose garbage values.
        HFRGBLivenessConfidence confidence = {0};
        ret = HFGetRGBLivenessConfidence(session, &confidence);
        // Verify the call succeeded BEFORE reading the output struct.
        REQUIRE(ret == HSUCCEED);
        TEST_PRINT("{}", confidence.confidence[0]);
        CHECK(confidence.num > 0);
        CHECK(confidence.confidence[0] > 0.9);
        ret = HFReleaseImageStream(img1Handle);
        REQUIRE(ret == HSUCCEED);
        img1Handle = nullptr;
        // fake face
        HFImageStream img2Handle;
        auto img2 = cv::imread(GET_DATA("images/rgb_fake.jpg"));
        ret = CVImageToImageStream(img2, img2Handle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, img2Handle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        ret = HFMultipleFacePipelineProcess(session, img2Handle, &multipleFaceData, parameter);
        REQUIRE(ret == HSUCCEED);
        ret = HFGetRGBLivenessConfidence(session, &confidence);
        REQUIRE(ret == HSUCCEED);
        CHECK(confidence.num > 0);
        // Spoofed input is expected to fall below the liveness threshold.
        CHECK(confidence.confidence[0] < 0.9);
        ret = HFReleaseImageStream(img2Handle);
        REQUIRE(ret == HSUCCEED);
        img2Handle = nullptr;
        ret = HFReleaseInspireFaceSession(session);
        session = nullptr;
        REQUIRE(ret == HSUCCEED);
    }
    SECTION("face mask detect") {
        // Masked face should score high; unmasked face should score near zero.
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_mask_detect = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Get a face picture
        HFImageStream img1Handle;
        auto img1 = cv::imread(GET_DATA("images/mask2.jpg"));
        ret = CVImageToImageStream(img1, img1Handle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, img1Handle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        ret = HFMultipleFacePipelineProcess(session, img1Handle, &multipleFaceData, parameter);
        REQUIRE(ret == HSUCCEED);
        // Zero-initialize the output struct before querying.
        HFFaceMaskConfidence confidence = {0};
        ret = HFGetFaceMaskConfidence(session, &confidence);
        REQUIRE(ret == HSUCCEED);
        CHECK(confidence.num > 0);
        CHECK(confidence.confidence[0] > 0.9);
        ret = HFReleaseImageStream(img1Handle);
        REQUIRE(ret == HSUCCEED);
        img1Handle = nullptr;
        // no mask face
        HFImageStream img2Handle;
        auto img2 = cv::imread(GET_DATA("images/face_sample.png"));
        ret = CVImageToImageStream(img2, img2Handle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, img2Handle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        ret = HFMultipleFacePipelineProcess(session, img2Handle, &multipleFaceData, parameter);
        REQUIRE(ret == HSUCCEED);
        ret = HFGetFaceMaskConfidence(session, &confidence);
        REQUIRE(ret == HSUCCEED);
        // spdlog::info("mask {}", confidence.confidence[0]);
        CHECK(confidence.num > 0);
        CHECK(confidence.confidence[0] < 0.1);
        ret = HFReleaseImageStream(img2Handle);
        REQUIRE(ret == HSUCCEED);
        img2Handle = nullptr;
        ret = HFReleaseInspireFaceSession(session);
        session = nullptr;
        REQUIRE(ret == HSUCCEED);
    }
    SECTION("face quality") {
        // Sharp portrait should score above 0.85; blurred one below.
        HResult ret;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HInt32 option = HF_ENABLE_QUALITY;
        HFSession session;
        ret = HFCreateInspireFaceSessionOptional(option, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Get a face picture
        HFImageStream superiorHandle;
        auto superior = cv::imread(GET_DATA("images/yifei.jpg"));
        ret = CVImageToImageStream(superior, superiorHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, superiorHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        ret = HFMultipleFacePipelineProcessOptional(session, superiorHandle, &multipleFaceData, option);
        REQUIRE(ret == HSUCCEED);
        // Initialize the output so a failed detect cannot leave garbage.
        HFloat quality = 0.0f;
        ret = HFFaceQualityDetect(session, multipleFaceData.tokens[0], &quality);
        REQUIRE(ret == HSUCCEED);
        CHECK(quality > 0.85);
        // blur image
        HFImageStream blurHandle;
        auto blur = cv::imread(GET_DATA("images/blur.jpg"));
        ret = CVImageToImageStream(blur, blurHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        ret = HFExecuteFaceTrack(session, blurHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        ret = HFMultipleFacePipelineProcessOptional(session, blurHandle, &multipleFaceData, option);
        REQUIRE(ret == HSUCCEED);
        ret = HFFaceQualityDetect(session, multipleFaceData.tokens[0], &quality);
        REQUIRE(ret == HSUCCEED);
        CHECK(quality < 0.85);
        ret = HFReleaseImageStream(superiorHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(blurHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
    }
}

View File

@@ -0,0 +1,282 @@
//
// Created by tunm on 2023/10/11.
//
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "opencv2/opencv.hpp"
#include "unit/test_helper/simple_csv_writer.h"
#include "unit/test_helper/test_help.h"
#include "unit/test_helper/test_tools.h"
// Face tracking tests: single-image detection (with IoU check against a
// hand-labeled box), track-id stability over a video frame sequence, head
// pose (yaw/pitch/roll) sign/range checks, and an optional benchmark.
TEST_CASE("test_FaceTrack", "[face_track]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("Face detection from image") {
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        // NOTE(review): this logs at error level even on success — looks like
        // leftover debugging; consider removing or demoting to trace.
        spdlog::error("error ret :{}", ret);
        REQUIRE(ret == HSUCCEED);
        // Get a face picture
        HFImageStream imgHandle;
        auto image = cv::imread(GET_DATA("data/bulk/kun.jpg"));
        ret = CVImageToImageStream(image, imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        // Detect face position: compare against a hand-labeled ground-truth
        // box given as corner coordinates (x1=98, y1=146, x2=233, y2=272).
        auto rect = multipleFaceData.rects[0];
        HFaceRect expect = {0};
        expect.x = 98;
        expect.y = 146;
        expect.width = 233 - expect.x;
        expect.height = 272 - expect.y;
        auto iou = CalculateOverlap(rect, expect);
        // NOTE(review): drawing + imwrite("ww.jpg") writes a debug artifact
        // into the working directory on every run — consider removing.
        cv::Rect cvRect(rect.x, rect.y, rect.width, rect.height);
        cv::rectangle(image, cvRect, cv::Scalar(255, 0, 124), 2);
        cv::imwrite("ww.jpg", image);
        // The iou is allowed to have an error of 10%
        CHECK(iou == Approx(1.0f).epsilon(0.1));
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Prepare non-face images: a scenery shot must yield zero detections.
        HFImageStream viewHandle;
        auto view = cv::imread(GET_DATA("data/bulk/view.jpg"));
        ret = CVImageToImageStream(view, viewHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, viewHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 0);
        ret = HFReleaseImageStream(viewHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
    }
    SECTION("Face tracking stability from frames") {
        // Feed a 288-frame sequence in VIDEO mode and count how often the
        // tracker loses the face or its track id drifts from the expected 1.
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        HFDetectMode detMode = HF_DETECT_MODE_VIDEO;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        auto expectedId = 1;
        int start = 1, end = 288;
        std::vector<std::string> filenames = generateFilenames("frame-%04d.jpg", start, end);
        auto count_loss = 0;
        for (int i = 0; i < filenames.size(); ++i) {
            auto filename = filenames[i];
            HFImageStream imgHandle;
            auto image = cv::imread(GET_DATA("video_frames/" + filename));
            ret = CVImageToImageStream(image, imgHandle);
            REQUIRE(ret == HSUCCEED);
            HFMultipleFaceData multipleFaceData = {0};
            ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
            REQUIRE(ret == HSUCCEED);
            // CHECK(multipleFaceData.detectedNum == 1);
            if (multipleFaceData.detectedNum != 1) {
                // Frame with no (or multiple) detections counts as a loss.
                count_loss++;
                continue;
            }
            // Save an annotated copy of each frame for manual inspection.
            auto rect = multipleFaceData.rects[0];
            cv::Rect cvRect(rect.x, rect.y, rect.width, rect.height);
            cv::rectangle(image, cvRect, cv::Scalar(255, 0, 124), 2);
            std::string save = GET_SAVE_DATA("video_frames") + "/" + std::to_string(i) + ".jpg";
            cv::imwrite(save, image);
            auto id = multipleFaceData.trackIds[0];
            // TEST_PRINT("{}", id);
            if (id != expectedId) {
                // Track id changed mid-sequence — also counts as a loss.
                count_loss++;
            }
            ret = HFReleaseImageStream(imgHandle);
            REQUIRE(ret == HSUCCEED);
        }
        float loss = (float )count_loss / filenames.size();
        // The face track loss is allowed to have an error of 5%
        // NOTE(review): the loss assertion below is disabled, so `loss` is
        // currently computed but never checked — confirm whether intentional.
        // CHECK(loss == Approx(0.0f).epsilon(0.05));
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
    }
    SECTION("Head pose estimation") {
        // Sign conventions exercised below: yaw negative = turned left,
        // positive = right; pitch positive = head raised, negative = lowered;
        // roll negative = tilted left, positive = tilted right.
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        // Left side face
        HFImageStream leftHandle;
        auto left = cv::imread(GET_DATA("data/pose/left_face.jpeg"));
        ret = CVImageToImageStream(left, leftHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, leftHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        HFloat yaw, pitch, roll;
        bool checked;
        // Left-handed rotation
        yaw = multipleFaceData.angles.yaw[0];
        checked = (yaw > -90 && yaw < -10);
        CHECK(checked);
        HFReleaseImageStream(leftHandle);
        // Right-handed rotation
        HFImageStream rightHandle;
        auto right = cv::imread(GET_DATA("data/pose/right_face.png"));
        ret = CVImageToImageStream(right, rightHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, rightHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        yaw = multipleFaceData.angles.yaw[0];
        checked = (yaw > 10 && yaw < 90);
        CHECK(checked);
        HFReleaseImageStream(rightHandle);
        // Rise head
        HFImageStream riseHandle;
        auto rise = cv::imread(GET_DATA("data/pose/rise_face.jpeg"));
        ret = CVImageToImageStream(rise, riseHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, riseHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        pitch = multipleFaceData.angles.pitch[0];
        CHECK(pitch > 5);
        HFReleaseImageStream(riseHandle);
        // Lower head
        HFImageStream lowerHandle;
        auto lower = cv::imread(GET_DATA("data/pose/lower_face.jpeg"));
        ret = CVImageToImageStream(lower, lowerHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, lowerHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        pitch = multipleFaceData.angles.pitch[0];
        CHECK(pitch < -10);
        HFReleaseImageStream(lowerHandle);
        // Roll head
        HFImageStream leftWryneckHandle;
        auto leftWryneck = cv::imread(GET_DATA("data/pose/left_wryneck.png"));
        ret = CVImageToImageStream(leftWryneck, leftWryneckHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, leftWryneckHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        roll = multipleFaceData.angles.roll[0];
        CHECK(roll < -30);
        HFReleaseImageStream(leftWryneckHandle);
        // Roll head
        HFImageStream rightWryneckHandle;
        auto rightWryneck = cv::imread(GET_DATA("data/pose/right_wryneck.png"));
        ret = CVImageToImageStream(rightWryneck, rightWryneckHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFExecuteFaceTrack(session, rightWryneckHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        roll = multipleFaceData.angles.roll[0];
        CHECK(roll > 30);
        HFReleaseImageStream(rightWryneckHandle);
        // finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
    }
    SECTION("Face detection benchmark") {
#ifdef ENABLE_BENCHMARK
        // Times 1000 iterations of detect (IMAGE mode) and track (VIDEO mode)
        // on the same image and records the results via BenchmarkRecord.
        int loop = 1000;
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Prepare an image
        HFImageStream imgHandle;
        auto image = cv::imread(GET_DATA("data/bulk/kun.jpg"));
        ret = CVImageToImageStream(image, imgHandle);
        REQUIRE(ret == HSUCCEED);
        BenchmarkRecord record(getBenchmarkRecordFile());
        // Case: Execute the benchmark using the IMAGE mode
        ret = HFSessionSetFaceTrackMode(session, HF_DETECT_MODE_IMAGE);
        REQUIRE(ret == HSUCCEED);
        HFMultipleFaceData multipleFaceData = {0};
        auto start = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        }
        // Elapsed wall time in milliseconds over the whole loop.
        auto cost = ((double) cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        TEST_PRINT("<Benchmark> Face Detect -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
        record.insertBenchmarkData("Face Detect", loop, cost, cost / loop);
        // Case: Execute the benchmark using the VIDEO mode(Track)
        ret = HFSessionSetFaceTrackMode(session, HF_DETECT_MODE_VIDEO);
        REQUIRE(ret == HSUCCEED);
        multipleFaceData = {0};
        start = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        }
        cost = ((double) cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum == 1);
        TEST_PRINT("<Benchmark> Face Track -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
        record.insertBenchmarkData("Face Track", loop, cost, cost / loop);
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
#else
        TEST_PRINT("Skip the face detection benchmark test. To run it, you need to turn on the benchmark test.");
#endif
    }
}

View File

@@ -0,0 +1,517 @@
//
// Created by tunm on 2024/4/13.
//
#include <iostream>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "unit/test_helper/test_help.h"
#include <thread>
// FeatureHub basic-function tests: enable/disable lifecycle, top-K search,
// repeated enable/disable error codes, and memory-only storage.
// FIX: in "Repeat the enable and disable tests" the dbPath buffer was
// allocated but never filled, so configuration.dbPath pointed at
// uninitialized memory; the path is now copied in before use.
TEST_CASE("test_FeatureHubBase", "[FeatureHub][BasicFunction]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("FeatureHub basic function") {
        // Enable then disable the hub with a fresh on-disk database.
        HResult ret;
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete []dbPathStr;
    }
    SECTION("FeatureHub search top-k") {
        // Insert 2000 random features, overwrite a handful of ids with
        // vectors similar to one target, then verify top-K returns exactly
        // those ids plus the target itself.
        HResult ret;
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        std::vector<std::vector<HFloat>> baseFeatures;
        size_t genSizeOfBase = 2000;
        HInt32 featureLength;
        HFGetFeatureLength(&featureLength);
        REQUIRE(featureLength > 0);
        for (int i = 0; i < genSizeOfBase; ++i) {
            auto feat = GenerateRandomFeature(featureLength);
            baseFeatures.push_back(feat);
            auto name = std::to_string(i);
            // Establish a security buffer (null-terminated copy of the tag)
            std::vector<char> nameBuffer(name.begin(), name.end());
            nameBuffer.push_back('\0');
            // Construct face feature
            HFFaceFeature feature = {0};
            feature.size = feat.size();
            feature.data = feat.data();
            HFFaceFeatureIdentity identity = {0};
            identity.feature = &feature;
            identity.customId = i;
            identity.tag = nameBuffer.data();
            ret = HFFeatureHubInsertFeature(identity);
            REQUIRE(ret == HSUCCEED);
        }
        HInt32 totalFace;
        ret = HFFeatureHubGetFaceCount(&totalFace);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(totalFace == genSizeOfBase);
        // 2000 data was imported
        HInt32 targetId = 523;
        auto targetFeature = baseFeatures[targetId];
        std::vector<std::vector<HFloat>> similarVectors;
        // Overwrite these ids with vectors similar to the target feature.
        std::vector<HInt32> coverIds = {2, 300, 524, 789, 1024, 1995};
        for (int i = 0; i < coverIds.size(); ++i) {
            auto feat = SimulateSimilarVector(targetFeature);
            // Construct face feature
            HFFaceFeature feature = {0};
            feature.size = feat.size();
            feature.data = feat.data();
            HFFaceFeatureIdentity identity = {0};
            identity.feature = &feature;
            identity.customId = coverIds[i];
            identity.tag = "HOLD";
            ret = HFFeatureHubFaceUpdate(identity);
            REQUIRE(ret == HSUCCEED);
        }
        // Generate a new similar feature for search
        auto topK = 10;
        auto searchFeat = SimulateSimilarVector(targetFeature);
        HFFaceFeature searchFeature = {0};
        searchFeature.size = searchFeat.size();
        searchFeature.data = searchFeat.data();
        HFSearchTopKResults results = {0};
        ret = HFFeatureHubFaceSearchTopK(searchFeature, topK, &results);
        REQUIRE(ret == HSUCCEED);
        // Expected hits: the covered ids plus the original target id.
        coverIds.push_back(targetId);
        REQUIRE(coverIds.size() == results.size);
        for (int i = 0; i < results.size; ++i) {
            REQUIRE(std::find(coverIds.begin(), coverIds.end(), results.customIds[i]) != coverIds.end());
        }
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete []dbPathStr;
    }
    SECTION("Repeat the enable and disable tests") {
        // Double-enable and double-disable must return the dedicated
        // repetition error codes rather than succeeding silently.
        HResult ret;
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        // FIX: copy the path into the buffer — it was previously left
        // uninitialized before being assigned to configuration.dbPath.
        std::strcpy(dbPathStr, dbPath.c_str());
        HFFeatureHubConfiguration configuration = {0};
        configuration.enablePersistence = 0;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HERR_FT_HUB_ENABLE_REPETITION);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HERR_FT_HUB_DISABLE_REPETITION);
        delete []dbPathStr;
    }
    SECTION("Only memory storage is used") {
        // With persistence disabled no dbPath is required.
        HResult ret;
        HFFeatureHubConfiguration configuration = {0};
        configuration.enablePersistence = 0;
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        // TODO
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
    }
}
// Concurrency test: four threads each insert 50 random features with
// disjoint customIds; afterwards the total face count must equal the
// pre-existing count plus all new insertions.
TEST_CASE("test_ConcurrencyInsertion", "[FeatureHub][Concurrency]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    HResult ret;
    HFFeatureHubConfiguration configuration = {0};
    auto dbPath = GET_SAVE_DATA(".test");
    HString dbPathStr = new char[dbPath.size() + 1];
    std::strcpy(dbPathStr, dbPath.c_str());
    configuration.enablePersistence = 1;
    configuration.dbPath = dbPathStr;
    configuration.featureBlockNum = 20;
    configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
    configuration.searchThreshold = 0.48f;
    // Delete the previous data before testing
    if (std::remove(configuration.dbPath) != 0) {
        spdlog::trace("Error deleting file");
    }
    ret = HFFeatureHubDataEnable(configuration);
    REQUIRE(ret == HSUCCEED);
    // Record the count before inserting; the db file may not have existed.
    HInt32 baseNum;
    ret = HFFeatureHubGetFaceCount(&baseNum);
    REQUIRE(ret == HSUCCEED);
    HInt32 featureLength;
    HFGetFeatureLength(&featureLength);
    const int numThreads = 4;
    const int insertsPerThread = 50;
    std::vector<std::thread> threads;
    // Ids start at 2000 to stay clear of ids used by earlier test cases.
    auto beginGenId = 2000;
    for (int i = 0; i < numThreads; ++i) {
        threads.emplace_back([=]() { // Capture by value so each thread keeps a stable copy of `i`
            for (int j = 0; j < insertsPerThread; ++j) {
                auto feat = GenerateRandomFeature(featureLength);
                auto name = std::to_string(beginGenId + j + i * insertsPerThread);
                // Null-terminated copy of the tag, owned by this thread.
                std::vector<char> nameBuffer(name.begin(), name.end());
                nameBuffer.push_back('\0');
                HFFaceFeature feature = {0};
                feature.size = feat.size();
                feature.data = feat.data();
                HFFaceFeatureIdentity featureIdentity = {0};
                featureIdentity.feature = &feature;
                featureIdentity.customId = beginGenId + j + i * insertsPerThread; // Ensure customId is unique across threads
                featureIdentity.tag = nameBuffer.data();
                auto ret = HFFeatureHubInsertFeature(featureIdentity);
                REQUIRE(ret == HSUCCEED);
            }
        });
    }
    for (auto &th : threads) {
        th.join();
    }
    HInt32 count;
    ret = HFFeatureHubGetFaceCount(&count);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(count == baseNum + numThreads * insertsPerThread); // Ensure that the previous base data is added to the newly inserted data
    ret = HFFeatureHubDataDisable();
    REQUIRE(ret == HSUCCEED);
    delete []dbPathStr;
}
// Concurrency test: insert 1000 features, then remove 4 x 200 of them from
// four threads over disjoint id ranges; the remaining count must match.
TEST_CASE("test_ConcurrencyRemove", "[FeatureHub][Concurrency]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    HResult ret;
    HFFeatureHubConfiguration configuration = {0};
    auto dbPath = GET_SAVE_DATA(".test");
    HString dbPathStr = new char[dbPath.size() + 1];
    std::strcpy(dbPathStr, dbPath.c_str());
    configuration.enablePersistence = 1;
    configuration.dbPath = dbPathStr;
    configuration.featureBlockNum = 20;
    configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
    configuration.searchThreshold = 0.48f;
    // Delete the previous data before testing
    if (std::remove(configuration.dbPath) != 0) {
        spdlog::trace("Error deleting file");
    }
    ret = HFFeatureHubDataEnable(configuration);
    REQUIRE(ret == HSUCCEED);
    std::vector<std::vector<HFloat>> baseFeatures;
    size_t genSizeOfBase = 1000;
    HInt32 featureLength;
    HFGetFeatureLength(&featureLength);
    REQUIRE(featureLength > 0);
    // Seed the hub with random features whose customId equals their index.
    for (int i = 0; i < genSizeOfBase; ++i) {
        auto feat = GenerateRandomFeature(featureLength);
        baseFeatures.push_back(feat);
        auto name = std::to_string(i);
        // Establish a security buffer (null-terminated copy of the tag)
        std::vector<char> nameBuffer(name.begin(), name.end());
        nameBuffer.push_back('\0');
        // Construct face feature
        HFFaceFeature feature = {0};
        feature.size = feat.size();
        feature.data = feat.data();
        HFFaceFeatureIdentity identity = {0};
        identity.feature = &feature;
        identity.customId = i;
        identity.tag = nameBuffer.data();
        ret = HFFeatureHubInsertFeature(identity);
        REQUIRE(ret == HSUCCEED);
    }
    HInt32 totalFace;
    ret = HFFeatureHubGetFaceCount(&totalFace);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(totalFace == genSizeOfBase);
    const int numThreads = 4;
    // 200 removals per thread; ranges [t*200, (t+1)*200) do not overlap.
    const int removePerThread = genSizeOfBase / 5;
    std::vector<std::thread> threads;
    for (int t = 0; t < numThreads; ++t) {
        threads.emplace_back([&, t]() {
            for (int j = 0; j < removePerThread; ++j) {
                int idToRemove = t * removePerThread + j;
                auto ret = HFFeatureHubFaceRemove(idToRemove);
                REQUIRE(ret == HSUCCEED);
            }
        });
    }
    // Wait for all threads to complete
    for (auto &th : threads) {
        th.join();
    }
    // 4 of the 5 id ranges were removed, so one fifth must remain.
    HInt32 remainingCount;
    ret = HFFeatureHubGetFaceCount(&remainingCount);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(remainingCount == genSizeOfBase - numThreads * removePerThread);
    TEST_PRINT("Remaining Count: {}", remainingCount);
    ret = HFFeatureHubDataDisable();
    REQUIRE(ret == HSUCCEED);
    delete []dbPathStr;
}
// Concurrency test: seed 1000 features, prepare 200 "similar" and 200
// "dissimilar" probes, then run 5 threads doing 50 positive and 50 negative
// searches each and check the returned customId.
TEST_CASE("test_ConcurrencySearch", "[FeatureHub][Concurrency]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    HResult ret;
    HFFeatureHubConfiguration configuration = {0};
    auto dbPath = GET_SAVE_DATA(".test");
    HString dbPathStr = new char[dbPath.size() + 1];
    std::strcpy(dbPathStr, dbPath.c_str());
    configuration.enablePersistence = 1;
    configuration.dbPath = dbPathStr;
    configuration.featureBlockNum = 20;
    configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
    configuration.searchThreshold = 0.48f;
    // Delete the previous data before testing
    if (std::remove(configuration.dbPath) != 0) {
        spdlog::trace("Error deleting file");
    }
    ret = HFFeatureHubDataEnable(configuration);
    REQUIRE(ret == HSUCCEED);
    std::vector<std::vector<HFloat>> baseFeatures;
    size_t genSizeOfBase = 1000;
    HInt32 featureLength;
    HFGetFeatureLength(&featureLength);
    REQUIRE(featureLength > 0);
    // Seed the hub with random features whose customId equals their index.
    for (int i = 0; i < genSizeOfBase; ++i) {
        auto feat = GenerateRandomFeature(featureLength);
        baseFeatures.push_back(feat);
        auto name = std::to_string(i);
        // Establish a security buffer (null-terminated copy of the tag)
        std::vector<char> nameBuffer(name.begin(), name.end());
        nameBuffer.push_back('\0');
        // Construct face feature
        HFFaceFeature feature = {0};
        feature.size = feat.size();
        feature.data = feat.data();
        HFFaceFeatureIdentity identity = {0};
        identity.feature = &feature;
        identity.customId = i;
        identity.tag = nameBuffer.data();
        ret = HFFeatureHubInsertFeature(identity);
        REQUIRE(ret == HSUCCEED);
    }
    HInt32 totalFace;
    ret = HFFeatureHubGetFaceCount(&totalFace);
    REQUIRE(ret == HSUCCEED);
    REQUIRE(totalFace == genSizeOfBase);
    auto preDataSample = 200;
    // Generate some feature vectors that are similar to those of the existing database
    auto numberOfSimilar = preDataSample;
    auto targetIds = GenerateRandomNumbers(numberOfSimilar, 0, genSizeOfBase - 1);
    std::vector<std::vector<HFloat>> similarFeatures;
    for (int i = 0; i < numberOfSimilar; ++i) {
        auto index = targetIds[i];
        HFFaceFeatureIdentity identity = {0};
        ret = HFFeatureHubGetFaceIdentity(index, &identity);
        REQUIRE(ret == HSUCCEED);
        std::vector<HFloat> feature(identity.feature->data, identity.feature->data + identity.feature->size);
        auto simFeat = SimulateSimilarVector(feature);
        HFFaceFeature simFeature = {0};
        simFeature.data = simFeat.data();
        simFeature.size = simFeat.size();
        HFFaceFeature target = {0};
        target.data = identity.feature->data;
        target.size = identity.feature->size;
        HFloat cosine;
        ret = HFFaceComparison(target, simFeature, &cosine);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(cosine > 0.80f);
        // NOTE(review): this stores the ORIGINAL db feature, not simFeat —
        // the later searches therefore probe with exact copies. Confirm
        // whether simFeat was intended here.
        similarFeatures.push_back(feature);
    }
    REQUIRE(similarFeatures.size() == numberOfSimilar);
    auto numberOfNotSimilar = preDataSample;
    std::vector<std::vector<HFloat>> notSimilarFeatures;
    // Generate some feature vectors that are not similar to the existing database
    for (int i = 0; i < numberOfNotSimilar; ++i) {
        auto feat = GenerateRandomFeature(featureLength);
        HFFaceFeature feature = {0};
        feature.size = feat.size();
        feature.data = feat.data();
        HFFaceFeatureIdentity mostSim = {0};
        HFloat cosine;
        HFFeatureHubFaceSearch(feature, &cosine, &mostSim);
        REQUIRE(cosine < 0.3f);
        notSimilarFeatures.push_back(feat);
    }
    REQUIRE(notSimilarFeatures.size() == numberOfNotSimilar);
    // Multithreaded search simulation
    const int numThreads = 5;
    std::vector<std::thread> threads;
    std::mutex mutex;
    // Start threads for concurrent searching
    for (int t = 0; t < numThreads; ++t) {
        threads.emplace_back([&]() {
            // Each thread uses its own RNG to pick probe indices.
            std::random_device rd;
            std::mt19937 gen(rd());
            std::uniform_int_distribution<> dis(0, preDataSample - 1);
            for (int j = 0; j < 50; ++j) { // Each thread performs 50 similar searches
                int idx = dis(gen);
                auto targetId = targetIds[idx];
                HFFaceFeature feature = {0};
                feature.data = similarFeatures[idx].data();
                feature.size = similarFeatures[idx].size();
                HFloat score;
                HFFaceFeatureIdentity identity = {0};
                HFFeatureHubFaceSearch(feature, &score, &identity);
                CHECK(identity.customId == targetId);
            }
            for (int j = 0; j < 50; ++j) {
                // 50 searches with dissimilar probes: no match expected.
                int idx = dis(gen);
                HFFaceFeature feature = {0};
                feature.data = notSimilarFeatures[idx].data();
                feature.size = notSimilarFeatures[idx].size();
                HFloat score;
                HFFaceFeatureIdentity identity = {0};
                HFFeatureHubFaceSearch(feature, &score, &identity);
                CHECK(identity.customId == -1);
            }
        });
    }
    for (auto &thread : threads) {
        thread.join();
    }
    ret = HFFeatureHubDataDisable();
    REQUIRE(ret == HSUCCEED);
    delete []dbPathStr;
}
// Verifies that a feature stored in FeatureHub can be read back repeatedly
// (cache path) and still compares as similar to a perturbed copy of itself.
TEST_CASE("test_FeatureCache", "[FeatureHub][Concurrency]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    HResult ret;
    // Configure FeatureHub with an on-disk database; any database left over
    // from a previous run is removed first so the test starts clean.
    HFFeatureHubConfiguration configuration = {0};
    auto dbPath = GET_SAVE_DATA(".test");
    HString dbPathStr = new char[dbPath.size() + 1];
    std::strcpy(dbPathStr, dbPath.c_str());
    configuration.enablePersistence = 1;
    configuration.dbPath = dbPathStr;
    configuration.featureBlockNum = 20;
    configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
    configuration.searchThreshold = 0.48f;
    // Delete the previous data before testing
    if (std::remove(configuration.dbPath) != 0) {
        spdlog::trace("Error deleting file");
    }
    ret = HFFeatureHubDataEnable(configuration);
    REQUIRE(ret == HSUCCEED);
    // Insert one random 512-d feature under customId 12.
    auto randomVec = GenerateRandomFeature(512);
    HFFaceFeature feature = {0};
    feature.data = randomVec.data();
    feature.size = randomVec.size();
    HFFaceFeatureIdentity identity = {0};
    identity.feature = &feature;
    identity.tag = "FK";
    identity.customId = 12;
    ret = HFFeatureHubInsertFeature(identity);
    REQUIRE(ret == HSUCCEED);
    // A slightly perturbed copy of the stored vector should still score high.
    auto simVec = SimulateSimilarVector(randomVec);
    HFFaceFeature simFeature = {0};
    simFeature.data = simVec.data();
    simFeature.size = simVec.size();
    // Fetch the identity several times and compare each retrieved copy.
    for (int i = 0; i < 10; ++i) {
        HFFaceFeatureIdentity capture = {0};
        ret = HFFeatureHubGetFaceIdentity(12, &capture);
        REQUIRE(ret == HSUCCEED);
        HFFaceFeature target = {0};
        target.data = capture.feature->data;
        target.size = capture.feature->size;
        HFloat cosine;
        ret = HFFaceComparison(target, simFeature, &cosine);
        // Check the return code BEFORE reading `cosine`: the output is only
        // written on success, so asserting on it first could read an
        // uninitialized value. (Original had these two asserts reversed.)
        REQUIRE(ret == HSUCCEED);
        REQUIRE(cosine > 0.8f);
    }
    ret = HFFeatureHubDataDisable();
    REQUIRE(ret == HSUCCEED);
    delete[] dbPathStr;
}

View File

@@ -0,0 +1,957 @@
//
// Created by tunm on 2023/10/11.
//
#include <cstdio>
#include <cstring>
#include <iostream>
#include <vector>
#include "settings/test_settings.h"
#include "inspireface/c_api/inspireface.h"
#include "opencv2/opencv.hpp"
#include "unit/test_helper/simple_csv_writer.h"
#include "unit/test_helper/test_help.h"
// CRUD coverage for FeatureHub: insert/update/remove a single face, bulk
// import from LFW, and search/replace behavior against the imported set.
TEST_CASE("test_FeatureManage", "[feature_manage]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("Face feature management basic functions") {
        // Insert one face extracted from an image, update its tag, then
        // remove it, asserting the stored count at each step.
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Persistent hub backed by a fresh on-disk database.
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        // Get a face picture
        cv::Mat kunImage = cv::imread(GET_DATA("images/kun.jpg"));
        HFImageData imageData = {0};
        imageData.data = kunImage.data;
        imageData.height = kunImage.rows;
        imageData.width = kunImage.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        ret = HFCreateImageStream(&imageData, &imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        // Extract face feature
        HFFaceFeature feature = {0};
        ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
        REQUIRE(ret == HSUCCEED);
        // Insert data into feature management
        HFFaceFeatureIdentity identity = {0};
        identity.feature = &feature;
        identity.tag = "chicken";
        identity.customId = 1234;
        ret = HFFeatureHubInsertFeature(identity);
        REQUIRE(ret == HSUCCEED);
        // Check number
        HInt32 num;
        ret = HFFeatureHubGetFaceCount(&num);
        REQUIRE(ret == HSUCCEED);
        CHECK(num == 1);
        // Update Face info: same id and feature, new tag.
        HFFaceFeatureIdentity updatedIdentity = {0};
        updatedIdentity.feature = identity.feature;
        updatedIdentity.customId = identity.customId;
        updatedIdentity.tag = "iKun";
        ret = HFFeatureHubFaceUpdate(updatedIdentity);
        REQUIRE(ret == HSUCCEED);
        // Trying to update an identity that doesn't exist must fail.
        HFFaceFeatureIdentity nonIdentity = {0};
        nonIdentity.customId = 234;
        nonIdentity.tag = "no";
        nonIdentity.feature = &feature;
        ret = HFFeatureHubFaceUpdate(nonIdentity);
        REQUIRE(ret != HSUCCEED);
        // Trying to delete an identity that doesn't exist must fail.
        ret = HFFeatureHubFaceRemove(nonIdentity.customId);
        REQUIRE(ret != HSUCCEED);
        // Delete the inserted identity; the hub should then be empty.
        ret = HFFeatureHubFaceRemove(identity.customId);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubGetFaceCount(&num);
        REQUIRE(ret == HSUCCEED);
        CHECK(num == 0);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete[]dbPathStr;
    }
    SECTION("Import a large faces data") {
#ifdef ENABLE_USE_LFW_DATA
        // Bulk-import 1000 LFW faces and verify the stored count.
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        auto lfwDir = getLFWFunneledDir();
        auto dataList = LoadLFWFunneledValidData(lfwDir, getTestLFWFunneledTxt());
        size_t numOfNeedImport = 1000;
        auto importStatus = ImportLFWFunneledValidData(session, dataList, numOfNeedImport);
        REQUIRE(importStatus);
        HInt32 count;
        ret = HFFeatureHubGetFaceCount(&count);
        REQUIRE(ret == HSUCCEED);
        CHECK(count == numOfNeedImport);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        delete[]dbPathStr;
#else
        TEST_PRINT("The test case that uses LFW is not enabled, so it will be skipped.");
#endif
    }
    SECTION("Faces feature CURD") {
#ifdef ENABLE_USE_LFW_DATA
        // This section needs to be connected to the "Import a large faces data" section before it can be executed
        // (it searches against the database populated there; the expected
        // ids 898/909 are positions within that import).
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Face track
        cv::Mat dstImage = cv::imread(GET_DATA("data/bulk/Nathalie_Baye_0002.jpg"));
        HFImageData imageData = {0};
        imageData.data = dstImage.data;
        imageData.height = dstImage.rows;
        imageData.width = dstImage.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        ret = HFCreateImageStream(&imageData, &imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        // Extract face feature
        HFFaceFeature feature = {0};
        ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Search for a face
        HFloat confidence;
        HFFaceFeatureIdentity searchedIdentity = {0};
        ret = HFFeatureHubFaceSearch(feature, &confidence, &searchedIdentity);
        REQUIRE(ret == HSUCCEED);
        CHECK(searchedIdentity.customId == 898);
        CHECK(std::string(searchedIdentity.tag) == "Nathalie_Baye");
        // Remove the found identity, then search again: no match expected.
        ret = HFFeatureHubFaceRemove(searchedIdentity.customId);
        REQUIRE(ret == HSUCCEED);
        // Search again
        ret = HFFeatureHubFaceSearch(feature, &confidence, &searchedIdentity);
        // spdlog::info("{}", confidence);
        REQUIRE(ret == HSUCCEED);
        CHECK(searchedIdentity.customId == -1);
        // Insert again under the same id with a new tag.
        HFFaceFeatureIdentity againIdentity = {0};
        againIdentity.customId = 898;
        againIdentity.tag = "Cover";
        againIdentity.feature = &feature;
        ret = HFFeatureHubInsertFeature(againIdentity);
        REQUIRE(ret == HSUCCEED);
        // Search again
        HFFaceFeatureIdentity searchedAgainIdentity = {0};
        ret = HFFeatureHubFaceSearch(feature, &confidence, &searchedAgainIdentity);
        REQUIRE(ret == HSUCCEED);
        CHECK(searchedAgainIdentity.customId == 898);
        // Update an arbitrary existing id (909) with a different person's feature.
        HInt32 updateId = 909;
        cv::Mat zyImage = cv::imread(GET_DATA("data/bulk/woman.png"));
        HFImageData imageDataZy = {0};
        imageDataZy.data = zyImage.data;
        imageDataZy.height = zyImage.rows;
        imageDataZy.width = zyImage.cols;
        imageDataZy.format = HF_STREAM_BGR;
        imageDataZy.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandleZy;
        ret = HFCreateImageStream(&imageDataZy, &imgHandleZy);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceDataZy = {0};
        ret = HFExecuteFaceTrack(session, imgHandleZy, &multipleFaceDataZy);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceDataZy.detectedNum > 0);
        // Extract face feature
        HFFaceFeature featureZy = {0};
        ret = HFFaceFeatureExtract(session, imgHandleZy, multipleFaceDataZy.tokens[0], &featureZy);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(imgHandleZy);
        REQUIRE(ret == HSUCCEED);
        // Replace the feature stored under updateId (909).
        HFFaceFeatureIdentity updateIdentity = {0};
        updateIdentity.customId = updateId;
        updateIdentity.tag = "ZY";
        updateIdentity.feature = &featureZy;
        ret = HFFeatureHubFaceUpdate(updateIdentity);
        REQUIRE(ret == HSUCCEED);
        //
        // Prepare a zy query image
        cv::Mat zyImageQuery = cv::imread(GET_DATA("data/bulk/woman_search.jpeg"));
        HFImageData imageDataZyQuery = {0};
        imageDataZyQuery.data = zyImageQuery.data;
        imageDataZyQuery.height = zyImageQuery.rows;
        imageDataZyQuery.width = zyImageQuery.cols;
        imageDataZyQuery.format = HF_STREAM_BGR;
        imageDataZyQuery.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandleZyQuery;
        ret = HFCreateImageStream(&imageDataZyQuery, &imgHandleZyQuery);
        REQUIRE(ret == HSUCCEED);
        //
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceDataZyQuery = {0};
        ret = HFExecuteFaceTrack(session, imgHandleZyQuery, &multipleFaceDataZyQuery);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceDataZyQuery.detectedNum > 0);
        //
        // Extract face feature
        HFFaceFeature featureZyQuery = {0};
        ret = HFFaceFeatureExtract(session, imgHandleZyQuery, multipleFaceDataZyQuery.tokens[0], &featureZyQuery);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(imgHandleZyQuery);
        REQUIRE(ret == HSUCCEED);
        // Search: a second photo of the same person must now hit updateId.
        HFloat confidenceQuery;
        HFFaceFeatureIdentity searchedIdentityQuery = {0};
        ret = HFFeatureHubFaceSearch(featureZyQuery, &confidenceQuery, &searchedIdentityQuery);
        REQUIRE(ret == HSUCCEED);
        CHECK(searchedIdentityQuery.customId == updateId);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
#else
        TEST_PRINT("The test case that uses LFW is not enabled, so it will be skipped.");
#endif
    }
}
// Top-K search: import 1k LFW faces, overwrite four entries with four photos
// of one person, then query with a fifth photo of that person and expect
// exactly those four ids in the top-k results.
TEST_CASE("test_SearchTopK", "[feature_search_top_k]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("Face feature management basic functions") {
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Persistent hub backed by a fresh on-disk database.
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        // Import 1k faces
        auto lfwDir = getLFWFunneledDir();
        auto dataList = LoadLFWFunneledValidData(lfwDir, getTestLFWFunneledTxt());
        size_t numOfNeedImport = 1000;
        auto importStatus = ImportLFWFunneledValidData(session, dataList, numOfNeedImport);
        REQUIRE(importStatus);
        HInt32 count;
        ret = HFFeatureHubGetFaceCount(&count);
        REQUIRE(ret == HSUCCEED);
        CHECK(count == numOfNeedImport);
        // Prepare multiple photos of a person
        std::vector<std::string> photos = {
            GET_DATA("data/RD/d1.jpeg"),
            GET_DATA("data/RD/d2.jpeg"),
            GET_DATA("data/RD/d3.jpeg"),
            GET_DATA("data/RD/d4.jpeg"),
        };
        std::vector<std::string> tags = {
            "d1", "d2", "d3", "d4",
        };
        // Existing hub ids that will be overwritten with this person's features.
        std::vector<HInt32> updateIds = {
            5, 163, 670, 971,
        };
        REQUIRE(photos.size() == tags.size());
        REQUIRE(updateIds.size() == tags.size());
        // Replace the face features in the photo with each target in FeatureHub
        for (int i = 0; i < photos.size(); ++i) {
            // Face track
            cv::Mat dstImage = cv::imread(photos[i]);
            HFImageData imageData = {0};
            imageData.data = dstImage.data;
            imageData.height = dstImage.rows;
            imageData.width = dstImage.cols;
            imageData.format = HF_STREAM_BGR;
            imageData.rotation = HF_CAMERA_ROTATION_0;
            HFImageStream imgHandle;
            ret = HFCreateImageStream(&imageData, &imgHandle);
            REQUIRE(ret == HSUCCEED);
            // Extract basic face information from photos
            HFMultipleFaceData multipleFaceData = {0};
            ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
            REQUIRE(ret == HSUCCEED);
            REQUIRE(multipleFaceData.detectedNum > 0);
            // Extract face feature
            HFFaceFeature feature = {0};
            ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
            REQUIRE(ret == HSUCCEED);
            char* cstr = new char[tags[i].size() + 1];  // Dynamically allocate memory for the name
            strcpy(cstr, tags[i].c_str());              // Copy the name into the allocated memory
            // Create identity
            HFFaceFeatureIdentity identity = {0};
            identity.customId = updateIds[i];
            identity.feature = &feature;
            identity.tag = cstr;
            // Update
            ret = HFFeatureHubFaceUpdate(identity);
            REQUIRE(ret == HSUCCEED);
            ret = HFReleaseImageStream(imgHandle);
            REQUIRE(ret == HSUCCEED);
            delete[] cstr;  // Clean up the dynamically allocated memory
        }
        // Prepare a target photo for a face top-k search
        cv::Mat image = cv::imread(GET_DATA("data/RD/d5.jpeg"));
        HFImageData imageData = {0};
        imageData.data = image.data;
        imageData.height = image.rows;
        imageData.width = image.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        ret = HFCreateImageStream(&imageData, &imgHandle);
        REQUIRE(ret == HSUCCEED);
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        HFFaceFeature feature = {0};
        ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
        REQUIRE(ret == HSUCCEED);
        // Run the top-k search (k = 10; only the 4 planted entries should match).
        HFSearchTopKResults topk;
        ret = HFFeatureHubFaceSearchTopK(feature, 10, &topk);
        REQUIRE(ret == HSUCCEED);
        // Check whether the top-k result is consistent with the expectation
        CHECK(topk.size == photos.size());
        for (int i = 0; i < topk.size; ++i) {
            TEST_PRINT("Top-{} -> id: {}, {}", i + 1, topk.customIds[i], topk.confidence[i]);
            CHECK(std::find(updateIds.begin(), updateIds.end(), topk.customIds[i]) != updateIds.end());
        }
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete[]dbPathStr;
    }
}
// Benchmarks: search latency against 1k/5k/10k galleries (target planted near
// the end), plus feature comparison and feature extraction timing. Results are
// appended to the benchmark CSV via BenchmarkRecord.
TEST_CASE("test_FeatureBenchmark", "[feature_benchmark]") {
    // Test the search time at 1k, 5k and 10k of the face library (the target face is at the back).
    SECTION("Search face benchmark from 1k") {
#if defined(ENABLE_BENCHMARK) && defined(ENABLE_USE_LFW_DATA)
        size_t loop = 1000;
        size_t numOfNeedImport = 1000;
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        // Persistent hub backed by a fresh on-disk database.
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        auto lfwDir = getLFWFunneledDir();
        auto dataList = LoadLFWFunneledValidData(lfwDir, getTestLFWFunneledTxt());
        // TEST_PRINT("{}", dataList.size());
        auto importStatus = ImportLFWFunneledValidData(session, dataList, numOfNeedImport);
        REQUIRE(importStatus);
        HInt32 count;
        ret = HFFeatureHubGetFaceCount(&count);
        REQUIRE(ret == HSUCCEED);
        CHECK(count == numOfNeedImport);
        // Face track: the query identity sits at index 999 (end of gallery).
        cv::Mat dstImage = cv::imread(GET_DATA("data/search/Teresa_Williams_0001_1k.jpg"));
        HFImageData imageData = {0};
        imageData.data = dstImage.data;
        imageData.height = dstImage.rows;
        imageData.width = dstImage.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        ret = HFCreateImageStream(&imageData, &imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        // Extract face feature
        HFFaceFeature feature = {0};
        ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
        REQUIRE(ret == HSUCCEED);
        // Time `loop` exhaustive searches.
        HFloat confidence;
        HFFaceFeatureIdentity searchedIdentity = {0};
        auto start = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            ret = HFFeatureHubFaceSearch(feature, &confidence, &searchedIdentity);
        }
        auto cost = ((double) cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
        REQUIRE(ret == HSUCCEED);
        REQUIRE(searchedIdentity.customId == 999);
        REQUIRE(std::string(searchedIdentity.tag) == "Teresa_Williams");
        TEST_PRINT("<Benchmark> Search Face from 1k -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
        BenchmarkRecord record(getBenchmarkRecordFile());
        record.insertBenchmarkData("Search Face from 1k", loop, cost, cost / loop);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete[] dbPathStr;
#else
        TEST_PRINT("Skip face search benchmark test, you need to enable both lfw and benchmark test.");
#endif
    }
    SECTION("Search face benchmark from 5k") {
#if defined(ENABLE_BENCHMARK) && defined(ENABLE_USE_LFW_DATA)
        size_t loop = 1000;
        size_t numOfNeedImport = 5000;
        HResult ret;
        // NOTE(review): the original declared an unused `modelPath`/`path`
        // pair here (dead code, never passed to any API); removed.
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        auto lfwDir = getLFWFunneledDir();
        auto dataList = LoadLFWFunneledValidData(lfwDir, getTestLFWFunneledTxt());
        auto importStatus = ImportLFWFunneledValidData(session, dataList, numOfNeedImport);
        REQUIRE(importStatus);
        HInt32 count;
        ret = HFFeatureHubGetFaceCount(&count);
        REQUIRE(ret == HSUCCEED);
        CHECK(count == numOfNeedImport);
        // Face track: the query identity sits at index 4998 (end of gallery).
        cv::Mat dstImage = cv::imread(GET_DATA("data/search/Mary_Katherine_Smart_0001_5k.jpg"));
        HFImageData imageData = {0};
        imageData.data = dstImage.data;
        imageData.height = dstImage.rows;
        imageData.width = dstImage.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        ret = HFCreateImageStream(&imageData, &imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        // Extract face feature
        HFFaceFeature feature = {0};
        ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
        REQUIRE(ret == HSUCCEED);
        // Time `loop` exhaustive searches.
        HFloat confidence;
        HFFaceFeatureIdentity searchedIdentity = {0};
        auto start = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            ret = HFFeatureHubFaceSearch(feature, &confidence, &searchedIdentity);
        }
        auto cost = ((double) cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
        REQUIRE(ret == HSUCCEED);
        REQUIRE(searchedIdentity.customId == 4998);
        REQUIRE(std::string(searchedIdentity.tag) == "Mary_Katherine_Smart");
        TEST_PRINT("<Benchmark> Search Face from 5k -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
        BenchmarkRecord record(getBenchmarkRecordFile());
        record.insertBenchmarkData("Search Face from 5k", loop, cost, cost / loop);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete[] dbPathStr;
#else
        TEST_PRINT("Skip face search benchmark test, you need to enable both lfw and benchmark test.");
#endif
    }
    SECTION("Search face benchmark from 10k") {
#if defined(ENABLE_BENCHMARK) && defined(ENABLE_USE_LFW_DATA)
        size_t loop = 1000;
        size_t numOfNeedImport = 10000;
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        auto lfwDir = getLFWFunneledDir();
        auto dataList = LoadLFWFunneledValidData(lfwDir, getTestLFWFunneledTxt());
        // TEST_PRINT("{}", dataList.size());
        auto importStatus = ImportLFWFunneledValidData(session, dataList, numOfNeedImport);
        REQUIRE(importStatus);
        HInt32 count;
        ret = HFFeatureHubGetFaceCount(&count);
        REQUIRE(ret == HSUCCEED);
        CHECK(count == numOfNeedImport);
        // Plant a known face at the last gallery slot so the exhaustive
        // search has to scan the whole library before hitting it.
        HInt32 updateId = numOfNeedImport - 1;
        cv::Mat zyImage = cv::imread(GET_DATA("data/bulk/woman.png"));
        HFImageData imageDataZy = {0};
        imageDataZy.data = zyImage.data;
        imageDataZy.height = zyImage.rows;
        imageDataZy.width = zyImage.cols;
        imageDataZy.format = HF_STREAM_BGR;
        imageDataZy.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandleZy;
        ret = HFCreateImageStream(&imageDataZy, &imgHandleZy);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceDataZy = {0};
        ret = HFExecuteFaceTrack(session, imgHandleZy, &multipleFaceDataZy);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceDataZy.detectedNum > 0);
        // Extract face feature
        HFFaceFeature featureZy = {0};
        ret = HFFaceFeatureExtract(session, imgHandleZy, multipleFaceDataZy.tokens[0], &featureZy);
        REQUIRE(ret == HSUCCEED);
        // Replace the feature stored under updateId (numOfNeedImport - 1).
        HFFaceFeatureIdentity updateIdentity = {0};
        updateIdentity.customId = updateId;
        updateIdentity.tag = "ZY";
        updateIdentity.feature = &featureZy;
        ret = HFFeatureHubFaceUpdate(updateIdentity);
        REQUIRE(ret == HSUCCEED);
        HFReleaseImageStream(imgHandleZy);
        // Face track on a second photo of the planted person.
        cv::Mat dstImage = cv::imread(GET_DATA("data/bulk/woman_search.jpeg"));
        HFImageData imageData = {0};
        imageData.data = dstImage.data;
        imageData.height = dstImage.rows;
        imageData.width = dstImage.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        ret = HFCreateImageStream(&imageData, &imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        // Extract face feature
        HFFaceFeature feature = {0};
        ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
        REQUIRE(ret == HSUCCEED);
        // Time `loop` exhaustive searches.
        HFloat confidence;
        HFFaceFeatureIdentity searchedIdentity = {0};
        auto start = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            ret = HFFeatureHubFaceSearch(feature, &confidence, &searchedIdentity);
        }
        auto cost = ((double) cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
        REQUIRE(ret == HSUCCEED);
        REQUIRE(searchedIdentity.customId == updateId);
        REQUIRE(std::string(searchedIdentity.tag) == "ZY");
        TEST_PRINT("<Benchmark> Search Face from 10k -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
        BenchmarkRecord record(getBenchmarkRecordFile());
        record.insertBenchmarkData("Search Face from 10k", loop, cost, cost / loop);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        ret = HFReleaseImageStream(imgHandle);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete[] dbPathStr;
#else
        TEST_PRINT("Skip face search benchmark test, you need to enable both lfw and benchmark test.");
#endif
    }
    SECTION("Face comparison benchmark") {
#ifdef ENABLE_BENCHMARK
        int loop = 1000;
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        cv::Mat zyImage = cv::imread(GET_DATA("data/bulk/woman.png"));
        HFImageData imageDataZy = {0};
        imageDataZy.data = zyImage.data;
        imageDataZy.height = zyImage.rows;
        imageDataZy.width = zyImage.cols;
        imageDataZy.format = HF_STREAM_BGR;
        imageDataZy.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandleZy;
        ret = HFCreateImageStream(&imageDataZy, &imgHandleZy);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceDataZy = {0};
        ret = HFExecuteFaceTrack(session, imgHandleZy, &multipleFaceDataZy);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceDataZy.detectedNum > 0);
        HInt32 featureNum;
        HFGetFeatureLength(&featureNum);
        // Extract face feature into a caller-owned buffer.
        // std::vector replaces the original `HFloat buf[featureNum]`, which
        // was a variable-length array — a compiler extension, not ISO C++.
        std::vector<HFloat> featureCacheZy(featureNum);
        ret = HFFaceFeatureExtractCpy(session, imgHandleZy, multipleFaceDataZy.tokens[0], featureCacheZy.data());
        HFFaceFeature featureZy = {0};
        featureZy.size = featureNum;
        featureZy.data = featureCacheZy.data();
        REQUIRE(ret == HSUCCEED);
        cv::Mat zyImageQuery = cv::imread(GET_DATA("data/bulk/woman_search.jpeg"));
        HFImageData imageDataZyQuery = {0};
        imageDataZyQuery.data = zyImageQuery.data;
        imageDataZyQuery.height = zyImageQuery.rows;
        imageDataZyQuery.width = zyImageQuery.cols;
        imageDataZyQuery.format = HF_STREAM_BGR;
        imageDataZyQuery.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandleZyQuery;
        ret = HFCreateImageStream(&imageDataZyQuery, &imgHandleZyQuery);
        REQUIRE(ret == HSUCCEED);
        //
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceDataZyQuery = {0};
        ret = HFExecuteFaceTrack(session, imgHandleZyQuery, &multipleFaceDataZyQuery);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceDataZyQuery.detectedNum > 0);
        //
        // Extract face feature (second buffer; same VLA fix as above).
        std::vector<HFloat> featureCacheZyQuery(featureNum);
        ret = HFFaceFeatureExtractCpy(session, imgHandleZyQuery, multipleFaceDataZyQuery.tokens[0], featureCacheZyQuery.data());
        HFFaceFeature featureZyQuery = {0};
        featureZyQuery.data = featureCacheZyQuery.data();
        featureZyQuery.size = featureNum;
        REQUIRE(ret == HSUCCEED);
        // Time `loop` feature comparisons.
        auto start = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            HFloat compRes;
            ret = HFFaceComparison(featureZy, featureZyQuery, &compRes);
        }
        auto cost = ((double) cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
        REQUIRE(ret == HSUCCEED);
        TEST_PRINT("<Benchmark> Face Comparison -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
        BenchmarkRecord record(getBenchmarkRecordFile());
        record.insertBenchmarkData("Face Comparison", loop, cost, cost / loop);
        HFReleaseImageStream(imgHandleZy);
        HFReleaseImageStream(imgHandleZyQuery);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete[] dbPathStr;
#else
        TEST_PRINT("The benchmark is not enabled, so all relevant test cases are skipped.");
#endif
    }
    SECTION("Face feature extract benchmark") {
#ifdef ENABLE_BENCHMARK
        int loop = 1000;
        HResult ret;
        HFSessionCustomParameter parameter = {0};
        parameter.enable_recognition = 1;
        HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
        HFSession session;
        ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
        REQUIRE(ret == HSUCCEED);
        HFFeatureHubConfiguration configuration = {0};
        auto dbPath = GET_SAVE_DATA(".test");
        HString dbPathStr = new char[dbPath.size() + 1];
        std::strcpy(dbPathStr, dbPath.c_str());
        configuration.enablePersistence = 1;
        configuration.dbPath = dbPathStr;
        configuration.featureBlockNum = 20;
        configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
        configuration.searchThreshold = 0.48f;
        // Delete the previous data before testing
        if (std::remove(configuration.dbPath) != 0) {
            spdlog::trace("Error deleting file");
        }
        ret = HFFeatureHubDataEnable(configuration);
        REQUIRE(ret == HSUCCEED);
        // Face track
        cv::Mat dstImage = cv::imread(GET_DATA("data/search/Teresa_Williams_0001_1k.jpg"));
        HFImageData imageData = {0};
        imageData.data = dstImage.data;
        imageData.height = dstImage.rows;
        imageData.width = dstImage.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        ret = HFCreateImageStream(&imageData, &imgHandle);
        REQUIRE(ret == HSUCCEED);
        // Extract basic face information from photos
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(session, imgHandle, &multipleFaceData);
        REQUIRE(ret == HSUCCEED);
        REQUIRE(multipleFaceData.detectedNum > 0);
        // Time `loop` feature extractions on the same detected face.
        HFFaceFeature feature = {0};
        auto start = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            ret = HFFaceFeatureExtract(session, imgHandle, multipleFaceData.tokens[0], &feature);
        }
        auto cost = ((double) cv::getTickCount() - start) / cv::getTickFrequency() * 1000;
        REQUIRE(ret == HSUCCEED);
        TEST_PRINT("<Benchmark> Face Extract -> Loop: {}, Total Time: {:.5f}ms, Average Time: {:.5f}ms", loop, cost, cost / loop);
        BenchmarkRecord record(getBenchmarkRecordFile());
        record.insertBenchmarkData("Face Extract", loop, cost, cost / loop);
        HFReleaseImageStream(imgHandle);
        // Finish
        ret = HFReleaseInspireFaceSession(session);
        REQUIRE(ret == HSUCCEED);
        ret = HFFeatureHubDataDisable();
        REQUIRE(ret == HSUCCEED);
        delete[] dbPathStr;
#else
        TEST_PRINT("Skip the face feature extraction benchmark test. To run it, you need to turn on the benchmark test.");
#endif
    }
}

View File

@@ -0,0 +1,65 @@
//
// Created by Tunm-Air13 on 2024/3/20.
//
#include <iostream>
#include "settings/test_settings.h"
#include "../test_helper/test_help.h"
// Exercises the LFW import helpers: loads the funneled file list, imports the
// first 100 faces into the feature hub, and checks the stored count.
// Only compiled in when ENABLE_USE_LFW_DATA is defined.
TEST_CASE("test_HelpTools", "[help_tools]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("Load lfw funneled data") {
#ifdef ENABLE_USE_LFW_DATA
HResult ret;
HFSessionCustomParameter parameter = {0};
parameter.enable_recognition = 1;
HFDetectMode detMode = HF_DETECT_MODE_IMAGE;
HFSession session;
ret = HFCreateInspireFaceSession(parameter, detMode, 3, &session);
REQUIRE(ret == HSUCCEED);
// Persistent feature hub backed by a throwaway db file.
HFFeatureHubConfiguration configuration = {0};
auto dbPath = GET_SAVE_DATA(".test");
HString dbPathStr = new char[dbPath.size() + 1];
std::strcpy(dbPathStr, dbPath.c_str());
configuration.enablePersistence = 1;
configuration.dbPath = dbPathStr;
configuration.featureBlockNum = 20;
configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
configuration.searchThreshold = 0.48f;
// Delete the previous data before testing
if (std::remove(configuration.dbPath) != 0) {
spdlog::trace("Error deleting file");
}
ret = HFFeatureHubDataEnable(configuration);
REQUIRE(ret == HSUCCEED);
// Parse the (name, path) list and register the first 100 entries.
auto lfwDir = getLFWFunneledDir();
auto dataList = LoadLFWFunneledValidData(lfwDir, getTestLFWFunneledTxt());
size_t numOfNeedImport = 100;
auto importStatus = ImportLFWFunneledValidData(session, dataList, numOfNeedImport);
HFFeatureHubViewDBTable();
REQUIRE(importStatus);
// The hub must report exactly as many faces as were imported.
HInt32 count;
ret = HFFeatureHubGetFaceCount(&count);
REQUIRE(ret == HSUCCEED);
CHECK(count == numOfNeedImport);
// ret = HF_ViewFaceDBTable(session);
// REQUIRE(ret == HSUCCEED);
// Finish
ret = HFReleaseInspireFaceSession(session);
REQUIRE(ret == HSUCCEED);
ret = HFFeatureHubDataDisable();
REQUIRE(ret == HSUCCEED);
delete []dbPathStr;
#else
TEST_PRINT("The test case that uses LFW is not enabled, so it will be skipped.");
#endif
}
}

View File

@@ -0,0 +1,75 @@
//
// Created by Tunm-Air13 on 2024/2/2.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "common/face_data/data_tools.h"
#include "../test_helper/test_tools.h"
#include "herror.h"
using namespace inspire;
// Verifies that CameraStream correctly decodes input frames supplied in
// different rotations and pixel formats before face detection runs.
// BUGFIX: the Catch2 tag was previously "[camera_stream" (missing the closing
// ']'), which breaks tag-based test filtering.
TEST_CASE("test_CameraStream", "[camera_stream]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);

    SECTION("DecodingRotatedImages") {
        // One image-mode context, tracking at most one face.
        FaceContext ctx;
        CustomPipelineParameter param;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
        REQUIRE(ret == HSUCCEED);
        // The same face pre-rotated on disk by 0/90/180/270 degrees; the
        // stream's rotation mode must undo each rotation before detection.
        std::vector<std::string> rotated_filename_list = {
                getTestData("images/rotate/rot_0.jpg"),
                getTestData("images/rotate/rot_90.jpg"),
                getTestData("images/rotate/rot_180.jpg"),
                getTestData("images/rotate/rot_270.jpg"),
        };
        std::vector<ROTATION_MODE> rotate_list = {ROTATION_0, ROTATION_90, ROTATION_180, ROTATION_270};
        CHECK(rotate_list.size() == rotated_filename_list.size());
        for (int i = 0; i < rotate_list.size(); ++i) {
            cv::Mat image = cv::imread(rotated_filename_list[i]);
            REQUIRE(!image.empty());
            auto rotated = rotate_list[i];
            CameraStream stream;
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(rotated);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Exactly one face is expected regardless of the input rotation.
            const auto &faces = ctx.GetTrackingFaceList();
            CHECK(faces.size() == 1);
        }
    }

    SECTION("DecodingNV21Image") {
        FaceContext ctx;
        CustomPipelineParameter param;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
        REQUIRE(ret == HSUCCEED);
        // Raw NV21 frame, 402x324, stored rotated by 90 degrees on disk.
        int32_t width = 402;
        int32_t height = 324;
        auto rotated = ROTATION_90;
        auto format = NV21;
        auto nv21 = ReadNV21Data(getTestData("images/rotate/rot_90_324x402.nv21").c_str(), width, height);
        REQUIRE(nv21 != nullptr);
        CameraStream stream;
        stream.SetDataBuffer(nv21, height, width);
        stream.SetDataFormat(format);
        stream.SetRotationMode(rotated);
        ret = ctx.FaceDetectAndTrack(stream);
        REQUIRE(ret == HSUCCEED);
        const auto &faces = ctx.GetTrackingFaceList();
        CHECK(faces.size() == 1);
    }
}

View File

@@ -0,0 +1,50 @@
//
// Created by tunm on 2023/9/16.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
using namespace inspire;
// Benchmarks video-mode face tracking over 1000 frames of the same image and
// logs total/average latency.
TEST_CASE("test_FaceDetectTrack", "[face_track]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("TrackBenchmark") {
        // Initialize a video-mode context with face-quality assessment on.
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_face_quality = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_VIDEO, 1, param);
        REQUIRE(ret == HSUCCEED);
        // Prepare a picture of a face
        auto image = cv::imread(GET_DATA("images/face_sample.png"));
        CameraStream stream;
        stream.SetDataFormat(BGR);
        stream.SetRotationMode(ROTATION_0);
        stream.SetDataBuffer(image.data, image.rows, image.cols);
        const auto loop = 1000;
        double total = 0.0;
        spdlog::info("begin {} times tracking: ", loop);
        auto out = (double) cv::getTickCount();
        for (int i = 0; i < loop; ++i) {
            auto timeStart = (double) cv::getTickCount();
            // BUGFIX: capture the tracking status — it was previously
            // discarded, so the REQUIRE below kept re-checking the stale
            // Configuration() result instead of each frame's result.
            ret = ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            double cost = ((double) cv::getTickCount() - timeStart) / cv::getTickFrequency() * 1000;
            REQUIRE(ret == HSUCCEED);
            REQUIRE(faces.size() > 0);
            total += cost;
        }
        auto end = ((double) cv::getTickCount() - out) / cv::getTickFrequency() * 1000;
        spdlog::info("[Face Tracking]{} times, Total cost: {}ms, Average cost: {}ms", loop, end, total / loop);
    }
}

View File

@@ -0,0 +1,55 @@
//
// Created by tunm on 2023/9/17.
//
#include "settings/test_settings.h"
#include "inspireface/common/face_data/data_tools.h"
#include "herror.h"
#include "inspireface/face_context.h"
using namespace inspire;
// Round-trips a tracked face through HyperFaceData serialization and
// deserialization, printing both sides for manual comparison.
TEST_CASE("test_FaceData", "[face_data]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);
    SECTION("DataConversion") {
        // Initialize
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_face_quality = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_VIDEO, 1, param);
        REQUIRE(ret == HSUCCEED);
        // Prepare a picture of a face
        auto image = cv::imread(GET_DATA("images/face_sample.png"));
        CameraStream stream;
        stream.SetDataFormat(BGR);
        stream.SetRotationMode(ROTATION_0);
        stream.SetDataBuffer(image.data, image.rows, image.cols);
        // BUGFIX: capture the tracking status — it was previously discarded,
        // so the REQUIRE below re-checked the stale Configuration() result
        // (sibling tests in this suite assign the return value the same way).
        ret = ctx.FaceDetectAndTrack(stream);
        auto &faces = ctx.GetTrackingFaceList();
        REQUIRE(ret == HSUCCEED);
        REQUIRE(faces.size() > 0);
        // Convert the first tracked face and serialize it to bytes.
        HyperFaceData faceData = FaceObjectToHyperFaceData(faces[0], 0);
        std::cout << faces[0].getTransMatrix() << std::endl;
        PrintHyperFaceData(faceData);
        ByteArray byteArray;
        INSPIRE_LOGD("sizeof: %lu", sizeof(byteArray));
        ret = SerializeHyperFaceData(faceData, byteArray);
        CHECK(ret == HSUCCEED);
        INSPIRE_LOGD("sizeof: %lu", sizeof(byteArray));
        // Decode the bytes back and print for comparison with the original.
        HyperFaceData decode;
        ret = DeserializeHyperFaceData(byteArray, decode);
        CHECK(ret == HSUCCEED);
        PrintHyperFaceData(decode);
    }
}

View File

@@ -0,0 +1,185 @@
//
// Created by Tunm-Air13 on 2023/9/12.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
#include "../test_helper/test_help.h"
#include "feature_hub/feature_hub.h"
using namespace inspire;
// Exercises the in-memory feature hub: register 3000 reference vectors, then
// search / delete / update ("CRUD") around a known identity, plus an optional
// search benchmark. The statement order matters: searches are validated
// against state mutated by the preceding register/delete/update calls.
TEST_CASE("test_FaceFeatureManagement", "[face_feature]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("FeatureCURD") {
DRAW_SPLIT_LINE
// Initialize
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = true;
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
REQUIRE(ret == HSUCCEED);
FEATURE_HUB->PrintFeatureMatrixInfo();
// Know the location of 'kunkun' in advance
int32_t KunkunIndex = 795;
// Prepare a face photo in advance and extract the features
auto image = cv::imread(GET_DATA("images/kun.jpg"));
CameraStream stream;
stream.SetDataFormat(BGR);
stream.SetRotationMode(ROTATION_0);
stream.SetDataBuffer(image.data, image.rows, image.cols);
ret = ctx.FaceDetectAndTrack(stream);
REQUIRE(ret == HSUCCEED);
// Face detection
ctx.FaceDetectAndTrack(stream);
const auto &faces = ctx.GetTrackingFaceList();
REQUIRE(faces.size() > 0);
// Feature extraction of "Kunkun" was carried out
Embedded feature;
ret = ctx.FaceRecognitionModule()->FaceExtract(stream, faces[0], feature);
CHECK(ret == HSUCCEED);
// Import face feature vectors in batches
String mat_path = GET_DATA("test_faceset/test_faces_A1.npy");
String tags_path = GET_DATA("test_faceset/test_faces_A1.txt");
auto result = LoadMatrixAndTags(mat_path, tags_path);
// Gets the feature matrix and label names
EmbeddedList featureMatrix = result.first;
std::vector<std::string> tagNames = result.second;
REQUIRE(featureMatrix.size() == 3000);
REQUIRE(tagNames.size() == 3000);
REQUIRE(featureMatrix[0].size() == 512);
// Register each reference vector at index i with its tag.
for (int i = 0; i < featureMatrix.size(); ++i) {
auto &feat = featureMatrix[i];
auto ret = FEATURE_HUB->RegisterFaceFeature(feat, i, tagNames[i], i);
CHECK(ret == HSUCCEED);
}
std::cout << std::endl;
REQUIRE(FEATURE_HUB->GetFaceFeatureCount() == 3000);
spdlog::trace("All 3000 Faces embedded vector are loaded");
// Search: the probe must hit the known index with the expected score.
SearchResult searchResult;
ret = FEATURE_HUB->SearchFaceFeature(feature, searchResult, 0.5f);
REQUIRE(ret == HSUCCEED);
CHECK(searchResult.index != -1);
CHECK(searchResult.index == KunkunIndex);
CHECK(searchResult.tag == "Kunkun");
CHECK(searchResult.score == Approx(0.76096).epsilon(1e-3));
spdlog::info("Find Kunkun -> Location ID: {}, Confidence: {}, Tag: {}", searchResult.index, searchResult.score, searchResult.tag.c_str());
// Save "Kunkun"'s library features and so on
Embedded KunkunFeature;
ret = FEATURE_HUB->GetFaceFeature(KunkunIndex, KunkunFeature);
REQUIRE(ret == HSUCCEED);
// The features of "Kunkun" library corresponding to those found above are deleted from the face library
ret = FEATURE_HUB->DeleteFaceFeature(searchResult.index);
CHECK(ret == HSUCCEED);
// Search again: after deletion the probe must no longer be found.
SearchResult secondSearchResult;
ret = FEATURE_HUB->SearchFaceFeature(feature, secondSearchResult, 0.5f);
REQUIRE(ret == HSUCCEED);
CHECK(secondSearchResult.index == -1);
spdlog::info("Kunkun被删除了无法找到: {}, {}", secondSearchResult.index, secondSearchResult.tag);
// Just take a random place and change the eigenvector for that place and put "Kunkun" back in there
auto newIndex = 2888;
// Updating an out-of-range slot (3001) is expected to fail first.
ret = FEATURE_HUB->UpdateFaceFeature(KunkunFeature, 3001, "Chicken", 3001);
REQUIRE(ret == HERR_SESS_REC_BLOCK_UPDATE_FAILURE);
ret = FEATURE_HUB->UpdateFaceFeature(KunkunFeature, newIndex, "Chicken", 3001);
REQUIRE(ret == HSUCCEED);
// Final search: the probe must resolve to the relocated slot and new tag.
SearchResult thirdlySearchResult;
ret = FEATURE_HUB->SearchFaceFeature(feature, thirdlySearchResult, 0.5f);
REQUIRE(ret == HSUCCEED);
CHECK(thirdlySearchResult.index != -1);
CHECK(thirdlySearchResult.index == newIndex);
CHECK(thirdlySearchResult.tag == "Chicken");
spdlog::info("Find Kunkun again -> New Location ID: {}, Confidence: {}, Tag: {}", thirdlySearchResult.index, thirdlySearchResult.score, thirdlySearchResult.tag.c_str());
}
#if ENABLE_BENCHMARK
// Times 1000 exhaustive searches against a 3000-entry feature matrix.
SECTION("FeatureSearchBenchmark") {
DRAW_SPLIT_LINE
// Initialize
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = true;
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
REQUIRE(ret == HSUCCEED);
FEATURE_HUB->PrintFeatureMatrixInfo();
// Import face feature vectors in batches
String mat_path = GET_DATA("test_faceset/test_faces_A1.npy");
String tags_path = GET_DATA("test_faceset/test_faces_A1.txt");
auto result = LoadMatrixAndTags(mat_path, tags_path);
// Gets the feature matrix and label names
EmbeddedList featureMatrix = result.first;
std::vector<std::string> tagNames = result.second;
REQUIRE(featureMatrix.size() == 3000);
REQUIRE(tagNames.size() == 3000);
REQUIRE(featureMatrix[0].size() == 512);
for (int i = 0; i < featureMatrix.size(); ++i) {
auto &feat = featureMatrix[i];
auto ret = FEATURE_HUB->RegisterFaceFeature(feat, i, tagNames[i], i);
CHECK(ret == HSUCCEED);
}
std::cout << std::endl;
REQUIRE(FEATURE_HUB->GetFaceFeatureCount() == 3000);
spdlog::trace("3000个特征向量全部载入");
// Prepare a picture of a face
auto image = cv::imread(GET_DATA("images/face_sample.png"));
CameraStream stream;
stream.SetDataFormat(BGR);
stream.SetRotationMode(ROTATION_0);
stream.SetDataBuffer(image.data, image.rows, image.cols);
ret = ctx.FaceDetectAndTrack(stream);
REQUIRE(ret == HSUCCEED);
// Face detection
ctx.FaceDetectAndTrack(stream);
const auto &faces = ctx.GetTrackingFaceList();
REQUIRE(faces.size() > 0);
// Feature extraction of "kunkun" was carried out
Embedded feature;
ret = ctx.FaceRecognitionModule()->FaceExtract(stream, faces[0], feature);
CHECK(ret == HSUCCEED);
// Register the probe at index 4000 so every search has a known hit.
auto regIndex = 4000;
ret = FEATURE_HUB->RegisterFaceFeature(feature, regIndex, "test", 4000);
REQUIRE(ret == HSUCCEED);
const auto loop = 1000;
double total = 0.0f;
spdlog::info("Start performing {} searches: ", loop);
auto out = (double) cv::getTickCount();
for (int i = 0; i < loop; ++i) {
// Prepare a face photo to look it up from the library
SearchResult searchResult;
auto timeStart = (double) cv::getTickCount();
ret = FEATURE_HUB->SearchFaceFeature(feature, searchResult, 0.5f);
double cost = ((double) cv::getTickCount() - timeStart) / cv::getTickFrequency() * 1000;
REQUIRE(ret == HSUCCEED);
CHECK(searchResult.index == regIndex);
total += cost;
}
auto end = ((double) cv::getTickCount() - out) / cv::getTickFrequency() * 1000;
spdlog::info("Execute {} times Total Cost: {}ms, Average Cost: {}ms", loop, end, total / loop);
}
#endif
}

View File

@@ -0,0 +1,112 @@
//
// Created by tunm on 2023/9/13.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
using namespace inspire;
// Pipeline-module tests: mask detection and RGB liveness.
// BUGFIXES: (1) the Catch2 tag was "[face_pipe" — the missing ']' breaks
// tag-based filtering; (2) the "FaceContextLiveness" SECTION was accidentally
// nested inside "FaceContextMaskPredict", which made Catch2 re-run the
// mask-predict setup before the liveness checks. It is now a sibling section.
TEST_CASE("test_FacePipeline", "[face_pipe]") {
    DRAW_SPLIT_LINE
    TEST_PRINT_OUTPUT(true);

    SECTION("FaceContextInit") {
        FaceContext ctx;
        CustomPipelineParameter param;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
        REQUIRE(ret == HSUCCEED);
    }

    SECTION("FaceContextMaskPredict") {
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_mask_detect = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
        REQUIRE(ret == HSUCCEED);
        {
            // Prepare a photo of your face without a mask
            auto image = cv::imread(GET_DATA("images/kun.jpg"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.maskInfo == MaskInfo::UNMASKED);
        }
        {
            // Prepare a face picture with a mask in advance
            auto image = cv::imread(GET_DATA("images/mask.png"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.maskInfo == MaskInfo::MASKED);
        }
    }

    SECTION("FaceContextLiveness") {
        FaceContext ctx;
        CustomPipelineParameter param;
        param.enable_liveness = true;
        auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
        REQUIRE(ret == HSUCCEED);
        {
            // Prepare realistic face images
            auto image = cv::imread(GET_DATA("images/face_sample.png"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.rgbLivenessInfo == RGBLivenessInfo::LIVENESS_REAL);
        }
        {
            // Prepare a fake photo that wasn't actually taken
            auto image = cv::imread(GET_DATA("images/rgb_fake.jpg"));
            CameraStream stream;
            stream.SetDataFormat(BGR);
            stream.SetRotationMode(ROTATION_0);
            stream.SetDataBuffer(image.data, image.rows, image.cols);
            ret = ctx.FaceDetectAndTrack(stream);
            REQUIRE(ret == HSUCCEED);
            // Face detection
            ctx.FaceDetectAndTrack(stream);
            auto &faces = ctx.GetTrackingFaceList();
            REQUIRE(faces.size() > 0);
            auto &face = faces[0];
            ctx.FacePipelineModule()->Process(stream, face);
            CHECK(face.faceProcess.rgbLivenessInfo == RGBLivenessInfo::LIVENESS_FAKE);
        }
    }
}

View File

@@ -0,0 +1,87 @@
//
// Created by Tunm-Air13 on 2023/9/12.
//
#include "settings/test_settings.h"
#include "inspireface/face_context.h"
#include "herror.h"
#include "common/face_data/data_tools.h"
#include "feature_hub/feature_hub.h"
using namespace inspire;
// Recognition-module tests: context init, behavior when recognition is
// disabled, and a 1:1 comparison between two photos of the same person.
TEST_CASE("test_FaceRecognition", "[face_rec]") {
DRAW_SPLIT_LINE
TEST_PRINT_OUTPUT(true);
SECTION("FaceContextInit") {
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = true;
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
REQUIRE(ret == HSUCCEED);
}
SECTION("FaceRecognitionOption") {
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = false; // Disable the face recognition function
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
REQUIRE(ret == HSUCCEED);
auto image = cv::imread(GET_DATA("images/cxk.jpg"));
CameraStream stream;
stream.SetDataFormat(BGR);
stream.SetRotationMode(ROTATION_0);
stream.SetDataBuffer(image.data, image.rows, image.cols);
ret = ctx.FaceDetectAndTrack(stream);
REQUIRE(ret == HSUCCEED);
ctx.FaceDetectAndTrack(stream);
const auto &faces = ctx.GetTrackingFaceList();
REQUIRE(faces.size() > 0);
// With recognition disabled, extraction must fail with a specific error.
Embedded feature;
ret = ctx.FaceRecognitionModule()->FaceExtract(stream, faces[0], feature);
CHECK(ret == HERR_SESS_REC_EXTRACT_FAILURE);
}
SECTION("FaceRecognition1v1") {
FaceContext ctx;
CustomPipelineParameter param;
param.enable_recognition = true;
auto ret = ctx.Configuration(DetectMode::DETECT_MODE_IMAGE, 1, param);
REQUIRE(ret == HSUCCEED);
// Two different photos of the same identity.
std::vector<std::string> list = {
GET_DATA("images/kun.jpg"),
GET_DATA("images/Kunkun.jpg"),
};
EmbeddedList vectors;
for (int i = 0; i < 2; ++i) {
auto image = cv::imread(list[i]);
REQUIRE(!image.empty());
CameraStream stream;
stream.SetDataFormat(BGR);
stream.SetRotationMode(ROTATION_0);
stream.SetDataBuffer(image.data, image.rows, image.cols);
ret = ctx.FaceDetectAndTrack(stream);
REQUIRE(ret == HSUCCEED);
ctx.FaceDetectAndTrack(stream);
const auto &faces = ctx.GetTrackingFaceList();
REQUIRE(faces.size() > 0);
// Extract via the HyperFaceData overload of FaceExtract.
Embedded feature;
HyperFaceData data = FaceObjectToHyperFaceData(faces[0]);
ret = ctx.FaceRecognitionModule()->FaceExtract(stream, data, feature);
REQUIRE(ret == HSUCCEED);
vectors.push_back(feature);
}
// Cosine similarity between the two embeddings should match the known value.
float score;
ret = FEATURE_HUB->CosineSimilarity(vectors[1], vectors[0], score);
REQUIRE(ret == HSUCCEED);
// spdlog::info("score: {}", score);
CHECK(0.7623623013 == Approx(score).epsilon(1e-2));
}
}

View File

@@ -0,0 +1,101 @@
//
// Created by Tunm-Air13 on 2024/3/22.
//
#ifndef INSPIREFACE_SIMPLE_CSV_WRITER_H
#define INSPIREFACE_SIMPLE_CSV_WRITER_H
#include <fstream>
#include <vector>
#include <string>
#include <iostream>
#include <iomanip> // Used to set the output format
// A minimal append-only CSV writer. The constructor makes sure the target
// file exists; derived classes add rows through the protected insertData().
class SimpleCSVWriter {
public:
    SimpleCSVWriter(const std::string& filepath) {
        // Probe for the file; create an empty one when it is missing so that
        // later append-mode opens always succeed.
        std::ifstream probe(filepath);
        if (!probe.good()) {
            std::ofstream created(filepath);
            if (!created.is_open()) {
                std::cerr << "Failed to create file: " << filepath << std::endl;
            }
            created.close();
        }
        // Remember where rows get appended later.
        this->filepath = filepath;
    }

    virtual ~SimpleCSVWriter() {} // Virtual destructor for safe polymorphic destruction

protected:
    std::string filepath;

    // Append one row: fields joined with ',' and terminated with '\n'.
    void insertData(const std::vector<std::string>& data) {
        std::ofstream out(this->filepath, std::ios_base::app);
        if (!out.is_open()) {
            std::cerr << "Failed to open file: " << this->filepath << std::endl;
            return;
        }
        for (std::size_t idx = 0; idx < data.size(); ++idx) {
            if (idx != 0) {
                out << ","; // separator between fields, none after the last
            }
            out << data[idx];
        }
        out << "\n";
        out.close();
    }
};
// CSV-backed recorder for benchmark results. When the backing file is empty a
// header row is written first; each run then appends one data row.
class BenchmarkRecord : public SimpleCSVWriter {
public:
    BenchmarkRecord(const std::string& filepath, const std::string &name = "Benchmark") : SimpleCSVWriter(filepath) {
        // An empty file (peek() == EOF) means no header has been written yet.
        std::ifstream probe(this->filepath);
        if (probe.peek() == std::ifstream::traits_type::eof()) {
            SimpleCSVWriter::insertData({name, "Loops", "Total Time(ms)", "Average Time(ms)"});
        }
    }

    // Append one row: case name, loop count, and timings in milliseconds,
    // written with 5 digits after the decimal point.
    void insertBenchmarkData(const std::string &caseName, int loops, double totalCost, double avgCost) {
        std::ofstream out(this->filepath, std::ios_base::app);
        if (!out.is_open()) {
            std::cerr << "Failed to open file: " << this->filepath << std::endl;
            return;
        }
        out << std::fixed << std::setprecision(5)
            << caseName << "," << loops << "," << totalCost << "," << avgCost << "\n";
        out.close();
    }
};
// CSV-backed recorder for accuracy-evaluation results. When the backing file
// is empty a header row is written first; each evaluation appends one row.
class EvaluationRecord : public SimpleCSVWriter {
public:
    EvaluationRecord(const std::string& filepath) : SimpleCSVWriter(filepath) {
        // An empty file (peek() == EOF) means no header has been written yet.
        std::ifstream probe(this->filepath);
        if (probe.peek() == std::ifstream::traits_type::eof()) {
            SimpleCSVWriter::insertData({"Resource Version", "Dataset", "Accuracy", "Best Threshold"});
        }
    }

    // Append one row: model/resource version, dataset name, accuracy and the
    // best threshold, written with 5 digits after the decimal point.
    void insertEvaluationData(const std::string &modelName, const std::string &dataset, double accuracy, double bestThreshold) {
        std::ofstream out(this->filepath, std::ios_base::app);
        if (!out.is_open()) {
            std::cerr << "Failed to open file: " << this->filepath << std::endl;
            return;
        }
        out << std::fixed << std::setprecision(5)
            << modelName << "," << dataset << "," << accuracy << "," << bestThreshold << "\n";
        out.close();
    }
};
#endif //INSPIREFACE_SIMPLE_CSV_WRITER_H

View File

@@ -0,0 +1,398 @@
//
// Created by Tunm-Air13 on 2023/9/12.
//
#ifndef HYPERFACEREPO_TEST_HELP_H
#define HYPERFACEREPO_TEST_HELP_H
#include <iostream>
#include <vector>
#include <string>
#include <fstream>
#include <cassert>
#include <sstream>
#include <iomanip>
#include <indicators/block_progress_bar.hpp>
#include <indicators/cursor_control.hpp>
#include "inspireface/c_api/inspireface.h"
#include "limonp/StringUtil.hpp"
#include "cnpy/npy.hpp"
#include "opencv2/opencv.hpp"
#include <iomanip>
#include "test_tools.h"
#include <random>
using namespace indicators;
typedef std::vector<std::pair<std::string, std::string>> FaceImageDataList;

/**
 * Parse an LFW-funneled index file into (person name, absolute image path)
 * pairs.
 *
 * Each line of @p txtPath is expected to be a relative path of the form
 * "Person_Name/image.jpg"; lines without a '/' separator are skipped.
 * The resulting path is "<dir>/<line>".
 *
 * Implementation note: the previous version pulled in limonp::Split just to
 * locate the first path separator; plain std::string::find is sufficient and
 * drops the third-party dependency.
 */
inline FaceImageDataList LoadLFWFunneledValidData(const std::string &dir, const std::string &txtPath){
    FaceImageDataList list;
    std::ifstream file(txtPath);
    std::string line;
    while (std::getline(file, line)) {
        // The person's name is everything before the first '/'.
        const auto slash = line.find('/');
        if (slash != std::string::npos) {
            std::string name = line.substr(0, slash);
            std::string fullPath = dir + "/" + line;
            list.push_back({name, fullPath});
        }
    }
    return list;
}
/**
 * Register @p importNum faces from @p data into the feature hub through the
 * C API, cycling over the list when importNum exceeds its size. A console
 * progress bar is rendered while importing.
 *
 * @return true when every image was loaded, tracked, extracted and inserted;
 *         false (with a message on stderr) on the first failure.
 *
 * BUGFIX: every error path previously returned without releasing the image
 * stream, and a failed HFFeatureHubInsertFeature also leaked the tag buffer;
 * the stream is now released on each exit and the tag uses an RAII buffer.
 */
inline bool ImportLFWFunneledValidData(HFSession handle, FaceImageDataList& data, size_t importNum) {
    auto dataSize = data.size();
    std::string title = "Import " + std::to_string(importNum) + " face data...";
    // Hide cursor
    show_console_cursor(false);
    BlockProgressBar bar{
            option::BarWidth{60},
            option::Start{"["},
            option::End{"]"},
            option::PostfixText{title},
            option::ForegroundColor{Color::white} ,
            option::FontStyles{std::vector<FontStyle>{FontStyle::bold}}
    };
    auto progress = 0.0f;
    for (size_t i = 0; i < importNum; ++i) {
        bar.set_progress(progress);
        size_t index = i % dataSize;
        // Data processing
        auto item = data[index];
        cv::Mat image = cv::imread(item.second);
        HFImageData imageData = {0};
        imageData.data = image.data;
        imageData.height = image.rows;
        imageData.width = image.cols;
        imageData.format = HF_STREAM_BGR;
        imageData.rotation = HF_CAMERA_ROTATION_0;
        HFImageStream imgHandle;
        auto ret = HFCreateImageStream(&imageData, &imgHandle);
        if (ret != HSUCCEED || image.empty()) {
            std::cerr << "Error image: " << std::to_string(ret) << " , " << item.second << std::endl;
            if (ret == HSUCCEED) {
                // Stream was created but the image is empty — release it.
                HFReleaseImageStream(imgHandle);
            }
            return false;
        }
        // Face tracked
        HFMultipleFaceData multipleFaceData = {0};
        ret = HFExecuteFaceTrack(handle, imgHandle, &multipleFaceData);
        if (ret != HSUCCEED) {
            std::cerr << "Error Track: " << std::to_string(ret) << " , " << item.second << std::endl;
            HFReleaseImageStream(imgHandle);
            return false;
        }
        if (multipleFaceData.detectedNum == 0) {
            std::cerr << "Not Detected face: " << item.second << std::endl;
            HFReleaseImageStream(imgHandle);
            return false;
        }
        // Extract face feature
        HFFaceFeature feature = {0};
        ret = HFFaceFeatureExtract(handle, imgHandle, multipleFaceData.tokens[0], &feature);
        if (ret != HSUCCEED) {
            std::cerr << "Error extract: " << std::to_string(ret) << " , " << item.second << std::endl;
            HFReleaseImageStream(imgHandle);
            return false;
        }
        // The C API takes a mutable char*; copy the tag into an RAII buffer.
        std::vector<char> tagBuffer(item.first.begin(), item.first.end());
        tagBuffer.push_back('\0');
        HFFaceFeatureIdentity identity = {0};
        identity.customId = i;
        identity.tag = tagBuffer.data();
        identity.feature = &feature;
        ret = HFFeatureHubInsertFeature(identity);
        if (ret != HSUCCEED) {
            std::cerr << "Error insert feature: " << std::to_string(ret) << " , " << item.second << std::endl;
            HFReleaseImageStream(imgHandle);
            return false;
        }
        HFReleaseImageStream(imgHandle);
        // Update progress
        progress = 100.0f * (float)(i + 1) / importNum;
    }
    bar.set_progress(100.0f);
    // Show cursor
    show_console_cursor(true);
    std::cout << "\033[0m\n"; // ANSI resets the color code
    return true;
}
/**
 * Load a face-feature matrix from a .npy file together with its per-row tag
 * names from a plain-text file (one tag per line).
 * The numpy array is expected to be (N, 512) float64; values are narrowed to
 * float on load. Tags are returned in file order, matching the matrix rows.
 */
inline std::pair<std::vector<std::vector<float>>, std::vector<std::string>> LoadMatrixAndTags(
        const std::string& matrixFileName, const std::string& tagsFileName) {
    std::vector<std::vector<float>> featureMatrix;
    std::vector<std::string> tagNames;
    // Decode the raw numpy payload into a flat double buffer.
    std::vector<unsigned long> shape {};
    bool fortran_order;
    std::vector<double> raw;
    npy::LoadArrayFromNumpy(matrixFileName, shape, fortran_order, raw);
    const unsigned long rows = shape[0];
    const unsigned long cols = shape[1];
    assert(shape[1] == 512);
    // Slice the flat buffer row by row, narrowing double -> float.
    featureMatrix.reserve(rows);
    for (unsigned long r = 0; r < rows; ++r) {
        std::vector<float> rowData(cols);
        for (unsigned long c = 0; c < cols; ++c) {
            rowData[c] = (float)(raw[r * cols + c]);
        }
        featureMatrix.push_back(std::move(rowData));
    }
    // One tag per line, in the same order as the matrix rows.
    std::ifstream txtFile(tagsFileName);
    if (txtFile.is_open()) {
        std::string line;
        while (std::getline(txtFile, line)) {
            tagNames.push_back(line);
        }
        txtFile.close();
    } else {
        std::cerr << "Unable to open the text file." << std::endl;
    }
    return std::make_pair(featureMatrix, tagNames);
}
inline double CalculateOverlap(const HFaceRect& box1, const HFaceRect& box2) {
// Calculate the coordinates of the intersection rectangle
HInt32 x_overlap = std::max(0, std::min(box1.x + box1.width, box2.x + box2.width) - std::max(box1.x, box2.x));
HInt32 y_overlap = std::max(0, std::min(box1.y + box1.height, box2.y + box2.height) - std::max(box1.y, box2.y));
// Calculate the area of the intersection
HInt32 overlap_area = x_overlap * y_overlap;
// Calculate the area of each rectangle
HInt32 box1_area = box1.width * box1.height;
HInt32 box2_area = box2.width * box2.height;
// Calculate the total area
HInt32 total_area = box1_area + box2_area - overlap_area;
// Calculate the overlap ratio
double overlap_ratio = total_area > 0 ? static_cast<double>(overlap_area) / total_area : 0;
return overlap_ratio;
}
// Produce "frame-NNNN.jpg" file names for the inclusive index range
// [start, end], zero-padded to four digits.
// NOTE(review): templateStr is currently ignored and the "frame-" prefix is
// hard-coded; confirm against callers before wiring the template in.
inline std::vector<std::string> generateFilenames(const std::string& templateStr, int start, int end) {
    std::vector<std::string> names;
    for (int idx = start; idx <= end; ++idx) {
        std::ostringstream formatted;
        formatted << "frame-" << std::setw(4) << std::setfill('0') << idx << ".jpg";
        names.push_back(formatted.str());
    }
    return names;
}
// Detect all faces in two images, extract every face's embedding, and write
// the highest pairwise comparison score into `mostSimilar`.
// Returns false (leaving mostSimilar at -1 unless partially updated) when an
// image cannot be read, tracking/extraction fails, or either image contains
// no faces.
inline bool FindMostSimilarScoreFromTwoPic(HFSession handle, const std::string& img1, const std::string& img2, float& mostSimilar){
mostSimilar = -1.0f;
// features[k] holds one embedding per face detected in image k.
std::vector<std::vector<std::vector<float>>> features(2);
std::vector<std::string> images = {img1, img2};
for (int i = 0; i < 2; ++i) {
HFImageStream img;
// auto ret = ReadImageToImageStream(images[i].c_str(), img);
auto cvMat = cv::imread(images[i]);
auto ret = CVImageToImageStream(cvMat, img);
if (ret != 0) {
std::cerr << "Image is not found: " << ret << std::endl;
return false;
}
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(handle, img, &multipleFaceData);
if (ret != 0) {
std::cerr << "Error track: " << ret << std::endl;
HFReleaseImageStream(img);
return false;
}
// Copy each detected face's embedding into a caller-owned buffer.
HInt32 featureNum;
HFGetFeatureLength(&featureNum);
for (int j = 0; j < multipleFaceData.detectedNum; ++j) {
std::vector<float> feature(featureNum, 0.0f);
ret = HFFaceFeatureExtractCpy(handle, img, multipleFaceData.tokens[j], feature.data());
if (ret != 0) {
std::cerr << "Error extract: " << ret << std::endl;
HFReleaseImageStream(img);
return false;
}
features[i].push_back(feature);
}
HFReleaseImageStream(img);
}
// Both images must contribute at least one face.
if (features[0].empty() || features[1].empty()) {
// std::cerr << "Not detected " << std::endl;
return false;
}
// Exhaustive cross-image comparison; keep the best score.
auto &features1 = features[0];
auto &features2 = features[1];
for (auto &feat1: features1) {
for (auto &feat2: features2) {
float comp;
HFFaceFeature faceFeature1 = {0};
faceFeature1.size = feat1.size();
faceFeature1.data = feat1.data();
HFFaceFeature faceFeature2 = {0};
faceFeature2.size = feat2.size();
faceFeature2.data = feat2.data();
HFFaceComparison(faceFeature1, faceFeature2, &comp);
if (comp > mostSimilar) {
mostSimilar = comp;
}
}
}
return true;
}
// Read an LFW "pairs" file: the first (header) line is skipped, and every
// other non-empty line is split on whitespace into a vector of tokens.
// Returns an empty list (with a message on stderr) if the file cannot be
// opened.
inline std::vector<std::vector<std::string>> ReadPairs(const std::string& pairs_filename) {
    std::vector<std::vector<std::string>> pairs;
    std::ifstream input(pairs_filename);
    if (!input.is_open()) {
        std::cerr << "Unable to open file: " << pairs_filename << std::endl;
        return pairs; // If the file cannot be opened, an empty list is returned
    }
    std::string line;
    std::getline(input, line); // Skip the first line
    while (std::getline(input, line)) {
        // Whitespace-tokenize the line via stream extraction.
        std::istringstream tokenizer(line);
        std::vector<std::string> tokens{std::istream_iterator<std::string>{tokenizer},
                                        std::istream_iterator<std::string>{}};
        if (!tokens.empty()) {
            pairs.push_back(std::move(tokens));
        }
    }
    return pairs;
}
// Left-pad the decimal representation of `number` with '0' characters up to
// `width`; numbers already wider than `width` are returned unchanged.
inline std::string zfill(int number, int width) {
    std::ostringstream padded;
    padded << std::setfill('0') << std::setw(width) << number;
    return padded.str();
}
// Sweep candidate thresholds 0.00 .. 0.99 in 0.01 steps and return the
// {threshold, accuracy} pair that best classifies the given similarity
// scores. `labels` holds 1 for "same" pairs and 0 otherwise; a prediction is
// positive when similarity > threshold. Ties keep the lowest threshold.
inline std::pair<float, float> FindBestThreshold(const std::vector<float>& similarities, const std::vector<int>& labels) {
    float bestThreshold = 0.0f;
    float bestAccuracy = 0.0f;
    for (float threshold = 0.0f; threshold < 1.0f; threshold += 0.01f) {
        // Count predictions that agree with the ground truth at this cut-off.
        std::size_t correct = 0;
        for (std::size_t i = 0; i < labels.size(); ++i) {
            const int predicted = similarities[i] > threshold ? 1 : 0;
            if (predicted == labels[i]) {
                ++correct;
            }
        }
        const float accuracy = static_cast<float>(correct) / static_cast<float>(labels.size());
        if (accuracy > bestAccuracy) {
            bestAccuracy = accuracy;
            bestThreshold = threshold;
        }
    }
    return {bestThreshold, bestAccuracy};
}
/**
 * Generate a random feature vector of the given length, with each component
 * drawn uniformly from [-1, 1] and the whole vector L2-normalized
 * (unit norm, unless length is 0).
 */
inline std::vector<float> GenerateRandomFeature(size_t length) {
    std::random_device seed;
    std::mt19937 engine(seed());
    std::uniform_real_distribution<> sampler(-1.0, 1.0);
    std::vector<float> feature(length);
    float squaredSum = 0.0f;
    for (size_t i = 0; i < length; ++i) {
        feature[i] = sampler(engine);
        squaredSum += feature[i] * feature[i];
    }
    const float magnitude = std::sqrt(squaredSum);
    if (magnitude > 0) {
        for (auto &component : feature) {
            component /= magnitude;
        }
    }
    return feature;
}
/**
 * Produce a perturbed copy of `original`: add Gaussian noise (mean 0,
 * sigma 0.02) to each component, then L2-normalize the result — simulates
 * a second embedding of the same identity.
 */
inline std::vector<float> SimulateSimilarVector(const std::vector<float>& original) {
    std::random_device seed;
    std::mt19937 engine(seed());
    std::normal_distribution<float> noise(0.0, 0.02);
    std::vector<float> perturbed;
    perturbed.reserve(original.size());
    float squaredSum = 0.0f;
    // Perturb and accumulate the squared norm in a single pass.
    for (float component : original) {
        const float noisy = component + noise(engine);
        squaredSum += noisy * noisy;
        perturbed.push_back(noisy);
    }
    const float magnitude = std::sqrt(squaredSum);
    if (magnitude > 0) {
        for (auto &value : perturbed) {
            value /= magnitude;
        }
    }
    return perturbed;
}
/** Draw n uniformly distributed integers in the inclusive range [min, max]. */
inline std::vector<int> GenerateRandomNumbers(int n, int min, int max) {
    std::random_device seed;
    std::mt19937 engine(seed());
    std::uniform_int_distribution<> sampler(min, max);
    std::vector<int> result;
    result.reserve(n);
    while (static_cast<int>(result.size()) < n) {
        result.push_back(sampler(engine));
    }
    return result;
}
#endif //HYPERFACEREPO_TEST_HELP_H

View File

@@ -0,0 +1,79 @@
//
// Created by tunm on 2023/10/12.
//
#pragma
#ifndef HYPERFACEREPO_TEST_TOOLS_H
#define HYPERFACEREPO_TEST_TOOLS_H
#include "opencv2/opencv.hpp"
#include "inspireface/c_api/inspireface.h"
#include <fstream>
#include <cstdint> // For uint8_t
// Decode an image file from disk and wrap it in an HFImageStream.
// NOTE(review): the stream descriptor points at the pixel buffer of a
// function-local cv::Mat, which is destroyed on return — presumably the
// reason for the original "Bad function" remark. Confirm that
// HFCreateImageStream copies the pixels before relying on the handle.
// Returns -1 when the image cannot be decoded, otherwise the API result.
inline HResult ReadImageToImageStream(const char *path, HFImageStream &handle, HFImageFormat format = HF_STREAM_BGR,
                                      HFRotation rot = HF_CAMERA_ROTATION_0) {
    cv::Mat decoded = cv::imread(path);
    if (decoded.empty()) {
        return -1;  // unreadable or unsupported image file
    }
    HFImageData descriptor = {0};
    descriptor.width = decoded.cols;
    descriptor.height = decoded.rows;
    descriptor.data = decoded.data;
    descriptor.format = format;
    descriptor.rotation = rot;
    return HFCreateImageStream(&descriptor, &handle);
}
// Wrap an already-decoded cv::Mat in an HFImageStream. The caller must keep
// `image` alive for as long as the stream references its pixel buffer.
// Returns -1 for an empty matrix, otherwise the HFCreateImageStream result.
inline HResult CVImageToImageStream(const cv::Mat& image, HFImageStream &handle, HFImageFormat format = HF_STREAM_BGR,
                                    HFRotation rot = HF_CAMERA_ROTATION_0) {
    if (image.empty()) {
        return -1;
    }
    HFImageData descriptor = {0};
    descriptor.width = image.cols;
    descriptor.height = image.rows;
    descriptor.data = image.data;
    descriptor.format = format;
    descriptor.rotation = rot;
    return HFCreateImageStream(&descriptor, &handle);
}
/**
 * Read a raw NV21 frame (Y plane followed by interleaved VU half-plane,
 * i.e. width * height * 3/2 bytes) from disk into a heap buffer.
 * @param filePath Path to the raw NV21 file.
 * @param width Frame width in pixels.
 * @param height Frame height in pixels.
 * @return Newly allocated buffer the caller must release with delete[],
 *         or nullptr when the file cannot be opened or is too short.
 */
inline uint8_t *ReadNV21Data(const char *filePath, int width, int height) {
    const int expectedBytes = width * height * 3 / 2;  // NV21 frame size
    std::ifstream stream(filePath, std::ios::binary);
    if (!stream.is_open()) {
        std::cerr << "Unable to open the file " << filePath << std::endl;
        return nullptr;
    }
    auto *buffer = new uint8_t[expectedBytes];
    // read() fails the stream if the file holds fewer than expectedBytes.
    if (!stream.read(reinterpret_cast<char *>(buffer), expectedBytes)) {
        std::cerr << "Read error or incomplete file" << std::endl;
        delete[] buffer;
        return nullptr;
    }
    // The stream closes itself on scope exit; hand ownership to the caller.
    return buffer;
}
#endif //HYPERFACEREPO_TEST_TOOLS_H