This commit is contained in:
tunm
2025-05-22 16:07:26 +08:00
parent efb5639ec6
commit 7e25e4e482
168 changed files with 6434 additions and 2527 deletions

View File

@@ -2,9 +2,10 @@
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <stdlib.h>
int main() {
char *n = new char[1024];
char *n = malloc(1024);
return 0;
}

View File

@@ -0,0 +1,100 @@
#include <iostream>
#include <vector>
#include <string>
#include <memory>
#include <inspirecv/inspirecv.h>
#include <inspireface/include/inspireface/session.h>
#include <inspireface/include/inspireface/launch.h>
#include <inspireface/middleware/thread/resource_pool.h>
#include <inspireface/include/inspireface/spend_timer.h>
#include <thread>
int main(int argc, char** argv) {
if (argc != 5) {
std::cerr << "Usage: " << argv[0] << " <model_path> <image_path> <loop_count> <thread_num>" << std::endl;
return -1;
}
std::string model_path = argv[1];
std::string image_path = argv[2];
int loop = std::stoi(argv[3]);
int thread_num = std::stoi(argv[4]);
if (thread_num > 10) {
std::cerr << "Error: thread_num cannot be greater than 10" << std::endl;
return -1;
}
if (loop < 1000) {
std::cerr << "Error: loop count must be at least 1000" << std::endl;
return -1;
}
INSPIREFACE_CONTEXT->Load(model_path);
inspirecv::Image image = inspirecv::Image::Create(image_path);
inspirecv::FrameProcess process =
inspirecv::FrameProcess::Create(image.Data(), image.Height(), image.Width(), inspirecv::BGR, inspirecv::ROTATION_0);
inspire::parallel::ResourcePool<inspire::Session> sessionPool(thread_num, [](inspire::Session& session) {
});
for (int i = 0; i < thread_num; ++i) {
inspire::CustomPipelineParameter param;
param.enable_recognition = true;
param.enable_liveness = true;
param.enable_mask_detect = true;
param.enable_face_attribute = true;
param.enable_face_quality = true;
inspire::Session session = inspire::Session::Create(inspire::DetectModuleMode::DETECT_MODE_ALWAYS_DETECT, 1, param);
sessionPool.AddResource(std::move(session));
}
std::vector<std::thread> threads;
int tasksPerThread = loop / thread_num;
int remainingTasks = loop % thread_num;
// Run the task in parallel
for (int i = 0; i < thread_num; ++i) {
int taskCount = tasksPerThread + (i < remainingTasks ? 1 : 0);
threads.emplace_back([&, taskCount]() {
for (int j = 0; j < taskCount; ++j) {
auto sessionGuard = sessionPool.AcquireResource();
std::vector<inspire::FaceTrackWrap> results;
int32_t ret;
ret = sessionGuard->FaceDetectAndTrack(process, results);
if (ret != 0) {
std::cerr << "FaceDetectAndTrack failed" << std::endl;
break;
}
if (results.size() == 0) {
std::cerr << "Not found face" << std::endl;
break;
}
}
});
}
// Print basic information before starting
std::cout << "\n=== Configuration Information ===" << std::endl;
std::cout << "Model Path: " << model_path << std::endl;
std::cout << "Image Path: " << image_path << std::endl;
std::cout << "Total Loop Count: " << loop << std::endl;
std::cout << "Number of Threads: " << thread_num << std::endl;
std::cout << "Tasks per Thread: " << tasksPerThread << std::endl;
std::cout << "Remaining Tasks: " << remainingTasks << std::endl;
std::cout << "==============================\n" << std::endl;
inspire::SpendTimer timer("Number of threads: " + std::to_string(thread_num) + ", Number of tasks: " + std::to_string(loop));
timer.Start();
for (auto& thread : threads) {
thread.join();
}
timer.Stop();
std::cout << timer << std::endl;
// Convert microseconds to milliseconds and print
double milliseconds = timer.Total() / 1000.0;
std::cout << "Total execution time: " << milliseconds << " ms" << std::endl;
return 0;
}

View File

@@ -2,113 +2,134 @@
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include <vector>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <inspireface.h>
#define NUM_IMAGES 2
int main(int argc, char* argv[]) {
// Check whether the number of parameters is correct
HResult ret;
const char* packPath;
const char* imgPath1;
const char* imgPath2;
HOption option;
HFSession session;
HFFaceFeature features[NUM_IMAGES];
const char* imgPaths[NUM_IMAGES];
int i;
HFloat similarity;
HFloat recommended_cosine_threshold;
HFloat percentage;
/* Check whether the number of parameters is correct */
if (argc != 4) {
HFLogPrint(HF_LOG_ERROR, "Usage: %s <pack_path> <img1_path> <img2_path>", argv[0]);
return 1;
}
auto packPath = argv[1];
auto imgPath1 = argv[2];
auto imgPath2 = argv[3];
packPath = argv[1];
imgPath1 = argv[2];
imgPath2 = argv[3];
/* Initialize features array to NULL */
memset(features, 0, sizeof(features));
/* Allocate memory for feature vectors */
for (i = 0; i < NUM_IMAGES; i++) {
ret = HFCreateFaceFeature(&features[i]);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Create face feature error: %d", ret);
goto cleanup;
}
}
/* Set the image path array */
imgPaths[0] = imgPath1;
imgPaths[1] = imgPath2;
HFLogPrint(HF_LOG_INFO, "Pack file Path: %s", packPath);
HFLogPrint(HF_LOG_INFO, "Source file Path 1: %s", imgPath1);
HFLogPrint(HF_LOG_INFO, "Source file Path 2: %s", imgPath2);
HResult ret;
// The resource file must be loaded before it can be used
/* The resource file must be loaded before it can be used */
ret = HFLaunchInspireFace(packPath);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Load Resource error: %d", ret);
return ret;
goto cleanup;
}
// Create a session for face recognition
HOption option = HF_ENABLE_FACE_RECOGNITION;
HFSession session;
/* Create a session for face recognition */
option = HF_ENABLE_FACE_RECOGNITION;
ret = HFCreateInspireFaceSessionOptional(option, HF_DETECT_MODE_ALWAYS_DETECT, 1, -1, -1, &session);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Create session error: %d", ret);
return ret;
goto cleanup;
}
std::vector<char*> twoImg = {imgPath1, imgPath2};
std::vector<std::vector<float>> vec(2, std::vector<float>(512));
for (int i = 0; i < twoImg.size(); ++i) {
/* Process two images */
for (i = 0; i < NUM_IMAGES; i++) {
HFImageBitmap imageBitmap = {0};
ret = HFCreateImageBitmapFromFilePath(twoImg[i], 3, &imageBitmap);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Create image bitmap error: %d", ret);
return ret;
}
// Prepare image data for processing
HFImageStream stream;
ret = HFCreateImageStreamFromImageBitmap(imageBitmap, HF_CAMERA_ROTATION_0, &stream); // Create an image stream for processing
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Create stream error: %d", ret);
return ret;
}
// Execute face tracking on the image
HFMultipleFaceData multipleFaceData = {0};
ret = HFExecuteFaceTrack(session, stream, &multipleFaceData); // Track faces in the image
ret = HFCreateImageBitmapFromFilePath(imgPaths[i], 3, &imageBitmap);
if (ret != HSUCCEED) {
HFReleaseImageBitmap(imageBitmap);
HFLogPrint(HF_LOG_ERROR, "Create image bitmap error: %d", ret);
goto cleanup;
}
ret = HFCreateImageStreamFromImageBitmap(imageBitmap, HF_CAMERA_ROTATION_0, &stream);
if (ret != HSUCCEED) {
HFReleaseImageStream(stream);
HFReleaseImageBitmap(imageBitmap);
HFLogPrint(HF_LOG_ERROR, "Create stream error: %d", ret);
goto cleanup;
}
ret = HFExecuteFaceTrack(session, stream, &multipleFaceData);
if (ret != HSUCCEED) {
HFReleaseImageStream(stream);
HFReleaseImageBitmap(imageBitmap);
HFLogPrint(HF_LOG_ERROR, "Run face track error: %d", ret);
return ret;
}
if (multipleFaceData.detectedNum == 0) { // Check if any faces were detected
HFLogPrint(HF_LOG_ERROR, "No face was detected: %s", twoImg[i]);
return ret;
goto cleanup;
}
// Extract facial features from the first detected face, an interface that uses copy features in a comparison scenario
ret = HFFaceFeatureExtractCpy(session, stream, multipleFaceData.tokens[0], vec[i].data()); // Extract features
if (multipleFaceData.detectedNum == 0) {
HFReleaseImageStream(stream);
HFReleaseImageBitmap(imageBitmap);
HFLogPrint(HF_LOG_ERROR, "No face was detected: %s", imgPaths[i]);
goto cleanup;
}
ret = HFFaceFeatureExtractTo(session, stream, multipleFaceData.tokens[0], features[i]);
if (ret != HSUCCEED) {
HFReleaseImageStream(stream);
HFReleaseImageBitmap(imageBitmap);
HFLogPrint(HF_LOG_ERROR, "Extract feature error: %d", ret);
return ret;
goto cleanup;
}
ret = HFReleaseImageStream(stream);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Release image stream error: %d", ret);
}
ret = HFReleaseImageBitmap(imageBitmap);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Release image bitmap error: %d", ret);
return ret;
}
HFReleaseImageStream(stream);
HFReleaseImageBitmap(imageBitmap);
}
// Make feature1
HFFaceFeature feature1 = {0};
feature1.data = vec[0].data();
feature1.size = vec[0].size();
HFFaceFeature feature1 = features[0];
HFFaceFeature feature2 = features[1];
// Make feature2
HFFaceFeature feature2 = {0};
feature2.data = vec[1].data();
feature2.size = vec[1].size();
// Run comparison
HFloat similarity;
/* Run comparison */
ret = HFFaceComparison(feature1, feature2, &similarity);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Feature comparison error: %d", ret);
return ret;
goto cleanup;
}
HFloat recommended_cosine_threshold;
ret = HFGetRecommendedCosineThreshold(&recommended_cosine_threshold);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Get recommended cosine threshold error: %d", ret);
return ret;
goto cleanup;
}
if (similarity > recommended_cosine_threshold) {
@@ -118,20 +139,28 @@ int main(int argc, char* argv[]) {
}
HFLogPrint(HF_LOG_INFO, "Similarity score: %.3f", similarity);
// Convert cosine similarity to percentage similarity.
// Note: conversion parameters are not optimal and should be adjusted based on your specific use case.
HFloat percentage;
ret = HFCosineSimilarityConvertToPercentage(similarity, &percentage);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Convert similarity to percentage error: %d", ret);
return ret;
goto cleanup;
}
HFLogPrint(HF_LOG_INFO, "Percentage similarity: %f", percentage);
// The memory must be freed at the end of the program
/* Clean up resources */
ret = HFReleaseInspireFaceSession(session);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Release session error: %d", ret);
return ret;
}
cleanup:
/* Release the feature vector memory */
for (i = 0; i < NUM_IMAGES; i++) {
if (features[i].data != NULL) { // Only release features that were successfully created
HFReleaseFaceFeature(&features[i]);
}
}
HFDeBugShowResourceStatistics();
return ret;
}

View File

@@ -0,0 +1,178 @@
#include <inspireface.h>
#include <stdio.h>
/**
 * FeatureHub CRUD walkthrough: enable a persistent feature hub, insert one
 * face feature, search for it with a second photo of the same person, remove
 * it, and verify that a repeated search no longer finds it.
 */
int main() {
    HResult ret;
    /* Writable buffer so the configuration's db-path field accepts it either way. */
    char db_path[] = "case_crud.db";
    // The resource file must be loaded before it can be used
    ret = HFLaunchInspireFace("test_res/pack/Pikachu");
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Load Resource error: %d", ret);
        return ret;
    }
    // Start from a clean database, but only call remove() when the file
    // actually exists. BUG FIX: remove() was previously called
    // unconditionally, so the normal first run (no leftover db) failed and
    // aborted the program — and the error log printed a stale `ret` value.
    FILE* old_db = fopen(db_path, "r");
    if (old_db != NULL) {
        fclose(old_db);
        if (remove(db_path) != 0) {
            HFLogPrint(HF_LOG_ERROR, "Failed to remove old db file: %s", db_path);
            return -1;
        }
    }
    HFFeatureHubConfiguration configuration;
    configuration.primaryKeyMode = HF_PK_AUTO_INCREMENT;   // hub assigns ids
    configuration.enablePersistence = 1;                   // back the hub with an on-disk db
    configuration.persistenceDbPath = db_path;
    configuration.searchMode = HF_SEARCH_MODE_EXHAUSTIVE;
    configuration.searchThreshold = 0.48f;                 // cosine threshold for a search hit
    ret = HFFeatureHubDataEnable(configuration);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Enable feature hub error: %d", ret);
        return ret;
    }
    // Create a session
    HFSession session;
    ret = HFCreateInspireFaceSessionOptional(HF_ENABLE_FACE_RECOGNITION, HF_DETECT_MODE_ALWAYS_DETECT, 1, 320, -1, &session);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Create session error: %d", ret);
        return ret;
    }
    // Prepare an image for insertion into the hub
    HFImageBitmap image;
    ret = HFCreateImageBitmapFromFilePath("test_res/data/bulk/kun.jpg", 3, &image);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Create image bitmap error: %d", ret);
        return ret;
    }
    // Create an image stream
    HFImageStream imageHandle;
    ret = HFCreateImageStreamFromImageBitmap(image, HF_CAMERA_ROTATION_0, &imageHandle);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Create image stream error: %d", ret);
        return ret;
    }
    // Detect and track
    HFMultipleFaceData multipleFaceData;
    ret = HFExecuteFaceTrack(session, imageHandle, &multipleFaceData);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Execute face track error: %d", ret);
        return ret;
    }
    // BUG FIX: guard before touching tokens[0] — previously the count was
    // only logged and the first token was used even when no face was found.
    if (multipleFaceData.detectedNum == 0) {
        HFLogPrint(HF_LOG_ERROR, "No face detected in the enroll image");
        return -1;
    }
    HFLogPrint(HF_LOG_INFO, "Face detected: %d", multipleFaceData.detectedNum);
    HFFaceFeature feature;
    ret = HFCreateFaceFeature(&feature);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Create face feature error: %d", ret);
        return ret;
    }
    // Extract the enroll feature into the pre-allocated buffer
    ret = HFFaceFeatureExtractCpy(session, imageHandle, multipleFaceData.tokens[0], feature.data);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Extract feature error: %d", ret);
        return ret;
    }
    // Insert face feature into the hub
    HFFaceFeatureIdentity featureIdentity;
    featureIdentity.feature = &feature;
    featureIdentity.id = -1;  // -1: let the hub allocate an id (auto-increment mode)
    HFaceId result_id;
    ret = HFFeatureHubInsertFeature(featureIdentity, &result_id);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Insert feature error: %d", ret);
        return ret;
    }
    // Prepare a photo of the same person for the query
    HFImageBitmap query_image;
    ret = HFCreateImageBitmapFromFilePath("test_res/data/bulk/jntm.jpg", 3, &query_image);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Create image bitmap error: %d", ret);
        return ret;
    }
    // Create an image stream
    HFImageStream query_imageHandle;
    ret = HFCreateImageStreamFromImageBitmap(query_image, HF_CAMERA_ROTATION_0, &query_imageHandle);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Create image stream error: %d", ret);
        return ret;
    }
    // Detect and track
    ret = HFExecuteFaceTrack(session, query_imageHandle, &multipleFaceData);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Execute face track error: %d", ret);
        return ret;
    }
    // BUG FIX: same guard for the query image before using tokens[0].
    if (multipleFaceData.detectedNum == 0) {
        HFLogPrint(HF_LOG_ERROR, "No face detected in the query image");
        return -1;
    }
    HFLogPrint(HF_LOG_INFO, "Face detected: %d", multipleFaceData.detectedNum);
    HFFaceFeature query_feature;
    ret = HFCreateFaceFeature(&query_feature);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Create face feature error: %d", ret);
        return ret;
    }
    // Extract face feature
    ret = HFFaceFeatureExtractTo(session, query_imageHandle, multipleFaceData.tokens[0], query_feature);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Extract feature error: %d", ret);
        return ret;
    }
    // Search face feature
    HFFaceFeatureIdentity query_featureIdentity;
    query_featureIdentity.feature = &query_feature;
    query_featureIdentity.id = -1;
    HFloat confidence;
    ret = HFFeatureHubFaceSearch(query_feature, &confidence, &query_featureIdentity);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Search feature error: %d", ret);
        return ret;
    }
    HFLogPrint(HF_LOG_INFO, "Search feature result: %d", query_featureIdentity.id);
    HFLogPrint(HF_LOG_INFO, "Search feature confidence: %f", confidence);
    // Remove face feature
    ret = HFFeatureHubFaceRemove(result_id);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Remove feature error: %d", ret);
        return ret;
    }
    HFLogPrint(HF_LOG_INFO, "Remove feature result: %d", result_id);
    // Query again: after removal the search must not find the identity
    ret = HFFeatureHubFaceSearch(query_feature, &confidence, &query_featureIdentity);
    if (ret != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Search feature error: %d", ret);
        return ret;
    }
    HFLogPrint(HF_LOG_INFO, "Query again, search feature result: %d", query_featureIdentity.id);
    if (query_featureIdentity.id != -1) {
        HFLogPrint(HF_LOG_INFO, "Remove feature failed");
    }
    // Release resources
    HFReleaseFaceFeature(&feature);
    HFReleaseFaceFeature(&query_feature);
    HFReleaseImageStream(imageHandle);
    HFReleaseImageStream(query_imageHandle);
    HFReleaseImageBitmap(image);
    HFReleaseImageBitmap(query_image);
    HFReleaseInspireFaceSession(session);
    HFDeBugShowResourceStatistics();
    return 0;
}

View File

@@ -2,30 +2,53 @@
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <inspireface.h>
int main(int argc, char* argv[]) {
// Check whether the number of parameters is correct
HResult ret;
const char* packPath;
const char* sourcePath;
int rotation;
HFRotation rotation_enum;
HOption option;
HFDetectMode detMode;
HInt32 maxDetectNum;
HInt32 detectPixelLevel;
HFSession session;
HFImageBitmap image;
HFImageStream imageHandle;
HFMultipleFaceData multipleFaceData;
int faceNum;
HFImageBitmap drawImage;
HFImageBitmapData data;
int index;
HFFaceMaskConfidence maskConfidence;
HFFaceQualityConfidence qualityConfidence;
HOption pipelineOption;
HFFaceDetectPixelList pixelLevels;
/* Check whether the number of parameters is correct */
if (argc < 3 || argc > 4) {
HFLogPrint(HF_LOG_ERROR, "Usage: %s <pack_path> <source_path> [rotation]", argv[0]);
return 1;
}
auto packPath = argv[1];
auto sourcePath = argv[2];
int rotation = 0;
packPath = argv[1];
sourcePath = argv[2];
rotation = 0;
// If rotation is provided, check and set the value
/* If rotation is provided, check and set the value */
if (argc == 4) {
rotation = std::atoi(argv[3]);
rotation = atoi(argv[3]);
if (rotation != 0 && rotation != 90 && rotation != 180 && rotation != 270) {
HFLogPrint(HF_LOG_ERROR, "Invalid rotation value. Allowed values are 0, 90, 180, 270.");
return 1;
}
}
HFRotation rotation_enum;
// Set rotation based on input parameter
/* Set rotation based on input parameter */
switch (rotation) {
case 90:
rotation_enum = HF_CAMERA_ROTATION_90;
@@ -46,28 +69,37 @@ int main(int argc, char* argv[]) {
HFLogPrint(HF_LOG_INFO, "Source file Path: %s", sourcePath);
HFLogPrint(HF_LOG_INFO, "Rotation: %d", rotation);
HFSetLogLevel(HF_LOG_INFO);
HFSetLogLevel(HF_LOG_DEBUG);
HResult ret;
// The resource file must be loaded before it can be used
/* The resource file must be loaded before it can be used */
ret = HFLaunchInspireFace(packPath);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Load Resource error: %d", ret);
return ret;
}
// Enable the functions in the pipeline: mask detection, live detection, and face quality
// detection
HOption option = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_LIVENESS | HF_ENABLE_DETECT_MODE_LANDMARK;
// Non-video or frame sequence mode uses IMAGE-MODE, which is always face detection without
// tracking
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
// Maximum number of faces detected
HInt32 maxDetectNum = 20;
// Face detection image input level
HInt32 detectPixelLevel = 160;
// Handle of the current face SDK algorithm context
HFSession session = {0};
ret = HFQuerySupportedPixelLevelsForFaceDetection(&pixelLevels);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "HFQuerySupportedPixelLevelsForFaceDetection error: %d", ret);
return ret;
}
HFLogPrint(HF_LOG_INFO, "Supported pixel levels for face detection: %d", pixelLevels.size);
for (int i = 0; i < pixelLevels.size; i++) {
HFLogPrint(HF_LOG_INFO, "Supported pixel level %d: %d", i + 1, pixelLevels.pixel_level[i]);
}
/* Enable the functions in the pipeline: mask detection, live detection, and face quality
* detection */
option = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_LIVENESS;
/* Non-video or frame sequence mode uses IMAGE-MODE, which is always face detection without
* tracking */
detMode = HF_DETECT_MODE_LIGHT_TRACK;
/* Maximum number of faces detected */
maxDetectNum = 20;
/* Face detection image input level */
detectPixelLevel = 160;
/* Handle of the current face SDK algorithm context */
session = NULL;
ret = HFCreateInspireFaceSessionOptional(option, detMode, maxDetectNum, detectPixelLevel, -1, &session);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Create FaceContext error: %d", ret);
@@ -77,123 +109,128 @@ int main(int argc, char* argv[]) {
HFSessionSetTrackPreviewSize(session, detectPixelLevel);
HFSessionSetFilterMinimumFacePixelSize(session, 4);
// Load a image
HFImageBitmap image;
/* Load a image */
ret = HFCreateImageBitmapFromFilePath(sourcePath, 3, &image);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "The source entered is not a picture or read error.");
return ret;
}
// Prepare an image parameter structure for configuration
HFImageStream imageHandle = {0};
/* Prepare an image parameter structure for configuration */
ret = HFCreateImageStreamFromImageBitmap(image, rotation_enum, &imageHandle);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Create ImageStream error: %d", ret);
return ret;
}
// Execute HF_FaceContextRunFaceTrack captures face information in an image
HFMultipleFaceData multipleFaceData = {0};
/* Execute HF_FaceContextRunFaceTrack captures face information in an image */
ret = HFExecuteFaceTrack(session, imageHandle, &multipleFaceData);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Execute HFExecuteFaceTrack error: %d", ret);
return ret;
}
// Print the number of faces detected
auto faceNum = multipleFaceData.detectedNum;
/* Print the number of faces detected */
faceNum = multipleFaceData.detectedNum;
HFLogPrint(HF_LOG_INFO, "Num of face: %d", faceNum);
// Copy a new image to draw
HFImageBitmap drawImage = {0};
/* Copy a new image to draw */
ret = HFImageBitmapCopy(image, &drawImage);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Copy ImageBitmap error: %d", ret);
return ret;
}
HFImageBitmapData data;
ret = HFImageBitmapGetData(drawImage, &data);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Get ImageBitmap data error: %d", ret);
return ret;
}
for (int index = 0; index < faceNum; ++index) {
for (index = 0; index < faceNum; ++index) {
HInt32 numOfLmk;
HPoint2f* denseLandmarkPoints;
HPoint2f fiveKeyPoints[5];
float area;
size_t i;
HFLogPrint(HF_LOG_INFO, "========================================");
HFLogPrint(HF_LOG_INFO, "Token size: %d", multipleFaceData.tokens[index].size);
HFLogPrint(HF_LOG_INFO, "Process face index: %d", index);
HFLogPrint(HF_LOG_INFO, "DetConfidence: %f", multipleFaceData.detConfidence[index]);
HFImageBitmapDrawRect(drawImage, multipleFaceData.rects[index], {0, 100, 255}, 4);
HFImageBitmapDrawRect(drawImage, multipleFaceData.rects[index], (HColor){0, 100, 255}, 4);
// Print FaceID, In IMAGE-MODE it is changing, in VIDEO-MODE it is fixed, but it may be lost
HFLogPrint(HF_LOG_INFO, "FaceID: %d", multipleFaceData.trackIds[index]);
// Print Head euler angle, It can often be used to judge the quality of a face by the Angle
// of the head
HFLogPrint(HF_LOG_INFO, "Roll: %f, Yaw: %f, Pitch: %f", multipleFaceData.angles.roll[index], multipleFaceData.angles.yaw[index],
multipleFaceData.angles.pitch[index]);
HInt32 numOfLmk;
/* Get the number of dense landmark points */
HFGetNumOfFaceDenseLandmark(&numOfLmk);
HPoint2f denseLandmarkPoints[numOfLmk];
denseLandmarkPoints = (HPoint2f*)malloc(sizeof(HPoint2f) * numOfLmk);
if (denseLandmarkPoints == NULL) {
HFLogPrint(HF_LOG_ERROR, "Memory allocation failed!");
return -1;
}
ret = HFGetFaceDenseLandmarkFromFaceToken(multipleFaceData.tokens[index], denseLandmarkPoints, numOfLmk);
if (ret != HSUCCEED) {
free(denseLandmarkPoints);
HFLogPrint(HF_LOG_ERROR, "HFGetFaceDenseLandmarkFromFaceToken error!!");
return -1;
}
for (size_t i = 0; i < numOfLmk; i++) {
HFImageBitmapDrawCircleF(drawImage, {denseLandmarkPoints[i].x, denseLandmarkPoints[i].y}, 0, {100, 100, 0}, 2);
/* Draw dense landmark points */
for (i = 0; i < numOfLmk; i++) {
HFImageBitmapDrawCircleF(drawImage,
(HPoint2f){denseLandmarkPoints[i].x, denseLandmarkPoints[i].y},
0,
(HColor){100, 100, 0},
2);
}
auto& rt = multipleFaceData.rects[index];
float area = ((float)(rt.height * rt.width)) / (data.width * data.height);
free(denseLandmarkPoints);
HFaceRect rt = multipleFaceData.rects[index];
area = ((float)(rt.height * rt.width)) / (data.width * data.height);
HFLogPrint(HF_LOG_INFO, "area: %f", area);
HPoint2f fiveKeyPoints[5];
ret = HFGetFaceFiveKeyPointsFromFaceToken(multipleFaceData.tokens[index], fiveKeyPoints, 5);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "HFGetFaceFiveKeyPointsFromFaceToken error!!");
return -1;
}
for (size_t i = 0; i < 5; i++) {
HFImageBitmapDrawCircleF(drawImage, {fiveKeyPoints[i].x, fiveKeyPoints[i].y}, 0, {0, 0, 232}, 2);
for (i = 0; i < 5; i++) {
HFImageBitmapDrawCircleF(drawImage, (HPoint2f){fiveKeyPoints[i].x, fiveKeyPoints[i].y}, 0, (HColor){0, 0, 232}, 2);
}
}
HFImageBitmapWriteToFile(drawImage, "draw_detected.jpg");
HFLogPrint(HF_LOG_WARN, "Write to file success: %s", "draw_detected.jpg");
// Run pipeline function
// Select the pipeline function that you want to execute, provided that it is already enabled
// when FaceContext is created!
auto pipelineOption = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_LIVENESS;
// In this loop, all faces are processed
/* Run pipeline function */
/* Select the pipeline function that you want to execute, provided that it is already enabled
* when FaceContext is created! */
pipelineOption = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_LIVENESS;
/* In this loop, all faces are processed */
ret = HFMultipleFacePipelineProcessOptional(session, imageHandle, &multipleFaceData, pipelineOption);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Execute Pipeline error: %d", ret);
return ret;
}
// Get mask detection results from the pipeline cache
HFFaceMaskConfidence maskConfidence = {0};
/* Get mask detection results from the pipeline cache */
ret = HFGetFaceMaskConfidence(session, &maskConfidence);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Get mask detect result error: %d", ret);
return -1;
}
// Get face quality results from the pipeline cache
HFFaceQualityConfidence qualityConfidence = {0};
/* Get face quality results from the pipeline cache */
ret = HFGetFaceQualityConfidence(session, &qualityConfidence);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Get face quality result error: %d", ret);
return -1;
}
for (int index = 0; index < faceNum; ++index) {
for (index = 0; index < faceNum; ++index) {
HFLogPrint(HF_LOG_INFO, "========================================");
HFLogPrint(HF_LOG_INFO, "Process face index from pipeline: %d", index);
HFLogPrint(HF_LOG_INFO, "Mask detect result: %f", maskConfidence.confidence[index]);
HFLogPrint(HF_LOG_INFO, "Quality predict result: %f", qualityConfidence.confidence[index]);
// We set the threshold of wearing a mask as 0.85. If it exceeds the threshold, it will be
// judged as wearing a mask. The threshold can be adjusted according to the scene
/* We set the threshold of wearing a mask as 0.85. If it exceeds the threshold, it will be
* judged as wearing a mask. The threshold can be adjusted according to the scene */
if (maskConfidence.confidence[index] > 0.85) {
HFLogPrint(HF_LOG_INFO, "Mask");
} else {
@@ -205,7 +242,7 @@ int main(int argc, char* argv[]) {
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Release image stream error: %d", ret);
}
// The memory must be freed at the end of the program
/* The memory must be freed at the end of the program */
ret = HFReleaseInspireFaceSession(session);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Release session error: %d", ret);
@@ -224,5 +261,8 @@ int main(int argc, char* argv[]) {
return ret;
}
HFLogPrint(HF_LOG_INFO, "");
HFDeBugShowResourceStatistics();
return 0;
}

View File

@@ -1,31 +1,33 @@
/**
/*
* Created by Jingyu Yan
* @date 2024-10-01
*/
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <inspireface.h>
int main(int argc, char* argv[]) {
// Check whether the number of parameters is correct
/* Check whether the number of parameters is correct */
if (argc < 3 || argc > 4) {
HFLogPrint(HF_LOG_ERROR, "Usage: %s <pack_path> <source_path> [rotation]", argv[0]);
return 1;
}
auto packPath = argv[1];
auto sourcePath = argv[2];
const char* packPath = argv[1];
const char* sourcePath = argv[2];
int rotation = 0;
// If rotation is provided, check and set the value
/* If rotation is provided, check and set the value */
if (argc == 4) {
rotation = std::atoi(argv[3]);
rotation = atoi(argv[3]);
if (rotation != 0 && rotation != 90 && rotation != 180 && rotation != 270) {
HFLogPrint(HF_LOG_ERROR, "Invalid rotation value. Allowed values are 0, 90, 180, 270.");
return 1;
}
}
HFRotation rotation_enum;
// Set rotation based on input parameter
/* Set rotation based on input parameter */
switch (rotation) {
case 90:
rotation_enum = HF_CAMERA_ROTATION_90;
@@ -49,24 +51,24 @@ int main(int argc, char* argv[]) {
HFSetLogLevel(HF_LOG_INFO);
HResult ret;
// The resource file must be loaded before it can be used
/* The resource file must be loaded before it can be used */
ret = HFLaunchInspireFace(packPath);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Load Resource error: %d", ret);
return ret;
}
// Enable the functions in the pipeline: mask detection, live detection, and face quality
// detection
HOption option = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_LIVENESS | HF_ENABLE_DETECT_MODE_LANDMARK;
// Non-video or frame sequence mode uses IMAGE-MODE, which is always face detection without
// tracking
/* Enable the functions in the pipeline: mask detection, live detection, and face quality
* detection */
HOption option = HF_ENABLE_QUALITY | HF_ENABLE_MASK_DETECT | HF_ENABLE_LIVENESS;
/* Non-video or frame sequence mode uses IMAGE-MODE, which is always face detection without
* tracking */
HFDetectMode detMode = HF_DETECT_MODE_ALWAYS_DETECT;
// Maximum number of faces detected
/* Maximum number of faces detected */
HInt32 maxDetectNum = 20;
// Face detection image input level
/* Face detection image input level */
HInt32 detectPixelLevel = 160;
// Handle of the current face SDK algorithm context
/* Handle of the current face SDK algorithm context */
HFSession session = {0};
ret = HFCreateInspireFaceSessionOptional(option, detMode, maxDetectNum, detectPixelLevel, -1, &session);
if (ret != HSUCCEED) {
@@ -77,14 +79,14 @@ int main(int argc, char* argv[]) {
HFSessionSetTrackPreviewSize(session, detectPixelLevel);
HFSessionSetFilterMinimumFacePixelSize(session, 4);
// Load a image
/* Load a image */
HFImageBitmap image;
ret = HFCreateImageBitmapFromFilePath(sourcePath, 3, &image);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "The source entered is not a picture or read error.");
return ret;
}
// Prepare an image parameter structure for configuration
/* Prepare an image parameter structure for configuration */
HFImageStream imageHandle = {0};
ret = HFCreateImageStreamFromImageBitmap(image, rotation_enum, &imageHandle);
if (ret != HSUCCEED) {
@@ -94,12 +96,13 @@ int main(int argc, char* argv[]) {
int loop = 100;
// Enable the cost spend
/* Enable the cost spend */
HFSessionSetEnableTrackCostSpend(session, 1);
// Execute HF_FaceContextRunFaceTrack captures face information in an image
int i;
/* Execute HF_FaceContextRunFaceTrack captures face information in an image */
HFMultipleFaceData multipleFaceData = {0};
for (int i = 0; i < loop; i++) {
for (i = 0; i < loop; i++) {
ret = HFExecuteFaceTrack(session, imageHandle, &multipleFaceData);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Execute HFExecuteFaceTrack error: %d", ret);
@@ -113,7 +116,7 @@ int main(int argc, char* argv[]) {
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Release image stream error: %d", ret);
}
// The memory must be freed at the end of the program
/* The memory must be freed at the end of the program */
ret = HFReleaseInspireFaceSession(session);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Release session error: %d", ret);

View File

@@ -1,8 +1,6 @@
#include <iostream>
#include <inspireface.h>
#include <vector>
static std::vector<float> FT = {
static float FT[] = {
0.0706566, 0.00640248, 0.0418103, -0.00597861, 0.0269879, 0.0187478, 0.0486305, 0.0349162, -0.0080779, -0.0550556, 0.0229963,
-0.00683422, -0.0338589, 0.0533989, -0.0371725, 0.000972469, 0.0612415, 0.0389846, -0.00126743, -0.0128782, 0.0935529, 0.0588179,
0.0164787, -0.00732871, -0.0458209, -0.0100137, -0.0372892, 0.000871123, 0.0245121, -0.0811471, -0.00481095, 0.0266868, 0.0712961,
@@ -73,12 +71,10 @@ int main() {
return ret;
}
// std::vector<float> feature(512, 0.0f);
int64_t result_id = 0;
HFFaceFeature feature = {0};
feature.data = FT.data();
feature.size = FT.size();
feature.data = FT;
feature.size = sizeof(FT) / sizeof(FT[0]);
HFFaceFeatureIdentity identity = {0};
identity.feature = &feature;
ret = HFFeatureHubInsertFeature(identity, &result_id);
@@ -87,10 +83,9 @@ int main() {
return ret;
}
// std::vector<float> query_feature(512, 20.0f);
HFFaceFeature query_feature = {0};
query_feature.data = FT.data();
query_feature.size = FT.size();
query_feature.data = FT;
query_feature.size = sizeof(FT) / sizeof(FT[0]);
HFloat confidence;
HFFaceFeatureIdentity search_result = {0};
ret = HFFeatureHubFaceSearch(query_feature, &confidence, &search_result);

View File

@@ -0,0 +1,52 @@
#include <inspireface.h>
#include <unistd.h>
#include <stdio.h>
/*
 * Verifies that enabling the FeatureHub with persistence turned on actually
 * materialises the configured database file on disk.
 */
int main(int argc, char* argv[]) {
    const char* db_file = "feature.db";
    const char* pack_path;
    HFFeatureHubConfiguration hub_config;
    HResult status;

    if (argc != 2) {
        HFLogPrint(HF_LOG_ERROR, "Usage: %s <pack_path>", argv[0]);
        return -1;
    }
    pack_path = argv[1];

    /* Resources must be loaded before any other API call. */
    status = HFLaunchInspireFace(pack_path);
    if (status != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Load Resource error: %d", status);
        return status;
    }

    /* A stale database left by a previous run would mask the check, so drop it. */
    if (access(db_file, F_OK) == 0) {
        if (remove(db_file) != 0) {
            HFLogPrint(HF_LOG_ERROR, "Failed to remove old db file: %s", db_file);
            return -1;
        }
        HFLogPrint(HF_LOG_INFO, "Remove old db file: %s", db_file);
    }

    hub_config.primaryKeyMode = HF_PK_AUTO_INCREMENT;
    hub_config.enablePersistence = 1;
    hub_config.persistenceDbPath = db_file;
    hub_config.searchMode = HF_SEARCH_MODE_EAGER;
    hub_config.searchThreshold = 0.48f;
    status = HFFeatureHubDataEnable(hub_config);
    if (status != HSUCCEED) {
        HFLogPrint(HF_LOG_ERROR, "Enable FeatureHub failed: %d\n", status);
        return status;
    }

    /* Persistence is on, so the database file must now exist. */
    if (access(db_file, F_OK) != 0) {
        HFLogPrint(HF_LOG_ERROR, "DB file not found: %s", db_file);
        return -1;
    }
    HFLogPrint(HF_LOG_INFO, "DB file found: %s", db_file);

    /* .... */
    HFTerminateInspireFace();
    return 0;
}

View File

@@ -1,9 +1,8 @@
#include <iostream>
#include <inspireface.h>
int main() {
std::string resourcePath = "test_res/pack/Pikachu";
HResult ret = HFReloadInspireFace(resourcePath.c_str());
const char* resourcePath = "test_res/pack/Pikachu";
HResult ret = HFReloadInspireFace(resourcePath);
if (ret != HSUCCEED) {
HFLogPrint(HF_LOG_ERROR, "Failed to launch InspireFace: %d", ret);
return 1;