mirror of
https://github.com/yakhyo/uniface.git
synced 2026-05-15 04:37:49 +00:00
221 lines
6.6 KiB
Plaintext
{
|
|
"cells": [
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Face Verification: One-to-One Face Comparison\n",
|
|
"\n",
|
|
"<div style=\"display:flex; flex-wrap:wrap; align-items:center;\">\n",
|
|
" <a style=\"margin-right:10px; margin-bottom:6px;\" href=\"https://pepy.tech/projects/uniface\"><img alt=\"PyPI Downloads\" src=\"https://static.pepy.tech/personalized-badge/uniface?period=total&units=international_system&left_color=grey&right_color=blue&left_text=Downloads\"></a>\n",
|
|
" <a style=\"margin-right:10px; margin-bottom:6px;\" href=\"https://pypi.org/project/uniface/\"><img alt=\"PyPI Version\" src=\"https://img.shields.io/pypi/v/uniface.svg\"></a>\n",
|
|
" <a style=\"margin-right:10px; margin-bottom:6px;\" href=\"https://opensource.org/licenses/MIT\"><img alt=\"License\" src=\"https://img.shields.io/badge/License-MIT-blue.svg\"></a>\n",
|
|
" <a style=\"margin-bottom:6px;\" href=\"https://github.com/yakhyo/uniface\"><img alt=\"GitHub Stars\" src=\"https://img.shields.io/github/stars/yakhyo/uniface.svg?style=social\"></a>\n",
|
|
"</div>\n",
|
|
"\n",
|
|
"**UniFace** is a lightweight, production-ready Python library for face detection, recognition, tracking, landmark analysis, face parsing, gaze estimation, and face attributes.\n",
|
|
"\n",
|
|
"🔗 **GitHub**: [github.com/yakhyo/uniface](https://github.com/yakhyo/uniface) | 📚 **Docs**: [yakhyo.github.io/uniface](https://yakhyo.github.io/uniface)\n",
|
|
"\n",
|
|
"---\n",
|
|
"\n",
|
|
"This notebook demonstrates how to verify if two face images belong to the same person using **UniFace**.\n",
|
|
"\n",
|
|
"## 1. Install UniFace"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"%pip install -q \"uniface[cpu]\"\n",
"\n",
"# On Colab, clone the repo so the example assets are available locally\n",
"import os\n",
"\n",
"IN_COLAB = 'COLAB_GPU' in os.environ or 'COLAB_RELEASE_TAG' in os.environ\n",
"if IN_COLAB:\n",
"    if not os.path.exists('uniface'):\n",
"        !git clone --depth 1 https://github.com/yakhyo/uniface.git\n",
"    os.chdir('uniface/examples')"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Third-party: OpenCV for image I/O, matplotlib for display\n",
"import cv2\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# UniFace: the analyzer pipeline plus detection / recognition backends\n",
"import uniface\n",
"from uniface.analyzer import FaceAnalyzer\n",
"from uniface.detection import RetinaFace\n",
"from uniface.recognition import ArcFace\n",
"\n",
"# Confirm which uniface release is installed\n",
"print(uniface.__version__)"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"## 2. Initialize Face Analyzer\n",
|
|
"We need detection and recognition models for face verification.\n",
|
|
"\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# RetinaFace handles detection, ArcFace handles recognition\n",
"detector = RetinaFace(confidence_threshold=0.5)\n",
"recognizer = ArcFace()\n",
"\n",
"analyzer = FaceAnalyzer(detector=detector, recognizer=recognizer)"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"image_path1 = '../assets/test_images/image0.jpg'\n",
"image_path2 = '../assets/test_images/image1.jpg'\n",
"\n",
"image1 = cv2.imread(image_path1)\n",
"image2 = cv2.imread(image_path2)\n",
"\n",
"# cv2.imread returns None (no exception) for a missing/unreadable file;\n",
"# fail fast here instead of crashing inside the analyzer\n",
"if image1 is None or image2 is None:\n",
"    raise FileNotFoundError(f'Could not read {image_path1} or {image_path2}')\n",
"\n",
"# Analyze faces\n",
"faces1 = analyzer.analyze(image1)\n",
"faces2 = analyzer.analyze(image2)\n",
"\n",
"print(f'Detected {len(faces1)} and {len(faces2)} faces')"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Show both inputs side by side (convert BGR -> RGB for matplotlib)\n",
"fig, axes = plt.subplots(1, 2, figsize=(10, 5))\n",
"\n",
"for ax, img, title in zip(axes, (image1, image2), ('Image 1', 'Image 2')):\n",
"    ax.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n",
"    ax.set_title(title)\n",
"    ax.axis('off')\n",
"\n",
"plt.tight_layout()\n",
"plt.show()"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Compare the first detected face from each image\n",
"if not (faces1 and faces2):\n",
"    print('Error: Could not detect faces')\n",
"else:\n",
"    similarity = faces1[0].compute_similarity(faces2[0])\n",
"    print(f'Similarity: {similarity:.4f}')"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Decision threshold: scores above it are treated as the same person\n",
"THRESHOLD = 0.6\n",
"\n",
"if faces1 and faces2:\n",
"    is_same_person = similarity > THRESHOLD\n",
"\n",
"    print(f'Similarity: {similarity:.4f}')\n",
"    print(f'Threshold: {THRESHOLD}')\n",
"    print(f'Result: {\"Same person\" if is_same_person else \"Different people\"}')\n",
"else:\n",
"    # Match the error handling of the previous cell instead of printing nothing\n",
"    print('Error: Could not detect faces')"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"image_pairs = [\n",
"    ('../assets/test_images/image0.jpg', '../assets/test_images/image1.jpg'),\n",
"    ('../assets/test_images/image0.jpg', '../assets/test_images/image2.jpg'),\n",
"    ('../assets/test_images/image1.jpg', '../assets/test_images/image2.jpg'),\n",
"]\n",
"\n",
"print('Comparing multiple pairs:')\n",
"for img1_path, img2_path in image_pairs:\n",
"    img1 = cv2.imread(img1_path)\n",
"    img2 = cv2.imread(img2_path)\n",
"\n",
"    # os.path.basename is the idiomatic (and cross-platform) way to get the filename\n",
"    img1_name = os.path.basename(img1_path)\n",
"    img2_name = os.path.basename(img2_path)\n",
"\n",
"    # cv2.imread returns None instead of raising on a bad path\n",
"    if img1 is None or img2 is None:\n",
"        print(f'{img1_name} vs {img2_name}: could not read image(s)')\n",
"        continue\n",
"\n",
"    faces_a = analyzer.analyze(img1)\n",
"    faces_b = analyzer.analyze(img2)\n",
"\n",
"    if faces_a and faces_b:\n",
"        sim = faces_a[0].compute_similarity(faces_b[0])\n",
"        print(f'{img1_name} vs {img2_name}: {sim:.4f}')\n",
"    else:\n",
"        # Previously such pairs were skipped silently\n",
"        print(f'{img1_name} vs {img2_name}: no faces detected')"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": [
|
|
"\n",
|
|
"## Notes\n",
|
|
"\n",
|
|
"- Similarity score ranges from -1 to 1 (higher = more similar)\n",
|
|
"- Threshold of 0.6 is commonly used (above = same person, below = different)\n",
|
|
"- Adjust threshold based on your use case (higher = stricter matching)"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {},
|
|
"source": []
|
|
}
|
|
],
|
|
"metadata": {
|
|
"kernelspec": {
|
|
"display_name": "base",
|
|
"language": "python",
|
|
"name": "python3"
|
|
},
|
|
"language_info": {
|
|
"codemirror_mode": {
|
|
"name": "ipython",
|
|
"version": 3
|
|
},
|
|
"file_extension": ".py",
|
|
"mimetype": "text/x-python",
|
|
"name": "python",
|
|
"nbconvert_exporter": "python",
|
|
"pygments_lexer": "ipython3",
|
|
"version": "3.13.5"
|
|
}
|
|
},
|
|
"nbformat": 4,
|
|
"nbformat_minor": 4
|
|
}
|