{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Face Verification: One-to-One Face Comparison\n",
"\n",
"This notebook demonstrates how to verify if two face images belong to the same person using **UniFace**.\n",
"\n",
"## 1. Install UniFace"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"source": [
"%pip install -q uniface"
]
},
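{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2. Import Libraries"
]
},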
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2.0.0\n"
]
}
],
"source": [
"import cv2\n",
"import matplotlib.pyplot as plt\n",
"\n",
"import uniface\n",
"from uniface import FaceAnalyzer\n",
"from uniface.detection import RetinaFace\n",
"from uniface.recognition import ArcFace\n",
"\n",
"print(uniface.__version__)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 3. Initialize Face Analyzer\n",
"We need detection and recognition models for face verification.\n",
|
|
|
|
|
"\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"✓ Model loaded (CoreML (Apple Silicon))\n",
"✓ Model loaded (CoreML (Apple Silicon))\n"
]
}
],
"source": [
"analyzer = FaceAnalyzer(\n",
" detector=RetinaFace(confidence_threshold=0.5),\n",
" recognizer=ArcFace()\n",
")"
]
},
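{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 4. Load Images and Detect Faces"
]
},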
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Detected 1 and 1 faces\n"
]
}
],
"source": [
"image_path1 = '../assets/test_images/image0.jpg'\n",
"image_path2 = '../assets/test_images/image1.jpg'\n",
"\n",
"image1 = cv2.imread(image_path1)\n",
"image2 = cv2.imread(image_path2)\n",
"\n",
"# Analyze faces\n",
"faces1 = analyzer.analyze(image1)\n",
"faces2 = analyzer.analyze(image2)\n",
"\n",
"print(f'Detected {len(faces1)} and {len(faces2)} faces')"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<Figure size 1000x500 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig, axes = plt.subplots(1, 2, figsize=(10, 5))\n",
"\n",
"axes[0].imshow(cv2.cvtColor(image1, cv2.COLOR_BGR2RGB))\n",
"axes[0].set_title('Image 1')\n",
"axes[0].axis('off')\n",
"\n",
"axes[1].imshow(cv2.cvtColor(image2, cv2.COLOR_BGR2RGB))\n",
"axes[1].set_title('Image 2')\n",
"axes[1].axis('off')\n",
"\n",
"plt.tight_layout()\n",
"plt.show()"
]
},
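{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 5. Compute Similarity and Verify"
]
},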
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Similarity: 0.1135\n"
]
}
],
"source": [
"if faces1 and faces2:\n",
" face1 = faces1[0]\n",
" face2 = faces2[0]\n",
"\n",
" similarity = face1.compute_similarity(face2)\n",
" print(f'Similarity: {similarity:.4f}')\n",
"else:\n",
" print('Error: Could not detect faces')"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Similarity: 0.1135\n",
"Threshold: 0.6\n",
"Result: Different people\n"
]
}
],
"source": [
"THRESHOLD = 0.6\n",
"\n",
"if faces1 and faces2:\n",
" is_same_person = similarity > THRESHOLD\n",
"\n",
" print(f'Similarity: {similarity:.4f}')\n",
" print(f'Threshold: {THRESHOLD}')\n",
" print(f'Result: {\"Same person\" if is_same_person else \"Different people\"}')"
]
},
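{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 6. Compare Multiple Pairs"
]
},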
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Comparing multiple pairs:\n",
"image0.jpg vs image1.jpg: 0.1135\n",
"image0.jpg vs image2.jpg: 0.0833\n",
"image1.jpg vs image2.jpg: -0.0082\n"
]
}
],
"source": [
"image_pairs = [\n",
" ('../assets/test_images/image0.jpg', '../assets/test_images/image1.jpg'),\n",
" ('../assets/test_images/image0.jpg', '../assets/test_images/image2.jpg'),\n",
" ('../assets/test_images/image1.jpg', '../assets/test_images/image2.jpg'),\n",
"]\n",
"\n",
"print('Comparing multiple pairs:')\n",
"for img1_path, img2_path in image_pairs:\n",
" img1 = cv2.imread(img1_path)\n",
" img2 = cv2.imread(img2_path)\n",
"\n",
" faces_a = analyzer.analyze(img1)\n",
" faces_b = analyzer.analyze(img2)\n",
"\n",
" if faces_a and faces_b:\n",
" sim = faces_a[0].compute_similarity(faces_b[0])\n",
"\n",
" img1_name = img1_path.split('/')[-1]\n",
" img2_name = img2_path.split('/')[-1]\n",
"\n",
" print(f'{img1_name} vs {img2_name}: {sim:.4f}')"
]
},
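{
"cell_type": "markdown",
"metadata": {},
"source": [
"The loop above re-runs detection on both images for every pair. For larger batches it is cheaper to analyze each image once and reuse the detected faces. Below is a minimal sketch of that caching pattern, using only the calls already shown in this notebook:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from itertools import combinations\n",
"\n",
"paths = [\n",
"    '../assets/test_images/image0.jpg',\n",
"    '../assets/test_images/image1.jpg',\n",
"    '../assets/test_images/image2.jpg',\n",
"]\n",
"\n",
"# Analyze each image once and keep the first detected face (if any).\n",
"cached_faces = {}\n",
"for path in paths:\n",
"    img = cv2.imread(path)\n",
"    faces = analyzer.analyze(img) if img is not None else []\n",
"    if faces:\n",
"        cached_faces[path] = faces[0]\n",
"\n",
"# Compare every unique pair using the cached faces.\n",
"for path_a, path_b in combinations(cached_faces, 2):\n",
"    sim = cached_faces[path_a].compute_similarity(cached_faces[path_b])\n",
"    print(f\"{path_a.split('/')[-1]} vs {path_b.split('/')[-1]}: {sim:.4f}\")"
]
},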
{
"cell_type": "markdown",
"metadata": {},
"source": [
"\n",
"## Notes\n",
"\n",
"- Similarity score ranges from -1 to 1 (higher = more similar)\n",
|
|
|
|
|
"- Threshold of 0.6 is commonly used (above = same person, below = different)\n",
|
|
|
|
|
"- Adjust threshold based on your use case (higher = stricter matching)"
]
},
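{
"cell_type": "markdown",
"metadata": {},
"source": [
"The -1 to 1 range above is characteristic of cosine similarity between embedding vectors. As a sanity check on what such a score means, here is a self-contained sketch of the formula in plain NumPy; the 512-dimensional vector is a stand-in for a face embedding, not output from UniFace:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"def cosine_similarity(a, b):\n",
"    # cos(theta) = (a . b) / (|a| * |b|), always in [-1, 1]\n",
"    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))\n",
"\n",
"rng = np.random.default_rng(0)\n",
"vec = rng.normal(size=512)  # stand-in for a 512-d face embedding\n",
"\n",
"print(cosine_similarity(vec, vec))   # identical vectors -> 1.0\n",
"print(cosine_similarity(vec, -vec))  # opposite vectors  -> -1.0"
]
}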
],
"metadata": {
"kernelspec": {
"display_name": "base",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}