{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Face Analysis with UniFace\n",
"\n",
"This notebook demonstrates comprehensive face analysis using the **FaceAnalyzer** class.\n",
"\n",
"## 1. Install UniFace"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"source": [
"%pip install -q uniface"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2. Import Libraries"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1.6.0\n"
]
}
],
"source": [
"import cv2\n",
"import matplotlib.pyplot as plt\n",
"\n",
"import uniface\n",
"from uniface import FaceAnalyzer\n",
"from uniface.detection import RetinaFace\n",
"from uniface.recognition import ArcFace\n",
"from uniface.attribute import AgeGender\n",
"from uniface.visualization import draw_detections\n",
"\n",
"print(uniface.__version__)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 3. Initialize FaceAnalyzer\n",
"\n",
"The `FaceAnalyzer` combines detection, recognition, and attribute prediction in one class."
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"✓ Model loaded (CoreML (Apple Silicon))\n",
"✓ Model loaded (CoreML (Apple Silicon))\n",
"✓ Model loaded (CoreML (Apple Silicon))\n"
]
}
],
"source": [
"analyzer = FaceAnalyzer(\n",
" detector=RetinaFace(conf_thresh=0.5),\n",
" recognizer=ArcFace(),\n",
" age_gender=AgeGender()\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 4. Analyze Faces in Images"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"image0.jpg: Detected 1 face(s)\n",
" Face 1: Female, 28y\n",
"\n",
"image1.jpg: Detected 1 face(s)\n",
" Face 1: Female, 29y\n",
"\n",
"image2.jpg: Detected 1 face(s)\n",
" Face 1: Female, 28y\n"
]
}
],
"source": [
"image_paths = [\n",
" '../assets/test_images/image0.jpg',\n",
" '../assets/test_images/image1.jpg',\n",
" '../assets/test_images/image2.jpg',\n",
"]\n",
"\n",
"results = []\n",
"\n",
"for image_path in image_paths:\n",
" # Load image\n",
" image = cv2.imread(image_path)\n",
" if image is None:\n",
" print(f'Error: Could not read {image_path}')\n",
" continue\n",
"\n",
" # Analyze faces - returns list of Face objects\n",
" faces = analyzer.analyze(image)\n",
" print(f'\\n{image_path.split(\"/\")[-1]}: Detected {len(faces)} face(s)')\n",
"\n",
" # Print face attributes\n",
" for i, face in enumerate(faces, 1):\n",
" print(f' Face {i}: {face.sex}, {face.age}y')\n",
"\n",
" # Prepare visualization (without text overlay)\n",
" vis_image = image.copy()\n",
" bboxes = [f.bbox for f in faces]\n",
" scores = [f.confidence for f in faces]\n",
" landmarks = [f.landmarks for f in faces]\n",
" draw_detections(image=vis_image, bboxes=bboxes, scores=scores, landmarks=landmarks, vis_threshold=0.5, fancy_bbox=True)\n",
"\n",
" results.append((image_path, cv2.cvtColor(vis_image, cv2.COLOR_BGR2RGB), faces))"
]
},
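{
"cell_type": "markdown",
"metadata": {},
"source": [
"The analysis above gives a pixel bounding box for each face. As a minimal sketch (an illustration, not part of the `FaceAnalyzer` API), assuming `face.bbox` is in `(x1, y1, x2, y2)` pixel order, a detected face can be cropped with plain array slicing."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Crop the first detected face from the last image processed above.\n",
"# Assumes bbox order is (x1, y1, x2, y2); coordinates are clipped to the image bounds.\n",
"if results and results[-1][2]:\n",
"    last_path, _, last_faces = results[-1]\n",
"    last_image = cv2.imread(last_path)\n",
"    h, w = last_image.shape[:2]\n",
"    x1, y1, x2, y2 = last_faces[0].bbox.astype(int)\n",
"    x1, y1 = max(0, x1), max(0, y1)\n",
"    x2, y2 = min(w, x2), min(h, y2)\n",
"    face_crop = last_image[y1:y2, x1:x2]\n",
"    plt.imshow(cv2.cvtColor(face_crop, cv2.COLOR_BGR2RGB))\n",
"    plt.axis('off')\n",
"    plt.show()"
]
},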
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 5. Visualize Results\n",
"\n",
"Display images with face information shown below each image."
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABdEAAALCCAYAAAAxsJqxAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs/dmvbdt13ouNVe+zT0WRh5RYiaRYiKJqxZYd2ZYM29cIFCAPebgveblIYOAi/0T+icBPQXCTpyBAAuc+RHAuDDux5VKSbYkqSIoUK7EuTr33XtUMvj7Gb8xvfqv1Mefae+1zDsnZ95lnrjmKXrbeiq+33vrBarVaDfu0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T/t0Ix3evLRP+7RP+7RP+7RP+7RP+7RP+7RP+7RP+7RP+7RP+7RP+7RPSnsQfZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2qZP2IPo+7dM+7dM+7dM+7dM+7dM+7dM+7dM+7dM+7dM+7dM+7VMn7UH0fdqnfdqnfdqnfdqnfdqnfdqnfdqnfdqnfdqnfdqnfdqnTtqD6Pu0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T/u0T520B9H3aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ/2aZ86aQ+i79M+7dM+7dM+7dM+7dM+7dM+7dM+7dM+7dM+7dM+7dM+ddIeRN+nfdqnfdqnfdqnfdqnfdqnfdqnfdqnfdqnfdqnfdqnfeqk42HH9J4Xn+veW61W8/fBwYFdWw1Hh4fDeGnVrvHM0dHR/H18fNz+1uf09HQ4Oztr3/ro+cvLy5annj05OWnfzzzzTPu+vr5u9+/duze8733vG1566d3DvePj4ezstN179OhR+37w4MHw8ssvD2+++WZ7XtdUnspWOcr38PBwOD8/b88+fPhweOONN9pv6qjn9I7yvLi4aM+3ems14vCw3X/22WeHg8Pj4bXXXmtlqRueefbZ4ad+6qeGV199dfjOd78/vPraay0PylfyPlHdrq6u2jdJv1Vvlau/1Uf0J/1PUl0y0b8+Bkoqg/L0nurA8/z2sWJseU8f/eYZL5t7ytvrQD2UGFv6cu7T1g9Xw2o19gH1pXxv9/p5+k3tO5z7L9te1YVP1XfeZ9TD66O/aaPPBR+7HCMS/XvblHNJdCQa1jdtgL5EK5of+ohGj48P23X9vn//fvvWsxqL9773vW1uQcvKU7Qq2j1/+KitulG2+lf0qHuaM9CLvrmnj/JVHtAwH+hHc0wfPevjo7+Oj0+Hi+tN2mdcoa8cU5UPTepvvav89a7ares+5+k/vaMy4A36W3xA7+tZ+pGxJi/6Ar7geZCURzVXG90Om7TFnCEPzyffpa28R58w9tyHttWGs9PT4Zl7ZxtjpfLUxueee679Fq/Uh7bovv4+OjndoL/x78be5zoNjSM2rgi1arTWD3Xo2dvk7em1P+dlxSMeN/XKfJw8Mq+bPILOq9/T93e+/+pj1+MP/uAPSr6U9anq7r/hzUrQWSbn355/0qjP9SzH53GP7+e7vbnlvNB5seaq8yLPh3viafwtPoAu4vLZeRnf8Cn4nb7RQ/hd9Uu2QfnoPSXmciWznA+6XKae2X7qrvxz7HL86C9vL2NX6R3wU3gr+lo1nuh9Xl8fC6en1EOq+en1cX3CZbr3l9cXXVNywnmO9wn19bpUeosSfe/v0SeeXMdTGuXzKMv1N31YlZN9mjTB2LkehrwVbVO27om+kV30ATJTeqz+dtrTc/pb8kL1lB4hfUF1RzfXt36jY0j26Bnd09/6fvHFF9t9H8+kOZLroD6WPEc7mYcpU7yfXP9wPTFT9i3z18ulvi4r0ac1drQpdQTmNkl9wnyBZqgDusYLL7zQ+sztEXQLfx46y3Y67fDR2Cg/5p9o4Xvf+14bc+pLm/Rbto2+v/nNbzYbSTSgcZ3tH6sT8+dd73rX8NM//dOtrJdeeqm103mUnkc/0Tf8znVKbCXGlfteLmNRzYOeXGDs4P3wTMZN/UHbndacv2s+ITPUb+icSv/0n/7T4XHTz/zMz9y4tqsMv2t95knTLjrZkq676zubGMTNOpCSJ2x7p1f3J+1baBye5zazl7FNH0p7t5LnSs4rc15o/mmeidb1cQxA80xJ80Hvk3fyafGnD33oQ+39r371q+3aRz7ykeH5558fPvGJTwzvfve7m0762c9+dtaffuM3fmP4J//knwzvec97hg984AOzfab0yiuvtHklvqF3dU98RPnLRlEbsN/AbP76r/96+MEPfjB87WtfG77+9a83WSTe6bafMBnxjb/xN/5G400//OEPh9dff72VpY/4muxhXf/P//k/z/xa7f3+97/fntH8VD7f+c535jK//OUvz+Onsn7u536ulUPf//mf//nwV3/1V609aqvy+vznP9/eUf0kD3/zN3+ztRXbWvVS2372Z392+MxnPtOe0X1dU93Uh7/0S7/U+k9tUVv/5E/+ZPizP/uzeZyx45Wf+kV5qq66xjPgBOoffcMblb+eUz//7u/+bhtPlfuXf/mXLV/liY0omkA2pY6X9jp94jTYw1mWdOWUaU6ztAf9hG90EOQLZYlO1Y9K6KO0g/nhepo+6kvRgK5J1qGnKF9wFNGRZCe4i/oKetU9vaN+VRkaH9E9WOjP//zPD7/+678+fO5znxv+2T/7Zy1vzTHlr3eVB21QXaSziebVFtqlcdJc0FiJTvUbuc0c1nuaU85zVHf9jQ1DSux2NenPzouo013y/Z49ve3aLuXlve9+97vDnYHoS6lSQpuScjA1pil44zUUKwjXPyjgGlgRmq6huOg9EZSIAiALJUUMQ0Qvhvf8C88O90+kiA2NWGAAEIkThRt5lZECgeibOnh9vb1M3Pbs9Tj5GlOcJi0C6NoUQASQg2Ep7FLIV0a0pyTYntD1+2mceGJCALZlyvxT+Kdy35sU1Mlp6fp6DbD7c24EJS2N+ehzE0AhIZy9n92gzPpnO/J3DwzaBcRbeuY2IKADKPrbgQbGBYFwdXVwwzAar49zij4FNEZBOW2C5167hyGIkBB9J4BDuSgHXMdQx+hknBwQbvUe1gBApfQ5QJyCmXtpJFNetQDBuwBm3pcsUOi3A0MYSwAP8JmNdmyhFR+rVISX6GCJxlyQ3bhnoL4vAvAt5ZG/pTjRH/DDlfVla9tYmU5JfO8+F5wX5Lzbp9unXZWMik4TRKpoLt/rjVfO0yflj3m/JwsrkLF6z+VLVeelv3t1XZqfd8X7HzdtG4NeP2zLk+/Uj3rKb45J0lBFTxWdZXtuKz97banqUJW/jc9X9L+kq+V73r7es9XvNEj9m3wrozUN2nynV16VlngC37vyhCrPXYC0XXW7XVJP9676r0dbvTr73wkOLM2h3jP5vuvF1bjnO+j+aR9VIF3FH/13T5fbpX+X0m35ZW/+5r0l/t2rbyUb9umdmX6Uxmeb7lTN5V14tfNet93yvZ49kXzO7T7yg
VfgmITtB1irT+bnvxNzyUVSypUdpvz5gK/oef3GSdNxHOqLXQqGwTvucMF1X3BmsVHP8r7jRe6sgY2NPc7f2IDYotmPSuAcjAX5uTMYNqm3nbzoH2xvPUeZPlZetuseONfpPYGtOGFs0zd69Najx6TJvH6blHLHF0DTSY2/vc+heRbFue7OEPS529Akxsnz7c0x6uuyNume8ar6wbEDd8j1vmbRQDTDb9rlddqFb/Tk509auhMQvTJ62udwGK4vr4fhYAQ1lRI0d09QQHKtZLLSCFDOSpJWeTSZtdqicnVdK28f/OAHJw+Ng+GZk7Ph6mpk0jA2ysabgVWqGRgypgHh4Z3EymwSsTNKX5W5vFwz43vTahCEejFdVxl6h8m5DbzIvk2B5kpvTwhmftxP8NLfSaV3m2FZ1bUn3BmHytAZ+1fjsFmWM8Be20f+tgmUen8yfhXDq5Tf/NvzqoyPbcpO1c+Pc8+VF5i1M0FfqHHQ9ORk7YEMHSMUNF9YaYXZM1cOrL3KT6uq+mYBxgFl/sa
"text/plain": [
"<Figure size 1500x800 with 6 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig, axes = plt.subplots(2, len(results), figsize=(15, 8),\n",
" gridspec_kw={'height_ratios': [4, 1]})\n",
"\n",
"for idx, (path, vis_image, faces) in enumerate(results):\n",
" # Display image\n",
" axes[0, idx].imshow(vis_image)\n",
" axes[0, idx].axis('off')\n",
"\n",
" # Display face information below image\n",
" axes[1, idx].axis('off')\n",
" info_text = f'{len(faces)} face(s)\\n'\n",
" for i, face in enumerate(faces, 1):\n",
" info_text += f'Face {i}: {face.sex}, {face.age}y\\n'\n",
"\n",
" axes[1, idx].text(0.5, 0.5, info_text,\n",
" ha='center', va='center',\n",
" fontsize=10, family='monospace')\n",
"\n",
"plt.tight_layout()\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 6. Access Face Attributes\n",
"\n",
"Each `Face` object contains detection, recognition, and attribute data."
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Face Attributes:\n",
" - Bounding box: [88, 54, 442, 444]\n",
" - Confidence: 1.000\n",
" - Landmarks shape: (5, 2)\n",
" - Age: 28 years\n",
" - Gender: Female\n",
" - Embedding shape: (1, 512)\n",
" - Embedding dimension: 512D\n"
]
}
],
"source": [
"# Get first face from first image\n",
"_, _, faces = results[0]\n",
"if faces:\n",
" face = faces[0]\n",
"\n",
" print('Face Attributes:')\n",
" print(f' - Bounding box: {face.bbox.astype(int).tolist()}')\n",
" print(f' - Confidence: {face.confidence:.3f}')\n",
" print(f' - Landmarks shape: {face.landmarks.shape}')\n",
" print(f' - Age: {face.age} years')\n",
" print(f' - Gender: {face.sex}')\n",
" print(f' - Embedding shape: {face.embedding.shape}')\n",
" print(f' - Embedding dimension: {face.embedding.shape[1]}D')"
]
},
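{
"cell_type": "markdown",
"metadata": {},
"source": [
"The Notes at the end of this notebook state that embeddings are L2-normalized. As a quick sanity check (a minimal sketch, not library functionality), the norm can be verified with NumPy."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# An L2-normalized embedding should have a norm of approximately 1.0.\n",
"if faces:\n",
"    norm = np.linalg.norm(faces[0].embedding)\n",
"    print(f'Embedding L2 norm: {norm:.4f}')"
]
},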
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 7. Compare Face Similarity\n",
"\n",
"Use face embeddings to compute similarity between faces."
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Similarity between faces: 0.1135\n",
"Same person: No (threshold=0.6)\n"
]
}
],
"source": [
"# Compare first two faces\n",
"if len(results) >= 2:\n",
" face1 = results[0][2][0] # First face from first image\n",
" face2 = results[1][2][0] # First face from second image\n",
"\n",
" similarity = face1.compute_similarity(face2)\n",
" print(f'Similarity between faces: {similarity:.4f}')\n",
" print(f'Same person: {\"Yes\" if similarity > 0.6 else \"No\"} (threshold=0.6)')"
]
},
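{
"cell_type": "markdown",
"metadata": {},
"source": [
"Because the embeddings are L2-normalized, cosine similarity reduces to a plain dot product. The following is a minimal sketch (not part of the library API) that recomputes the score manually with NumPy, assuming the `(1, 512)` embedding shape shown above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"if len(results) >= 2:\n",
"    emb1 = results[0][2][0].embedding.ravel()\n",
"    emb2 = results[1][2][0].embedding.ravel()\n",
"\n",
"    # Cosine similarity; for unit-norm vectors the denominator is ~1.0.\n",
"    manual = float(np.dot(emb1, emb2) / (np.linalg.norm(emb1) * np.linalg.norm(emb2)))\n",
"    print(f'Manual cosine similarity: {manual:.4f}')"
]
},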
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Notes\n",
"\n",
"- `analyzer.analyze()` performs detection, recognition, and attribute prediction in one call\n",
"- Each `Face` object contains: `bbox`, `confidence`, `landmarks`, `embedding`, `age`, `gender`\n",
"- Gender is available as both ID (0=Female, 1=Male) and string via `face.sex` property\n",
"- Face embeddings are L2-normalized (norm ≈ 1.0) for similarity computation\n",
"- Use `face.compute_similarity(other_face)` to compare faces (returns cosine similarity)\n",
"- Typical similarity threshold: 0.6 (same person if similarity > 0.6)"
]
},
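{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a closing illustration, here is a minimal 1:N matching sketch built only from the API used above: it compares one query face against the first face of every other image with `compute_similarity` and applies the 0.6 threshold. The structure and threshold are illustrative, not prescriptive."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Compare the first face of the first image against the first face\n",
"# of every other image using compute_similarity.\n",
"if results and results[0][2]:\n",
"    query = results[0][2][0]\n",
"    for path, _, faces in results[1:]:\n",
"        if not faces:\n",
"            continue\n",
"        score = query.compute_similarity(faces[0])\n",
"        verdict = 'same person' if score > 0.6 else 'different person'\n",
"        print(f'{path.split(\"/\")[-1]}: {score:.4f} ({verdict})')"
]
}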
],
"metadata": {
"kernelspec": {
"display_name": "base",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}