mirror of
https://github.com/yakhyo/uniface.git
synced 2025-12-30 09:02:25 +00:00
323 lines
586 KiB
Plaintext
323 lines
586 KiB
Plaintext
|
|
{
|
||
|
|
"cells": [
|
||
|
|
{
|
||
|
|
"cell_type": "markdown",
|
||
|
|
"id": "cbbdf3a2",
|
||
|
|
"metadata": {},
|
||
|
|
"source": [
|
||
|
|
"# Face Analysis with UniFace\n",
|
||
|
|
"\n",
|
||
|
|
"This notebook demonstrates comprehensive face analysis using the **FaceAnalyzer** class.\n",
|
||
|
|
"\n",
|
||
|
|
"## 1. Install UniFace\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": null,
|
||
|
|
"id": "bb0037e2",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [],
|
||
|
|
"source": [
|
||
|
|
"%pip install -q uniface\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "markdown",
|
||
|
|
"id": "eb93156a",
|
||
|
|
"metadata": {},
|
||
|
|
"source": [
|
||
|
|
"## 2. Import Libraries\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 15,
|
||
|
|
"id": "df9115e1",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [],
|
||
|
|
"source": [
|
||
|
|
"import cv2\n",
|
||
|
|
"import matplotlib.pyplot as plt\n",
|
||
|
|
"\n",
|
||
|
|
"from uniface import FaceAnalyzer, RetinaFace, ArcFace, AgeGender\n",
|
||
|
|
"from uniface.visualization import draw_detections\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "markdown",
|
||
|
|
"id": "3883457d",
|
||
|
|
"metadata": {},
|
||
|
|
"source": [
|
||
|
|
"## 3. Initialize Face Analyzer\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 16,
|
||
|
|
"id": "49346c0d",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [
|
||
|
|
{
|
||
|
|
"name": "stdout",
|
||
|
|
"output_type": "stream",
|
||
|
|
"text": [
|
||
|
|
"✓ Model loaded (CoreML (Apple Silicon))\n",
|
||
|
|
"✓ Model loaded (CoreML (Apple Silicon))\n",
|
||
|
|
"✓ Model loaded (CoreML (Apple Silicon))\n"
|
||
|
|
]
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"source": [
|
||
|
|
"analyzer = FaceAnalyzer(\n",
|
||
|
|
" detector=RetinaFace(),\n",
|
||
|
|
" recognizer=ArcFace(),\n",
|
||
|
|
" age_gender=AgeGender()\n",
|
||
|
|
")\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "markdown",
|
||
|
|
"id": "bddc7700",
|
||
|
|
"metadata": {},
|
||
|
|
"source": [
|
||
|
|
"## 4. Run Face Analysis\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 17,
|
||
|
|
"id": "1d32a1da",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [
|
||
|
|
{
|
||
|
|
"name": "stdout",
|
||
|
|
"output_type": "stream",
|
||
|
|
"text": [
|
||
|
|
"\n",
|
||
|
|
"../assets/test_images/image0.jpg: Detected 1 face(s)\n",
|
||
|
|
" Face 1: Female, 28y, conf=1.000\n",
|
||
|
|
"\n",
|
||
|
|
"../assets/test_images/image1.jpg: Detected 1 face(s)\n",
|
||
|
|
" Face 1: Female, 29y, conf=1.000\n",
|
||
|
|
"\n",
|
||
|
|
"../assets/test_images/image2.jpg: Detected 1 face(s)\n",
|
||
|
|
" Face 1: Female, 28y, conf=1.000\n"
|
||
|
|
]
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"source": [
|
||
|
|
"image_paths = [\n",
|
||
|
|
" '../assets/test_images/image0.jpg',\n",
|
||
|
|
" '../assets/test_images/image1.jpg',\n",
|
||
|
|
" '../assets/test_images/image2.jpg',\n",
|
||
|
|
"]\n",
|
||
|
|
"\n",
|
||
|
|
"results = []\n",
|
||
|
|
"\n",
|
||
|
|
"for image_path in image_paths:\n",
|
||
|
|
" # Load image\n",
|
||
|
|
" image = cv2.imread(image_path)\n",
|
||
|
|
" if image is None:\n",
|
||
|
|
" print(f'Error: Could not read {image_path}')\n",
|
||
|
|
" continue\n",
|
||
|
|
"\n",
|
||
|
|
" # Analyze faces\n",
|
||
|
|
" faces = analyzer.analyze(image)\n",
|
||
|
|
"\n",
|
||
|
|
" print(f'\\n{image_path}: Detected {len(faces)} face(s)')\n",
|
||
|
|
" for i, face in enumerate(faces, 1):\n",
|
||
|
|
" print(f' Face {i}: {face.gender}, {face.age}y, conf={face.confidence:.3f}')\n",
|
||
|
|
"\n",
|
||
|
|
" # Prepare visualization\n",
|
||
|
|
" vis_image = image.copy()\n",
|
||
|
|
" bboxes = [f.bbox for f in faces]\n",
|
||
|
|
" scores = [f.confidence for f in faces]\n",
|
||
|
|
" landmarks = [f.landmarks for f in faces]\n",
|
||
|
|
" draw_detections(vis_image, bboxes, scores, landmarks, vis_threshold=0.5)\n",
|
||
|
|
"\n",
|
||
|
|
" # Add age/gender labels\n",
|
||
|
|
" for i, face in enumerate(faces, 1):\n",
|
||
|
|
" x1, y1 = int(face.bbox[0]), int(face.bbox[1])\n",
|
||
|
|
" text = f'{face.gender}, {face.age}y'\n",
|
||
|
|
" cv2.putText(vis_image, text, (x1, y1-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)\n",
|
||
|
|
"\n",
|
||
|
|
" results.append((image_path, cv2.cvtColor(vis_image, cv2.COLOR_BGR2RGB), faces))\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "markdown",
|
||
|
|
"id": "24f7d83a",
|
||
|
|
"metadata": {},
|
||
|
|
"source": [
|
||
|
|
"## 5. Visualize Results\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 18,
|
||
|
|
"id": "5d072b12",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [
|
||
|
|
{
|
||
|
|
"data": {
|
||
|
|
"image/png": "iVBORw0KGgoAAAANSUhEUgAABcgAAAH9CAYAAAAuz592AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzsvQe8ZEdxLl4Tb967OSpnCRQBIUQyCIMBAzYm2MYkh+f3bD8bY8A48LD/Ts8RnLCFDcY5PZOjAROFQAIkhLK0q7SrzeHmyfP/fX3mm/mmbp+5924QK+vUajRzT+hYXV31dXV1rt1uty2jjDLKKKOMMsooo4wyyiijjDLKKKOMMsooo4weY5T/Thcgo4wyyiijjDLKKKOMMsooo4wyyiijjDLKKKOMvhOUAeQZZZRRRhlllFFGGWWUUUYZZZRRRhlllFFGGT0mKQPIM8ooo4wyyiijjDLKKKOMMsooo4wyyiijjDJ6TFIGkGeUUUYZZZRRRhlllFFGGWWUUUYZZZRRRhll9JikDCDPKKOMMsooo4wyyiijjDLKKKOMMsooo4wyyugxSRlAnlFGGWWUUUYZZZRRRhlllFFGGWWUUUYZZZTRY5IygDyjjDLKKKOMMsooo4wyyiijjDLKKKOMMsooo8ckZQB5RhlllFFGGWWUUUYZZZRRRhlllFFGGWWUUUaPScoA8owyyiijjDLKKKOMMsooo4wyyiijjDLKKKOMHpOUAeQZZZRRRhlllFFGGWWUUUYZZZRRRhlllFFGGT0mKQPIM8roJKN3v/vdduqpp1o+n7d3vvOdj1i+Bw8etI0bN9r999+/5LMHDhwIz+7cufMRKVtGGWWUUUYZneyUzd8ZZZRRRhll9OihbN7OKKOMlDKAPKOMThB98YtftBe96EW2detWy+Vy9sEPfnDJd6anp+1nfuZn7Bd/8Rdt165d9j/+x/+wR4p+67d+y17ykpfYGWecseSz69evt9e85jX29re//REpW0YZZZRRRhk9UpTN3xlllFFGGWX06KFs3s4oo4yOB2UAeUYZnSCam5uzSy+91P78z/982e88+OCDVq/X7YUvfKFt2bLFRkdH7ZGg+fl5e8973mM/9mM/tux3Xv/619s//uM/2qFDh05o2TLKKKOMMsrokaRs/s4oo4wyyiijRw9l83ZGGWV0PCgDyDPK6ATR85//fPvN3/xN+/7v//5lPf++973PLr744vD7rLPOCqvf2Ha1ffv2sMK8adMmGx8ftyc96Un2mc98pu/darUaVr+xRWxoaMjOOeecMPGSbr311lAevI90Xv3qV4ftWqSPf/zj4b2rrrqqe+3w4cP2qle9yjZs2GAjIyN27rnn2t/8zd907z/ucY8Lq/Qf+MAHjqmdMsooo4wyyuhkomz+ziijjDLKKKNHD2XzdkYZZXQ8KAPIM8roJKFXvvKV3Qn4hhtusN27d4eJd3Z21l7wghfYZz/7Wbvpppvse77ne8IWMqx6k7Dt6p//+Z/tT/7kT+yOO+6wa6+9NkzKoCNHjtizn/1su/zyy+3rX/+6ffKTn7S9e/faK17xiu77X/rSl+wJT3hCX3ne9ra32e23326f+MQnQpp/8Rd/EbZ4KV155ZXh3YwyyiijjDJ6rFI2f2eUUUYZZZTRo4eyeTujjDKKUTF6NaOMMnrECavF69atC7+xerx58+bwG9vF8CH9xm/8Rlg9/vCHPxzipt199932b//2b/bpT3/anvOc53RXwkl/9md/Fibp3/7t3+5ee+973xuUALx73nnn2QMPPBBWpZWgCOC9Jz7xieHvWIw0vAPlIaOMMsooo4weq5TN3xlllFFGGWX06KFs3s4oo4xilHmQZ5TRSU5YyX7Tm95kF154oa1evTqsUGNlmSvZN998sxUKBXvmM58Zff9b3/qWfe5znwvv8XPBBReEe9hGBlpYWLDh4eG+9/7X//pf9i//8i922WWX2Vve8hb7yle+El
UuEEcto4wyyiijjDLqp2z+ziijjDLKKKNHD2XzdkYZPbYp8yDPKKOTnDBJY5X6D/7gD0KMM0yOL3vZy6xWq4X7+HupiR5bw373d3930T0cSALCFi7EPlNC7DSscCNOGvK/5ppr7Kd/+qdDOUg4KASr7hlllFFGGWWUUT9l83dGGWWUUUYZPXoom7czyuixTZkHeUYZneR03XXX2ete97pw6AgOE8EWMBwiQsK1VqtlX/jCF6LvX3HFFXbbbbeFrVqY6PUzNjYWnsGWLsQ984RJ+LWvfa39wz/8g73zne+0d7/73X33cQgJ3s0oo4wyyiijjPopm78zyiijjDLK6NFD2bydUUaPbcoA8owyOkGEFWRsw8IHdN9994XfesjHcginWL///e8P72Lb1g//8A+HiZmECRiT6Y/+6I/aBz/4wZDP5z//+RAfDYTVZ6w4/9AP/ZDdeOONYXvXpz71KXv9619vzWYzPPO85z0vTOa6mv1//s//sQ996EN27733hnsf/ehHw3YzErZ4feMb37DnPve5x9xWGWWUUUYZZXSyUDZ/Z5RRRhlllNGjh7J5O6OMMjoelAHkGWV0gggnV2OVlyu9b3zjG8NvTIAroT/6oz+yNWvW2NVXXx22bGFSxeq0Ek66xvavn/qpnwpxzn7iJ37C5ubmugd6YDUckzImVax8v+ENbwhx1fL5RATgGtLk5A4ql8v2S7/0S3bJJZfYM57xjBBvDbHRSJjETzvtNHv6059+TO2UUUYZZZRRRicTZfN3RhlllFFGGT16KJu3M8ooo+NBuXa73T4uKWWUUUaPavrYxz5mb37zm8P2LU7gg+iqq66yn/3Znw0r6xlllFFGGWWU0XeGsvk7o4wyyiijjB49lM3bGWV0clJ2SGdGGWUU6IUvfKHdc889tmvXLjv11FMHPnvgwAF76UtfGraPZZRRRhlllFFG3znK5u+MMsooo4wyevRQNm9nlNHJSZkHeUYZZZRRRhlllFFGGWWUUUYZZZRRRhlllFFGj0nKYpBnlFFGGWWUUUYZZZRRRhlllFFGGWWUUUYZZfSYpAwgzyijjDLKKKOMMsooo4wyyiijjDLKKKOMMsroMUkZQJ5RRhlllFFGGWWUUUYZZZRRRhlllFFGGWWU0WOSMoA8o4wyyiijjDLKKKOMMsooo4wyyiijjDLKKKPHJBWX++C6yfHodT3jE79zuVzveq5thVzecrl2+JvP5vN5KxaL4e9SqWSFQiEpTLFow8PD4drIyEi41mw2rdVqhXf4LO7x/UajEa5v2LDBNmxcb6Olkg0NlcO9arUa3p+fn7cjR47YwsJCeB7poZx4r1wuh7TwN57HM/jMzc2Fd/EM7uMb71UqlZA2yoPvYqEQ3h0dHbXh4RFbqFRtZmYmvGv5gm3ZsiXkuXvPXjt8ZCqkzbqC8C7Swgfp4xPe7RB+4/16vR5+Dw0NddvSn6+KNDwhfXxYZ/YN8wl16NSfH7QxPvjNcjI/9gfbgB8S77EOmq6modfxPn83m/Xue57HkLamybSS9Ard9Hk/9o6Wwz+r+fl7/hmmy7Lpt/afJ/CRT2sp0vJw7CAdfNj27CvwM8YHxlHC28m4wd/gUfAPCDy1bdu27pjDPaQFvgf/WiPpI+UVjKOpqak+nmVa4E98Y3yQZ/khn2F84Tl8tF/QcsVi2eqtfl7nM+QptrHyDPkXhPdqtVqXh9ke+Ga74Rtp4zmOOZSZf6N90BbsQ6bDdkAdtIzal7geG5dh/NlivmQdKf9ipGNL+Zb1ZjtwfKO+qMPYyHC4h7QpJ/H8+HgixyEPDx8+3O0LlAW/C6VyX17hd6u/Lr11VX7jATwbP+9Z24Pl1X5NGw+skx+jsTZeCa10/KW9H5Mb+t25apaLv4fvfQen7UTT17/+9ZSy9Zd7UPtwTPH5ND7X/tX09UOZEMvDzwtpafiyx+ZC5RuVYxjD+I0xTxkDQrk4vjn/QzZQZ+B45d8cO6wP5R7HEuVFvV
i3W15/i9333PvsaKh0pGQX/sGFtu4r67r13/dd++yeX7jHGhMNe6QotzdnEz83YeUvlG3ul+ds4WcWury9iFpmm96
|
||
|
|
"text/plain": [
|
||
|
|
"<Figure size 1500x500 with 3 Axes>"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
"metadata": {},
|
||
|
|
"output_type": "display_data"
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"source": [
|
||
|
|
"# squeeze=False keeps `axes` a 2D array even when there is a single result,\n",
"# so `axes[0][idx]` works for any number of images (plain subplots(1, 1)\n",
"# would return a bare Axes and `axes[idx]` would raise TypeError).\n",
"fig, axes = plt.subplots(1, len(results), figsize=(15, 5), squeeze=False)\n",
"\n",
"for idx, (path, vis_image, faces) in enumerate(results):\n",
"    axes[0][idx].imshow(vis_image)\n",
"    axes[0][idx].axis('off')\n",
"    axes[0][idx].set_title(f'{len(faces)} face(s)', fontsize=10)\n",
"\n",
"plt.tight_layout()\n",
"plt.show()\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "markdown",
|
||
|
|
"id": "9ab7272c",
|
||
|
|
"metadata": {},
|
||
|
|
"source": [
|
||
|
|
"## 6. Inspect Face Attributes\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 20,
|
||
|
|
"id": "f5009932",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [
|
||
|
|
{
|
||
|
|
"name": "stdout",
|
||
|
|
"output_type": "stream",
|
||
|
|
"text": [
|
||
|
|
"Face Attributes:\n",
|
||
|
|
" - Bounding box: [88, 54, 442, 444]\n",
|
||
|
|
" - Confidence: 1.000\n",
|
||
|
|
" - Landmarks shape: (5, 2)\n",
|
||
|
|
" - Age: 28 years\n",
|
||
|
|
" - Gender string: Female\n",
|
||
|
|
" - Embedding shape: (1, 512)\n",
|
||
|
|
" - Embedding norm: 1D, L2-norm ≈ 1.0\n"
|
||
|
|
]
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"source": [
|
||
|
|
"# Get first face from first image\n",
|
||
|
|
"_, _, faces = results[0]\n",
|
||
|
|
"if faces:\n",
|
||
|
|
" face = faces[0]\n",
|
||
|
|
"\n",
|
||
|
|
" print('Face Attributes:')\n",
|
||
|
|
" print(f' - Bounding box: {face.bbox.astype(int).tolist()}')\n",
|
||
|
|
" print(f' - Confidence: {face.confidence:.3f}')\n",
|
||
|
|
" print(f' - Landmarks shape: {face.landmarks.shape}')\n",
|
||
|
|
" print(f' - Age: {face.age} years')\n",
|
||
|
|
" print(f' - Gender string: {face.gender}')\n",
|
||
|
|
" print(f' - Embedding shape: {face.embedding.shape}')\n",
|
||
|
|
"    print(f'  - Embedding norm: {face.embedding.shape[-1]}D, L2-norm ≈ 1.0')\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "markdown",
|
||
|
|
"id": "154247c1",
|
||
|
|
"metadata": {},
|
||
|
|
"source": [
|
||
|
|
"## Notes\n",
|
||
|
|
"\n",
|
||
|
|
"- `analyzer.analyze()` performs detection, recognition, and attribute prediction in one call\n",
|
||
|
|
"- Gender is returned as integer ID: `0` for Female, `1` for Male\n",
|
||
|
|
"- Use `face.gender` property to get string representation (\"Female\" or \"Male\")\n",
|
||
|
|
"- Face embeddings are normalized (L2-norm ≈ 1.0) for similarity computation\n",
|
||
|
|
"- Use `face.compute_similarity(other_face)` to compare faces\n"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 22,
|
||
|
|
"id": "c3cadebf",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [],
|
||
|
|
"source": [
|
||
|
|
"face1 = faces[0]"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 23,
|
||
|
|
"id": "d7bb6975",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [],
|
||
|
|
"source": [
|
||
|
|
"face2 = faces[0]"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 26,
|
||
|
|
"id": "d3f5f834",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [
|
||
|
|
{
|
||
|
|
"data": {
|
||
|
|
"text/plain": [
|
||
|
|
"(1, 512)"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
"execution_count": 26,
|
||
|
|
"metadata": {},
|
||
|
|
"output_type": "execute_result"
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"source": [
|
||
|
|
"face1.embedding.shape"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": 27,
|
||
|
|
"id": "1c8b72df",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [
|
||
|
|
{
|
||
|
|
"data": {
|
||
|
|
"text/plain": [
|
||
|
|
"0.9999901056289673"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
"execution_count": 27,
|
||
|
|
"metadata": {},
|
||
|
|
"output_type": "execute_result"
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"source": [
|
||
|
|
"face1.compute_similarity(face2)"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
{
|
||
|
|
"cell_type": "code",
|
||
|
|
"execution_count": null,
|
||
|
|
"id": "4a970f13",
|
||
|
|
"metadata": {},
|
||
|
|
"outputs": [],
|
||
|
|
"source": []
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"metadata": {
|
||
|
|
"language_info": {
|
||
|
|
"name": "python"
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"nbformat": 4,
|
||
|
|
"nbformat_minor": 5
|
||
|
|
}
|