Mirror of https://github.com/yakhyo/uniface.git (synced 2025-12-30 09:02:25 +00:00)

Commit: ref: Several minor updates, does not affect performance
@@ -69,7 +69,6 @@ class RetinaFace:
         Logger.info(
             f"Initializing RetinaFace with model={model_name}, conf_thresh={conf_thresh}, nms_thresh={nms_thresh}, "
             f"pre_nms_topk={pre_nms_topk}, post_nms_topk={post_nms_topk}, dynamic_size={dynamic_size}, "
             f"input_size={input_size}"
         )
-
 
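For reference, the keyword names echoed by this log line are the RetinaFace constructor parameters. A minimal usage sketch, not part of this commit; every argument value below is an illustrative assumption, and the constructor is assumed to fall back to a default weight when model_name is omitted:

# Hypothetical example; only the parameter names come from the log message above.
from uniface.detection import RetinaFace

detector = RetinaFace(
    conf_thresh=0.5,        # assumed: minimum score to keep a detection
    nms_thresh=0.4,         # assumed: IoU threshold for non-maximum suppression
    pre_nms_topk=5000,      # assumed: candidate boxes kept before NMS
    post_nms_topk=750,      # assumed: detections kept after NMS
    dynamic_size=False,     # assumed: use a fixed network input size
    input_size=(640, 640),  # assumed: network input resolution
)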
@@ -7,6 +7,10 @@ import numpy as np
 from skimage.transform import SimilarityTransform
 from typing import Tuple
 
+
+__all__ = ["face_alignment", "compute_similarity", "bbox_center_alignment", "transform_points_2d"]
+
+
 # Reference alignment for facial landmarks (ArcFace)
 reference_alignment: np.ndarray = np.array(
     [
@@ -82,7 +86,7 @@ def face_alignment(image: np.ndarray, landmark: np.ndarray, image_size: int = 11
     return warped, M_inv
 
 
-def compute_similarity(feat1: np.ndarray, feat2: np.ndarray, normalized: bool=False) -> np.float32:
+def compute_similarity(feat1: np.ndarray, feat2: np.ndarray, normalized: bool = False) -> np.float32:
     """Computing Similarity between two faces.
 
     Args:
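compute_similarity compares two face embedding vectors. A standalone sketch of the usual cosine-similarity computation such a helper performs (illustrative only; not claimed to be the library's exact implementation):

import numpy as np

def cosine_similarity(feat1: np.ndarray, feat2: np.ndarray, normalized: bool = False) -> np.float32:
    # If the embeddings are already L2-normalized, a dot product is enough;
    # otherwise divide by the product of the norms.
    feat1, feat2 = feat1.ravel(), feat2.ravel()
    if normalized:
        return np.float32(np.dot(feat1, feat2))
    return np.float32(np.dot(feat1, feat2) / (np.linalg.norm(feat1) * np.linalg.norm(feat2)))

emb1 = np.random.rand(512).astype(np.float32)  # dummy 512-D embeddings
emb2 = np.random.rand(512).astype(np.float32)
print(cosine_similarity(emb1, emb2))           # value roughly in [-1, 1]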
@@ -146,36 +150,21 @@ def bbox_center_alignment(image, center, output_size, scale, rotation):
     return cropped, M
 
 
-def trans_points2d(pts, M):
-    new_pts = np.zeros(shape=pts.shape, dtype=np.float32)
-    for i in range(pts.shape[0]):
-        pt = pts[i]
-        new_pt = np.array([pt[0], pt[1], 1.], dtype=np.float32)
-        new_pt = np.dot(M, new_pt)
-        #print('new_pt', new_pt.shape, new_pt)
-        new_pts[i] = new_pt[0:2]
-
-    return new_pts
-
-
-def trans_points3d(pts, M):
-    scale = np.sqrt(M[0][0] * M[0][0] + M[0][1] * M[0][1])
-    #print(scale)
-    new_pts = np.zeros(shape=pts.shape, dtype=np.float32)
-    for i in range(pts.shape[0]):
-        pt = pts[i]
-        new_pt = np.array([pt[0], pt[1], 1.], dtype=np.float32)
-        new_pt = np.dot(M, new_pt)
-        #print('new_pt', new_pt.shape, new_pt)
-        new_pts[i][0:2] = new_pt[0:2]
-        new_pts[i][2] = pts[i][2] * scale
-
-    return new_pts
-
-
-def trans_points(pts, M):
-    if pts.shape[1] == 2:
-        return trans_points2d(pts, M)
-    else:
-        return trans_points3d(pts, M)
-
+def transform_points_2d(points: np.ndarray, transform: np.ndarray) -> np.ndarray:
+    """
+    Apply a 2D affine transformation to an array of 2D points.
+
+    Args:
+        points (np.ndarray): An (N, 2) array of 2D points.
+        transform (np.ndarray): A (2, 3) affine transformation matrix.
+
+    Returns:
+        np.ndarray: Transformed (N, 2) array of points.
+    """
+    transformed = np.zeros_like(points, dtype=np.float32)
+    for i in range(points.shape[0]):
+        point = np.array([points[i, 0], points[i, 1], 1.0], dtype=np.float32)
+        result = np.dot(transform, point)
+        transformed[i] = result[:2]
+
+    return transformed
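The replacement loops over points one at a time. An equivalent vectorized sketch, handy for sanity-checking the behaviour (the helper name below is not part of the library):

import cv2
import numpy as np

def transform_points_2d_vectorized(points: np.ndarray, transform: np.ndarray) -> np.ndarray:
    # Append a column of ones so each point becomes [x, y, 1], then apply the
    # (2, 3) affine matrix to all points at once.
    homogeneous = np.hstack([points, np.ones((points.shape[0], 1), dtype=np.float32)])
    return (homogeneous @ transform.T).astype(np.float32)

points = np.array([[10.0, 20.0], [30.0, 40.0]], dtype=np.float32)
M = cv2.getRotationMatrix2D(center=(0, 0), angle=30, scale=1.0)  # sample (2, 3) affine matrix
M_inv = cv2.invertAffineTransform(M)

warped = transform_points_2d_vectorized(points, M)
restored = transform_points_2d_vectorized(warped, M_inv)
assert np.allclose(restored, points, atol=1e-3)  # round trip recovers the original points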
@@ -6,7 +6,7 @@ import numpy as np
 from typing import Tuple
 
 from uniface.log import Logger
-from uniface.face_utils import bbox_center_alignment, trans_points
+from uniface.face_utils import bbox_center_alignment, transform_points_2d
 from uniface.model_store import verify_model_weights
 
 from uniface.detection import RetinaFace
@@ -18,14 +18,14 @@ __all__ = ['Landmark']
 class Landmark:
     def __init__(self, model_name: LandmarkWeights = LandmarkWeights.DEFAULT, input_size: Tuple[int, int] = (192, 192)) -> None:
         """
-        Initializes the Attribute model for inference.
+        Initializes the Facial Landmark model for inference.
 
         Args:
             model_path (str): Path to the ONNX file.
         """
 
         Logger.info(
-            f"Initializing Landmark with model={model_name}, "
+            f"Initializing Facial Landmark with model={model_name}, "
             f"input_size={input_size}"
         )
 
@@ -40,8 +40,7 @@ class Landmark:
         # Initialize model
         self._initialize_model(model_path=self._model_path)
 
-
-    def _initialize_model(self, model_path:str):
+    def _initialize_model(self, model_path: str):
         """ Initialize the model from the given path.
         Args:
             model_path (str): Path to .onnx model.
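_initialize_model loads the network from the given .onnx path. A minimal sketch of how such a loader is commonly written with onnxruntime; the backend choice here is an assumption, not a quote of uniface's implementation:

import onnxruntime as ort

def initialize_model(model_path: str) -> ort.InferenceSession:
    # Create a CPU inference session and report the expected input tensor layout.
    session = ort.InferenceSession(model_path, providers=["CPUExecutionProvider"])
    input_meta = session.get_inputs()[0]
    print(f"Loaded {model_path}: input '{input_meta.name}' with shape {input_meta.shape}")
    return session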
@@ -95,29 +94,29 @@ class Landmark:
             swapRB=True
         )
         return blob, M
 
-    def postprocess(self, preds: np.ndarray, M: np.ndarray) -> np.ndarray:
+    def postprocess(self, predictions: np.ndarray, M: np.ndarray) -> np.ndarray:
         """
         Postprocess model outputs to get landmarks.
 
         Args:
-            preds (np.ndarray): Raw model predictions.
+            predictions (np.ndarray): Raw model predictions.
             M (np.ndarray): Affine transformation matrix.
 
         Returns:
             np.ndarray: Transformed landmarks.
         """
 
-        preds = preds.reshape((-1, 2))
+        predictions = predictions.reshape((-1, 2))
 
-        preds[:, 0:2] += 1
-        preds[:, 0:2] *= (self.input_size[0] // 2)
+        predictions[:, 0:2] += 1
+        predictions[:, 0:2] *= (self.input_size[0] // 2)
 
         IM = cv2.invertAffineTransform(M)
-        preds = trans_points(preds, IM)
+        predictions = transform_points_2d(predictions, IM)
 
-        return preds
+        return predictions
 
     def predict(self, image: np.ndarray, bbox: np.ndarray) -> np.ndarray:
         """
         Predict facial landmarks for the given image and bounding box.
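postprocess shifts the raw outputs from [-1, 1] into pixel coordinates of the aligned crop (the += 1 and *= input_size[0] // 2 steps), then maps them back to original-image coordinates through the inverted crop transform. A small standalone sketch of that decoding; the raw values and the crop transform below are made up for illustration:

import cv2
import numpy as np

input_size = (192, 192)
raw = np.array([[-0.5, 0.0], [0.25, 0.75]], dtype=np.float32)  # fake model outputs in [-1, 1]

# Shift from [-1, 1] to [0, 2], then scale by half the crop size -> crop pixel coordinates.
decoded = (raw + 1) * (input_size[0] // 2)

# Map crop coordinates back to the original image with the inverse crop transform.
M = np.array([[1.0, 0.0, 50.0], [0.0, 1.0, 80.0]], dtype=np.float32)  # illustrative crop transform
IM = cv2.invertAffineTransform(M)
homogeneous = np.hstack([decoded, np.ones((decoded.shape[0], 1), dtype=np.float32)])
landmarks = homogeneous @ IM.T
print(landmarks)  # landmark positions in original-image coordinates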
@@ -137,6 +136,7 @@ class Landmark:
 
 # TODO: For testing purposes only, remote later
+
 
 if __name__ == "__main__":
 
     face_detector = RetinaFace(
@@ -47,19 +47,22 @@ def verify_model_weights(model_name: str, root: str = '~/.uniface/models') -> st
     os.makedirs(root, exist_ok=True)
 
     model_name = model_name.value
-    model_path = os.path.normpath(os.path.join(root, f'{model_name}.onnx'))
+    url = const.MODEL_URLS.get(model_name)
+    if not url:
+        Logger.error(f"No URL found for model '{model_name}'")
+        raise ValueError(f"No URL found for model '{model_name}'")
+
+    file_ext = os.path.splitext(url)[1]
+    model_path = os.path.normpath(os.path.join(root, f'{model_name}{file_ext}'))
 
     if not os.path.exists(model_path):
-        url = const.MODEL_URLS.get(model_name)
-        if not url:
-            Logger.error(f"No URL found for model '{model_name}'")
-            raise ValueError(f"No URL found for model '{model_name}'")
-
         Logger.info(f"Downloading model '{model_name}' from {url}")
-        download_file(url, model_path)
-        Logger.info(f"Successfully downloaded '{model_name}' to {model_path}")
+        try:
+            download_file(url, model_path)
+            Logger.info(f"Successfully downloaded '{model_name}' to {model_path}")
+        except Exception as e:
+            Logger.error(f"Failed to download model '{model_name}': {e}")
+            raise ConnectionError(f"Download failed for '{model_name}'")
     else:
         Logger.info(f"Model '{model_name}' already exists at {model_path}")
 
     expected_hash = const.MODEL_SHA256.get(model_name)
     if expected_hash and not verify_file_hash(model_path, expected_hash):
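The reworked flow looks up the download URL first, derives the file extension from it, downloads inside a try/except, and then checks the SHA-256 hash. A self-contained sketch of the same pattern using only the standard library; the URL table, hash table, and function name here are placeholders, not uniface's actual constants or helpers:

import hashlib
import os
import urllib.request

MODEL_URLS = {"example_model": "https://example.com/weights/example_model.onnx"}  # placeholder table
MODEL_SHA256 = {"example_model": "0" * 64}                                         # placeholder hash

def fetch_weights(model_name: str, root: str = "~/.models") -> str:
    root = os.path.expanduser(root)
    os.makedirs(root, exist_ok=True)

    # Resolve the URL before building the local path, so the extension can follow the URL.
    url = MODEL_URLS.get(model_name)
    if not url:
        raise ValueError(f"No URL found for model '{model_name}'")

    file_ext = os.path.splitext(url)[1]
    model_path = os.path.normpath(os.path.join(root, f"{model_name}{file_ext}"))

    if not os.path.exists(model_path):
        try:
            urllib.request.urlretrieve(url, model_path)
        except Exception as exc:
            raise ConnectionError(f"Download failed for '{model_name}'") from exc

    # Verify the file hash when a reference digest is available.
    expected = MODEL_SHA256.get(model_name)
    if expected:
        with open(model_path, "rb") as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        if digest != expected:
            raise ValueError(f"Hash mismatch for '{model_name}'")
    return model_path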