from huggingface_hub import snapshot_download
from insightface.app import FaceAnalysis
from PIL import Image  # needed for Image.LANCZOS in process_image_by_bbox_larger
import numpy as np
import cv2
import gradio as gr
# Download the face encoder weights
snapshot_download(
    "fal/AuraFace-v1",
    local_dir="models/auraface",
)

# Initialize the AuraFace analysis pipeline (onnxruntime falls back to CPU if CUDA is unavailable)
app = FaceAnalysis(
    name="auraface",
    providers=["CUDAExecutionProvider", "CPUExecutionProvider"],
    root=".",
)
app.prepare(ctx_id=0, det_size=(640, 640))
def process_image_by_bbox_larger(input_image, bbox_xyxy, min_bbox_ratio=0.2):
    """
    Crop a square region around a bounding box and resize it to 1024x1024.

    Parameters:
    - input_image: PIL Image object.
    - bbox_xyxy: Tuple (x1, y1, x2, y2) representing the bounding box coordinates.
    - min_bbox_ratio: Minimum fraction of the crop area the bounding box must occupy.

    Returns:
    - The cropped image resized to 1024x1024 if a valid square crop exists,
      or None if the bounding box is too elongated to occupy at least
      min_bbox_ratio of any square crop that contains it.
    """
    # Constants
    target_size = 1024

    # Extract bounding box coordinates
    x1, y1, x2, y2 = bbox_xyxy
    bbox_w = x2 - x1
    bbox_h = y2 - y1

    # Calculate the area of the bounding box
    bbox_area = bbox_w * bbox_h

    # The smallest square crop that contains the bbox maximizes the bbox-to-crop ratio.
    # If even that square leaves the bbox below min_bbox_ratio (a very elongated box),
    # no valid square crop exists, so return None as documented.
    crop_size = max(bbox_w, bbox_h)
    if (bbox_area / (crop_size * crop_size)) < min_bbox_ratio:
        return None

    # Expand the square crop as far as possible while the bbox still covers at least
    # min_bbox_ratio of the crop area, without exceeding the image's shorter side
    max_possible_crop_size = min(input_image.width, input_image.height)
    while crop_size < max_possible_crop_size:
        new_crop_size = crop_size + 10
        new_crop_area = new_crop_size * new_crop_size
        if (bbox_area / new_crop_area) < min_bbox_ratio:
            break  # Stop if expanding further violates the minimum ratio
        crop_size = new_crop_size

    # Determine the center of the bounding box
    center_x = (x1 + x2) // 2
    center_y = (y1 + y2) // 2

    # Calculate the crop coordinates centered around the bounding box
    crop_x1 = max(0, center_x - crop_size // 2)
    crop_y1 = max(0, center_y - crop_size // 2)
    crop_x2 = min(input_image.width, crop_x1 + crop_size)
    crop_y2 = min(input_image.height, crop_y1 + crop_size)

    # Ensure the crop is square; shrink it if it ran past the image bounds
    if crop_x2 - crop_x1 != crop_y2 - crop_y1:
        side_length = min(crop_x2 - crop_x1, crop_y2 - crop_y1)
        crop_x2 = crop_x1 + side_length
        crop_y2 = crop_y1 + side_length

    # Crop the image
    cropped_image = input_image.crop((crop_x1, crop_y1, crop_x2, crop_y2))

    # Resize the cropped image to 1024x1024
    resized_image = cropped_image.resize((target_size, target_size), Image.LANCZOS)

    return resized_image
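# Illustrative only: a minimal sketch of calling process_image_by_bbox_larger directly,
# assuming "example_face.png" is a local RGB portrait and the bbox values are placeholders.
'''
example = Image.open("example_face.png").convert("RGB")
crop = process_image_by_bbox_larger(example, (100, 120, 300, 360), min_bbox_ratio=0.2)
if crop is not None:
    print(crop.size)  # (1024, 1024)
'''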
def calc_emb_cropped(image, app, min_bbox_ratio=0.2):
    """Detect a face and return a square 1024x1024 crop centered on it."""
    face_image = image.copy()
    face_info = app.get(cv2.cvtColor(np.array(face_image), cv2.COLOR_RGB2BGR))
    # Use the first detected face
    face_info = face_info[0]
    cropped_face_image = process_image_by_bbox_larger(face_image, face_info["bbox"], min_bbox_ratio=min_bbox_ratio)
    return cropped_face_image
def get_embedding(image):
    face_image = image.copy()
    # Detect faces and compute embeddings on the BGR array expected by insightface
    face_info = app.get(cv2.cvtColor(np.array(face_image), cv2.COLOR_RGB2BGR))
    if len(face_info) > 0:
        # Return the embedding of the first detected face
        return face_info[0].normed_embedding
    else:
        return None
'''
from PIL import Image
im0 = Image.open("Unreal_5_render_of_a_handsome_man_gentle_snowfall_at_dusk_a_bustling_marketplace_in_the_background.png")
calc_emb_cropped(im0, app)
get_embedding(im0)
'''
def calculate_similarity(image1, image2):
    # Get the embedding of each image
    embedding1 = get_embedding(image1)
    embedding2 = get_embedding(image2)
    if embedding1 is not None and embedding2 is not None:
        # Cosine similarity (normed_embedding is already unit-length, so the division is a safeguard)
        similarity = np.dot(embedding1, embedding2) / (np.linalg.norm(embedding1) * np.linalg.norm(embedding2))
        return f"Image similarity: {similarity:.4f}"
    else:
        return "Unable to detect a face or compute similarity"
# Build the Gradio interface
iface = gr.Interface(
    fn=calculate_similarity,
    inputs=[gr.Image(type="pil"), gr.Image(type="pil")],
    outputs="text",
    title="Image Similarity Calculator",
    description="Upload two images and compute their similarity.",
)

# Launch the Gradio app
iface.launch(share=True)