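"""Face Similarity Checker Gradio app.

Detects a face in each of two uploaded images with an OpenCV Haar cascade,
crops it, and compares the two crops with DeepFace (VGG-Face) to report a
rough similarity percentage alongside the cropped faces.
"""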
import gradio as gr
from deepface import DeepFace
import cv2


def crop_face(image_path):
    # Load the Haar cascade classifier for frontal face detection
    face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')

    # Read the image and convert it to grayscale for detection
    img = cv2.imread(image_path)
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    # Detect faces in the image
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)

    # If a face is detected, crop the first one
    if len(faces) > 0:
        (x, y, w, h) = faces[0]  # Take the first detected face
        return img[y:y + h, x:x + w]
    else:
        raise ValueError("No face detected in the image.")


def calculate_similarity(image1, image2):
    # Crop the face out of each input image
    try:
        face1 = crop_face(image1)
        face2 = crop_face(image2)

        # Keep RGB copies of the crops for display in Gradio; DeepFace itself
        # expects OpenCV-style BGR arrays, so the crops are passed unchanged.
        face1_rgb = cv2.cvtColor(face1, cv2.COLOR_BGR2RGB)
        face2_rgb = cv2.cvtColor(face2, cv2.COLOR_BGR2RGB)

        # Compare the two cropped faces; detection is disabled because the
        # faces are already cropped.
        result = DeepFace.verify(face1, face2, model_name='VGG-Face', enforce_detection=False)

        # Convert the verification distance to a similarity percentage
        similarity_percentage = (1 - result['distance']) * 100
        return similarity_percentage, face1_rgb, face2_rgb
    except Exception as e:
        return str(e), None, None

# Create the Gradio interface (gr.inputs/gr.outputs are deprecated; use the
# top-level components instead)
iface = gr.Interface(
    fn=calculate_similarity,
    inputs=[
        gr.Image(type="filepath", label="Image 1"),
        gr.Image(type="filepath", label="Image 2"),
    ],
    outputs=[
        gr.Textbox(label="Similarity Percentage"),
        gr.Image(label="Cropped Face 1", type="numpy"),
        gr.Image(label="Cropped Face 2", type="numpy"),
    ],
    title="Face Similarity Checker",
    description="Upload two images of faces to check their similarity. The faces will be detected and cropped automatically.",
)
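
# Example of calling the comparison directly (hypothetical file names):
#   score, crop1, crop2 = calculate_similarity("person_a.jpg", "person_b.jpg")
#   print(f"Similarity: {score:.1f}%")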
# Launch the interface
iface.launch()