Duy-NM committed
Commit · ea5450c
1 Parent(s): 86bebdd
init
Browse files
- app.py +83 -0
- example/face2.jpg +0 -0
- example/ronaldo.jpg +0 -0
app.py
ADDED
@@ -0,0 +1,83 @@
import json
import os
import cv2
import numpy as np
import gradio as gr
from numpy.linalg import norm
from gradio_client import Client

os.environ['CUDA_LAUNCH_BLOCKING'] = '0'
G = ['Female', 'Male']

token = "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"
client = Client("https://vtechai-face-demo.hf.space/", token)

def draw(im, conf=0.65, max_face=1, metric='default'):
    im_path, jj = client.predict(im, conf, fn_index=1)
    with open(jj) as f:
        jss = json.load(f)
    return im_path, jss

def face_search(im1, im2, threshold):
    im3, im4, te = client.predict(im1, im2, threshold, fn_index=3)
    return im3, im4, te


with gr.Blocks() as face_compare:
    with gr.Row():
        im1 = gr.Image(label='Register', type='filepath').style(full_width=True, height=300)
        im2 = gr.Image(label='Image for Search', type='filepath').style(full_width=True, height=300)

    with gr.Row():
        im3 = gr.Image(label='Output', height=300)  # .style(height=300, full_width=True, full_height=True)
        im4 = gr.Image(label='Output', height=300)  # .style(height=300, full_width=True, full_height=True)

    sl = gr.Slider(0.3, 1, step=0.05, value=0.5, label='Face Matching Threshold')
    text = gr.Text(label="Output", interactive=False)
    with gr.Row():
        btn = gr.Button(value="Run")
        btn_clean = gr.ClearButton([im1, im2, im3, im4])
    btn.click(fn=face_search, inputs=[im1, im2, sl], outputs=[im3, im4, text])
    # btn2 = gr.Button(value="Check", link="https://manhduy160396.wixsite.com/vtech")

    # example
    gr.Examples(
        examples=[[
            os.path.join(os.path.dirname(__file__), "example/ronaldo.jpg"),
            os.path.join(os.path.dirname(__file__), "example/face2.jpg")
        ]],
        inputs=[im1, im2]

    )

with gr.Blocks() as face_analyze:
    with gr.Row():
        im1 = gr.Image(shape=(300, 300), type='filepath', height=300, container=True)
        im2 = gr.Image(shape=(300, 300), height=300, container=True)

    with gr.Row():
        with gr.Column():
            area = gr.Radio(["Asia", "Europe/America"], label="Area?", value="Asia")
            cb_age = gr.Checkbox(label="Age")
            cb_gender = gr.Checkbox(label="Gender")
            cb_emotion = gr.Checkbox(label="Emotion")
            sl = gr.Slider(0, 1, step=0.05, value=0.65, label='Confidence Threshold')

        with gr.Column():
            js = gr.JSON(label="json")

    with gr.Row():
        btn = gr.Button(value="Run")
        btn_clean = gr.ClearButton([im1, im2])

    btn.click(fn=draw, inputs=[im1, sl], outputs=[im2, js])
    btn2 = gr.Button(value="Check", link="https://manhduy160396.wixsite.com/vtech")


with gr.Blocks() as demo:
    gr.Markdown("## Text Examples")
    gr.TabbedInterface([face_analyze, face_compare], ["Face Analyze", "Face Compare"])


if __name__ == "__main__":
    demo.launch()
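app.py performs no inference of its own: both tabs proxy to the vtechai-face-demo Space, with draw() calling the analysis endpoint (fn_index=1) and face_search() the comparison endpoint (fn_index=3). Below is a minimal sketch of exercising those same endpoints directly with gradio_client, assuming the Space is reachable and a valid token is exported as HF_TOKEN (the environment-variable name is illustrative, not part of this commit).

# Sketch only: calls the same remote endpoints that app.py wires to its buttons.
# Assumes a valid Hugging Face token in HF_TOKEN and the example images added in this commit.
import json
import os

from gradio_client import Client

client = Client("https://vtechai-face-demo.hf.space/", hf_token=os.environ.get("HF_TOKEN"))

# Face analysis (the call made by draw): returns an image path and a path to a JSON results file.
im_path, json_path = client.predict("example/ronaldo.jpg", 0.65, fn_index=1)
with open(json_path) as f:
    print(json.load(f))

# Face comparison (the call made by face_search): two annotated images and a text verdict.
im_reg, im_query, verdict = client.predict("example/ronaldo.jpg", "example/face2.jpg", 0.5, fn_index=3)
print(verdict)

Keeping the face models in the upstream Space keeps this demo lightweight: the local app only defines the Gradio layout and forwards user inputs.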
example/face2.jpg
ADDED
example/ronaldo.jpg
ADDED