import os

import gradio as gr
import requests
from PIL import Image

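# Gradio front end for the MiniAiLive FaceSDK Web API. Each handler below uploads
# the selected image(s) to a hosted endpoint at faceapi.miniai.live and turns the
# JSON response into a cropped-face preview plus an HTML result table.


# Compare two face images via the face_compare endpoint; returns a side-by-side
# crop of the detected faces and an HTML table with the match verdict and similarity score.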
def face_compare(frame1, frame2):
    # Upload both images to the hosted face-compare endpoint.
    url = "https://faceapi.miniai.live/face_compare"
    with open(frame1, 'rb') as f1, open(frame2, 'rb') as f2:
        files = {'file1': f1, 'file2': f2}
        r = requests.post(url=url, files=files)

    data = r.json()
    faces = None

    compare_result = data.get('compare_result')
    compare_similarity = data.get('compare_similarity')

    # Render the comparison verdict and similarity score as a small HTML table.
    html = (
        "<table>"
        "<tr><th>State</th><th>Value</th></tr>"
        "<tr><td>Is same person?</td><td>{compare_result}</td></tr>"
        "<tr><td>Similarity</td><td>{compare_similarity}</td></tr>"
        "</table>"
    ).format(compare_result=compare_result, compare_similarity=compare_similarity)

    try:
        image1 = Image.open(frame1)
        image2 = Image.open(frame2)

        face1 = None
        face2 = None

        # Crop the detected face from the first image, clamp the box to the
        # image bounds, and resize it to a 150 px-tall thumbnail.
        if data.get('face1') is not None:
            face = data.get('face1')
            x1 = max(face.get('x1'), 0)
            y1 = max(face.get('y1'), 0)
            x2 = min(face.get('x2'), image1.width - 1)
            y2 = min(face.get('y2'), image1.height - 1)

            face1 = image1.crop((x1, y1, x2, y2))
            face_image_ratio = face1.width / float(face1.height)
            resized_w = int(face_image_ratio * 150)
            resized_h = 150
            face1 = face1.resize((resized_w, resized_h))

        # Same crop-and-resize for the face detected in the second image.
        if data.get('face2') is not None:
            face = data.get('face2')
            x1 = max(face.get('x1'), 0)
            y1 = max(face.get('y1'), 0)
            x2 = min(face.get('x2'), image2.width - 1)
            y2 = min(face.get('y2'), image2.height - 1)

            face2 = image2.crop((x1, y1, x2, y2))
            face_image_ratio = face2.width / float(face2.height)
            resized_w = int(face_image_ratio * 150)
            resized_h = 150
            face2 = face2.resize((resized_w, resized_h))

        # Paste whichever thumbnails exist side by side on a grey canvas.
        if face1 is not None and face2 is not None:
            new_image = Image.new('RGB', (face1.width + face2.width + 10, 150), (80, 80, 80))
            new_image.paste(face1, (0, 0))
            new_image.paste(face2, (face1.width + 10, 0))
            faces = new_image.copy()
        elif face1 is not None and face2 is None:
            new_image = Image.new('RGB', (face1.width + face1.width + 10, 150), (80, 80, 80))
            new_image.paste(face1, (0, 0))
            faces = new_image.copy()
        elif face1 is None and face2 is not None:
            new_image = Image.new('RGB', (face2.width + face2.width + 10, 150), (80, 80, 80))
            new_image.paste(face2, (face2.width + 10, 0))
            faces = new_image.copy()
    except Exception:
        # If the response is missing face boxes or cropping fails, skip the preview.
        pass

    return [faces, html]


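# Run liveness, age, and gender detection on one image via the face_liveness_check
# endpoint; returns a strip of cropped faces and an HTML table with one row per face.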
def check_liveness(frame):
    # Upload the image to the hosted liveness-check endpoint.
    url = "https://faceapi.miniai.live/face_liveness_check"
    with open(frame, 'rb') as f:
        files = {'file': f}
        r = requests.post(url=url, files=files)

    response_data = r.json()
    faces = None

    # Build an HTML table with one row per detected face.
    live_result = ["<table><tr><th>FaceID</th><th>Age</th><th>Gender</th><th>Liveness</th></tr>"]
    for item in response_data.get('face_state', []):
        if item.get('FaceID'):
            faceID = item.get('FaceID')
            result = item.get('LivenessCheck')
            age = item.get('Age')
            gender = item.get('Gender')
            live_result.append(f"<tr><td>{faceID}</td><td>{age}</td><td>{gender}</td><td>{result}</td></tr>")
    live_result.append("</table>")
    live_result = ''.join(live_result)

    try:
        image = Image.open(frame)

        # Crop each detected face, clamp the box to the image bounds, resize it to a
        # 150 px-tall thumbnail, and append it to a horizontal strip with 10 px grey padding.
        for face in response_data.get('faces', []):
            x1 = max(face.get('x1'), 0)
            y1 = max(face.get('y1'), 0)
            x2 = min(face.get('x2'), image.width - 1)
            y2 = min(face.get('y2'), image.height - 1)

            face_image = image.crop((x1, y1, x2, y2))
            face_image_ratio = face_image.width / float(face_image.height)
            resized_w = int(face_image_ratio * 150)
            resized_h = 150
            face_image = face_image.resize((resized_w, resized_h))

            if faces is None:
                faces = face_image
            else:
                new_image = Image.new('RGB', (faces.width + face_image.width + 10, 150), (80, 80, 80))
                new_image.paste(faces, (0, 0))
                new_image.paste(face_image, (faces.width + 10, 0))
                faces = new_image.copy()
    except Exception:
        # If the response has no face boxes or cropping fails, skip the preview.
        pass

    return [faces, live_result]


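# Recognize the facial expression in one image via the face_emotion endpoint;
# returns a strip of cropped faces and an HTML table with the emotion result.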
def face_emotion(frame):
    # Upload the image to the hosted emotion-recognition endpoint.
    url = "https://faceapi.miniai.live/face_emotion"
    with open(frame, 'rb') as f:
        files = {'file': f}
        r = requests.post(url=url, files=files)

    response_data = r.json()

    emotion_result = (
        "<table><tr><td>Emotion Result:</td>"
        f"<td>{response_data.get('emotion_result')}</td></tr></table>"
    )

    faces = None

    try:
        image = Image.open(frame)

        # Crop each detected face, clamp the box to the image bounds, resize it to a
        # 150 px-tall thumbnail, and append it to a horizontal strip with 10 px grey padding.
        for face in response_data.get('faces', []):
            x1 = max(face.get('x1'), 0)
            y1 = max(face.get('y1'), 0)
            x2 = min(face.get('x2'), image.width - 1)
            y2 = min(face.get('y2'), image.height - 1)

            face_image = image.crop((x1, y1, x2, y2))
            face_image_ratio = face_image.width / float(face_image.height)
            resized_w = int(face_image_ratio * 150)
            resized_h = 150
            face_image = face_image.resize((resized_w, resized_h))

            if faces is None:
                faces = face_image
            else:
                new_image = Image.new('RGB', (faces.width + face_image.width + 10, 150), (80, 80, 80))
                new_image.paste(faces, (0, 0))
                new_image.paste(face_image, (faces.width + 10, 0))
                faces = new_image.copy()
    except Exception:
        # If the response has no face boxes or cropping fails, skip the preview.
        pass

    return [faces, emotion_result]


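# Gradio UI: a header plus three tabs (Face Recognition, Face Liveness Detection,
# Face Emotion Recognition); each tab wires example images and a button to one handler above.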
with gr.Blocks() as MiniAIdemo:
    gr.Markdown(
        """
        <a href="https://miniai.live" style="display: flex; align-items: center;">
            <img src="https://miniai.live/wp-content/uploads/2024/02/logo_name-1-768x426-1.png" style="width: 18%; margin-right: 15px;"/>
            <div>
                <p style="font-size: 50px; font-weight: bold; margin-right: 20px;">FaceSDK Web Online Demo</p>
                <p style="font-size: 20px; margin-right: 0;">Experience our NIST FRVT top-ranked Face Recognition and iBeta Level 2 certified Face Liveness Detection engine</p>
            </div>
        </a>
        <br/>
        <ul>
            <li style="font-size: 18px;">Visit and learn more about our services: <a href="https://miniai.live" target="_blank" style="font-size: 18px;">https://www.miniai.live</a></li>
            <li style="font-size: 18px;">Check out our cross-platform SDKs on GitHub: <a href="https://github.com/MiniAiLive" target="_blank" style="font-size: 18px;">https://github.com/MiniAiLive</a></li>
            <li style="font-size: 18px;">Watch our demo videos on YouTube: <a href="https://www.youtube.com/@miniailive" target="_blank" style="font-size: 18px;">MiniAiLive YouTube Channel</a></li>
            <li style="font-size: 18px;">Try the demo on an Android device from Google Play: <a href="https://play.google.com/store/apps/dev?id=5831076207730531667" target="_blank" style="font-size: 18px;">MiniAiLive Google Play</a></li>
        </ul>
        <br/>
        """
    )
    with gr.Tabs():
        # Tab 1: compare two images and report whether they show the same person.
        with gr.Tab("Face Recognition"):
            with gr.Row():
                with gr.Column():
                    im_match_in1 = gr.Image(type='filepath', height=300)
                    gr.Examples(
                        [
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic22.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic60.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic35.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic33.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic34.jpg"),
                        ],
                        inputs=im_match_in1
                    )
                with gr.Column():
                    im_match_in2 = gr.Image(type='filepath', height=300)
                    gr.Examples(
                        [
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic41.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic32.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic39.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic61.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/compare/demo-pic40.jpg"),
                        ],
                        inputs=im_match_in2
                    )
                with gr.Column():
                    im_match_crop = gr.Image(type="pil", height=256)
                    txt_compare_out = gr.HTML()
                    btn_f_match = gr.Button("Compare Faces!", variant='primary')
                    btn_f_match.click(face_compare, inputs=[im_match_in1, im_match_in2], outputs=[im_match_crop, txt_compare_out])
        # Tab 2: check whether a single image is a live capture or a spoof.
        with gr.Tab("Face Liveness Detection"):
            with gr.Row():
                with gr.Column(scale=1):
                    im_liveness_in = gr.Image(type='filepath', height=300)
                    gr.Examples(
                        [
                            os.path.join(os.path.dirname(__file__), "images/liveness/f_real_andr.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/liveness/f_fake_andr_mask3d.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/liveness/f_fake_andr_monitor.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/liveness/f_fake_andr_outline.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/liveness/f_fake_andr_outline3d.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/liveness/1.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/liveness/3.png"),
                            os.path.join(os.path.dirname(__file__), "images/liveness/4.jpg"),
                        ],
                        inputs=im_liveness_in
                    )
                    btn_f_liveness = gr.Button("Check Liveness!", variant='primary')
                with gr.Blocks():
                    with gr.Row():
                        with gr.Column():
                            im_liveness_out = gr.Image(label="Cropped Face", type="pil", scale=1)
                        with gr.Column():
                            liveness_result_output = gr.HTML()
            btn_f_liveness.click(check_liveness, inputs=im_liveness_in, outputs=[im_liveness_out, liveness_result_output])
        # Tab 3: recognize the facial expression in a single image.
        with gr.Tab("Face Emotion Recognition"):
            with gr.Row():
                with gr.Column():
                    im_emotion_in = gr.Image(type='filepath', height=300)
                    gr.Examples(
                        [
                            os.path.join(os.path.dirname(__file__), "images/emotion/1.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/emotion/2.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/emotion/3.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/emotion/4.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/emotion/5.jpg"),
                            os.path.join(os.path.dirname(__file__), "images/emotion/6.jpg"),
                        ],
                        inputs=im_emotion_in
                    )
                    btn_f_emotion = gr.Button("Check Emotion!", variant='primary')
                with gr.Blocks():
                    with gr.Row():
                        with gr.Column():
                            im_emotion_out = gr.Image(label="Result Image", type="pil", scale=1)
                        with gr.Column():
                            txt_emotion_out = gr.HTML()
            btn_f_emotion.click(face_emotion, inputs=im_emotion_in, outputs=[im_emotion_out, txt_emotion_out])


if __name__ == "__main__":
    # Serve the demo locally; pass share=True to launch() for a temporary public link.
    MiniAIdemo.launch()