import os

import gradio as gr
import numpy as np

from onnx_inference import emotions_detector

class_names = ['angry', 'happy', 'sad']


def predict(img):
    # Convert the PIL image from Gradio to a NumPy array and run ONNX inference
    img = np.array(img)
    onnx_pred, time_taken = emotions_detector(img)
    # Map each class name to its predicted probability
    pred_labels_and_probs = {class_names[i]: float(onnx_pred[0][0][i])
                             for i in range(len(class_names))}
    return pred_labels_and_probs, time_taken


title = "Human Emotion Detection 😭🤣🥹"
description = ("A quantized EfficientNet ONNX feature-extractor computer vision model that "
               "classifies images and detects the emotion of the person in them. "
               "(The uploaded image should contain a single person.)")
article = "The full source code can be found in the Hugging Face Space: https://huggingface.co/spaces/Victorano/human_emotion_detection"

# Build the examples list from the "examples/" directory
example_list = [["examples/" + example] for example in os.listdir("examples")]

# Gradio interface: image in, top-3 label probabilities and prediction time out
demo = gr.Interface(fn=predict,
                    inputs=gr.Image(type='pil'),
                    outputs=[gr.Label(num_top_classes=3, label='Predictions'),
                             gr.Number(label="Prediction time (s)")],
                    examples=example_list,
                    title=title, description=description, article=article)

demo.launch()
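
# --- Illustrative sketch (not part of the original Space) ---
# A minimal guess at what onnx_inference.emotions_detector could look like,
# assuming it wraps an ONNX Runtime session. The model filename ("model.onnx"),
# the 224x224 input size, and the float32 [0, 1] preprocessing are assumptions,
# not taken from the original code.
import time

import numpy as np
import onnxruntime as ort
from PIL import Image

_session = ort.InferenceSession("model.onnx")   # assumed model filename
_input_name = _session.get_inputs()[0].name


def emotions_detector(img: np.ndarray):
    # Resize to the assumed input resolution and scale pixels to [0, 1]
    resized = Image.fromarray(img).convert("RGB").resize((224, 224))
    x = np.expand_dims(np.array(resized, dtype=np.float32) / 255.0, axis=0)

    # Time a single forward pass; session.run returns a list of output arrays,
    # which is why the caller indexes the result as onnx_pred[0][0][i]
    start = time.time()
    onnx_pred = _session.run(None, {_input_name: x})
    time_taken = round(time.time() - start, 3)
    return onnx_pred, time_taken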