""" File: app.py Author: Elena Ryumina and Dmitry Ryumin Description: Description: Main application file for Facial_Expression_Recognition. The file defines the Gradio interface, sets up the main blocks, and includes event handlers for various components. License: MIT License """ import gradio as gr # Importing necessary components for the Gradio app from app.description import DESCRIPTION from app.app_utils import preprocess_and_predict def clear(): return ( gr.Image(value=None, type="pil"), gr.Image(value=None, scale=1, elem_classes="dl2"), gr.Label(value=None, num_top_classes=3, scale=1, elem_classes="dl3"), ) md = """ App developers: ``Elena Ryumina`` and ``Dmitry Ryumin`` Methodology developers: ``Elena Ryumina``, ``Denis Dresvyanskiy`` and ``Alexey Karpov`` Model developer: ``Elena Ryumina`` TensorFlow to PyTorch model converter: ``Maxim Markitantov`` and ``Elena Ryumina`` Citation If you are using EMO-AffectNetModel in your research, please consider to cite research [paper](https://www.sciencedirect.com/science/article/pii/S0925231222012656). Here is an example of BibTeX entry:
@article{RYUMINA2022,
title = {In Search of a Robust Facial Expressions Recognition Model: A Large-Scale Visual Cross-Corpus Study},
author = {Elena Ryumina and Denis Dresvyanskiy and Alexey Karpov},
journal = {Neurocomputing},
year = {2022},
doi = {10.1016/j.neucom.2022.10.013},
url = {https://www.sciencedirect.com/science/article/pii/S0925231222012656},
}
"""
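
# The docstring above says this file defines the Gradio interface, main blocks,
# and event handlers, but that part is not reproduced here. Below is a minimal
# sketch of how those pieces could be wired together, assuming
# `preprocess_and_predict` maps an input image to a (processed image, label
# scores) pair matching the components reset by `clear`. The layout, column
# scales, button labels, and launch call are assumptions, not the authors' code;
# only `DESCRIPTION`, `md`, `clear`, and `preprocess_and_predict` come from this file.
with gr.Blocks() as demo:
    gr.Markdown(value=DESCRIPTION)
    with gr.Row():
        with gr.Column(scale=2):
            input_image = gr.Image(type="pil")
            with gr.Row():
                clear_btn = gr.Button(value="Clear", scale=1)
                submit_btn = gr.Button(value="Submit", scale=1)
        with gr.Column(scale=1):
            output_image = gr.Image(scale=1, elem_classes="dl2")
            output_label = gr.Label(num_top_classes=3, scale=1, elem_classes="dl3")
    gr.Markdown(value=md)

    # Event handlers: run prediction on submit, reset all components on clear.
    submit_btn.click(
        fn=preprocess_and_predict,
        inputs=[input_image],
        outputs=[output_image, output_label],
        queue=True,
    )
    clear_btn.click(
        fn=clear,
        inputs=[],
        outputs=[input_image, output_image, output_label],
        queue=True,
    )


if __name__ == "__main__":
    demo.launch()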