""" File: app.py Author: Elena Ryumina and Dmitry Ryumin Description: About the app. License: MIT License """ APP = """
OCEAN-AI is an open-source library consisting of a set of algorithms for intelligent analysis of human behavior based on multimodal data, intended for automatic personality traits (PT) assessment. The library assesses five PT: Openness to experience, Conscientiousness, Extraversion, Agreeableness, Non-Neuroticism.
The AIA, VIA and TIA algorithms implement the functions of strong artificial intelligence (AI) by combining acoustic, visual and linguistic features built on different principles (hand-crafted and deep features), i.e. they follow the composite (hybrid) AI approach. Each algorithm carries out the necessary pre-processing of the audio, video or text information, computes the corresponding acoustic, visual or linguistic features, and outputs predictions of personality traits based on them.
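As an illustration of this kind of unimodal pipeline, the sketch below computes simple hand-crafted acoustic features with librosa; the feature set, sampling rate and aggregation are assumptions chosen for the example and do not reproduce the actual AIA implementation.

# Illustrative hand-crafted acoustic feature extraction (not the actual AIA code).
import librosa
import numpy as np

def hand_crafted_acoustic_features(path: str, sr: int = 16000) -> np.ndarray:
    """Aggregate frame-level descriptors into one fixed-length vector."""
    y, _ = librosa.load(path, sr=sr)
    mfcc = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=13)    # (13, T)
    zcr = librosa.feature.zero_crossing_rate(y)           # (1, T)
    rms = librosa.feature.rms(y=y)                        # (1, T)
    frames = np.vstack([mfcc, zcr, rms])                  # (15, T)
    # Mean and standard deviation functionals over time.
    return np.concatenate([frames.mean(axis=1), frames.std(axis=1)])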
The MIF algorithm is a combination of the three information analysis algorithms (AIA, VIA and TIA). It performs feature-level fusion of the features obtained by the AIA, VIA and TIA algorithms.
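A minimal sketch of what feature-level fusion looks like in Keras is given below; the input dimensions, layer sizes and activations are assumptions for illustration and do not reproduce the actual MIF architecture.

# Illustrative feature-level (early) fusion of three modality vectors
# into five trait scores; all sizes are assumptions, not the MIF architecture.
import tensorflow as tf

audio_in = tf.keras.Input(shape=(128,), name="audio_features")
video_in = tf.keras.Input(shape=(256,), name="video_features")
text_in = tf.keras.Input(shape=(768,), name="text_features")

fused = tf.keras.layers.Concatenate()([audio_in, video_in, text_in])
hidden = tf.keras.layers.Dense(256, activation="relu")(fused)
# One score in [0, 1] per trait: O, C, E, A, Non-Neuroticism.
traits = tf.keras.layers.Dense(5, activation="sigmoid", name="ocean")(hidden)

fusion_sketch = tf.keras.Model([audio_in, video_in, text_in], traits)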
In addition to the main task of unimodal and multimodal personality traits assessment, the features implemented in OCEAN-AI allow researchers to address other human behavior analysis problems, for example affective state recognition.
The library solves practical tasks:
OCEAN-AI relies on modern open-source libraries for audio, video and text processing: librosa, openSMILE, OpenCV, MediaPipe and Transformers.
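As an example of how one of these libraries can supply deep linguistic features, the sketch below mean-pools BERT hidden states with the Transformers library; the model name and pooling strategy are assumptions for illustration, not the embeddings that TIA actually uses.

# Illustrative deep linguistic feature extraction with Transformers
# (model choice and pooling are assumptions, not the TIA configuration).
from transformers import AutoTokenizer, TFAutoModel
import tensorflow as tf

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
encoder = TFAutoModel.from_pretrained("bert-base-uncased")

def text_embedding(transcript: str) -> tf.Tensor:
    """Mean-pool the last hidden states into a single sentence vector."""
    inputs = tokenizer(transcript, return_tensors="tf", truncation=True)
    outputs = encoder(**inputs)
    return tf.reduce_mean(outputs.last_hidden_state, axis=1)  # shape (1, 768)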
OCEAN-AI is written in the Python programming language. Its neural network models are implemented and trained with the open-source TensorFlow library.
The OCEAN-AI library was tested on two corpora:
@article{ryumina24_prl,
author = {Elena Ryumina and Maxim Markitantov and Dmitry Ryumin and Alexey Karpov},
title = {Gated Siamese Fusion Network based on Multimodal Deep and Hand-Crafted Features for Personality Traits Assessment},
journal = {Pattern Recognition Letters},
volume = {185},
pages = {45--51},
year = {2024},
doi = {https://doi.org/10.1016/j.patrec.2024.07.004},
}
@article{ryumina24_eswa,
author = {Elena Ryumina and Maxim Markitantov and Dmitry Ryumin and Alexey Karpov},
title = {OCEAN-AI Framework with EmoFormer Cross-Hemiface Attention Approach for Personality Traits Assessment},
journal = {Expert Systems with Applications},
volume = {239},
pages = {122441},
year = {2024},
doi = {https://doi.org/10.1016/j.eswa.2023.122441},
}
@article{ryumina22_neurocomputing,
author = {Elena Ryumina and Denis Dresvyanskiy and Alexey Karpov},
title = {In Search of a Robust Facial Expressions Recognition Model: A Large-Scale Visual Cross-Corpus Study},
journal = {Neurocomputing},
volume = {514},
pages = {435--450},
year = {2022},
doi = {https://doi.org/10.1016/j.neucom.2022.10.013},
}
@inproceedings{ryumina24_interspeech,
author = {Elena Ryumina and Dmitry Ryumin and Alexey Karpov},
title = {OCEAN-AI: Open Multimodal Framework for Personality Traits Assessment and HR-Processes Automatization},
year = {2024},
booktitle = {INTERSPEECH},
pages = {3630--3631},
url = {https://www.isca-archive.org/interspeech_2024/ryumina24_interspeech.html},
}
@inproceedings{ryumina23_interspeech,
author = {Elena Ryumina and Dmitry Ryumin and Maxim Markitantov and Heysem Kaya and Alexey Karpov},
title = {Multimodal Personality Traits Assessment (MuPTA) Corpus: The Impact of Spontaneous and Read Speech},
year = {2023},
booktitle = {INTERSPEECH},
pages = {4049--4053},
doi = {https://doi.org/10.21437/Interspeech.2023-1686},
}