File size: 5,545 Bytes
6e2fd22
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
import os
import json
import base64
import shutil
import requests
import gradio as gr
from datetime import datetime
from huggingface_hub import hf_hub_download, HfApi

# Credentials and endpoint for the BellaAPI, injected through environment
# variables (both may be None if the env vars are unset).
API_TOKEN = os.getenv('API_TOKEN')
API_URL = os.getenv('API_URL')

# Headers sent with every POST to the API: bearer-token auth + JSON body.
headers = {
    "Authorization": f"Bearer {API_TOKEN}",
    "Content-Type": "application/json"
}


def get_main_data():
    """
    Returns the list of score parameters and the list of authors.

    Returns:
        tuple[list[str], list[str]]: (score parameter names, author names)
    """
    parameters = [
        'Personalidad', 'Intereses', 'Lenguaje/Estilo', 'Autenticidad', 'Habilidad de conversaci贸n',
        'Marca/Producto', 'Identificaci贸n', 'Experiencia de uso', 'Recomendaci贸n', 'Conversaci贸n org谩nica'
    ]
    author_names = ['Sofia', 'Eliza', 'Sindy', 'Carlos', 'Andres', 'Adriana', 'Carolina', 'Valeria']
    return parameters, author_names


def make_invisible():
    """
    Hides a row: returns a gr.Row update with visible=False.

    (Original docstring incorrectly said it makes the row visible.)
    """
    return gr.Row.update(visible=False)


def make_visible():
    """
    Shows two rows: returns a pair of gr.Row updates with visible=True.
    """
    first_row = gr.Row.update(visible=True)
    second_row = gr.Row.update(visible=True)
    return first_row, second_row


def _query(payload):
    """
    POSTs *payload* to the BellaAPI and returns the decoded JSON response.
    Uses the module-level API_URL and auth headers.
    """
    api_response = requests.post(API_URL, json=payload, headers=headers)
    return api_response.json()


def _download_media(url: str, type_media: str) -> None:
    """
    Downloads a video or audio file (depending on *type_media*) so it can
    be used inside a gr.Video or gr.Audio component.

    Saves to 'video.mp4' when type_media == 'video', otherwise 'audio.wav'.

    Raises:
        requests.HTTPError: if the server answers with an error status.
    """
    name = 'video.mp4' if type_media == 'video' else 'audio.wav'
    with requests.get(url, stream=True) as r:
        # Fail fast on 4xx/5xx instead of saving an error page as media.
        r.raise_for_status()
        with open(name, "wb") as f:
            # iter_content (unlike r.raw) applies requests' transfer/content
            # decoding, so gzip-compressed responses are stored correctly.
            for chunk in r.iter_content(chunk_size=65536):
                f.write(chunk)


def init_chatbot(chatbot: list[tuple[str, str]]):
    """
    Starts a session: requests a greeting video from the API, appends its
    transcription to the chat history, and returns the video path, the
    updated history, and the user_id used by all later requests.
    """
    payload = {
        "inputs": {
            "date": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            'get_video': True,
        }
    }
    api_output = _query(payload)

    # Greeting has no user message, so the question side is empty.
    chatbot.append(('', api_output['answer']))
    _download_media(api_output['link_media'], 'video')

    return 'video.mp4', chatbot, api_output['user_id']


def get_answer_text(question: str, chatbot: list[tuple[str, str]], user_id: str, checkbox: bool):
    """
    Sends a typed question to the API and returns the updated UI elements
    (via _update_elements); the textbox is cleared with an empty string.
    """
    payload = {
        'inputs': {'text': question, 'user_id': user_id, 'get_video': checkbox}
    }
    api_output = _query(payload)
    return _update_elements(question, chatbot, api_output, checkbox, '')


def get_answer_audio(audio_path, chatbot: list[tuple[str, str]], user_id: str, checkbox: bool):
    """
    Sends a recorded audio question to the API and returns the updated UI
    elements (via _update_elements); the audio input is cleared with None.
    """
    # Read the recording and Base64-encode it so it fits in a JSON payload.
    with open(audio_path, 'rb') as audio_file:
        encoded_audio = base64.b64encode(audio_file.read()).decode('utf-8')

    payload = {
        'inputs': {
            'is_audio': True,
            'audio': encoded_audio,
            'user_id': user_id,
            'get_video': checkbox,
        }
    }
    api_output = _query(payload)

    # The API transcribes the audio; use that text as the question shown.
    return _update_elements(api_output['question'], chatbot, api_output, checkbox, None)


def _update_elements(question, chatbot, output, checkbox, clean):
    """
    Appends the (question, answer) pair to the chat history, downloads the
    answer's media, and returns (video_path, audio_path, chatbot, clean).

    With checkbox=True the answer is a video (no audio component); with
    checkbox=False a placeholder video loops while 'audio.wav' plays.
    """
    chatbot.append((question, output['answer']))
    media_url = output['link_media']

    if not checkbox:
        _download_media(media_url, 'audio')
        return 'infinite_loop.mp4', 'audio.wav', chatbot, clean

    _download_media(media_url, 'video')
    return 'video.mp4', None, chatbot, clean


def save_scores(author: gr.Dropdown, history: gr.Chatbot, opinion: gr.Textbox, *score_values):
    """
    Saves the scores and the chat's info into the dataset's json file.

    Downloads 'data.json' from the HF dataset repo, appends the new
    session (opinion, scores, chat, author, date) and uploads it back.

    Raises:
        gr.Error: if any score category was left unselected (None).

    Returns:
        str: 'Done' confirmation message.
    """
    # Get the parameters for each score
    score_parameters, _ = get_main_data()

    # Map each parameter to its selected score, validating every category.
    scores = {}
    for parameter, score in zip(score_parameters, score_values):
        if score is None:
            raise gr.Error('Aseg煤rese de haber seleccionado al menos 1 opci贸n en cada categor铆a')
        scores[parameter] = score

    # Keep every (message, answer) pair of the conversation.
    chat = [
        {'message': conversation[0], 'answer': conversation[1]}
        for conversation in history
    ]

    session = {
        'opinion': opinion,
        'scores': scores,
        'chat': chat,
        'author': author,
        'date': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
    }

    # Fetch the current file from the Hub so we append, not overwrite.
    hf_hub_download(
        repo_id=os.environ.get('DATASET_NAME'),
        repo_type='dataset',
        filename="data.json",
        token=os.environ.get('HUB_TOKEN'),
        local_dir="./"
    )

    # BUG FIX: read with the same encoding the file is written with;
    # the default locale encoding breaks on non-ASCII content.
    with open('data.json', 'r', encoding='utf-8') as infile:
        past_sessions = json.load(infile)

    # Add the new session and persist locally.
    past_sessions['sessions'].append(session)
    with open('data.json', 'w', encoding='utf-8') as outfile:
        json.dump(past_sessions, outfile, indent=4, ensure_ascii=False)

    # Push the updated file back to the dataset repo.
    api = HfApi(token=os.environ.get('HUB_TOKEN'))
    api.upload_file(
        path_or_fileobj="data.json",
        path_in_repo="data.json",
        repo_id=os.environ.get('DATASET_NAME'),
        repo_type='dataset'
    )

    return 'Done'