File size: 3,623 Bytes
b064a39
 
 
ce1fac2
aa14eaf
b064a39
ce1fac2
 
 
 
 
b064a39
97850ea
ce1fac2
b064a39
97850ea
fb25852
 
97850ea
ce1fac2
 
b064a39
ce1fac2
b064a39
ce1fac2
 
 
 
 
 
b064a39
ce1fac2
b064a39
ce1fac2
 
b064a39
 
ce1fac2
b064a39
 
ce1fac2
 
 
 
 
 
b064a39
 
ce1fac2
b064a39
ce1fac2
b064a39
ce1fac2
7bbc203
b064a39
 
ce1fac2
b064a39
ce1fac2
b064a39
39017f3
ff559f5
 
39017f3
 
b064a39
ce1fac2
 
 
 
 
 
aa14eaf
b064a39
 
 
 
ce1fac2
b064a39
 
ce1fac2
 
 
b064a39
ce1fac2
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
import gradio as gr
import pandas as pd

import constants
from utils import display, eval_requests

#
##
###
##
#

# Load the leaderboard results produced by the evaluation pipeline.
leaderboard_df = pd.read_csv("leaderboard.csv")

# Models that already have results or a pending request; used by the
# request form below to reject duplicate submissions.
evaluated_models = leaderboard_df["model"].tolist()
requested_models = eval_requests.get_requested_models()

# Format every column for display: model names become clickable links,
# everything else is rounded for readability.
for col in leaderboard_df.columns:
    if col == "model":
        # FIX: the original `x.replace(x, make_clickable_model(x))` was a
        # no-op wrapper (str.replace of the whole string by the new value
        # just yields the new value) — apply the helper directly.
        leaderboard_df[col] = leaderboard_df[col].apply(display.make_clickable_model)
    else:
        leaderboard_df[col] = leaderboard_df[col].apply(display.round_numbers)

# Tag the metric headers with a "lower is better" arrow, then rank by WER.
leaderboard_df.rename(
    columns={"Average WER": "Average WER ⬇️", "RTF (1e-3)": "RTF (1e-3) ⬇️"},
    inplace=True,
)
leaderboard_df.sort_values(by="Average WER ⬇️", inplace=True)

# ---------------------------------------------------------------------------
# Assemble the Gradio UI: banner, four tabs (results table, metric notes,
# public model-request form, admin-only evaluation trigger) and a citation box.
# ---------------------------------------------------------------------------
with gr.Blocks() as leaderboard_app:

    gr.HTML(constants.BANNER, elem_id="constants.banner")
    gr.Markdown(constants.INTRODUCTION_TEXT, elem_classes="markdown-text")

    with gr.Tabs(elem_classes="tab-buttons") as tabs:

        # Tab 0 — the ranked results table (read-only).
        with gr.TabItem("🏅 Leaderboard", elem_id="od-benchmark-tab-table", id=0):
            leaderboard_table = gr.components.Dataframe(
                value=leaderboard_df,
                datatype=constants.COLUMN_DTYPES_LIST,
                elem_id="leaderboard-table",
                interactive=False,
                visible=True,
            )

        # Tab 1 — prose explaining how the metrics are computed.
        with gr.TabItem("📈 Metrics", elem_id="od-benchmark-tab-table", id=1):
            gr.Markdown(constants.METRICS_TAB_TEXT, elem_classes="markdown-text")

        # Tab 2 — public form to queue a new model for evaluation.
        with gr.TabItem("✉️ Request a model here!", elem_id="od-benchmark-tab-table", id=2):
            with gr.Column():
                gr.Markdown("# ✉️ Request results for a new model here!", elem_classes="markdown-text")
                gr.Markdown("In case of multiple requests, wait the restart of the Space after each of them to ensure a correct submission.", elem_classes="markdown-text")
            with gr.Column():
                with gr.Column():
                    model_id = gr.Textbox(label="Model ID (user_name/model_name)")
                with gr.Column():
                    request_status = gr.Markdown()
                    request_btn = gr.Button(value="🚀 Request")
                    # Duplicate requests are filtered against the model lists
                    # captured at startup.
                    request_btn.click(
                        fn=lambda mid: eval_requests.request_model(mid, evaluated_models, requested_models),
                        inputs=[model_id],
                        outputs=request_status,
                    )

        # Tab 3 — evaluation trigger; the callback is not wired up yet.
        with gr.TabItem("☢️ Evaluate", elem_id="od-benchmark-tab-table", id=3):
            with gr.Column():
                gr.Markdown("For admins only.", elem_classes="markdown-text")
            with gr.Column():
                eval_status = gr.Markdown()
                eval_btn = gr.Button(value="RUN EVALUATION")
                # TODO: connect eval_btn to the evaluation runner.

    with gr.Row():
        with gr.Accordion("📙 Citation", open=False):
            gr.Textbox(
                value=constants.CITATION_TEXT, lines=7,
                label="Copy the BibTeX snippet to cite this source",
                elem_id="citation-button",
                show_label=True,
                show_copy_button=True,
            )

leaderboard_app.launch(allowed_paths=["banner.png"])