Commit 43c04ea (parent: 64d2c90)
Clémentine committed: removed tabs

Files changed:
- app.py (+64, -63)
- src/static/about.py (+9, -15)
app.py
CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 from apscheduler.schedulers.background import BackgroundScheduler
 from src.static.env import API, REPO_ID, HF_TOKEN
-from src.static.about import TITLE, INTRO, ABOUT, DOCUMENTATION
+from src.static.about import TITLE, INTRO, ABOUT, DOCUMENTATION, SUBMIT
 
 from src.leaderboards.get_from_hub import get_leaderboard_info
 from src.static.tag_info import *
@@ -44,68 +44,69 @@ with demo:
     gr.Markdown(TITLE)
     gr.Markdown(INTRO, elem_classes="markdown-text")
 
-    with gr.
-    with gr.
-    [… 60 more removed lines (old 49-108); their content is collapsed in this diff view]
+    with gr.Row():
+        with gr.Column():
+            modality_tags = gr.CheckboxGroup(
+                choices=[tag.name for tag in Modality],
+                value=[],
+                label="Modality of choice"
+            )
+            submission_tags = gr.CheckboxGroup(
+                choices=[tag.name for tag in SubmissionType],
+                value=[],
+                label="Submission type"
+            )
+            test_set_tags = gr.CheckboxGroup(
+                choices=[tag.name for tag in TestSetStatus],
+                value=[],
+                label="Test set status"
+            )
+            judge_tags = gr.CheckboxGroup(
+                choices=[tag.name for tag in Judge],
+                value=[],
+                label="Judge used for the evaluation"
+            )
+        with gr.Column():
+            show_all = gr.Checkbox(
+                value=False,
+                label="Show all leaderboards"
+            )
+            evaluation_tags = gr.CheckboxGroup(
+                choices=[tag.name for tag in EvaluationCategory],
+                value=[],
+                label="Specific evaluation categories"
+            )
+            language_tags = gr.CheckboxGroup(
+                choices=[tag.capitalize() for tag in sorted(list(INFO_TO_LEADERBOARDS["language"].keys()))],
+                value=[],
+                label="Specific languages"
+            )
+    with gr.Row():
+        leaderboards = gr.Markdown(
+            value="",
+        )
+
+    for selector in [modality_tags, submission_tags, test_set_tags, evaluation_tags, language_tags, judge_tags]:
+        selector.change(
+            lambda _: False,
+            outputs=show_all
+        )
+    for selector in [show_all, modality_tags, submission_tags, test_set_tags, evaluation_tags, language_tags, judge_tags]:
+        selector.change(
+            update_leaderboards,
+            [show_all, modality_tags, submission_tags, test_set_tags, evaluation_tags, language_tags, judge_tags],
+            leaderboards,
+            queue=True,
+        )
+
+    with gr.Accordion("How to submit your leaderboard?", open=False):
+        gr.Markdown(SUBMIT, elem_classes="markdown-text")
+
+    with gr.Accordion("Tags documentation", open=False):
+        gr.Markdown(ABOUT, elem_classes="markdown-text")
+
+    with gr.Accordion("How to build your own leaderboard?", open=False):
+        gr.Markdown(DOCUMENTATION, elem_classes="markdown-text")
 
 scheduler = BackgroundScheduler()
 scheduler.add_job(restart_space, "interval", seconds=10800) # restarted every 3h
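The new block wires the filters so that touching any tag selector unticks "Show all leaderboards", and every control (including that checkbox) re-renders the `leaderboards` Markdown via `update_leaderboards`. Below is a minimal, self-contained sketch of that wiring pattern; the component names, the hard-coded index, and the `render` helper are illustrative placeholders, not the Space's actual code.

```python
import gradio as gr

# Placeholder data standing in for the Space's real leaderboard index.
FAKE_INDEX = {
    "text": ["leaderboard-a", "leaderboard-b"],
    "image": ["leaderboard-c"],
}

def render(show_all, modalities):
    # Stand-in for update_leaderboards: build a Markdown list from the filters.
    if show_all or not modalities:
        names = sorted({name for group in FAKE_INDEX.values() for name in group})
    else:
        names = sorted({name for m in modalities for name in FAKE_INDEX.get(m, [])})
    return "\n".join(f"- {name}" for name in names)

with gr.Blocks() as demo:
    show_all = gr.Checkbox(value=False, label="Show all leaderboards")
    modality = gr.CheckboxGroup(choices=list(FAKE_INDEX), value=[], label="Modality")
    listing = gr.Markdown(value="")

    # Touching a filter unticks "Show all"...
    modality.change(lambda: False, outputs=show_all)
    # ...and any control triggers a refresh of the rendered Markdown.
    for selector in (show_all, modality):
        selector.change(render, [show_all, modality], listing, queue=True)

if __name__ == "__main__":
    demo.launch()
```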
src/static/about.py
CHANGED
@@ -6,28 +6,24 @@ INTRO = """
 Have you ever wondered which leaderboard would be best for your use case?
 """
 
-
-If you want your leaderboard to appear in our suggestions, feel free to add relevant information in its tag metadata, and it will be displayed here.
-
-# First step
-
+SUBMIT = """
 Make sure to either use the tag `leaderboard` or `arena` to your space, by adding the following to your README
 
 ```
 tags:
 - leaderboard
 ```
+"""
 
-
-
-## Submission type
+ABOUT = ("""
+### Submission type
 Arenas are not concerned by this category.
 
 
 """ +
 "\n".join([f"- `{s.value.key}`: {s.value.usage}" for s in SubmissionType]) +
 """
-
+### Test set status
 Arenas are not concerned by this category.
 
 
@@ -35,34 +31,32 @@ Arenas are not concerned by this category.
 "\n".join([f"- `{s.value.key}`: {s.value.usage}" for s in TestSetStatus]) +
 """
 
-
+### Judges
 
 """ +
 "\n".join([f"- `{s.value.key}`: {s.value.usage}" for s in Judge]) +
 """
 
-
+### Modalities
 Can be any (or several) of the following list:
 
 """ +
 "\n".join([f"- `{s.value.key}`: {s.value.usage}" for s in Modality]) +
 """
 
-
+### Evaluation categories
 Can be any (or several) of the following list:
 
 """ +
 "\n".join([f"- `{s.value.key}`: {s.value.usage}" for s in EvaluationCategory]) +
 """
 
-
+### Language
 You can indicate the languages covered by your benchmark like so: `language:mylanguage`.
 At the moment, we do not support language codes, please use the language name in English.
 """)
 
 DOCUMENTATION = """
-How to create your own leaderboard?
-
 I'll make an updated documentation page here at some point, but for now, you can check our [demo leaderboard org](https://huggingface.co/demo-leaderboard-backend)!
 
 You just need to duplicate the front space (and backend if you want to run your leaderboard on spaces compute), copy the datasets to your own org, and edit the env variables.
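Putting the SUBMIT and ABOUT instructions together, a Space README front matter could look like the sketch below. `title` and `sdk` are standard Spaces metadata; only the two tags documented in this diff (`leaderboard`/`arena` and `language:...`) are shown, since the remaining tag keys come from the enums in `src/static/tag_info.py` and do not appear here.

```yaml
---
title: My Leaderboard
sdk: gradio
tags:
- leaderboard          # or `arena`
- language:english     # language name in English, not a language code
---
```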