import json
import os
from pathlib import Path

import gradio as gr
import pandas as pd
import requests
from gradio_leaderboard import Leaderboard

from envs import API, EVAL_REQUESTS_PATH, TOKEN, QUEUE_REPO

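# Gradio app for the MLSB 2024 leaderboard: displays the PINDER and PLINDER results
# and handles model submissions (local eval-queue file, Google Form entry, and upload
# to the queue dataset repo).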

def fill_form(model_name, model_id, contact_email, challenge, submission_id, paper_link, architecture, license):
    """Map the submission fields to the corresponding Google Form entry IDs."""
    value = {
        # Model name
        "entry.1591601824": model_name,
        # username/space
        "entry.1171388028": model_id,
        # Submission ID on CMT
        "entry.171528970": submission_id,
        # Preprint or paper link
        "entry.1284338508": paper_link,
        # Model architecture
        "entry.1291571256": architecture,
        # License
        #   Option: any text
        "entry.272554778": license,
        # Challenge
        #   Option: any text
        "entry.1908975677": challenge,
        # Email
        #   Option: any text
        "entry.964644151": contact_email,
    }

    return value
    
def sendForm(url, data):
    """POST the submission data to the Google Form and report the outcome."""
    try:
        response = requests.post(url, data=data)
        response.raise_for_status()
        print("Submitted successfully!")
    except requests.RequestException as e:
        print(f"Error submitting form: {e}")

def submit(model_name, model_id, contact_email, challenge, submission_id, paper_link, architecture, license):
    # Validate the inputs; gr.Error must be raised (not just instantiated) to show in the UI.
    if model_name == "" or model_id == "" or challenge == "" or architecture == "" or license == "":
        raise gr.Error("Please fill in all the fields")
    if submission_id == "" and paper_link == "":
        raise gr.Error("Provide either a link to a paper describing the method or a submission ID for the MLSB workshop.")
    if "/" not in model_id:
        raise gr.Error("The space must be given in the format username/space")
    user_name = model_id.split("/")[0]
    model_path = model_id.split("/")[1]

    try:
        eval_entry = {
            "model_name": model_name,
            "model_id": model_id,
            "challenge": challenge,
            "submission_id": submission_id,
            "architecture": architecture,
            "license": license
        }
        # Write the evaluation request locally before uploading it to the queue repo.
        OUT_DIR = f"{EVAL_REQUESTS_PATH}/{user_name}"
        os.makedirs(OUT_DIR, exist_ok=True)
        out_path = f"{OUT_DIR}/{user_name}_{model_path}.json"

        with open(out_path, "w") as f:
            f.write(json.dumps(eval_entry))

        print("Sending form")
        formData = fill_form(model_name, model_id, contact_email, challenge, submission_id, paper_link, architecture, license)
        sendForm("https://docs.google.com/forms/d/e/1FAIpQLSf1zP7RAFC5RLlva03xm0eIAPLKXOmMvNUzirbm82kdCUFKNw/formResponse", formData)

        print("Uploading eval file")
        API.upload_file(
            path_or_fileobj=out_path,
            path_in_repo=out_path.split("eval-queue/")[1],
            repo_id=QUEUE_REPO,
            repo_type="dataset",
            commit_message=f"Add {model_name} to eval queue",
        )

        gr.Info("Successfully submitted", duration=10)
        # Remove the local file
        os.remove(out_path)
    except Exception as e:
        raise gr.Error(f"Error submitting the model: {e}")





abs_path = Path(__file__).parent

# Any pandas-compatible data
pinder_df = pd.read_json(str(abs_path / "leaderboard_pinder.json"))
plinder_df = pd.read_json(str(abs_path / "leaderboard_plinder.json"))

with gr.Blocks() as demo:
    gr.Markdown("""
    # MLSB 2024 Leaderboard
    """)


    with gr.Tab("🎖️ PINDER Leaderboard"):
        gr.Markdown("""## PINDER Leaderboard
                Evaluating Protein-Protein interaction prediction
                """)
        Leaderboard(
        value=pinder_df,
        select_columns=["Arch", "Model", "L_rms", "I_rms",
            "F_nat", "DOCKQ", "CAPRI"],
        search_columns=["model_name_for_query"],
        hide_columns=["model_name_for_query",],
        filter_columns=["Arch"],
    )
    with gr.Tab("🥇 PLINDER Leaderboard"):
        gr.Markdown("""## PLINDER Leaderboard
                Evaluating Protein-Ligand prediction
                """)
        Leaderboard(
        value=plinder_df,
        select_columns=["Arch", "Model", "Mean lDDT-PLI", "Mean lDDT-LP",
                        "Median RMSD", "% lDDT-PLI >= 0.5", "% pass PoseBusters"],
        search_columns=["model_name_for_query"],
        hide_columns=["model_name_for_query",],
        filter_columns=["Arch"],
    )
    with gr.Tab("✉️ Submit"):
        gr.Markdown("""## Submit your model
                Submit your model to the leaderboard using the below form AFTER following the following steps: 
                - Create a HuggingFace account and request to join the  [MLSB organization](https://huggingface.co/MLSB)
                - Create a new space in the MLSB organization and add your model using the inference templates: https://huggingface.co/new-space?owner=MLSB
                - Fill the submission form. 
    
                ## Prerequisites:
                To qualify for submission, each team must:
                - Provide an MLSB submission ID (find it on CMT) or a link to a preprint/paper describing their methodology. This publication does not have to specifically report training or evaluation on the P(L)INDER dataset. Previously published methods, such as DiffDock, only need to link their existing paper. Note that entry into this competition does not equate to an MLSB workshop paper submission.
                - Create a copy of the provided [inference templates](https://huggingface.co/MLSB/).
                    - Go to the top right corner of the page of the respective inference template and click on the drop-down menu (vertical ellipsis) right next to the “Community”, then select “Duplicate this space”.
                - Change files in the newly create space to reflect the peculiarities of your model
                    - Edit `requirements.txt` to capture all python dependencies.
                    - Modify the Dockerfile as appropriate (including selecting the right base image)
                    - Include a `inference_app.py` file. This contains a `predict` function that should be modified to reflect the specifics of inference using their model.
                    - Include a `train.py` file to ensure that training and model selection use only the PINDER/PLINDER datasets and to clearly show any additional hyperparameters used.
                    - Provide a LICENSE file that allows for reuse, derivative works, and distribution of the provided software and weights (e.g., MIT or Apache2 license).
                - Submit to the leaderboard via the [form below](https://huggingface.co/spaces/MLSB/leaderboard2024).
                    - On submission page, add reference to the newly created space in the format username/space (e.g mlsb/alphafold3). You can create the space on your personal Huggingface account and transfer it to MLSB for the submission to get a GPU assigned. 
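                A minimal, illustrative sketch of what the `predict` entry point in `inference_app.py` might look like (the names, inputs, and outputs below are placeholders; the actual template in the MLSB organization defines the exact signature and UI components):

                ```python
                import gradio as gr

                def predict(input_protein, input_ligand):
                    # Placeholder: run your model here and return the path to the predicted structure.
                    output_structure = "prediction.pdb"
                    return output_structure

                demo = gr.Interface(fn=predict, inputs=["file", "file"], outputs="file")

                if __name__ == "__main__":
                    demo.launch()
                ```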

                After a brief technical review by our organizers, we will grant your space a free GPU until the MLSB workshop so that anyone can try the model, and we will run the evaluation.

                If you have any questions, please email workshopmlsb@gmail.com
                """)
        model_name = gr.Textbox(label="Model name")
        model_id = gr.Textbox(label="username/space, e.g. mlsb/alphafold3")
        contact_email = gr.Textbox(label="Contact E-Mail")
        challenge = gr.Radio(choices=["PINDER", "PLINDER"], label="Challenge")
        gr.Markdown("Either give a submission ID if you submitted to the MLSB workshop or provide a link to the preprint/paper describing the method.")
        with gr.Row():
            submission_id = gr.Textbox(label="Submission ID on CMT")
            paper_link = gr.Textbox(label="Preprint or paper link")
        architecture = gr.Dropdown(choices=["GNN", "CNN", "Diffusion Model", "Physics-based", "Other"], label="Model architecture")
        license = gr.Dropdown(choices=["mit", "apache-2.0", "gplv2", "gplv3", "lgpl", "mozilla", "bsd", "other"], label="License")
        submit_btn = gr.Button("Submit")

        submit_btn.click(submit, inputs=[model_name, model_id, contact_email, challenge, submission_id, paper_link, architecture, license], outputs=[])

        gr.Markdown("""
        Please find more information about the challenges on [mlsb.io/#challenge](https://mlsb.io/#challenge)""")

if __name__ == "__main__":
    demo.launch()