Replace AutoNLP CLI with AutoNLP endpoint
- app.py +72 -60
- requirements.txt +1 -1
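
In short, the submission flow no longer shells out to the autonlp CLI; it now POSTs directly to the AutoNLP backend. Below is a minimal before/after sketch condensed from the diff that follows; the endpoint path, header format, and payload fields are taken from the new app.py, but the payload is trimmed to a few fields and the environment variables are assumed to be set.

    import os
    import subprocess

    import requests

    HF_TOKEN = os.getenv("HF_TOKEN")  # assumed to be configured as a Space secret or in .env

    # Before: authenticate via the AutoNLP CLI, run as a subprocess
    subprocess.run(["autonlp", "login", "--api-key", HF_TOKEN], stdout=subprocess.PIPE)

    # After: call the AutoNLP backend over HTTP with an "autonlp <token>" auth header
    HF_AUTONLP_BACKEND_API = os.getenv("HF_AUTONLP_BACKEND_API")
    headers = {"Authorization": f"autonlp {HF_TOKEN}"}
    payload = {"username": os.getenv("AUTONLP_USERNAME"), "dataset": "GEM/references", "task": 1}  # trimmed; see full payload below
    response = requests.post(HF_AUTONLP_BACKEND_API + "/evaluate/create", json=payload, headers=headers)
    response.raise_for_status()
    print(response.json())
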
app.py
CHANGED
@@ -6,6 +6,7 @@ import subprocess
 from datetime import datetime
 from pathlib import Path

+import requests
 import streamlit as st
 from dotenv import load_dotenv
 from huggingface_hub import HfApi, Repository
@@ -16,14 +17,35 @@ if Path(".env").is_file():
     load_dotenv(".env")

 HF_TOKEN = os.getenv("HF_TOKEN")
+AUTONLP_USERNAME = os.getenv("AUTONLP_USERNAME")
+HF_AUTONLP_BACKEND_API = os.getenv("HF_AUTONLP_BACKEND_API")
+LOCAL_REPO = "submission_repo"

-# AutoNLP login
-autonlp_login = subprocess.run(["autonlp", "login", "--api-key", HF_TOKEN], stdout=subprocess.PIPE)
-if autonlp_login.returncode == -1:
-    raise Exception(f"AutoNLP login failed with return code {autonlp_login.returncode}")

-…
+def get_auth_headers(token: str, prefix: str = "autonlp"):
+    return {"Authorization": f"{prefix} {token}"}
+
+
+def http_post(
+    path: str,
+    token: str,
+    payload=None,
+    domain: str = HF_AUTONLP_BACKEND_API,
+    suppress_logs: bool = False,
+    **kwargs,
+) -> requests.Response:
+    """HTTP POST request to the AutoNLP API, raises UnreachableAPIError if the API cannot be reached"""
+    try:
+        response = requests.post(
+            url=domain + path, json=payload, headers=get_auth_headers(token=token), allow_redirects=True, **kwargs
+        )
+    except requests.exceptions.ConnectionError:
+        print("❌ Failed to reach AutoNLP API, check your internet connection")
+    response.raise_for_status()
+    return response

+
+### Main application ###
 with st.form(key="form"):
     # Flush local repo
     shutil.rmtree(LOCAL_REPO, ignore_errors=True)
@@ -33,62 +55,52 @@ with st.form(key="form"):
         data = str(uploaded_file.read(), "utf-8")
         json_data = json.loads(data)

-    token = st.text_input("
+    token = st.text_input("Password", type="password")

     submit_button = st.form_submit_button("Submit")

-… (42 removed lines of the old AutoNLP CLI submission code; content not recovered from the diff view)
-        else:
-            try:
-                match_job_id = re.search(r"# (\d+)", process.stdout.decode("utf-8"))
-                job_id = match_job_id.group(1)
-                st.write(f"Successfully launched evaluation job #{job_id} for submission {submission_name}!")
-            except Exception as e:
-                st.write(f"Could not extract AutoNLP job ID due to error: {e}")
-
-        st.write(json_data["submission_name"])
-        st.write(commit_sha)
+    if submit_button:
+        validate_submission(json_data)
+        user_info = HfApi().whoami(token)
+        user_name = user_info["name"]
+        submission_name = json_data["submission_name"]
+
+        # Create submission dataset under benchmarks ORG
+        dataset_repo_url = f"https://huggingface.co/datasets/benchmarks/gem-{user_name}"
+        repo = Repository(
+            local_dir=LOCAL_REPO, clone_from=dataset_repo_url, repo_type="dataset", private=True, use_auth_token=HF_TOKEN
+        )
+        submission_metadata = {"benchmark": "gem", "type": "prediction", "submission_name": submission_name}
+        repo.repocard_metadata_save(submission_metadata)
+
+        with open(f"{LOCAL_REPO}/submission.json", "w", encoding="utf-8") as f:
+            json.dump(json_data, f)
+
+        # TODO: add informative commit msg
+        commit_url = repo.push_to_hub()
+        if commit_url is not None:
+            commit_sha = commit_url.split("/")[-1]
+        else:
+            commit_sha = repo.git_head_commit_url().split("/")[-1]
+
+        submission_time = str(int(datetime.now().timestamp()))
+        submission_id = submission_name + "__" + commit_sha + "__" + submission_time
+
+        payload = {
+            "username": AUTONLP_USERNAME,
+            "dataset": "GEM/references",
+            "task": 1,
+            "model": "gem",
+            "submission_dataset": f"benchmarks/gem-{user_name}",
+            "submission_id": submission_id,
+            "col_mapping": {},
+            "split": "test",
+            "config": None,
+        }
+        json_resp = http_post(path="/evaluate/create", payload=payload, token=HF_TOKEN).json()
+        st.write(json_data["submission_name"])
+        st.write(commit_sha)
+        st.write(json_resp)

-    # Flush local repo
-    shutil.rmtree(LOCAL_REPO, ignore_errors=True)
+    # Flush local repo
+    shutil.rmtree(LOCAL_REPO, ignore_errors=True)
requirements.txt
CHANGED
@@ -1,2 +1,2 @@
 python-dotenv
-…
+huggingface-hub==0.2.1
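
For completeness, the updated app reads its configuration from the environment: a local .env file during development, or Space secrets in production. A small sketch of the values it expects follows; the variable names come from app.py, and the loading pattern mirrors the existing code.

    import os
    from pathlib import Path

    from dotenv import load_dotenv

    # Same pattern as app.py: load a local .env if present; on Spaces these
    # values arrive as repository secrets instead.
    if Path(".env").is_file():
        load_dotenv(".env")

    HF_TOKEN = os.getenv("HF_TOKEN")  # Hub token: used for the AutoNLP auth header and for pushing to the benchmarks org
    AUTONLP_USERNAME = os.getenv("AUTONLP_USERNAME")  # username sent in the /evaluate/create payload
    HF_AUTONLP_BACKEND_API = os.getenv("HF_AUTONLP_BACKEND_API")  # base URL of the AutoNLP backend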