Update app.py
app.py CHANGED

@@ -20,7 +20,7 @@ if False and HF_TOKEN:
     repo = Repository(local_dir="data", clone_from=DATASET_REPO_URL, token=HF_TOKEN)
 
 
-def run(model_id: str,
+def run(model_id: str, token: Optional[str] = None) -> str:
     print(model_id + ' ' + token)
     return model_id + ' ' + token
 
@@ -39,9 +39,6 @@ The steps are the following:
 title="Convert any model to Safetensors and open a PR"
 allow_flagging="never"
 
-def token_text(visible=False):
-    return gr.Text(max_lines=1, label="your_hf_token", visible=visible)
-
 with gr.Blocks(title=title) as demo:
     description = gr.Markdown(f"""# {title}""")
     description = gr.Markdown(DESCRIPTION)
@@ -49,8 +46,7 @@ with gr.Blocks(title=title) as demo:
     with gr.Row() as r:
         with gr.Column() as c:
             model_id = gr.Text(max_lines=1, label="model_id")
-
-            token = token_text()
+            token = gr.Text(max_lines=1, label="your_hf_token")
             with gr.Row() as c:
                 clean = gr.ClearButton()
                 submit = gr.Button("Submit", variant="primary")
@@ -58,7 +54,10 @@ with gr.Blocks(title=title) as demo:
         with gr.Column() as d:
             output = gr.Markdown()
 
-
-
+    submit.click(run, inputs=[model_id, token], outputs=output, concurrency_limit=1)
+
+
+
+
 
 demo.queue(max_size=10).launch(show_api=True)
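For reference, here is a minimal, self-contained sketch of what the changed pieces look like after this commit. It assumes Gradio 4.x (since concurrency_limit is passed to the event listener); the DESCRIPTION text, the title assignment, and the None-guard inside run() are placeholders added here for illustration, and the Repository/HF_TOKEN setup from the full file is omitted.

    # Minimal sketch of the UI after this commit (not the full app.py).
    from typing import Optional

    import gradio as gr

    # Placeholder strings; the real app defines its own title and DESCRIPTION markdown.
    title = "Convert any model to Safetensors and open a PR"
    DESCRIPTION = "Enter a model_id and an optional token, then press Submit."


    def run(model_id: str, token: Optional[str] = None) -> str:
        # Same signature as in the diff; the body just echoes the inputs.
        # (Guarding against token=None is an addition here, not in the original.)
        return model_id + ' ' + (token or '')


    with gr.Blocks(title=title) as demo:
        gr.Markdown(f"# {title}")
        gr.Markdown(DESCRIPTION)

        with gr.Row():
            with gr.Column():
                # The token field is now a plain visible Text input,
                # replacing the removed token_text() helper.
                model_id = gr.Text(max_lines=1, label="model_id")
                token = gr.Text(max_lines=1, label="your_hf_token")
                with gr.Row():
                    clean = gr.ClearButton()
                    submit = gr.Button("Submit", variant="primary")

            with gr.Column():
                output = gr.Markdown()

        # concurrency_limit=1 serializes the event so only one run executes at a time.
        submit.click(run, inputs=[model_id, token], outputs=output, concurrency_limit=1)

    if __name__ == "__main__":
        demo.queue(max_size=10).launch(show_api=True)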