Files changed (2)
  1. app.py +36 -11
  2. requirements.txt +10 -6
app.py CHANGED
@@ -1,7 +1,12 @@
1
- import streamlit as st
2
- from huggingface_hub import HfApi
3
  import os
 
4
  import subprocess
 
 
 
 
 
5
 
6
  HF_TOKEN = st.secrets.get("HF_TOKEN") or os.environ.get("HF_TOKEN")
7
  HF_USERNAME = (
@@ -9,18 +14,29 @@ HF_USERNAME = (
9
  or os.environ.get("HF_USERNAME")
10
  or os.environ.get("SPACE_AUTHOR_NAME")
11
  )
12
- TRANSFORMERS_REPOSITORY_URL = "https://github.com/xenova/transformers.js"
13
- TRANSFORMERS_REPOSITORY_REVISION = "2.16.0"
 
 
 
14
  TRANSFORMERS_REPOSITORY_PATH = "./transformers.js"
 
15
  HF_BASE_URL = "https://huggingface.co"
16
 
17
  if not os.path.exists(TRANSFORMERS_REPOSITORY_PATH):
18
- os.system(f"git clone {TRANSFORMERS_REPOSITORY_URL} {TRANSFORMERS_REPOSITORY_PATH}")
19
-
20
- os.system(
21
- f"cd {TRANSFORMERS_REPOSITORY_PATH} && git checkout {TRANSFORMERS_REPOSITORY_REVISION}"
22
- )
23
-
 
 
 
 
 
 
 
24
  st.write("## Convert a HuggingFace model to ONNX")
25
 
26
  input_model_id = st.text_input(
@@ -62,6 +78,15 @@ if input_model_id:
62
  capture_output=True,
63
  text=True,
64
  )
 
 
 
 
 
 
 
 
 
65
 
66
  model_folder_path = (
67
  f"{TRANSFORMERS_REPOSITORY_PATH}/models/{input_model_id}"
@@ -103,4 +128,4 @@ if input_model_id:
103
  st.write("You can now go and view the model on HuggingFace!")
104
  st.link_button(
105
  f"Go to {output_model_id}", output_model_url, type="primary"
106
- )
 
1
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.error
import urllib.request

import streamlit as st
from huggingface_hub import HfApi
10
 
11
  HF_TOKEN = st.secrets.get("HF_TOKEN") or os.environ.get("HF_TOKEN")
12
  HF_USERNAME = (
 
14
  or os.environ.get("HF_USERNAME")
15
  or os.environ.get("SPACE_AUTHOR_NAME")
16
  )
17
+
18
+ TRANSFORMERS_BASE_URL = "https://github.com/xenova/transformers.js/archive/refs"
19
+ TRANSFORMERS_REPOSITORY_REVISION = "3.0.0"
20
+ TRANSFORMERS_REF_TYPE = "tags" if urllib.request.urlopen(f"{TRANSFORMERS_BASE_URL}/tags/{TRANSFORMERS_REPOSITORY_REVISION}.tar.gz").getcode() == 200 else "heads"
21
+ TRANSFORMERS_REPOSITORY_URL = f"{TRANSFORMERS_BASE_URL}/{TRANSFORMERS_REF_TYPE}/{TRANSFORMERS_REPOSITORY_REVISION}.tar.gz"
22
  TRANSFORMERS_REPOSITORY_PATH = "./transformers.js"
23
+ ARCHIVE_PATH = f"./transformers_{TRANSFORMERS_REPOSITORY_REVISION}.tar.gz"
24
  HF_BASE_URL = "https://huggingface.co"
25
 
26
  if not os.path.exists(TRANSFORMERS_REPOSITORY_PATH):
27
+ urllib.request.urlretrieve(TRANSFORMERS_REPOSITORY_URL, ARCHIVE_PATH)
28
+
29
+ with tempfile.TemporaryDirectory() as tmp_dir:
30
+ with tarfile.open(ARCHIVE_PATH, "r:gz") as tar:
31
+ tar.extractall(tmp_dir)
32
+
33
+ extracted_folder = os.path.join(tmp_dir, os.listdir(tmp_dir)[0])
34
+
35
+ os.rename(extracted_folder, TRANSFORMERS_REPOSITORY_PATH)
36
+
37
+ os.remove(ARCHIVE_PATH)
38
+ print("Repository downloaded and extracted successfully.")
39
+
40
  st.write("## Convert a HuggingFace model to ONNX")
41
 
42
  input_model_id = st.text_input(
 
78
  capture_output=True,
79
  text=True,
80
  )
81
+
82
+ # Log the script output
83
+ print("### Script Output ###")
84
+ print(output.stdout)
85
+
86
+ # Log any errors
87
+ if output.stderr:
88
+ print("### Script Errors ###")
89
+ print(output.stderr)
90
 
91
  model_folder_path = (
92
  f"{TRANSFORMERS_REPOSITORY_PATH}/models/{input_model_id}"
 
128
  st.write("You can now go and view the model on HuggingFace!")
129
  st.link_button(
130
  f"Go to {output_model_id}", output_model_url, type="primary"
131
+ )
requirements.txt CHANGED
@@ -1,7 +1,11 @@
1
- transformers[torch]==4.35.2
2
- onnxruntime<1.16.0
3
- optimum==1.14.1
4
- tqdm
5
- onnx==1.13.1
6
  huggingface_hub
7
- streamlit
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  huggingface_hub
2
+ streamlit
3
+ transformers[torch]==4.43.4
4
+ onnxruntime==1.19.2
5
+ optimum==1.21.3
6
+ onnx==1.16.2
7
+ onnxconverter-common==1.14.0
8
+ tqdm==4.66.5
9
+ onnxslim==0.1.31
10
+ --extra-index-url https://pypi.ngc.nvidia.com
11
+ onnx_graphsurgeon==0.3.27