echarlaix HF staff committed on
Commit
d198aee
1 Parent(s): c1891cb

rephrase description

Browse files
Files changed (1) hide show
  1. app.py +9 -9
app.py CHANGED
@@ -101,7 +101,7 @@ def export(model_id: str, private_repo: bool, overwritte: bool, oauth_token: gr.
101
  ov_model.save_pretrained(folder)
102
  new_repo_url = api.create_repo(repo_id=new_repo_id, exist_ok=True, private=private_repo)
103
  new_repo_id = new_repo_url.repo_id
104
- print("Repo created successfully!", new_repo_url)
105
 
106
  folder = Path(folder)
107
  for dir_name in (
@@ -167,7 +167,7 @@ def export(model_id: str, private_repo: bool, overwritte: bool, oauth_token: gr.
167
  path_in_repo="README.md",
168
  repo_id=new_repo_id,
169
  )
170
- return f"This model was successfully exported, find it under your repo {new_repo_url}"
171
  finally:
172
  shutil.rmtree(folder, ignore_errors=True)
173
  except Exception as e:
@@ -176,25 +176,25 @@ def export(model_id: str, private_repo: bool, overwritte: bool, oauth_token: gr.
176
  DESCRIPTION = """
177
  This Space uses [Optimum Intel](https://huggingface.co/docs/optimum/main/en/intel/openvino/export) to automatically export a model from the Hub to the [OpenVINO IR format](https://docs.openvino.ai/2024/documentation/openvino-ir-format.html).
178
 
179
- The resulting model will then be pushed under your HF user namespace.
180
 
181
- The list of supported architectures can be found in the [optimum documentation](https://huggingface.co/docs/optimum/main/en/intel/openvino/models).
182
  """
183
 
184
  model_id = HuggingfaceHubSearch(
185
  label="Hub Model ID",
186
- placeholder="Search for model id on the hub",
187
  search_type="model",
188
  )
189
  private_repo = gr.Checkbox(
190
  value=False,
191
- label="Private Repo",
192
- info="Create a private repo under your username",
193
  )
194
  overwritte = gr.Checkbox(
195
  value=False,
196
- label="Overwrite repo content",
197
- info="Enable pushing files on existing repo, potentially overwriting existing files",
198
  )
199
  interface = gr.Interface(
200
  fn=export,
 
101
  ov_model.save_pretrained(folder)
102
  new_repo_url = api.create_repo(repo_id=new_repo_id, exist_ok=True, private=private_repo)
103
  new_repo_id = new_repo_url.repo_id
104
+ print("Repository created successfully!", new_repo_url)
105
 
106
  folder = Path(folder)
107
  for dir_name in (
 
167
  path_in_repo="README.md",
168
  repo_id=new_repo_id,
169
  )
170
+ return f"This model was successfully exported, find it under your repository {new_repo_url}"
171
  finally:
172
  shutil.rmtree(folder, ignore_errors=True)
173
  except Exception as e:
 
176
  DESCRIPTION = """
177
  This Space uses [Optimum Intel](https://huggingface.co/docs/optimum/main/en/intel/openvino/export) to automatically export a model from the Hub to the [OpenVINO IR format](https://docs.openvino.ai/2024/documentation/openvino-ir-format.html).
178
 
179
+ After conversion, a repository will be pushed under your namespace with the resulting model.
180
 
181
+ The list of supported architectures can be found in the [documentation](https://huggingface.co/docs/optimum/main/en/intel/openvino/models).
182
  """
183
 
184
  model_id = HuggingfaceHubSearch(
185
  label="Hub Model ID",
186
+ placeholder="Search for model ID on the hub",
187
  search_type="model",
188
  )
189
  private_repo = gr.Checkbox(
190
  value=False,
191
+ label="Private repository",
192
+ info="Create a private repository instead of a public one",
193
  )
194
  overwritte = gr.Checkbox(
195
  value=False,
196
+ label="Overwrite repository content",
197
+ info="Enable pushing files on existing repositories, potentially overwriting existing files",
198
  )
199
  interface = gr.Interface(
200
  fn=export,