ZeroCommand committed on
Commit 6314bb5 (1 parent: 201d156)

cast float to int

text_classification.py CHANGED
@@ -287,7 +287,9 @@ def get_example_prediction(model_id, dataset_id, dataset_config, dataset_split):
     results = hf_inference_api(model_id, hf_token, payload)
 
     if isinstance(results, dict) and "estimated_time" in results.keys():
-        return prediction_input, str(results["estimated_time"])
+        # return the estimated time for the inference api to load
+        # cast the float to int to be concise
+        return prediction_input, str(f"{int(results['estimated_time'])}s")
 
     if isinstance(results, dict) and "error" in results.keys():
         raise ValueError(results["error"])
@@ -300,7 +302,7 @@ def get_example_prediction(model_id, dataset_id, dataset_config, dataset_split):
             f'{result["label"]}': result["score"] for result in results
         }
     except Exception as e:
-        # Pipeline prediction failed, need to provide labels
+        # inference api prediction failed, show the error message
         return prediction_input, e
 
     return prediction_input, prediction_result
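
For reference, a minimal sketch (not part of the commit) of how the new return value is built, assuming the Inference API responds with a loading payload such as {"estimated_time": 20.3} while the model warms up:

# hypothetical payload shape; only the "estimated_time" key is taken from the commit's code
results = {"estimated_time": 20.3}

if isinstance(results, dict) and "estimated_time" in results.keys():
    # before: str(results["estimated_time"]) -> "20.3"
    # after:  cast the float to int and append "s" -> "20s"
    eta = f"{int(results['estimated_time'])}s"
    print(eta)  # prints: 20s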
text_classification_ui_helpers.py CHANGED
@@ -219,18 +219,18 @@ def align_columns_and_show_prediction(
             gr.update(visible=False),
             gr.update(visible=False, open=False),
             gr.update(interactive=False),
-            f"Hugging Face Inference API is loading your model, estimation time {prediction_response}",
+            f"Hugging Face Inference API is loading your model, estimation time {prediction_response}. Please validate again later.",
             *dropdown_placement,
         )
 
     if isinstance(prediction_response, Exception):
-        gr.Warning("Please check your model or Hugging Face token.")
+        gr.Warning("Inference API loading error: {prediction_response}. Please check your model or Hugging Face token.")
         return (
             gr.update(visible=False),
             gr.update(visible=False),
             gr.update(visible=False, open=False),
             gr.update(interactive=False),
-            f"Sorry, inference api loading error {prediction_response}, please check your model and token.",
+            "",
             *dropdown_placement,
         )
 
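
One caveat worth flagging: the gr.Warning message added above is a plain string (no f prefix), so {prediction_response} would appear literally in the UI rather than the actual error. A minimal sketch of the interpolated form, assuming gr is Gradio and prediction_response is the exception returned by get_example_prediction:

import gradio as gr

# hypothetical error object standing in for an Inference API failure
prediction_response = ValueError("Model not found")

if isinstance(prediction_response, Exception):
    # the f prefix is needed for {prediction_response} to be interpolated
    gr.Warning(
        f"Inference API loading error: {prediction_response}. "
        "Please check your model or Hugging Face token."
    )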