ZeroCommand committed
Commit 1dcb2d8
1 Parent(s): 08c711a

make inference api default; improve event triggers

app_text_classification.py CHANGED
@@ -80,13 +80,13 @@ def get_demo():
         for _ in range(MAX_LABELS, MAX_LABELS + MAX_FEATURES):
             column_mappings.append(gr.Dropdown(visible=False))
 
-        with gr.Accordion(label="Model Wrap Advance Config (optional)", open=False):
-            run_local = gr.Checkbox(value=True, label="Run with Local Model Inference (pipeline)")
-            run_inference = gr.Checkbox(value=False, label="Run with Inference API")
+        with gr.Accordion(label="Model Wrap Advance Config", open=True):
+            run_local = gr.Checkbox(value=False, label="Run with Local Model Inference (pipeline)")
+            run_inference = gr.Checkbox(value=True, label="Run with Inference API")
             inference_token = gr.Textbox(
                 value="",
                 label="HF Token for Inference API",
-                visible=False,
+                visible=True,
                 interactive=True,
             )
 
@@ -206,6 +206,8 @@ def get_demo():
             dataset_config_input,
             dataset_split_input,
             uid_label,
+            run_inference,
+            inference_token,
         ],
         outputs=[
             example_input,
@@ -235,7 +237,11 @@ def get_demo():
         outputs=[run_btn, logs, uid_label],
     )
 
-    def enable_run_btn():
+    def enable_run_btn(run_inference, inference_token, model_id, dataset_id, dataset_config, dataset_split):
+        if run_inference and inference_token == "":
+            return gr.update(interactive=False)
+        if model_id == "" or dataset_id == "" or dataset_config == "" or dataset_split == "":
+            return gr.update(interactive=False)
         return gr.update(interactive=True)
 
     gr.on(
@@ -246,13 +252,27 @@ def get_demo():
             scanners.input,
         ],
         fn=enable_run_btn,
-        inputs=None,
+        inputs=[
+            run_inference,
+            inference_token,
+            model_id_input,
+            dataset_id_input,
+            dataset_config_input,
+            dataset_split_input
+        ],
        outputs=[run_btn],
     )
 
     gr.on(
         triggers=[label.input for label in column_mappings],
         fn=enable_run_btn,
-        inputs=None, # FIXME
+        inputs=[
+            run_inference,
+            inference_token,
+            model_id_input,
+            dataset_id_input,
+            dataset_config_input,
+            dataset_split_input
+        ], # FIXME
        outputs=[run_btn],
     )
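The change above relies on Gradio's multi-trigger gr.on listener: instead of inputs=None, the live values of the checkbox, token box, and model/dataset fields are forwarded to enable_run_btn, which returns a gr.update that gates the run button. A minimal, self-contained sketch of that gating pattern (the component and variable names here are illustrative placeholders, not the app's actual ones):

import gradio as gr


def toggle_run(use_api, token, model_id, dataset_id):
    # Keep the run button disabled while the Inference API is selected
    # but no HF token has been entered, or while required fields are empty.
    if use_api and token == "":
        return gr.update(interactive=False)
    if model_id == "" or dataset_id == "":
        return gr.update(interactive=False)
    return gr.update(interactive=True)


with gr.Blocks() as demo:
    use_api = gr.Checkbox(value=True, label="Run with Inference API")
    token = gr.Textbox(value="", label="HF Token for Inference API")
    model_id = gr.Textbox(label="Model ID")
    dataset_id = gr.Textbox(label="Dataset ID")
    run_btn = gr.Button("Run", interactive=False)

    # One listener bound to several triggers; passing `inputs` (rather than
    # inputs=None) forwards the current component values to the callback.
    gr.on(
        triggers=[use_api.input, token.input, model_id.input, dataset_id.input],
        fn=toggle_run,
        inputs=[use_api, token, model_id, dataset_id],
        outputs=[run_btn],
    )

if __name__ == "__main__":
    demo.launch()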
text_classification_ui_helpers.py CHANGED
@@ -215,7 +215,7 @@ def precheck_model_ds_enable_example_btn(
 
 
 def align_columns_and_show_prediction(
-    model_id, dataset_id, dataset_config, dataset_split, uid
+    model_id, dataset_id, dataset_config, dataset_split, uid, run_inference, inference_token
 ):
     ppl = check_model(model_id)
     if ppl is None or not isinstance(ppl, TextClassificationPipeline):
@@ -276,7 +276,7 @@ def align_columns_and_show_prediction(
             gr.update(value=MAPPING_STYLED_ERROR_WARNING, visible=True),
             gr.update(visible=False),
             gr.update(visible=True, open=True),
-            gr.update(interactive=True),
+            gr.update(interactive=(run_inference and inference_token != "")),
             "",
             *column_mappings,
         )
@@ -288,7 +288,7 @@ def align_columns_and_show_prediction(
             gr.update(value=get_styled_input(prediction_input), visible=True),
             gr.update(value=prediction_output, visible=True),
             gr.update(visible=True, open=False),
-            gr.update(interactive=True),
+            gr.update(interactive=(run_inference and inference_token != "")),
             "",
             *column_mappings,
         )
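The helper change applies the same idea on the output side: align_columns_and_show_prediction now receives run_inference and inference_token, and the run-button update it returns is only interactive when the Inference API is selected and a token is present. A condensed sketch of that multi-output return pattern (the function name and output ordering below are placeholders, not the helper's real signature):

import gradio as gr


def show_prediction_and_gate_run(prediction_text, run_inference, inference_token):
    # The returned tuple maps positionally onto the listener's `outputs` list,
    # e.g. (prediction display, config accordion, run button).
    can_run = run_inference and inference_token != ""
    return (
        gr.update(value=prediction_text, visible=True),
        gr.update(visible=True, open=False),
        gr.update(interactive=can_run),
    )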