jbilcke-hf HF staff committed on
Commit
92ff4bb
·
1 Parent(s): 0bccba3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -7
app.py CHANGED
@@ -81,7 +81,9 @@ def generate(prompt: str,
81
  guidance_scale_refiner: float = 5.0,
82
  num_inference_steps_base: int = 50,
83
  num_inference_steps_refiner: int = 50,
84
- apply_refiner: bool = False) -> PIL.Image.Image:
 
 
85
  generator = torch.Generator().manual_seed(seed)
86
 
87
  if not use_negative_prompt:
@@ -276,12 +278,6 @@ with gr.Blocks(css='style.css') as demo:
276
  secret_token,
277
  ]
278
  prompt.submit(
279
- fn=check_secret_token,
280
- inputs=[secret_token],
281
- outputs=gr.outputs.Void(),
282
- queue=False,
283
- api_name=False,
284
- ).then(
285
  fn=randomize_seed_fn,
286
  inputs=[seed, randomize_seed],
287
  outputs=seed,
 
81
  guidance_scale_refiner: float = 5.0,
82
  num_inference_steps_base: int = 50,
83
  num_inference_steps_refiner: int = 50,
84
+ secret_token: str = '') -> PIL.Image.Image:
85
+ if secret_token != SECRET_TOKEN:
86
+ raise ValueError("Invalid secret token!")
87
  generator = torch.Generator().manual_seed(seed)
88
 
89
  if not use_negative_prompt:
 
278
  secret_token,
279
  ]
280
  prompt.submit(
 
 
 
 
 
 
281
  fn=randomize_seed_fn,
282
  inputs=[seed, randomize_seed],
283
  outputs=seed,