TDN-M committed on
Commit
56ec4ad
·
1 Parent(s): 5cc06b8
Files changed (1) hide show
  1. app.py +6 -8
app.py CHANGED
@@ -283,16 +283,16 @@ def create_demo(model):
283
  step=1,
284
  randomize=True)
285
  strength = gr.Slider(label='Strength',
286
- minimum=0.1,
287
- maximum=1.0,
288
- value=0.9,
289
- step=0.1)
290
  a_prompt = gr.Textbox(
291
  label='Added Prompt',
292
  value="high resolution, clay render style , grayscale, 8K, high resolution, photorealistic")
293
  n_prompt = gr.Textbox(
294
  label='Negative Prompt',
295
- value=" low resolution, banner, logo, watermark, deformed, blurry, out of focus, surreal, ugly, beginner")
296
  submit = gr.Button("Submit")
297
 
298
  with gr.Column():
@@ -306,13 +306,11 @@ def create_demo(model):
306
  with torch.no_grad():
307
  out_img = model.generate_design(image, text, guidance_scale=guidance_scale, num_steps=num_steps, strength=strength, img_size=img_size)
308
 
309
- return out_img
310
 
311
  submit.click(on_submit, inputs=[input_image, input_text, num_steps, guidance_scale, seed, strength, a_prompt, n_prompt, img_size], outputs=design_gallery)
312
  examples = gr.Examples(examples=[["imgs/bedroom_1.jpg"]],
313
  inputs=[input_image, input_text], cache_examples=False)
314
-
315
-
316
 
317
  controlnet_depth= ControlNetModel.from_pretrained(
318
  "controlnet_depth", torch_dtype=dtype, use_safetensors=True)
 
283
  step=1,
284
  randomize=True)
285
  strength = gr.Slider(label='Strength',
286
+ minimum=0.1,
287
+ maximum=1.0,
288
+ value=0.9,
289
+ step=0.1)
290
  a_prompt = gr.Textbox(
291
  label='Added Prompt',
292
  value="high resolution, clay render style , grayscale, 8K, high resolution, photorealistic")
293
  n_prompt = gr.Textbox(
294
  label='Negative Prompt',
295
+ value="low resolution, banner, logo, watermark, deformed, blurry, out of focus, surreal, ugly, beginner")
296
  submit = gr.Button("Submit")
297
 
298
  with gr.Column():
 
306
  with torch.no_grad():
307
  out_img = model.generate_design(image, text, guidance_scale=guidance_scale, num_steps=num_steps, strength=strength, img_size=img_size)
308
 
309
+ return [out_img] # Return a list containing the generated image
310
 
311
  submit.click(on_submit, inputs=[input_image, input_text, num_steps, guidance_scale, seed, strength, a_prompt, n_prompt, img_size], outputs=design_gallery)
312
  examples = gr.Examples(examples=[["imgs/bedroom_1.jpg"]],
313
  inputs=[input_image, input_text], cache_examples=False)
 
 
314
 
315
  controlnet_depth= ControlNetModel.from_pretrained(
316
  "controlnet_depth", torch_dtype=dtype, use_safetensors=True)