TDN-M committed on
Commit
9460541
1 Parent(s): cb31fae

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -12
app.py CHANGED
@@ -259,7 +259,7 @@ def create_demo(model):
259
  with gr.Row():
260
  with gr.Column():
261
  input_image = gr.Image(label="Input Image", type='pil', elem_id='img-display-input')
262
- input_text = gr.Textbox(label='Prompt', value="masterpiece, high quality, ", lines=2)
263
  with gr.Accordion('Advanced options', open=False):
264
  num_steps = gr.Slider(label='Steps',
265
  minimum=1,
@@ -283,21 +283,22 @@ def create_demo(model):
283
  step=1,
284
  randomize=True)
285
  strength = gr.Slider(label='Strength',
286
- minimum=0.1,
287
- maximum=1.0,
288
- value=0.9,
289
- step=0.1)
290
  a_prompt = gr.Textbox(
291
  label='Added Prompt',
292
- value="high resolution, clay render style , grayscale, 8K, high resolution, photorealistic")
293
  n_prompt = gr.Textbox(
294
  label='Negative Prompt',
295
- value="low resolution, banner, logo, watermark, deformed, blurry, out of focus, surreal, ugly, beginner")
296
  submit = gr.Button("Submit")
297
 
298
  with gr.Column():
299
- design_gallery = gr.Gallery(label="Output Gallery", elem_id='img-display-output', elem_id_css={'height': '600px', 'width': '800px'})
300
-
 
301
  def on_submit(image, text, num_steps, guidance_scale, seed, strength, a_prompt, n_prompt, img_size):
302
  model.seed = seed
303
  model.neg_prompt = n_prompt
@@ -306,12 +307,13 @@ def create_demo(model):
306
  with torch.no_grad():
307
  out_img = model.generate_design(image, text, guidance_scale=guidance_scale, num_steps=num_steps, strength=strength, img_size=img_size)
308
 
309
- return [out_img] # Return a list containing the generated image
310
 
311
- submit.click(on_submit, inputs=[input_image, input_text, num_steps, guidance_scale, seed, strength, a_prompt, n_prompt, img_size], outputs=design_gallery)
312
  examples = gr.Examples(examples=[["imgs/bedroom_1.jpg"]],
313
  inputs=[input_image, input_text], cache_examples=False)
314
 
 
315
  controlnet_depth= ControlNetModel.from_pretrained(
316
  "controlnet_depth", torch_dtype=dtype, use_safetensors=True)
317
  controlnet_seg = ControlNetModel.from_pretrained(
@@ -343,7 +345,7 @@ def main():
343
  model = ControlNetDepthDesignModelMulti()
344
  print('Models uploaded successfully')
345
 
346
- title = "# StableDesign by TDN-M"
347
  description = """
348
  WELCOME
349
  """
 
259
  with gr.Row():
260
  with gr.Column():
261
  input_image = gr.Image(label="Input Image", type='pil', elem_id='img-display-input')
262
+ input_text = gr.Textbox(label='Prompt', value="high resolution, clay render style , grayscale", lines=2)
263
  with gr.Accordion('Advanced options', open=False):
264
  num_steps = gr.Slider(label='Steps',
265
  minimum=1,
 
283
  step=1,
284
  randomize=True)
285
  strength = gr.Slider(label='Strength',
286
+ minimum=0.1,
287
+ maximum=1.0,
288
+ value=0.9,
289
+ step=0.1)
290
  a_prompt = gr.Textbox(
291
  label='Added Prompt',
292
+ value="8K, high resolution, photorealistic")
293
  n_prompt = gr.Textbox(
294
  label='Negative Prompt',
295
+ value=" low resolution, banner, logo, watermark, deformed, blurry, out of focus, surreal, ugly, beginner")
296
  submit = gr.Button("Submit")
297
 
298
  with gr.Column():
299
+ design_image = gr.Image(label="Output Mask", elem_id='img-display-output')
300
+
301
+
302
  def on_submit(image, text, num_steps, guidance_scale, seed, strength, a_prompt, n_prompt, img_size):
303
  model.seed = seed
304
  model.neg_prompt = n_prompt
 
307
  with torch.no_grad():
308
  out_img = model.generate_design(image, text, guidance_scale=guidance_scale, num_steps=num_steps, strength=strength, img_size=img_size)
309
 
310
+ return out_img
311
 
312
+ submit.click(on_submit, inputs=[input_image, input_text, num_steps, guidance_scale, seed, strength, a_prompt, n_prompt, img_size], outputs=design_image)
313
  examples = gr.Examples(examples=[["imgs/bedroom_1.jpg"]],
314
  inputs=[input_image, input_text], cache_examples=False)
315
 
316
+
317
  controlnet_depth= ControlNetModel.from_pretrained(
318
  "controlnet_depth", torch_dtype=dtype, use_safetensors=True)
319
  controlnet_seg = ControlNetModel.from_pretrained(
 
345
  model = ControlNetDepthDesignModelMulti()
346
  print('Models uploaded successfully')
347
 
348
+ title = "# StableDesign"
349
  description = """
350
  WELCOME
351
  """