Spaces:
Running
on
Zero
Running
on
Zero
kayfahaarukku
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -48,15 +48,17 @@ def generate_image(prompt, negative_prompt, use_defaults, resolution, guidance_s
|
|
48 |
|
49 |
torch.cuda.empty_cache()
|
50 |
|
51 |
-
|
|
|
|
|
52 |
|
53 |
# Define Gradio interface
|
54 |
def interface_fn(prompt, negative_prompt, use_defaults, resolution, guidance_scale, num_inference_steps, seed, randomize_seed, progress=gr.Progress()):
|
55 |
-
image, seed = generate_image(prompt, negative_prompt, use_defaults, resolution, guidance_scale, num_inference_steps, seed, randomize_seed, progress)
|
56 |
-
return image, seed, gr.update(value=
|
57 |
|
58 |
def reset_inputs():
|
59 |
-
return gr.update(value=''), gr.update(value=''), gr.update(value=True), gr.update(value='832x1216'), gr.update(value=7), gr.update(value=28), gr.update(value=0), gr.update(value=True)
|
60 |
|
61 |
with gr.Blocks(title="UrangDiffusion 1.0 Demo", theme="NoCrypt/miku@1.2.1") as demo:
|
62 |
gr.HTML(
|
@@ -85,6 +87,7 @@ with gr.Blocks(title="UrangDiffusion 1.0 Demo", theme="NoCrypt/miku@1.2.1") as d
|
|
85 |
|
86 |
with gr.Column():
|
87 |
output_image = gr.Image(type="pil", label="Generated Image")
|
|
|
88 |
gr.Markdown(
|
89 |
"""
|
90 |
### Recommended prompt formatting:
|
@@ -103,15 +106,15 @@ with gr.Blocks(title="UrangDiffusion 1.0 Demo", theme="NoCrypt/miku@1.2.1") as d
|
|
103 |
inputs=[
|
104 |
prompt_input, negative_prompt_input, use_defaults_input, resolution_input, guidance_scale_input, num_inference_steps_input, seed_input, randomize_seed_input
|
105 |
],
|
106 |
-
outputs=[output_image, seed_input]
|
107 |
)
|
108 |
|
109 |
reset_button.click(
|
110 |
reset_inputs,
|
111 |
inputs=[],
|
112 |
outputs=[
|
113 |
-
prompt_input, negative_prompt_input, use_defaults_input, resolution_input, guidance_scale_input, num_inference_steps_input, seed_input, randomize_seed_input
|
114 |
]
|
115 |
)
|
116 |
|
117 |
-
demo.queue(max_size=20).launch(share=False)
|
|
|
48 |
|
49 |
torch.cuda.empty_cache()
|
50 |
|
51 |
+
metadata_text = f"{prompt}\nNegative prompt: {negative_prompt}\nSteps: {num_inference_steps}, Size: {width}x{height}, Seed: {seed}, CFG scale: {guidance_scale}"
|
52 |
+
|
53 |
+
return image, seed, metadata_text
|
54 |
|
55 |
# Define Gradio interface
|
56 |
def interface_fn(prompt, negative_prompt, use_defaults, resolution, guidance_scale, num_inference_steps, seed, randomize_seed, progress=gr.Progress()):
|
57 |
+
image, seed, metadata_text = generate_image(prompt, negative_prompt, use_defaults, resolution, guidance_scale, num_inference_steps, seed, randomize_seed, progress)
|
58 |
+
return image, seed, gr.update(value=metadata_text)
|
59 |
|
60 |
def reset_inputs():
|
61 |
+
return gr.update(value=''), gr.update(value=''), gr.update(value=True), gr.update(value='832x1216'), gr.update(value=7), gr.update(value=28), gr.update(value=0), gr.update(value=True), gr.update(value='')
|
62 |
|
63 |
with gr.Blocks(title="UrangDiffusion 1.0 Demo", theme="NoCrypt/miku@1.2.1") as demo:
|
64 |
gr.HTML(
|
|
|
87 |
|
88 |
with gr.Column():
|
89 |
output_image = gr.Image(type="pil", label="Generated Image")
|
90 |
+
metadata_textbox = gr.Textbox(lines=6, label="Metadata", interactive=False)
|
91 |
gr.Markdown(
|
92 |
"""
|
93 |
### Recommended prompt formatting:
|
|
|
106 |
inputs=[
|
107 |
prompt_input, negative_prompt_input, use_defaults_input, resolution_input, guidance_scale_input, num_inference_steps_input, seed_input, randomize_seed_input
|
108 |
],
|
109 |
+
outputs=[output_image, seed_input, metadata_textbox]
|
110 |
)
|
111 |
|
112 |
reset_button.click(
|
113 |
reset_inputs,
|
114 |
inputs=[],
|
115 |
outputs=[
|
116 |
+
prompt_input, negative_prompt_input, use_defaults_input, resolution_input, guidance_scale_input, num_inference_steps_input, seed_input, randomize_seed_input, metadata_textbox
|
117 |
]
|
118 |
)
|
119 |
|
120 |
+
demo.queue(max_size=20).launch(share=False)
|