dreamdrop-art committed
Commit ea0045c · 1 parent: 5a482c2
Update app.py
app.py CHANGED
@@ -71,15 +71,15 @@ class Prodia:
         }

     def generate(self, params):
-        response = self._post(f"{self.base}/
+        response = self._post(f"{self.base}/sd/generate", params)
         return response.json()

     def transform(self, params):
-        response = self._post(f"{self.base}/
+        response = self._post(f"{self.base}/sd/transform", params)
         return response.json()

     def controlnet(self, params):
-        response = self._post(f"{self.base}/
+        response = self._post(f"{self.base}/sd/controlnet", params)
         return response.json()

     def get_job(self, job_id):
@@ -96,11 +96,11 @@ class Prodia:
         return job_result

     def list_models(self):
-        response = self._get(f"{self.base}/
+        response = self._get(f"{self.base}/sd/models")
         return response.json()

     def list_samplers(self):
-        response = self._get(f"{self.base}/
+        response = self._get(f"{self.base}/sd/samplers")
         return response.json()

     def _post(self, url, params):
@@ -213,7 +213,7 @@ for model_name in model_list:
     model_names[name_without_ext] = model_name


-def txt2img(prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, seed, progress=gr.Progress()):
+def txt2img(prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, upscale, seed, progress=gr.Progress()):
     progress(0, desc="Starting")
     time.sleep(2.5)
     progress(0.25, desc="Generating")
@@ -226,6 +226,7 @@ def txt2img(prompt, negative_prompt, model, steps, sampler, cfg_scale, width, he
         "cfg_scale": cfg_scale,
         "width": width,
         "height": height,
+        "upscale": upscale,
         "seed": seed
     })
     progress(0.75, desc="Opening image")
@@ -234,7 +235,7 @@ def txt2img(prompt, negative_prompt, model, steps, sampler, cfg_scale, width, he
     return [job["imageUrl"]], job["imageUrl"]


-def img2img(input_image, denoising, prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, seed, progress=gr.Progress()):
+def img2img(input_image, denoising, prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, upscale, seed, progress=gr.Progress()):
     progress(0, desc="Starting")
     time.sleep(1.5)
     progress(0.10, desc="Uploading input image")
@@ -251,6 +252,7 @@ def img2img(input_image, denoising, prompt, negative_prompt, model, steps, sampl
         "cfg_scale": cfg_scale,
         "width": width,
         "height": height,
+        "upscale": upscale,
         "seed": seed
     })
     progress(0.75, desc="Opening image")
@@ -290,10 +292,11 @@ with gr.Blocks(css=css) as demo:
                     with gr.Column(scale=1):
                         sampler = gr.Dropdown(value="DPM++ 2M Karras", show_label=True, label="Sampling Method",
                                               choices=prodia_client.list_samplers())
-
+
                     with gr.Column(scale=1):
                         steps = gr.Slider(label="Sampling Steps", minimum=1, maximum=25, value=20, step=1)
-
+                    with gr.Column(scale=1):
+                        upscale = gr.Checkbox(label="Upscale", value=False, interactive=True)
                 with gr.Row():
                     with gr.Column(scale=1):
                         width = gr.Slider(label="Width", maximum=1024, value=512, step=8)
@@ -312,7 +315,7 @@ with gr.Blocks(css=css) as demo:
                 send_to_png = gr.Button(value="Send OUTPUT IMAGE to PNG Info")
                 past_url = gr.Textbox(visible=False, interactive=False)

-        text_button.click(txt2img, inputs=[prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height,
+        text_button.click(txt2img, inputs=[prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, upscale,
                                            seed], outputs=[image_output, past_url], concurrency_limit=64)

     with gr.Tab("img2img", id='i2i'):
@@ -336,7 +339,8 @@ with gr.Blocks(css=css) as demo:

                     with gr.Column(scale=1):
                         i2i_steps = gr.Slider(label="Sampling Steps", minimum=1, maximum=25, value=20, step=1)
-
+                    with gr.Column(scale=1):
+                        i2i_upscale = gr.Checkbox(label="Upscale", value=False, interactive=True)
                 with gr.Row():
                     with gr.Column(scale=1):
                         i2i_width = gr.Slider(label="Width", maximum=1024, value=512, step=8)
@@ -357,7 +361,7 @@ with gr.Blocks(css=css) as demo:
                 i2i_past_url = gr.Textbox(visible=False, interactive=False)

         i2i_text_button.click(img2img, inputs=[i2i_image_input, i2i_denoising, i2i_prompt, i2i_negative_prompt,
-                                               model, i2i_steps, i2i_sampler, i2i_cfg_scale, i2i_width, i2i_height,
+                                               model, i2i_steps, i2i_sampler, i2i_cfg_scale, i2i_width, i2i_height, i2i_upscale,
                                                i2i_seed], outputs=[i2i_image_output, i2i_past_url], concurrency_limit=64)
         send_to_img2img.click(send_to_img2img_def, inputs=past_url, outputs=i2i_image_input)

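For reference, here is roughly what the updated generate() call amounts to once the new "upscale" flag is wired through from the UI. This is a standalone sketch, not part of the commit: the base URL, the X-Prodia-Key header name, the PRODIA_API_KEY environment variable, the example model name, and the job-polling fields ("job", "status") are assumptions about the rest of app.py and the Prodia v1 API; only the /sd/generate endpoint, the payload keys, and the final imageUrl field come from the diff above.

# Standalone sketch (assumptions marked): mirrors what Prodia.generate() posts
# after this commit, including the new "upscale" field in the payload.
import os
import time
import requests

BASE = "https://api.prodia.com/v1"                         # assumed value of Prodia.base
HEADERS = {"X-Prodia-Key": os.environ["PRODIA_API_KEY"]}   # assumed auth header name

payload = {
    "prompt": "a lighthouse at dusk, detailed",
    "negative_prompt": "blurry, lowres",
    "model": "dreamshaper_8.safetensors",                  # any name returned by /sd/models
    "steps": 20,
    "sampler": "DPM++ 2M Karras",
    "cfg_scale": 7,
    "width": 512,
    "height": 512,
    "upscale": True,                                       # the parameter this commit wires through
    "seed": -1,
}

job = requests.post(f"{BASE}/sd/generate", json=payload, headers=HEADERS).json()

# app.py's get_job() polls the job until it finishes; the "job" and "status"
# field names here are assumptions, while "imageUrl" is the field the diff
# reads from the finished job.
while job.get("status") not in ("succeeded", "failed"):
    time.sleep(1)
    job = requests.get(f"{BASE}/job/{job['job']}", headers=HEADERS).json()

print(job.get("imageUrl"))

On the Gradio side, the commit simply adds the upscale and i2i_upscale checkboxes and appends them to the click() input lists so the flag reaches txt2img() and img2img().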