ameerazam08 committed
Commit 3451c21 · 1 Parent(s): 934e867

Update app.py

Files changed (1)
  1. app.py +14 -15
app.py CHANGED
@@ -26,23 +26,21 @@ pipe = DiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5",
                                           torch_dtype=torch.float16).to(device)
 pipe.enable_xformers_memory_efficient_attention()
 
-source_image = load_image('https://huggingface.co/lllyasviel/control_v11f1e_sd15_tile/resolve/main/images/original.png')
-
-
 def super_esr(source_image,prompt,negative_prompt,strength,seed,num_inference_steps):
     condition_image = resize_for_condition_image(source_image, 1024)
-    image = pipe(prompt=prompt,#"best quality",
-                 negative_prompt="blur, lowres, bad anatomy, bad hands, cropped, worst quality",
-                 image=condition_image,
-                 controlnet_conditioning_image=condition_image,
-                 width=condition_image.size[0],
-                 height=condition_image.size[1],
-                 strength=1.0,
-                 generator=seed,
-                 num_inference_steps=num_inference_steps,
-                 ).image
-    print(source_image,prompt,negative_prompt,strength,seed,num_inference_steps)
-    return source_image
+
+    # image = pipe(prompt=prompt,#"best quality",
+    #              negative_prompt="blur, lowres, bad anatomy, bad hands, cropped, worst quality",
+    #              image=condition_image,
+    #              controlnet_conditioning_image=condition_image,
+    #              width=condition_image.size[0],
+    #              height=condition_image.size[1],
+    #              strength=1.0,
+    #              generator=seed,
+    #              num_inference_steps=num_inference_steps,
+    #              ).image
+    # print(source_image,prompt,negative_prompt,strength,seed,num_inference_steps)
+    return condition_image
 
 #define launch, take input same as super_esr function
 
@@ -66,3 +64,4 @@ examples=[
 #create queue for the requests
 x=gr.Interface(fn=super_esr,inputs=inputs,outputs=outputs,title=title,description=description,examples=examples)
 x.launch()
+
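
For anyone re-enabling the upscaling path later, the commented-out block had a few latent issues: the pipeline result was read as .image instead of .images[0], the user-supplied negative_prompt, strength, and seed were ignored (a hard-coded negative prompt, strength=1.0, and a bare integer seed where diffusers expects a torch.Generator), and the function returned the untouched source_image. The sketch below is one possible corrected super_esr, assuming pipe and device are the objects defined earlier in app.py, that the pipeline really does accept controlnet_conditioning_image as the original call implies, and that resize_for_condition_image follows the common helper from the control_v11f1e_sd15_tile model card; none of this is part of the commit itself.

    import torch
    from PIL import Image

    def resize_for_condition_image(input_image, resolution):
        # Assumed helper (defined elsewhere in app.py, not shown in this diff):
        # scale so the short side equals `resolution` and round both sides to
        # multiples of 64, as in the control_v11f1e_sd15_tile model card.
        input_image = input_image.convert("RGB")
        W, H = input_image.size
        k = float(resolution) / min(H, W)
        W = int(round(W * k / 64.0)) * 64
        H = int(round(H * k / 64.0)) * 64
        return input_image.resize((W, H), resample=Image.LANCZOS)

    def super_esr(source_image, prompt, negative_prompt, strength, seed, num_inference_steps):
        condition_image = resize_for_condition_image(source_image, 1024)
        # diffusers expects a torch.Generator, not a bare integer seed.
        generator = torch.Generator(device=device).manual_seed(int(seed))
        result = pipe(prompt=prompt,
                      negative_prompt=negative_prompt,   # honour the UI value
                      image=condition_image,
                      controlnet_conditioning_image=condition_image,
                      width=condition_image.size[0],
                      height=condition_image.size[1],
                      strength=float(strength),          # honour the slider instead of forcing 1.0
                      generator=generator,
                      num_inference_steps=int(num_inference_steps))
        # Pipeline outputs hold a list of PIL images; return the first one.
        return result.images[0]

Returning result.images[0] rather than source_image is what actually delivers the upscaled image to the Gradio output component; until then, returning condition_image as in this commit at least keeps the interface runnable.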