Update app.py
app.py CHANGED
@@ -13,8 +13,7 @@ from huggingface_hub import login
 from gradio_imageslider import ImageSlider
 
 MAX_SEED = np.iinfo(np.int32).max
-
-HF_TOKEN_UPSCALER = os.environ.get("HF_TOKEN_UPSCALER")
+
 
 def enable_lora(lora_add, basemodel):
     return basemodel if not lora_add else lora_add
@@ -34,7 +33,7 @@ async def generate_image(prompt, model, lora_word, width, height, scales, steps,
 
 def get_upscale_finegrain(prompt, img_path, upscale_factor):
     try:
-        client = Client("finegrain/finegrain-image-enhancer",
+        client = Client("finegrain/finegrain-image-enhancer",
         result = client.predict(input_image=handle_file(img_path), prompt=prompt, negative_prompt="", seed=42, upscale_factor=upscale_factor, controlnet_scale=0.6, controlnet_decay=1, condition_scale=6, tile_width=112, tile_height=144, denoise_strength=0.35, num_inference_steps=18, solver="DDIM", api_name="/process")
         return result[1]
     except Exception as e:
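For reference, below is a minimal sketch of how the upscaler call could look once the truncated Client(...) line is completed with gradio_client. The diff cuts off both versions of that line after the Space name, so the hf_token keyword, the environment lookup, and the error handling shown here are assumptions for illustration, not the contents of this commit; the predict() arguments and result[1] indexing are taken directly from the diff above.

# Hedged sketch, not the committed code: completes the truncated Client(...) call.
# The hf_token keyword and os.environ lookup are assumptions (this commit removes
# the module-level HF_TOKEN_UPSCALER constant, so the new call may pass no token).
import os
from gradio_client import Client, handle_file

def get_upscale_finegrain(prompt, img_path, upscale_factor):
    try:
        client = Client(
            "finegrain/finegrain-image-enhancer",
            hf_token=os.environ.get("HF_TOKEN_UPSCALER"),  # assumed; omit for a public Space
        )
        # Arguments mirror the /process call shown in the diff.
        result = client.predict(
            input_image=handle_file(img_path),
            prompt=prompt,
            negative_prompt="",
            seed=42,
            upscale_factor=upscale_factor,
            controlnet_scale=0.6,
            controlnet_decay=1,
            condition_scale=6,
            tile_width=112,
            tile_height=144,
            denoise_strength=0.35,
            num_inference_steps=18,
            solver="DDIM",
            api_name="/process",
        )
        # The endpoint returns several outputs; app.py uses index 1.
        return result[1]
    except Exception as e:
        print(f"Upscale failed: {e}")
        return None

Reading the token inside the function (rather than via a module-level constant) keeps the upscaler call self-contained; if the target Space is public, the token argument can simply be dropped.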