one more test with user HF_TOKEN
app.py CHANGED
@@ -125,8 +125,40 @@ def process_mask(
     return mask
 
 
+def set_client_for_session(request: gr.Request):
+    x_ip_token = request.headers['x-ip-token']
+    return Client("SkalskiP/florence-sam-masking", headers={"X-IP-Token": x_ip_token})
+
+
 @spaces.GPU(duration=100)
+def run_flux(
+    image: Image.Image,
+    mask: Image.Image,
+    prompt: str,
+    seed_slicer: int,
+    randomize_seed_checkbox: bool,
+    strength_slider: float,
+    num_inference_steps_slider: int,
+    resolution_wh: Tuple[int, int],
+) -> Image.Image:
+    width, height = resolution_wh
+    if randomize_seed_checkbox:
+        seed_slicer = random.randint(0, MAX_SEED)
+    generator = torch.Generator().manual_seed(seed_slicer)
+    return PIPE(
+        prompt=prompt,
+        image=image,
+        mask_image=mask,
+        width=width,
+        height=height,
+        strength=strength_slider,
+        generator=generator,
+        num_inference_steps=num_inference_steps_slider
+    ).images[0]
+
+
 def process(
+    client,
     input_image_editor: dict,
     inpainting_prompt_text: str,
     masking_prompt_text: str,
@@ -135,8 +167,7 @@ def process(
     seed_slicer: int,
     randomize_seed_checkbox: bool,
     strength_slider: float,
-    num_inference_steps_slider: int,
-    progress=gr.Progress(track_tqdm=True)
+    num_inference_steps_slider: int
 ):
     if not inpainting_prompt_text:
         gr.Info("Please enter a text prompt.")
@@ -171,25 +202,21 @@ def process(
     image = image.resize((width, height), Image.LANCZOS)
     mask = mask.resize((width, height), Image.LANCZOS)
     mask = process_mask(mask, mask_inflation=mask_inflation_slider, mask_blur=mask_blur_slider)
-
-    if randomize_seed_checkbox:
-        seed_slicer = random.randint(0, MAX_SEED)
-    generator = torch.Generator().manual_seed(seed_slicer)
-    result = PIPE(
-        prompt=inpainting_prompt_text,
+    image = run_flux(
         image=image,
-        mask_image=mask,
-        width=width,
-        height=height,
-        strength=strength_slider,
-        generator=generator,
-        num_inference_steps=num_inference_steps_slider
-    ).images[0]
-
-    return result, mask
+        mask=mask,
+        prompt=inpainting_prompt_text,
+        seed_slicer=seed_slicer,
+        randomize_seed_checkbox=randomize_seed_checkbox,
+        strength_slider=strength_slider,
+        num_inference_steps_slider=num_inference_steps_slider,
+        resolution_wh=(width, height)
+    )
+    return image, mask
 
 
 with gr.Blocks() as demo:
+    client_component = gr.State()
     gr.Markdown(MARKDOWN)
     with gr.Row():
         with gr.Column():
@@ -304,6 +331,7 @@ with gr.Blocks() as demo:
     submit_button_component.click(
         fn=process,
         inputs=[
+            client_component,
            input_image_editor_component,
            inpainting_prompt_text_component,
            masking_prompt_text_component,
@@ -319,5 +347,6 @@ with gr.Blocks() as demo:
            output_mask_component
        ]
    )
+    demo.load(set_client_for_session, None, client_component)
 
 demo.launch(debug=False, show_error=True)
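Note on the change: set_client_for_session runs once per page load (via demo.load), reads the x-ip-token header that ZeroGPU injects into the visitor's request, and stores a gradio_client.Client for the SkalskiP/florence-sam-masking Space in gr.State. The client is then passed into process() as its first input, so downstream GPU usage is attributed to the visitor's own quota rather than the Space's account. The diff does not show how process() calls that client; the sketch below is a hedged illustration of the typical gradio_client call, where the endpoint name and parameter names are assumptions, not taken from this commit.

# Hedged sketch: using the per-session client inside process() to obtain a mask
# from the masking Space when the user supplies a text prompt instead of drawing.
# api_name, image_input and text_input are assumed names for illustration only.
from gradio_client import Client, handle_file

def mask_from_prompt(client: Client, image_path: str, masking_prompt: str):
    # The client already carries the visitor's X-IP-Token header, so this call
    # counts against the visitor's ZeroGPU quota, not the Space's token.
    return client.predict(
        image_input=handle_file(image_path),  # assumed parameter name
        text_input=masking_prompt,            # assumed parameter name
        api_name="/process_image"             # assumed endpoint name
    )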
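The other half of the refactor moves the diffusion call out of process() into run_flux, which is the only function decorated with @spaces.GPU(duration=100). Resizing, mask inflation and blur stay on CPU, and a GPU is attached only for the roughly 100 seconds requested per inpainting call. The sketch below restates that pattern in a self-contained form; the pipeline class and model id are assumptions for illustration and are not part of the diff.

# Minimal sketch of the ZeroGPU pattern used above: only the function that runs
# the pipeline holds the GPU. Pipeline class and model id are assumed.
import spaces
import torch
from diffusers import FluxInpaintPipeline  # assumed pipeline class

PIPE = FluxInpaintPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",        # assumed model id
    torch_dtype=torch.bfloat16
).to("cuda")                               # resolved lazily under ZeroGPU

@spaces.GPU(duration=100)                  # request up to ~100 s of GPU time per call
def run_flux(image, mask, prompt, seed, steps, strength, width, height):
    # CUDA is only guaranteed to be available inside the decorated call.
    generator = torch.Generator().manual_seed(seed)
    return PIPE(
        prompt=prompt,
        image=image,
        mask_image=mask,
        width=width,
        height=height,
        strength=strength,
        generator=generator,
        num_inference_steps=steps
    ).images[0]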