Spaces:
Running
on
Zero
Running
on
Zero
bugfix
Browse files
- app.py +7 -8
- requirements.txt +1 -0
app.py
CHANGED
@@ -26,12 +26,11 @@ from diffusers.pipelines.flux.pipeline_flux_controlnet_inpaint import FluxContro
|
|
26 |
from diffusers.models.controlnet_flux import FluxControlNetModel
|
27 |
|
28 |
HF_TOKEN = os.environ.get("HF_TOKEN")
|
29 |
-
os.environ['PYTORCH_CUDA_ALLOC_CONF'] = 'max_split_size_mb:30'
|
30 |
|
31 |
login(token=HF_TOKEN)
|
32 |
|
33 |
MAX_SEED = np.iinfo(np.int32).max
|
34 |
-
IMAGE_SIZE =
|
35 |
|
36 |
# init
|
37 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
@@ -42,9 +41,10 @@ controlnet = FluxControlNetModel.from_pretrained(controlnet_model, torch_dtype=t
|
|
42 |
|
43 |
|
44 |
pipe = FluxControlNetInpaintPipeline.from_pretrained(base_model, controlnet=controlnet, torch_dtype=torch.bfloat16).to(device)
|
45 |
-
pipe.to("cuda")
|
46 |
torch.backends.cuda.matmul.allow_tf32 = True
|
47 |
-
|
|
|
|
|
48 |
|
49 |
control_mode_ids = {
|
50 |
"scribble_hed": 0,
|
@@ -174,9 +174,8 @@ def run_flux(
|
|
174 |
seed_slicer = random.randint(0, MAX_SEED)
|
175 |
generator = torch.Generator().manual_seed(seed_slicer)
|
176 |
|
177 |
-
pipe.
|
178 |
-
|
179 |
-
pipe.enable_model_cpu_offload() # for saving memory
|
180 |
|
181 |
with calculateDuration("run pipe"):
|
182 |
print("start to run pipe")
|
@@ -264,7 +263,7 @@ def process(
|
|
264 |
preprocessor.load("Openpose")
|
265 |
control_image = preprocessor(
|
266 |
image=image,
|
267 |
-
hand_and_face=
|
268 |
image_resolution=width,
|
269 |
detect_resolution=512,
|
270 |
)
|
|
|
26 |
from diffusers.models.controlnet_flux import FluxControlNetModel
|
27 |
|
28 |
HF_TOKEN = os.environ.get("HF_TOKEN")
|
|
|
29 |
|
30 |
login(token=HF_TOKEN)
|
31 |
|
32 |
MAX_SEED = np.iinfo(np.int32).max
|
33 |
+
IMAGE_SIZE = 768
|
34 |
|
35 |
# init
|
36 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
|
|
41 |
|
42 |
|
43 |
pipe = FluxControlNetInpaintPipeline.from_pretrained(base_model, controlnet=controlnet, torch_dtype=torch.bfloat16).to(device)
|
|
|
44 |
torch.backends.cuda.matmul.allow_tf32 = True
|
45 |
+
pipe.vae.enable_tiling()
|
46 |
+
pipe.vae.enable_slicing()
|
47 |
+
pipe.enable_model_cpu_offload() # for saving memory
|
48 |
|
49 |
control_mode_ids = {
|
50 |
"scribble_hed": 0,
|
|
|
174 |
seed_slicer = random.randint(0, MAX_SEED)
|
175 |
generator = torch.Generator().manual_seed(seed_slicer)
|
176 |
|
177 |
+
# pipe.enable_xformers_memory_efficient_attention()
|
178 |
+
|
|
|
179 |
|
180 |
with calculateDuration("run pipe"):
|
181 |
print("start to run pipe")
|
|
|
263 |
preprocessor.load("Openpose")
|
264 |
control_image = preprocessor(
|
265 |
image=image,
|
266 |
+
hand_and_face=False,
|
267 |
image_resolution=width,
|
268 |
detect_resolution=512,
|
269 |
)
|
requirements.txt
CHANGED
@@ -13,3 +13,4 @@ peft
|
|
13 |
controlnet-aux
|
14 |
mediapipe
|
15 |
kornia
|
|
|
|
13 |
controlnet-aux
|
14 |
mediapipe
|
15 |
kornia
|
16 |
+
xformers
|