openfree committed
Commit 4765e59
1 Parent(s): 8f79fb3

Update app.py

Files changed (1)
  1. app.py +35 -20
app.py CHANGED
@@ -405,41 +405,41 @@ def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_ind
     # Load LoRA weights with respective scales
     lora_names = []
     lora_weights = []
-
+
     with calculateDuration("Loading LoRA weights"):
         for idx, lora in enumerate(selected_loras):
             try:
                 lora_name = f"lora_{idx}"
                 lora_path = lora['repo']
-
+
                 # Special handling for private models
                 if lora.get('private', False):
                     lora_path = load_private_model(lora_path, huggingface_token)
-
-                weight_name = lora.get("weights")
-                print(f"Loading LoRA {lora_name} from {lora_path}")
-
+                    print(f"Using private model path: {lora_path}")
+
                 if image_input is not None:
-                    if weight_name:
-                        pipe_i2i.load_lora_weights(lora_path, weight_name=weight_name,
-                                                   adapter_name=lora_name, token=huggingface_token)
-                    else:
-                        pipe_i2i.load_lora_weights(lora_path, adapter_name=lora_name,
-                                                   token=huggingface_token)
+                    pipe_i2i.load_lora_weights(
+                        lora_path,
+                        adapter_name=lora_name,
+                        token=huggingface_token
+                    )
                 else:
-                    if weight_name:
-                        pipe.load_lora_weights(lora_path, weight_name=weight_name,
-                                               adapter_name=lora_name, token=huggingface_token)
-                    else:
-                        pipe.load_lora_weights(lora_path, adapter_name=lora_name,
-                                               token=huggingface_token)
-
+                    pipe.load_lora_weights(
+                        lora_path,
+                        adapter_name=lora_name,
+                        token=huggingface_token
+                    )
+
                 lora_names.append(lora_name)
                 lora_weights.append(lora_scale_1 if idx == 0 else lora_scale_2 if idx == 1 else lora_scale_3)
+                print(f"Successfully loaded LoRA {lora_name} from {lora_path}")
+
             except Exception as e:
                 print(f"Failed to load LoRA {lora_name}: {str(e)}")
                 continue
 
+
+
     print("Loaded LoRAs:", lora_names)
     print("Adapter weights:", lora_weights)
 
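The loop above only registers the adapters on the pipeline; the scales collected in lora_weights take effect once the adapters are activated. Below is a minimal sketch of that step using diffusers' set_adapters, assuming pipe and pipe_i2i are diffusers pipelines with PEFT support and that app.py performs the equivalent call later in run_lora:

# Sketch only: activate the adapters gathered by the loading loop.
# `pipe`, `pipe_i2i`, `image_input`, `lora_names`, and `lora_weights` are the
# names used in the diff; whether app.py activates adapters at exactly this
# point is an assumption.
if lora_names:
    active_pipe = pipe_i2i if image_input is not None else pipe
    active_pipe.set_adapters(lora_names, adapter_weights=lora_weights)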
@@ -652,7 +652,22 @@ def load_private_model(model_id, huggingface_token):
             local_dir_use_symlinks=False
         )
 
-        return local_dir
+        # Find the .safetensors file
+        safetensors_file = None
+        for root, dirs, files in os.walk(local_dir):
+            for file in files:
+                if file.endswith('.safetensors'):
+                    safetensors_file = os.path.join(root, file)
+                    break
+            if safetensors_file:
+                break
+
+        if not safetensors_file:
+            raise Exception(f"No .safetensors file found in {local_dir}")
+
+        print(f"Found safetensors file: {safetensors_file}")
+        return safetensors_file  # return the full path
+
     except Exception as e:
         print(f"Error loading private model {model_id}: {str(e)}")
         raise e

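The hunk above shows only the tail of load_private_model. The local_dir and local_dir_use_symlinks=False arguments strongly suggest the download is done with huggingface_hub's snapshot_download; the following is a hedged sketch of what the full helper plausibly looks like after this commit. The snapshot_download arguments other than those visible in the diff, and the local directory layout, are assumptions.

import os
from huggingface_hub import snapshot_download

def load_private_model(model_id, huggingface_token):
    try:
        # Download the private repo locally (assumed call; only
        # local_dir_use_symlinks=False and local_dir appear in the diff).
        local_dir = snapshot_download(
            repo_id=model_id,
            token=huggingface_token,
            local_dir=os.path.join("private_models", model_id.replace("/", "_")),  # hypothetical location
            local_dir_use_symlinks=False
        )

        # Locate the downloaded LoRA weight file and return its full path,
        # so the caller can hand it straight to load_lora_weights().
        safetensors_file = None
        for root, dirs, files in os.walk(local_dir):
            for file in files:
                if file.endswith('.safetensors'):
                    safetensors_file = os.path.join(root, file)
                    break
            if safetensors_file:
                break

        if not safetensors_file:
            raise Exception(f"No .safetensors file found in {local_dir}")

        print(f"Found safetensors file: {safetensors_file}")
        return safetensors_file
    except Exception as e:
        print(f"Error loading private model {model_id}: {str(e)}")
        raise e

The behavioural change is that the helper now returns the weight file itself rather than the download directory, and that file path is what the updated run_lora passes directly to load_lora_weights.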
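One consequence of the hard-coded adapter names lora_0, lora_1, lora_2 is that repeated calls to run_lora re-register the same names, which recent diffusers/PEFT versions can reject as already in use. A minimal, hedged sketch of a reset step the app would need somewhere between generations follows; whether and where app.py does this is an assumption, but unload_lora_weights is a standard diffusers pipeline method.

def reset_loras():
    # Drop any previously loaded LoRA adapters so the fixed adapter names
    # ("lora_0", "lora_1", "lora_2") can be reused on the next call.
    for p in (pipe, pipe_i2i):
        try:
            p.unload_lora_weights()
        except Exception as e:
            print(f"Failed to unload LoRA weights: {str(e)}")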