Commit 7a7b2c1 by aredden · 1 parent: 6d82dcc

Fix issue where the LoRA alpha is not correct when the LoRA comes from a transformers checkpoint

Files changed (1): lora_loading.py (+1, -1)
lora_loading.py CHANGED
@@ -341,7 +341,7 @@ def get_lora_for_key(key: str, lora_weights: dict):
     prefix = key.split(".lora")[0]
     lora_A = lora_weights[f"{prefix}.lora_A.weight"]
     lora_B = lora_weights[f"{prefix}.lora_B.weight"]
-    alpha = lora_weights.get(f"{prefix}.alpha", 1.0)
+    alpha = lora_weights.get(f"{prefix}.alpha", None)
     return lora_A, lora_B, alpha
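
For context, here is a minimal sketch of how a caller might consume the returned alpha, assuming the loader applies the standard LoRA scaling of alpha / rank. The lora_delta helper, the example key, and the fallback of treating a missing alpha as alpha == rank are illustrative assumptions, not code from this repository:

import torch

def lora_delta(lora_A: torch.Tensor, lora_B: torch.Tensor, alpha) -> torch.Tensor:
    # lora_A has shape (rank, in_features), lora_B has shape (out_features, rank).
    rank = lora_A.shape[0]
    if alpha is None:
        # No per-key ".alpha" tensor in the checkpoint (transformers/PEFT exports
        # typically keep alpha in the adapter config rather than the weights).
        # Assume alpha == rank so the scale is 1.0; the old default of alpha = 1.0
        # would instead give a scale of 1.0 / rank and under-weight the LoRA.
        scale = 1.0
    else:
        scale = float(alpha) / rank
    return scale * (lora_B @ lora_A)

# Hypothetical usage:
# lora_A, lora_B, alpha = get_lora_for_key(key, lora_weights)
# fused_weight = base_weight + lora_delta(lora_A, lora_B, alpha)

Returning None instead of a numeric default lets the caller distinguish "alpha was not stored in this checkpoint" from "alpha is literally 1.0" and pick an appropriate fallback.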