Fix issue where LoRA alpha is not correct if the LoRA comes from a transformers checkpoint
lora_loading.py  CHANGED  (+1 -1)
@@ -341,7 +341,7 @@ def get_lora_for_key(key: str, lora_weights: dict):
     prefix = key.split(".lora")[0]
     lora_A = lora_weights[f"{prefix}.lora_A.weight"]
     lora_B = lora_weights[f"{prefix}.lora_B.weight"]
-    alpha = lora_weights.get(f"{prefix}.alpha",
+    alpha = lora_weights.get(f"{prefix}.alpha", None)
     return lora_A, lora_B, alpha
 
 
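For context, LoRA checkpoints exported in the transformers/PEFT format typically store lora_alpha in adapter_config.json rather than as a {prefix}.alpha tensor in the state dict, so the .get(..., None) default lets the caller detect a missing alpha instead of scaling with a wrong value. Below is a minimal sketch of how a downstream merge step might consume the values returned by get_lora_for_key; apply_lora_delta and the choice to treat a missing alpha as a neutral scale of 1.0 are illustrative assumptions, not code from this repository.

import torch
from typing import Optional

def apply_lora_delta(weight: torch.Tensor,
                     lora_A: torch.Tensor,
                     lora_B: torch.Tensor,
                     alpha: Optional[float]) -> torch.Tensor:
    # lora_A has shape (rank, in_features) and lora_B has shape
    # (out_features, rank), so lora_B @ lora_A matches weight's shape.
    rank = lora_A.shape[0]
    # Assumption: when the checkpoint carries no alpha key (PEFT keeps it
    # in adapter_config.json), fall back to a scale of 1.0 rather than
    # guessing a default value.
    scale = 1.0 if alpha is None else alpha / rank
    return weight + scale * (lora_B @ lora_A)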