lora-multiple-adapters
#11
by
jupyterjazz
- opened
- modeling_lora.py +1 -1
modeling_lora.py
CHANGED
@@ -233,7 +233,7 @@ class XLMRobertaLoRA(XLMRobertaModel):
|
|
233 |
self._alpha = config.lora_alpha
|
234 |
|
235 |
self._register_lora(
|
236 |
-
num_adaptations=self._num_adaptations,
|
237 |
rank=self._rank,
|
238 |
dropout_p=self._dropout_p,
|
239 |
alpha=self._alpha,
|
|
|
233 |
self._alpha = config.lora_alpha
|
234 |
|
235 |
self._register_lora(
|
236 |
+
num_adaptations=len(self._lora_adaptations),
|
237 |
rank=self._rank,
|
238 |
dropout_p=self._dropout_p,
|
239 |
alpha=self._alpha,
|