Hugging Face Space (status: Running) — commit: "don't tie embeddings"
dalle_mini/configuration_bart.py
CHANGED
@@ -139,6 +139,7 @@ class BartConfig(PretrainedConfig):
 139          is_encoder_decoder=True,
 140          decoder_start_token_id=16384,
 141          forced_eos_token_id=2,
+142          tie_word_embeddings=False,  # don't tie for scaling reasons
 143          **kwargs,
 144      ):
 145          self.vocab_size = vocab_size
@@ -173,6 +174,7 @@ class BartConfig(PretrainedConfig):
 174              is_encoder_decoder=is_encoder_decoder,
 175              decoder_start_token_id=decoder_start_token_id,
 176              forced_eos_token_id=forced_eos_token_id,
+177              tie_word_embeddings=tie_word_embeddings,
 178              **kwargs,
 179          )
 180