nr_frozen_epochs: 0.3
keep_embeddings_frozen: true
optimizer: AdamW
encoder_learning_rate: 1.0e-05
learning_rate: 3.1e-05
layerwise_decay: 0.95
encoder_model: XLM-RoBERTa
pretrained_model: xlm-roberta-large
pool: avg
layer: mix
dropout: 0.15
batch_size: 8
class_identifier: referenceless_regression_metric
train_data: data/jan_28_ende_num_0.5_del_None_mask_None_xlm_mbart.csv
validation_data: null
hidden_sizes:
  - 2048
  - 1024
activations: Tanh
final_activation: null
load_weights_from_checkpoint: null
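
The class_identifier suggests this config targets the Unbabel COMET toolkit's referenceless (quality-estimation) regression metric. As a minimal sketch rather than the toolkit's documented workflow, the snippet below parses the file with PyYAML and sanity-checks the fields listed above before launching training; the file name hparams.yaml is an assumption, and the comet-train invocation in the closing comment is the usual COMET training entry point, though its flags may vary across versions.

# sanity_check_hparams.py
# A hedged sketch: validate the hyperparameter file above before training.
# Assumes the config is saved as "hparams.yaml" (hypothetical name).
import yaml

with open("hparams.yaml") as f:
    hparams = yaml.safe_load(f)

# Check the fields this config actually sets.
assert hparams["class_identifier"] == "referenceless_regression_metric"
assert hparams["nr_frozen_epochs"] >= 0  # fractional values freeze the encoder for part of an epoch
assert all(isinstance(h, int) for h in hparams["hidden_sizes"])

# The estimator head maps pooled encoder features through the listed hidden sizes to a scalar score.
print("Estimator head:", " -> ".join(map(str, hparams["hidden_sizes"])), "-> 1")

# Training is then typically launched with the COMET CLI (flag names may differ by version):
#   comet-train --cfg hparams.yaml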
|
|