wmt23-comet-qe-mqm-zhen / hparams.yaml
activations: Tanh
batch_size: 4
class_identifier: referenceless_regression_metric
dropout: 0.1
encoder_learning_rate: 1.0e-06
encoder_model: XLM-RoBERTa
final_activation: null
hidden_sizes:
- 2048
- 1024
keep_embeddings_frozen: true
layer: mix
layer_norm: false
layer_transformation: sparsemax
layerwise_decay: 0.95
learning_rate: 1.5e-05
load_pretrained_weights: true
loss: mse
nr_frozen_epochs: 0.3
optimizer: AdamW
pool: avg
pretrained_model: xlm-roberta-large
train_data:
- /content/drive/MyDrive/WMT QE Shared Task 2023/data/train-zhen.csv
validation_data:
- /content/drive/MyDrive/WMT QE Shared Task 2023/data/n-mqm.2022_dev.new.csv
warmup_steps: 0
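
These hyperparameters describe a referenceless (quality-estimation) regression metric in the COMET style, built on xlm-roberta-large with frozen embeddings, sparsemax layer mixing, and an MSE objective. Below is a minimal sketch of how a checkpoint trained under this configuration is typically scored with the unbabel-comet package; the checkpoint path and the example source/translation pair are placeholders, not part of this repository.

```python
# Sketch only: assumes the unbabel-comet package (comet) is installed and a
# local checkpoint file exists; the path below is hypothetical.
from comet import load_from_checkpoint

model = load_from_checkpoint("checkpoints/model.ckpt")  # hypothetical checkpoint path

# class_identifier is referenceless_regression_metric, so each sample needs
# only the source segment ("src") and the machine translation ("mt").
data = [
    {"src": "这是一个测试句子。", "mt": "This is a test sentence."},
]

output = model.predict(data, batch_size=4, gpus=1)
print(output.scores)        # segment-level quality scores
print(output.system_score)  # corpus-level average
```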