# File size: 2,426 Bytes
# commit: f59a1a3
---
backbone_config_path: /notebooks/models/exp492
best_model_path: /notebooks/models/exp492/models/fold_0_10_best.pth
checkpoint_path: /notebooks/models/exp492/chkp/fold_0_10_chkp.pth
config_path: /notebooks/models/exp492/config.yaml
criterion:
criterion_type: BCEWithLogitsLoss
mcrmse_loss:
weights:
- 0.5
- 0.5
mse_loss:
reduction: mean
rmse_loss:
eps: 1.0e-09
reduction: mean
smooth_l1_loss:
beta: 0.1
reduction: mean
data_dir: /notebooks/data
dataset:
bucket_batch_sampler:
bucket_size: 400
noise_factor: 0.2
folds: true
labels:
- generated
max_length: 1024
sampler_type: StratifiedBatchSampler
train_batch_size: 6
train_sources:
- daigt
- persuade
- persuade_gpt
- persuade_humanized_1
- persuade_gpt_patially_rewritten
- persuade_gpt_patially_rewritten_05
- persuade_humanized_easy_1
- daigt_gpt_patially_rewritten
- llama-mistral-partially-r
- moth
- books
- neural-chat-7b
- nbroad
valid_batch_size: 6
valid_sources:
- none
debug: false
exp_name: exp492_seed10
external_dir: /notebooks/data/external
fold: 0
interim_dir: /notebooks/data/interim
log_path: /notebooks/models/exp492/logs/fold-0.log
logger:
job_type: training
project: DAIGT-AIE
train_print_frequency: 100
use_wandb: true
valid_print_frequency: 100
model:
architecture_type: CustomModel
attention_dropout: 0.1
backbone_type: microsoft/deberta-v3-large
dropout: 0.05
freeze_embeddings: true
freeze_n_layers: 23
gem_pooling:
eps: 1.0e-06
p: 3
gradient_checkpointing: false
load_embeddings: true
load_head: false
load_n_layers: 24
load_parts: true
pooling_type: MeanPooling
reinitialize_n_layers: 0
state_from_model: exp489
models_dir: /notebooks/models
optimizer:
beta1: 0.9
beta2: 0.999
decoder_lr: 2.0e-05
embeddings_lr: 2.0e-05
encoder_lr: 2.0e-05
eps: 1.0e-06
group_lr_multiplier: 1
n_groups: 1
weight_decay: 0.01
processed_dir: /notebooks/data/processed
raw_dir: /notebooks/data/raw
run_dir: /notebooks/models/exp492
run_id: exp492_seed10_fold0
run_name: exp492_seed10_fold0
scheduler:
cosine_schedule_with_warmup:
n_cycles: 0.5
n_warmup_steps: 0
type: cosine_schedule_with_warmup
seed: 10
tokenizer: null
tokenizer_path: /notebooks/models/exp492/tokenizer
training:
apex: true
epochs: 1
evaluate_n_times_per_epoch: 1
gradient_accumulation_steps: 1
max_grad_norm: 1000