---
# LoRA fine-tuning hyperparameters (alpaca-lora style run configuration).
batch_size: 500
micro_batch_size: 24  # per-device batch; presumably gradient-accumulated up to batch_size — TODO confirm against trainer
num_epochs: 1
learning_rate: 0.0003
cutoff_len: 512  # NOTE(review): looks like max tokenized sequence length — confirm
val_set_size: 0  # 0 → no held-out validation split
lora_r: 8  # LoRA rank
lora_alpha: 16  # LoRA scaling factor
lora_dropout: 0.05
# Block style for structured data; was a flow list.
lora_target_modules:
  - q_proj
  - v_proj
# Canonical lowercase booleans (YAML 1.2 / yamllint `truthy`); `True`/`False`
# parse differently across 1.1/1.2 loaders and strict schemas.
train_on_inputs: true
add_eos_token: true
group_by_length: false
resume_from_checkpoint: false
# Key normalized from `prompt template` (space in key — log-rendering artifact).
prompt_template: alpaca
# Training-log output (not valid YAML — the `||` value contains `: `); kept as a comment:
# trainable params: 4194304 || all params: 6742609920 || trainable%: 0.06220594176090199
# Hugging Face Hub UI text pasted into the file; kept as comments so the file parses:
# Inference Providers (NEW)
# This model is not currently available via any of the supported Inference Providers.
# The model cannot be deployed to the HF Inference API: the model has no library tag.