peft-lora-jul/lora_jul/adapter_config.json
{
  "config": {
    "alpha": 8,
    "architecture": "lora",
    "attn_matrices": [
      "q",
      "v"
    ],
    "composition_mode": "add",
    "dropout": 0.0,
    "init_weights": "lora",
    "intermediate_lora": false,
    "output_lora": false,
    "r": 8,
    "selfattn_lora": true,
    "use_gating": false
  },
  "config_id": "625403edad0bf919",
  "hidden_size": 768,
  "model_class": "RobertaForTokenClassification",
  "model_name": "Jean-Baptiste/camembert-ner",
  "model_type": "camembert",
  "name": "lora_jul",
  "version": "3.2.1"
}
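
The keys above correspond to the LoRAConfig of the AdapterHub library (the "version": "3.2.1" string refers to the older adapter-transformers release that wrote this file). As a minimal, hedged sketch (not part of the repo), the following assumes the current `adapters` package and recreates an equivalent adapter on the base model named in the config:

# Sketch: rebuild the "lora_jul" LoRA adapter described by adapter_config.json.
# Assumes the AdapterHub `adapters` library; field names mirror the JSON above.
from transformers import AutoModelForTokenClassification
import adapters
from adapters import LoRAConfig

# Base model listed under "model_name" in the config.
model = AutoModelForTokenClassification.from_pretrained("Jean-Baptiste/camembert-ner")
adapters.init(model)  # add adapter support to a plain Transformers model

config = LoRAConfig(
    r=8,                        # "r": rank of the LoRA decomposition
    alpha=8,                    # "alpha": scaling factor
    dropout=0.0,
    attn_matrices=["q", "v"],   # apply LoRA to the query and value projections
    selfattn_lora=True,
    intermediate_lora=False,
    output_lora=False,
    composition_mode="add",
    init_weights="lora",
    use_gating=False,
)

model.add_adapter("lora_jul", config=config)   # "name": "lora_jul"
model.train_adapter("lora_jul")                # freeze base weights, train only the LoRA layers

With `train_adapter` active, only the low-rank LoRA matrices in the self-attention q/v projections are updated, which is why the saved adapter stays small (the full config shown here is only a few hundred bytes, plus the adapter weight file).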