{
"torch_dtype": "float32",
"architectures": [
"xLSTMForCausalLM"
],
"transformers_version": "4.44.1",
"_xlstm_config": {
"num_blocks": 48,
"embedding_dim": 2048,
"mlstm_block": {
"mlstm": {
"num_heads": 4
}
},
"slstm_block": {
"slstm": {
"num_heads": 4
}
},
"slstm_at": [
3,
5,
7,
40,
42,
44
],
"context_length": 512,
"vocab_size": 16000
},
"vocab_size": 16000,
"embedding_dim": 2048,
"context_length": 512,
"model_type": "xlstm",
"auto_map": {
"AutoConfig": "configuration_xlstm.xLSTMConfig",
"AutoModelForCausalLM": "modeling_xlstm.xLSTMForCausalLM",
"AutoModel": "modeling_xlstm.xLSTMModel"
}
}