|
{
  "torch_dtype": "float32",
  "architectures": [
    "xLSTMForCausalLM"
  ],
  "transformers_version": "4.44.1",
  "_xlstm_config": {
    "num_blocks": 24,
    "embedding_dim": 768,
    "mlstm_block": {
      "mlstm": {
        "num_heads": 4
      }
    },
    "slstm_block": {},
    "slstm_at": [],
    "context_length": 512,
    "vocab_size": 32000
  },
  "vocab_size": 32000,
  "embedding_dim": 768,
  "context_length": 512,
  "model_type": "xlstm",
  "auto_map": {
    "AutoConfig": "configuration_xlstm.xLSTMConfig",
    "AutoModelForCausalLM": "modeling_xlstm.xLSTMForCausalLM",
    "AutoModel": "modeling_xlstm.xLSTMModel"
  }
}
|
|