GLM / config.json
{
"num_layers": 24,
"vocab_size": 30592,
"hidden_size": 1024,
"num_attention_heads":16,
"embedding_dropout_prob":0.1,
"attention_dropout_prob":0.1,
"output_dropout_prob":0.1,
"max_sequence_length":512,
"max_memory_length":0,
"checkpoint_activations": false,
"checkpoint_num_layers":1 ,
"parallel_output": true,
"relative_encoding": false,
"block_position_encoding": true,
"output_predict": true,
"spell_length": null,
"spell_func": "lstm",
"attention_scale":1.0
}
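
These hyperparameters describe a 24-layer transformer with a hidden size of 1024 and 16 attention heads, giving a per-head dimension of 1024 / 16 = 64. Below is a minimal sketch, assuming the file has been saved locally as config.json, of loading and sanity-checking it with Python's standard json module; this is illustrative only and is not the GLM repository's own config-loading code:

import json

# Load the raw config dictionary from disk (path is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

# hidden_size must divide evenly among the attention heads.
assert cfg["hidden_size"] % cfg["num_attention_heads"] == 0
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]

# Prints: 24 layers, hidden size 1024, head dim 64
print(f"{cfg['num_layers']} layers, "
      f"hidden size {cfg['hidden_size']}, "
      f"head dim {head_dim}")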