{
"architectures": [
"GridTSTForTimeSeriesPrediction"
],
"attention_dropout": 0.2,
"attention_strategy": "channel_first",
"d_model": 256,
"dropout": 0.2,
"ffn_dim": 256,
"head_dropout": 0.0,
"init_std": 0.2,
"label_len": 96,
"model_type": "gridtst",
"norm_type": "batchnorm",
"num_channels": 321,
"num_heads": 16,
"num_layers": 3,
"num_patches": 8,
"patch_len": 96,
"qkv_bias": true,
"revin_affine": false,
"seq_len": 720,
"stride": 96,
"torch_dtype": "float32",
"transformers_version": "4.35.2"
}