{ "architectures": [ "PatchGPT" ], "dropout": 0.1, "model_type": "patchgpt", "n_channels": 6, "n_embd": 384, "n_head": 6, "n_labels": 18, "n_layer": 6, "n_positions": 1024, "n_static": 2, "patch_size": 7, "position_embedding": "relative_key", "pretrain": true, "torch_dtype": "float32", "transformers_version": "4.39.2", "use_lm_head": true }