{ "activation": "gelu", "architectures": [ "CustomDistilBERTHead" ], "attention_dropout": 0.1, "dim": 768, "dropout": 0.1, "hidden_dim": 3072, "initializer_range": 0.02, "max_position_embeddings": 512, "model_type": "CustomDistilBERTHead", "n_layers": 6, "qa_dropout": 0.1, "sinusoidal_pos_embds": false, "torch_dtype": "float32", "transformers_version": "4.33.2", "vocab_size": 30522 }