{
"_class_name": "FluxTransformer2DModel",
"_diffusers_version": "0.30.3",
"_name_or_path": "/data/huggingface/hub/models--black-forest-labs--FLUX.1-dev/snapshots/0ef5fff789c832c5c7f4e127f94c8b54bbcced44/transformer",
"attention_head_dim": 128,
"axes_dims_rope": [
16,
56,
56
],
"guidance_embeds": true,
"in_channels": 64,
"joint_attention_dim": 4096,
"num_attention_heads": 24,
"num_layers": 19,
"num_single_layers": 38,
"patch_size": 1,
"pooled_projection_dim": 768,
"quantization_config": {
"activation_scheme": "static",
"quant_method": "fp8"
},
"use_fp8_for_attn": true
}