Sapir committed on
Commit 9217013
1 Parent(s): 3596937

Create transformer_config.json

Files changed (1)
  1. transformer/transformer_config.json +32 -0
transformer/transformer_config.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "_class_name": "Transformer3DModel",
+   "_diffusers_version": "0.25.1",
+   "_name_or_path": "PixArt-alpha/PixArt-XL-2-256x256",
+   "activation_fn": "gelu-approximate",
+   "attention_bias": true,
+   "attention_head_dim": 64,
+   "attention_type": "default",
+   "caption_channels": 4096,
+   "cross_attention_dim": 2048,
+   "double_self_attention": false,
+   "dropout": 0.0,
+   "in_channels": 128,
+   "norm_elementwise_affine": false,
+   "norm_eps": 1e-06,
+   "norm_num_groups": 32,
+   "num_attention_heads": 32,
+   "num_embeds_ada_norm": 1000,
+   "num_layers": 28,
+   "num_vector_embeds": null,
+   "only_cross_attention": false,
+   "out_channels": 128,
+   "project_to_2d_pos": true,
+   "upcast_attention": false,
+   "use_linear_projection": false,
+   "qk_norm": "rms_norm",
+   "standardization_norm": "rms_norm",
+   "positional_embedding_type": "rope",
+   "positional_embedding_theta": 10000.0,
+   "positional_embedding_max_pos": [20, 2048, 2048],
+   "timestep_scale_multiplier": 1000
+ }
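
As a quick sanity check on the values above, here is a minimal sketch (assuming the file has been saved locally at the transformer/transformer_config.json path from this diff) that loads the config with Python's standard json module and verifies the attention width implied by the head settings:

import json

# Load the newly added config file (path taken from the diff above).
with open("transformer/transformer_config.json") as f:
    cfg = json.load(f)

# The transformer's hidden width is num_attention_heads * attention_head_dim:
# 32 * 64 = 2048, which matches cross_attention_dim in this config.
inner_dim = cfg["num_attention_heads"] * cfg["attention_head_dim"]
assert inner_dim == cfg["cross_attention_dim"] == 2048

print(f"{cfg['_class_name']}: {cfg['num_layers']} layers, hidden width {inner_dim}")

Note that Transformer3DModel is simply the class name recorded in the config, not necessarily a class shipped with diffusers 0.25.1; instantiating the model itself would require the implementation this repository targets.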