Commit c7880a6 by sayakpaul
Parent(s): 2cb4bbd

Upload SanaPipeline
model_index.json CHANGED
@@ -7,11 +7,11 @@
   ],
   "text_encoder": [
     "transformers",
-    "Gemma2ForCausalLM"
+    "Gemma2Model"
   ],
   "tokenizer": [
     "transformers",
-    "GemmaTokenizerFast"
+    "GemmaTokenizer"
   ],
   "transformer": [
     "diffusers",
text_encoder/config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "Gemma2ForCausalLM"
+    "Gemma2Model"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -11,7 +11,7 @@
   "final_logit_softcapping": 30.0,
   "head_dim": 16,
   "hidden_activation": "gelu_pytorch_tanh",
-  "hidden_size": 32,
+  "hidden_size": 8,
   "initializer_range": 0.02,
   "intermediate_size": 64,
   "max_position_embeddings": 8192,
@@ -25,7 +25,7 @@
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "torch_dtype": "float32",
-  "transformers_version": "4.47.0",
+  "transformers_version": "4.47.1",
   "use_cache": true,
   "vocab_size": 8
 }
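
These values describe a tiny test-sized checkpoint, and shrinking hidden_size from 32 to 8 is what makes model.safetensors smaller in the next diff. A minimal sketch of instantiating such a model with the real transformers classes; the layer and head counts are not visible in the hunks above, so the ones below are assumptions chosen to keep the model tiny.

from transformers import Gemma2Config, Gemma2Model

config = Gemma2Config(
    hidden_size=8,                           # from this diff (was 32)
    intermediate_size=64,                    # from the config above
    head_dim=16,                             # from the config above
    max_position_embeddings=8192,            # from the config above
    hidden_activation="gelu_pytorch_tanh",   # from the config above
    vocab_size=8,                            # from the config above
    num_hidden_layers=1,                     # assumption: not shown in the diff
    num_attention_heads=1,                   # assumption: not shown in the diff
    num_key_value_heads=1,                   # assumption: not shown in the diff
)
model = Gemma2Model(config)
print(sum(p.numel() for p in model.parameters()))  # a few thousand params at most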
text_encoder/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:06e998ceb070de43d93d9482722e8d74bd496861bca86f8ca8b394a0375ec692
-size 43952
+oid sha256:89072fb15c6c3870900027ba242a7cef686f62183693d1f701b4e2286c88e405
+size 11872
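
The file tracked here is a Git LFS pointer, not the weights themselves; after a `git lfs pull` the downloaded blob should hash to the oid recorded above. A small sketch of that check, assuming the repository has been cloned locally:

import hashlib

with open("text_encoder/model.safetensors", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
# Expected after this commit:
# 89072fb15c6c3870900027ba242a7cef686f62183693d1f701b4e2286c88e405
print(digest)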