alvarobartt (HF staff) committed
Commit bc0c205
1 Parent(s): 8ab12fc

Upload MistralForCausalLM

Files changed (2)
  1. config.json +3 -3
  2. model.safetensors +2 -2
config.json CHANGED
@@ -6,10 +6,10 @@
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
- "hidden_size": 512,
+ "hidden_size": 128,
  "initializer_range": 0.02,
- "intermediate_size": 512,
- "max_position_embeddings": 512,
+ "intermediate_size": 128,
+ "max_position_embeddings": 128,
  "model_type": "mistral",
  "num_attention_heads": 1,
  "num_hidden_layers": 1,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:34c3f0efb6c285418bad4633031057a8ba0ba85bc7c42dd36ac758db21502303
- size 138419480
+ oid sha256:f4de1f8ae7fbf6cfa4c8b941655105379786f62b18234afe66bf8713cb533c68
+ size 33229576
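
The drop in file size tracks the config change. A rough back-of-the-envelope count, assuming a 32000-token vocabulary, untied input/output embeddings, one KV head, and float32 weights (none of which appear in this diff), lands within about a kilobyte of the new file size; the small remainder is consistent with the safetensors header.

```python
# Rough parameter count for the post-commit config.
# Assumptions: vocab_size=32000, untied embeddings, 1 KV head, float32 weights;
# only the diffed fields (hidden/intermediate sizes, heads, layers) are known.
hidden, inter, vocab, layers = 128, 128, 32000, 1
embeddings = 2 * vocab * hidden          # embed_tokens + lm_head
attention  = 4 * hidden * hidden         # q, k, v, o projections
mlp        = 3 * hidden * inter          # gate, up, down projections
norms      = (2 * layers + 1) * hidden   # per-layer norms + final norm
params = embeddings + layers * (attention + mlp) + norms
print(params, params * 4)  # ~8.3M params, ~33.2 MB in float32
```
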