ClaudiaIoana550 committed on
Commit
4e62dec
1 Parent(s): 71f0eb4

Update config.json

Files changed (1):
config.json +9 -11
config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "tiiuae/falcon-7b",
   "alibi": false,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
@@ -7,12 +6,12 @@
   ],
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "tiiuae/falcon-7b--configuration_falcon.FalconConfig",
-    "AutoModel": "tiiuae/falcon-7b--modeling_falcon.FalconModel",
-    "AutoModelForCausalLM": "tiiuae/falcon-7b--modeling_falcon.FalconForCausalLM",
-    "AutoModelForQuestionAnswering": "tiiuae/falcon-7b--modeling_falcon.FalconForQuestionAnswering",
-    "AutoModelForSequenceClassification": "tiiuae/falcon-7b--modeling_falcon.FalconForSequenceClassification",
-    "AutoModelForTokenClassification": "tiiuae/falcon-7b--modeling_falcon.FalconForTokenClassification"
+    "AutoConfig": "configuration_falcon.FalconConfig",
+    "AutoModel": "modeling_falcon.FalconModel",
+    "AutoModelForSequenceClassification": "modeling_falcon.FalconForSequenceClassification",
+    "AutoModelForTokenClassification": "modeling_falcon.FalconForTokenClassification",
+    "AutoModelForQuestionAnswering": "modeling_falcon.FalconForQuestionAnswering",
+    "AutoModelForCausalLM": "modeling_falcon.FalconForCausalLM"
   },
   "bias": false,
   "bos_token_id": 11,
@@ -26,10 +25,9 @@
   "new_decoder_architecture": false,
   "num_attention_heads": 71,
   "num_hidden_layers": 32,
-  "num_kv_heads": 71,
   "parallel_attn": true,
-  "torch_dtype": "float16",
-  "transformers_version": "4.30.0",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.27.4",
   "use_cache": true,
   "vocab_size": 65024
-}
+}
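
The substantive change is in auto_map: dropping the "tiiuae/falcon-7b--" prefix makes transformers resolve FalconConfig and the Falcon model classes from the configuration_falcon.py and modeling_falcon.py files in this repository itself, rather than from the upstream tiiuae/falcon-7b repo. The commit also removes "_name_or_path" and "num_kv_heads" and switches "torch_dtype" to "bfloat16". Below is a minimal loading sketch, not part of the commit, assuming the repo ships both custom-code files next to config.json; the repo id is a placeholder.

# Minimal sketch of how the updated auto_map is consumed. Assumes
# configuration_falcon.py and modeling_falcon.py sit next to this
# config.json; "your-namespace/falcon-7b-repo" is a hypothetical repo id.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-namespace/falcon-7b-repo"  # placeholder, not from the commit

# trust_remote_code=True lets transformers import FalconConfig and
# FalconForCausalLM from the repo's own files, as the prefix-free
# auto_map entries now request, instead of pulling them from
# tiiuae/falcon-7b as the old entries did.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="auto",  # resolves to "bfloat16" from the updated config
)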