doberst committed
Commit e7ae47c
1 Parent(s): 8f05c4b

Upload config.json

Files changed (1)
  1. config.json +30 -83
config.json CHANGED
@@ -4,93 +4,40 @@
  "training_timestamp": "Sat Oct 7 09:46:53 2023",
  "training_comments": "falcon-rw-1b-base",
  "_name_or_path": "tiiuae/falcon-rw-1b",
- "model_type": "falcon",
- "vocab_size": 50304,
- "hidden_size": 2048,
- "num_hidden_layers": 24,
- "num_attention_heads": 32,
- "layer_norm_epsilon": 1e-05,
- "initializer_range": 0.02,
- "use_cache": true,
- "hidden_dropout": 0.0,
- "attention_dropout": 0.0,
- "bos_token_id": 1,
- "eos_token_id": 2,
- "num_kv_heads": 32,
  "alibi": true,
- "new_decoder_architecture": false,
- "multi_query": false,
- "parallel_attn": false,
- "bias": true,
- "return_dict": true,
- "output_hidden_states": false,
- "output_attentions": false,
- "torchscript": false,
- "torch_dtype": "bfloat16",
- "use_bfloat16": false,
- "tf_legacy_loss": false,
- "pruned_heads": {},
- "tie_word_embeddings": true,
- "is_encoder_decoder": false,
- "is_decoder": false,
- "cross_attention_hidden_size": null,
- "add_cross_attention": false,
- "tie_encoder_decoder": false,
- "max_length": 20,
- "min_length": 0,
- "do_sample": false,
- "early_stopping": false,
- "num_beams": 1,
- "num_beam_groups": 1,
- "diversity_penalty": 0.0,
- "temperature": 1.0,
- "top_k": 50,
- "top_p": 1.0,
- "typical_p": 1.0,
- "repetition_penalty": 1.0,
- "length_penalty": 1.0,
- "no_repeat_ngram_size": 0,
- "encoder_no_repeat_ngram_size": 0,
- "bad_words_ids": null,
- "num_return_sequences": 1,
- "chunk_size_feed_forward": 0,
- "output_scores": false,
- "return_dict_in_generate": false,
- "forced_bos_token_id": null,
- "forced_eos_token_id": null,
- "remove_invalid_values": false,
- "exponential_decay_length_penalty": null,
- "suppress_tokens": null,
- "begin_suppress_tokens": null,
+ "apply_residual_connection_post_layernorm": false,
  "architectures": [
  "FalconForCausalLM"
  ],
- "finetuning_task": null,
- "id2label": {
- "0": "LABEL_0",
- "1": "LABEL_1"
- },
- "label2id": {
- "LABEL_0": 0,
- "LABEL_1": 1
- },
- "tokenizer_class": null,
- "prefix": null,
- "pad_token_id": null,
- "sep_token_id": null,
- "decoder_start_token_id": null,
- "task_specific_params": null,
- "problem_type": null,
- "_name_or_path": "tiiuae/falcon-rw-1b",
- "transformers_version": "4.28.1",
- "apply_residual_connection_post_layernorm": false,
+ "attention_dropout": 0.0,
  "auto_map": {
- "AutoConfig": "configuration_falcon.FalconConfig",
- "AutoModel": "modeling_falcon.FalconModel",
- "AutoModelForSequenceClassification": "modeling_falcon.FalconForSequenceClassification",
- "AutoModelForTokenClassification": "modeling_falcon.FalconForTokenClassification",
- "AutoModelForQuestionAnswering": "modeling_falcon.FalconForQuestionAnswering",
- "AutoModelForCausalLM": "modeling_falcon.FalconForCausalLM"
+ "AutoConfig": "tiiuae/falcon-rw-1b--configuration_falcon.FalconConfig",
+ "AutoModel": "tiiuae/falcon-rw-1b--modeling_falcon.FalconModel",
+ "AutoModelForCausalLM": "tiiuae/falcon-rw-1b--modeling_falcon.FalconForCausalLM",
+ "AutoModelForQuestionAnswering": "tiiuae/falcon-rw-1b--modeling_falcon.FalconForQuestionAnswering",
+ "AutoModelForSequenceClassification": "tiiuae/falcon-rw-1b--modeling_falcon.FalconForSequenceClassification",
+ "AutoModelForTokenClassification": "tiiuae/falcon-rw-1b--modeling_falcon.FalconForTokenClassification"
  },
- "trained": "custom training"
+ "bias": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_dropout": 0.0,
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "layer_norm_epsilon": 1e-05,
+ "max_position_embeddings": 2048,
+ "model_type": "falcon",
+ "multi_query": false,
+ "new_decoder_architecture": false,
+ "num_attention_heads": 32,
+ "num_hidden_layers": 24,
+ "num_kv_heads": 32,
+ "parallel_attn": false,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.33.2",
+ "use_cache": true,
+ "vocab_size": 50304
  }
+
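
In the updated file, the auto_map values use the repo-qualified form "repo_id--module.ClassName" (for example, "tiiuae/falcon-rw-1b--modeling_falcon.FalconForCausalLM"), which tells transformers to fetch the custom Falcon code from tiiuae/falcon-rw-1b rather than from the model repo itself. A minimal loading sketch, assuming a hypothetical repo id (this commit does not name the repo hosting the file):

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical placeholder -- substitute the repo this config.json lives in.
repo_id = "your-namespace/your-falcon-rw-1b-model"

# trust_remote_code=True lets transformers follow the repo-qualified
# auto_map entries and pull configuration_falcon.py / modeling_falcon.py
# from tiiuae/falcon-rw-1b.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

The long tail of deleted keys (temperature, top_k, id2label, and so on) is consistent with re-saving under transformers 4.33.2, where save_pretrained writes only values that differ from the config class defaults (via PretrainedConfig.to_diff_dict), so library-default generation settings no longer appear in config.json.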