flyhero committed
Commit f7bd23e
Parent: 34a4a1a

add config

Files changed (1)
  1. config.json +39 -10
config.json CHANGED
@@ -4,7 +4,36 @@
     "GPTNeoForCausalLM"
   ],
   "attention_dropout": 0,
-  "attention_layers": ["global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global", "global"],
+  "attention_layers": [
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global",
+    "global"
+  ],
   "attention_types": [
     [
       [
@@ -34,16 +63,16 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
-  "transformers_version": "4.5.0.dev0",
-  "use_cache": true,
-  "vocab_size": 50400,
-  "window_size": 256,
-  "tokenizer_class": "GPT2Tokenizer",
   "task_specific_params": {
     "text-generation": {
       "do_sample": true,
-      "temperature": 1.0,
-      "max_length": 50
+      "max_length": 50,
+      "temperature": 1.0
     }
-  }
-}
+  },
+  "tokenizer_class": "GPT2Tokenizer",
+  "transformers_version": "4.7.0.dev0",
+  "use_cache": true,
+  "vocab_size": 50400,
+  "window_size": 256
+}
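For context on the reshaped fields: in transformers, GPTNeoConfig derives the flat per-layer "attention_layers" list from the nested "attention_types" spec, where each entry is a [types, count] pair (GPTNeoConfig.expand_attention_types_params performs the expansion). The sketch below replays that expansion against the committed config.json and checks that the two fields agree; reading the file from a local path is an assumption for illustration, not part of the commit.

# Sanity-check sketch: confirm "attention_layers" matches the expansion of
# "attention_types", mirroring GPTNeoConfig.expand_attention_types_params.
# Assumes the config.json from this commit sits in the working directory.
import json

with open("config.json") as f:
    cfg = json.load(f)

expanded = []
for types, count in cfg["attention_types"]:
    expanded.extend(types * count)  # e.g. ["global"] * 28 -> 28 "global" layers

assert expanded == cfg["attention_layers"], "attention_types and attention_layers disagree"
print(len(expanded), "attention layers:", sorted(set(expanded)))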