mvasiliniuc committed
Commit 551affd
Parent: 869221e

Reset entire model

config.json DELETED
@@ -1,39 +0,0 @@
- {
-   "_name_or_path": "gpt2-large",
-   "activation_function": "gelu_new",
-   "architectures": [
-     "GPT2LMHeadModel"
-   ],
-   "attn_pdrop": 0.1,
-   "bos_token_id": 50256,
-   "embd_pdrop": 0.1,
-   "eos_token_id": 50256,
-   "initializer_range": 0.02,
-   "layer_norm_epsilon": 1e-05,
-   "model_type": "gpt2",
-   "n_ctx": 1024,
-   "n_embd": 1280,
-   "n_head": 20,
-   "n_inner": null,
-   "n_layer": 36,
-   "n_positions": 1024,
-   "reorder_and_upcast_attn": true,
-   "resid_pdrop": 0.1,
-   "scale_attn_by_inverse_layer_idx": true,
-   "scale_attn_weights": true,
-   "summary_activation": null,
-   "summary_first_dropout": 0.1,
-   "summary_proj_to_labels": true,
-   "summary_type": "cls_index",
-   "summary_use_proj": true,
-   "task_specific_params": {
-     "text-generation": {
-       "do_sample": true,
-       "max_length": 50
-     }
-   },
-   "torch_dtype": "float32",
-   "transformers_version": "4.29.2",
-   "use_cache": true,
-   "vocab_size": 200000
- }
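For reference, the deleted configuration can be rebuilt programmatically. Below is a minimal sketch using the `transformers` library; every value is copied from the diff above. Note that `vocab_size: 200000` diverges from the stock `gpt2-large` vocabulary of 50257, so this was not a vanilla gpt2-large config.

```python
from transformers import GPT2Config, GPT2LMHeadModel

# Rebuild the deleted config in code; values copied from the diff above.
# vocab_size=200000 differs from stock gpt2-large (50257); unlisted
# fields keep their GPT-2 defaults, which match the deleted file.
config = GPT2Config(
    n_positions=1024,
    n_embd=1280,
    n_head=20,
    n_layer=36,
    vocab_size=200000,
    scale_attn_by_inverse_layer_idx=True,
    reorder_and_upcast_attn=True,
)

# Instantiating from a config alone gives randomly initialized weights;
# the trained weights lived in the (also deleted) pytorch_model.bin.
model = GPT2LMHeadModel(config)
```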
generation_config.json DELETED
@@ -1,6 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 50256,
-   "eos_token_id": 50256,
-   "transformers_version": "4.29.2"
- }
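The deleted generation defaults can likewise be recreated with `transformers.GenerationConfig` (present in the 4.29 release recorded above). A minimal sketch, with `./model-dir` as a hypothetical output path:

```python
from transformers import GenerationConfig

# Recreate the deleted generation defaults; token ids copied from the
# diff above (50256 is GPT-2's <|endoftext|> token).
gen_config = GenerationConfig(bos_token_id=50256, eos_token_id=50256)

# "./model-dir" is a hypothetical path for illustration.
gen_config.save_pretrained("./model-dir")  # writes generation_config.json
```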
merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:960a025d108bda049562ba7705f8add20c47b4cbb1475af1aca9723097c9451a
- size 3900712061
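The repository stores only a Git LFS pointer, not the weights themselves: the three lines above record the pointer-spec version, the SHA-256 of the actual binary, and its size in bytes (~3.9 GB, roughly what a float32 checkpoint of the enlarged-vocabulary configuration above would occupy). A minimal sketch of a checker, using a hypothetical `verify_lfs_object` helper, that validates a downloaded file against these recorded values:

```python
import hashlib

# Hypothetical helper for illustration: check a downloaded weights file
# against the oid and size recorded in the deleted LFS pointer above.
def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_oid and size == expected_size

# Values copied verbatim from the pointer file above.
ok = verify_lfs_object(
    "pytorch_model.bin",
    "960a025d108bda049562ba7705f8add20c47b4cbb1475af1aca9723097c9451a",
    3900712061,
)
```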
special_tokens_map.json DELETED
@@ -1,5 +0,0 @@
- {
-   "bos_token": "<|endoftext|>",
-   "eos_token": "<|endoftext|>",
-   "unk_token": "<|endoftext|>"
- }
tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json DELETED
@@ -1,9 +0,0 @@
- {
-   "add_prefix_space": false,
-   "bos_token": "<|endoftext|>",
-   "clean_up_tokenization_spaces": true,
-   "eos_token": "<|endoftext|>",
-   "model_max_length": 1024,
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": "<|endoftext|>"
- }
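The tokenizer-related deletions (`merges.txt` above, `vocab.json` below, plus this config and `special_tokens_map.json`) together defined a standard GPT-2 byte-level BPE tokenizer. A minimal sketch of how they would have been loaded, assuming a hypothetical local checkout of the repository from before this commit:

```python
from transformers import GPT2Tokenizer

# "./model-dir" is a hypothetical local checkout containing the deleted
# vocab.json, merges.txt, tokenizer_config.json and special_tokens_map.json.
tok = GPT2Tokenizer.from_pretrained("./model-dir")

# Per the deleted configs, all three special tokens map to <|endoftext|>.
assert tok.bos_token == tok.eos_token == tok.unk_token == "<|endoftext|>"
assert tok.model_max_length == 1024
```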
vocab.json DELETED
The diff for this file is too large to render. See raw diff