cl-modelcloud committed on
Commit
6bb1d3e
1 Parent(s): e1febe6

Upload folder using huggingface_hub (#1)

Browse files

- 6b23ff19bebe812fd2244d7fcbfde6273fbd46b424abe56d834d8c8f59f6df3b (37bc42a9bcfe606c0256c58932d66674c94f842e)

config.json ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_attn_implementation_autoset": true,
3
+ "_name_or_path": "/monster/data/model/opt-125",
4
+ "_remove_final_layer_norm": false,
5
+ "activation_dropout": 0.0,
6
+ "activation_function": "relu",
7
+ "architectures": [
8
+ "OPTForCausalLM"
9
+ ],
10
+ "attention_dropout": 0.0,
11
+ "bos_token_id": 2,
12
+ "do_layer_norm_before": true,
13
+ "dropout": 0.1,
14
+ "enable_bias": true,
15
+ "eos_token_id": 2,
16
+ "ffn_dim": 3072,
17
+ "hidden_size": 768,
18
+ "init_std": 0.02,
19
+ "layer_norm_elementwise_affine": true,
20
+ "layerdrop": 0.0,
21
+ "max_position_embeddings": 2048,
22
+ "model_type": "opt",
23
+ "num_attention_heads": 12,
24
+ "num_hidden_layers": 12,
25
+ "pad_token_id": 1,
26
+ "prefix": "</s>",
27
+ "quantization_config": {
28
+ "bits": 4,
29
+ "checkpoint_format": "gptq",
30
+ "damp_auto_increment": 0.0015,
31
+ "damp_percent": 0.005,
32
+ "desc_act": false,
33
+ "dynamic": null,
34
+ "group_size": 128,
35
+ "lm_head": false,
36
+ "meta": {
37
+ "damp_auto_increment": 0.0015,
38
+ "damp_percent": 0.005,
39
+ "quantizer": "gptqmodel:1.1.0-dev",
40
+ "uri": "https://github.com/modelcloud/gptqmodel"
41
+ },
42
+ "quant_method": "gptq",
43
+ "static_groups": false,
44
+ "sym": true,
45
+ "true_sequential": true
46
+ },
47
+ "torch_dtype": "float16",
48
+ "transformers_version": "4.47.0.dev0",
49
+ "use_cache": true,
50
+ "vocab_size": 50272,
51
+ "word_embed_proj_dim": 768
52
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d6db62203a994e3e475f0b79e60d84cd95dd9b5b909a13ebe623c3b4cd219dde
3
+ size 202331472
quant_log.json ADDED
@@ -0,0 +1 @@
 
 
1
+ [{"layer": 0, "module": "self_attn.k_proj", "loss": "1.20996", "damp": "0.00500", "layer_forward_time": "2.708", "w_clone_time": "0.020", "quant_time": "11.027"}, {"layer": 0, "module": "self_attn.v_proj", "loss": "0.02060", "damp": "0.00500", "layer_forward_time": "2.708", "w_clone_time": "0.014", "quant_time": "0.640"}, {"layer": 0, "module": "self_attn.q_proj", "loss": "0.96240", "damp": "0.00500", "layer_forward_time": "2.708", "w_clone_time": "0.020", "quant_time": "0.159"}, {"layer": 0, "module": "self_attn.out_proj", "loss": "0.00040", "damp": "0.00500", "layer_forward_time": "1.381", "w_clone_time": "0.026", "quant_time": "0.151"}, {"layer": 0, "module": "fc1", "loss": "0.07153", "damp": "0.00500", "layer_forward_time": "1.229", "w_clone_time": "0.044", "quant_time": "1.262"}, {"layer": 0, "module": "fc2", "loss": "0.00394", "damp": "0.00500", "layer_forward_time": "2.702", "w_clone_time": "0.026", "quant_time": "0.810"}, {"layer": 1, "module": "self_attn.k_proj", "loss": "0.67139", "damp": "0.00500", "layer_forward_time": "0.827", "w_clone_time": "0.010", "quant_time": "0.153"}, {"layer": 1, "module": "self_attn.v_proj", "loss": "0.02145", "damp": "0.00500", "layer_forward_time": "0.827", "w_clone_time": "0.006", "quant_time": "0.518"}, {"layer": 1, "module": "self_attn.q_proj", "loss": "0.50635", "damp": "0.00500", "layer_forward_time": "0.827", "w_clone_time": "0.022", "quant_time": "0.238"}, {"layer": 1, "module": "self_attn.out_proj", "loss": "0.00033", "damp": "0.00500", "layer_forward_time": "0.495", "w_clone_time": "0.016", "quant_time": "0.152"}, {"layer": 1, "module": "fc1", "loss": "0.26050", "damp": "0.00500", "layer_forward_time": "0.552", "w_clone_time": "0.018", "quant_time": "0.155"}, {"layer": 1, "module": "fc2", "loss": "0.00221", "damp": "0.00500", "layer_forward_time": "1.106", "w_clone_time": "0.016", "quant_time": "0.865"}, {"layer": 2, "module": "self_attn.k_proj", "loss": "1.33008", "damp": "0.00500", "layer_forward_time": "0.566", 
"w_clone_time": "0.025", "quant_time": "0.235"}, {"layer": 2, "module": "self_attn.v_proj", "loss": "0.06781", "damp": "0.00500", "layer_forward_time": "0.566", "w_clone_time": "0.014", "quant_time": "0.157"}, {"layer": 2, "module": "self_attn.q_proj", "loss": "1.12793", "damp": "0.00500", "layer_forward_time": "0.566", "w_clone_time": "0.015", "quant_time": "0.155"}, {"layer": 2, "module": "self_attn.out_proj", "loss": "0.00042", "damp": "0.00500", "layer_forward_time": "0.420", "w_clone_time": "0.026", "quant_time": "0.138"}, {"layer": 2, "module": "fc1", "loss": "0.26978", "damp": "0.00500", "layer_forward_time": "0.380", "w_clone_time": "0.013", "quant_time": "0.142"}, {"layer": 2, "module": "fc2", "loss": "0.00240", "damp": "0.00500", "layer_forward_time": "1.293", "w_clone_time": "0.015", "quant_time": "0.651"}, {"layer": 3, "module": "self_attn.k_proj", "loss": "1.01367", "damp": "0.00500", "layer_forward_time": "0.591", "w_clone_time": "0.026", "quant_time": "0.271"}, {"layer": 3, "module": "self_attn.v_proj", "loss": "0.09216", "damp": "0.00500", "layer_forward_time": "0.591", "w_clone_time": "0.014", "quant_time": "0.162"}, {"layer": 3, "module": "self_attn.q_proj", "loss": "0.98779", "damp": "0.00500", "layer_forward_time": "0.591", "w_clone_time": "0.014", "quant_time": "0.154"}, {"layer": 3, "module": "self_attn.out_proj", "loss": "0.00051", "damp": "0.00500", "layer_forward_time": "0.532", "w_clone_time": "0.017", "quant_time": "0.161"}, {"layer": 3, "module": "fc1", "loss": "0.18787", "damp": "0.00500", "layer_forward_time": "0.756", "w_clone_time": "0.012", "quant_time": "0.148"}, {"layer": 3, "module": "fc2", "loss": "0.00157", "damp": "0.00500", "layer_forward_time": "1.858", "w_clone_time": "0.009", "quant_time": "0.654"}, {"layer": 4, "module": "self_attn.k_proj", "loss": "1.32715", "damp": "0.00500", "layer_forward_time": "0.706", "w_clone_time": "0.009", "quant_time": "0.154"}, {"layer": 4, "module": "self_attn.v_proj", "loss": "0.11310", 
"damp": "0.00500", "layer_forward_time": "0.706", "w_clone_time": "0.006", "quant_time": "0.138"}, {"layer": 4, "module": "self_attn.q_proj", "loss": "1.31250", "damp": "0.00500", "layer_forward_time": "0.706", "w_clone_time": "0.013", "quant_time": "0.137"}, {"layer": 4, "module": "self_attn.out_proj", "loss": "0.00068", "damp": "0.00500", "layer_forward_time": "0.398", "w_clone_time": "0.012", "quant_time": "0.135"}, {"layer": 4, "module": "fc1", "loss": "0.28613", "damp": "0.00500", "layer_forward_time": "0.410", "w_clone_time": "0.013", "quant_time": "0.146"}, {"layer": 4, "module": "fc2", "loss": "0.00459", "damp": "0.00500", "layer_forward_time": "1.113", "w_clone_time": "0.013", "quant_time": "0.626"}, {"layer": 5, "module": "self_attn.k_proj", "loss": "1.45410", "damp": "0.00500", "layer_forward_time": "0.601", "w_clone_time": "0.020", "quant_time": "0.133"}, {"layer": 5, "module": "self_attn.v_proj", "loss": "0.09650", "damp": "0.00500", "layer_forward_time": "0.601", "w_clone_time": "0.012", "quant_time": "0.148"}, {"layer": 5, "module": "self_attn.q_proj", "loss": "1.43359", "damp": "0.00500", "layer_forward_time": "0.601", "w_clone_time": "0.014", "quant_time": "0.149"}, {"layer": 5, "module": "self_attn.out_proj", "loss": "0.00108", "damp": "0.00500", "layer_forward_time": "0.610", "w_clone_time": "0.009", "quant_time": "0.138"}, {"layer": 5, "module": "fc1", "loss": "0.24805", "damp": "0.00500", "layer_forward_time": "0.640", "w_clone_time": "0.027", "quant_time": "0.135"}, {"layer": 5, "module": "fc2", "loss": "0.00741", "damp": "0.00500", "layer_forward_time": "1.789", "w_clone_time": "0.026", "quant_time": "0.722"}, {"layer": 6, "module": "self_attn.k_proj", "loss": "1.58691", "damp": "0.00500", "layer_forward_time": "0.654", "w_clone_time": "0.028", "quant_time": "0.141"}, {"layer": 6, "module": "self_attn.v_proj", "loss": "0.13135", "damp": "0.00500", "layer_forward_time": "0.654", "w_clone_time": "0.015", "quant_time": "0.167"}, {"layer": 6, 
"module": "self_attn.q_proj", "loss": "1.39062", "damp": "0.00500", "layer_forward_time": "0.654", "w_clone_time": "0.019", "quant_time": "0.139"}, {"layer": 6, "module": "self_attn.out_proj", "loss": "0.00156", "damp": "0.00500", "layer_forward_time": "0.512", "w_clone_time": "0.006", "quant_time": "0.151"}, {"layer": 6, "module": "fc1", "loss": "0.26514", "damp": "0.00500", "layer_forward_time": "0.553", "w_clone_time": "0.016", "quant_time": "0.154"}, {"layer": 6, "module": "fc2", "loss": "0.00933", "damp": "0.00500", "layer_forward_time": "1.785", "w_clone_time": "0.019", "quant_time": "0.680"}, {"layer": 7, "module": "self_attn.k_proj", "loss": "1.76367", "damp": "0.00500", "layer_forward_time": "0.638", "w_clone_time": "0.017", "quant_time": "0.157"}, {"layer": 7, "module": "self_attn.v_proj", "loss": "0.14624", "damp": "0.00500", "layer_forward_time": "0.638", "w_clone_time": "0.013", "quant_time": "0.141"}, {"layer": 7, "module": "self_attn.q_proj", "loss": "1.61133", "damp": "0.00500", "layer_forward_time": "0.638", "w_clone_time": "0.011", "quant_time": "0.178"}, {"layer": 7, "module": "self_attn.out_proj", "loss": "0.00215", "damp": "0.00500", "layer_forward_time": "0.510", "w_clone_time": "0.019", "quant_time": "0.143"}, {"layer": 7, "module": "fc1", "loss": "0.32153", "damp": "0.00500", "layer_forward_time": "0.474", "w_clone_time": "0.044", "quant_time": "0.194"}, {"layer": 7, "module": "fc2", "loss": "0.01111", "damp": "0.00500", "layer_forward_time": "1.270", "w_clone_time": "0.013", "quant_time": "0.653"}, {"layer": 8, "module": "self_attn.k_proj", "loss": "1.77539", "damp": "0.00500", "layer_forward_time": "0.621", "w_clone_time": "0.019", "quant_time": "0.134"}, {"layer": 8, "module": "self_attn.v_proj", "loss": "0.20496", "damp": "0.00500", "layer_forward_time": "0.621", "w_clone_time": "0.013", "quant_time": "0.142"}, {"layer": 8, "module": "self_attn.q_proj", "loss": "1.70215", "damp": "0.00500", "layer_forward_time": "0.621", "w_clone_time": 
"0.013", "quant_time": "0.129"}, {"layer": 8, "module": "self_attn.out_proj", "loss": "0.00298", "damp": "0.00500", "layer_forward_time": "0.399", "w_clone_time": "0.017", "quant_time": "0.141"}, {"layer": 8, "module": "fc1", "loss": "0.42334", "damp": "0.00500", "layer_forward_time": "0.582", "w_clone_time": "0.016", "quant_time": "0.242"}, {"layer": 8, "module": "fc2", "loss": "0.01706", "damp": "0.00500", "layer_forward_time": "1.298", "w_clone_time": "0.030", "quant_time": "0.681"}, {"layer": 9, "module": "self_attn.k_proj", "loss": "1.90918", "damp": "0.00500", "layer_forward_time": "0.629", "w_clone_time": "0.027", "quant_time": "0.183"}, {"layer": 9, "module": "self_attn.v_proj", "loss": "0.23218", "damp": "0.00500", "layer_forward_time": "0.629", "w_clone_time": "0.014", "quant_time": "0.184"}, {"layer": 9, "module": "self_attn.q_proj", "loss": "1.73535", "damp": "0.00500", "layer_forward_time": "0.629", "w_clone_time": "0.018", "quant_time": "0.233"}, {"layer": 9, "module": "self_attn.out_proj", "loss": "0.00544", "damp": "0.00500", "layer_forward_time": "0.506", "w_clone_time": "0.013", "quant_time": "0.168"}, {"layer": 9, "module": "fc1", "loss": "0.55273", "damp": "0.00500", "layer_forward_time": "0.457", "w_clone_time": "0.018", "quant_time": "0.136"}, {"layer": 9, "module": "fc2", "loss": "0.02431", "damp": "0.00500", "layer_forward_time": "1.183", "w_clone_time": "0.019", "quant_time": "0.627"}, {"layer": 10, "module": "self_attn.k_proj", "loss": "2.09961", "damp": "0.00500", "layer_forward_time": "0.586", "w_clone_time": "0.017", "quant_time": "0.145"}, {"layer": 10, "module": "self_attn.v_proj", "loss": "0.28589", "damp": "0.00500", "layer_forward_time": "0.586", "w_clone_time": "0.017", "quant_time": "0.153"}, {"layer": 10, "module": "self_attn.q_proj", "loss": "1.67480", "damp": "0.00500", "layer_forward_time": "0.586", "w_clone_time": "0.021", "quant_time": "0.163"}, {"layer": 10, "module": "self_attn.out_proj", "loss": "0.00759", "damp": 
"0.00500", "layer_forward_time": "0.510", "w_clone_time": "0.011", "quant_time": "0.153"}, {"layer": 10, "module": "fc1", "loss": "0.68848", "damp": "0.00500", "layer_forward_time": "0.448", "w_clone_time": "0.025", "quant_time": "0.172"}, {"layer": 10, "module": "fc2", "loss": "0.04025", "damp": "0.00500", "layer_forward_time": "1.201", "w_clone_time": "0.014", "quant_time": "0.530"}, {"layer": 11, "module": "self_attn.k_proj", "loss": "1.81934", "damp": "0.00500", "layer_forward_time": "0.736", "w_clone_time": "0.021", "quant_time": "0.201"}, {"layer": 11, "module": "self_attn.v_proj", "loss": "0.38770", "damp": "0.00500", "layer_forward_time": "0.736", "w_clone_time": "0.017", "quant_time": "0.242"}, {"layer": 11, "module": "self_attn.q_proj", "loss": "1.63086", "damp": "0.00500", "layer_forward_time": "0.736", "w_clone_time": "0.043", "quant_time": "0.243"}, {"layer": 11, "module": "self_attn.out_proj", "loss": "0.01492", "damp": "0.00500", "layer_forward_time": "0.988", "w_clone_time": "0.016", "quant_time": "0.169"}, {"layer": 11, "module": "fc1", "loss": "1.00293", "damp": "0.00500", "layer_forward_time": "0.817", "w_clone_time": "0.017", "quant_time": "0.152"}, {"layer": 11, "module": "fc2", "loss": "0.04837", "damp": "0.00500", "layer_forward_time": "1.927", "w_clone_time": "0.024", "quant_time": "0.692"}]
quantize_config.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bits": 4,
3
+ "dynamic": null,
4
+ "group_size": 128,
5
+ "desc_act": false,
6
+ "static_groups": false,
7
+ "sym": true,
8
+ "lm_head": false,
9
+ "damp_percent": 0.005,
10
+ "damp_auto_increment": 0.0015,
11
+ "true_sequential": true,
12
+ "quant_method": "gptq",
13
+ "checkpoint_format": "gptq",
14
+ "meta": {
15
+ "quantizer": "gptqmodel:1.1.0-dev",
16
+ "uri": "https://github.com/modelcloud/gptqmodel",
17
+ "damp_percent": 0.005,
18
+ "damp_auto_increment": 0.0015
19
+ }
20
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "</s>",
4
+ "lstrip": false,
5
+ "normalized": true,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": true,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<pad>",
18
+ "lstrip": false,
19
+ "normalized": true,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "</s>",
25
+ "lstrip": false,
26
+ "normalized": true,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "1": {
6
+ "content": "<pad>",
7
+ "lstrip": false,
8
+ "normalized": true,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "2": {
14
+ "content": "</s>",
15
+ "lstrip": false,
16
+ "normalized": true,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ }
21
+ },
22
+ "bos_token": "</s>",
23
+ "clean_up_tokenization_spaces": false,
24
+ "eos_token": "</s>",
25
+ "errors": "replace",
26
+ "model_max_length": 1000000000000000019884624838656,
27
+ "pad_token": "<pad>",
28
+ "tokenizer_class": "GPT2Tokenizer",
29
+ "unk_token": "</s>"
30
+ }
vocab.json ADDED
The diff for this file is too large to render. See raw diff