truongghieu committed
Commit f70796d
1 Parent(s): b58cb3a

Upload DeciLMForCausalLM

config.json CHANGED
@@ -3,6 +3,7 @@
   "architectures": [
     "DeciLMForCausalLM"
   ],
+  "attention_bias": false,
   "auto_map": {
     "AutoConfig": "Deci/DeciLM-6b-instruct--configuration_decilm.DeciLMConfig",
     "AutoModelForCausalLM": "Deci/DeciLM-6b-instruct--modeling_decilm.DeciLMForCausalLM"
@@ -64,7 +65,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.33.3",
+  "transformers_version": "4.34.1",
   "use_bfloat16": true,
   "use_cache": true,
   "vocab_size": 32000
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.33.3"
+  "transformers_version": "4.34.1"
 }
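generation_config.json carries the default generate() settings that transformers merges in at inference time. A small sketch, under the same hedged repo-id assumption as above, showing that the committed defaults round-trip through GenerationConfig:

```python
from transformers import GenerationConfig

# Repo id is an assumption, as in the previous sketch.
gen_config = GenerationConfig.from_pretrained("Deci/DeciLM-6b-instruct")
assert gen_config.bos_token_id == 1  # matches "bos_token_id": 1 above
assert gen_config.eos_token_id == 2  # matches "eos_token_id": 2 above
```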
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9362112639bcc0e201cebdd5d9667eed81eb5099acc1082e455b3e5d33692225
-size 9959926189
+oid sha256:ddb5353de9c95a76c7eea0aceea2ae5ecb886269265724a6547a2edfb9ba248d
+size 9959926676
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a20c35284a70234d3d7686ccfc598eff01fb693802ebbab61cc7a08ceedaec1d
-size 1474383009
+oid sha256:0574464580cf4065117fbac7f0c8fb3e2bd16f93dd970b2b60241e0624855810
+size 1474383484
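Only the git-lfs pointer files change in the commit itself; the weight blobs are stored separately, addressed by their sha256 oid. A sketch for verifying downloaded shards against the updated pointers, using the oid and size values shown above (the helper is our own illustration, not part of git-lfs tooling):

```python
import hashlib
from pathlib import Path

def matches_lfs_pointer(path: str, oid: str, size: int) -> bool:
    """Compare a local file against the oid/size fields of a git-lfs pointer."""
    p = Path(path)
    if p.stat().st_size != size:  # cheap check first: byte size must match
        return False
    digest = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == oid

# oid/size values copied from the updated pointers in this commit.
shards = {
    "pytorch_model-00001-of-00002.bin": (
        "ddb5353de9c95a76c7eea0aceea2ae5ecb886269265724a6547a2edfb9ba248d",
        9959926676,
    ),
    "pytorch_model-00002-of-00002.bin": (
        "0574464580cf4065117fbac7f0c8fb3e2bd16f93dd970b2b60241e0624855810",
        1474383484,
    ),
}
for name, (oid, size) in shards.items():
    assert matches_lfs_pointer(name, oid, size), f"{name} does not match its pointer"
```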