full_lua-epoch1
- config.json +2 -2
- generation_config.json +1 -1
- pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "full_lua/checkpoint_3782",
   "activation_function": "gelu_pytorch_tanh",
   "architectures": [
     "GPTBigCodeForCausalLM"
@@ -32,7 +32,7 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.31.0",
   "use_cache": false,
   "validate_runner_input": true,
   "vocab_size": 49152
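For reference, a minimal sketch of loading a checkpoint with this config under transformers 4.31.0. The path "full_lua-epoch1" is an assumed stand-in for the actual repo id or local checkpoint directory, not something recorded in this commit:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# "full_lua-epoch1" is an assumed path; replace with the real repo id/directory.
model = AutoModelForCausalLM.from_pretrained(
    "full_lua-epoch1",
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
)
tokenizer = AutoTokenizer.from_pretrained("full_lua-epoch1")
```

AutoModelForCausalLM resolves to GPTBigCodeForCausalLM via the "architectures" field in config.json, so no model-specific import is needed.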
generation_config.json CHANGED
@@ -2,6 +2,6 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 0,
-  "transformers_version": "4.
+  "transformers_version": "4.31.0",
   "use_cache": false
 }
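If useful, these defaults can be inspected through transformers' GenerationConfig API; the path is again an assumed stand-in:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("full_lua-epoch1")  # assumed path
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 0 0
print(gen_cfg.use_cache)                           # False, as set in this commit
```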
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:441a18b87ea1fa9a7971c00fede85763c9686a4ea35bf8fa3f4bed748e8018a6
 size 2274505693
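Since pytorch_model.bin is stored via Git LFS, the pointer above records only the blob's sha256 oid and byte size; a downloaded copy can be checked against them with a short sketch (the local filename is an assumption):

```python
import hashlib
import os

path = "pytorch_model.bin"  # assumed local download location
expected_oid = "441a18b87ea1fa9a7971c00fede85763c9686a4ea35bf8fa3f4bed748e8018a6"
expected_size = 2274505693

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```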