arjunguha committed on
Commit bc2cfcc
1 Parent(s): 448c85d

lua-more_stack-epoch_1

Files changed (3)
  1. config.json +3 -3
  2. generation_config.json +1 -2
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "r_full_1b/checkpoint_13398/",
+  "_name_or_path": "more_stack/lua/checkpoint_3721",
   "activation_function": "gelu_pytorch_tanh",
   "architectures": [
     "GPTBigCodeForCausalLM"
@@ -31,8 +31,8 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
-  "torch_dtype": "float32",
-  "transformers_version": "4.37.2",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.40.0",
   "use_cache": false,
   "validate_runner_input": true,
   "vocab_size": 49152
generation_config.json CHANGED
@@ -2,6 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 0,
-  "transformers_version": "4.37.2",
-  "use_cache": false
+  "transformers_version": "4.40.0"
 }
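Taken together, the two config diffs above record a checkpoint saved in bfloat16 with transformers 4.40.0. The following is a minimal loading sketch, not anything stated in this commit: it assumes only the public transformers/torch API, and the repository id is a placeholder (the Hub id of this repo is not shown on this page).

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "<this-model-repo>"  # placeholder; substitute the actual Hub id of this repository

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in config.json
)

prompt = "-- Lua: return the n-th Fibonacci number\nlocal function fib(n)\n"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Note that "use_cache": false is no longer pinned in generation_config.json after this commit, so generation presumably falls back to the library default; that is an inference from the diff, not something stated here.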
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ccb6d724c365278df1bdee56608b92087a1a7409fcecf1ecb35e45b4cd182e84
-size 4548859752
+oid sha256:f5aac2bf239c538a8770c900df6002035bf503eb1157d385d8cc454a139197a3
+size 2274445296
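As a rough cross-check (an inference from the diffs above, not a claim made on this page): a bfloat16 element is half the width of a float32 element, so the dtype change in config.json should roughly halve the weights file, which the size fields reflect.

old_size = 4548859752  # bytes, previous float32 checkpoint file
new_size = 2274445296  # bytes, new bfloat16 checkpoint file
print(new_size / old_size)  # ~0.5000, consistent with float32 -> bfloat16
print(new_size // 2)        # ~1.14e9 parameters implied at 2 bytes each,
                            # ignoring the small safetensors header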