pere committed on
Commit
ac8fe8e
1 Parent(s): 74202fe

test submit

Browse files
config.json CHANGED
@@ -34,7 +34,7 @@
34
  }
35
  },
36
  "torch_dtype": "float32",
37
- "transformers_version": "4.9.0.dev0",
38
  "use_cache": true,
39
  "vocab_size": 50257
40
  }
 
34
  }
35
  },
36
  "torch_dtype": "float32",
37
+ "transformers_version": "4.11.0.dev0",
38
  "use_cache": true,
39
  "vocab_size": 50257
40
  }
pytorch_model.bin → events.out.tfevents.1634552807.t1v-n-f6f5b6cc-w-0.1083016.0.v2 RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:269d12af9cbd67b0a184d01059c26b9c096debf65b24ea8e87dc33b6e2656087
3
- size 510404491
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc1a55582d739e19cf42476e987acab254639a592baf341caefe47ce1422f38a
3
+ size 367914
events.out.tfevents.1634745538.t1v-n-f6f5b6cc-w-0.1277181.0.v2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9ae21f21ad44c444ca8e63b9a232eb05cbdd77331533f5a949ed9f5f98197884
3
+ size 40
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:38486582be28002574744c576acd719c845aae584aed1d4337b936f29e591ce1
3
  size 497764120
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bcc8d7b05a5fd7c3d6ce4a92afc95f1c393d0e579907379f45a3e3a596c68606
3
  size 497764120
run.sh CHANGED
@@ -1,10 +1,12 @@
1
  python run_clm_flax.py \
2
  --output_dir="./" \
3
  --model_type="gpt2" \
 
4
  --config_name="./" \
5
  --tokenizer_name="./" \
6
  --train_file="/mnt/disks/flaxdisk/corpus/train.json" \
7
  --validation_file="/mnt/disks/flaxdisk/corpus/validation.json" \
 
8
  --do_train --do_eval \
9
  --block_size="512" \
10
  --per_device_train_batch_size="64" \
 
1
  python run_clm_flax.py \
2
  --output_dir="./" \
3
  --model_type="gpt2" \
4
+ --model_name_or_path="." \
5
  --config_name="./" \
6
  --tokenizer_name="./" \
7
  --train_file="/mnt/disks/flaxdisk/corpus/train.json" \
8
  --validation_file="/mnt/disks/flaxdisk/corpus/validation.json" \
9
+ --cache_dir="/mnt/disks/flaxdisk/cache/" \
10
  --do_train --do_eval \
11
  --block_size="512" \
12
  --per_device_train_batch_size="64" \
run_clm_flax.py CHANGED
@@ -669,7 +669,7 @@ def main():
669
  model.save_pretrained(training_args.output_dir, params=params)
670
  tokenizer.save_pretrained(training_args.output_dir)
671
  if training_args.push_to_hub:
672
- repo.push_to_hub(commit_message=f"Saving weights and logs of step {cur_step}", blocking=False)
673
 
674
 
675
  if __name__ == "__main__":
 
669
  model.save_pretrained(training_args.output_dir, params=params)
670
  tokenizer.save_pretrained(training_args.output_dir)
671
  if training_args.push_to_hub:
672
+ repo.push_to_hub(commit_message=f"Saving weights and logs of step {cur_step}")
673
 
674
 
675
  if __name__ == "__main__":
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "special_tokens_map_file": null, "name_or_path": "norwegian-gpt2", "tokenizer_class": "GPT2Tokenizer"}
 
1
+ {"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "special_tokens_map_file": null, "name_or_path": "./", "tokenizer_class": "GPT2Tokenizer"}