anilbhatt1 committed
Commit · b393a63
1 Parent(s): e1bd3ea
Upload pythia-70m-deduped files
- checkpoints/EleutherAI/pythia-70m-deduped/lit_config.json +1 -0
- checkpoints/EleutherAI/pythia-70m-deduped/lit_model.pth +3 -0
- checkpoints/EleutherAI/pythia-70m-deduped/pytorch_model.bin +3 -0
- checkpoints/EleutherAI/pythia-70m-deduped/tokenizer.json +0 -0
- checkpoints/EleutherAI/pythia-70m-deduped/tokenizer_config.json +9 -0
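The five files above sit under checkpoints/EleutherAI/pythia-70m-deduped/ in the Space repository. A minimal sketch of pulling just that directory locally with huggingface_hub (the repo_id below is a placeholder, since the Space name is not part of this commit view):

    # Sketch: fetch only the pythia-70m-deduped checkpoint directory from the Space.
    # NOTE: "anilbhatt1/<space-name>" is a hypothetical id -- substitute the real Space id.
    from huggingface_hub import snapshot_download

    local_dir = snapshot_download(
        repo_id="anilbhatt1/<space-name>",
        repo_type="space",
        allow_patterns=["checkpoints/EleutherAI/pythia-70m-deduped/*"],
    )
    print(local_dir)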
checkpoints/EleutherAI/pythia-70m-deduped/lit_config.json
ADDED
@@ -0,0 +1 @@
+{"name": "pythia-70m-deduped", "hf_config": {"org": "EleutherAI", "name": "pythia-70m-deduped"}, "block_size": 2048, "vocab_size": 50254, "padding_multiple": 128, "padded_vocab_size": 50304, "n_layer": 6, "n_head": 8, "n_embd": 512, "rotary_percentage": 0.25, "parallel_residual": true, "bias": true, "lm_head_bias": false, "n_query_groups": 8, "shared_attention_norm": false, "_norm_class": "LayerNorm", "norm_eps": 1e-05, "_mlp_class": "GptNeoxMLP", "gelu_approximate": "none", "intermediate_size": 2048, "rope_condense_ratio": 1, "rope_base": 10000}
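For orientation, this config describes a 6-layer GPT-NeoX-style model (parallel residual, LayerNorm, GptNeoxMLP) with an 8-head, 512-dim attention block and rotary embeddings on a quarter of each head. A small illustrative check in plain Python (not lit-gpt's own loader) of the quantities derived from these fields:

    # Illustrative only: read lit_config.json and derive head size, rotary dims,
    # and the padded vocabulary size implied by padding_multiple.
    import json

    with open("checkpoints/EleutherAI/pythia-70m-deduped/lit_config.json") as f:
        cfg = json.load(f)

    head_size = cfg["n_embd"] // cfg["n_head"]               # 512 // 8 = 64
    rope_dims = int(cfg["rotary_percentage"] * head_size)    # 0.25 * 64 = 16 rotary dims per head
    pad = cfg["padding_multiple"]
    padded_vocab = ((cfg["vocab_size"] + pad - 1) // pad) * pad  # 50254 padded up to 50304

    assert padded_vocab == cfg["padded_vocab_size"]
    print(head_size, rope_dims, padded_vocab)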
checkpoints/EleutherAI/pythia-70m-deduped/lit_model.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:936599057c479ae4a41be755ca41f703afac623d4e5709e032890c350c01ab03
+size 140874322
checkpoints/EleutherAI/pythia-70m-deduped/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a03394baafe51e4597b9c90fd5f1d80949da7697553608989fdb3dbe6753f211
+size 166049099
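lit_model.pth and pytorch_model.bin are stored through Git LFS, so the diff shows pointer files (spec version, sha256 oid, byte size) rather than the ~140 MB and ~166 MB binaries themselves; the real weights are fetched with `git lfs pull` or the Hub download APIs. A hedged sketch of checking a downloaded binary against its pointer (the paths in the example call are illustrative):

    # Sketch: verify a downloaded binary against the sha256 oid and size in its LFS pointer.
    # Assumes the actual file has already been fetched (e.g. via `git lfs pull`).
    import hashlib
    from pathlib import Path

    def read_pointer(pointer_path):
        # Pointer files are "key value" lines: version, oid sha256:<hex>, size <bytes>.
        lines = [l for l in Path(pointer_path).read_text().splitlines() if l.strip()]
        fields = dict(line.split(" ", 1) for line in lines)
        return fields["oid"].removeprefix("sha256:"), int(fields["size"])

    def matches_pointer(pointer_path, binary_path):
        oid, size = read_pointer(pointer_path)
        data = Path(binary_path).read_bytes()
        return len(data) == size and hashlib.sha256(data).hexdigest() == oid

    # e.g. matches_pointer("lit_model.pth.pointer", "lit_model.pth")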
checkpoints/EleutherAI/pythia-70m-deduped/tokenizer.json
ADDED
The diff for this file is too large to render; see the raw diff.
checkpoints/EleutherAI/pythia-70m-deduped/tokenizer_config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "name_or_path": "EleutherAI/gpt-neox-20b",
+  "special_tokens_map_file": "/admin/home-hailey/.cache/huggingface/hub/models--EleutherAI--gpt-neox-20b/snapshots/4e49eadb5d14bd22f314ec3f45b69a87b88c7691/special_tokens_map.json",
+  "tokenizer_class": "GPTNeoXTokenizer",
+  "unk_token": "<|endoftext|>"
+}
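The tokenizer config reuses the EleutherAI/gpt-neox-20b BPE tokenizer, with <|endoftext|> serving as BOS, EOS, and UNK. A minimal sketch of loading it straight from the checkpoint directory with transformers (assumes transformers is installed; AutoTokenizer resolves the fast GPT-NeoX tokenizer from the bundled tokenizer.json):

    # Sketch: load the tokenizer shipped in this commit and encode a sample string.
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("checkpoints/EleutherAI/pythia-70m-deduped")
    ids = tok("Hello, Pythia!")["input_ids"]
    print(ids)
    print(tok.eos_token)  # "<|endoftext|>" per tokenizer_config.json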