jbloom committed · Commit d0a7c50 · verified · 1 Parent(s): c7c5fc1

Upload 18 files

v5_128k_layer_10/cfg.json ADDED
@@ -0,0 +1 @@
+ {"architecture": "standard", "d_in": 768, "d_sae": 131072, "dtype": "torch.float32", "device": "mps", "model_name": "gpt2-small", "hook_name": "blocks.10.hook_resid_post", "hook_layer": 10, "hook_head_index": null, "activation_fn_str": "topk", "activation_fn_kwargs": {"k": 32}, "apply_b_dec_to_input": true, "finetuning_scaling_factor": false, "sae_lens_training_version": null, "prepend_bos": false, "dataset_path": "apollo-research/Skylion007-openwebtext-tokenizer-gpt2", "dataset_trust_remote_code": true, "context_size": 64, "normalize_activations": "layer_norm"}
v5_128k_layer_10/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad02be197048acb4249849068b8be4f1357592b175d477eb17486e19c4a4fd5f
+ size 805834048
v5_128k_layer_10/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5465ee5a306ddf6df01f80077845fad02e12f43153a1778b71227170a0f8889
+ size 524368
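Each layer directory ships three artifacts: cfg.json, the SAE weights, and a small sparsity tensor (524,368 bytes, consistent with one float32 statistic per feature plus the safetensors header). Below is a minimal sketch for fetching and inspecting one layer's files with huggingface_hub and safetensors; the repo id is a placeholder, since it is not shown in this commit, and the interpretation of sparsity.safetensors is an assumption.

```python
import json
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

repo_id = "your-org/your-sae-repo"   # placeholder: the actual repo id is not shown here
folder = "v5_128k_layer_10"

cfg_path = hf_hub_download(repo_id, f"{folder}/cfg.json")
weights_path = hf_hub_download(repo_id, f"{folder}/sae_weights.safetensors")
sparsity_path = hf_hub_download(repo_id, f"{folder}/sparsity.safetensors")

with open(cfg_path) as f:
    cfg = json.load(f)

weights = load_file(weights_path)     # dict: tensor name -> torch.Tensor
sparsity = load_file(sparsity_path)   # likely per-feature firing statistics (assumption)

print(cfg["hook_name"])
print({name: tuple(t.shape) for name, t in weights.items()})
```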
v5_128k_layer_11/cfg.json ADDED
@@ -0,0 +1 @@
+ {"architecture": "standard", "d_in": 768, "d_sae": 131072, "dtype": "torch.float32", "device": "mps", "model_name": "gpt2-small", "hook_name": "blocks.11.hook_resid_post", "hook_layer": 11, "hook_head_index": null, "activation_fn_str": "topk", "activation_fn_kwargs": {"k": 32}, "apply_b_dec_to_input": true, "finetuning_scaling_factor": false, "sae_lens_training_version": null, "prepend_bos": false, "dataset_path": "apollo-research/Skylion007-openwebtext-tokenizer-gpt2", "dataset_trust_remote_code": true, "context_size": 64, "normalize_activations": "layer_norm"}
v5_128k_layer_11/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfdefbcec979c1ff3fca73b35777f492754b1f09faa48694203443a0862fde9b
+ size 805834048
v5_128k_layer_11/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce18f650ba6e7301fce194d377ccc26e5316b22df3df8efce902b690289882f8
+ size 524368
v5_128k_layer_6/cfg.json ADDED
@@ -0,0 +1 @@
+ {"architecture": "standard", "d_in": 768, "d_sae": 131072, "dtype": "torch.float32", "device": "mps", "model_name": "gpt2-small", "hook_name": "blocks.6.hook_resid_post", "hook_layer": 6, "hook_head_index": null, "activation_fn_str": "topk", "activation_fn_kwargs": {"k": 32}, "apply_b_dec_to_input": true, "finetuning_scaling_factor": false, "sae_lens_training_version": null, "prepend_bos": false, "dataset_path": "apollo-research/Skylion007-openwebtext-tokenizer-gpt2", "dataset_trust_remote_code": true, "context_size": 64, "normalize_activations": "layer_norm"}
v5_128k_layer_6/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3cbdec8343c54c20c8507bc5aaa7a46c52b10bab5da61cce19c5689536e09d01
+ size 805834048
v5_128k_layer_6/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df22ab8826eb3baab06779f4820e1c1fb868e6ff0e6cf5614b793b1ad4ead355
+ size 524368
v5_128k_layer_7/cfg.json ADDED
@@ -0,0 +1 @@
+ {"architecture": "standard", "d_in": 768, "d_sae": 131072, "dtype": "torch.float32", "device": "mps", "model_name": "gpt2-small", "hook_name": "blocks.7.hook_resid_post", "hook_layer": 7, "hook_head_index": null, "activation_fn_str": "topk", "activation_fn_kwargs": {"k": 32}, "apply_b_dec_to_input": true, "finetuning_scaling_factor": false, "sae_lens_training_version": null, "prepend_bos": false, "dataset_path": "apollo-research/Skylion007-openwebtext-tokenizer-gpt2", "dataset_trust_remote_code": true, "context_size": 64, "normalize_activations": "layer_norm"}
v5_128k_layer_7/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60f2570af614485c60bfaa59cec160f2ce5604be49e24db8fdcd11103ebc406a
+ size 805834048
v5_128k_layer_7/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4367fb7c9b781f2e48fcd1fe179d5bb7b4890848c02c7f45325eebef002d62ae
+ size 524368
v5_128k_layer_8/cfg.json ADDED
@@ -0,0 +1 @@
+ {"architecture": "standard", "d_in": 768, "d_sae": 131072, "dtype": "torch.float32", "device": "mps", "model_name": "gpt2-small", "hook_name": "blocks.8.hook_resid_post", "hook_layer": 8, "hook_head_index": null, "activation_fn_str": "topk", "activation_fn_kwargs": {"k": 32}, "apply_b_dec_to_input": true, "finetuning_scaling_factor": false, "sae_lens_training_version": null, "prepend_bos": false, "dataset_path": "apollo-research/Skylion007-openwebtext-tokenizer-gpt2", "dataset_trust_remote_code": true, "context_size": 64, "normalize_activations": "layer_norm"}
v5_128k_layer_8/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d43b5af3d1a7aaf91f2bf9ff5ed9e653fe5b461349bedf877ed8a5c46fc2a26f
+ size 805834048
v5_128k_layer_8/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c39f5f93455cbff5561b29519daa1ead9ffb7ee7e7e1c4762915a75e56621677
+ size 524368
v5_128k_layer_9/cfg.json ADDED
@@ -0,0 +1 @@
+ {"architecture": "standard", "d_in": 768, "d_sae": 131072, "dtype": "torch.float32", "device": "mps", "model_name": "gpt2-small", "hook_name": "blocks.9.hook_resid_post", "hook_layer": 9, "hook_head_index": null, "activation_fn_str": "topk", "activation_fn_kwargs": {"k": 32}, "apply_b_dec_to_input": true, "finetuning_scaling_factor": false, "sae_lens_training_version": null, "prepend_bos": false, "dataset_path": "apollo-research/Skylion007-openwebtext-tokenizer-gpt2", "dataset_trust_remote_code": true, "context_size": 64, "normalize_activations": "layer_norm"}
v5_128k_layer_9/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c9a30b0b995cfc39319285e97b3a85fd7c5b726d81c4308ac009454b80a356e
+ size 805834048
v5_128k_layer_9/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f7fbe4cabb9b9c28ba895d546eeb53463c8ccbe064d3d874ae7e6f4d276f5be
+ size 524368
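Taken together, the commit adds TopK SAEs for the GPT-2 small residual stream at layers 6 through 11, each hooked at blocks.<layer>.hook_resid_post with context_size 64 and no prepended BOS. Below is a minimal sketch of encoding layer-10 activations with one of these SAEs via TransformerLens; the tensor names (W_enc, b_enc, b_dec) and the exact layer-norm preprocessing implied by normalize_activations: "layer_norm" are assumptions about the stored format, not confirmed by this diff.

```python
import torch
from transformer_lens import HookedTransformer
from safetensors.torch import load_file

model = HookedTransformer.from_pretrained("gpt2-small", device="cpu")
_, cache = model.run_with_cache("The quick brown fox jumps over the lazy dog")
resid = cache["blocks.10.hook_resid_post"]             # [batch, pos, 768]

w = load_file("v5_128k_layer_10/sae_weights.safetensors")

# normalize_activations: "layer_norm" -> layer-norm the input (assumed interpretation)
x = torch.nn.functional.layer_norm(resid, (resid.shape[-1],))
x = x - w["b_dec"]                                      # apply_b_dec_to_input: true
pre_acts = x @ w["W_enc"] + w["b_enc"]                  # [batch, pos, 131072]

# TopK activation with k = 32: keep the 32 largest pre-activations per position
top_vals, top_idx = pre_acts.topk(32, dim=-1)
feature_acts = torch.zeros_like(pre_acts).scatter_(-1, top_idx, top_vals)
print(feature_acts.shape, (feature_acts != 0).sum(dim=-1).unique())
```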