{ "model_class_name": "HookedViT", "model_name": "open-clip:laion/CLIP-ViT-B-32-DataComp.XL-s13B-b90K", "hook_point_layer": 10, "layer_subtype": "hook_mlp_out", "hook_point_head_index": null, "context_size": 50, "use_cached_activations": false, "use_patches_only": false, "cached_activations_path": "activations/_network_scratch_s_sonia.joseph_datasets_kaggle_datasets/open-clip:laion_CLIP-ViT-B-32-DataComp.XL-s13B-b90K/blocks.9.hook_mlp_out", "d_in": 768, "activation_fn_str": "relu", "activation_fn_kwargs": {}, "cls_token_only": false, "max_grad_norm": 1.0, "initialization_method": "encoder_transpose_decoder", "normalize_activations": null, "n_batches_in_buffer": 20, "store_batch_size": 32, "num_workers": 16, "num_epochs": 10, "total_training_images": 13000000, "total_training_tokens": 650000000, "image_size": 224, "device": { "__type__": "torch.device", "value": "cuda" }, "seed": 42, "dtype": { "__type__": "torch.dtype", "value": "torch.float32" }, "architecture": "standard", "verbose": false, "b_dec_init_method": "geometric_median", "expansion_factor": 64, "from_pretrained_path": null, "d_sae": 49152, "l1_coefficient": 8e-05, "lp_norm": 1, "lr": 0.0004, "lr_scheduler_name": "cosineannealingwarmup", "lr_warm_up_steps": 200, "train_batch_size": 4096, "dataset_name": "imagenet1k", "dataset_path": "/network/scratch/s/sonia.joseph/datasets/kaggle_datasets", "dataset_train_path": "/network/scratch/s/sonia.joseph/datasets/kaggle_datasets/ILSVRC/Data/CLS-LOC/train", "dataset_val_path": "/network/scratch/s/sonia.joseph/datasets/kaggle_datasets/ILSVRC/Data/CLS-LOC/val", "use_ghost_grads": true, "feature_sampling_window": 1000, "dead_feature_window": 5000, "dead_feature_threshold": 1e-08, "log_to_wandb": true, "wandb_project": "clip_b_mlp_out_sae_hyperparam_sweep", "wandb_entity": null, "wandb_log_frequency": 100, "n_validation_runs": 10, "n_checkpoints": 10, "checkpoint_path": "/network/scratch/s/sonia.joseph/checkpoints/clip-b/98d43fdd-clip_b_mlp_out_sae_hyperparam_sweep" }