Upload SAE 1000001536
1000001536/cfg.json
ADDED
@@ -0,0 +1 @@
+{"model_name": "gemma-2-2b", "model_class_name": "HookedTransformer", "hook_name": "blocks.1.hook_resid_post", "hook_eval": "NOT_IN_USE", "hook_layer": 1, "hook_head_index": null, "dataset_path": "HuggingFaceFW/fineweb", "dataset_trust_remote_code": true, "streaming": true, "is_dataset_tokenized": false, "context_size": 1024, "use_cached_activations": false, "cached_activations_path": null, "architecture": "standard", "d_in": 2304, "d_sae": 18432, "b_dec_init_method": "geometric_median", "expansion_factor": 8, "activation_fn": "relu", "activation_fn_kwargs": {}, "normalize_sae_decoder": false, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": true, "decoder_orthogonal_init": false, "decoder_heuristic_init": true, "init_encoder_as_decoder_transpose": true, "n_batches_in_buffer": 64, "training_tokens": 1000000000, "finetuning_tokens": 0, "store_batch_size_prompts": 6, "train_batch_size_tokens": 4096, "normalize_activations": "none", "seqpos_slice": [null], "device": "cuda", "act_store_device": "cuda", "seed": 42, "dtype": "float32", "prepend_bos": true, "autocast": true, "autocast_lm": true, "compile_llm": false, "llm_compilation_mode": null, "compile_sae": false, "sae_compilation_mode": null, "adam_beta1": 0, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 10.0, "lp_norm": 1, "scale_sparsity_penalty_by_decoder_norm": true, "l1_warm_up_steps": 15000, "lr": 2e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 2.0000000000000003e-06, "lr_decay_steps": 0, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 2000, "dead_feature_window": 1000, "dead_feature_threshold": 1e-08, "n_eval_batches": 10, "eval_batch_size_prompts": 1, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "gemma-2-2b-layer-1", "wandb_id": null, "run_name": "gemma-2-2b-multistage-tied-layer-1-1000000000-2024-10-28T10:18:00", "wandb_entity": "anti-absorb", "wandb_log_frequency": 100, "eval_every_n_wandb_logs": 50, "resume": false, "n_checkpoints": 6, "checkpoint_path": "/home/ucabdj4/Scratch/gemma2-multistage-tied-sae-ortho-checkpoints-2/icnx4mn9", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {"center_writing_weights": false}, "sae_lens_version": "4.0.9", "sae_lens_training_version": "4.0.9", "tokens_per_buffer": 268435456}
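The config above describes a standard-architecture SAE trained with SAELens 4.0.9 on the blocks.1.hook_resid_post residual stream of gemma-2-2b (d_in 2304, d_sae 18432, i.e. expansion factor 8, ReLU activations, 1B training tokens). A minimal loading sketch, assuming SAELens's SAE.load_from_pretrained accepts a local directory containing cfg.json and sae_weights.safetensors; the local path below is hypothetical:

# Minimal sketch: load this SAE from a local copy of the repo folder.
# Assumes sae_lens >= 4.x; the directory path is hypothetical.
from sae_lens import SAE

sae = SAE.load_from_pretrained(
    path="1000001536",  # folder holding cfg.json and sae_weights.safetensors
    device="cpu",
)

# Hook point and widths come straight from cfg.json.
assert sae.cfg.hook_name == "blocks.1.hook_resid_post"
assert sae.cfg.d_in == 2304 and sae.cfg.d_sae == 18432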
1000001536/sae_weights.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ffc1aeee8a4259b8294f95a6619550de341f7400f41f3c9387ee5b70cdff3d4
+size 339821888
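The 339,821,888-byte weights file matches the config: a standard SAE holds W_enc (d_in × d_sae), W_dec (d_sae × d_in), b_enc (d_sae), and b_dec (d_in), all float32 here. A quick arithmetic check (the 320-byte remainder is the safetensors JSON header, whose exact size varies per file):

# Sanity-check the safetensors file size against cfg.json.
d_in, d_sae, bytes_per_param = 2304, 18432, 4  # float32

n_params = 2 * d_in * d_sae + d_sae + d_in  # W_enc + W_dec + b_enc + b_dec
payload = n_params * bytes_per_param
print(payload)                 # 339821568
print(339_821_888 - payload)   # 320 -> safetensors header + alignment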
1000001536/sparsity.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df2e10b691baa1fd5c32db9a01fd9cc8250a27d0ef5223ebb34879ae9c4b17d2
+size 73808
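The sparsity file holds one float32 value per latent: 18432 × 4 bytes = 73,728 bytes, plus an 80-byte safetensors header, giving the 73,808 bytes above. A sketch for inspecting it, assuming the tensor is stored under the key "sparsity" (the key name is an assumption, not confirmed by this commit):

# Sketch: inspect the per-latent sparsity vector.
# Assumes the tensor key is "sparsity" (unverified assumption).
from safetensors.torch import load_file

tensors = load_file("1000001536/sparsity.safetensors")
sparsity = tensors["sparsity"]
print(sparsity.shape)  # expected: torch.Size([18432]), one value per latent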