Davide Ghilardi committed
Commit 01e9712 · 1 Parent(s): 9a585ba

new file: l1_5/hook_mlp_out/cfg.json

new file: l1_5/hook_mlp_out/sae_weights.safetensors
new file: l1_5/hook_mlp_out/sparsity.safetensors
new file: l1_5/hook_resid_post/cfg.json
new file: l1_5/hook_resid_post/sae_weights.safetensors
new file: l1_5/hook_resid_post/sparsity.safetensors
new file: l1_5/hook_z/cfg.json
new file: l1_5/hook_z/sae_weights.safetensors
new file: l1_5/hook_z/sparsity.safetensors

l1_5/.DS_Store ADDED
Binary file (6.15 kB)
 
l1_5/hook_mlp_out/.DS_Store ADDED
Binary file (6.15 kB)
 
l1_5/hook_mlp_out/cfg.json ADDED
@@ -0,0 +1 @@
+{"model_name": "1l-gelu", "model_class_name": "HookedTransformer", "hook_name": "blocks.0.hook_mlp_out", "hook_eval": "NOT_IN_USE", "hook_layer": 0, "hook_head_index": null, "dataset_path": "ghidav/arithmetics", "dataset_trust_remote_code": true, "streaming": true, "is_dataset_tokenized": true, "context_size": 18, "use_cached_activations": false, "cached_activations_path": null, "architecture": "jumprelu", "d_in": 512, "d_sae": 8192, "b_dec_init_method": "zeros", "expansion_factor": 16, "activation_fn": "relu", "activation_fn_kwargs": {}, "normalize_sae_decoder": true, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": false, "decoder_orthogonal_init": false, "decoder_heuristic_init": false, "init_encoder_as_decoder_transpose": false, "n_batches_in_buffer": 128, "training_tokens": 10000000, "finetuning_tokens": 0, "store_batch_size_prompts": 8, "train_batch_size_tokens": 1024, "normalize_activations": "none", "device": "cuda", "act_store_device": "cuda", "seed": 42, "dtype": "float32", "prepend_bos": false, "autocast": false, "autocast_lm": false, "compile_llm": false, "llm_compilation_mode": null, "compile_sae": false, "sae_compilation_mode": null, "adam_beta1": 0, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 5.0, "lp_norm": 1, "scale_sparsity_penalty_by_decoder_norm": false, "l1_warm_up_steps": 488, "lr": 3e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 3e-06, "lr_decay_steps": 1953, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 2000, "dead_feature_window": 1000, "dead_feature_threshold": 1e-06, "n_eval_batches": 10, "eval_batch_size_prompts": null, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "sae-feature-circuits", "wandb_id": null, "run_name": "L0_hook_mlp_out_L1_5_0", "wandb_entity": null, "wandb_log_frequency": 30, "eval_every_n_wandb_logs": 100, "resume": false, "n_checkpoints": 0, "checkpoint_path": "checkpoints/affo2zht", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {}, "sae_lens_version": "3.18.2", "sae_lens_training_version": "3.18.2", "tokens_per_buffer": 2359296}
l1_5/hook_mlp_out/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:550208f227d7a303f5b24066c384cfdc0750d337c0e70909ad424baaef0ad23c
+size 33622400
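Only a Git LFS pointer is committed for each safetensors file: the sha256 oid and byte size above identify the blob that LFS fetches at checkout. A quick integrity check against this pointer (the local path is an assumption):

# Sketch: verify a downloaded blob against the LFS pointer's oid and size.
import hashlib
import os

path = "l1_5/hook_mlp_out/sae_weights.safetensors"  # assumed local path
expected_oid = "550208f227d7a303f5b24066c384cfdc0750d337c0e70909ad424baaef0ad23c"
expected_size = 33622400

assert os.path.getsize(path) == expected_size
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == expected_oid
print("pointer matches blob")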
l1_5/hook_mlp_out/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e21fdf62c238f7be79f16ca9162b372248a325bc4e1f7ef34572d3ab37f17b14
+size 32848
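The pointer sizes line up with the config, assuming the JumpReLU SAE serializes W_enc, W_dec, b_enc, b_dec, and a per-feature threshold in float32 (the threshold parameter is an inference from the "jumprelu" architecture field, not stated in the diff):

# Sketch: reconcile the two file sizes with d_in = 512, d_sae = 8192.
d_in, d_sae = 512, 8192
n_params = 2 * d_in * d_sae + d_sae + d_in + d_sae  # 8,405,504 floats
print(4 * n_params)  # 33,622,016 -> +384-byte safetensors header = 33,622,400
print(4 * d_sae)     # 32,768 -> +80-byte header = 32,848 (one float per feature)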
l1_5/hook_resid_post/.DS_Store ADDED
Binary file (6.15 kB)
 
l1_5/hook_resid_post/cfg.json ADDED
@@ -0,0 +1 @@
+{"model_name": "1l-gelu", "model_class_name": "HookedTransformer", "hook_name": "blocks.0.hook_resid_post", "hook_eval": "NOT_IN_USE", "hook_layer": 0, "hook_head_index": null, "dataset_path": "ghidav/arithmetics", "dataset_trust_remote_code": true, "streaming": true, "is_dataset_tokenized": true, "context_size": 18, "use_cached_activations": false, "cached_activations_path": null, "architecture": "jumprelu", "d_in": 512, "d_sae": 8192, "b_dec_init_method": "zeros", "expansion_factor": 16, "activation_fn": "relu", "activation_fn_kwargs": {}, "normalize_sae_decoder": true, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": false, "decoder_orthogonal_init": false, "decoder_heuristic_init": false, "init_encoder_as_decoder_transpose": false, "n_batches_in_buffer": 128, "training_tokens": 10000000, "finetuning_tokens": 0, "store_batch_size_prompts": 8, "train_batch_size_tokens": 1024, "normalize_activations": "none", "device": "cuda", "act_store_device": "cuda", "seed": 42, "dtype": "float32", "prepend_bos": false, "autocast": false, "autocast_lm": false, "compile_llm": false, "llm_compilation_mode": null, "compile_sae": false, "sae_compilation_mode": null, "adam_beta1": 0, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 5.0, "lp_norm": 1, "scale_sparsity_penalty_by_decoder_norm": false, "l1_warm_up_steps": 488, "lr": 3e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 3e-06, "lr_decay_steps": 1953, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 2000, "dead_feature_window": 1000, "dead_feature_threshold": 1e-06, "n_eval_batches": 10, "eval_batch_size_prompts": null, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "sae-feature-circuits", "wandb_id": null, "run_name": "L0_hook_resid_post_L1_5_0", "wandb_entity": null, "wandb_log_frequency": 30, "eval_every_n_wandb_logs": 100, "resume": false, "n_checkpoints": 0, "checkpoint_path": "checkpoints/rgomjrjm", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {}, "sae_lens_version": "3.18.2", "sae_lens_training_version": "3.18.2", "tokens_per_buffer": 2359296}
l1_5/hook_resid_post/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69cd31a56a671afa0b2c3bff498eabe3c3eff5d7b3e23675987efb17fd5c7705
+size 33622400
l1_5/hook_resid_post/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4c7f40311c05fe444fa08797beac2404ba668e6553cd6a4514bb24cc3adf72f1
+size 32848
l1_5/hook_z/.DS_Store ADDED
Binary file (6.15 kB)
 
l1_5/hook_z/cfg.json ADDED
@@ -0,0 +1 @@
+{"model_name": "1l-gelu", "model_class_name": "HookedTransformer", "hook_name": "blocks.0.attn.hook_z", "hook_eval": "NOT_IN_USE", "hook_layer": 0, "hook_head_index": null, "dataset_path": "ghidav/arithmetics", "dataset_trust_remote_code": true, "streaming": true, "is_dataset_tokenized": true, "context_size": 18, "use_cached_activations": false, "cached_activations_path": null, "architecture": "jumprelu", "d_in": 512, "d_sae": 8192, "b_dec_init_method": "zeros", "expansion_factor": 16, "activation_fn": "relu", "activation_fn_kwargs": {}, "normalize_sae_decoder": true, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": false, "decoder_orthogonal_init": false, "decoder_heuristic_init": false, "init_encoder_as_decoder_transpose": false, "n_batches_in_buffer": 128, "training_tokens": 10000000, "finetuning_tokens": 0, "store_batch_size_prompts": 8, "train_batch_size_tokens": 1024, "normalize_activations": "none", "device": "cuda", "act_store_device": "cuda", "seed": 42, "dtype": "float32", "prepend_bos": false, "autocast": false, "autocast_lm": false, "compile_llm": false, "llm_compilation_mode": null, "compile_sae": false, "sae_compilation_mode": null, "adam_beta1": 0, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 5.0, "lp_norm": 1, "scale_sparsity_penalty_by_decoder_norm": false, "l1_warm_up_steps": 488, "lr": 3e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 3e-06, "lr_decay_steps": 1953, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 2000, "dead_feature_window": 1000, "dead_feature_threshold": 1e-06, "n_eval_batches": 10, "eval_batch_size_prompts": null, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "sae-feature-circuits", "wandb_id": null, "run_name": "L0_attn.hook_z_L1_5_0", "wandb_entity": null, "wandb_log_frequency": 30, "eval_every_n_wandb_logs": 100, "resume": false, "n_checkpoints": 0, "checkpoint_path": "checkpoints/vm8p8mf3", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {}, "sae_lens_version": "3.18.2", "sae_lens_training_version": "3.18.2", "tokens_per_buffer": 2359296}
l1_5/hook_z/sae_weights.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:05c7e8366d211cd10397660c3312c5762f446378167c9d46c5b0e4c65321bf0d
+size 33622400
l1_5/hook_z/sparsity.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e64b52e809a8e19629f1c4abf375c2494ba4785fc34dc9d40d3479c1e863e23a
+size 32848
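Because the tensors live behind LFS pointers, a plain clone without git-lfs yields pointer text instead of weights; fetching through huggingface_hub resolves the pointers automatically. A sketch, where the repo_id is a hypothetical placeholder for this repository's actual id:

# Sketch: fetch the resolved safetensors blobs from the Hub.
# repo_id below is a placeholder, not the confirmed repository id.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="ghidav/<this-repo>",
    allow_patterns=["l1_5/*"],
)
print(local_dir)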