ghidav committed
Commit 5a8b19c · verified · 1 parent: cfd093c

Upload 3 files

l1_2/hook_resid_post/cfg.json CHANGED
@@ -1 +1,86 @@
- {"model_name": "1l-gelu", "model_class_name": "HookedTransformer", "hook_name": "blocks.0.hook_resid_post", "hook_eval": "NOT_IN_USE", "hook_layer": 0, "hook_head_index": null, "dataset_path": "ghidav/arithmetics", "dataset_trust_remote_code": true, "streaming": true, "is_dataset_tokenized": true, "context_size": 18, "use_cached_activations": false, "cached_activations_path": null, "architecture": "jumprelu", "d_in": 512, "d_sae": 8192, "b_dec_init_method": "zeros", "expansion_factor": 16, "activation_fn": "relu", "activation_fn_kwargs": {}, "normalize_sae_decoder": true, "noise_scale": 0.0, "from_pretrained_path": null, "apply_b_dec_to_input": false, "decoder_orthogonal_init": false, "decoder_heuristic_init": false, "init_encoder_as_decoder_transpose": false, "n_batches_in_buffer": 128, "training_tokens": 10000000, "finetuning_tokens": 0, "store_batch_size_prompts": 8, "train_batch_size_tokens": 1024, "normalize_activations": "none", "device": "cuda", "act_store_device": "cuda", "seed": 42, "dtype": "float32", "prepend_bos": false, "autocast": false, "autocast_lm": false, "compile_llm": false, "llm_compilation_mode": null, "compile_sae": false, "sae_compilation_mode": null, "adam_beta1": 0, "adam_beta2": 0.999, "mse_loss_normalization": null, "l1_coefficient": 2.0, "lp_norm": 1, "scale_sparsity_penalty_by_decoder_norm": false, "l1_warm_up_steps": 488, "lr": 3e-05, "lr_scheduler_name": "constant", "lr_warm_up_steps": 0, "lr_end": 3e-06, "lr_decay_steps": 1953, "n_restart_cycles": 1, "finetuning_method": null, "use_ghost_grads": false, "feature_sampling_window": 2000, "dead_feature_window": 1000, "dead_feature_threshold": 1e-06, "n_eval_batches": 10, "eval_batch_size_prompts": null, "log_to_wandb": true, "log_activations_store_to_wandb": false, "log_optimizer_state_to_wandb": false, "wandb_project": "sae-feature-circuits", "wandb_id": null, "run_name": "L0_hook_resid_post_L1_2_0", "wandb_entity": null, "wandb_log_frequency": 30, "eval_every_n_wandb_logs": 100, "resume": false, "n_checkpoints": 0, "checkpoint_path": "checkpoints/ag0mdvt3", "verbose": true, "model_kwargs": {}, "model_from_pretrained_kwargs": {}, "sae_lens_version": "3.18.2", "sae_lens_training_version": "3.18.2", "tokens_per_buffer": 2359296}
+ {
+ "model_name": "1l-gelu",
+ "model_class_name": "HookedTransformer",
+ "hook_name": "blocks.0.hook_resid_post",
+ "hook_eval": "NOT_IN_USE",
+ "hook_layer": 0,
+ "hook_head_index": null,
+ "dataset_path": "ghidav/arithmetics",
+ "dataset_trust_remote_code": true,
+ "streaming": true,
+ "is_dataset_tokenized": true,
+ "context_size": 18,
+ "use_cached_activations": false,
+ "cached_activations_path": null,
+ "architecture": "jumprelu",
+ "d_in": 512,
+ "d_sae": 8192,
+ "b_dec_init_method": "zeros",
+ "expansion_factor": 16,
+ "activation_fn": "relu",
+ "activation_fn_kwargs": {},
+ "normalize_sae_decoder": true,
+ "noise_scale": 0.0,
+ "from_pretrained_path": null,
+ "apply_b_dec_to_input": false,
+ "decoder_orthogonal_init": false,
+ "decoder_heuristic_init": false,
+ "init_encoder_as_decoder_transpose": false,
+ "n_batches_in_buffer": 128,
+ "training_tokens": 50000000,
+ "finetuning_tokens": 0,
+ "store_batch_size_prompts": 8,
+ "train_batch_size_tokens": 1024,
+ "normalize_activations": "none",
+ "device": "cuda",
+ "act_store_device": "cuda",
+ "seed": 42,
+ "dtype": "float32",
+ "prepend_bos": false,
+ "autocast": false,
+ "autocast_lm": false,
+ "compile_llm": false,
+ "llm_compilation_mode": null,
+ "compile_sae": false,
+ "sae_compilation_mode": null,
+ "adam_beta1": 0,
+ "adam_beta2": 0.999,
+ "mse_loss_normalization": null,
+ "l1_coefficient": 2.0,
+ "lp_norm": 1,
+ "scale_sparsity_penalty_by_decoder_norm": false,
+ "l1_warm_up_steps": 2441,
+ "lr": 0.0005,
+ "lr_scheduler_name": "constant",
+ "lr_warm_up_steps": 0,
+ "lr_end": 5e-05,
+ "lr_decay_steps": 9765,
+ "n_restart_cycles": 1,
+ "finetuning_method": null,
+ "use_ghost_grads": false,
+ "feature_sampling_window": 2000,
+ "dead_feature_window": 1000,
+ "dead_feature_threshold": 1e-06,
+ "n_eval_batches": 10,
+ "eval_batch_size_prompts": null,
+ "log_to_wandb": true,
+ "log_activations_store_to_wandb": false,
+ "log_optimizer_state_to_wandb": false,
+ "wandb_project": "sae-feature-circuits",
+ "wandb_id": null,
+ "run_name": "L0_hook_resid_post_L1_2_0",
+ "wandb_entity": null,
+ "wandb_log_frequency": 30,
+ "eval_every_n_wandb_logs": 100,
+ "resume": false,
+ "n_checkpoints": 0,
+ "checkpoint_path": "checkpoints/lychrdii",
+ "verbose": false,
+ "model_kwargs": {},
+ "model_from_pretrained_kwargs": {
+ "center_writing_weights": false
+ },
+ "sae_lens_version": "3.20.3",
+ "sae_lens_training_version": "3.20.3",
+ "tokens_per_buffer": 2359296
+ }
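
The updated cfg.json describes a JumpReLU SAE on blocks.0.hook_resid_post with d_in 512 and d_sae 8192 (expansion factor 16). Below is a minimal sketch of how the config and the accompanying weights could be inspected locally; it assumes the three files have been downloaded to l1_2/hook_resid_post/ and that the weight tensors follow the usual SAE Lens naming (W_enc, W_dec, b_enc, b_dec, plus a JumpReLU threshold). The tensor names are assumptions, not something this commit states.

```python
# Minimal sketch: inspect cfg.json and the SAE weights in this folder.
# Assumptions (not stated in the commit itself): the files sit locally under
# l1_2/hook_resid_post/, and the weight tensors use the common SAE Lens names
# (W_enc, W_dec, b_enc, b_dec, threshold). Adjust paths/keys if they differ.
import json

from safetensors.numpy import load_file

folder = "l1_2/hook_resid_post"

with open(f"{folder}/cfg.json") as f:
    cfg = json.load(f)

print(cfg["architecture"], cfg["d_in"], cfg["d_sae"])    # jumprelu 512 8192
print("expansion factor:", cfg["d_sae"] // cfg["d_in"])  # 16, as in the config

# Print every tensor name, shape, and dtype stored in the weights file.
weights = load_file(f"{folder}/sae_weights.safetensors")
for name, tensor in weights.items():
    print(name, tensor.shape, tensor.dtype)
```

For what it's worth, the 33,622,400-byte weight file below is roughly what two float32 matrices of shape 512×8192 plus per-feature bias and threshold vectors would come to, which is consistent with the d_in/d_sae values above; that is an inference from the config, not something the diff asserts.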
l1_2/hook_resid_post/sae_weights.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:efe3a105dfc0d86300fe672a5424d136c3485e7bc06cffc2cb9897c1b4957755
+ oid sha256:bc898eb1fd9749afd82b798c5adc056f29c033a758bbc74e7508ccb638eb09f3
  size 33622400
l1_2/hook_resid_post/sparsity.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7ab1be493fcbd5d8074a237d66669df4cfa74066dfd6300ca2e28febe1dc9cf0
+ oid sha256:89ca6807dc93d263b8db038c25c5c579d7b9c90ba0988f0cf841106934127bf9
  size 32848
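
The sparsity file is small enough (32,848 bytes) to be a single float32 vector with one entry per feature. A sketch for reading it and counting dead features under the config's dead_feature_threshold follows, assuming the tensor is stored under a single key (guessed here as "sparsity") and holds log10 firing rates, as recent SAE Lens versions do; both the key name and that interpretation are assumptions.

```python
# Minimal sketch: read the per-feature sparsity vector and count features that
# would be considered dead under the config's dead_feature_threshold.
# Assumptions (not stated in the commit): a single tensor (key guessed as
# "sparsity") of log10 feature firing rates, one entry per d_sae = 8192
# feature. Verify against the actual file before relying on the numbers.
import json
import math

from safetensors.numpy import load_file

folder = "l1_2/hook_resid_post"

with open(f"{folder}/cfg.json") as f:
    cfg = json.load(f)

tensors = load_file(f"{folder}/sparsity.safetensors")
key, log_sparsity = next(iter(tensors.items()))  # expected: "sparsity", shape (8192,)

threshold = math.log10(cfg["dead_feature_threshold"])  # log10(1e-06) = -6.0
dead = int((log_sparsity < threshold).sum())
print(f"{key}: shape {log_sparsity.shape}, {dead} features below a 1e-06 firing rate")
```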