so298 committed (verified)
Commit 68c0b8d · 1 parent: 22665a5

Add files using upload-large-folder tool

config.json ADDED
@@ -0,0 +1 @@
+ {"sae": {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true}, "batch_size": 16, "grad_acc_steps": 1, "micro_acc_steps": 1, "loss_fn": "fvu", "optimizer": "signum", "lr": null, "lr_warmup_steps": 1000, "k_decay_steps": 0, "auxk_alpha": 0.0, "dead_feature_threshold": 10000000, "hookpoints": ["layers.0", "layers.1", "layers.2", "layers.3", "layers.4", "layers.5", "layers.6", "layers.7", "layers.8", "layers.9", "layers.10", "layers.11", "layers.12", "layers.13", "layers.14", "layers.15"], "init_seeds": [0], "layers": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], "layer_stride": 1, "distribute_modules": false, "save_every": 10000, "save_best": false, "finetune": null, "log_to_wandb": true, "run_name": null, "wandb_log_frequency": 1, "save_dir": "checkpoints"}
layers.0/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.0/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3dc38cb6baa0b74495b4af5a2767132992e4e7a2d63114e958d68c7bf66041c0
+ size 1074012496
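
The .safetensors entries are Git LFS pointers, so the weights must be fetched before use (e.g. `git lfs pull`, or `huggingface_hub.snapshot_download`, which resolves LFS objects automatically). A minimal inspection sketch; the tensor names inside the file are not visible in this diff, so it enumerates whatever is there rather than assuming names:

```python
# Hypothetical inspection of one layer's checkpoint after the LFS objects
# have been downloaded. Layout is identical for layers.0 .. layers.15.
import json
from safetensors import safe_open

layer = "layers.0"

with open(f"{layer}/cfg.json") as f:
    cfg = json.load(f)
print(cfg["activation"], cfg["k"], cfg["d_in"])  # topk 32 2048

with safe_open(f"{layer}/sae.safetensors", framework="pt", device="cpu") as f:
    for name in f.keys():
        print(name, tuple(f.get_tensor(name).shape))
```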
layers.1/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.1/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc554505834736e54879cd490e4e427b88bd849c6889e0d49f619d9c98049fc6
+ size 1074012496
layers.10/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.10/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d95f0e1a94493402a1c08716102f852652c22a31a7e1bcce9674f72ad4bc46d
+ size 1074012496
layers.11/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.11/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79e3fc8db79ec472dccba513fa4597b28ad91922c9ede831c92952d3a83ae0a1
+ size 1074012496
layers.12/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.12/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:048c206ecb091cb09f00254bf17df1b4f41aa37fcc7996ab3cceff89562a5c9e
+ size 1074012496
layers.13/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.13/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a399676745267661624baeca7bebb3138edeec71b1616acfd19ed52cdad56545
+ size 1074012496
layers.14/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.14/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ab8fbec505350df9ff79eefe769915e30af0f125a22065878a82808d25b2f85
+ size 1074012496
layers.15/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.15/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42d6b0c155cab9c8525515891b55a25e9590f97b360d148caea0cd39f304831d
+ size 1074012496
layers.2/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.2/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:076df3e9dd3909ade95e75a9f7a673008d61a45a029a285c8d700883cb20073a
+ size 1074012496
layers.3/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.3/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd6234256fb8fe29a7bff33b59a8bdc8308eb1d4ccb15bd2f20f5e7e86e8de17
+ size 1074012496
layers.4/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.4/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b05f38958533ed8e2b8ca3778cc80a8f84aa495efad6841bf2676eacc1538158
+ size 1074012496
layers.5/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.5/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:21e66fc13129ba3d134ad959f7535cf00d5c5aba2dceb0a57e92c2229afb07b2
+ size 1074012496
layers.6/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.6/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73f15f02aa74b6c3bcc15b6e07b464c0bd28328f28a81b07d0615317ac59c681
+ size 1074012496
layers.7/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.7/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7901c846fd68d3262b644c56abd204640956bb2d258b4f399420d2286e468646
+ size 1074012496
layers.8/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.8/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90988e1a82502d25b04ae3adfdc8c64e72341f273096071cdc44f00a60bf786e
+ size 1074012496
layers.9/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 0, "k": 32, "multi_topk": false, "skip_connection": false, "transcode": true, "d_in": 2048}
layers.9/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a51a16466c0f65ff6d6dc44870093284af24239ef4435ad95d5c6723029c49e0
+ size 1074012496