nev committed on
Commit
8f1d1d7
·
verified ·
1 Parent(s): 9265f2c

Add files using upload-large-folder tool

Browse files
Files changed (50) hide show
  1. latents/layers.0.mlp/13107_16383.safetensors +3 -0
  2. latents/layers.0.mlp/config.json +11 -0
  3. latents/layers.1.mlp/config.json +11 -0
  4. latents/layers.10.mlp/config.json +11 -0
  5. latents/layers.11.mlp/config.json +11 -0
  6. latents/layers.12.mlp/9830_13106.safetensors +3 -0
  7. latents/layers.12.mlp/config.json +11 -0
  8. latents/layers.13.mlp/config.json +11 -0
  9. latents/layers.14.mlp/6553_9829.safetensors +3 -0
  10. latents/layers.14.mlp/config.json +11 -0
  11. latents/layers.15.mlp/9830_13106.safetensors +3 -0
  12. latents/layers.15.mlp/config.json +11 -0
  13. latents/layers.16.mlp/config.json +11 -0
  14. latents/layers.17.mlp/config.json +11 -0
  15. latents/layers.18.mlp/9830_13106.safetensors +3 -0
  16. latents/layers.18.mlp/config.json +11 -0
  17. latents/layers.19.mlp/13107_16383.safetensors +3 -0
  18. latents/layers.19.mlp/config.json +11 -0
  19. latents/layers.2.mlp/config.json +11 -0
  20. latents/layers.20.mlp/config.json +11 -0
  21. latents/layers.21.mlp/0_3275.safetensors +3 -0
  22. latents/layers.21.mlp/13107_16383.safetensors +3 -0
  23. latents/layers.21.mlp/3276_6552.safetensors +3 -0
  24. latents/layers.21.mlp/6553_9829.safetensors +3 -0
  25. latents/layers.21.mlp/9830_13106.safetensors +3 -0
  26. latents/layers.21.mlp/config.json +11 -0
  27. latents/layers.22.mlp/0_3275.safetensors +3 -0
  28. latents/layers.22.mlp/3276_6552.safetensors +3 -0
  29. latents/layers.22.mlp/config.json +11 -0
  30. latents/layers.23.mlp/config.json +11 -0
  31. latents/layers.24.mlp/0_3275.safetensors +3 -0
  32. latents/layers.24.mlp/3276_6552.safetensors +3 -0
  33. latents/layers.24.mlp/6553_9829.safetensors +3 -0
  34. latents/layers.24.mlp/9830_13106.safetensors +3 -0
  35. latents/layers.24.mlp/config.json +11 -0
  36. latents/layers.25.mlp/13107_16383.safetensors +3 -0
  37. latents/layers.25.mlp/6553_9829.safetensors +3 -0
  38. latents/layers.25.mlp/9830_13106.safetensors +3 -0
  39. latents/layers.25.mlp/config.json +11 -0
  40. latents/layers.3.mlp/config.json +11 -0
  41. latents/layers.4.mlp/config.json +11 -0
  42. latents/layers.5.mlp/config.json +11 -0
  43. latents/layers.6.mlp/config.json +11 -0
  44. latents/layers.7.mlp/6553_9829.safetensors +3 -0
  45. latents/layers.7.mlp/config.json +11 -0
  46. latents/layers.8.mlp/3276_6552.safetensors +3 -0
  47. latents/layers.8.mlp/9830_13106.safetensors +3 -0
  48. latents/layers.8.mlp/config.json +11 -0
  49. latents/layers.9.mlp/config.json +11 -0
  50. run_config.json +81 -0
latents/layers.0.mlp/13107_16383.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f89d2d95b3a76092cb5986a851860156b052592f41b6fc3535e24332b5c6bd55
3
+ size 142114144
latents/layers.0.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.1.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.10.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.11.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.12.mlp/9830_13106.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ecddb550d3df068c52629e89ff7c16b74f6ea2b4ce592c88cae777b4e92a6404
3
+ size 95995360
latents/layers.12.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.13.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.14.mlp/6553_9829.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:565056fde3ff5de8dff12eb9d0e486769e8fc81df45054b55b0ad7dd8df5f41c
3
+ size 77526256
latents/layers.14.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.15.mlp/9830_13106.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:412ac4c5b74545a6019d31b557e66cef33f9b292cbb13bd34f6b1c8990c5abfa
3
+ size 82303816
latents/layers.15.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.16.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.17.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.18.mlp/9830_13106.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f2025f812cf713d573083724009d27f7ab22f5e1f6ffe0b7df10c0a3bd940f4c
3
+ size 110648904
latents/layers.18.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.19.mlp/13107_16383.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:07727cba5df920f4e241b776e365bc5500ed6731709f0625584a9f1e79a5a6c3
3
+ size 97362792
latents/layers.19.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.2.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.20.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.21.mlp/0_3275.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9584a1711fbd97737639744df06acf81514f5d3283117367929f1d5f9a3b32eb
3
+ size 70023648
latents/layers.21.mlp/13107_16383.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a9beef344d54cf9dab6424dfa42a8ee5e6482bc20c140e5bdc492547557e75fd
3
+ size 68286288
latents/layers.21.mlp/3276_6552.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fa13fb4746c6e281d9fac54011cd2969a4dd5d5371a7e4cbf7ae06e2781807fb
3
+ size 68418576
latents/layers.21.mlp/6553_9829.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e6cc72522cb5295dff979f7164798174621778f5cc680eecf601da10861aadc2
3
+ size 68586504
latents/layers.21.mlp/9830_13106.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:33316bf9b94a56ace92800858a87af0169b50b365e09996a8e6cfcfe0c88ab37
3
+ size 76055176
latents/layers.21.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.22.mlp/0_3275.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:449aeecb93927bcc20cd7da3343482eeab1b55688d3612348e3226882896e177
3
+ size 76803312
latents/layers.22.mlp/3276_6552.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9ef638ec137246ceb78db1e7f101f7d89639bf45f10460a25502e482047144c7
3
+ size 80527400
latents/layers.22.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.23.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.24.mlp/0_3275.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8f8a4d18de008b583f8c719c4d4e60a1bd3fadbb8b10ba8e2e68120bf4b0d47b
3
+ size 63471248
latents/layers.24.mlp/3276_6552.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:767bb0f63fb3846350761f7682f94faddc4bc546ad24122ac2e960c81bd923e2
3
+ size 69611080
latents/layers.24.mlp/6553_9829.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc772c7a8d3a47fbda077d154a82f5ba83a8a43f219fee2c56677ba829a9ba04
3
+ size 62302368
latents/layers.24.mlp/9830_13106.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:037125ae58de5c1e8a08dd2b0d9326d11e220c9e7265bd661e5f44cfb8e8e42e
3
+ size 68635816
latents/layers.24.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.25.mlp/13107_16383.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:428c37ed6b657259068360cfda963652aeed6e84591cc8eb7f8e4d1b2cccbb57
3
+ size 77973200
latents/layers.25.mlp/6553_9829.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a9447886d66ac6821af328f37941b79f9f7ad4695fc18e3d894a38d9ca37a72b
3
+ size 69532872
latents/layers.25.mlp/9830_13106.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5adbc1a3fd8aadfbc0ca39c33857b2a34cd8f7686615ad1c7ecb066f44ddc7cc
3
+ size 85392664
latents/layers.25.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.3.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.4.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.5.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.6.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.7.mlp/6553_9829.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:acdf5e15e0b8982c40ae3f500020af9a81598c22e5db8d02ac3e9ab0a9e379bd
3
+ size 122482800
latents/layers.7.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.8.mlp/3276_6552.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:37f8890cb031e3ecb8ab2d10578dcb66037e3fe9833469b549d4e07a98a92cd4
3
+ size 94163624
latents/layers.8.mlp/9830_13106.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8c283edee97b8d17bdbbc67ac41ae2aa3612d1f716be7775671f17876aec27e
3
+ size 99608256
latents/layers.8.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
latents/layers.9.mlp/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
3
+ "dataset_split": "train",
4
+ "dataset_name": "",
5
+ "dataset_column": "text",
6
+ "batch_size": 16,
7
+ "cache_ctx_len": 256,
8
+ "n_tokens": 1000000,
9
+ "n_splits": 5,
10
+ "model_name": "google/gemma-2-2b"
11
+ }
run_config.json ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cache_cfg": {
3
+ "dataset_repo": "EleutherAI/fineweb-edu-dedup-10b",
4
+ "dataset_split": "train",
5
+ "dataset_name": "",
6
+ "dataset_column": "text",
7
+ "batch_size": 16,
8
+ "cache_ctx_len": 256,
9
+ "n_tokens": 1000000,
10
+ "n_splits": 5
11
+ },
12
+ "constructor_cfg": {
13
+ "faiss_embedding_model": "sentence-transformers/all-MiniLM-L6-v2",
14
+ "faiss_embedding_cache_dir": ".embedding_cache",
15
+ "faiss_embedding_cache_enabled": true,
16
+ "example_ctx_len": 32,
17
+ "min_examples": 200,
18
+ "n_non_activating": 50,
19
+ "center_examples": true,
20
+ "non_activating_source": "random",
21
+ "neighbours_type": "co-occurrence"
22
+ },
23
+ "sampler_cfg": {
24
+ "n_examples_train": 40,
25
+ "n_examples_test": 50,
26
+ "n_quantiles": 10,
27
+ "train_type": "quantiles",
28
+ "test_type": "quantiles",
29
+ "ratio_top": 0.2
30
+ },
31
+ "model": "google/gemma-2-2b",
32
+ "sparse_model": "EleutherAI/gemmascope-transcoders-sparsify",
33
+ "hookpoints": [
34
+ "layers.0.mlp",
35
+ "layers.1.mlp",
36
+ "layers.2.mlp",
37
+ "layers.3.mlp",
38
+ "layers.4.mlp",
39
+ "layers.5.mlp",
40
+ "layers.6.mlp",
41
+ "layers.7.mlp",
42
+ "layers.8.mlp",
43
+ "layers.9.mlp",
44
+ "layers.10.mlp",
45
+ "layers.11.mlp",
46
+ "layers.12.mlp",
47
+ "layers.13.mlp",
48
+ "layers.14.mlp",
49
+ "layers.15.mlp",
50
+ "layers.16.mlp",
51
+ "layers.17.mlp",
52
+ "layers.18.mlp",
53
+ "layers.19.mlp",
54
+ "layers.20.mlp",
55
+ "layers.21.mlp",
56
+ "layers.22.mlp",
57
+ "layers.23.mlp",
58
+ "layers.24.mlp",
59
+ "layers.25.mlp"
60
+ ],
61
+ "explainer_model": "hugging-quants/Meta-Llama-3.1-70B-Instruct-AWQ-INT4",
62
+ "explainer_model_max_len": 5120,
63
+ "explainer_provider": "offline",
64
+ "explainer": "default",
65
+ "scorers": [
66
+ "fuzz",
67
+ "detection"
68
+ ],
69
+ "name": "gemmascope-transcoders-sparsify-1m",
70
+ "max_latents": null,
71
+ "filter_bos": false,
72
+ "log_probs": false,
73
+ "load_in_8bit": false,
74
+ "hf_token": null,
75
+ "pipeline_num_proc": 48,
76
+ "num_gpus": 1,
77
+ "seed": 22,
78
+ "verbose": true,
79
+ "num_examples_per_scorer_prompt": 5,
80
+ "overwrite": []
81
+ }