huypn16 committed
Commit 8055dd0 · verified · 1 Parent(s): 06cadf5

Update layers.8/cfg.json

Files changed (1)
  1. layers.8/cfg.json +26 -26
layers.8/cfg.json CHANGED
@@ -1,28 +1,28 @@
  {
- 'normalize_decoder': True,
- 'num_latents': 0,
- 'k': 64,
- 'multi_topk': False,
- 'layer': 8,
- 'device': 'cuda',
- 'activation_fn_str': 'relu',
- 'd_sae': 24576,
- 'd_in': 1536,
- 'architecture': 'topk',
- 'apply_b_dec_to_input': False,
- 'finetuning_scaling_factor': False,
- 'context_size': 1024,
- 'model_name': 'Qwen/Qwen2.5-1.5B-Instruct',
- 'hook_name': 'blocks.8.hook_sae_acts_topk',
- 'hook_layer': 8,
- 'hook_head_index': None,
- 'prepend_bos': True,
- 'dataset_path': '',
- 'dataset_trust_remote_code': False,
- 'normalize_activations': 'none',
- 'dtype': 'float32',
- 'sae_lens_training_version': 'eleuther',
- 'activation_fn_kwargs': {},
- 'neuronpedia_id': None,
- 'model_from_pretrained_kwargs': {}
+ "normalize_decoder": true,
+ "num_latents": 0,
+ "k": 64,
+ "multi_topk": false,
+ "layer": 8,
+ "device": "cuda",
+ "activation_fn_str": "relu",
+ "d_sae": 24576,
+ "d_in": 1536,
+ "architecture": "topk",
+ "apply_b_dec_to_input": false,
+ "finetuning_scaling_factor": false,
+ "context_size": 1024,
+ "model_name": "Qwen/Qwen2.5-1.5B-Instruct",
+ "hook_name": "blocks.8.hook_sae_acts_topk",
+ "hook_layer": 8,
+ "hook_head_index": null,
+ "prepend_bos": true,
+ "dataset_path": "",
+ "dataset_trust_remote_code": false,
+ "normalize_activations": "none",
+ "dtype": "float32",
+ "sae_lens_training_version": "eleuther",
+ "activation_fn_kwargs": {},
+ "neuronpedia_id": null,
+ "model_from_pretrained_kwargs": {}
  }
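
The change replaces Python literal syntax (single quotes, True/False/None) with valid JSON (double quotes, true/false/null), so the config can be read by a standard JSON parser instead of requiring Python-literal evaluation. A minimal sketch of that difference, assuming the file is read locally from the layers.8/cfg.json path listed in this commit (loading the repo via huggingface_hub or SAE Lens is not shown):

    import ast
    import json
    from pathlib import Path

    cfg_path = Path("layers.8/cfg.json")  # path as listed in this commit
    text = cfg_path.read_text()

    # Before this commit the file used Python literal syntax ('...', True, None),
    # which json.loads rejects; it could only be recovered with ast.literal_eval.
    try:
        cfg = json.loads(text)        # works with the updated, valid-JSON file
    except json.JSONDecodeError:
        cfg = ast.literal_eval(text)  # fallback the old format required

    # Values taken from the config in the diff above.
    assert cfg["architecture"] == "topk" and cfg["k"] == 64
    print(cfg["model_name"], cfg["hook_name"])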