huypn16 commited on
Commit
13bb61a
1 Parent(s): edfb2af

Update layers.16/cfg.json

Browse files
Files changed (1)
  1. layers.16/cfg.json +24 -1
layers.16/cfg.json CHANGED
@@ -1 +1,24 @@
- {"expansion_factor": 16, "normalize_decoder": true, "num_latents": 0, "k": 64, "multi_topk": false, "d_in": 1536}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
+ {
+     "model_name": "Qwen/Qwen2.5-1.5B-Instruct",
+     "architecture": "topk",
+     "hook_name": "blocks.16.hook_resid_post",
+     "hook_layer": 16,
+     "layer": 8,
+     "k": 64,
+     "activation_fn_str": "relu",
+     "d_sae": 24576,
+     "d_in": 1536,
+     "multi_topk": false,
+     "device": "cuda",
+     "apply_b_dec_to_input": false,
+     "finetuning_scaling_factor": false,
+     "context_size": 1024,
+     "hook_head_index": null,
+     "prepend_bos": true,
+     "normalize_activations": "none",
+     "dtype": "float32",
+     "sae_lens_training_version": "eleuther",
+     "neuronpedia_id": null,
+     "activation_fn_kwargs": {},
+     "model_from_pretrained_kwargs": {}
+ }