layers.16/cfg.json
{
"model_name": "Qwen/Qwen2.5-1.5B-Instruct",
"architecture": "topk",
"hook_name": "blocks.16.hook_resid_post",
"hook_layer": 16,
"layer": 8,
"k": 64,
"activation_fn_str": "relu",
"d_sae": 24576,
"d_in": 1536,
"multi_topk": false,
"device": "cuda",
"apply_b_dec_to_input": false,
"finetuning_scaling_factor": false,
"context_size": 1024,
"hook_head_index": null,
"prepend_bos": true,
"normalize_activations": "none",
"dtype": "float32",
"sae_lens_training_version": "eleuther",
"neuronpedia_id": null,
"activation_fn_kwargs": {},
"model_from_pretrained_kwargs": {}
}
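
For reference, the file can be fetched and inspected programmatically. A minimal sketch using huggingface_hub; the repo id below is a hypothetical placeholder, since the actual repository name is not shown on this page:

import json
from huggingface_hub import hf_hub_download

# "huypn16/REPO_NAME" is a placeholder -- substitute the actual repo id.
path = hf_hub_download(repo_id="huypn16/REPO_NAME", filename="layers.16/cfg.json")
with open(path) as f:
    cfg = json.load(f)

print(cfg["hook_name"], cfg["d_in"], cfg["d_sae"], cfg["k"])
# -> blocks.16.hook_resid_post 1536 24576 64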
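
The fields above fully determine the autoencoder's shape: a TopK SAE reading the layer-16 residual stream of Qwen2.5-1.5B-Instruct (d_in = 1536) into d_sae = 24576 latents, keeping only the k = 64 largest pre-activations per token (multi_topk is false, so there is no wider auxiliary top-k). Below is a minimal PyTorch sketch of a forward pass consistent with these fields; the weights are randomly initialized stand-ins and the tensor names (W_enc, b_enc, W_dec, b_dec) are illustrative, not necessarily the checkpoint's own:

import torch

d_in, d_sae, k = 1536, 24576, 64   # from cfg.json

W_enc = torch.randn(d_in, d_sae) / d_in ** 0.5
b_enc = torch.zeros(d_sae)
W_dec = torch.randn(d_sae, d_in) / d_sae ** 0.5
b_dec = torch.zeros(d_in)

def topk_sae(x):
    # apply_b_dec_to_input is false, so the input is encoded as-is.
    pre_acts = torch.relu(x @ W_enc + b_enc)   # activation_fn_str: "relu"
    top = pre_acts.topk(k, dim=-1)             # keep the k largest latents
    acts = torch.zeros_like(pre_acts).scatter_(-1, top.indices, top.values)
    return acts @ W_dec + b_dec                # reconstruction back to d_in

x = torch.randn(4, d_in)    # stand-in for blocks.16.hook_resid_post activations
print(topk_sae(x).shape)    # torch.Size([4, 1536])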