layers.14/cfg.json
{
  "model_name": "Qwen/Qwen2.5-1.5B-Instruct",
  "architecture": "topk",
  "hook_name": "blocks.14.hook_resid_post",
  "hook_layer": 14,
  "layer": 14,
  "k": 64,
  "activation_fn_str": "relu",
  "d_sae": 24576,
  "d_in": 1536,
  "multi_topk": false,
  "device": "cuda",
  "apply_b_dec_to_input": false,
  "finetuning_scaling_factor": false,
  "context_size": 1024,
  "hook_head_index": null,
  "prepend_bos": true,
  "normalize_activations": "none",
  "dtype": "float32",
  "sae_lens_training_version": "eleuther",
  "neuronpedia_id": null,
  "activation_fn_kwargs": {},
  "model_from_pretrained_kwargs": {}
}
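
For orientation, the sketch below shows the forward pass this config describes: a TopK sparse autoencoder with a 1536-dimensional input (the residual stream of Qwen/Qwen2.5-1.5B-Instruct at `blocks.14.hook_resid_post`), 24576 latent features, a ReLU, and only the k = 64 largest activations kept per example. This is a minimal illustrative sketch, not the repository's actual implementation; the class and parameter names (`TopKSAE`, `W_enc`, etc.) are assumptions, and real use would load this repo's trained weights rather than random initializations.

```python
import torch
import torch.nn as nn


class TopKSAE(nn.Module):
    """Illustrative TopK sparse autoencoder matching the cfg.json fields."""

    def __init__(self, d_in: int = 1536, d_sae: int = 24576, k: int = 64):
        super().__init__()
        self.k = k
        self.W_enc = nn.Parameter(torch.empty(d_in, d_sae))
        self.b_enc = nn.Parameter(torch.zeros(d_sae))
        self.W_dec = nn.Parameter(torch.empty(d_sae, d_in))
        self.b_dec = nn.Parameter(torch.zeros(d_in))
        nn.init.kaiming_uniform_(self.W_enc)
        nn.init.kaiming_uniform_(self.W_dec)

    def encode(self, x: torch.Tensor) -> torch.Tensor:
        # "apply_b_dec_to_input" is false, so the input is encoded
        # directly instead of first subtracting the decoder bias.
        pre = x @ self.W_enc + self.b_enc
        acts = torch.relu(pre)  # activation_fn_str: "relu"
        # Keep only the k = 64 largest activations; zero out the rest.
        topk = torch.topk(acts, self.k, dim=-1)
        return torch.zeros_like(acts).scatter_(-1, topk.indices, topk.values)

    def decode(self, feats: torch.Tensor) -> torch.Tensor:
        return feats @ self.W_dec + self.b_dec

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.decode(self.encode(x))


# Stand-in batch of residual-stream vectors; d_in = 1536 matches the
# hidden size of Qwen2.5-1.5B-Instruct at the layer-14 hook point.
sae = TopKSAE()
resid = torch.randn(2, 1536)
recon = sae(resid)
print(recon.shape)  # torch.Size([2, 1536])
```

Since "multi_topk" is false, a single top-k selection is applied per forward pass, and "normalize_activations": "none" means the residual-stream inputs are used as-is, without rescaling before encoding.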