{
  "_name_or_path": "/data0/zijun/CHECKPOINTS/push/layer.23.HF",
  "activation": "topk",
  "architectures": [
    "OpenSae"
  ],
  "auxk_alpha": 0.01,
  "decoder_impl": "triton",
  "feature_size": 262144,
  "hidden_size": 4096,
  "input_hookpoint": "layers.23",
  "input_normalize": true,
  "input_normalize_eps": 1e-05,
  "k": 128,
  "l1_coef": null,
  "model_name": "meta-llama/meta-llama-3.1-8b",
  "multi_topk": 4,
  "normalize_decoder": true,
  "normalize_shift_back": false,
  "output_hookpoint": "layers.23",
  "torch_dtype": "float32",
  "transformers_version": "4.44.1"
}
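
For reference, a minimal sketch of how this configuration could be read and sanity-checked in plain Python, assuming the file above is saved locally as config.json. The field names come directly from the JSON; the file path and the printed summary are illustrative and not part of the OpenSAE API.

import json

# Load the SAE configuration (path is an assumption; point it at the actual config.json).
with open("config.json") as f:
    cfg = json.load(f)

# Core geometry of the sparse autoencoder:
#   hidden_size  -> width of the hooked residual stream (4096 for Llama-3.1-8B)
#   feature_size -> number of dictionary features learned by the SAE (262144)
d_model = cfg["hidden_size"]
n_features = cfg["feature_size"]
expansion = n_features // d_model  # 64x overcomplete dictionary

# Sparsity is enforced with a TopK activation: only the k largest
# pre-activations are kept per token.
assert cfg["activation"] == "topk"
k = cfg["k"]  # 128 active features per token

# The SAE reads from and writes back to the same hook point of the base model.
print(cfg["model_name"], cfg["input_hookpoint"], "->", cfg["output_hookpoint"])
print(f"d_model={d_model}, n_features={n_features} (x{expansion}), k={k}")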