Wauplin (HF staff) committed
Commit 57394c8
1 Parent(s): 834953a

Update config.json

Files changed (1)
  1. config.json +14 -31
config.json CHANGED
@@ -1,33 +1,16 @@
 {
-  "A_init_range": [
-    1,
-    16
-  ],
-  "D_has_hdim": false,
-  "bias": false,
-  "chunk_size": 256,
-  "conv_bias": true,
-  "conv_init": null,
-  "d_conv": 4,
-  "d_model": 768,
-  "d_ssm": null,
-  "d_state": 128,
-  "device": null,
-  "dt_init_floor": 0.0001,
-  "dt_limit": [
-    0.0,
-    Infinity
-  ],
-  "dt_max": 0.1,
-  "dt_min": 0.001,
-  "dtype": null,
-  "expand": 2,
-  "headdim": 64,
-  "layer_idx": null,
-  "ngroups": 1,
-  "norm_before_gate": false,
-  "process_group": null,
-  "rmsnorm": true,
-  "sequence_parallel": true,
-  "use_mem_eff_path": true
+  "d_model": 768,
+  "d_intermediate": 0,
+  "n_layer": 24,
+  "vocab_size": 50277,
+  "ssm_cfg": {
+    "layer": "Mamba2"
+  },
+  "attn_layer_idx": [],
+  "attn_cfg": {},
+  "rms_norm": true,
+  "residual_in_fp32": true,
+  "fused_add_norm": true,
+  "pad_vocab_size_multiple": 16,
+  "tie_embeddings": true
 }
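
The new keys match the fields of the `MambaConfig` dataclass in the `mamba_ssm` package, whereas the removed keys appear to be the constructor arguments of a single `Mamba2` block. A minimal sketch of loading the updated file, assuming `mamba_ssm` is installed, that `MambaConfig` lives at `mamba_ssm.models.config_mamba`, and that `config.json` sits in the current directory:

```python
# Sketch only, not part of this commit: load the updated config.json into
# mamba_ssm's MambaConfig dataclass (field names match the new JSON keys).
import json

from mamba_ssm.models.config_mamba import MambaConfig

with open("config.json") as f:
    cfg = MambaConfig(**json.load(f))

print(cfg.d_model, cfg.n_layer, cfg.ssm_cfg)  # 768 24 {'layer': 'Mamba2'}
```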