Files changed (1)
1. config.json +1 -1
config.json CHANGED
@@ -12,7 +12,7 @@
  "dim": 2048,
  "ffn_type": "swiglu_torch",
  "model": "open_lm_1b_swiglutorch",
- "model_norm": "gain_only_lp_layer_norm",
+ "norm_type": "gain_only_lp_layer_norm",
  "moe_capacity_factor": 1.25,
  "moe_expert_model_parallelism": false,
  "moe_freq": 0,