WolfInk committed on
Commit 3f9cb66 · verified · 1 Parent(s): 74d6160

Update config.json

Files changed (1): config.json (+2, -2)
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "Gemma2ForCausalLM"
+    "ConexuslLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -19,7 +19,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 9216,
   "max_position_embeddings": 8192,
-  "model_type": "gemma2",
+  "model_type": "conexus",
   "num_attention_heads": 8,
   "num_hidden_layers": 26,
   "num_key_value_heads": 4,