smallcloudteam committed
Commit 80807c9
Parent: 000adb0

Upload config

Files changed (1)
config.json +1 -5
config.json CHANGED
@@ -15,9 +15,6 @@
     "type": "flash",
     "use_rotary_emb": null
   },
-  "architectures": [
-    "CodifyModel"
-  ],
   "attn_a_reach": 2048,
   "attn_b_reach": 2048,
   "attn_heads": 32,
@@ -37,14 +34,13 @@
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "mlp_mult": 4,
-  "model_type": "codify",
+  "model_type": "smallcloudai/codify_medium_multi",
   "moe": null,
   "mup_optimal_lr": 0.0005,
   "mup_shapes_file": "lean_former/mup/flash_rot1d_24l/shapes.json",
   "posemb": false,
   "rescale_embeddings": false,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
   "transformers_version": "4.24.0",
   "tune": [
     3,
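
The keys touched by this commit can be verified in a downloaded copy of the file; a minimal sketch in Python, assuming the updated config.json has been saved locally (the path is illustrative, not part of the commit):

import json

# Load a locally saved copy of the updated config.json
# (the path "config.json" is illustrative).
with open("config.json") as f:
    config = json.load(f)

# Keys changed in this commit:
print(config.get("model_type"))     # expected: "smallcloudai/codify_medium_multi"
print(config.get("architectures"))  # expected: None, the key was removed
print(config.get("torch_dtype"))    # expected: None, the key was removed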