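# Model config for keeeeenw/MicroLlamaV2 (micro-llama-300M-v2). The field names
# below match the LitGPT Config schema (an assumption based on keys such as
# mlp_class_name and padded_vocab_size): a 12-layer, 1024-wide Llama-style
# decoder paired with a Llama 3 tokenizer.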
attention_logit_softcapping: null
attention_scores_scalar: null
attn_bias: false
bias: false
block_size: 131072
final_logit_softcapping: null
gelu_approximate: none
head_size: 64
hf_config:
  name: MicroLlamaV2
  org: keeeeenw
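# Feed-forward: mlp_class_name LLaMAMLP (below) selects a gated, Llama-style MLP;
# intermediate_size 5632 is its hidden width (5.5 × n_embd).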
intermediate_size: 5632
lm_head_bias: false
mlp_class_name: LLaMAMLP
n_embd: 1024
n_expert: 0
n_expert_per_token: 0
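# Attention: 16 heads of size 64 give the 1024-dim embedding (16 × 64 = 1024);
# n_query_groups 4 < n_head means grouped-query attention, with each of the
# 4 key/value heads shared by 4 query heads.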
n_head: 16
n_layer: 12
n_query_groups: 4
name: micro-llama-300M-v2
norm_class_name: RMSNorm
norm_eps: 1.0e-05
norm_qk: false
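# Tokenizer sizes: vocab_size 128000 with padded_vocab_size 128256 is consistent
# with the full Llama 3 tokenizer (128000 BPE entries plus 256 special tokens).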
padded_vocab_size: 128256
padding_multiple: 512
parallel_residual: false
post_attention_norm: false
post_mlp_norm: false
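# RoPE: base 500000 with Llama 3.1-style frequency scaling; factor 16.0 stretches
# the original_max_seq_len of 8192 to the 131072-token block_size (8192 × 16).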
rope_adjustments:
  factor: 16.0
  high_freq_factor: 4.0
  low_freq_factor: 1.0
  original_max_seq_len: 8192
rope_base: 500000
rope_condense_ratio: 1
rotary_percentage: 1.0
scale_embeddings: false
shared_attention_norm: false
sliding_window_layer_placing: null
sliding_window_size: null
vocab_size: 128000