Zamba2-7B-Instruct / config.json
{
"architectures": [
"Zamba2ForCausalLM"
],
"adapter_rank": 128,
"add_bias_linear": false,
"attention_dropout": 0.0,
"attention_head_dim": 224,
"attention_hidden_size": 7168,
"bos_token_id": 1,
"chunk_size": 256,
"eos_token_id": 2,
"ffn_hidden_size": 14336,
"hidden_act": "gelu",
"hidden_size": 3584,
"hybrid_layer_ids": [
6,
11,
17,
23,
29,
35,
41,
47,
53,
59,
65,
71,
77
],
"initializer_range": 0.02,
"intermediate_size": 14336,
"kv_channels": 112,
"layers_block_type": [
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba",
"mamba",
"mamba",
"hybrid",
"mamba",
"mamba",
"mamba"
],
"mamba_d_conv": 4,
"mamba_d_state": 64,
"mamba_expand": 2,
"mamba_headdim": 64,
"mamba_ngroups": 2,
"max_position_embeddings": 4096,
"model_type": "zamba2",
"n_mamba_heads": 112,
"num_attention_heads": 32,
"num_hidden_layers": 81,
"num_key_value_heads": 32,
"num_logits_to_keep": 1,
"num_mem_blocks": 2,
"num_query_groups": 32,
"pad_token_id": 0,
"rms_norm_eps": 1e-05,
"rope_theta": 10000,
"time_step_floor": 0.0001,
"time_step_limit": null,
"time_step_max": 0.1,
"time_step_min": 0.001,
"transformers_version": "4.49.0.dev0",
"use_cache": true,
"use_conv_bias": true,
"use_long_context": false,
"use_mem_rope": true,
"use_shared_attention_adapter": false,
"use_shared_mlp_adapter": true,
"vocab_size": 32000
}
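
The sizes in this config are interrelated, so a short consistency check can help a reader connect them. The sketch below is a minimal Python example, not part of the repository: it assumes the JSON above has been saved locally as "config.json" (the path is an assumption; adjust as needed) and uses only the standard library.

# Minimal sketch: sanity-check the derived quantities in this config.
# Assumes the JSON above is saved locally as "config.json".
import json

with open("config.json") as f:
    cfg = json.load(f)

# Every hidden layer gets an entry in layers_block_type.
assert len(cfg["layers_block_type"]) == cfg["num_hidden_layers"]  # 81

# "hybrid" entries mark the layers where a shared attention block is
# interleaved with the Mamba2 backbone; their positions in the list
# match hybrid_layer_ids.
hybrid = [i for i, t in enumerate(cfg["layers_block_type"]) if t == "hybrid"]
assert hybrid == cfg["hybrid_layer_ids"]  # 13 hybrid layers

# Attention width factors as heads * head_dim: 32 * 224 = 7168.
assert cfg["attention_hidden_size"] == cfg["num_attention_heads"] * cfg["attention_head_dim"]

# Mamba head count factors as expand * hidden_size / headdim:
# 2 * 3584 / 64 = 112.
assert cfg["n_mamba_heads"] == cfg["mamba_expand"] * cfg["hidden_size"] // cfg["mamba_headdim"]

print("config is internally consistent")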