{
  "activation_type": "swiglu",
  "alibi": false,
  "alibi_bias_max": 8.0,
  "architectures": [
    "OLMoModelForCausalLM"
  ],
  "attention_dropout": 0.0,
  "attention_layer_norm": false,
  "attention_layer_norm_with_affine": false,
  "bias_for_layer_norm": false,
  "block_group_size": 1,
  "block_type": "sequential",
  "clip_qkv": null,
  "d_model": 2048,
  "embedding_dropout": 0.0,
  "embedding_size": 50304,
  "eos_token_id": 50279,
  "flash_attention": true,
  "include_bias": false,
  "init_cutoff_factor": null,
  "init_device": "meta",
  "init_fn": "mitchell",
  "init_std": 0.02,
  "layer_norm_type": "rms",
  "layer_norm_with_affine": true,
  "max_sequence_length": 2048,
  "mlp_hidden_size": null,
  "mlp_ratio": 8,
  "model_type": "olmo",
  "multi_query_attention": false,
  "n_heads": 16,
  "n_layers": 16,
  "pad_token_id": 1,
  "precision": "amp_bf16",
  "residual_dropout": 0.0,
  "rope": true,
  "rope_full_precision": true,
  "scale_logits": false,
  "ternary": true,
  "transformers_version": "4.38.2",
  "use_cache": true,
  "vocab_size": 50280,
  "weight_tying": true,
  "auto_map": {
    "AutoConfig": "configuration_olmo.OLMoConfig",
    "AutoModelForCausalLM": "modeling_olmo.OLMoForCausalLM"
  }
}
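Because `auto_map` routes `AutoConfig` and `AutoModelForCausalLM` to custom code shipped with the checkpoint (`configuration_olmo.py` / `modeling_olmo.py`) rather than to classes built into transformers, loading requires `trust_remote_code=True`. Below is a minimal loading sketch; the model path is a placeholder, not a real repo id, and the attribute names checked at the end assume the custom `OLMoConfig` exposes the JSON keys above as attributes:

```python
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Placeholder path: substitute the actual local directory or Hub repo
# that contains this config.json and the custom modeling files.
model_path = "path/to/olmo-model"

# trust_remote_code=True is required because auto_map points the Auto*
# classes at code inside the repo, not at built-in transformers classes.
config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    trust_remote_code=True,
    torch_dtype="auto",  # the config trains with amp_bf16; "auto" picks up the stored dtype
)

# Sanity-check a few fields against the config above (assumes the custom
# OLMoConfig mirrors the JSON keys as attributes).
assert config.d_model == 2048 and config.n_layers == 16 and config.n_heads == 16
```

Two fields worth noting when reading the config: with `weight_tying` true, the input embedding and the output projection share one weight matrix; and with `mlp_hidden_size` null, the OLMo reference implementation falls back to `mlp_ratio * d_model` (here 8 × 2048 = 16384) for the feed-forward projection, which the SwiGLU activation then splits into value and gate halves.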