{ "activation_function": "gelu_new", "architectures": [ "ProGenForCausalLM" ], "attn_pdrop": 0.0, "bos_token_id": 1, "embd_pdrop": 0.0, "embed_dim": 1536, "eos_token_id": 2, "gradient_checkpointing": false, "initializer_range": 0.02, "kinase_embedding_dim": 1280, "layer_norm_epsilon": 1e-05, "model_type": "progen", "n_head": 16, "n_inner": null, "n_layer": 27, "n_positions": 2048, "resid_pdrop": 0.0, "rotary_dim": 48, "scale_attn_weights": true, "torch_dtype": "float32", "transformers_version": "4.47.1", "use_cache": false, "use_cross_attention": true, "vocab_size_emb": 32, "vocab_size_lm_head": 32 }