tiny-random-maira2 / config.json
{
  "_name_or_path": "/home/ea/work/my_optimum_intel/optimum-intel/maira2",
  "architectures": [
    "Maira2ForConditionalGeneration"
  ],
  "auto_map": {
    "AutoConfig": "configuration_maira2.Maira2Config",
    "AutoModelForCausalLM": "modeling_maira2.Maira2ForConditionalGeneration",
    "AutoModelForVision2Seq": "modeling_maira2.Maira2ForConditionalGeneration"
  },
  "hidden_size": 16,
  "ignore_index": -100,
  "image_seq_length": 4,
  "image_token_index": 32204,
  "model_type": "maira2",
  "multimodal_projector_bias": true,
  "pad_token_id": 0,
  "projector_hidden_act": "gelu",
  "projector_n_layers": 4,
  "text_config": {
    "_name_or_path": "HuggingFaceM4/tiny-random-LlamaForCausalLM",
    "architectures": [
      "LlamaForCausalLM"
    ],
    "bos_token_id": 0,
    "eos_token_id": 1,
    "head_dim": 4,
    "hidden_size": 16,
    "intermediate_size": 64,
    "model_type": "llama",
    "num_attention_heads": 4,
    "num_hidden_layers": 2,
    "num_key_value_heads": 4,
    "pad_token_id": 2,
    "torch_dtype": "bfloat16",
    "vocab_size": 32207
  },
  "torch_dtype": "float32",
  "transformers_version": "4.48.3",
  "vision_config": {
    "apply_layernorm": true,
    "architectures": [
      "Dinov2Model"
    ],
    "attention_probs_dropout_prob": 0.0,
    "drop_path_rate": 0.0,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.0,
    "hidden_size": 16,
    "image_size": 30,
    "layer_norm_eps": 1e-06,
    "layerscale_value": 1.0,
    "mlp_ratio": 4,
    "model_type": "dinov2",
    "num_attention_heads": 4,
    "num_hidden_layers": 4,
    "out_features": [
      "stage4"
    ],
    "out_indices": [
      4
    ],
    "patch_size": 2,
    "qkv_bias": true,
    "reshape_hidden_states": false,
    "stage_names": [
      "stem",
      "stage1",
      "stage2",
      "stage3",
      "stage4"
    ],
    "torch_dtype": "float32",
    "use_swiglu_ffn": false
  },
  "vision_feature_layer": -1,
  "vision_feature_select_strategy": "default"
}
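
A minimal loading sketch for this config, assuming the repo id katuni4ka/tiny-random-maira2 (inferred from this page's path). Because "auto_map" points at custom configuration_maira2.py and modeling_maira2.py files shipped with the repo, loading requires trust_remote_code=True:

# Sketch only; the repo id below is an assumption inferred from this page.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "katuni4ka/tiny-random-maira2"

# The custom Maira2Config class resolved via "auto_map" needs remote code enabled.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.model_type)                # "maira2"
print(config.text_config.model_type)    # "llama" backbone, 2 tiny hidden layers
print(config.vision_config.model_type)  # "dinov2" tower, 16-dim hidden states

# "auto_map" also routes AutoModelForCausalLM to Maira2ForConditionalGeneration.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)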