{
  "_name_or_path": "google/gemma-2-9b",
  "architectures": [
    "Gemma2ForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "attn_logit_softcapping": 50.0,
  "bos_token_id": 2,
  "cache_implementation": "hybrid",
  "eos_token_id": 1,
  "final_logit_softcapping": 30.0,
  "head_dim": 256,
  "hidden_act": "gelu_pytorch_tanh",
  "hidden_activation": "gelu_pytorch_tanh",
  "hidden_size": 3584,
  "id2label": {
    "0": "age",
    "1": "disability",
    "2": "feminine",
    "3": "general",
    "4": "masculine",
    "5": "neutral",
    "6": "racial",
    "7": "sexuality"
  },
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "label2id": {
    "age": 0,
    "disability": 1,
    "feminine": 2,
    "general": 3,
    "masculine": 4,
    "neutral": 5,
    "racial": 6,
    "sexuality": 7
  },
  "max_position_embeddings": 8192,
  "model_type": "gemma2",
  "num_attention_heads": 16,
  "num_hidden_layers": 42,
  "num_key_value_heads": 8,
  "pad_token_id": 0,
  "problem_type": "multi_label_classification",
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "bfloat16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "nf4",
    "bnb_4bit_use_double_quant": true,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  },
  "query_pre_attn_scalar": 256,
  "rms_norm_eps": 1e-06,
  "rope_theta": 10000.0,
  "sliding_window": 4096,
  "sliding_window_size": 4096,
  "torch_dtype": "float32",
  "transformers_version": "4.42.4",
  "use_cache": true,
  "vocab_size": 256000
}
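
For reference, below is a minimal sketch of how a checkpoint carrying this config is typically loaded and used with transformers and bitsandbytes. The checkpoint path, the example sentence, and the 0.5 decision threshold are assumptions for illustration only; the quantization arguments mirror the "quantization_config" block above, and the sigmoid scoring follows from "problem_type" being "multi_label_classification".

# Minimal loading/inference sketch, assuming the fine-tuned classifier weights
# live at MODEL_DIR (hypothetical path; replace with the actual repo or folder).
import torch
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    BitsAndBytesConfig,
)

MODEL_DIR = "path/to/checkpoint"  # hypothetical, not from the config above

# NF4 4-bit weights, double quantization, bfloat16 compute: mirrors the
# bitsandbytes section of the config.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_DIR,
    quantization_config=bnb_config,
    device_map="auto",
)

# problem_type is multi_label_classification, so each of the 8 labels is
# scored independently with a sigmoid rather than a softmax over classes.
text = "Example sentence to score."  # illustrative input
inputs = tokenizer(text, return_tensors="pt").to(model.device)
with torch.no_grad():
    logits = model(**inputs).logits
probs = torch.sigmoid(logits)[0]
threshold = 0.5  # assumption; tune on validation data
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p > threshold]
print(predicted)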