{
"_name_or_path": "ArGTClass",
"apply_residual_connection_post_layernorm": false,
"architectures": [
"BloomForSequenceClassification"
],
"attention_dropout": 0.0,
"attention_softmax_in_fp32": true,
"bias_dropout_fusion": true,
"bos_token_id": 1,
"eos_token_id": 2,
"hidden_dropout": 0.0,
"hidden_size": 4096,
"id2label": {
"0": "Religion",
"1": "Finance-and-Economy",
"2": "Politics",
"3": "Medical",
"4": "Culture",
"5": "Sports",
"6": "Science-and-Technology",
"7": "Anthropology-and-Sociology",
"8": "Art-and-Literature",
"9": "Education",
"10": "History",
"11": "Law",
"12": "Language-and-Linguistics",
"13": "Philosophy"
},
"initializer_range": 0.02,
"label2id": {
"Anthropology-and-Sociology": 7,
"Art-and-Literature": 8,
"Culture": 4,
"Education": 9,
"History": 10,
"Language-and-Linguistics": 12,
"Law": 11,
"Medical": 3,
"Philosophy": 13,
"Politics": 2,
"Religion": 0,
"Sports": 5,
"Finance-and-Economy": 1,
"Science-and-Technology": 6
},
"layer_norm_epsilon": 1e-05,
"masked_softmax_fusion": true,
"model_type": "bloom",
"n_head": 32,
"n_inner": null,
"n_layer": 30,
"offset_alibi": 100,
"pad_token_id": 3,
"pretraining_tp": 4,
"problem_type": "single_label_classification",
"seq_length": 2048,
"skip_bias_add": true,
"skip_bias_add_qkv": false,
"slow_but_exact": false,
"torch_dtype": "float32",
"transformers_version": "4.31.0",
"unk_token_id": 0,
"use_cache": true,
"vocab_size": 250880
}