{
  "_name_or_path": "data/converted_model_ner_huggingface",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "<s>",
    "1": "<pad>",
    "2": "</s>",
    "3": "<unk>",
    "4": "O",
    "5": "B_ORG",
    "6": "B_PER",
    "7": "B_LOC",
    "8": "B_MEA",
    "9": "I_DTM",
    "10": "I_ORG",
    "11": "E_ORG",
    "12": "I_PER",
    "13": "B_TTL",
    "14": "E_PER",
    "15": "B_DES",
    "16": "E_LOC",
    "17": "B_DTM",
    "18": "B_NUM",
    "19": "I_MEA",
    "20": "E_DTM",
    "21": "E_MEA",
    "22": "I_LOC",
    "23": "I_DES",
    "24": "E_DES",
    "25": "I_NUM",
    "26": "B",
    "27": "E_NUM",
    "28": "B_TRM",
    "29": "B_BRN",
    "30": "I_TRM",
    "31": "E_TRM",
    "32": "I_TTL",
    "33": "I_BRN",
    "34": "E_BRN",
    "35": "E_TTL",
    "36": "B_NAME",
    "37": "__",
    "38": "DDEM",
    "39": "I",
    "40": "MEA_BI",
    "41": "OBRN_B",
    "42": "ORG_I",
    "43": "PER_I",
    "44": "madeupword0000",
    "45": "madeupword0001",
    "46": "madeupword0002",
    "47": "madeupword0003"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "</s>": 2,
    "<pad>": 1,
    "<s>": 0,
    "<unk>": 3,
    "B": 26,
    "B_BRN": 29,
    "B_DES": 15,
    "B_DTM": 17,
    "B_LOC": 7,
    "B_MEA": 8,
    "B_NAME": 36,
    "B_NUM": 18,
    "B_ORG": 5,
    "B_PER": 6,
    "B_TRM": 28,
    "B_TTL": 13,
    "DDEM": 38,
    "E_BRN": 34,
    "E_DES": 24,
    "E_DTM": 20,
    "E_LOC": 16,
    "E_MEA": 21,
    "E_NUM": 27,
    "E_ORG": 11,
    "E_PER": 14,
    "E_TRM": 31,
    "E_TTL": 35,
    "I": 39,
    "I_BRN": 33,
    "I_DES": 23,
    "I_DTM": 9,
    "I_LOC": 22,
    "I_MEA": 19,
    "I_NUM": 25,
    "I_ORG": 10,
    "I_PER": 12,
    "I_TRM": 30,
    "I_TTL": 32,
    "MEA_BI": 40,
    "O": 4,
    "OBRN_B": 41,
    "ORG_I": 42,
    "PER_I": 43,
    "__": 37,
    "madeupword0000": 44,
    "madeupword0001": 45,
    "madeupword0002": 46,
    "madeupword0003": 47
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.27.4",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 74905
}
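
This configuration describes a RoBERTa token-classification checkpoint whose `id2label` map drives the string tags returned at inference. A minimal sketch of loading it with the `transformers` API, assuming the config above sits in a local checkpoint directory alongside the weights and tokenizer files (the directory path and the example sentence are placeholders, not from the original):

```python
# Minimal sketch, assuming a local directory containing this config.json,
# the model weights, and the tokenizer files. Path and input text are
# illustrative placeholders.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_dir = "data/converted_model_ner_huggingface"  # assumed checkpoint directory

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

# The pipeline maps predicted class indices back to strings via id2label above.
ner = pipeline("token-classification", model=model, tokenizer=tokenizer)
print(ner("Example sentence to tag"))
```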