{ "_name_or_path": "NlpHUST/ner-vietnamese-electra-base", "architectures": [ "ElectraForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "embedding_size": 768, "finetuning_task": "ner", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-ORGANIZATION", "1": "I-GENDER", "2": "I-EDUCATION", "3": "I-PERSON", "4": "I-EVENT", "5": "I-LAW", "6": "I-DISEASE", "7": "B-DATETIME", "8": "B-PRODUCT", "9": "I-ROLE", "10": "B-EDUCATION", "11": "B-URL", "12": "B-ROLE", "13": "B-TRANSPORTATION", "14": "B-GENDER", "15": "B-DISEASE", "16": "I-URL", "17": "I-ACTION", "18": "B-PHONE", "19": "I-AGE", "20": "O", "21": "I-PRODUCT", "22": "B-EVENT", "23": "B-ACTION", "24": "I-ORGANIZATION", "25": "B-PERSON", "26": "B-AGE", "27": "I-TRANSPORTATION", "28": "I-EMAIL", "29": "B-EMAIL", "30": "B-QUANTITY-NUMBER", "31": "I-LOCATION", "32": "I-DATETIME", "33": "B-LOCATION", "34": "I-QUANTITY-NUMBER", "35": "I-PHONE", "36": "B-LAW" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-ACTION": 23, "B-AGE": 26, "B-DATETIME": 7, "B-DISEASE": 15, "B-EDUCATION": 10, "B-EMAIL": 29, "B-EVENT": 22, "B-GENDER": 14, "B-LAW": 36, "B-LOCATION": 33, "B-ORGANIZATION": 0, "B-PERSON": 25, "B-PHONE": 18, "B-PRODUCT": 8, "B-QUANTITY-NUMBER": 30, "B-ROLE": 12, "B-TRANSPORTATION": 13, "B-URL": 11, "I-ACTION": 17, "I-AGE": 19, "I-DATETIME": 32, "I-DISEASE": 6, "I-EDUCATION": 2, "I-EMAIL": 28, "I-EVENT": 4, "I-GENDER": 1, "I-LAW": 5, "I-LOCATION": 31, "I-ORGANIZATION": 24, "I-PERSON": 3, "I-PHONE": 35, "I-PRODUCT": 21, "I-QUANTITY-NUMBER": 34, "I-ROLE": 9, "I-TRANSPORTATION": 27, "I-URL": 16, "O": 20 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "electra", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "summary_activation": "gelu", "summary_last_dropout": 0.1, "summary_type": "first", "summary_use_proj": true, "torch_dtype": "float32", "transformers_version": "4.41.2", "type_vocab_size": 2, "use_cache": true, "vocab_size": 62000 }