File size: 2,467 Bytes
{
  "_name_or_path": "NlpHUST/ner-vietnamese-electra-base",
  "architectures": [
    "ElectraForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "embedding_size": 768,
  "finetuning_task": "ner",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "B-ORGANIZATION",
    "1": "I-GENDER",
    "2": "I-EDUCATION",
    "3": "I-PERSON",
    "4": "I-EVENT",
    "5": "I-LAW",
    "6": "I-DISEASE",
    "7": "B-DATETIME",
    "8": "B-PRODUCT",
    "9": "I-ROLE",
    "10": "B-EDUCATION",
    "11": "B-URL",
    "12": "B-ROLE",
    "13": "B-TRANSPORTATION",
    "14": "B-GENDER",
    "15": "B-DISEASE",
    "16": "I-URL",
    "17": "I-ACTION",
    "18": "B-PHONE",
    "19": "I-AGE",
    "20": "O",
    "21": "I-PRODUCT",
    "22": "B-EVENT",
    "23": "B-ACTION",
    "24": "I-ORGANIZATION",
    "25": "B-PERSON",
    "26": "B-AGE",
    "27": "I-TRANSPORTATION",
    "28": "I-EMAIL",
    "29": "B-EMAIL",
    "30": "B-QUANTITY-NUMBER",
    "31": "I-LOCATION",
    "32": "I-DATETIME",
    "33": "B-LOCATION",
    "34": "I-QUANTITY-NUMBER",
    "35": "I-PHONE",
    "36": "B-LAW"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-ACTION": 23,
    "B-AGE": 26,
    "B-DATETIME": 7,
    "B-DISEASE": 15,
    "B-EDUCATION": 10,
    "B-EMAIL": 29,
    "B-EVENT": 22,
    "B-GENDER": 14,
    "B-LAW": 36,
    "B-LOCATION": 33,
    "B-ORGANIZATION": 0,
    "B-PERSON": 25,
    "B-PHONE": 18,
    "B-PRODUCT": 8,
    "B-QUANTITY-NUMBER": 30,
    "B-ROLE": 12,
    "B-TRANSPORTATION": 13,
    "B-URL": 11,
    "I-ACTION": 17,
    "I-AGE": 19,
    "I-DATETIME": 32,
    "I-DISEASE": 6,
    "I-EDUCATION": 2,
    "I-EMAIL": 28,
    "I-EVENT": 4,
    "I-GENDER": 1,
    "I-LAW": 5,
    "I-LOCATION": 31,
    "I-ORGANIZATION": 24,
    "I-PERSON": 3,
    "I-PHONE": 35,
    "I-PRODUCT": 21,
    "I-QUANTITY-NUMBER": 34,
    "I-ROLE": 9,
    "I-TRANSPORTATION": 27,
    "I-URL": 16,
    "O": 20
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "electra",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "summary_activation": "gelu",
  "summary_last_dropout": 0.1,
  "summary_type": "first",
  "summary_use_proj": true,
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 62000
}
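
The configuration above is a standard Hugging Face `transformers` ELECTRA config for token classification: `id2label` and `label2id` define the 37 BIO tags the NER head predicts over Vietnamese text. Below is a minimal usage sketch, assuming the `transformers` library is installed and the `NlpHUST/ner-vietnamese-electra-base` checkpoint can be fetched from the Hub; the example sentence is purely illustrative and not part of the repository.

```python
from transformers import (
    AutoConfig,
    AutoModelForTokenClassification,
    AutoTokenizer,
    pipeline,
)

model_id = "NlpHUST/ner-vietnamese-electra-base"

# The config shown above is what AutoConfig loads; num_labels is inferred
# from the 37 entries in id2label.
config = AutoConfig.from_pretrained(model_id)

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# aggregation_strategy="simple" merges B-/I- subword predictions into
# whole entity spans with a single label.
ner = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")

# Hypothetical Vietnamese input, used only to illustrate the output format.
print(ner("Ông Nguyễn Văn A làm việc tại Hà Nội."))
```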