lilyyellow committed on
Commit 3f818e9
1 Parent(s): e41d2c0

End of training

Files changed (4)
  1. README.md +10 -16
  2. config.json +98 -126
  3. model.safetensors +2 -2
  4. training_args.bin +1 -1
README.md CHANGED
@@ -19,11 +19,11 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [NlpHUST/ner-vietnamese-electra-base](https://huggingface.co/NlpHUST/ner-vietnamese-electra-base) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 1.0322
- - Precision: 0.4590
- - Recall: 0.5400
- - F1: 0.4963
- - Accuracy: 0.7805
+ - Loss: 0.6311
+ - Precision: 0.5170
+ - Recall: 0.5666
+ - F1: 0.5406
+ - Accuracy: 0.8126
 
  ## Model description
 
@@ -48,20 +48,14 @@ The following hyperparameters were used during training:
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
- - num_epochs: 20
+ - num_epochs: 5
 
  ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
- |:-------------:|:-------:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
- | 0.8701 | 1.9929 | 562 | 0.8431 | 0.4537 | 0.4154 | 0.4337 | 0.7907 |
- | 0.5651 | 3.9858 | 1124 | 0.7613 | 0.4524 | 0.4899 | 0.4704 | 0.7898 |
- | 0.4312 | 5.9787 | 1686 | 0.8134 | 0.4654 | 0.5182 | 0.4904 | 0.7902 |
- | 0.3305 | 7.9716 | 2248 | 0.8743 | 0.4417 | 0.5336 | 0.4833 | 0.7762 |
- | 0.255 | 9.9645 | 2810 | 0.9331 | 0.4217 | 0.5375 | 0.4726 | 0.7694 |
- | 0.2071 | 11.9574 | 3372 | 0.9707 | 0.4527 | 0.5435 | 0.4940 | 0.7795 |
- | 0.1984 | 13.9504 | 3934 | 0.9967 | 0.4663 | 0.5336 | 0.4977 | 0.7834 |
- | 0.1702 | 15.9433 | 4496 | 1.0322 | 0.4590 | 0.5400 | 0.4963 | 0.7805 |
+ | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
+ |:-------------:|:------:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
+ | 0.6285 | 1.9973 | 1474 | 0.6449 | 0.5123 | 0.5238 | 0.5180 | 0.8125 |
+ | 0.5081 | 3.9946 | 2948 | 0.6311 | 0.5170 | 0.5666 | 0.5406 | 0.8126 |
 
 
  ### Framework versions
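For readers who want to try the updated checkpoint described in this card, here is a minimal inference sketch. It is not part of the commit: the repository id below is a placeholder (the commit page does not show the repo path), and the example sentence is illustrative only.

```python
# Hedged sketch: load the fine-tuned Vietnamese NER checkpoint and tag one sentence.
# "your-username/your-ner-model" is a placeholder repo id, not taken from the commit.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_id = "your-username/your-ner-model"  # placeholder: replace with the actual repository

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# aggregation_strategy="simple" merges B-/I- subword predictions into entity spans
ner = pipeline("token-classification", model=model, tokenizer=tokenizer,
               aggregation_strategy="simple")

print(ner("Nguyễn Văn A hiện đang sống tại Hà Nội."))
# Output is a list of dicts with entity_group, score, word, start, end
```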
config.json CHANGED
@@ -11,136 +11,108 @@
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
- "0": "B-ACTION",
- "1": "I-EMAIL",
- "2": "B-TRANSPORTATION",
- "3": "B-LANGUAGE",
- "4": "I-DURATION",
- "5": "B-COMMUNICATION",
- "6": "I-MISCELLANEOUS",
- "7": "B-EDUCATION",
- "8": "B-MISCELLANEOUS",
- "9": "I-URL",
- "10": "I-AGE",
- "11": "B-MONEY",
- "12": "B-SKILL",
- "13": "B-ETHNICITY",
- "14": "I-COUNTRY",
- "15": "B-RELIGION",
- "16": "I-DATETIME",
- "17": "B-PRODUCT",
- "18": "I-RELIGION",
- "19": "I-PRODUCT",
- "20": "B-PHONE",
- "21": "B-FOOD",
- "22": "I-ANIMAL",
- "23": "I-LANGUAGE",
- "24": "B-COUNTRY",
- "25": "I-TRANSPORTATION",
- "26": "I-EVENT",
- "27": "B-DISEASE",
- "28": "I-ADDRESS",
- "29": "I-COMMUNICATION",
- "30": "I-MONEY",
- "31": "I-ORGANIZATION",
- "32": "B-ADDRESS",
- "33": "I-PERSON",
- "34": "I-ETHNICITY",
- "35": "I-SPORT",
- "36": "B-GENDER",
- "37": "I-DISEASE",
- "38": "B-EVENT",
- "39": "I-FOOD",
- "40": "B-PERSON",
- "41": "I-LOCATION",
- "42": "B-SPORT",
- "43": "B-QUANTITY",
- "44": "O",
- "45": "B-LOCATION",
- "46": "I-QUANTITY",
- "47": "I-PHONE",
- "48": "B-DURATION",
- "49": "B-URL",
- "50": "B-DATETIME",
- "51": "B-ROLE",
- "52": "I-EDUCATION",
- "53": "I-LAW",
- "54": "B-AGE",
- "55": "I-SKILL",
- "56": "B-EMAIL",
- "57": "B-ANIMAL",
- "58": "I-GENDER",
- "59": "B-ORGANIZATION",
- "60": "I-ROLE",
- "61": "B-LAW",
- "62": "I-ACTION"
+ "0": "B-RELIGION",
+ "1": "B-DATETIME",
+ "2": "B-AGE",
+ "3": "I-QUANTITY",
+ "4": "I-AGE",
+ "5": "I-ROLE",
+ "6": "I-SKILL",
+ "7": "I-RELIGION",
+ "8": "I-TRANSPORTATION",
+ "9": "I-LAW",
+ "10": "I-PHONE",
+ "11": "B-EVENT",
+ "12": "B-QUANTITY",
+ "13": "I-EMAIL",
+ "14": "B-ACTION",
+ "15": "B-ETHNICITY",
+ "16": "I-MISCELLANEOUS",
+ "17": "B-PERSON",
+ "18": "I-PERSON",
+ "19": "I-MONEY",
+ "20": "B-EDUCATION",
+ "21": "B-LOCATION",
+ "22": "I-GENDER",
+ "23": "B-ROLE",
+ "24": "B-MISCELLANEOUS",
+ "25": "I-EVENT",
+ "26": "B-SKILL",
+ "27": "B-EMAIL",
+ "28": "B-MONEY",
+ "29": "I-ACTION",
+ "30": "B-PRODUCT",
+ "31": "B-GENDER",
+ "32": "I-ETHNICITY",
+ "33": "B-ADDRESS",
+ "34": "I-DISEASE",
+ "35": "O",
+ "36": "I-ORGANIZATION",
+ "37": "I-PRODUCT",
+ "38": "B-LAW",
+ "39": "I-DATETIME",
+ "40": "B-PHONE",
+ "41": "I-ADDRESS",
+ "42": "I-LOCATION",
+ "43": "B-DISEASE",
+ "44": "I-EDUCATION",
+ "45": "B-ORGANIZATION",
+ "46": "B-URL",
+ "47": "I-URL",
+ "48": "B-TRANSPORTATION"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
- "B-ACTION": 0,
- "B-ADDRESS": 32,
- "B-AGE": 54,
- "B-ANIMAL": 57,
- "B-COMMUNICATION": 5,
- "B-COUNTRY": 24,
- "B-DATETIME": 50,
- "B-DISEASE": 27,
- "B-DURATION": 48,
- "B-EDUCATION": 7,
- "B-EMAIL": 56,
- "B-ETHNICITY": 13,
- "B-EVENT": 38,
- "B-FOOD": 21,
- "B-GENDER": 36,
- "B-LANGUAGE": 3,
- "B-LAW": 61,
- "B-LOCATION": 45,
- "B-MISCELLANEOUS": 8,
- "B-MONEY": 11,
- "B-ORGANIZATION": 59,
- "B-PERSON": 40,
- "B-PHONE": 20,
- "B-PRODUCT": 17,
- "B-QUANTITY": 43,
- "B-RELIGION": 15,
- "B-ROLE": 51,
- "B-SKILL": 12,
- "B-SPORT": 42,
- "B-TRANSPORTATION": 2,
- "B-URL": 49,
- "I-ACTION": 62,
- "I-ADDRESS": 28,
- "I-AGE": 10,
- "I-ANIMAL": 22,
- "I-COMMUNICATION": 29,
- "I-COUNTRY": 14,
- "I-DATETIME": 16,
- "I-DISEASE": 37,
- "I-DURATION": 4,
- "I-EDUCATION": 52,
- "I-EMAIL": 1,
- "I-ETHNICITY": 34,
- "I-EVENT": 26,
- "I-FOOD": 39,
- "I-GENDER": 58,
- "I-LANGUAGE": 23,
- "I-LAW": 53,
- "I-LOCATION": 41,
- "I-MISCELLANEOUS": 6,
- "I-MONEY": 30,
- "I-ORGANIZATION": 31,
- "I-PERSON": 33,
- "I-PHONE": 47,
- "I-PRODUCT": 19,
- "I-QUANTITY": 46,
- "I-RELIGION": 18,
- "I-ROLE": 60,
- "I-SKILL": 55,
- "I-SPORT": 35,
- "I-TRANSPORTATION": 25,
- "I-URL": 9,
- "O": 44
+ "B-ACTION": 14,
+ "B-ADDRESS": 33,
+ "B-AGE": 2,
+ "B-DATETIME": 1,
+ "B-DISEASE": 43,
+ "B-EDUCATION": 20,
+ "B-EMAIL": 27,
+ "B-ETHNICITY": 15,
+ "B-EVENT": 11,
+ "B-GENDER": 31,
+ "B-LAW": 38,
+ "B-LOCATION": 21,
+ "B-MISCELLANEOUS": 24,
+ "B-MONEY": 28,
+ "B-ORGANIZATION": 45,
+ "B-PERSON": 17,
+ "B-PHONE": 40,
+ "B-PRODUCT": 30,
+ "B-QUANTITY": 12,
+ "B-RELIGION": 0,
+ "B-ROLE": 23,
+ "B-SKILL": 26,
+ "B-TRANSPORTATION": 48,
+ "B-URL": 46,
+ "I-ACTION": 29,
+ "I-ADDRESS": 41,
+ "I-AGE": 4,
+ "I-DATETIME": 39,
+ "I-DISEASE": 34,
+ "I-EDUCATION": 44,
+ "I-EMAIL": 13,
+ "I-ETHNICITY": 32,
+ "I-EVENT": 25,
+ "I-GENDER": 22,
+ "I-LAW": 9,
+ "I-LOCATION": 42,
+ "I-MISCELLANEOUS": 16,
+ "I-MONEY": 19,
+ "I-ORGANIZATION": 36,
+ "I-PERSON": 18,
+ "I-PHONE": 10,
+ "I-PRODUCT": 37,
+ "I-QUANTITY": 3,
+ "I-RELIGION": 7,
+ "I-ROLE": 5,
+ "I-SKILL": 6,
+ "I-TRANSPORTATION": 8,
+ "I-URL": 47,
+ "O": 35
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cd70c2c09791921a9c6aafce4e51054f1c594aa9c95ca3525ae30deff02572ff
- size 532484732
+ oid sha256:7da698a0b0b73061c3e4dd055ca3bb894d4980e9cc6a1ff3f9e35306b2574b30
+ size 532441668
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ce0e2e29626191b7ed49d832eef0b5a154ef4318f7effeb33b1ab2f2cfea3c08
+ oid sha256:66c5c84e18537950235026bfa044f04fbfe7bd929860649bc9644691cddba0dc
  size 5112