Update README.md
Browse files
README.md
CHANGED
@@ -22,52 +22,52 @@ model-index:
|
|
22 |
metrics:
|
23 |
- name: Accuracy
|
24 |
type: Accuracy
|
25 |
-
value: 98.
|
26 |
- name: F1-score
|
27 |
type: F1-score
|
28 |
-
value: 93.
|
29 |
- name: Precision
|
30 |
type: Precision
|
31 |
-
value: 92.
|
32 |
- name: Recall
|
33 |
type: Recall
|
34 |
-
value: 93.
|
35 |
- name: LOC Precision
|
36 |
type: LOC Precision
|
37 |
-
value: 94.
|
38 |
- name: LOC Recall
|
39 |
type: LOC Recall
|
40 |
-
value: 95.
|
41 |
- name: LOC F1-score
|
42 |
type: LOC F1-score
|
43 |
-
value: 94.
|
44 |
- name: MISC Precision
|
45 |
type: MISC Precision
|
46 |
-
value: 85.
|
47 |
- name: MISC Recall
|
48 |
type: MISC Recall
|
49 |
-
value: 85.
|
50 |
- name: MISC F1-score
|
51 |
type: MISC F1-score
|
52 |
-
value: 85.
|
53 |
- name: ORG Precision
|
54 |
type: ORG Precision
|
55 |
-
value: 91.
|
56 |
- name: ORG Recall
|
57 |
type: ORG Recall
|
58 |
-
value: 91.
|
59 |
- name: ORG F1-score
|
60 |
type: ORG F1-score
|
61 |
-
value: 91.
|
62 |
- name: PER Precision
|
63 |
type: PER Precision
|
64 |
-
value:
|
65 |
- name: PER Recall
|
66 |
type: PER Recall
|
67 |
-
value: 96.
|
68 |
- name: PER F1-score
|
69 |
type: PER F1-score
|
70 |
-
value: 95.
|
71 |
---
|
72 |
## XLM-RoBERTa-base NER model for Slavic languages
|
73 |
|
|
|
22 |
metrics:
|
23 |
- name: Accuracy
|
24 |
type: Accuracy
|
25 |
+
value: 98.346
|
26 |
- name: F1-score
|
27 |
type: F1-score
|
28 |
+
value: 93.158
|
29 |
- name: Precision
|
30 |
type: Precision
|
31 |
+
value: 92.700
|
32 |
- name: Recall
|
33 |
type: Recall
|
34 |
+
value: 93.622
|
35 |
- name: LOC Precision
|
36 |
type: LOC Precision
|
37 |
+
value: 94.105
|
38 |
- name: LOC Recall
|
39 |
type: LOC Recall
|
40 |
+
value: 95.513
|
41 |
- name: LOC F1-score
|
42 |
type: LOC F1-score
|
43 |
+
value: 94.804
|
44 |
- name: MISC Precision
|
45 |
type: MISC Precision
|
46 |
+
value: 85.196
|
47 |
- name: MISC Recall
|
48 |
type: MISC Recall
|
49 |
+
value: 85.545
|
50 |
- name: MISC F1-score
|
51 |
type: MISC F1-score
|
52 |
+
value: 85.370
|
53 |
- name: ORG Precision
|
54 |
type: ORG Precision
|
55 |
+
value: 91.226
|
56 |
- name: ORG Recall
|
57 |
type: ORG Recall
|
58 |
+
value: 91.519
|
59 |
- name: ORG F1-score
|
60 |
type: ORG F1-score
|
61 |
+
value: 91.372
|
62 |
- name: PER Precision
|
63 |
type: PER Precision
|
64 |
+
value: 94.995
|
65 |
- name: PER Recall
|
66 |
type: PER Recall
|
67 |
+
value: 96.191
|
68 |
- name: PER F1-score
|
69 |
type: PER F1-score
|
70 |
+
value: 95.589
|
71 |
---
|
72 |
## XLM-RoBERTa-base NER model for Slavic languages
|
73 |
|