Initial commit
- .gitattributes +1 -0
- README.md +724 -0
- benchmark_results.txt +74 -0
- benchmark_translations.zip +3 -0
- config.json +45 -0
- pytorch_model.bin +3 -0
- source.spm +3 -0
- special_tokens_map.json +1 -0
- target.spm +3 -0
- tokenizer_config.json +1 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -29,3 +29,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.spm filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,724 @@
---
language:
- bg
- bs_Latn
- es
- fr
- hr
- it
- itc
- mk
- pt
- sh
- sl
- sr_Cyrl
- sr_Latn
- zls

tags:
- translation
- opus-mt-tc

license: cc-by-4.0
model-index:
- name: opus-mt-tc-big-zls-itc
  results:
  - task:
      name: Translation bul-fra
      type: translation
      args: bul-fra
    dataset:
      name: flores101-devtest
      type: flores_101
      args: bul fra devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 34.4
    - name: chr-F
      type: chrf
      value: 0.60640
  - task:
      name: Translation bul-ita
      type: translation
      args: bul-ita
    dataset:
      name: flores101-devtest
      type: flores_101
      args: bul ita devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.0
    - name: chr-F
      type: chrf
      value: 0.54135
  - task:
      name: Translation bul-por
      type: translation
      args: bul-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: bul por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 32.4
    - name: chr-F
      type: chrf
      value: 0.59322
  - task:
      name: Translation bul-ron
      type: translation
      args: bul-ron
    dataset:
      name: flores101-devtest
      type: flores_101
      args: bul ron devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 27.1
    - name: chr-F
      type: chrf
      value: 0.55558
  - task:
      name: Translation bul-spa
      type: translation
      args: bul-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: bul spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.4
    - name: chr-F
      type: chrf
      value: 0.50962
  - task:
      name: Translation hrv-fra
      type: translation
      args: hrv-fra
    dataset:
      name: flores101-devtest
      type: flores_101
      args: hrv fra devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 33.1
    - name: chr-F
      type: chrf
      value: 0.59349
  - task:
      name: Translation hrv-ita
      type: translation
      args: hrv-ita
    dataset:
      name: flores101-devtest
      type: flores_101
      args: hrv ita devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 23.5
    - name: chr-F
      type: chrf
      value: 0.52980
  - task:
      name: Translation hrv-por
      type: translation
      args: hrv-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: hrv por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 30.2
    - name: chr-F
      type: chrf
      value: 0.57402
  - task:
      name: Translation hrv-ron
      type: translation
      args: hrv-ron
    dataset:
      name: flores101-devtest
      type: flores_101
      args: hrv ron devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 25.9
    - name: chr-F
      type: chrf
      value: 0.53650
  - task:
      name: Translation hrv-spa
      type: translation
      args: hrv-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: hrv spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 21.5
    - name: chr-F
      type: chrf
      value: 0.50161
  - task:
      name: Translation mkd-fra
      type: translation
      args: mkd-fra
    dataset:
      name: flores101-devtest
      type: flores_101
      args: mkd fra devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 35.2
    - name: chr-F
      type: chrf
      value: 0.60801
  - task:
      name: Translation mkd-ita
      type: translation
      args: mkd-ita
    dataset:
      name: flores101-devtest
      type: flores_101
      args: mkd ita devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 23.9
    - name: chr-F
      type: chrf
      value: 0.53543
  - task:
      name: Translation mkd-por
      type: translation
      args: mkd-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: mkd por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 33.9
    - name: chr-F
      type: chrf
      value: 0.59648
  - task:
      name: Translation mkd-ron
      type: translation
      args: mkd-ron
    dataset:
      name: flores101-devtest
      type: flores_101
      args: mkd ron devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 28.0
    - name: chr-F
      type: chrf
      value: 0.54998
  - task:
      name: Translation mkd-spa
      type: translation
      args: mkd-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: mkd spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.8
    - name: chr-F
      type: chrf
      value: 0.51079
  - task:
      name: Translation slv-fra
      type: translation
      args: slv-fra
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slv fra devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 31.5
    - name: chr-F
      type: chrf
      value: 0.58233
  - task:
      name: Translation slv-ita
      type: translation
      args: slv-ita
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slv ita devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.4
    - name: chr-F
      type: chrf
      value: 0.52390
  - task:
      name: Translation slv-por
      type: translation
      args: slv-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slv por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 29.0
    - name: chr-F
      type: chrf
      value: 0.56436
  - task:
      name: Translation slv-ron
      type: translation
      args: slv-ron
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slv ron devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 25.0
    - name: chr-F
      type: chrf
      value: 0.53116
  - task:
      name: Translation slv-spa
      type: translation
      args: slv-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slv spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 21.1
    - name: chr-F
      type: chrf
      value: 0.49621
  - task:
      name: Translation srp_Cyrl-fra
      type: translation
      args: srp_Cyrl-fra
    dataset:
      name: flores101-devtest
      type: flores_101
      args: srp_Cyrl fra devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 36.0
    - name: chr-F
      type: chrf
      value: 0.62110
  - task:
      name: Translation srp_Cyrl-ita
      type: translation
      args: srp_Cyrl-ita
    dataset:
      name: flores101-devtest
      type: flores_101
      args: srp_Cyrl ita devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 23.9
    - name: chr-F
      type: chrf
      value: 0.54083
  - task:
      name: Translation srp_Cyrl-por
      type: translation
      args: srp_Cyrl-por
    dataset:
      name: flores101-devtest
      type: flores_101
      args: srp_Cyrl por devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 34.9
    - name: chr-F
      type: chrf
      value: 0.61248
  - task:
      name: Translation srp_Cyrl-ron
      type: translation
      args: srp_Cyrl-ron
    dataset:
      name: flores101-devtest
      type: flores_101
      args: srp_Cyrl ron devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 28.8
    - name: chr-F
      type: chrf
      value: 0.56235
  - task:
      name: Translation srp_Cyrl-spa
      type: translation
      args: srp_Cyrl-spa
    dataset:
      name: flores101-devtest
      type: flores_101
      args: srp_Cyrl spa devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.8
    - name: chr-F
      type: chrf
      value: 0.51698
  - task:
      name: Translation bul-fra
      type: translation
      args: bul-fra
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: bul-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 52.9
    - name: chr-F
      type: chrf
      value: 0.68971
  - task:
      name: Translation bul-ita
      type: translation
      args: bul-ita
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: bul-ita
    metrics:
    - name: BLEU
      type: bleu
      value: 45.1
    - name: chr-F
      type: chrf
      value: 0.66412
  - task:
      name: Translation bul-spa
      type: translation
      args: bul-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: bul-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 49.7
    - name: chr-F
      type: chrf
      value: 0.66672
  - task:
      name: Translation hbs-fra
      type: translation
      args: hbs-fra
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hbs-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 48.1
    - name: chr-F
      type: chrf
      value: 0.66434
  - task:
      name: Translation hbs-ita
      type: translation
      args: hbs-ita
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hbs-ita
    metrics:
    - name: BLEU
      type: bleu
      value: 53.5
    - name: chr-F
      type: chrf
      value: 0.72381
  - task:
      name: Translation hbs-spa
      type: translation
      args: hbs-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hbs-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 58.0
    - name: chr-F
      type: chrf
      value: 0.73105
  - task:
      name: Translation hrv-fra
      type: translation
      args: hrv-fra
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hrv-fra
    metrics:
    - name: BLEU
      type: bleu
      value: 44.3
    - name: chr-F
      type: chrf
      value: 0.62800
  - task:
      name: Translation hrv-spa
      type: translation
      args: hrv-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hrv-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 57.5
    - name: chr-F
      type: chrf
      value: 0.71370
  - task:
      name: Translation mkd-spa
      type: translation
      args: mkd-spa
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: mkd-spa
    metrics:
    - name: BLEU
      type: bleu
      value: 62.1
    - name: chr-F
      type: chrf
      value: 0.75366
  - task:
      name: Translation srp_Latn-ita
      type: translation
      args: srp_Latn-ita
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: srp_Latn-ita
    metrics:
    - name: BLEU
      type: bleu
      value: 59.6
    - name: chr-F
      type: chrf
      value: 0.76045
---
# opus-mt-tc-big-zls-itc

## Table of Contents
- [Model Details](#model-details)
- [Uses](#uses)
- [Risks, Limitations and Biases](#risks-limitations-and-biases)
- [How to Get Started With the Model](#how-to-get-started-with-the-model)
- [Training](#training)
- [Evaluation](#evaluation)
- [Citation Information](#citation-information)
- [Acknowledgements](#acknowledgements)

## Model Details

Neural machine translation model for translating from South Slavic languages (zls) to Italic languages (itc).

This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained with [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++. The models have been converted to PyTorch using the transformers library by Hugging Face. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train).

**Model Description:**
- **Developed by:** Language Technology Research Group at the University of Helsinki
- **Model Type:** Translation (transformer-big)
- **Release:** 2022-08-10
- **License:** CC-BY-4.0
- **Language(s):**
  - Source Language(s): bos_Latn bul hbs hrv mkd slv srp_Cyrl srp_Latn
  - Target Language(s): fra ita por spa
  - Valid Target Language Labels: >>fra<< >>ita<< >>por<< >>spa<<
- **Original Model:** [opusTCv20210807_transformer-big_2022-08-10.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/zls-itc/opusTCv20210807_transformer-big_2022-08-10.zip)
- **Resources for more information:**
  - [OPUS-MT-train GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)
  - More information about released models for this language pair: [OPUS-MT zls-itc README](https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/zls-itc/README.md)
  - [More information about MarianNMT models in the transformers library](https://huggingface.co/docs/transformers/model_doc/marian)
  - [Tatoeba Translation Challenge](https://github.com/Helsinki-NLP/Tatoeba-Challenge/)

This is a multilingual translation model with multiple target languages. A sentence-initial language token is required in the form of `>>id<<` (id = valid target language ID), e.g. `>>fra<<`.

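To confirm which `>>id<<` tokens a downloaded checkpoint actually accepts, the tokenizer can list them directly. This is a small illustrative sketch, not part of the original card; it assumes the checkpoint is loaded from the `Helsinki-NLP/opus-mt-tc-big-zls-itc` repository used in the examples below.

```python
from transformers import MarianTokenizer

# Load the tokenizer and list the ">>xxx<<" target-language tokens in its vocabulary.
tokenizer = MarianTokenizer.from_pretrained("Helsinki-NLP/opus-mt-tc-big-zls-itc")
print(tokenizer.supported_language_codes)
# should include at least '>>fra<<', '>>ita<<', '>>por<<' and '>>spa<<'
```
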
## Uses

This model can be used for translation and text-to-text generation.

## Risks, Limitations and Biases

**CONTENT WARNING: Readers should be aware that the model is trained on various public data sets that may contain content that is disturbing, offensive, and can propagate historical and current stereotypes.**

Significant research has explored bias and fairness issues with language models (see, e.g., [Sheng et al. (2021)](https://aclanthology.org/2021.acl-long.330.pdf) and [Bender et al. (2021)](https://dl.acm.org/doi/pdf/10.1145/3442188.3445922)).

## How to Get Started With the Model

A short example:

```python
from transformers import MarianMTModel, MarianTokenizer

src_text = [
    ">>fra<< Dobar dan, kako si?",
    ">>spa<< Znam da je ovo čudno."
]

model_name = "Helsinki-NLP/opus-mt-tc-big-zls-itc"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))

for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))

# expected output:
#     Bonjour, comment allez-vous ?
#     Sé que esto es raro.
```

You can also use OPUS-MT models with the transformers pipelines, for example:

```python
from transformers import pipeline

pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-big-zls-itc")
print(pipe(">>fra<< Dobar dan, kako si?"))

# expected output: Bonjour, comment allez-vous ?
```

## Training

- **Data**: opusTCv20210807 ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge))
- **Pre-processing**: SentencePiece (spm32k,spm32k); see the segmentation sketch after this list
- **Model Type:** transformer-big
- **Original MarianNMT Model**: [opusTCv20210807_transformer-big_2022-08-10.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/zls-itc/opusTCv20210807_transformer-big_2022-08-10.zip)
- **Training Scripts**: [GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)

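As a rough illustration of the spm32k pre-processing (not part of the original card), the bundled `source.spm` file can be loaded with the `sentencepiece` package to inspect how source text is segmented into subwords; the file path is a placeholder and assumes the repository files have been downloaded locally. The MarianTokenizer shown above applies this segmentation automatically.

```python
import sentencepiece as spm

# Load the source-side SentencePiece model shipped with this repository
# (path assumes the repo has been cloned or downloaded locally).
sp = spm.SentencePieceProcessor(model_file="source.spm")

# Show the subword pieces produced for one of the example sentences.
print(sp.encode("Dobar dan, kako si?", out_type=str))
```
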
## Evaluation

* test set translations: [opusTCv20210807_transformer-big_2022-08-10.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zls-itc/opusTCv20210807_transformer-big_2022-08-10.test.txt)
* test set scores: [opusTCv20210807_transformer-big_2022-08-10.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zls-itc/opusTCv20210807_transformer-big_2022-08-10.eval.txt)
* benchmark results: [benchmark_results.txt](benchmark_results.txt)
* benchmark output: [benchmark_translations.zip](benchmark_translations.zip)

| langpair | testset | chr-F | BLEU | #sent | #words |
|----------|---------|-------|-------|-------|--------|
| bul-fra | tatoeba-test-v2021-08-07 | 0.68971 | 52.9 | 446 | 3669 |
| bul-ita | tatoeba-test-v2021-08-07 | 0.66412 | 45.1 | 2500 | 16951 |
| bul-spa | tatoeba-test-v2021-08-07 | 0.66672 | 49.7 | 286 | 1783 |
| hbs-fra | tatoeba-test-v2021-08-07 | 0.66434 | 48.1 | 474 | 3370 |
| hbs-ita | tatoeba-test-v2021-08-07 | 0.72381 | 53.5 | 534 | 3208 |
| hbs-spa | tatoeba-test-v2021-08-07 | 0.73105 | 58.0 | 607 | 3766 |
| hrv-fra | tatoeba-test-v2021-08-07 | 0.62800 | 44.3 | 258 | 1943 |
| hrv-spa | tatoeba-test-v2021-08-07 | 0.71370 | 57.5 | 254 | 1702 |
| mkd-spa | tatoeba-test-v2021-08-07 | 0.75366 | 62.1 | 217 | 1121 |
| srp_Latn-ita | tatoeba-test-v2021-08-07 | 0.76045 | 59.6 | 212 | 1292 |
| bul-fra | flores101-devtest | 0.60640 | 34.4 | 1012 | 28343 |
| bul-ita | flores101-devtest | 0.54135 | 24.0 | 1012 | 27306 |
| bul-por | flores101-devtest | 0.59322 | 32.4 | 1012 | 26519 |
| bul-ron | flores101-devtest | 0.55558 | 27.1 | 1012 | 26799 |
| bul-spa | flores101-devtest | 0.50962 | 22.4 | 1012 | 29199 |
| hrv-fra | flores101-devtest | 0.59349 | 33.1 | 1012 | 28343 |
| hrv-ita | flores101-devtest | 0.52980 | 23.5 | 1012 | 27306 |
| hrv-por | flores101-devtest | 0.57402 | 30.2 | 1012 | 26519 |
| hrv-ron | flores101-devtest | 0.53650 | 25.9 | 1012 | 26799 |
| hrv-spa | flores101-devtest | 0.50161 | 21.5 | 1012 | 29199 |
| mkd-fra | flores101-devtest | 0.60801 | 35.2 | 1012 | 28343 |
| mkd-ita | flores101-devtest | 0.53543 | 23.9 | 1012 | 27306 |
| mkd-por | flores101-devtest | 0.59648 | 33.9 | 1012 | 26519 |
| mkd-ron | flores101-devtest | 0.54998 | 28.0 | 1012 | 26799 |
| mkd-spa | flores101-devtest | 0.51079 | 22.8 | 1012 | 29199 |
| slv-fra | flores101-devtest | 0.58233 | 31.5 | 1012 | 28343 |
| slv-ita | flores101-devtest | 0.52390 | 22.4 | 1012 | 27306 |
| slv-por | flores101-devtest | 0.56436 | 29.0 | 1012 | 26519 |
| slv-ron | flores101-devtest | 0.53116 | 25.0 | 1012 | 26799 |
| slv-spa | flores101-devtest | 0.49621 | 21.1 | 1012 | 29199 |
| srp_Cyrl-fra | flores101-devtest | 0.62110 | 36.0 | 1012 | 28343 |
| srp_Cyrl-ita | flores101-devtest | 0.54083 | 23.9 | 1012 | 27306 |
| srp_Cyrl-por | flores101-devtest | 0.61248 | 34.9 | 1012 | 26519 |
| srp_Cyrl-ron | flores101-devtest | 0.56235 | 28.8 | 1012 | 26799 |
| srp_Cyrl-spa | flores101-devtest | 0.51698 | 22.8 | 1012 | 29199 |

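The BLEU and chr-F numbers above can be recomputed with [sacrebleu](https://github.com/mjpost/sacrebleu) once the system outputs and references for a language pair have been extracted from the linked test set files into separate plain-text files. The sketch below is illustrative and not part of the original card; the file names `hyp.bul-fra.txt` and `ref.bul-fra.txt` are placeholders.

```python
from sacrebleu.metrics import BLEU, CHRF

# Placeholder files: one hypothesis / reference per line, in the same order.
with open("hyp.bul-fra.txt", encoding="utf-8") as f:
    hypotheses = [line.rstrip("\n") for line in f]
with open("ref.bul-fra.txt", encoding="utf-8") as f:
    references = [line.rstrip("\n") for line in f]

bleu = BLEU()   # default sacrebleu settings
chrf = CHRF()   # default chr-F settings
print(bleu.corpus_score(hypotheses, [references]))
print(chrf.corpus_score(hypotheses, [references]))
```
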
## Citation Information

* Publications: [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/) and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (Please cite if you use this model.)

```
@inproceedings{tiedemann-thottingal-2020-opus,
    title = "{OPUS}-{MT} {--} Building open translation services for the World",
    author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh},
    booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation",
    month = nov,
    year = "2020",
    address = "Lisboa, Portugal",
    publisher = "European Association for Machine Translation",
    url = "https://aclanthology.org/2020.eamt-1.61",
    pages = "479--480",
}

@inproceedings{tiedemann-2020-tatoeba,
    title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}",
    author = {Tiedemann, J{\"o}rg},
    booktitle = "Proceedings of the Fifth Conference on Machine Translation",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.wmt-1.139",
    pages = "1174--1182",
}
```

## Acknowledgements

The work is supported by the [European Language Grid](https://www.european-language-grid.eu/) as [pilot project 2866](https://live.european-language-grid.eu/catalogue/#/resource/projects/2866), by the [FoTran project](https://www.helsinki.fi/en/researchgroups/natural-language-understanding-with-cross-lingual-grounding), funded by the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement No 771113), and by the [MeMAD project](https://memad.eu/), funded by the European Union’s Horizon 2020 Research and Innovation Programme under grant agreement No 780069. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland.

## Model conversion info

* transformers version: 4.16.2
* OPUS-MT git hash: 8b9f0b0
* port time: Fri Aug 12 13:57:47 EEST 2022
* port machine: LM0-400-22516.local
benchmark_results.txt
ADDED
@@ -0,0 +1,74 @@
bul-fra	flores101-dev	0.60515	34.0	997	26706
bul-ita	flores101-dev	0.53356	22.8	997	25840
bul-por	flores101-dev	0.58413	31.3	997	25287
bul-ron	flores101-dev	0.55870	27.9	997	25616
bul-spa	flores101-dev	0.50507	22.2	997	27793
hrv-fra	flores101-dev	0.59160	32.9	997	26706
hrv-ita	flores101-dev	0.52624	22.6	997	25840
hrv-por	flores101-dev	0.56567	29.7	997	25287
hrv-ron	flores101-dev	0.53565	26.0	997	25616
hrv-spa	flores101-dev	0.49356	21.2	997	27793
mkd-fra	flores101-dev	0.60379	34.6	997	26706
mkd-ita	flores101-dev	0.53334	23.5	997	25840
mkd-por	flores101-dev	0.59103	32.9	997	25287
mkd-ron	flores101-dev	0.55096	28.2	997	25616
mkd-spa	flores101-dev	0.50218	22.3	997	27793
slv-fra	flores101-dev	0.57857	31.1	997	26706
slv-ita	flores101-dev	0.52418	22.2	997	25840
slv-por	flores101-dev	0.55881	28.3	997	25287
slv-ron	flores101-dev	0.53118	25.9	997	25616
slv-spa	flores101-dev	0.49112	21.2	997	27793
srp_Cyrl-fra	flores101-dev	0.62093	36.3	997	26706
srp_Cyrl-ita	flores101-dev	0.53829	23.4	997	25840
srp_Cyrl-por	flores101-dev	0.60678	34.7	997	25287
srp_Cyrl-ron	flores101-dev	0.56395	29.0	997	25616
srp_Cyrl-spa	flores101-dev	0.50659	22.1	997	27793
bul-fra	flores101-devtest	0.60640	34.4	1012	28343
bul-ita	flores101-devtest	0.54135	24.0	1012	27306
bul-por	flores101-devtest	0.59322	32.4	1012	26519
bul-ron	flores101-devtest	0.55558	27.1	1012	26799
bul-spa	flores101-devtest	0.50962	22.4	1012	29199
hrv-fra	flores101-devtest	0.59349	33.1	1012	28343
hrv-ita	flores101-devtest	0.52980	23.5	1012	27306
hrv-por	flores101-devtest	0.57402	30.2	1012	26519
hrv-ron	flores101-devtest	0.53650	25.9	1012	26799
hrv-spa	flores101-devtest	0.50161	21.5	1012	29199
mkd-fra	flores101-devtest	0.60801	35.2	1012	28343
mkd-ita	flores101-devtest	0.53543	23.9	1012	27306
mkd-por	flores101-devtest	0.59648	33.9	1012	26519
mkd-ron	flores101-devtest	0.54998	28.0	1012	26799
mkd-spa	flores101-devtest	0.51079	22.8	1012	29199
slv-fra	flores101-devtest	0.58233	31.5	1012	28343
slv-ita	flores101-devtest	0.52390	22.4	1012	27306
slv-por	flores101-devtest	0.56436	29.0	1012	26519
slv-ron	flores101-devtest	0.53116	25.0	1012	26799
slv-spa	flores101-devtest	0.49621	21.1	1012	29199
srp_Cyrl-fra	flores101-devtest	0.62110	36.0	1012	28343
srp_Cyrl-ita	flores101-devtest	0.54083	23.9	1012	27306
srp_Cyrl-por	flores101-devtest	0.61248	34.9	1012	26519
srp_Cyrl-ron	flores101-devtest	0.56235	28.8	1012	26799
srp_Cyrl-spa	flores101-devtest	0.51698	22.8	1012	29199
hbs-spa	tatoeba-test-v2020-07-28	0.73160	58.1	606	3762
hrv-spa	tatoeba-test-v2020-07-28	0.71492	57.6	253	1698
slv-fra	tatoeba-test-v2020-07-28	0.26482	10.3	442	3662
bul-fra	tatoeba-test-v2021-03-30	0.68891	52.8	448	3689
bul-spa	tatoeba-test-v2021-03-30	0.66812	49.8	288	1791
hbs-fra	tatoeba-test-v2021-03-30	0.66430	48.0	479	3404
hbs-ita	tatoeba-test-v2021-03-30	0.72403	53.4	537	3226
hbs-spa	tatoeba-test-v2021-03-30	0.73080	58.1	615	3812
hrv-fra	tatoeba-test-v2021-03-30	0.62774	44.1	261	1969
hrv-spa	tatoeba-test-v2021-03-30	0.71268	57.4	261	1739
mkd-spa	tatoeba-test-v2021-03-30	0.75405	62.1	223	1149
slv-fra	tatoeba-test-v2021-03-30	0.26515	10.2	444	3673
srp_Latn-ita	tatoeba-test-v2021-03-30	0.76034	59.5	213	1297
bul-fra	tatoeba-test-v2021-08-07	0.68971	52.9	446	3669
bul-ita	tatoeba-test-v2021-08-07	0.66412	45.1	2500	16951
bul-spa	tatoeba-test-v2021-08-07	0.66672	49.7	286	1783
hbs-fra	tatoeba-test-v2021-08-07	0.66434	48.1	474	3370
hbs-ita	tatoeba-test-v2021-08-07	0.72381	53.5	534	3208
hbs-spa	tatoeba-test-v2021-08-07	0.73105	58.0	607	3766
hrv-fra	tatoeba-test-v2021-08-07	0.62800	44.3	258	1943
hrv-spa	tatoeba-test-v2021-08-07	0.71370	57.5	254	1702
mkd-spa	tatoeba-test-v2021-08-07	0.75366	62.1	217	1121
slv-fra	tatoeba-test-v2021-08-07	0.26575	11.4	448	3792
srp_Latn-ita	tatoeba-test-v2021-08-07	0.76045	59.6	212	1292
benchmark_translations.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c08e88024b0728240f100e49c837b1ae39616cd81050113f908bbefb6ffc44b7
size 9194324
config.json
ADDED
@@ -0,0 +1,45 @@
{
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "MarianMTModel"
  ],
  "attention_dropout": 0.0,
  "bad_words_ids": [
    [
      59412
    ]
  ],
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "decoder_start_token_id": 59412,
  "decoder_vocab_size": 59413,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 34288,
  "forced_eos_token_id": 34288,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_length": 512,
  "max_position_embeddings": 1024,
  "model_type": "marian",
  "normalize_embedding": false,
  "num_beams": 4,
  "num_hidden_layers": 6,
  "pad_token_id": 59412,
  "scale_embedding": true,
  "share_encoder_decoder_embeddings": true,
  "static_position_embeddings": true,
  "torch_dtype": "float16",
  "transformers_version": "4.18.0.dev0",
  "use_cache": true,
  "vocab_size": 59413
}
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:447149996e9bc9e6cc1c946ff230ac6e384bbbebccabe01547fa0ffa1bda3c1b
size 596280899
source.spm
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4fe699c379148912d2c00945aec2f34716e12701ffbe707fbda59fc229ee0b6f
size 884536
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
target.spm
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6f01392fe95e7d77c0c79eb784837cb3566c8beaeef019d6882babac3c4ad859
size 807963
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
{"source_lang": "zls", "target_lang": "itc", "unk_token": "<unk>", "eos_token": "</s>", "pad_token": "<pad>", "model_max_length": 512, "sp_model_kwargs": {}, "separate_vocabs": false, "special_tokens_map_file": null, "name_or_path": "marian-models/opusTCv20210807_transformer-big_2022-08-10/zls-itc", "tokenizer_class": "MarianTokenizer"}
vocab.json
ADDED
The diff for this file is too large to render.