---
library_name: transformers
language:
- anp
- as
- awa
- bal
- bho
- bn
- bpy
- de
- diq
- dv
- en
- es
- fa
- fr
- gbm
- glk
- gu
- hi
- hif
- hne
- hns
- jdt
- kok
- ks
- ku
- lah
- lrc
- mag
- mai
- mr
- mzn
- ne
- or
- os
- pa
- pal
- pi
- ps
- pt
- rhg
- rmy
- rom
- sa
- sd
- si
- skr
- syl
- tg
- tly
- ur
- zza
tags:
- translation
- opus-mt-tc-bible
license: apache-2.0
model-index:
- name: opus-mt-tc-bible-big-deu_eng_fra_por_spa-iir
results:
- task:
name: Translation deu-ben
type: translation
args: deu-ben
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-ben
metrics:
- name: BLEU
type: bleu
value: 10.6
- name: chr-F
type: chrf
value: 0.44005
- task:
name: Translation deu-guj
type: translation
args: deu-guj
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-guj
metrics:
- name: BLEU
type: bleu
value: 11.1
- name: chr-F
type: chrf
value: 0.39522
- task:
name: Translation deu-hin
type: translation
args: deu-hin
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-hin
metrics:
- name: BLEU
type: bleu
value: 22.3
- name: chr-F
type: chrf
value: 0.48448
- task:
name: Translation deu-hne
type: translation
args: deu-hne
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-hne
metrics:
- name: BLEU
type: bleu
value: 13.8
- name: chr-F
type: chrf
value: 0.42659
- task:
name: Translation deu-mag
type: translation
args: deu-mag
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-mag
metrics:
- name: BLEU
type: bleu
value: 14.0
- name: chr-F
type: chrf
value: 0.42477
- task:
name: Translation deu-pan
type: translation
args: deu-pan
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-pan
metrics:
- name: BLEU
type: bleu
value: 11.3
- name: chr-F
type: chrf
value: 0.37308
- task:
name: Translation deu-pes
type: translation
args: deu-pes
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-pes
metrics:
- name: BLEU
type: bleu
value: 14.9
- name: chr-F
type: chrf
value: 0.42726
- task:
name: Translation deu-prs
type: translation
args: deu-prs
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-prs
metrics:
- name: BLEU
type: bleu
value: 12.9
- name: chr-F
type: chrf
value: 0.38830
- task:
name: Translation deu-tgk
type: translation
args: deu-tgk
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-tgk
metrics:
- name: BLEU
type: bleu
value: 12.9
- name: chr-F
type: chrf
value: 0.40932
- task:
name: Translation deu-urd
type: translation
args: deu-urd
dataset:
name: flores200-devtest
type: flores200-devtest
args: deu-urd
metrics:
- name: BLEU
type: bleu
value: 14.4
- name: chr-F
type: chrf
value: 0.41250
- task:
name: Translation eng-ben
type: translation
args: eng-ben
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-ben
metrics:
- name: BLEU
type: bleu
value: 17.1
- name: chr-F
type: chrf
value: 0.51361
- task:
name: Translation eng-bho
type: translation
args: eng-bho
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-bho
metrics:
- name: BLEU
type: bleu
value: 12.1
- name: chr-F
type: chrf
value: 0.38188
- task:
name: Translation eng-guj
type: translation
args: eng-guj
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-guj
metrics:
- name: BLEU
type: bleu
value: 22.4
- name: chr-F
type: chrf
value: 0.54231
- task:
name: Translation eng-hin
type: translation
args: eng-hin
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-hin
metrics:
- name: BLEU
type: bleu
value: 33.7
- name: chr-F
type: chrf
value: 0.58371
- task:
name: Translation eng-hne
type: translation
args: eng-hne
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-hne
metrics:
- name: BLEU
type: bleu
value: 19.9
- name: chr-F
type: chrf
value: 0.47591
- task:
name: Translation eng-mag
type: translation
args: eng-mag
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-mag
metrics:
- name: BLEU
type: bleu
value: 22.2
- name: chr-F
type: chrf
value: 0.51070
- task:
name: Translation eng-mai
type: translation
args: eng-mai
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-mai
metrics:
- name: BLEU
type: bleu
value: 10.0
- name: chr-F
type: chrf
value: 0.39249
- task:
name: Translation eng-mar
type: translation
args: eng-mar
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-mar
metrics:
- name: BLEU
type: bleu
value: 14.8
- name: chr-F
type: chrf
value: 0.48733
- task:
name: Translation eng-pan
type: translation
args: eng-pan
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-pan
metrics:
- name: BLEU
type: bleu
value: 18.1
- name: chr-F
type: chrf
value: 0.45015
- task:
name: Translation eng-pes
type: translation
args: eng-pes
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-pes
metrics:
- name: BLEU
type: bleu
value: 21.1
- name: chr-F
type: chrf
value: 0.48588
- task:
name: Translation eng-prs
type: translation
args: eng-prs
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-prs
metrics:
- name: BLEU
type: bleu
value: 24.5
- name: chr-F
type: chrf
value: 0.51879
- task:
name: Translation eng-sin
type: translation
args: eng-sin
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-sin
metrics:
- name: BLEU
type: bleu
value: 10.6
- name: chr-F
type: chrf
value: 0.43823
- task:
name: Translation eng-tgk
type: translation
args: eng-tgk
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-tgk
metrics:
- name: BLEU
type: bleu
value: 17.8
- name: chr-F
type: chrf
value: 0.47323
- task:
name: Translation eng-urd
type: translation
args: eng-urd
dataset:
name: flores200-devtest
type: flores200-devtest
args: eng-urd
metrics:
- name: BLEU
type: bleu
value: 20.4
- name: chr-F
type: chrf
value: 0.48212
- task:
name: Translation fra-ben
type: translation
args: fra-ben
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-ben
metrics:
- name: BLEU
type: bleu
value: 11.0
- name: chr-F
type: chrf
value: 0.44029
- task:
name: Translation fra-guj
type: translation
args: fra-guj
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-guj
metrics:
- name: BLEU
type: bleu
value: 11.0
- name: chr-F
type: chrf
value: 0.38634
- task:
name: Translation fra-hin
type: translation
args: fra-hin
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-hin
metrics:
- name: BLEU
type: bleu
value: 22.6
- name: chr-F
type: chrf
value: 0.48406
- task:
name: Translation fra-hne
type: translation
args: fra-hne
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-hne
metrics:
- name: BLEU
type: bleu
value: 13.9
- name: chr-F
type: chrf
value: 0.42353
- task:
name: Translation fra-mag
type: translation
args: fra-mag
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-mag
metrics:
- name: BLEU
type: bleu
value: 14.3
- name: chr-F
type: chrf
value: 0.42678
- task:
name: Translation fra-pan
type: translation
args: fra-pan
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-pan
metrics:
- name: BLEU
type: bleu
value: 10.6
- name: chr-F
type: chrf
value: 0.36980
- task:
name: Translation fra-pes
type: translation
args: fra-pes
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-pes
metrics:
- name: BLEU
type: bleu
value: 15.5
- name: chr-F
type: chrf
value: 0.43526
- task:
name: Translation fra-prs
type: translation
args: fra-prs
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-prs
metrics:
- name: BLEU
type: bleu
value: 12.8
- name: chr-F
type: chrf
value: 0.37985
- task:
name: Translation fra-tgk
type: translation
args: fra-tgk
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-tgk
metrics:
- name: BLEU
type: bleu
value: 13.7
- name: chr-F
type: chrf
value: 0.42982
- task:
name: Translation fra-urd
type: translation
args: fra-urd
dataset:
name: flores200-devtest
type: flores200-devtest
args: fra-urd
metrics:
- name: BLEU
type: bleu
value: 14.2
- name: chr-F
type: chrf
value: 0.41438
- task:
name: Translation por-ben
type: translation
args: por-ben
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-ben
metrics:
- name: BLEU
type: bleu
value: 10.4
- name: chr-F
type: chrf
value: 0.43390
- task:
name: Translation por-guj
type: translation
args: por-guj
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-guj
metrics:
- name: BLEU
type: bleu
value: 10.6
- name: chr-F
type: chrf
value: 0.37374
- task:
name: Translation por-hin
type: translation
args: por-hin
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-hin
metrics:
- name: BLEU
type: bleu
value: 23.6
- name: chr-F
type: chrf
value: 0.49524
- task:
name: Translation por-hne
type: translation
args: por-hne
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-hne
metrics:
- name: BLEU
type: bleu
value: 13.9
- name: chr-F
type: chrf
value: 0.42269
- task:
name: Translation por-mag
type: translation
args: por-mag
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-mag
metrics:
- name: BLEU
type: bleu
value: 15.0
- name: chr-F
type: chrf
value: 0.42753
- task:
name: Translation por-pan
type: translation
args: por-pan
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-pan
metrics:
- name: BLEU
type: bleu
value: 10.7
- name: chr-F
type: chrf
value: 0.36653
- task:
name: Translation por-pes
type: translation
args: por-pes
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-pes
metrics:
- name: BLEU
type: bleu
value: 15.4
- name: chr-F
type: chrf
value: 0.43194
- task:
name: Translation por-prs
type: translation
args: por-prs
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-prs
metrics:
- name: BLEU
type: bleu
value: 12.1
- name: chr-F
type: chrf
value: 0.36411
- task:
name: Translation por-tgk
type: translation
args: por-tgk
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-tgk
metrics:
- name: BLEU
type: bleu
value: 13.2
- name: chr-F
type: chrf
value: 0.41860
- task:
name: Translation por-urd
type: translation
args: por-urd
dataset:
name: flores200-devtest
type: flores200-devtest
args: por-urd
metrics:
- name: BLEU
type: bleu
value: 14.8
- name: chr-F
type: chrf
value: 0.41799
- task:
name: Translation spa-hin
type: translation
args: spa-hin
dataset:
name: flores200-devtest
type: flores200-devtest
args: spa-hin
metrics:
- name: BLEU
type: bleu
value: 16.4
- name: chr-F
type: chrf
value: 0.43777
- task:
name: Translation spa-hne
type: translation
args: spa-hne
dataset:
name: flores200-devtest
type: flores200-devtest
args: spa-hne
metrics:
- name: BLEU
type: bleu
value: 11.2
- name: chr-F
type: chrf
value: 0.39492
- task:
name: Translation spa-mag
type: translation
args: spa-mag
dataset:
name: flores200-devtest
type: flores200-devtest
args: spa-mag
metrics:
- name: BLEU
type: bleu
value: 11.4
- name: chr-F
type: chrf
value: 0.39882
- task:
name: Translation spa-pes
type: translation
args: spa-pes
dataset:
name: flores200-devtest
type: flores200-devtest
args: spa-pes
metrics:
- name: BLEU
type: bleu
value: 12.2
- name: chr-F
type: chrf
value: 0.40856
- task:
name: Translation spa-prs
type: translation
args: spa-prs
dataset:
name: flores200-devtest
type: flores200-devtest
args: spa-prs
metrics:
- name: BLEU
type: bleu
value: 12.8
- name: chr-F
type: chrf
value: 0.40361
- task:
name: Translation spa-tgk
type: translation
args: spa-tgk
dataset:
name: flores200-devtest
type: flores200-devtest
args: spa-tgk
metrics:
- name: BLEU
type: bleu
value: 10.8
- name: chr-F
type: chrf
value: 0.40100
- task:
name: Translation spa-urd
type: translation
args: spa-urd
dataset:
name: flores200-devtest
type: flores200-devtest
args: spa-urd
metrics:
- name: BLEU
type: bleu
value: 10.9
- name: chr-F
type: chrf
value: 0.38539
- task:
name: Translation deu-pan
type: translation
args: deu-pan
dataset:
name: flores101-devtest
type: flores_101
args: deu pan devtest
metrics:
- name: BLEU
type: bleu
value: 10.9
- name: chr-F
type: chrf
value: 0.36883
- task:
name: Translation eng-ben
type: translation
args: eng-ben
dataset:
name: flores101-devtest
type: flores_101
args: eng ben devtest
metrics:
- name: BLEU
type: bleu
value: 17.0
- name: chr-F
type: chrf
value: 0.51055
- task:
name: Translation eng-guj
type: translation
args: eng-guj
dataset:
name: flores101-devtest
type: flores_101
args: eng guj devtest
metrics:
- name: BLEU
type: bleu
value: 22.3
- name: chr-F
type: chrf
value: 0.53972
- task:
name: Translation eng-hin
type: translation
args: eng-hin
dataset:
name: flores101-devtest
type: flores_101
args: eng hin devtest
metrics:
- name: BLEU
type: bleu
value: 33.4
- name: chr-F
type: chrf
value: 0.57980
- task:
name: Translation eng-mar
type: translation
args: eng-mar
dataset:
name: flores101-devtest
type: flores_101
args: eng mar devtest
metrics:
- name: BLEU
type: bleu
value: 14.3
- name: chr-F
type: chrf
value: 0.48206
- task:
name: Translation eng-pus
type: translation
args: eng-pus
dataset:
name: flores101-devtest
type: flores_101
args: eng pus devtest
metrics:
- name: BLEU
type: bleu
value: 11.9
- name: chr-F
type: chrf
value: 0.37264
- task:
name: Translation eng-urd
type: translation
args: eng-urd
dataset:
name: flores101-devtest
type: flores_101
args: eng urd devtest
metrics:
- name: BLEU
type: bleu
value: 20.5
- name: chr-F
type: chrf
value: 0.48050
- task:
name: Translation fra-ben
type: translation
args: fra-ben
dataset:
name: flores101-devtest
type: flores_101
args: fra ben devtest
metrics:
- name: BLEU
type: bleu
value: 10.9
- name: chr-F
type: chrf
value: 0.43806
- task:
name: Translation fra-pan
type: translation
args: fra-pan
dataset:
name: flores101-devtest
type: flores_101
args: fra pan devtest
metrics:
- name: BLEU
type: bleu
value: 11.0
- name: chr-F
type: chrf
value: 0.37066
- task:
name: Translation por-ben
type: translation
args: por-ben
dataset:
name: flores101-devtest
type: flores_101
args: por ben devtest
metrics:
- name: BLEU
type: bleu
value: 10.0
- name: chr-F
type: chrf
value: 0.42730
- task:
name: Translation por-pan
type: translation
args: por-pan
dataset:
name: flores101-devtest
type: flores_101
args: por pan devtest
metrics:
- name: BLEU
type: bleu
value: 10.7
- name: chr-F
type: chrf
value: 0.36551
- task:
name: Translation spa-hin
type: translation
args: spa-hin
dataset:
name: flores101-devtest
type: flores_101
args: spa hin devtest
metrics:
- name: BLEU
type: bleu
value: 16.0
- name: chr-F
type: chrf
value: 0.43371
- task:
name: Translation spa-tgk
type: translation
args: spa-tgk
dataset:
name: flores101-devtest
type: flores_101
args: spa tgk devtest
metrics:
- name: BLEU
type: bleu
value: 10.6
- name: chr-F
type: chrf
value: 0.39762
- task:
name: Translation deu-fas
type: translation
args: deu-fas
dataset:
name: ntrex128
type: ntrex128
args: deu-fas
metrics:
- name: BLEU
type: bleu
value: 13.8
- name: chr-F
type: chrf
value: 0.41469
- task:
name: Translation deu-hin
type: translation
args: deu-hin
dataset:
name: ntrex128
type: ntrex128
args: deu-hin
metrics:
- name: BLEU
type: bleu
value: 16.8
- name: chr-F
type: chrf
value: 0.42940
- task:
name: Translation deu-pan
type: translation
args: deu-pan
dataset:
name: ntrex128
type: ntrex128
args: deu-pan
metrics:
- name: BLEU
type: bleu
value: 11.0
- name: chr-F
type: chrf
value: 0.36776
- task:
name: Translation deu-urd
type: translation
args: deu-urd
dataset:
name: ntrex128
type: ntrex128
args: deu-urd
metrics:
- name: BLEU
type: bleu
value: 14.5
- name: chr-F
type: chrf
value: 0.41881
- task:
name: Translation eng-ben
type: translation
args: eng-ben
dataset:
name: ntrex128
type: ntrex128
args: eng-ben
metrics:
- name: BLEU
type: bleu
value: 16.6
- name: chr-F
type: chrf
value: 0.51555
- task:
name: Translation eng-fas
type: translation
args: eng-fas
dataset:
name: ntrex128
type: ntrex128
args: eng-fas
metrics:
- name: BLEU
type: bleu
value: 19.7
- name: chr-F
type: chrf
value: 0.46895
- task:
name: Translation eng-guj
type: translation
args: eng-guj
dataset:
name: ntrex128
type: ntrex128
args: eng-guj
metrics:
- name: BLEU
type: bleu
value: 17.1
- name: chr-F
type: chrf
value: 0.48990
- task:
name: Translation eng-hin
type: translation
args: eng-hin
dataset:
name: ntrex128
type: ntrex128
args: eng-hin
metrics:
- name: BLEU
type: bleu
value: 26.9
- name: chr-F
type: chrf
value: 0.52307
- task:
name: Translation eng-mar
type: translation
args: eng-mar
dataset:
name: ntrex128
type: ntrex128
args: eng-mar
metrics:
- name: BLEU
type: bleu
value: 10.4
- name: chr-F
type: chrf
value: 0.44580
- task:
name: Translation eng-pan
type: translation
args: eng-pan
dataset:
name: ntrex128
type: ntrex128
args: eng-pan
metrics:
- name: BLEU
type: bleu
value: 19.6
- name: chr-F
type: chrf
value: 0.46141
- task:
name: Translation eng-prs
type: translation
args: eng-prs
dataset:
name: ntrex128
type: ntrex128
args: eng-prs
metrics:
- name: BLEU
type: bleu
value: 12.9
- name: chr-F
type: chrf
value: 0.39651
- task:
name: Translation eng-tgk_Cyrl
type: translation
args: eng-tgk_Cyrl
dataset:
name: ntrex128
type: ntrex128
args: eng-tgk_Cyrl
metrics:
- name: BLEU
type: bleu
value: 11.3
- name: chr-F
type: chrf
value: 0.38524
- task:
name: Translation eng-urd
type: translation
args: eng-urd
dataset:
name: ntrex128
type: ntrex128
args: eng-urd
metrics:
- name: BLEU
type: bleu
value: 22.1
- name: chr-F
type: chrf
value: 0.49646
- task:
name: Translation fra-fas
type: translation
args: fra-fas
dataset:
name: ntrex128
type: ntrex128
args: fra-fas
metrics:
- name: BLEU
type: bleu
value: 13.8
- name: chr-F
type: chrf
value: 0.41282
- task:
name: Translation fra-hin
type: translation
args: fra-hin
dataset:
name: ntrex128
type: ntrex128
args: fra-hin
metrics:
- name: BLEU
type: bleu
value: 17.1
- name: chr-F
type: chrf
value: 0.42475
- task:
name: Translation fra-pan
type: translation
args: fra-pan
dataset:
name: ntrex128
type: ntrex128
args: fra-pan
metrics:
- name: BLEU
type: bleu
value: 10.2
- name: chr-F
type: chrf
value: 0.36120
- task:
name: Translation fra-urd
type: translation
args: fra-urd
dataset:
name: ntrex128
type: ntrex128
args: fra-urd
metrics:
- name: BLEU
type: bleu
value: 14.8
- name: chr-F
type: chrf
value: 0.41536
- task:
name: Translation por-fas
type: translation
args: por-fas
dataset:
name: ntrex128
type: ntrex128
args: por-fas
metrics:
- name: BLEU
type: bleu
value: 14.4
- name: chr-F
type: chrf
value: 0.42010
- task:
name: Translation por-hin
type: translation
args: por-hin
dataset:
name: ntrex128
type: ntrex128
args: por-hin
metrics:
- name: BLEU
type: bleu
value: 17.6
- name: chr-F
type: chrf
value: 0.43275
- task:
name: Translation por-pan
type: translation
args: por-pan
dataset:
name: ntrex128
type: ntrex128
args: por-pan
metrics:
- name: BLEU
type: bleu
value: 10.6
- name: chr-F
type: chrf
value: 0.36360
- task:
name: Translation por-urd
type: translation
args: por-urd
dataset:
name: ntrex128
type: ntrex128
args: por-urd
metrics:
- name: BLEU
type: bleu
value: 15.2
- name: chr-F
type: chrf
value: 0.42484
- task:
name: Translation spa-ben
type: translation
args: spa-ben
dataset:
name: ntrex128
type: ntrex128
args: spa-ben
metrics:
- name: BLEU
type: bleu
value: 10.3
- name: chr-F
type: chrf
value: 0.44905
- task:
name: Translation spa-fas
type: translation
args: spa-fas
dataset:
name: ntrex128
type: ntrex128
args: spa-fas
metrics:
- name: BLEU
type: bleu
value: 14.1
- name: chr-F
type: chrf
value: 0.42207
- task:
name: Translation spa-hin
type: translation
args: spa-hin
dataset:
name: ntrex128
type: ntrex128
args: spa-hin
metrics:
- name: BLEU
type: bleu
value: 17.6
- name: chr-F
type: chrf
value: 0.43380
- task:
name: Translation spa-pan
type: translation
args: spa-pan
dataset:
name: ntrex128
type: ntrex128
args: spa-pan
metrics:
- name: BLEU
type: bleu
value: 11.1
- name: chr-F
type: chrf
value: 0.37361
- task:
name: Translation spa-prs
type: translation
args: spa-prs
dataset:
name: ntrex128
type: ntrex128
args: spa-prs
metrics:
- name: BLEU
type: bleu
value: 10.3
- name: chr-F
type: chrf
value: 0.37448
- task:
name: Translation spa-urd
type: translation
args: spa-urd
dataset:
name: ntrex128
type: ntrex128
args: spa-urd
metrics:
- name: BLEU
type: bleu
value: 15.0
- name: chr-F
type: chrf
value: 0.42434
- task:
name: Translation deu-fas
type: translation
args: deu-fas
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: deu-fas
metrics:
- name: BLEU
type: bleu
value: 20.3
- name: chr-F
type: chrf
value: 0.45763
- task:
name: Translation eng-hin
type: translation
args: eng-hin
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: eng-hin
metrics:
- name: BLEU
type: bleu
value: 28.4
- name: chr-F
type: chrf
value: 0.52525
- task:
name: Translation eng-mar
type: translation
args: eng-mar
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: eng-mar
metrics:
- name: BLEU
type: bleu
value: 24.4
- name: chr-F
type: chrf
value: 0.52549
- task:
name: Translation eng-ben
type: translation
args: eng-ben
dataset:
name: tico19-test
type: tico19-test
args: eng-ben
metrics:
- name: BLEU
type: bleu
value: 17.9
- name: chr-F
type: chrf
value: 0.51563
- task:
name: Translation eng-fas
type: translation
args: eng-fas
dataset:
name: tico19-test
type: tico19-test
args: eng-fas
metrics:
- name: BLEU
type: bleu
value: 25.8
- name: chr-F
type: chrf
value: 0.53182
- task:
name: Translation eng-hin
type: translation
args: eng-hin
dataset:
name: tico19-test
type: tico19-test
args: eng-hin
metrics:
- name: BLEU
type: bleu
value: 41.6
- name: chr-F
type: chrf
value: 0.63128
- task:
name: Translation eng-mar
type: translation
args: eng-mar
dataset:
name: tico19-test
type: tico19-test
args: eng-mar
metrics:
- name: BLEU
type: bleu
value: 12.9
- name: chr-F
type: chrf
value: 0.45619
- task:
name: Translation eng-nep
type: translation
args: eng-nep
dataset:
name: tico19-test
type: tico19-test
args: eng-nep
metrics:
- name: BLEU
type: bleu
value: 17.6
- name: chr-F
type: chrf
value: 0.53413
- task:
name: Translation eng-prs
type: translation
args: eng-prs
dataset:
name: tico19-test
type: tico19-test
args: eng-prs
metrics:
- name: BLEU
type: bleu
value: 17.3
- name: chr-F
type: chrf
value: 0.44101
- task:
name: Translation eng-pus
type: translation
args: eng-pus
dataset:
name: tico19-test
type: tico19-test
args: eng-pus
metrics:
- name: BLEU
type: bleu
value: 20.5
- name: chr-F
type: chrf
value: 0.47063
- task:
name: Translation eng-urd
type: translation
args: eng-urd
dataset:
name: tico19-test
type: tico19-test
args: eng-urd
metrics:
- name: BLEU
type: bleu
value: 22.0
- name: chr-F
type: chrf
value: 0.51054
- task:
name: Translation fra-fas
type: translation
args: fra-fas
dataset:
name: tico19-test
type: tico19-test
args: fra-fas
metrics:
- name: BLEU
type: bleu
value: 17.9
- name: chr-F
type: chrf
value: 0.43476
- task:
name: Translation fra-hin
type: translation
args: fra-hin
dataset:
name: tico19-test
type: tico19-test
args: fra-hin
metrics:
- name: BLEU
type: bleu
value: 25.6
- name: chr-F
type: chrf
value: 0.48625
- task:
name: Translation fra-prs
type: translation
args: fra-prs
dataset:
name: tico19-test
type: tico19-test
args: fra-prs
metrics:
- name: BLEU
type: bleu
value: 11.6
- name: chr-F
type: chrf
value: 0.36130
- task:
name: Translation fra-pus
type: translation
args: fra-pus
dataset:
name: tico19-test
type: tico19-test
args: fra-pus
metrics:
- name: BLEU
type: bleu
value: 12.7
- name: chr-F
type: chrf
value: 0.37217
- task:
name: Translation fra-urd
type: translation
args: fra-urd
dataset:
name: tico19-test
type: tico19-test
args: fra-urd
metrics:
- name: BLEU
type: bleu
value: 14.4
- name: chr-F
type: chrf
value: 0.40482
- task:
name: Translation por-ben
type: translation
args: por-ben
dataset:
name: tico19-test
type: tico19-test
args: por-ben
metrics:
- name: BLEU
type: bleu
value: 12.5
- name: chr-F
type: chrf
value: 0.45814
- task:
name: Translation por-fas
type: translation
args: por-fas
dataset:
name: tico19-test
type: tico19-test
args: por-fas
metrics:
- name: BLEU
type: bleu
value: 21.3
- name: chr-F
type: chrf
value: 0.49181
- task:
name: Translation por-hin
type: translation
args: por-hin
dataset:
name: tico19-test
type: tico19-test
args: por-hin
metrics:
- name: BLEU
type: bleu
value: 31.1
- name: chr-F
type: chrf
value: 0.55759
- task:
name: Translation por-nep
type: translation
args: por-nep
dataset:
name: tico19-test
type: tico19-test
args: por-nep
metrics:
- name: BLEU
type: bleu
value: 12.1
- name: chr-F
type: chrf
value: 0.47378
- task:
name: Translation por-prs
type: translation
args: por-prs
dataset:
name: tico19-test
type: tico19-test
args: por-prs
metrics:
- name: BLEU
type: bleu
value: 12.1
- name: chr-F
type: chrf
value: 0.38725
- task:
name: Translation por-pus
type: translation
args: por-pus
dataset:
name: tico19-test
type: tico19-test
args: por-pus
metrics:
- name: BLEU
type: bleu
value: 15.9
- name: chr-F
type: chrf
value: 0.42496
- task:
name: Translation por-urd
type: translation
args: por-urd
dataset:
name: tico19-test
type: tico19-test
args: por-urd
metrics:
- name: BLEU
type: bleu
value: 16.6
- name: chr-F
type: chrf
value: 0.45560
- task:
name: Translation spa-ben
type: translation
args: spa-ben
dataset:
name: tico19-test
type: tico19-test
args: spa-ben
metrics:
- name: BLEU
type: bleu
value: 12.7
- name: chr-F
type: chrf
value: 0.45751
- task:
name: Translation spa-fas
type: translation
args: spa-fas
dataset:
name: tico19-test
type: tico19-test
args: spa-fas
metrics:
- name: BLEU
type: bleu
value: 21.0
- name: chr-F
type: chrf
value: 0.48974
- task:
name: Translation spa-hin
type: translation
args: spa-hin
dataset:
name: tico19-test
type: tico19-test
args: spa-hin
metrics:
- name: BLEU
type: bleu
value: 30.9
- name: chr-F
type: chrf
value: 0.55641
- task:
name: Translation spa-nep
type: translation
args: spa-nep
dataset:
name: tico19-test
type: tico19-test
args: spa-nep
metrics:
- name: BLEU
type: bleu
value: 12.1
- name: chr-F
type: chrf
value: 0.47164
- task:
name: Translation spa-prs
type: translation
args: spa-prs
dataset:
name: tico19-test
type: tico19-test
args: spa-prs
metrics:
- name: BLEU
type: bleu
value: 14.3
- name: chr-F
type: chrf
value: 0.41879
- task:
name: Translation spa-pus
type: translation
args: spa-pus
dataset:
name: tico19-test
type: tico19-test
args: spa-pus
metrics:
- name: BLEU
type: bleu
value: 15.1
- name: chr-F
type: chrf
value: 0.41714
- task:
name: Translation spa-urd
type: translation
args: spa-urd
dataset:
name: tico19-test
type: tico19-test
args: spa-urd
metrics:
- name: BLEU
type: bleu
value: 15.3
- name: chr-F
type: chrf
value: 0.44931
- task:
name: Translation eng-hin
type: translation
args: eng-hin
dataset:
name: newstest2014
type: wmt-2014-news
args: eng-hin
metrics:
- name: BLEU
type: bleu
value: 23.6
- name: chr-F
type: chrf
value: 0.51249
- task:
name: Translation eng-guj
type: translation
args: eng-guj
dataset:
name: newstest2019
type: wmt-2019-news
args: eng-guj
metrics:
- name: BLEU
type: bleu
value: 25.5
- name: chr-F
type: chrf
value: 0.57282
---
# opus-mt-tc-bible-big-deu_eng_fra_por_spa-iir
## Table of Contents
- [Model Details](#model-details)
- [Uses](#uses)
- [Risks, Limitations and Biases](#risks-limitations-and-biases)
- [How to Get Started With the Model](#how-to-get-started-with-the-model)
- [Training](#training)
- [Evaluation](#evaluation)
- [Citation Information](#citation-information)
- [Acknowledgements](#acknowledgements)
## Model Details
Neural machine translation model for translating from German, English, French, Portuguese and Spanish (deu+eng+fra+por+spa) to Indo-Iranian languages (iir).
This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained with [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++, and have been converted to PyTorch using the Hugging Face transformers library. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train).
**Model Description:**
- **Developed by:** Language Technology Research Group at the University of Helsinki
- **Model Type:** Translation (transformer-big)
- **Release:** 2024-05-30
- **License:** Apache-2.0
- **Language(s):**
- Source Language(s): deu eng fra por spa
- Target Language(s): anp asm awa bal ben bho bpy ckb diq div dty fas gbm glk guj hif hin hne hns jdt kas kmr kok kur lah lrc mag mai mar mzn nep npi ori oss pal pan pes pli prs pus rhg rmy rom san sdh sin skr snd syl tgk tly urd zza
- Valid Target Language Labels: >>aee<< >>aeq<< >>aiq<< >>anp<< >>anr<< >>ask<< >>asm<< >>atn<< >>avd<< >>ave<< >>awa<< >>bal<< >>bal_Latn<< >>bdv<< >>ben<< >>bfb<< >>bfy<< >>bfz<< >>bgc<< >>bgd<< >>bge<< >>bgw<< >>bha<< >>bhb<< >>bhd<< >>bhe<< >>bhh<< >>bhi<< >>bho<< >>bht<< >>bhu<< >>bjj<< >>bjm<< >>bkk<< >>bmj<< >>bns<< >>bpx<< >>bpy<< >>bqi<< >>bra<< >>bsg<< >>bsh<< >>btv<< >>ccp<< >>cdh<< >>cdi<< >>cdj<< >>cih<< >>ckb<< >>clh<< >>ctg<< >>dcc<< >>def<< >>deh<< >>dhn<< >>dho<< >>diq<< >>div<< >>dmk<< >>dml<< >>doi<< >>dry<< >>dty<< >>dub<< >>duh<< >>dwz<< >>emx<< >>esh<< >>fas<< >>fay<< >>gas<< >>gbk<< >>gbl<< >>gbm<< >>gbz<< >>gdx<< >>ggg<< >>ghr<< >>gig<< >>gjk<< >>glh<< >>glk<< >>goz<< >>gra<< >>guj<< >>gwc<< >>gwf<< >>gwt<< >>gzi<< >>hac<< >>haj<< >>haz<< >>hca<< >>hif<< >>hif_Latn<< >>hii<< >>hin<< >>hin_Latn<< >>hlb<< >>hne<< >>hns<< >>hrz<< >>isk<< >>jdg<< >>jdt<< >>jdt_Cyrl<< >>jml<< >>jnd<< >>jns<< >>jpr<< >>kas<< >>kas_Arab<< >>kas_Deva<< >>kbu<< >>keq<< >>key<< >>kfm<< >>kfr<< >>kfs<< >>kft<< >>kfu<< >>kfv<< >>kfx<< >>kfy<< >>kgn<< >>khn<< >>kho<< >>khw<< >>kjo<< >>kls<< >>kmr<< >>kok<< >>kra<< >>ksy<< >>ktl<< >>kur<< >>kur_Arab<< >>kur_Cyrl<< >>kur_Latn<< >>kvx<< >>kxp<< >>kyw<< >>lah<< >>lbm<< >>lhl<< >>lki<< >>lmn<< >>lrc<< >>lrl<< >>lsa<< >>lss<< >>luv<< >>luz<< >>mag<< >>mai<< >>mar<< >>mby<< >>mjl<< >>mjz<< >>mkb<< >>mke<< >>mki<< >>mnj<< >>mvy<< >>mwr<< >>mzn<< >>nag<< >>nep<< >>nhh<< >>nli<< >>nlx<< >>noe<< >>noi<< >>npi<< >>ntz<< >>nyq<< >>odk<< >>okh<< >>omr<< >>oos<< >>ori<< >>ort<< >>oru<< >>oss<< >>pal<< >>pan<< >>pan_Guru<< >>paq<< >>pcl<< >>peo<< >>pes<< >>pgg<< >>phd<< >>phl<< >>phv<< >>pli<< >>plk<< >>plp<< >>pmh<< >>prc<< >>prn<< >>prs<< >>psh<< >>psi<< >>psu<< >>pus<< >>pwr<< >>raj<< >>rat<< >>rdb<< >>rei<< >>rhg<< >>rhg_Latn<< >>rjs<< >>rkt<< >>rmi<< >>rmq<< >>rmt<< >>rmy<< >>rom<< >>rtw<< >>san<< >>san_Deva<< >>saz<< >>sbn<< >>sck<< >>scl<< >>sdb<< >>sdf<< >>sdg<< >>sdh<< >>sdr<< >>sgh<< >>sgl<< >>sgr<< >>sgy<< >>shd<< >>shm<< >>sin<< >>siy<< >>sjp<< >>skr<< >>smm<< >>smv<< >>smy<< >>snd<< >>snd_Arab<< >>sog<< >>soi<< >>soj<< >>sqo<< >>srh<< >>srx<< >>srz<< >>ssi<< >>sts<< >>syl<< >>syl_Sylo<< >>tdb<< >>tgk<< >>tgk_Cyrl<< >>tgk_Latn<< >>the<< >>thl<< >>thq<< >>thr<< >>tkb<< >>tks<< >>tkt<< >>tly<< >>tly_Latn<< >>tnv<< >>tov<< >>tra<< >>trm<< >>trw<< >>ttt<< >>urd<< >>ush<< >>vaa<< >>vaf<< >>vah<< >>vas<< >>vav<< >>ved<< >>vgr<< >>vmh<< >>wbk<< >>wbl<< >>wne<< >>wsv<< >>wtm<< >>xbc<< >>xco<< >>xka<< >>xkc<< >>xkj<< >>xkp<< >>xpr<< >>xsc<< >>xtq<< >>xvi<< >>xxx<< >>yah<< >>yai<< >>ydg<< >>zum<< >>zza<<
- **Original Model:** [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/deu+eng+fra+por+spa-iir/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip)
- **Resources for more information:**
- [OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/deu%2Beng%2Bfra%2Bpor%2Bspa-iir/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-05-30)
- [OPUS-MT-train GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)
- [More information about MarianNMT models in the transformers library](https://huggingface.co/docs/transformers/model_doc/marian)
- [Tatoeba Translation Challenge](https://github.com/Helsinki-NLP/Tatoeba-Challenge/)
- [HPLT bilingual data v1 (as part of the Tatoeba Translation Challenge dataset)](https://hplt-project.org/datasets/v1)
- [A massively parallel Bible corpus](https://aclanthology.org/L14-1215/)
This is a multilingual translation model with multiple target languages. A sentence-initial language token is required in the form of `>>id<<` (id = a valid target language ID), e.g. `>>anp<<`.
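The full list of valid target-language labels is given above; alternatively, the labels known to the converted model can be listed programmatically from the tokenizer vocabulary. The snippet below is a minimal sketch of that check and uses the repository name from the pipeline example further down.
```python
from transformers import MarianTokenizer

# Load the tokenizer of this model from the Hugging Face hub.
tokenizer = MarianTokenizer.from_pretrained(
    "Helsinki-NLP/opus-mt-tc-bible-big-deu_eng_fra_por_spa-iir"
)

# Target-language labels are stored as ordinary vocabulary items of the form >>xxx<<.
labels = sorted(tok for tok in tokenizer.get_vocab() if tok.startswith(">>") and tok.endswith("<<"))
print(len(labels), "target language labels, e.g.:", labels[:5])
```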
## Uses
This model can be used for translation and text-to-text generation.
## Risks, Limitations and Biases
**CONTENT WARNING: Readers should be aware that the model is trained on various public data sets that may contain content that is disturbing, offensive, and can propagate historical and current stereotypes.**
Significant research has explored bias and fairness issues with language models (see, e.g., [Sheng et al. (2021)](https://aclanthology.org/2021.acl-long.330.pdf) and [Bender et al. (2021)](https://dl.acm.org/doi/pdf/10.1145/3442188.3445922)).
## How to Get Started With the Model
A short code example:
```python
from transformers import MarianMTModel, MarianTokenizer

src_text = [
    ">>anp<< Replace this with text in an accepted source language.",
    ">>zza<< This is the second sentence."
]

model_name = "Helsinki-NLP/opus-mt-tc-bible-big-deu_eng_fra_por_spa-iir"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Translate the batch and decode the generated token IDs.
translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))
for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))
```
You can also use OPUS-MT models with the transformers `pipeline` API, for example:
```python
from transformers import pipeline
pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-bible-big-deu_eng_fra_por_spa-iir")
print(pipe(">>anp<< Replace this with text in an accepted source language."))
```
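Because the target language is selected by the sentence-initial token, one source sentence can be translated into several target languages in a single batch. The snippet below is only a sketch of that usage (the Hindi and Urdu labels `>>hin<<` and `>>urd<<` come from the list of valid target-language labels above; beam size and output length are arbitrary choices).
```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-tc-bible-big-deu_eng_fra_por_spa-iir"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# The same English sentence, tagged for two different target languages.
sentence = "The weather is nice today."
batch = [f">>hin<< {sentence}", f">>urd<< {sentence}"]

inputs = tokenizer(batch, return_tensors="pt", padding=True)
outputs = model.generate(**inputs, num_beams=4, max_new_tokens=128)
for translation in tokenizer.batch_decode(outputs, skip_special_tokens=True):
    print(translation)
```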
## Training
- **Data:** opusTCv20230926max50+bt+jhubc ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge))
- **Pre-processing:** SentencePiece (spm32k,spm32k)
- **Model Type:** transformer-big
- **Original MarianNMT Model:** [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/deu+eng+fra+por+spa-iir/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip)
- **Training Scripts:** [GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)
## Evaluation
* [Model scores at the OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/deu%2Beng%2Bfra%2Bpor%2Bspa-iir/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-05-30)
* test set translations: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/deu+eng+fra+por+spa-iir/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.test.txt)
* test set scores: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/deu+eng+fra+por+spa-iir/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.eval.txt)
* benchmark results: [benchmark_results.txt](benchmark_results.txt)
* benchmark output: [benchmark_translations.zip](benchmark_translations.zip)
| langpair | testset | chr-F | BLEU | #sent | #words |
|----------|---------|-------|-------|-------|--------|
| deu-fas | tatoeba-test-v2021-08-07 | 0.45763 | 20.3 | 3185 | 24941 |
| deu-kur_Latn | tatoeba-test-v2021-08-07 | 1.027 | 0.6 | 223 | 1249 |
| eng-ben | tatoeba-test-v2021-08-07 | 0.47927 | 17.6 | 2500 | 11654 |
| eng-fas | tatoeba-test-v2021-08-07 | 0.40192 | 17.1 | 3762 | 31110 |
| eng-hin | tatoeba-test-v2021-08-07 | 0.52525 | 28.4 | 5000 | 32904 |
| eng-kur_Latn | tatoeba-test-v2021-08-07 | 0.493 | 0.0 | 290 | 1682 |
| eng-mar | tatoeba-test-v2021-08-07 | 0.52549 | 24.4 | 10396 | 61140 |
| eng-pes | tatoeba-test-v2021-08-07 | 0.40401 | 17.3 | 3757 | 31044 |
| eng-urd | tatoeba-test-v2021-08-07 | 0.45764 | 18.1 | 1663 | 12155 |
| fra-fas | tatoeba-test-v2021-08-07 | 0.42414 | 18.9 | 376 | 3217 |
| deu-npi | flores101-devtest | 3.082 | 0.2 | 1012 | 19762 |
| eng-ben | flores101-devtest | 0.51055 | 17.0 | 1012 | 21155 |
| eng-ckb | flores101-devtest | 0.45337 | 7.1 | 1012 | 21159 |
| eng-guj | flores101-devtest | 0.53972 | 22.3 | 1012 | 23840 |
| eng-hin | flores101-devtest | 0.57980 | 33.4 | 1012 | 27743 |
| eng-mar | flores101-devtest | 0.48206 | 14.3 | 1012 | 21810 |
| eng-urd | flores101-devtest | 0.48050 | 20.5 | 1012 | 28098 |
| fra-ben | flores101-devtest | 0.43806 | 10.9 | 1012 | 21155 |
| fra-ckb | flores101-devtest | 0.41016 | 4.9 | 1012 | 21159 |
| por-ben | flores101-devtest | 0.42730 | 10.0 | 1012 | 21155 |
| por-npi | flores101-devtest | 2.084 | 0.2 | 1012 | 19762 |
| spa-hin | flores101-devtest | 0.43371 | 16.0 | 1012 | 27743 |
| deu-ben | flores200-devtest | 0.44005 | 10.6 | 1012 | 21155 |
| deu-hin | flores200-devtest | 0.48448 | 22.3 | 1012 | 27743 |
| deu-hne | flores200-devtest | 0.42659 | 13.8 | 1012 | 26582 |
| deu-mag | flores200-devtest | 0.42477 | 14.0 | 1012 | 26516 |
| deu-npi | flores200-devtest | 5.870 | 0.1 | 1012 | 19762 |
| deu-pes | flores200-devtest | 0.42726 | 14.9 | 1012 | 24986 |
| deu-tgk | flores200-devtest | 0.40932 | 12.9 | 1012 | 25530 |
| deu-urd | flores200-devtest | 0.41250 | 14.4 | 1012 | 28098 |
| eng-ben | flores200-devtest | 0.51361 | 17.1 | 1012 | 21155 |
| eng-ckb | flores200-devtest | 0.45750 | 7.7 | 1012 | 21152 |
| eng-guj | flores200-devtest | 0.54231 | 22.4 | 1012 | 23840 |
| eng-hin | flores200-devtest | 0.58371 | 33.7 | 1012 | 27743 |
| eng-hne | flores200-devtest | 0.47591 | 19.9 | 1012 | 26582 |
| eng-mag | flores200-devtest | 0.51070 | 22.2 | 1012 | 26516 |
| eng-mar | flores200-devtest | 0.48733 | 14.8 | 1012 | 21810 |
| eng-pan | flores200-devtest | 0.45015 | 18.1 | 1012 | 27451 |
| eng-pes | flores200-devtest | 0.48588 | 21.1 | 1012 | 24986 |
| eng-prs | flores200-devtest | 0.51879 | 24.5 | 1012 | 25885 |
| eng-sin | flores200-devtest | 0.43823 | 10.6 | 1012 | 23278 |
| eng-tgk | flores200-devtest | 0.47323 | 17.8 | 1012 | 25530 |
| eng-urd | flores200-devtest | 0.48212 | 20.4 | 1012 | 28098 |
| fra-ben | flores200-devtest | 0.44029 | 11.0 | 1012 | 21155 |
| fra-ckb | flores200-devtest | 0.41353 | 5.3 | 1012 | 21152 |
| fra-hin | flores200-devtest | 0.48406 | 22.6 | 1012 | 27743 |
| fra-hne | flores200-devtest | 0.42353 | 13.9 | 1012 | 26582 |
| fra-mag | flores200-devtest | 0.42678 | 14.3 | 1012 | 26516 |
| fra-npi | flores200-devtest | 6.525 | 0.1 | 1012 | 19762 |
| fra-pes | flores200-devtest | 0.43526 | 15.5 | 1012 | 24986 |
| fra-tgk | flores200-devtest | 0.42982 | 13.7 | 1012 | 25530 |
| fra-urd | flores200-devtest | 0.41438 | 14.2 | 1012 | 28098 |
| por-ben | flores200-devtest | 0.43390 | 10.4 | 1012 | 21155 |
| por-ckb | flores200-devtest | 0.42303 | 5.6 | 1012 | 21152 |
| por-hin | flores200-devtest | 0.49524 | 23.6 | 1012 | 27743 |
| por-hne | flores200-devtest | 0.42269 | 13.9 | 1012 | 26582 |
| por-mag | flores200-devtest | 0.42753 | 15.0 | 1012 | 26516 |
| por-npi | flores200-devtest | 6.737 | 0.1 | 1012 | 19762 |
| por-pes | flores200-devtest | 0.43194 | 15.4 | 1012 | 24986 |
| por-tgk | flores200-devtest | 0.41860 | 13.2 | 1012 | 25530 |
| por-urd | flores200-devtest | 0.41799 | 14.8 | 1012 | 28098 |
| spa-ben | flores200-devtest | 0.41893 | 8.3 | 1012 | 21155 |
| spa-hin | flores200-devtest | 0.43777 | 16.4 | 1012 | 27743 |
| spa-kas_Arab | flores200-devtest | 9.380 | 0.1 | 1012 | 23514 |
| spa-npi | flores200-devtest | 7.518 | 0.2 | 1012 | 19762 |
| spa-pes | flores200-devtest | 0.40856 | 12.2 | 1012 | 24986 |
| spa-prs | flores200-devtest | 0.40361 | 12.8 | 1012 | 25885 |
| spa-tgk | flores200-devtest | 0.40100 | 10.8 | 1012 | 25530 |
| eng-hin | newstest2014 | 0.51249 | 23.6 | 2507 | 60872 |
| eng-guj | newstest2019 | 0.57282 | 25.5 | 998 | 21924 |
| deu-ben | ntrex128 | 0.43971 | 9.6 | 1997 | 40095 |
| deu-fas | ntrex128 | 0.41469 | 13.8 | 1997 | 50525 |
| deu-hin | ntrex128 | 0.42940 | 16.8 | 1997 | 55219 |
| deu-snd_Arab | ntrex128 | 6.129 | 0.1 | 1997 | 49866 |
| deu-urd | ntrex128 | 0.41881 | 14.5 | 1997 | 54259 |
| eng-ben | ntrex128 | 0.51555 | 16.6 | 1997 | 40095 |
| eng-fas | ntrex128 | 0.46895 | 19.7 | 1997 | 50525 |
| eng-guj | ntrex128 | 0.48990 | 17.1 | 1997 | 45335 |
| eng-hin | ntrex128 | 0.52307 | 26.9 | 1997 | 55219 |
| eng-mar | ntrex128 | 0.44580 | 10.4 | 1997 | 42375 |
| eng-nep | ntrex128 | 0.42955 | 8.4 | 1997 | 40570 |
| eng-pan | ntrex128 | 0.46141 | 19.6 | 1997 | 54355 |
| eng-sin | ntrex128 | 0.42236 | 9.7 | 1997 | 44429 |
| eng-snd_Arab | ntrex128 | 1.932 | 0.1 | 1997 | 49866 |
| eng-urd | ntrex128 | 0.49646 | 22.1 | 1997 | 54259 |
| fra-ben | ntrex128 | 0.41716 | 8.9 | 1997 | 40095 |
| fra-fas | ntrex128 | 0.41282 | 13.8 | 1997 | 50525 |
| fra-hin | ntrex128 | 0.42475 | 17.1 | 1997 | 55219 |
| fra-snd_Arab | ntrex128 | 6.047 | 0.0 | 1997 | 49866 |
| fra-urd | ntrex128 | 0.41536 | 14.8 | 1997 | 54259 |
| por-ben | ntrex128 | 0.43855 | 9.9 | 1997 | 40095 |
| por-fas | ntrex128 | 0.42010 | 14.4 | 1997 | 50525 |
| por-hin | ntrex128 | 0.43275 | 17.6 | 1997 | 55219 |
| por-snd_Arab | ntrex128 | 6.336 | 0.1 | 1997 | 49866 |
| por-urd | ntrex128 | 0.42484 | 15.2 | 1997 | 54259 |
| spa-ben | ntrex128 | 0.44905 | 10.3 | 1997 | 40095 |
| spa-fas | ntrex128 | 0.42207 | 14.1 | 1997 | 50525 |
| spa-hin | ntrex128 | 0.43380 | 17.6 | 1997 | 55219 |
| spa-snd_Arab | ntrex128 | 5.551 | 0.0 | 1997 | 49866 |
| spa-urd | ntrex128 | 0.42434 | 15.0 | 1997 | 54259 |
| eng-ben | tico19-test | 0.51563 | 17.9 | 2100 | 51695 |
| eng-ckb | tico19-test | 0.46188 | 8.9 | 2100 | 50500 |
| eng-fas | tico19-test | 0.53182 | 25.8 | 2100 | 59779 |
| eng-hin | tico19-test | 0.63128 | 41.6 | 2100 | 62680 |
| eng-mar | tico19-test | 0.45619 | 12.9 | 2100 | 50872 |
| eng-nep | tico19-test | 0.53413 | 17.6 | 2100 | 48363 |
| eng-prs | tico19-test | 0.44101 | 17.3 | 2100 | 62972 |
| eng-pus | tico19-test | 0.47063 | 20.5 | 2100 | 66213 |
| eng-urd | tico19-test | 0.51054 | 22.0 | 2100 | 65312 |
| fra-fas | tico19-test | 0.43476 | 17.9 | 2100 | 59779 |
| fra-hin | tico19-test | 0.48625 | 25.6 | 2100 | 62680 |
| fra-nep | tico19-test | 0.41153 | 9.7 | 2100 | 48363 |
| fra-urd | tico19-test | 0.40482 | 14.4 | 2100 | 65312 |
| por-ben | tico19-test | 0.45814 | 12.5 | 2100 | 51695 |
| por-ckb | tico19-test | 0.41684 | 5.6 | 2100 | 50500 |
| por-fas | tico19-test | 0.49181 | 21.3 | 2100 | 59779 |
| por-hin | tico19-test | 0.55759 | 31.1 | 2100 | 62680 |
| por-mar | tico19-test | 0.40067 | 9.1 | 2100 | 50872 |
| por-nep | tico19-test | 0.47378 | 12.1 | 2100 | 48363 |
| por-pus | tico19-test | 0.42496 | 15.9 | 2100 | 66213 |
| por-urd | tico19-test | 0.45560 | 16.6 | 2100 | 65312 |
| spa-ben | tico19-test | 0.45751 | 12.7 | 2100 | 51695 |
| spa-ckb | tico19-test | 0.41568 | 5.4 | 2100 | 50500 |
| spa-fas | tico19-test | 0.48974 | 21.0 | 2100 | 59779 |
| spa-hin | tico19-test | 0.55641 | 30.9 | 2100 | 62680 |
| spa-mar | tico19-test | 0.40329 | 9.4 | 2100 | 50872 |
| spa-nep | tico19-test | 0.47164 | 12.1 | 2100 | 48363 |
| spa-prs | tico19-test | 0.41879 | 14.3 | 2100 | 62972 |
| spa-pus | tico19-test | 0.41714 | 15.1 | 2100 | 66213 |
| spa-urd | tico19-test | 0.44931 | 15.3 | 2100 | 65312 |
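The BLEU and chr-F scores above follow standard sacrebleu conventions. As a rough sketch of how such scores can be reproduced from the test set translations linked above, the snippet below scores a hypothesis file against a reference file with sacrebleu (the file names are placeholders; the table reports chr-F on a 0–1 scale).
```python
import sacrebleu

# Placeholder file names: one detokenized sentence per line, same order in both files.
with open("hypotheses.txt", encoding="utf-8") as f:
    hypotheses = [line.rstrip("\n") for line in f]
with open("references.txt", encoding="utf-8") as f:
    references = [line.rstrip("\n") for line in f]

bleu = sacrebleu.corpus_bleu(hypotheses, [references])
chrf = sacrebleu.corpus_chrf(hypotheses, [references])

# sacrebleu reports both metrics on a 0-100 scale; the table above gives chr-F divided by 100.
print(f"BLEU: {bleu.score:.1f}  chr-F: {chrf.score / 100:.5f}")
```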
## Citation Information
* Publications: [Democratizing neural machine translation with OPUS-MT](https://doi.org/10.1007/s10579-023-09704-w), [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/) and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (please cite these papers if you use this model).
```bibtex
@article{tiedemann2023democratizing,
title={Democratizing neural machine translation with {OPUS-MT}},
author={Tiedemann, J{\"o}rg and Aulamo, Mikko and Bakshandaeva, Daria and Boggia, Michele and Gr{\"o}nroos, Stig-Arne and Nieminen, Tommi and Raganato, Alessandro and Scherrer, Yves and Vazquez, Raul and Virpioja, Sami},
journal={Language Resources and Evaluation},
number={58},
pages={713--755},
year={2023},
publisher={Springer Nature},
issn={1574-0218},
doi={10.1007/s10579-023-09704-w}
}
@inproceedings{tiedemann-thottingal-2020-opus,
title = "{OPUS}-{MT} {--} Building open translation services for the World",
author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh},
booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation",
month = nov,
year = "2020",
address = "Lisboa, Portugal",
publisher = "European Association for Machine Translation",
url = "https://aclanthology.org/2020.eamt-1.61",
pages = "479--480",
}
@inproceedings{tiedemann-2020-tatoeba,
title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}",
author = {Tiedemann, J{\"o}rg},
booktitle = "Proceedings of the Fifth Conference on Machine Translation",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.wmt-1.139",
pages = "1174--1182",
}
```
## Acknowledgements
The work is supported by the [HPLT project](https://hplt-project.org/), funded by the European Union’s Horizon Europe research and innovation programme under grant agreement No 101070350. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland, and the [EuroHPC supercomputer LUMI](https://www.lumi-supercomputer.eu/).
## Model conversion info
* transformers version: 4.45.1
* OPUS-MT git hash: 0882077
* port time: Tue Oct 8 10:05:20 EEST 2024
* port machine: LM0-400-22516.local