de-francophones committed on
Commit 9ea2122
1 Parent(s): 4e08f55

94d4cb197348a4c55e0874e62bdb1e7eb35eae10d5bde946f7b01280f53113f3

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. .gitattributes +2 -0
  2. sysoutputs/orange_deskin/test_02/pertreebank/bg_btb-ud-test.eval.log +17 -0
  3. sysoutputs/orange_deskin/test_02/pertreebank/cs_cac-ud-test-sys.conllu +0 -0
  4. sysoutputs/orange_deskin/test_02/pertreebank/cs_cac-ud-test.eval.log +17 -0
  5. sysoutputs/orange_deskin/test_02/pertreebank/cs_fictree-ud-test-sys.conllu +0 -0
  6. sysoutputs/orange_deskin/test_02/pertreebank/cs_fictree-ud-test.eval.log +17 -0
  7. sysoutputs/orange_deskin/test_02/pertreebank/cs_pdt-ud-test-sys.conllu +3 -0
  8. sysoutputs/orange_deskin/test_02/pertreebank/cs_pdt-ud-test.eval.log +17 -0
  9. sysoutputs/orange_deskin/test_02/pertreebank/cs_pud-ud-test-sys.conllu +0 -0
  10. sysoutputs/orange_deskin/test_02/pertreebank/cs_pud-ud-test.eval.log +17 -0
  11. sysoutputs/orange_deskin/test_02/pertreebank/en_ewt-ud-test-sys.conllu +0 -0
  12. sysoutputs/orange_deskin/test_02/pertreebank/en_ewt-ud-test.eval.log +17 -0
  13. sysoutputs/orange_deskin/test_02/pertreebank/en_pud-ud-test-sys.conllu +0 -0
  14. sysoutputs/orange_deskin/test_02/pertreebank/en_pud-ud-test.eval.log +17 -0
  15. sysoutputs/orange_deskin/test_02/pertreebank/et_edt-ud-test-sys.conllu +0 -0
  16. sysoutputs/orange_deskin/test_02/pertreebank/et_edt-ud-test.eval.log +17 -0
  17. sysoutputs/orange_deskin/test_02/pertreebank/et_ewt-ud-test-sys.conllu +0 -0
  18. sysoutputs/orange_deskin/test_02/pertreebank/et_ewt-ud-test.eval.log +17 -0
  19. sysoutputs/orange_deskin/test_02/pertreebank/fi_pud-ud-test-sys.conllu +0 -0
  20. sysoutputs/orange_deskin/test_02/pertreebank/fi_pud-ud-test.eval.log +17 -0
  21. sysoutputs/orange_deskin/test_02/pertreebank/fi_tdt-ud-test-sys.conllu +0 -0
  22. sysoutputs/orange_deskin/test_02/pertreebank/fi_tdt-ud-test.eval.log +17 -0
  23. sysoutputs/orange_deskin/test_02/pertreebank/fr_fqb-ud-test-sys.conllu +0 -0
  24. sysoutputs/orange_deskin/test_02/pertreebank/fr_fqb-ud-test.eval.log +17 -0
  25. sysoutputs/orange_deskin/test_02/pertreebank/fr_sequoia-ud-test-sys.conllu +0 -0
  26. sysoutputs/orange_deskin/test_02/pertreebank/fr_sequoia-ud-test.eval.log +17 -0
  27. sysoutputs/orange_deskin/test_02/pertreebank/it_isdt-ud-test-sys.conllu +0 -0
  28. sysoutputs/orange_deskin/test_02/pertreebank/it_isdt-ud-test.eval.log +17 -0
  29. sysoutputs/orange_deskin/test_02/pertreebank/lt_alksnis-ud-test-sys.conllu +0 -0
  30. sysoutputs/orange_deskin/test_02/pertreebank/lt_alksnis-ud-test.eval.log +17 -0
  31. sysoutputs/orange_deskin/test_02/pertreebank/lv_lvtb-ud-test-sys.conllu +0 -0
  32. sysoutputs/orange_deskin/test_02/pertreebank/lv_lvtb-ud-test.eval.log +17 -0
  33. sysoutputs/orange_deskin/test_02/pertreebank/nl_alpino-ud-test-sys.conllu +0 -0
  34. sysoutputs/orange_deskin/test_02/pertreebank/nl_alpino-ud-test.eval.log +17 -0
  35. sysoutputs/orange_deskin/test_02/pertreebank/nl_lassysmall-ud-test-sys.conllu +0 -0
  36. sysoutputs/orange_deskin/test_02/pertreebank/nl_lassysmall-ud-test.eval.log +17 -0
  37. sysoutputs/orange_deskin/test_02/pertreebank/pl_lfg-ud-test-sys.conllu +0 -0
  38. sysoutputs/orange_deskin/test_02/pertreebank/pl_lfg-ud-test.eval.log +17 -0
  39. sysoutputs/orange_deskin/test_02/pertreebank/pl_pdb-ud-test-sys.conllu +0 -0
  40. sysoutputs/orange_deskin/test_02/pertreebank/pl_pdb-ud-test.eval.log +17 -0
  41. sysoutputs/orange_deskin/test_02/pertreebank/pl_pud-ud-test-sys.conllu +0 -0
  42. sysoutputs/orange_deskin/test_02/pertreebank/pl_pud-ud-test.eval.log +17 -0
  43. sysoutputs/orange_deskin/test_02/pertreebank/ru_syntagrus-ud-test-sys.conllu +3 -0
  44. sysoutputs/orange_deskin/test_02/pertreebank/ru_syntagrus-ud-test.eval.log +17 -0
  45. sysoutputs/orange_deskin/test_02/pertreebank/sk_snk-ud-test-sys.conllu +0 -0
  46. sysoutputs/orange_deskin/test_02/pertreebank/sk_snk-ud-test.eval.log +17 -0
  47. sysoutputs/orange_deskin/test_02/pertreebank/sv_pud-ud-test-sys.conllu +0 -0
  48. sysoutputs/orange_deskin/test_02/pertreebank/sv_pud-ud-test.eval.log +17 -0
  49. sysoutputs/orange_deskin/test_02/pertreebank/sv_talbanken-ud-test-sys.conllu +0 -0
  50. sysoutputs/orange_deskin/test_02/pertreebank/sv_talbanken-ud-test.eval.log +17 -0
.gitattributes CHANGED
@@ -93,3 +93,5 @@ sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/cs_pdt-ud-test-sys.c
sysoutputs/koebsala/udpipe_test_20200425_134739/pertreebank/ru_syntagrus-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
sysoutputs/koebsala/udpipe_test_20200425_134739/ru.conllu filter=lfs diff=lfs merge=lfs -text
sysoutputs/orange_deskin/test_02/cs.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/orange_deskin/test_02/pertreebank/cs_pdt-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
+ sysoutputs/orange_deskin/test_02/pertreebank/ru_syntagrus-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
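The two added rules mark the large CoNLL-U outputs as Git LFS-tracked, so only pointer stubs are stored in the repository. As a minimal sketch (assuming Python 3.9+ and a local checkout containing this .gitattributes), the LFS-tracked patterns can be listed like this:

```python
# Sketch: list the paths/patterns that .gitattributes marks as Git LFS-tracked.
# Assumes a local checkout of this dataset repository.
from pathlib import Path

def lfs_tracked_patterns(gitattributes_path: str = ".gitattributes") -> list[str]:
    patterns = []
    for line in Path(gitattributes_path).read_text().splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        pattern, *attrs = line.split()
        if "filter=lfs" in attrs:
            patterns.append(pattern)
    return patterns

if __name__ == "__main__":
    for p in lfs_tracked_patterns():
        # e.g. sysoutputs/orange_deskin/test_02/pertreebank/cs_pdt-ud-test-sys.conllu
        print(p)
```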
sysoutputs/orange_deskin/test_02/pertreebank/bg_btb-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.88 | 99.90 | 99.89 |
+ Sentences | 94.44 | 92.83 | 93.63 |
+ Words | 99.88 | 99.90 | 99.89 |
+ UPOS | 99.16 | 99.18 | 99.17 | 99.28
+ XPOS | 97.39 | 97.41 | 97.40 | 97.50
+ UFeats | 98.05 | 98.07 | 98.06 | 98.17
+ AllTags | 96.90 | 96.92 | 96.91 | 97.01
+ Lemmas | 98.29 | 98.31 | 98.30 | 98.41
+ UAS | 94.45 | 94.47 | 94.46 | 94.56
+ LAS | 91.82 | 91.83 | 91.83 | 91.93
+ ELAS | 90.28 | 88.57 | 89.42 | 91.20
+ EULAS | 91.27 | 89.54 | 90.40 | 92.20
+ CLAS | 89.19 | 88.99 | 89.09 | 89.08
+ MLAS | 86.20 | 86.01 | 86.11 | 86.09
+ BLEX | 86.97 | 86.77 | 86.87 | 86.86
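Each eval.log below has the same layout: one row per metric with precision, recall, F1, and aligned accuracy (the ELAS/EULAS rows suggest the IWPT 2020 enhanced-UD evaluation script, though the diff itself does not say which tool produced the logs). As a sanity check, F1 is the harmonic mean of precision and recall; a small sketch using the Tokens row of the bg_btb table above:

```python
# Sketch: check that F1 in an eval.log row is the harmonic mean of precision and recall.
# The values are taken from the Tokens row of the bg_btb log above.
def f1(precision: float, recall: float) -> float:
    return 2 * precision * recall / (precision + recall)

print(round(f1(99.88, 99.90), 2))  # 99.89, matching the reported F1 Score
```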
sysoutputs/orange_deskin/test_02/pertreebank/cs_cac-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/cs_cac-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.98 | 99.96 | 99.97 |
+ Sentences | 99.52 | 99.52 | 99.52 |
+ Words | 99.98 | 99.96 | 99.97 |
+ UPOS | 99.39 | 99.37 | 99.38 | 99.41
+ XPOS | 97.07 | 97.05 | 97.06 | 97.09
+ UFeats | 97.10 | 97.08 | 97.09 | 97.12
+ AllTags | 96.36 | 96.35 | 96.35 | 96.38
+ Lemmas | 98.77 | 98.75 | 98.76 | 98.78
+ UAS | 95.41 | 95.40 | 95.41 | 95.43
+ LAS | 93.76 | 93.74 | 93.75 | 93.77
+ ELAS | 90.43 | 83.95 | 87.07 | 93.98
+ EULAS | 91.85 | 85.27 | 88.43 | 95.45
+ CLAS | 92.57 | 92.67 | 92.62 | 92.72
+ MLAS | 88.51 | 88.61 | 88.56 | 88.66
+ BLEX | 91.16 | 91.26 | 91.21 | 91.31
sysoutputs/orange_deskin/test_02/pertreebank/cs_fictree-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/cs_fictree-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.97 | 99.99 | 99.98 |
+ Sentences | 94.48 | 88.85 | 91.58 |
+ Words | 99.97 | 99.99 | 99.98 |
+ UPOS | 98.59 | 98.62 | 98.61 | 98.62
+ XPOS | 93.34 | 93.37 | 93.36 | 93.37
+ UFeats | 94.59 | 94.61 | 94.60 | 94.62
+ AllTags | 92.29 | 92.31 | 92.30 | 92.31
+ Lemmas | 99.07 | 99.09 | 99.08 | 99.10
+ UAS | 94.78 | 94.80 | 94.79 | 94.80
+ LAS | 93.08 | 93.10 | 93.09 | 93.10
+ ELAS | 90.10 | 84.43 | 87.17 | 91.91
+ EULAS | 91.55 | 85.79 | 88.58 | 93.39
+ CLAS | 91.84 | 91.85 | 91.84 | 91.86
+ MLAS | 82.54 | 82.55 | 82.54 | 82.56
+ BLEX | 90.63 | 90.64 | 90.64 | 90.65
sysoutputs/orange_deskin/test_02/pertreebank/cs_pdt-ud-test-sys.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da40fdd96d248d48678e1807c7d18333c1152fe27d73f6e5b41f6cf186a913a1
+ size 18458028
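The three lines above are a Git LFS pointer stub, not the CoNLL-U data itself; the real file is resolved from LFS storage at checkout. A minimal sketch (assuming Python 3.9+) of parsing such a pointer to recover the object id and size:

```python
# Sketch: parse a Git LFS pointer file like the cs_pdt-ud-test-sys.conllu stub above.
# The pointer stores only the object's sha256 oid and byte size.
def parse_lfs_pointer(text: str) -> dict[str, str]:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:da40fdd96d248d48678e1807c7d18333c1152fe27d73f6e5b41f6cf186a913a1
size 18458028"""

info = parse_lfs_pointer(pointer)
print(info["oid"], info["size"])  # sha256:da40fd... 18458028
```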
sysoutputs/orange_deskin/test_02/pertreebank/cs_pdt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.93 | 99.84 | 99.89 |
+ Sentences | 93.49 | 90.69 | 92.07 |
+ Words | 99.93 | 99.84 | 99.89 |
+ UPOS | 99.23 | 99.14 | 99.19 | 99.30
+ XPOS | 97.60 | 97.51 | 97.56 | 97.67
+ UFeats | 97.63 | 97.55 | 97.59 | 97.70
+ AllTags | 97.04 | 96.95 | 96.99 | 97.10
+ Lemmas | 99.07 | 98.98 | 99.02 | 99.14
+ UAS | 94.13 | 94.04 | 94.09 | 94.19
+ LAS | 92.62 | 92.53 | 92.57 | 92.68
+ ELAS | 89.55 | 85.38 | 87.41 | 91.97
+ EULAS | 90.95 | 86.72 | 88.78 | 93.42
+ CLAS | 91.64 | 91.65 | 91.64 | 91.81
+ MLAS | 88.37 | 88.38 | 88.38 | 88.53
+ BLEX | 90.67 | 90.67 | 90.67 | 90.83
sysoutputs/orange_deskin/test_02/pertreebank/cs_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/cs_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.25 | 99.31 | 99.28 |
+ Sentences | 92.98 | 95.40 | 94.18 |
+ Words | 99.25 | 99.31 | 99.28 |
+ UPOS | 97.16 | 97.22 | 97.19 | 97.90
+ XPOS | 94.36 | 94.41 | 94.39 | 95.07
+ UFeats | 94.03 | 94.08 | 94.06 | 94.74
+ AllTags | 92.64 | 92.69 | 92.66 | 93.33
+ Lemmas | 96.82 | 96.87 | 96.85 | 97.55
+ UAS | 90.86 | 90.91 | 90.88 | 91.54
+ LAS | 87.58 | 87.62 | 87.60 | 88.24
+ ELAS | 83.74 | 81.01 | 82.35 | 86.49
+ EULAS | 85.82 | 83.02 | 84.40 | 88.64
+ CLAS | 85.11 | 85.70 | 85.40 | 86.29
+ MLAS | 78.35 | 78.90 | 78.62 | 79.44
+ BLEX | 83.64 | 84.22 | 83.93 | 84.80
sysoutputs/orange_deskin/test_02/pertreebank/en_ewt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/en_ewt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 98.81 | 98.79 | 98.80 |
+ Sentences | 79.44 | 69.19 | 73.96 |
+ Words | 98.81 | 98.79 | 98.80 |
+ UPOS | 96.03 | 96.01 | 96.02 | 97.19
+ XPOS | 95.66 | 95.64 | 95.65 | 96.81
+ UFeats | 96.39 | 96.37 | 96.38 | 97.55
+ AllTags | 94.54 | 94.52 | 94.53 | 95.68
+ Lemmas | 97.38 | 97.36 | 97.37 | 98.55
+ UAS | 87.44 | 87.42 | 87.43 | 88.49
+ LAS | 85.25 | 85.22 | 85.23 | 86.27
+ ELAS | 84.14 | 83.31 | 83.72 | 87.81
+ EULAS | 84.83 | 84.00 | 84.41 | 88.53
+ CLAS | 82.38 | 82.19 | 82.28 | 83.32
+ MLAS | 78.20 | 78.02 | 78.11 | 79.10
+ BLEX | 80.97 | 80.79 | 80.88 | 81.90
sysoutputs/orange_deskin/test_02/pertreebank/en_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/en_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.74 | 99.68 | 99.71 |
+ Sentences | 93.34 | 96.70 | 94.99 |
+ Words | 99.74 | 99.68 | 99.71 |
+ UPOS | 96.46 | 96.39 | 96.42 | 96.70
+ XPOS | 95.29 | 95.23 | 95.26 | 95.54
+ UFeats | 95.98 | 95.92 | 95.95 | 96.23
+ AllTags | 92.77 | 92.70 | 92.73 | 93.00
+ Lemmas | 96.33 | 96.27 | 96.30 | 96.58
+ UAS | 91.34 | 91.28 | 91.31 | 91.58
+ LAS | 88.98 | 88.92 | 88.95 | 89.21
+ ELAS | 87.56 | 86.37 | 86.96 | 90.75
+ EULAS | 88.52 | 87.32 | 87.92 | 91.75
+ CLAS | 87.24 | 86.77 | 87.00 | 87.08
+ MLAS | 79.83 | 79.40 | 79.62 | 79.69
+ BLEX | 83.46 | 83.01 | 83.23 | 83.31
sysoutputs/orange_deskin/test_02/pertreebank/et_edt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/et_edt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.93 | 99.95 | 99.94 |
+ Sentences | 91.57 | 86.84 | 89.14 |
+ Words | 99.93 | 99.95 | 99.94 |
+ UPOS | 97.87 | 97.89 | 97.88 | 97.94
+ XPOS | 98.50 | 98.53 | 98.51 | 98.57
+ UFeats | 96.96 | 96.98 | 96.97 | 97.03
+ AllTags | 95.84 | 95.86 | 95.85 | 95.91
+ Lemmas | 95.54 | 95.56 | 95.55 | 95.61
+ UAS | 89.61 | 89.63 | 89.62 | 89.67
+ LAS | 87.23 | 87.25 | 87.24 | 87.29
+ ELAS | 82.95 | 82.45 | 82.70 | 83.01
+ EULAS | 86.49 | 85.96 | 86.22 | 86.55
+ CLAS | 86.02 | 85.99 | 86.01 | 86.04
+ MLAS | 81.79 | 81.77 | 81.78 | 81.81
+ BLEX | 80.97 | 80.94 | 80.95 | 80.98
sysoutputs/orange_deskin/test_02/pertreebank/et_ewt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/et_ewt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 97.99 | 99.12 | 98.55 |
+ Sentences | 85.60 | 77.29 | 81.23 |
+ Words | 97.99 | 99.12 | 98.55 |
+ UPOS | 94.87 | 95.97 | 95.42 | 96.82
+ XPOS | 95.58 | 96.68 | 96.13 | 97.54
+ UFeats | 93.29 | 94.36 | 93.82 | 95.20
+ AllTags | 91.32 | 92.37 | 91.84 | 93.19
+ Lemmas | 92.86 | 93.92 | 93.39 | 94.76
+ UAS | 83.78 | 84.74 | 84.26 | 85.50
+ LAS | 80.76 | 81.69 | 81.22 | 82.42
+ ELAS | 75.29 | 71.42 | 73.31 | 76.84
+ EULAS | 79.70 | 75.61 | 77.60 | 81.34
+ CLAS | 79.60 | 80.07 | 79.84 | 81.02
+ MLAS | 72.93 | 73.36 | 73.15 | 74.23
+ BLEX | 73.87 | 74.30 | 74.09 | 75.18
sysoutputs/orange_deskin/test_02/pertreebank/fi_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/fi_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.75 | 99.56 | 99.65 |
+ Sentences | 88.86 | 94.10 | 91.40 |
+ Words | 99.73 | 99.55 | 99.64 |
+ UPOS | 98.30 | 98.12 | 98.21 | 98.56
+ XPOS | 0.00 | 0.00 | 0.00 | 0.00
+ UFeats | 97.31 | 97.14 | 97.22 | 97.57
+ AllTags | 0.00 | 0.00 | 0.00 | 0.00
+ Lemmas | 92.34 | 92.17 | 92.26 | 92.59
+ UAS | 92.93 | 92.76 | 92.84 | 93.18
+ LAS | 91.27 | 91.10 | 91.19 | 91.51
+ ELAS | 87.96 | 88.83 | 88.39 | 90.38
+ EULAS | 89.43 | 90.31 | 89.87 | 91.89
+ CLAS | 89.92 | 89.71 | 89.82 | 90.19
+ MLAS | 86.50 | 86.30 | 86.40 | 86.75
+ BLEX | 81.01 | 80.82 | 80.92 | 81.25
sysoutputs/orange_deskin/test_02/pertreebank/fi_tdt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/fi_tdt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.79 | 99.64 | 99.71 |
+ Sentences | 87.93 | 85.27 | 86.58 |
+ Words | 99.78 | 99.63 | 99.70 |
+ UPOS | 98.11 | 97.96 | 98.04 | 98.33
+ XPOS | 98.63 | 98.48 | 98.55 | 98.85
+ UFeats | 96.57 | 96.43 | 96.50 | 96.79
+ AllTags | 95.83 | 95.68 | 95.75 | 96.04
+ Lemmas | 92.09 | 91.95 | 92.02 | 92.29
+ UAS | 91.38 | 91.24 | 91.31 | 91.59
+ LAS | 89.71 | 89.58 | 89.65 | 89.91
+ ELAS | 87.53 | 82.32 | 84.85 | 90.39
+ EULAS | 89.19 | 83.88 | 86.46 | 92.10
+ CLAS | 88.62 | 88.37 | 88.49 | 88.65
+ MLAS | 84.29 | 84.05 | 84.17 | 84.32
+ BLEX | 79.44 | 79.21 | 79.32 | 79.47
sysoutputs/orange_deskin/test_02/pertreebank/fr_fqb-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/fr_fqb-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.49 | 99.44 | 99.47 |
+ Sentences | 93.07 | 96.24 | 94.63 |
+ Words | 99.10 | 99.25 | 99.17 |
+ UPOS | 95.11 | 95.26 | 95.18 | 95.98
+ XPOS | 99.10 | 99.25 | 99.17 | 100.00
+ UFeats | 89.39 | 89.53 | 89.46 | 90.20
+ AllTags | 87.25 | 87.38 | 87.31 | 88.04
+ Lemmas | 95.92 | 96.07 | 95.99 | 96.79
+ UAS | 90.35 | 90.49 | 90.42 | 91.17
+ LAS | 84.66 | 84.79 | 84.72 | 85.43
+ ELAS | 83.15 | 82.56 | 82.86 | 84.25
+ EULAS | 84.56 | 83.96 | 84.26 | 85.68
+ CLAS | 79.38 | 77.03 | 78.19 | 77.67
+ MLAS | 65.64 | 63.71 | 64.66 | 64.24
+ BLEX | 75.13 | 72.91 | 74.00 | 73.52
sysoutputs/orange_deskin/test_02/pertreebank/fr_sequoia-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/fr_sequoia-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.70 | 99.76 | 99.73 |
+ Sentences | 88.81 | 83.55 | 86.10 |
+ Words | 98.77 | 99.28 | 99.03 |
+ UPOS | 97.83 | 98.34 | 98.08 | 99.05
+ XPOS | 98.77 | 99.28 | 99.03 | 100.00
+ UFeats | 97.15 | 97.65 | 97.40 | 98.36
+ AllTags | 96.65 | 97.15 | 96.90 | 97.85
+ Lemmas | 97.57 | 98.08 | 97.83 | 98.79
+ UAS | 92.08 | 92.56 | 92.32 | 93.22
+ LAS | 90.43 | 90.89 | 90.66 | 91.55
+ ELAS | 88.97 | 87.27 | 88.11 | 91.35
+ EULAS | 90.05 | 88.34 | 89.19 | 92.46
+ CLAS | 88.33 | 87.96 | 88.15 | 88.32
+ MLAS | 85.39 | 85.03 | 85.21 | 85.37
+ BLEX | 87.00 | 86.64 | 86.82 | 86.99
sysoutputs/orange_deskin/test_02/pertreebank/it_isdt-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/it_isdt-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.90 | 99.91 | 99.90 |
+ Sentences | 95.70 | 96.89 | 96.29 |
+ Words | 99.81 | 99.84 | 99.82 |
+ UPOS | 98.45 | 98.47 | 98.46 | 98.63
+ XPOS | 98.33 | 98.36 | 98.34 | 98.52
+ UFeats | 98.14 | 98.17 | 98.15 | 98.33
+ AllTags | 97.56 | 97.59 | 97.58 | 97.75
+ Lemmas | 98.48 | 98.51 | 98.50 | 98.67
+ UAS | 94.72 | 94.75 | 94.74 | 94.90
+ LAS | 93.08 | 93.11 | 93.09 | 93.26
+ ELAS | 91.20 | 90.46 | 90.83 | 94.34
+ EULAS | 92.37 | 91.62 | 91.99 | 95.55
+ CLAS | 89.76 | 89.46 | 89.61 | 89.67
+ MLAS | 86.98 | 86.69 | 86.84 | 86.90
+ BLEX | 87.92 | 87.63 | 87.77 | 87.83
sysoutputs/orange_deskin/test_02/pertreebank/lt_alksnis-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/lt_alksnis-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.94 | 99.94 | 99.94 |
+ Sentences | 89.33 | 85.67 | 87.46 |
+ Words | 99.94 | 99.94 | 99.94 |
+ UPOS | 96.57 | 96.57 | 96.57 | 96.63
+ XPOS | 91.05 | 91.05 | 91.05 | 91.11
+ UFeats | 91.67 | 91.67 | 91.67 | 91.73
+ AllTags | 90.13 | 90.13 | 90.13 | 90.18
+ Lemmas | 94.29 | 94.29 | 94.29 | 94.35
+ UAS | 84.91 | 84.91 | 84.91 | 84.96
+ LAS | 81.54 | 81.54 | 81.54 | 81.59
+ ELAS | 78.85 | 73.13 | 75.89 | 80.88
+ EULAS | 80.64 | 74.79 | 77.61 | 82.72
+ CLAS | 79.97 | 79.71 | 79.84 | 79.77
+ MLAS | 70.06 | 69.83 | 69.95 | 69.88
+ BLEX | 74.37 | 74.13 | 74.25 | 74.18
sysoutputs/orange_deskin/test_02/pertreebank/lv_lvtb-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/lv_lvtb-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.40 | 99.28 | 99.34 |
+ Sentences | 98.04 | 98.68 | 98.36 |
+ Words | 99.40 | 99.28 | 99.34 |
+ UPOS | 96.75 | 96.63 | 96.69 | 97.33
+ XPOS | 90.30 | 90.19 | 90.25 | 90.85
+ UFeats | 94.58 | 94.47 | 94.52 | 95.15
+ AllTags | 89.69 | 89.58 | 89.64 | 90.23
+ Lemmas | 96.34 | 96.22 | 96.28 | 96.92
+ UAS | 90.74 | 90.63 | 90.69 | 91.30
+ LAS | 88.01 | 87.91 | 87.96 | 88.55
+ ELAS | 83.96 | 80.34 | 82.11 | 87.32
+ EULAS | 86.42 | 82.69 | 84.51 | 89.87
+ CLAS | 85.91 | 85.79 | 85.85 | 86.68
+ MLAS | 78.50 | 78.39 | 78.45 | 79.21
+ BLEX | 82.66 | 82.54 | 82.60 | 83.40
sysoutputs/orange_deskin/test_02/pertreebank/nl_alpino-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/nl_alpino-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.33 | 99.66 | 99.49 |
+ Sentences | 89.42 | 87.92 | 88.66 |
+ Words | 99.33 | 99.66 | 99.49 |
+ UPOS | 96.93 | 97.25 | 97.09 | 97.58
+ XPOS | 95.74 | 96.05 | 95.90 | 96.38
+ UFeats | 96.96 | 97.28 | 97.12 | 97.61
+ AllTags | 95.17 | 95.48 | 95.33 | 95.81
+ Lemmas | 96.92 | 97.24 | 97.08 | 97.57
+ UAS | 93.05 | 93.36 | 93.20 | 93.68
+ LAS | 90.68 | 90.97 | 90.83 | 91.29
+ ELAS | 88.80 | 87.27 | 88.03 | 91.57
+ EULAS | 90.15 | 88.60 | 89.37 | 92.96
+ CLAS | 87.17 | 87.14 | 87.15 | 87.21
+ MLAS | 82.80 | 82.78 | 82.79 | 82.84
+ BLEX | 84.14 | 84.11 | 84.13 | 84.18
sysoutputs/orange_deskin/test_02/pertreebank/nl_lassysmall-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/nl_lassysmall-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.92 | 99.91 | 99.92 |
+ Sentences | 79.46 | 60.11 | 68.45 |
+ Words | 99.92 | 99.91 | 99.92 |
+ UPOS | 96.43 | 96.42 | 96.42 | 96.50
+ XPOS | 95.16 | 95.15 | 95.16 | 95.24
+ UFeats | 96.25 | 96.24 | 96.25 | 96.33
+ AllTags | 94.44 | 94.43 | 94.43 | 94.51
+ Lemmas | 97.29 | 97.28 | 97.29 | 97.37
+ UAS | 88.05 | 88.04 | 88.04 | 88.12
+ LAS | 85.40 | 85.39 | 85.40 | 85.47
+ ELAS | 82.86 | 81.90 | 82.38 | 85.76
+ EULAS | 84.41 | 83.43 | 83.92 | 87.36
+ CLAS | 79.94 | 79.35 | 79.64 | 79.41
+ MLAS | 75.25 | 74.70 | 74.98 | 74.76
+ BLEX | 77.43 | 76.86 | 77.14 | 76.92
sysoutputs/orange_deskin/test_02/pertreebank/pl_lfg-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/pl_lfg-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.06 | 98.21 | 98.63 |
+ Sentences | 99.48 | 98.96 | 99.22 |
+ Words | 99.86 | 99.91 | 99.89 |
+ UPOS | 98.73 | 98.77 | 98.75 | 98.86
+ XPOS | 94.98 | 95.02 | 95.00 | 95.11
+ UFeats | 92.74 | 92.78 | 92.76 | 92.86
+ AllTags | 89.88 | 89.92 | 89.90 | 90.00
+ Lemmas | 98.52 | 98.57 | 98.54 | 98.66
+ UAS | 95.65 | 95.69 | 95.67 | 95.78
+ LAS | 93.81 | 93.85 | 93.83 | 93.94
+ ELAS | 83.48 | 82.59 | 83.03 | 84.98
+ EULAS | 93.46 | 92.46 | 92.96 | 95.14
+ CLAS | 92.71 | 92.73 | 92.72 | 92.83
+ MLAS | 83.01 | 83.02 | 83.02 | 83.11
+ BLEX | 91.17 | 91.18 | 91.18 | 91.28
sysoutputs/orange_deskin/test_02/pertreebank/pl_pdb-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/pl_pdb-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.30 | 99.60 | 99.45 |
+ Sentences | 95.55 | 95.03 | 95.29 |
+ Words | 99.79 | 99.84 | 99.82 |
+ UPOS | 98.82 | 98.87 | 98.85 | 99.03
+ XPOS | 94.53 | 94.58 | 94.55 | 94.73
+ UFeats | 92.39 | 92.43 | 92.41 | 92.58
+ AllTags | 89.92 | 89.97 | 89.94 | 90.11
+ Lemmas | 98.15 | 98.19 | 98.17 | 98.35
+ UAS | 93.93 | 93.98 | 93.95 | 94.12
+ LAS | 91.78 | 91.83 | 91.81 | 91.97
+ ELAS | 82.26 | 78.26 | 80.21 | 84.55
+ EULAS | 90.56 | 86.15 | 88.30 | 93.07
+ CLAS | 90.30 | 90.27 | 90.28 | 90.42
+ MLAS | 80.21 | 80.18 | 80.19 | 80.32
+ BLEX | 88.34 | 88.31 | 88.33 | 88.47
sysoutputs/orange_deskin/test_02/pertreebank/pl_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/pl_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.47 | 99.73 | 99.60 |
+ Sentences | 94.97 | 96.30 | 95.63 |
+ Words | 99.54 | 99.77 | 99.65 |
+ UPOS | 97.87 | 98.09 | 97.98 | 98.32
+ XPOS | 93.43 | 93.65 | 93.54 | 93.87
+ UFeats | 93.01 | 93.22 | 93.12 | 93.44
+ AllTags | 90.62 | 90.83 | 90.73 | 91.04
+ Lemmas | 97.22 | 97.44 | 97.33 | 97.67
+ UAS | 93.70 | 93.91 | 93.81 | 94.13
+ LAS | 91.33 | 91.54 | 91.43 | 91.75
+ ELAS | 80.72 | 77.13 | 78.88 | 83.08
+ EULAS | 90.13 | 86.11 | 88.08 | 92.76
+ CLAS | 89.59 | 89.67 | 89.63 | 89.95
+ MLAS | 80.92 | 81.00 | 80.96 | 81.25
+ BLEX | 86.89 | 86.97 | 86.93 | 87.24
sysoutputs/orange_deskin/test_02/pertreebank/ru_syntagrus-ud-test-sys.conllu ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02dc2bbabd187989b6cfe22c675fb105c185c5d4e692035f38d4690d28ab888f
+ size 11100718
sysoutputs/orange_deskin/test_02/pertreebank/ru_syntagrus-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.37 | 99.58 | 99.48 |
+ Sentences | 97.85 | 98.01 | 97.93 |
+ Words | 99.37 | 99.58 | 99.48 |
+ UPOS | 98.69 | 98.90 | 98.80 | 99.31
+ XPOS | 99.37 | 99.58 | 99.48 | 100.00
+ UFeats | 97.65 | 97.86 | 97.76 | 98.27
+ AllTags | 97.47 | 97.68 | 97.58 | 98.09
+ Lemmas | 98.15 | 98.36 | 98.26 | 98.77
+ UAS | 94.31 | 94.51 | 94.41 | 94.90
+ LAS | 93.21 | 93.41 | 93.31 | 93.80
+ ELAS | 91.16 | 88.56 | 89.84 | 94.11
+ EULAS | 92.30 | 89.68 | 90.97 | 95.29
+ CLAS | 92.16 | 92.31 | 92.24 | 92.81
+ MLAS | 89.85 | 90.00 | 89.93 | 90.49
+ BLEX | 90.67 | 90.82 | 90.75 | 91.31
sysoutputs/orange_deskin/test_02/pertreebank/sk_snk-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/sk_snk-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.99 | 99.98 | 99.99 |
+ Sentences | 84.62 | 83.51 | 84.06 |
+ Words | 99.99 | 99.98 | 99.99 |
+ UPOS | 97.17 | 97.16 | 97.16 | 97.17
+ XPOS | 88.14 | 88.13 | 88.14 | 88.15
+ UFeats | 92.89 | 92.88 | 92.89 | 92.90
+ AllTags | 87.02 | 87.01 | 87.02 | 87.03
+ Lemmas | 96.63 | 96.62 | 96.63 | 96.64
+ UAS | 91.20 | 91.20 | 91.20 | 91.21
+ LAS | 89.06 | 89.05 | 89.06 | 89.07
+ ELAS | 85.89 | 82.87 | 84.36 | 88.35
+ EULAS | 87.75 | 84.66 | 86.17 | 90.26
+ CLAS | 87.35 | 87.38 | 87.37 | 87.39
+ MLAS | 78.01 | 78.04 | 78.03 | 78.05
+ BLEX | 83.72 | 83.75 | 83.73 | 83.76
sysoutputs/orange_deskin/test_02/pertreebank/sv_pud-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/sv_pud-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.52 | 99.61 | 99.57 |
+ Sentences | 73.36 | 84.00 | 78.32 |
+ Words | 99.52 | 99.61 | 99.57 |
+ UPOS | 96.48 | 96.57 | 96.53 | 96.95
+ XPOS | 93.80 | 93.89 | 93.85 | 94.26
+ UFeats | 80.05 | 80.12 | 80.08 | 80.43
+ AllTags | 78.24 | 78.31 | 78.28 | 78.62
+ Lemmas | 88.61 | 88.69 | 88.65 | 89.04
+ UAS | 85.28 | 85.36 | 85.32 | 85.70
+ LAS | 82.08 | 82.16 | 82.12 | 82.48
+ ELAS | 80.13 | 79.05 | 79.58 | 83.70
+ EULAS | 81.24 | 80.15 | 80.69 | 84.87
+ CLAS | 80.38 | 80.65 | 80.51 | 81.02
+ MLAS | 54.76 | 54.95 | 54.86 | 55.20
+ BLEX | 69.50 | 69.74 | 69.62 | 70.06
sysoutputs/orange_deskin/test_02/pertreebank/sv_talbanken-ud-test-sys.conllu ADDED
The diff for this file is too large to render. See raw diff
 
sysoutputs/orange_deskin/test_02/pertreebank/sv_talbanken-ud-test.eval.log ADDED
@@ -0,0 +1,17 @@
+ Metric | Precision | Recall | F1 Score | AligndAcc
+ -----------+-----------+-----------+-----------+-----------
+ Tokens | 99.85 | 99.89 | 99.87 |
+ Sentences | 96.25 | 96.80 | 96.52 |
+ Words | 99.85 | 99.89 | 99.87 |
+ UPOS | 98.41 | 98.45 | 98.43 | 98.56
+ XPOS | 97.31 | 97.35 | 97.33 | 97.45
+ UFeats | 97.36 | 97.39 | 97.38 | 97.50
+ AllTags | 96.49 | 96.53 | 96.51 | 96.63
+ Lemmas | 97.81 | 97.85 | 97.83 | 97.96
+ UAS | 91.74 | 91.78 | 91.76 | 91.88
+ LAS | 89.45 | 89.48 | 89.47 | 89.58
+ ELAS | 87.59 | 85.82 | 86.70 | 91.36
+ EULAS | 88.69 | 86.90 | 87.78 | 92.50
+ CLAS | 87.98 | 87.63 | 87.80 | 87.75
+ MLAS | 83.68 | 83.34 | 83.51 | 83.46
+ BLEX | 85.46 | 85.11 | 85.28 | 85.23