de-francophones
committed on
Commit • 087a448
Parent(s): db48e2f
8cfd52cf5e5096a383f2e8ad457556d35fcc06d9254565fad01b7f6c551f31fd
This view is limited to 50 files because the commit contains too many changes.
- .gitattributes +2 -0
- README.txt +299 -0
- dev-gold/uk.conllu +0 -0
- dev-gold/uk.nen.conllu +0 -0
- sysoutputs/adapt/test/ar.conllu +0 -0
- sysoutputs/adapt/test/ar.eval.log +17 -0
- sysoutputs/adapt/test/bg.conllu +0 -0
- sysoutputs/adapt/test/bg.eval.log +17 -0
- sysoutputs/adapt/test/cs.conllu +3 -0
- sysoutputs/adapt/test/cs.eval.log +17 -0
- sysoutputs/adapt/test/en.conllu +0 -0
- sysoutputs/adapt/test/en.eval.log +17 -0
- sysoutputs/adapt/test/et.conllu +0 -0
- sysoutputs/adapt/test/et.eval.log +17 -0
- sysoutputs/adapt/test/fi.conllu +0 -0
- sysoutputs/adapt/test/fi.eval.log +17 -0
- sysoutputs/adapt/test/fr.conllu +0 -0
- sysoutputs/adapt/test/fr.eval.log +17 -0
- sysoutputs/adapt/test/it.conllu +0 -0
- sysoutputs/adapt/test/it.eval.log +17 -0
- sysoutputs/adapt/test/lt.conllu +0 -0
- sysoutputs/adapt/test/lt.eval.log +17 -0
- sysoutputs/adapt/test/lv.conllu +0 -0
- sysoutputs/adapt/test/lv.eval.log +17 -0
- sysoutputs/adapt/test/nl.conllu +0 -0
- sysoutputs/adapt/test/nl.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/ar_padt-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/ar_padt-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/bg_btb-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/bg_btb-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/cs_cac-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/cs_cac-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/cs_fictree-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/cs_fictree-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/cs_pdt-ud-test-sys.conllu +3 -0
- sysoutputs/adapt/test/pertreebank/cs_pdt-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/cs_pud-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/cs_pud-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/en_ewt-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/en_ewt-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/en_pud-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/en_pud-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/et_edt-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/et_edt-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/et_ewt-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/et_ewt-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/fi_pud-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/fi_pud-ud-test.eval.log +17 -0
- sysoutputs/adapt/test/pertreebank/fi_tdt-ud-test-sys.conllu +0 -0
- sysoutputs/adapt/test/pertreebank/fi_tdt-ud-test.eval.log +17 -0
.gitattributes
CHANGED
@@ -60,3 +60,5 @@ dev-gold/cs.conllu filter=lfs diff=lfs merge=lfs -text
 dev-gold/cs.nen.conllu filter=lfs diff=lfs merge=lfs -text
 dev-gold/ru.conllu filter=lfs diff=lfs merge=lfs -text
 dev-gold/ru.nen.conllu filter=lfs diff=lfs merge=lfs -text
+sysoutputs/adapt/test/cs.conllu filter=lfs diff=lfs merge=lfs -text
+sysoutputs/adapt/test/pertreebank/cs_pdt-ud-test-sys.conllu filter=lfs diff=lfs merge=lfs -text
README.txt
ADDED
@@ -0,0 +1,299 @@
IWPT 2020 Shared Task on Parsing into Enhanced Universal Dependencies
---------------------------------------------------------------------

This package contains data used in the IWPT 2020 shared task. The package is available from
http://hdl.handle.net/11234/1-3238 (permanent URL).

For more information on the shared task, see the IWPT conference proceedings in the ACL Anthology
https://www.aclweb.org/anthology/sigs/sigparse/
as well as the shared task web site:
https://universaldependencies.org/iwpt20/

The package contains training, development and test (evaluation) datasets as they were used
during the shared task. The data is based on a subset of Universal Dependencies release 2.5
(http://hdl.handle.net/11234/1-3105) but some treebanks contain additional enhanced annotations.
Moreover, not all of these additions became part of Universal Dependencies release 2.6
(http://hdl.handle.net/11234/1-3226), which makes the shared task data unique and worth
a separate release to enable later comparison with new parsing algorithms.


LICENSE
-------

The package is distributed under the same license as Universal Dependencies 2.5. This is
a collective license, i.e., individual treebanks may use different license terms. See
https://lindat.mff.cuni.cz/repository/xmlui/page/licence-UD-2.5?locale-attribute=en
for details.


FILES AND FOLDERS
-----------------

There are 17 languages and 28 treebanks. Some treebanks contain training, development and
test data. Some treebanks contain only the test data. Each treebank has a folder named
UD_Language-TreebankID:

UD_Arabic-PADT
UD_Bulgarian-BTB
UD_Czech-CAC
UD_Czech-FicTree
UD_Czech-PDT
UD_Czech-PUD
UD_Dutch-Alpino
UD_Dutch-LassySmall
UD_English-EWT
UD_English-PUD
UD_Estonian-EDT
UD_Estonian-EWT
UD_Finnish-PUD
UD_Finnish-TDT
UD_French-FQB
UD_French-Sequoia
UD_Italian-ISDT
UD_Latvian-LVTB
UD_Lithuanian-ALKSNIS
UD_Polish-LFG
UD_Polish-PDB
UD_Polish-PUD
UD_Russian-SynTagRus
UD_Slovak-SNK
UD_Swedish-PUD
UD_Swedish-Talbanken
UD_Tamil-TTB
UD_Ukrainian-IU

Each folder contains one or more CoNLL-U files with gold-standard annotation following
the UD guidelines (https://universaldependencies.org/guidelines.html), and one or
more corresponding plain text files (the parsing systems get these files as input and
produce parsed CoNLL-U files on output).

During the shared task, only the training and development portions were distributed in this
form. The test data was distributed blind, i.e., only the plain text input without annotation.
Moreover, test sets from all treebanks of one language were merged in one file and the
participants were not told which part of the text comes from which treebank. In this package,
we add four folders:

test-blind
test-gold
dev-blind
dev-gold

The *-blind folders contain 17 text files each, named using language codes:

ar.txt (Arabic)
bg.txt (Bulgarian)
cs.txt (Czech)
en.txt (English)
et.txt (Estonian)
fi.txt (Finnish)
fr.txt (French)
it.txt (Italian)
lt.txt (Lithuanian)
lv.txt (Latvian)
nl.txt (Dutch)
pl.txt (Polish)
ru.txt (Russian)
sk.txt (Slovak)
sv.txt (Swedish)
ta.txt (Tamil)
uk.txt (Ukrainian)

The *-gold folders contain the corresponding gold-standard annotated files in two flavors:
the real UD-compliant annotation, e.g., ar.conllu, and a file where empty nodes have been
collapsed to make evaluation possible, e.g., ar.nen.conllu (nen = no empty nodes). In addition
to the test files, we also provide the development files in the same form (they were also
available to the participants of the shared task).


FRENCH
------

In addition to enhancements described in the UD v2 guidelines
(https://universaldependencies.org/u/overview/enhanced-syntax.html),
the French data also show neutralized diathesis in the spirit of Candito et al. (2017).
It is not possible to squeeze this information into the dependency labels in a way that would
be both human-readable and valid according to the UD guidelines. Therefore, the French CoNLL-U
files are provided in two flavors: "fulldeps" and "xoxdeps". The former is the intended, human-
readable format where final and canonical grammatical functions are separated by the "@"
character; e.g., "obl:agent@nsubj" means that the dependent is an oblique agent phrase (the
final function) but canonically it corresponds to the subject of the verb in active form (the
canonical function). Such dependency labels do not comply with the current UD guidelines, which
do not allow the "@" character in dependency labels (also, the full labels sometimes contain
more colons ":" than permitted). The labels have thus been transformed, reducing the number of
colons and replacing "@" with "xox", hence the xoxdeps.conllu files. The systems participating
in the shared task worked with the xoxdeps files, as these pass the official validation. However,
the cryptic xoxdeps labels can be easily converted back to the original format, even in the
parser output (provided the parser predicted the label correctly; see the tools below).

TOOLS
-----

The package also contains a number of Perl and Python scripts that have been used to process
the data during preparation and during the shared task. They are in the "tools" folder.

validate.py

The official Universal Dependencies validator. It is a Python 3 script, and it needs the
third-party module regex to be installed on the system (use pip to install it). The script
recognizes several levels of validity; system output in the shared task must be valid on
level 2:

python validate.py --level 2 --lang ud file.conllu
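The third-party regex module mentioned above can be installed with pip, for example:

pip install regex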
enhanced_collapse_empty_nodes.pl

Removes empty nodes from a CoNLL-U file and transforms all paths traversing an empty node
into a long edge with a combined label, e.g., "conj>nsubj". Note that the output is not
valid according to UD guidelines, as the ">" character cannot occur in a normal CoNLL-U
file. After passing validation, system outputs and gold-standard files are processed with
this script and then they can be evaluated (the evaluator cannot work with empty nodes
directly).

perl enhanced_collapse_empty_nodes.pl file.conllu > file.nen.conllu
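For illustration (a constructed example, not taken from the shared task data): if an empty
node 8.1 is attached in DEPS as 3:conj and an overt token depends on it as 8.1:nsubj, the
collapsed file drops node 8.1 and the token's DEPS becomes 3:conj>nsubj, i.e., the path
through the empty node is replaced by a single edge with the combined label.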
iwpt20_xud_eval.py

The official evaluation script in the shared task. It takes valid gold-standard and
system-output files after these have been processed with enhanced_collapse_empty_nodes.pl.
It also requires that the text of the gold standard and system output differ only in
whitespace characters (tokenization) while all non-whitespace characters must be identical
(no normalization is allowed).

python iwpt20_xud_eval.py -v gold.nen.conllu system.nen.conllu

The script can be told that certain types of enhancements should not be evaluated. This
is done with treebanks where some enhancements are not annotated in the gold standard and
we do not want to penalize the system for predicting the enhancement. For example,

--enhancements 156

means that gapping (1), relative clauses (5), and case-enhanced deprels (6) should be
ignored.
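An illustrative call combining the -v usage above with this option (the exact per-treebank
settings used in the official evaluation may differ):

python iwpt20_xud_eval.py -v --enhancements 156 gold.nen.conllu system.nen.conllu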
conllu-quick-fix.pl

A Perl script that tries to make an invalid file valid by filling out UPOS tags if they
are empty, fixing the format of morphological features etc. It can also make sure that
every node in the enhanced graph is reachable from the virtual root node with id 0;
however, this function is optional, as it modifies the system-output dependency structure,
and the algorithm it uses is not optimal in terms of evaluation score.

perl conllu-quick-fix.pl input.conllu > output.conllu
perl conllu-quick-fix.pl --connect-to-root input.conllu > output.conllu

conllu_to_text.pl

This script was used to generate the untokenized text files that serve as the input for
the trained parsing system (the "blind" data). It takes the ISO 639-1 code of the language
of the text (e.g., "--language en") because it works a bit differently with languages that
use Chinese characters. The option was not needed for the present shared task, as there
were no such languages.

perl conllu_to_text.pl --language xx file.conllu > file.txt

mergetest.pl

This script was used to merge the test data (both gold and blind) from multiple treebanks
of one language. The test sets are essentially concatenated, but the script makes sure that
there is a new document mark at the beginning of each gold CoNLL-U file, and between two
text files there is an empty line. This increases the chance that the systems will break
a sentence at this position and it will be possible to separate data from individual treebanks
in the system output. The first argument of the script is the name of the target file, the
remaining arguments are names of the source files (any number, but at least one). By default,
the files are processed as text files. If the target file name ends in ".conllu", the files are
processed as CoNLL-U files.

perl mergetest.pl nl.conllu UD_Dutch-Alpino/nl_alpino-ud-test.conllu UD_Dutch-LassySmall/nl_lassysmall-ud-test.conllu
perl mergetest.pl nl.txt UD_Dutch-Alpino/nl_alpino-ud-test.txt UD_Dutch-LassySmall/nl_lassysmall-ud-test.txt

match_and_split_conllu_by_input_text.pl

This script reverses mergetest.pl, i.e., splits the system output for a language into smaller
files corresponding to individual treebanks. This must be done if we want to evaluate each
treebank with different settings, i.e., ignoring enhancements that are not gold-annotated
in the treebank. The script takes the input text of one treebank and the CoNLL-U file that
starts with annotation of the input text but possibly contains more annotation of other text.
Two CoNLL-U files will be generated: the first one corresponds to the input text, the second
one is the rest. Therefore, if the language consists of more than two treebanks, the script
must be run multiple times. The script can handle sentences that cross the file boundary
(nodes whose parents lie in the other file will be re-attached to some other parent). However,
the script cannot handle situations where a token crosses the file boundary.

perl match_and_split_conllu_by_input_text.pl UD_Dutch-Alpino/nl_alpino-ud-test.txt nl.conllu nl_alpino.conllu nl_rest.conllu

evaluate_all.pl

This script takes one shared task submission (tgz archive with CoNLL-U files for individual
languages), unpacks it, checks whether the files are valid and evaluates them. The script is
provided as-is and it is not ready to be run outside the shared task submission site. Various
paths are hard-coded in the source code! However, the code shows how the evaluation was done
using the other tools described above.

perl evaluate_all.pl team_name submission_id dev|test
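Outside the submission site, the same workflow can be approximated for one language with the
tools described above (an illustrative sequence; file names are placeholders):

python validate.py --level 2 --lang ud system.conllu
perl enhanced_collapse_empty_nodes.pl gold.conllu > gold.nen.conllu
perl enhanced_collapse_empty_nodes.pl system.conllu > system.nen.conllu
python iwpt20_xud_eval.py -v gold.nen.conllu system.nen.conllu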
html_evaluation.pl

This script relies on the same fixed folder structure as evaluate_all.pl, and it, too, has
a path hard-wired in the source code. It scans the evaluation logs produced by evaluate_all.pl
and creates an HTML file with score tables for the submission.

perl html_evaluation.pl team_name submission_id dev|test

expand_edeps_in_french.pl

Takes a CoNLL-U file with the cryptic-but-UD-valid encoding of the French double relations
("xoxdeps") and converts them back to the human-readable form ("fulldeps").

perl expand_edeps_in_french.pl fr.xoxdeps.conllu > fr.fulldeps.conllu
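For illustration only, a minimal Python sketch of the core idea (this is not the distributed
tool; the real script also restores the colons that were reduced during the encoding, which
this sketch does not attempt):

import sys

# Undo the "xox" marker in the DEPS column (field 9) of a CoNLL-U file read from stdin.
for line in sys.stdin:
    fields = line.rstrip("\n").split("\t")
    if len(fields) == 10 and not fields[0].startswith("#"):
        fields[8] = fields[8].replace("xox", "@")
    print("\t".join(fields))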
enhanced_graph_properties.pl

Reads a CoNLL-U file and collects statistics about the enhanced graphs found in the DEPS
column. Some of the statistics target abstract graph-theoretical properties, others target
properties specific to Enhanced Universal Dependencies. The script also tries to collect
clues about individual enhancement types defined in UD (thus assessing whether the
enhancement is annotated in the given dataset).

perl enhanced_graph_properties.pl file.conllu
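As a minimal illustration of the kind of statistics gathered (a simplified Python sketch, not
the Perl tool itself), one can count how often each enhanced relation label occurs in DEPS:

import sys
from collections import Counter

counts = Counter()
with open(sys.argv[1], encoding="utf-8") as f:
    for line in f:
        fields = line.rstrip("\n").split("\t")
        # Only 10-column token/node lines carry a DEPS field (column 9).
        if line.startswith("#") or len(fields) != 10 or fields[8] == "_":
            continue
        for edge in fields[8].split("|"):
            head, _, rel = edge.partition(":")
            counts[rel] += 1

for rel, n in counts.most_common():
    print(n, rel, sep="\t")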

SYSTEM OUTPUTS
--------------

The folder "sysoutputs" contains the official primary submission of each team participating
in the shared task. The folder names are the lowercased team names as used at submission
time; they slightly differ from the spelling used in the shared task overview paper:

TurkuNLP ....... turkunlp
Orange ......... orange_deskin
Emory NLP ...... emorynlp
FASTPARSE ...... fastparse
UNIPI .......... unipi
ShanghaiTech ... shanghaitech_alibaba
CLASP .......... clasp
ADAPT .......... adapt
Køpsala ........ koebsala
RobertNLP ...... robertnlp

In addition, there are three baseline submissions generated by the shared task organizers
and described on the shared task website and in the overview paper:

baseline1 ... gold basic trees copied as enhanced graphs
baseline2 ... UDPipe-predicted basic trees copied as enhanced graphs
baseline3 ... UDPipe-predicted basic trees, then Stanford Enhancer created enhanced graphs from them


REFERENCES
----------

Gosse Bouma, Djamé Seddah, Daniel Zeman (2020).
Overview of the IWPT 2020 Shared Task on Parsing into Enhanced Universal Dependencies.
In Proceedings of the 16th International Conference on Parsing Technologies and the IWPT 2020
Shared Task on Parsing into Enhanced Universal Dependencies, Seattle, WA, USA.
ISBN 978-1-952148-11-8.

Marie Candito, Bruno Guillaume, Guy Perrier, Djamé Seddah (2017).
Enhanced UD Dependencies with Neutralized Diathesis Alternation.
In Proceedings of the Fourth International Conference on Dependency Linguistics (Depling 2017),
pages 42–53, Pisa, Italy, September 18–20, 2017.
dev-gold/uk.conllu
ADDED
The diff for this file is too large to render.
dev-gold/uk.nen.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/ar.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/ar.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.98 | 99.98 | 99.98 |
Sentences | 76.20 | 88.97 | 82.09 |
Words | 95.76 | 93.42 | 94.58 |
UPOS | 92.88 | 90.60 | 91.73 | 96.99
XPOS | 90.26 | 88.05 | 89.14 | 94.25
UFeats | 90.39 | 88.18 | 89.27 | 94.39
AllTags | 90.00 | 87.80 | 88.89 | 93.99
Lemmas | 91.66 | 89.42 | 90.53 | 95.72
UAS | 79.46 | 77.52 | 78.48 | 82.98
LAS | 75.55 | 73.70 | 74.61 | 78.89
ELAS | 48.76 | 69.13 | 57.19 | 78.62
EULAS | 50.11 | 71.03 | 58.76 | 80.78
CLAS | 71.42 | 71.44 | 71.43 | 75.70
MLAS | 65.99 | 66.01 | 66.00 | 69.95
BLEX | 68.10 | 68.12 | 68.11 | 72.19
sysoutputs/adapt/test/bg.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/bg.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.94 | 99.88 | 99.91 |
Sentences | 94.33 | 94.00 | 94.17 |
Words | 99.94 | 99.88 | 99.91 |
UPOS | 99.16 | 99.10 | 99.13 | 99.22
XPOS | 97.37 | 97.31 | 97.34 | 97.43
UFeats | 98.05 | 97.99 | 98.02 | 98.11
AllTags | 96.88 | 96.82 | 96.85 | 96.94
Lemmas | 98.18 | 98.12 | 98.15 | 98.24
UAS | 93.83 | 93.78 | 93.81 | 93.89
LAS | 91.22 | 91.17 | 91.19 | 91.28
ELAS | 67.92 | 89.65 | 77.29 | 92.32
EULAS | 68.50 | 90.42 | 77.95 | 93.11
CLAS | 88.56 | 88.29 | 88.43 | 88.36
MLAS | 85.49 | 85.23 | 85.36 | 85.29
BLEX | 86.19 | 85.93 | 86.06 | 85.99
sysoutputs/adapt/test/cs.conllu
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b14d3f903e51a7cc907ddbf471f3865e9a76c94bf5f48f3650ed9256ee23ad91
size 24150051
sysoutputs/adapt/test/cs.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 98.85 | 98.54 | 98.70 |
Sentences | 66.72 | 75.84 | 70.99 |
Words | 98.85 | 98.54 | 98.70 |
UPOS | 96.17 | 95.87 | 96.02 | 97.29
XPOS | 90.22 | 89.93 | 90.07 | 91.26
UFeats | 90.70 | 90.42 | 90.56 | 91.76
AllTags | 88.82 | 88.54 | 88.68 | 89.85
Lemmas | 96.30 | 96.00 | 96.15 | 97.42
UAS | 86.50 | 86.23 | 86.37 | 87.51
LAS | 83.60 | 83.34 | 83.47 | 84.57
ELAS | 57.12 | 79.30 | 66.41 | 86.71
EULAS | 58.62 | 81.39 | 68.15 | 88.99
CLAS | 81.84 | 82.75 | 82.29 | 83.82
MLAS | 73.30 | 74.11 | 73.70 | 75.07
BLEX | 79.64 | 80.52 | 80.07 | 81.56
sysoutputs/adapt/test/en.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/en.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.23 | 99.20 | 99.22 |
Sentences | 87.96 | 80.05 | 83.82 |
Words | 99.23 | 99.20 | 99.22 |
UPOS | 96.00 | 95.97 | 95.98 | 96.74
XPOS | 95.30 | 95.28 | 95.29 | 96.05
UFeats | 96.06 | 96.03 | 96.04 | 96.80
AllTags | 93.51 | 93.48 | 93.50 | 94.24
Lemmas | 96.81 | 96.78 | 96.79 | 97.56
UAS | 88.04 | 88.02 | 88.03 | 88.73
LAS | 85.69 | 85.67 | 85.68 | 86.35
ELAS | 60.02 | 85.23 | 70.44 | 89.70
EULAS | 60.51 | 85.93 | 71.01 | 90.44
CLAS | 83.19 | 82.86 | 83.02 | 83.65
MLAS | 77.58 | 77.27 | 77.42 | 78.01
BLEX | 80.73 | 80.41 | 80.57 | 81.17
sysoutputs/adapt/test/et.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/et.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.61 | 99.80 | 99.70 |
Sentences | 92.14 | 87.93 | 89.98 |
Words | 99.61 | 99.80 | 99.70 |
UPOS | 96.97 | 97.16 | 97.07 | 97.36
XPOS | 97.67 | 97.86 | 97.77 | 98.06
UFeats | 95.71 | 95.90 | 95.80 | 96.09
AllTags | 94.35 | 94.53 | 94.44 | 94.72
Lemmas | 95.03 | 95.22 | 95.12 | 95.41
UAS | 86.69 | 86.86 | 86.78 | 87.03
LAS | 83.93 | 84.09 | 84.01 | 84.26
ELAS | 56.39 | 66.72 | 61.12 | 67.98
EULAS | 69.50 | 82.23 | 75.33 | 83.78
CLAS | 82.60 | 82.59 | 82.60 | 82.79
MLAS | 77.65 | 77.64 | 77.65 | 77.83
BLEX | 77.64 | 77.63 | 77.64 | 77.82
sysoutputs/adapt/test/fi.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/fi.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.75 | 99.64 | 99.70 |
Sentences | 87.61 | 89.71 | 88.65 |
Words | 99.74 | 99.63 | 99.68 |
UPOS | 97.97 | 97.86 | 97.91 | 98.22
XPOS | 56.21 | 56.14 | 56.17 | 56.35
UFeats | 96.71 | 96.60 | 96.65 | 96.96
AllTags | 54.59 | 54.53 | 54.56 | 54.73
Lemmas | 92.54 | 92.43 | 92.49 | 92.78
UAS | 90.95 | 90.85 | 90.90 | 91.19
LAS | 89.08 | 88.98 | 89.03 | 89.31
ELAS | 62.41 | 86.11 | 72.37 | 91.63
EULAS | 63.40 | 87.47 | 73.51 | 93.07
CLAS | 87.87 | 87.66 | 87.76 | 88.00
MLAS | 83.87 | 83.68 | 83.77 | 84.00
BLEX | 79.55 | 79.37 | 79.46 | 79.68
sysoutputs/adapt/test/fr.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/fr.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.63 | 99.67 | 99.65 |
Sentences | 93.52 | 95.19 | 94.35 |
Words | 99.08 | 99.39 | 99.23 |
UPOS | 95.03 | 95.33 | 95.18 | 95.91
XPOS | 74.39 | 74.62 | 74.50 | 75.08
UFeats | 76.80 | 77.04 | 76.92 | 77.51
AllTags | 55.36 | 55.54 | 55.45 | 55.88
Lemmas | 96.44 | 96.74 | 96.59 | 97.33
UAS | 90.86 | 91.14 | 91.00 | 91.70
LAS | 86.42 | 86.69 | 86.55 | 87.22
ELAS | 67.36 | 83.93 | 74.74 | 86.50
EULAS | 68.44 | 85.28 | 75.93 | 87.88
CLAS | 83.05 | 81.74 | 82.39 | 82.18
MLAS | 50.56 | 49.76 | 50.16 | 50.03
BLEX | 79.73 | 78.47 | 79.09 | 78.89
sysoutputs/adapt/test/it.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/it.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.91 | 99.95 | 99.93 |
Sentences | 98.76 | 98.76 | 98.76 |
Words | 99.82 | 99.87 | 99.84 |
UPOS | 98.43 | 98.47 | 98.45 | 98.61
XPOS | 98.29 | 98.34 | 98.32 | 98.47
UFeats | 98.25 | 98.30 | 98.28 | 98.43
AllTags | 97.62 | 97.67 | 97.64 | 97.80
Lemmas | 98.66 | 98.70 | 98.68 | 98.84
UAS | 93.88 | 93.92 | 93.90 | 94.05
LAS | 92.18 | 92.22 | 92.20 | 92.35
ELAS | 59.79 | 90.42 | 71.98 | 94.27
EULAS | 60.47 | 91.44 | 72.80 | 95.34
CLAS | 88.67 | 88.39 | 88.53 | 88.54
MLAS | 86.07 | 85.80 | 85.93 | 85.95
BLEX | 87.22 | 86.95 | 87.08 | 87.10
sysoutputs/adapt/test/lt.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/lt.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.91 | 99.91 | 99.91 |
Sentences | 90.03 | 85.82 | 87.87 |
Words | 99.91 | 99.91 | 99.91 |
UPOS | 93.92 | 93.92 | 93.92 | 94.01
XPOS | 85.97 | 85.97 | 85.97 | 86.05
UFeats | 87.01 | 87.01 | 87.01 | 87.09
AllTags | 84.74 | 84.74 | 84.74 | 84.82
Lemmas | 92.26 | 92.26 | 92.26 | 92.35
UAS | 76.72 | 76.72 | 76.72 | 76.79
LAS | 72.10 | 72.10 | 72.10 | 72.17
ELAS | 50.64 | 68.84 | 58.36 | 76.16
EULAS | 52.39 | 71.21 | 60.37 | 78.78
CLAS | 70.06 | 69.68 | 69.87 | 69.73
MLAS | 58.55 | 58.23 | 58.39 | 58.27
BLEX | 64.49 | 64.14 | 64.32 | 64.19
sysoutputs/adapt/test/lv.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/lv.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.40 | 99.26 | 99.33 |
Sentences | 98.42 | 99.07 | 98.74 |
Words | 99.40 | 99.26 | 99.33 |
UPOS | 96.57 | 96.44 | 96.50 | 97.16
XPOS | 90.09 | 89.96 | 90.03 | 90.63
UFeats | 94.25 | 94.12 | 94.18 | 94.82
AllTags | 89.51 | 89.38 | 89.45 | 90.05
Lemmas | 96.19 | 96.06 | 96.13 | 96.77
UAS | 89.39 | 89.26 | 89.32 | 89.93
LAS | 86.51 | 86.39 | 86.45 | 87.03
ELAS | 65.46 | 81.03 | 72.41 | 88.07
EULAS | 66.01 | 81.72 | 73.03 | 88.82
CLAS | 84.54 | 84.26 | 84.40 | 85.15
MLAS | 77.03 | 76.78 | 76.90 | 77.59
BLEX | 81.27 | 81.00 | 81.13 | 81.85
sysoutputs/adapt/test/nl.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/nl.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.76 | 99.73 | 99.74 |
Sentences | 78.77 | 61.79 | 69.26 |
Words | 99.76 | 99.73 | 99.74 |
UPOS | 96.99 | 96.96 | 96.97 | 97.22
XPOS | 95.82 | 95.79 | 95.81 | 96.06
UFeats | 96.89 | 96.86 | 96.87 | 97.13
AllTags | 95.25 | 95.22 | 95.23 | 95.48
Lemmas | 97.31 | 97.28 | 97.30 | 97.55
UAS | 88.17 | 88.14 | 88.16 | 88.39
LAS | 85.71 | 85.68 | 85.70 | 85.92
ELAS | 57.39 | 82.43 | 67.67 | 86.45
EULAS | 57.97 | 83.27 | 68.36 | 87.33
CLAS | 80.34 | 80.02 | 80.18 | 80.24
MLAS | 76.26 | 75.96 | 76.11 | 76.17
BLEX | 77.78 | 77.47 | 77.63 | 77.69
sysoutputs/adapt/test/pertreebank/ar_padt-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/ar_padt-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.98 | 99.98 | 99.98 |
Sentences | 76.20 | 88.97 | 82.09 |
Words | 95.76 | 93.42 | 94.58 |
UPOS | 92.88 | 90.60 | 91.73 | 96.99
XPOS | 90.26 | 88.05 | 89.14 | 94.25
UFeats | 90.39 | 88.18 | 89.27 | 94.39
AllTags | 90.00 | 87.80 | 88.89 | 93.99
Lemmas | 91.66 | 89.42 | 90.53 | 95.72
UAS | 79.46 | 77.52 | 78.48 | 82.98
LAS | 75.55 | 73.70 | 74.61 | 78.89
ELAS | 48.77 | 69.13 | 57.19 | 78.59
EULAS | 50.11 | 71.03 | 58.77 | 80.76
CLAS | 71.42 | 71.44 | 71.43 | 75.70
MLAS | 65.99 | 66.01 | 66.00 | 69.95
BLEX | 68.10 | 68.12 | 68.11 | 72.19
sysoutputs/adapt/test/pertreebank/bg_btb-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/bg_btb-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.94 | 99.88 | 99.91 |
Sentences | 94.33 | 94.00 | 94.17 |
Words | 99.94 | 99.88 | 99.91 |
UPOS | 99.16 | 99.10 | 99.13 | 99.22
XPOS | 97.37 | 97.31 | 97.34 | 97.43
UFeats | 98.05 | 97.99 | 98.02 | 98.11
AllTags | 96.88 | 96.82 | 96.85 | 96.94
Lemmas | 98.18 | 98.12 | 98.15 | 98.24
UAS | 93.83 | 93.78 | 93.81 | 93.89
LAS | 91.22 | 91.17 | 91.19 | 91.28
ELAS | 67.92 | 89.65 | 77.29 | 92.32
EULAS | 68.50 | 90.42 | 77.95 | 93.11
CLAS | 88.56 | 88.29 | 88.43 | 88.36
MLAS | 85.49 | 85.23 | 85.36 | 85.29
BLEX | 86.19 | 85.93 | 86.06 | 85.99
sysoutputs/adapt/test/pertreebank/cs_cac-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/cs_cac-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.99 | 99.98 | 99.99 |
Sentences | 99.68 | 99.84 | 99.76 |
Words | 99.98 | 99.97 | 99.98 |
UPOS | 99.42 | 99.41 | 99.42 | 99.44
XPOS | 96.79 | 96.78 | 96.78 | 96.80
UFeats | 96.53 | 96.52 | 96.52 | 96.55
AllTags | 96.19 | 96.18 | 96.18 | 96.21
Lemmas | 98.77 | 98.76 | 98.76 | 98.78
UAS | 93.34 | 93.33 | 93.34 | 93.36
LAS | 91.43 | 91.42 | 91.42 | 91.44
ELAS | 61.65 | 85.73 | 71.72 | 95.97
EULAS | 62.99 | 87.59 | 73.28 | 98.05
CLAS | 89.64 | 89.72 | 89.68 | 89.75
MLAS | 85.37 | 85.45 | 85.41 | 85.48
BLEX | 88.26 | 88.34 | 88.30 | 88.37
sysoutputs/adapt/test/pertreebank/cs_fictree-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/cs_fictree-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.85 | 99.78 | 99.82 |
Sentences | 54.83 | 61.50 | 57.98 |
Words | 99.82 | 99.74 | 99.78 |
UPOS | 96.31 | 96.23 | 96.27 | 96.48
XPOS | 76.37 | 76.31 | 76.34 | 76.51
UFeats | 79.45 | 79.39 | 79.42 | 79.59
AllTags | 72.30 | 72.24 | 72.27 | 72.43
Lemmas | 96.90 | 96.82 | 96.86 | 97.07
UAS | 87.95 | 87.88 | 87.91 | 88.10
LAS | 84.96 | 84.90 | 84.93 | 85.12
ELAS | 61.96 | 80.54 | 70.04 | 87.90
EULAS | 63.62 | 82.69 | 71.91 | 90.25
CLAS | 82.57 | 85.11 | 83.82 | 85.23
MLAS | 54.81 | 56.50 | 55.64 | 56.58
BLEX | 79.42 | 81.86 | 80.62 | 81.97
sysoutputs/adapt/test/pertreebank/cs_pdt-ud-test-sys.conllu
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ddeda748191f9595c1e1ee64047f7b415c72a128b0ccb4a3fdd07650123b7ad0
size 19126488
sysoutputs/adapt/test/pertreebank/cs_pdt-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 98.71 | 98.39 | 98.55 |
Sentences | 64.99 | 74.67 | 69.49 |
Words | 98.72 | 98.40 | 98.56 |
UPOS | 96.11 | 95.80 | 95.95 | 97.36
XPOS | 91.19 | 90.90 | 91.05 | 92.38
UFeats | 91.45 | 91.16 | 91.31 | 92.64
AllTags | 90.03 | 89.74 | 89.89 | 91.20
Lemmas | 96.18 | 95.87 | 96.02 | 97.43
UAS | 85.95 | 85.67 | 85.81 | 87.06
LAS | 83.18 | 82.91 | 83.05 | 84.27
ELAS | 56.63 | 78.91 | 65.94 | 86.26
EULAS | 58.11 | 80.98 | 67.67 | 88.52
CLAS | 81.65 | 82.45 | 82.05 | 83.65
MLAS | 74.51 | 75.25 | 74.88 | 76.34
BLEX | 79.49 | 80.27 | 79.88 | 81.44
sysoutputs/adapt/test/pertreebank/cs_pud-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/cs_pud-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 98.59 | 97.97 | 98.28 |
Sentences | 81.70 | 91.10 | 86.15 |
Words | 98.59 | 97.97 | 98.28 |
UPOS | 94.73 | 94.14 | 94.44 | 96.09
XPOS | 89.70 | 89.15 | 89.42 | 90.99
UFeats | 90.42 | 89.86 | 90.14 | 91.72
AllTags | 88.07 | 87.52 | 87.80 | 89.33
Lemmas | 95.47 | 94.88 | 95.18 | 96.84
UAS | 86.41 | 85.87 | 86.14 | 87.64
LAS | 81.71 | 81.20 | 81.46 | 82.88
ELAS | 54.81 | 77.88 | 64.34 | 84.29
EULAS | 56.47 | 80.24 | 66.29 | 86.84
CLAS | 78.49 | 79.44 | 78.96 | 80.66
MLAS | 70.34 | 71.19 | 70.77 | 72.29
BLEX | 76.14 | 77.07 | 76.60 | 78.25
sysoutputs/adapt/test/pertreebank/en_ewt-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/en_ewt-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 98.93 | 98.86 | 98.90 |
Sentences | 84.04 | 71.74 | 77.40 |
Words | 98.93 | 98.86 | 98.90 |
UPOS | 95.90 | 95.83 | 95.87 | 96.94
XPOS | 95.62 | 95.55 | 95.58 | 96.65
UFeats | 96.35 | 96.28 | 96.32 | 97.39
AllTags | 94.48 | 94.41 | 94.44 | 95.50
Lemmas | 97.40 | 97.33 | 97.37 | 98.46
UAS | 86.75 | 86.69 | 86.72 | 87.69
LAS | 84.45 | 84.39 | 84.42 | 85.36
ELAS | 61.00 | 83.79 | 70.61 | 88.25
EULAS | 61.43 | 84.38 | 71.10 | 88.87
CLAS | 81.48 | 81.25 | 81.37 | 82.33
MLAS | 77.46 | 77.24 | 77.35 | 78.27
BLEX | 80.08 | 79.86 | 79.97 | 80.91
sysoutputs/adapt/test/pertreebank/en_pud-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/en_pud-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.58 | 99.61 | 99.59 |
Sentences | 94.74 | 97.30 | 96.00 |
Words | 99.58 | 99.61 | 99.59 |
UPOS | 96.11 | 96.14 | 96.12 | 96.52
XPOS | 94.93 | 94.96 | 94.95 | 95.33
UFeats | 95.71 | 95.74 | 95.72 | 96.11
AllTags | 92.36 | 92.39 | 92.37 | 92.75
Lemmas | 96.10 | 96.13 | 96.11 | 96.51
UAS | 89.58 | 89.60 | 89.59 | 89.95
LAS | 87.15 | 87.18 | 87.17 | 87.52
ELAS | 58.93 | 86.93 | 70.25 | 91.41
EULAS | 59.50 | 87.76 | 70.92 | 92.28
CLAS | 85.34 | 84.87 | 85.10 | 85.29
MLAS | 77.72 | 77.30 | 77.51 | 77.68
BLEX | 81.54 | 81.10 | 81.32 | 81.50
sysoutputs/adapt/test/pertreebank/et_edt-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/et_edt-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.96 | 99.96 | 99.96 |
Sentences | 93.40 | 89.79 | 91.56 |
Words | 99.96 | 99.96 | 99.96 |
UPOS | 97.58 | 97.58 | 97.58 | 97.62
XPOS | 98.24 | 98.23 | 98.24 | 98.28
UFeats | 96.36 | 96.36 | 96.36 | 96.40
AllTags | 95.16 | 95.15 | 95.15 | 95.19
Lemmas | 95.50 | 95.50 | 95.50 | 95.54
UAS | 87.92 | 87.92 | 87.92 | 87.95
LAS | 85.26 | 85.26 | 85.26 | 85.29
ELAS | 57.32 | 68.22 | 62.29 | 68.69
EULAS | 70.78 | 84.25 | 76.93 | 84.82
CLAS | 83.89 | 83.76 | 83.83 | 83.80
MLAS | 79.28 | 79.16 | 79.22 | 79.19
BLEX | 78.99 | 78.87 | 78.93 | 78.90
sysoutputs/adapt/test/pertreebank/et_ewt-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/et_ewt-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 97.92 | 99.04 | 98.48 |
Sentences | 84.34 | 76.92 | 80.46 |
Words | 97.92 | 99.04 | 98.48 |
UPOS | 94.07 | 95.14 | 94.60 | 96.07
XPOS | 94.99 | 96.07 | 95.53 | 97.01
UFeats | 92.62 | 93.68 | 93.14 | 94.59
AllTags | 90.51 | 91.54 | 91.03 | 92.43
Lemmas | 92.80 | 93.85 | 93.32 | 94.77
UAS | 80.86 | 81.78 | 81.32 | 82.57
LAS | 77.62 | 78.51 | 78.06 | 79.27
ELAS | 52.02 | 59.93 | 55.70 | 64.53
EULAS | 63.44 | 73.10 | 67.93 | 78.70
CLAS | 76.35 | 76.88 | 76.62 | 77.82
MLAS | 69.77 | 70.26 | 70.02 | 71.12
BLEX | 71.10 | 71.60 | 71.35 | 72.47
sysoutputs/adapt/test/pertreebank/fi_pud-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/fi_pud-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.76 | 99.58 | 99.67 |
Sentences | 85.30 | 92.30 | 88.66 |
Words | 99.75 | 99.58 | 99.66 |
UPOS | 98.21 | 98.04 | 98.12 | 98.46
XPOS | 0.00 | 0.00 | 0.00 | 0.00
UFeats | 97.16 | 97.00 | 97.08 | 97.41
AllTags | 0.00 | 0.00 | 0.00 | 0.00
Lemmas | 92.65 | 92.49 | 92.57 | 92.89
UAS | 91.76 | 91.61 | 91.69 | 92.00
LAS | 89.93 | 89.78 | 89.86 | 90.16
ELAS | 60.49 | 87.66 | 71.58 | 89.18
EULAS | 61.50 | 89.12 | 72.78 | 90.66
CLAS | 88.57 | 88.34 | 88.45 | 88.79
MLAS | 84.90 | 84.67 | 84.78 | 85.10
BLEX | 80.33 | 80.12 | 80.22 | 80.53
sysoutputs/adapt/test/pertreebank/fi_tdt-ud-test-sys.conllu
ADDED
The diff for this file is too large to render.
sysoutputs/adapt/test/pertreebank/fi_tdt-ud-test.eval.log
ADDED
@@ -0,0 +1,17 @@
Metric | Precision | Recall | F1 Score | AligndAcc
-----------+-----------+-----------+-----------+-----------
Tokens | 99.75 | 99.68 | 99.71 |
Sentences | 89.24 | 88.04 | 88.64 |
Words | 99.74 | 99.66 | 99.70 |
UPOS | 97.79 | 97.72 | 97.75 | 98.05
XPOS | 98.35 | 98.27 | 98.31 | 98.60
UFeats | 96.37 | 96.30 | 96.33 | 96.62
AllTags | 95.52 | 95.45 | 95.48 | 95.77
Lemmas | 92.46 | 92.39 | 92.42 | 92.70
UAS | 90.34 | 90.28 | 90.31 | 90.58
LAS | 88.44 | 88.37 | 88.41 | 88.67
ELAS | 63.97 | 85.03 | 73.02 | 93.33
EULAS | 64.94 | 86.32 | 74.12 | 94.74
CLAS | 87.32 | 87.15 | 87.24 | 87.40
MLAS | 83.08 | 82.92 | 83.00 | 83.16
BLEX | 78.96 | 78.80 | 78.88 | 79.03