Update README.md #5
by steveheh - opened

README.md CHANGED
@@ -79,7 +79,7 @@ model-index:
       args:
         language: en
     metrics:
-    - name: Test WER
+    - name: Test WER (En)
       type: wer
       value: 7.97
   - task:
@@ -93,7 +93,7 @@ model-index:
       args:
         language: de
     metrics:
-    - name: Test WER
+    - name: Test WER (De)
       type: wer
       value: 4.61
   - task:
@@ -107,7 +107,7 @@ model-index:
       args:
         language: es
     metrics:
-    - name: Test WER
+    - name: Test WER (ES)
       type: wer
       value: 3.99
   - task:
@@ -121,7 +121,7 @@ model-index:
       args:
         language: fr
     metrics:
-    - name: Test WER
+    - name: Test WER (Fr)
       type: wer
       value: 6.53
   - task:
@@ -135,9 +135,122 @@ model-index:
       args:
         language: en-de
     metrics:
-    - name: Test BLEU
+    - name: Test BLEU (En->De)
       type: bleu
       value: 22.66
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: FLEURS
+      type: google/fleurs
+      config: en_us
+      split: test
+      args:
+        language: en-de
+    metrics:
+    - name: Test BLEU (En->Es)
+      type: bleu
+      value: 41.11
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: FLEURS
+      type: google/fleurs
+      config: en_us
+      split: test
+      args:
+        language: en-de
+    metrics:
+    - name: Test BLEU (En->Fr)
+      type: bleu
+      value: 40.76
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: FLEURS
+      type: google/fleurs
+      config: de_de
+      split: test
+      args:
+        language: de-en
+    metrics:
+    - name: Test BLEU (De->En)
+      type: bleu
+      value: 32.64
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: FLEURS
+      type: google/fleurs
+      config: es_419
+      split: test
+      args:
+        language: es-en
+    metrics:
+    - name: Test BLEU (Es->En)
+      type: bleu
+      value: 32.15
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: FLEURS
+      type: google/fleurs
+      config: fr_fr
+      split: test
+      args:
+        language: fr-en
+    metrics:
+    - name: Test BLEU (Fr->En)
+      type: bleu
+      value: 23.57
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: COVOST
+      type: covost2
+      config: de_de
+      split: test
+      args:
+        language: de-en
+    metrics:
+    - name: Test BLEU (De->En)
+      type: bleu
+      value: 37.67
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: COVOST
+      type: covost2
+      config: es_419
+      split: test
+      args:
+        language: es-en
+    metrics:
+    - name: Test BLEU (Es->En)
+      type: bleu
+      value: 40.7
+  - task:
+      type: Automatic Speech Translation
+      name: automatic-speech-translation
+    dataset:
+      name: COVOST
+      type: covost2
+      config: fr_fr
+      split: test
+      args:
+        language: fr-en
+    metrics:
+    - name: Test BLEU (Fr->En)
+      type: bleu
+      value: 40.42
+
 metrics:
 - wer
 - bleu
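As a quick sanity check on the updated metadata, the `model-index` block can be parsed back out of the card with `huggingface_hub`. The snippet below is a minimal sketch, assuming the edited README.md from this PR is saved locally; the file path and the printed column widths are illustrative, not part of the PR.

```python
# Minimal sketch: read the eval results back out of the edited model card.
# Assumes the updated README.md from this PR is available locally (path is illustrative).
from huggingface_hub import ModelCard

card = ModelCard.load("README.md")  # also accepts a Hub repo id

# Each entry under model-index -> results is exposed as an EvalResult.
for result in card.data.eval_results or []:
    print(
        f"{result.task_type:35s} "
        f"{result.dataset_name or result.dataset_type:10s} "
        f"{result.dataset_config or '-':8s} "
        f"{result.metric_name or result.metric_type:25s} "
        f"{result.metric_value}"
    )
```

Running this against the card after the change should list one row per WER entry and one per BLEU entry, including the eight FLEURS/COVOST translation results added above, which makes it easy to spot duplicated names or mismatched dataset configs before merging.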