karimouda committed
Commit 8227d44
1 Parent(s): 67b5d22

Update README.md


Adding MTEB results
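For context, model-index entries like the ones added in this diff are normally produced by running the MTEB benchmark on the embedding model and exporting the saved results into the README front matter. Below is a minimal sketch of that workflow, assuming the `mteb` and `sentence-transformers` packages and the task selection visible in the diff; the exact command used for this commit is not recorded here, so treat the task list and output paths as illustrative.

```python
# Sketch: evaluate the model on the MTEB tasks that appear in this diff and
# save the per-task JSON results. Assumes `pip install mteb sentence-transformers`.
import mteb
from sentence_transformers import SentenceTransformer

model_name = "silma-ai/silma-embeddding-sts-0.1"
model = SentenceTransformer(model_name)

# Task names taken from the model-index entries below (assumed selection).
tasks = mteb.get_tasks(tasks=[
    "MassiveIntentClassification",
    "MassiveScenarioClassification",
    "STS17",
    "STS22.v2",
])

evaluation = mteb.MTEB(tasks=tasks)
results = evaluation.run(model, output_folder=f"results/{model_name}")
```

The `model-index:` YAML shown in the diff can then be generated from the saved results and pasted into README.md, e.g. with MTEB's `mteb create_meta` CLI pointed at the results folder.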

Files changed (1)
  1. README.md +535 -61
README.md CHANGED
@@ -17,77 +17,551 @@ tags:
  - feature-extraction
  - generated_from_trainer
  - loss:CosineSimilarityLoss
  model-index:
- - name: SentenceTransformer based on silma-ai/silma-embeddding-matryoshka-0.1
    results:
-   - task:
-       type: semantic-similarity
-       name: Semantic Similarity
-     dataset:
-       config: ar-ar
-       name: MTEB STS17 (ar-ar)
-       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
        split: test
-       type: mteb/sts17-crosslingual-sts
      metrics:
-     - type: pearson_cosine
-       value: 0.8515496450525244
-       name: Pearson Cosine
-     - type: spearman_cosine
-       value: 0.8558624740720275
-       name: Spearman Cosine
-     - type: pearson_manhattan
-       value: 0.821963706969713
-       name: Pearson Manhattan
-     - type: spearman_manhattan
-       value: 0.8396900657477299
-       name: Spearman Manhattan
-     - type: pearson_euclidean
-       value: 0.8231208177674895
-       name: Pearson Euclidean
-     - type: spearman_euclidean
-       value: 0.8444168331737782
-       name: Spearman Euclidean
-     - type: pearson_dot
-       value: 0.8515496381581389
-       name: Pearson Dot
-     - type: spearman_dot
-       value: 0.8557531503465841
-       name: Spearman Dot
-   - task:
-       type: semantic-similarity
-       name: Semantic Similarity
-     dataset:
        config: en-ar
        name: MTEB STS17 (en-ar)
        revision: faeb762787bd10488a50c8b5be4a3b82e411949c
        split: test
        type: mteb/sts17-crosslingual-sts
      metrics:
-     - type: pearson_cosine
-       value: 0.4960250395119053
-       name: Pearson Cosine
-     - type: spearman_cosine
-       value: 0.4770240652715316
-       name: Spearman Cosine
-     - type: pearson_manhattan
-       value: 0.463401831917928
-       name: Pearson Manhattan
-     - type: spearman_manhattan
-       value: 0.4468968000990917
-       name: Spearman Manhattan
-     - type: pearson_euclidean
-       value: 0.4481739880481376
-       name: Pearson Euclidean
-     - type: spearman_euclidean
-       value: 0.428311112429714
-       name: Spearman Euclidean
-     - type: pearson_dot
-       value: 0.49602504450181617
-       name: Pearson Dot
-     - type: spearman_dot
-       value: 0.4770240652715316
-       name: Spearman Dot
  license: apache-2.0
  language:
  - ar
 
  - feature-extraction
  - generated_from_trainer
  - loss:CosineSimilarityLoss
+ - mteb
  model-index:
+ - name: silma-ai/silma-embeddding-sts-0.1
    results:
+   - dataset:
+       config: ar
+       name: MTEB MassiveIntentClassification (ar)
+       revision: 4672e20407010da34463acc759c162ca9734bca6
        split: test
+       type: mteb/amazon_massive_intent
+     metrics:
+     - type: accuracy
+       value: 56.489576328177534
+     - type: f1
+       value: 54.0532701115665
+     - type: f1_weighted
+       value: 56.74231335142343
+     - type: main_score
+       value: 56.489576328177534
+     task:
+       type: Classification
+   - dataset:
+       config: en
+       name: MTEB MassiveIntentClassification (en)
+       revision: 4672e20407010da34463acc759c162ca9734bca6
+       split: test
+       type: mteb/amazon_massive_intent
+     metrics:
+     - type: accuracy
+       value: 48.78278412911903
+     - type: f1
+       value: 47.56043284146044
+     - type: f1_weighted
+       value: 48.98016672316552
+     - type: main_score
+       value: 48.78278412911903
+     task:
+       type: Classification
+   - dataset:
+       config: ar
+       name: MTEB MassiveIntentClassification (ar)
+       revision: 4672e20407010da34463acc759c162ca9734bca6
+       split: validation
+       type: mteb/amazon_massive_intent
+     metrics:
+     - type: accuracy
+       value: 56.768322675848495
+     - type: f1
+       value: 53.963930379828895
+     - type: f1_weighted
+       value: 56.745501043116796
+     - type: main_score
+       value: 56.768322675848495
+     task:
+       type: Classification
+   - dataset:
+       config: en
+       name: MTEB MassiveIntentClassification (en)
+       revision: 4672e20407010da34463acc759c162ca9734bca6
+       split: validation
+       type: mteb/amazon_massive_intent
      metrics:
+     - type: accuracy
+       value: 49.54254795868175
+     - type: f1
+       value: 48.048926632026195
+     - type: f1_weighted
+       value: 49.60112881916927
+     - type: main_score
+       value: 49.54254795868175
+     task:
+       type: Classification
+   - dataset:
+       config: ar
+       name: MTEB MassiveScenarioClassification (ar)
+       revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
+       split: test
+       type: mteb/amazon_massive_scenario
+     metrics:
+     - type: accuracy
+       value: 62.76395427034298
+     - type: f1
+       value: 62.795517645393474
+     - type: f1_weighted
+       value: 61.993985553919295
+     - type: main_score
+       value: 62.76395427034298
+     task:
+       type: Classification
+   - dataset:
+       config: en
+       name: MTEB MassiveScenarioClassification (en)
+       revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
+       split: test
+       type: mteb/amazon_massive_scenario
+     metrics:
+     - type: accuracy
+       value: 55.457296570275716
+     - type: f1
+       value: 53.04898507492993
+     - type: f1_weighted
+       value: 55.69280690585543
+     - type: main_score
+       value: 55.457296570275716
+     task:
+       type: Classification
+   - dataset:
+       config: ar
+       name: MTEB MassiveScenarioClassification (ar)
+       revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
+       split: validation
+       type: mteb/amazon_massive_scenario
+     metrics:
+     - type: accuracy
+       value: 61.76586325627152
+     - type: f1
+       value: 62.096444561700956
+     - type: f1_weighted
+       value: 61.253818773337635
+     - type: main_score
+       value: 61.76586325627152
+     task:
+       type: Classification
+   - dataset:
+       config: en
+       name: MTEB MassiveScenarioClassification (en)
+       revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
+       split: validation
+       type: mteb/amazon_massive_scenario
+     metrics:
+     - type: accuracy
+       value: 55.248401377274966
+     - type: f1
+       value: 53.5659818815448
+     - type: f1_weighted
+       value: 55.392941321965914
+     - type: main_score
+       value: 55.248401377274966
+     task:
+       type: Classification
+   - dataset:
        config: en-ar
        name: MTEB STS17 (en-ar)
        revision: faeb762787bd10488a50c8b5be4a3b82e411949c
        split: test
        type: mteb/sts17-crosslingual-sts
      metrics:
+     - type: cosine_pearson
+       value: 49.60250026530193
+     - type: cosine_spearman
+       value: 47.702406527153165
+     - type: euclidean_pearson
+       value: 44.81740010078862
+     - type: euclidean_spearman
+       value: 42.831111242971396
+     - type: main_score
+       value: 47.702406527153165
+     - type: manhattan_pearson
+       value: 46.340186748112124
+     - type: manhattan_spearman
+       value: 44.689680009909175
+     - type: pearson
+       value: 49.60250612700404
+     - type: spearman
+       value: 47.702406527153165
+     task:
+       type: STS
+   - dataset:
+       config: en-en
+       name: MTEB STS17 (en-en)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 80.50355999312305
+     - type: cosine_spearman
+       value: 80.05684742492551
+     - type: euclidean_pearson
+       value: 79.79426226586054
+     - type: euclidean_spearman
+       value: 78.62531622907113
+     - type: main_score
+       value: 80.05684742492551
+     - type: manhattan_pearson
+       value: 79.69928765568616
+     - type: manhattan_spearman
+       value: 78.57030908261245
+     - type: pearson
+       value: 80.50356022284683
+     - type: spearman
+       value: 80.05684742492551
+     task:
+       type: STS
+   - dataset:
+       config: es-en
+       name: MTEB STS17 (es-en)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 21.624383947189354
+     - type: cosine_spearman
+       value: 21.4038834628452
+     - type: euclidean_pearson
+       value: 7.184950714569936
+     - type: euclidean_spearman
+       value: 3.4762228403044304
+     - type: main_score
+       value: 21.4038834628452
+     - type: manhattan_pearson
+       value: 6.551289317075073
+     - type: manhattan_spearman
+       value: 2.286368561838714
+     - type: pearson
+       value: 21.624390367032202
+     - type: spearman
+       value: 21.4038834628452
+     task:
+       type: STS
+   - dataset:
+       config: en-de
+       name: MTEB STS17 (en-de)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 31.03301067892329
+     - type: cosine_spearman
+       value: 31.85713324783654
+     - type: euclidean_pearson
+       value: 21.63310145118274
+     - type: euclidean_spearman
+       value: 22.456677151668814
+     - type: main_score
+       value: 31.85713324783654
+     - type: manhattan_pearson
+       value: 21.67370664986112
+     - type: manhattan_spearman
+       value: 21.598819368637155
+     - type: pearson
+       value: 31.03301931810337
+     - type: spearman
+       value: 31.85713324783654
+     task:
+       type: STS
+   - dataset:
+       config: fr-en
+       name: MTEB STS17 (fr-en)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 30.07580974074585
+     - type: cosine_spearman
+       value: 30.070765595685838
+     - type: euclidean_pearson
+       value: 17.235942672907232
+     - type: euclidean_spearman
+       value: 16.010962024640964
+     - type: main_score
+       value: 30.070765595685838
+     - type: manhattan_pearson
+       value: 16.98929367890981
+     - type: manhattan_spearman
+       value: 15.865314171439055
+     - type: pearson
+       value: 30.075805759312956
+     - type: spearman
+       value: 30.070765595685838
+     task:
+       type: STS
+   - dataset:
+       config: nl-en
+       name: MTEB STS17 (nl-en)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 38.5738832598024
+     - type: cosine_spearman
+       value: 36.23552528353376
+     - type: euclidean_pearson
+       value: 28.920909050416814
+     - type: euclidean_spearman
+       value: 26.824767359797256
+     - type: main_score
+       value: 36.23552528353376
+     - type: manhattan_pearson
+       value: 28.449235903219787
+     - type: manhattan_spearman
+       value: 26.149497938525712
+     - type: pearson
+       value: 38.57388759602166
+     - type: spearman
+       value: 36.23552528353376
+     task:
+       type: STS
+   - dataset:
+       config: it-en
+       name: MTEB STS17 (it-en)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 28.440771017135734
+     - type: cosine_spearman
+       value: 23.328373210539134
+     - type: euclidean_pearson
+       value: 14.616541134326836
+     - type: euclidean_spearman
+       value: 7.785452426485771
+     - type: main_score
+       value: 23.328373210539134
+     - type: manhattan_pearson
+       value: 16.35791121049381
+     - type: manhattan_spearman
+       value: 10.350376853181583
+     - type: pearson
+       value: 28.440782342934394
+     - type: spearman
+       value: 23.328373210539134
+     task:
+       type: STS
+   - dataset:
+       config: en-tr
+       name: MTEB STS17 (en-tr)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 10.058384831429683
+     - type: cosine_spearman
+       value: 9.208230020320498
+     - type: euclidean_pearson
+       value: -3.778073300045484
+     - type: euclidean_spearman
+       value: -5.168172155878574
+     - type: main_score
+       value: 9.208230020320498
+     - type: manhattan_pearson
+       value: -5.081387114365387
+     - type: manhattan_spearman
+       value: -5.190932828652431
+     - type: pearson
+       value: 10.058387061356784
+     - type: spearman
+       value: 9.208230020320498
+     task:
+       type: STS
+   - dataset:
+       config: ar-ar
+       name: MTEB STS17 (ar-ar)
+       revision: faeb762787bd10488a50c8b5be4a3b82e411949c
+       split: test
+       type: mteb/sts17-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 85.15496368852482
+     - type: cosine_spearman
+       value: 85.58624740720275
+     - type: euclidean_pearson
+       value: 82.31207769687893
+     - type: euclidean_spearman
+       value: 84.44298391864797
+     - type: main_score
+       value: 85.58624740720275
+     - type: manhattan_pearson
+       value: 82.19636675129995
+     - type: manhattan_spearman
+       value: 83.97030581469602
+     - type: pearson
+       value: 85.15496353205859
+     - type: spearman
+       value: 85.59382070976062
+     task:
+       type: STS
+   - dataset:
+       config: es-en
+       name: MTEB STS22.v2 (es-en)
+       revision: d31f33a128469b20e357535c39b82fb3c3f6f2bd
+       split: test
+       type: mteb/sts22-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 44.24743366469854
+     - type: cosine_spearman
+       value: 50.28917533427211
+     - type: euclidean_pearson
+       value: 45.87986269990654
+     - type: euclidean_spearman
+       value: 51.891514435608855
+     - type: main_score
+       value: 50.28917533427211
+     - type: manhattan_pearson
+       value: 45.45542397032592
+     - type: manhattan_spearman
+       value: 52.411033818833666
+     - type: pearson
+       value: 44.24743853113377
+     - type: spearman
+       value: 50.28917533427211
+     task:
+       type: STS
+   - dataset:
+       config: zh-en
+       name: MTEB STS22.v2 (zh-en)
+       revision: d31f33a128469b20e357535c39b82fb3c3f6f2bd
+       split: test
+       type: mteb/sts22-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 27.73878924884296
+     - type: cosine_spearman
+       value: 22.44663617360493
+     - type: euclidean_pearson
+       value: 22.868571735387977
+     - type: euclidean_spearman
+       value: 18.017657427593637
+     - type: main_score
+       value: 22.44663617360493
+     - type: manhattan_pearson
+       value: 24.20368152236799
+     - type: manhattan_spearman
+       value: 19.341058710109657
+     - type: pearson
+       value: 27.738791387167687
+     - type: spearman
+       value: 22.44663617360493
+     task:
+       type: STS
+   - dataset:
+       config: de-en
+       name: MTEB STS22.v2 (de-en)
+       revision: d31f33a128469b20e357535c39b82fb3c3f6f2bd
+       split: test
+       type: mteb/sts22-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 28.905819837460527
+     - type: cosine_spearman
+       value: 32.52679512081778
+     - type: euclidean_pearson
+       value: 28.61574417382465
+     - type: euclidean_spearman
+       value: 35.447663167023094
+     - type: main_score
+       value: 32.52679512081778
+     - type: manhattan_pearson
+       value: 28.736369410178426
+     - type: manhattan_spearman
+       value: 35.158643077723944
+     - type: pearson
+       value: 28.90580871894244
+     - type: spearman
+       value: 32.52679512081778
+     task:
+       type: STS
+   - dataset:
+       config: pl-en
+       name: MTEB STS22.v2 (pl-en)
+       revision: d31f33a128469b20e357535c39b82fb3c3f6f2bd
+       split: test
+       type: mteb/sts22-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 48.20842591896265
+     - type: cosine_spearman
+       value: 44.838254673346626
+     - type: euclidean_pearson
+       value: 51.55940058938421
+     - type: euclidean_spearman
+       value: 45.912821863788785
+     - type: main_score
+       value: 44.838254673346626
+     - type: manhattan_pearson
+       value: 52.13078297712538
+     - type: manhattan_spearman
+       value: 47.402814514453425
+     - type: pearson
+       value: 48.20843799095813
+     - type: spearman
+       value: 44.838254673346626
+     task:
+       type: STS
+   - dataset:
+       config: en
+       name: MTEB STS22.v2 (en)
+       revision: d31f33a128469b20e357535c39b82fb3c3f6f2bd
+       split: test
+       type: mteb/sts22-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 56.896647953120414
+     - type: cosine_spearman
+       value: 60.96741836410487
+     - type: euclidean_pearson
+       value: 55.90453382184861
+     - type: euclidean_spearman
+       value: 60.273680095845705
+     - type: main_score
+       value: 60.96741836410487
+     - type: manhattan_pearson
+       value: 55.87830113983942
+     - type: manhattan_spearman
+       value: 59.94276270978964
+     - type: pearson
+       value: 56.89664991046338
+     - type: spearman
+       value: 60.96741836410487
+     task:
+       type: STS
+   - dataset:
+       config: ar
+       name: MTEB STS22.v2 (ar)
+       revision: d31f33a128469b20e357535c39b82fb3c3f6f2bd
+       split: test
+       type: mteb/sts22-crosslingual-sts
+     metrics:
+     - type: cosine_pearson
+       value: 52.70294726367241
+     - type: cosine_spearman
+       value: 61.21881191987154
+     - type: euclidean_pearson
+       value: 54.13531251250594
+     - type: euclidean_spearman
+       value: 61.20287919055926
+     - type: main_score
+       value: 61.21881191987154
+     - type: manhattan_pearson
+       value: 54.60474684752885
+     - type: manhattan_spearman
+       value: 61.45150178016683
+     - type: pearson
+       value: 52.70294625001791
+     - type: spearman
+       value: 61.21881191987154
+     task:
+       type: STS
  license: apache-2.0
  language:
  - ar