Datasets: mteb

Commit 1616d1b (1 parent: 7695129)

chore: add jina-v3 evaluation result on mmteb multilingual (#41)


* chore: add jina-v3 evaluation result on mmteb

* chore: apply make pre push

* chore: patch miracl retrieval results

* trigger CI

---------

Co-authored-by: Kenneth Enevoldsen <kennethcenevoldsen@gmail.com>
Co-authored-by: Isaac Chung <chungisaac1217@gmail.com>

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete change set.
Files changed (50):
  1. .github/workflows/test.yml +1 -1
  2. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AILAStatutes.json +158 -0
  3. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AfriSentiClassification.json +755 -0
  4. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AlloProfClusteringS2S.v2.json +34 -0
  5. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AlloprofReranking.json +26 -0
  6. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AmazonCounterfactualClassification.json +685 -0
  7. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArXivHierarchicalClusteringP2P.json +46 -0
  8. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArXivHierarchicalClusteringS2S.json +46 -0
  9. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArguAna.json +158 -0
  10. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArmenianParaphrasePC.json +58 -0
  11. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BUCC.v2.json +59 -0
  12. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BelebeleRetrieval.json +0 -0
  13. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BibleNLPBitextMining.json +0 -0
  14. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BigPatentClustering.v2.json +34 -0
  15. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BiorxivClusteringP2P.v2.json +34 -0
  16. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BornholmBitextMining.json +22 -0
  17. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BrazilianToxicTweetsClassification.json +73 -0
  18. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BulgarianStoreReviewSentimentClassfication.json +73 -0
  19. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CEDRClassification.json +73 -0
  20. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CLSClusteringP2P.v2.json +34 -0
  21. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CSFDSKMovieReviewSentimentClassification.json +73 -0
  22. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CTKFactsNLI.json +107 -0
  23. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CataloniaTweetClassification.json +261 -0
  24. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/Core17InstructionRetrieval.json +137 -0
  25. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CovidRetrieval.json +158 -0
  26. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CyrillicTurkicLangClassification.json +81 -0
  27. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CzechProductReviewSentimentClassification.json +73 -0
  28. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/DBpediaClassification.json +73 -0
  29. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/DalajClassification.json +95 -0
  30. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/DiaBlaBitextMining.json +35 -0
  31. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/EstonianValenceClassification.json +73 -0
  32. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FaroeseSTS.json +26 -0
  33. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FilipinoShopeeReviewsClassification.json +137 -0
  34. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FinParaSTS.json +43 -0
  35. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FinancialPhrasebankClassification.json +73 -0
  36. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FloresBitextMining.json +0 -0
  37. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/GermanSTSBenchmark.json +43 -0
  38. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/GreekLegalCodeClassification.json +137 -0
  39. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/GujaratiNewsClassification.json +73 -0
  40. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/HALClusteringS2S.v2.json +34 -0
  41. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/HagridRetrieval.json +158 -0
  42. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IN22GenBitextMining.json +0 -0
  43. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndicCrosslingualSTS.json +203 -0
  44. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndicGenBenchFloresBitextMining.json +1405 -0
  45. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndicLangClassification.json +96 -0
  46. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndonesianIdClickbaitClassification.json +95 -0
  47. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IsiZuluNewsClassification.json +73 -0
  48. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ItaCaseholdClassification.json +73 -0
  49. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/JSICK.json +26 -0
  50. results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/KorHateSpeechMLClassification.json +73 -0
.github/workflows/test.yml CHANGED
@@ -16,7 +16,7 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest] #, macos-latest, windows-latest]
-        python-version: ["3.8"] # , "3.9", "3.10"]
+        python-version: ["3.9"]
 
     steps:
       - uses: actions/checkout@v3
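
The remaining files in this commit are small per-task JSON reports. A minimal sketch for reading one of them from a local checkout of this repository (standard library only; the directory layout and field names below are taken from the files added in this commit, and the helper name is illustrative):

import json
from pathlib import Path

# Directory added by this commit: one JSON file per MTEB task for jina-embeddings-v3.
RESULTS_DIR = Path("results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed")

def summarize(task_file: Path) -> None:
    """Print the main score for every split/subset recorded in one per-task result file."""
    data = json.loads(task_file.read_text())
    for split, entries in data["scores"].items():
        for entry in entries:
            print(f'{data["task_name"]:30s} {split:12s} {entry["hf_subset"]:10s} {entry["main_score"]:.5f}')

summarize(RESULTS_DIR / "AILAStatutes.json")
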
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AILAStatutes.json ADDED
@@ -0,0 +1,158 @@
{
  "dataset_revision": "ebfcd844eadd3d667efa3c57fc5c8c87f5c2867e",
  "evaluation_time": 1.649610996246338,
  "kg_co2_emissions": null,
  "mteb_version": "1.18.2",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ],
        "main_score": 0.32773,
        "map_at_1": 0.071,
        "map_at_10": 0.19666,
        "map_at_100": 0.26018,
        "map_at_1000": 0.26018,
        "map_at_20": 0.21765,
        "map_at_3": 0.1275,
        "map_at_5": 0.16017,
        "mrr_at_1": 0.32,
        "mrr_at_10": 0.483,
        "mrr_at_100": 0.4904463118580766,
        "mrr_at_1000": 0.4904463118580766,
        "mrr_at_20": 0.48949393090569565,
        "mrr_at_3": 0.43,
        "mrr_at_5": 0.461,
        "nauc_map_at_1000_diff1": -0.12797552251112276,
        "nauc_map_at_1000_max": 0.163195009479752,
        "nauc_map_at_1000_std": 0.1784295270856589,
        "nauc_map_at_100_diff1": -0.12797552251112276,
        "nauc_map_at_100_max": 0.163195009479752,
        "nauc_map_at_100_std": 0.1784295270856589,
        "nauc_map_at_10_diff1": -0.08995364022904571,
        "nauc_map_at_10_max": 0.1925229843113862,
        "nauc_map_at_10_std": 0.18380298751451712,
        "nauc_map_at_1_diff1": -0.21742547253217148,
        "nauc_map_at_1_max": 0.05927526768636921,
        "nauc_map_at_1_std": 0.05854718387583288,
        "nauc_map_at_20_diff1": -0.11929591170076337,
        "nauc_map_at_20_max": 0.1779790778162725,
        "nauc_map_at_20_std": 0.17363915696250012,
        "nauc_map_at_3_diff1": -0.1922330072240532,
        "nauc_map_at_3_max": 0.19438147683933096,
        "nauc_map_at_3_std": 0.14954385008570212,
        "nauc_map_at_5_diff1": -0.08263647472418753,
        "nauc_map_at_5_max": 0.2497528708805151,
        "nauc_map_at_5_std": 0.14502538628236758,
        "nauc_mrr_at_1000_diff1": -0.12336330643042633,
        "nauc_mrr_at_1000_max": 0.13854040920452687,
        "nauc_mrr_at_1000_std": 0.07329470588671386,
        "nauc_mrr_at_100_diff1": -0.12336330643042633,
        "nauc_mrr_at_100_max": 0.13854040920452687,
        "nauc_mrr_at_100_std": 0.07329470588671386,
        "nauc_mrr_at_10_diff1": -0.11631992795422062,
        "nauc_mrr_at_10_max": 0.1449668733600396,
        "nauc_mrr_at_10_std": 0.07455379940623108,
        "nauc_mrr_at_1_diff1": -0.22367012736808312,
        "nauc_mrr_at_1_max": 0.04209033501016787,
        "nauc_mrr_at_1_std": 0.052204859252916465,
        "nauc_mrr_at_20_diff1": -0.12071283554889783,
        "nauc_mrr_at_20_max": 0.13849101618586232,
        "nauc_mrr_at_20_std": 0.0725279183503699,
        "nauc_mrr_at_3_diff1": -0.105666344069899,
        "nauc_mrr_at_3_max": 0.1439281381622888,
        "nauc_mrr_at_3_std": 0.07165737497103443,
        "nauc_mrr_at_5_diff1": -0.10458708791169632,
        "nauc_mrr_at_5_max": 0.1561852295113067,
        "nauc_mrr_at_5_std": 0.09093723120024294,
        "nauc_ndcg_at_1000_diff1": -0.13788256526312664,
        "nauc_ndcg_at_1000_max": 0.12199528260885305,
        "nauc_ndcg_at_1000_std": 0.15799629538429735,
        "nauc_ndcg_at_100_diff1": -0.13788256526312664,
        "nauc_ndcg_at_100_max": 0.12199528260885305,
        "nauc_ndcg_at_100_std": 0.15799629538429735,
        "nauc_ndcg_at_10_diff1": -0.054268964094045816,
        "nauc_ndcg_at_10_max": 0.20300723974175514,
        "nauc_ndcg_at_10_std": 0.18179546754141582,
        "nauc_ndcg_at_1_diff1": -0.22367012736808312,
        "nauc_ndcg_at_1_max": 0.04209033501016787,
        "nauc_ndcg_at_1_std": 0.052204859252916465,
        "nauc_ndcg_at_20_diff1": -0.09238136761445143,
        "nauc_ndcg_at_20_max": 0.17200635359871944,
        "nauc_ndcg_at_20_std": 0.16809293840346623,
        "nauc_ndcg_at_3_diff1": -0.14214311345152836,
        "nauc_ndcg_at_3_max": 0.16960155702863947,
        "nauc_ndcg_at_3_std": 0.1410356537415608,
        "nauc_ndcg_at_5_diff1": -0.036383910483647294,
        "nauc_ndcg_at_5_max": 0.27156565266137944,
        "nauc_ndcg_at_5_std": 0.14355032468779033,
        "nauc_precision_at_1000_diff1": -0.40377966008671456,
        "nauc_precision_at_1000_max": -0.7248312325058065,
        "nauc_precision_at_1000_std": 0.09538793655440012,
        "nauc_precision_at_100_diff1": -0.40377966008671107,
        "nauc_precision_at_100_max": -0.7248312325058035,
        "nauc_precision_at_100_std": 0.09538793655440192,
        "nauc_precision_at_10_diff1": -0.03272268254790227,
        "nauc_precision_at_10_max": 0.054343604350077776,
        "nauc_precision_at_10_std": 0.2512407215605042,
        "nauc_precision_at_1_diff1": -0.22367012736808312,
        "nauc_precision_at_1_max": 0.04209033501016787,
        "nauc_precision_at_1_std": 0.052204859252916465,
        "nauc_precision_at_20_diff1": -0.12200642439899881,
        "nauc_precision_at_20_max": -0.08255695652670927,
        "nauc_precision_at_20_std": 0.1903085311903426,
        "nauc_precision_at_3_diff1": -0.10267114523352527,
        "nauc_precision_at_3_max": 0.20153550863723615,
        "nauc_precision_at_3_std": 0.19846449136276398,
        "nauc_precision_at_5_diff1": 0.046751707190575705,
        "nauc_precision_at_5_max": 0.27963349427643525,
        "nauc_precision_at_5_std": 0.21412430076190722,
        "nauc_recall_at_1000_diff1": NaN,
        "nauc_recall_at_1000_max": NaN,
        "nauc_recall_at_1000_std": NaN,
        "nauc_recall_at_100_diff1": NaN,
        "nauc_recall_at_100_max": NaN,
        "nauc_recall_at_100_std": NaN,
        "nauc_recall_at_10_diff1": 0.07485826287523366,
        "nauc_recall_at_10_max": 0.2455644517516293,
        "nauc_recall_at_10_std": 0.2159406283650886,
        "nauc_recall_at_1_diff1": -0.21742547253217148,
        "nauc_recall_at_1_max": 0.05927526768636921,
        "nauc_recall_at_1_std": 0.05854718387583288,
        "nauc_recall_at_20_diff1": 0.023968970111766418,
        "nauc_recall_at_20_max": 0.18284685351268357,
        "nauc_recall_at_20_std": 0.17225210435399174,
        "nauc_recall_at_3_diff1": -0.10092032231612168,
        "nauc_recall_at_3_max": 0.29201596671116925,
        "nauc_recall_at_3_std": 0.17071030178168448,
        "nauc_recall_at_5_diff1": 0.09179148478207606,
        "nauc_recall_at_5_max": 0.3822915149728462,
        "nauc_recall_at_5_std": 0.1506803578849412,
        "ndcg_at_1": 0.32,
        "ndcg_at_10": 0.32773,
        "ndcg_at_100": 0.52388,
        "ndcg_at_1000": 0.52388,
        "ndcg_at_20": 0.38143,
        "ndcg_at_3": 0.26059,
        "ndcg_at_5": 0.25606,
        "precision_at_1": 0.32,
        "precision_at_10": 0.168,
        "precision_at_100": 0.0434,
        "precision_at_1000": 0.00434,
        "precision_at_20": 0.114,
        "precision_at_3": 0.24,
        "precision_at_5": 0.212,
        "recall_at_1": 0.071,
        "recall_at_10": 0.38867,
        "recall_at_100": 1.0,
        "recall_at_1000": 1.0,
        "recall_at_20": 0.521,
        "recall_at_3": 0.166,
        "recall_at_5": 0.24867
      }
    ]
  },
  "task_name": "AILAStatutes"
}
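
For retrieval tasks such as AILAStatutes, main_score is one of the listed metrics; in the file above it equals ndcg_at_10 (0.32773). A short illustrative check against a local copy of that file (path taken from this commit, variable names are just for the sketch):

import json

path = "results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AILAStatutes.json"
with open(path) as f:
    result = json.load(f)

entry = result["scores"]["test"][0]                     # single "default"/eng-Latn entry
assert entry["main_score"] == entry["ndcg_at_10"]       # both 0.32773 in this file

# Pull out the NDCG curve across cutoffs for a compact view of the same data.
ndcg_curve = {k: v for k, v in entry.items() if k.startswith("ndcg_at_")}
print(ndcg_curve)
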
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AfriSentiClassification.json ADDED
@@ -0,0 +1,755 @@
1
+ {
2
+ "dataset_revision": "b52e930385cf5ed7f063072c3f7bd17b599a16cf",
3
+ "evaluation_time": 46.945666790008545,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.5123061530765384,
10
+ "f1": 0.4402101663519046,
11
+ "f1_weighted": 0.5416984414960183,
12
+ "hf_subset": "amh",
13
+ "languages": [
14
+ "amh-Ethi"
15
+ ],
16
+ "main_score": 0.5123061530765384,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.6568284142071036,
20
+ "f1": 0.48239234305852435,
21
+ "f1_weighted": 0.6510745601457321
22
+ },
23
+ {
24
+ "accuracy": 0.6003001500750376,
25
+ "f1": 0.5129147867275591,
26
+ "f1_weighted": 0.6313744698583131
27
+ },
28
+ {
29
+ "accuracy": 0.45822911455727866,
30
+ "f1": 0.43716939510929254,
31
+ "f1_weighted": 0.5114914572983921
32
+ },
33
+ {
34
+ "accuracy": 0.3696848424212106,
35
+ "f1": 0.37343369755291195,
36
+ "f1_weighted": 0.40419457223602046
37
+ },
38
+ {
39
+ "accuracy": 0.5252626313156579,
40
+ "f1": 0.4385463545644502,
41
+ "f1_weighted": 0.5682337713617551
42
+ },
43
+ {
44
+ "accuracy": 0.6533266633316658,
45
+ "f1": 0.48253769477140707,
46
+ "f1_weighted": 0.6534033976273914
47
+ },
48
+ {
49
+ "accuracy": 0.4112056028014007,
50
+ "f1": 0.40428487726353346,
51
+ "f1_weighted": 0.44175568668987647
52
+ },
53
+ {
54
+ "accuracy": 0.5407703851925963,
55
+ "f1": 0.4600511991468963,
56
+ "f1_weighted": 0.578836850041584
57
+ },
58
+ {
59
+ "accuracy": 0.5492746373186593,
60
+ "f1": 0.4642838152396132,
61
+ "f1_weighted": 0.5766793472000966
62
+ },
63
+ {
64
+ "accuracy": 0.3581790895447724,
65
+ "f1": 0.3464875000848575,
66
+ "f1_weighted": 0.3999403025010219
67
+ }
68
+ ]
69
+ },
70
+ {
71
+ "accuracy": 0.4991649269311065,
72
+ "f1": 0.4761316787567272,
73
+ "f1_weighted": 0.5086421944981993,
74
+ "hf_subset": "arq",
75
+ "languages": [
76
+ "arq-Arab"
77
+ ],
78
+ "main_score": 0.4991649269311065,
79
+ "scores_per_experiment": [
80
+ {
81
+ "accuracy": 0.558455114822547,
82
+ "f1": 0.5266276913963747,
83
+ "f1_weighted": 0.5757039029760225
84
+ },
85
+ {
86
+ "accuracy": 0.5918580375782881,
87
+ "f1": 0.549778021249819,
88
+ "f1_weighted": 0.5934630258829264
89
+ },
90
+ {
91
+ "accuracy": 0.5835073068893528,
92
+ "f1": 0.5400287840145716,
93
+ "f1_weighted": 0.5879486070923603
94
+ },
95
+ {
96
+ "accuracy": 0.5521920668058455,
97
+ "f1": 0.5288892811515471,
98
+ "f1_weighted": 0.5672839251595454
99
+ },
100
+ {
101
+ "accuracy": 0.44258872651356995,
102
+ "f1": 0.42414841660798913,
103
+ "f1_weighted": 0.4585321524160486
104
+ },
105
+ {
106
+ "accuracy": 0.5302713987473904,
107
+ "f1": 0.4917111081950552,
108
+ "f1_weighted": 0.5400884613628603
109
+ },
110
+ {
111
+ "accuracy": 0.2599164926931106,
112
+ "f1": 0.2619579262011516,
113
+ "f1_weighted": 0.2588451666366523
114
+ },
115
+ {
116
+ "accuracy": 0.4175365344467641,
117
+ "f1": 0.41485560188485526,
118
+ "f1_weighted": 0.42623704314802374
119
+ },
120
+ {
121
+ "accuracy": 0.5052192066805845,
122
+ "f1": 0.4934158772333532,
123
+ "f1_weighted": 0.5157803920069832
124
+ },
125
+ {
126
+ "accuracy": 0.5501043841336117,
127
+ "f1": 0.5299040796325551,
128
+ "f1_weighted": 0.5625392683005705
129
+ }
130
+ ]
131
+ },
132
+ {
133
+ "accuracy": 0.436181640625,
134
+ "f1": 0.4237324686881199,
135
+ "f1_weighted": 0.4311175451984736,
136
+ "hf_subset": "ary",
137
+ "languages": [
138
+ "ary-Arab"
139
+ ],
140
+ "main_score": 0.436181640625,
141
+ "scores_per_experiment": [
142
+ {
143
+ "accuracy": 0.45849609375,
144
+ "f1": 0.4550953982589852,
145
+ "f1_weighted": 0.4555466230474953
146
+ },
147
+ {
148
+ "accuracy": 0.42578125,
149
+ "f1": 0.42005523306187076,
150
+ "f1_weighted": 0.42565448976228737
151
+ },
152
+ {
153
+ "accuracy": 0.3935546875,
154
+ "f1": 0.3810109668383232,
155
+ "f1_weighted": 0.38487214247617463
156
+ },
157
+ {
158
+ "accuracy": 0.42333984375,
159
+ "f1": 0.4187035156893146,
160
+ "f1_weighted": 0.42388381256530505
161
+ },
162
+ {
163
+ "accuracy": 0.47216796875,
164
+ "f1": 0.4554361433909127,
165
+ "f1_weighted": 0.4642651028683567
166
+ },
167
+ {
168
+ "accuracy": 0.3818359375,
169
+ "f1": 0.378331585894846,
170
+ "f1_weighted": 0.3884229930422563
171
+ },
172
+ {
173
+ "accuracy": 0.42822265625,
174
+ "f1": 0.3998458376233975,
175
+ "f1_weighted": 0.4118517000039773
176
+ },
177
+ {
178
+ "accuracy": 0.45751953125,
179
+ "f1": 0.42512185224420723,
180
+ "f1_weighted": 0.43969824021545934
181
+ },
182
+ {
183
+ "accuracy": 0.466796875,
184
+ "f1": 0.4610421282408963,
185
+ "f1_weighted": 0.46470535142176256
186
+ },
187
+ {
188
+ "accuracy": 0.4541015625,
189
+ "f1": 0.4426820256384454,
190
+ "f1_weighted": 0.4522749965816609
191
+ }
192
+ ]
193
+ },
194
+ {
195
+ "accuracy": 0.4,
196
+ "f1": 0.2524772795700922,
197
+ "f1_weighted": 0.4901937973268149,
198
+ "hf_subset": "hau",
199
+ "languages": [
200
+ "hau-Latn"
201
+ ],
202
+ "main_score": 0.4,
203
+ "scores_per_experiment": [
204
+ {
205
+ "accuracy": 0.53662109375,
206
+ "f1": 0.3309979240806643,
207
+ "f1_weighted": 0.650154575334325
208
+ },
209
+ {
210
+ "accuracy": 0.099609375,
211
+ "f1": 0.08978668465618311,
212
+ "f1_weighted": 0.17809485271053113
213
+ },
214
+ {
215
+ "accuracy": 0.16455078125,
216
+ "f1": 0.1366038950587478,
217
+ "f1_weighted": 0.2730317290861902
218
+ },
219
+ {
220
+ "accuracy": 0.35302734375,
221
+ "f1": 0.25489947218880044,
222
+ "f1_weighted": 0.4233017013090361
223
+ },
224
+ {
225
+ "accuracy": 0.69580078125,
226
+ "f1": 0.37086613258814155,
227
+ "f1_weighted": 0.7591158982784173
228
+ },
229
+ {
230
+ "accuracy": 0.0693359375,
231
+ "f1": 0.06817485566160009,
232
+ "f1_weighted": 0.12767282636311417
233
+ },
234
+ {
235
+ "accuracy": 0.50146484375,
236
+ "f1": 0.3179177606224387,
237
+ "f1_weighted": 0.5759236747735021
238
+ },
239
+ {
240
+ "accuracy": 0.30029296875,
241
+ "f1": 0.20777479892761394,
242
+ "f1_weighted": 0.44611037512567026
243
+ },
244
+ {
245
+ "accuracy": 0.64404296875,
246
+ "f1": 0.3625576958165215,
247
+ "f1_weighted": 0.741514919307085
248
+ },
249
+ {
250
+ "accuracy": 0.63525390625,
251
+ "f1": 0.3851935761002106,
252
+ "f1_weighted": 0.7270174209802778
253
+ }
254
+ ]
255
+ },
256
+ {
257
+ "accuracy": 0.51044921875,
258
+ "f1": 0.3186935249361017,
259
+ "f1_weighted": 0.4743608914532035,
260
+ "hf_subset": "ibo",
261
+ "languages": [
262
+ "ibo-Latn"
263
+ ],
264
+ "main_score": 0.51044921875,
265
+ "scores_per_experiment": [
266
+ {
267
+ "accuracy": 0.49658203125,
268
+ "f1": 0.3146949820943115,
269
+ "f1_weighted": 0.4597003828309322
270
+ },
271
+ {
272
+ "accuracy": 0.56591796875,
273
+ "f1": 0.399986972805732,
274
+ "f1_weighted": 0.6036505404250123
275
+ },
276
+ {
277
+ "accuracy": 0.44140625,
278
+ "f1": 0.30549323541476603,
279
+ "f1_weighted": 0.468190321614143
280
+ },
281
+ {
282
+ "accuracy": 0.5517578125,
283
+ "f1": 0.38773868316031623,
284
+ "f1_weighted": 0.583091737499138
285
+ },
286
+ {
287
+ "accuracy": 0.47119140625,
288
+ "f1": 0.27397357353679874,
289
+ "f1_weighted": 0.39218437969590325
290
+ },
291
+ {
292
+ "accuracy": 0.48486328125,
293
+ "f1": 0.2941280707594416,
294
+ "f1_weighted": 0.42520713572781554
295
+ },
296
+ {
297
+ "accuracy": 0.5810546875,
298
+ "f1": 0.3898143642824494,
299
+ "f1_weighted": 0.5903608395629673
300
+ },
301
+ {
302
+ "accuracy": 0.51904296875,
303
+ "f1": 0.30144248960035397,
304
+ "f1_weighted": 0.434355026465151
305
+ },
306
+ {
307
+ "accuracy": 0.54736328125,
308
+ "f1": 0.2666892117850849,
309
+ "f1_weighted": 0.42753076439928533
310
+ },
311
+ {
312
+ "accuracy": 0.4453125,
313
+ "f1": 0.2529736659217624,
314
+ "f1_weighted": 0.35933778631168756
315
+ }
316
+ ]
317
+ },
318
+ {
319
+ "accuracy": 0.39522417153996103,
320
+ "f1": 0.3812922913911826,
321
+ "f1_weighted": 0.37634312777044565,
322
+ "hf_subset": "kin",
323
+ "languages": [
324
+ "kin-Latn"
325
+ ],
326
+ "main_score": 0.39522417153996103,
327
+ "scores_per_experiment": [
328
+ {
329
+ "accuracy": 0.4171539961013645,
330
+ "f1": 0.41452827455406394,
331
+ "f1_weighted": 0.40664464055039823
332
+ },
333
+ {
334
+ "accuracy": 0.38596491228070173,
335
+ "f1": 0.3492096944098851,
336
+ "f1_weighted": 0.34952563516435026
337
+ },
338
+ {
339
+ "accuracy": 0.4083820662768031,
340
+ "f1": 0.38002680471158606,
341
+ "f1_weighted": 0.37085790737648733
342
+ },
343
+ {
344
+ "accuracy": 0.41130604288499023,
345
+ "f1": 0.402788496167104,
346
+ "f1_weighted": 0.4002817218943273
347
+ },
348
+ {
349
+ "accuracy": 0.3489278752436647,
350
+ "f1": 0.3466388985306028,
351
+ "f1_weighted": 0.34325087920026326
352
+ },
353
+ {
354
+ "accuracy": 0.42105263157894735,
355
+ "f1": 0.4164063802087126,
356
+ "f1_weighted": 0.4135157516912414
357
+ },
358
+ {
359
+ "accuracy": 0.3937621832358674,
360
+ "f1": 0.3767716250484039,
361
+ "f1_weighted": 0.37010478009417386
362
+ },
363
+ {
364
+ "accuracy": 0.38693957115009747,
365
+ "f1": 0.37197458051411775,
366
+ "f1_weighted": 0.36667198084049885
367
+ },
368
+ {
369
+ "accuracy": 0.4142300194931774,
370
+ "f1": 0.39982516641923516,
371
+ "f1_weighted": 0.39618194003916557
372
+ },
373
+ {
374
+ "accuracy": 0.3645224171539961,
375
+ "f1": 0.3547529933481153,
376
+ "f1_weighted": 0.34639604085355047
377
+ }
378
+ ]
379
+ },
380
+ {
381
+ "accuracy": 0.494970703125,
382
+ "f1": 0.4953541764022532,
383
+ "f1_weighted": 0.4836766158151411,
384
+ "hf_subset": "por",
385
+ "languages": [
386
+ "por-Latn"
387
+ ],
388
+ "main_score": 0.494970703125,
389
+ "scores_per_experiment": [
390
+ {
391
+ "accuracy": 0.4267578125,
392
+ "f1": 0.44025292147471756,
393
+ "f1_weighted": 0.39801946464472454
394
+ },
395
+ {
396
+ "accuracy": 0.54052734375,
397
+ "f1": 0.5381494127837124,
398
+ "f1_weighted": 0.5439088178258304
399
+ },
400
+ {
401
+ "accuracy": 0.62646484375,
402
+ "f1": 0.5717568440528269,
403
+ "f1_weighted": 0.6320091240816815
404
+ },
405
+ {
406
+ "accuracy": 0.53271484375,
407
+ "f1": 0.5281798362236413,
408
+ "f1_weighted": 0.5414058670895225
409
+ },
410
+ {
411
+ "accuracy": 0.5029296875,
412
+ "f1": 0.5052088308753605,
413
+ "f1_weighted": 0.5026318320564561
414
+ },
415
+ {
416
+ "accuracy": 0.53759765625,
417
+ "f1": 0.5384352109431081,
418
+ "f1_weighted": 0.5372638645846424
419
+ },
420
+ {
421
+ "accuracy": 0.39990234375,
422
+ "f1": 0.41321948115553875,
423
+ "f1_weighted": 0.3500390210664634
424
+ },
425
+ {
426
+ "accuracy": 0.46337890625,
427
+ "f1": 0.48343031495210703,
428
+ "f1_weighted": 0.45174099948613355
429
+ },
430
+ {
431
+ "accuracy": 0.408203125,
432
+ "f1": 0.4244306276829495,
433
+ "f1_weighted": 0.3681195690723143
434
+ },
435
+ {
436
+ "accuracy": 0.51123046875,
437
+ "f1": 0.5104782838785694,
438
+ "f1_weighted": 0.5116275982436425
439
+ }
440
+ ]
441
+ },
442
+ {
443
+ "accuracy": 0.41025390625,
444
+ "f1": 0.3383110514569484,
445
+ "f1_weighted": 0.4592771884447278,
446
+ "hf_subset": "pcm",
447
+ "languages": [
448
+ "pcm-Latn"
449
+ ],
450
+ "main_score": 0.41025390625,
451
+ "scores_per_experiment": [
452
+ {
453
+ "accuracy": 0.3916015625,
454
+ "f1": 0.3241468821015416,
455
+ "f1_weighted": 0.45064854663083453
456
+ },
457
+ {
458
+ "accuracy": 0.43212890625,
459
+ "f1": 0.35601453228963625,
460
+ "f1_weighted": 0.48468970078728313
461
+ },
462
+ {
463
+ "accuracy": 0.4248046875,
464
+ "f1": 0.3411837208208111,
465
+ "f1_weighted": 0.4755317669798927
466
+ },
467
+ {
468
+ "accuracy": 0.4345703125,
469
+ "f1": 0.35013826791545344,
470
+ "f1_weighted": 0.47977538624889987
471
+ },
472
+ {
473
+ "accuracy": 0.48095703125,
474
+ "f1": 0.37863897145306974,
475
+ "f1_weighted": 0.5222320508323921
476
+ },
477
+ {
478
+ "accuracy": 0.361328125,
479
+ "f1": 0.30609324066619015,
480
+ "f1_weighted": 0.41984591182265096
481
+ },
482
+ {
483
+ "accuracy": 0.36767578125,
484
+ "f1": 0.319334145949479,
485
+ "f1_weighted": 0.4283462165583519
486
+ },
487
+ {
488
+ "accuracy": 0.35595703125,
489
+ "f1": 0.3002243313298814,
490
+ "f1_weighted": 0.39898744169495837
491
+ },
492
+ {
493
+ "accuracy": 0.44482421875,
494
+ "f1": 0.36488720965556093,
495
+ "f1_weighted": 0.48989219092200686
496
+ },
497
+ {
498
+ "accuracy": 0.40869140625,
499
+ "f1": 0.3424492123878599,
500
+ "f1_weighted": 0.44282267197000685
501
+ }
502
+ ]
503
+ },
504
+ {
505
+ "accuracy": 0.41524064171122993,
506
+ "f1": 0.38431162421863435,
507
+ "f1_weighted": 0.42862548746338547,
508
+ "hf_subset": "swa",
509
+ "languages": [
510
+ "swa-Latn"
511
+ ],
512
+ "main_score": 0.41524064171122993,
513
+ "scores_per_experiment": [
514
+ {
515
+ "accuracy": 0.37433155080213903,
516
+ "f1": 0.35587988376946306,
517
+ "f1_weighted": 0.3906011107641745
518
+ },
519
+ {
520
+ "accuracy": 0.3716577540106952,
521
+ "f1": 0.3182072743742052,
522
+ "f1_weighted": 0.39766374286465095
523
+ },
524
+ {
525
+ "accuracy": 0.3649732620320856,
526
+ "f1": 0.35608565863572855,
527
+ "f1_weighted": 0.3701450668069928
528
+ },
529
+ {
530
+ "accuracy": 0.44786096256684493,
531
+ "f1": 0.41831522173637636,
532
+ "f1_weighted": 0.46255260400803627
533
+ },
534
+ {
535
+ "accuracy": 0.3783422459893048,
536
+ "f1": 0.3563303413448087,
537
+ "f1_weighted": 0.39779231034414525
538
+ },
539
+ {
540
+ "accuracy": 0.5320855614973262,
541
+ "f1": 0.45000142201972015,
542
+ "f1_weighted": 0.5369326963012561
543
+ },
544
+ {
545
+ "accuracy": 0.3622994652406417,
546
+ "f1": 0.3558594151448342,
547
+ "f1_weighted": 0.3743566420979573
548
+ },
549
+ {
550
+ "accuracy": 0.4318181818181818,
551
+ "f1": 0.4142034247992481,
552
+ "f1_weighted": 0.4372855240915391
553
+ },
554
+ {
555
+ "accuracy": 0.4692513368983957,
556
+ "f1": 0.4173544841744676,
557
+ "f1_weighted": 0.48247731099369323
558
+ },
559
+ {
560
+ "accuracy": 0.4197860962566845,
561
+ "f1": 0.4008791161874912,
562
+ "f1_weighted": 0.43644786636140914
563
+ }
564
+ ]
565
+ },
566
+ {
567
+ "accuracy": 0.3822971548998947,
568
+ "f1": 0.3537680611327759,
569
+ "f1_weighted": 0.3898921395542636,
570
+ "hf_subset": "twi",
571
+ "languages": [
572
+ "twi-Latn"
573
+ ],
574
+ "main_score": 0.3822971548998947,
575
+ "scores_per_experiment": [
576
+ {
577
+ "accuracy": 0.4088514225500527,
578
+ "f1": 0.3812346351528029,
579
+ "f1_weighted": 0.42466375814327656
580
+ },
581
+ {
582
+ "accuracy": 0.40252897787144365,
583
+ "f1": 0.3773221413392622,
584
+ "f1_weighted": 0.4047806996181359
585
+ },
586
+ {
587
+ "accuracy": 0.36459430979978924,
588
+ "f1": 0.3224467323649447,
589
+ "f1_weighted": 0.3361490476547068
590
+ },
591
+ {
592
+ "accuracy": 0.4488935721812434,
593
+ "f1": 0.3904077604655171,
594
+ "f1_weighted": 0.4460361688375754
595
+ },
596
+ {
597
+ "accuracy": 0.422550052687039,
598
+ "f1": 0.3911071248157197,
599
+ "f1_weighted": 0.43662788675941616
600
+ },
601
+ {
602
+ "accuracy": 0.3266596417281349,
603
+ "f1": 0.31794875607917755,
604
+ "f1_weighted": 0.33821715021250504
605
+ },
606
+ {
607
+ "accuracy": 0.3951527924130664,
608
+ "f1": 0.3517906955588777,
609
+ "f1_weighted": 0.40649333715054686
610
+ },
611
+ {
612
+ "accuracy": 0.3530031612223393,
613
+ "f1": 0.3425457908074916,
614
+ "f1_weighted": 0.36591967501527917
615
+ },
616
+ {
617
+ "accuracy": 0.37407797681770283,
618
+ "f1": 0.3514729687669121,
619
+ "f1_weighted": 0.40139281531576315
620
+ },
621
+ {
622
+ "accuracy": 0.3266596417281349,
623
+ "f1": 0.3114040059770537,
624
+ "f1_weighted": 0.3386408568354308
625
+ }
626
+ ]
627
+ },
628
+ {
629
+ "accuracy": 0.34724409448818894,
630
+ "f1": 0.3260641912513851,
631
+ "f1_weighted": 0.36005111395861633,
632
+ "hf_subset": "tso",
633
+ "languages": [
634
+ "tso-Latn"
635
+ ],
636
+ "main_score": 0.34724409448818894,
637
+ "scores_per_experiment": [
638
+ {
639
+ "accuracy": 0.3031496062992126,
640
+ "f1": 0.2925346160640278,
641
+ "f1_weighted": 0.303812354414485
642
+ },
643
+ {
644
+ "accuracy": 0.36220472440944884,
645
+ "f1": 0.3342665789764065,
646
+ "f1_weighted": 0.38453208704439457
647
+ },
648
+ {
649
+ "accuracy": 0.3110236220472441,
650
+ "f1": 0.3127699394341199,
651
+ "f1_weighted": 0.33097961160597983
652
+ },
653
+ {
654
+ "accuracy": 0.36220472440944884,
655
+ "f1": 0.3284803708532522,
656
+ "f1_weighted": 0.37099583542930664
657
+ },
658
+ {
659
+ "accuracy": 0.3464566929133858,
660
+ "f1": 0.34072498293844394,
661
+ "f1_weighted": 0.3620394825672479
662
+ },
663
+ {
664
+ "accuracy": 0.32677165354330706,
665
+ "f1": 0.31659544159544156,
666
+ "f1_weighted": 0.34429890975166566
667
+ },
668
+ {
669
+ "accuracy": 0.3464566929133858,
670
+ "f1": 0.31997917976873186,
671
+ "f1_weighted": 0.3532631752045612
672
+ },
673
+ {
674
+ "accuracy": 0.4094488188976378,
675
+ "f1": 0.3609436225509796,
676
+ "f1_weighted": 0.41609793877724505
677
+ },
678
+ {
679
+ "accuracy": 0.3464566929133858,
680
+ "f1": 0.3292909684965745,
681
+ "f1_weighted": 0.3596880829223544
682
+ },
683
+ {
684
+ "accuracy": 0.35826771653543305,
685
+ "f1": 0.32505621183587285,
686
+ "f1_weighted": 0.3748036618689228
687
+ }
688
+ ]
689
+ },
690
+ {
691
+ "accuracy": 0.3328125,
692
+ "f1": 0.18183456793000624,
693
+ "f1_weighted": 0.4085611607613312,
694
+ "hf_subset": "yor",
695
+ "languages": [
696
+ "yor-Latn"
697
+ ],
698
+ "main_score": 0.3328125,
699
+ "scores_per_experiment": [
700
+ {
701
+ "accuracy": 0.07470703125,
702
+ "f1": 0.09523392434840276,
703
+ "f1_weighted": 0.13267504393119908
704
+ },
705
+ {
706
+ "accuracy": 0.10302734375,
707
+ "f1": 0.08389753052729154,
708
+ "f1_weighted": 0.1185371532075585
709
+ },
710
+ {
711
+ "accuracy": 0.2861328125,
712
+ "f1": 0.19468604968604966,
713
+ "f1_weighted": 0.4201563899659603
714
+ },
715
+ {
716
+ "accuracy": 0.4365234375,
717
+ "f1": 0.2429922371206775,
718
+ "f1_weighted": 0.5859970448129852
719
+ },
720
+ {
721
+ "accuracy": 0.2822265625,
722
+ "f1": 0.1739072602565437,
723
+ "f1_weighted": 0.43166683587108334
724
+ },
725
+ {
726
+ "accuracy": 0.80126953125,
727
+ "f1": 0.3428927009501774,
728
+ "f1_weighted": 0.8437856906337917
729
+ },
730
+ {
731
+ "accuracy": 0.2333984375,
732
+ "f1": 0.16562521351253748,
733
+ "f1_weighted": 0.3669642040130597
734
+ },
735
+ {
736
+ "accuracy": 0.171875,
737
+ "f1": 0.1270154698227933,
738
+ "f1_weighted": 0.2229364361568958
739
+ },
740
+ {
741
+ "accuracy": 0.853515625,
742
+ "f1": 0.31579316043262173,
743
+ "f1_weighted": 0.867733661067742
744
+ },
745
+ {
746
+ "accuracy": 0.08544921875,
747
+ "f1": 0.07630213264296744,
748
+ "f1_weighted": 0.09515914795303607
749
+ }
750
+ ]
751
+ }
752
+ ]
753
+ },
754
+ "task_name": "AfriSentiClassification"
755
+ }
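
In classification files such as AfriSentiClassification above, each hf_subset entry records ten runs under scores_per_experiment, and the subset-level figures appear to be their means (for the amh subset the ten accuracies average to about 0.51231, matching the reported 0.5123...). A small sketch of that check, assuming a local copy of the file:

import json
from statistics import mean

path = "results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AfriSentiClassification.json"
with open(path) as f:
    result = json.load(f)

for entry in result["scores"]["test"]:
    per_run = [run["accuracy"] for run in entry["scores_per_experiment"]]
    # Reported subset accuracy vs. mean over the ten experiments, per language subset.
    print(entry["hf_subset"], round(entry["main_score"], 5), round(mean(per_run), 5))
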
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AlloProfClusteringS2S.v2.json ADDED
@@ -0,0 +1,34 @@
{
  "dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
  "evaluation_time": 6.292850494384766,
  "kg_co2_emissions": null,
  "mteb_version": "1.18.2",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": [
          "fra-Latn"
        ],
        "main_score": 0.4467655078318857,
        "v_measure": 0.4467655078318857,
        "v_measure_std": 0.011932803218624204,
        "v_measures": {
          "Level 0": [
            0.4456740442976179,
            0.43747939525244117,
            0.42972216485169346,
            0.46256261721554837,
            0.44087029888530604,
            0.46098899574668306,
            0.44184178143443775,
            0.453910216544172,
            0.43175379214465676,
            0.46285177194630006
          ]
        }
      }
    ]
  },
  "task_name": "AlloProfClusteringS2S.v2"
}
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AlloprofReranking.json ADDED
@@ -0,0 +1,26 @@
{
  "dataset_revision": "65393d0d7a08a10b4e348135e824f385d420b0fd",
  "evaluation_time": 20.040235996246338,
  "kg_co2_emissions": null,
  "mteb_version": "1.18.2",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": [
          "fra-Latn"
        ],
        "main_score": 0.7292786802385444,
        "map": 0.7292786802385444,
        "mrr": 0.7406043534281876,
        "nAUC_map_diff1": 0.5538385226061495,
        "nAUC_map_max": 0.12220012411283157,
        "nAUC_map_std": 0.24944978010977142,
        "nAUC_mrr_diff1": 0.5543515205726811,
        "nAUC_mrr_max": 0.1299245195473337,
        "nAUC_mrr_std": 0.241050373801174
      }
    ]
  },
  "task_name": "AlloprofReranking"
}
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/AmazonCounterfactualClassification.json ADDED
@@ -0,0 +1,685 @@
1
+ {
2
+ "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
3
+ "evaluation_time": 21.676952600479126,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.9553973013493253,
10
+ "ap": 0.6780231608293034,
11
+ "ap_weighted": 0.6780231608293034,
12
+ "f1": 0.893303326488402,
13
+ "f1_weighted": 0.95773035804758,
14
+ "hf_subset": "en-ext",
15
+ "languages": [
16
+ "eng-Latn"
17
+ ],
18
+ "main_score": 0.9553973013493253,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.9640179910044977,
22
+ "ap": 0.7208726263164927,
23
+ "ap_weighted": 0.7208726263164927,
24
+ "f1": 0.9103860988731085,
25
+ "f1_weighted": 0.9652652443098788
26
+ },
27
+ {
28
+ "accuracy": 0.9647676161919041,
29
+ "ap": 0.7269684599707517,
30
+ "ap_weighted": 0.7269684599707517,
31
+ "f1": 0.9125056691715683,
32
+ "f1_weighted": 0.9660348796987017
33
+ },
34
+ {
35
+ "accuracy": 0.954272863568216,
36
+ "ap": 0.6704397174947406,
37
+ "ap_weighted": 0.6704397174947406,
38
+ "f1": 0.8908122860666043,
39
+ "f1_weighted": 0.9567064550358292
40
+ },
41
+ {
42
+ "accuracy": 0.9482758620689655,
43
+ "ap": 0.6396257806348624,
44
+ "ap_weighted": 0.6396257806348624,
45
+ "f1": 0.8784767009710404,
46
+ "f1_weighted": 0.9513826496549267
47
+ },
48
+ {
49
+ "accuracy": 0.9355322338830585,
50
+ "ap": 0.5914934443639736,
51
+ "ap_weighted": 0.5914934443639736,
52
+ "f1": 0.8564974182444062,
53
+ "f1_weighted": 0.940801221592302
54
+ },
55
+ {
56
+ "accuracy": 0.9490254872563718,
57
+ "ap": 0.6477070778656762,
58
+ "ap_weighted": 0.6477070778656762,
59
+ "f1": 0.8811880150676104,
60
+ "f1_weighted": 0.9522558430748842
61
+ },
62
+ {
63
+ "accuracy": 0.9565217391304348,
64
+ "ap": 0.6797236596073818,
65
+ "ap_weighted": 0.6797236596073818,
66
+ "f1": 0.8947422837490749,
67
+ "f1_weighted": 0.9585770243388165
68
+ },
69
+ {
70
+ "accuracy": 0.9662668665667167,
71
+ "ap": 0.7326301105890003,
72
+ "ap_weighted": 0.7326301105890003,
73
+ "f1": 0.9147527574351699,
74
+ "f1_weighted": 0.9672107123463689
75
+ },
76
+ {
77
+ "accuracy": 0.9632683658170914,
78
+ "ap": 0.7201118736623467,
79
+ "ap_weighted": 0.7201118736623467,
80
+ "f1": 0.9098184808237,
81
+ "f1_weighted": 0.9647776614985019
82
+ },
83
+ {
84
+ "accuracy": 0.952023988005997,
85
+ "ap": 0.6506588577878067,
86
+ "ap_weighted": 0.6506588577878067,
87
+ "f1": 0.8838535544817379,
88
+ "f1_weighted": 0.9542918889255906
89
+ }
90
+ ]
91
+ },
92
+ {
93
+ "accuracy": 0.9091044776119401,
94
+ "ap": 0.6547687537766163,
95
+ "ap_weighted": 0.6547687537766163,
96
+ "f1": 0.8656816899679225,
97
+ "f1_weighted": 0.9121721621959964,
98
+ "hf_subset": "en",
99
+ "languages": [
100
+ "eng-Latn"
101
+ ],
102
+ "main_score": 0.9091044776119401,
103
+ "scores_per_experiment": [
104
+ {
105
+ "accuracy": 0.9194029850746268,
106
+ "ap": 0.6823672195616629,
107
+ "ap_weighted": 0.6823672195616629,
108
+ "f1": 0.8787354703776695,
109
+ "f1_weighted": 0.9214992487187998
110
+ },
111
+ {
112
+ "accuracy": 0.9014925373134328,
113
+ "ap": 0.6357242241257282,
114
+ "ap_weighted": 0.6357242241257282,
115
+ "f1": 0.8569043181112146,
116
+ "f1_weighted": 0.9055460117863617
117
+ },
118
+ {
119
+ "accuracy": 0.9104477611940298,
120
+ "ap": 0.6557255583691981,
121
+ "ap_weighted": 0.6557255583691981,
122
+ "f1": 0.8666418970024283,
123
+ "f1_weighted": 0.913185627706005
124
+ },
125
+ {
126
+ "accuracy": 0.9119402985074627,
127
+ "ap": 0.6568736470320155,
128
+ "ap_weighted": 0.6568736470320155,
129
+ "f1": 0.8671613252279227,
130
+ "f1_weighted": 0.9141274463026072
131
+ },
132
+ {
133
+ "accuracy": 0.9149253731343283,
134
+ "ap": 0.6695686334856894,
135
+ "ap_weighted": 0.6695686334856894,
136
+ "f1": 0.8729857819905213,
137
+ "f1_weighted": 0.9174308552026597
138
+ },
139
+ {
140
+ "accuracy": 0.9119402985074627,
141
+ "ap": 0.6628312202635485,
142
+ "ap_weighted": 0.6628312202635485,
143
+ "f1": 0.8698553692479398,
144
+ "f1_weighted": 0.9149226950691612
145
+ },
146
+ {
147
+ "accuracy": 0.9208955223880597,
148
+ "ap": 0.685468931390089,
149
+ "ap_weighted": 0.685468931390089,
150
+ "f1": 0.8800414838236734,
151
+ "f1_weighted": 0.9226717849343373
152
+ },
153
+ {
154
+ "accuracy": 0.917910447761194,
155
+ "ap": 0.6740523566419705,
156
+ "ap_weighted": 0.6740523566419705,
157
+ "f1": 0.8748518429212331,
158
+ "f1_weighted": 0.9195539059611925
159
+ },
160
+ {
161
+ "accuracy": 0.9014925373134328,
162
+ "ap": 0.6392161330750826,
163
+ "ap_weighted": 0.6392161330750826,
164
+ "f1": 0.8582692307692308,
165
+ "f1_weighted": 0.9059316877152698
166
+ },
167
+ {
168
+ "accuracy": 0.8805970149253731,
169
+ "ap": 0.5858596138211775,
170
+ "ap_weighted": 0.5858596138211775,
171
+ "f1": 0.8313701802073895,
172
+ "f1_weighted": 0.88685235856357
173
+ }
174
+ ]
175
+ },
176
+ {
177
+ "accuracy": 0.8950749464668094,
178
+ "ap": 0.9331387046391194,
179
+ "ap_weighted": 0.9331387046391194,
180
+ "f1": 0.8821543990776405,
181
+ "f1_weighted": 0.8974348261529854,
182
+ "hf_subset": "de",
183
+ "languages": [
184
+ "deu-Latn"
185
+ ],
186
+ "main_score": 0.8950749464668094,
187
+ "scores_per_experiment": [
188
+ {
189
+ "accuracy": 0.8875802997858673,
190
+ "ap": 0.9364181904473619,
191
+ "ap_weighted": 0.9364181904473619,
192
+ "f1": 0.8756774230352355,
193
+ "f1_weighted": 0.8907516544910529
194
+ },
195
+ {
196
+ "accuracy": 0.9004282655246253,
197
+ "ap": 0.9322635970220163,
198
+ "ap_weighted": 0.9322635970220163,
199
+ "f1": 0.887091763451902,
200
+ "f1_weighted": 0.9022978686189324
201
+ },
202
+ {
203
+ "accuracy": 0.9004282655246253,
204
+ "ap": 0.9361123301678326,
205
+ "ap_weighted": 0.9361123301678326,
206
+ "f1": 0.8878312781593699,
207
+ "f1_weighted": 0.9025613017238537
208
+ },
209
+ {
210
+ "accuracy": 0.892933618843683,
211
+ "ap": 0.9338221465492803,
212
+ "ap_weighted": 0.9338221465492803,
213
+ "f1": 0.8802471985024489,
214
+ "f1_weighted": 0.8955209808869612
215
+ },
216
+ {
217
+ "accuracy": 0.8940042826552462,
218
+ "ap": 0.9352652442968317,
219
+ "ap_weighted": 0.9352652442968317,
220
+ "f1": 0.8815369221840581,
221
+ "f1_weighted": 0.8965965061195527
222
+ },
223
+ {
224
+ "accuracy": 0.8993576017130621,
225
+ "ap": 0.936619392186844,
226
+ "ap_weighted": 0.936619392186844,
227
+ "f1": 0.886898136697172,
228
+ "f1_weighted": 0.9016083437804486
229
+ },
230
+ {
231
+ "accuracy": 0.8950749464668094,
232
+ "ap": 0.9242983518559786,
233
+ "ap_weighted": 0.9242983518559786,
234
+ "f1": 0.8803146558712924,
235
+ "f1_weighted": 0.8967849801333632
236
+ },
237
+ {
238
+ "accuracy": 0.8832976445396146,
239
+ "ap": 0.9296266488464512,
240
+ "ap_weighted": 0.9296266488464512,
241
+ "f1": 0.8703654892013686,
242
+ "f1_weighted": 0.8864101293837348
243
+ },
244
+ {
245
+ "accuracy": 0.8982869379014989,
246
+ "ap": 0.9332477095604078,
247
+ "ap_weighted": 0.9332477095604078,
248
+ "f1": 0.8852329036088322,
249
+ "f1_weighted": 0.9004004482155498
250
+ },
251
+ {
252
+ "accuracy": 0.8993576017130621,
253
+ "ap": 0.9337134354581893,
254
+ "ap_weighted": 0.9337134354581893,
255
+ "f1": 0.8863482200647249,
256
+ "f1_weighted": 0.9014160481764066
257
+ }
258
+ ]
259
+ },
260
+ {
261
+ "accuracy": 0.9421841541755889,
262
+ "ap": 0.5396149211318628,
263
+ "ap_weighted": 0.5396149211318628,
264
+ "f1": 0.8402551431425296,
265
+ "f1_weighted": 0.9416263859300612,
266
+ "hf_subset": "ja",
267
+ "languages": [
268
+ "jpn-Jpan"
269
+ ],
270
+ "main_score": 0.9421841541755889,
271
+ "scores_per_experiment": [
272
+ {
273
+ "accuracy": 0.9379014989293362,
274
+ "ap": 0.5148326803043419,
275
+ "ap_weighted": 0.5148326803043419,
276
+ "f1": 0.8300859419107961,
277
+ "f1_weighted": 0.9376116721631573
278
+ },
279
+ {
280
+ "accuracy": 0.9464668094218416,
281
+ "ap": 0.5624176217474531,
282
+ "ap_weighted": 0.5624176217474531,
283
+ "f1": 0.8492848290818962,
284
+ "f1_weighted": 0.9454302016315488
285
+ },
286
+ {
287
+ "accuracy": 0.9411134903640257,
288
+ "ap": 0.5338494297468015,
289
+ "ap_weighted": 0.5338494297468015,
290
+ "f1": 0.8381185575944361,
291
+ "f1_weighted": 0.9406987456683092
292
+ },
293
+ {
294
+ "accuracy": 0.9453961456102784,
295
+ "ap": 0.5564471906102382,
296
+ "ap_weighted": 0.5564471906102382,
297
+ "f1": 0.8470106470106471,
298
+ "f1_weighted": 0.9444766549691604
299
+ },
300
+ {
301
+ "accuracy": 0.9443254817987152,
302
+ "ap": 0.5506094357205171,
303
+ "ap_weighted": 0.5506094357205171,
304
+ "f1": 0.8447572654162352,
305
+ "f1_weighted": 0.9435268062662354
306
+ },
307
+ {
308
+ "accuracy": 0.943254817987152,
309
+ "ap": 0.5448999831360646,
310
+ "ap_weighted": 0.5448999831360646,
311
+ "f1": 0.8425243441165337,
312
+ "f1_weighted": 0.9425805845609766
313
+ },
314
+ {
315
+ "accuracy": 0.9443254817987152,
316
+ "ap": 0.5506094357205171,
317
+ "ap_weighted": 0.5506094357205171,
318
+ "f1": 0.8447572654162352,
319
+ "f1_weighted": 0.9435268062662354
320
+ },
321
+ {
322
+ "accuracy": 0.9389721627408993,
323
+ "ap": 0.5265421606069303,
324
+ "ap_weighted": 0.5265421606069303,
325
+ "f1": 0.8353166808444823,
326
+ "f1_weighted": 0.9391120486813803
327
+ },
328
+ {
329
+ "accuracy": 0.9357601713062098,
330
+ "ap": 0.5081963593788693,
331
+ "ap_weighted": 0.5081963593788693,
332
+ "f1": 0.8274396777888631,
333
+ "f1_weighted": 0.9360529293967972
334
+ },
335
+ {
336
+ "accuracy": 0.9443254817987152,
337
+ "ap": 0.5477449143468951,
338
+ "ap_weighted": 0.5477449143468951,
339
+ "f1": 0.8432562222451719,
340
+ "f1_weighted": 0.9432474096968108
341
+ }
342
+ ]
343
+ }
344
+ ],
345
+ "validation": [
346
+ {
347
+ "accuracy": 0.9312312312312313,
348
+ "ap": 0.5449899577379449,
349
+ "ap_weighted": 0.5449899577379449,
350
+ "f1": 0.839169793356685,
351
+ "f1_weighted": 0.9363547132117447,
352
+ "hf_subset": "en-ext",
353
+ "languages": [
354
+ "eng-Latn"
355
+ ],
356
+ "main_score": 0.9312312312312313,
357
+ "scores_per_experiment": [
358
+ {
359
+ "accuracy": 0.9414414414414415,
360
+ "ap": 0.5788817279619084,
361
+ "ap_weighted": 0.5788817279619084,
362
+ "f1": 0.8560095794042808,
363
+ "f1_weighted": 0.9446055844798547
364
+ },
365
+ {
366
+ "accuracy": 0.9414414414414415,
367
+ "ap": 0.5788817279619084,
368
+ "ap_weighted": 0.5788817279619084,
369
+ "f1": 0.8560095794042808,
370
+ "f1_weighted": 0.9446055844798547
371
+ },
372
+ {
373
+ "accuracy": 0.9279279279279279,
374
+ "ap": 0.531131608605066,
375
+ "ap_weighted": 0.531131608605066,
376
+ "f1": 0.8331105913798864,
377
+ "f1_weighted": 0.9335943026220339
378
+ },
379
+ {
380
+ "accuracy": 0.9234234234234234,
381
+ "ap": 0.5040787055712429,
382
+ "ap_weighted": 0.5040787055712429,
383
+ "f1": 0.8217447663830971,
384
+ "f1_weighted": 0.9292856123979155
385
+ },
386
+ {
387
+ "accuracy": 0.9114114114114115,
388
+ "ap": 0.4784112470679635,
389
+ "ap_weighted": 0.4784112470679635,
390
+ "f1": 0.8059315964933942,
391
+ "f1_weighted": 0.9202192981968265
392
+ },
393
+ {
394
+ "accuracy": 0.9144144144144144,
395
+ "ap": 0.4930788997953177,
396
+ "ap_weighted": 0.4930788997953177,
397
+ "f1": 0.8125101864427706,
398
+ "f1_weighted": 0.9229237287664253
399
+ },
400
+ {
401
+ "accuracy": 0.9354354354354354,
402
+ "ap": 0.5592630268165635,
403
+ "ap_weighted": 0.5592630268165635,
404
+ "f1": 0.8464508034550982,
405
+ "f1_weighted": 0.9398232378013297
406
+ },
407
+ {
408
+ "accuracy": 0.9459459459459459,
409
+ "ap": 0.599262452526254,
410
+ "ap_weighted": 0.599262452526254,
411
+ "f1": 0.8647715736040609,
412
+ "f1_weighted": 0.9484629807472447
413
+ },
414
+ {
415
+ "accuracy": 0.9429429429429429,
416
+ "ap": 0.5957785724681581,
417
+ "ap_weighted": 0.5957785724681581,
418
+ "f1": 0.8620486656200942,
419
+ "f1_weighted": 0.9464325000039286
420
+ },
421
+ {
422
+ "accuracy": 0.9279279279279279,
423
+ "ap": 0.531131608605066,
424
+ "ap_weighted": 0.531131608605066,
425
+ "f1": 0.8331105913798864,
426
+ "f1_weighted": 0.9335943026220339
427
+ }
428
+ ]
429
+ },
430
+ {
431
+ "accuracy": 0.8934328358208956,
432
+ "ap": 0.5581302911937404,
433
+ "ap_weighted": 0.5581302911937404,
434
+ "f1": 0.8266009941425985,
435
+ "f1_weighted": 0.8969673640540142,
436
+ "hf_subset": "en",
437
+ "languages": [
438
+ "eng-Latn"
439
+ ],
440
+ "main_score": 0.8934328358208956,
441
+ "scores_per_experiment": [
442
+ {
443
+ "accuracy": 0.9014925373134328,
444
+ "ap": 0.5759412486638426,
445
+ "ap_weighted": 0.5759412486638426,
446
+ "f1": 0.8356891247157444,
447
+ "f1_weighted": 0.9036652915029791
448
+ },
449
+ {
450
+ "accuracy": 0.8865671641791045,
451
+ "ap": 0.5425258763652999,
452
+ "ap_weighted": 0.5425258763652999,
453
+ "f1": 0.8186609686609687,
454
+ "f1_weighted": 0.8912046604583919
455
+ },
456
+ {
457
+ "accuracy": 0.8865671641791045,
458
+ "ap": 0.5375744430556577,
459
+ "ap_weighted": 0.5375744430556577,
460
+ "f1": 0.8165071494464944,
461
+ "f1_weighted": 0.8906286143085311
462
+ },
463
+ {
464
+ "accuracy": 0.9014925373134328,
465
+ "ap": 0.5803396809058158,
466
+ "ap_weighted": 0.5803396809058158,
467
+ "f1": 0.8377247706422019,
468
+ "f1_weighted": 0.9042254415993427
469
+ },
470
+ {
471
+ "accuracy": 0.8925373134328358,
472
+ "ap": 0.5570031615322403,
473
+ "ap_weighted": 0.5570031615322403,
474
+ "f1": 0.826164667896679,
475
+ "f1_weighted": 0.8963850030291347
476
+ },
477
+ {
478
+ "accuracy": 0.9014925373134328,
479
+ "ap": 0.5803396809058158,
480
+ "ap_weighted": 0.5803396809058158,
481
+ "f1": 0.8377247706422019,
482
+ "f1_weighted": 0.9042254415993427
483
+ },
484
+ {
485
+ "accuracy": 0.9014925373134328,
486
+ "ap": 0.5803396809058158,
487
+ "ap_weighted": 0.5803396809058158,
488
+ "f1": 0.8377247706422019,
489
+ "f1_weighted": 0.9042254415993427
490
+ },
491
+ {
492
+ "accuracy": 0.8955223880597015,
493
+ "ap": 0.5553188962350053,
494
+ "ap_weighted": 0.5553188962350053,
495
+ "f1": 0.825730889850032,
496
+ "f1_weighted": 0.8978268243213414
497
+ },
498
+ {
499
+ "accuracy": 0.8895522388059701,
500
+ "ap": 0.5496625662363269,
501
+ "ap_weighted": 0.5496625662363269,
502
+ "f1": 0.8223932138302599,
503
+ "f1_weighted": 0.8937904297024957
504
+ },
505
+ {
506
+ "accuracy": 0.8776119402985074,
507
+ "ap": 0.5222576771315834,
508
+ "ap_weighted": 0.5222576771315834,
509
+ "f1": 0.8076896150992006,
510
+ "f1_weighted": 0.8834964924192412
511
+ }
512
+ ]
513
+ },
514
+ {
515
+ "accuracy": 0.9150214592274679,
516
+ "ap": 0.9414034215549709,
517
+ "ap_weighted": 0.9414034215549709,
518
+ "f1": 0.9026692177833964,
519
+ "f1_weighted": 0.9163521976941643,
520
+ "hf_subset": "de",
521
+ "languages": [
522
+ "deu-Latn"
523
+ ],
524
+ "main_score": 0.9150214592274679,
525
+ "scores_per_experiment": [
526
+ {
527
+ "accuracy": 0.9141630901287554,
528
+ "ap": 0.9462468343115115,
529
+ "ap_weighted": 0.9462468343115115,
530
+ "f1": 0.9026916410866797,
531
+ "f1_weighted": 0.9158838074850666
532
+ },
533
+ {
534
+ "accuracy": 0.9184549356223176,
535
+ "ap": 0.9422217165031449,
536
+ "ap_weighted": 0.9422217165031449,
537
+ "f1": 0.90627315647958,
538
+ "f1_weighted": 0.9196151050644831
539
+ },
540
+ {
541
+ "accuracy": 0.9184549356223176,
542
+ "ap": 0.9441650150607106,
543
+ "ap_weighted": 0.9441650150607106,
544
+ "f1": 0.9066033755274261,
545
+ "f1_weighted": 0.9197400445482697
546
+ },
547
+ {
548
+ "accuracy": 0.9163090128755365,
549
+ "ap": 0.9451975204601136,
550
+ "ap_weighted": 0.9451975204601136,
551
+ "f1": 0.9046419745313163,
552
+ "f1_weighted": 0.9178121282450371
553
+ },
554
+ {
555
+ "accuracy": 0.9141630901287554,
556
+ "ap": 0.9462468343115115,
557
+ "ap_weighted": 0.9462468343115115,
558
+ "f1": 0.9026916410866797,
559
+ "f1_weighted": 0.9158838074850666
560
+ },
561
+ {
562
+ "accuracy": 0.9098712446351931,
563
+ "ap": 0.9385135168778312,
564
+ "ap_weighted": 0.9385135168778312,
565
+ "f1": 0.897130242825607,
566
+ "f1_weighted": 0.9114250253436791
567
+ },
568
+ {
569
+ "accuracy": 0.9141630901287554,
570
+ "ap": 0.9290668573693585,
571
+ "ap_weighted": 0.9290668573693585,
572
+ "f1": 0.8994996549344376,
573
+ "f1_weighted": 0.9146573632251931
574
+ },
575
+ {
576
+ "accuracy": 0.9206008583690987,
577
+ "ap": 0.9530466820732915,
578
+ "ap_weighted": 0.9530466820732915,
579
+ "f1": 0.9101394136807817,
580
+ "f1_weighted": 0.9222457396094002
581
+ },
582
+ {
583
+ "accuracy": 0.9098712446351931,
584
+ "ap": 0.932794221772597,
585
+ "ap_weighted": 0.932794221772597,
586
+ "f1": 0.8960351861295257,
587
+ "f1_weighted": 0.9110106847474245
588
+ },
589
+ {
590
+ "accuracy": 0.9141630901287554,
591
+ "ap": 0.9365350168096397,
592
+ "ap_weighted": 0.9365350168096397,
593
+ "f1": 0.9009858915519293,
594
+ "f1_weighted": 0.9152482711880234
595
+ }
596
+ ]
597
+ },
598
+ {
599
+ "accuracy": 0.9324034334763949,
600
+ "ap": 0.4585340912431949,
601
+ "ap_weighted": 0.4585340912431949,
602
+ "f1": 0.8057991764854651,
603
+ "f1_weighted": 0.9316412551199228,
604
+ "hf_subset": "ja",
605
+ "languages": [
606
+ "jpn-Jpan"
607
+ ],
608
+ "main_score": 0.9324034334763949,
609
+ "scores_per_experiment": [
610
+ {
611
+ "accuracy": 0.9227467811158798,
612
+ "ap": 0.40913700641748546,
613
+ "ap_weighted": 0.40913700641748546,
614
+ "f1": 0.7829192546583852,
615
+ "f1_weighted": 0.9227467811158798
616
+ },
617
+ {
618
+ "accuracy": 0.9334763948497854,
619
+ "ap": 0.4543212011705861,
620
+ "ap_weighted": 0.4543212011705861,
621
+ "f1": 0.8034958851934979,
622
+ "f1_weighted": 0.9317616124004939
623
+ },
624
+ {
625
+ "accuracy": 0.9377682403433476,
626
+ "ap": 0.4893398195602269,
627
+ "ap_weighted": 0.4893398195602269,
628
+ "f1": 0.8198781771895451,
629
+ "f1_weighted": 0.9368301231299222
630
+ },
631
+ {
632
+ "accuracy": 0.9334763948497854,
633
+ "ap": 0.46165763310579466,
634
+ "ap_weighted": 0.46165763310579466,
635
+ "f1": 0.807455982512962,
636
+ "f1_weighted": 0.932473579897503
637
+ },
638
+ {
639
+ "accuracy": 0.9313304721030042,
640
+ "ap": 0.45199452068737384,
641
+ "ap_weighted": 0.45199452068737384,
642
+ "f1": 0.8032198469253101,
643
+ "f1_weighted": 0.9306490326073782
644
+ },
645
+ {
646
+ "accuracy": 0.9334763948497854,
647
+ "ap": 0.4543212011705861,
648
+ "ap_weighted": 0.4543212011705861,
649
+ "f1": 0.8034958851934979,
650
+ "f1_weighted": 0.9317616124004939
651
+ },
652
+ {
653
+ "accuracy": 0.9399141630901288,
654
+ "ap": 0.5001732732652681,
655
+ "ap_weighted": 0.5001732732652681,
656
+ "f1": 0.8243214131839725,
657
+ "f1_weighted": 0.9386909593874181
658
+ },
659
+ {
660
+ "accuracy": 0.9356223175965666,
661
+ "ap": 0.48634763137184905,
662
+ "ap_weighted": 0.48634763137184905,
663
+ "f1": 0.8190993788819876,
664
+ "f1_weighted": 0.9356223175965666
665
+ },
666
+ {
667
+ "accuracy": 0.924892703862661,
668
+ "ap": 0.4336249147914437,
669
+ "ap_weighted": 0.4336249147914437,
670
+ "f1": 0.7948814689052379,
671
+ "f1_weighted": 0.9259440076493786
672
+ },
673
+ {
674
+ "accuracy": 0.9313304721030042,
675
+ "ap": 0.4444237108913355,
676
+ "ap_weighted": 0.4444237108913355,
677
+ "f1": 0.7992244722102542,
678
+ "f1_weighted": 0.9299325250141921
679
+ }
680
+ ]
681
+ }
682
+ ]
683
+ },
684
+ "task_name": "AmazonCounterfactualClassification"
685
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArXivHierarchicalClusteringP2P.json ADDED
@@ -0,0 +1,46 @@
{
  "dataset_revision": "0bbdb47bcbe3a90093699aefeed338a0f28a7ee8",
  "evaluation_time": 5.450691223144531,
  "kg_co2_emissions": null,
  "mteb_version": "1.18.2",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ],
        "main_score": 0.5892914985808295,
        "v_measure": 0.5892914985808295,
        "v_measure_std": 0.01885782145704248,
        "v_measures": {
          "Level 0": [
            0.5952830205051786,
            0.5535311544004632,
            0.5823384788034528,
            0.576531061756864,
            0.6017520066148326,
            0.6140376056370824,
            0.607302581658317,
            0.6209719048456509,
            0.6134199642797442,
            0.5778339253227
          ],
          "Level 1": [
            0.603648672127484,
            0.5608773368150661,
            0.5778523375176597,
            0.6073137489609124,
            0.567769120700672,
            0.5844700034529409,
            0.6057735464079348,
            0.589154374055415,
            0.569966517774814,
            0.5760026099794041
          ]
        }
      }
    ]
  },
  "task_name": "ArXivHierarchicalClusteringP2P"
}
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArXivHierarchicalClusteringS2S.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "dataset_revision": "b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3",
3
+ "evaluation_time": 6.010518550872803,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.5594901652486779,
14
+ "v_measure": 0.5594901652486779,
15
+ "v_measure_std": 0.026372898077789714,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.54028682818045,
19
+ 0.5615052045413732,
20
+ 0.5136613347597976,
21
+ 0.5601146631369621,
22
+ 0.5894671688190903,
23
+ 0.5800932337790778,
24
+ 0.47503743265187376,
25
+ 0.5493468576847207,
26
+ 0.5883488067998656,
27
+ 0.5629632364714117
28
+ ],
29
+ "Level 1": [
30
+ 0.5537088864562819,
31
+ 0.5811471355652746,
32
+ 0.5800114881870035,
33
+ 0.5779367248189701,
34
+ 0.5473625712479262,
35
+ 0.5637146000566569,
36
+ 0.5556400428584185,
37
+ 0.5816173836342093,
38
+ 0.558374592882095,
39
+ 0.5694651124420991
40
+ ]
41
+ }
42
+ }
43
+ ]
44
+ },
45
+ "task_name": "ArXivHierarchicalClusteringS2S"
46
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArguAna.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "c22ab2a51041ffd869aaddef7af8d8215647e41a",
3
+ "evaluation_time": 51.310452461242676,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.43288,
14
+ "map_at_1": 0.19488,
15
+ "map_at_10": 0.33874,
16
+ "map_at_100": 0.35204,
17
+ "map_at_1000": 0.35211,
18
+ "map_at_20": 0.34916,
19
+ "map_at_3": 0.28082,
20
+ "map_at_5": 0.30995,
21
+ "mrr_at_1": 0.1984352773826458,
22
+ "mrr_at_10": 0.3402043961254483,
23
+ "mrr_at_100": 0.3535055415673981,
24
+ "mrr_at_1000": 0.35357269754391624,
25
+ "mrr_at_20": 0.3506191854743664,
26
+ "mrr_at_3": 0.2823613086770978,
27
+ "mrr_at_5": 0.3114153627311512,
28
+ "nauc_map_at_1000_diff1": 0.15285422099285959,
29
+ "nauc_map_at_1000_max": -0.0009723990237626321,
30
+ "nauc_map_at_1000_std": -0.1266540378146074,
31
+ "nauc_map_at_100_diff1": 0.15282896540272325,
32
+ "nauc_map_at_100_max": -0.0008902572512664901,
33
+ "nauc_map_at_100_std": -0.12650445341019953,
34
+ "nauc_map_at_10_diff1": 0.15211497033647214,
35
+ "nauc_map_at_10_max": 0.0013040925218525206,
36
+ "nauc_map_at_10_std": -0.1256071556430485,
37
+ "nauc_map_at_1_diff1": 0.18305359201241192,
38
+ "nauc_map_at_1_max": -0.04169843790858049,
39
+ "nauc_map_at_1_std": -0.13653951827312494,
40
+ "nauc_map_at_20_diff1": 0.1528609744358619,
41
+ "nauc_map_at_20_max": 0.0010018129294730811,
42
+ "nauc_map_at_20_std": -0.12770210777849714,
43
+ "nauc_map_at_3_diff1": 0.15375149601193486,
44
+ "nauc_map_at_3_max": -0.008904170789018413,
45
+ "nauc_map_at_3_std": -0.12595944542992826,
46
+ "nauc_map_at_5_diff1": 0.15152069102968418,
47
+ "nauc_map_at_5_max": -0.002168823616656122,
48
+ "nauc_map_at_5_std": -0.12638155277726296,
49
+ "nauc_mrr_at_1000_diff1": 0.14057697873718375,
50
+ "nauc_mrr_at_1000_max": -0.0025378375995667246,
51
+ "nauc_mrr_at_1000_std": -0.12389118385216313,
52
+ "nauc_mrr_at_100_diff1": 0.14055471989352855,
53
+ "nauc_mrr_at_100_max": -0.0024559036595358926,
54
+ "nauc_mrr_at_100_std": -0.12374326833509577,
55
+ "nauc_mrr_at_10_diff1": 0.14036848000348606,
56
+ "nauc_mrr_at_10_max": -0.00016746565272508337,
57
+ "nauc_mrr_at_10_std": -0.12295456115715546,
58
+ "nauc_mrr_at_1_diff1": 0.1680727218650125,
59
+ "nauc_mrr_at_1_max": -0.03453021701950542,
60
+ "nauc_mrr_at_1_std": -0.12883370120343468,
61
+ "nauc_mrr_at_20_diff1": 0.14072105323520512,
62
+ "nauc_mrr_at_20_max": -0.000543634173545447,
63
+ "nauc_mrr_at_20_std": -0.12495528570616217,
64
+ "nauc_mrr_at_3_diff1": 0.1403650790479082,
65
+ "nauc_mrr_at_3_max": -0.012851052620595974,
66
+ "nauc_mrr_at_3_std": -0.12463611595680804,
67
+ "nauc_mrr_at_5_diff1": 0.14053088727519183,
68
+ "nauc_mrr_at_5_max": -0.003053225308426329,
69
+ "nauc_mrr_at_5_std": -0.12329969011489846,
70
+ "nauc_ndcg_at_1000_diff1": 0.14721359395726127,
71
+ "nauc_ndcg_at_1000_max": 0.008919673255638687,
72
+ "nauc_ndcg_at_1000_std": -0.12080784819276472,
73
+ "nauc_ndcg_at_100_diff1": 0.1467874117712421,
74
+ "nauc_ndcg_at_100_max": 0.012184881704298718,
75
+ "nauc_ndcg_at_100_std": -0.11539633967808377,
76
+ "nauc_ndcg_at_10_diff1": 0.1425053578119806,
77
+ "nauc_ndcg_at_10_max": 0.02365384789820078,
78
+ "nauc_ndcg_at_10_std": -0.11852036610124699,
79
+ "nauc_ndcg_at_1_diff1": 0.18305359201241192,
80
+ "nauc_ndcg_at_1_max": -0.04169843790858049,
81
+ "nauc_ndcg_at_1_std": -0.13653951827312494,
82
+ "nauc_ndcg_at_20_diff1": 0.14677514704480515,
83
+ "nauc_ndcg_at_20_max": 0.027769038622374613,
84
+ "nauc_ndcg_at_20_std": -0.12367381328807671,
85
+ "nauc_ndcg_at_3_diff1": 0.14574377609232234,
86
+ "nauc_ndcg_at_3_max": -8.384037744546864e-06,
87
+ "nauc_ndcg_at_3_std": -0.12110101721064988,
88
+ "nauc_ndcg_at_5_diff1": 0.14263258635332104,
89
+ "nauc_ndcg_at_5_max": 0.012010180169864937,
90
+ "nauc_ndcg_at_5_std": -0.12151033786816579,
91
+ "nauc_precision_at_1000_diff1": -0.5218514632397261,
92
+ "nauc_precision_at_1000_max": 0.07322384873180168,
93
+ "nauc_precision_at_1000_std": 0.6242691466360337,
94
+ "nauc_precision_at_100_diff1": -0.0312741408374384,
95
+ "nauc_precision_at_100_max": 0.3948140372864331,
96
+ "nauc_precision_at_100_std": 0.641773386699191,
97
+ "nauc_precision_at_10_diff1": 0.10559479320094883,
98
+ "nauc_precision_at_10_max": 0.1184084824338382,
99
+ "nauc_precision_at_10_std": -0.08867917228444891,
100
+ "nauc_precision_at_1_diff1": 0.18305359201241192,
101
+ "nauc_precision_at_1_max": -0.04169843790858049,
102
+ "nauc_precision_at_1_std": -0.13653951827312494,
103
+ "nauc_precision_at_20_diff1": 0.1167193268547961,
104
+ "nauc_precision_at_20_max": 0.26552757318226816,
105
+ "nauc_precision_at_20_std": -0.09165708997330636,
106
+ "nauc_precision_at_3_diff1": 0.12530517351827625,
107
+ "nauc_precision_at_3_max": 0.02267799488966243,
108
+ "nauc_precision_at_3_std": -0.10813985325867193,
109
+ "nauc_precision_at_5_diff1": 0.1191340217914963,
110
+ "nauc_precision_at_5_max": 0.05148418864317921,
111
+ "nauc_precision_at_5_std": -0.10782213928027695,
112
+ "nauc_recall_at_1000_diff1": -0.5218514632397365,
113
+ "nauc_recall_at_1000_max": 0.07322384873173297,
114
+ "nauc_recall_at_1000_std": 0.6242691466359738,
115
+ "nauc_recall_at_100_diff1": -0.031274140837450524,
116
+ "nauc_recall_at_100_max": 0.3948140372864194,
117
+ "nauc_recall_at_100_std": 0.6417733866991796,
118
+ "nauc_recall_at_10_diff1": 0.10559479320094893,
119
+ "nauc_recall_at_10_max": 0.1184084824338383,
120
+ "nauc_recall_at_10_std": -0.088679172284449,
121
+ "nauc_recall_at_1_diff1": 0.18305359201241192,
122
+ "nauc_recall_at_1_max": -0.04169843790858049,
123
+ "nauc_recall_at_1_std": -0.13653951827312494,
124
+ "nauc_recall_at_20_diff1": 0.11671932685479651,
125
+ "nauc_recall_at_20_max": 0.26552757318226794,
126
+ "nauc_recall_at_20_std": -0.09165708997330647,
127
+ "nauc_recall_at_3_diff1": 0.12530517351827644,
128
+ "nauc_recall_at_3_max": 0.022677994889662646,
129
+ "nauc_recall_at_3_std": -0.10813985325867156,
130
+ "nauc_recall_at_5_diff1": 0.11913402179149633,
131
+ "nauc_recall_at_5_max": 0.051484188643179545,
132
+ "nauc_recall_at_5_std": -0.10782213928027633,
133
+ "ndcg_at_1": 0.19488,
134
+ "ndcg_at_10": 0.43288,
135
+ "ndcg_at_100": 0.48878,
136
+ "ndcg_at_1000": 0.49052,
137
+ "ndcg_at_20": 0.46989,
138
+ "ndcg_at_3": 0.31066,
139
+ "ndcg_at_5": 0.36328,
140
+ "precision_at_1": 0.19488,
141
+ "precision_at_10": 0.07404,
142
+ "precision_at_100": 0.00983,
143
+ "precision_at_1000": 0.001,
144
+ "precision_at_20": 0.04424,
145
+ "precision_at_3": 0.13253,
146
+ "precision_at_5": 0.10512,
147
+ "recall_at_1": 0.19488,
148
+ "recall_at_10": 0.7404,
149
+ "recall_at_100": 0.98293,
150
+ "recall_at_1000": 0.99644,
151
+ "recall_at_20": 0.88478,
152
+ "recall_at_3": 0.39758,
153
+ "recall_at_5": 0.5256
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "ArguAna"
158
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ArmenianParaphrasePC.json ADDED
@@ -0,0 +1,58 @@
1
+ {
2
+ "dataset_revision": "f43b4f32987048043a8b31e5e26be4d360c2438f",
3
+ "evaluation_time": 2.405421257019043,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_accuracy": 0.9074829931972789,
10
+ "cosine_accuracy_threshold": 0.8969008922576904,
11
+ "cosine_ap": 0.9580027459925256,
12
+ "cosine_f1": 0.9368029739776951,
13
+ "cosine_f1_threshold": 0.8731256127357483,
14
+ "cosine_precision": 0.8912466843501327,
15
+ "cosine_recall": 0.9872673849167483,
16
+ "dot_accuracy": 0.9074829931972789,
17
+ "dot_accuracy_threshold": 0.8764899373054504,
18
+ "dot_ap": 0.9551668172756757,
19
+ "dot_f1": 0.9368029739776951,
20
+ "dot_f1_threshold": 0.8727067112922668,
21
+ "dot_precision": 0.8912466843501327,
22
+ "dot_recall": 0.9872673849167483,
23
+ "euclidean_accuracy": 0.9074829931972789,
24
+ "euclidean_accuracy_threshold": 0.45431822538375854,
25
+ "euclidean_ap": 0.9580568147862285,
26
+ "euclidean_f1": 0.9368029739776951,
27
+ "euclidean_f1_threshold": 0.5041760802268982,
28
+ "euclidean_precision": 0.8912466843501327,
29
+ "euclidean_recall": 0.9872673849167483,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "hye-Armn"
33
+ ],
34
+ "main_score": 0.9580568147862285,
35
+ "manhattan_accuracy": 0.9074829931972789,
36
+ "manhattan_accuracy_threshold": 12.365119934082031,
37
+ "manhattan_ap": 0.9578369027690661,
38
+ "manhattan_f1": 0.9368029739776951,
39
+ "manhattan_f1_threshold": 12.365119934082031,
40
+ "manhattan_precision": 0.8912466843501327,
41
+ "manhattan_recall": 0.9872673849167483,
42
+ "max_accuracy": 0.9074829931972789,
43
+ "max_ap": 0.9580568147862285,
44
+ "max_f1": 0.9368029739776951,
45
+ "max_precision": 0.8912466843501327,
46
+ "max_recall": 0.9872673849167483,
47
+ "similarity_accuracy": 0.9074829931972789,
48
+ "similarity_accuracy_threshold": 0.8969008922576904,
49
+ "similarity_ap": 0.9580027459925256,
50
+ "similarity_f1": 0.9368029739776951,
51
+ "similarity_f1_threshold": 0.8731256127357483,
52
+ "similarity_precision": 0.8912466843501327,
53
+ "similarity_recall": 0.9872673849167483
54
+ }
55
+ ]
56
+ },
57
+ "task_name": "ArmenianParaphrasePC"
58
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BUCC.v2.json ADDED
@@ -0,0 +1,59 @@
1
+ {
2
+ "dataset_revision": "1739dc11ffe9b7bfccd7f3d585aeb4c544fc6677",
3
+ "evaluation_time": 108.12968945503235,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.9911952454325336,
10
+ "f1": 0.9882786704820603,
11
+ "hf_subset": "fr-en",
12
+ "languages": [
13
+ "fra-Latn",
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.9882786704820603,
17
+ "precision": 0.9868295546261648,
18
+ "recall": 0.9911952454325336
19
+ },
20
+ {
21
+ "accuracy": 0.9954070981210856,
22
+ "f1": 0.993876130828114,
23
+ "hf_subset": "de-en",
24
+ "languages": [
25
+ "deu-Latn",
26
+ "eng-Latn"
27
+ ],
28
+ "main_score": 0.993876130828114,
29
+ "precision": 0.9931106471816284,
30
+ "recall": 0.9954070981210856
31
+ },
32
+ {
33
+ "accuracy": 0.9801177693107032,
34
+ "f1": 0.9738944694608013,
35
+ "hf_subset": "ru-en",
36
+ "languages": [
37
+ "rus-Cyrl",
38
+ "eng-Latn"
39
+ ],
40
+ "main_score": 0.9738944694608013,
41
+ "precision": 0.9708116845629836,
42
+ "recall": 0.9801177693107032
43
+ },
44
+ {
45
+ "accuracy": 0.9847288046340179,
46
+ "f1": 0.9799017026505178,
47
+ "hf_subset": "zh-en",
48
+ "languages": [
49
+ "cmn-Hans",
50
+ "eng-Latn"
51
+ ],
52
+ "main_score": 0.9799017026505178,
53
+ "precision": 0.9776197998946814,
54
+ "recall": 0.9847288046340179
55
+ }
56
+ ]
57
+ },
58
+ "task_name": "BUCC.v2"
59
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BelebeleRetrieval.json ADDED
The diff for this file is too large to render. See raw diff
 
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BibleNLPBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
 
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BigPatentClustering.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "58a863a958586a5d6ba51088b94ac74a46aa864f",
3
+ "evaluation_time": 13.491792917251587,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.3713039996033536,
14
+ "v_measure": 0.3713039996033536,
15
+ "v_measure_std": 0.03972587883965748,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.31212528871125894,
19
+ 0.31455385657098456,
20
+ 0.4343813191088779,
21
+ 0.38729041638588485,
22
+ 0.37688017432476995,
23
+ 0.35885202871698346,
24
+ 0.3461838072842116,
25
+ 0.4296060606025785,
26
+ 0.39320272464059836,
27
+ 0.3599643196873877
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "BigPatentClustering.v2"
34
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BiorxivClusteringP2P.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "f5dbc242e11dd8e24def4c4268607a49e02946dc",
3
+ "evaluation_time": 19.023887634277344,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.39969707713049957,
14
+ "v_measure": 0.39969707713049957,
15
+ "v_measure_std": 0.010034135267156524,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.38287257237764394,
19
+ 0.40332558958160475,
20
+ 0.40056115755163746,
21
+ 0.4050171920703406,
22
+ 0.4049991243769569,
23
+ 0.3935241416806603,
24
+ 0.4064305931526453,
25
+ 0.4187988645907378,
26
+ 0.3954098389245237,
27
+ 0.38603169699824535
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "BiorxivClusteringP2P.v2"
34
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BornholmBitextMining.json ADDED
@@ -0,0 +1,22 @@
1
+ {
2
+ "dataset_revision": "3bc5cfb4ec514264fe2db5615fac9016f7251552",
3
+ "evaluation_time": 2.620206356048584,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.456,
10
+ "f1": 0.37276666666666664,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "dan-Latn"
14
+ ],
15
+ "main_score": 0.37276666666666664,
16
+ "precision": 0.34208571428571427,
17
+ "recall": 0.456
18
+ }
19
+ ]
20
+ },
21
+ "task_name": "BornholmBitextMining"
22
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BrazilianToxicTweetsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "f333c1fcfa3ab43f008a327c8bd0140441354d34",
3
+ "evaluation_time": 3.229675531387329,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.197412109375,
10
+ "f1": 0.16076574073261307,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "por-Latn"
14
+ ],
15
+ "lrap": 0.7922946506076398,
16
+ "main_score": 0.197412109375,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.11376953125,
20
+ "f1": 0.16577463191223482,
21
+ "lrap": 0.8129340277777785
22
+ },
23
+ {
24
+ "accuracy": 0.10546875,
25
+ "f1": 0.16607305964539346,
26
+ "lrap": 0.7943657769097232
27
+ },
28
+ {
29
+ "accuracy": 0.109375,
30
+ "f1": 0.1526649787509641,
31
+ "lrap": 0.7940673828125011
32
+ },
33
+ {
34
+ "accuracy": 0.2763671875,
35
+ "f1": 0.158557746511822,
36
+ "lrap": 0.8193359375000004
37
+ },
38
+ {
39
+ "accuracy": 0.30712890625,
40
+ "f1": 0.1779843255381048,
41
+ "lrap": 0.7957221137152788
42
+ },
43
+ {
44
+ "accuracy": 0.208984375,
45
+ "f1": 0.15013748211958702,
46
+ "lrap": 0.7923990885416681
47
+ },
48
+ {
49
+ "accuracy": 0.240234375,
50
+ "f1": 0.12101156956667601,
51
+ "lrap": 0.7417534722222228
52
+ },
53
+ {
54
+ "accuracy": 0.09912109375,
55
+ "f1": 0.16934707589093959,
56
+ "lrap": 0.7856445312500004
57
+ },
58
+ {
59
+ "accuracy": 0.24462890625,
60
+ "f1": 0.17437501024385083,
61
+ "lrap": 0.8036702473958346
62
+ },
63
+ {
64
+ "accuracy": 0.26904296875,
65
+ "f1": 0.171731527146558,
66
+ "lrap": 0.7830539279513902
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "BrazilianToxicTweetsClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/BulgarianStoreReviewSentimentClassfication.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "701984d6c6efea0e14a1c7850ef70e464c5577c0",
3
+ "evaluation_time": 9.750242948532104,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7604395604395604,
10
+ "f1": 0.5355943950306974,
11
+ "f1_weighted": 0.7640448074727606,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "bul-Cyrl"
15
+ ],
16
+ "main_score": 0.7604395604395604,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7472527472527473,
20
+ "f1": 0.5335646379764026,
21
+ "f1_weighted": 0.7511493175358723
22
+ },
23
+ {
24
+ "accuracy": 0.7527472527472527,
25
+ "f1": 0.5032165138050128,
26
+ "f1_weighted": 0.7591755506122481
27
+ },
28
+ {
29
+ "accuracy": 0.7252747252747253,
30
+ "f1": 0.48321032360312544,
31
+ "f1_weighted": 0.7299926245801964
32
+ },
33
+ {
34
+ "accuracy": 0.7857142857142857,
35
+ "f1": 0.5364950216319375,
36
+ "f1_weighted": 0.7774270696100386
37
+ },
38
+ {
39
+ "accuracy": 0.7362637362637363,
40
+ "f1": 0.5142818322981366,
41
+ "f1_weighted": 0.743306657111005
42
+ },
43
+ {
44
+ "accuracy": 0.7527472527472527,
45
+ "f1": 0.5522222222222222,
46
+ "f1_weighted": 0.7658152658152658
47
+ },
48
+ {
49
+ "accuracy": 0.7692307692307693,
50
+ "f1": 0.5173582995951418,
51
+ "f1_weighted": 0.7617400256064224
52
+ },
53
+ {
54
+ "accuracy": 0.7802197802197802,
55
+ "f1": 0.5673473869126043,
56
+ "f1_weighted": 0.7828287654374613
57
+ },
58
+ {
59
+ "accuracy": 0.7857142857142857,
60
+ "f1": 0.5944043657390616,
61
+ "f1_weighted": 0.7875717064570426
62
+ },
63
+ {
64
+ "accuracy": 0.7692307692307693,
65
+ "f1": 0.5538433465233297,
66
+ "f1_weighted": 0.7814410919620539
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "BulgarianStoreReviewSentimentClassfication"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CEDRClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "c0ba03d058e3e1b2f3fd20518875a4563dd12db4",
3
+ "evaluation_time": 3.612415313720703,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.4719978746014878,
10
+ "f1": 0.41180935677980235,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "rus-Cyrl"
14
+ ],
15
+ "lrap": 0.7282438894792875,
16
+ "main_score": 0.4719978746014878,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.4165781083953241,
20
+ "f1": 0.38158819579465797,
21
+ "lrap": 0.6747077577045816
22
+ },
23
+ {
24
+ "accuracy": 0.4022316684378321,
25
+ "f1": 0.3775677793754999,
26
+ "lrap": 0.7366365568544203
27
+ },
28
+ {
29
+ "accuracy": 0.4798087141339001,
30
+ "f1": 0.3991618679588411,
31
+ "lrap": 0.6956429330499576
32
+ },
33
+ {
34
+ "accuracy": 0.5069075451647184,
35
+ "f1": 0.4400502026942411,
36
+ "lrap": 0.731668437832104
37
+ },
38
+ {
39
+ "accuracy": 0.485653560042508,
40
+ "f1": 0.40574585729201,
41
+ "lrap": 0.7530818278427301
42
+ },
43
+ {
44
+ "accuracy": 0.46174282678002126,
45
+ "f1": 0.3955828173008574,
46
+ "lrap": 0.6903825717322111
47
+ },
48
+ {
49
+ "accuracy": 0.48831030818278426,
50
+ "f1": 0.4363110301027425,
51
+ "lrap": 0.7408607863974597
52
+ },
53
+ {
54
+ "accuracy": 0.5377258235919234,
55
+ "f1": 0.42224757911693755,
56
+ "lrap": 0.7590329436769491
57
+ },
58
+ {
59
+ "accuracy": 0.4718384697130712,
60
+ "f1": 0.3848252003994439,
61
+ "lrap": 0.7396386822529322
62
+ },
63
+ {
64
+ "accuracy": 0.4691817215727949,
65
+ "f1": 0.47501303776279225,
66
+ "lrap": 0.7607863974495306
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CEDRClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CLSClusteringP2P.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "4b6227591c6c1a73bc76b1055f3b7f3588e72476",
3
+ "evaluation_time": 9.756962537765503,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "cmn-Hans"
12
+ ],
13
+ "main_score": 0.3940192795763958,
14
+ "v_measure": 0.3940192795763958,
15
+ "v_measure_std": 0.016698165577153624,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.4158548047245891,
19
+ 0.3922380456962386,
20
+ 0.40627662413382315,
21
+ 0.39260838414917587,
22
+ 0.3798827257693119,
23
+ 0.4159297401441589,
24
+ 0.3736020327028448,
25
+ 0.39457596631226793,
26
+ 0.3636119739101462,
27
+ 0.40561249822140116
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "CLSClusteringP2P.v2"
34
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CSFDSKMovieReviewSentimentClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "23a20c659d868740ef9c54854de631fe19cd5c17",
3
+ "evaluation_time": 10.426575183868408,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.391357421875,
10
+ "f1": 0.37058195236422903,
11
+ "f1_weighted": 0.3734173801816453,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "slk-Latn"
15
+ ],
16
+ "main_score": 0.391357421875,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.38232421875,
20
+ "f1": 0.36495038177508526,
21
+ "f1_weighted": 0.368499388378136
22
+ },
23
+ {
24
+ "accuracy": 0.39794921875,
25
+ "f1": 0.3662932637557292,
26
+ "f1_weighted": 0.36901200488164
27
+ },
28
+ {
29
+ "accuracy": 0.40380859375,
30
+ "f1": 0.3782109405338343,
31
+ "f1_weighted": 0.38167245565789987
32
+ },
33
+ {
34
+ "accuracy": 0.38916015625,
35
+ "f1": 0.3760811695327086,
36
+ "f1_weighted": 0.3775636095644386
37
+ },
38
+ {
39
+ "accuracy": 0.3974609375,
40
+ "f1": 0.37834149039580117,
41
+ "f1_weighted": 0.3808441087320112
42
+ },
43
+ {
44
+ "accuracy": 0.37158203125,
45
+ "f1": 0.3331940946607299,
46
+ "f1_weighted": 0.336595864344805
47
+ },
48
+ {
49
+ "accuracy": 0.39794921875,
50
+ "f1": 0.38038731015956034,
51
+ "f1_weighted": 0.38352581156294685
52
+ },
53
+ {
54
+ "accuracy": 0.4013671875,
55
+ "f1": 0.3958801780532009,
56
+ "f1_weighted": 0.3984434807470749
57
+ },
58
+ {
59
+ "accuracy": 0.3876953125,
60
+ "f1": 0.3742630693301236,
61
+ "f1_weighted": 0.37724122607319893
62
+ },
63
+ {
64
+ "accuracy": 0.38427734375,
65
+ "f1": 0.3582176254455169,
66
+ "f1_weighted": 0.3607758518743018
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CSFDSKMovieReviewSentimentClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CTKFactsNLI.json ADDED
@@ -0,0 +1,107 @@
1
+ {
2
+ "dataset_revision": "387ae4582c8054cb52ef57ef0941f19bd8012abf",
3
+ "evaluation_time": 0.8582019805908203,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_accuracy": 0.7146666666666667,
10
+ "cosine_accuracy_threshold": 0.6814722418785095,
11
+ "cosine_ap": 0.8019021520095191,
12
+ "cosine_f1": 0.8256,
13
+ "cosine_f1_threshold": 0.616237461566925,
14
+ "cosine_precision": 0.7068493150684931,
15
+ "cosine_recall": 0.9923076923076923,
16
+ "dot_accuracy": 0.712,
17
+ "dot_accuracy_threshold": 0.6811537742614746,
18
+ "dot_ap": 0.8015475596434132,
19
+ "dot_f1": 0.8256,
20
+ "dot_f1_threshold": 0.6163125038146973,
21
+ "dot_precision": 0.7068493150684931,
22
+ "dot_recall": 0.9923076923076923,
23
+ "euclidean_accuracy": 0.712,
24
+ "euclidean_accuracy_threshold": 0.7966817617416382,
25
+ "euclidean_ap": 0.8021632924784958,
26
+ "euclidean_f1": 0.8256,
27
+ "euclidean_f1_threshold": 0.8761398196220398,
28
+ "euclidean_precision": 0.7068493150684931,
29
+ "euclidean_recall": 0.9923076923076923,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "ces-Latn"
33
+ ],
34
+ "main_score": 0.8021632924784958,
35
+ "manhattan_accuracy": 0.7146666666666667,
36
+ "manhattan_accuracy_threshold": 19.589038848876953,
37
+ "manhattan_ap": 0.8008816583291862,
38
+ "manhattan_f1": 0.8252427184466019,
39
+ "manhattan_f1_threshold": 20.464435577392578,
40
+ "manhattan_precision": 0.7122905027932961,
41
+ "manhattan_recall": 0.9807692307692307,
42
+ "max_accuracy": 0.7146666666666667,
43
+ "max_ap": 0.8021632924784958,
44
+ "max_f1": 0.8256,
45
+ "max_precision": 0.7122905027932961,
46
+ "max_recall": 0.9923076923076923,
47
+ "similarity_accuracy": 0.7146666666666667,
48
+ "similarity_accuracy_threshold": 0.6814722418785095,
49
+ "similarity_ap": 0.8019021520095191,
50
+ "similarity_f1": 0.8256,
51
+ "similarity_f1_threshold": 0.616237461566925,
52
+ "similarity_precision": 0.7068493150684931,
53
+ "similarity_recall": 0.9923076923076923
54
+ }
55
+ ],
56
+ "validation": [
57
+ {
58
+ "cosine_accuracy": 0.6524590163934426,
59
+ "cosine_accuracy_threshold": 0.6725249290466309,
60
+ "cosine_ap": 0.7794681081805134,
61
+ "cosine_f1": 0.7775467775467776,
62
+ "cosine_f1_threshold": 0.6467993259429932,
63
+ "cosine_precision": 0.6448275862068965,
64
+ "cosine_recall": 0.9790575916230366,
65
+ "dot_accuracy": 0.6524590163934426,
66
+ "dot_accuracy_threshold": 0.6680096387863159,
67
+ "dot_ap": 0.7802229859854722,
68
+ "dot_f1": 0.7775467775467776,
69
+ "dot_f1_threshold": 0.6481204628944397,
70
+ "dot_precision": 0.6448275862068965,
71
+ "dot_recall": 0.9790575916230366,
72
+ "euclidean_accuracy": 0.6524590163934426,
73
+ "euclidean_accuracy_threshold": 0.8099972605705261,
74
+ "euclidean_ap": 0.7797810588826402,
75
+ "euclidean_f1": 0.7775467775467776,
76
+ "euclidean_f1_threshold": 0.8403406143188477,
77
+ "euclidean_precision": 0.6448275862068965,
78
+ "euclidean_recall": 0.9790575916230366,
79
+ "hf_subset": "default",
80
+ "languages": [
81
+ "ces-Latn"
82
+ ],
83
+ "main_score": 0.7808858873172826,
84
+ "manhattan_accuracy": 0.6557377049180327,
85
+ "manhattan_accuracy_threshold": 19.931034088134766,
86
+ "manhattan_ap": 0.7808858873172826,
87
+ "manhattan_f1": 0.7807933194154488,
88
+ "manhattan_f1_threshold": 20.237449645996094,
89
+ "manhattan_precision": 0.6493055555555556,
90
+ "manhattan_recall": 0.9790575916230366,
91
+ "max_accuracy": 0.6557377049180327,
92
+ "max_ap": 0.7808858873172826,
93
+ "max_f1": 0.7807933194154488,
94
+ "max_precision": 0.6493055555555556,
95
+ "max_recall": 0.9790575916230366,
96
+ "similarity_accuracy": 0.6524590163934426,
97
+ "similarity_accuracy_threshold": 0.6725249290466309,
98
+ "similarity_ap": 0.7794681081805134,
99
+ "similarity_f1": 0.7775467775467776,
100
+ "similarity_f1_threshold": 0.6467993259429932,
101
+ "similarity_precision": 0.6448275862068965,
102
+ "similarity_recall": 0.9790575916230366
103
+ }
104
+ ]
105
+ },
106
+ "task_name": "CTKFactsNLI"
107
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CataloniaTweetClassification.json ADDED
@@ -0,0 +1,261 @@
1
+ {
2
+ "dataset_revision": "cf24d44e517efa534f048e5fc5981f399ed25bee",
3
+ "evaluation_time": 20.34604287147522,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.4556,
10
+ "f1": 0.4549194142182621,
11
+ "f1_weighted": 0.44670806044501987,
12
+ "hf_subset": "spanish",
13
+ "languages": [
14
+ "spa-Latn"
15
+ ],
16
+ "main_score": 0.4556,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.4685,
20
+ "f1": 0.46844583218088515,
21
+ "f1_weighted": 0.4678505671882624
22
+ },
23
+ {
24
+ "accuracy": 0.443,
25
+ "f1": 0.4451230025847261,
26
+ "f1_weighted": 0.4251534487770984
27
+ },
28
+ {
29
+ "accuracy": 0.385,
30
+ "f1": 0.38312084446894473,
31
+ "f1_weighted": 0.3749735700559902
32
+ },
33
+ {
34
+ "accuracy": 0.427,
35
+ "f1": 0.42355364102848947,
36
+ "f1_weighted": 0.4174558101686062
37
+ },
38
+ {
39
+ "accuracy": 0.44,
40
+ "f1": 0.426348268791825,
41
+ "f1_weighted": 0.4216486071554449
42
+ },
43
+ {
44
+ "accuracy": 0.503,
45
+ "f1": 0.508770752438338,
46
+ "f1_weighted": 0.49980967174468294
47
+ },
48
+ {
49
+ "accuracy": 0.493,
50
+ "f1": 0.5058851885606271,
51
+ "f1_weighted": 0.48981618076124656
52
+ },
53
+ {
54
+ "accuracy": 0.443,
55
+ "f1": 0.42139249511143967,
56
+ "f1_weighted": 0.41304827669927546
57
+ },
58
+ {
59
+ "accuracy": 0.4355,
60
+ "f1": 0.4296559960366144,
61
+ "f1_weighted": 0.43838404904781
62
+ },
63
+ {
64
+ "accuracy": 0.518,
65
+ "f1": 0.5368981209807316,
66
+ "f1_weighted": 0.5189404228517818
67
+ }
68
+ ]
69
+ },
70
+ {
71
+ "accuracy": 0.45430000000000004,
72
+ "f1": 0.44700387891492427,
73
+ "f1_weighted": 0.44649526249913746,
74
+ "hf_subset": "catalan",
75
+ "languages": [
76
+ "cat-Latn"
77
+ ],
78
+ "main_score": 0.45430000000000004,
79
+ "scores_per_experiment": [
80
+ {
81
+ "accuracy": 0.498,
82
+ "f1": 0.4878926638652186,
83
+ "f1_weighted": 0.4985115220623073
84
+ },
85
+ {
86
+ "accuracy": 0.3885,
87
+ "f1": 0.3868162462942548,
88
+ "f1_weighted": 0.3754741147910188
89
+ },
90
+ {
91
+ "accuracy": 0.48,
92
+ "f1": 0.4553711449742925,
93
+ "f1_weighted": 0.4540716689213119
94
+ },
95
+ {
96
+ "accuracy": 0.482,
97
+ "f1": 0.4777454962659026,
98
+ "f1_weighted": 0.4760240689824773
99
+ },
100
+ {
101
+ "accuracy": 0.435,
102
+ "f1": 0.4156183614612236,
103
+ "f1_weighted": 0.42780667056536603
104
+ },
105
+ {
106
+ "accuracy": 0.4785,
107
+ "f1": 0.478850053133082,
108
+ "f1_weighted": 0.477582795002293
109
+ },
110
+ {
111
+ "accuracy": 0.477,
112
+ "f1": 0.46318796450187016,
113
+ "f1_weighted": 0.46339815696636927
114
+ },
115
+ {
116
+ "accuracy": 0.4075,
117
+ "f1": 0.407293757929666,
118
+ "f1_weighted": 0.394140272319857
119
+ },
120
+ {
121
+ "accuracy": 0.4525,
122
+ "f1": 0.45348043736935223,
123
+ "f1_weighted": 0.45255650382594137
124
+ },
125
+ {
126
+ "accuracy": 0.444,
127
+ "f1": 0.44378266335438016,
128
+ "f1_weighted": 0.4453868515544325
129
+ }
130
+ ]
131
+ }
132
+ ],
133
+ "validation": [
134
+ {
135
+ "accuracy": 0.4454000000000001,
136
+ "f1": 0.4431880736559732,
137
+ "f1_weighted": 0.43716832911655884,
138
+ "hf_subset": "spanish",
139
+ "languages": [
140
+ "spa-Latn"
141
+ ],
142
+ "main_score": 0.4454000000000001,
143
+ "scores_per_experiment": [
144
+ {
145
+ "accuracy": 0.465,
146
+ "f1": 0.4644787787352229,
147
+ "f1_weighted": 0.4639656951420143
148
+ },
149
+ {
150
+ "accuracy": 0.4655,
151
+ "f1": 0.46221231139162705,
152
+ "f1_weighted": 0.4514106284276498
153
+ },
154
+ {
155
+ "accuracy": 0.3705,
156
+ "f1": 0.367999413085551,
157
+ "f1_weighted": 0.3589196866783966
158
+ },
159
+ {
160
+ "accuracy": 0.4255,
161
+ "f1": 0.4208614995624728,
162
+ "f1_weighted": 0.41890354402317675
163
+ },
164
+ {
165
+ "accuracy": 0.432,
166
+ "f1": 0.42068249474710306,
167
+ "f1_weighted": 0.41115176099003053
168
+ },
169
+ {
170
+ "accuracy": 0.495,
171
+ "f1": 0.5014363299471257,
172
+ "f1_weighted": 0.4918076584804851
173
+ },
174
+ {
175
+ "accuracy": 0.452,
176
+ "f1": 0.4616606210204175,
177
+ "f1_weighted": 0.4504703655222057
178
+ },
179
+ {
180
+ "accuracy": 0.4505,
181
+ "f1": 0.4235990237918889,
182
+ "f1_weighted": 0.4237010110414535
183
+ },
184
+ {
185
+ "accuracy": 0.406,
186
+ "f1": 0.40319750938015925,
187
+ "f1_weighted": 0.4073892729846847
188
+ },
189
+ {
190
+ "accuracy": 0.492,
191
+ "f1": 0.5057527548981631,
192
+ "f1_weighted": 0.49396366787549184
193
+ }
194
+ ]
195
+ },
196
+ {
197
+ "accuracy": 0.44814999999999994,
198
+ "f1": 0.4417565935927034,
199
+ "f1_weighted": 0.4419877060109865,
200
+ "hf_subset": "catalan",
201
+ "languages": [
202
+ "cat-Latn"
203
+ ],
204
+ "main_score": 0.44814999999999994,
205
+ "scores_per_experiment": [
206
+ {
207
+ "accuracy": 0.485,
208
+ "f1": 0.47727786900200697,
209
+ "f1_weighted": 0.4856109688013136
210
+ },
211
+ {
212
+ "accuracy": 0.392,
213
+ "f1": 0.38974810175127833,
214
+ "f1_weighted": 0.38267317387057576
215
+ },
216
+ {
217
+ "accuracy": 0.456,
218
+ "f1": 0.4367085180715435,
219
+ "f1_weighted": 0.4331780707797633
220
+ },
221
+ {
222
+ "accuracy": 0.506,
223
+ "f1": 0.4978504750228354,
224
+ "f1_weighted": 0.5027094236425065
225
+ },
226
+ {
227
+ "accuracy": 0.421,
228
+ "f1": 0.40253905285935393,
229
+ "f1_weighted": 0.40978228003473566
230
+ },
231
+ {
232
+ "accuracy": 0.4645,
233
+ "f1": 0.46369578175748316,
234
+ "f1_weighted": 0.46634989815906497
235
+ },
236
+ {
237
+ "accuracy": 0.4795,
238
+ "f1": 0.4733711137670194,
239
+ "f1_weighted": 0.4672327524142362
240
+ },
241
+ {
242
+ "accuracy": 0.4075,
243
+ "f1": 0.4069749854598233,
244
+ "f1_weighted": 0.3977411571081961
245
+ },
246
+ {
247
+ "accuracy": 0.447,
248
+ "f1": 0.44675965836993053,
249
+ "f1_weighted": 0.45028004354182544
250
+ },
251
+ {
252
+ "accuracy": 0.423,
253
+ "f1": 0.42264037986575925,
254
+ "f1_weighted": 0.4243192917576482
255
+ }
256
+ ]
257
+ }
258
+ ]
259
+ },
260
+ "task_name": "CataloniaTweetClassification"
261
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/Core17InstructionRetrieval.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "e39ff896cf3efbbdeeb950e6bd7c79f266995b07",
3
+ "evaluation_time": 222.37063264846802,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "individual": {
11
+ "changed": {
12
+ "map_at_1": 0.02401,
13
+ "map_at_10": 0.10667,
14
+ "map_at_100": 0.24395,
15
+ "map_at_1000": 0.31132,
16
+ "map_at_20": 0.15836,
17
+ "map_at_3": 0.04884,
18
+ "map_at_5": 0.07081,
19
+ "mrr_at_1": 0.75,
20
+ "mrr_at_10": 0.85,
21
+ "mrr_at_100": 0.85,
22
+ "mrr_at_1000": 0.85,
23
+ "mrr_at_20": 0.85,
24
+ "mrr_at_3": 0.85,
25
+ "mrr_at_5": 0.85,
26
+ "naucs_at_1000_diff1": -0.11341242710637385,
27
+ "naucs_at_1000_max": -0.1270288127316076,
28
+ "naucs_at_1000_std": -0.42141851713538875,
29
+ "naucs_at_100_diff1": -0.11341242710637385,
30
+ "naucs_at_100_max": -0.1270288127316076,
31
+ "naucs_at_100_std": -0.42141851713538875,
32
+ "naucs_at_10_diff1": -0.11341242710637385,
33
+ "naucs_at_10_max": -0.1270288127316076,
34
+ "naucs_at_10_std": -0.42141851713538875,
35
+ "naucs_at_1_diff1": -0.1477760334903194,
36
+ "naucs_at_1_max": -0.06457352171637874,
37
+ "naucs_at_1_std": -0.512925170068027,
38
+ "naucs_at_20_diff1": -0.11341242710637385,
39
+ "naucs_at_20_max": -0.1270288127316076,
40
+ "naucs_at_20_std": -0.42141851713538875,
41
+ "naucs_at_3_diff1": -0.11341242710637385,
42
+ "naucs_at_3_max": -0.1270288127316076,
43
+ "naucs_at_3_std": -0.42141851713538875,
44
+ "naucs_at_5_diff1": -0.11341242710637385,
45
+ "naucs_at_5_max": -0.1270288127316076,
46
+ "naucs_at_5_std": -0.42141851713538875,
47
+ "ndcg_at_1": 0.65,
48
+ "ndcg_at_10": 0.49658,
49
+ "ndcg_at_100": 0.45744,
50
+ "ndcg_at_1000": 0.68909,
51
+ "ndcg_at_20": 0.45883,
52
+ "ndcg_at_3": 0.56301,
53
+ "ndcg_at_5": 0.5377,
54
+ "precision_at_1": 0.75,
55
+ "precision_at_10": 0.555,
56
+ "precision_at_100": 0.2215,
57
+ "precision_at_1000": 0.0545,
58
+ "precision_at_20": 0.4575,
59
+ "precision_at_3": 0.65,
60
+ "precision_at_5": 0.61,
61
+ "recall_at_1": 0.02401,
62
+ "recall_at_10": 0.12539,
63
+ "recall_at_100": 0.44044,
64
+ "recall_at_1000": 1.0,
65
+ "recall_at_20": 0.20279,
66
+ "recall_at_3": 0.0517,
67
+ "recall_at_5": 0.07741
68
+ },
69
+ "original": {
70
+ "map_at_1": 0.00426,
71
+ "map_at_10": 0.05567,
72
+ "map_at_100": 0.13737,
73
+ "map_at_1000": 0.17745,
74
+ "map_at_20": 0.08737,
75
+ "map_at_3": 0.02003,
76
+ "map_at_5": 0.03339,
77
+ "mrr_at_1": 0.15,
78
+ "mrr_at_10": 0.3763888888888889,
79
+ "mrr_at_100": 0.3827408031037063,
80
+ "mrr_at_1000": 0.3827408031037063,
81
+ "mrr_at_20": 0.3802350427350427,
82
+ "mrr_at_3": 0.325,
83
+ "mrr_at_5": 0.3575,
84
+ "naucs_at_1000_diff1": 0.25809504258870614,
85
+ "naucs_at_1000_max": 0.24262004323936823,
86
+ "naucs_at_1000_std": -0.3028182286660371,
87
+ "naucs_at_100_diff1": 0.25809504258870614,
88
+ "naucs_at_100_max": 0.24262004323936823,
89
+ "naucs_at_100_std": -0.3028182286660371,
90
+ "naucs_at_10_diff1": 0.27122441311238943,
91
+ "naucs_at_10_max": 0.2283441056536398,
92
+ "naucs_at_10_std": -0.28624278447878,
93
+ "naucs_at_1_diff1": 0.003830259046466964,
94
+ "naucs_at_1_max": 0.3413970365890533,
95
+ "naucs_at_1_std": -0.2819272250781172,
96
+ "naucs_at_20_diff1": 0.2638519135511177,
97
+ "naucs_at_20_max": 0.24880761619347175,
98
+ "naucs_at_20_std": -0.3051695548015811,
99
+ "naucs_at_3_diff1": 0.28786338686724944,
100
+ "naucs_at_3_max": 0.1554787558446837,
101
+ "naucs_at_3_std": -0.3101849969506,
102
+ "naucs_at_5_diff1": 0.2769593197460224,
103
+ "naucs_at_5_max": 0.2603183746813458,
104
+ "naucs_at_5_std": -0.26050752518066045,
105
+ "ndcg_at_1": 0.15,
106
+ "ndcg_at_10": 0.25722,
107
+ "ndcg_at_100": 0.32818,
108
+ "ndcg_at_1000": 0.52873,
109
+ "ndcg_at_20": 0.25579,
110
+ "ndcg_at_3": 0.24288,
111
+ "ndcg_at_5": 0.25472,
112
+ "precision_at_1": 0.15,
113
+ "precision_at_10": 0.3,
114
+ "precision_at_100": 0.1295,
115
+ "precision_at_1000": 0.0327,
116
+ "precision_at_20": 0.25,
117
+ "precision_at_3": 0.31667,
118
+ "precision_at_5": 0.32,
119
+ "recall_at_1": 0.00426,
120
+ "recall_at_10": 0.10479,
121
+ "recall_at_100": 0.4141,
122
+ "recall_at_1000": 1.0,
123
+ "recall_at_20": 0.17496,
124
+ "recall_at_3": 0.02774,
125
+ "recall_at_5": 0.05636
126
+ }
127
+ },
128
+ "languages": [
129
+ "eng-Latn"
130
+ ],
131
+ "main_score": -0.0006006880001947074,
132
+ "p-MRR": -0.0006006880001947074
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "Core17InstructionRetrieval"
137
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CovidRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "1271c7809071a13532e05f25fb53511ffce77117",
3
+ "evaluation_time": 340.78635263442993,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "cmn-Hans"
12
+ ],
13
+ "main_score": 0.78546,
14
+ "map_at_1": 0.67123,
15
+ "map_at_10": 0.7496,
16
+ "map_at_100": 0.75344,
17
+ "map_at_1000": 0.75353,
18
+ "map_at_20": 0.75233,
19
+ "map_at_3": 0.73446,
20
+ "map_at_5": 0.74306,
21
+ "mrr_at_1": 0.6733403582718651,
22
+ "mrr_at_10": 0.749646244166792,
23
+ "mrr_at_100": 0.7534831928292228,
24
+ "mrr_at_1000": 0.7535682998899956,
25
+ "mrr_at_20": 0.7523757305845015,
26
+ "mrr_at_3": 0.7349841938883034,
27
+ "mrr_at_5": 0.7436775553213908,
28
+ "nauc_map_at_1000_diff1": 0.7791405892299247,
29
+ "nauc_map_at_1000_max": 0.5547292726037599,
30
+ "nauc_map_at_1000_std": -0.36962170987940157,
31
+ "nauc_map_at_100_diff1": 0.7790941201864313,
32
+ "nauc_map_at_100_max": 0.5548570698500994,
33
+ "nauc_map_at_100_std": -0.3694761927220403,
34
+ "nauc_map_at_10_diff1": 0.7768009613416262,
35
+ "nauc_map_at_10_max": 0.5554853156034927,
36
+ "nauc_map_at_10_std": -0.36819296760763565,
37
+ "nauc_map_at_1_diff1": 0.8125670926685186,
38
+ "nauc_map_at_1_max": 0.541399757732875,
39
+ "nauc_map_at_1_std": -0.3839645178999091,
40
+ "nauc_map_at_20_diff1": 0.7784830933732259,
41
+ "nauc_map_at_20_max": 0.5541379316239704,
42
+ "nauc_map_at_20_std": -0.37096709239622144,
43
+ "nauc_map_at_3_diff1": 0.7813568888619664,
44
+ "nauc_map_at_3_max": 0.5551932023150712,
45
+ "nauc_map_at_3_std": -0.37307171750948315,
46
+ "nauc_map_at_5_diff1": 0.7778836857778701,
47
+ "nauc_map_at_5_max": 0.5500587628775865,
48
+ "nauc_map_at_5_std": -0.3814485615466191,
49
+ "nauc_mrr_at_1000_diff1": 0.7792508604111916,
50
+ "nauc_mrr_at_1000_max": 0.5566167364551637,
51
+ "nauc_mrr_at_1000_std": -0.3676578474637827,
52
+ "nauc_mrr_at_100_diff1": 0.7792043492569733,
53
+ "nauc_mrr_at_100_max": 0.5567437802692918,
54
+ "nauc_mrr_at_100_std": -0.36751311467881204,
55
+ "nauc_mrr_at_10_diff1": 0.7769111749199284,
56
+ "nauc_mrr_at_10_max": 0.5573273286456696,
57
+ "nauc_mrr_at_10_std": -0.3663024949932281,
58
+ "nauc_mrr_at_1_diff1": 0.8106820337949446,
59
+ "nauc_mrr_at_1_max": 0.5448112997474763,
60
+ "nauc_mrr_at_1_std": -0.375622746182767,
61
+ "nauc_mrr_at_20_diff1": 0.7785946676179826,
62
+ "nauc_mrr_at_20_max": 0.5560029558169208,
63
+ "nauc_mrr_at_20_std": -0.36905299049649715,
64
+ "nauc_mrr_at_3_diff1": 0.7814144227474652,
65
+ "nauc_mrr_at_3_max": 0.5581231287734342,
66
+ "nauc_mrr_at_3_std": -0.3692372594778035,
67
+ "nauc_mrr_at_5_diff1": 0.7778033110341576,
68
+ "nauc_mrr_at_5_max": 0.5532251352802099,
69
+ "nauc_mrr_at_5_std": -0.37716170918102143,
70
+ "nauc_ndcg_at_1000_diff1": 0.7722323593069391,
71
+ "nauc_ndcg_at_1000_max": 0.5598553194209255,
72
+ "nauc_ndcg_at_1000_std": -0.35795292420809033,
73
+ "nauc_ndcg_at_100_diff1": 0.7710551926033649,
74
+ "nauc_ndcg_at_100_max": 0.5642684910970744,
75
+ "nauc_ndcg_at_100_std": -0.352410590361872,
76
+ "nauc_ndcg_at_10_diff1": 0.7590907345893284,
77
+ "nauc_ndcg_at_10_max": 0.563090194448966,
78
+ "nauc_ndcg_at_10_std": -0.3513773213860779,
79
+ "nauc_ndcg_at_1_diff1": 0.8106820337949446,
80
+ "nauc_ndcg_at_1_max": 0.5448112997474763,
81
+ "nauc_ndcg_at_1_std": -0.375622746182767,
82
+ "nauc_ndcg_at_20_diff1": 0.765711506013049,
83
+ "nauc_ndcg_at_20_max": 0.5580213511826945,
84
+ "nauc_ndcg_at_20_std": -0.3629327591959577,
85
+ "nauc_ndcg_at_3_diff1": 0.7702376457119041,
86
+ "nauc_ndcg_at_3_max": 0.5597214646814901,
87
+ "nauc_ndcg_at_3_std": -0.36825016162640445,
88
+ "nauc_ndcg_at_5_diff1": 0.7622101063561283,
89
+ "nauc_ndcg_at_5_max": 0.5497098434226116,
90
+ "nauc_ndcg_at_5_std": -0.38361512031192513,
91
+ "nauc_precision_at_1000_diff1": -0.10677277231205465,
92
+ "nauc_precision_at_1000_max": 0.22052181057322065,
93
+ "nauc_precision_at_1000_std": 0.5320447031120424,
94
+ "nauc_precision_at_100_diff1": 0.22012743131900256,
95
+ "nauc_precision_at_100_max": 0.4998439188056743,
96
+ "nauc_precision_at_100_std": 0.3497354531359946,
97
+ "nauc_precision_at_10_diff1": 0.49802210345564313,
98
+ "nauc_precision_at_10_max": 0.5353256671816794,
99
+ "nauc_precision_at_10_std": -0.10016277287025022,
100
+ "nauc_precision_at_1_diff1": 0.8106820337949446,
101
+ "nauc_precision_at_1_max": 0.5448112997474763,
102
+ "nauc_precision_at_1_std": -0.375622746182767,
103
+ "nauc_precision_at_20_diff1": 0.4286789166357217,
104
+ "nauc_precision_at_20_max": 0.47785985177903095,
105
+ "nauc_precision_at_20_std": -0.08054175609637773,
106
+ "nauc_precision_at_3_diff1": 0.6833760665952497,
107
+ "nauc_precision_at_3_max": 0.5587992959225945,
108
+ "nauc_precision_at_3_std": -0.30220851092488615,
109
+ "nauc_precision_at_5_diff1": 0.6213772348634038,
110
+ "nauc_precision_at_5_max": 0.5182927650152851,
111
+ "nauc_precision_at_5_std": -0.32254088428614847,
112
+ "nauc_recall_at_1000_diff1": 0.6874225248372537,
113
+ "nauc_recall_at_1000_max": 0.7766234801575146,
114
+ "nauc_recall_at_1000_std": 0.34892593232266444,
115
+ "nauc_recall_at_100_diff1": 0.6885009566246413,
116
+ "nauc_recall_at_100_max": 0.8643185620958282,
117
+ "nauc_recall_at_100_std": 0.16430185700017508,
118
+ "nauc_recall_at_10_diff1": 0.6464026024585438,
119
+ "nauc_recall_at_10_max": 0.6114841755493923,
120
+ "nauc_recall_at_10_std": -0.2343288110122592,
121
+ "nauc_recall_at_1_diff1": 0.8125670926685186,
122
+ "nauc_recall_at_1_max": 0.541399757732875,
123
+ "nauc_recall_at_1_std": -0.3839645178999091,
124
+ "nauc_recall_at_20_diff1": 0.6519858130705691,
125
+ "nauc_recall_at_20_max": 0.5867727279001567,
126
+ "nauc_recall_at_20_std": -0.29368734961770554,
127
+ "nauc_recall_at_3_diff1": 0.7276112642910515,
128
+ "nauc_recall_at_3_max": 0.5735029494701098,
129
+ "nauc_recall_at_3_std": -0.35289222179627106,
130
+ "nauc_recall_at_5_diff1": 0.6898094868983334,
131
+ "nauc_recall_at_5_max": 0.540825493954069,
132
+ "nauc_recall_at_5_std": -0.40135376982021587,
133
+ "ndcg_at_1": 0.67334,
134
+ "ndcg_at_10": 0.78546,
135
+ "ndcg_at_100": 0.80285,
136
+ "ndcg_at_1000": 0.80513,
137
+ "ndcg_at_20": 0.79552,
138
+ "ndcg_at_3": 0.75465,
139
+ "ndcg_at_5": 0.77018,
140
+ "precision_at_1": 0.67334,
141
+ "precision_at_10": 0.09062,
142
+ "precision_at_100": 0.00985,
143
+ "precision_at_1000": 0.001,
144
+ "precision_at_20": 0.04731,
145
+ "precision_at_3": 0.27257,
146
+ "precision_at_5": 0.17134,
147
+ "recall_at_1": 0.67123,
148
+ "recall_at_10": 0.89673,
149
+ "recall_at_100": 0.97471,
150
+ "recall_at_1000": 0.99262,
151
+ "recall_at_20": 0.93678,
152
+ "recall_at_3": 0.81243,
153
+ "recall_at_5": 0.84984
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "CovidRetrieval"
158
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CyrillicTurkicLangClassification.json ADDED
@@ -0,0 +1,81 @@
1
+ {
2
+ "dataset_revision": "e42d330f33d65b7b72dfd408883daf1661f06f18",
3
+ "evaluation_time": 6.860665559768677,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.433056640625,
10
+ "f1": 0.4207841215576139,
11
+ "f1_weighted": 0.4209241468074157,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "bak-Cyrl",
15
+ "chv-Cyrl",
16
+ "tat-Cyrl",
17
+ "kir-Cyrl",
18
+ "rus-Cyrl",
19
+ "kaz-Cyrl",
20
+ "tyv-Cyrl",
21
+ "krc-Cyrl",
22
+ "sah-Cyrl"
23
+ ],
24
+ "main_score": 0.433056640625,
25
+ "scores_per_experiment": [
26
+ {
27
+ "accuracy": 0.4482421875,
28
+ "f1": 0.4391238271880482,
29
+ "f1_weighted": 0.43930243082322323
30
+ },
31
+ {
32
+ "accuracy": 0.41796875,
33
+ "f1": 0.39877330866231164,
34
+ "f1_weighted": 0.39880914206371276
35
+ },
36
+ {
37
+ "accuracy": 0.4541015625,
38
+ "f1": 0.4387476073163245,
39
+ "f1_weighted": 0.4388893413486905
40
+ },
41
+ {
42
+ "accuracy": 0.43701171875,
43
+ "f1": 0.42270354679800703,
44
+ "f1_weighted": 0.4228346151292825
45
+ },
46
+ {
47
+ "accuracy": 0.4560546875,
48
+ "f1": 0.44332195894467763,
49
+ "f1_weighted": 0.4434844052044929
50
+ },
51
+ {
52
+ "accuracy": 0.4296875,
53
+ "f1": 0.4324559051336851,
54
+ "f1_weighted": 0.4326004981615243
55
+ },
56
+ {
57
+ "accuracy": 0.3916015625,
58
+ "f1": 0.3778201912992431,
59
+ "f1_weighted": 0.3779469266857768
60
+ },
61
+ {
62
+ "accuracy": 0.43359375,
63
+ "f1": 0.42224938097607567,
64
+ "f1_weighted": 0.4224367190983771
65
+ },
66
+ {
67
+ "accuracy": 0.419921875,
68
+ "f1": 0.4032598919465921,
69
+ "f1_weighted": 0.4034704840786278
70
+ },
71
+ {
72
+ "accuracy": 0.4423828125,
73
+ "f1": 0.42938559731117454,
74
+ "f1_weighted": 0.42946690548044875
75
+ }
76
+ ]
77
+ }
78
+ ]
79
+ },
80
+ "task_name": "CyrillicTurkicLangClassification"
81
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/CzechProductReviewSentimentClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "2e6fedf42c9c104e83dfd95c3a453721e683e244",
3
+ "evaluation_time": 10.387732028961182,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.619091796875,
10
+ "f1": 0.6014830003753803,
11
+ "f1_weighted": 0.6014274272424769,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "ces-Latn"
15
+ ],
16
+ "main_score": 0.619091796875,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.6435546875,
20
+ "f1": 0.6273530072680945,
21
+ "f1_weighted": 0.627306121161528
22
+ },
23
+ {
24
+ "accuracy": 0.61181640625,
25
+ "f1": 0.5932272755843432,
26
+ "f1_weighted": 0.5931612994216926
27
+ },
28
+ {
29
+ "accuracy": 0.6015625,
30
+ "f1": 0.5732758881627685,
31
+ "f1_weighted": 0.5731976017031912
32
+ },
33
+ {
34
+ "accuracy": 0.57373046875,
35
+ "f1": 0.5527572369742118,
36
+ "f1_weighted": 0.5527112686965908
37
+ },
38
+ {
39
+ "accuracy": 0.62451171875,
40
+ "f1": 0.6120010496958214,
41
+ "f1_weighted": 0.6119566239023979
42
+ },
43
+ {
44
+ "accuracy": 0.611328125,
45
+ "f1": 0.5907278612487984,
46
+ "f1_weighted": 0.5906764936257848
47
+ },
48
+ {
49
+ "accuracy": 0.6337890625,
50
+ "f1": 0.6241344899624469,
51
+ "f1_weighted": 0.6240855499495558
52
+ },
53
+ {
54
+ "accuracy": 0.626953125,
55
+ "f1": 0.6100441942535237,
56
+ "f1_weighted": 0.6099833978008838
57
+ },
58
+ {
59
+ "accuracy": 0.63818359375,
60
+ "f1": 0.6111107005843848,
61
+ "f1_weighted": 0.6110493082969439
62
+ },
63
+ {
64
+ "accuracy": 0.62548828125,
65
+ "f1": 0.6201983000194083,
66
+ "f1_weighted": 0.6201466078662015
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CzechProductReviewSentimentClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/DBpediaClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "9abd46cf7fc8b4c64290f26993c540b92aa145ac",
3
+ "evaluation_time": 11.836910486221313,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.760546875,
10
+ "f1": 0.7521020119361341,
11
+ "f1_weighted": 0.7521194172865739,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.760546875,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7724609375,
20
+ "f1": 0.7666250798259107,
21
+ "f1_weighted": 0.7666511014391251
22
+ },
23
+ {
24
+ "accuracy": 0.7548828125,
25
+ "f1": 0.7465668847321398,
26
+ "f1_weighted": 0.7466235362653909
27
+ },
28
+ {
29
+ "accuracy": 0.7392578125,
30
+ "f1": 0.7310516929851555,
31
+ "f1_weighted": 0.7310754577242813
32
+ },
33
+ {
34
+ "accuracy": 0.76708984375,
35
+ "f1": 0.7611068580912449,
36
+ "f1_weighted": 0.7610999875639963
37
+ },
38
+ {
39
+ "accuracy": 0.75146484375,
40
+ "f1": 0.7448177775515663,
41
+ "f1_weighted": 0.7448297603319657
42
+ },
43
+ {
44
+ "accuracy": 0.7666015625,
45
+ "f1": 0.7585974028511066,
46
+ "f1_weighted": 0.7586090595368074
47
+ },
48
+ {
49
+ "accuracy": 0.76513671875,
50
+ "f1": 0.7571505656657413,
51
+ "f1_weighted": 0.7570814163349027
52
+ },
53
+ {
54
+ "accuracy": 0.75048828125,
55
+ "f1": 0.7387789343013292,
56
+ "f1_weighted": 0.738758782732261
57
+ },
58
+ {
59
+ "accuracy": 0.77734375,
60
+ "f1": 0.7673204031909897,
61
+ "f1_weighted": 0.7673438338086436
62
+ },
63
+ {
64
+ "accuracy": 0.7607421875,
65
+ "f1": 0.7490045201661578,
66
+ "f1_weighted": 0.7491212371283648
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "DBpediaClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/DalajClassification.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "dataset_revision": "7ebf0b4caa7b2ae39698a889de782c09e6f5ee56",
3
+ "evaluation_time": 2.5870473384857178,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.5012387387387387,
10
+ "ap": 0.5006372904185549,
11
+ "ap_weighted": 0.5006372904185549,
12
+ "f1": 0.4985051578463787,
13
+ "f1_weighted": 0.4985051578463787,
14
+ "hf_subset": "default",
15
+ "languages": [
16
+ "swe-Latn"
17
+ ],
18
+ "main_score": 0.5012387387387387,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.5033783783783784,
22
+ "ap": 0.5016992239764517,
23
+ "ap_weighted": 0.5016992239764517,
24
+ "f1": 0.5010237977225438,
25
+ "f1_weighted": 0.5010237977225438
26
+ },
27
+ {
28
+ "accuracy": 0.5,
29
+ "ap": 0.5,
30
+ "ap_weighted": 0.5,
31
+ "f1": 0.4998756894163492,
32
+ "f1_weighted": 0.49987568941634913
33
+ },
34
+ {
35
+ "accuracy": 0.4988738738738739,
36
+ "ap": 0.49943863803380417,
37
+ "ap_weighted": 0.49943863803380417,
38
+ "f1": 0.4906255035287293,
39
+ "f1_weighted": 0.4906255035287293
40
+ },
41
+ {
42
+ "accuracy": 0.4954954954954955,
43
+ "ap": 0.49777456027456035,
44
+ "ap_weighted": 0.49777456027456035,
45
+ "f1": 0.4879209144269385,
46
+ "f1_weighted": 0.4879209144269385
47
+ },
48
+ {
49
+ "accuracy": 0.5056306306306306,
50
+ "ap": 0.5028481278481278,
51
+ "ap_weighted": 0.5028481278481278,
52
+ "f1": 0.5054895289850936,
53
+ "f1_weighted": 0.5054895289850936
54
+ },
55
+ {
56
+ "accuracy": 0.4966216216216216,
57
+ "ap": 0.4983241817014904,
58
+ "ap_weighted": 0.4983241817014904,
59
+ "f1": 0.4939100034552268,
60
+ "f1_weighted": 0.4939100034552268
61
+ },
62
+ {
63
+ "accuracy": 0.5022522522522522,
64
+ "ap": 0.501131043707463,
65
+ "ap_weighted": 0.501131043707463,
66
+ "f1": 0.5021285016261854,
67
+ "f1_weighted": 0.5021285016261854
68
+ },
69
+ {
70
+ "accuracy": 0.5078828828828829,
71
+ "ap": 0.5040170307293595,
72
+ "ap_weighted": 0.5040170307293595,
73
+ "f1": 0.5039569067228427,
74
+ "f1_weighted": 0.5039569067228427
75
+ },
76
+ {
77
+ "accuracy": 0.4988738738738739,
78
+ "ap": 0.4994383341157535,
79
+ "ap_weighted": 0.4994383341157535,
80
+ "f1": 0.4978033024808639,
81
+ "f1_weighted": 0.4978033024808639
82
+ },
83
+ {
84
+ "accuracy": 0.5033783783783784,
85
+ "ap": 0.5017017637985379,
86
+ "ap_weighted": 0.5017017637985379,
87
+ "f1": 0.5023174300990134,
88
+ "f1_weighted": 0.5023174300990135
89
+ }
90
+ ]
91
+ }
92
+ ]
93
+ },
94
+ "task_name": "DalajClassification"
95
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/DiaBlaBitextMining.json ADDED
@@ -0,0 +1,35 @@
1
+ {
2
+ "dataset_revision": "5345895c56a601afe1a98519ce3199be60a27dba",
3
+ "evaluation_time": 37.72365617752075,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.8745650661099513,
10
+ "f1": 0.8514483689588075,
11
+ "hf_subset": "fr-en",
12
+ "languages": [
13
+ "fra-Latn",
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.8514483689588075,
17
+ "precision": 0.8423583409361071,
18
+ "recall": 0.8745650661099513
19
+ },
20
+ {
21
+ "accuracy": 0.8745650661099513,
22
+ "f1": 0.8514483689588075,
23
+ "hf_subset": "en-fr",
24
+ "languages": [
25
+ "eng-Latn",
26
+ "fra-Latn"
27
+ ],
28
+ "main_score": 0.8514483689588075,
29
+ "precision": 0.8423583409361071,
30
+ "recall": 0.8745650661099513
31
+ }
32
+ ]
33
+ },
34
+ "task_name": "DiaBlaBitextMining"
35
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/EstonianValenceClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "9157397f05a127b3ac93b93dd88abf1bdf710c22",
3
+ "evaluation_time": 3.2780234813690186,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.4880195599022005,
10
+ "f1": 0.43073867168569324,
11
+ "f1_weighted": 0.505703708033557,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "est-Latn"
15
+ ],
16
+ "main_score": 0.4880195599022005,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.46577017114914426,
20
+ "f1": 0.4070050721200674,
21
+ "f1_weighted": 0.48092282539527587
22
+ },
23
+ {
24
+ "accuracy": 0.49877750611246946,
25
+ "f1": 0.442466925294393,
26
+ "f1_weighted": 0.5160540151692992
27
+ },
28
+ {
29
+ "accuracy": 0.4731051344743276,
30
+ "f1": 0.41378418018025664,
31
+ "f1_weighted": 0.4949784599271765
32
+ },
33
+ {
34
+ "accuracy": 0.5073349633251834,
35
+ "f1": 0.4327548800993326,
36
+ "f1_weighted": 0.5201528322992435
37
+ },
38
+ {
39
+ "accuracy": 0.4963325183374083,
40
+ "f1": 0.447682545893763,
41
+ "f1_weighted": 0.5217052232231976
42
+ },
43
+ {
44
+ "accuracy": 0.4743276283618582,
45
+ "f1": 0.44238144979109906,
46
+ "f1_weighted": 0.4960594297185096
47
+ },
48
+ {
49
+ "accuracy": 0.48166259168704156,
50
+ "f1": 0.4100081354228473,
51
+ "f1_weighted": 0.4942821101973126
52
+ },
53
+ {
54
+ "accuracy": 0.5048899755501223,
55
+ "f1": 0.44738163480707704,
56
+ "f1_weighted": 0.5229124834498211
57
+ },
58
+ {
59
+ "accuracy": 0.49755501222493886,
60
+ "f1": 0.44451241865247026,
61
+ "f1_weighted": 0.5160130903523614
62
+ },
63
+ {
64
+ "accuracy": 0.480440097799511,
65
+ "f1": 0.4194094745956261,
66
+ "f1_weighted": 0.49395661060337276
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "EstonianValenceClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FaroeseSTS.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "dataset_revision": "8cb36efa69428b3dc290e1125995a999963163c5",
3
+ "evaluation_time": 0.8660893440246582,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "train": [
8
+ {
9
+ "cosine_pearson": 0.8029315386725461,
10
+ "cosine_spearman": 0.8081871117320545,
11
+ "euclidean_pearson": 0.8050058787871058,
12
+ "euclidean_spearman": 0.8081763484656946,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "fao-Latn"
16
+ ],
17
+ "main_score": 0.8081871117320545,
18
+ "manhattan_pearson": 0.8050867165783648,
19
+ "manhattan_spearman": 0.8089429287677464,
20
+ "pearson": 0.8029315386725461,
21
+ "spearman": 0.8081871117320545
22
+ }
23
+ ]
24
+ },
25
+ "task_name": "FaroeseSTS"
26
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FilipinoShopeeReviewsClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "d096f402fdc76886458c0cfb5dedc829bea2b935",
3
+ "evaluation_time": 10.240617036819458,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.39453125,
10
+ "f1": 0.3721769497695471,
11
+ "f1_weighted": 0.37215175146677154,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "fil-Latn"
15
+ ],
16
+ "main_score": 0.39453125,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.40966796875,
20
+ "f1": 0.3869097471446986,
21
+ "f1_weighted": 0.38687585886615694
22
+ },
23
+ {
24
+ "accuracy": 0.37646484375,
25
+ "f1": 0.35196788048953154,
26
+ "f1_weighted": 0.35196877733023596
27
+ },
28
+ {
29
+ "accuracy": 0.3857421875,
30
+ "f1": 0.3736302482939824,
31
+ "f1_weighted": 0.3736018723628032
32
+ },
33
+ {
34
+ "accuracy": 0.39306640625,
35
+ "f1": 0.37367631089636644,
36
+ "f1_weighted": 0.3736354799773757
37
+ },
38
+ {
39
+ "accuracy": 0.38720703125,
40
+ "f1": 0.35763451930862145,
41
+ "f1_weighted": 0.3575829546830991
42
+ },
43
+ {
44
+ "accuracy": 0.4072265625,
45
+ "f1": 0.3776779896940723,
46
+ "f1_weighted": 0.3776351023012192
47
+ },
48
+ {
49
+ "accuracy": 0.38525390625,
50
+ "f1": 0.36639950668710963,
51
+ "f1_weighted": 0.36640221302849685
52
+ },
53
+ {
54
+ "accuracy": 0.40673828125,
55
+ "f1": 0.3850698598994804,
56
+ "f1_weighted": 0.3850656407142965
57
+ },
58
+ {
59
+ "accuracy": 0.4287109375,
60
+ "f1": 0.40685801691649653,
61
+ "f1_weighted": 0.40679830549229645
62
+ },
63
+ {
64
+ "accuracy": 0.365234375,
65
+ "f1": 0.3419454183651117,
66
+ "f1_weighted": 0.34195130991173567
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.375048828125,
74
+ "f1": 0.35343444615015046,
75
+ "f1_weighted": 0.35340801263537863,
76
+ "hf_subset": "default",
77
+ "languages": [
78
+ "fil-Latn"
79
+ ],
80
+ "main_score": 0.375048828125,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.3974609375,
84
+ "f1": 0.37768935014724797,
85
+ "f1_weighted": 0.3776526995793823
86
+ },
87
+ {
88
+ "accuracy": 0.3603515625,
89
+ "f1": 0.33755367423244165,
90
+ "f1_weighted": 0.33754957667053637
91
+ },
92
+ {
93
+ "accuracy": 0.353515625,
94
+ "f1": 0.34445167344085226,
95
+ "f1_weighted": 0.34443011154447273
96
+ },
97
+ {
98
+ "accuracy": 0.3955078125,
99
+ "f1": 0.3787354108275659,
100
+ "f1_weighted": 0.3786945546113438
101
+ },
102
+ {
103
+ "accuracy": 0.359375,
104
+ "f1": 0.331584606802685,
105
+ "f1_weighted": 0.33154373398798215
106
+ },
107
+ {
108
+ "accuracy": 0.38916015625,
109
+ "f1": 0.36185903165137556,
110
+ "f1_weighted": 0.3618178234607547
111
+ },
112
+ {
113
+ "accuracy": 0.36328125,
114
+ "f1": 0.34015343289128097,
115
+ "f1_weighted": 0.34016514321371133
116
+ },
117
+ {
118
+ "accuracy": 0.3916015625,
119
+ "f1": 0.3698868269507015,
120
+ "f1_weighted": 0.3698638531296065
121
+ },
122
+ {
123
+ "accuracy": 0.396484375,
124
+ "f1": 0.37282687450902724,
125
+ "f1_weighted": 0.3727589754962555
126
+ },
127
+ {
128
+ "accuracy": 0.34375,
129
+ "f1": 0.3196035800483266,
130
+ "f1_weighted": 0.31960365465974117
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "FilipinoShopeeReviewsClassification"
137
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FinParaSTS.json ADDED
@@ -0,0 +1,43 @@
1
+ {
2
+ "dataset_revision": "e4428e399de70a21b8857464e76f0fe859cabe05",
3
+ "evaluation_time": 2.353466272354126,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.2252326931221018,
10
+ "cosine_spearman": 0.22810110190184646,
11
+ "euclidean_pearson": 0.23417389115770884,
12
+ "euclidean_spearman": 0.22841094072620427,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "fin-Latn"
16
+ ],
17
+ "main_score": 0.22810110190184646,
18
+ "manhattan_pearson": 0.2351167216220935,
19
+ "manhattan_spearman": 0.22913216840025472,
20
+ "pearson": 0.2252326931221018,
21
+ "spearman": 0.22810110190184646
22
+ }
23
+ ],
24
+ "validation": [
25
+ {
26
+ "cosine_pearson": 0.19589792391260902,
27
+ "cosine_spearman": 0.21946628839725615,
28
+ "euclidean_pearson": 0.21010503658168891,
29
+ "euclidean_spearman": 0.21951271732778851,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "fin-Latn"
33
+ ],
34
+ "main_score": 0.21946628839725615,
35
+ "manhattan_pearson": 0.21172989096234646,
36
+ "manhattan_spearman": 0.22017630248620004,
37
+ "pearson": 0.19589792391260902,
38
+ "spearman": 0.21946628839725615
39
+ }
40
+ ]
41
+ },
42
+ "task_name": "FinParaSTS"
43
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FinancialPhrasebankClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "1484d06fe7af23030c7c977b12556108d1f67039",
3
+ "evaluation_time": 9.08685851097107,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "train": [
8
+ {
9
+ "accuracy": 0.7950088339222614,
10
+ "f1": 0.752309908730233,
11
+ "f1_weighted": 0.7973311533423167,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.7950088339222614,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.8374558303886925,
20
+ "f1": 0.7859053155012874,
21
+ "f1_weighted": 0.83201457715215
22
+ },
23
+ {
24
+ "accuracy": 0.8206713780918727,
25
+ "f1": 0.765358476100734,
26
+ "f1_weighted": 0.8174399276496798
27
+ },
28
+ {
29
+ "accuracy": 0.7195229681978799,
30
+ "f1": 0.685578785943206,
31
+ "f1_weighted": 0.7315991782273273
32
+ },
33
+ {
34
+ "accuracy": 0.8096289752650176,
35
+ "f1": 0.7681712087968703,
36
+ "f1_weighted": 0.8101050442197438
37
+ },
38
+ {
39
+ "accuracy": 0.8069787985865724,
40
+ "f1": 0.7597293061156662,
41
+ "f1_weighted": 0.8059139080380809
42
+ },
43
+ {
44
+ "accuracy": 0.7009717314487632,
45
+ "f1": 0.6721062298127468,
46
+ "f1_weighted": 0.7160864029172662
47
+ },
48
+ {
49
+ "accuracy": 0.8272968197879859,
50
+ "f1": 0.7887828896753346,
51
+ "f1_weighted": 0.8298322499465806
52
+ },
53
+ {
54
+ "accuracy": 0.7897526501766784,
55
+ "f1": 0.7455205594343011,
56
+ "f1_weighted": 0.7933973447894309
57
+ },
58
+ {
59
+ "accuracy": 0.806095406360424,
60
+ "f1": 0.7546692678240059,
61
+ "f1_weighted": 0.806921425892424
62
+ },
63
+ {
64
+ "accuracy": 0.8317137809187279,
65
+ "f1": 0.797277048098178,
66
+ "f1_weighted": 0.830001474590484
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "FinancialPhrasebankClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/FloresBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
 
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/GermanSTSBenchmark.json ADDED
@@ -0,0 +1,43 @@
1
+ {
2
+ "dataset_revision": "e36907544d44c3a247898ed81540310442329e20",
3
+ "evaluation_time": 3.461139440536499,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.8655593433745424,
10
+ "cosine_spearman": 0.8751210055806788,
11
+ "euclidean_pearson": 0.8697325897710915,
12
+ "euclidean_spearman": 0.8751120465895358,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "deu-Latn"
16
+ ],
17
+ "main_score": 0.8751210055806788,
18
+ "manhattan_pearson": 0.8689278217429843,
19
+ "manhattan_spearman": 0.8747357941594118,
20
+ "pearson": 0.8655593433745424,
21
+ "spearman": 0.8751210055806788
22
+ }
23
+ ],
24
+ "validation": [
25
+ {
26
+ "cosine_pearson": 0.8718957764321714,
27
+ "cosine_spearman": 0.8823152672673511,
28
+ "euclidean_pearson": 0.8811254797066426,
29
+ "euclidean_spearman": 0.8822949505262861,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "deu-Latn"
33
+ ],
34
+ "main_score": 0.8823152672673511,
35
+ "manhattan_pearson": 0.8806158199969483,
36
+ "manhattan_spearman": 0.8819941300182129,
37
+ "pearson": 0.8718957764321714,
38
+ "spearman": 0.8823152672673511
39
+ }
40
+ ]
41
+ },
42
+ "task_name": "GermanSTSBenchmark"
43
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/GreekLegalCodeClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "de0fdb34424f07d1ac6f0ede23ee0ed44bd9f5d1",
3
+ "evaluation_time": 1669.8070709705353,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.11171875,
10
+ "f1": 0.09740276046422339,
11
+ "f1_weighted": 0.10493057406764247,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "ell-Grek"
15
+ ],
16
+ "main_score": 0.11171875,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.107421875,
20
+ "f1": 0.09473745899837162,
21
+ "f1_weighted": 0.09609382111777107
22
+ },
23
+ {
24
+ "accuracy": 0.10986328125,
25
+ "f1": 0.09680248257977392,
26
+ "f1_weighted": 0.10185522306092659
27
+ },
28
+ {
29
+ "accuracy": 0.09619140625,
30
+ "f1": 0.09145399402795389,
31
+ "f1_weighted": 0.08779607212578028
32
+ },
33
+ {
34
+ "accuracy": 0.103515625,
35
+ "f1": 0.0985856843136613,
36
+ "f1_weighted": 0.09408820573944612
37
+ },
38
+ {
39
+ "accuracy": 0.12646484375,
40
+ "f1": 0.10546427303012272,
41
+ "f1_weighted": 0.1248091762830787
42
+ },
43
+ {
44
+ "accuracy": 0.119140625,
45
+ "f1": 0.09798310415776788,
46
+ "f1_weighted": 0.11285397301182779
47
+ },
48
+ {
49
+ "accuracy": 0.10888671875,
50
+ "f1": 0.09056674500616094,
51
+ "f1_weighted": 0.1098264689034471
52
+ },
53
+ {
54
+ "accuracy": 0.11376953125,
55
+ "f1": 0.10169244895282238,
56
+ "f1_weighted": 0.10431656012196774
57
+ },
58
+ {
59
+ "accuracy": 0.11376953125,
60
+ "f1": 0.09466905250268577,
61
+ "f1_weighted": 0.10914658576635926
62
+ },
63
+ {
64
+ "accuracy": 0.1181640625,
65
+ "f1": 0.10207236107291351,
66
+ "f1_weighted": 0.10851965454582016
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.119873046875,
74
+ "f1": 0.09555012851319404,
75
+ "f1_weighted": 0.11561146767089762,
76
+ "hf_subset": "default",
77
+ "languages": [
78
+ "ell-Grek"
79
+ ],
80
+ "main_score": 0.119873046875,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.1181640625,
84
+ "f1": 0.08465031721451456,
85
+ "f1_weighted": 0.11239959315236794
86
+ },
87
+ {
88
+ "accuracy": 0.119140625,
89
+ "f1": 0.10057068186966178,
90
+ "f1_weighted": 0.11542811988235524
91
+ },
92
+ {
93
+ "accuracy": 0.115234375,
94
+ "f1": 0.09580121305749807,
95
+ "f1_weighted": 0.10940791331719135
96
+ },
97
+ {
98
+ "accuracy": 0.12353515625,
99
+ "f1": 0.09786892382863598,
100
+ "f1_weighted": 0.11324330565525849
101
+ },
102
+ {
103
+ "accuracy": 0.1240234375,
104
+ "f1": 0.09698916661314783,
105
+ "f1_weighted": 0.12302996730661683
106
+ },
107
+ {
108
+ "accuracy": 0.11669921875,
109
+ "f1": 0.09057386588935139,
110
+ "f1_weighted": 0.11289592845644894
111
+ },
112
+ {
113
+ "accuracy": 0.11572265625,
114
+ "f1": 0.09662595589431519,
115
+ "f1_weighted": 0.11402412816697269
116
+ },
117
+ {
118
+ "accuracy": 0.11376953125,
119
+ "f1": 0.09237709985345423,
120
+ "f1_weighted": 0.1108164840557268
121
+ },
122
+ {
123
+ "accuracy": 0.12158203125,
124
+ "f1": 0.09150548627572357,
125
+ "f1_weighted": 0.11751122697600944
126
+ },
127
+ {
128
+ "accuracy": 0.130859375,
129
+ "f1": 0.10853857463563786,
130
+ "f1_weighted": 0.1273580097400287
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "GreekLegalCodeClassification"
137
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/GujaratiNewsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "1a5f2fa2914bfeff4fcdc6fff4194fa8ec8fa19e",
3
+ "evaluation_time": 2.8942301273345947,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.8622913505311077,
10
+ "f1": 0.8371014372650579,
11
+ "f1_weighted": 0.8639574407192099,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "guj-Gujr"
15
+ ],
16
+ "main_score": 0.8622913505311077,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.8368740515933232,
20
+ "f1": 0.8114658375417841,
21
+ "f1_weighted": 0.8405777705932624
22
+ },
23
+ {
24
+ "accuracy": 0.8892261001517451,
25
+ "f1": 0.8672039472847386,
26
+ "f1_weighted": 0.8889014921234352
27
+ },
28
+ {
29
+ "accuracy": 0.8019726858877086,
30
+ "f1": 0.7678770673727531,
31
+ "f1_weighted": 0.8026334640502129
32
+ },
33
+ {
34
+ "accuracy": 0.8839150227617603,
35
+ "f1": 0.8613462422504096,
36
+ "f1_weighted": 0.8839994972284902
37
+ },
38
+ {
39
+ "accuracy": 0.8877086494688923,
40
+ "f1": 0.8680073264032417,
41
+ "f1_weighted": 0.8882798954210303
42
+ },
43
+ {
44
+ "accuracy": 0.8657056145675266,
45
+ "f1": 0.8438649541365079,
46
+ "f1_weighted": 0.8697123924188362
47
+ },
48
+ {
49
+ "accuracy": 0.8391502276176024,
50
+ "f1": 0.8147161123599552,
51
+ "f1_weighted": 0.8439931127668241
52
+ },
53
+ {
54
+ "accuracy": 0.877845220030349,
55
+ "f1": 0.8548416275567609,
56
+ "f1_weighted": 0.8789343045743218
57
+ },
58
+ {
59
+ "accuracy": 0.8573596358118362,
60
+ "f1": 0.8229162525061771,
61
+ "f1_weighted": 0.8574182329323549
62
+ },
63
+ {
64
+ "accuracy": 0.8831562974203339,
65
+ "f1": 0.8587750052382508,
66
+ "f1_weighted": 0.8851242450833307
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "GujaratiNewsClassification"
73
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/HALClusteringS2S.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "e06ebbbb123f8144bef1a5d18796f3dec9ae2915",
3
+ "evaluation_time": 6.149868965148926,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "fra-Latn"
12
+ ],
13
+ "main_score": 0.2926211186445843,
14
+ "v_measure": 0.2926211186445843,
15
+ "v_measure_std": 0.009992613072093478,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.27453065675914207,
19
+ 0.2910469679184419,
20
+ 0.28180591906217445,
21
+ 0.2839343488574907,
22
+ 0.3090729167386895,
23
+ 0.3060730510436298,
24
+ 0.2957102867945612,
25
+ 0.29535430905879134,
26
+ 0.2952910902591037,
27
+ 0.2933916399538188
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "HALClusteringS2S.v2"
34
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/HagridRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "b2a085913606be3c4f2f1a8bff1810e38bade8fa",
3
+ "evaluation_time": 2.3364994525909424,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.98694,
14
+ "map_at_1": 0.98185,
15
+ "map_at_10": 0.98589,
16
+ "map_at_100": 0.98598,
17
+ "map_at_1000": 0.98601,
18
+ "map_at_20": 0.98589,
19
+ "map_at_3": 0.98589,
20
+ "map_at_5": 0.98589,
21
+ "mrr_at_1": 0.9818548387096774,
22
+ "mrr_at_10": 0.9858870967741935,
23
+ "mrr_at_100": 0.9859831029185868,
24
+ "mrr_at_1000": 0.9860076014321154,
25
+ "mrr_at_20": 0.9858870967741935,
26
+ "mrr_at_3": 0.9858870967741935,
27
+ "mrr_at_5": 0.9858870967741935,
28
+ "nauc_map_at_1000_diff1": 0.9811948398920487,
29
+ "nauc_map_at_1000_max": 0.7901900619911458,
30
+ "nauc_map_at_1000_std": 0.204625658955274,
31
+ "nauc_map_at_100_diff1": 0.9811810958181444,
32
+ "nauc_map_at_100_max": 0.7905110224632653,
33
+ "nauc_map_at_100_std": 0.20495433795475823,
34
+ "nauc_map_at_10_diff1": 0.9813091155744836,
35
+ "nauc_map_at_10_max": 0.7919361175485493,
36
+ "nauc_map_at_10_std": 0.2043961834002389,
37
+ "nauc_map_at_1_diff1": 0.9854626454468206,
38
+ "nauc_map_at_1_max": 0.6908824262953818,
39
+ "nauc_map_at_1_std": 0.12309880863143513,
40
+ "nauc_map_at_20_diff1": 0.9813091155744836,
41
+ "nauc_map_at_20_max": 0.7919361175485493,
42
+ "nauc_map_at_20_std": 0.2043961834002389,
43
+ "nauc_map_at_3_diff1": 0.9813091155744836,
44
+ "nauc_map_at_3_max": 0.7919361175485493,
45
+ "nauc_map_at_3_std": 0.2043961834002389,
46
+ "nauc_map_at_5_diff1": 0.9813091155744836,
47
+ "nauc_map_at_5_max": 0.7919361175485493,
48
+ "nauc_map_at_5_std": 0.2043961834002389,
49
+ "nauc_mrr_at_1000_diff1": 0.9811949256359416,
50
+ "nauc_mrr_at_1000_max": 0.7901910186392546,
51
+ "nauc_mrr_at_1000_std": 0.2046254734161775,
52
+ "nauc_mrr_at_100_diff1": 0.9811810958181444,
53
+ "nauc_mrr_at_100_max": 0.7905110224632653,
54
+ "nauc_mrr_at_100_std": 0.20495433795475823,
55
+ "nauc_mrr_at_10_diff1": 0.9813091155744836,
56
+ "nauc_mrr_at_10_max": 0.7919361175485493,
57
+ "nauc_mrr_at_10_std": 0.2043961834002389,
58
+ "nauc_mrr_at_1_diff1": 0.9854626454468206,
59
+ "nauc_mrr_at_1_max": 0.6908824262953818,
60
+ "nauc_mrr_at_1_std": 0.12309880863143513,
61
+ "nauc_mrr_at_20_diff1": 0.9813091155744836,
62
+ "nauc_mrr_at_20_max": 0.7919361175485493,
63
+ "nauc_mrr_at_20_std": 0.2043961834002389,
64
+ "nauc_mrr_at_3_diff1": 0.9813091155744836,
65
+ "nauc_mrr_at_3_max": 0.7919361175485493,
66
+ "nauc_mrr_at_3_std": 0.2043961834002389,
67
+ "nauc_mrr_at_5_diff1": 0.9813091155744836,
68
+ "nauc_mrr_at_5_max": 0.7919361175485493,
69
+ "nauc_mrr_at_5_std": 0.2043961834002389,
70
+ "nauc_ndcg_at_1000_diff1": 0.9799671200511393,
71
+ "nauc_ndcg_at_1000_max": 0.8105834822646681,
72
+ "nauc_ndcg_at_1000_std": 0.22421908344514122,
73
+ "nauc_ndcg_at_100_diff1": 0.9790730305020617,
74
+ "nauc_ndcg_at_100_max": 0.8225660998166753,
75
+ "nauc_ndcg_at_100_std": 0.23796446920921113,
76
+ "nauc_ndcg_at_10_diff1": 0.9797976352064889,
77
+ "nauc_ndcg_at_10_max": 0.8287098197094255,
78
+ "nauc_ndcg_at_10_std": 0.23398051039643453,
79
+ "nauc_ndcg_at_1_diff1": 0.9854626454468206,
80
+ "nauc_ndcg_at_1_max": 0.6908824262953818,
81
+ "nauc_ndcg_at_1_std": 0.12309880863143513,
82
+ "nauc_ndcg_at_20_diff1": 0.9797976352064889,
83
+ "nauc_ndcg_at_20_max": 0.8287098197094255,
84
+ "nauc_ndcg_at_20_std": 0.23398051039643453,
85
+ "nauc_ndcg_at_3_diff1": 0.9797976352064889,
86
+ "nauc_ndcg_at_3_max": 0.8287098197094255,
87
+ "nauc_ndcg_at_3_std": 0.23398051039643453,
88
+ "nauc_ndcg_at_5_diff1": 0.9797976352064889,
89
+ "nauc_ndcg_at_5_max": 0.8287098197094255,
90
+ "nauc_ndcg_at_5_std": 0.23398051039643453,
91
+ "nauc_precision_at_1000_diff1": 1.0,
92
+ "nauc_precision_at_1000_max": 1.0,
93
+ "nauc_precision_at_1000_std": 1.0,
94
+ "nauc_precision_at_100_diff1": 0.9672909522553235,
95
+ "nauc_precision_at_100_max": 0.9672909522553235,
96
+ "nauc_precision_at_100_std": 0.4076879178705973,
97
+ "nauc_precision_at_10_diff1": 0.9738327618042669,
98
+ "nauc_precision_at_10_max": 0.9738327618042669,
99
+ "nauc_precision_at_10_std": 0.3507314579840428,
100
+ "nauc_precision_at_1_diff1": 0.9854626454468206,
101
+ "nauc_precision_at_1_max": 0.6908824262953818,
102
+ "nauc_precision_at_1_std": 0.12309880863143513,
103
+ "nauc_precision_at_20_diff1": 0.9738327618042669,
104
+ "nauc_precision_at_20_max": 0.9738327618042669,
105
+ "nauc_precision_at_20_std": 0.3507314579840428,
106
+ "nauc_precision_at_3_diff1": 0.9738327618042841,
107
+ "nauc_precision_at_3_max": 0.9738327618042841,
108
+ "nauc_precision_at_3_std": 0.3507314579840766,
109
+ "nauc_precision_at_5_diff1": 0.9738327618042669,
110
+ "nauc_precision_at_5_max": 0.9738327618042669,
111
+ "nauc_precision_at_5_std": 0.3507314579840428,
112
+ "nauc_recall_at_1000_diff1": NaN,
113
+ "nauc_recall_at_1000_max": NaN,
114
+ "nauc_recall_at_1000_std": NaN,
115
+ "nauc_recall_at_100_diff1": 0.9672909522553301,
116
+ "nauc_recall_at_100_max": 0.9672909522553301,
117
+ "nauc_recall_at_100_std": 0.40768791787061504,
118
+ "nauc_recall_at_10_diff1": 0.9738327618042774,
119
+ "nauc_recall_at_10_max": 0.9738327618042774,
120
+ "nauc_recall_at_10_std": 0.35073145798406824,
121
+ "nauc_recall_at_1_diff1": 0.9854626454468206,
122
+ "nauc_recall_at_1_max": 0.6908824262953818,
123
+ "nauc_recall_at_1_std": 0.12309880863143513,
124
+ "nauc_recall_at_20_diff1": 0.9738327618042774,
125
+ "nauc_recall_at_20_max": 0.9738327618042774,
126
+ "nauc_recall_at_20_std": 0.35073145798406824,
127
+ "nauc_recall_at_3_diff1": 0.9738327618042774,
128
+ "nauc_recall_at_3_max": 0.9738327618042774,
129
+ "nauc_recall_at_3_std": 0.35073145798406824,
130
+ "nauc_recall_at_5_diff1": 0.9738327618042774,
131
+ "nauc_recall_at_5_max": 0.9738327618042774,
132
+ "nauc_recall_at_5_std": 0.35073145798406824,
133
+ "ndcg_at_1": 0.98185,
134
+ "ndcg_at_10": 0.98694,
135
+ "ndcg_at_100": 0.9874,
136
+ "ndcg_at_1000": 0.98835,
137
+ "ndcg_at_20": 0.98694,
138
+ "ndcg_at_3": 0.98694,
139
+ "ndcg_at_5": 0.98694,
140
+ "precision_at_1": 0.98185,
141
+ "precision_at_10": 0.09899,
142
+ "precision_at_100": 0.00992,
143
+ "precision_at_1000": 0.001,
144
+ "precision_at_20": 0.0495,
145
+ "precision_at_3": 0.32997,
146
+ "precision_at_5": 0.19798,
147
+ "recall_at_1": 0.98185,
148
+ "recall_at_10": 0.98992,
149
+ "recall_at_100": 0.99194,
150
+ "recall_at_1000": 1.0,
151
+ "recall_at_20": 0.98992,
152
+ "recall_at_3": 0.98992,
153
+ "recall_at_5": 0.98992
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "HagridRetrieval"
158
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IN22GenBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
 
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndicCrosslingualSTS.json ADDED
@@ -0,0 +1,203 @@
1
+ {
2
+ "dataset_revision": "0ca7b87dda68ef4ebb2f50a20a62b9dbebcac3e4",
3
+ "evaluation_time": 4.393025875091553,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.5867372974192103,
10
+ "cosine_spearman": 0.5796641324959597,
11
+ "euclidean_pearson": 0.5719596457946108,
12
+ "euclidean_spearman": 0.5793717743282025,
13
+ "hf_subset": "en-te",
14
+ "languages": [
15
+ "eng-Latn",
16
+ "tel-Telu"
17
+ ],
18
+ "main_score": 0.5796641324959597,
19
+ "manhattan_pearson": 0.5702568100162838,
20
+ "manhattan_spearman": 0.5804571093987366,
21
+ "pearson": 0.5867372974192103,
22
+ "spearman": 0.5796641324959597
23
+ },
24
+ {
25
+ "cosine_pearson": 0.7110030231016488,
26
+ "cosine_spearman": 0.7070727021021247,
27
+ "euclidean_pearson": 0.6996483018639007,
28
+ "euclidean_spearman": 0.7068894560709681,
29
+ "hf_subset": "en-gu",
30
+ "languages": [
31
+ "eng-Latn",
32
+ "guj-Gujr"
33
+ ],
34
+ "main_score": 0.7070727021021247,
35
+ "manhattan_pearson": 0.6979440271130216,
36
+ "manhattan_spearman": 0.7051712082758634,
37
+ "pearson": 0.7110030231016488,
38
+ "spearman": 0.7070727021021247
39
+ },
40
+ {
41
+ "cosine_pearson": 0.059751038893235436,
42
+ "cosine_spearman": 0.1228412957850981,
43
+ "euclidean_pearson": 0.06711401117934034,
44
+ "euclidean_spearman": 0.12284828948871883,
45
+ "hf_subset": "en-or",
46
+ "languages": [
47
+ "eng-Latn",
48
+ "ory-Orya"
49
+ ],
50
+ "main_score": 0.1228412957850981,
51
+ "manhattan_pearson": 0.06391862932320345,
52
+ "manhattan_spearman": 0.12195698081616799,
53
+ "pearson": 0.059751038893235436,
54
+ "spearman": 0.1228412957850981
55
+ },
56
+ {
57
+ "cosine_pearson": 0.6523968031989974,
58
+ "cosine_spearman": 0.6451615740964641,
59
+ "euclidean_pearson": 0.6334950658090405,
60
+ "euclidean_spearman": 0.6452816238600034,
61
+ "hf_subset": "en-kn",
62
+ "languages": [
63
+ "eng-Latn",
64
+ "kan-Knda"
65
+ ],
66
+ "main_score": 0.6451615740964641,
67
+ "manhattan_pearson": 0.6331477512632324,
68
+ "manhattan_spearman": 0.6451336471121967,
69
+ "pearson": 0.6523968031989974,
70
+ "spearman": 0.6451615740964641
71
+ },
72
+ {
73
+ "cosine_pearson": 0.7198523985371674,
74
+ "cosine_spearman": 0.7569392731559716,
75
+ "euclidean_pearson": 0.7250788259084947,
76
+ "euclidean_spearman": 0.7561964063177138,
77
+ "hf_subset": "en-hi",
78
+ "languages": [
79
+ "eng-Latn",
80
+ "hin-Deva"
81
+ ],
82
+ "main_score": 0.7569392731559716,
83
+ "manhattan_pearson": 0.7254683899911072,
84
+ "manhattan_spearman": 0.7566993386989479,
85
+ "pearson": 0.7198523985371674,
86
+ "spearman": 0.7569392731559716
87
+ },
88
+ {
89
+ "cosine_pearson": 0.5964906468474553,
90
+ "cosine_spearman": 0.6080463867724732,
91
+ "euclidean_pearson": 0.5955895923052317,
92
+ "euclidean_spearman": 0.6083186998969184,
93
+ "hf_subset": "en-ta",
94
+ "languages": [
95
+ "eng-Latn",
96
+ "tam-Taml"
97
+ ],
98
+ "main_score": 0.6080463867724732,
99
+ "manhattan_pearson": 0.5889145231220938,
100
+ "manhattan_spearman": 0.5999220706526629,
101
+ "pearson": 0.5964906468474553,
102
+ "spearman": 0.6080463867724732
103
+ },
104
+ {
105
+ "cosine_pearson": 0.5343775572762897,
106
+ "cosine_spearman": 0.5541074379410089,
107
+ "euclidean_pearson": 0.5148582890304172,
108
+ "euclidean_spearman": 0.5537118894021201,
109
+ "hf_subset": "en-bn",
110
+ "languages": [
111
+ "eng-Latn",
112
+ "ben-Beng"
113
+ ],
114
+ "main_score": 0.5541074379410089,
115
+ "manhattan_pearson": 0.5179044521376157,
116
+ "manhattan_spearman": 0.5549932656072789,
117
+ "pearson": 0.5343775572762897,
118
+ "spearman": 0.5541074379410089
119
+ },
120
+ {
121
+ "cosine_pearson": 0.542455242826086,
122
+ "cosine_spearman": 0.5883389430942628,
123
+ "euclidean_pearson": 0.5505503459216123,
124
+ "euclidean_spearman": 0.588632788684279,
125
+ "hf_subset": "en-mr",
126
+ "languages": [
127
+ "eng-Latn",
128
+ "mar-Deva"
129
+ ],
130
+ "main_score": 0.5883389430942628,
131
+ "manhattan_pearson": 0.5550286362242749,
132
+ "manhattan_spearman": 0.5919747394525059,
133
+ "pearson": 0.542455242826086,
134
+ "spearman": 0.5883389430942628
135
+ },
136
+ {
137
+ "cosine_pearson": 0.6281012789769389,
138
+ "cosine_spearman": 0.6199942499940971,
139
+ "euclidean_pearson": 0.6188328932053261,
140
+ "euclidean_spearman": 0.6199598025098569,
141
+ "hf_subset": "en-pa",
142
+ "languages": [
143
+ "eng-Latn",
144
+ "pan-Guru"
145
+ ],
146
+ "main_score": 0.6199942499940971,
147
+ "manhattan_pearson": 0.6233024885138049,
148
+ "manhattan_spearman": 0.6238706913589096,
149
+ "pearson": 0.6281012789769389,
150
+ "spearman": 0.6199942499940971
151
+ },
152
+ {
153
+ "cosine_pearson": 0.5794895332299972,
154
+ "cosine_spearman": 0.578394311734721,
155
+ "euclidean_pearson": 0.5866583980290372,
156
+ "euclidean_spearman": 0.5783485475880471,
157
+ "hf_subset": "en-as",
158
+ "languages": [
159
+ "eng-Latn",
160
+ "asm-Beng"
161
+ ],
162
+ "main_score": 0.578394311734721,
163
+ "manhattan_pearson": 0.5872109173504572,
164
+ "manhattan_spearman": 0.5796050797097138,
165
+ "pearson": 0.5794895332299972,
166
+ "spearman": 0.578394311734721
167
+ },
168
+ {
169
+ "cosine_pearson": 0.18679689925075604,
170
+ "cosine_spearman": 0.1778608235427975,
171
+ "euclidean_pearson": 0.19220841707433878,
172
+ "euclidean_spearman": 0.17785527326651626,
173
+ "hf_subset": "en-ur",
174
+ "languages": [
175
+ "eng-Latn",
176
+ "urd-Arab"
177
+ ],
178
+ "main_score": 0.1778608235427975,
179
+ "manhattan_pearson": 0.19376654503786978,
180
+ "manhattan_spearman": 0.1819672879540764,
181
+ "pearson": 0.18679689925075604,
182
+ "spearman": 0.1778608235427975
183
+ },
184
+ {
185
+ "cosine_pearson": 0.6329399833962733,
186
+ "cosine_spearman": 0.6210929978317676,
187
+ "euclidean_pearson": 0.6322144346089629,
188
+ "euclidean_spearman": 0.6212123039214972,
189
+ "hf_subset": "en-ml",
190
+ "languages": [
191
+ "eng-Latn",
192
+ "mal-Mlym"
193
+ ],
194
+ "main_score": 0.6210929978317676,
195
+ "manhattan_pearson": 0.6352092415298338,
196
+ "manhattan_spearman": 0.6233616266990848,
197
+ "pearson": 0.6329399833962733,
198
+ "spearman": 0.6210929978317676
199
+ }
200
+ ]
201
+ },
202
+ "task_name": "IndicCrosslingualSTS"
203
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndicGenBenchFloresBitextMining.json ADDED
@@ -0,0 +1,1405 @@
1
+ {
2
+ "dataset_revision": "f8650438298df086750ff4973661bb58a201a5ee",
3
+ "evaluation_time": 399.599981546402,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.18.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 1.0,
10
+ "f1": 1.0,
11
+ "hf_subset": "ben-eng",
12
+ "languages": [
13
+ "ben-Beng",
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 1.0,
17
+ "precision": 1.0,
18
+ "recall": 1.0
19
+ },
20
+ {
21
+ "accuracy": 0.9970355731225297,
22
+ "f1": 0.9960474308300395,
23
+ "hf_subset": "eng-ben",
24
+ "languages": [
25
+ "eng-Latn",
26
+ "ben-Beng"
27
+ ],
28
+ "main_score": 0.9960474308300395,
29
+ "precision": 0.9955533596837944,
30
+ "recall": 0.9970355731225297
31
+ },
32
+ {
33
+ "accuracy": 0.9990118577075099,
34
+ "f1": 0.9986824769433464,
35
+ "hf_subset": "guj-eng",
36
+ "languages": [
37
+ "guj-Gujr",
38
+ "eng-Latn"
39
+ ],
40
+ "main_score": 0.9986824769433464,
41
+ "precision": 0.9985177865612648,
42
+ "recall": 0.9990118577075099
43
+ },
44
+ {
45
+ "accuracy": 0.9980237154150198,
46
+ "f1": 0.997364953886693,
47
+ "hf_subset": "eng-guj",
48
+ "languages": [
49
+ "eng-Latn",
50
+ "guj-Gujr"
51
+ ],
52
+ "main_score": 0.997364953886693,
53
+ "precision": 0.9970355731225297,
54
+ "recall": 0.9980237154150198
55
+ },
56
+ {
57
+ "accuracy": 1.0,
58
+ "f1": 1.0,
59
+ "hf_subset": "hin-eng",
60
+ "languages": [
61
+ "hin-Deva",
62
+ "eng-Latn"
63
+ ],
64
+ "main_score": 1.0,
65
+ "precision": 1.0,
66
+ "recall": 1.0
67
+ },
68
+ {
69
+ "accuracy": 1.0,
70
+ "f1": 1.0,
71
+ "hf_subset": "eng-hin",
72
+ "languages": [
73
+ "eng-Latn",
74
+ "hin-Deva"
75
+ ],
76
+ "main_score": 1.0,
77
+ "precision": 1.0,
78
+ "recall": 1.0
79
+ },
80
+ {
81
+ "accuracy": 0.9990118577075099,
82
+ "f1": 0.9986824769433464,
83
+ "hf_subset": "kan-eng",
84
+ "languages": [
85
+ "kan-Knda",
86
+ "eng-Latn"
87
+ ],
88
+ "main_score": 0.9986824769433464,
89
+ "precision": 0.9985177865612648,
90
+ "recall": 0.9990118577075099
91
+ },
92
+ {
93
+ "accuracy": 0.9950592885375494,
94
+ "f1": 0.9934123847167324,
95
+ "hf_subset": "eng-kan",
96
+ "languages": [
97
+ "eng-Latn",
98
+ "kan-Knda"
99
+ ],
100
+ "main_score": 0.9934123847167324,
101
+ "precision": 0.9925889328063241,
102
+ "recall": 0.9950592885375494
103
+ },
104
+ {
105
+ "accuracy": 0.9990118577075099,
106
+ "f1": 0.9986824769433464,
107
+ "hf_subset": "mal-eng",
108
+ "languages": [
109
+ "mal-Mlym",
110
+ "eng-Latn"
111
+ ],
112
+ "main_score": 0.9986824769433464,
113
+ "precision": 0.9985177865612648,
114
+ "recall": 0.9990118577075099
115
+ },
116
+ {
117
+ "accuracy": 0.9990118577075099,
118
+ "f1": 0.9986824769433464,
119
+ "hf_subset": "eng-mal",
120
+ "languages": [
121
+ "eng-Latn",
122
+ "mal-Mlym"
123
+ ],
124
+ "main_score": 0.9986824769433464,
125
+ "precision": 0.9985177865612648,
126
+ "recall": 0.9990118577075099
127
+ },
128
+ {
129
+ "accuracy": 1.0,
130
+ "f1": 1.0,
131
+ "hf_subset": "mar-eng",
132
+ "languages": [
133
+ "mar-Deva",
134
+ "eng-Latn"
135
+ ],
136
+ "main_score": 1.0,
137
+ "precision": 1.0,
138
+ "recall": 1.0
139
+ },
140
+ {
141
+ "accuracy": 0.9980237154150198,
142
+ "f1": 0.997364953886693,
143
+ "hf_subset": "eng-mar",
144
+ "languages": [
145
+ "eng-Latn",
146
+ "mar-Deva"
147
+ ],
148
+ "main_score": 0.997364953886693,
149
+ "precision": 0.9970355731225297,
150
+ "recall": 0.9980237154150198
151
+ },
152
+ {
153
+ "accuracy": 0.9980237154150198,
154
+ "f1": 0.997364953886693,
155
+ "hf_subset": "tam-eng",
156
+ "languages": [
157
+ "tam-Taml",
158
+ "eng-Latn"
159
+ ],
160
+ "main_score": 0.997364953886693,
161
+ "precision": 0.9970355731225297,
162
+ "recall": 0.9980237154150198
163
+ },
164
+ {
165
+ "accuracy": 0.9950592885375494,
166
+ "f1": 0.9934123847167324,
167
+ "hf_subset": "eng-tam",
168
+ "languages": [
169
+ "eng-Latn",
170
+ "tam-Taml"
171
+ ],
172
+ "main_score": 0.9934123847167324,
173
+ "precision": 0.9925889328063241,
174
+ "recall": 0.9950592885375494
175
+ },
176
+ {
177
+ "accuracy": 0.9990118577075099,
178
+ "f1": 0.9986824769433466,
179
+ "hf_subset": "tel-eng",
180
+ "languages": [
181
+ "tel-Telu",
182
+ "eng-Latn"
183
+ ],
184
+ "main_score": 0.9986824769433466,
185
+ "precision": 0.9985177865612648,
186
+ "recall": 0.9990118577075099
187
+ },
188
+ {
189
+ "accuracy": 0.9950592885375494,
190
+ "f1": 0.9934123847167325,
191
+ "hf_subset": "eng-tel",
192
+ "languages": [
193
+ "eng-Latn",
194
+ "tel-Telu"
195
+ ],
196
+ "main_score": 0.9934123847167325,
197
+ "precision": 0.9925889328063241,
198
+ "recall": 0.9950592885375494
199
+ },
200
+ {
201
+ "accuracy": 0.9980237154150198,
202
+ "f1": 0.997364953886693,
203
+ "hf_subset": "urd-eng",
204
+ "languages": [
205
+ "urd-Arab",
206
+ "eng-Latn"
207
+ ],
208
+ "main_score": 0.997364953886693,
209
+ "precision": 0.9970355731225297,
210
+ "recall": 0.9980237154150198
211
+ },
212
+ {
213
+ "accuracy": 0.9960474308300395,
214
+ "f1": 0.9947299077733859,
215
+ "hf_subset": "eng-urd",
216
+ "languages": [
217
+ "eng-Latn",
218
+ "urd-Arab"
219
+ ],
220
+ "main_score": 0.9947299077733859,
221
+ "precision": 0.9940711462450593,
222
+ "recall": 0.9960474308300395
223
+ },
224
+ {
225
+ "accuracy": 0.9841897233201581,
226
+ "f1": 0.9794795783926219,
227
+ "hf_subset": "asm-eng",
228
+ "languages": [
229
+ "asm-Beng",
230
+ "eng-Latn"
231
+ ],
232
+ "main_score": 0.9794795783926219,
233
+ "precision": 0.9773550724637681,
234
+ "recall": 0.9841897233201581
235
+ },
236
+ {
237
+ "accuracy": 0.9664031620553359,
238
+ "f1": 0.9556982872200264,
239
+ "hf_subset": "eng-asm",
240
+ "languages": [
241
+ "eng-Latn",
242
+ "asm-Beng"
243
+ ],
244
+ "main_score": 0.9556982872200264,
245
+ "precision": 0.950592885375494,
246
+ "recall": 0.9664031620553359
247
+ },
248
+ {
249
+ "accuracy": 0.991106719367589,
250
+ "f1": 0.9881422924901185,
251
+ "hf_subset": "bho-eng",
252
+ "languages": [
253
+ "bho-Deva",
254
+ "eng-Latn"
255
+ ],
256
+ "main_score": 0.9881422924901185,
257
+ "precision": 0.9866600790513834,
258
+ "recall": 0.991106719367589
259
+ },
260
+ {
261
+ "accuracy": 0.9802371541501976,
262
+ "f1": 0.9736495388669302,
263
+ "hf_subset": "eng-bho",
264
+ "languages": [
265
+ "eng-Latn",
266
+ "bho-Deva"
267
+ ],
268
+ "main_score": 0.9736495388669302,
269
+ "precision": 0.9703557312252964,
270
+ "recall": 0.9802371541501976
271
+ },
272
+ {
273
+ "accuracy": 0.9970355731225297,
274
+ "f1": 0.9963768115942028,
275
+ "hf_subset": "nep-eng",
276
+ "languages": [
277
+ "nep-Deva",
278
+ "eng-Latn"
279
+ ],
280
+ "main_score": 0.9963768115942028,
281
+ "precision": 0.9960474308300395,
282
+ "recall": 0.9970355731225297
283
+ },
284
+ {
285
+ "accuracy": 0.9940711462450593,
286
+ "f1": 0.9924242424242423,
287
+ "hf_subset": "eng-nep",
288
+ "languages": [
289
+ "eng-Latn",
290
+ "nep-Deva"
291
+ ],
292
+ "main_score": 0.9924242424242423,
293
+ "precision": 0.991600790513834,
294
+ "recall": 0.9940711462450593
295
+ },
296
+ {
297
+ "accuracy": 0.9970355731225297,
298
+ "f1": 0.9960474308300395,
299
+ "hf_subset": "ory-eng",
300
+ "languages": [
301
+ "ory-Orya",
302
+ "eng-Latn"
303
+ ],
304
+ "main_score": 0.9960474308300395,
305
+ "precision": 0.9955533596837944,
306
+ "recall": 0.9970355731225297
307
+ },
308
+ {
309
+ "accuracy": 0.9940711462450593,
310
+ "f1": 0.9920948616600791,
311
+ "hf_subset": "eng-ory",
312
+ "languages": [
313
+ "eng-Latn",
314
+ "ory-Orya"
315
+ ],
316
+ "main_score": 0.9920948616600791,
317
+ "precision": 0.991106719367589,
318
+ "recall": 0.9940711462450593
319
+ },
320
+ {
321
+ "accuracy": 0.9950592885375494,
322
+ "f1": 0.9934123847167324,
323
+ "hf_subset": "pan-eng",
324
+ "languages": [
325
+ "pan-Guru",
326
+ "eng-Latn"
327
+ ],
328
+ "main_score": 0.9934123847167324,
329
+ "precision": 0.9925889328063241,
330
+ "recall": 0.9950592885375494
331
+ },
332
+ {
333
+ "accuracy": 0.9980237154150198,
334
+ "f1": 0.997364953886693,
335
+ "hf_subset": "eng-pan",
336
+ "languages": [
337
+ "eng-Latn",
338
+ "pan-Guru"
339
+ ],
340
+ "main_score": 0.997364953886693,
341
+ "precision": 0.9970355731225297,
342
+ "recall": 0.9980237154150198
343
+ },
344
+ {
345
+ "accuracy": 0.9950592885375494,
346
+ "f1": 0.9934123847167324,
347
+ "hf_subset": "pus-eng",
348
+ "languages": [
349
+ "pus-Arab",
350
+ "eng-Latn"
351
+ ],
352
+ "main_score": 0.9934123847167324,
353
+ "precision": 0.9925889328063241,
354
+ "recall": 0.9950592885375494
355
+ },
356
+ {
357
+ "accuracy": 0.9871541501976284,
358
+ "f1": 0.9830368906455862,
359
+ "hf_subset": "eng-pus",
360
+ "languages": [
361
+ "eng-Latn",
362
+ "pus-Arab"
363
+ ],
364
+ "main_score": 0.9830368906455862,
365
+ "precision": 0.9810606060606061,
366
+ "recall": 0.9871541501976284
367
+ },
368
+ {
369
+ "accuracy": 0.9723320158102767,
370
+ "f1": 0.9636034255599474,
371
+ "hf_subset": "san-eng",
372
+ "languages": [
373
+ "san-Deva",
374
+ "eng-Latn"
375
+ ],
376
+ "main_score": 0.9636034255599474,
377
+ "precision": 0.9593214756258234,
378
+ "recall": 0.9723320158102767
379
+ },
380
+ {
381
+ "accuracy": 0.9496047430830039,
382
+ "f1": 0.9342885375494071,
383
+ "hf_subset": "eng-san",
384
+ "languages": [
385
+ "eng-Latn",
386
+ "san-Deva"
387
+ ],
388
+ "main_score": 0.9342885375494071,
389
+ "precision": 0.9268774703557312,
390
+ "recall": 0.9496047430830039
391
+ },
392
+ {
393
+ "accuracy": 0.9901185770750988,
394
+ "f1": 0.9878787878787879,
395
+ "hf_subset": "awa-eng",
396
+ "languages": [
397
+ "awa-Deva",
398
+ "eng-Latn"
399
+ ],
400
+ "main_score": 0.9878787878787879,
401
+ "precision": 0.986907114624506,
402
+ "recall": 0.9901185770750988
403
+ },
404
+ {
405
+ "accuracy": 0.9881422924901185,
406
+ "f1": 0.9856719367588933,
407
+ "hf_subset": "eng-awa",
408
+ "languages": [
409
+ "eng-Latn",
410
+ "awa-Deva"
411
+ ],
412
+ "main_score": 0.9856719367588933,
413
+ "precision": 0.9845191040843215,
414
+ "recall": 0.9881422924901185
415
+ },
416
+ {
417
+ "accuracy": 0.9990118577075099,
418
+ "f1": 0.9986824769433464,
419
+ "hf_subset": "bgc-eng",
420
+ "languages": [
421
+ "bgc-Deva",
422
+ "eng-Latn"
423
+ ],
424
+ "main_score": 0.9986824769433464,
425
+ "precision": 0.9985177865612648,
426
+ "recall": 0.9990118577075099
427
+ },
428
+ {
429
+ "accuracy": 0.9970355731225297,
430
+ "f1": 0.9960474308300395,
431
+ "hf_subset": "eng-bgc",
432
+ "languages": [
433
+ "eng-Latn",
434
+ "bgc-Deva"
435
+ ],
436
+ "main_score": 0.9960474308300395,
437
+ "precision": 0.9955533596837944,
438
+ "recall": 0.9970355731225297
439
+ },
440
+ {
441
+ "accuracy": 0.06225296442687747,
442
+ "f1": 0.05195512572959307,
443
+ "hf_subset": "bod-eng",
444
+ "languages": [
445
+ "bod-Tibt",
446
+ "eng-Latn"
447
+ ],
448
+ "main_score": 0.05195512572959307,
449
+ "precision": 0.04924386532303237,
450
+ "recall": 0.06225296442687747
451
+ },
452
+ {
453
+ "accuracy": 0.12450592885375494,
454
+ "f1": 0.07216681794283067,
455
+ "hf_subset": "eng-bod",
456
+ "languages": [
457
+ "eng-Latn",
458
+ "bod-Tibt"
459
+ ],
460
+ "main_score": 0.07216681794283067,
461
+ "precision": 0.06265583088909271,
462
+ "recall": 0.12450592885375494
463
+ },
464
+ {
465
+ "accuracy": 0.3438735177865613,
466
+ "f1": 0.28890009519400556,
467
+ "hf_subset": "boy-eng",
468
+ "languages": [
469
+ "boy-Deva",
470
+ "eng-Latn"
471
+ ],
472
+ "main_score": 0.28890009519400556,
473
+ "precision": 0.27318244633149635,
474
+ "recall": 0.3438735177865613
475
+ },
476
+ {
477
+ "accuracy": 0.3774703557312253,
478
+ "f1": 0.29422214807590297,
479
+ "hf_subset": "eng-boy",
480
+ "languages": [
481
+ "eng-Latn",
482
+ "boy-Deva"
483
+ ],
484
+ "main_score": 0.29422214807590297,
485
+ "precision": 0.2673607902856915,
486
+ "recall": 0.3774703557312253
487
+ },
488
+ {
489
+ "accuracy": 0.9940711462450593,
490
+ "f1": 0.9920948616600791,
491
+ "hf_subset": "gbm-eng",
492
+ "languages": [
493
+ "gbm-Deva",
494
+ "eng-Latn"
495
+ ],
496
+ "main_score": 0.9920948616600791,
497
+ "precision": 0.991106719367589,
498
+ "recall": 0.9940711462450593
499
+ },
500
+ {
501
+ "accuracy": 0.9901185770750988,
502
+ "f1": 0.9868247694334651,
503
+ "hf_subset": "eng-gbm",
504
+ "languages": [
505
+ "eng-Latn",
506
+ "gbm-Deva"
507
+ ],
508
+ "main_score": 0.9868247694334651,
509
+ "precision": 0.9851778656126482,
510
+ "recall": 0.9901185770750988
511
+ },
512
+ {
513
+ "accuracy": 0.9179841897233202,
514
+ "f1": 0.8943675889328062,
515
+ "hf_subset": "gom-eng",
516
+ "languages": [
517
+ "gom-Deva",
518
+ "eng-Latn"
519
+ ],
520
+ "main_score": 0.8943675889328062,
521
+ "precision": 0.8835638998682478,
522
+ "recall": 0.9179841897233202
523
+ },
524
+ {
525
+ "accuracy": 0.8932806324110671,
526
+ "f1": 0.8611330698287221,
527
+ "hf_subset": "eng-gom",
528
+ "languages": [
529
+ "eng-Latn",
530
+ "gom-Deva"
531
+ ],
532
+ "main_score": 0.8611330698287221,
533
+ "precision": 0.8461791831357048,
534
+ "recall": 0.8932806324110671
535
+ },
536
+ {
537
+ "accuracy": 0.991106719367589,
538
+ "f1": 0.9881422924901185,
539
+ "hf_subset": "hne-eng",
540
+ "languages": [
541
+ "hne-Deva",
542
+ "eng-Latn"
543
+ ],
544
+ "main_score": 0.9881422924901185,
545
+ "precision": 0.9866600790513834,
546
+ "recall": 0.991106719367589
547
+ },
548
+ {
549
+ "accuracy": 0.9841897233201581,
550
+ "f1": 0.9789196310935442,
551
+ "hf_subset": "eng-hne",
552
+ "languages": [
553
+ "eng-Latn",
554
+ "hne-Deva"
555
+ ],
556
+ "main_score": 0.9789196310935442,
557
+ "precision": 0.9762845849802372,
558
+ "recall": 0.9841897233201581
559
+ },
560
+ {
561
+ "accuracy": 0.9940711462450593,
562
+ "f1": 0.9920948616600791,
563
+ "hf_subset": "raj-eng",
564
+ "languages": [
565
+ "raj-Deva",
566
+ "eng-Latn"
567
+ ],
568
+ "main_score": 0.9920948616600791,
569
+ "precision": 0.991106719367589,
570
+ "recall": 0.9940711462450593
571
+ },
572
+ {
573
+ "accuracy": 0.991106719367589,
574
+ "f1": 0.9881422924901185,
575
+ "hf_subset": "eng-raj",
576
+ "languages": [
577
+ "eng-Latn",
578
+ "raj-Deva"
579
+ ],
580
+ "main_score": 0.9881422924901185,
581
+ "precision": 0.9866600790513834,
582
+ "recall": 0.991106719367589
583
+ },
584
+ {
585
+ "accuracy": 0.9970355731225297,
586
+ "f1": 0.9960474308300395,
587
+ "hf_subset": "mai-eng",
588
+ "languages": [
589
+ "mai-Deva",
590
+ "eng-Latn"
591
+ ],
592
+ "main_score": 0.9960474308300395,
593
+ "precision": 0.9955533596837944,
594
+ "recall": 0.9970355731225297
595
+ },
596
+ {
597
+ "accuracy": 0.9901185770750988,
598
+ "f1": 0.9868247694334651,
599
+ "hf_subset": "eng-mai",
600
+ "languages": [
601
+ "eng-Latn",
602
+ "mai-Deva"
603
+ ],
604
+ "main_score": 0.9868247694334651,
605
+ "precision": 0.9851778656126482,
606
+ "recall": 0.9901185770750988
607
+ },
608
+ {
609
+ "accuracy": 0.31225296442687744,
610
+ "f1": 0.2711554232516321,
611
+ "hf_subset": "mni-eng",
612
+ "languages": [
613
+ "mni-Mtei",
614
+ "eng-Latn"
615
+ ],
616
+ "main_score": 0.2711554232516321,
617
+ "precision": 0.2604637337425459,
618
+ "recall": 0.31225296442687744
619
+ },
620
+ {
621
+ "accuracy": 0.33794466403162055,
622
+ "f1": 0.24959619221242893,
623
+ "hf_subset": "eng-mni",
624
+ "languages": [
625
+ "eng-Latn",
626
+ "mni-Mtei"
627
+ ],
628
+ "main_score": 0.24959619221242893,
629
+ "precision": 0.22386843406664378,
630
+ "recall": 0.33794466403162055
631
+ },
632
+ {
633
+ "accuracy": 0.9960474308300395,
634
+ "f1": 0.9947299077733861,
635
+ "hf_subset": "mup-eng",
636
+ "languages": [
637
+ "mup-Deva",
638
+ "eng-Latn"
639
+ ],
640
+ "main_score": 0.9947299077733861,
641
+ "precision": 0.9940711462450593,
642
+ "recall": 0.9960474308300395
643
+ },
644
+ {
645
+ "accuracy": 0.9881422924901185,
646
+ "f1": 0.9841897233201581,
647
+ "hf_subset": "eng-mup",
648
+ "languages": [
649
+ "eng-Latn",
650
+ "mup-Deva"
651
+ ],
652
+ "main_score": 0.9841897233201581,
653
+ "precision": 0.9822134387351779,
654
+ "recall": 0.9881422924901185
655
+ },
656
+ {
657
+ "accuracy": 0.9960474308300395,
658
+ "f1": 0.9947299077733861,
659
+ "hf_subset": "mwr-eng",
660
+ "languages": [
661
+ "mwr-Deva",
662
+ "eng-Latn"
663
+ ],
664
+ "main_score": 0.9947299077733861,
665
+ "precision": 0.9940711462450593,
666
+ "recall": 0.9960474308300395
667
+ },
668
+ {
669
+ "accuracy": 0.991106719367589,
670
+ "f1": 0.9881422924901185,
671
+ "hf_subset": "eng-mwr",
672
+ "languages": [
673
+ "eng-Latn",
674
+ "mwr-Deva"
675
+ ],
676
+ "main_score": 0.9881422924901185,
677
+ "precision": 0.9866600790513834,
678
+ "recall": 0.991106719367589
679
+ },
680
+ {
681
+ "accuracy": 0.014822134387351778,
682
+ "f1": 0.01153062208205368,
683
+ "hf_subset": "sat-eng",
684
+ "languages": [
685
+ "sat-Olck",
686
+ "eng-Latn"
687
+ ],
688
+ "main_score": 0.01153062208205368,
689
+ "precision": 0.010574271532309954,
690
+ "recall": 0.014822134387351778
691
+ },
692
+ {
693
+ "accuracy": 0.022727272727272728,
694
+ "f1": 0.003626383193063275,
695
+ "hf_subset": "eng-sat",
696
+ "languages": [
697
+ "eng-Latn",
698
+ "sat-Olck"
699
+ ],
700
+ "main_score": 0.003626383193063275,
701
+ "precision": 0.0021283574700659805,
702
+ "recall": 0.022727272727272728
703
+ }
704
+ ],
705
+ "validation": [
706
+ {
707
+ "accuracy": 1.0,
708
+ "f1": 1.0,
709
+ "hf_subset": "ben-eng",
710
+ "languages": [
711
+ "ben-Beng",
712
+ "eng-Latn"
713
+ ],
714
+ "main_score": 1.0,
715
+ "precision": 1.0,
716
+ "recall": 1.0
717
+ },
718
+ {
719
+ "accuracy": 0.9989969909729187,
720
+ "f1": 0.9986626546305584,
721
+ "hf_subset": "eng-ben",
722
+ "languages": [
723
+ "eng-Latn",
724
+ "ben-Beng"
725
+ ],
726
+ "main_score": 0.9986626546305584,
727
+ "precision": 0.9984954864593781,
728
+ "recall": 0.9989969909729187
729
+ },
730
+ {
731
+ "accuracy": 0.9979939819458375,
732
+ "f1": 0.9973253092611166,
733
+ "hf_subset": "guj-eng",
734
+ "languages": [
735
+ "guj-Gujr",
736
+ "eng-Latn"
737
+ ],
738
+ "main_score": 0.9973253092611166,
739
+ "precision": 0.9969909729187563,
740
+ "recall": 0.9979939819458375
741
+ },
742
+ {
743
+ "accuracy": 0.9979939819458375,
744
+ "f1": 0.9973253092611167,
745
+ "hf_subset": "eng-guj",
746
+ "languages": [
747
+ "eng-Latn",
748
+ "guj-Gujr"
749
+ ],
750
+ "main_score": 0.9973253092611167,
751
+ "precision": 0.9969909729187563,
752
+ "recall": 0.9979939819458375
753
+ },
754
+ {
755
+ "accuracy": 0.9989969909729187,
756
+ "f1": 0.9986626546305584,
757
+ "hf_subset": "hin-eng",
758
+ "languages": [
759
+ "hin-Deva",
760
+ "eng-Latn"
761
+ ],
762
+ "main_score": 0.9986626546305584,
763
+ "precision": 0.9984954864593781,
764
+ "recall": 0.9989969909729187
765
+ },
766
+ {
767
+ "accuracy": 1.0,
768
+ "f1": 1.0,
769
+ "hf_subset": "eng-hin",
770
+ "languages": [
771
+ "eng-Latn",
772
+ "hin-Deva"
773
+ ],
774
+ "main_score": 1.0,
775
+ "precision": 1.0,
776
+ "recall": 1.0
777
+ },
778
+ {
779
+ "accuracy": 0.9979939819458375,
780
+ "f1": 0.9973253092611166,
781
+ "hf_subset": "kan-eng",
782
+ "languages": [
783
+ "kan-Knda",
784
+ "eng-Latn"
785
+ ],
786
+ "main_score": 0.9973253092611166,
787
+ "precision": 0.9969909729187563,
788
+ "recall": 0.9979939819458375
789
+ },
790
+ {
791
+ "accuracy": 0.995987963891675,
792
+ "f1": 0.9946506185222334,
793
+ "hf_subset": "eng-kan",
794
+ "languages": [
795
+ "eng-Latn",
796
+ "kan-Knda"
797
+ ],
798
+ "main_score": 0.9946506185222334,
799
+ "precision": 0.9939819458375125,
800
+ "recall": 0.995987963891675
801
+ },
802
+ {
803
+ "accuracy": 0.995987963891675,
804
+ "f1": 0.9948177866934135,
805
+ "hf_subset": "mal-eng",
806
+ "languages": [
807
+ "mal-Mlym",
808
+ "eng-Latn"
809
+ ],
810
+ "main_score": 0.9948177866934135,
811
+ "precision": 0.994316282179873,
812
+ "recall": 0.995987963891675
813
+ },
814
+ {
815
+ "accuracy": 0.9969909729187563,
816
+ "f1": 0.995987963891675,
817
+ "hf_subset": "eng-mal",
818
+ "languages": [
819
+ "eng-Latn",
820
+ "mal-Mlym"
821
+ ],
822
+ "main_score": 0.995987963891675,
823
+ "precision": 0.9954864593781344,
824
+ "recall": 0.9969909729187563
825
+ },
826
+ {
827
+ "accuracy": 0.9969909729187563,
828
+ "f1": 0.995987963891675,
829
+ "hf_subset": "mar-eng",
830
+ "languages": [
831
+ "mar-Deva",
832
+ "eng-Latn"
833
+ ],
834
+ "main_score": 0.995987963891675,
835
+ "precision": 0.9954864593781344,
836
+ "recall": 0.9969909729187563
837
+ },
838
+ {
839
+ "accuracy": 0.9969909729187563,
840
+ "f1": 0.995987963891675,
841
+ "hf_subset": "eng-mar",
842
+ "languages": [
843
+ "eng-Latn",
844
+ "mar-Deva"
845
+ ],
846
+ "main_score": 0.995987963891675,
847
+ "precision": 0.9954864593781344,
848
+ "recall": 0.9969909729187563
849
+ },
850
+ {
851
+ "accuracy": 0.995987963891675,
852
+ "f1": 0.9946506185222334,
853
+ "hf_subset": "tam-eng",
854
+ "languages": [
855
+ "tam-Taml",
856
+ "eng-Latn"
857
+ ],
858
+ "main_score": 0.9946506185222334,
859
+ "precision": 0.9939819458375125,
860
+ "recall": 0.995987963891675
861
+ },
862
+ {
863
+ "accuracy": 0.995987963891675,
864
+ "f1": 0.9946506185222334,
865
+ "hf_subset": "eng-tam",
866
+ "languages": [
867
+ "eng-Latn",
868
+ "tam-Taml"
869
+ ],
870
+ "main_score": 0.9946506185222334,
871
+ "precision": 0.9939819458375125,
872
+ "recall": 0.995987963891675
873
+ },
874
+ {
875
+ "accuracy": 0.995987963891675,
876
+ "f1": 0.9946506185222335,
877
+ "hf_subset": "tel-eng",
878
+ "languages": [
879
+ "tel-Telu",
880
+ "eng-Latn"
881
+ ],
882
+ "main_score": 0.9946506185222335,
883
+ "precision": 0.9939819458375125,
884
+ "recall": 0.995987963891675
885
+ },
886
+ {
887
+ "accuracy": 0.9909729187562688,
888
+ "f1": 0.9879638916750251,
889
+ "hf_subset": "eng-tel",
890
+ "languages": [
891
+ "eng-Latn",
892
+ "tel-Telu"
893
+ ],
894
+ "main_score": 0.9879638916750251,
895
+ "precision": 0.9864593781344032,
896
+ "recall": 0.9909729187562688
897
+ },
898
+ {
899
+ "accuracy": 0.9949849548645938,
900
+ "f1": 0.9933132731527917,
901
+ "hf_subset": "urd-eng",
902
+ "languages": [
903
+ "urd-Arab",
904
+ "eng-Latn"
905
+ ],
906
+ "main_score": 0.9933132731527917,
907
+ "precision": 0.9924774322968907,
908
+ "recall": 0.9949849548645938
909
+ },
910
+ {
911
+ "accuracy": 0.9939819458375125,
912
+ "f1": 0.9919759277833501,
913
+ "hf_subset": "eng-urd",
914
+ "languages": [
915
+ "eng-Latn",
916
+ "urd-Arab"
917
+ ],
918
+ "main_score": 0.9919759277833501,
919
+ "precision": 0.9909729187562688,
920
+ "recall": 0.9939819458375125
921
+ },
922
+ {
923
+ "accuracy": 0.9829488465396189,
924
+ "f1": 0.9772651287194919,
925
+ "hf_subset": "asm-eng",
926
+ "languages": [
927
+ "asm-Beng",
928
+ "eng-Latn"
929
+ ],
930
+ "main_score": 0.9772651287194919,
931
+ "precision": 0.9744232698094283,
932
+ "recall": 0.9829488465396189
933
+ },
934
+ {
935
+ "accuracy": 0.9699097291875627,
936
+ "f1": 0.9607823470411234,
937
+ "hf_subset": "eng-asm",
938
+ "languages": [
939
+ "eng-Latn",
940
+ "asm-Beng"
941
+ ],
942
+ "main_score": 0.9607823470411234,
943
+ "precision": 0.9566198595787362,
944
+ "recall": 0.9699097291875627
945
+ },
946
+ {
947
+ "accuracy": 0.9929789368104313,
948
+ "f1": 0.9909729187562688,
949
+ "hf_subset": "bho-eng",
950
+ "languages": [
951
+ "bho-Deva",
952
+ "eng-Latn"
953
+ ],
954
+ "main_score": 0.9909729187562688,
955
+ "precision": 0.9899699097291875,
956
+ "recall": 0.9929789368104313
957
+ },
958
+ {
959
+ "accuracy": 0.9759277833500501,
960
+ "f1": 0.9679037111334002,
961
+ "hf_subset": "eng-bho",
962
+ "languages": [
963
+ "eng-Latn",
964
+ "bho-Deva"
965
+ ],
966
+ "main_score": 0.9679037111334002,
967
+ "precision": 0.9638916750250752,
968
+ "recall": 0.9759277833500501
969
+ },
970
+ {
971
+ "accuracy": 0.9919759277833501,
972
+ "f1": 0.9896355733868271,
973
+ "hf_subset": "nep-eng",
974
+ "languages": [
975
+ "nep-Deva",
976
+ "eng-Latn"
977
+ ],
978
+ "main_score": 0.9896355733868271,
979
+ "precision": 0.9884653961885657,
980
+ "recall": 0.9919759277833501
981
+ },
982
+ {
983
+ "accuracy": 0.9929789368104313,
984
+ "f1": 0.9909729187562688,
985
+ "hf_subset": "eng-nep",
986
+ "languages": [
987
+ "eng-Latn",
988
+ "nep-Deva"
989
+ ],
990
+ "main_score": 0.9909729187562688,
991
+ "precision": 0.9899699097291875,
992
+ "recall": 0.9929789368104313
993
+ },
994
+ {
995
+ "accuracy": 0.9969909729187563,
996
+ "f1": 0.9963223002340353,
997
+ "hf_subset": "ory-eng",
998
+ "languages": [
999
+ "ory-Orya",
1000
+ "eng-Latn"
1001
+ ],
1002
+ "main_score": 0.9963223002340353,
1003
+ "precision": 0.995987963891675,
1004
+ "recall": 0.9969909729187563
1005
+ },
1006
+ {
1007
+ "accuracy": 0.9939819458375125,
1008
+ "f1": 0.9919759277833501,
1009
+ "hf_subset": "eng-ory",
1010
+ "languages": [
1011
+ "eng-Latn",
1012
+ "ory-Orya"
1013
+ ],
1014
+ "main_score": 0.9919759277833501,
1015
+ "precision": 0.9909729187562688,
1016
+ "recall": 0.9939819458375125
1017
+ },
1018
+ {
1019
+ "accuracy": 1.0,
1020
+ "f1": 1.0,
1021
+ "hf_subset": "pan-eng",
1022
+ "languages": [
1023
+ "pan-Guru",
1024
+ "eng-Latn"
1025
+ ],
1026
+ "main_score": 1.0,
1027
+ "precision": 1.0,
1028
+ "recall": 1.0
1029
+ },
1030
+ {
1031
+ "accuracy": 0.9989969909729187,
1032
+ "f1": 0.9986626546305583,
1033
+ "hf_subset": "eng-pan",
1034
+ "languages": [
1035
+ "eng-Latn",
1036
+ "pan-Guru"
1037
+ ],
1038
+ "main_score": 0.9986626546305583,
1039
+ "precision": 0.9984954864593781,
1040
+ "recall": 0.9989969909729187
1041
+ },
1042
+ {
1043
+ "accuracy": 0.9909729187562688,
1044
+ "f1": 0.9879638916750251,
1045
+ "hf_subset": "pus-eng",
1046
+ "languages": [
1047
+ "pus-Arab",
1048
+ "eng-Latn"
1049
+ ],
1050
+ "main_score": 0.9879638916750251,
1051
+ "precision": 0.9864593781344032,
1052
+ "recall": 0.9909729187562688
1053
+ },
1054
+ {
1055
+ "accuracy": 0.9829488465396189,
1056
+ "f1": 0.9772651287194918,
1057
+ "hf_subset": "eng-pus",
1058
+ "languages": [
1059
+ "eng-Latn",
1060
+ "pus-Arab"
1061
+ ],
1062
+ "main_score": 0.9772651287194918,
1063
+ "precision": 0.9744232698094283,
1064
+ "recall": 0.9829488465396189
1065
+ },
1066
+ {
1067
+ "accuracy": 0.958876629889669,
1068
+ "f1": 0.9466399197592777,
1069
+ "hf_subset": "san-eng",
1070
+ "languages": [
1071
+ "san-Deva",
1072
+ "eng-Latn"
1073
+ ],
1074
+ "main_score": 0.9466399197592777,
1075
+ "precision": 0.9409896355733868,
1076
+ "recall": 0.958876629889669
1077
+ },
1078
+ {
1079
+ "accuracy": 0.9448345035105316,
1080
+ "f1": 0.9281176863925108,
1081
+ "hf_subset": "eng-san",
1082
+ "languages": [
1083
+ "eng-Latn",
1084
+ "san-Deva"
1085
+ ],
1086
+ "main_score": 0.9281176863925108,
1087
+ "precision": 0.9202607823470411,
1088
+ "recall": 0.9448345035105316
1089
+ },
1090
+ {
1091
+ "accuracy": 0.9949849548645938,
1092
+ "f1": 0.9939819458375125,
1093
+ "hf_subset": "awa-eng",
1094
+ "languages": [
1095
+ "awa-Deva",
1096
+ "eng-Latn"
1097
+ ],
1098
+ "main_score": 0.9939819458375125,
1099
+ "precision": 0.993480441323972,
1100
+ "recall": 0.9949849548645938
1101
+ },
1102
+ {
1103
+ "accuracy": 0.9879638916750251,
1104
+ "f1": 0.984620528251421,
1105
+ "hf_subset": "eng-awa",
1106
+ "languages": [
1107
+ "eng-Latn",
1108
+ "awa-Deva"
1109
+ ],
1110
+ "main_score": 0.984620528251421,
1111
+ "precision": 0.9829488465396189,
1112
+ "recall": 0.9879638916750251
1113
+ },
1114
+ {
1115
+ "accuracy": 0.9969909729187563,
1116
+ "f1": 0.995987963891675,
1117
+ "hf_subset": "bgc-eng",
1118
+ "languages": [
1119
+ "bgc-Deva",
1120
+ "eng-Latn"
1121
+ ],
1122
+ "main_score": 0.995987963891675,
1123
+ "precision": 0.9954864593781344,
1124
+ "recall": 0.9969909729187563
1125
+ },
1126
+ {
1127
+ "accuracy": 0.995987963891675,
1128
+ "f1": 0.9946506185222335,
1129
+ "hf_subset": "eng-bgc",
1130
+ "languages": [
1131
+ "eng-Latn",
1132
+ "bgc-Deva"
1133
+ ],
1134
+ "main_score": 0.9946506185222335,
1135
+ "precision": 0.9939819458375125,
1136
+ "recall": 0.995987963891675
1137
+ },
1138
+ {
1139
+ "accuracy": 0.0732196589769308,
1140
+ "f1": 0.06026291249608258,
1141
+ "hf_subset": "bod-eng",
1142
+ "languages": [
1143
+ "bod-Tibt",
1144
+ "eng-Latn"
1145
+ ],
1146
+ "main_score": 0.06026291249608258,
1147
+ "precision": 0.05650398384886611,
1148
+ "recall": 0.0732196589769308
1149
+ },
1150
+ {
1151
+ "accuracy": 0.13139418254764293,
1152
+ "f1": 0.07022316824922582,
1153
+ "hf_subset": "eng-bod",
1154
+ "languages": [
1155
+ "eng-Latn",
1156
+ "bod-Tibt"
1157
+ ],
1158
+ "main_score": 0.07022316824922582,
1159
+ "precision": 0.05841478343537769,
1160
+ "recall": 0.13139418254764293
1161
+ },
1162
+ {
1163
+ "accuracy": 0.35707121364092276,
1164
+ "f1": 0.308822316572886,
1165
+ "hf_subset": "boy-eng",
1166
+ "languages": [
1167
+ "boy-Deva",
1168
+ "eng-Latn"
1169
+ ],
1170
+ "main_score": 0.308822316572886,
1171
+ "precision": 0.2937931503431809,
1172
+ "recall": 0.35707121364092276
1173
+ },
1174
+ {
1175
+ "accuracy": 0.4012036108324975,
1176
+ "f1": 0.31562020753713477,
1177
+ "hf_subset": "eng-boy",
1178
+ "languages": [
1179
+ "eng-Latn",
1180
+ "boy-Deva"
1181
+ ],
1182
+ "main_score": 0.31562020753713477,
1183
+ "precision": 0.2865049513620226,
1184
+ "recall": 0.4012036108324975
1185
+ },
1186
+ {
1187
+ "accuracy": 0.9909729187562688,
1188
+ "f1": 0.9879638916750251,
1189
+ "hf_subset": "gbm-eng",
1190
+ "languages": [
1191
+ "gbm-Deva",
1192
+ "eng-Latn"
1193
+ ],
1194
+ "main_score": 0.9879638916750251,
1195
+ "precision": 0.9864593781344032,
1196
+ "recall": 0.9909729187562688
1197
+ },
1198
+ {
1199
+ "accuracy": 0.9859578736208626,
1200
+ "f1": 0.981444332998997,
1201
+ "hf_subset": "eng-gbm",
1202
+ "languages": [
1203
+ "eng-Latn",
1204
+ "gbm-Deva"
1205
+ ],
1206
+ "main_score": 0.981444332998997,
1207
+ "precision": 0.9792711467736542,
1208
+ "recall": 0.9859578736208626
1209
+ },
1210
+ {
1211
+ "accuracy": 0.905717151454363,
1212
+ "f1": 0.8821488274346849,
1213
+ "hf_subset": "gom-eng",
1214
+ "languages": [
1215
+ "gom-Deva",
1216
+ "eng-Latn"
1217
+ ],
1218
+ "main_score": 0.8821488274346849,
1219
+ "precision": 0.8722930696852461,
1220
+ "recall": 0.905717151454363
1221
+ },
1222
+ {
1223
+ "accuracy": 0.876629889669007,
1224
+ "f1": 0.8416248746238716,
1225
+ "hf_subset": "eng-gom",
1226
+ "languages": [
1227
+ "eng-Latn",
1228
+ "gom-Deva"
1229
+ ],
1230
+ "main_score": 0.8416248746238716,
1231
+ "precision": 0.8260113674356402,
1232
+ "recall": 0.876629889669007
1233
+ },
1234
+ {
1235
+ "accuracy": 0.9819458375125376,
1236
+ "f1": 0.9765964560347711,
1237
+ "hf_subset": "hne-eng",
1238
+ "languages": [
1239
+ "hne-Deva",
1240
+ "eng-Latn"
1241
+ ],
1242
+ "main_score": 0.9765964560347711,
1243
+ "precision": 0.9739217652958877,
1244
+ "recall": 0.9819458375125376
1245
+ },
1246
+ {
1247
+ "accuracy": 0.9729187562688064,
1248
+ "f1": 0.9656302240053493,
1249
+ "hf_subset": "eng-hne",
1250
+ "languages": [
1251
+ "eng-Latn",
1252
+ "hne-Deva"
1253
+ ],
1254
+ "main_score": 0.9656302240053493,
1255
+ "precision": 0.9623035773988633,
1256
+ "recall": 0.9729187562688064
1257
+ },
1258
+ {
1259
+ "accuracy": 0.9909729187562688,
1260
+ "f1": 0.9879638916750251,
1261
+ "hf_subset": "raj-eng",
1262
+ "languages": [
1263
+ "raj-Deva",
1264
+ "eng-Latn"
1265
+ ],
1266
+ "main_score": 0.9879638916750251,
1267
+ "precision": 0.9864593781344032,
1268
+ "recall": 0.9909729187562688
1269
+ },
1270
+ {
1271
+ "accuracy": 0.9879638916750251,
1272
+ "f1": 0.9839518555667001,
1273
+ "hf_subset": "eng-raj",
1274
+ "languages": [
1275
+ "eng-Latn",
1276
+ "raj-Deva"
1277
+ ],
1278
+ "main_score": 0.9839518555667001,
1279
+ "precision": 0.9819458375125376,
1280
+ "recall": 0.9879638916750251
1281
+ },
1282
+ {
1283
+ "accuracy": 1.0,
1284
+ "f1": 1.0,
1285
+ "hf_subset": "mai-eng",
1286
+ "languages": [
1287
+ "mai-Deva",
1288
+ "eng-Latn"
1289
+ ],
1290
+ "main_score": 1.0,
1291
+ "precision": 1.0,
1292
+ "recall": 1.0
1293
+ },
1294
+ {
1295
+ "accuracy": 0.9939819458375125,
1296
+ "f1": 0.9919759277833501,
1297
+ "hf_subset": "eng-mai",
1298
+ "languages": [
1299
+ "eng-Latn",
1300
+ "mai-Deva"
1301
+ ],
1302
+ "main_score": 0.9919759277833501,
1303
+ "precision": 0.9909729187562688,
1304
+ "recall": 0.9939819458375125
1305
+ },
1306
+ {
1307
+ "accuracy": 0.3069207622868606,
1308
+ "f1": 0.2679830795358813,
1309
+ "hf_subset": "mni-eng",
1310
+ "languages": [
1311
+ "mni-Mtei",
1312
+ "eng-Latn"
1313
+ ],
1314
+ "main_score": 0.2679830795358813,
1315
+ "precision": 0.2582923446771663,
1316
+ "recall": 0.3069207622868606
1317
+ },
1318
+ {
1319
+ "accuracy": 0.34904714142427284,
1320
+ "f1": 0.26233805654831893,
1321
+ "hf_subset": "eng-mni",
1322
+ "languages": [
1323
+ "eng-Latn",
1324
+ "mni-Mtei"
1325
+ ],
1326
+ "main_score": 0.26233805654831893,
1327
+ "precision": 0.23685423520959542,
1328
+ "recall": 0.34904714142427284
1329
+ },
1330
+ {
1331
+ "accuracy": 0.9929789368104313,
1332
+ "f1": 0.9909729187562688,
1333
+ "hf_subset": "mup-eng",
1334
+ "languages": [
1335
+ "mup-Deva",
1336
+ "eng-Latn"
1337
+ ],
1338
+ "main_score": 0.9909729187562688,
1339
+ "precision": 0.9899699097291875,
1340
+ "recall": 0.9929789368104313
1341
+ },
1342
+ {
1343
+ "accuracy": 0.9879638916750251,
1344
+ "f1": 0.9839518555667001,
1345
+ "hf_subset": "eng-mup",
1346
+ "languages": [
1347
+ "eng-Latn",
1348
+ "mup-Deva"
1349
+ ],
1350
+ "main_score": 0.9839518555667001,
1351
+ "precision": 0.9819458375125376,
1352
+ "recall": 0.9879638916750251
1353
+ },
1354
+ {
1355
+ "accuracy": 0.9929789368104313,
1356
+ "f1": 0.9906385824139085,
1357
+ "hf_subset": "mwr-eng",
1358
+ "languages": [
1359
+ "mwr-Deva",
1360
+ "eng-Latn"
1361
+ ],
1362
+ "main_score": 0.9906385824139085,
1363
+ "precision": 0.9894684052156469,
1364
+ "recall": 0.9929789368104313
1365
+ },
1366
+ {
1367
+ "accuracy": 0.9879638916750251,
1368
+ "f1": 0.9839518555667001,
1369
+ "hf_subset": "eng-mwr",
1370
+ "languages": [
1371
+ "eng-Latn",
1372
+ "mwr-Deva"
1373
+ ],
1374
+ "main_score": 0.9839518555667001,
1375
+ "precision": 0.9819458375125376,
1376
+ "recall": 0.9879638916750251
1377
+ },
1378
+ {
1379
+ "accuracy": 0.013039117352056168,
1380
+ "f1": 0.009495890130126436,
1381
+ "hf_subset": "sat-eng",
1382
+ "languages": [
1383
+ "sat-Olck",
1384
+ "eng-Latn"
1385
+ ],
1386
+ "main_score": 0.009495890130126436,
1387
+ "precision": 0.0090980559335168,
1388
+ "recall": 0.013039117352056168
1389
+ },
1390
+ {
1391
+ "accuracy": 0.020060180541624874,
1392
+ "f1": 0.0028915084938133354,
1393
+ "hf_subset": "eng-sat",
1394
+ "languages": [
1395
+ "eng-Latn",
1396
+ "sat-Olck"
1397
+ ],
1398
+ "main_score": 0.0028915084938133354,
1399
+ "precision": 0.0020248878206575094,
1400
+ "recall": 0.020060180541624874
1401
+ }
1402
+ ]
1403
+ },
1404
+ "task_name": "IndicGenBenchFloresBitextMining"
1405
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndicLangClassification.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "dataset_revision": "c54a95d9b9d62c891a03bd5da60715df7176b097",
+ "evaluation_time": 57.55413055419922,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.18.2",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.1804194884607798,
+ "f1": 0.19702314364696488,
+ "f1_weighted": 0.16222050261007115,
+ "hf_subset": "default",
+ "languages": [
+ "asm-Beng",
+ "brx-Deva",
+ "ben-Beng",
+ "doi-Deva",
+ "gom-Deva",
+ "guj-Gujr",
+ "hin-Deva",
+ "kan-Knda",
+ "kas-Arab",
+ "kas-Deva",
+ "mai-Deva",
+ "mal-Mlym",
+ "mar-Deva",
+ "mni-Beng",
+ "mni-Mtei",
+ "npi-Deva",
+ "ory-Orya",
+ "pan-Guru",
+ "san-Deva",
+ "sat-Olck",
+ "snd-Arab",
+ "tam-Taml",
+ "tel-Telu",
+ "urd-Arab"
+ ],
+ "main_score": 0.1804194884607798,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.17785521730554277,
+ "f1": 0.19221260852770583,
+ "f1_weighted": 0.15832381969517617
+ },
+ {
+ "accuracy": 0.1769347097113551,
+ "f1": 0.19128722428128592,
+ "f1_weighted": 0.16116978944499902
+ },
+ {
+ "accuracy": 0.17640870537181932,
+ "f1": 0.19419606544705806,
+ "f1_weighted": 0.1620260530518525
+ },
+ {
+ "accuracy": 0.17607995265960943,
+ "f1": 0.19964605562743418,
+ "f1_weighted": 0.16120565024033118
+ },
+ {
+ "accuracy": 0.18571240712735881,
+ "f1": 0.1989443679193546,
+ "f1_weighted": 0.16405120705789747
+ },
+ {
+ "accuracy": 0.1843645210072983,
+ "f1": 0.20295608352861838,
+ "f1_weighted": 0.165678592506518
+ },
+ {
+ "accuracy": 0.18972319021631928,
+ "f1": 0.20239960662382558,
+ "f1_weighted": 0.16842035119545296
+ },
+ {
+ "accuracy": 0.1835426392267736,
+ "f1": 0.19990390541185907,
+ "f1_weighted": 0.1655152530029549
+ },
+ {
+ "accuracy": 0.17315405352094154,
+ "f1": 0.19075772070499292,
+ "f1_weighted": 0.15493448154040562
+ },
+ {
+ "accuracy": 0.1804194884607798,
+ "f1": 0.19792779839751454,
+ "f1_weighted": 0.1608798283651236
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "IndicLangClassification"
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IndonesianIdClickbaitClassification.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "9fa4d0824015fe537ae2c8166781f5c79873da2c",
+ "evaluation_time": 4.125167608261108,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.18.2",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.5861328125,
+ "ap": 0.4744011695554763,
+ "ap_weighted": 0.4744011695554763,
+ "f1": 0.581498923939676,
+ "f1_weighted": 0.5863086773364465,
+ "hf_subset": "default",
+ "languages": [
+ "ind-Latn"
+ ],
+ "main_score": 0.581498923939676,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.64208984375,
+ "ap": 0.5070672534313982,
+ "ap_weighted": 0.5070672534313982,
+ "f1": 0.6316365088132616,
+ "f1_weighted": 0.6416353509266636
+ },
+ {
+ "accuracy": 0.48876953125,
+ "ap": 0.4218666216771405,
+ "ap_weighted": 0.4218666216771405,
+ "f1": 0.48858407411020316,
+ "f1_weighted": 0.48701482138884544
+ },
+ {
+ "accuracy": 0.56005859375,
+ "ap": 0.4462599354153949,
+ "ap_weighted": 0.4462599354153949,
+ "f1": 0.5485493745986116,
+ "f1_weighted": 0.5601641829165265
+ },
+ {
+ "accuracy": 0.5869140625,
+ "ap": 0.4762529698399197,
+ "ap_weighted": 0.4762529698399197,
+ "f1": 0.5864028722022764,
+ "f1_weighted": 0.5887458277335097
+ },
+ {
+ "accuracy": 0.625,
+ "ap": 0.5049051080330309,
+ "ap_weighted": 0.5049051080330309,
+ "f1": 0.6245861259093772,
+ "f1_weighted": 0.6265946325256344
+ },
+ {
+ "accuracy": 0.57080078125,
+ "ap": 0.465395428911193,
+ "ap_weighted": 0.465395428911193,
+ "f1": 0.570340933907583,
+ "f1_weighted": 0.5726058536538158
+ },
+ {
+ "accuracy": 0.6474609375,
+ "ap": 0.5161534465158583,
+ "ap_weighted": 0.5161534465158583,
+ "f1": 0.6427165849000437,
+ "f1_weighted": 0.6493506372643894
+ },
+ {
+ "accuracy": 0.59521484375,
+ "ap": 0.4903632132355194,
+ "ap_weighted": 0.4903632132355194,
+ "f1": 0.5941883908841348,
+ "f1_weighted": 0.590899755488644
+ },
+ {
+ "accuracy": 0.55859375,
+ "ap": 0.4433536532225051,
+ "ap_weighted": 0.4433536532225051,
+ "f1": 0.5437003561883411,
+ "f1_weighted": 0.5569836533717125
+ },
+ {
+ "accuracy": 0.58642578125,
+ "ap": 0.47239406527280303,
+ "ap_weighted": 0.47239406527280303,
+ "f1": 0.5842840178829269,
+ "f1_weighted": 0.5890920580947235
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "IndonesianIdClickbaitClassification"
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/IsiZuluNewsClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "55caf0e52693a1ea63b15a4980a73fc137fb862b",
+ "evaluation_time": 4.089668273925781,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.18.2",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.2081117021276596,
+ "f1": 0.15293472230091704,
+ "f1_weighted": 0.21298587620298584,
+ "hf_subset": "default",
+ "languages": [
+ "zul-Latn"
+ ],
+ "main_score": 0.2081117021276596,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.1901595744680851,
+ "f1": 0.14152598517288545,
+ "f1_weighted": 0.19228995522959413
+ },
+ {
+ "accuracy": 0.21941489361702127,
+ "f1": 0.16279991234868849,
+ "f1_weighted": 0.22128736585676084
+ },
+ {
+ "accuracy": 0.21941489361702127,
+ "f1": 0.15340949729860218,
+ "f1_weighted": 0.22777342081830212
+ },
+ {
+ "accuracy": 0.19281914893617022,
+ "f1": 0.14104435722725606,
+ "f1_weighted": 0.193518558558964
+ },
+ {
+ "accuracy": 0.19946808510638298,
+ "f1": 0.14951306948863874,
+ "f1_weighted": 0.208804402500431
+ },
+ {
+ "accuracy": 0.23803191489361702,
+ "f1": 0.1607681992524459,
+ "f1_weighted": 0.2504337430087765
+ },
+ {
+ "accuracy": 0.22340425531914893,
+ "f1": 0.16333084799802144,
+ "f1_weighted": 0.22869114189445786
+ },
+ {
+ "accuracy": 0.16888297872340424,
+ "f1": 0.13485767603822738,
+ "f1_weighted": 0.16588163154217023
+ },
+ {
+ "accuracy": 0.20877659574468085,
+ "f1": 0.15318006413803742,
+ "f1_weighted": 0.21366106328174927
+ },
+ {
+ "accuracy": 0.22074468085106383,
+ "f1": 0.1689176140463672,
+ "f1_weighted": 0.2275174793386524
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "IsiZuluNewsClassification"
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/ItaCaseholdClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "fafcfc4fee815f7017848e54b26c47ece8ff1626",
+ "evaluation_time": 84.54927706718445,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.18.2",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.6280542986425339,
+ "f1": 0.26729418514460834,
+ "f1_weighted": 0.5713464961681682,
+ "hf_subset": "default",
+ "languages": [
+ "ita-Latn"
+ ],
+ "main_score": 0.6280542986425339,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.6199095022624435,
+ "f1": 0.26462824540652924,
+ "f1_weighted": 0.5643705380698063
+ },
+ {
+ "accuracy": 0.6425339366515838,
+ "f1": 0.2659192068499133,
+ "f1_weighted": 0.5852604889590922
+ },
+ {
+ "accuracy": 0.6334841628959276,
+ "f1": 0.2632617517557449,
+ "f1_weighted": 0.5770045315024969
+ },
+ {
+ "accuracy": 0.6018099547511312,
+ "f1": 0.25420248154623154,
+ "f1_weighted": 0.5376413932975019
+ },
+ {
+ "accuracy": 0.6153846153846154,
+ "f1": 0.26107240261335085,
+ "f1_weighted": 0.5615861594875481
+ },
+ {
+ "accuracy": 0.6334841628959276,
+ "f1": 0.2722081095374147,
+ "f1_weighted": 0.5767253162284472
+ },
+ {
+ "accuracy": 0.6244343891402715,
+ "f1": 0.277954883291591,
+ "f1_weighted": 0.5689940420932709
+ },
+ {
+ "accuracy": 0.6289592760180995,
+ "f1": 0.2611167784202058,
+ "f1_weighted": 0.5750607283157276
+ },
+ {
+ "accuracy": 0.6289592760180995,
+ "f1": 0.2649554485787943,
+ "f1_weighted": 0.569311612562191
+ },
+ {
+ "accuracy": 0.6515837104072398,
+ "f1": 0.28762254344630744,
+ "f1_weighted": 0.5975101511655997
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "ItaCaseholdClassification"
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/JSICK.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "dataset_revision": "e4af6c73182bebb41d94cb336846e5a452454ea7",
+ "evaluation_time": 2.1676719188690186,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.18.2",
+ "scores": {
+ "test": [
+ {
+ "cosine_pearson": 0.8063508574783445,
+ "cosine_spearman": 0.7816101828927973,
+ "euclidean_pearson": 0.8174023692991947,
+ "euclidean_spearman": 0.7815307184655915,
+ "hf_subset": "default",
+ "languages": [
+ "jpn-Jpan"
+ ],
+ "main_score": 0.7816101828927973,
+ "manhattan_pearson": 0.816511561670854,
+ "manhattan_spearman": 0.7807349943946912,
+ "pearson": 0.8063508574783445,
+ "spearman": 0.7816101828927973
+ }
+ ]
+ },
+ "task_name": "JSICK"
+ }
results/jinaai__jina-embeddings-v3/215a6e121fa0183376388ac6b1ae230326bfeaed/KorHateSpeechMLClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "c657d15baf277c48d467f0625f7d33c50d4352ef",
+ "evaluation_time": 5.570185661315918,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.18.2",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.10574374079528719,
+ "f1": 0.23781790325759516,
+ "hf_subset": "default",
+ "languages": [
+ "kor-Hang"
+ ],
+ "lrap": 0.2436917034855103,
+ "main_score": 0.10574374079528719,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.08394698085419734,
+ "f1": 0.229483297662714,
+ "lrap": 0.22533136966125966
+ },
+ {
+ "accuracy": 0.10112911143838979,
+ "f1": 0.24992257183350178,
+ "lrap": 0.23849069983089838
+ },
+ {
+ "accuracy": 0.06480117820324006,
+ "f1": 0.20919647904193378,
+ "lrap": 0.19988545246276643
+ },
+ {
+ "accuracy": 0.09916543937162493,
+ "f1": 0.2257088433751543,
+ "lrap": 0.24671357661047608
+ },
+ {
+ "accuracy": 0.17722140402552775,
+ "f1": 0.26664667935366293,
+ "lrap": 0.3126329569628437
+ },
+ {
+ "accuracy": 0.10358370152184586,
+ "f1": 0.26144279718942515,
+ "lrap": 0.23970435826105152
+ },
+ {
+ "accuracy": 0.1212567501227295,
+ "f1": 0.23960473333191754,
+ "lrap": 0.25864561173838646
+ },
+ {
+ "accuracy": 0.1055473735886107,
+ "f1": 0.2508844949871627,
+ "lrap": 0.23765886652483853
+ },
+ {
+ "accuracy": 0.08296514482081492,
+ "f1": 0.19396678332012404,
+ "lrap": 0.21804941908034037
+ },
+ {
+ "accuracy": 0.11782032400589101,
+ "f1": 0.25132235248035506,
+ "lrap": 0.2598047237222414
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "KorHateSpeechMLClassification"
+ }