{
  "dataset_revision": "8b6510b0b1fa4e4c4f879467980e9be563ec1cdf",
  "mteb_dataset_name": "TwitterURLCorpus",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.882504754142896,
      "accuracy_threshold": 0.6595447659492493,
      "ap": 0.8470165834383896,
      "f1": 0.7657057281916886,
      "f1_threshold": 0.6311962008476257,
      "precision": 0.745226643346451,
      "recall": 0.7873421619956883
    },
    "dot": {
      "accuracy": 0.882504754142896,
      "accuracy_threshold": 0.6595448851585388,
      "ap": 0.8470166526011678,
      "f1": 0.7657057281916886,
      "f1_threshold": 0.6311962604522705,
      "precision": 0.745226643346451,
      "recall": 0.7873421619956883
    },
    "euclidean": {
      "accuracy": 0.882504754142896,
      "accuracy_threshold": 0.8251729607582092,
      "ap": 0.847016568342695,
      "f1": 0.7657057281916886,
      "f1_threshold": 0.858840823173523,
      "precision": 0.745226643346451,
      "recall": 0.7873421619956883
    },
    "evaluation_time": 35.6,
    "manhattan": {
      "accuracy": 0.8827376101214732,
      "accuracy_threshold": 12.611679077148438,
      "ap": 0.846351848497952,
      "f1": 0.7655138674594514,
      "f1_threshold": 13.285348892211914,
      "precision": 0.7486934118513066,
      "recall": 0.7831074838312289
    },
    "max": {
      "accuracy": 0.8827376101214732,
      "ap": 0.8470166526011678,
      "f1": 0.7657057281916886
    }
  }
}