{
  "dataset_revision": "8b6510b0b1fa4e4c4f879467980e9be563ec1cdf",
  "mteb_dataset_name": "TwitterURLCorpus",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.8822913028292001,
      "accuracy_threshold": 0.6609122157096863,
      "ap": 0.846976901109128,
      "f1": 0.7661019479065602,
      "f1_threshold": 0.6262260675430298,
      "precision": 0.7433019551049964,
      "recall": 0.7903449337850323
    },
    "dot": {
      "accuracy": 0.8514960996623588,
      "accuracy_threshold": 13.692197799682617,
      "ap": 0.7666287617149927,
      "f1": 0.7092627336529774,
      "f1_threshold": 11.69295597076416,
      "precision": 0.64625807268583,
      "recall": 0.7858792731752386
    },
    "euclidean": {
      "accuracy": 0.8575697597702487,
      "accuracy_threshold": 3.5195302963256836,
      "ap": 0.7850038350910886,
      "f1": 0.7016092645510604,
      "f1_threshold": 3.8645129203796387,
      "precision": 0.685626102292769,
      "recall": 0.7183554049892208
    },
    "evaluation_time": 70.05,
    "manhattan": {
      "accuracy": 0.8574533317809602,
      "accuracy_threshold": 55.26979064941406,
      "ap": 0.7843165101218974,
      "f1": 0.7006873879258816,
      "f1_threshold": 60.24797058105469,
      "precision": 0.6805515239477503,
      "recall": 0.7220511241145673
    },
    "max": {
      "accuracy": 0.8822913028292001,
      "ap": 0.846976901109128,
      "f1": 0.7661019479065602
    }
  }
}