qMTEB/results/optimum/all-MiniLM-L6-v2-q8/TwitterURLCorpus.json
{
    "dataset_revision": "8b6510b0b1fa4e4c4f879467980e9be563ec1cdf",
    "mteb_dataset_name": "TwitterURLCorpus",
    "mteb_version": "1.1.0",
    "test": {
        "cos_sim": {
            "accuracy": 0.8822913028292001,
            "accuracy_threshold": 0.6609122157096863,
            "ap": 0.846976901109128,
            "f1": 0.7661019479065602,
            "f1_threshold": 0.6262260675430298,
            "precision": 0.7433019551049964,
            "recall": 0.7903449337850323
        },
        "dot": {
            "accuracy": 0.8514960996623588,
            "accuracy_threshold": 13.692197799682617,
            "ap": 0.7666287617149927,
            "f1": 0.7092627336529774,
            "f1_threshold": 11.69295597076416,
            "precision": 0.64625807268583,
            "recall": 0.7858792731752386
        },
        "euclidean": {
            "accuracy": 0.8575697597702487,
            "accuracy_threshold": 3.5195302963256836,
            "ap": 0.7850038350910886,
            "f1": 0.7016092645510604,
            "f1_threshold": 3.8645129203796387,
            "precision": 0.685626102292769,
            "recall": 0.7183554049892208
        },
        "evaluation_time": 70.05,
        "manhattan": {
            "accuracy": 0.8574533317809602,
            "accuracy_threshold": 55.26979064941406,
            "ap": 0.7843165101218974,
            "f1": 0.7006873879258816,
            "f1_threshold": 60.24797058105469,
            "precision": 0.6805515239477503,
            "recall": 0.7220511241145673
        },
        "max": {
            "accuracy": 0.8822913028292001,
            "ap": 0.846976901109128,
            "f1": 0.7661019479065602
        }
    }
}