qMTEB/results/optimum/all-MiniLM-L6-v2-q8/TwitterSemEval2015.json
{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "mteb_dataset_name": "TwitterSemEval2015",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.8393038087858378,
      "accuracy_threshold": 0.7425773739814758,
      "ap": 0.6783801443900392,
      "f1": 0.638964096527369,
      "f1_threshold": 0.6870745420455933,
      "precision": 0.5768331562167907,
      "recall": 0.7160949868073878
    },
    "dot": {
      "accuracy": 0.7894140787983549,
      "accuracy_threshold": 29.843990325927734,
      "ap": 0.48171195666313793,
      "f1": 0.5091934809862098,
      "f1_threshold": 21.281982421875,
      "precision": 0.4214804565894154,
      "recall": 0.6430079155672823
    },
    "euclidean": {
      "accuracy": 0.8322703701496096,
      "accuracy_threshold": 3.9689555168151855,
      "ap": 0.6530047778986019,
      "f1": 0.6224239450441609,
      "f1_threshold": 4.39756965637207,
      "precision": 0.5816139385602934,
      "recall": 0.6693931398416887
    },
    "evaluation_time": 15.41,
    "manhattan": {
      "accuracy": 0.8324491863861239,
      "accuracy_threshold": 61.23781204223633,
      "ap": 0.6517381509783486,
      "f1": 0.622907699665232,
      "f1_threshold": 69.10938262939453,
      "precision": 0.5695233930913861,
      "recall": 0.6873350923482849
    },
    "max": {
      "accuracy": 0.8393038087858378,
      "ap": 0.6783801443900392,
      "f1": 0.638964096527369
    }
  }
}