qMTEB/results/all-MiniLM-L6-v2/TwitterSemEval2015.json
{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "mteb_dataset_name": "TwitterSemEval2015",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.841151576563152,
      "accuracy_threshold": 0.760004997253418,
      "ap": 0.6785804277968714,
      "f1": 0.6408006919560113,
      "f1_threshold": 0.7039602994918823,
      "precision": 0.602602835231234,
      "recall": 0.6841688654353562
    },
    "dot": {
      "accuracy": 0.841151576563152,
      "accuracy_threshold": 0.7600049376487732,
      "ap": 0.6785805581908406,
      "f1": 0.6408006919560113,
      "f1_threshold": 0.7039604187011719,
      "precision": 0.602602835231234,
      "recall": 0.6841688654353562
    },
    "euclidean": {
      "accuracy": 0.841151576563152,
      "accuracy_threshold": 0.6928131580352783,
      "ap": 0.678580330833871,
      "f1": 0.6408006919560113,
      "f1_threshold": 0.7694668769836426,
      "precision": 0.602602835231234,
      "recall": 0.6841688654353562
    },
    "evaluation_time": 8.91,
    "manhattan": {
      "accuracy": 0.8396614412588663,
      "accuracy_threshold": 10.996787071228027,
      "ap": 0.6766935756595975,
      "f1": 0.6382363570654138,
      "f1_threshold": 12.064617156982422,
      "precision": 0.5872312125914432,
      "recall": 0.6989445910290237
    },
    "max": {
      "accuracy": 0.841151576563152,
      "ap": 0.6785805581908406,
      "f1": 0.6408006919560113
    }
  }
}