{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "mteb_dataset_name": "TwitterSemEval2015",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.841151576563152,
      "accuracy_threshold": 0.7600052356719971,
      "ap": 0.6785801239192376,
      "f1": 0.6408006919560113,
      "f1_threshold": 0.7039604187011719,
      "precision": 0.602602835231234,
      "recall": 0.6841688654353562
    },
    "dot": {
      "accuracy": 0.7892352625618406,
      "accuracy_threshold": 28.787059783935547,
      "ap": 0.4828865284643087,
      "f1": 0.5088373919874313,
      "f1_threshold": 20.959426879882812,
      "precision": 0.40522364716922116,
      "recall": 0.683641160949868
    },
    "euclidean": {
      "accuracy": 0.833581689217381,
      "accuracy_threshold": 4.012348651885986,
      "ap": 0.656114001142199,
      "f1": 0.6250154340041981,
      "f1_threshold": 4.361933708190918,
      "precision": 0.5873752610814574,
      "recall": 0.6678100263852242
    },
    "evaluation_time": 18.32,
    "manhattan": {
      "accuracy": 0.8338797162782381,
      "accuracy_threshold": 59.877410888671875,
      "ap": 0.6546088131355778,
      "f1": 0.6246687545169839,
      "f1_threshold": 68.5105972290039,
      "precision": 0.5746897163120568,
      "recall": 0.6841688654353562
    },
    "max": {
      "accuracy": 0.841151576563152,
      "ap": 0.6785801239192376,
      "f1": 0.6408006919560113
    }
  }
}