{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "mteb_dataset_name": "TwitterSemEval2015",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.8402574953805806,
      "accuracy_threshold": 0.7548823356628418,
      "ap": 0.6782973844761395,
      "f1": 0.6368255504476167,
      "f1_threshold": 0.6944434642791748,
      "precision": 0.5880250223413762,
      "recall": 0.6944591029023747
    },
    "dot": {
      "accuracy": 0.7901889491565834,
      "accuracy_threshold": 30.103286743164062,
      "ap": 0.482856775311342,
      "f1": 0.5083665338645418,
      "f1_threshold": 20.793228149414062,
      "precision": 0.40832,
      "recall": 0.6733509234828496
    },
    "euclidean": {
      "accuracy": 0.8328068188591524,
      "accuracy_threshold": 3.928983211517334,
      "ap": 0.6539558962841057,
      "f1": 0.6220163876024225,
      "f1_threshold": 4.456747055053711,
      "precision": 0.5655366011660549,
      "recall": 0.6910290237467018
    },
    "evaluation_time": 15.28,
    "manhattan": {
      "accuracy": 0.8323895809739524,
      "accuracy_threshold": 60.54985046386719,
      "ap": 0.652800674337047,
      "f1": 0.6223784701175901,
      "f1_threshold": 68.62921142578125,
      "precision": 0.5756896165059431,
      "recall": 0.6773087071240106
    },
    "max": {
      "accuracy": 0.8402574953805806,
      "ap": 0.6782973844761395,
      "f1": 0.6368255504476167
    }
  }
}