{
  "dataset_revision": "8b6510b0b1fa4e4c4f879467980e9be563ec1cdf",
  "mteb_dataset_name": "TwitterURLCorpus",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.8826793961268289,
      "accuracy_threshold": 0.674436092376709,
      "ap": 0.8468494238463535,
      "f1": 0.7654432523997741,
      "f1_threshold": 0.6305196285247803,
      "precision": 0.7488399499152979,
      "recall": 0.78279950723745
    },
    "dot": {
      "accuracy": 0.8508363410563899,
      "accuracy_threshold": 13.469935417175293,
      "ap": 0.7659922234196515,
      "f1": 0.7114903218982477,
      "f1_threshold": 11.886985778808594,
      "precision": 0.656260162601626,
      "recall": 0.7768709578072066
    },
    "euclidean": {
      "accuracy": 0.8572398804672643,
      "accuracy_threshold": 3.583315849304199,
      "ap": 0.7846270644934776,
      "f1": 0.7020438498699368,
      "f1_threshold": 3.890807628631592,
      "precision": 0.6784944691854619,
      "recall": 0.7272867262088081
    },
    "evaluation_time": 69.57,
    "manhattan": {
      "accuracy": 0.8571234524779757,
      "accuracy_threshold": 55.01682662963867,
      "ap": 0.7838805708871157,
      "f1": 0.7009824375071123,
      "f1_threshold": 59.76050567626953,
      "precision": 0.6908411214953271,
      "recall": 0.7114259316291962
    },
    "max": {
      "accuracy": 0.8826793961268289,
      "ap": 0.8468494238463535,
      "f1": 0.7654432523997741
    }
  }
}