{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "mteb_dataset_name": "TwitterSemEval2015",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.8564105620790368,
      "accuracy_threshold": 0.9224622845649719,
      "ap": 0.7246544507254236,
      "f1": 0.6796572367648784,
      "f1_threshold": 0.9091762900352478,
      "precision": 0.6121801649397336,
      "recall": 0.7638522427440633
    },
    "dot": {
      "accuracy": 0.8564105620790368,
      "accuracy_threshold": 0.9224622249603271,
      "ap": 0.7246545978977025,
      "f1": 0.6796572367648784,
      "f1_threshold": 0.9091762900352478,
      "precision": 0.6121801649397336,
      "recall": 0.7638522427440633
    },
    "euclidean": {
      "accuracy": 0.8564105620790368,
      "accuracy_threshold": 0.3937961757183075,
      "ap": 0.7246547648505838,
      "f1": 0.6796572367648784,
      "f1_threshold": 0.42620110511779785,
      "precision": 0.6121801649397336,
      "recall": 0.7638522427440633
    },
    "evaluation_time": 28.44,
    "manhattan": {
      "accuracy": 0.8567085891398939,
      "accuracy_threshold": 8.628108024597168,
      "ap": 0.7236803599101469,
      "f1": 0.6801418439716312,
      "f1_threshold": 9.374834060668945,
      "precision": 0.6160599571734475,
      "recall": 0.7591029023746702
    },
    "max": {
      "accuracy": 0.8567085891398939,
      "ap": 0.7246547648505838,
      "f1": 0.6801418439716312
    }
  }
}