{
  "dataset_revision": "8b6510b0b1fa4e4c4f879467980e9be563ec1cdf",
  "mteb_dataset_name": "TwitterURLCorpus",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.887297706368611,
      "accuracy_threshold": 0.8953593969345093,
      "ap": 0.8551813671848673,
      "f1": 0.7795627934272301,
      "f1_threshold": 0.8873952627182007,
      "precision": 0.744396189408798,
      "recall": 0.8182168155220203
    },
    "dot": {
      "accuracy": 0.887297706368611,
      "accuracy_threshold": 0.8953593969345093,
      "ap": 0.8551813435958813,
      "f1": 0.7795627934272301,
      "f1_threshold": 0.8873952627182007,
      "precision": 0.744396189408798,
      "recall": 0.8182168155220203
    },
    "euclidean": {
      "accuracy": 0.887297706368611,
      "accuracy_threshold": 0.457472562789917,
      "ap": 0.8551813732980336,
      "f1": 0.7795627934272301,
      "f1_threshold": 0.47456246614456177,
      "precision": 0.744396189408798,
      "recall": 0.8182168155220203
    },
    "evaluation_time": 131.42,
    "manhattan": {
      "accuracy": 0.8869290177358637,
      "accuracy_threshold": 10.028573989868164,
      "ap": 0.8549390135903003,
      "f1": 0.7796471983397568,
      "f1_threshold": 10.410341262817383,
      "precision": 0.751571877679337,
      "recall": 0.8099014474899907
    },
    "max": {
      "accuracy": 0.887297706368611,
      "ap": 0.8551813732980336,
      "f1": 0.7796471983397568
    }
  }
}