{
  "dataset_revision": "8b6510b0b1fa4e4c4f879467980e9be563ec1cdf",
  "mteb_dataset_name": "TwitterURLCorpus",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.882504754142896,
      "accuracy_threshold": 0.6595448851585388,
      "ap": 0.8470165722410063,
      "f1": 0.7657057281916886,
      "f1_threshold": 0.6311963796615601,
      "precision": 0.745226643346451,
      "recall": 0.7873421619956883
    },
    "dot": {
      "accuracy": 0.8516125276516474,
      "accuracy_threshold": 14.463157653808594,
      "ap": 0.7668983860779237,
      "f1": 0.7101665954720207,
      "f1_threshold": 12.37724494934082,
      "precision": 0.6604210805084746,
      "recall": 0.7680166307360641
    },
    "euclidean": {
      "accuracy": 0.8597236775720883,
      "accuracy_threshold": 3.5921688079833984,
      "ap": 0.7900028133299262,
      "f1": 0.7074245510090724,
      "f1_threshold": 3.922621488571167,
      "precision": 0.6814582292930014,
      "recall": 0.7354481059439483
    },
    "evaluation_time": 84.53,
    "manhattan": {
      "accuracy": 0.8594132029339854,
      "accuracy_threshold": 55.53254699707031,
      "ap": 0.7894101286308219,
      "f1": 0.7058084540348802,
      "f1_threshold": 60.99834442138672,
      "precision": 0.6785308326229043,
      "recall": 0.7353711117955035
    },
    "max": {
      "accuracy": 0.882504754142896,
      "ap": 0.8470165722410063,
      "f1": 0.7657057281916886
    }
  }
}