{
  "dataset_revision": "d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46",
  "mteb_dataset_name": "SprintDuplicateQuestions",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.9978514851485148,
      "accuracy_threshold": 0.7130525708198547,
      "ap": 0.9455063045792448,
      "f1": 0.890126582278481,
      "f1_threshold": 0.7130525708198547,
      "precision": 0.9015384615384615,
      "recall": 0.879
    },
    "dot": {
      "accuracy": 0.9949405940594059,
      "accuracy_threshold": 22.973613739013672,
      "ap": 0.807591492776382,
      "f1": 0.7377605428986913,
      "f1_threshold": 21.528966903686523,
      "precision": 0.715898400752587,
      "recall": 0.761
    },
    "euclidean": {
      "accuracy": 0.9975247524752475,
      "accuracy_threshold": 4.005368232727051,
      "ap": 0.922948863946992,
      "f1": 0.8700155359917141,
      "f1_threshold": 4.0734686851501465,
      "precision": 0.9022556390977443,
      "recall": 0.84
    },
    "evaluation_time": 13.06,
    "manhattan": {
      "accuracy": 0.9975247524752475,
      "accuracy_threshold": 62.37427520751953,
      "ap": 0.9235450475118805,
      "f1": 0.8698347107438016,
      "f1_threshold": 63.49694061279297,
      "precision": 0.8995726495726496,
      "recall": 0.842
    },
    "max": {
      "accuracy": 0.9978514851485148,
      "ap": 0.9455063045792448,
      "f1": 0.890126582278481
    }
  }
}