qMTEB / results / gte-base / SprintDuplicateQuestions.json
{
"dataset_revision": "d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46",
"mteb_dataset_name": "SprintDuplicateQuestions",
"mteb_version": "1.1.0",
"test": {
"cos_sim": {
"accuracy": 0.9982178217821782,
"accuracy_threshold": 0.9155436158180237,
"ap": 0.9571220118360753,
"f1": 0.9073120494335737,
"f1_threshold": 0.9155436158180237,
"precision": 0.935244161358811,
"recall": 0.881
},
"dot": {
"accuracy": 0.9982178217821782,
"accuracy_threshold": 0.9155436754226685,
"ap": 0.9571219977317563,
"f1": 0.9073120494335737,
"f1_threshold": 0.9155436754226685,
"precision": 0.935244161358811,
"recall": 0.881
},
"euclidean": {
"accuracy": 0.9982178217821782,
"accuracy_threshold": 0.4109899401664734,
"ap": 0.9571219793104613,
"f1": 0.9073120494335737,
"f1_threshold": 0.4109899401664734,
"precision": 0.935244161358811,
"recall": 0.881
},
"evaluation_time": 30.77,
"manhattan": {
"accuracy": 0.9982178217821782,
"accuracy_threshold": 8.906990051269531,
"ap": 0.956972725802545,
"f1": 0.9077001529831717,
"f1_threshold": 9.168195724487305,
"precision": 0.9261186264308012,
"recall": 0.89
},
"max": {
"accuracy": 0.9982178217821782,
"ap": 0.9571220118360753,
"f1": 0.9077001529831717
}
}
}