{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "mteb_dataset_name": "TwitterSemEval2015",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.8564105620790368,
      "accuracy_threshold": 0.9224622845649719,
      "ap": 0.7246544507254236,
      "f1": 0.6796572367648784,
      "f1_threshold": 0.9091762900352478,
      "precision": 0.6121801649397336,
      "recall": 0.7638522427440633
    },
    "dot": {
      "accuracy": 0.8564105620790368,
      "accuracy_threshold": 0.9224622249603271,
      "ap": 0.7246545978977025,
      "f1": 0.6796572367648784,
      "f1_threshold": 0.9091762900352478,
      "precision": 0.6121801649397336,
      "recall": 0.7638522427440633
    },
    "euclidean": {
      "accuracy": 0.8564105620790368,
      "accuracy_threshold": 0.3937961757183075,
      "ap": 0.7246547648505838,
      "f1": 0.6796572367648784,
      "f1_threshold": 0.42620110511779785,
      "precision": 0.6121801649397336,
      "recall": 0.7638522427440633
    },
    "evaluation_time": 28.44,
    "manhattan": {
      "accuracy": 0.8567085891398939,
      "accuracy_threshold": 8.628108024597168,
      "ap": 0.7236803599101469,
      "f1": 0.6801418439716312,
      "f1_threshold": 9.374834060668945,
      "precision": 0.6160599571734475,
      "recall": 0.7591029023746702
    },
    "max": {
      "accuracy": 0.8567085891398939,
      "ap": 0.7246547648505838,
      "f1": 0.6801418439716312
    }
  }
}