IlyasMoutawwakil (HF staff) committed
Commit b3fb660
1 Parent(s): df03c6e

Upload cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json with huggingface_hub

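For context, a report like this is typically pushed to the Hub with huggingface_hub's upload_file; the sketch below shows the general pattern. The repo_id, repo_type, and local path are assumptions for illustration, not the exact values used for this commit.

from huggingface_hub import HfApi

api = HfApi()  # picks up the locally saved Hugging Face token by default
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run
    path_in_repo="cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json",
    repo_id="some-user/benchmark-results",    # hypothetical target repo
    repo_type="dataset",                      # assumed; adjust to the actual repo type
    commit_message="Upload benchmark_report.json with huggingface_hub",
)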
cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram": 969.551872,
+            "max_ram": 970.899456,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -11,15 +11,15 @@
         "latency": {
             "unit": "s",
             "values": [
-                1.3108094489999758
+                1.317152555000007
             ],
             "count": 1,
-            "total": 1.3108094489999758,
-            "mean": 1.3108094489999758,
-            "p50": 1.3108094489999758,
-            "p90": 1.3108094489999758,
-            "p95": 1.3108094489999758,
-            "p99": 1.3108094489999758,
+            "total": 1.317152555000007,
+            "mean": 1.317152555000007,
+            "p50": 1.317152555000007,
+            "p90": 1.317152555000007,
+            "p95": 1.317152555000007,
+            "p99": 1.317152555000007,
             "stdev": 0,
             "stdev_": 0
         },
@@ -30,7 +30,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 883.56864,
+            "max_ram": 884.785152,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -39,53 +39,52 @@
         "latency": {
             "unit": "s",
             "values": [
-                0.04695357100001729,
-                0.04646406699998806,
-                0.04684953700001415,
-                0.04711026399996854,
-                0.04667652299997371,
-                0.04601731199994674,
-                0.04632260099998575,
-                0.04702613699998892,
-                0.046936539999990146,
-                0.04667279699998517,
-                0.04643505199999254,
-                0.046651716999974724,
-                0.045891986999947676,
-                0.046976865000033285,
-                0.046330748000002586,
-                0.0483452319999742,
-                0.0453818440000191,
-                0.04562795399999686,
-                0.04054750400001694,
-                0.04584435900000017,
-                0.04337437200001659,
-                0.0396962789999975
+                0.04893034999997781,
+                0.04933273299999996,
+                0.04940153199999031,
+                0.048061482999997907,
+                0.04845851599998241,
+                0.048120995000004996,
+                0.04769704099999217,
+                0.0480179509999914,
+                0.04878281299997411,
+                0.048490686000008054,
+                0.047670457999998916,
+                0.04906001599999854,
+                0.04761642400001165,
+                0.04878908899999601,
+                0.04808342800001242,
+                0.04883948299999474,
+                0.049956234000006816,
+                0.04861369400001081,
+                0.0438475379999943,
+                0.04385777799998891,
+                0.04458401700000536
             ],
-            "count": 22,
-            "total": 1.0081332619998307,
-            "mean": 0.045824239181810486,
-            "p50": 0.0464495594999903,
-            "p90": 0.04702120979999336,
-            "p95": 0.04710605764996956,
-            "p99": 0.04808588871997301,
-            "stdev": 0.0020162853560809872,
-            "stdev_": 4.400041096331684
+            "count": 21,
+            "total": 1.0062122589999376,
+            "mean": 0.047914869476187505,
+            "p50": 0.04845851599998241,
+            "p90": 0.04933273299999996,
+            "p95": 0.04940153199999031,
+            "p99": 0.04984529360000352,
+            "stdev": 0.0016706889424348956,
+            "stdev_": 3.4867859616422128
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 21.822511794084317
+            "value": 20.87034799284959
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.611146240666674e-06,
-            "ram": 6.736782486130485e-08,
+            "cpu": 1.7173942265700344e-06,
+            "ram": 7.181181576903356e-08,
             "gpu": 0.0,
-            "total": 1.678514065527979e-06
+            "total": 1.789206042339068e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 595765.0403635126
+            "value": 558907.1221180755
         }
     }
 }
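The aggregate latency fields appear to be simple aggregations of the raw values (e.g. mean = total / count, throughput = count / total). The short check below, which assumes the report structure shown in the diff and an illustrative file path, reproduces the new mean and throughput.

import json

# Sanity-check sketch: recompute a few derived fields from the raw latencies.
# Assumes the JSON structure shown in the diff; the path is illustrative.
with open("benchmark_report.json") as f:
    report = json.load(f)

latency = report["forward"]["latency"]
values = latency["values"]

count = len(values)          # 21 in the new report
total = sum(values)          # ~1.00621 s
mean = total / count         # ~0.047915 s, matches the reported "mean"

# Throughput looks like count / total latency, in samples per second
# (~20.87, matching "throughput.value" for the new values):
throughput = count / total

print(count == latency["count"], abs(mean - latency["mean"]) < 1e-12)
print(abs(throughput - report["forward"]["throughput"]["value"]) < 1e-9)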