IlyasMoutawwakil (HF staff) committed
Commit f89b4f1
1 Parent(s): ae8c145

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
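For context, the commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of how such an upload is typically done with `HfApi.upload_file`; the `repo_id` and `repo_type` below are illustrative assumptions, not taken from this commit:

```python
# Sketch: uploading a benchmark report with huggingface_hub.
# repo_id and repo_type are hypothetical placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or HF_TOKEN
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local file produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<namespace>/<repo>",   # hypothetical target repository
    repo_type="dataset",            # assumption: benchmark results are commonly stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)
```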

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 939.892736,
+            "max_ram": 939.925504,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,55 +10,54 @@
         },
         "latency": {
             "unit": "s",
-            "count": 24,
-            "total": 1.022657620000075,
-            "mean": 0.04261073416666979,
-            "stdev": 0.002861229358891129,
-            "p50": 0.040853150999993204,
-            "p90": 0.046799132199981844,
-            "p95": 0.04704138175001162,
-            "p99": 0.047262947879991656,
+            "count": 23,
+            "total": 1.0351620550000007,
+            "mean": 0.04500704586956525,
+            "stdev": 0.0012941250724443226,
+            "p50": 0.045107674999997016,
+            "p90": 0.04621405299999424,
+            "p95": 0.046501650400017525,
+            "p99": 0.04667339030001131,
             "values": [
-                0.04124756200002366,
-                0.04093591999998125,
-                0.040770382000005156,
-                0.04051992700004803,
-                0.04035733099999561,
-                0.04017004099995347,
-                0.04030672700002924,
-                0.04025913700002093,
-                0.04011221100000739,
-                0.03981966799995007,
-                0.0404108600000086,
-                0.04054646299999831,
-                0.0405665310000245,
-                0.04312512600000673,
-                0.0473213089999831,
-                0.04591253799998185,
-                0.047067565000020295,
-                0.04689300999996249,
-                0.04600565200001938,
-                0.046580084000027,
-                0.04619669900000645,
-                0.04626443500001187,
-                0.04163718899997093,
-                0.03963125300003867
+                0.046529258000020945,
+                0.04491606699997419,
+                0.04625318199998674,
+                0.04361633099995288,
+                0.04573945399999957,
+                0.045107856000015545,
+                0.044998050000003786,
+                0.04541613000003508,
+                0.04304776999998694,
+                0.04605753700002424,
+                0.04545774700000038,
+                0.04506622700000662,
+                0.045107674999997016,
+                0.04538389999999026,
+                0.04418631499999037,
+                0.04458424800003513,
+                0.04596036599997433,
+                0.04482587799998328,
+                0.044786404999968,
+                0.04671404300000859,
+                0.04591843800000106,
+                0.045075625000038144,
+                0.040413553000007596
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 23.46826497024316
+            "value": 22.21874332516949
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5145490312168743e-06,
-            "ram": 6.329570398195417e-08,
+            "cpu": 1.4947912820572717e-06,
+            "ram": 6.246708929190255e-08,
             "gpu": 0.0,
-            "total": 1.5778447351988286e-06
+            "total": 1.5572583713491743e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 633775.9208443201
+            "value": 642154.1976580431
         }
     }
 }
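The derived numbers in both versions of the report are internally consistent: throughput equals count / total latency (23 / 1.0351620550000007 ≈ 22.2187 samples/s) and, in this file, efficiency works out to 1 / the reported total energy (1 / 1.5572583713491743e-06 ≈ 642154.2 samples/kWh), which suggests the energy figures are per sample. A small sketch to sanity-check a downloaded report; the file path is illustrative:

```python
# Sanity-check the derived metrics in a benchmark_report.json.
# Assumes the structure shown in the diff above; the file path is a placeholder.
import json

with open("benchmark_report.json") as f:
    report = json.load(f)

fwd = report["forward"]
count = fwd["latency"]["count"]
total_s = fwd["latency"]["total"]
total_kwh = fwd["energy"]["total"]

throughput = count / total_s   # samples/s, should match fwd["throughput"]["value"]
efficiency = 1.0 / total_kwh   # samples/kWh, matches fwd["efficiency"]["value"] in this report

print(f"throughput ~= {throughput:.8f} samples/s")
print(f"efficiency ~= {efficiency:.4f} samples/kWh")
```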