IlyasMoutawwakil (HF staff) committed
Commit 8271702 (1 parent: 9f7c417)

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
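The commit message indicates the report was pushed programmatically with `huggingface_hub`. A minimal sketch of such an upload, assuming the `HfApi.upload_file` helper and a hypothetical `repo_id` (the actual target repository is not shown in this commit view):

```python
from huggingface_hub import HfApi

api = HfApi()

# Hypothetical target repo; the real repo_id is not part of this commit view.
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<org>/<benchmark-results-repo>",
    repo_type="dataset",  # assumption: results are stored in a dataset repo
    commit_message=(
        "Upload cpu_inference_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark_report.json with huggingface_hub"
    ),
)
```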

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram": 946.962432,
+            "max_ram": 949.444608,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -11,15 +11,15 @@
         "latency": {
             "unit": "s",
             "values": [
-                1.2280992309999874
+                1.2448114939999755
             ],
             "count": 1,
-            "total": 1.2280992309999874,
-            "mean": 1.2280992309999874,
-            "p50": 1.2280992309999874,
-            "p90": 1.2280992309999874,
-            "p95": 1.2280992309999874,
-            "p99": 1.2280992309999874,
+            "total": 1.2448114939999755,
+            "mean": 1.2448114939999755,
+            "p50": 1.2448114939999755,
+            "p90": 1.2448114939999755,
+            "p95": 1.2448114939999755,
+            "p99": 1.2448114939999755,
             "stdev": 0,
             "stdev_": 0
         },
@@ -30,7 +30,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 958.623744,
+            "max_ram": 960.978944,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -39,53 +39,53 @@
         "latency": {
             "unit": "s",
             "values": [
-                0.04607624499999474,
-                0.04702626499999951,
-                0.04533179699996026,
-                0.045458303000032174,
-                0.045950520000019424,
-                0.04527365799998506,
-                0.045123308999961864,
-                0.045500630999981695,
-                0.0455951769999956,
-                0.04551481099997545,
-                0.0446602210000151,
-                0.04502046200002496,
-                0.045427100000040355,
-                0.04648133399996368,
-                0.04529361099997686,
-                0.04518399699998099,
-                0.04553904799996644,
-                0.04502089599998271,
-                0.04067614699999922,
-                0.04370037199998933,
-                0.04887052700001959,
-                0.048508371999957944
+                0.04675865999996631,
+                0.0460731160000023,
+                0.04716014400003132,
+                0.04689999700002545,
+                0.048961000999952375,
+                0.04683846100004985,
+                0.046676837999996224,
+                0.04685680500000444,
+                0.049351263000005474,
+                0.046964806999994835,
+                0.046296584000003804,
+                0.04721702000000505,
+                0.04626634800001739,
+                0.04746020599998246,
+                0.04925340799997002,
+                0.04978536599998051,
+                0.047687994000000344,
+                0.04571680800000877,
+                0.04380170700000008,
+                0.04163597399997343,
+                0.04413249500004213,
+                0.042137774999957855
             ],
             "count": 22,
-            "total": 1.001232802999823,
-            "mean": 0.04551058195453741,
-            "p50": 0.045442701500036264,
-            "p90": 0.04697177189999593,
-            "p95": 0.04843426664996002,
-            "p99": 0.04879447445000665,
-            "stdev": 0.001541700552855504,
-            "stdev_": 3.3875650159683275
+            "total": 1.0239327769999704,
+            "mean": 0.04654239895454411,
+            "p50": 0.046847633000027145,
+            "p90": 0.04922416729996826,
+            "p95": 0.0493463702500037,
+            "p99": 0.049694204369985756,
+            "stdev": 0.002055879525164943,
+            "stdev_": 4.417218646535235
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 21.97291172850625
+            "value": 21.485785487264106
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5664007784444924e-06,
-            "ram": 6.549945776430798e-08,
+            "cpu": 1.67085248263886e-06,
+            "ram": 6.986470916328928e-08,
             "gpu": 0.0,
-            "total": 1.6319002362088004e-06
+            "total": 1.7407171918021492e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 612782.5572984663
+            "value": 574475.8566810664
         }
     }
 }
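For readers comparing the two runs, the aggregate fields in the `forward.latency` block appear to be straightforward derivations of the raw `values` list (sum, mean, percentiles), and `throughput` matches `count / total`. A minimal sketch that reproduces the updated run's numbers from the 22 forward-pass latencies above (the percentile method is an assumption, matching NumPy's default linear interpolation):

```python
import numpy as np

# Forward-pass latencies (seconds) from the updated report above.
values = [
    0.04675865999996631, 0.0460731160000023, 0.04716014400003132,
    0.04689999700002545, 0.048961000999952375, 0.04683846100004985,
    0.046676837999996224, 0.04685680500000444, 0.049351263000005474,
    0.046964806999994835, 0.046296584000003804, 0.04721702000000505,
    0.04626634800001739, 0.04746020599998246, 0.04925340799997002,
    0.04978536599998051, 0.047687994000000344, 0.04571680800000877,
    0.04380170700000008, 0.04163597399997343, 0.04413249500004213,
    0.042137774999957855,
]

total = sum(values)                # -> ~1.0239327769999704 ("total")
mean = total / len(values)         # -> ~0.04654239895454411 ("mean")
p50, p90, p95, p99 = np.percentile(values, [50, 90, 95, 99])
throughput = len(values) / total   # -> ~21.485785487264106 samples/s

print(f"total={total}, mean={mean}")
print(f"p50={p50}, p90={p90}, p95={p95}, p99={p99}")
print(f"throughput={throughput} samples/s")
```

The reported `efficiency` value likewise matches 1 divided by the reported total energy (1 / 1.7407171918021492e-06 kWh ≈ 574475.86 samples/kWh).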