IlyasMoutawwakil committed
Commit 0335deb
1 parent: abc8466

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
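The commit message points at an upload done with huggingface_hub. A minimal sketch of such an upload via HfApi.upload_file follows; the repo_id below is a placeholder (the target repository is not shown on this page) and repo_type="dataset" is an assumption about where these benchmark dumps live.

    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="benchmark.json",  # local benchmark result to push
        path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
        repo_id="<namespace>/<benchmark-repo>",  # placeholder: the actual repo id is not shown here
        repo_type="dataset",                     # assumption: benchmark dumps are typically kept in a dataset repo
        commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
    )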

cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "4aa72a2fd35e7d46a674935ffaf335b1c0795600",
+ "optimum_benchmark_commit": "ebd20fcf042acf5db8d8956e7057fa93c82e14ab",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -99,7 +99,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 938.254336,
+ "max_ram": 938.184704,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -107,58 +107,61 @@
  },
  "latency": {
  "unit": "s",
- "count": 27,
- "total": 1.0046153540000091,
- "mean": 0.03720797607407441,
- "stdev": 0.0009843476805070945,
- "p50": 0.037250452000023415,
- "p90": 0.038279575800027034,
- "p95": 0.03853195810001466,
- "p99": 0.039395848540004864,
+ "count": 30,
+ "total": 1.0082519999999704,
+ "mean": 0.03360839999999901,
+ "stdev": 0.0011557893681127009,
+ "p50": 0.03358253450002735,
+ "p90": 0.03504524500000344,
+ "p95": 0.03552685609996331,
+ "p99": 0.03571159646001888,
  "values": [
- 0.039683986000000004,
- 0.03772695200001408,
- 0.03687086399997952,
- 0.037313901000004535,
- 0.03509454799996092,
- 0.03794798400002719,
- 0.036717787999975826,
- 0.03649284800002306,
- 0.03631129999996574,
- 0.03696906699997271,
- 0.035353200999963974,
- 0.035752376000004915,
- 0.037250452000023415,
- 0.03728807199996709,
- 0.03788238100003127,
- 0.03676494499995897,
- 0.03663502400002017,
- 0.03722287100003996,
- 0.03857576500001869,
- 0.036294929000007414,
- 0.03702261699999099,
- 0.03761533300001929,
- 0.037359624999965035,
- 0.038179465000041546,
- 0.03776106500004062,
- 0.038098252999986926,
- 0.03842974200000526
+ 0.03358627100004696,
+ 0.03428198699998575,
+ 0.03303920199999766,
+ 0.0332638009999755,
+ 0.03391645599998583,
+ 0.03497853300001452,
+ 0.03354319999999689,
+ 0.03184948699998813,
+ 0.03185056900002792,
+ 0.03238422200001878,
+ 0.03254428100001405,
+ 0.032321064999962346,
+ 0.03450406000001749,
+ 0.03552011199997196,
+ 0.03453324399998792,
+ 0.03244097799995416,
+ 0.03234579100001156,
+ 0.03245402200002445,
+ 0.03194622799998115,
+ 0.03473418800001582,
+ 0.03366266299997278,
+ 0.03357879800000774,
+ 0.033161209000013514,
+ 0.035532373999956235,
+ 0.03390343199998824,
+ 0.03264342500000339,
+ 0.03499248200000693,
+ 0.03578480000004447,
+ 0.034552249000000756,
+ 0.034402870999997504
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 26.875957940017464
+ "value": 29.754466145369296
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.2526160313023462e-06,
- "ram": 5.234884598742736e-08,
+ "cpu": 1.3016993028146252e-06,
+ "ram": 5.440036534121381e-08,
  "gpu": 0.0,
- "total": 1.3049648772897735e-06
+ "total": 1.356099668155839e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 766304.1491789862
+ "value": 737408.9261152175
  }
  }
  }
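A quick sanity check on the derived fields, assuming (as the figures imply) that throughput is count divided by total latency and efficiency is the inverse of the reported total energy, i.e. that the energy numbers are per sample:

    # new measurement from the hunk above
    count, total_latency_s = 30, 1.0082519999999704
    print(count / total_latency_s)   # ~29.754466 samples/s, matches the new "throughput"

    total_energy_kwh = 1.356099668155839e-06
    print(1 / total_energy_kwh)      # ~737408.93 samples/kWh, matches the new "efficiency"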