IlyasMoutawwakil (HF staff) committed
Commit: d177c69
1 parent: 7760805

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.15",
  "optimum_benchmark_version": "0.5.0",
- "optimum_benchmark_commit": "6646634d4624f02d9aa14bd6ca3ed947e6cd1ec9",
+ "optimum_benchmark_commit": "0600d2e2ba71f8a4277cfa9b8287625223e3f7a0",
  "transformers_version": "4.45.1",
  "transformers_commit": null,
  "accelerate_version": "0.34.2",
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 947.093504,
+ "max_ram": 949.02272,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,15 +108,15 @@
  "latency": {
  "unit": "s",
  "values": [
- 1.2522911869999689
+ 1.4273132989999908
  ],
  "count": 1,
- "total": 1.2522911869999689,
- "mean": 1.2522911869999689,
- "p50": 1.2522911869999689,
- "p90": 1.2522911869999689,
- "p95": 1.2522911869999689,
- "p99": 1.2522911869999689,
+ "total": 1.4273132989999908,
+ "mean": 1.4273132989999908,
+ "p50": 1.4273132989999908,
+ "p90": 1.4273132989999908,
+ "p95": 1.4273132989999908,
+ "p99": 1.4273132989999908,
  "stdev": 0,
  "stdev_": 0
  },
@@ -127,7 +127,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 955.35104,
+ "max_ram": 957.280256,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -136,58 +136,58 @@
  "latency": {
  "unit": "s",
  "values": [
- 0.037112856000021566,
- 0.03680856699998003,
- 0.03855008099998258,
- 0.04351243500002511,
- 0.038444324000010965,
- 0.04249061499996287,
- 0.03967680700003484,
- 0.036969195999972726,
- 0.03662310999999363,
- 0.03683737099999007,
- 0.037082419000000755,
- 0.03750861499997882,
- 0.03801908899998807,
- 0.03729297299997825,
- 0.036803587999997944,
- 0.037178117999985716,
- 0.0368374110000218,
- 0.04185465699998758,
- 0.04300159000001713,
- 0.043059829000014815,
- 0.03928889200000185,
- 0.03712887500000761,
- 0.03749220399998876,
- 0.03722058699997888,
- 0.033635967999998684,
- 0.03423235200000363,
- 0.03209713200004671
+ 0.03981268799998361,
+ 0.03910152699995706,
+ 0.038372281999954794,
+ 0.03906676100001505,
+ 0.03936000100003412,
+ 0.040058408999982476,
+ 0.03827129199999035,
+ 0.038474592999989454,
+ 0.03948708899997655,
+ 0.03938883499995427,
+ 0.039154467000003024,
+ 0.039820372999997744,
+ 0.038498207000031925,
+ 0.039486547999956656,
+ 0.03888950999998997,
+ 0.03876054900001691,
+ 0.037048053000035,
+ 0.03343306599998641,
+ 0.0333484469999803,
+ 0.03389648299997816,
+ 0.03348706700001003,
+ 0.03287189600001739,
+ 0.03333767699996315,
+ 0.036412813999959326,
+ 0.03568130400003611,
+ 0.033097236999992674,
+ 0.03348883000001024
  ],
  "count": 27,
- "total": 1.0267596609999714,
- "mean": 0.038028135592591536,
- "p50": 0.03722058699997888,
- "p90": 0.042695004999984576,
- "p95": 0.04304235730001551,
- "p99": 0.04339475744002243,
- "stdev": 0.00273690196150437,
- "stdev_": 7.197044816568815
+ "total": 1.0021060049998027,
+ "mean": 0.03711503722221492,
+ "p50": 0.038474592999989454,
+ "p90": 0.03961732859997937,
+ "p95": 0.039818067499993504,
+ "p99": 0.03999651963998645,
+ "stdev": 0.002614446460988622,
+ "stdev_": 7.044170386615605
  },
  "throughput": {
  "unit": "samples/s",
- "value": 26.29631940711854
+ "value": 26.943257365277752
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.3237095899999771e-06,
- "ram": 5.535123735262504e-08,
+ "cpu": 1.2213696140625076e-06,
+ "ram": 5.107078070253942e-08,
  "gpu": 0.0,
- "total": 1.3790608273526022e-06
+ "total": 1.272440394765047e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 725131.1763526129
+ "value": 785891.4288748649
  }
  }
  }
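For reference, the summary fields in this report are plain aggregates over the per-call "values" lists: mean = total / count, "stdev_" is the coefficient of variation in percent, throughput = count / total latency, and efficiency = 1 / per-sample energy total, which is why the forward mean latency moving from ~0.0380 s to ~0.0371 s lifts throughput from ~26.30 to ~26.94 samples/s in this commit. The sketch below recomputes these figures with NumPy; the local file path, the key layout used for lookups, the linear-interpolation percentile method, and the population (ddof=0) standard deviation are assumptions and may not match optimum-benchmark's exact implementation.

# Minimal sketch: recompute the summary statistics in this diff from the raw
# per-call latency values. Assumptions (not guaranteed to match optimum-benchmark):
# the local file path, the key layout below, NumPy's default linear-interpolation
# percentiles, and a population standard deviation (ddof=0).
import json

import numpy as np

with open("benchmark.json") as f:  # hypothetical local copy of the uploaded file
    report = json.load(f)

forward = report["report"]["forward"]  # assumed location of the forward-pass measurements
latencies = np.asarray(forward["latency"]["values"])

stats = {
    "count": int(latencies.size),
    "total": float(latencies.sum()),
    "mean": float(latencies.mean()),
    "p50": float(np.percentile(latencies, 50)),
    "p90": float(np.percentile(latencies, 90)),
    "p95": float(np.percentile(latencies, 95)),
    "p99": float(np.percentile(latencies, 99)),
    "stdev": float(latencies.std()),
    "stdev_": float(100 * latencies.std() / latencies.mean()),  # coefficient of variation, %
}

throughput = stats["count"] / stats["total"]   # samples/s: ~26.94 after this commit
efficiency = 1.0 / forward["energy"]["total"]  # samples/kWh: ~785891 after this commit

print(json.dumps(stats, indent=2))
print(f"throughput ~ {throughput:.2f} samples/s, efficiency ~ {efficiency:.0f} samples/kWh")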