IlyasMoutawwakil committed
Commit cacab61
1 Parent(s): e75a909

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

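The commit message notes the file was pushed with the `huggingface_hub` client. A minimal sketch of how such an upload could look, assuming a local file named benchmark.json and a hypothetical dataset repo id (neither the local path nor the repo id is shown in this commit):

# Minimal sketch of uploading the benchmark JSON with huggingface_hub.
# repo_id, repo_type, and the local filename are assumptions for illustration only.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file from the benchmark run (assumed name)
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical repo id
    repo_type="dataset",                    # assumed repo type
    commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)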
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.15",
  "optimum_benchmark_version": "0.5.0.dev0",
- "optimum_benchmark_commit": "8bd7430b5a210454e1554b813733827ac7ec1b24",
+ "optimum_benchmark_commit": "08c9f59440cf4e5a5d6711ec19e8329ab2de652d",
  "transformers_version": "4.45.2",
  "transformers_commit": null,
  "accelerate_version": "1.0.1",
@@ -101,7 +101,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 946.958336,
+ "max_ram": 947.167232,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -110,15 +110,15 @@
  "latency": {
  "unit": "s",
  "values": [
- 1.2301474199999802
+ 1.2332180920000155
  ],
  "count": 1,
- "total": 1.2301474199999802,
- "mean": 1.2301474199999802,
- "p50": 1.2301474199999802,
- "p90": 1.2301474199999802,
- "p95": 1.2301474199999802,
- "p99": 1.2301474199999802,
+ "total": 1.2332180920000155,
+ "mean": 1.2332180920000155,
+ "p50": 1.2332180920000155,
+ "p90": 1.2332180920000155,
+ "p95": 1.2332180920000155,
+ "p99": 1.2332180920000155,
  "stdev": 0,
  "stdev_": 0
  },
@@ -129,7 +129,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 955.346944,
+ "max_ram": 955.424768,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -138,61 +138,60 @@
  "latency": {
  "unit": "s",
  "values": [
- 0.03499799999997322,
- 0.034658918000047834,
- 0.03499960399994961,
- 0.03473322600001438,
- 0.03470618599999398,
- 0.03503857599997673,
- 0.034484872999996696,
- 0.03404198699996641,
- 0.03462909199998876,
- 0.03483069899999691,
- 0.03481744399999798,
- 0.0351458970000067,
- 0.03463819999996076,
- 0.03481311600000936,
- 0.0344798849999961,
- 0.034333161000006385,
- 0.03508685700001024,
- 0.035360235999974066,
- 0.03452867599997944,
- 0.0345556650000276,
- 0.034613773999979,
- 0.03475780299999087,
- 0.03176077700004498,
- 0.029207167999970807,
- 0.02922234700002946,
- 0.030491713999992953,
- 0.02938609200003839,
- 0.029087313999980324,
- 0.029252453000026435,
- 0.029168436000020392
+ 0.03658645500001967,
+ 0.0361032659999978,
+ 0.03610821500001293,
+ 0.036316623999994135,
+ 0.035721485999999913,
+ 0.03647771300001068,
+ 0.03574233499995216,
+ 0.0360932980000257,
+ 0.03616041300000461,
+ 0.03680985200003306,
+ 0.03691355400002294,
+ 0.036534498999969856,
+ 0.036020262000022285,
+ 0.03553399699995907,
+ 0.035940604000018084,
+ 0.036153218999970704,
+ 0.03606949400000303,
+ 0.03632082100000389,
+ 0.0355914040000016,
+ 0.035376623999979984,
+ 0.03604786400001103,
+ 0.036388205999969614,
+ 0.035578268999984175,
+ 0.032220391999999265,
+ 0.03168374300003052,
+ 0.03233158899996624,
+ 0.03141736700001729,
+ 0.0313566550000246,
+ 0.03159928499997022
  ],
- "count": 30,
- "total": 1.0018281759999468,
- "mean": 0.03339427253333156,
- "p50": 0.03462143299998388,
- "p90": 0.03504340409998008,
- "p95": 0.03511932900000829,
- "p99": 0.03529807768998353,
- "stdev": 0.002289445915144382,
- "stdev_": 6.85580412886442
+ "count": 29,
+ "total": 1.021197504999975,
+ "mean": 0.03521370706896466,
+ "p50": 0.03604786400001103,
+ "p90": 0.03654489019997982,
+ "p95": 0.0367204932000277,
+ "p99": 0.03688451744002577,
+ "stdev": 0.0018028598173541696,
+ "stdev_": 5.11976717993178
  },
  "throughput": {
  "unit": "samples/s",
- "value": 29.9452548038553
+ "value": 28.39803256276141
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.1469016393790505e-06,
- "ram": 4.795816995751087e-08,
+ "cpu": 1.214893873737353e-06,
+ "ram": 5.0801180209474435e-08,
  "gpu": 0.0,
- "total": 1.1948598093365613e-06
+ "total": 1.2656950539468274e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 836918.2662150499
+ "value": 790079.7248766136
  }
  }
  }