Commit 4e7acea (verified) · committed by IlyasMoutawwakil · 1 parent: 42f9703

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
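The commit message says the file was pushed with huggingface_hub. A minimal sketch of how such an upload can be done with the library's HfApi.upload_file; the repo_id below is a placeholder, not taken from this page:

    from huggingface_hub import HfApi

    # Sketch only: push a locally produced benchmark.json into a Hub repo.
    # repo_id is hypothetical; path_in_repo mirrors the path shown in this commit.
    api = HfApi()
    api.upload_file(
        path_or_fileobj="benchmark.json",
        path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
        repo_id="optimum-benchmark/benchmarks",  # assumed dataset name
        repo_type="dataset",
        commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
    )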
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
             "processor": "x86_64",
             "python_version": "3.10.14",
             "optimum_benchmark_version": "0.4.0",
-            "optimum_benchmark_commit": "1e6f9712dc17e792bdaa02aa527364d60a088987",
+            "optimum_benchmark_commit": "a5a350c56b08f0a4cf90d3c7d06fa41c798bfc83",
             "transformers_version": "4.44.2",
             "transformers_commit": null,
             "accelerate_version": "0.33.0",
@@ -99,7 +99,7 @@
         "load": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 944.656384,
+                "max_ram": 947.781632,
                 "max_global_vram": null,
                 "max_process_vram": null,
                 "max_reserved": null,
@@ -108,31 +108,31 @@
             "latency": {
                 "unit": "s",
                 "count": 1,
-                "total": 4.2632737330000054,
-                "mean": 4.2632737330000054,
+                "total": 4.29873779899998,
+                "mean": 4.29873779899998,
                 "stdev": 0.0,
-                "p50": 4.2632737330000054,
-                "p90": 4.2632737330000054,
-                "p95": 4.2632737330000054,
-                "p99": 4.2632737330000054,
+                "p50": 4.29873779899998,
+                "p90": 4.29873779899998,
+                "p95": 4.29873779899998,
+                "p99": 4.29873779899998,
                 "values": [
-                    4.2632737330000054
+                    4.29873779899998
                 ]
             },
             "throughput": null,
             "energy": {
                 "unit": "kWh",
-                "cpu": 4.8498598166667425e-05,
-                "ram": 2.027013558164608e-06,
+                "cpu": 4.812599446666595e-05,
+                "ram": 2.0114339774498277e-06,
                 "gpu": 0,
-                "total": 5.052561172483203e-05
+                "total": 5.0137428444115775e-05
             },
             "efficiency": null
         },
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 955.55584,
+                "max_ram": 958.398464,
                 "max_global_vram": null,
                 "max_process_vram": null,
                 "max_reserved": null,
@@ -140,54 +140,56 @@
             },
             "latency": {
                 "unit": "s",
-                "count": 23,
-                "total": 1.035720142999935,
-                "mean": 0.04503131056521457,
-                "stdev": 0.001954123367973739,
-                "p50": 0.04560434299997951,
-                "p90": 0.04642472720000797,
-                "p95": 0.04651203849997785,
-                "p99": 0.04708871659998181,
+                "count": 25,
+                "total": 1.0248644289998765,
+                "mean": 0.04099457715999506,
+                "stdev": 0.0008643700008213193,
+                "p50": 0.04078190600000653,
+                "p90": 0.04166415439999582,
+                "p95": 0.04232261600000129,
+                "p99": 0.04389335619998746,
                 "values": [
-                    0.0456470539999998,
-                    0.044865310999995245,
-                    0.04388692899999569,
-                    0.046461418000006915,
-                    0.04587488999999323,
-                    0.045488607000010006,
-                    0.044566391999978805,
-                    0.044023976000005405,
-                    0.04651766299997462,
-                    0.045834874999997055,
-                    0.045547126000002436,
-                    0.04500000300001261,
-                    0.04597806299997842,
-                    0.04547915899999566,
-                    0.047249782999983836,
-                    0.04617412999999715,
-                    0.0459517839999819,
-                    0.04454242700001032,
-                    0.04624568400001294,
-                    0.04627796400001216,
-                    0.04560434299997951,
-                    0.03957305800000199,
-                    0.038929504000009274
+                    0.04123897800002396,
+                    0.040736231000039425,
+                    0.040427175999980136,
+                    0.040670618999968156,
+                    0.04078190600000653,
+                    0.04154043799996998,
+                    0.0407234979999771,
+                    0.04063856999999871,
+                    0.04081477699998004,
+                    0.04078533299997389,
+                    0.04076291199999105,
+                    0.040994682000018656,
+                    0.04054065799999762,
+                    0.04100303799998528,
+                    0.04041435199997068,
+                    0.04113482299999305,
+                    0.04174663200001305,
+                    0.04246661199999835,
+                    0.04121110599999156,
+                    0.040365161000011085,
+                    0.04121264899998778,
+                    0.04434390699998403,
+                    0.04030620100002125,
+                    0.03995546799995964,
+                    0.04004870200003552
                 ]
             },
             "throughput": {
                 "unit": "samples/s",
-                "value": 22.206770965543964
+                "value": 24.39347028991579
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 1.5200637711538288e-06,
-                "ram": 6.352701159669065e-08,
+                "cpu": 1.5635969968888682e-06,
+                "ram": 6.53430474354232e-08,
                 "gpu": 0.0,
-                "total": 1.5835907827505195e-06
+                "total": 1.6289400443242914e-06
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value": 631476.2695594326
+                "value": 613896.1366222751
             }
         }
     }
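For reference, the derived fields in the new forward report appear to be simple arithmetic over the raw measurements in this file; a quick check, assuming mean = total / count, throughput = count / total latency, and efficiency = 1 / total energy (relationships inferred from the numbers here, not from the benchmark's source code):

    # Sanity check of the derived metrics in the updated "forward" section.
    total_latency_s = 1.0248644289998765      # "total" over 25 forward passes
    count = 25
    total_energy_kwh = 1.5635969968888682e-06 + 6.53430474354232e-08 + 0.0  # cpu + ram + gpu

    mean = total_latency_s / count            # ~0.04099457715999506 s, matches "mean"
    throughput = count / total_latency_s      # ~24.39347028991579 samples/s, matches "throughput"
    efficiency = 1 / total_energy_kwh         # ~613896.1366222751 samples/kWh, matches "efficiency"

    print(mean, throughput, efficiency)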