IlyasMoutawwakil (HF staff) committed on
Commit f836f82
1 Parent(s): 81ad83b

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

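The commit message says the file was pushed with huggingface_hub. A minimal sketch of such an upload, assuming HfApi.upload_file is used; the local path, repo_id, and repo_type are placeholders, since the target repo is not shown in this commit:

from huggingface_hub import HfApi

api = HfApi()
# Push a local benchmark result file into the repo, reproducing the
# commit message seen above. repo_id/repo_type are illustrative only.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",
    repo_type="dataset",
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)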
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "bb03882362b0d86ebdf2f3e66d54ac5dde388b89",
+ "optimum_benchmark_commit": "4aa72a2fd35e7d46a674935ffaf335b1c0795600",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -99,7 +99,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 941.965312,
+ "max_ram": 941.080576,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,53 +108,53 @@
  "latency": {
  "unit": "s",
  "count": 23,
- "total": 1.0066838379998444,
- "mean": 0.043768862521732364,
- "stdev": 0.002391643364277399,
- "p50": 0.04491251199999624,
- "p90": 0.04550604919999159,
- "p95": 0.04557662229998414,
- "p99": 0.04575719585999195,
+ "total": 1.0277896999999143,
+ "mean": 0.04468650869564845,
+ "stdev": 0.0018171878157618368,
+ "p50": 0.04527248499999814,
+ "p90": 0.04606567260002521,
+ "p95": 0.04642967580004438,
+ "p99": 0.04689601713997263,
  "values": [
- 0.04552068999998937,
- 0.045582836999983556,
- 0.04529008800000156,
- 0.04532078600004752,
- 0.04580637399999432,
- 0.045060108000029686,
- 0.0443377050000322,
- 0.04440597299998217,
- 0.04491251199999624,
- 0.04493002300000626,
- 0.04463786699994898,
- 0.04434072099996911,
- 0.04457982200000288,
- 0.04544748600000048,
- 0.04491579099999399,
- 0.044790095999985624,
- 0.04492976699998508,
- 0.045021469000005254,
- 0.04119475799996053,
- 0.039397992999965936,
- 0.039160900000013044,
- 0.03859309699998903,
- 0.038506974999961585
+ 0.045909854999990785,
+ 0.04573497899997392,
+ 0.045647444999985964,
+ 0.045688282000014624,
+ 0.04527248499999814,
+ 0.04545663900000818,
+ 0.04474496000000272,
+ 0.04480953999996018,
+ 0.045002238999984456,
+ 0.046077198000034514,
+ 0.04701650299995208,
+ 0.045950340999979744,
+ 0.046468840000045475,
+ 0.04535282599999846,
+ 0.04520682200001147,
+ 0.044900710999968396,
+ 0.04525987099998474,
+ 0.04601957099998799,
+ 0.04296260099999927,
+ 0.04076086800000667,
+ 0.04141492899998411,
+ 0.04088080200000377,
+ 0.04125139300003866
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 22.84729239887087
+ "value": 22.378118792202255
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.5088596914568522e-06,
- "ram": 6.305824399858624e-08,
+ "cpu": 1.606640762752957e-06,
+ "ram": 6.714315661727141e-08,
  "gpu": 0.0,
- "total": 1.5719179354554383e-06
+ "total": 1.6737839193702285e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 636165.5258486925
+ "value": 597448.6840429535
  }
  }
  }
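For reference, the aggregate latency fields of the new run can be recomputed from the raw "values" list. A quick check in plain Python, assuming the reported stdev is the population standard deviation and that efficiency is the reciprocal of the per-forward energy total; the percentile fields also depend on an interpolation rule and are left out here:

import statistics

# Per-forward latencies (s) of the new run, copied from "values" above.
values = [
    0.045909854999990785, 0.04573497899997392, 0.045647444999985964,
    0.045688282000014624, 0.04527248499999814, 0.04545663900000818,
    0.04474496000000272, 0.04480953999996018, 0.045002238999984456,
    0.046077198000034514, 0.04701650299995208, 0.045950340999979744,
    0.046468840000045475, 0.04535282599999846, 0.04520682200001147,
    0.044900710999968396, 0.04525987099998474, 0.04601957099998799,
    0.04296260099999927, 0.04076086800000667, 0.04141492899998411,
    0.04088080200000377, 0.04125139300003866,
]

total = sum(values)                      # ~1.02779 s    -> "total"
mean = total / len(values)               # ~0.0446865 s  -> "mean"
stdev = statistics.pstdev(values)        # ~0.0018172    -> "stdev" (population, ddof=0)
throughput = len(values) / total         # ~22.378 samples/s -> "throughput"
efficiency = 1 / 1.6737839193702285e-06  # ~597448.7 samples/kWh -> "efficiency"

print(total, mean, stdev, throughput, efficiency)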