IlyasMoutawwakil committed
Commit: 6840d6c
Parent: 980fbd8

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

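The commit message notes that the file was pushed with huggingface_hub. A minimal sketch of such an upload is shown below; the repo_id and local path are placeholders assumed for illustration and are not taken from this commit.

# Sketch only: repo_id and the local file path are assumed placeholders.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user>/<benchmark-dataset>",  # assumed placeholder, not the actual repo
    repo_type="dataset",
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)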
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -79,8 +79,8 @@
     "platform": "Linux-6.5.0-1022-azure-x86_64-with-glibc2.35",
     "processor": "x86_64",
     "python_version": "3.10.14",
-    "optimum_benchmark_version": "0.2.1",
-    "optimum_benchmark_commit": "3731aa19b0b76022fb42f78436721c579f50c777",
+    "optimum_benchmark_version": "0.3.0",
+    "optimum_benchmark_commit": "748abd0c7ac21cfb1798768cad39007b466ce8e8",
     "transformers_version": "4.42.3",
     "transformers_commit": null,
     "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 940.60544,
+            "max_ram": 941.039616,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -107,54 +107,53 @@
         },
         "latency": {
             "unit": "s",
-            "count": 23,
-            "total": 1.0075318320000406,
-            "mean": 0.04380573182608872,
-            "stdev": 0.0028614411417156285,
-            "p50": 0.044998319000001175,
-            "p90": 0.04590145940003367,
-            "p95": 0.04605551700000774,
-            "p99": 0.04655986413998221,
+            "count": 22,
+            "total": 1.0359433090000607,
+            "mean": 0.04708833222727549,
+            "stdev": 0.003186730771292657,
+            "p50": 0.04773207200000229,
+            "p90": 0.049897197399982926,
+            "p95": 0.049999193300004664,
+            "p99": 0.05194411247000516,
             "values": [
-                0.04669832399997631,
-                0.04506544999998141,
-                0.044602386999997634,
-                0.04576921299997139,
-                0.04541668500002061,
-                0.04532359099999894,
-                0.044050789000039,
-                0.045284729000002244,
-                0.04606896100000313,
-                0.04490662500001008,
-                0.045599987000002784,
-                0.045934521000049244,
-                0.045100214000001415,
-                0.045747851999976774,
-                0.04473391199996968,
-                0.044998319000001175,
-                0.044944550000025174,
-                0.0446361460000162,
-                0.04026164200001858,
-                0.038125396999987515,
-                0.03803132100000539,
-                0.03808979999996609,
-                0.03814141700001983
+                0.04968575499998451,
+                0.04751558400002409,
+                0.04793182800000295,
+                0.048670520000001716,
+                0.04774996000000442,
+                0.047181273999996165,
+                0.04714005799999654,
+                0.048065406000006305,
+                0.05246001800000499,
+                0.04745143600001711,
+                0.047389529999975366,
+                0.050003325000005816,
+                0.0475034720000167,
+                0.04992069099998275,
+                0.04872668500001964,
+                0.04792898200000195,
+                0.049013697000020784,
+                0.04771418400000016,
+                0.04354034200000001,
+                0.04049645899999632,
+                0.03992759199999796,
+                0.03992651100000444
             ]
         },
         "throughput": {
            "unit": "samples/s",
-           "value": 22.828062865609862
+           "value": 21.23668332897038
         },
         "energy": {
            "unit": "kWh",
-           "cpu": 1.4831602818681383e-06,
-           "ram": 6.198416202323642e-08,
+           "cpu": 1.5814818276299372e-06,
+           "ram": 6.609270549961365e-08,
            "gpu": 0.0,
-           "total": 1.5451444438913747e-06
+           "total": 1.6475745331295509e-06
         },
         "efficiency": {
            "unit": "samples/kWh",
-           "value": 647188.6844970599
+           "value": 606952.8145112259
         }
     }
 }
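As a sanity check on the derived metrics in this diff, the reported mean and throughput follow directly from the latency count and total; the short sketch below recomputes them from the new-side values.

# Recompute the derived latency metrics from the updated benchmark values.
count = 22
total = 1.0359433090000607   # total forward latency in seconds
mean = total / count         # ~0.047088..., matches the reported "mean"
throughput = count / total   # ~21.2367 samples/s, matches "throughput.value"
print(mean, throughput)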