IlyasMoutawwakil HF staff committed on
Commit
2aa361a
1 Parent(s): b135bf6

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.2.1",
83
- "optimum_benchmark_commit": "9a8b29987613b8b04c221447a49b37ee314548ff",
84
  "transformers_version": "4.42.3",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
99
  "forward": {
100
  "memory": {
101
  "unit": "MB",
102
- "max_ram": 937.132032,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
@@ -108,59 +108,59 @@
108
  "latency": {
109
  "unit": "s",
110
  "count": 29,
111
- "total": 1.0230030259999694,
112
- "mean": 0.03527596641379205,
113
- "stdev": 0.00243394545893943,
114
- "p50": 0.03547241999996231,
115
- "p90": 0.03754842620001,
116
- "p95": 0.038270623399989746,
117
- "p99": 0.040936517719992474,
118
  "values": [
119
- 0.03684937799999943,
120
- 0.03570686900002329,
121
- 0.03597218600003771,
122
- 0.03489807500000097,
123
- 0.036296000999982425,
124
- 0.041932981999991625,
125
- 0.038115286999982345,
126
- 0.03837418099999468,
127
- 0.03740671100001691,
128
- 0.03547241999996231,
129
- 0.03578522600002998,
130
- 0.03533132599994815,
131
- 0.03487983100001202,
132
- 0.03546148999998877,
133
- 0.03521220400000402,
134
- 0.03517644599997993,
135
- 0.03516457400002082,
136
- 0.03604646399998046,
137
- 0.03564741799999638,
138
- 0.03528632199999038,
139
- 0.035078704000000016,
140
- 0.035827616000005946,
141
- 0.036283308000008674,
142
- 0.035693132999995214,
143
- 0.03346759800001564,
144
- 0.030755992999957016,
145
- 0.03031716100002768,
146
- 0.03031341500002327,
147
- 0.030250706999993326
148
  ]
149
  },
150
  "throughput": {
151
  "unit": "samples/s",
152
- "value": 28.347912237750183
153
  },
154
  "energy": {
155
  "unit": "kWh",
156
- "cpu": 1.1739758494632697e-06,
157
- "ram": 4.906256185479914e-08,
158
  "gpu": 0.0,
159
- "total": 1.2230384113180688e-06
160
  },
161
  "efficiency": {
162
  "unit": "samples/kWh",
163
- "value": 817635.8082836497
164
  }
165
  }
166
  }
 
80
  "processor": "x86_64",
81
  "python_version": "3.10.14",
82
  "optimum_benchmark_version": "0.2.1",
83
+ "optimum_benchmark_commit": "3b8c49a169ebd79001b2a83fbf2b332612417102",
84
  "transformers_version": "4.42.3",
85
  "transformers_commit": null,
86
  "accelerate_version": "0.31.0",
 
99
  "forward": {
100
  "memory": {
101
  "unit": "MB",
102
+ "max_ram": 937.013248,
103
  "max_global_vram": null,
104
  "max_process_vram": null,
105
  "max_reserved": null,
 
108
  "latency": {
109
  "unit": "s",
110
  "count": 29,
111
+ "total": 1.0252021000001719,
112
+ "mean": 0.03535179655173006,
113
+ "stdev": 0.0018585449317606404,
114
+ "p50": 0.03603260200003433,
115
+ "p90": 0.03686585699997522,
116
+ "p95": 0.037266597199982245,
117
+ "p99": 0.03766964303998975,
118
  "values": [
119
+ 0.03782582200000206,
120
+ 0.03609063999999762,
121
+ 0.03605331100004605,
122
+ 0.035014170000010836,
123
+ 0.03593772499999659,
124
+ 0.03618988700003456,
125
+ 0.0359512699999982,
126
+ 0.03612088800002766,
127
+ 0.03604783100001896,
128
+ 0.034670006999988345,
129
+ 0.03676621299996441,
130
+ 0.0355369160000123,
131
+ 0.036157887000001665,
132
+ 0.03726803999995809,
133
+ 0.03550145000002658,
134
+ 0.03588366400003906,
135
+ 0.035515005000036126,
136
+ 0.03636049499999672,
137
+ 0.0361986629999933,
138
+ 0.03603260200003433,
139
+ 0.03558536700001014,
140
+ 0.03654423800003315,
141
+ 0.03726443300001847,
142
+ 0.03608338599997296,
143
+ 0.03420808499998884,
144
+ 0.0312510310000107,
145
+ 0.030570088999979816,
146
+ 0.032109223999952974,
147
+ 0.03046376100002135
148
  ]
149
  },
150
  "throughput": {
151
  "unit": "samples/s",
152
+ "value": 28.28710553752781
153
  },
154
  "energy": {
155
  "unit": "kWh",
156
+ "cpu": 1.1982839115540989e-06,
157
+ "ram": 5.007818293537721e-08,
158
  "gpu": 0.0,
159
+ "total": 1.2483620944894761e-06
160
  },
161
  "efficiency": {
162
  "unit": "samples/kWh",
163
+ "value": 801049.6348889502
164
  }
165
  }
166
  }