IlyasMoutawwakil (HF staff) committed
Commit: e1b8d3a
1 Parent(s): ef68110

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

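For context, benchmark artifacts like this one are typically pushed to the Hub with the `huggingface_hub` client, as the commit message indicates. Below is a minimal sketch of such an upload using `HfApi.upload_file`; the `repo_id` and `repo_type` are hypothetical placeholders, not taken from this commit.

```python
from huggingface_hub import HfApi

api = HfApi()

# Upload the benchmark result into the repo under the same path shown in this diff.
# repo_id and repo_type are hypothetical placeholders -- this commit page does not state them.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="username/benchmarks",  # hypothetical
    repo_type="dataset",            # assumption
    commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```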
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -78,7 +78,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "56d026bf244c6516d8cb780280ce7cc6505f270e",
+ "optimum_benchmark_commit": "c1d0b062e90b79e7705510c58cea731c0d90da8a",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -97,7 +97,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 936.8576,
+ "max_ram": 934.416384,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -105,60 +105,63 @@
  },
  "latency": {
  "unit": "s",
- "count": 29,
- "total": 1.0165581000000543,
- "mean": 0.03505372758620877,
- "stdev": 0.0009159489526071956,
- "p50": 0.034928329000024405,
- "p90": 0.03638322659998039,
- "p95": 0.036566704400036086,
- "p99": 0.03685525792003091,
+ "count": 32,
+ "total": 1.0009807630000296,
+ "mean": 0.031280648843750924,
+ "stdev": 0.0007399869799896267,
+ "p50": 0.031084461999995483,
+ "p90": 0.0315559685000153,
+ "p95": 0.03250975920000769,
+ "p99": 0.03407493950002732,
  "values": [
- 0.03553541299999097,
- 0.034703830000012204,
- 0.034620413999959965,
- 0.035291958000016166,
- 0.03420142200002374,
- 0.03441236700001582,
- 0.034928329000024405,
- 0.034950048999974115,
- 0.034483508999983314,
- 0.03560146500001338,
- 0.035092415999997684,
- 0.03252300100001548,
- 0.0351824020000322,
- 0.034734517999993386,
- 0.03420500900000434,
- 0.034816250000005766,
- 0.03559958099998539,
- 0.03366153500002156,
- 0.035485188999984985,
- 0.03484201700001677,
- 0.03407772200000636,
- 0.03484153699997705,
- 0.03555395600000111,
- 0.03472743499997932,
- 0.036438257000042995,
- 0.036934172000030685,
- 0.03665233600003148,
- 0.03636946899996474,
- 0.036092541999948935
+ 0.03248157299998411,
+ 0.03129141099998378,
+ 0.031029142999955184,
+ 0.03096282900003189,
+ 0.03083444999998619,
+ 0.031140110000023924,
+ 0.03127116500002103,
+ 0.031125153000004957,
+ 0.0314091019999978,
+ 0.031073777000017344,
+ 0.030998785999997835,
+ 0.031224197000028653,
+ 0.03086090800002239,
+ 0.030878731999962383,
+ 0.031208657999968636,
+ 0.031026218000022254,
+ 0.030995179999990796,
+ 0.03113300600000457,
+ 0.030690692000007402,
+ 0.030979820999959884,
+ 0.031095146999973622,
+ 0.031061122999972213,
+ 0.0312922439999852,
+ 0.03157228700001724,
+ 0.03127936999999292,
+ 0.031000028999983442,
+ 0.030911193999997977,
+ 0.030970873999990545,
+ 0.030667097000048216,
+ 0.031209619000037492,
+ 0.03476265900002318,
+ 0.03254420900003652
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 28.527636541382584
+ "value": 31.968646334514077
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.165485576866499e-06,
- "ram": 4.87078374442346e-08,
+ "cpu": 1.1619456453261034e-06,
+ "ram": 4.8559800220765647e-08,
  "gpu": 0.0,
- "total": 1.2141934143107336e-06
+ "total": 1.210505445546869e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 823592.014430151
+ "value": 826101.1990311459
  }
  }
  }
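As a sanity check, the updated summary numbers on the `+` side of this diff are mutually consistent: the mean equals total latency divided by the iteration count, throughput equals count divided by total latency, and the efficiency value matches the reciprocal of the reported total energy (which therefore appears to be energy per sample). The interpretation of "total" energy as per-sample energy is an inference from the numbers, not stated in the file. A minimal sketch of that arithmetic, using values copied from the diff:

```python
# Reproduce the updated summary values from this diff.
count = 32
total_latency_s = 1.0009807630000296      # latency.total
total_energy_kwh = 1.210505445546869e-06  # energy.total (assumed to be per sample)

mean = total_latency_s / count            # ~0.03128064884, matches latency.mean
throughput = count / total_latency_s      # ~31.9686, matches throughput.value (samples/s)
efficiency = 1 / total_energy_kwh         # ~826101.2, matches efficiency.value (samples/kWh)

print(mean, throughput, efficiency)
```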