Commit be0c40d by IlyasMoutawwakil
1 Parent(s): 057131b

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

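The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of how such an upload is typically done with HfApi.upload_file follows; the repo id and repo type below are placeholders, not taken from this commit:

```python
# Sketch only: re-creating this kind of upload with huggingface_hub.
# The repo_id and repo_type are placeholders (assumptions), not read from the commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local copy of the benchmark report
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-dataset>",  # placeholder target repo
    repo_type="dataset",                          # assumption: results stored in a dataset repo
    commit_message=(
        "Upload cpu_inference_transformers_text-classification_"
        "FacebookAI/roberta-base/benchmark.json with huggingface_hub"
    ),
)
```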
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
     "processor": "x86_64",
     "python_version": "3.10.14",
     "optimum_benchmark_version": "0.2.0",
-    "optimum_benchmark_commit": "8ef7b0420a1d5aa903d786290f7ab247769da879",
+    "optimum_benchmark_commit": "e5cc8d8069420e159473795b6d1ea703cadf2a8b",
     "transformers_version": "4.40.2",
     "transformers_commit": null,
     "accelerate_version": "0.30.1",
@@ -99,7 +99,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 941.412352,
+            "max_ram": 939.343872,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -107,62 +107,63 @@
         },
         "latency": {
             "unit": "s",
-            "count": 31,
-            "total": 1.0028979849999473,
-            "mean": 0.03235154790322411,
-            "stdev": 0.0007254913291455751,
-            "p50": 0.0321730819999857,
-            "p90": 0.032961761000024126,
-            "p95": 0.033839419999992515,
-            "p99": 0.0347247656999798,
+            "count": 32,
+            "total": 1.030907060000061,
+            "mean": 0.032215845625001904,
+            "stdev": 0.0007459957801647178,
+            "p50": 0.032048246499982724,
+            "p90": 0.032668799700019235,
+            "p95": 0.03299822035002649,
+            "p99": 0.03506179639999971,
             "values": [
-                0.034753568999974505,
-                0.03257596299999932,
-                0.032491295999989234,
-                0.03238255299999082,
-                0.03199964000003774,
-                0.03161154600002192,
-                0.03302128199999288,
-                0.03189084699999967,
-                0.03237584999999399,
-                0.03226585499999146,
-                0.03179584000002933,
-                0.03183968199999754,
-                0.03179251399996019,
-                0.03208153200000652,
-                0.032130536000011034,
-                0.0321730819999857,
-                0.032154007000031015,
-                0.03189455399996177,
-                0.03170949000002565,
-                0.031932163999954355,
-                0.03212115499997026,
-                0.03159412400003703,
-                0.03465755799999215,
-                0.032474474000025566,
-                0.032671211999968364,
-                0.032961761000024126,
-                0.03241093399998363,
-                0.032696487999999135,
-                0.03245410599998877,
-                0.03152453500001684,
-                0.03245983599998681
+                0.03254714399997738,
+                0.03189269099999592,
+                0.03211841300003471,
+                0.032682317000023886,
+                0.03163615099998651,
+                0.032412109999995664,
+                0.03268702600001916,
+                0.03194115100001227,
+                0.032197952000046826,
+                0.03240295499995227,
+                0.031988269999999375,
+                0.03171021999997947,
+                0.03166960400000107,
+                0.031991805999950884,
+                0.03215371999999661,
+                0.03201253400004589,
+                0.03203597899999977,
+                0.0316945399999895,
+                0.03166980400004604,
+                0.03210710199999767,
+                0.03202093100003367,
+                0.03171705199997632,
+                0.03206051399996568,
+                0.032262532000004285,
+                0.032179246999987754,
+                0.03189115899999706,
+                0.03172356499999296,
+                0.03207491200004142,
+                0.03171822400003066,
+                0.03251083699996116,
+                0.03581802899998365,
+                0.03337856900003544
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 30.910422060526553
+            "value": 31.040625524475608
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.221868023276329e-06,
-            "ram": 5.1063289788414087e-08,
+            "cpu": 1.2249529361724854e-06,
+            "ram": 5.119280590848577e-08,
             "gpu": 0.0,
-            "total": 1.272931313064743e-06
+            "total": 1.2761457420809711e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 785588.342227495
+            "value": 783609.5572981588
         }
     }
 }
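Numerically, the derived metrics in the updated report line up with the raw measurements: throughput is the sample count divided by the total latency, and the reported efficiency appears to equal the reciprocal of the reported total energy. A quick arithmetic check with the "+" values above (the exact aggregation semantics belong to optimum-benchmark; this is only a sanity check):

```python
# Arithmetic sanity check of the updated report's derived values (numbers copied from the diff).
count = 32                                  # "count"
total_latency_s = 1.030907060000061         # "total" latency in seconds
total_energy_kwh = 1.2761457420809711e-06   # "total" energy in kWh

throughput = count / total_latency_s        # samples/s   -> ~31.0406, matches "throughput.value"
efficiency = 1.0 / total_energy_kwh         # samples/kWh -> ~783609.56, matches "efficiency.value"

print(f"{throughput:.6f} samples/s, {efficiency:.4f} samples/kWh")
```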