IlyasMoutawwakil committed
Commit 92f2e8c
1 Parent(s): ac876d4

Upload cpu_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

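Commits like this one are usually made programmatically; a minimal sketch of how such a file can be pushed with huggingface_hub's HfApi.upload_file (the repo_id and repo_type below are illustrative assumptions, not taken from this commit):

from huggingface_hub import HfApi

api = HfApi()
# Upload the local benchmark result to the same path inside the Hub repo.
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo="cpu_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical target repo
    repo_type="dataset",                    # assumption: results are stored in a dataset repo
    commit_message="Upload cpu_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)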
cpu_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
     "name": "cpu_training_transformers_multiple-choice_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.4.1+cpu",
+        "version": "2.5.1+cpu",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "multiple-choice",
         "library": "transformers",
@@ -79,23 +79,23 @@
     "environment": {
         "cpu": " AMD EPYC 7763 64-Core Processor",
         "cpu_count": 4,
-        "cpu_ram_mb": 16766.783488,
+        "cpu_ram_mb": 16757.342208,
         "system": "Linux",
         "machine": "x86_64",
-        "platform": "Linux-6.8.0-1015-azure-x86_64-with-glibc2.39",
+        "platform": "Linux-6.5.0-1025-azure-x86_64-with-glibc2.35",
         "processor": "x86_64",
         "python_version": "3.10.15",
         "optimum_benchmark_version": "0.5.0.dev0",
-        "optimum_benchmark_commit": "08c9f59440cf4e5a5d6711ec19e8329ab2de652d",
-        "transformers_version": "4.45.2",
+        "optimum_benchmark_commit": "6807ba28334c8c98abf72a03d78f86133328d180",
+        "transformers_version": "4.46.3",
         "transformers_commit": null,
-        "accelerate_version": "1.0.1",
+        "accelerate_version": "1.1.1",
         "accelerate_commit": null,
-        "diffusers_version": "0.30.3",
+        "diffusers_version": "0.31.0",
         "diffusers_commit": null,
         "optimum_version": null,
         "optimum_commit": null,
-        "timm_version": "1.0.9",
+        "timm_version": "1.0.11",
         "timm_commit": null,
         "peft_version": null,
         "peft_commit": null
@@ -107,7 +107,7 @@
     "overall": {
         "memory": {
             "unit": "MB",
-            "max_ram": 2837.921792,
+            "max_ram": 2872.283136,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -116,42 +116,42 @@
         "latency": {
             "unit": "s",
             "values": [
-                0.8517281370000092,
-                0.737927233999983,
-                0.7432572670000184,
-                0.7577234400000066,
-                0.7178666339999893
+                0.8178629320000255,
+                0.7014428329999873,
+                0.7075342919999912,
+                0.7076107400000069,
+                0.6982235800000467
             ],
             "count": 5,
-            "total": 3.8085027120000063,
-            "mean": 0.7617005424000013,
-            "p50": 0.7432572670000184,
-            "p90": 0.8141262582000082,
-            "p95": 0.8329271976000087,
-            "p99": 0.8479679491200091,
-            "stdev": 0.0467921387806671,
-            "stdev_": 6.143114803782634
+            "total": 3.6326743770000576,
+            "mean": 0.7265348754000115,
+            "p50": 0.7075342919999912,
+            "p90": 0.7737620552000181,
+            "p95": 0.7958124936000217,
+            "p99": 0.8134528443200247,
+            "stdev": 0.045806329325853795,
+            "stdev_": 6.304766760251393
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 13.128518943273347
+            "value": 13.763964179275298
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 0.00016087763887222292,
-            "ram": 6.728507669543458e-06,
+            "cpu": 0.00014890678425000015,
+            "ram": 6.22417347027335e-06,
             "gpu": 0,
-            "total": 0.00016760614654176637
+            "total": 0.0001551309577202735
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 59663.683023152524
+            "value": 64461.66611071684
         }
     },
     "warmup": {
         "memory": {
             "unit": "MB",
-            "max_ram": 2837.921792,
+            "max_ram": 2872.283136,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -160,22 +160,22 @@
         "latency": {
             "unit": "s",
             "values": [
-                0.8517281370000092,
-                0.737927233999983
+                0.8178629320000255,
+                0.7014428329999873
             ],
             "count": 2,
-            "total": 1.589655370999992,
-            "mean": 0.794827685499996,
-            "p50": 0.794827685499996,
-            "p90": 0.8403480467000065,
-            "p95": 0.8460380918500079,
-            "p99": 0.8505901279700089,
-            "stdev": 0.056900451500013105,
-            "stdev_": 7.158841159919987
+            "total": 1.5193057650000128,
+            "mean": 0.7596528825000064,
+            "p50": 0.7596528825000064,
+            "p90": 0.8062209221000216,
+            "p95": 0.8120419270500235,
+            "p99": 0.8166987310100251,
+            "stdev": 0.05821004950001907,
+            "stdev_": 7.662716859370118
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 5.032537332269385
+            "value": 5.265562853965695
         },
         "energy": null,
         "efficiency": null
@@ -183,7 +183,7 @@
     "train": {
         "memory": {
             "unit": "MB",
-            "max_ram": 2837.921792,
+            "max_ram": 2872.283136,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -192,23 +192,23 @@
         "latency": {
            "unit": "s",
             "values": [
-                0.7432572670000184,
-                0.7577234400000066,
-                0.7178666339999893
+                0.7075342919999912,
+                0.7076107400000069,
+                0.6982235800000467
             ],
             "count": 3,
-            "total": 2.2188473410000142,
-            "mean": 0.7396157803333381,
-            "p50": 0.7432572670000184,
-            "p90": 0.7548302054000089,
-            "p95": 0.7562768227000077,
-            "p99": 0.7574341165400068,
-            "stdev": 0.016473950446861275,
-            "stdev_": 2.2273660033911953
+            "total": 2.1133686120000448,
+            "mean": 0.704456204000015,
+            "p50": 0.7075342919999912,
+            "p90": 0.7075954504000037,
+            "p95": 0.7076030952000053,
+            "p99": 0.7076092110400066,
+            "stdev": 0.004407241201924517,
+            "stdev_": 0.6256231653436363
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 8.11232015262824
+            "value": 8.517207976778458
         },
         "energy": null,
         "efficiency": null