IlyasMoutawwakil HF staff committed on
Commit
dd15836
1 Parent(s): d447bf1

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
3
  "name": "cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.4.1+cpu",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "multiple-choice",
9
  "library": "transformers",
@@ -44,9 +44,9 @@
44
  "duration": 1,
45
  "warmup_runs": 1,
46
  "input_shapes": {
47
- "batch_size": 1,
48
- "num_choices": 2,
49
- "sequence_length": 2
50
  },
51
  "new_tokens": null,
52
  "memory": true,
@@ -73,23 +73,23 @@
73
  "environment": {
74
  "cpu": " AMD EPYC 7763 64-Core Processor",
75
  "cpu_count": 4,
76
- "cpu_ram_mb": 16766.783488,
77
  "system": "Linux",
78
  "machine": "x86_64",
79
- "platform": "Linux-6.8.0-1015-azure-x86_64-with-glibc2.39",
80
  "processor": "x86_64",
81
  "python_version": "3.10.15",
82
  "optimum_benchmark_version": "0.5.0.dev0",
83
- "optimum_benchmark_commit": "08c9f59440cf4e5a5d6711ec19e8329ab2de652d",
84
- "transformers_version": "4.45.2",
85
  "transformers_commit": null,
86
- "accelerate_version": "1.0.1",
87
  "accelerate_commit": null,
88
- "diffusers_version": "0.30.3",
89
  "diffusers_commit": null,
90
  "optimum_version": null,
91
  "optimum_commit": null,
92
- "timm_version": "1.0.9",
93
  "timm_commit": null,
94
  "peft_version": null,
95
  "peft_commit": null
@@ -101,7 +101,7 @@
101
  "load": {
102
  "memory": {
103
  "unit": "MB",
104
- "max_ram": 946.95424,
105
  "max_global_vram": null,
106
  "max_process_vram": null,
107
  "max_reserved": null,
@@ -110,15 +110,15 @@
110
  "latency": {
111
  "unit": "s",
112
  "values": [
113
- 1.2193013000000121
114
  ],
115
  "count": 1,
116
- "total": 1.2193013000000121,
117
- "mean": 1.2193013000000121,
118
- "p50": 1.2193013000000121,
119
- "p90": 1.2193013000000121,
120
- "p95": 1.2193013000000121,
121
- "p99": 1.2193013000000121,
122
  "stdev": 0,
123
  "stdev_": 0
124
  },
@@ -129,7 +129,7 @@
129
  "forward": {
130
  "memory": {
131
  "unit": "MB",
132
- "max_ram": 958.869504,
133
  "max_global_vram": null,
134
  "max_process_vram": null,
135
  "max_reserved": null,
@@ -138,55 +138,39 @@
138
  "latency": {
139
  "unit": "s",
140
  "values": [
141
- 0.04616789599998583,
142
- 0.044532373000009784,
143
- 0.043579761000017925,
144
- 0.04393427000002248,
145
- 0.043614385000012135,
146
- 0.04306340000002251,
147
- 0.04390326200001482,
148
- 0.043052499999987504,
149
- 0.044416527000009864,
150
- 0.04373782499999379,
151
- 0.04276013499998044,
152
- 0.04314133500000139,
153
- 0.04251446899999678,
154
- 0.04311764999999923,
155
- 0.044444550000008576,
156
- 0.04435035400001652,
157
- 0.042877333000006956,
158
- 0.044962365999992926,
159
- 0.04063359899998886,
160
- 0.03932723799999849,
161
- 0.03981202899998948,
162
- 0.038777404999990495,
163
- 0.039950648000001365,
164
- 0.04234315999997307
165
  ],
166
- "count": 24,
167
- "total": 1.0290144700000212,
168
- "mean": 0.04287560291666755,
169
- "p50": 0.04312949250000031,
170
- "p90": 0.04450602610000942,
171
- "p95": 0.04489786704999545,
172
- "p99": 0.04589062409998746,
173
- "stdev": 0.0018471608259562047,
174
- "stdev_": 4.3081862418269
175
  },
176
  "throughput": {
177
  "unit": "samples/s",
178
- "value": 23.32328718370647
179
  },
180
  "energy": {
181
  "unit": "kWh",
182
- "cpu": 1.5413222297008626e-06,
183
- "ram": 6.444993102457781e-08,
184
  "gpu": 0.0,
185
- "total": 1.6057721607254403e-06
186
  },
187
  "efficiency": {
188
  "unit": "samples/kWh",
189
- "value": 622753.354715173
190
  }
191
  }
192
  }
 
3
  "name": "cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.5.1+cpu",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "multiple-choice",
9
  "library": "transformers",
 
44
  "duration": 1,
45
  "warmup_runs": 1,
46
  "input_shapes": {
47
+ "batch_size": 2,
48
+ "sequence_length": 16,
49
+ "num_choices": 2
50
  },
51
  "new_tokens": null,
52
  "memory": true,
 
73
  "environment": {
74
  "cpu": " AMD EPYC 7763 64-Core Processor",
75
  "cpu_count": 4,
76
+ "cpu_ram_mb": 16757.342208,
77
  "system": "Linux",
78
  "machine": "x86_64",
79
+ "platform": "Linux-6.5.0-1025-azure-x86_64-with-glibc2.35",
80
  "processor": "x86_64",
81
  "python_version": "3.10.15",
82
  "optimum_benchmark_version": "0.5.0.dev0",
83
+ "optimum_benchmark_commit": "04bb8676c2e912cf262caaa2d4796a0191a83273",
84
+ "transformers_version": "4.46.3",
85
  "transformers_commit": null,
86
+ "accelerate_version": "1.1.1",
87
  "accelerate_commit": null,
88
+ "diffusers_version": "0.31.0",
89
  "diffusers_commit": null,
90
  "optimum_version": null,
91
  "optimum_commit": null,
92
+ "timm_version": "1.0.11",
93
  "timm_commit": null,
94
  "peft_version": null,
95
  "peft_commit": null
 
101
  "load": {
102
  "memory": {
103
  "unit": "MB",
104
+ "max_ram": 966.627328,
105
  "max_global_vram": null,
106
  "max_process_vram": null,
107
  "max_reserved": null,
 
110
  "latency": {
111
  "unit": "s",
112
  "values": [
113
+ 1.215279728999974
114
  ],
115
  "count": 1,
116
+ "total": 1.215279728999974,
117
+ "mean": 1.215279728999974,
118
+ "p50": 1.215279728999974,
119
+ "p90": 1.215279728999974,
120
+ "p95": 1.215279728999974,
121
+ "p99": 1.215279728999974,
122
  "stdev": 0,
123
  "stdev_": 0
124
  },
 
129
  "forward": {
130
  "memory": {
131
  "unit": "MB",
132
+ "max_ram": 983.535616,
133
  "max_global_vram": null,
134
  "max_process_vram": null,
135
  "max_reserved": null,
 
138
  "latency": {
139
  "unit": "s",
140
  "values": [
141
+ 0.17443563800003403,
142
+ 0.17703661100000545,
143
+ 0.1795938349999915,
144
+ 0.1293138830000089,
145
+ 0.09296477599997388,
146
+ 0.09255506399995284,
147
+ 0.09157841800004007,
148
+ 0.0911549719999698
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
149
  ],
150
+ "count": 8,
151
+ "total": 1.0286331969999765,
152
+ "mean": 0.12857914962499706,
153
+ "p50": 0.11113932949999139,
154
+ "p90": 0.17780377820000126,
155
+ "p95": 0.17869880659999637,
156
+ "p99": 0.17941482931999247,
157
+ "stdev": 0.03935372061755614,
158
+ "stdev_": 30.60661136143133
159
  },
160
  "throughput": {
161
  "unit": "samples/s",
162
+ "value": 15.55462145949035
163
  },
164
  "energy": {
165
  "unit": "kWh",
166
+ "cpu": 3.7052386494949743e-06,
167
+ "ram": 1.5484962988065027e-07,
168
  "gpu": 0.0,
169
+ "total": 3.8600882793756244e-06
170
  },
171
  "efficiency": {
172
  "unit": "samples/kWh",
173
+ "value": 518122.8653981725
174
  }
175
  }
176
  }