IlyasMoutawwakil committed
Commit d425e8d
1 Parent(s): e1a2b56

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
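The commit message indicates the file was pushed programmatically with the huggingface_hub client. A minimal sketch of how such an upload can be done is shown below; the repo_id and repo_type are illustrative placeholders, since the target repository is not shown in this commit view.

from huggingface_hub import HfApi

api = HfApi()
# Push a local benchmark.json into the repo, preserving the nested path used in this commit.
# repo_id and repo_type are placeholders, not taken from the commit itself.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-results-repo>",
    repo_type="dataset",
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)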

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+cpu",
+ "version": "2.4.0+cpu",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "multiple-choice",
  "library": "transformers",
@@ -76,14 +76,14 @@
  "cpu_ram_mb": 16757.342208,
  "system": "Linux",
  "machine": "x86_64",
- "platform": "Linux-6.5.0-1023-azure-x86_64-with-glibc2.35",
+ "platform": "Linux-6.5.0-1024-azure-x86_64-with-glibc2.35",
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "328b924ef0be0164f0dc86652abfd3746f634c6b",
+ "optimum_benchmark_commit": "b0801269b3611e452bb077a62163b08a99ceb2a9",
  "transformers_version": "4.42.4",
  "transformers_commit": null,
- "accelerate_version": "0.32.1",
+ "accelerate_version": "0.33.0",
  "accelerate_commit": null,
  "diffusers_version": "0.29.2",
  "diffusers_commit": null,
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 941.71136,
+ "max_ram": 945.053696,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,31 +108,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 4.287023067999996,
- "mean": 4.287023067999996,
+ "total": 4.347855818000028,
+ "mean": 4.347855818000028,
  "stdev": 0.0,
- "p50": 4.287023067999996,
- "p90": 4.287023067999996,
- "p95": 4.287023067999996,
- "p99": 4.287023067999996,
+ "p50": 4.347855818000028,
+ "p90": 4.347855818000028,
+ "p95": 4.347855818000028,
+ "p99": 4.347855818000028,
  "values": [
- 4.287023067999996
+ 4.347855818000028
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 4.759811692767673e-05,
- "ram": 1.989373707093591e-06,
+ "cpu": 4.8796918657090935e-05,
+ "ram": 2.0394726085510227e-06,
  "gpu": 0,
- "total": 4.958749063477032e-05
+ "total": 5.0836391265641955e-05
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 952.983552,
+ "max_ram": 955.932672,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -140,56 +140,53 @@
  },
  "latency": {
  "unit": "s",
- "count": 25,
- "total": 1.0377472490001765,
- "mean": 0.04150988996000706,
- "stdev": 0.0011691304561456263,
- "p50": 0.0413546479999809,
- "p90": 0.042518649800013006,
- "p95": 0.04402539620002698,
- "p99": 0.04474112916004287,
+ "count": 22,
+ "total": 1.0172125079999432,
+ "mean": 0.0462369321818156,
+ "stdev": 0.0009397911811358658,
+ "p50": 0.04627410749998262,
+ "p90": 0.047383031800035266,
+ "p95": 0.04762960410002108,
+ "p99": 0.04808147007001594,
  "values": [
- 0.04173045999999658,
- 0.040863729000022886,
- 0.041269528000043465,
- 0.041737554000008004,
- 0.04100792899998851,
- 0.042637977000026694,
- 0.041212932999997065,
- 0.041786614999978156,
- 0.04154684600001701,
- 0.04195522100002336,
- 0.042172946999983196,
- 0.04485761700004787,
- 0.04233965899999248,
- 0.0418381719999843,
- 0.040965088999996624,
- 0.04169690700001638,
- 0.0413546479999809,
- 0.041019440000013674,
- 0.0403102949999834,
- 0.040927338000017244,
- 0.04437225100002706,
- 0.04009308899998132,
- 0.04003108200004135,
- 0.03997854400000733,
- 0.0400413790000016
+ 0.046382575999984965,
+ 0.045859977000020535,
+ 0.04425357700000632,
+ 0.047641256000019894,
+ 0.04659485400003405,
+ 0.046202107999988584,
+ 0.045493670999974256,
+ 0.04449169399998709,
+ 0.04560688199995866,
+ 0.047025721000011345,
+ 0.04569542899997714,
+ 0.046346106999976655,
+ 0.04507555799995089,
+ 0.04819848900001489,
+ 0.046679663000020355,
+ 0.04740821800004369,
+ 0.04651216000002023,
+ 0.046185686999990594,
+ 0.045658741000011105,
+ 0.04715635599995949,
+ 0.046017873999971926,
+ 0.046725910000020576
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 24.0906444455395
+ "value": 21.627732481639157
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.5766556527879504e-06,
- "ram": 6.589148354892131e-08,
+ "cpu": 1.558457268608941e-06,
+ "ram": 6.51309362021948e-08,
  "gpu": 0.0,
- "total": 1.6425471363368718e-06
+ "total": 1.623588204811136e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 608810.5344910533
+ "value": 615919.7246178105
  }
  }
  }
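For reference, the reported throughput and efficiency figures are consistent with being derived directly from the mean forward latency and the total energy per forward pass. The short check below uses the updated numbers from this diff; it is a consistency check against the file, not the internal computation used by optimum_benchmark.

# Figures taken from the "forward" section of the updated benchmark.json above.
mean_latency_s = 0.0462369321818156        # "latency.mean", seconds per forward pass
total_energy_kwh = 1.623588204811136e-06   # "energy.total", kWh per forward pass

throughput = 1.0 / mean_latency_s          # ~21.6277 samples/s, matches "throughput.value"
efficiency = 1.0 / total_energy_kwh        # ~615919.7 samples/kWh, matches "efficiency.value"

print(f"throughput: {throughput:.6f} samples/s")
print(f"efficiency: {efficiency:.1f} samples/kWh")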