IlyasMoutawwakil (HF staff) committed
Commit 1eb3b24
1 Parent(s): d845cd1

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

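For context, files like this are typically pushed with the huggingface_hub client. Below is a minimal sketch of such an upload; the repo_id, local path, and repo_type are illustrative assumptions, not values taken from this commit.

from huggingface_hub import HfApi

api = HfApi()

# Hypothetical example: push a local benchmark.json into a dataset repo.
# repo_id and path_or_fileobj are placeholders for illustration only.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="my-org/my-benchmark-dataset",  # hypothetical repo_id
    repo_type="dataset",
    commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)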
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -73,14 +73,14 @@
     "environment": {
         "cpu": " AMD EPYC 7763 64-Core Processor",
         "cpu_count": 4,
-        "cpu_ram_mb": 16757.342208,
+        "cpu_ram_mb": 16757.338112,
         "system": "Linux",
         "machine": "x86_64",
         "platform": "Linux-6.5.0-1025-azure-x86_64-with-glibc2.35",
         "processor": "x86_64",
         "python_version": "3.10.15",
         "optimum_benchmark_version": "0.5.0.dev0",
-        "optimum_benchmark_commit": "66c837b53c9420caaed58be9b5b51d16360a0e19",
+        "optimum_benchmark_commit": "7f5d48690ca4e72398f773487e70a5e7bfdf8cfd",
         "transformers_version": "4.46.3",
         "transformers_commit": null,
         "accelerate_version": "1.1.1",
@@ -101,7 +101,7 @@
         "load": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 967.286784,
+                "max_ram": 966.57408,
                 "max_global_vram": null,
                 "max_process_vram": null,
                 "max_reserved": null,
@@ -110,15 +110,15 @@
             "latency": {
                 "unit": "s",
                 "values": [
-                    1.2081203230000028
+                    1.2200339379999718
                 ],
                 "count": 1,
-                "total": 1.2081203230000028,
-                "mean": 1.2081203230000028,
-                "p50": 1.2081203230000028,
-                "p90": 1.2081203230000028,
-                "p95": 1.2081203230000028,
-                "p99": 1.2081203230000028,
+                "total": 1.2200339379999718,
+                "mean": 1.2200339379999718,
+                "p50": 1.2200339379999718,
+                "p90": 1.2200339379999718,
+                "p95": 1.2200339379999718,
+                "p99": 1.2200339379999718,
                 "stdev": 0,
                 "stdev_": 0
             },
@@ -129,7 +129,7 @@
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 981.573632,
+                "max_ram": 983.212032,
                 "max_global_vram": null,
                 "max_process_vram": null,
                 "max_reserved": null,
@@ -138,42 +138,46 @@
             "latency": {
                 "unit": "s",
                 "values": [
-                    0.12362005099998896,
-                    0.12140827799998988,
-                    0.12184100099995021,
-                    0.12109711400000833,
-                    0.1294178399999737,
-                    0.10377937399999837,
-                    0.062250968999990164,
-                    0.06226469399996404,
-                    0.06176833199998555,
-                    0.061504056999979184,
-                    0.06083423800004084
+                    0.07105428500000244,
+                    0.06966863500002773,
+                    0.07009183499997107,
+                    0.06852474499999062,
+                    0.06964770700000145,
+                    0.06732668399996555,
+                    0.06926920199998676,
+                    0.06710941000000048,
+                    0.06795657800000754,
+                    0.07233472199999369,
+                    0.06895773200000122,
+                    0.06914482000001954,
+                    0.07145710899999358,
+                    0.06922306599994954,
+                    0.06931671000000961
                 ],
-                "count": 11,
-                "total": 1.0297859479998692,
-                "mean": 0.09361690436362448,
-                "p50": 0.10377937399999837,
-                "p90": 0.12362005099998896,
-                "p95": 0.12651894549998133,
-                "p99": 0.12883806109997523,
-                "stdev": 0.029689830410819462,
-                "stdev_": 31.714176635769704
+                "count": 15,
+                "total": 1.0410832399999208,
+                "mean": 0.06940554933332806,
+                "p50": 0.06926920199998676,
+                "p90": 0.07129597939999713,
+                "p95": 0.07172039289999362,
+                "p99": 0.07221185617999368,
+                "stdev": 0.0013881329365925629,
+                "stdev_": 2.0000316256066157
             },
             "throughput": {
                 "unit": "samples/s",
-                "value": 21.363663043499592
+                "value": 28.816139620115564
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 2.411774219281083e-06,
-                "ram": 1.0079315167867893e-07,
+                "cpu": 2.457436530208332e-06,
+                "ram": 1.0269971323510385e-07,
                 "gpu": 0.0,
-                "total": 2.5125673709597615e-06
+                "total": 2.560136243443436e-06
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value": 795998.5563435982
+                "value": 781208.4240133872
             }
         }
     }
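As a sanity check, the aggregate forward-latency statistics in the new file can be recomputed from the per-iteration values. A minimal sketch follows; the population standard deviation, NumPy's default linear percentile interpolation, and the batch size of 2 are inferences that happen to reproduce the reported numbers, not values read from optimum-benchmark's source.

import numpy as np

# Per-iteration forward latencies (seconds) from the new benchmark.json.
values = np.array([
    0.07105428500000244, 0.06966863500002773, 0.07009183499997107,
    0.06852474499999062, 0.06964770700000145, 0.06732668399996555,
    0.06926920199998676, 0.06710941000000048, 0.06795657800000754,
    0.07233472199999369, 0.06895773200000122, 0.06914482000001954,
    0.07145710899999358, 0.06922306599994954, 0.06931671000000961,
])

total = values.sum()                            # ~1.04108324 s ("total")
mean = values.mean()                            # ~0.06940555 s ("mean")
p50, p90, p95, p99 = np.percentile(values, [50, 90, 95, 99])  # linear interpolation
stdev = values.std()                            # population stdev, ~0.00138813 ("stdev")
stdev_rel = 100 * stdev / mean                  # ~2.0 %, matching "stdev_"

# Throughput in samples/s appears to be (count * batch_size) / total;
# a batch size of 2 is an assumption inferred from the reported 28.816.
throughput = (len(values) * 2) / total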