IlyasMoutawwakil committed
Commit
b78bbdc
1 Parent(s): 2d7b90d

Upload cpu_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json with huggingface_hub
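Commits like this are typically produced programmatically with the huggingface_hub client. Below is a minimal sketch of such an upload; the repo_id and repo_type are placeholders (assumptions), not values taken from this commit.

```python
# Minimal sketch of uploading a benchmark result file with huggingface_hub.
# repo_id and repo_type are placeholders / assumptions, not from this commit.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file
    path_in_repo="cpu_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder
    repo_type="dataset",                     # assumption: results are stored in a dataset repo
    commit_message=(
        "Upload cpu_inference_timm_image-classification_timm/"
        "resnet50.a1_in1k/benchmark.json with huggingface_hub"
    ),
)
```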

cpu_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cpu_inference_timm_image-classification_timm/resnet50.a1_in1k",
  "backend": {
  "name": "pytorch",
- "version": "2.4.1+cpu",
+ "version": "2.5.1+cpu",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "image-classification",
  "library": "timm",
@@ -45,7 +45,6 @@
  "warmup_runs": 1,
  "input_shapes": {
  "batch_size": 1,
- "num_choices": 2,
  "sequence_length": 2
  },
  "new_tokens": null,
@@ -73,23 +72,23 @@
  "environment": {
  "cpu": " AMD EPYC 7763 64-Core Processor",
  "cpu_count": 4,
- "cpu_ram_mb": 16766.783488,
+ "cpu_ram_mb": 16757.342208,
  "system": "Linux",
  "machine": "x86_64",
- "platform": "Linux-6.8.0-1015-azure-x86_64-with-glibc2.39",
+ "platform": "Linux-6.5.0-1025-azure-x86_64-with-glibc2.35",
  "processor": "x86_64",
  "python_version": "3.10.15",
  "optimum_benchmark_version": "0.5.0.dev0",
- "optimum_benchmark_commit": "08c9f59440cf4e5a5d6711ec19e8329ab2de652d",
- "transformers_version": "4.45.2",
+ "optimum_benchmark_commit": "6807ba28334c8c98abf72a03d78f86133328d180",
+ "transformers_version": "4.46.3",
  "transformers_commit": null,
- "accelerate_version": "1.0.1",
+ "accelerate_version": "1.1.1",
  "accelerate_commit": null,
- "diffusers_version": "0.30.3",
+ "diffusers_version": "0.31.0",
  "diffusers_commit": null,
  "optimum_version": null,
  "optimum_commit": null,
- "timm_version": "1.0.9",
+ "timm_version": "1.0.11",
  "timm_commit": null,
  "peft_version": null,
  "peft_commit": null
@@ -101,7 +100,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 527.93344,
+ "max_ram": 546.848768,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -110,15 +109,15 @@
  "latency": {
  "unit": "s",
  "values": [
- 0.25145040599997515
+ 0.2555340830000432
  ],
  "count": 1,
- "total": 0.25145040599997515,
- "mean": 0.25145040599997515,
- "p50": 0.25145040599997515,
- "p90": 0.25145040599997515,
- "p95": 0.25145040599997515,
- "p99": 0.25145040599997515,
+ "total": 0.2555340830000432,
+ "mean": 0.2555340830000432,
+ "p50": 0.2555340830000432,
+ "p90": 0.2555340830000432,
+ "p95": 0.2555340830000432,
+ "p99": 0.2555340830000432,
  "stdev": 0,
  "stdev_": 0
  },
@@ -129,7 +128,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 564.625408,
+ "max_ram": 577.7408,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -138,43 +137,43 @@
  "latency": {
  "unit": "s",
  "values": [
- 0.09016318199999773,
- 0.0888788719999809,
- 0.0871197900000027,
- 0.09159124999999335,
- 0.08800540699999715,
- 0.087891084000006,
- 0.08812491000000477,
- 0.0898069200000009,
- 0.09553283299999293,
- 0.08288146300000676,
- 0.0796067289999769,
- 0.07841185600000244
+ 0.09017958599997655,
+ 0.087745716000029,
+ 0.08654123799999525,
+ 0.08947090300000582,
+ 0.08668929300000627,
+ 0.08827343999996629,
+ 0.08725954499999489,
+ 0.0863580250000382,
+ 0.09329994799998076,
+ 0.09348609500000293,
+ 0.07862296399997604,
+ 0.07857237400003214
  ],
  "count": 12,
- "total": 1.0480142959999625,
- "mean": 0.08733452466666354,
- "p50": 0.08806515850000096,
- "p90": 0.09144844319999379,
- "p95": 0.09336496234999316,
- "p99": 0.09509925886999299,
- "stdev": 0.004676397129276531,
- "stdev_": 5.354580158448561
+ "total": 1.0464991270000041,
+ "mean": 0.08720826058333368,
+ "p50": 0.08750263050001195,
+ "p90": 0.09298791179998034,
+ "p95": 0.09338371414999073,
+ "p99": 0.0934656188300005,
+ "stdev": 0.00448717880112048,
+ "stdev_": 5.145359821541977
  },
  "throughput": {
  "unit": "samples/s",
- "value": 11.450225484329108
+ "value": 11.466803641203565
  },
  "energy": {
  "unit": "kWh",
- "cpu": 3.0726738568376434e-06,
- "ram": 1.284809209970042e-07,
+ "cpu": 3.1635493914530474e-06,
+ "ram": 1.3221026070587393e-07,
  "gpu": 0.0,
- "total": 3.2011547778346473e-06
+ "total": 3.295759652158921e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 312387.2694079568
+ "value": 303420.183976383
  }
  }
  }
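As a reading aid, the derived statistics in the "forward" section can be reproduced from the raw latency values listed above. The sketch below is an illustration, not the optimum-benchmark source; it assumes numpy-style linear-interpolation percentiles, population standard deviation, and batch_size = 1, which matches the numbers in the new side of this diff.

```python
# Sketch: reproduce the derived "forward" statistics from the raw latency
# values above (new side of the diff). Assumptions: numpy default percentile
# interpolation, population stdev (ddof=0), batch_size = 1.
import numpy as np

values = np.array([
    0.09017958599997655, 0.087745716000029, 0.08654123799999525,
    0.08947090300000582, 0.08668929300000627, 0.08827343999996629,
    0.08725954499999489, 0.0863580250000382, 0.09329994799998076,
    0.09348609500000293, 0.07862296399997604, 0.07857237400003214,
])

mean = values.mean()
print("count  :", len(values))                  # 12
print("total  :", values.sum())                 # ~1.0465 s
print("mean   :", mean)                         # ~0.08721 s
print("p50    :", np.percentile(values, 50))
print("p90    :", np.percentile(values, 90))
print("p95    :", np.percentile(values, 95))
print("p99    :", np.percentile(values, 99))
print("stdev  :", values.std())                 # population stdev
print("stdev_%:", values.std() / mean * 100)    # relative stdev in percent
print("throughput (samples/s):", 1 / mean)      # batch_size = 1

# Numerically, the reported efficiency equals batch_size divided by the
# reported total energy of the forward measurement.
print("efficiency (samples/kWh):", 1 / 3.295759652158921e-06)
```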