IlyasMoutawwakil committed on
Commit 4daed91
1 Parent(s): 567f50f

Upload cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub

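For reference, uploads like this one are typically pushed with the huggingface_hub client. A minimal sketch, assuming the target dataset repo id (hypothetical, not taken from this commit) and that a write token is already configured locally:

from huggingface_hub import HfApi

api = HfApi()
# Hypothetical repo id; the actual benchmark dataset repo is an assumption here.
api.upload_file(
    path_or_fileobj="cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json",
    path_in_repo="cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",
    repo_type="dataset",
    commit_message="Upload cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub",
)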
cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json CHANGED
@@ -3,7 +3,7 @@
   "name": "cpu_inference_transformers_image-classification_google/vit-base-patch16-224",
   "backend": {
     "name": "pytorch",
-    "version": "2.4.1+cpu",
+    "version": "2.5.1+cpu",
     "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
     "task": "image-classification",
     "library": "transformers",
@@ -45,7 +45,6 @@
   "warmup_runs": 1,
   "input_shapes": {
     "batch_size": 1,
-    "num_choices": 2,
     "sequence_length": 2
   },
   "new_tokens": null,
@@ -73,23 +72,23 @@
   "environment": {
     "cpu": " AMD EPYC 7763 64-Core Processor",
     "cpu_count": 4,
-    "cpu_ram_mb": 16766.783488,
+    "cpu_ram_mb": 16757.342208,
     "system": "Linux",
     "machine": "x86_64",
-    "platform": "Linux-6.8.0-1015-azure-x86_64-with-glibc2.39",
+    "platform": "Linux-6.5.0-1025-azure-x86_64-with-glibc2.35",
     "processor": "x86_64",
     "python_version": "3.10.15",
     "optimum_benchmark_version": "0.5.0.dev0",
-    "optimum_benchmark_commit": "08c9f59440cf4e5a5d6711ec19e8329ab2de652d",
+    "optimum_benchmark_commit": "6807ba28334c8c98abf72a03d78f86133328d180",
-    "transformers_version": "4.45.2",
+    "transformers_version": "4.46.3",
     "transformers_commit": null,
-    "accelerate_version": "1.0.1",
+    "accelerate_version": "1.1.1",
     "accelerate_commit": null,
-    "diffusers_version": "0.30.3",
+    "diffusers_version": "0.31.0",
     "diffusers_commit": null,
     "optimum_version": null,
     "optimum_commit": null,
-    "timm_version": "1.0.9",
+    "timm_version": "1.0.11",
     "timm_commit": null,
     "peft_version": null,
     "peft_commit": null
@@ -101,7 +100,7 @@
   "load": {
     "memory": {
       "unit": "MB",
-      "max_ram": 807.911424,
+      "max_ram": 831.520768,
       "max_global_vram": null,
       "max_process_vram": null,
       "max_reserved": null,
@@ -110,15 +109,15 @@
     "latency": {
       "unit": "s",
       "values": [
-        1.158696722000002
+        1.177148917000011
       ],
       "count": 1,
-      "total": 1.158696722000002,
+      "total": 1.177148917000011,
-      "mean": 1.158696722000002,
+      "mean": 1.177148917000011,
-      "p50": 1.158696722000002,
+      "p50": 1.177148917000011,
-      "p90": 1.158696722000002,
+      "p90": 1.177148917000011,
-      "p95": 1.158696722000002,
+      "p95": 1.177148917000011,
-      "p99": 1.158696722000002,
+      "p99": 1.177148917000011,
       "stdev": 0,
       "stdev_": 0
     },
@@ -129,7 +128,7 @@
   "forward": {
     "memory": {
       "unit": "MB",
-      "max_ram": 826.753024,
+      "max_ram": 850.395136,
       "max_global_vram": null,
       "max_process_vram": null,
       "max_reserved": null,
@@ -138,35 +137,35 @@
     "latency": {
       "unit": "s",
       "values": [
-        0.26408713600000056,
+        0.2948390839999888,
-        0.26490086000001156,
+        0.4244483509999668,
-        0.2591085029999931,
+        0.23291660699999284,
-        0.23091204800005016
+        0.23102626000002147
      ],
       "count": 4,
-      "total": 1.0190085470000554,
+      "total": 1.18323030199997,
-      "mean": 0.25475213675001385,
+      "mean": 0.2958075754999925,
-      "p50": 0.26159781949999683,
+      "p50": 0.2638778454999908,
-      "p90": 0.2646567428000083,
+      "p90": 0.38556557089997345,
-      "p95": 0.2647788014000099,
+      "p95": 0.4050069609499701,
-      "p99": 0.2648764482800112,
+      "p99": 0.42056007298996745,
-      "stdev": 0.01394154361174185,
+      "stdev": 0.07858320407980815,
-      "stdev_": 5.47259143322616
+      "stdev_": 26.565649627796684
     },
     "throughput": {
       "unit": "samples/s",
-      "value": 3.9253841508748235
+      "value": 3.380576032610853
     },
     "energy": {
       "unit": "kWh",
-      "cpu": 9.066084727777543e-06,
+      "cpu": 9.023101321110845e-06,
-      "ram": 3.791149450087479e-07,
+      "ram": 3.771018160836499e-07,
       "gpu": 0.0,
-      "total": 9.44519967278629e-06
+      "total": 9.400203137194496e-06
     },
     "efficiency": {
       "unit": "samples/kWh",
-      "value": 105873.88669836395
+      "value": 106380.67980076135
     }
   }
 }
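To see what a refresh like this changes in practice, the file can be fetched at both revisions referenced above (parent 567f50f and this commit 4daed91) and the forward-pass statistics compared. A minimal sketch, reusing the hypothetical repo id from the upload example and assuming the short revision hashes resolve on the Hub (full hashes can be substituted); the lookup is deliberately structure-agnostic since only the "forward" -> "latency" block is shown in the diff:

import json
from huggingface_hub import hf_hub_download

REPO_ID = "IlyasMoutawwakil/benchmarks"  # assumption, see the upload sketch above
FILENAME = "cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json"

def find_key(obj, key):
    # Recursively return the first value stored under `key` in the nested report.
    if isinstance(obj, dict):
        if key in obj:
            return obj[key]
        for value in obj.values():
            found = find_key(value, key)
            if found is not None:
                return found
    elif isinstance(obj, list):
        for item in obj:
            found = find_key(item, key)
            if found is not None:
                return found
    return None

for rev in ("567f50f", "4daed91"):  # parent commit vs. this commit
    path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="dataset", revision=rev)
    with open(path) as f:
        report = json.load(f)
    latency = find_key(report, "forward")["latency"]
    print(rev, "mean forward latency:", latency["mean"], latency["unit"])

In this particular diff the mean forward latency rises from roughly 0.255 s to 0.296 s and the relative stdev from about 5.5% to 26.6%, so most of the throughput drop comes from a single slow sample (0.424 s); the throughput field itself is just count divided by total latency, 4 / 1.18323 s ≈ 3.3806 samples/s, matching the reported value.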