IlyasMoutawwakil (HF staff) committed
Commit e2b06bf
1 Parent(s): 1f0112a

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

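The commit message indicates the file was pushed with the huggingface_hub client. As a rough illustration (not taken from this commit), such an upload could look like the sketch below; the repo_id and local file name are assumptions, and only path_in_repo mirrors the file touched here.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (assumed name)
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical target repo, not taken from this commit
    repo_type="dataset",                    # assumption; the actual repo type is not shown here
)

By default, upload_file creates a commit titled "Upload <path_in_repo> with huggingface_hub", which is consistent with the commit message above.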
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "8320ce6f24eaa5099bd3b49d8d44c0c1368af14b",
+ "optimum_benchmark_commit": "cc9b50765eac8085269af1a207e20c6a9b260b67",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 939.044864,
+ "max_ram": 938.319872,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -107,62 +107,60 @@
  },
  "latency": {
  "unit": "s",
- "count": 31,
- "total": 1.0170097550001742,
- "mean": 0.0328067662903282,
- "stdev": 0.0012534424279642537,
- "p50": 0.0325503919999619,
- "p90": 0.03432389400001057,
- "p95": 0.035196475000020655,
- "p99": 0.03688595170000326,
+ "count": 29,
+ "total": 1.0128014939998025,
+ "mean": 0.03492418944826905,
+ "stdev": 0.0019337774093019329,
+ "p50": 0.03563986000000341,
+ "p90": 0.03626556980001396,
+ "p95": 0.03654749099997616,
+ "p99": 0.03666277067997498,
  "values": [
- 0.03329010800001697,
- 0.032753242000012506,
- 0.0323538740000231,
- 0.03208663399999523,
- 0.03208828600003244,
- 0.03259089800002357,
- 0.03203295299999809,
- 0.03188997400002336,
- 0.03281402599998273,
- 0.03364345900001808,
- 0.03195632900002465,
- 0.03252420300003678,
- 0.0325503919999619,
- 0.032782827000005454,
- 0.031958463000023585,
- 0.031905854000001455,
- 0.032671144999994794,
- 0.032172424000009414,
- 0.03329541700003347,
- 0.03277426099998593,
- 0.03311885699997674,
- 0.03485286800002996,
- 0.032859260999998696,
- 0.03554008200001135,
- 0.03746275299999979,
- 0.03432389400001057,
- 0.032140733999995064,
- 0.03133081699996865,
- 0.03154250399995817,
- 0.031536683000013,
- 0.03216653300000871
+ 0.03669502499997179,
+ 0.03565520799998012,
+ 0.03597988299998178,
+ 0.034730174000003444,
+ 0.03618538599999965,
+ 0.03582947299997841,
+ 0.035484359999998105,
+ 0.03582267999996702,
+ 0.03546911099999761,
+ 0.03462150100000372,
+ 0.03657983099998319,
+ 0.03547607399997332,
+ 0.035578244000021186,
+ 0.03563986000000341,
+ 0.03594599000001608,
+ 0.03537362299999813,
+ 0.035477237000009154,
+ 0.03612183799998547,
+ 0.035718957000028695,
+ 0.035527399999978115,
+ 0.03594489799996836,
+ 0.03620721700002605,
+ 0.036498980999965625,
+ 0.035889484999984234,
+ 0.032514574000003904,
+ 0.030563399000016034,
+ 0.030306940999992094,
+ 0.030603262999989056,
+ 0.03036088099997869
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 30.481516866074397
+ "value": 28.633449073492045
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.2368307345443303e-06,
- "ram": 5.1688825600493256e-08,
+ "cpu": 1.1815988254868221e-06,
+ "ram": 4.938077681148652e-08,
  "gpu": 0.0,
- "total": 1.2885195601448236e-06
+ "total": 1.2309796022983088e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 776084.454540686
+ "value": 812361.1456541955
  }
  }
  }
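For reference, the aggregate fields in this report are consistent with simple derivations from the raw latency samples and energy totals. The sketch below (Python; the key path, local file name, and percentile method are assumptions rather than details taken from this file) shows how the new run's numbers line up.

import json
import numpy as np

with open("benchmark.json") as f:      # illustrative local path
    report = json.load(f)

# Key path is an assumption; the exact nesting inside benchmark.json may differ.
forward = report["forward"]
lat = forward["latency"]
values = np.array(lat["values"])       # 29 per-forward latencies in seconds

mean = lat["total"] / lat["count"]             # ~0.03492 s, matches "mean"
throughput = lat["count"] / lat["total"]       # ~28.63 samples/s, matches "throughput"

# The energy total appears to be kWh per sample: its reciprocal matches "efficiency".
energy = forward["energy"]
efficiency = 1.0 / energy["total"]             # ~812361 samples/kWh

# Percentiles: assuming numpy-style linear interpolation over the samples.
p50, p90, p95, p99 = np.percentile(values, [50, 90, 95, 99])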