IlyasMoutawwakil HF staff committed on
Commit
e335776
1 Parent(s): 2b5c800

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -78,7 +78,7 @@
78
  "processor": "x86_64",
79
  "python_version": "3.10.14",
80
  "optimum_benchmark_version": "0.2.0",
81
- "optimum_benchmark_commit": "791c6e35de952695e40c34df217c20ec4e890c8e",
82
  "transformers_version": "4.40.2",
83
  "transformers_commit": null,
84
  "accelerate_version": "0.30.1",
@@ -97,7 +97,7 @@
97
  "forward": {
98
  "memory": {
99
  "unit": "MB",
100
- "max_ram": 937.099264,
101
  "max_global_vram": null,
102
  "max_process_vram": null,
103
  "max_reserved": null,
@@ -105,59 +105,61 @@
105
  },
106
  "latency": {
107
  "unit": "s",
108
- "count": 28,
109
- "total": 1.0172840590000192,
110
- "mean": 0.03633157353571497,
111
- "stdev": 0.0013946756565791322,
112
- "p50": 0.03672506449998991,
113
- "p90": 0.03729972290001911,
114
- "p95": 0.0374090029999735,
115
- "p99": 0.03778023807997841,
116
  "values": [
117
- 0.03659225200004812,
118
- 0.03686416900001177,
119
- 0.036792756000011195,
120
- 0.036583625000048414,
121
- 0.03685583300000417,
122
- 0.03655214700000897,
123
- 0.03686132299998235,
124
- 0.03651367500003744,
125
- 0.03617546399999583,
126
- 0.037414875999957076,
127
- 0.03665737299996863,
128
- 0.03687616100000923,
129
- 0.03707308899998907,
130
- 0.03739809600000399,
131
- 0.03524404600000253,
132
- 0.03715390999997226,
133
- 0.03519551499999807,
134
- 0.03635019000000739,
135
- 0.036554000000023734,
136
- 0.03689608799999178,
137
- 0.037257563000025584,
138
- 0.03543118499999309,
139
- 0.036294134999991456,
140
- 0.03791537199998629,
141
- 0.03716521199999079,
142
- 0.0369275669999638,
143
- 0.03251181199999564,
144
- 0.031176625000000513
 
 
145
  ]
146
  },
147
  "throughput": {
148
  "unit": "samples/s",
149
- "value": 27.524268912189324
150
  },
151
  "energy": {
152
  "unit": "kWh",
153
- "cpu": 1.2076810554221825e-06,
154
- "ram": 5.0470838287105494e-08,
155
  "gpu": 0.0,
156
- "total": 1.2581518937092882e-06
157
  },
158
  "efficiency": {
159
  "unit": "samples/kWh",
160
- "value": 794816.5917008606
161
  }
162
  }
163
  }
 
78
  "processor": "x86_64",
79
  "python_version": "3.10.14",
80
  "optimum_benchmark_version": "0.2.0",
81
+ "optimum_benchmark_commit": "466c44af5d8cf99af048e74ddb3b2f8ec6606263",
82
  "transformers_version": "4.40.2",
83
  "transformers_commit": null,
84
  "accelerate_version": "0.30.1",
 
97
  "forward": {
98
  "memory": {
99
  "unit": "MB",
100
+ "max_ram": 936.771584,
101
  "max_global_vram": null,
102
  "max_process_vram": null,
103
  "max_reserved": null,
 
105
  },
106
  "latency": {
107
  "unit": "s",
108
+ "count": 30,
109
+ "total": 1.0271038589999648,
110
+ "mean": 0.034236795299998825,
111
+ "stdev": 0.0021437368533424946,
112
+ "p50": 0.03524625299999684,
113
+ "p90": 0.035639665400020705,
114
+ "p95": 0.03570536530002073,
115
+ "p99": 0.03580954912002426,
116
  "values": [
117
+ 0.03483010499996908,
118
+ 0.035635139000021354,
119
+ 0.03528264700003092,
120
+ 0.0350599479999687,
121
+ 0.035569274000010864,
122
+ 0.035431515999960084,
123
+ 0.03584376100002373,
124
+ 0.03506825400000935,
125
+ 0.03504128200000878,
126
+ 0.03547050899999249,
127
+ 0.03539834399998654,
128
+ 0.03572578900002554,
129
+ 0.03535935000002155,
130
+ 0.035164383999983784,
131
+ 0.03529412700004286,
132
+ 0.03524356299999454,
133
+ 0.03526700700001584,
134
+ 0.03562901699996246,
135
+ 0.035425825999993776,
136
+ 0.035248942999999144,
137
+ 0.034986099000036575,
138
+ 0.03568040300001485,
139
+ 0.03513595100002931,
140
+ 0.034368072999996,
141
+ 0.030366867999987335,
142
+ 0.030147615999965183,
143
+ 0.029856117999997878,
144
+ 0.02980495199994948,
145
+ 0.029916071000002376,
146
+ 0.02985292299996445
147
  ]
148
  },
149
  "throughput": {
150
  "unit": "samples/s",
151
+ "value": 29.208341237476578
152
  },
153
  "energy": {
154
  "unit": "kWh",
155
+ "cpu": 1.1773802486120487e-06,
156
+ "ram": 4.9204821180131154e-08,
157
  "gpu": 0.0,
158
+ "total": 1.22658506979218e-06
159
  },
160
  "efficiency": {
161
  "unit": "samples/kWh",
162
+ "value": 815271.6225132513
163
  }
164
  }
165
  }