IlyasMoutawwakil (HF staff) committed
Commit aa84ab7 · verified · 1 parent: 721baab

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

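The commit message indicates the report was pushed with huggingface_hub. As a rough sketch only (the exact call used for this commit is not shown on this page), such a file can be uploaded with HfApi.upload_file; the repo_id and local path below are placeholders:

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run (placeholder path)
    path_in_repo=(
        "cpu_inference_transformers_text-classification_FacebookAI/"
        "roberta-base/benchmark_report.json"
    ),
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical target repo, not taken from this commit
    repo_type="dataset",                    # assumption: reports are collected in a dataset repo
    commit_message=(
        "Upload cpu_inference_transformers_text-classification_FacebookAI/"
        "roberta-base/benchmark_report.json with huggingface_hub"
    ),
)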
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 939.15136,
+            "max_ram": 941.211648,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -11,58 +11,58 @@
         "latency": {
             "unit": "s",
             "count": 28,
-            "total": 1.0073551040000552,
-            "mean": 0.035976968000001976,
-            "stdev": 0.0007639329100182599,
-            "p50": 0.036214992500021026,
-            "p90": 0.03678551820001417,
-            "p95": 0.03702797020000901,
-            "p99": 0.03713457283997854,
+            "total": 1.0132585739999058,
+            "mean": 0.03618780621428235,
+            "stdev": 0.0008053944628156695,
+            "p50": 0.03630314699998394,
+            "p90": 0.037034712200005517,
+            "p95": 0.037139724299979096,
+            "p99": 0.037579104339975515,
             "values": [
-                0.03716170999996393,
-                0.03636561899998014,
-                0.035734768999986954,
-                0.0363423960000091,
-                0.03426812399999335,
-                0.036389765000024,
-                0.03610537200000863,
-                0.036271754000040346,
-                0.035466367000026366,
-                0.0361885880000159,
-                0.03584433399998943,
-                0.033916906000001745,
-                0.03624891099997285,
-                0.036966253999992205,
-                0.03564697500002012,
-                0.035568777999969825,
-                0.036023478999993586,
-                0.03450458599996864,
-                0.037061202000018056,
-                0.03500256900002796,
-                0.036241397000026154,
-                0.036244312000007994,
-                0.03634723599998324,
-                0.03626445100002229,
-                0.035882184999991296,
-                0.036067172000002756,
-                0.036521832999994785,
-                0.03670806000002358
+                0.03699890600000799,
+                0.036737686000037684,
+                0.03643077099997072,
+                0.037118259999999736,
+                0.034475253999971756,
+                0.036755330000005415,
+                0.03618499000003794,
+                0.03514075199996114,
+                0.036255281999956424,
+                0.036846369999977924,
+                0.0360508379999942,
+                0.034820121999985076,
+                0.03699084100003347,
+                0.03679513499997711,
+                0.03595709199998964,
+                0.03605068900003516,
+                0.03656843999999637,
+                0.03524655100000018,
+                0.0377373399999783,
+                0.035400138999989395,
+                0.03715128199996798,
+                0.035935301999984404,
+                0.0357431820000329,
+                0.0356066960000021,
+                0.03461956400002464,
+                0.03652623099998209,
+                0.03676451699999461,
+                0.03635101200001145
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 27.79556075987129
+            "value": 27.633617635692072
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.194548486459135e-06,
-            "ram": 4.9922078020406295e-08,
+            "cpu": 1.1867062009946266e-06,
+            "ram": 4.959422222038318e-08,
             "gpu": 0.0,
-            "total": 1.2444705644795412e-06
+            "total": 1.2363004232150097e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 803554.5625124666
+            "value": 808864.8852837011
         }
     }
 }
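For readers skimming the numbers: the derived latency and throughput fields are consistent with the raw "values" list (total is the sum of the 28 per-call latencies, mean is total / count, and the reported samples/s equals count / total). A minimal sketch to check this, assuming the updated report has been downloaded locally (the path is a placeholder):

import json

# Placeholder path; point this at the downloaded benchmark_report.json.
with open("benchmark_report.json") as f:
    report = json.load(f)

latency = report["forward"]["latency"]
values = latency["values"]

total = sum(values)         # should match latency["total"]
count = len(values)         # should match latency["count"] (28)
mean = total / count        # should match latency["mean"]
throughput = count / total  # should match the reported throughput value in samples/s

print(count, total, mean, throughput)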