IlyasMoutawwakil committed
Commit eaaf5b6
1 Parent(s): 4fbadc5

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
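The commit message notes the report was pushed with the `huggingface_hub` client. A minimal sketch of such an upload is below; the local path, `repo_id`, and `repo_type` are illustrative placeholders, not values taken from this commit.

```python
# Minimal sketch of an upload via huggingface_hub; path and repo_id are
# illustrative placeholders, not values from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report file (assumed path)
    path_in_repo=(
        "cpu_inference_transformers_text-classification_"
        "FacebookAI/roberta-base/benchmark_report.json"
    ),
    repo_id="<user>/<benchmark-repo>",  # placeholder repo id
    repo_type="dataset",                # assumption: reports live in a dataset repo
    commit_message=(
        "Upload cpu_inference_transformers_text-classification_"
        "FacebookAI/roberta-base/benchmark_report.json with huggingface_hub"
    ),
)
```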

cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 938.000384,
+            "max_ram": 937.058304,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,60 +10,59 @@
         },
         "latency": {
             "unit": "s",
-            "count": 29,
-            "total": 1.0195579560000851,
-            "mean": 0.03515717089655466,
-            "stdev": 0.0016951666582072868,
-            "p50": 0.03561454399999775,
-            "p90": 0.036774820600032856,
-            "p95": 0.0370516898000119,
-            "p99": 0.037224270839976724,
+            "count": 28,
+            "total": 1.0062479960001838,
+            "mean": 0.03593742842857799,
+            "stdev": 0.0019255354945074497,
+            "p50": 0.036259797000042227,
+            "p90": 0.03768781330002184,
+            "p95": 0.037845974850011996,
+            "p99": 0.03978163751000409,
             "values": [
-                0.0364518549999957,
-                0.03621295700003202,
-                0.03694045100002086,
-                0.03419317800000954,
-                0.03658557600004997,
-                0.03564331799998399,
-                0.03540404000000308,
-                0.03605307699996274,
-                0.03397549999999683,
-                0.035340880999967794,
-                0.036328063000041766,
-                0.035238499000001866,
-                0.03553931300001523,
-                0.03476208500001121,
-                0.03673341300003585,
-                0.03464397399994823,
-                0.03561454399999775,
-                0.03604753700000174,
-                0.03643175700000256,
-                0.035032472000011694,
-                0.03712584900000593,
-                0.03726254599996537,
-                0.03651208799999495,
-                0.03606907699997919,
-                0.031929061000028014,
-                0.031537547000027644,
-                0.03180835500000967,
-                0.032183928999984346,
-                0.03195701399999962
+                0.03592640199997277,
+                0.03592838500003381,
+                0.036318371000049865,
+                0.040478801000006115,
+                0.03775174500003686,
+                0.03789671399999861,
+                0.03681972200001837,
+                0.036570729000004576,
+                0.03595944299996745,
+                0.03766041400001541,
+                0.036705138999991505,
+                0.036600965000047836,
+                0.03639323999999533,
+                0.03605881699996871,
+                0.03610589599998093,
+                0.03551362499996458,
+                0.03620122300003459,
+                0.03643630999999914,
+                0.036194249000004675,
+                0.03526911000000155,
+                0.03674177700003156,
+                0.03676516000001584,
+                0.036388641000030475,
+                0.036157319999972515,
+                0.03231824800002414,
+                0.0319343030000141,
+                0.032182685000009315,
+                0.03097056199999315
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 28.443699379064608
+            "value": 27.826142373748276
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.2466133882602057e-06,
-            "ram": 5.2097879873187484e-08,
+            "cpu": 1.2533890704313914e-06,
+            "ram": 5.238118496137644e-08,
             "gpu": 0.0,
-            "total": 1.2987112681333932e-06
+            "total": 1.305770255392768e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 769994.0891690854
+            "value": 765831.5050982732
         }
     }
 }
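For reference, the summary figures in the report line up with the raw samples it contains. A small sketch that cross-checks a local copy of the file is below; the file path is an assumption, and the relations checked (throughput ≈ count / total latency, efficiency ≈ 1 / total energy) simply match the numbers shown in this diff rather than any documented specification.

```python
# Sketch: cross-check the summary statistics in a local benchmark_report.json
# against its raw latency samples. File path and the checked relations are
# assumptions consistent with the figures in this diff.
import json
import math
import statistics

with open("benchmark_report.json") as f:
    forward = json.load(f)["forward"]

latency = forward["latency"]
values = latency["values"]

print(len(values) == latency["count"])                                        # sample count
print(math.isclose(sum(values), latency["total"], rel_tol=1e-6))              # total latency
print(math.isclose(statistics.mean(values), latency["mean"], rel_tol=1e-6))   # mean latency

# Throughput (samples/s) matches count / total latency for the figures shown here.
print(math.isclose(latency["count"] / latency["total"],
                   forward["throughput"]["value"], rel_tol=1e-6))

# Efficiency (samples/kWh) matches 1 / total energy for the figures shown here.
print(math.isclose(1 / forward["energy"]["total"],
                   forward["efficiency"]["value"], rel_tol=1e-6))
```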