IlyasMoutawwakil (HF staff) committed
Commit: ff06247
Parent: 420ede2

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

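The commit message says the report was pushed with the huggingface_hub client. The exact call is not shown in the commit; the following is a minimal sketch of that kind of upload, assuming the standard HfApi.upload_file API, with the target repo id left as a placeholder (not taken from the commit):

# Minimal sketch of an upload like the one described in the commit message.
# The repo id below is a placeholder; only the in-repo path comes from the commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<user>/<benchmark-dataset>",      # placeholder repo id
    repo_type="dataset",
    commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)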
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 936.624128,
+            "max_ram": 937.025536,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,60 +10,64 @@
         },
         "latency": {
             "unit": "s",
-            "count": 29,
-            "total": 1.031561813999872,
-            "mean": 0.03557109703447834,
-            "stdev": 0.0009883377420872616,
-            "p50": 0.035553504999995766,
-            "p90": 0.03661709300001803,
-            "p95": 0.03718727699998681,
-            "p99": 0.03771390155998461,
+            "count": 33,
+            "total": 1.0114617630001135,
+            "mean": 0.03065035645454889,
+            "stdev": 0.0011875507030811637,
+            "p50": 0.030365853000034804,
+            "p90": 0.031478325200009746,
+            "p95": 0.03264780499998778,
+            "p99": 0.03502172852001422,
             "values": [
-                0.035553504999995766,
-                0.03495569799997611,
-                0.035331939999991846,
-                0.035469476999992366,
-                0.03377281899997797,
-                0.03553527000002532,
-                0.035255656999993334,
-                0.034973930999967706,
-                0.03506733599999734,
-                0.03593241099997613,
-                0.03467393099998617,
-                0.03360165899999856,
-                0.035765739999988,
-                0.0378403839999919,
-                0.03738866099996585,
-                0.03688520100001824,
-                0.03570230200000424,
-                0.033809296000015365,
-                0.03648233799998479,
-                0.03462817500002302,
-                0.03615003799995975,
-                0.03614818400001241,
-                0.03536391000000094,
-                0.03577802300003441,
-                0.03448319400001765,
-                0.03633892099998093,
-                0.03628114299999652,
-                0.03655006600001798,
-                0.03584260399998129
+                0.03154161700001623,
+                0.03082063200002949,
+                0.030669419999981073,
+                0.030112801000029776,
+                0.029460423999978502,
+                0.029696754000042347,
+                0.030662255999970967,
+                0.030084738999960337,
+                0.03064913099996147,
+                0.030869894000034037,
+                0.030288358000007065,
+                0.03023467799999935,
+                0.030057548000002043,
+                0.030521123000028183,
+                0.030217246000006526,
+                0.030705667000006542,
+                0.030365853000034804,
+                0.030133068999987245,
+                0.031646572999989075,
+                0.03043232700002818,
+                0.03031435700000884,
+                0.03053655200000094,
+                0.031119548999981816,
+                0.035432117000027574,
+                0.030814829999997073,
+                0.029846775000009984,
+                0.029858066000031158,
+                0.029664915000012115,
+                0.029774008999993384,
+                0.03003562700001794,
+                0.03122515799998382,
+                0.03414965299998585,
+                0.02952004499996974
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 28.1127118185509
+            "value": 32.62604797053144
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.1915312070236464e-06,
-            "ram": 4.9795594848970635e-08,
+            "cpu": 1.1624875021915811e-06,
+            "ram": 4.858249665847651e-08,
             "gpu": 0.0,
-            "total": 1.241326801872617e-06
+            "total": 1.2110699988500574e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 805589.630781708
+            "value": 825716.1030737497
         }
     }
 }
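As a reading of the updated numbers (an observation, not something stated in the file): the derived fields appear consistent with throughput being count divided by total latency, and efficiency being samples per kWh of the reported total energy. A quick check, using the new values above:

# Consistency check of the derived fields in the updated report.
# The relations assumed here (throughput = count / latency.total,
# efficiency = 1 / energy.total) are inferred from the numbers, not documented in the file.
count = 33
total_latency_s = 1.0114617630001135       # latency.total
total_energy_kwh = 1.2110699988500574e-06  # energy.total

print(count / total_latency_s)   # ~32.626 samples/s, matches throughput.value
print(1 / total_energy_kwh)      # ~825716.1 samples/kWh, matches efficiency.value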