IlyasMoutawwakil (HF staff) committed on
Commit 3e091aa
1 Parent(s): e5c5cea

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
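As the commit message notes, the report was pushed with huggingface_hub. A minimal sketch of how such an upload is typically done with `HfApi.upload_file`; the `repo_id` and `repo_type` below are placeholders/assumptions, not taken from this commit:

```python
# Hypothetical sketch: pushing a benchmark report with huggingface_hub.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<namespace>/<benchmark-results-repo>",  # placeholder: target repo is not shown in this commit
    repo_type="dataset",                             # assumption: results stored in a dataset repo
    commit_message="Upload benchmark_report.json with huggingface_hub",
)
```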

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 942.211072,
+            "max_ram": 941.477888,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,55 +10,57 @@
         },
         "latency": {
             "unit": "s",
-            "count": 24,
-            "total": 1.036298180000017,
-            "mean": 0.043179090833334044,
-            "stdev": 0.002189473122369011,
-            "p50": 0.044610296999991306,
-            "p90": 0.045187591000012614,
-            "p95": 0.04525405059999912,
-            "p99": 0.04529109219998418,
+            "count": 26,
+            "total": 1.0257049300000176,
+            "mean": 0.039450189615385295,
+            "stdev": 0.0007555241692689364,
+            "p50": 0.03926879249999615,
+            "p90": 0.03986674450000294,
+            "p95": 0.0407378977499846,
+            "p99": 0.04200367574999575,
             "values": [
-                0.04529959299998154,
-                0.04470166800001607,
-                0.04462644700004148,
-                0.04460547899998346,
-                0.0446821909999926,
-                0.045205417000033776,
-                0.04504779299998063,
-                0.044749747000025764,
-                0.04425163899998097,
-                0.04461511499999915,
-                0.04492204899997887,
-                0.045262632999993,
-                0.04426653600000918,
-                0.045145996999963245,
-                0.0448085269999865,
-                0.04109826899997415,
-                0.03982194900004288,
-                0.04009471799997755,
-                0.04006357000002936,
-                0.042594269000005625,
-                0.040380871000024854,
-                0.039894153000034294,
-                0.03958470699996042,
-                0.04057484300000169
+                0.03981022400000711,
+                0.03938872599999854,
+                0.03887906399998542,
+                0.0390619460000039,
+                0.03926255999999739,
+                0.03926977399999032,
+                0.039281495000011546,
+                0.03930041099999926,
+                0.03923514799998884,
+                0.039869976000005636,
+                0.03964665800000944,
+                0.03962592799999243,
+                0.039214310000005526,
+                0.03901726300000519,
+                0.03986351300000024,
+                0.03924718199999688,
+                0.039267811000001984,
+                0.038931141000006164,
+                0.03970928399999707,
+                0.042329166000001806,
+                0.03871840499999735,
+                0.041027204999977585,
+                0.039412671000008004,
+                0.03897991200000206,
+                0.03904363200001626,
+                0.03831152500001167
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 23.159357473733674
+            "value": 25.348420622292956
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5446457597944473e-06,
-            "ram": 6.455342116480232e-08,
+            "cpu": 1.5182142583732933e-06,
+            "ram": 6.344938224204137e-08,
             "gpu": 0.0,
-            "total": 1.6091991809592498e-06
+            "total": 1.5816636406153346e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 621427.1122136019
+            "value": 632245.6774760007
         }
     }
 }
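For reference, the derived metrics in the report are consistent with the raw figures: throughput is the sample count divided by the summed latency (equivalently, the inverse of the mean latency), and efficiency is the inverse of the energy total, assuming that total is reported per forward pass. A minimal sketch using the new values from this diff:

```python
# Sanity-check sketch: derived metrics recomputed from the raw figures
# on the new side of this diff.
count = 26
total_latency_s = 1.0257049300000176       # "latency"."total"
total_energy_kwh = 1.5816636406153346e-06  # "energy"."total" (assumed per forward pass)

throughput = count / total_latency_s  # samples/s
efficiency = 1.0 / total_energy_kwh   # samples/kWh

print(throughput)  # ~25.3484, matching "throughput"."value"
print(efficiency)  # ~632245.68, matching "efficiency"."value"
```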