IlyasMoutawwakil committed
Commit b9172be · 1 parent: 465a0c5

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
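The commit message notes that the file was uploaded with huggingface_hub. As a rough sketch of how such an upload is typically performed with that library's upload_file API (the repo id and local file path below are placeholders, and the dataset repo type is an assumption, not taken from this commit):

```python
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    # Placeholder local path -- not taken from this commit.
    path_or_fileobj="benchmark_report.json",
    # Destination path matches the file changed in this commit.
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    # Placeholder repo id; repo_type="dataset" is an assumption about where the results live.
    repo_id="<namespace>/<benchmark-results-repo>",
    repo_type="dataset",
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)
```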

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 940.679168,
+            "max_ram": 941.883392,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,56 +10,52 @@
         },
         "latency": {
             "unit": "s",
-            "count": 25,
-            "total": 1.0218909339997708,
-            "mean": 0.040875637359990835,
-            "stdev": 0.0022957200548870287,
-            "p50": 0.039370542999961344,
-            "p90": 0.044022964799967215,
-            "p95": 0.044071162800003096,
-            "p99": 0.04543273119999639,
+            "count": 21,
+            "total": 1.039199955000015,
+            "mean": 0.04948571214285786,
+            "stdev": 0.0024762696225591346,
+            "p50": 0.049116929999968306,
+            "p90": 0.05214009299999134,
+            "p95": 0.052552026999990176,
+            "p99": 0.05701020860000199,
             "values": [
-                0.04405780999996978,
-                0.043970696999963366,
-                0.043229585000005954,
-                0.043503595000004225,
-                0.04356754500003035,
-                0.04586164599999165,
-                0.04407450100001142,
-                0.04310136499998407,
-                0.03914134399997238,
-                0.03928243799998654,
-                0.03922671499998387,
-                0.03888218099996266,
-                0.03911322200002587,
-                0.038929609999968307,
-                0.039155328999981975,
-                0.039260025999965364,
-                0.04012748500002772,
-                0.04000445600001967,
-                0.039370542999961344,
-                0.038810697999963395,
-                0.03956443399999898,
-                0.04318828899999971,
-                0.03891285899999275,
-                0.038911866999967515,
-                0.03864269400003195
+                0.049865705999991405,
+                0.047501308999983394,
+                0.04864748999995072,
+                0.04658419800000502,
+                0.04942295500001137,
+                0.047885870000015984,
+                0.04794697599999154,
+                0.04890816900001482,
+                0.04611559800002851,
+                0.05812475400000494,
+                0.052552026999990176,
+                0.05092260899999701,
+                0.05029630300003873,
+                0.04939228700004605,
+                0.05214009299999134,
+                0.049792507999995905,
+                0.049116929999968306,
+                0.04861624099999062,
+                0.048050178999972104,
+                0.04805953700002874,
+                0.049258215999998356
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 24.464450332432058
+            "value": 20.207853069046465
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5440284696399658e-06,
-            "ram": 6.452834958481938e-08,
+            "cpu": 1.6230854723188615e-06,
+            "ram": 6.783072390156124e-08,
             "gpu": 0.0,
-            "total": 1.608556819224785e-06
+            "total": 1.6909161962204226e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 621675.2731693569
+            "value": 591395.3643801062
         }
     }
 }
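For context, the derived fields in the updated report are consistent with throughput being the latency count divided by the total latency, and efficiency being the reciprocal of the reported total energy. A quick sanity check under that assumption (the relations are inferred from the numbers in the diff above, not stated in the file itself):

```python
# Consistency check of the derived metrics in the updated report.
# The relations below are assumptions inferred from the numbers, not taken from the file.
count = 21                                  # latency.count
total_latency_s = 1.039199955000015         # latency.total
total_energy_kwh = 1.6909161962204226e-06   # energy.total

throughput = count / total_latency_s   # ~20.207853069046465, matches throughput.value (samples/s)
efficiency = 1.0 / total_energy_kwh    # ~591395.36, matches efficiency.value (samples/kWh)

print(f"{throughput=:.6f}", f"{efficiency=:.2f}")
```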