IlyasMoutawwakil committed
Commit 2b0f36f
1 Parent(s): aaa0b74

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
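As the commit message notes, the report was pushed with the huggingface_hub Python client. A minimal sketch of such an upload is shown below; the repo id and repo type are illustrative placeholders, not taken from this page.

from huggingface_hub import HfApi

api = HfApi()
# Push the local report to the Hub; repo_id and repo_type below are hypothetical.
api.upload_file(
    path_or_fileobj="benchmark_report.json",
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<namespace>/<benchmark-results-repo>",  # placeholder target repo
    repo_type="dataset",  # assumption: benchmark results are commonly stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)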

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 945.958912,
+            "max_ram": 945.242112,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,55 +10,53 @@
         },
         "latency": {
             "unit": "s",
-            "count": 24,
-            "total": 1.0163190379999492,
-            "mean": 0.042346626583331215,
-            "stdev": 0.0010597026475171844,
-            "p50": 0.04198648649997949,
-            "p90": 0.043153495399997154,
-            "p95": 0.044029003949970044,
-            "p99": 0.04598615034998147,
+            "count": 22,
+            "total": 1.0186817999999676,
+            "mean": 0.04630371818181671,
+            "stdev": 0.001301385918725245,
+            "p50": 0.04626180500000032,
+            "p90": 0.04760617880001803,
+            "p95": 0.04789228735000535,
+            "p99": 0.04801512183000654,
             "values": [
-                0.04326981200000546,
-                0.04193209800001796,
-                0.042044556999996985,
-                0.04176438299998608,
-                0.04197457999998733,
-                0.04225703500003419,
-                0.042523861999995916,
-                0.04199839299997166,
-                0.042147710999984156,
-                0.04167466900003092,
-                0.04194267799999807,
-                0.04229581499998858,
-                0.04197099100002788,
-                0.041422097999998186,
-                0.041501436000032754,
-                0.04252106699999558,
-                0.04653073399998675,
-                0.042607959999998,
-                0.041911911999989115,
-                0.0416770530000008,
-                0.041513557999962813,
-                0.0441629789999638,
-                0.04288208999997778,
-                0.041791567000018404
+                0.04763096800002131,
+                0.046475633000000016,
+                0.04686485999999945,
+                0.045511796999988974,
+                0.04738307599998848,
+                0.04613237399999548,
+                0.046597160999994,
+                0.04716843599999265,
+                0.04458217399999853,
+                0.048044118000007074,
+                0.04613028999997937,
+                0.04621324500001833,
+                0.046204238000001396,
+                0.04790604100000451,
+                0.04619291700001327,
+                0.04735353099999884,
+                0.04614026799998783,
+                0.0453522489999898,
+                0.04631036499998231,
+                0.046942184000016596,
+                0.04584969600000477,
+                0.04169617899998457
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 23.614631924272977
+            "value": 21.5965378001263
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.6500012190253648e-06,
-            "ram": 6.895657958750689e-08,
+            "cpu": 1.5399246134309688e-06,
+            "ram": 6.435648088348409e-08,
             "gpu": 0.0,
-            "total": 1.7189577986128717e-06
+            "total": 1.604281094314453e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 581747.8479151488
+            "value": 623332.1601457403
         }
     }
 }
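The derived fields in the report are consistent with throughput being the reciprocal of the mean per-sample latency and efficiency being the reciprocal of the total energy per sample; this is an inference from the numbers, not something stated in the file. A quick consistency check against the updated values:

# Consistency check on the new report values (formulas assumed, numbers from the diff above).
mean_latency_s = 0.04630371818181671      # "latency"."mean"
energy_total_kwh = 1.604281094314453e-06  # "energy"."total"

throughput = 1.0 / mean_latency_s         # ~21.5965 samples/s, matches "throughput"."value"
efficiency = 1.0 / energy_total_kwh       # ~623332.16 samples/kWh, matches "efficiency"."value"
print(throughput, efficiency)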