IlyasMoutawwakil committed 8c88ce2 (verified)
Parent(s): 3765bc1

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
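For reference, a report file like this is typically pushed with the huggingface_hub client. The sketch below is a minimal illustration, not the command actually used for this commit; the repo_id and repo_type values are placeholders, and only the path_in_repo is taken from the commit message.

# Minimal sketch of uploading the report with huggingface_hub.
# repo_id and repo_type are placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local file produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<user-or-org>/<repo-name>",  # placeholder
    repo_type="dataset",                  # assumption; adjust to the actual repo type
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)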

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 942.256128,
+            "max_ram": 942.247936,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,56 +10,53 @@
         },
         "latency": {
             "unit": "s",
-            "count": 25,
-            "total": 1.0344535810000366,
-            "mean": 0.04137814324000146,
-            "stdev": 0.0007554426133065928,
-            "p50": 0.04104981399996177,
-            "p90": 0.04242774700001064,
-            "p95": 0.042488004600022576,
-            "p99": 0.043137132240017306,
+            "count": 22,
+            "total": 1.039614972000038,
+            "mean": 0.04725522600000173,
+            "stdev": 0.005689386059377898,
+            "p50": 0.04785896199999229,
+            "p90": 0.05419865650002863,
+            "p95": 0.05941182479999156,
+            "p99": 0.06028350320002175,
             "values": [
-                0.04087464599996338,
-                0.04248816900002339,
-                0.04104981399996177,
-                0.041132188000005954,
-                0.040866349999987506,
-                0.040800415999967754,
-                0.04099159499997995,
-                0.040969784000026266,
-                0.04159802999998874,
-                0.04149797299999136,
-                0.042487347000019327,
-                0.041719929999999295,
-                0.04148687199995038,
-                0.040652889000000414,
-                0.04233834699999761,
-                0.04226485099997035,
-                0.04094710100002885,
-                0.04098006300000634,
-                0.04334206800001539,
-                0.04227942800002893,
-                0.04154735600002368,
-                0.04083849800002781,
-                0.04056997500003945,
-                0.04041915200002677,
-                0.040310739000005924
+                0.04891997800001491,
+                0.04882707199999459,
+                0.04915786499998376,
+                0.047980044999974325,
+                0.048276484999973945,
+                0.04755020600003945,
+                0.05447638100002905,
+                0.060446165000030305,
+                0.05967158499998959,
+                0.04787214199996015,
+                0.05169913600002474,
+                0.04841797099999212,
+                0.04784578200002443,
+                0.047327054999982465,
+                0.04594960800000081,
+                0.041178494000007504,
+                0.0405867789999661,
+                0.04067345200002137,
+                0.041333957000006194,
+                0.04033369199999015,
+                0.040425615000003745,
+                0.040665507000028356
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 24.167348307530403
+            "value": 21.161680614964435
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5666487481858996e-06,
-            "ram": 6.547255926125217e-08,
+            "cpu": 1.7948482975815285e-06,
+            "ram": 7.500851978007135e-08,
             "gpu": 0.0,
-            "total": 1.6321213074471516e-06
+            "total": 1.8698568173615999e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 612699.5557481748
+            "value": 534800.307015495
         }
     }
 }
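As a sanity check, the derived metrics in the updated report are internally consistent with the raw latency values: mean = total / count, the percentiles match numpy's default linear interpolation, throughput = count / total, and efficiency = 1 / total energy (which suggests the energy totals are reported per sample). The sketch below recomputes them from the new values; it illustrates how the figures relate and is not the benchmark's own code.

# Sanity-check sketch: recompute the new report's derived metrics from its raw latencies.
import numpy as np

# Per-forward latencies (s) from the updated report.
latencies = [
    0.04891997800001491, 0.04882707199999459, 0.04915786499998376,
    0.047980044999974325, 0.048276484999973945, 0.04755020600003945,
    0.05447638100002905, 0.060446165000030305, 0.05967158499998959,
    0.04787214199996015, 0.05169913600002474, 0.04841797099999212,
    0.04784578200002443, 0.047327054999982465, 0.04594960800000081,
    0.041178494000007504, 0.0405867789999661, 0.04067345200002137,
    0.041333957000006194, 0.04033369199999015, 0.040425615000003745,
    0.040665507000028356,
]
total_energy_kwh = 1.8698568173615999e-06  # cpu + ram + gpu totals from the report

total = sum(latencies)                         # ~1.0396 s     ("total")
mean = total / len(latencies)                  # ~0.047255 s   ("mean")
p50, p90 = np.percentile(latencies, [50, 90])  # ~0.047859 s, ~0.054199 s ("p50", "p90")
throughput = len(latencies) / total            # ~21.16 samples/s ("throughput")
efficiency = 1 / total_energy_kwh              # ~534800 samples/kWh ("efficiency"),
                                               # assuming the energy total is kWh per sample
print(mean, p50, p90, throughput, efficiency)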