IlyasMoutawwakil committed
Commit 638e62c
1 Parent(s): 810fbab

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

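The commit message notes the report was pushed with the huggingface_hub client. A minimal sketch of such an upload is given below; the repo_id, repo_type, and local file path are illustrative placeholders, not details taken from this commit.

# Minimal sketch, assuming the report was pushed with HfApi.upload_file;
# repo_id, repo_type, and the local path are placeholders for illustration.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<namespace>/<benchmark-repo>",   # placeholder: the target repo is not shown on this page
    repo_type="dataset",                      # assumption: such reports are typically stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)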
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 939.835392,
+            "max_ram": 939.737088,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -11,55 +11,55 @@
         "latency": {
             "unit": "s",
             "count": 25,
-            "total": 1.0143559390000405,
-            "mean": 0.04057423756000162,
-            "stdev": 0.0009884393138469784,
-            "p50": 0.04038991099997702,
-            "p90": 0.042005376199995226,
-            "p95": 0.042404397400014206,
-            "p99": 0.04291444867998279,
+            "total": 1.0346989849998067,
+            "mean": 0.04138795939999227,
+            "stdev": 0.0021007572398984557,
+            "p50": 0.04082370200001151,
+            "p90": 0.0449036985999669,
+            "p95": 0.04525575499995966,
+            "p99": 0.04546071503998291,
             "values": [
-                0.042446712000014486,
-                0.04223513900001308,
-                0.040107686999988346,
-                0.040351901000008183,
-                0.04012147200000982,
-                0.040354516000036256,
-                0.040741104000005635,
-                0.04090935600004286,
-                0.04038991099997702,
-                0.04058186800000385,
-                0.04079367099996034,
-                0.04023655600002485,
-                0.040704656000002615,
-                0.04166073199996845,
-                0.041299329000025864,
-                0.04091691999997238,
-                0.04098874299995714,
-                0.040017569000042386,
-                0.0399713639999959,
-                0.04306215499997279,
-                0.03946148599999333,
-                0.03891260699998611,
-                0.03939847000003738,
-                0.03922455700001137,
-                0.039467457999990074
+                0.04134019000002809,
+                0.04120807300000706,
+                0.040783396999984234,
+                0.04057489600000963,
+                0.04064198199995417,
+                0.040915243999961604,
+                0.04070804599996336,
+                0.04082370200001151,
+                0.04049827199997935,
+                0.04075733900003797,
+                0.040987820999987434,
+                0.04093269700001656,
+                0.04311318800000663,
+                0.045305329999962396,
+                0.045509783999989395,
+                0.04505745499994873,
+                0.044673063999994156,
+                0.04440395900002159,
+                0.04252008999998225,
+                0.03860290699998359,
+                0.04009890300000052,
+                0.038599960999988525,
+                0.038785960000041086,
+                0.039196339999989505,
+                0.03866038499995739
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 24.64618092998517
+            "value": 24.16161643379274
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.579658243391249e-06,
-            "ram": 6.601622786283769e-08,
+            "cpu": 1.526077129901984e-06,
+            "ram": 6.377716727165797e-08,
             "gpu": 0.0,
-            "total": 1.6456744712540868e-06
+            "total": 1.589854297173642e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 607653.5897393788
+            "value": 628988.4562237852
         }
     }
 }
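The derived fields in both versions follow from the raw "values" list: mean = total / count, and throughput here equals count / total (samples per second at an effective batch size of 1). A minimal sketch for re-deriving them, assuming numpy and its default linear percentile interpolation (the benchmarking tool's exact method may differ slightly):

# Sketch: recompute the "forward" summary statistics from the raw per-iteration
# latencies. Percentile interpolation and the stdev ddof are assumptions, so
# tiny differences from the stored values are possible.
import json
import numpy as np

with open("benchmark_report.json") as f:
    report = json.load(f)

latencies = np.array(report["forward"]["latency"]["values"])

total = latencies.sum()                                   # "total" (s)
mean = latencies.mean()                                   # "mean" = total / count
stdev = latencies.std()                                   # "stdev" (population std assumed)
p50, p90, p95, p99 = np.percentile(latencies, [50, 90, 95, 99])

throughput = len(latencies) / total                       # "throughput" in samples/s
print(f"mean={mean:.6f}s p50={p50:.6f}s p90={p90:.6f}s throughput={throughput:.2f} samples/s")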