IlyasMoutawwakil committed
Commit 59395b1
1 Parent(s): 8a9fb71

Upload cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json with huggingface_hub

cpu_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram": 969.715712,
+            "max_ram": 969.756672,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -11,15 +11,15 @@
         "latency": {
             "unit": "s",
             "values": [
-                1.3012931480000134
+                1.3268693419999522
             ],
             "count": 1,
-            "total": 1.3012931480000134,
-            "mean": 1.3012931480000134,
-            "p50": 1.3012931480000134,
-            "p90": 1.3012931480000134,
-            "p95": 1.3012931480000134,
-            "p99": 1.3012931480000134,
+            "total": 1.3268693419999522,
+            "mean": 1.3268693419999522,
+            "p50": 1.3268693419999522,
+            "p90": 1.3268693419999522,
+            "p95": 1.3268693419999522,
+            "p99": 1.3268693419999522,
             "stdev": 0,
             "stdev_": 0
         },
@@ -30,7 +30,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 883.73248,
+            "max_ram": 883.642368,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -39,54 +39,50 @@
         "latency": {
             "unit": "s",
             "values": [
-                0.04626103900000089,
-                0.04535377799999196,
-                0.04624384800001735,
-                0.04671350299997812,
-                0.045349679999958425,
-                0.04494575600000417,
-                0.04525430200004621,
-                0.0463058930000102,
-                0.045988771999986966,
-                0.04503524400001879,
-                0.04529629999996132,
-                0.04556148499995061,
-                0.04576435300003823,
-                0.04574031800001421,
-                0.04561390300000312,
-                0.04632477799998469,
-                0.04517097699999795,
-                0.039188086000024214,
-                0.03989096899999822,
-                0.03935663300001124,
-                0.03950592000001052,
-                0.04032272400002057,
-                0.040892407000001185
+                0.056869258000006084,
+                0.058061517999988155,
+                0.0522853390000364,
+                0.05243081999998367,
+                0.05326780699999745,
+                0.055420992999984264,
+                0.05466478699997879,
+                0.05310630600001787,
+                0.05267342199999803,
+                0.054737862000024506,
+                0.05671406999999817,
+                0.05285651100001587,
+                0.055796852000014496,
+                0.052001501000006556,
+                0.05440275799998062,
+                0.05993688600000269,
+                0.055317720999994435,
+                0.05281495399998448,
+                0.05182520199997498
             ],
-            "count": 23,
-            "total": 1.0160806680000292,
-            "mean": 0.04417742034782735,
-            "p50": 0.045349679999958425,
-            "p90": 0.04629692220000834,
-            "p95": 0.04632288949998724,
-            "p99": 0.04662798349997956,
-            "stdev": 0.002619898099842317,
-            "stdev_": 5.930400822897219
+            "count": 19,
+            "total": 1.0351845669999875,
+            "mean": 0.054483398263157234,
+            "p50": 0.05440275799998062,
+            "p90": 0.0571077100000025,
+            "p95": 0.058249054799989604,
+            "p99": 0.059599319760000075,
+            "stdev": 0.00218809072210616,
+            "stdev_": 4.016068732602884
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 22.635998030817117
+            "value": 18.3542148962507
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5532502730768868e-06,
-            "ram": 6.494649694080823e-08,
+            "cpu": 2.3141269575164147e-06,
+            "ram": 9.675788286311704e-08,
             "gpu": 0.0,
-            "total": 1.618196770017695e-06
+            "total": 2.410884840379532e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 617971.818092966
+            "value": 414785.46932278015
         }
     }
 }
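
For reference, the changed summary statistics under "forward" are simple aggregates of the raw latency samples listed in the diff. Below is a minimal sketch (not part of this commit) that recomputes count, total, mean and throughput from the updated report; it assumes the file has been downloaded locally to the same relative path and uses NumPy only for convenience. The percentile and stdev conventions used by the benchmarking tool are not reproduced here.

import json

import numpy as np

path = (
    "cpu_inference_transformers_fill-mask_google-bert/"
    "bert-base-uncased/benchmark_report.json"
)
with open(path) as f:
    report = json.load(f)

# Raw per-forward-call latencies, in seconds (19 samples in the updated report).
values = np.asarray(report["forward"]["latency"]["values"])

count = values.size          # 19
total = values.sum()         # ~1.0352 s
mean = total / count         # ~0.05448 s, matches "mean"
throughput = count / total   # samples/s, ~18.35, matches report["forward"]["throughput"]["value"]

print(count, total, mean, throughput)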