IlyasMoutawwakil (HF staff) committed
Commit: 160e8e9
1 Parent(s): 0bb74b5

Upload cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark_report.json with huggingface_hub

cpu_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 796.85632,
+            "max_ram": 794.918912,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -11,34 +11,34 @@
         "latency": {
             "unit": "s",
             "count": 4,
-            "total": 1.1168391730000735,
-            "mean": 0.2792097932500184,
-            "stdev": 0.014728191162737467,
-            "p50": 0.27901741850001827,
-            "p90": 0.294945443000023,
-            "p95": 0.29596932200002185,
-            "p99": 0.2967884252000209,
+            "total": 1.1850789909999548,
+            "mean": 0.2962697477499887,
+            "stdev": 0.0037022146511117163,
+            "p50": 0.2959121454999831,
+            "p90": 0.30038091399998734,
+            "p95": 0.3008802984999903,
+            "p99": 0.3012798060999927,
             "values": [
-                0.2618111350000163,
-                0.2901673410000285,
-                0.2969932010000207,
-                0.26786749600000803
+                0.29805045299997346,
+                0.2918750169999953,
+                0.3013796829999933,
+                0.2937738379999928
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 3.581536264756122
+            "value": 3.375302431633566
        },
        "energy": {
            "unit": "kWh",
-            "cpu": 9.778956572214765e-06,
-            "ram": 4.08677639737931e-07,
+            "cpu": 1.0251560476091173e-05,
+            "ram": 4.284317343614674e-07,
            "gpu": 0.0,
-            "total": 1.0187634211952695e-05
+            "total": 1.067999221045264e-05
        },
        "efficiency": {
            "unit": "samples/kWh",
-            "value": 98158.21604850562
+            "value": 93633.02709352988
        }
    }
}
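For readers comparing the two runs, the derived statistics in this report can be reproduced from the raw "values" list. The sketch below is not part of the commit; it checks the new (+) numbers and assumes NumPy-style linear percentile interpolation and a population standard deviation (ddof=0). It also infers, purely from the numbers, that throughput is count / total latency and efficiency is 1 / total energy, since those formulas reproduce the reported figures.

```python
# Minimal sketch (not part of the commit): reproduce the derived metrics
# of the new (+) run from its raw per-forward latency measurements.
import numpy as np

# Raw "values" from the latency block, in seconds per forward pass.
latencies = np.array([
    0.29805045299997346,
    0.2918750169999953,
    0.3013796829999933,
    0.2937738379999928,
])

total = latencies.sum()                    # ~1.1850789909999548 s
mean = latencies.mean()                    # ~0.2962697477499887 s
stdev = latencies.std()                    # ~0.0037022146511117163 s (population std, ddof=0)
p50, p90, p95, p99 = np.percentile(latencies, [50, 90, 95, 99])  # linear interpolation

throughput = len(latencies) / total        # ~3.375302431633566 samples/s

# "total" from the energy block; 1 / energy matches the reported efficiency,
# which suggests the energy figure is per forward pass (an inference, not documented here).
total_energy_kwh = 1.067999221045264e-05
efficiency = 1 / total_energy_kwh          # ~93633.02709352988 samples/kWh

print(f"total={total} mean={mean} stdev={stdev}")
print(f"p50={p50} p90={p90} p95={p95} p99={p99}")
print(f"throughput={throughput} samples/s, efficiency={efficiency} samples/kWh")
```

Running the sketch reproduces the reported mean, stdev, percentiles, throughput, and efficiency, confirming that the new run is slightly slower (higher mean latency, lower throughput) but less noisy (smaller stdev) than the one it replaces.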