Spaces:
Running
Running
Ron Au
committed on
Commit
·
0a51235
1
Parent(s):
9421891
build(logs): Print GPU memory information
Browse files
- modules/inference.py +4 -0
modules/inference.py
CHANGED
@@ -8,6 +8,10 @@ from io import BytesIO
|
|
8 |
import base64
|
9 |
|
10 |
print(f"GPUs available: {torch.cuda.device_count()}")
|
|
|
|
|
|
|
|
|
11 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
12 |
fp16 = torch.cuda.is_available()
|
13 |
|
|
|
8 |
import base64
|
9 |
|
10 |
print(f"GPUs available: {torch.cuda.device_count()}")
|
11 |
+
print(f"GPU[0] memory: {int(torch.cuda.get_device_properties(0).total_memory / 1048576)}Mib")
|
12 |
+
print(f"GPU[0] memory reserved: {int(torch.cuda.memory_reserved(0) / 1048576)}Mib")
|
13 |
+
print(f"GPU[0] memory allocated: {int(torch.cuda.memory_allocated(0) / 1048576)}Mib")
|
14 |
+
|
15 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
16 |
fp16 = torch.cuda.is_available()
|
17 |
|