Thomasboosinger
committed on
Update handler.py
Browse files — handler.py: +12 −1
handler.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1 |
from transformers import pipeline
|
|
|
2 |
from PIL import Image
|
3 |
from io import BytesIO
|
4 |
import base64
|
@@ -6,8 +7,18 @@ from typing import Dict, List, Any
|
|
6 |
|
7 |
class EndpointHandler():
|
8 |
def __init__(self, model_path=""):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9 |
# Initialize the pipeline with the specified model and set the device to GPU
|
10 |
-
self.pipeline = pipeline(task="zero-shot-object-detection", model=model_path, device=
|
11 |
|
12 |
def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
|
13 |
"""
|
|
|
1 |
from transformers import pipeline
|
2 |
+
import torch
|
3 |
from PIL import Image
|
4 |
from io import BytesIO
|
5 |
import base64
|
|
|
7 |
|
8 |
class EndpointHandler():
|
9 |
def __init__(self, model_path=""):
    """Build the zero-shot object-detection pipeline for this endpoint.

    Selects CUDA device 0 when a GPU is visible to torch, otherwise falls
    back to CPU (device -1), printing which hardware was chosen, then
    constructs the transformers pipeline from *model_path*.
    """
    # Pick the pipeline device: 0 = first CUDA GPU, -1 = CPU.
    if torch.cuda.is_available():
        device = 0
        gpu_name = torch.cuda.get_device_name(device)
        print(f"Using GPU: {gpu_name}")
    else:
        device = -1
        thread_count = torch.get_num_threads()
        print(f"Using CPU with {thread_count} threads")

    # Initialize the pipeline with the specified model and set the device to GPU
    self.pipeline = pipeline(task="zero-shot-object-detection", model=model_path, device=device)
|
22 |
|
23 |
def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
|
24 |
"""
|