from typing import List

from transformers import CLIPTokenizer, CLIPModel


class EndpointHandler():
    def __init__(self, path=""):
        # Load the CLIP model and its tokenizer from the Hugging Face Hub.
        hf_model_path = "openai/clip-vit-large-patch14"
        self.model = CLIPModel.from_pretrained(hf_model_path)
        self.tokenizer = CLIPTokenizer.from_pretrained(hf_model_path)

    def __call__(self, inputs: str) -> List[float]:
        """
        Args:
            inputs (:obj:`str`): the text query to embed.
        Return:
            A :obj:`list` of floats (the CLIP text embedding) that will be
            serialized and returned.
        """
        # Tokenize the query and compute its CLIP text embedding.
        token_inputs = self.tokenizer([inputs], padding=True, return_tensors="pt")
        query_embed = self.model.get_text_features(**token_inputs)
        # Convert the (1, hidden_dim) tensor to a plain Python list for JSON serialization.
        np_query_embed = query_embed.detach().cpu().numpy()[0].tolist()
        return np_query_embed
# if __name__ == "__main__":
#     handler = EndpointHandler()
#     print(handler("a dog"))