from typing import Any, Dict, List

import torch
from transformers import CLIPModel, CLIPTokenizer


class EndpointHandler:
    def __init__(self, path: str = ""):
        # `path` points at the endpoint's model repository; it is unused here
        # because the handler loads a fixed public CLIP checkpoint from the Hub.
        hf_model_path = "openai/clip-vit-large-patch14"
        self.model = CLIPModel.from_pretrained(hf_model_path)
        self.tokenizer = CLIPTokenizer.from_pretrained(hf_model_path)

    def __call__(self, data: Dict[str, Any]) -> List[float]:
        """Embed a text query with CLIP.

        Args:
            data: request payload. ``data["inputs"]`` must be a text query
                (:obj:`str`); image inputs are not handled by this handler.

        Return:
            A :obj:`list` of floats (the CLIP text embedding), which will be
            serialized and returned.
        """
        token_inputs = self.tokenizer([data["inputs"]], padding=True, return_tensors="pt")
        with torch.no_grad():
            query_embed = self.model.get_text_features(**token_inputs)
        # Squeeze the batch dimension and convert to a plain Python list so the
        # result is JSON-serializable.
        return query_embed.cpu().numpy()[0].tolist()


if __name__ == "__main__":
    eh = EndpointHandler()
    print(eh({"inputs": "a dog"}))
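
    # A minimal sketch of one way the returned embedding might be used
    # downstream: cosine similarity between two text queries. The second
    # query ("a cat") is an arbitrary illustration, not part of the handler.
    import numpy as np

    dog = np.array(eh({"inputs": "a dog"}))
    cat = np.array(eh({"inputs": "a cat"}))
    similarity = float(dog @ cat / (np.linalg.norm(dog) * np.linalg.norm(cat)))
    print("cosine similarity:", similarity)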