import openai

from .base_model import BaseModel


class GPT4Model(BaseModel):
    """Wrapper around the OpenAI chat-completion and embedding endpoints."""

    def __init__(self,
                 generation_model="gpt-4-vision-preview",
                 embedding_model="text-embedding-ada-002",
                 temperature=0,
                 ) -> None:
        self.generation_model = generation_model
        self.embedding_model = embedding_model
        self.temperature = temperature

    def respond(self, messages: list) -> str:
        """Generate a chat completion, retrying up to three times before giving up."""
        for _ in range(3):
            try:
                return openai.ChatCompletion.create(
                    messages=messages,
                    model=self.generation_model,
                    temperature=self.temperature,
                    max_tokens=1000,
                ).choices[0]["message"]["content"]
            except Exception:
                # Transient API failures (rate limits, timeouts) are simply retried.
                continue
        return "No answer was provided."
    
    def embedding(self, texts: list) -> list:
        """Embed texts with the configured embedding model, batching 2048 inputs per request."""
        data = []
        for i in range(0, len(texts), 2048):
            # Slicing clamps the upper bound automatically on the final batch.
            data += openai.Embedding.create(
                input=texts[i:i + 2048],
                model=self.embedding_model,
            )["data"]

        return [d["embedding"] for d in data]
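

# Minimal usage sketch. Assumptions (not part of the original file): a valid API
# key is configured via openai.api_key or the OPENAI_API_KEY environment
# variable, the legacy openai<1.0 SDK (ChatCompletion/Embedding) is installed,
# and the module is run with `python -m <package>.<this_module>` so the relative
# import of BaseModel resolves.
if __name__ == "__main__":
    model = GPT4Model()

    # Chat completion: messages use the standard role/content format.
    print(model.respond([
        {"role": "user", "content": "Reply with a one-sentence greeting."}
    ]))

    # Embeddings: one vector is returned per input text.
    vectors = model.embedding(["first example text", "second example text"])
    print(len(vectors), "vectors of dimension", len(vectors[0]))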