t.me/xtekky committed
Commit 81e0330
1 Parent(s): 9922a20

t3nsor api gpt-3.5

Files changed (2):
  1. README.md +42 -1
  2. t3nsor/__init__.py +118 -0
README.md CHANGED
@@ -1 +1,42 @@
- soon.
+ working on it...
+
+ `t3nsor` (use it like the `openai` PyPI package)
+
+ Import t3nsor:
+
+ ```python
+ import t3nsor
+
+ # t3nsor.Completion.create
+ # t3nsor.StreamCompletion.create
+ ```
+
+ Example Chatbot:
+ ```python
+ messages = []
+
+ while True:
+     user = input('you: ')
+
+     t3nsor_cmpl = t3nsor.Completion.create(
+         prompt   = user,
+         messages = messages
+     )
+
+     print('gpt:', t3nsor_cmpl.completion.choices[0].text)
+
+     messages.extend([
+         {'role': 'user',      'content': user},
+         {'role': 'assistant', 'content': t3nsor_cmpl.completion.choices[0].text}
+     ])
+ ```
+
+ Streaming Response:
+
+ ```python
+ for response in t3nsor.StreamCompletion.create(
+         prompt   = 'write python code to reverse a string',
+         messages = []):
+
+     print(response.completion.choices[0].text)
+ ```
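The `T3nsorResponse` object returned above also exposes usage counters and the raw payload built in `t3nsor/__init__.py` below. A minimal sketch of reading them (assuming the package is importable as `t3nsor`; the usage numbers are character counts, not real token counts):

```python
import t3nsor

resp = t3nsor.Completion.create(
    prompt   = 'hello world',
    messages = []
)

print(resp.completion.choices[0].text)  # generated text
print(resp.usage.total_tokens)          # character-based count (see t3nsor/__init__.py)
print(resp.json())                      # raw response dict
```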
t3nsor/__init__.py ADDED
@@ -0,0 +1,118 @@
+ from requests import post
+ from time import time
+ from typing import Generator
+
+
+ class T3nsorResponse:
+     # mirrors the shape of an openai-style completion response object
+
+     class Completion:
+
+         class Choices:
+             def __init__(self, choice: dict) -> None:
+                 self.text          = choice['text']
+                 self.content       = self.text.encode()
+                 self.index          = choice['index']
+                 self.logprobs       = choice['logprobs']
+                 self.finish_reason  = choice['finish_reason']
+
+             def __repr__(self) -> str:
+                 return f'''<__main__.T3nsorResponse.Completion.Choices(\n    text           = {self.text.encode()},\n    index          = {self.index},\n    logprobs       = {self.logprobs},\n    finish_reason  = {self.finish_reason}) object at 0x1337>'''
+
+         def __init__(self, choices: list) -> None:
+             self.choices = [self.Choices(choice) for choice in choices]
+
+     class Usage:
+         def __init__(self, usage_dict: dict) -> None:
+             self.prompt_tokens     = usage_dict['prompt_tokens']
+             self.completion_tokens = usage_dict['completion_tokens']
+             self.total_tokens      = usage_dict['total_tokens']
+
+         def __repr__(self):
+             return f'''<__main__.T3nsorResponse.Usage(\n    prompt_tokens      = {self.prompt_tokens},\n    completion_tokens  = {self.completion_tokens},\n    total_tokens       = {self.total_tokens}) object at 0x1337>'''
+
+     def __init__(self, response_dict: dict) -> None:
+         self.response_dict = response_dict
+         self.id            = response_dict['id']
+         self.object        = response_dict['object']
+         self.created       = response_dict['created']
+         self.model          = response_dict['model']
+         self.completion     = self.Completion(response_dict['choices'])
+         self.usage          = self.Usage(response_dict['usage'])
+
+     def json(self) -> dict:
+         # raw response dict the object was built from
+         return self.response_dict
+
+
+ class Completion:
+     model = {
+         'model': {
+             'id'   : 'gpt-3.5-turbo',
+             'name' : 'Default (GPT-3.5)'
+         }
+     }
+
+     @staticmethod
+     def create(
+         prompt:   str  = 'hello world',
+         messages: list = []) -> T3nsorResponse:
+
+         # single round trip to the t3nsor.tech chat endpoint
+         response = post('https://www.t3nsor.tech/api/chat', json = Completion.model | {
+             'messages' : messages,
+             'key'      : '',
+             'prompt'   : prompt
+         })
+
+         return T3nsorResponse({
+             'id'     : f'cmpl-1337-{int(time())}',
+             'object' : 'text_completion',
+             'created': int(time()),
+             'model'  : Completion.model,
+             'choices': [{
+                 'text'          : response.text,
+                 'index'         : 0,
+                 'logprobs'      : None,
+                 'finish_reason' : 'stop'
+             }],
+             # the upstream API does not report token usage,
+             # so character counts stand in for token counts
+             'usage': {
+                 'prompt_tokens'     : len(prompt),
+                 'completion_tokens' : len(response.text),
+                 'total_tokens'      : len(prompt) + len(response.text)
+             }
+         })
+
+
+ class StreamCompletion:
+     model = {
+         'model': {
+             'id'   : 'gpt-3.5-turbo',
+             'name' : 'Default (GPT-3.5)'
+         }
+     }
+
+     @staticmethod
+     def create(
+         prompt:   str  = 'hello world',
+         messages: list = []) -> Generator[T3nsorResponse, None, None]:
+
+         # same endpoint, but the body is consumed line by line as it streams in
+         response = post('https://www.t3nsor.tech/api/chat', stream = True, json = StreamCompletion.model | {
+             'messages' : messages,
+             'key'      : '',
+             'prompt'   : prompt
+         })
+
+         for resp in response.iter_lines():
+             if resp:
+                 yield T3nsorResponse({
+                     'id'     : f'cmpl-1337-{int(time())}',
+                     'object' : 'text_completion',
+                     'created': int(time()),
+                     'model'  : StreamCompletion.model,
+                     'choices': [{
+                         'text'          : resp.decode(),
+                         'index'         : 0,
+                         'logprobs'      : None,
+                         'finish_reason' : 'stop'
+                     }],
+                     # character counts again stand in for token usage
+                     'usage': {
+                         'prompt_tokens'     : len(prompt),
+                         'completion_tokens' : len(resp.decode()),
+                         'total_tokens'      : len(prompt) + len(resp.decode())
+                     }
+                 })
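`StreamCompletion.create` yields one `T3nsorResponse` per non-empty line of the streamed body, so a caller that wants the whole reply has to reassemble the chunks itself. A minimal sketch under that assumption (the `collect_stream` helper is illustrative, not part of the module):

```python
import t3nsor

def collect_stream(prompt: str, messages: list) -> str:
    # gather the per-line chunks yielded by StreamCompletion.create
    chunks = []
    for response in t3nsor.StreamCompletion.create(prompt = prompt, messages = messages):
        chunks.append(response.completion.choices[0].text)
    # iter_lines() strips the newlines, so rejoin the chunks line by line
    return '\n'.join(chunks)

print(collect_stream('write python code to reverse a string', []))
```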