OssamaLafhel committed on
Commit 859f302 · 1 Parent(s): 35d9624

Update handler.py

Files changed (1)
  1. handler.py +0 -26
handler.py CHANGED
@@ -1,7 +1,5 @@
  import time
  import json
- from fastapi import FastAPI
- from fastapi.middleware.cors import CORSMiddleware
  from pydantic import BaseModel
  import transformers
  from transformers import pipeline
@@ -159,30 +157,6 @@ class GPTJForCausalLM(transformers.models.gptj.modeling_gptj.GPTJForCausalLM):
  transformers.models.gptj.modeling_gptj.GPTJBlock = GPTJBlock # monkey-patch GPT-J


- class Message(BaseModel):
-     input: str = None
-     output: dict = None
-     length: str = None
-     temperature: str = None
-
-
- app = FastAPI()
-
- origins = [
-     "http://localhost:8000",
-     "http://localhost",
-     "http://localhost:3000",
-     "http://127.0.0.1:3000"
- ]
-
- app.add_middleware(
-     CORSMiddleware,
-     allow_origins=origins,
-     allow_credentials=True,
-     allow_methods=["POST"],
-     allow_headers=["*"],
- )
-
  # -----------------------------------------> API <---------------------------------------
  tokenizer = transformers.AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
  model = GPTJForCausalLM.from_pretrained("Kanpredict/gptj-6b-8bits", low_cpu_mem_usage=True)
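Note: after this change, handler.py only loads the tokenizer and the 8-bit GPT-J checkpoint; the FastAPI request layer is gone. A minimal sketch of how the two loaded objects are typically exercised for generation (not part of this commit; the prompt, max_new_tokens, and temperature values are illustrative assumptions, not taken from handler.py):

    prompt = "Once upon a time"                        # assumed example prompt
    inputs = tokenizer(prompt, return_tensors="pt")    # tokenize the prompt
    output_ids = model.generate(
        **inputs,
        max_new_tokens=64,                             # assumed value
        do_sample=True,
        temperature=0.9,                               # assumed value
    )
    print(tokenizer.decode(output_ids[0], skip_special_tokens=True))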
 