Bunpheng committed on
Commit f22612e
1 Parent(s): a3ca12e

Update app.py

Files changed (1)
  1. app.py +4 -9
app.py CHANGED
@@ -4,17 +4,12 @@ import os
 
 app = FastAPI()
 
-hf_token = os.getenv('HUGGINGFACE_HUB_TOKEN')
+pipe_flan = pipeline("text2text-generation", model="google/flan-t5-small")
 
-model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.3", use_auth_token=hf_token)
-tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.3")
 
-@app.post("/inference")
-async def inference(prompt: str):
-    inputs = tokenizer(prompt, return_tensors="pt")
-    outputs = model.generate(**inputs)
-    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
-    return {"response": response}
+def t5(input):
+    output = pipe_flan(input)
+    return {"output": output[0]["generated_text"]}
 
 
 @app.get("/")