Llama / app.py
from fastapi import FastAPI
# Use a pipeline as a high-level helper
from transformers import pipeline

# Load the text-generation model once at startup; trust_remote_code is needed
# because this model repository ships custom modeling code.
pipe = pipeline("text-generation", model="deepseek-ai/DeepSeek-V2.5-1210", trust_remote_code=True)

app = FastAPI()


@app.get("/")
def home():
    # Simple health-check endpoint.
    return {"hello": "Detail"}


@app.get("/ask")
def ask(prompt: str):
    # Run the prompt through the pipeline and return the first result,
    # a dict containing the "generated_text" field.
    result = pipe(prompt)
    return result[0]
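
For reference, a minimal client sketch (not part of app.py): it assumes the app is being served locally with `uvicorn app:app --port 8000` and that the `requests` library is installed; the host, port, and example prompt are placeholders, while the /ask path and prompt parameter come from the code above.

# Client sketch: query the /ask endpoint of the running app.
import requests

response = requests.get(
    "http://localhost:8000/ask",  # assumed local uvicorn host/port
    params={"prompt": "Write a haiku about the sea."},  # example prompt
)
response.raise_for_status()

# /ask returns the first pipeline result, a dict with "generated_text".
print(response.json()["generated_text"])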