GPT2-L-Docker / main.py
from fastapi import FastAPI
from transformers import pipeline

app = FastAPI()

# Text-generation pipeline backed by GPT-2 Large, loaded once at module import (PyTorch backend).
pipe = pipeline(task="text-generation", model="gpt2-large", framework="pt")
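
# Note: for a single prompt, the text-generation pipeline returns a list of
# dicts like [{"generated_text": "..."}]; /generate below reads
# output[0]["generated_text"] from that structure.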


@app.get("/")
def root():
    """
    Return a simple greeting from the root endpoint.
    """
    return {"message": "Hello Ismael"}


@app.get("/generate")
def generate(text: str):
    """
    Generate a continuation of the given input text with the `transformers`
    text-generation pipeline. The model used is `openai-community/gpt2-large`,
    which can be found [here](https://huggingface.co/openai-community/gpt2-large).
    """
    output = pipe(text)
    return {"output": output[0]["generated_text"]}
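
# Usage sketch (assumptions: the app is served with uvicorn on its default
# port 8000, and `uvicorn` and `requests` are installed). Start the server:
#
#   uvicorn main:app --host 0.0.0.0 --port 8000
#
# then query it, for example:
#
#   import requests
#   resp = requests.get("http://localhost:8000/generate",
#                       params={"text": "Once upon a time"})
#   print(resp.json()["output"])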