from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn

from src.TinyLLama import text_generation
from src.classmodels.inputforgeneration import InputForGeneration

app = FastAPI()

# Allow cross-origin requests from any host so the CMS front end can reach the API.
origins = ["*"]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.get("/cmsai/warmuptextgenerationmodel")
def warmupGenerationModel():
    """Load the text-generation model ahead of time so the first real request is fast."""
    warmupModelMessage = text_generation.warmupTextGenerationModel()
    return warmupModelMessage


@app.post("/cmsai/generatetext")
async def generateTextUsingLLama(inputSettings: InputForGeneration):
    """Generate text with the TinyLlama model using the supplied generation settings."""
    try:
        output = text_generation.generateText(inputSettings)
        if output is not None:
            return output
        # Errors are returned as plain strings in the response body.
        return "error when generating text"
    except Exception as e:
        return str(e)


if __name__ == "__main__":
    uvicorn.run(app=app)
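
# Example client call for the /cmsai/generatetext endpoint (a minimal sketch; the
# field names in the payload are assumptions, since the actual schema lives in
# src/classmodels/inputforgeneration.py and may differ):
#
#   import requests
#
#   payload = {"prompt": "Write a short product description.", "max_new_tokens": 128}
#   response = requests.post("http://127.0.0.1:8000/cmsai/generatetext", json=payload)
#   print(response.json())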