"""FastAPI server that proxies chat queries to the SambaNova API as SSE streams.

Serves a small static front end (index.html, two scripts, a stylesheet) and a
single POST /stream_text endpoint that relays a chat completion request to
SambaNova and streams the reply back as Server-Sent Events.
"""

import ast
import json
import os
from datetime import datetime, timedelta, timezone

import aiohttp
from aiocache import cached, SimpleMemoryCache
from aiocache.serializers import JsonSerializer
from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware  # NOTE(review): imported but never added to the app — confirm whether CORS was intended
from fastapi.responses import FileResponse, StreamingResponse
from pydantic import BaseModel

# Server-side fallback key, used when the client supplies no usable api_key.
SAMBA_NOVA_API_KEY = os.environ.get("SAMBA_NOVA_API_KEY", None)

app = FastAPI()

# Constants for caching
CACHE_TIME_SECONDS = 24 * 3600  # 24 hours in seconds

# Paths that must always be served fresh so front-end updates are picked up.
_NO_CACHE_PATHS = {"/", "/script1.js", "/script2.js", "/styles.css"}


@app.middleware("http")
async def add_cache_headers(request: Request, call_next):
    """Attach long-lived browser-caching headers to successful responses.

    The index page and the static assets listed in ``_NO_CACHE_PATHS`` are
    excluded so browsers always refetch them.
    """
    response = await call_next(request)
    if response.status_code == 200 and request.url.path not in _NO_CACHE_PATHS:
        # datetime.utcnow() is deprecated; an aware UTC datetime formats the
        # same "…GMT" string for the Expires header.
        expires = datetime.now(timezone.utc) + timedelta(seconds=CACHE_TIME_SECONDS)
        response.headers["Cache-Control"] = f"public, max-age={CACHE_TIME_SECONDS}"
        response.headers["Expires"] = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
    return response


class StreamTextRequest(BaseModel):
    """Payload for POST /stream_text."""

    query: str  # the user's current message
    history: str = "[]"  # prior messages as a list-of-dicts literal string
    model: str = "llama3-8b"  # loose model hint, resolved by _resolve_model()
    api_key: str = None  # missing or 'none' -> fall back to SAMBA_NOVA_API_KEY


def _resolve_model(model: str) -> str:
    """Map a loose client model hint to a concrete SambaNova model name.

    Any hint containing "405" or "70" selects the corresponding Llama size;
    everything else falls back to the 8B model.
    """
    if "405" in model:
        return "Meta-Llama-3.1-405B-Instruct"
    if "70" in model:
        return "Meta-Llama-3.3-70B-Instruct"
    return "Meta-Llama-3.1-8B-Instruct"


# System prompt sent ahead of every conversation (verbatim from the original).
SYSTEM_MESSAGE = """You are Voicee, a friendly and intelligent voice assistant created by KingNish. Your primary goal is to provide accurate, concise, and engaging responses while maintaining a positive and upbeat tone. Always aim to provide clear and relevant information that directly addresses the user's query, but feel free to sprinkle in a dash of humor—after all, laughter is the best app! 
Keep your responses brief and to the point, avoiding unnecessary details or tangents, unless they’re hilariously relevant. Use a friendly and approachable tone to create a pleasant interaction, and don’t shy away from a cheeky pun or two! Tailor your responses based on the user's input and previous interactions, ensuring a personalized experience that feels like chatting with a witty friend. Invite users to ask follow-up questions or clarify their needs, fostering a conversational flow that’s as smooth as butter on a hot pancake. Aim to put a smile on the user's face with light-hearted and fun responses, and be proactive in offering additional help or suggestions related to the user's query. Remember, your goal is to be the go-to assistant for users, making their experience enjoyable and informative—like a delightful dessert after a hearty meal!"""


@app.post("/stream_text")
async def stream_text(request: StreamTextRequest):
    """Relay a chat request to SambaNova and stream the reply back as SSE.

    Note: the original code wrapped this endpoint in
    @cached(ttl=600, cache=SimpleMemoryCache, serializer=JsonSerializer()).
    That decorator was removed: a StreamingResponse is not JSON-serializable
    and its body iterator is single-use, so the cache could only fail to
    store the value or replay an already-exhausted stream.
    """
    fmodel = _resolve_model(request.model)

    messages = [{'role': 'system', 'content': SYSTEM_MESSAGE}]
    # History arrives as a string literal (e.g. "[{'role': 'user', ...}]").
    # literal_eval safely accepts both Python-repr and JSON-style lists.
    messages.extend(ast.literal_eval(request.history))
    messages.append({'role': 'user', 'content': request.query})

    data = {'messages': messages, 'stream': True, 'model': fmodel}

    # Fall back to the server-side key when the client sends no usable key.
    # (Previously only the literal string 'none' triggered the fallback, so an
    # omitted api_key produced "Authorization: Bearer None".)
    if request.api_key and request.api_key != 'none':
        api_key = request.api_key
    else:
        api_key = SAMBA_NOVA_API_KEY

    async def stream_response():
        """Yield SSE 'data:' frames extracted from the upstream stream."""
        async with aiohttp.ClientSession() as session:
            async with session.post(
                'https://api.sambanova.ai/v1/chat/completions',
                headers={
                    'Authorization': f'Bearer {api_key}',
                    'Content-Type': 'application/json',
                },
                json=data,
            ) as response:
                if response.status != 200:
                    # NOTE(review): by the time this generator runs, the 200
                    # SSE response has already started, so this exception
                    # cannot change the status the client received — it only
                    # aborts the stream. Consider probing upstream before
                    # returning the StreamingResponse.
                    raise HTTPException(status_code=response.status, detail="Error fetching AI response")
                async for line in response.content:
                    line = line.decode('utf-8').strip()
                    # Upstream frames look like "data: {...json...}"; skip
                    # keep-alives and the "data: [DONE]" terminator.
                    if line.startswith('data: {'):
                        json_data = line[6:]
                        try:
                            parsed_data = json.loads(json_data)
                            content = parsed_data.get("choices", [{}])[0].get("delta", {}).get("content", '')
                            if content:
                                # SSE frames are newline-delimited, so strip
                                # newlines out of the payload text.
                                content = content.replace("\n", " ")
                                yield f"data: {content}\n\n"
                        except json.JSONDecodeError as e:
                            print(f"Error decoding JSON: {e}")
                            yield f"data: Error decoding JSON\n\n"

    return StreamingResponse(stream_response(), media_type='text/event-stream')


@app.get("/script1.js")
async def script1_js():
    """Serve the first front-end script."""
    return FileResponse("script1.js")


@app.get("/script2.js")
async def script2_js():
    """Serve the second front-end script."""
    return FileResponse("script2.js")


@app.get("/styles.css")
async def styles_css():
    """Serve the stylesheet."""
    return FileResponse("styles.css")


@app.get("/")
async def read_index():
    """Serve the front-end entry page."""
    return FileResponse('index.html')


if __name__ == "__main__":
    import uvicorn

    # NOTE(review): reload=True is ignored when an app *object* (rather than
    # an import string like "module:app") is passed; kept for parity with the
    # original invocation.
    uvicorn.run(app, host="0.0.0.0", port=7068, reload=True)