from fastapi import FastAPI
from pydantic import BaseModel
import os

import requests


app = FastAPI()


class RequestBody(BaseModel):
    model: str     # which backend to use: "gemini" or "groq"
    key_body: str  # shared secret that must match the server's `key` env var
    text: str      # prompt text forwarded to the selected model


@app.post("/api/v1")
async def generate_response(request_body: RequestBody):
    input_text = request_body.text
    model = request_body.model
    key_true = os.environ['key']
    key_body = request_body.key_body

    if key_body == key_true:
        if model == "gemini":
            # Google Generative Language API call
            key_gemini = os.environ['key_gemini']
            headers = {'Content-Type': 'application/json'}
            params = {'key': key_gemini}
            json_data = {'contents': [{'parts': [{'text': input_text}]}]}
            response = requests.post(
                'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent',
                params=params,
                headers=headers,
                json=json_data,
            )
            all_chunk = response.json()['candidates'][0]['content']['parts'][0]['text']
        elif model == 'groq':
            # Groq OpenAI-compatible chat completions call
            key_groq = os.environ['key_groq']
            headers = {
                'Authorization': f'Bearer {key_groq}',
                'Content-Type': 'application/json',
            }
            json_data = {
                'messages': [{'role': 'user', 'content': input_text}],
                'model': 'llama-3.1-70b-versatile',
            }
            response = requests.post(
                'https://api.groq.com/openai/v1/chat/completions',
                headers=headers,
                json=json_data,
            )
            all_chunk = response.json()["choices"][0]["message"]["content"]
        else:
            # Guard against an unbound variable when an unsupported model is requested
            all_chunk = "Unsupported model."
    else:
        all_chunk = "How's the hack going?"

    return {"response": all_chunk}
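
# --- Example client call (a sketch, not part of the service above) ---
# Assumes the app is served locally, e.g. via `uvicorn main:app`, at
# http://localhost:8000, and that "my-shared-secret" equals the server's
# `key` environment variable; the URL and secret below are placeholders.
#
# import requests
#
# payload = {
#     "model": "gemini",
#     "key_body": "my-shared-secret",
#     "text": "Write one sentence about FastAPI.",
# }
# resp = requests.post("http://localhost:8000/api/v1", json=payload)
# print(resp.json()["response"])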