# NOTE: "Spaces: Running" banner from the Hugging Face Spaces page was captured
# during extraction; kept here as a comment so the file remains valid Python.
# --- Application setup ---
# NOTE(review): `requests` is a synchronous HTTP client; calling it inside an
# async endpoint blocks the event loop. Consider httpx.AsyncClient if this
# service needs request concurrency — TODO confirm load requirements.
import os

import requests
from fastapi import FastAPI, Request
from pydantic import BaseModel

app = FastAPI()
class RequestBody(BaseModel):
    """Payload for the generation endpoint.

    Fields (validated by pydantic):
    - model: backend selector — "gemini", "groq", "bing" or "cohere".
    - key_body: caller-supplied shared secret, checked against the ``key``
      environment variable by ``generate_response``.
    - text: the user prompt forwarded verbatim to the selected backend.
    """

    model: str
    key_body: str
    text: str
async def generate_response(request_body: "RequestBody"):
    """Proxy ``request_body.text`` to the backend named by ``request_body.model``.

    The caller must present the shared secret from the ``key`` environment
    variable in ``key_body``; otherwise a decoy message is returned.  Always
    returns ``{"response": <text>}``.

    Raises:
        KeyError: if the required ``key*`` environment variable is missing.
        requests.HTTPError / KeyError / IndexError: if a backend responds with
            an unexpected payload (responses are indexed without validation).

    NOTE(review): no ``@app.post(...)`` decorator is visible in this chunk —
    confirm the route is registered elsewhere (e.g. ``app.add_api_route``).
    """
    import hmac  # local import: constant-time secret comparison

    input_text = request_body.text
    model = request_body.model
    key_true = os.environ['key']
    key_body = request_body.key_body

    # Reject bad keys up front. compare_digest gives the same True/False result
    # as `==` for str inputs but without a timing side channel.
    if not hmac.compare_digest(key_body, key_true):
        return {"response": "How's the hack going?"}

    if model == "gemini":
        key_gemini = os.environ['key_gemini']
        headers = {'Content-Type': 'application/json'}
        params = {'key': key_gemini}
        json_data = {'contents': [{'parts': [{'text': input_text}]}]}
        response = requests.post(
            'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent',
            params=params,
            headers=headers,
            json=json_data,
        )
        all_chunk = response.json()['candidates'][0]['content']['parts'][0]['text']
    elif model == 'groq':
        key_groq = os.environ['key_groq']
        headers = {
            'Authorization': f'Bearer {key_groq}',
            'Content-Type': 'application/json',
        }
        json_data = {
            'messages': [{'role': 'user', 'content': input_text}],
            'model': 'llama-3.1-70b-versatile',
        }
        response = requests.post(
            'https://api.groq.com/openai/v1/chat/completions',
            headers=headers,
            json=json_data,
        )
        all_chunk = response.json()["choices"][0]["message"]["content"]
    elif model == 'bing':
        # Lazy import: g4f is only needed for this backend.
        from g4f.client import Client
        from g4f import Provider
        client = Client()
        response = client.chat.completions.create(
            model="gpt-4o",
            messages=[{"role": "user", "content": input_text}],
            provider=Provider.Bing,
        )
        all_chunk = response.choices[0].message.content
    elif model == "cohere":
        key_cohere = os.environ['key_cohere']
        headers = {
            'accept': 'application/json',
            'content-type': 'application/json',
            'Authorization': f'Bearer {key_cohere}',
        }
        data = {
            "model": "command-r-plus",
            "messages": [{"role": "user", "content": input_text}],
        }
        response = requests.post(
            'https://api.cohere.com/v2/chat',
            headers=headers,
            json=data,
        )
        all_chunk = response.json()['message']['content'][0]['text']
    else:
        # Fix: the original left all_chunk unbound for an unrecognized model,
        # crashing with NameError (HTTP 500). Return an explicit error instead.
        all_chunk = f"Unknown model: {model}"

    return {"response": all_chunk}