Spaces:
Running
lalashechka
committed on
Update app.py
app.py
CHANGED
@@ -1,28 +1,39 @@
 from fastapi import FastAPI, Request
 from pydantic import BaseModel
+import os
+
 
 app = FastAPI()
 
-# Define the request model
-class Message(BaseModel):
-    role: str
-    content: str
 
 class RequestBody(BaseModel):
     model: str
-
-
-    top_p: float
-    max_tokens: int
+    key_body: str
+    text: str
 
 
 @app.post("/api/v1")
 async def generate_response(request_body: RequestBody):
-
-
-
+    input_text = request_body.text
+    model = request_body.model
+    key_true = os.environ['key']
+    key_body = request_body.key_body
+    if key_body == key_true:
+        if model == "gemini":
+            key_gemini = os.environ['key_gemini']
+            headers = {'Content-Type': 'application/json',}
+            params = {'key': key_gemini}
+            json_data = {'contents': [{'parts': [{'text': promt}]}]}
+            response = requests.post('https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent',params=params,headers=headers,json=json_data,)
+            all_chunk = response.json()['candidates'][0]['content']['parts'][0]['text']
 
-
-
-
-
+        if model == 'groq':
+            key_groq = os.environ['key_groq']
+            headers = {'Authorization': f'Bearer {key_groq}','Content-Type': 'application/json'}
+            json_data = {'messages': [{'role': 'user','content': promt}],'model': 'llama-3.1-70b-versatile',}
+            response = requests.post('https://api.groq.com/openai/v1/chat/completions', headers=headers, json=json_data)
+            all_chunk = response.json()["choices"][0]["message"]["content"]
+
+    if key_body != key_true:
+        all_chunk = "How's the hack going?"
+    return {"response": all_chunk}
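
For readability, here is a minimal sketch of what the updated app.py appears to do, with a few fixes the diff itself does not contain: requests is imported explicitly, the undefined name promt is assumed to mean the incoming text field, and all_chunk is given a default so the endpoint does not fail when an unknown model name is sent. The environment variable names ('key', 'key_gemini', 'key_groq') and the provider URLs are taken from the commit; the rest is an assumption about intent, not the author's exact code.

# Sketch of the updated endpoint, assuming `promt` was meant to be the request's `text`
# field and that `requests` should be imported (neither appears in the committed diff).
import os

import requests
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class RequestBody(BaseModel):
    model: str      # "gemini" or "groq"
    key_body: str   # shared secret, compared against the `key` env var
    text: str       # prompt text forwarded to the selected provider


@app.post("/api/v1")
async def generate_response(request_body: RequestBody):
    input_text = request_body.text
    model = request_body.model
    key_true = os.environ['key']        # expected secret, set in the Space settings
    key_body = request_body.key_body    # secret supplied by the caller

    if key_body != key_true:
        # Wrong secret: return the same canned reply as the commit.
        return {"response": "How's the hack going?"}

    all_chunk = ""  # default so an unknown model name does not raise NameError
    if model == "gemini":
        key_gemini = os.environ['key_gemini']
        headers = {'Content-Type': 'application/json'}
        params = {'key': key_gemini}
        json_data = {'contents': [{'parts': [{'text': input_text}]}]}
        response = requests.post(
            'https://generativelanguage.googleapis.com/v1beta/models/'
            'gemini-1.5-flash-latest:generateContent',
            params=params, headers=headers, json=json_data,
        )
        all_chunk = response.json()['candidates'][0]['content']['parts'][0]['text']

    if model == "groq":
        key_groq = os.environ['key_groq']
        headers = {'Authorization': f'Bearer {key_groq}', 'Content-Type': 'application/json'}
        json_data = {
            'messages': [{'role': 'user', 'content': input_text}],
            'model': 'llama-3.1-70b-versatile',
        }
        response = requests.post('https://api.groq.com/openai/v1/chat/completions',
                                 headers=headers, json=json_data)
        all_chunk = response.json()["choices"][0]["message"]["content"]

    return {"response": all_chunk}

Checking the shared secret first and returning early keeps the provider branches from running for unauthenticated calls, which is also why all_chunk no longer needs to exist on the rejection path.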
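
A caller would then hit the endpoint roughly like this; the Space URL and the shared secret below are placeholders, not values from the commit.

# Hypothetical client call against the /api/v1 route defined above.
import requests

resp = requests.post(
    "https://<your-space-url>/api/v1",
    json={"model": "gemini", "key_body": "<shared secret>", "text": "Hello!"},
)
print(resp.json()["response"])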