import os
from groq import Groq
import gradio as gr
# Read the Groq API key from the environment.
api_key = os.getenv("GROQ_API_KEY2")

client = Groq(api_key=api_key)
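# NOTE: os.getenv returns None when GROQ_API_KEY2 is not set (for example as a
# Hugging Face Space secret or a local environment variable); in that case the
# Groq client cannot authenticate against the API.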

# System message prepended to every request.
system_prompt = {
    "role": "system",
    "content": "You are a helpful assistant. You reply with concise, efficient answers."
}
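
# Streaming chat handler for gr.ChatInterface. `history` is assumed to be the
# tuple-style format (a list of [user_message, assistant_reply] pairs) used by
# Gradio's ChatInterface.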
async def chat_groq(message, history):
    messages = [system_prompt]

    # Rebuild the conversation from the history pairs, then add the new message.
    for msg in history:
        messages.append({"role": "user", "content": str(msg[0])})
        messages.append({"role": "assistant", "content": str(msg[1])})
    messages.append({"role": "user", "content": str(message)})

    response_content = ''

    # Stream the completion from the `llama-3.1-70b-versatile` model.
    stream = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=messages,
        max_tokens=1024,
        temperature=1.3,
        stream=True
    )

    # Accumulate the streamed tokens and yield the partial answer so the
    # Gradio UI updates while the model is still generating.
    for chunk in stream:
        content = chunk.choices[0].delta.content
        if content:
            response_content += content
            yield response_content

# Gradio interface; the retry/undo/clear buttons are hidden by passing None.
with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.ChatInterface(
        chat_groq,
        clear_btn=None,
        undo_btn=None,
        retry_btn=None
    )

demo.queue()
demo.launch()
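
# To run locally (a sketch; assumes the `groq` and `gradio` packages are
# installed and this file is saved as app.py):
#   export GROQ_API_KEY2="<your-groq-api-key>"
#   python app.py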