# Hugging Face Spaces page header captured by extraction (not program text):
# "Spaces: Runtime error" — the Space was failing at startup; see the
# ChatInterface construction below for the likely cause and fix.
import os

import gradio as gr
from groq import Groq
def chatbot(message, history, api_key):
    """Stream a chat completion from the Groq API for a Gradio chat UI.

    Args:
        message: The latest user message.
        history: Prior turns as (user, assistant) string pairs — the
            tuple history format used by gr.ChatInterface.
        api_key: Groq API key supplied via the UI textbox.

    Yields:
        The accumulated assistant reply so far (Gradio renders each
        yield as a progressive update), or an "Error: ..." string if
        the API call fails (e.g. bad key, model unavailable).
    """
    # A fresh client per call lets each user supply their own key.
    client = Groq(api_key=api_key)

    # Rebuild the full conversation: system prompt, prior turns, new message.
    messages = [
        {"role": "system", "content": "You are a helpful assistant."}
    ]
    for human, assistant in history:
        messages.append({"role": "user", "content": human})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    try:
        completion = client.chat.completions.create(
            model="llama-3.2-90b-text-preview",
            messages=messages,
            temperature=1,
            max_tokens=1024,
            top_p=1,
            stream=True,
            stop=None,
        )

        # Accumulate streamed tokens, yielding the running partial reply.
        partial_message = ""
        for chunk in completion:
            delta = chunk.choices[0].delta.content
            if delta is not None:
                partial_message += delta
                yield partial_message
    except Exception as e:
        # Boundary handler: surface API/auth errors in the chat window
        # instead of crashing the UI.
        yield f"Error: {str(e)}"
# Build the Gradio UI: an API-key field plus a streaming chat interface.
with gr.Blocks(theme="soft") as iface:
    gr.Markdown("# Groq LLaMA 3.2 90B Chatbot")
    gr.Markdown("Chat with the LLaMA 3.2 90B model using Groq API")

    with gr.Row():
        api_key_input = gr.Textbox(
            label="Enter your Groq API Key",
            placeholder="sk-...",
            type="password",
        )

    # FIX: the `retry_btn`/`undo_btn`/`clear_btn` keyword arguments were
    # removed in Gradio 5 and raise TypeError at import time — the likely
    # cause of this Space's "Runtime error". Also bind the interface to
    # `chat_ui` instead of rebinding (and shadowing) the `chatbot` handler.
    chat_ui = gr.ChatInterface(
        chatbot,
        additional_inputs=[api_key_input],
        examples=[
            "Tell me a short story about a robot learning to paint.",
            "Explain quantum computing in simple terms.",
            "What are some creative ways to reduce plastic waste?",
        ],
    )

# Launch the interface (blocking; starts the local web server).
iface.launch()