import streamlit as st
from g4f import ChatCompletion

# List of available models
models = [
    "gpt-4o", "gpt-4o-mini", "gpt-4",
    "gpt-4-turbo", "gpt-3.5-turbo",
    "claude-3.5-sonnet", "claude-3-opus", "claude-3-haiku", "claude-3.5",
    "llama-3.1-405b", "gemini-flash", "blackboxai-pro", "openchat-3.5",
    "glm-4-9B", "blackboxai",
]

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Streamlit app title
st.title("Chat with AI Models")

# Model selection
selected_model = st.selectbox("Choose a model:", models)

# Display chat messages from history
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# User input
if user_input := st.chat_input("What do you want to ask?"):
    # Display user message and add it to the history
    st.chat_message("user").markdown(user_input)
    st.session_state.messages.append({"role": "user", "content": user_input})

    # Get response from the selected model
    response = ChatCompletion.create(
        model=selected_model,
        messages=st.session_state.messages,
    )

    # g4f may return either a plain string or an OpenAI-style dict
    if isinstance(response, str):
        response_content = response  # Directly use if it's a string
    else:
        try:
            response_content = response["choices"][0]["message"]["content"]
        except (IndexError, KeyError):
            response_content = "Error: Unexpected response structure."

    # Display assistant response
    with st.chat_message("assistant"):
        st.markdown(response_content)

    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": response_content})