# Gradio chatbot demo (Hugging Face Space): streams GPT-4 replies via the
# OpenAI API into a custom HTML chat display.
import gradio as gr
from openai import OpenAI
import time
import html
def predict(message, history, character, api_key, progress=gr.Progress()):
    """Stream a GPT-4 reply for *message*, given prior chat *history*.

    Parameters
    ----------
    message : str
        The newest user message.
    history : list of [user, assistant] pairs
        Prior turns; an assistant entry may be None if that turn never
        completed (see ``user``, which appends ``[msg, None]``).
    character : str
        Selected character name. NOTE(review): currently unused — confirm
        whether it should become a system message.
    api_key : str
        OpenAI API key supplied by the user at runtime.
    progress : gr.Progress
        Gradio progress tracker wrapped around the token stream.

    Yields
    ------
    str
        The accumulated assistant reply after each streamed chunk.
    """
    client = OpenAI(api_key=api_key)

    # Convert Gradio's pair-based history into OpenAI's message format.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        # Skip assistant turns that never completed: the API rejects
        # messages whose content is None.
        if assistant is not None:
            history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model='gpt-4',
        messages=history_openai_format,
        temperature=1.0,
        stream=True
    )

    partial_message = ""
    for chunk in progress.tqdm(response, desc="Generating"):
        # Guard both cases a stream can produce: a chunk with an empty
        # choices list, and a delta whose content is None/empty.
        if chunk.choices and chunk.choices[0].delta.content:
            partial_message += chunk.choices[0].delta.content
            yield partial_message
            time.sleep(0.01)  # throttle UI refresh rate slightly
def format_history(history):
    """Render chat *history* as the HTML shown in the chat display.

    Each (user, ai) pair becomes a user bubble and, when the AI reply is
    non-empty, an AI bubble. Text is HTML-escaped and newlines are turned
    into ``<br>`` tags.
    """
    def as_html(text):
        # Escape user-controlled text, then preserve line breaks.
        return html.escape(text).replace('\n', '<br>')

    parts = []
    for user_text, ai_text in history:
        parts.append(
            f'<div class="message user-message"><strong>You:</strong> {as_html(user_text)}</div>'
        )
        if ai_text:
            parts.append(
                f'<div class="message ai-message"><strong>AI:</strong> {as_html(ai_text)}</div>'
            )
    return "".join(parts)
css = """
#chat-display {
height: 600px;
overflow-y: auto;
border: 1px solid #ccc;
padding: 10px;
margin-bottom: 10px;
}
.message {
margin-bottom: 10px;
word-wrap: break-word;
}
.user-message {
background-color: #e6f3ff;
padding: 5px;
border-radius: 5px;
}
.ai-message {
background-color: #f0f0f0;
padding: 5px;
border-radius: 5px;
}
"""
def user(user_message, history, character, api_key):
    """Handle a message submit: append the user turn, then stream the reply.

    Yields ``(textbox_value, history, formatted_html)`` triples so Gradio
    clears the textbox immediately and live-updates the chat display as
    tokens arrive.
    """
    # Empty/whitespace submit: re-render the unchanged history.
    # BUG FIX: this must be a yield, not `return value` — inside a
    # generator function a value-carrying return produces no output at
    # all, so Gradio never received these values in the original code.
    if user_message.strip() == "":
        yield "", history, format_history(history)
        return

    # Reserve a slot for the assistant reply; it is filled in as the
    # stream progresses.
    history.append([user_message, None])

    bot_message_generator = predict(user_message, history[:-1], character, api_key)
    for chunk in bot_message_generator:
        history[-1][1] = chunk
        yield "", history, format_history(history)
# UI layout and event wiring. (Also removes a stray "|" scrape artifact
# that trailed the final line and made the file a syntax error.)
with gr.Blocks(css=css) as demo:
    gr.Markdown("<h1 style='text-align: center; margin-bottom: 1rem'>My Chatbot</h1>")

    # Conversation state: list of [user, ai] pairs mirrored into the HTML view.
    chat_history = gr.State([])
    chat_display = gr.HTML(elem_id="chat-display")

    msg = gr.Textbox(
        label="Your message",
        lines=1,
        placeholder="Type your message here... (Press Enter to send)"
    )
    clear = gr.Button("Clear")
    dropdown = gr.Dropdown(
        ["Character 1", "Character 2", "Character 3", "Character 4", "Character 5", "Character 6", "Character 7", "Character 8", "Character 9", "Character 10", "Character 11", "Character 12", "Character 13"],
        label="Characters",
        info="Select the character that you'd like to speak to",
        value="Character 1"
    )
    api_key = gr.Textbox(type="password", label="OpenAI API Key")

    # `user` is a generator, so the display streams token-by-token.
    msg.submit(user, [msg, chat_history, dropdown, api_key], [msg, chat_history, chat_display])

    # Clearing or switching character resets both state and display.
    # FIX: gr.HTML expects a string value, not a list — was ([], []).
    clear.click(lambda: ([], ""), None, [chat_history, chat_display], queue=False)
    dropdown.change(lambda x: ([], ""), dropdown, [chat_history, chat_display])

# Queueing is required for streaming (generator) event handlers.
demo.queue()
demo.launch(max_threads=20)