import gradio as gr
from huggingface_hub import InferenceClient
client = InferenceClient(
"meta-llama/Meta-Llama-3-8B-Instruct",
)
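# chat_mem rebuilds the full role/content message list from the (user, assistant)
# tuples stored by gr.Chatbot, sends it to the model, and returns the cleared
# textbox value plus the updated history.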
def chat_mem(message, chat_history):
    # Start with the system prompt, then replay every previous turn.
    chat_history_role = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_msg, assistant_msg in chat_history:
        chat_history_role.append({"role": "user", "content": user_msg})
        chat_history_role.append({"role": "assistant", "content": assistant_msg})
    chat_history_role.append({"role": "user", "content": message})
    chat_completion = client.chat_completion(
        messages=chat_history_role,
        # stream=True
    )
    chat_history_role.append(
        {"role": "assistant", "content": chat_completion.choices[0].message.content}
    )
    # Convert back to (user, assistant) tuples, skipping the system prompt at index 0.
    contents = [m["content"] for m in chat_history_role]
    chat_history = [(contents[2 * i + 1], contents[2 * i + 2]) for i in range(len(contents) // 2)]
    return "", chat_history
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot()
            msg = gr.Textbox(interactive=True)
            with gr.Row():
                clear = gr.ClearButton([msg, chatbot], icon="https://img.icons8.com/?size=100&id=Xnx8cxDef16O&format=png&color=000000")
                send_btn = gr.Button("Send", variant='primary', icon="https://img.icons8.com/?size=100&id=g8ltXTwIfJ1n&format=png&color=000000")

    msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
    send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()