import gradio as gr
from huggingface_hub import InferenceClient

# Client for the hosted Meta-Llama-3-8B-Instruct model on the Hugging Face Inference API.
# (Gated models may require an access token, e.g. via the `token` argument or the HF_TOKEN env var.)
client = InferenceClient(
    "meta-llama/Meta-Llama-3-8B-Instruct",
)
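

# chat_mem is the single callback for both the textbox submit and the Send button: it
# rebuilds the role-annotated conversation from the Chatbot history, queries the model,
# and returns ("", updated_history) so the textbox is cleared after each turn.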
def chat_mem(message, chat_history):
    print(len(chat_history))  # debug: number of prior (user, assistant) turns
    # Start from the system prompt, then replay the stored (user, assistant) pairs
    # as alternating role messages.
    chat_history_role = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_msg, assistant_msg in chat_history:
        chat_history_role.append({"role": "user", "content": user_msg})
        chat_history_role.append({"role": "assistant", "content": assistant_msg})
    chat_history_role.append({"role": "user", "content": message})
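
    # Send the full conversation to the model; max_tokens caps the length of the reply.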
    chat_completion = client.chat_completion(
        messages=chat_history_role,
        max_tokens=500,
    )
    chat_history_role.append(
        {"role": "assistant", "content": chat_completion.choices[0].message.content}
    )
    print(chat_history_role)
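
    # Convert the role/content list back into the list of (user, assistant) tuples that
    # gr.Chatbot expects; index 0 is the system prompt, so the pairs start at index 1.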
    contents = [m["content"] for m in chat_history_role]
    chat_history = [(contents[i * 2 + 1], contents[i * 2 + 2]) for i in range(len(contents) // 2)]

    # Return an empty string to clear the textbox, plus the updated history.
    return "", chat_history
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot()
            msg = gr.Textbox(interactive=True)
            with gr.Row():
                clear = gr.ClearButton(
                    [msg, chatbot],
                    icon="https://img.icons8.com/?size=100&id=Xnx8cxDef16O&format=png&color=000000",
                )
                send_btn = gr.Button(
                    "Send",
                    variant="primary",
                    icon="https://img.icons8.com/?size=100&id=g8ltXTwIfJ1n&format=png&color=000000",
                )

    msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
    send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()