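"""Gradio chat UI for Mistral-7B-Instruct served via a Hugging Face endpoint,
using LangChain's RunnableWithMessageHistory to keep per-session chat history."""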
import gradio as gr
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_community.llms import HuggingFaceEndpoint
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
# Mistral-7B-Instruct served through the Hugging Face Inference API.
# HuggingFaceEndpoint expects a Hugging Face API token, e.g. via the
# HUGGINGFACEHUB_API_TOKEN environment variable.
repo_id = "mistralai/Mistral-7B-Instruct-v0.3"
llm = HuggingFaceEndpoint(
    repo_id=repo_id,
    max_new_tokens=128,
    temperature=0.2,
)

# In-memory store of chat histories, keyed by session id.
store = {}

def llm_chain(question, chat_history):
    """Answer a question with per-session message history and update the Gradio chat."""

    def get_session_history(session_id: str) -> BaseChatMessageHistory:
        if session_id not in store:
            store[session_id] = ChatMessageHistory()
        return store[session_id]

    # Inject the stored history ahead of the new question.
    prompt = ChatPromptTemplate.from_messages(
        [
            MessagesPlaceholder(variable_name="history"),
            ("human", "Question: {question}"),
        ]
    )
    chain = prompt | llm
    with_message_history = RunnableWithMessageHistory(
        chain,
        get_session_history,
        input_messages_key="question",
        history_messages_key="history",
    )
    config = {"configurable": {"session_id": "abc1"}}
    response = with_message_history.invoke({"question": question}, config=config)
    # Append the new turn to the Gradio chatbot history and clear the textbox.
    chat_history.append((question, response))
    return "", chat_history

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot()
            msg = gr.Textbox(interactive=True)
            with gr.Row():
                clear = gr.ClearButton([msg, chatbot], icon="https://img.icons8.com/?size=100&id=Xnx8cxDef16O&format=png&color=000000")
                send_btn = gr.Button("Send", variant="primary", icon="https://img.icons8.com/?size=100&id=g8ltXTwIfJ1n&format=png&color=000000")
    # Enter in the textbox and the Send button both run llm_chain, which
    # clears the textbox and appends the new turn to the chat window.
    msg.submit(fn=llm_chain, inputs=[msg, chatbot], outputs=[msg, chatbot])
    send_btn.click(fn=llm_chain, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()