jamesthong committed on
Commit
1133401
1 Parent(s): f5f5787

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -45
app.py CHANGED
@@ -1,51 +1,35 @@
1
- import os
2
- from langchain_community.llms import HuggingFaceEndpoint
3
- from langchain.chains import LLMChain
4
- from langchain_core.prompts import PromptTemplate
5
  import gradio as gr
6
- from langchain_community.chat_message_histories import ChatMessageHistory
7
- from langchain_core.chat_history import BaseChatMessageHistory
8
- from langchain_core.runnables.history import RunnableWithMessageHistory
9
- from langchain_core.messages import HumanMessage
10
- from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
11
-
12
-
13
- repo_id = "mistralai/Mistral-7B-Instruct-v0.3"
14
- llm = HuggingFaceEndpoint(
15
- repo_id=repo_id, max_length=128, temperature=0.2
 
 
 
 
 
 
 
 
 
 
16
  )
 
 
17
 
18
- store = {}
19
-
20
- def llm_chain(question, chat_history):
21
-
22
- def get_session_history(session_id: str) -> BaseChatMessageHistory:
23
- if session_id not in store:
24
- store[session_id] = ChatMessageHistory()
25
- return store[session_id]
26
-
27
-
28
-
29
- template = """Question: {question}
30
-
31
- """
32
-
33
- prompt = PromptTemplate.from_template(template)
34
-
35
- chain = prompt | llm
36
-
37
- with_message_history = RunnableWithMessageHistory(chain, get_session_history)
38
- config = {"configurable": {"session_id": "abc1"}}
39
- response = with_message_history.invoke(
40
- [HumanMessage(content=question)],
41
- config=config,
42
- )
43
- chat_history.append((question, response))
44
 
45
  return "", chat_history
46
 
47
 
48
-
49
  with gr.Blocks() as demo:
50
  with gr.Row():
51
  with gr.Column():
@@ -53,10 +37,9 @@ with gr.Blocks() as demo:
53
  msg = gr.Textbox(interactive=True, )
54
  with gr.Row():
55
  clear = gr.ClearButton([msg, chatbot], icon="https://img.icons8.com/?size=100&id=Xnx8cxDef16O&format=png&color=000000")
56
- send_btn = gr.Button("Send", variant='primary', icon="https://img.icons8.com/?size=100&id=g8ltXTwIfJ1n&format=png&color=000000")
57
- msg.submit(fn=llm_chain, inputs=[msg, chatbot], outputs=[msg, chatbot])
58
- send_btn.click(fn=llm_chain, inputs=[msg, chatbot], outputs=[msg, chatbot])
59
-
60
 
61
 
62
  if __name__ == "__main__":
 
 
 
 
 
1
  import gradio as gr
2
+ from huggingface_hub import InferenceClient
3
+
4
# Hosted Inference API client for the instruct-tuned Llama 3 8B chat model.
client = InferenceClient(model="meta-llama/Meta-Llama-3-8B-Instruct")
7
+
8
def chat_mem(message, chat_history):
    """Send *message* plus the prior conversation to the chat model.

    Parameters
    ----------
    message : str
        The new user message from the Gradio textbox.
    chat_history : list[tuple[str, str]]
        Gradio chatbot history as (user, assistant) pairs.

    Returns
    -------
    tuple[str, list[tuple[str, str]]]
        An empty string (clears the textbox) and the history extended
        with the new (message, reply) turn.
    """
    # Rebuild the OpenAI-style role/content message list from the pair history.
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_msg, assistant_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # NOTE(review): remote network call; a failed request will surface as a
    # Gradio error — no retry/handling is attempted here.
    chat_completion = client.chat_completion(
        messages=messages,
        # stream=True
    )
    reply = chat_completion.choices[0].message.content

    # Append the new turn directly instead of reconstructing the whole history
    # from flattened message contents with index arithmetic — the original
    # rebuild was fragile and did extra work for no benefit.
    chat_history = chat_history + [(message, reply)]
    return "", chat_history
31
 
32
 
 
33
  with gr.Blocks() as demo:
34
  with gr.Row():
35
  with gr.Column():
 
37
  msg = gr.Textbox(interactive=True, )
38
  with gr.Row():
39
  clear = gr.ClearButton([msg, chatbot], icon="https://img.icons8.com/?size=100&id=Xnx8cxDef16O&format=png&color=000000")
40
+ send_btn = gr.Button("Send", variant='primary', icon="https://img.icons8.com/?size=100&id=g8ltXTwIfJ1n&format=png&color=000000")
41
+ msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
42
+ send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
 
43
 
44
 
45
  if __name__ == "__main__":