fschwartzer commited on
Commit
065e89b
1 Parent(s): d6ab20f

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -0
app.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import ollama
3
+ import os
4
+
# Configure the ollama client before any chat call: point it at the locally
# hosted server and declare the origin it should accept requests from.
os.environ.update({
    "OLLAMA_HOST": "0.0.0.0:11434",
    "OLLAMA_ORIGINS": "http://0.0.0.0:11434",
})
def get_ai_response(messages):
    """Send the accumulated chat history to the local Llama 3.1 model.

    Parameters
    ----------
    messages : list[dict]
        Chat history in ollama's format: dicts with "role" and "content".

    Returns
    -------
    str | None
        The assistant's reply text, or None if the request failed
        (the error is surfaced in the Streamlit UI via st.error).
    """
    try:
        # Both the request and the response-unpacking stay inside the try:
        # a malformed response dict is reported the same way as a failed call.
        reply = ollama.chat(model="llama3.1", messages=messages)
        return reply['message']['content']
    except Exception as exc:
        # Broad catch is deliberate here: any failure (connection refused,
        # missing model, bad payload) is shown to the user instead of crashing
        # the Streamlit script run.
        st.error(f"Error: {str(exc)}")
        return None
18
+
def main():
    """Streamlit chat UI for conversing with a locally served Llama 3.1 model.

    Streamlit reruns this whole function on every user interaction, so the
    conversation is persisted in ``st.session_state.messages`` and replayed
    at the top of each run.
    """
    st.title("Chat with Llama 3.1")

    # Initialize chat history once per browser session.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Display chat messages from history on app rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # React to user input.
    if prompt := st.chat_input("What is your message?"):
        # Record and display the user's message.
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        # Get AI response for the full history (including the new prompt).
        ai_response = get_ai_response(st.session_state.messages)

        # BUG FIX: get_ai_response returns None on failure. Previously the
        # None was rendered and appended to the history, which corrupts the
        # payload of every subsequent ollama.chat call. Only display and
        # record the reply when one was actually produced; on failure,
        # get_ai_response has already shown st.error to the user.
        if ai_response is not None:
            # Display assistant response in a chat message container.
            with st.chat_message("assistant"):
                st.markdown(ai_response)

            # Add assistant response to chat history.
            st.session_state.messages.append(
                {"role": "assistant", "content": ai_response}
            )
48
+
# Script entry point: launch the Streamlit app when run directly
# (e.g. via `streamlit run app.py`), but not when imported as a module.
if __name__ == "__main__":
    main()