Niansuh committed on
Commit 7a6824f
1 Parent(s): c5e8762

Update app.py

Files changed (1)
  1. app.py +42 -9
app.py CHANGED
@@ -1,6 +1,7 @@
 """ Chatbot
 @author: NiansuhAI
 @email: niansuhtech@gmail.com
+
 """
 import numpy as np
 import streamlit as st
@@ -10,6 +11,10 @@ import sys
 from dotenv import load_dotenv, dotenv_values
 load_dotenv()
 
+
+
+
+
 # initialize the client
 client = OpenAI(
     base_url="https://api-inference.huggingface.co/v1",
@@ -29,7 +34,7 @@ model_links = {
     "Phi-3-mini": "microsoft/Phi-3-mini-4k-instruct",
 }
 
-# Random dog images for error message
+#Random dog images for error message
 random_dog = ["0f476473-2d8b-415e-b944-483768418a95.jpg",
               "1bd75c81-f1d7-4e55-9310-a27595fa8762.jpg",
               "526590d2-8817-4ff0-8c62-fdcba5306d02.jpg",
@@ -44,6 +49,8 @@ random_dog = ["0f476473-2d8b-415e-b944-483768418a95.jpg",
               "6edac66e-c0de-4e69-a9d6-b2e6f6f9001b.jpg",
               "bfb9e165-c643-4993-9b3a-7e73571672a6.jpg"]
 
+
+
 def reset_conversation():
     '''
     Resets Conversation
@@ -51,45 +58,66 @@ def reset_conversation():
     st.session_state.conversation = []
     st.session_state.messages = []
     return None
+
+
+
 
 # Define the available models
-models = [key for key in model_links.keys()]
+models =[key for key in model_links.keys()]
 
 # Create the sidebar with the dropdown for model selection
 selected_model = st.sidebar.selectbox("Select Model", models)
 
-# Create a temperature slider
+#Create a temperature slider
 temp_values = st.sidebar.slider('Select a temperature value', 0.0, 1.0, (0.5))
 
-# Add reset button to clear conversation
-st.sidebar.button('Reset Chat', on_click=reset_conversation) # Reset button
+
+#Add reset button to clear conversation
+st.sidebar.button('Reset Chat', on_click=reset_conversation) #Reset button
+
 
 # Create model description
 st.sidebar.write(f"You're now chatting with **{selected_model}**")
 st.sidebar.markdown("*Generated content may be inaccurate or false.*")
+st.sidebar.markdown("\n[ChatGPTBots](https://chatgptbots.net).")
+
+
+
 
 if "prev_option" not in st.session_state:
     st.session_state.prev_option = selected_model
 
 if st.session_state.prev_option != selected_model:
     st.session_state.messages = []
+    # st.write(f"Changed to {selected_model}")
     st.session_state.prev_option = selected_model
     reset_conversation()
 
-# Pull in the model we want to use
+
+
+#Pull in the model we want to use
 repo_id = model_links[selected_model]
 
+
 st.subheader(f'AI - {selected_model}')
+# st.title(f'ChatBot Using {selected_model}')
+
+# Set a default model
+if selected_model not in st.session_state:
+    st.session_state[selected_model] = model_links[selected_model]
 
 # Initialize chat history
 if "messages" not in st.session_state:
     st.session_state.messages = []
 
+
 # Display chat messages from history on app rerun
 for message in st.session_state.messages:
     with st.chat_message(message["role"]):
         st.markdown(message["content"])
 
+
+
 # Accept user input
 if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
 
@@ -99,6 +127,7 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
     # Add user message to chat history
     st.session_state.messages.append({"role": "user", "content": prompt})
 
+
     # Display assistant response in chat message container
     with st.chat_message("assistant"):
 
@@ -109,14 +138,15 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
                     {"role": m["role"], "content": m["content"]}
                     for m in st.session_state.messages
                 ],
-                temperature=temp_values,
+                temperature=temp_values,#0.5,
                 stream=True,
                 max_tokens=3000,
             )
-
+
             response = st.write_stream(stream)
 
         except Exception as e:
+            # st.empty()
            response = "😵‍💫 Looks like someone unplugged something!\
             \n Either the model space is being updated or something is down.\
             \n\
@@ -124,9 +154,12 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
             \n\
             \n Here's a random pic of a 🐶:"
            st.write(response)
-           random_dog_pick = 'https://random.dog/' + random_dog[np.random.randint(len(random_dog))]
+           random_dog_pick = 'https://random.dog/'+ random_dog[np.random.randint(len(random_dog))]
            st.image(random_dog_pick)
            st.write("This was the error message:")
            st.write(e)
 
+
+
+
    st.session_state.messages.append({"role": "assistant", "content": response})
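
For context, the call sites touched by this commit follow the OpenAI-compatible streaming pattern against the Hugging Face Inference API (load_dotenv, a client with a custom base_url, and a streamed chat completion). Below is a minimal, self-contained sketch of that pattern outside Streamlit. The HF_TOKEN variable name and the hard-coded model id are assumptions for illustration; the commit does not show how app.py reads its API key.

# Minimal sketch of the streaming pattern app.py relies on (assumed env var name: HF_TOKEN).
import os
from dotenv import load_dotenv
from openai import OpenAI

load_dotenv()  # reads a local .env file, as app.py does

client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1",  # same endpoint as in the diff
    api_key=os.environ.get("HF_TOKEN"),  # assumed name; app.py's key lookup is not shown in this commit
)

stream = client.chat.completions.create(
    model="microsoft/Phi-3-mini-4k-instruct",  # one of the model_links values from the diff
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    temperature=0.5,   # app.py passes the sidebar slider value (temp_values) here
    max_tokens=3000,
    stream=True,
)

# Print tokens as they arrive, mirroring what st.write_stream renders in the app.
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)

If this sketch streams correctly, the Streamlit app should as well; when the request fails, app.py's except branch instead shows the random dog image and prints the error message.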