openfree committed
Commit 72fb2d4
1 Parent(s): 95ce689

Update app.py

Files changed (1)
  1. app.py +17 -20
app.py CHANGED
@@ -16,7 +16,6 @@ LLM_MODELS = {
     "Meta Llama3.3-70B": "meta-llama/Llama-3.3-70B-Instruct" # Backup model
 }
 
-# Class that stores the conversation history
 class ChatHistory:
     def __init__(self):
         self.history = []
@@ -26,7 +25,7 @@ class ChatHistory:
     def add_conversation(self, user_msg: str, assistant_msg: str):
         conversation = {
             "timestamp": datetime.now().isoformat(),
-            "conversation": [
+            "messages": [
                 {"role": "user", "content": user_msg},
                 {"role": "assistant", "content": assistant_msg}
             ]
@@ -34,17 +33,16 @@ class ChatHistory:
         self.history.append(conversation)
         self.save_history()
 
-    def get_recent_conversations(self, limit=10):
-        return self.history[-limit:] if self.history else []
-
     def format_for_display(self):
         formatted = []
         for conv in self.history:
             formatted.extend([
-                [conv["conversation"][0]["content"], conv["conversation"][1]["content"]]
+                {"role": "user", "content": conv["messages"][0]["content"]},
+                {"role": "assistant", "content": conv["messages"][1]["content"]}
             ])
         return formatted
 
+
     def clear_history(self):
         self.history = []
         self.save_history()
@@ -201,29 +199,28 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
         client = get_client()
         partial_message = ""
 
-        # Handle the streaming response
-        for msg in client.chat_completion(
-            messages,
-            max_tokens=max_tokens,
-            stream=True,
-            temperature=temperature,
-            top_p=top_p,
-        ):
+        for msg in client.chat_completion(...):
             token = msg.choices[0].delta.get('content', None)
             if token:
                 partial_message += token
-                current_history = history + [[message, partial_message]]
+                current_history = history + [
+                    {"role": "user", "content": message},
+                    {"role": "assistant", "content": partial_message}
+                ]
                 yield "", current_history
 
-        # Save the completed conversation
         chat_history.add_conversation(message, partial_message)
 
     except Exception as e:
         error_msg = f"❌ 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
         chat_history.add_conversation(message, error_msg)
-        yield "", history + [[message, error_msg]]
+        yield "", history + [
+            {"role": "user", "content": message},
+            {"role": "assistant", "content": error_msg}
+        ]
 
 
+
 with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo:
     # Load the existing history
     initial_history = chat_history.format_for_display()
@@ -231,12 +228,12 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo
     with gr.Row():
         with gr.Column(scale=2):
             chatbot = gr.Chatbot(
-                value=initial_history,  # Set the initial history
+                value=[],  # Initialize with an empty list
                 height=600,
                 label="λŒ€ν™”μ°½ πŸ’¬",
-                show_label=True,
-                type="messages"
+                show_label=True
             )
+
             msg = gr.Textbox(
                 label="λ©”μ‹œμ§€ μž…λ ₯",
                 show_label=False,
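
For context, this commit moves ChatHistory and the chat() generator from [user, assistant] pair lists to OpenAI-style role/content dictionaries. Below is a minimal, self-contained sketch of that storage and display format, limited to the methods visible in this diff; save_history is stubbed out here, and the streaming client, file upload, and Gradio wiring from app.py are omitted.

from datetime import datetime


class ChatHistory:
    def __init__(self):
        self.history = []

    def save_history(self):
        # app.py persists the history to disk; a no-op keeps this sketch self-contained
        pass

    def add_conversation(self, user_msg: str, assistant_msg: str):
        # Each exchange is stored under the new "messages" key (renamed from "conversation")
        conversation = {
            "timestamp": datetime.now().isoformat(),
            "messages": [
                {"role": "user", "content": user_msg},
                {"role": "assistant", "content": assistant_msg},
            ],
        }
        self.history.append(conversation)
        self.save_history()

    def format_for_display(self):
        # Flatten every stored exchange into role/content dicts instead of
        # the old [user, assistant] pair lists
        formatted = []
        for conv in self.history:
            formatted.extend([
                {"role": "user", "content": conv["messages"][0]["content"]},
                {"role": "assistant", "content": conv["messages"][1]["content"]},
            ])
        return formatted


if __name__ == "__main__":
    chat_history = ChatHistory()
    chat_history.add_conversation("Hello", "Hi, how can I help?")
    print(chat_history.format_for_display())
    # [{'role': 'user', 'content': 'Hello'},
    #  {'role': 'assistant', 'content': 'Hi, how can I help?'}]

Running the sketch prints a flat list of {"role", "content"} dicts, the same shape that the reworked current_history and the error path in chat() now yield to the Chatbot component.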