openfree committed on
Commit
c22a5cd
β€’
1 Parent(s): 1446c41

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +66 -9
app.py CHANGED
@@ -4,14 +4,11 @@ import gradio as gr
4
  from huggingface_hub import InferenceClient
5
  import pandas as pd
6
  from typing import List, Tuple
 
 
7
 
8
- # .env 파일 λ‘œλ“œ
9
- load_dotenv()
10
-
11
- # HuggingFace 토큰 μ„€μ •
12
  HF_TOKEN = os.getenv("HF_TOKEN")
13
- if not HF_TOKEN:
14
- raise ValueError("HF_TOKEN이 μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. .env νŒŒμΌμ— HF_TOKEN을 μ„€μ •ν•΄μ£Όμ„Έμš”.")
15
 
16
  # LLM Models Definition
17
  LLM_MODELS = {
@@ -19,11 +16,52 @@ LLM_MODELS = {
19
  "Meta Llama3.3-70B": "meta-llama/Llama-3.3-70B-Instruct" # Backup model
20
  }
21
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  def get_client(model_name="Cohere c4ai-crp-08-2024"):
23
  try:
24
  return InferenceClient(LLM_MODELS[model_name], token=HF_TOKEN)
25
  except Exception:
26
- # If primary model fails, try backup model
27
  return InferenceClient(LLM_MODELS["Meta Llama3.3-70B"], token=HF_TOKEN)
28
 
29
  def analyze_file_content(content, file_type):
@@ -117,10 +155,16 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
117
  항상 예의 λ°”λ₯΄κ³  μΉœμ ˆν•˜κ²Œ μ‘λ‹΅ν•˜λ©°, ν•„μš”ν•œ 경우 ꡬ체적인 μ˜ˆμ‹œλ‚˜ μ„€λͺ…을 μΆ”κ°€ν•˜μ—¬
118
  이해λ₯Ό λ•κ² μŠ΅λ‹ˆλ‹€."""
119
 
 
 
 
120
  if uploaded_file:
121
  content, file_type = read_uploaded_file(uploaded_file)
122
  if file_type == "error":
123
- return "", [{"role": "user", "content": message}, {"role": "assistant", "content": content}]
 
 
 
124
 
125
  file_summary = analyze_file_content(content, file_type)
126
 
@@ -172,16 +216,19 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
172
  {"role": "assistant", "content": partial_message}
173
  ]
174
  yield "", current_history
 
 
 
175
 
176
  except Exception as e:
177
  error_msg = f"❌ 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
 
178
  error_history = [
179
  {"role": "user", "content": message},
180
  {"role": "assistant", "content": error_msg}
181
  ]
182
  yield "", error_history
183
 
184
-
185
  with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo:
186
  gr.HTML(
187
  """
@@ -236,6 +283,11 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo
236
  inputs=msg,
237
  )
238
 
 
 
 
 
 
239
  # 이벀트 바인딩
240
  msg.submit(
241
  chat,
@@ -249,6 +301,11 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo
249
  outputs=[msg, chatbot]
250
  )
251
 
 
 
 
 
 
252
  # 파일 μ—…λ‘œλ“œμ‹œ μžλ™ 뢄석
253
  file_upload.change(
254
  lambda: "파일 뢄석을 μ‹œμž‘ν•©λ‹ˆλ‹€...",
 
4
  from huggingface_hub import InferenceClient
5
  import pandas as pd
6
  from typing import List, Tuple
7
+ import json
8
+ from datetime import datetime
9
 
10
+ # ν™˜κ²½ λ³€μˆ˜ μ„€μ •
 
 
 
11
  HF_TOKEN = os.getenv("HF_TOKEN")
 
 
12
 
13
  # LLM Models Definition
14
  LLM_MODELS = {
 
16
  "Meta Llama3.3-70B": "meta-llama/Llama-3.3-70B-Instruct" # Backup model
17
  }
18
 
19
class ChatHistory:
    """Persist the chat transcript as a JSON file under /tmp so it survives process restarts."""

    def __init__(self):
        self.history = []
        # Temporary path used on HF Spaces; /tmp is writable there.
        self.history_file = "/tmp/chat_history.json"
        self.load_history()

    def add_message(self, role: str, content: str):
        """Append one timestamped message and persist immediately."""
        self.history.append(
            {
                "role": role,
                "content": content,
                "timestamp": datetime.now().isoformat(),
            }
        )
        self.save_history()

    def get_history(self):
        """Return the in-memory message list."""
        return self.history

    def clear_history(self):
        """Drop all messages and persist the now-empty list."""
        self.history = []
        self.save_history()

    def save_history(self):
        """Best-effort write to disk; failures are printed, never raised."""
        try:
            with open(self.history_file, 'w', encoding='utf-8') as f:
                json.dump(self.history, f, ensure_ascii=False, indent=2)
        except Exception as e:
            print(f"νžˆμŠ€ν† λ¦¬ μ €μž₯ μ‹€νŒ¨: {e}")

    def load_history(self):
        """Best-effort read of a previously saved history; resets to empty on failure."""
        try:
            if not os.path.exists(self.history_file):
                return
            with open(self.history_file, 'r', encoding='utf-8') as f:
                self.history = json.load(f)
        except Exception as e:
            print(f"νžˆμŠ€ν† λ¦¬ λ‘œλ“œ μ‹€νŒ¨: {e}")
            self.history = []


# Global ChatHistory instance shared by the whole app.
chat_history = ChatHistory()
60
+
61
def get_client(model_name="Cohere c4ai-crp-08-2024"):
    """Build an InferenceClient for the requested model.

    Falls back to the "Meta Llama3.3-70B" backup model when constructing
    the client for the primary model raises for any reason.
    """
    try:
        client = InferenceClient(LLM_MODELS[model_name], token=HF_TOKEN)
    except Exception:
        client = InferenceClient(LLM_MODELS["Meta Llama3.3-70B"], token=HF_TOKEN)
    return client
66
 
67
  def analyze_file_content(content, file_type):
 
155
  항상 예의 λ°”λ₯΄κ³  μΉœμ ˆν•˜κ²Œ μ‘λ‹΅ν•˜λ©°, ν•„μš”ν•œ 경우 ꡬ체적인 μ˜ˆμ‹œλ‚˜ μ„€λͺ…을 μΆ”κ°€ν•˜μ—¬
156
  이해λ₯Ό λ•κ² μŠ΅λ‹ˆλ‹€."""
157
 
158
+ # μ‚¬μš©μž λ©”μ‹œμ§€ μ €μž₯
159
+ chat_history.add_message("user", message)
160
+
161
  if uploaded_file:
162
  content, file_type = read_uploaded_file(uploaded_file)
163
  if file_type == "error":
164
+ error_message = content
165
+ chat_history.add_message("assistant", error_message)
166
+ return "", [{"role": "user", "content": message},
167
+ {"role": "assistant", "content": error_message}]
168
 
169
  file_summary = analyze_file_content(content, file_type)
170
 
 
216
  {"role": "assistant", "content": partial_message}
217
  ]
218
  yield "", current_history
219
+
220
+ # μ™„μ„±λœ 응닡 μ €μž₯
221
+ chat_history.add_message("assistant", partial_message)
222
 
223
  except Exception as e:
224
  error_msg = f"❌ 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {str(e)}"
225
+ chat_history.add_message("assistant", error_msg)
226
  error_history = [
227
  {"role": "user", "content": message},
228
  {"role": "assistant", "content": error_msg}
229
  ]
230
  yield "", error_history
231
 
 
232
  with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo:
233
  gr.HTML(
234
  """
 
283
  inputs=msg,
284
  )
285
 
286
+ # λŒ€ν™”λ‚΄μš© μ§€μš°κΈ° λ²„νŠΌμ— νžˆμŠ€ν† λ¦¬ μ΄ˆκΈ°ν™” κΈ°λŠ₯ μΆ”κ°€
287
+ def clear_chat():
288
+ chat_history.clear_history()
289
+ return None, None
290
+
291
  # 이벀트 바인딩
292
  msg.submit(
293
  chat,
 
301
  outputs=[msg, chatbot]
302
  )
303
 
304
+ clear.click(
305
+ clear_chat,
306
+ outputs=[msg, chatbot]
307
+ )
308
+
309
  # 파일 μ—…λ‘œλ“œμ‹œ μžλ™ 뢄석
310
  file_upload.change(
311
  lambda: "파일 뢄석을 μ‹œμž‘ν•©λ‹ˆλ‹€...",