cutechicken committed on
Commit
1cfe513
β€’
1 Parent(s): 229bfa6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -30
app.py CHANGED
@@ -6,15 +6,36 @@ import pandas as pd
6
  from typing import List, Tuple
7
  import json
8
  from datetime import datetime
 
 
 
9
 
10
  # ν™˜κ²½ λ³€μˆ˜ μ„€μ •
11
  HF_TOKEN = os.getenv("HF_TOKEN")
 
12
 
13
# LLM Models Definition
# Maps UI display names to Hugging Face repository ids.
LLM_MODELS = {
    "Cohere c4ai-crp-08-2024": "CohereForAI/c4ai-command-r-plus-08-2024",  # Default
    "Meta Llama3.3-70B": "meta-llama/Llama-3.3-70B-Instruct"  # Backup model
}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
  class ChatHistory:
20
  def __init__(self):
@@ -34,17 +55,15 @@ class ChatHistory:
34
  self.save_history()
35
 
36
  def format_for_display(self):
37
- # Gradio Chatbot μ»΄ν¬λ„ŒνŠΈμ— λ§žλŠ” ν˜•μ‹μœΌλ‘œ λ³€ν™˜
38
  formatted = []
39
  for conv in self.history:
40
  formatted.append([
41
- conv["messages"][0]["content"], # user message
42
- conv["messages"][1]["content"] # assistant message
43
  ])
44
  return formatted
45
 
46
  def get_messages_for_api(self):
47
- # API ν˜ΈμΆœμ„ μœ„ν•œ λ©”μ‹œμ§€ ν˜•μ‹
48
  messages = []
49
  for conv in self.history:
50
  messages.extend([
@@ -73,15 +92,12 @@ class ChatHistory:
73
  print(f"νžˆμŠ€ν† λ¦¬ λ‘œλ“œ μ‹€νŒ¨: {e}")
74
  self.history = []
75
 
76
-
77
- # μ „μ—­ ChatHistory μΈμŠ€ν„΄μŠ€ 생성
78
  chat_history = ChatHistory()
 
79
 
80
def get_client(model_name="Cohere c4ai-crp-08-2024"):
    """Return an InferenceClient for *model_name*.

    Unknown model names fall back to the backup model without raising,
    preserving the original silent-fallback behavior — but the dictionary
    lookup is now resolved with ``.get`` instead of letting a ``KeyError``
    be swallowed by a blanket ``except`` after a client was half-built.
    """
    backup_repo = LLM_MODELS["Meta Llama3.3-70B"]
    repo_id = LLM_MODELS.get(model_name, backup_repo)
    try:
        return InferenceClient(repo_id, token=HF_TOKEN)
    except Exception:
        # NOTE(review): broad fallback kept from the original — any failure
        # constructing the primary client routes to the backup model.
        return InferenceClient(backup_repo, token=HF_TOKEN)
85
 
86
  def analyze_file_content(content, file_type):
87
  """Analyze file content and return structural summary"""
@@ -164,12 +180,10 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
164
  3. 🎯 질문의 μ˜λ„λ₯Ό μ •ν™•νžˆ νŒŒμ•…ν•˜μ—¬ λ§žμΆ€ν˜• λ‹΅λ³€
165
  4. πŸ“š ν•„μš”ν•œ 경우 μ—…λ‘œλ“œλœ 파일 λ‚΄μš©μ„ μ°Έκ³ ν•˜μ—¬ ꡬ체적인 도움 제곡
166
  5. ✨ 좔가적인 톡찰과 μ œμ•ˆμ„ ν†΅ν•œ κ°€μΉ˜ μžˆλŠ” λŒ€ν™”
167
-
168
  항상 예의 λ°”λ₯΄κ³  μΉœμ ˆν•˜κ²Œ μ‘λ‹΅ν•˜λ©°, ν•„μš”ν•œ 경우 ꡬ체적인 μ˜ˆμ‹œλ‚˜ μ„€λͺ…을 μΆ”κ°€ν•˜μ—¬
169
  이해λ₯Ό λ•κ² μŠ΅λ‹ˆλ‹€."""
170
 
171
  try:
172
- # 파일 μ—…λ‘œλ“œ 처리
173
  if uploaded_file:
174
  content, file_type = read_uploaded_file(uploaded_file)
175
  if file_type == "error":
@@ -193,10 +207,8 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
193
  4. ✨ κ°œμ„  μ œμ•ˆ
194
  5. πŸ’¬ μΆ”κ°€ μ§ˆλ¬Έμ΄λ‚˜ ν•„μš”ν•œ μ„€λͺ…"""
195
 
196
- # λ©”μ‹œμ§€ 처리
197
  messages = [{"role": "system", "content": system_prefix + system_message}]
198
 
199
- # 이전 λŒ€ν™” νžˆμŠ€ν† λ¦¬ μΆ”κ°€
200
  if history:
201
  for user_msg, assistant_msg in history:
202
  messages.append({"role": "user", "content": user_msg})
@@ -204,7 +216,6 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
204
 
205
  messages.append({"role": "user", "content": message})
206
 
207
- # API 호좜 및 응닡 처리
208
  client = get_client()
209
  partial_message = ""
210
 
@@ -221,7 +232,6 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
221
  current_history = history + [[message, partial_message]]
222
  yield "", current_history
223
 
224
- # μ™„μ„±λœ λŒ€ν™” μ €μž₯
225
  chat_history.add_conversation(message, partial_message)
226
 
227
  except Exception as e:
@@ -230,18 +240,16 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
230
  yield "", history + [[message, error_msg]]
231
 
232
  with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo:
233
- # κΈ°μ‘΄ νžˆμŠ€ν† λ¦¬ λ‘œλ“œ
234
  initial_history = chat_history.format_for_display()
235
  with gr.Row():
236
  with gr.Column(scale=2):
237
  chatbot = gr.Chatbot(
238
- value=initial_history, # μ €μž₯된 νžˆμŠ€ν† λ¦¬λ‘œ μ΄ˆκΈ°ν™”
239
  height=600,
240
  label="λŒ€ν™”μ°½ πŸ’¬",
241
  show_label=True
242
  )
243
 
244
-
245
  msg = gr.Textbox(
246
  label="λ©”μ‹œμ§€ μž…λ ₯",
247
  show_label=False,
@@ -266,7 +274,6 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo
266
  temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="μ°½μ˜μ„± μˆ˜μ€€ 🌑️")
267
  top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="응닡 λ‹€μ–‘μ„± πŸ“ˆ")
268
 
269
- # μ˜ˆμ‹œ 질문
270
  gr.Examples(
271
  examples=[
272
  ["μ•ˆλ…•ν•˜μ„Έμš”! μ–΄λ–€ 도움이 ν•„μš”ν•˜μ‹ κ°€μš”? 🀝"],
@@ -278,12 +285,10 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo
278
  inputs=msg,
279
  )
280
 
281
- # λŒ€ν™”λ‚΄μš© μ§€μš°κΈ° λ²„νŠΌμ— νžˆμŠ€ν† λ¦¬ μ΄ˆκΈ°ν™” κΈ°λŠ₯ μΆ”κ°€
282
  def clear_chat():
283
  chat_history.clear_history()
284
  return None, None
285
 
286
- # 이벀트 바인딩
287
  msg.submit(
288
  chat,
289
  inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
@@ -301,7 +306,6 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo
301
  outputs=[msg, chatbot]
302
  )
303
 
304
- # 파일 μ—…λ‘œλ“œμ‹œ μžλ™ 뢄석
305
  file_upload.change(
306
  lambda: "파일 뢄석을 μ‹œμž‘ν•©λ‹ˆλ‹€...",
307
  outputs=msg
@@ -312,4 +316,4 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo
312
  )
313
 
314
  if __name__ == "__main__":
315
- demo.launch()
 
6
  from typing import List, Tuple
7
  import json
8
  from datetime import datetime
9
+ import torch
10
+ from transformers import AutoModelForCausalLM, AutoTokenizer
11
+ import spaces
12
 
13
  # ν™˜κ²½ λ³€μˆ˜ μ„€μ •
14
  HF_TOKEN = os.getenv("HF_TOKEN")
15
+ MODEL_ID = "CohereForAI/c4ai-command-r-plus-08-2024"
16
 
17
class ModelManager:
    """Loads the Command-R+ tokenizer and fp16 model weights once at startup.

    NOTE(review): the visible inference path (``get_client`` →
    ``InferenceClient``) never reads ``self.model`` / ``self.tokenizer`` —
    confirm local weights are actually needed before paying the load cost.
    """

    def __init__(self):
        self.model = None      # populated by setup_model()
        self.tokenizer = None  # populated by setup_model()
        self.setup_model()

    def setup_model(self):
        """Download and initialize the tokenizer and causal-LM weights.

        Raises:
            RuntimeError: if either download/initialization step fails; the
                original exception is chained as the cause.
        """
        try:
            self.tokenizer = AutoTokenizer.from_pretrained(
                MODEL_ID,
                token=HF_TOKEN,
                trust_remote_code=True
            )
            self.model = AutoModelForCausalLM.from_pretrained(
                MODEL_ID,
                token=HF_TOKEN,
                torch_dtype=torch.float16,  # halve memory vs fp32
                device_map="auto",          # place layers on available devices
                trust_remote_code=True
            )
        except Exception as e:
            # Raise a concrete RuntimeError (still an Exception subclass, so
            # existing `except Exception` callers are unaffected) and chain
            # the cause instead of discarding the original traceback with a
            # bare `raise Exception(...)`.
            raise RuntimeError(f"Model loading failed: {e}") from e
39
 
40
  class ChatHistory:
41
  def __init__(self):
 
55
  self.save_history()
56
 
57
  def format_for_display(self):
 
58
  formatted = []
59
  for conv in self.history:
60
  formatted.append([
61
+ conv["messages"][0]["content"],
62
+ conv["messages"][1]["content"]
63
  ])
64
  return formatted
65
 
66
  def get_messages_for_api(self):
 
67
  messages = []
68
  for conv in self.history:
69
  messages.extend([
 
92
  print(f"νžˆμŠ€ν† λ¦¬ λ‘œλ“œ μ‹€νŒ¨: {e}")
93
  self.history = []
94
 
95
+ # μ „μ—­ μΈμŠ€ν„΄μŠ€ 생성
 
96
  chat_history = ChatHistory()
97
+ model_manager = ModelManager()
98
 
99
def get_client():
    """Build an InferenceClient bound to the configured model and token."""
    client = InferenceClient(MODEL_ID, token=HF_TOKEN)
    return client
 
 
 
101
 
102
  def analyze_file_content(content, file_type):
103
  """Analyze file content and return structural summary"""
 
180
  3. 🎯 질문의 μ˜λ„λ₯Ό μ •ν™•νžˆ νŒŒμ•…ν•˜μ—¬ λ§žμΆ€ν˜• λ‹΅λ³€
181
  4. πŸ“š ν•„μš”ν•œ 경우 μ—…λ‘œλ“œλœ 파일 λ‚΄μš©μ„ μ°Έκ³ ν•˜μ—¬ ꡬ체적인 도움 제곡
182
  5. ✨ 좔가적인 톡찰과 μ œμ•ˆμ„ ν†΅ν•œ κ°€μΉ˜ μžˆλŠ” λŒ€ν™”
 
183
  항상 예의 λ°”λ₯΄κ³  μΉœμ ˆν•˜κ²Œ μ‘λ‹΅ν•˜λ©°, ν•„μš”ν•œ 경우 ꡬ체적인 μ˜ˆμ‹œλ‚˜ μ„€λͺ…을 μΆ”κ°€ν•˜μ—¬
184
  이해λ₯Ό λ•κ² μŠ΅λ‹ˆλ‹€."""
185
 
186
  try:
 
187
  if uploaded_file:
188
  content, file_type = read_uploaded_file(uploaded_file)
189
  if file_type == "error":
 
207
  4. ✨ κ°œμ„  μ œμ•ˆ
208
  5. πŸ’¬ μΆ”κ°€ μ§ˆλ¬Έμ΄λ‚˜ ν•„μš”ν•œ μ„€λͺ…"""
209
 
 
210
  messages = [{"role": "system", "content": system_prefix + system_message}]
211
 
 
212
  if history:
213
  for user_msg, assistant_msg in history:
214
  messages.append({"role": "user", "content": user_msg})
 
216
 
217
  messages.append({"role": "user", "content": message})
218
 
 
219
  client = get_client()
220
  partial_message = ""
221
 
 
232
  current_history = history + [[message, partial_message]]
233
  yield "", current_history
234
 
 
235
  chat_history.add_conversation(message, partial_message)
236
 
237
  except Exception as e:
 
240
  yield "", history + [[message, error_msg]]
241
 
242
  with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN πŸ€–") as demo:
 
243
  initial_history = chat_history.format_for_display()
244
  with gr.Row():
245
  with gr.Column(scale=2):
246
  chatbot = gr.Chatbot(
247
+ value=initial_history,
248
  height=600,
249
  label="λŒ€ν™”μ°½ πŸ’¬",
250
  show_label=True
251
  )
252
 
 
253
  msg = gr.Textbox(
254
  label="λ©”μ‹œμ§€ μž…λ ₯",
255
  show_label=False,
 
274
  temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="μ°½μ˜μ„± μˆ˜μ€€ 🌑️")
275
  top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="응닡 λ‹€μ–‘μ„± πŸ“ˆ")
276
 
 
277
  gr.Examples(
278
  examples=[
279
  ["μ•ˆλ…•ν•˜μ„Έμš”! μ–΄λ–€ 도움이 ν•„μš”ν•˜μ‹ κ°€μš”? 🀝"],
 
285
  inputs=msg,
286
  )
287
 
 
288
  def clear_chat():
289
  chat_history.clear_history()
290
  return None, None
291
 
 
292
  msg.submit(
293
  chat,
294
  inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
 
306
  outputs=[msg, chatbot]
307
  )
308
 
 
309
  file_upload.change(
310
  lambda: "파일 뢄석을 μ‹œμž‘ν•©λ‹ˆλ‹€...",
311
  outputs=msg
 
316
  )
317
 
318
  if __name__ == "__main__":
319
+ demo.launch()