ginipick committed on
Commit
d46d9a1
β€’
1 Parent(s): 924be7d

Update app.py

Files changed (1)
  1. app.py +1 -356
app.py CHANGED
@@ -1,357 +1,2 @@
  import os
- from dotenv import load_dotenv
- import gradio as gr
- from huggingface_hub import InferenceClient
- import pandas as pd
- from typing import List, Tuple
- import json
- from datetime import datetime
- from datasets import load_dataset
-
- try:
-     pharmkg_dataset = load_dataset("vinven7/PharmKG")
-     print("PharmKG dataset loaded successfully")
- except Exception as e:
-     print(f"Failed to load PharmKG dataset: {e}")
-     pharmkg_dataset = None
-
- # Environment variable setup
- HF_TOKEN = os.getenv("HF_TOKEN")
-
- # LLM Models Definition
- LLM_MODELS = {
-     "Cohere c4ai-crp-08-2024": "CohereForAI/c4ai-command-r-plus-08-2024",  # Default
-     "Meta Llama3.3-70B": "meta-llama/Llama-3.3-70B-Instruct"  # Backup model
- }
-
- class ChatHistory:
-     def __init__(self):
-         self.history = []
-         self.history_file = "/tmp/chat_history.json"
-         self.load_history()
-
-     def add_conversation(self, user_msg: str, assistant_msg: str):
-         conversation = {
-             "timestamp": datetime.now().isoformat(),
-             "messages": [
-                 {"role": "user", "content": user_msg},
-                 {"role": "assistant", "content": assistant_msg}
-             ]
-         }
-         self.history.append(conversation)
-         self.save_history()
-
-     def format_for_display(self):
-         # Convert to the pair format expected by the Gradio Chatbot component
-         formatted = []
-         for conv in self.history:
-             formatted.append([
-                 conv["messages"][0]["content"],  # user message
-                 conv["messages"][1]["content"]   # assistant message
-             ])
-         return formatted
-
-     def get_messages_for_api(self):
-         # Message format for API calls
-         messages = []
-         for conv in self.history:
-             messages.extend([
-                 {"role": "user", "content": conv["messages"][0]["content"]},
-                 {"role": "assistant", "content": conv["messages"][1]["content"]}
-             ])
-         return messages
-
-     def clear_history(self):
-         self.history = []
-         self.save_history()
-
-     def save_history(self):
-         try:
-             with open(self.history_file, 'w', encoding='utf-8') as f:
-                 json.dump(self.history, f, ensure_ascii=False, indent=2)
-         except Exception as e:
-             print(f"Failed to save history: {e}")
-
-     def load_history(self):
-         try:
-             if os.path.exists(self.history_file):
-                 with open(self.history_file, 'r', encoding='utf-8') as f:
-                     self.history = json.load(f)
-         except Exception as e:
-             print(f"Failed to load history: {e}")
-             self.history = []
-
-
- # Create a global ChatHistory instance
- chat_history = ChatHistory()
-
- def get_client(model_name="Cohere c4ai-crp-08-2024"):
-     try:
-         return InferenceClient(LLM_MODELS[model_name], token=HF_TOKEN)
-     except Exception:
-         return InferenceClient(LLM_MODELS["Meta Llama3.3-70B"], token=HF_TOKEN)
-
- def analyze_file_content(content, file_type):
-     """Analyze file content and return structural summary"""
-     if file_type in ['parquet', 'csv']:
-         try:
-             lines = content.split('\n')
-             header = lines[0]
-             columns = header.count('|') - 1
-             rows = len(lines) - 3
-             return f"📊 Dataset structure: {columns} columns, {rows} rows"
-         except Exception:
-             return "❌ Failed to analyze dataset structure"
-
-     lines = content.split('\n')
-     total_lines = len(lines)
-     non_empty_lines = len([line for line in lines if line.strip()])
-
-     if any(keyword in content.lower() for keyword in ['def ', 'class ', 'import ', 'function']):
-         functions = len([line for line in lines if 'def ' in line])
-         classes = len([line for line in lines if 'class ' in line])
-         imports = len([line for line in lines if 'import ' in line or 'from ' in line])
-         return f"💻 Code structure: {total_lines} lines (functions: {functions}, classes: {classes}, imports: {imports})"
-
-     paragraphs = content.count('\n\n') + 1
-     words = len(content.split())
-     return f"📝 Document structure: {total_lines} lines, {paragraphs} paragraphs, about {words} words"
-
- def read_uploaded_file(file):
-     if file is None:
-         return "", ""
-     try:
-         file_ext = os.path.splitext(file.name)[1].lower()
-
-         if file_ext == '.parquet':
-             df = pd.read_parquet(file.name, engine='pyarrow')
-             content = df.head(10).to_markdown(index=False)
-             return content, "parquet"
-         elif file_ext == '.csv':
-             encodings = ['utf-8', 'cp949', 'euc-kr', 'latin1']
-             for encoding in encodings:
-                 try:
-                     df = pd.read_csv(file.name, encoding=encoding)
-                     content = f"📊 Data preview:\n{df.head(10).to_markdown(index=False)}\n\n"
-                     content += f"\n📈 Data info:\n"
-                     content += f"- Total rows: {len(df)}\n"
-                     content += f"- Total columns: {len(df.columns)}\n"
-                     content += f"- Column list: {', '.join(df.columns)}\n"
-                     content += f"\n📋 Column data types:\n"
-                     for col, dtype in df.dtypes.items():
-                         content += f"- {col}: {dtype}\n"
-                     null_counts = df.isnull().sum()
-                     if null_counts.any():
-                         content += f"\n⚠️ Missing values:\n"
-                         for col, null_count in null_counts[null_counts > 0].items():
-                             content += f"- {col}: {null_count} missing\n"
-                     return content, "csv"
-                 except UnicodeDecodeError:
-                     continue
-             raise ValueError(f"❌ Could not read the file with any supported encoding ({', '.join(encodings)})")
-         else:
-             encodings = ['utf-8', 'cp949', 'euc-kr', 'latin1']
-             for encoding in encodings:
-                 try:
-                     with open(file.name, 'r', encoding=encoding) as f:
-                         content = f.read()
-                         return content, "text"
-                 except UnicodeDecodeError:
-                     continue
-             raise ValueError(f"❌ Could not read the file with any supported encoding ({', '.join(encodings)})")
-     except Exception as e:
-         return f"❌ File read error: {str(e)}", "error"
-
-
- def get_pharmkg_context(query):
-     """Search the PharmKG dataset for information relevant to the query."""
-     if pharmkg_dataset is None:
-         return ""
-
-     try:
-         # Simple keyword search over the dataset
-         relevant_info = []
-
-         # Look up drug-related entries
-         for item in pharmkg_dataset['train']:
-             if query.lower() in str(item).lower():
-                 relevant_info.append(str(item))
-
-         if relevant_info:
-             return "\n\nPharmKG reference information:\n" + "\n".join(relevant_info[:3])
-         return ""
-     except Exception as e:
-         print(f"PharmKG search error: {e}")
-         return ""
-
-
- # SYSTEM_PREFIX moved to a global variable
- SYSTEM_PREFIX = """I am 'GiniGEN Pharm', an AI assistant specializing in pharmacology. Based on the PharmKG database,
- I communicate with the following expertise:
- 1. 💊 Expert knowledge about medications
- 2. 🔬 Drug interaction and side-effect information
- 3. 🧬 Explanations of disease-drug relationships
- 4. 📊 Analysis grounded in clinical data
- 5. ⚕️ Drug safety information
- I follow these principles:
- 1. 🤝 Provide professional, trustworthy information
- 2. 💡 Present scientific evidence with easy-to-understand explanations
- 3. 🎯 Identify the intent of each question and give a tailored answer
- 4. ⚠️ Make clear that this is information, not medical advice
- 5. ✨ Suggest additional references and research data
- Always consult a qualified medical professional for medical decisions."""
-
- def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
-     if not message:
-         yield "", history
-         return
-
-     try:
-         # Add PharmKG context
-         pharmkg_context = get_pharmkg_context(message)
-         system_message = SYSTEM_PREFIX + system_message + pharmkg_context
-
-         # Handle an uploaded file, if any
-         if uploaded_file:
-             content, file_type = read_uploaded_file(uploaded_file)
-             if file_type == "error":
-                 error_message = content
-                 chat_history.add_conversation(message, error_message)
-                 yield "", history + [[message, error_message]]
-                 return
-
-             file_summary = analyze_file_content(content, file_type)
-
-             if file_type in ['parquet', 'csv']:
-                 system_message += f"\n\nFile content:\n```markdown\n{content}\n```"
-             else:
-                 system_message += f"\n\nFile content:\n```\n{content}\n```"
-
-             if message == "Starting file analysis...":
-                 message = f"""[File structure analysis] {file_summary}
- I can help from the following angles:
- 1. 📋 Overall content overview
- 2. 💡 Key characteristics
- 3. 🎯 Practical applications
- 4. ✨ Suggested improvements
- 5. 💬 Follow-up questions or further explanation"""
-
-         # Build the message list
-         messages = [{"role": "system", "content": system_message}]
-
-         # Add previous conversation history
-         if history:
-             for user_msg, assistant_msg in history:
-                 messages.append({"role": "user", "content": user_msg})
-                 messages.append({"role": "assistant", "content": assistant_msg})
-
-         messages.append({"role": "user", "content": message})
-
-         # Call the API and stream the response
-         client = get_client()
-         partial_message = ""
-
-         for msg in client.chat_completion(
-             messages,
-             max_tokens=max_tokens,
-             stream=True,
-             temperature=temperature,
-             top_p=top_p,
-         ):
-             token = msg.choices[0].delta.get('content', None)
-             if token:
-                 partial_message += token
-                 current_history = history + [[message, partial_message]]
-                 yield "", current_history
-
-         # Save the completed conversation
-         chat_history.add_conversation(message, partial_message)
-
-     except Exception as e:
-         error_msg = f"❌ An error occurred: {str(e)}"
-         chat_history.add_conversation(message, error_msg)
-         yield "", history + [[message, error_msg]]
-
- with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN 🤖") as demo:
-     # Load any previously saved history
-     initial_history = chat_history.format_for_display()
-     with gr.Row():
-         with gr.Column(scale=2):
-             chatbot = gr.Chatbot(
-                 value=initial_history,  # initialize with the saved history
-                 height=600,
-                 label="Chat 💬",
-                 show_label=True
-             )
-
-             msg = gr.Textbox(
-                 label="Message input",
-                 show_label=False,
-                 placeholder="Ask me anything... 💭",
-                 container=False
-             )
-             with gr.Row():
-                 clear = gr.ClearButton([msg, chatbot], value="Clear conversation")
-                 send = gr.Button("Send 📤")
-
-         with gr.Column(scale=1):
-             gr.Markdown("### GiniGEN Pharm 🤖 [File Upload] 📁\nSupported formats: text, code, CSV, and Parquet files")
-             file_upload = gr.File(
-                 label="Select a file",
-                 file_types=["text", ".csv", ".parquet"],
-                 type="filepath"
-             )
-
-             with gr.Accordion("Advanced settings ⚙️", open=False):
-                 system_message = gr.Textbox(label="System message 📝", value="")
-                 max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max tokens 📊")
-                 temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Creativity level 🌡️")
-                 top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Response diversity 📈")
-
-     # Example prompts
-     gr.Examples(
-         examples=[
-             ["Hello! What can I help you with? 🤝"],
-             ["Could you explain it in a way that is easy to understand? 📚"],
-             ["How can this actually be put to use? 🎯"],
-             ["Do you have any additional advice? ✨"],
-             ["I have a few more questions; may I ask them? 🤔"],
-         ],
-         inputs=msg,
-     )
-
-     # The clear button also resets the persisted history
-     def clear_chat():
-         chat_history.clear_history()
-         return None, None
-
-     # Event bindings
-     msg.submit(
-         chat,
-         inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-         outputs=[msg, chatbot]
-     )
-
-     send.click(
-         chat,
-         inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-         outputs=[msg, chatbot]
-     )
-
-     clear.click(
-         clear_chat,
-         outputs=[msg, chatbot]
-     )
-
-     # Automatically analyze a file as soon as it is uploaded
-     file_upload.change(
-         lambda: "Starting file analysis...",
-         outputs=msg
-     ).then(
-         chat,
-         inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
-         outputs=[msg, chatbot]
-     )
-
- if __name__ == "__main__":
-     demo.launch()
+ exec(os.environ.get('APP'))