Update app.py
app.py CHANGED
@@ -34,34 +34,25 @@ class ChatHistory:
         self.save_history()
 
     def format_for_display(self):
+        # Convert to the format expected by the Gradio Chatbot component
         formatted = []
         for conv in self.history:
-            formatted.
-
-
+            formatted.append([
+                conv["messages"][0]["content"],  # user message
+                conv["messages"][1]["content"]   # assistant message
             ])
         return formatted
 
-    def clear_history(self):
-        self.history = []
-        self.save_history()
-
-    def save_history(self):
-        try:
-            with open(self.history_file, 'w', encoding='utf-8') as f:
-                json.dump(self.history, f, ensure_ascii=False, indent=2)
-        except Exception as e:
-            print(f"Failed to save history: {e}")
-
-    def load_history(self):
-        try:
-            if os.path.exists(self.history_file):
-                with open(self.history_file, 'r', encoding='utf-8') as f:
-                    self.history = json.load(f)
-        except Exception as e:
-            print(f"Failed to load history: {e}")
-            self.history = []
+    def get_messages_for_api(self):
+        # Message format for API calls
+        messages = []
+        for conv in self.history:
+            messages.extend([
+                {"role": "user", "content": conv["messages"][0]["content"]},
+                {"role": "assistant", "content": conv["messages"][1]["content"]}
+            ])
+        return messages
 
 
 # Create the global ChatHistory instance
 chat_history = ChatHistory()
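For reference, a minimal sketch of the data shapes these two helpers assume. The sample entry below is illustrative, not taken from app.py: only the "messages"/"content" keys are actually read in this hunk, and any other keys stored by add_conversation are not shown in the diff.

# Hypothetical ChatHistory entry, matching the conv["messages"][i]["content"] accesses above.
sample_conv = {
    "messages": [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi, how can I help?"},
    ]
}

# format_for_display() would turn this into gr.Chatbot pairs:
#   [["Hello", "Hi, how can I help?"]]
# get_messages_for_api() would turn it into role/content dicts for the chat API:
#   [{"role": "user", "content": "Hello"},
#    {"role": "assistant", "content": "Hi, how can I help?"}]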
@@ -143,11 +134,11 @@ def read_uploaded_file(file):
     except Exception as e:
         return f"❌ File read error: {str(e)}", "error"
 
-
 def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
     if not message:
         return "", history
 
+
     system_prefix = """I am 'GiniGEN', your friendly and intelligent AI assistant. I will communicate with you according to the following principles:
 
 1. 🤝 Converse in a friendly and empathetic manner
@@ -184,55 +175,57 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
 4. ✨ Suggestions for improvement
 5. 💬 Additional questions or necessary explanations"""
 
-
+
+
+    try:
+        # Set up the system message
         messages = [{"role": "system", "content": system_prefix + system_message}]
 
         # Add the previous conversation history
         if history:
-            for
-                messages.append({"role": "user", "content":
-
-                messages.append({"role": "assistant", "content": h[1]})
+            for user_msg, assistant_msg in history:
+                messages.append({"role": "user", "content": user_msg})
+                messages.append({"role": "assistant", "content": assistant_msg})
 
         messages.append({"role": "user", "content": message})
 
         client = get_client()
         partial_message = ""
 
-        for msg in client.chat_completion(
+        for msg in client.chat_completion(
+            messages,
+            max_tokens=max_tokens,
+            stream=True,
+            temperature=temperature,
+            top_p=top_p,
+        ):
             token = msg.choices[0].delta.get('content', None)
             if token:
                 partial_message += token
-
-
-                    {"role": "assistant", "content": partial_message}
-                ]
+                # Update the history in Gradio Chatbot format
+                current_history = history + [[message, partial_message]]
                 yield "", current_history
 
+        # Save the completed conversation
         chat_history.add_conversation(message, partial_message)
 
     except Exception as e:
         error_msg = f"❌ An error occurred: {str(e)}"
         chat_history.add_conversation(message, error_msg)
-        yield "", history + [
-            {"role": "user", "content": message},
-            {"role": "assistant", "content": error_msg}
-        ]
+        yield "", history + [[message, error_msg]]
 
-
-
 with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", title="GiniGEN 🤖") as demo:
     # Load the existing history
     initial_history = chat_history.format_for_display()
-
     with gr.Row():
         with gr.Column(scale=2):
             chatbot = gr.Chatbot(
-                value=
+                value=initial_history,  # Initialize with the saved history
                 height=600,
                 label="Chat window 💬",
                 show_label=True
-            )
+            )
+
 
             msg = gr.Textbox(
                 label="Message input",
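The diff does not show how chat() is wired into the Blocks UI. Below is a minimal, self-contained sketch of the streaming pattern the new code relies on: a generator that yields ("", updated_pairs) so the Chatbot re-renders on every token while the textbox is cleared. The function and component names here are illustrative, not taken from app.py, and the dummy loop stands in for the tokens that the real app receives from client.chat_completion().

import time
import gradio as gr

def echo_stream(message, history):
    # Dummy token source; the real chat() appends tokens streamed
    # back from the inference client instead.
    partial = ""
    for ch in f"echo: {message}":
        partial += ch
        time.sleep(0.02)
        # Same [[user, assistant], ...] pairs format as format_for_display()
        yield "", history + [[message, partial]]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(height=300)
    msg = gr.Textbox(label="Message input")
    # Each yield clears the textbox and pushes the updated pairs to the Chatbot.
    msg.submit(echo_stream, inputs=[msg, chatbot], outputs=[msg, chatbot])

demo.launch()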