openfree committed on
Commit
ad9db85
•
1 Parent(s): 97e8dd0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -7
app.py CHANGED
@@ -4,8 +4,20 @@ import os
4
  import pandas as pd
5
  from typing import List, Tuple
6
 
7
- # μΆ”λ‘  API ν΄λΌμ΄μ–ΈνŠΈ μ„€μ •
8
- hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
  def read_uploaded_file(file):
11
  if file is None:
@@ -30,7 +42,7 @@ def format_history(history):
30
  formatted_history.append({"role": "assistant", "content": assistant_msg})
31
  return formatted_history
32
 
33
- def chat(message, history, uploaded_file, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
34
  system_prefix = """λ°˜λ“œμ‹œ ν•œκΈ€λ‘œ 닡변할것. λ„ˆλŠ” 주어진 μ†ŒμŠ€μ½”λ“œλ‚˜ 데이터λ₯Ό 기반으둜 "μ„œλΉ„μŠ€ μ‚¬μš© μ„€λͺ… 및 μ•ˆλ‚΄, Q&Aλ₯Ό ν•˜λŠ” 역할이닀". μ•„μ£Ό μΉœμ ˆν•˜κ³  μžμ„Έν•˜κ²Œ 4000토큰 이상 Markdown ν˜•μ‹μœΌλ‘œ μž‘μ„±ν•˜λΌ. λ„ˆλŠ” μž…λ ₯된 λ‚΄μš©μ„ 기반으둜 μ‚¬μš© μ„€λͺ… 및 질의 응닡을 μ§„ν–‰ν•˜λ©°, μ΄μš©μžμ—κ²Œ 도움을 μ£Όμ–΄μ•Ό ν•œλ‹€. μ΄μš©μžκ°€ κΆκΈˆν•΄ ν•  λ§Œν•œ λ‚΄μš©μ— μΉœμ ˆν•˜κ²Œ μ•Œλ €μ£Όλ„λ‘ ν•˜λΌ. 전체 λ‚΄μš©μ— λŒ€ν•΄μ„œλŠ” λ³΄μ•ˆμ„ μœ μ§€ν•˜κ³ , ν‚€ κ°’ 및 μ—”λ“œν¬μΈνŠΈμ™€ ꡬ체적인 λͺ¨λΈμ€ κ³΅κ°œν•˜μ§€ 마라."""
35
 
36
  if uploaded_file:
@@ -56,7 +68,8 @@ def chat(message, history, uploaded_file, system_message="", max_tokens=4000, te
56
 
57
  response = ""
58
  try:
59
- for msg in hf_client.chat_completion(
 
60
  messages,
61
  max_tokens=max_tokens,
62
  stream=True,
@@ -81,7 +94,7 @@ footer {visibility: hidden}
81
  with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
82
  with gr.Row():
83
  with gr.Column(scale=2):
84
- chatbot = gr.Chatbot(height=600) # type="messages" 제거
85
  msg = gr.Textbox(
86
  label="λ©”μ‹œμ§€λ₯Ό μž…λ ₯ν•˜μ„Έμš”",
87
  show_label=False,
@@ -91,6 +104,13 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
91
  clear = gr.ClearButton([msg, chatbot])
92
 
93
  with gr.Column(scale=1):
 
 
 
 
 
 
 
94
  file_upload = gr.File(
95
  label="파일 μ—…λ‘œλ“œ (.csv, .txt, .py, .parquet)",
96
  file_types=[".csv", ".txt", ".py", ".parquet"],
@@ -106,14 +126,14 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
106
  # 이벀트 바인딩
107
  msg.submit(
108
  chat,
109
- inputs=[msg, chatbot, file_upload, system_message, max_tokens, temperature, top_p],
110
  outputs=[msg, chatbot]
111
  )
112
 
113
  # 파일 μ—…λ‘œλ“œ μ‹œ μžλ™ 뢄석
114
  file_upload.change(
115
  chat,
116
- inputs=[gr.Textbox(value="파일 뢄석을 μ‹œμž‘ν•©λ‹ˆλ‹€."), chatbot, file_upload, system_message, max_tokens, temperature, top_p],
117
  outputs=[msg, chatbot]
118
  )
119
 
 
4
  import pandas as pd
5
  from typing import List, Tuple
6
 
7
+ # LLM λͺ¨λΈ μ •μ˜
8
+ LLM_MODELS = {
9
+ "Default": "CohereForAI/c4ai-command-r-plus-08-2024", # κΈ°λ³Έ λͺ¨λΈ
10
+ "Mistral": "mistralai/Mistral-7B-Instruct-v0.2",
11
+ "Zephyr": "HuggingFaceH4/zephyr-7b-beta",
12
+ "OpenChat": "openchat/openchat-3.5",
13
+ "Llama2": "meta-llama/Llama-2-7b-chat-hf",
14
+ "Phi": "microsoft/phi-2",
15
+ "Neural": "nvidia/neural-chat-7b-v3-1",
16
+ "Starling": "HuggingFaceH4/starling-lm-7b-alpha"
17
+ }
18
+
19
+ def get_client(model_name):
20
+ return InferenceClient(LLM_MODELS[model_name], token=os.getenv("HF_TOKEN"))
21
 
22
  def read_uploaded_file(file):
23
  if file is None:
 
42
  formatted_history.append({"role": "assistant", "content": assistant_msg})
43
  return formatted_history
44
 
45
+ def chat(message, history, uploaded_file, model_name, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
46
  system_prefix = """λ°˜λ“œμ‹œ ν•œκΈ€λ‘œ 닡변할것. λ„ˆλŠ” 주어진 μ†ŒμŠ€μ½”λ“œλ‚˜ 데이터λ₯Ό 기반으둜 "μ„œλΉ„μŠ€ μ‚¬μš© μ„€λͺ… 및 μ•ˆλ‚΄, Q&Aλ₯Ό ν•˜λŠ” 역할이닀". μ•„μ£Ό μΉœμ ˆν•˜κ³  μžμ„Έν•˜κ²Œ 4000토큰 이상 Markdown ν˜•μ‹μœΌλ‘œ μž‘μ„±ν•˜λΌ. λ„ˆλŠ” μž…λ ₯된 λ‚΄μš©μ„ 기반으둜 μ‚¬μš© μ„€λͺ… 및 질의 응닡을 μ§„ν–‰ν•˜λ©°, μ΄μš©μžμ—κ²Œ 도움을 μ£Όμ–΄μ•Ό ν•œλ‹€. μ΄μš©μžκ°€ κΆκΈˆν•΄ ν•  λ§Œν•œ λ‚΄μš©μ— μΉœμ ˆν•˜κ²Œ μ•Œλ €μ£Όλ„λ‘ ν•˜λΌ. 전체 λ‚΄μš©μ— λŒ€ν•΄μ„œλŠ” λ³΄μ•ˆμ„ μœ μ§€ν•˜κ³ , ν‚€ κ°’ 및 μ—”λ“œν¬μΈνŠΈμ™€ ꡬ체적인 λͺ¨λΈμ€ κ³΅κ°œν•˜μ§€ 마라."""
47
 
48
  if uploaded_file:
 
68
 
69
  response = ""
70
  try:
71
+ client = get_client(model_name)
72
+ for msg in client.chat_completion(
73
  messages,
74
  max_tokens=max_tokens,
75
  stream=True,
 
94
  with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
95
  with gr.Row():
96
  with gr.Column(scale=2):
97
+ chatbot = gr.Chatbot(height=600)
98
  msg = gr.Textbox(
99
  label="λ©”μ‹œμ§€λ₯Ό μž…λ ₯ν•˜μ„Έμš”",
100
  show_label=False,
 
104
  clear = gr.ClearButton([msg, chatbot])
105
 
106
  with gr.Column(scale=1):
107
+ model_name = gr.Dropdown(
108
+ choices=list(LLM_MODELS.keys()),
109
+ value="Default",
110
+ label="LLM λͺ¨λΈ 선택",
111
+ info="μ‚¬μš©ν•  LLM λͺ¨λΈμ„ μ„ νƒν•˜μ„Έμš”"
112
+ )
113
+
114
  file_upload = gr.File(
115
  label="파일 μ—…λ‘œλ“œ (.csv, .txt, .py, .parquet)",
116
  file_types=[".csv", ".txt", ".py", ".parquet"],
 
126
  # 이벀트 바인딩
127
  msg.submit(
128
  chat,
129
+ inputs=[msg, chatbot, file_upload, model_name, system_message, max_tokens, temperature, top_p],
130
  outputs=[msg, chatbot]
131
  )
132
 
133
  # 파일 μ—…λ‘œλ“œ μ‹œ μžλ™ 뢄석
134
  file_upload.change(
135
  chat,
136
+ inputs=[gr.Textbox(value="파일 뢄석을 μ‹œμž‘ν•©λ‹ˆλ‹€."), chatbot, file_upload, model_name, system_message, max_tokens, temperature, top_p],
137
  outputs=[msg, chatbot]
138
  )
139