import gradio as gr
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from openai import OpenAI, RateLimitError
from translate_utils import translate_ko_to_en
import os
YOUR_OPENAI_API_KEY = os.getenv("YOUR_OPENAI_API_KEY")
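# The OpenAI key is expected as an environment variable (e.g. a Hugging Face Space secret);
# if it is unset, os.getenv returns None and the OpenAI calls below will fail.
# translate_ko_to_en is assumed to be a local helper module shipped alongside this file.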
# Load the embedding model
embedding_model_name = "snunlp/KR-SBERT-V40K-klueNLI-augSTS"
embedding_model = HuggingFaceEmbeddings(
    model_name=embedding_model_name,
    model_kwargs={"device": "cpu"},  # change to "cuda" when a GPU is available
    encode_kwargs={"normalize_embeddings": True},
)
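# normalize_embeddings=True returns unit-length vectors, so similarity search behaves like
# cosine similarity. The same model must be used here as when the index below was built,
# otherwise the query embeddings will not line up with the stored vectors.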
# Load the vector DB
save_path = "./version-2024-12-31"
vectorstore = FAISS.load_local(save_path, embedding_model, allow_dangerous_deserialization=True)
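# FAISS.load_local reads index.faiss / index.pkl from save_path and unpickles the stored
# documents, hence allow_dangerous_deserialization=True; only enable this for index files
# you created yourself or fully trust.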
def chatbot(input_question, eng_trans=True, num_ref=3):
    """RAG chatbot: retrieve context from the FAISS index and answer with an OpenAI chat model."""
    try:
        # int() guards against the Gradio slider delivering the value as a float
        retriever = vectorstore.as_retriever(search_kwargs={"k": int(num_ref)})
        if not eng_trans:
            # Search with the Korean question only
            basic_docs = retriever.invoke(input_question)
        else:
            # Translate the question to English and search with both the Korean and the English query
            eng = translate_ko_to_en(input_question)
            basic_docs = retriever.invoke(input_question)
            eng_docs = retriever.invoke(eng)
            basic_docs = basic_docs + eng_docs
        context = "\n".join([doc.page_content for doc in basic_docs])
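        # Note: when the Korean and the English query hit the same chunks, the context can
        # contain duplicate passages; deduplicating by page_content would be a possible
        # refinement, left out here to keep the original behaviour.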
        client = OpenAI(api_key=YOUR_OPENAI_API_KEY)
        # Any chat-completions model can be used here, e.g. GPT-4 or GPT-4o-mini
        completion = client.chat.completions.create(
            model="gpt-4o-mini-2024-07-18",
            messages=[
                {
                    "role": "user",
                    "content": f"""You are an assistant that explains semiconductors and the Department of Next-Generation Semiconductor.
The Department of Next-Generation Semiconductor was established in the Creative ICT College of Engineering at Chung-Ang University in Seoul. The department head and director is Professor Song, and the current president of the 3rd cohort of the student supporters is an enrolled student.
Courses in this department can also be taken through credit exchange by students of Seoul National University, Chung-Ang University, POSTECH, Soongsil University, Kangwon National University, Daegu University, and Chosun College of Science & Technology.
Answer the question in Korean, based on the following context. If you do not know the answer, tell the user to contact semicon@cau.ac.kr or 02-881-7301.
Context: {context}
Question: {input_question}
"""
                }
            ]
        )
        return completion.choices[0].message.content
    except RateLimitError:
        return "The API usage budget has been exceeded. Please contact the administrator."
    except Exception as e:
        return f"Error: {str(e)}"
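# Optional local smoke test (kept commented out so that running this file only serves the
# Gradio UI below); the example question is just illustrative:
# print(chatbot("What are the operating conditions of an nMOS?", eng_trans=False, num_ref=2))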
# Version styled with a Gradio Blocks layout
with gr.Blocks() as demo:
    # Header image
    gr.Image(
        value="head.png",  # image path or URL
        elem_id="top-image",
        label=None
    )
    # Description text
    gr.Markdown(
        """
# Department of Next-Generation Semiconductor Specialized Agent
- This chatbot answers Korean questions with information about semiconductors and the Department of Next-Generation Semiconductor.<br>
- Some semiconductor technical terms may be answered in English.<br>
### English translation option
- Turning on English translation may improve accuracy, but inference can take longer. <br>
### Number of retrieved documents
- Increasing the number of retrieved documents may improve accuracy, but inference can take longer. <br>
#### Every answered question is billed to the maintainer, so please use the service with restraint.
- If an error appears in the answer box, it means the API credit (budget) has been used up. To keep using the service, please email wjdrbeks1021@cau.ac.kr and ask for the credit to be topped up.
#### Example questions
- Who is Professor Song?
- Which courses does Professor Lee of the Department of Next-Generation Semiconductor teach?
- What are the operating conditions of an nMOS?
        """,
        elem_id="description"
    )
    # Main UI
    with gr.Group():
        with gr.Row():
            # Input column
            with gr.Column():
                input_question = gr.Textbox(
                    label="Question",
                    placeholder="Ask anything about semiconductors or the Department of Next-Generation Semiconductor."
                )
                eng_trans = gr.Checkbox(
                    label="Use English translation",
                    value=False
                )
                num_ref = gr.Slider(
                    minimum=1,
                    maximum=5,
                    value=3,
                    step=1,
                    label="Number of documents to retrieve"
                )
                submit_btn = gr.Button("Ask")
            # Output column
            with gr.Column():
                output_answer = gr.Textbox(
                    label="Answer",
                    placeholder="The answer will appear here...",
                    lines=10
                )
    # Wire the button to the chatbot function
    submit_btn.click(
        fn=chatbot,
        inputs=[input_question, eng_trans, num_ref],
        outputs=output_answer
    )
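# launch() starts the Gradio server; on Hugging Face Spaces it can be called without
# arguments, since the platform supplies the host and port settings.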
demo.launch()