import os

from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")


class LangchainClient:

    def __init__(self):
        # `model` and `api_key` are the standard parameter names in
        # langchain_openai (the older `model_name` / `openai_api_key`
        # aliases also work).
        self.llm = ChatOpenAI(
            api_key=OPENAI_API_KEY,
            temperature=0,
            model="gpt-4o",
        )
        # Per-session chat histories, keyed by session_id.
        self.store = {}

    def create_prompt(self):
        # System prompt: reply in whichever language the user writes in.
        template_prompt = (
            "You are a chatbot that can answer questions in English and Bahasa Indonesia. "
            "Answer in the user's language: if the user writes in Bahasa Indonesia, "
            "reply in Bahasa Indonesia; if the user writes in English, reply in English."
        )

        prompt = ChatPromptTemplate.from_messages(
            [
                ("system", template_prompt),
                # Prior turns are injected here by RunnableWithMessageHistory.
                MessagesPlaceholder(variable_name="history"),
                ("human", "{question}"),
            ]
        )

        return prompt

    def get_session_history(self, session_id: str) -> BaseChatMessageHistory:
        # Create an in-memory history for new sessions, then reuse it.
        if session_id not in self.store:
            self.store[session_id] = ChatMessageHistory()
        return self.store[session_id]

    def create_model(self):
        prompt = self.create_prompt()
        parser = StrOutputParser()
        # Prompt -> chat model -> plain-string output.
        conversation_chain = prompt | self.llm | parser
        # Wrap the chain so each call reads and writes the per-session history.
        conversation_chain_history = RunnableWithMessageHistory(
            conversation_chain,
            self.get_session_history,
            input_messages_key="question",
            history_messages_key="history",
        )
        return conversation_chain_history

    def invoke_llm(self, model, text):
        # The session_id selects which ChatMessageHistory backs this call.
        response = model.invoke(
            {"question": text},
            config={"configurable": {"session_id": "default"}},
        )
        return response
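

if __name__ == "__main__":
    # Minimal usage sketch: build the history-aware chain once, then send a
    # couple of turns in the same "default" session. The example questions are
    # illustrative only; OPENAI_API_KEY must be set in the environment.
    client = LangchainClient()
    chain = client.create_model()
    print(client.invoke_llm(chain, "What is LangChain?"))
    print(client.invoke_llm(chain, "Apa itu LangChain?"))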