Implement LLM and VectorStore modules.
7ffe358
"""
Chatbot Nuevo Régimen Académico
"""
from chatbot.ui import ChatbotInterface
from chatbot.llm import GeminiAI
from langchain.globals import set_verbose, set_debug
def respond(message, history):
prompt = llm.getMainTemplate()
chain = prompt | llm.llm
response = chain.invoke({"message": message, "history": history})
return response.content
if __name__ == "__main__":
set_verbose(True)
set_debug(True)
llm = GeminiAI("gemini-1.5-flash")
ui = ChatbotInterface(respond)
ui.app.launch()
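For context, here is a minimal sketch of what the two project modules might look like, inferred only from how app.py uses them: it assumes GeminiAI wraps a LangChain ChatGoogleGenerativeAI model plus a prompt template exposing the "history" and "message" variables, and that ChatbotInterface exposes a Gradio ChatInterface as `.app`. The class bodies, prompt text, and history format are assumptions, not the repository's actual chatbot/llm.py and chatbot/ui.py.

# Hypothetical reconstruction (not the repo's code) of chatbot/llm.py and chatbot/ui.py,
# inferred from how app.py calls them.
import gradio as gr
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_google_genai import ChatGoogleGenerativeAI


class GeminiAI:
    """Assumed wrapper: holds the Gemini chat model and the main prompt template."""

    def __init__(self, model_name):
        # Reads the GOOGLE_API_KEY environment variable for authentication.
        self.llm = ChatGoogleGenerativeAI(model=model_name)

    def getMainTemplate(self):
        # Must expose the "history" and "message" variables that respond() passes in.
        return ChatPromptTemplate.from_messages([
            ("system", "You answer questions about the Nuevo Régimen Académico."),
            MessagesPlaceholder(variable_name="history"),
            ("human", "{message}"),
        ])


class ChatbotInterface:
    """Assumed wrapper: a Gradio ChatInterface exposed as `.app` so app.py can launch it."""

    def __init__(self, respond_fn):
        # type="messages" makes Gradio pass history as role/content dicts,
        # which LangChain's MessagesPlaceholder can consume directly.
        self.app = gr.ChatInterface(fn=respond_fn, type="messages")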