from langchain.vectorstores import FAISS
# from langchain.chains import ConversationalRetrievalChain
from langchain.chains import RetrievalQA
from langchain.llms import HuggingFaceHub
import gradio as gr
import os
from langchain.embeddings import HuggingFaceEmbeddings
from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
from langchain.document_loaders import PyPDFDirectoryLoader
from langchain.document_loaders.csv_loader import CSVLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.memory import ConversationSummaryBufferMemory
import io
import contextlib

# Embedding model and FAISS index (built offline and persisted under vector_db/).
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L12-v2")
vector_store = FAISS.load_local("vector_db/", embeddings)

# Mixtral-8x7B served through the HuggingFace Hub inference API.
repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={"temperature": 0.01, "max_new_tokens": 2048})

# memory = ConversationSummaryBufferMemory(
#     llm=llm,
#     output_key='answer',
#     memory_key='chat_history',
#     max_token_limit=300,
#     return_messages=True)

# Top-10 similarity retriever over the ticket/document index (needed by echo() below).
retriever = vector_store.as_retriever(
    search_type="similarity",
    search_kwargs={"k": 10, "include_metadata": True})

# qa = ConversationalRetrievalChain.from_llm(
#     llm=llm,
#     memory=memory,
#     chain_type="stuff",
#     retriever=retriever,
#     return_source_documents=True,
#     get_chat_history=lambda h: h,
#     verbose=True)

# Pandas-style agent over the two RAN metrics CSV files.
agent = create_csv_agent(llm, ['data/Gretel_Data.csv', 'data/RAN_Data _T.csv'], verbose=True)


def echo(message, history):
    """RAG handler: answer from the ticket knowledge base and cite the top sources."""
    try:
        qa = RetrievalQA.from_chain_type(llm=llm, retriever=retriever, return_source_documents=True)
        message = ("Your name is Clara. You are a senior telecom network engineer having access to "
                   "troubleshooting tickets data and other technical and product documentation. "
                   "Stick to the knowledge from these tickets. Ask clarification questions if needed. ") + message
        result = qa({"query": message})
        # Bold answer followed by the top-3 source documents (HTML is rendered by the Gradio chatbot).
        bold_answer = "<b>" + result['result'] + "</b>"
        return (bold_answer + "<br><br>"
                + "1. " + str(result["source_documents"][0]) + "<br>"
                + "2. " + str(result["source_documents"][1]) + "<br>"
                + "3. " + str(result["source_documents"][2]))
    except Exception as e:
        return f"An error occurred: {e} {e.args}"


def echo_agent(message, history):
    """Agent handler: run the CSV agent and return its verbose trace plus the final answer."""
    message = "There are 2 df's. If you find a KeyError, check for the same key in the other df.\n" + message
    try:
        # Capture the agent's verbose stdout so the reasoning trace can be shown with the answer.
        with io.StringIO() as buffer:
            with contextlib.redirect_stdout(buffer):
                result = agent.run(message)
            verbose_output = buffer.getvalue()
        # Strip the ANSI colour codes and chain markers that LangChain prints in verbose mode
        # (the exact escape sequences are assumed; they were garbled in the source).
        verbose_output = verbose_output.replace("\x1b[36;1m\x1b[1;3m", "")
        verbose_output = verbose_output.replace("\x1b[32;1m\x1b[1;3m", "")
        verbose_output = verbose_output.replace("\x1b[0m", "")
        verbose_output = verbose_output.replace("> ", "")
        return verbose_output + "<br>" + result + "<br>"
    except Exception as e:
        return f"An error occurred: {e} {e.args}"


demo = gr.ChatInterface(
    fn=echo,
    chatbot=gr.Chatbot(height=300, label="Hi I am Clara!", show_label=True),
    textbox=gr.Textbox(placeholder="Ask me a question", container=True, autofocus=True, scale=7),
    title="Network Ticket Knowledge Management",
    description="Welcome to Verizon Network Operations Center!! I am here to help the Verizon Field Operations team with technical queries & escalations. I am trained on 1000s of RAN, Backhaul, Core network & End user equipment trouble tickets. Ask me!!! ☺",
    theme=gr.themes.Soft(),
    examples=["wifi connected but no internet showing",
              "internet stopped working after primary link down",
              "internet stopped working link not shifted to secondary after primary link down"],
    cache_examples=False,
    retry_btn=None,
    undo_btn="Delete Previous",
    clear_btn="Clear",
    stop_btn="Stop",
)

demo1 = gr.ChatInterface(
    fn=echo_agent,
    chatbot=gr.Chatbot(height=300, label="Hi I am Sam!", show_label=True),
    textbox=gr.Textbox(placeholder="Ask me a question", container=True, autofocus=True, scale=7),
    title="LLM Powered Agent",
    description="Welcome to Verizon RAN Visualization & Analytics powered by GEN AI. I have access to 100s of metrics generated by a RAN base station and can help in visualizing, correlating and generating insights using the power of Conversational AI. ☺",
    theme=gr.themes.Soft(),
    retry_btn=None,
    undo_btn="Delete Previous",
    clear_btn="Clear",
    stop_btn="Stop",
)

demo2 = gr.TabbedInterface([demo, demo1], ["RAG", "AGENT"], title='INCEDO', theme=gr.themes.Soft())
demo2.launch(auth=("admin", "Sam&Clara"))