# chat_csv/app.py
import streamlit as st
import tempfile
import pandas as pd
from langchain.llms import HuggingFacePipeline
from transformers import AutoTokenizer
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.document_loaders.csv_loader import CSVLoader
from langchain.vectorstores import FAISS
from langchain.chains import RetrievalQA
import transformers
import torch
import textwrap
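
# Streamlit app: answers questions about borrower_data.csv by embedding its rows
# into a FAISS index and running LangChain RetrievalQA with a local
# StableLM Zephyr 3B text-generation pipeline.
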
def main():
    st.set_page_config(page_title="Talk with BORROWER data")
    st.title("Talk with BORROWER data")
    query = st.text_input("Send a Message")

    if st.button("Submit Query", type="primary"):
        DB_FAISS_PATH = "vectorstore/db_faiss"
        loader = CSVLoader(file_path="./borrower_data.csv", encoding="utf-8",
                           csv_args={'delimiter': ','})
        data = loader.load()

        # Local text-generation pipeline around StableLM Zephyr 3B
        model = "stabilityai/stablelm-zephyr-3b"
        tokenizer = AutoTokenizer.from_pretrained(model)
        pipeline = transformers.pipeline(
            "text-generation", model=model, tokenizer=tokenizer,
            torch_dtype=torch.bfloat16, device_map="auto", do_sample=True,
            top_k=1, num_return_sequences=1, eos_token_id=tokenizer.eos_token_id,
            offload_folder="offload")
        llm = HuggingFacePipeline(pipeline=pipeline, model_kwargs={'temperature': 0})
        # Embed the CSV rows, build a FAISS index, and persist it to disk
        embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')
        vectorstore = FAISS.from_documents(data, embeddings)
        vectorstore.save_local(DB_FAISS_PATH)

        # Load the saved vectorstore; pickle deserialization must be allowed explicitly
        vectorstore = FAISS.load_local(DB_FAISS_PATH, embeddings,
                                       allow_dangerous_deserialization=True)
        # Retrieval-augmented QA over the indexed CSV rows
        chain = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff",
                                            return_source_documents=True,
                                            retriever=vectorstore.as_retriever())
        result = chain(query)
        st.write(result['result'])


if __name__ == '__main__':
    main()
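
# Rough usage sketch (assumptions, not pinned by this file): the app expects
# borrower_data.csv next to app.py and an environment with streamlit, langchain,
# transformers, torch, accelerate (for device_map="auto"), sentence-transformers,
# and faiss-cpu installed. Launch locally with:
#   streamlit run app.py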