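# Flask app that builds and queries a small RAG-style FAQ index:
#   /train/faq  scrapes the RISE "what-is-rise" page, chunks it, embeds it and saves a FAISS index
#   /ask        reloads the index and retrieves the chunks relevant to a question
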
from flask import Flask, request

from dotenv import load_dotenv

from langchain.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain import HuggingFacePipeline
from langchain.chains import RetrievalQA

from transformers import AutoTokenizer, AutoModelForQuestionAnswering, pipeline

app = Flask(__name__)
load_dotenv()


@app.route("/train/faq", methods=['GET', 'POST'])
def embeddings_faqs():
    # Load the source page and split it into overlapping chunks.
    data = WebBaseLoader("https://rise.mmu.ac.uk/what-is-rise/").load()

    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=150)
    docs = text_splitter.split_documents(data)

    # Embed the chunks with a small sentence-transformers model on CPU.
    modelPath = "sentence-transformers/all-MiniLM-l6-v2"
    model_kwargs = {'device': 'cpu'}
    encode_kwargs = {'normalize_embeddings': False}

    embeddings = HuggingFaceEmbeddings(
        model_name=modelPath,
        model_kwargs=model_kwargs,
        encode_kwargs=encode_kwargs
    )

    # Build the FAISS index and persist it to disk.
    vectorstore = FAISS.from_documents(docs, embeddings)
    vectorstore.save_local("_rise_faq_db")

    return {"trained": "success"}
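
# Example request (an assumption, not shown in the original code: the app is served by
# the default Flask development server on port 5000):
#   curl http://localhost:5000/train/faq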


@app.route('/ask', methods=['GET', 'POST'])
def ask():
    # Extractive question-answering model and tokenizer.
    model_name = "Intel/dynamic_tinybert"

    tokenizer = AutoTokenizer.from_pretrained(model_name, padding=True, truncation=True, max_length=512)

    question_answerer = pipeline(
        "question-answering",
        model=model_name,
        tokenizer=tokenizer,
        return_tensors='pt'
    )

    # LangChain wrapper around the pipeline; constructed here but not used further below.
    llm = HuggingFacePipeline(
        pipeline=question_answerer,
        model_kwargs={"temperature": 0.7, "max_length": 512},
    )

    # Recreate the embedding model used at training time and reload the persisted index.
    modelPath = "sentence-transformers/all-MiniLM-l6-v2"
    model_kwargs = {'device': 'cpu'}
    encode_kwargs = {'normalize_embeddings': False}

    embeddings = HuggingFaceEmbeddings(
        model_name=modelPath,
        model_kwargs=model_kwargs,
        encode_kwargs=encode_kwargs
    )

    persisted_vectorstore = FAISS.load_local("_rise_faq_db", embeddings)

    retriever = persisted_vectorstore.as_retriever()

    docs = retriever.get_relevant_documents("What are the benefits?")
    print(docs[0].page_content)
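    # Sketch (not in the original code): answer the question extractively from the top
    # retrieved chunk using the transformers QA pipeline built above. The question is
    # hard-coded to mirror the retrieval query; a real endpoint would read it from the
    # Flask request instead.
    result = question_answerer(
        question="What are the benefits?",
        context=docs[0].page_content,
    )
    print(result["answer"])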

    return "uip"


@app.route('/', methods=['GET', 'POST'])
def index():
    return {"response": "just some junk response"}
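

# A minimal entry point, assuming the app is meant to be run directly with the Flask
# development server (the original file does not show how it is launched).
if __name__ == "__main__":
    app.run(debug=True)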