import os
import logging

from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
import chainlit as cl
from src.utils import get_docSearch, get_source
from src.model import load_chain
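
## The helpers imported from src are defined elsewhere in this repo; their
## roles, inferred from how they are used below, are:
##   - get_docSearch(file, cl): index the uploaded file into a vector store
##     and keep the chunk documents in the user session
##   - get_source(sources, all_sources, docs, cl): turn the sources cited by
##     the chain into Chainlit elements shown alongside the answer
##   - load_chain(docsearch): wrap the vector store in a QA-with-sources chain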

# Make the info-level log calls below visible by default.
logging.basicConfig(level=logging.INFO)

welcome_message = """Upload a text or PDF file to begin!"""

@cl.on_chat_start
async def start():
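    """Greet the user, ask for a document, index it, and store the QA chain
    in the user session for later questions."""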
    await cl.Message(content="Welcome! Starting a new session.").send()
    logging.info("app started")
    files = None
    while files is None:
        files = await cl.AskFileMessage(
            content=welcome_message,
            accept=["text/plain", "application/pdf"],
            max_size_mb=10,
            timeout=90
        ).send()
    logging.info("uploader executed")
    file = files[0]
    msg = cl.Message(content=f"Processing `{file.name}`...")
    await msg.send()

    logging.info("processing started")

    docsearch = get_docSearch(file, cl)

    logging.info("document indexed successfully")

    chain = load_chain(docsearch)

    logging.info("model loaded successfully")

    
    ## let the user know the system is ready
    msg.content = f"`{file.name}` processed. You can now ask questions!"

    await msg.update()

    logging.info("processing completed")

    cl.user_session.set("chain", chain)

@cl.on_message
async def main(message):
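    """Answer the incoming question with the stored chain and attach the
    source chunks it cites."""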
    chain = cl.user_session.get("chain")
    cb = cl.AsyncLangchainCallbackHandler(
        stream_final_answer=True, answer_prefix_tokens=["FINAL", "ANSWER"]
    
    )

    cb.answer_reached = True
    res = await chain.acall(message, callbacks=[cb])

    answer = res["answer"]
    sources = res["sources"].strip()
    

    ## get the chunk documents kept in the user session
    docs = cl.user_session.get("docs")
    metadatas = [doc.metadata for doc in docs]
    all_sources = [m["source"] for m in metadatas]

    source_elements, answer = get_source(sources, all_sources, docs, cl)

    if cb.has_streamed_final_answer:
        cb.final_stream.elements = source_elements
        await cb.final_stream.update()
    else:
        await cl.Message(content=answer, elements=source_elements).send()
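

## ---------------------------------------------------------------------------
## Hedged sketches, not part of the app: get_docSearch and load_chain live in
## src/ and are not shown in this file. Assuming they follow the usual
## LangChain pattern implied by the code above (a vector index over the
## uploaded file, and a QA-with-sources chain returning "answer"/"sources"),
## they might look roughly like the illustrative functions below. The names,
## chunk sizes, model choice, and source-id format are assumptions, not the
## real implementations.
## ---------------------------------------------------------------------------
def _sketch_get_docsearch(file, cl):
    ## split the uploaded file, embed the chunks into a Chroma index, and keep
    ## the chunk Documents in the user session so main() can read them back
    from langchain.docstore.document import Document
    from langchain.vectorstores import Chroma

    text = file.content.decode("utf-8")  # PDF parsing omitted for brevity
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    texts = splitter.split_text(text)
    metadatas = [{"source": f"{i}-pl"} for i in range(len(texts))]

    docs = [Document(page_content=t, metadata=m) for t, m in zip(texts, metadatas)]
    cl.user_session.set("docs", docs)

    return Chroma.from_texts(texts, OpenAIEmbeddings(), metadatas=metadatas)


def _sketch_load_chain(docsearch):
    ## wrap the vector store in a streaming QA-with-sources chain
    from langchain.chains import RetrievalQAWithSourcesChain
    from langchain.chat_models import ChatOpenAI

    llm = ChatOpenAI(temperature=0, streaming=True)
    return RetrievalQAWithSourcesChain.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=docsearch.as_retriever(),
    )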