import logging

import chainlit as cl

from src.utils import get_docSearch
from src.model import load_chain

welcome_message = """Upload your file here"""


@cl.on_chat_start
async def start():
    await cl.Message(content="Welcome! You are now in a new document Q&A session.").send()

    # Keep asking until the user uploads a file.
    files = None
    while files is None:
        files = await cl.AskFileMessage(
            content=welcome_message,
            accept=["text/plain", "application/pdf"],
            max_size_mb=10,
            timeout=90,
        ).send()
    file = files[0]

    msg = cl.Message(content=f"Processing `{file.name}`...")
    await msg.send()

    # Build a searchable document index from the uploaded file.
    docsearch = get_docSearch(file)

    # Create the question-answering chain backed by that index.
    chain = load_chain(docsearch)

    logging.info("Model loaded successfully")

    
    # Let the user know when the system is ready.
    msg.content = f"`{file.name}` processed. You can now begin asking questions!"

    await msg.update()

    # Store the chain in the user session so it can be reused when answering questions.
    cl.user_session.set("chain", chain)
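

# A minimal sketch of the message handler that answers questions with the stored
# chain, assuming load_chain (in src.model) returns a single-input LangChain chain
# such as RetrievalQAWithSourcesChain; the output key ("answer" vs. "result")
# depends on the concrete chain type, so adjust it to match your setup.
@cl.on_message
async def main(message):
    # The chain was stored for this session in on_chat_start.
    chain = cl.user_session.get("chain")

    # Newer Chainlit versions pass a cl.Message object, older ones a plain string.
    query = message.content if hasattr(message, "content") else message

    # acall accepts a single string for single-input chains and returns a dict.
    res = await chain.acall(query)
    answer = res.get("answer") or res.get("result") or str(res)

    await cl.Message(content=answer).send()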