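"""Chainlit app for question answering over an uploaded document.

The user uploads a text or PDF file, the file is turned into a searchable
index, and a LangChain chain then answers questions with source citations.
"""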
import logging

import chainlit as cl

from src.utils import get_docSearch, get_source
from src.model import load_chain

# Configure the root logger so the logging.info calls below are actually emitted.
logging.basicConfig(level=logging.INFO)

welcome_message = """Upload a text or PDF file to get started."""
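# Note (assumption): get_docSearch is expected to build a searchable index from
# the uploaded file and to stash the split documents in the user session under
# "docs", while load_chain wraps that index in a question-answering chain whose
# result contains "answer" and "sources" keys. Both helpers live in the local
# src package and are not shown in this file.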
@cl.on_chat_start
async def start():
    await cl.Message(content="Welcome to the document Q&A app!").send()
    logging.info("app started")

    # Keep prompting until the user uploads a file.
    files = None
    while files is None:
        files = await cl.AskFileMessage(
            content=welcome_message,
            accept=["text/plain", "application/pdf"],
            max_size_mb=10,
            timeout=90,
        ).send()
    logging.info("file uploader executed")

    file = files[0]
    msg = cl.Message(content=f"Processing `{file.name}`...")
    await msg.send()
    logging.info("processing started")

    # Build the document search index from the uploaded file.
    docsearch = get_docSearch(file, cl)
    logging.info("document indexed successfully")

    chain = load_chain(docsearch)
    logging.info("model loaded successfully")

    # Let the user know the system is ready.
    msg.content = f"`{file.name}` processed. You can now ask questions!"
    await msg.update()
    logging.info("processing completed")

    # Store the chain so it can be reused for every message in this session.
    cl.user_session.set("chain", chain)
    logging.info("chain saved for active session")
@cl.on_message
async def main(message):
    # Retrieve the chain stored for this user session.
    chain = cl.user_session.get("chain")
    logging.info(f"retrieved chain for QA {type(chain)}")

    # Stream the final answer back to the user as it is generated.
    cb = cl.AsyncLangchainCallbackHandler(
        stream_final_answer=True, answer_prefix_tokens=["FINAL", "ANSWER"]
    )
    cb.answer_reached = True
    logging.info("callback handler defined")

    res = await chain.acall(message, callbacks=[cb])
    logging.info("chain call completed")

    answer = res["answer"]
    sources = res["sources"].strip()

    # Get the documents from the user session and map the answer back to its sources.
    docs = cl.user_session.get("docs")
    metadatas = [doc.metadata for doc in docs]
    all_sources = [m["source"] for m in metadatas]
    source_elements = get_source(sources, all_sources, docs, cl)
    logging.info("source elements resolved")

    if cb.has_streamed_final_answer:
        # Attach the sources to the already-streamed answer.
        cb.final_stream.elements = source_elements
        await cb.final_stream.update()
        logging.info("streamed answer updated with sources")
    else:
        await cl.Message(content=answer, elements=source_elements).send()
        logging.info("answer message sent")