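"""Chainlit UI for a RAG-backed social media post generator.

Users can import a web page or a Text/PDF file into the Qdrant vector store,
or submit an idea that is turned into a post via `graph.getSocialMediaPost`.
"""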
import chainlit as cl
from helper_functions import process_file, load_documents_from_url
import models
import agents
import graph
import asyncio


@cl.on_chat_start
async def on_chat_start():
    global qdrant_store
    qdrant_store = models.semantic_tuned_Qdrant_vs
    global retrieval_augmented_qa_chain
    retrieval_augmented_qa_chain = agents.simple_rag_chain

    # Greet the user with an action picker as soon as the chat starts.
    res = await ask_action()
    await handle_response(res)


@cl.author_rename
def rename(orig_author: str):
return "AI Assistant"
@cl.on_message
async def main(message: cl.Message):
    # await cl.Message(f"Processing `{message.content}`", disable_human_feedback=True)
    if message.content.startswith("http://") or message.content.startswith("https://"):
        message_type = "url"
    else:
        message_type = "question"

    if message_type == "url":
        await cl.Message(content=f"Processing `{message.content}`", disable_human_feedback=True).send()
        try:
            # Run the document loading and splitting in a thread so the event loop stays responsive
            docs = await asyncio.to_thread(load_documents_from_url, message.content)
            await cl.Message(content="loaded docs").send()
            splits = await asyncio.to_thread(models.semanticChunker_tuned.split_documents, docs)
            await cl.Message(content="split docs").send()
            # Tag each chunk so user-supplied content is identifiable in the vector store
            for i, doc in enumerate(splits):
                doc.metadata["user_upload_source"] = f"source_{i}"
            print(f"Processing {len(splits)} text chunks")

            # Add to the qdrant_store asynchronously
            await asyncio.to_thread(qdrant_store.add_documents, splits)

            await cl.Message(content=f"Processing `{message.content}` done. You can now ask questions!").send()
        except Exception as e:
            await cl.Message(content=f"Error processing the document: {e}").send()
        res = await ask_action()
        await handle_response(res)
    else:
        # Handle the question as usual
        await cl.Message(content="Our specialist is working...", disable_human_feedback=True).send()
        # response = await asyncio.to_thread(retrieval_augmented_qa_chain.invoke, {"question": message.content})
        response = await asyncio.to_thread(graph.getSocialMediaPost, message.content)
        print(response)
        await cl.Message(content=response).send()
        res = await ask_action()
        await handle_response(res)


## Chainlit helper functions
async def ask_action():
    res = await cl.AskActionMessage(
        content="Pick an action!",
        actions=[
            cl.Action(name="Question", value="question", label="Create a post"),
            cl.Action(name="File", value="file", label="Import a file"),
            cl.Action(name="Url", value="url", label="Import a Webpage"),
        ],
    ).send()
    return res


async def handle_response(res):
    if res and res.get("value") == "file":
        files = await cl.AskFileMessage(
            content="Please upload a Text or PDF file to begin!",
            accept=["text/plain", "application/pdf"],
            max_size_mb=12,
        ).send()
        file = files[0]

        msg = cl.Message(
            content=f"Processing `{file.name}`...", disable_human_feedback=True
        )
        await msg.send()

        # Load and split the file off the event loop
        docs = await asyncio.to_thread(process_file, file)
        await cl.Message(content="loaded docs").send()
        splits = await asyncio.to_thread(models.semanticChunker_tuned.split_documents, docs)
        await cl.Message(content="split docs").send()
        # Tag each chunk so user-supplied content is identifiable in the vector store
        for i, doc in enumerate(splits):
            doc.metadata["user_upload_source"] = f"source_{i}"
        print(f"Processing {len(splits)} text chunks")

        # Add to the qdrant_store
        await asyncio.to_thread(qdrant_store.add_documents, splits)
        await cl.Message(content="added to vs").send()

        await cl.Message(content=f"Processing `{file.name}` done.").send()
        res = await ask_action()
        await handle_response(res)

    if res and res.get("value") == "url":
        await cl.Message(content="Submit a url link in the message box below.").send()

    if res and res.get("value") == "question":
        await cl.Message(content="Give us your idea!").send()