danicafisher committed
Commit dbcfa6e · 1 Parent(s): 7a2f93b

Adds third selection

Files changed (1)
  1. app.py +64 -32
app.py CHANGED
@@ -8,11 +8,11 @@ from langchain_core.caches import InMemoryCache
 from operator import itemgetter
 from langchain_core.runnables.passthrough import RunnablePassthrough
 from langchain_qdrant import QdrantVectorStore, Qdrant
-from langchain_community.document_loaders import PyMuPDFLoader
+from langchain_community.document_loaders import PyPDFLoader
 import uuid
 import chainlit as cl
 import os
-from helper_functions import process_file, add_to_qdrant
+from helper_functions import process_file, load_documents_from_url, add_to_qdrant
 
 chat_model = ChatOpenAI(model="gpt-4o-mini")
 te3_small = OpenAIEmbeddings(model="text-embedding-3-small")
@@ -38,24 +38,81 @@ chat_prompt = ChatPromptTemplate.from_messages([("system", rag_system_prompt_tem
 @cl.on_chat_start
 async def on_chat_start():
     qdrant_client = QdrantClient(url=os.environ["QDRANT_ENDPOINT"], api_key=os.environ["QDRANT_API_KEY"])
+    global qdrant_store
     qdrant_store = Qdrant(
         client=qdrant_client,
         collection_name="kai_test_docs",
         embeddings=te3_small
     )
 
+    res = await ask_action()
+    await handle_response(res)
+
+    # Load the style guide from the local file system
+    style_guide_path = "./public/CoExperiences Writing Style Guide V1 (2024).pdf"
+    loader = PyPDFLoader(style_guide_path)
+    style_guide_docs = loader.load()
+    style_guide_text = "\n".join([doc.page_content for doc in style_guide_docs])
+
+    retriever = qdrant_store.as_retriever()
+    global retrieval_augmented_qa_chain
+    retrieval_augmented_qa_chain = (
+        {
+            "context": itemgetter("question") | retriever,
+            "question": itemgetter("question"),
+            "writing_style_guide": lambda _: style_guide_text
+        }
+        | RunnablePassthrough.assign(context=itemgetter("context"))
+        | chat_prompt
+        | chat_model
+    )
+
+@cl.author_rename
+def rename(orig_author: str):
+    return "Marketing Assistant"
+
+@cl.on_message
+async def main(message: cl.Message, message_type: str):
+    if message_type == "url":
+        # load the file
+        docs = load_documents_from_url(message.content)
+        splits = text_splitter.split_documents(docs)
+        for i, doc in enumerate(splits):
+            doc.metadata["user_upload_source"] = f"source_{i}"
+        print(f"Processing {len(docs)} text chunks")
+
+        # Add to the qdrant_store
+        qdrant_store.add_documents(
+            documents=splits
+        )
+
+        await cl.Message(f"Processing `{response.url}` done. You can now ask questions!").send()
+
+    else:
+        response = retrieval_augmented_qa_chain.invoke({"question": message.content})
+        await cl.Message(content=response.content).send()
+
+    res = await ask_action()
+    await handle_response(res)
+
+
+## Chainlit helper functions
+async def ask_action():
     res = await cl.AskActionMessage(
         content="Pick an action!",
         actions=[
             cl.Action(name="Question", value="question", label="Ask a question"),
-            cl.Action(name="File", value="file", label="Upload a file or URL"),
+            cl.Action(name="File", value="file", label="Upload a file"),
+            cl.Action(name="Url", value="url", label="Upload a URL"),
         ],
     ).send()
+    return res
 
+async def handle_response(res):
     if res and res.get("value") == "file":
         files = None
         files = await cl.AskFileMessage(
-            content="Please upload a URL, Text, PDF file to begin!",
+            content="Please upload a Text or PDF file to begin!",
             accept=["text/plain", "application/pdf"],
             max_size_mb=12,
         ).send()
@@ -82,33 +139,8 @@ async def on_chat_start():
         msg.content = f"Processing `{file.name}` done. You can now ask questions!"
         await msg.update()
 
+    if res and res.get("value") == "url":
+        await cl.Message(content="Submit a url link in the message box below.").send()
+
     if res and res.get("value") == "question":
         await cl.Message(content="Ask away!").send()
-
-    # Load the style guide from the local file system
-    style_guide_path = "./public/CoExperiences Writing Style Guide V1 (2024).pdf"
-    loader = PyMuPDFLoader(style_guide_path)
-    style_guide_docs = loader.load()
-    style_guide_text = "\n".join([doc.page_content for doc in style_guide_docs])
-
-    retriever = qdrant_store.as_retriever()
-    global retrieval_augmented_qa_chain
-    retrieval_augmented_qa_chain = (
-        {
-            "context": itemgetter("question") | retriever,
-            "question": itemgetter("question"),
-            "writing_style_guide": lambda _: style_guide_text
-        }
-        | RunnablePassthrough.assign(context=itemgetter("context"))
-        | chat_prompt
-        | chat_model
-    )
-
-@cl.author_rename
-def rename(orig_author: str):
-    return "AI Assistant"
-
-@cl.on_message
-async def main(message: cl.Message):
-    response = retrieval_augmented_qa_chain.invoke({"question": message.content})
-    await cl.Message(content=response.content).send()
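
The new URL branch calls load_documents_from_url, which this commit imports from helper_functions but does not show. Purely as a hedged illustration, and not the repository's actual code, a minimal sketch of such a helper, assuming it wraps WebBaseLoader from langchain_community:

# Hypothetical sketch: helper_functions.py is not part of this diff,
# so this assumes the helper simply wraps langchain_community's WebBaseLoader.
from langchain_community.document_loaders import WebBaseLoader
from langchain_core.documents import Document

def load_documents_from_url(url: str) -> list[Document]:
    # Fetch the page and parse it into LangChain Document objects,
    # matching how the new on_message branch consumes the result
    # before splitting and adding it to qdrant_store.
    loader = WebBaseLoader(url)
    return loader.load()

Whatever the real helper does, the only contract the new branch relies on is that it returns a list of Document objects that text_splitter.split_documents can split.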