Cheselle committed on
Commit
44e5994
·
verified ·
1 Parent(s): 1e61831

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -9
app.py CHANGED
@@ -22,8 +22,8 @@ import chainlit as cl
22
  load_dotenv()
23
 
24
 
25
- ai_framework_document = PyMuPDFLoader(file_path="https://nvlpubs.nist.gov/nistpubs/ai/NIST.AI.600-1.pdf").load()
26
- ai_blueprint_document = PyMuPDFLoader(file_path="https://www.whitehouse.gov/wp-content/uploads/2022/10/Blueprint-for-an-AI-Bill-of-Rights.pdf").load()
27
 
28
 
29
  def metadata_generator(document, name):
@@ -37,22 +37,20 @@ def metadata_generator(document, name):
37
  doc.metadata["source"] = name
38
  return collection
39
 
40
- recursive_framework_document = metadata_generator(ai_framework_document, "AI Framework")
41
- recursive_blueprint_document = metadata_generator(ai_blueprint_document, "AI Blueprint")
42
- combined_documents = recursive_framework_document + recursive_blueprint_document
43
 
44
  embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
45
 
46
  vectorstore = Qdrant.from_documents(
47
- documents=combined_documents,
48
  embedding=embeddings,
49
  location=":memory:",
50
- collection_name="ai_policy"
51
  )
52
  alt_retriever = vectorstore.as_retriever()
53
 
54
  ## Generation LLM
55
- llm = ChatOpenAI(model="gpt-4o-mini")
56
 
57
  RAG_PROMPT = """\
58
  You are an AI Policy Expert.
@@ -79,7 +77,7 @@ retrieval_augmented_qa_chain = (
79
  | {"response": rag_prompt | llm, "context": itemgetter("context")}
80
  )
81
 
82
- #alt_rag_chain.invoke({"question" : "What is the AI framework all about?"})
83
 
84
  @cl.on_message
85
  async def handle_message(message):
 
22
  load_dotenv()
23
 
24
 
25
+ document = PyMuPDFLoader(file_path="sk-proj-[REDACTED — leaked OpenAI API key]").load()  # NOTE(review): an OpenAI API key was committed here as file_path; the key must be revoked immediately, and this value is not a valid document path/URL in any case
26
+
27
 
28
 
29
  def metadata_generator(document, name):
 
37
  doc.metadata["source"] = name
38
  return collection
39
 
40
+ documents = metadata_generator(document, "Propaganda")
 
 
41
 
42
  embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
43
 
44
  vectorstore = Qdrant.from_documents(
45
+ documents=documents,
46
  embedding=embeddings,
47
  location=":memory:",
48
+ collection_name="Propaganda"
49
  )
50
  alt_retriever = vectorstore.as_retriever()
51
 
52
  ## Generation LLM
53
+ llm = ChatOpenAI(model="gpt-4o")
54
 
55
  RAG_PROMPT = """\
56
  You are an AI Policy Expert.
 
77
  | {"response": rag_prompt | llm, "context": itemgetter("context")}
78
  )
79
 
80
+
81
 
82
  @cl.on_message
83
  async def handle_message(message):