jchen8000 committed
Commit c0a84b3 · verified · Parent(s): 7279c69

Update app.py

Files changed (1):
  1. app.py (+16 -14)
app.py CHANGED
@@ -47,22 +47,24 @@ def index_pdf(pdf):
 
     return "PDF indexed successfully!"
 
-# Function to handle chatbot queries
-def chatbot_query(query):
-    if vector_store is None:
-        return "Please upload and index a PDF first."
+# # Function to handle chatbot queries
+# def chatbot_query(query):
+#     if vector_store is None:
+#         return "Please upload and index a PDF first."
 
-    # Create a retrieval-based QA chain
-    retriever = vector_store.as_retriever()
-    qa_chain = RetrievalQA(llm=OpenAI(), retriever=retriever)
+#     # Create a retrieval-based QA chain
+#     retriever = vector_store.as_retriever()
+#     qa_chain = RetrievalQA(llm=OpenAI(), retriever=retriever)
 
-    # Get the response from the QA chain
-    response = qa_chain.run(query)
+#     # Get the response from the QA chain
+#     response = qa_chain.run(query)
 
-    return response
-
+#     return response
 
 
+def format_docs(docs):
+    return "\n\n".join(doc.page_content for doc in docs)
+
 
 def generate_response(query, history, model, temperature, max_tokens, top_p, seed):
     if vector_store is None:
@@ -70,11 +72,11 @@ def generate_response(query, history, model, temperature, max_tokens, top_p, seed):
 
     if seed == 0:
         seed = random.randint(1, 100000)
-
-    llm = ChatGroq(groq_api_key=os.environ.get("GROQ_API_KEY"), model=model)
 
+    retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": 8})
+    llm = ChatGroq(groq_api_key=os.environ.get("GROQ_API_KEY"), model=model)
     custom_rag_prompt = PromptTemplate.from_template(template)
-
+
     rag_chain = (
         {"context": retriever | format_docs, "question": RunnablePassthrough()}
         | custom_rag_prompt
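The net effect of the commit is to retire the old RetrievalQA-based chatbot_query helper (kept as comments) and route answers through an LCEL-style chain: the retriever feeds format_docs, the formatted context and the question fill custom_rag_prompt, and a Groq-hosted model generates the reply. The hunk ends at `| custom_rag_prompt`, so the following is only a minimal sketch of how such a chain is typically completed (piping into the LLM and a StrOutputParser, then calling invoke); the template string, the module-level vector_store, and the handling of the unused generate_response parameters are assumptions, not code from this commit.

# Minimal sketch, not the full app.py: how the pieces touched by this commit
# typically compose in LangChain. Everything past `| custom_rag_prompt` is an
# assumption, since the diff hunk stops there.
import os
import random

from langchain_core.output_parsers import StrOutputParser   # assumed continuation of the chain
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_groq import ChatGroq

# Placeholder template; the real one is defined elsewhere in app.py.
template = "Use the context to answer.\n\n{context}\n\nQuestion: {question}"
# In app.py this is the global vector index built by index_pdf().
vector_store = None


def format_docs(docs):
    # Concatenate retrieved chunks into a single context string for the prompt.
    return "\n\n".join(doc.page_content for doc in docs)


def generate_response(query, history, model, temperature, max_tokens, top_p, seed):
    if vector_store is None:
        return "Please upload and index a PDF first."

    if seed == 0:
        seed = random.randint(1, 100000)

    # Top-8 similarity search over the indexed PDF chunks.
    retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": 8})
    llm = ChatGroq(groq_api_key=os.environ.get("GROQ_API_KEY"), model=model)
    custom_rag_prompt = PromptTemplate.from_template(template)

    rag_chain = (
        {"context": retriever | format_docs, "question": RunnablePassthrough()}
        | custom_rag_prompt
        | llm                 # assumed: the hunk ends before this line
        | StrOutputParser()   # assumed: flatten the chat message to plain text
    )
    # history, temperature, max_tokens, top_p and seed are presumably wired into
    # ChatGroq in the real app; they are left unused in this sketch.
    return rag_chain.invoke(query)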