Spaces:
Sleeping
Sleeping
Chandranshu Jain
committed on
Commit
•
364b225
1
Parent(s):
f9a94b7
Update app.py
Browse files
app.py
CHANGED
@@ -42,10 +42,6 @@ def text_splitter(text):
|
|
42 |
|
43 |
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
|
44 |
|
45 |
-
def embedding(chunk):
|
46 |
-
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
|
47 |
-
db = Chroma.from_documents(chunk,embeddings, persist_directory="./chroma_db")
|
48 |
-
|
49 |
def get_conversational_chain():
|
50 |
prompt_template = """
|
51 |
Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
|
@@ -60,31 +56,33 @@ def get_conversational_chain():
|
|
60 |
chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
|
61 |
return chain
|
62 |
|
63 |
-
def
|
64 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
|
65 |
-
|
66 |
docs = db3.similarity_search(query)
|
67 |
chain = get_conversational_chain()
|
68 |
response = chain({"input_documents": docs, "question": query}, return_only_outputs=True)
|
69 |
st.write("Reply: ", response["output_text"])
|
70 |
|
|
|
|
|
|
|
71 |
def main():
|
72 |
st.header("Chat with your pdf💁")
|
73 |
-
|
|
|
74 |
query = st.text_input("Ask a Question from the PDF Files", key="query")
|
|
|
|
|
|
|
|
|
|
|
|
|
75 |
|
76 |
if query:
|
77 |
user_call(query)
|
78 |
|
79 |
-
|
80 |
-
with st.sidebar:
|
81 |
-
st.title("Menu:")
|
82 |
-
pdf_docs = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True, key="pdf_uploader")
|
83 |
-
if st.button("Submit & Process", key="process_button"):
|
84 |
-
with st.spinner("Processing..."):
|
85 |
-
raw_text = get_pdf(pdf_docs)
|
86 |
-
text_chunks = text_splitter(raw_text)
|
87 |
-
embedding(text_chunks)
|
88 |
st.success("Done")
|
89 |
|
90 |
if __name__ == "__main__":
|
|
|
42 |
|
43 |
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
|
44 |
|
|
|
|
|
|
|
|
|
45 |
def get_conversational_chain():
|
46 |
prompt_template = """
|
47 |
Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
|
|
|
56 |
chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
|
57 |
return chain
|
58 |
|
59 |
+
def embedding(chunk,query):
|
60 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
|
61 |
+
db = Chroma.from_documents(chunk,embeddings, persist_directory="./chroma_db")
|
62 |
docs = db3.similarity_search(query)
|
63 |
chain = get_conversational_chain()
|
64 |
response = chain({"input_documents": docs, "question": query}, return_only_outputs=True)
|
65 |
st.write("Reply: ", response["output_text"])
|
66 |
|
67 |
+
|
68 |
+
|
69 |
+
|
70 |
def main():
|
71 |
st.header("Chat with your pdf💁")
|
72 |
+
st.title("Menu:")
|
73 |
+
pdf_docs = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True, key="pdf_uploader")
|
74 |
query = st.text_input("Ask a Question from the PDF Files", key="query")
|
75 |
+
if st.button("Submit & Process", key="process_button"):
|
76 |
+
with st.spinner("Processing..."):
|
77 |
+
raw_text = get_pdf(pdf_docs)
|
78 |
+
text_chunks = text_splitter(raw_text)
|
79 |
+
embedding(text_chunks)
|
80 |
+
|
81 |
|
82 |
if query:
|
83 |
user_call(query)
|
84 |
|
85 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
86 |
st.success("Done")
|
87 |
|
88 |
if __name__ == "__main__":
|