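"""Streamlit app: chat with your PDFs using Google PaLM, FAISS, and LangChain.

The app extracts text from uploaded PDFs, splits it into overlapping chunks,
embeds the chunks with GooglePalmEmbeddings, indexes them in FAISS, and answers
questions through a ConversationalRetrievalChain with conversation memory.
"""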
import os

import streamlit as st
from PyPDF2 import PdfReader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import GooglePalmEmbeddings
from langchain.llms import GooglePalm
from langchain.vectorstores import FAISS
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory

# Set your Google API key here or export GOOGLE_API_KEY before launching the app.
# Never hard-code or commit a real key.
os.environ.setdefault('GOOGLE_API_KEY', 'YOUR_GOOGLE_API_KEY')
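# Assumed dependencies (install via pip): streamlit, PyPDF2, langchain,
# google-generativeai, faiss-cpu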
def get_pdf_text(pdf_docs):
    """Concatenate the extracted text of every page of every uploaded PDF."""
    text = ""
    for pdf in pdf_docs:
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            # extract_text() can return None for pages without a text layer.
            text += page.extract_text() or ""
    return text
def get_text_chunks(text):
    """Split the raw text into overlapping chunks for embedding."""
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=20)
    chunks = text_splitter.split_text(text)
    return chunks
def get_vector_store(text_chunks):
    """Embed the chunks with Google PaLM embeddings and index them in FAISS."""
    embeddings = GooglePalmEmbeddings()
    vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
    return vector_store
def get_conversational_chain(vector_store):
    """Build a conversational retrieval chain over the indexed PDF text."""
    llm = GooglePalm()
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm, retriever=vector_store.as_retriever(), memory=memory
    )
    return conversation_chain
def user_input(user_question):
    """Run the user's question through the chain and render the chat history."""
    if st.session_state.conversation is None:
        st.warning("Please upload and process your PDF files first.")
        return
    response = st.session_state.conversation({'question': user_question})
    st.session_state.chatHistory = response['chat_history']
    # The stored history alternates user and assistant messages.
    for i, message in enumerate(st.session_state.chatHistory):
        if i % 2 == 0:
            st.write("Me: ", message.content)
        else:
            st.write("mGPT: ", message.content)
def main():
    st.set_page_config(page_title="palm2 pdf")
    st.header("Hi, ask me anything from your pdf 😎")
    user_question = st.text_input("Ask a Question from the PDF Files")
    if "conversation" not in st.session_state:
        st.session_state.conversation = None
    if "chatHistory" not in st.session_state:
        st.session_state.chatHistory = None
    if user_question:
        user_input(user_question)
    with st.sidebar:
        st.title("Settings")
        st.subheader("Upload your Documents")
        pdf_docs = st.file_uploader(
            "Upload your PDF Files and Click on the Process Button",
            accept_multiple_files=True,
        )
        if st.button("Process"):
            with st.spinner("Processing"):
                # Extract, chunk, embed, and index the PDFs, then build the chain.
                raw_text = get_pdf_text(pdf_docs)
                text_chunks = get_text_chunks(raw_text)
                vector_store = get_vector_store(text_chunks)
                st.session_state.conversation = get_conversational_chain(vector_store)
                st.success("Done")
if __name__ == "__main__":
    main()
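# Run locally with (assuming this file is saved as app.py):
#   streamlit run app.py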