import streamlit as st
import os
from streamlit_chat import message
from PyPDF2 import PdfReader
import google.generativeai as genai
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain_google_genai import ChatGoogleGenerativeAI
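# CRETA: a Streamlit chat assistant backed by Google's Gemini model through LangChain.
# Text extracted from uploaded PDFs is injected into the prompt as extra context.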
os.environ["GOOGLE_API_KEY"] = os.getenv("GOOGLE_API_KEY")
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
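# Chat model used for every reply; a moderate temperature keeps answers focused.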
llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.4)
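# The prompt gives the model the uploaded document text and the prior chat turns
# alongside the new question.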
template = """You are a friendly chat assistant called "CRETA" having a conversation with a human, and you were created by Pachaiappan, an AI Specialist.
provided document:
{provided_docs}
previous_chat:
{chat_history}
Human: {human_input}
Chatbot:"""
prompt = PromptTemplate(
    input_variables=["chat_history", "human_input", "provided_docs"], template=template
)
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
)
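# Module-level buffers holding the formatted chat history and the uploaded document
# text; both are rebuilt inside conversational_chat() on every turn.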
previous_response = ""
provided_docs = ""

def conversational_chat(query):
    global previous_response, provided_docs
    # Rebuild the chat history from scratch each turn so earlier turns are not duplicated.
    previous_response = ""
    for i in st.session_state['history']:
        if i is not None:
            previous_response += f"Human: {i[0]}\nChatbot: {i[1]}\n"
    docs = ""
    for j in st.session_state["docs"]:
        if j is not None:
            docs += j
    provided_docs = docs
    result = llm_chain.predict(chat_history=previous_response, human_input=query, provided_docs=provided_docs)
    st.session_state['history'].append((query, result))
    return result
st.title("Chat Bot:")
st.text("I am CRETA Your Friendly Assitant")
if 'history' not in st.session_state:
    st.session_state['history'] = []

# Initialize messages
if 'generated' not in st.session_state:
    st.session_state['generated'] = ["Hello! Ask me anything"]
if 'past' not in st.session_state:
    st.session_state['past'] = [" "]
if 'docs' not in st.session_state:
    st.session_state['docs'] = []
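# Pull the raw text out of every page of every uploaded PDF.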
def get_pdf_text(pdf_docs):
    text = ""
    for pdf in pdf_docs:
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            # extract_text() can return None for image-only pages
            text += page.extract_text() or ""
    return text
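# Sidebar: let the user add PDFs to CRETA's context (the URL field is collected
# but not processed anywhere below).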
with st.sidebar:
    st.title("Add a file to CRETA's memory:")
    uploaded_file = st.file_uploader("Upload your PDF files and click on the Submit & Process button", accept_multiple_files=True)
    uploaded_url = st.text_area("Please enter a URL..")
    if st.button("Submit & Process"):
        with st.spinner("Processing..."):
            # append() keeps the extracted text as one entry instead of splitting it into characters
            st.session_state["docs"].append(get_pdf_text(uploaded_file))
            st.success("Done")
# Create containers for chat history and user input
response_container = st.container()
container = st.container()

# User input
user_input = st.chat_input("Ask Your Questions 👉..")
with container:
    if user_input:
        output = conversational_chat(user_input)
        st.session_state['past'].append(user_input)
        st.session_state['generated'].append(output)
# Display chat history
if st.session_state['generated']:
    with response_container:
        for i in range(len(st.session_state['generated'])):
            if i != 0:
                message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
            message(st.session_state["generated"][i], key=str(i), avatar_style="bottts")