# CRETA — Streamlit chat assistant (Gemini via LangChain, with a DuckDuckGo
# web-search fallback and PDF/URL document memory).
import streamlit as st
import os
from streamlit_chat import message
from PyPDF2 import PdfReader
import google.generativeai as genai
from langchain.prompts import PromptTemplate
from langchain import LLMChain
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.tools import DuckDuckGoSearchRun
# --- Gemini / search configuration --------------------------------------
# Fail fast with a clear message when the key is missing; the original
# `os.environ[...] = os.getenv(...)` raised an opaque TypeError on None.
_google_api_key = os.getenv("GOOGLE_API_KEY")
if not _google_api_key:
    raise RuntimeError("GOOGLE_API_KEY environment variable is not set.")
os.environ["GOOGLE_API_KEY"] = _google_api_key
genai.configure(api_key=_google_api_key)

# Low temperature keeps answers focused and mostly deterministic.
llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash-latest",
                             temperature=0.2)
search_engine = DuckDuckGoSearchRun()
def get_web_result(question):
    """Run a DuckDuckGo search for *question* and return the trimmed result text."""
    return search_engine.invoke(question).strip()
# Main conversation prompt: answer from the uploaded documents and recent chat
# history. When the model lacks the information it must reply with the literal
# sentinel 'search_query', which conversational_chat() uses to trigger the
# web-search fallback chain below.
template = """You are a friendly chat assistant called "CRETA" having a conversation with a human and you are created by Pachaiappan [portfolio](https://mr-vicky-01.github.io/Portfolio/) an AI Specialist. If the question pertains to information that you do not have access to, respond with string 'search_query' only nothing else.
provided document:
{provided_docs}
previous_chat:
{chat_history}
Human: {human_input}
Chatbot:"""
prompt = PromptTemplate(
    input_variables=["chat_history", "human_input", "provided_docs"], template=template
)
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    # verbose=True,
)
# Fallback prompt used after a web search: answer the question from the
# fetched search result instead of the uploaded documents.
# (Fixed the "fallowing" typo in the instruction text.)
template_2 = """read the web_result and Answer the following question carefully, your response must be short and informative
web_result:
{web_result}
previous_chat:
{chat_history}
Question: {human_input}
Chatbot:"""
prompt2 = PromptTemplate(
    # "provided_docs" removed: template_2 never references it, and declaring
    # a variable the template does not use can fail template validation in
    # some LangChain versions. Extra kwargs passed to predict() are ignored,
    # so existing callers are unaffected.
    input_variables=["web_result", "chat_history", "human_input"], template=template_2
)
llm_chain_2 = LLMChain(
    llm=llm,
    prompt=prompt2,
    verbose=True,
)
# Prompt that condenses the conversation plus the latest question into a
# single, search-engine-ready query string for the DuckDuckGo fallback.
search_template = """Write a brief, user-friendly search query based on the details below. The response should be concise and ready for direct use on a search engine.
Chat History:
{chat_history}
Question: {human_input}
Search Query::"""
search_prompt = PromptTemplate(
    input_variables=["chat_history", "human_input"], template=search_template
)
search_llm = LLMChain(
    llm=llm,
    prompt=search_prompt,
    # verbose=True,
)

# Module-level scratch state shared with conversational_chat():
# previous_response — rendered chat transcript; provided_docs — joined
# text of all uploaded documents.
previous_response = ""
provided_docs = ""
def conversational_chat(query):
    """Answer *query* from the loaded docs and recent chat history.

    If the document chain replies with the 'search_query' sentinel, build a
    web-search query, fetch a DuckDuckGo result, and answer again from it.
    Appends the (query, answer) turn to st.session_state['history'] (trimmed
    to the last 5 turns) and returns the answer string.
    """
    global previous_response, provided_docs
    # Rebuild the transcript from scratch on every call. The previous
    # version only ever concatenated onto the module-level string, so each
    # call re-appended the last five turns and the prompt grew with
    # duplicated history.
    previous_response = ""
    for turn in st.session_state['history'][-5:]:
        if turn is not None:
            previous_response += f"Human: {turn[0]}\n Chatbot: {turn[1]}\n"
    provided_docs = "".join(st.session_state["docs"])
    result = llm_chain.predict(chat_history=previous_response, human_input=query, provided_docs=provided_docs)
    if 'search_query' in result.strip():
        # The model signalled it lacks the answer: derive a concise search
        # query, run it, and answer again from the web result.
        search_query = search_llm.predict(chat_history=previous_response, human_input=query)
        print(search_query)
        web_result = get_web_result(search_query.strip())
        result = llm_chain_2.predict(web_result=web_result, chat_history=previous_response, human_input=query, provided_docs=provided_docs)
    st.session_state['history'].append((query, result))
    # Keep only the last 5 history entries
    st.session_state['history'] = st.session_state['history'][-5:]
    return result
# --- Page header and session-state bootstrap -----------------------------
# NOTE(review): "π€" and "Assitant" look like a mojibake'd emoji and a typo,
# but they are user-facing runtime strings — left untouched; confirm the
# intended characters against the deployed app.
st.title("CRETA π€")
st.text("I am CRETA Your Friendly Assitant")

# Conversation turns as (human, bot) tuples; trimmed to the last 5 inside
# conversational_chat().
if 'history' not in st.session_state:
    st.session_state['history'] = []
# Initialize messages
# 'generated'/'past' are the parallel bot/user message lists rendered below;
# index 0 is the greeting, which has no matching user turn.
if 'generated' not in st.session_state:
    st.session_state['generated'] = ["Hello ! Ask me anything"]
if 'past' not in st.session_state:
    st.session_state['past'] = [" "]
# Raw text of every processed PDF/URL, joined into the prompt's
# provided_docs field.
if 'docs' not in st.session_state:
    st.session_state['docs'] = []
def get_pdf_text(pdf_docs):
    """Concatenate the extracted text of every page of every uploaded PDF.

    Returns "" for an empty *pdf_docs* sequence.
    """
    text = ""
    for pdf in pdf_docs:
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            # extract_text() returns None for image-only/empty pages, which
            # would raise TypeError on concatenation — treat it as "".
            text += page.extract_text() or ""
    return text
def get_url_text(url_link):
    """Fetch *url_link* and return its concatenated page text, or "" on any error."""
    try:
        loader = WebBaseLoader(url_link)
        loader.requests_per_second = 1  # be polite: throttle to 1 req/s
        docs = loader.aload()
        # Join the content of every loaded page into one string.
        return "".join(page.page_content for page in docs)
    except Exception as e:
        # Best-effort: log and fall back to an empty document.
        print(f"Error fetching or processing URL: {e}")
        return ""
# --- Sidebar: feed PDFs and documentation URLs into CRETA's memory -------
with st.sidebar:
    st.title("Add a file for CRETA memory:")
    uploaded_files = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True)
    uploaded_url = st.text_input("Paste the Documentation URL:")
    if st.button("Submit & Process"):
        if uploaded_files or uploaded_url:
            with st.spinner("Processing..."):
                # append() stores one string per document; the previous
                # `+=` extended the list with the string's individual
                # characters (the later "".join hid the breakage, but the
                # list held thousands of 1-char entries).
                if uploaded_files:
                    st.session_state["docs"].append(get_pdf_text(uploaded_files))
                if uploaded_url:
                    st.session_state["docs"].append(get_url_text(uploaded_url))
                st.success("Processing complete!")
        else:
            st.error("Please upload at least one PDF file or provide a URL.")
# Create containers for chat history and user input
response_container = st.container()
container = st.container()

# User input form
# NOTE(review): "π.." is likely a mojibake'd emoji — runtime string, left as-is.
user_input = st.chat_input("Ask Your Questions π..")
with container:
    if user_input:
        output = conversational_chat(user_input)
        # answer = response_generator(output)
        st.session_state['past'].append(user_input)
        st.session_state['generated'].append(output)

# Display chat history
if st.session_state['generated']:
    with response_container:
        for i in range(len(st.session_state['generated'])):
            # Index 0 is the bot's greeting and has no paired user message,
            # so the user bubble is skipped for it.
            if i != 0:
                message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
            message(st.session_state["generated"][i], key=str(i), avatar_style="bottts")