Mr-Vicky-01 committed on
Commit
e76c232
·
verified ·
1 Parent(s): 19b2125

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -8
app.py CHANGED
@@ -1,9 +1,10 @@
1
- import streamlit as st
2
  import os
 
 
 
3
  from streamlit_chat import message
4
  import google.generativeai as genai
5
  from langchain.prompts import PromptTemplate
6
- from langchain import LLMChain
7
  from langchain_google_genai import ChatGoogleGenerativeAI
8
 
9
  os.environ["GOOGLE_API_KEY"] = os.getenv("GOOGLE_API_KEY")
@@ -14,13 +15,15 @@ llm = ChatGoogleGenerativeAI(model="gemini-pro",
14
 
15
 
16
  template = """You are a friendly chat assistant called "CRETA" having a conversation with a human and you are created by Pachaiappan an AI Specialist.
 
 
17
  previous_chat:
18
  {chat_history}
19
  Human: {human_input}
20
  Chatbot:"""
21
 
22
  prompt = PromptTemplate(
23
- input_variables=["chat_history", "human_input"], template=template
24
  )
25
 
26
  llm_chain = LLMChain(
@@ -31,13 +34,16 @@ llm_chain = LLMChain(
31
 
32
 
33
  previous_response = ""
 
34
  def conversational_chat(query):
35
- global previous_response
36
  for i in st.session_state['history']:
37
  if i is not None:
38
  previous_response += f"Human: {i[0]}\n Chatbot: {i[1]}"
39
- print(previous_response)
40
- result = llm_chain.predict(chat_history=previous_response, human_input=query)
 
 
41
  st.session_state['history'].append((query, result))
42
  return result
43
 
@@ -54,6 +60,25 @@ if 'generated' not in st.session_state:
54
  if 'past' not in st.session_state:
55
  st.session_state['past'] = [" "]
56
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  # Create containers for chat history and user input
58
  response_container = st.container()
59
  container = st.container()
@@ -66,11 +91,12 @@ with container:
66
  # answer = response_generator(output)
67
  st.session_state['past'].append(user_input)
68
  st.session_state['generated'].append(output)
69
-
 
70
  # Display chat history
71
  if st.session_state['generated']:
72
  with response_container:
73
  for i in range(len(st.session_state['generated'])):
74
  if i != 0:
75
  message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
76
- message(st.session_state["generated"][i], key=str(i), avatar_style="bottts")
 
 
1
  import os
2
+ import streamlit as st
3
+ from PyPDF2 import PdfReader
4
+ from langchain import LLMChain
5
  from streamlit_chat import message
6
  import google.generativeai as genai
7
  from langchain.prompts import PromptTemplate
 
8
  from langchain_google_genai import ChatGoogleGenerativeAI
9
 
10
  os.environ["GOOGLE_API_KEY"] = os.getenv("GOOGLE_API_KEY")
 
15
 
16
 
17
  template = """You are a friendly chat assistant called "CRETA" having a conversation with a human and you are created by Pachaiappan an AI Specialist.
18
+ provided document:
19
+ {provided_docs}
20
  previous_chat:
21
  {chat_history}
22
  Human: {human_input}
23
  Chatbot:"""
24
 
25
  prompt = PromptTemplate(
26
+ input_variables=["chat_history", "human_input", "provided_docs"], template=template
27
  )
28
 
29
  llm_chain = LLMChain(
 
34
 
35
 
36
  previous_response = ""
37
+ provided_docs = ""
38
def conversational_chat(query):
    """Generate a CRETA reply for *query* via the LLM chain.

    Rebuilds the chat transcript and the uploaded-document context from
    ``st.session_state`` on every call, asks ``llm_chain`` for a completion,
    records the ``(query, result)`` pair in the session history, and returns
    the model's reply text.

    Parameters
    ----------
    query : str
        The human's latest message.

    Returns
    -------
    str : the chatbot's reply.
    """
    # Build the context locally on each call. The original accumulated into
    # module-level globals with `+=`, which re-appended the ENTIRE history
    # and document set on every invocation, duplicating the prompt context
    # without bound across calls.
    chat_history = "".join(
        f"Human: {turn[0]}\n Chatbot: {turn[1]}"
        for turn in st.session_state['history']
        if turn is not None
    )
    docs_text = "".join(
        doc for doc in st.session_state["docs"] if doc is not None
    )
    result = llm_chain.predict(
        chat_history=chat_history,
        human_input=query,
        provided_docs=docs_text,
    )
    st.session_state['history'].append((query, result))
    return result
49
 
 
60
  if 'past' not in st.session_state:
61
  st.session_state['past'] = [" "]
62
 
63
+ if 'docs' not in st.session_state:
64
+ st.session_state['docs'] = []
65
+
66
def get_pdf_text(pdf_docs):
    """Concatenate the extracted text of every page of every uploaded PDF.

    Parameters
    ----------
    pdf_docs : iterable
        File-like objects accepted by ``PyPDF2.PdfReader`` (e.g. Streamlit
        ``UploadedFile`` instances).

    Returns
    -------
    str : all page texts joined in order; empty string when no files given.
    """
    parts = []
    for pdf in pdf_docs:
        reader = PdfReader(pdf)
        for page in reader.pages:
            # extract_text() may return None for image-only/empty pages in
            # PyPDF2; the original `text += page.extract_text()` would then
            # raise TypeError. Coerce None to "".
            parts.append(page.extract_text() or "")
    # join instead of repeated += avoids quadratic string rebuilding.
    return "".join(parts)
73
+
74
# Sidebar: let the user upload PDFs whose text is fed to the model as
# "provided_docs" context (consumed by conversational_chat).
with st.sidebar:
    st.title("Add a file for CRETA memory:")
    uploaded_file = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True)
    if st.button("Submit & Process"):
        with st.spinner("Processing..."):
            # append, not `+=`: get_pdf_text returns a str, and `list += str`
            # extends the list one CHARACTER at a time, bloating session
            # state with thousands of single-char entries.
            st.session_state["docs"].append(get_pdf_text(uploaded_file))
            st.success("Done")
81
+
82
  # Create containers for chat history and user input
83
  response_container = st.container()
84
  container = st.container()
 
91
  # answer = response_generator(output)
92
  st.session_state['past'].append(user_input)
93
  st.session_state['generated'].append(output)
94
+
95
+
96
  # Display chat history
97
  if st.session_state['generated']:
98
  with response_container:
99
  for i in range(len(st.session_state['generated'])):
100
  if i != 0:
101
  message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
102
+ message(st.session_state["generated"][i], key=str(i), avatar_style="bottts")