Vishnu-add committed on
Commit
3fa3177
·
1 Parent(s): 0b5fda4

Added chatbot code

Browse files
Files changed (1) hide show
  1. app.py +55 -11
app.py CHANGED
@@ -8,6 +8,8 @@ from langchain.embeddings import SentenceTransformerEmbeddings
8
  from langchain.vectorstores import Chroma
9
  from langchain.llms.huggingface_pipeline import HuggingFacePipeline
10
  from langchain.chains import RetrievalQA
 
 
11
 
12
  @st.cache_resource
13
  def get_model():
@@ -32,7 +34,7 @@ def llm_pipeline():
32
  'text2text-generation',
33
  model = base_model,
34
  tokenizer=tokenizer,
35
- max_length = 256,
36
  do_sample = True,
37
  temperature = 0.3,
38
  top_p = 0.95,
@@ -65,21 +67,63 @@ def process_answer(instruction):
65
  answer = generated_text['result']
66
  return answer, generated_text
67
 
 
 
 
 
 
 
 
 
 
 
 
 
68
  def main():
69
- st.title("Search your pdf📚")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  with st.expander("About the App"):
71
  st.markdown(
72
- """This is a Generative AI powered Question and Answering app that responds to questions about your PDF file.
73
- """
 
74
  )
75
 
76
- question = st.text_area("Enter Your Question")
77
- if st.button("Search"):
78
- st.info("Your question: "+question)
79
- st.info("Your Answer")
80
- answer, metadata = process_answer(question)
81
- st.write(answer)
82
- st.write(metadata)
 
 
 
 
 
 
 
 
 
 
 
 
83
 
84
 
85
  if __name__ == "__main__":
 
8
  from langchain.vectorstores import Chroma
9
  from langchain.llms.huggingface_pipeline import HuggingFacePipeline
10
  from langchain.chains import RetrievalQA
11
+ from streamlit_chat import message
12
+
13
 
14
  @st.cache_resource
15
  def get_model():
 
34
  'text2text-generation',
35
  model = base_model,
36
  tokenizer=tokenizer,
37
+ max_length = 512,
38
  do_sample = True,
39
  temperature = 0.3,
40
  top_p = 0.95,
 
67
  answer = generated_text['result']
68
  return answer, generated_text
69
 
70
+ # Display conversation history using Streamlit messages
71
+ def display_conversation(history):
72
+ st.write(history)
73
+ for i in range(len(history["generated"])):
74
+ message(history["past"][i] , is_user=True, key= str(i) + "_user")
75
+ if isinstance(history["generated"][i],str):
76
+ message(history["generated"][i] , key= str(i))
77
+ else:
78
+ message(history["generated"][i][0] , key= str(i))
79
+ for source in history["generated"][i][1]['source_documents']:
80
+ st.write(source.metadata['source'])
81
+
82
  def main():
83
+ # Search with pdf code
84
+ # st.title("Search your pdf📚")
85
+ # with st.expander("About the App"):
86
+ # st.markdown(
87
+ # """This is a Generative AI powered Question and Answering app that responds to questions about your PDF file.
88
+ # """
89
+ # )
90
+
91
+ # question = st.text_area("Enter Your Question")
92
+ # if st.button("Search"):
93
+ # st.info("Your question: "+question)
94
+ # st.info("Your Answer")
95
+ # answer, metadata = process_answer(question)
96
+ # st.write(answer)
97
+ # st.write(metadata)
98
+
99
+ # Chat with pdf code
100
+ st.title("Chat with your pdf📚")
101
  with st.expander("About the App"):
102
  st.markdown(
103
+ """
104
+ This is a Generative AI powered Question and Answering app that responds to questions about your PDF file.
105
+ """
106
  )
107
 
108
+ user_input = st.text_input("",key="input")
109
+
110
+ # Initialize session state for generated responses and past messages
111
+ if "generated" not in st.session_state:
112
+ st.session_state["generated"] = ["I am ready to help you"]
113
+ if "past" not in st.session_state:
114
+ st.session_state["past"] = ["Hey There!"]
115
+
116
+ # Search the database for a response based on user input and update session state
117
+ if user_input:
118
+ answer = process_answer({"query" : user_input})
119
+ st.session_state["past"].append(user_input)
120
+ response = answer
121
+ st.session_state["generated"].append(response)
122
+
123
+ # Display Conversation history using Streamlit messages
124
+ if st.session_state["generated"]:
125
+ display_conversation(st.session_state)
126
+
127
 
128
 
129
  if __name__ == "__main__":