binqiangliu committed
Commit 2a536a8 · 1 Parent(s): 86cd202

Update app.py

Files changed (1)
  1. app.py +29 -39
app.py CHANGED
@@ -121,8 +121,8 @@ with st.sidebar:
     with st.spinner("Preparing website materials for you..."):
         try:
             url="https://www.usinoip.com"
-            loader = WebBaseLoader(url)
-            raw_text = loader.load()
+            url_loader = WebBaseLoader(url)
+            raw_text = url_loader.load()
             page_content = raw_text[0].page_content
             page_content = str(page_content)
             temp_texts = text_splitter.split_text(page_content)
@@ -134,40 +134,30 @@ with st.sidebar:
             print("Please enter a valide URL.")
             st.stop()

-if user_question.strip().isspace() or user_question.isspace():
-    st.write("Query Empty. Please enter a valid query first.")
-    st.stop()
-elif user_question == "exit":
-    st.stop()
-elif user_question == "":
-    print("Query Empty. Please enter a valid query first.")
-    st.stop()
-elif user_question != "":
-    #st.write("Your query: "+user_question)
-    print("Your query: "+user_question)
-
-    with st.spinner("AI Thinking...Please wait a while to Cheers!"):
-        q_embedding=get_embeddings(user_question)
-        final_q_embedding = torch.FloatTensor(q_embedding)
-        hits = semantic_search(final_q_embedding, db_embeddings, top_k=5)
-        page_contents = []
-        for i in range(len(hits[0])):
-            page_content = texts[hits[0][i]['corpus_id']]
-            page_contents.append(page_content)
-        temp_page_contents=str(page_contents)
-        final_page_contents = temp_page_contents.replace('\\n', '')
-        random_string = generate_random_string(20)
-        i_file_path = random_string + ".txt"
-        with open(i_file_path, "w", encoding="utf-8") as file:
-            file.write(final_page_contents)
-        loader = TextLoader(i_file_path, encoding="utf-8")
-        loaded_documents = loader.load()
-        temp_ai_response=chain({"input_documents": loaded_documents, "question": user_question}, return_only_outputs=False)
-        initial_ai_response=temp_ai_response['output_text']
-        cleaned_initial_ai_response = remove_context(initial_ai_response)
-        final_ai_response = cleaned_initial_ai_response.split('<|end|>\n<|system|>\n<|end|>\n<|user|>')[0].strip().replace('\n\n', '\n').replace('<|end|>', '').replace('<|user|>', '').replace('<|system|>', '').replace('<|assistant|>', '')
-        #temp_ai_response = temp_ai_response['output_text']
-        #final_ai_response=temp_ai_response.partition('<|end|>')[0]
-        #i_final_ai_response = final_ai_response.replace('\n', '')
-        st.write("AI Response:")
-        st.write(final_ai_response)
+if st.button('Get AI Response'):
+    if user_question !="" and not user_question.strip().isspace() and not user_question == "" and not user_question.strip() == "" and not user_question.isspace():
+        with st.spinner("AI Thinking...Please wait a while to Cheers!"):
+            q_embedding=get_embeddings(user_question)
+            final_q_embedding = torch.FloatTensor(q_embedding)
+            hits = semantic_search(final_q_embedding, db_embeddings, top_k=5)
+            page_contents = []
+            for i in range(len(hits[0])):
+                page_content = texts[hits[0][i]['corpus_id']]
+                page_contents.append(page_content)
+            temp_page_contents=str(page_contents)
+            final_page_contents = temp_page_contents.replace('\\n', '')
+            random_string = generate_random_string(20)
+            i_file_path = random_string + ".txt"
+            with open(i_file_path, "w", encoding="utf-8") as file:
+                file.write(final_page_contents)
+            text_loader = TextLoader(i_file_path, encoding="utf-8")
+            loaded_documents = text_loader.load()
+            temp_ai_response=chain({"input_documents": loaded_documents, "question": user_question}, return_only_outputs=False)
+            initial_ai_response=temp_ai_response['output_text']
+            cleaned_initial_ai_response = remove_context(initial_ai_response)
+            final_ai_response = cleaned_initial_ai_response.split('<|end|>\n<|system|>\n<|end|>\n<|user|>')[0].strip().replace('\n\n', '\n').replace('<|end|>', '').replace('<|user|>', '').replace('<|system|>', '').replace('<|assistant|>', '')
+            #temp_ai_response = temp_ai_response['output_text']
+            #final_ai_response=temp_ai_response.partition('<|end|>')[0]
+            #i_final_ai_response = final_ai_response.replace('\n', '')
+            st.write("AI Response:")
+            st.write(final_ai_response)
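
For reference, the retrieval step that both the old and new code paths share is sentence-transformers' semantic_search over precomputed chunk embeddings. Below is a minimal, self-contained sketch of that step under stated assumptions: the model name and the two-sentence corpus are placeholders, and get_embeddings, texts, and db_embeddings from app.py are assumed to play the roles shown here; this is an illustration, not the app's actual setup.

# Sketch only: placeholder model and corpus, standing in for app.py's
# get_embeddings(), texts, and db_embeddings.
import torch
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import semantic_search

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Stand-in for the chunked page content and its precomputed embeddings.
texts = [
    "USINOIP provides intellectual property services.",
    "Contact the firm through its website for trademark questions.",
]
db_embeddings = torch.FloatTensor(model.encode(texts))

# Stand-in for get_embeddings(user_question) wrapped in torch.FloatTensor.
user_question = "What services are offered?"
q_embedding = torch.FloatTensor(model.encode([user_question]))

# Same call shape as in the diff: retrieve the top_k most similar chunks.
hits = semantic_search(q_embedding, db_embeddings, top_k=2)
page_contents = [texts[hit["corpus_id"]] for hit in hits[0]]
print(page_contents)

semantic_search returns one list of hits per query, each hit a dict with 'corpus_id' and 'score', which is why the diff indexes hits[0] and looks up texts[hits[0][i]['corpus_id']].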