KushwanthK committed on
Commit 7819bd1 · verified · 1 Parent(s): 2680167

Upload app.py

Files changed (1)
app.py +1 -5
app.py CHANGED
@@ -23,6 +23,7 @@ import torch
 from langchain_community.llms.ollama import Ollama
 from langchain.prompts import ChatPromptTemplate
 from langchain_community.vectorstores import FAISS
+from langchain.vectorstores import faiss
 
 device = 'cuda' if torch.cuda.is_available() else 'cpu'
 
@@ -149,7 +150,6 @@ def get_faiss_semantic_index():
     except Exception as e:
         st.error(f"Error loading embeddings: {e}")
         return None
-
 faiss_index = get_faiss_semantic_index()
 print(faiss_index)
 
@@ -188,10 +188,6 @@ Answer the question based on the above context: {question}
 # st.divider()
 
 def chat_actions():
-
-    faiss_index = get_faiss_semantic_index()
-
-    print(faiss_index)
 
     st.session_state["chat_history"].append(
         {"role": "user", "content": st.session_state["chat_input"]},