Update app.py
app.py
CHANGED
@@ -1,13 +1,12 @@
 import streamlit as st
 import os
+import tempfile
 from langchain_community.vectorstores import FAISS
 from langchain_community.document_loaders import PyPDFLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-from
-from langchain_huggingface import HuggingFaceEndpoint  # Updated import
+from langchain_huggingface import HuggingFaceEmbeddings, HuggingFaceEndpoint
 from langchain.chains import ConversationalRetrievalChain
 from langchain.memory import ConversationBufferMemory
-import tempfile
 
 api_token = os.getenv("HF_TOKEN")
 list_llm = ["meta-llama/Meta-Llama-3-8B-Instruct", "mistralai/Mistral-7B-Instruct-v0.2"]
@@ -186,4 +185,4 @@ def main():
     st.session_state['sources'] = sources
 
 if __name__ == "__main__":
-    main()
+    main()
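
For context on the revised imports: the commit moves import tempfile up with the other standard-library imports, drops the truncated "from" line, and widens the langchain_huggingface import so that HuggingFaceEmbeddings is available alongside HuggingFaceEndpoint. The sketch below shows how those pieces typically fit together in a PDF chat app of this kind. It is a minimal, assumption-laden outline rather than the Space's actual app.py (whose function bodies are not shown in this diff); the embedding model name, chunk sizes, generation parameters, and the build_chain helper are all illustrative placeholders.

import os

from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_huggingface import HuggingFaceEmbeddings, HuggingFaceEndpoint

api_token = os.getenv("HF_TOKEN")

def build_chain(pdf_path: str):
    # Load the PDF and split it into overlapping chunks (sizes are illustrative).
    docs = PyPDFLoader(pdf_path).load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(docs)

    # HuggingFaceEmbeddings (the class added to the import) embeds the chunks
    # for the FAISS vector store; this model name is an assumption, not taken
    # from the Space.
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    db = FAISS.from_documents(chunks, embeddings)

    # HuggingFaceEndpoint wraps one of the hosted models named in list_llm.
    llm = HuggingFaceEndpoint(
        repo_id="meta-llama/Meta-Llama-3-8B-Instruct",
        huggingfacehub_api_token=api_token,
        temperature=0.5,
        max_new_tokens=512,
    )

    # Conversational retrieval chain with chat-history memory, returning source
    # documents so the app can populate st.session_state['sources'].
    memory = ConversationBufferMemory(
        memory_key="chat_history", output_key="answer", return_messages=True
    )
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=db.as_retriever(),
        memory=memory,
        return_source_documents=True,
    )

if __name__ == "__main__":
    # Hypothetical usage with a local file; in the Streamlit app an uploaded
    # PDF would typically be written to a tempfile first and that path passed in.
    chain = build_chain("example.pdf")
    result = chain.invoke({"question": "What is this document about?"})
    print(result["answer"])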