hf_token
app.py CHANGED
@@ -30,6 +30,7 @@ MODEL_RRK = "mixedbread-ai/mxbai-rerank-large-v1"
 LLM_NAME = "gpt-4o-mini"
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
 MXBAI_API_KEY = os.environ.get("MXBAI_API_KEY")
+HF_TOKEN = os.environ.get("HF_TOKEN")
 
 # Load the reranker model
 device = "cuda:0" if torch.cuda.is_available() else "cpu"
@@ -38,7 +39,7 @@ model_emb = "mixedbread-ai/mxbai-embed-large-v1"
 
 # Set up ChromaDB
 client = chromadb.Client()
-dataset = load_dataset("eliot-hub/memoires_vec_800", split="data")
+dataset = load_dataset("eliot-hub/memoires_vec_800", split="data", token=HF_TOKEN)
 # client = chromadb.PersistentClient(path=os.path.join(os.path.abspath(os.getcwd()), "01_Notebooks", "RAG-ollama", "chatbot_actuariat_APP", CHROMA_PATH))
 
 
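For context, a minimal sketch of what the changed lines do at runtime, assuming the Space stores HF_TOKEN as a repository secret and the eliot-hub/memoires_vec_800 dataset requires Hub authentication (e.g. it is private or gated); this is an illustration of the pattern, not the full app.py:

import os
import chromadb
from datasets import load_dataset

# Read the Hugging Face token from the environment; on a Space this is
# typically set as a secret named HF_TOKEN.
HF_TOKEN = os.environ.get("HF_TOKEN")

# In-memory ChromaDB client, as in the diff above.
client = chromadb.Client()

# Passing token= authenticates the Hub request made by load_dataset.
# Assumption: without it, a private/gated dataset cannot be downloaded and
# the call fails with an authentication / "dataset not found" error.
dataset = load_dataset("eliot-hub/memoires_vec_800", split="data", token=HF_TOKEN)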