danielRamon committed on
Commit 44bec2e · 1 Parent(s): 458a8ea

feat ✨: Add caching mechanism for sentence transformers models

Files changed (2)
  1. .gitignore +3 -2
  2. chroma_utils.py +3 -0
.gitignore CHANGED
@@ -1,3 +1,4 @@
 .vscode/launch.json
-__pycache__/chroma_utils.cpython-311.pyc
-chroma_data/
+__pycache__/
+chroma_data/
+
chroma_utils.py CHANGED
@@ -1,8 +1,11 @@
+import os
 from langchain_chroma import Chroma
 # from langchain_ollama import OllamaEmbeddings
 from langchain_huggingface import HuggingFaceEmbeddings
 from langchain_core.documents import Document
 
+os.environ['SENTENCE_TRANSFORMERS_HOME'] = './.cache'
+
 embed = HuggingFaceEmbeddings(
     model_name="sentence-transformers/all-mpnet-base-v2")
 
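
The added lines pin the sentence-transformers download cache to a project-local ./.cache directory by setting SENTENCE_TRANSFORMERS_HOME before the embedding model is instantiated, so the first run downloads all-mpnet-base-v2 once and later runs reuse it from disk instead of the default ~/.cache location. Below is a minimal sketch of an equivalent, more explicit setup; it assumes the cache_folder field of HuggingFaceEmbeddings (not used in this commit) and is illustrative rather than part of the change.

import os

from langchain_huggingface import HuggingFaceEmbeddings

# Hypothetical alternative to exporting SENTENCE_TRANSFORMERS_HOME:
# point sentence-transformers at a project-local cache directory.
CACHE_DIR = "./.cache"
os.makedirs(CACHE_DIR, exist_ok=True)

embed = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-mpnet-base-v2",
    cache_folder=CACHE_DIR,  # forwarded to SentenceTransformer's cache folder
)

# The first call downloads the model into ./.cache; subsequent runs load it
# from disk instead of re-downloading.
vector = embed.embed_query("hello world")
print(len(vector))  # all-mpnet-base-v2 returns 768-dimensional embeddings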