from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.embeddings import LlamaCppEmbeddings

try:
    from modules.constants import *
except ImportError:
    from constants import *
import os


class EmbeddingModelLoader:
    """Loads the embedding model selected in the application config."""

    def __init__(self, config):
        self.config = config

    def load_embedding_model(self):
        # Use OpenAI's hosted embedding model when configured; otherwise
        # fall back to a local HuggingFace model.
        if self.config["embedding_options"]["model"] in ["text-embedding-ada-002"]:
            embedding_model = OpenAIEmbeddings(
                deployment="SL-document_embedder",
                model=self.config["embedding_options"]["model"],
                show_progress_bar=True,
                openai_api_key=OPENAI_API_KEY,
                disallowed_special=(),
            )
        else:
            # Local fallback: run a sentence-transformers model on CPU.
            embedding_model = HuggingFaceEmbeddings(
                model_name="sentence-transformers/all-MiniLM-L6-v2",
                model_kwargs={"device": "cpu"},
            )
            # embedding_model = LlamaCppEmbeddings(
            #     model_path=os.path.abspath("storage/llama-7b.ggmlv3.q4_0.bin")
            # )

        return embedding_model
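

# Usage sketch (hypothetical, not part of the original module): the config
# dict below only mirrors the "embedding_options" keys read above; the real
# project config may contain more fields. With a non-OpenAI model name the
# loader returns the local sentence-transformers embeddings.
if __name__ == "__main__":
    sample_config = {
        "embedding_options": {"model": "sentence-transformers/all-MiniLM-L6-v2"}
    }
    loader = EmbeddingModelLoader(sample_config)
    embeddings = loader.load_embedding_model()
    # Embed a short query to verify the model loads and returns a vector.
    vector = embeddings.embed_query("What topics does this course cover?")
    print(f"Embedding dimension: {len(vector)}")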