Spaces:
Running
Running
Enhanced the support for the excel file and added endpoint to have optimized vector store and Rag for the Excel.
b953016
# src/utils/llm_utils.py
from fastapi import HTTPException
from typing import Tuple
from src.llms.openai_llm import OpenAILanguageModel
from src.llms.ollama_llm import OllamaLanguageModel
from src.llms.bert_llm import BERTLanguageModel
from src.llms.falcon_llm import FalconLanguageModel
from src.llms.llama_llm import LlamaLanguageModel
from src.embeddings.huggingface_embedding import HuggingFaceEmbedding
from src.vectorstores.chroma_vectorstore import ChromaVectorStore
from src.vectorstores.optimized_vectorstore import get_optimized_vector_store
from src.utils.logger import logger
from config.config import settings
def get_llm_instance(provider: str):
    """
    Create and return an LLM instance for the requested provider.

    Args:
        provider (str): Name of the LLM provider; one of
            'openai', 'ollama', 'bert', 'falcon', 'llama'.

    Returns:
        BaseLLM: Instance of the LLM

    Raises:
        ValueError: If provider is not supported
    """
    # Dispatch explicitly so each model is only constructed (and its
    # settings only read) for the provider actually requested.
    if provider == 'openai':
        return OpenAILanguageModel(api_key=settings.OPENAI_API_KEY)
    if provider == 'ollama':
        return OllamaLanguageModel(base_url=settings.OLLAMA_BASE_URL)
    if provider == 'bert':
        return BERTLanguageModel()
    if provider == 'falcon':
        return FalconLanguageModel()
    if provider == 'llama':
        return LlamaLanguageModel()
    raise ValueError(f"Unsupported LLM provider: {provider}")
async def get_vector_store() -> Tuple[ChromaVectorStore, HuggingFaceEmbedding]:
    """
    Return vector store and embedding model instances.

    Tries the optimized implementation first; if that fails for any
    reason, logs the error and falls back to the standard
    ChromaVectorStore so existing callers keep working.

    Returns:
        Tuple[ChromaVectorStore, HuggingFaceEmbedding]:
            Vector store and embedding model instances
    """
    try:
        result = await get_optimized_vector_store()
    except Exception as e:
        # Broad catch is intentional: any optimization failure should
        # degrade gracefully to the standard implementation, not crash.
        logger.error(f"Error getting optimized vector store: {str(e)}")
        # Fallback to original implementation if optimization fails
        logger.warning("Falling back to standard vector store implementation")
        fallback_embedding = HuggingFaceEmbedding(model_name=settings.EMBEDDING_MODEL)
        fallback_store = ChromaVectorStore(
            embedding_function=fallback_embedding.embed_documents,
            persist_directory=settings.CHROMA_PATH
        )
        result = fallback_store, fallback_embedding
    return result