llm / config /config.py
Chris4K's picture
Update config/config.py
3681d0c verified
raw
history blame
839 Bytes
# config/config.py
from pydantic_settings import BaseSettings
from pathlib import Path
import torch
class Settings(BaseSettings):
    """Application configuration, loaded from environment variables / `.env`.

    Fields without defaults (`secret_key`, `api_key`) are required and must
    be supplied via the environment or the `.env` file; pydantic-settings
    raises a validation error at instantiation time if they are missing.
    """

    # Required secrets -- intentionally no defaults so startup fails loudly
    # when they are not configured.
    secret_key: str
    api_key: str

    # LLM and sentence-embedding model identifiers.
    MODEL_NAME: str = "meta-llama/Llama-3.2-3B-Instruct"
    EMBEDDER_MODEL: str = "distiluse-base-multilingual-cased"

    # Document-ingestion chunking parameters (characters per chunk / overlap).
    CHUNK_SIZE: int = 1000
    CHUNK_OVERLAP: int = 100

    # Data sources: product feed CSV and local PDF folder.
    CSV_URL: str = 'https://www.bofrost.de/datafeed/DE/products.csv'
    PDF_FOLDER: Path = Path("./pdfs")

    # Inference device is resolved once at import time; falls back to CPU
    # when no CUDA device is available.
    DEVICE: str = "cuda" if torch.cuda.is_available() else "cpu"
    QUANTIZATION_BITS: int = 8

    FAQ_ROOT_URL: str = "https://www.bofrost.de/faq/"

    # Networking / caching knobs (seconds, except MAX_RETRIES).
    CACHE_DURATION: int = 3600
    MAX_RETRIES: int = 3
    TIMEOUT: int = 30

    class Config:
        # NOTE(review): pydantic v1-style inner Config; pydantic-settings v2
        # prefers `model_config = SettingsConfigDict(...)`, but the v1 form
        # still works and is kept to avoid changing the import surface.
        extra = "allow"  # allow additional fields beyond those defined here
        env_file = ".env"
settings = Settings()