|
import os |
|
import logging |
|
from typing import Optional |
|
from huggingface_hub import HfApi |
|
from pathlib import Path |
|
from app.core.cache import cache_config |
|
|
|
# Module-level logger named after this module, per stdlib logging convention.
logger = logging.getLogger(__name__)




# Hugging Face organization that owns the leaderboard repositories defined below.
HF_ORGANIZATION = "stacklok"
|
|
|
|
|
# Hugging Face API token, read from the environment. It is optional: when
# absent we log a warning and proceed with anonymous (token=None) access.
HF_TOKEN = os.getenv("HF_TOKEN")

if not HF_TOKEN:
    logger.warning("HF_TOKEN not found in environment variables. Some features may be limited.")


# Shared Hub client used throughout the app; a None token means anonymous access.
API = HfApi(token=HF_TOKEN)
|
|
|
|
|
# Hugging Face repositories used by the leaderboard, all under HF_ORGANIZATION
# except the externally-owned official-providers dataset.
QUEUE_REPO = f"{HF_ORGANIZATION}/llm-security-leaderboard-requests"

AGGREGATED_REPO = f"{HF_ORGANIZATION}/llm-security-leaderboard-contents"

VOTES_REPO = f"{HF_ORGANIZATION}/llm-security-leaderboard-votes"

# Plain string literal: this was an f-string with no placeholders (ruff F541).
OFFICIAL_PROVIDERS_REPO = "open-llm-leaderboard/official-providers"


# Log the resolved repo names at startup. Lazy %-style arguments defer string
# formatting until the logging framework confirms INFO is enabled.
logger.info("QUEUE_REPO: %s", QUEUE_REPO)

logger.info("AGGREGATED_REPO: %s", AGGREGATED_REPO)

logger.info("VOTES_REPO: %s", VOTES_REPO)

logger.info("OFFICIAL_PROVIDERS_REPO: %s", OFFICIAL_PROVIDERS_REPO)
|
|
|
|
|
# Local filesystem locations resolved from the shared cache configuration.
# Semantics inferred from attribute names — confirm against app.core.cache.
VOTES_PATH = cache_config.votes_file  # presumably the file storing recorded votes

EVAL_REQUESTS_PATH = cache_config.eval_requests_file  # presumably the file storing eval requests

MODEL_CACHE_DIR = cache_config.models_cache  # presumably the directory for cached model downloads
|
|