Commit 24b4bfe · Parent: 145578f
Added resources folder for configs
Files changed:
- main/main.py +2 -9
- main/{config.yaml → resources/huggingface_config.yaml} +0 -0
- main/resources/local_config.yaml +24 -0
- main/utils.py +39 -1
main/main.py
CHANGED
@@ -2,26 +2,19 @@
 LLM Inference Server main application using LitServe framework.
 """
 import litserve as ls
-import yaml
 import logging
 import os
-from pathlib import Path
 from fastapi.middleware.cors import CORSMiddleware
 from huggingface_hub import login
 from .routes import router, init_router
 from .api import InferenceApi
+from .utils import load_config
 
 # Store process list globally so it doesn't get garbage collected
 _WORKER_PROCESSES = []
 _MANAGER = None
 
-
-def load_config():
-    """Load configuration from config.yaml"""
-    config_path = Path(__file__).parent / "config.yaml"
-    with open(config_path) as f:
-        return yaml.safe_load(f)
-
+# Load configuration
 config = load_config()
 
 
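For orientation only (not part of this commit): a minimal sketch of how the module-level config loaded above could feed the LitServe server setup. The InferenceApi constructor call, the importable package name "main", and the exact LitServer/run options are assumptions, not taken from this diff.

import litserve as ls

from main.api import InferenceApi   # package name "main" is assumed here
from main.utils import load_config

config = load_config()
server_cfg = config["server"]

# Hypothetical wiring: pass the YAML values into LitServe's server construction and startup.
server = ls.LitServer(
    InferenceApi(),
    timeout=server_cfg["timeout"],
    max_batch_size=server_cfg["max_batch_size"],
)
server.run(port=server_cfg["port"])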
main/{config.yaml → resources/huggingface_config.yaml}
RENAMED
File without changes
main/resources/local_config.yaml
ADDED
@@ -0,0 +1,24 @@
+server:
+  host: "0.0.0.0"
+  port: 8002
+  timeout: 60
+  max_batch_size: 1
+
+llm_server:
+  host: "0.0.0.0"
+  port: 8002  # Will be ignored for hf.space URLs
+  timeout: 60.0
+  api_prefix: "/api/v1"  # This will be used for route prefixing
+  endpoints:
+    generate: "/generate"
+    generate_stream: "/generate/stream"
+    embedding: "/embedding"
+    system_status: "/system/status"
+    system_validate: "/system/validate"
+    model_initialize: "/model/initialize"
+    model_initialize_embedding: "/model/initialize/embedding"
+    model_download: "/model/download"
+
+model:
+  defaults:
+    model_name: "microsoft/Phi-3.5-mini-instruct"
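As a quick reference (not part of the commit), load_config() returns the YAML above as a plain nested dict. A small access sketch, assuming the package is importable as "main":

from main.utils import load_config

config = load_config()

# Compose the generate endpoint URL from the llm_server block above.
llm = config["llm_server"]
generate_url = f"http://{llm['host']}:{llm['port']}{llm['api_prefix']}{llm['endpoints']['generate']}"
# -> "http://0.0.0.0:8002/api/v1/generate"

default_model = config["model"]["defaults"]["model_name"]
# -> "microsoft/Phi-3.5-mini-instruct"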
main/utils.py
CHANGED
@@ -1,8 +1,14 @@
 """Utility functions for the inference API."""
 import json
+import logging
+import os
 import re
+from pathlib import Path
 from typing import Dict, Any
 
+import yaml
+
+
 def extract_json(text: str) -> Dict[str, Any]:
     """Extract JSON from text that might contain other content.
 
@@ -25,4 +31,36 @@ def extract_json(text: str) -> Dict[str, Any]:
             continue
 
     # If we couldn't find any valid JSON, raise an error
-    raise ValueError("No valid JSON found in response")
+    raise ValueError("No valid JSON found in response")
+
+def load_config():
+    """
+    Load configuration from config files in the resources directory.
+    Uses CONFIG_ENV environment variable to determine which config to load.
+    Defaults to 'local' if no environment is specified.
+    """
+    # Get environment name from env var, default to 'local'
+    env_name = os.environ.get("CONFIG_ENV", "local")
+
+    # Construct path to resources directory and config file
+    resources_dir = Path(__file__).parent / "resources"
+    config_path = resources_dir / f"{env_name}_config.yaml"
+
+    # Create resources directory if it doesn't exist
+    resources_dir.mkdir(exist_ok=True)
+
+    # Check if config file exists
+    if not config_path.exists():
+        logging.warning(f"Config file {config_path} not found, falling back to local_config.yaml")
+        config_path = resources_dir / "local_config.yaml"
+
+    # If even local config doesn't exist, raise error
+    if not config_path.exists():
+        raise FileNotFoundError(
+            f"No configuration file found at {config_path}. "
+            "Please ensure at least local_config.yaml exists in the resources directory."
+        )
+
+    # Load and return config
+    with open(config_path) as f:
+        return yaml.safe_load(f)
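Usage sketch for the environment-based selection added above (not part of the commit): CONFIG_ENV picks which <env>_config.yaml is read from main/resources, and a missing file falls back to local_config.yaml with a warning. The import path main.utils is an assumption about the project layout.

import os

# Unset or "local" loads resources/local_config.yaml (added in this commit);
# CONFIG_ENV=huggingface would load resources/huggingface_config.yaml instead.
os.environ.setdefault("CONFIG_ENV", "local")

from main.utils import load_config

config = load_config()
print(config["server"]["port"])  # 8002 with the local_config.yaml above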