from dataclasses import dataclass
from enum import Enum
from inspect import getmembers, ismethod
from typing import List, Optional

# Dataclass definitions for the YAML config files:
# llm_config.yaml, setup_config.yaml and prompt_library_config.yaml.


class UniversalBaseClass:
    """Base class whose __str__ lists every public data attribute."""

    def __str__(self) -> str:
        attributes = []
        for name, value in getmembers(self):
            # Skip private and protected attributes (leading underscore).
            if not name.startswith('_'):
                # Skip bound methods; keep only data attributes.
                if not ismethod(value):
                    attributes.append((name, value))
        return str(attributes)
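
# Illustrative: str() on any UniversalBaseClass subclass renders its public
# (name, value) pairs, e.g. str(UserLimits(10, 60)) ->
# "[('max_num_requests_in_time_window', 10), ('time_window_length_in_seconds', 60)]".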


######################################################################################
# Classes related to llm_config.yaml

@dataclass
class LLMModel(UniversalBaseClass):
    """Settings shared by every LLM endpoint."""

    unique_model_id: str
    model_type: str
    track_tokens: str
    req_per_min: int
    tokens_per_min: int
    error_backoff_in_seconds: int


@dataclass
class UserLimits(UniversalBaseClass):
    """Per-user request throttling limits."""

    max_num_requests_in_time_window: int
    time_window_length_in_seconds: int


@dataclass
class LLMQueueSchedulerLimits(UniversalBaseClass):
    """Limits for the LLM request queue scheduler."""

    ttl_in_seconds: int
    max_queue_size: int


@dataclass
class AzureAOIModels(LLMModel, UniversalBaseClass):
    """Azure OpenAI model identifiers, on top of the common LLMModel fields."""

    model_name_in_azure: str
    deployment_name_in_azure: str


@dataclass
class AzureAOILM(UniversalBaseClass):
    """Azure OpenAI connection settings plus the list of deployed models."""

    api_key: str
    api_version: str
    api_type: str
    azure_endpoint: str
    azure_oai_models: List[AzureAOIModels]

    def __post_init__(self):
        # The YAML loader yields plain dicts; convert them to AzureAOIModels.
        azure_oai_models_obj = []
        if self.azure_oai_models:
            for azure_oai_model in self.azure_oai_models:
                azure_oai_models_obj.append(AzureAOIModels(**azure_oai_model))
        self.azure_oai_models = azure_oai_models_obj


@dataclass
class CustomLLM(LLMModel):
    """A user-supplied model class loaded from a Python file."""

    path_to_py_file: str
    class_name: str


@dataclass
class LLMConfig(UniversalBaseClass):
    """Top-level object for llm_config.yaml."""

    azure_open_ai: AzureAOILM
    user_limits: UserLimits
    scheduler_limits: LLMQueueSchedulerLimits
    custom_models: List[CustomLLM]

    def __post_init__(self):
        # Convert nested dicts from the YAML loader into dataclasses.
        self.azure_open_ai = AzureAOILM(**self.azure_open_ai)
        custom_model_obj = []
        if self.custom_models:
            for custom_model in self.custom_models:
                custom_model_obj.append(CustomLLM(**custom_model))
        self.custom_models = custom_model_obj
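
# Illustrative usage (a sketch, not part of this module): loading
# llm_config.yaml. Assumes PyYAML is installed and the YAML keys mirror the
# field names above; the file name is an assumption.
#
#   import yaml
#   with open("llm_config.yaml") as f:
#       llm_config = LLMConfig(**yaml.safe_load(f))
#   print(llm_config.azure_open_ai.azure_endpoint)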


######################################################################################
# Classes related to setup_config.yaml

@dataclass
class AssistantLLM(UniversalBaseClass):
    """Which LLM to use for prompt optimization."""

    prompt_opt: str


@dataclass
class Dir(UniversalBaseClass):
    """Directory layout for experiment output and logs."""

    base_dir: str
    log_dir_name: str


class OperationMode(Enum):
    ONLINE = "online"
    OFFLINE = "offline"


@dataclass
class SetupConfig(UniversalBaseClass):
    """Top-level object for setup_config.yaml."""

    assistant_llm: AssistantLLM
    dir_info: Dir
    experiment_name: str
    mode: OperationMode
    description: str

    def __post_init__(self):
        # Convert nested dicts from the YAML loader into dataclasses.
        if self.dir_info:
            self.dir_info = Dir(**self.dir_info)
        if self.assistant_llm:
            self.assistant_llm = AssistantLLM(**self.assistant_llm)
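
# Illustrative setup_config.yaml shape (a sketch; the keys come from the
# SetupConfig fields above, all values are assumptions):
#
#   assistant_llm:
#     prompt_opt: "gpt-4o"
#   dir_info:
#     base_dir: "logs"
#     log_dir_name: "run_1"
#   experiment_name: "demo"
#   mode: "online"
#   description: "Example run"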


######################################################################################
# Classes related to prompt_library_config.yaml

@dataclass
class TaskConfig:
    """Settings for a single prompt-library task."""

    name: str
    prompt_template: str
    llm_request_type: str
    prepend_system_prompts: Optional[bool] = True
    prepend_system_guidelines: Optional[bool] = True
    emb_model_id: Optional[str] = None
    llm_model_id: Optional[str] = None


@dataclass
class Mode:
    """Task lists grouped by interaction mode."""

    chat: List[TaskConfig]
    generation: List[TaskConfig]

    def __post_init__(self):
        # Convert nested dicts from the YAML loader into TaskConfig objects.
        chat_obj = []
        if self.chat:
            for chat_config in self.chat:
                chat_obj.append(TaskConfig(**chat_config))
        self.chat = chat_obj
        gen_obj = []
        if self.generation:
            for gen_config in self.generation:
                gen_obj.append(TaskConfig(**gen_config))
        self.generation = gen_obj


@dataclass
class PromptLibraryConfig:
    """Top-level object for prompt_library_config.yaml."""

    mode: Mode
    system_prompts: Optional[str] = None
    system_guidelines: Optional[str] = None

    def __post_init__(self):
        if self.mode:
            self.mode = Mode(**self.mode)
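
# Illustrative usage (a sketch, not part of this module; assumes PyYAML and a
# file named prompt_library_config.yaml whose keys mirror the fields above):
#
#   import yaml
#   with open("prompt_library_config.yaml") as f:
#       prompt_lib = PromptLibraryConfig(**yaml.safe_load(f))
#   for task in prompt_lib.mode.chat:
#       print(task.name, task.llm_request_type)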