"""
Bridge for transforming API requests to another API requests
"""
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, AsyncIterator, Iterator, List, Optional, Union
if TYPE_CHECKING:
from pydantic import BaseModel
from litellm import LiteLLMLoggingObj, ModelResponse
from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator
from litellm.types.llms.openai import AllMessageValues
class CompletionTransformationBridge(ABC):
    """Abstract interface for bridging /chat/completions traffic to another API.

    Implementations translate an outgoing chat-completions request into the
    target API's request format, translate the target API's response back
    into a ``ModelResponse``, and supply a streaming iterator adapter.
    """

    @abstractmethod
    def transform_request(
        self,
        model: str,
        messages: List["AllMessageValues"],
        optional_params: dict,
        litellm_params: dict,
        headers: dict,
    ) -> dict:
        """Transform /chat/completions api request to another request

        Returns the request body (as a dict) for the target API.
        """
        ...

    @abstractmethod
    def transform_response(
        self,
        model: str,
        raw_response: "BaseModel",  # the response from the other API
        model_response: "ModelResponse",
        logging_obj: "LiteLLMLoggingObj",
        request_data: dict,
        messages: List["AllMessageValues"],
        optional_params: dict,
        litellm_params: dict,
        encoding: Any,
        api_key: Optional[str] = None,
        json_mode: Optional[bool] = None,
    ) -> "ModelResponse":
        """Transform another response to /chat/completions api response

        Populates and returns ``model_response`` in chat-completions form.
        """
        ...

    @abstractmethod
    def get_model_response_iterator(
        self,
        streaming_response: Union[Iterator[str], AsyncIterator[str], "ModelResponse"],
        sync_stream: bool,
        json_mode: Optional[bool] = False,
    ) -> "BaseModelResponseIterator":
        """Return an iterator that adapts the target API's stream (sync or
        async, per ``sync_stream``) into chat-completions stream chunks."""
        ...