from typing import Any, Iterator, Literal, TypeVar

import openai.types.chat as openai_types
from openai._streaming import Stream

from neollm.types.openai.chat_completion import ChatCompletion
from neollm.types.openai.chat_completion_chunk import ChatCompletionChunk

# Valid values for the "role" field of a chat message
Role = Literal["system", "user", "assistant", "tool", "function"]

# Settings: free-form keyword settings passed through as dictionaries
LLMSettings = dict[str, Any]
ClientSettings = dict[str, Any]

# Message: OpenAI-style chat message params and a list of them
Message = openai_types.ChatCompletionMessageParam
Messages = list[Message]
Tools = Any
Functions = Any

# Response: neollm's ChatCompletion wrapper and its streaming counterparts
Response = ChatCompletion
Chunk = ChatCompletionChunk
StreamResponse = Iterator[Chunk]

# IO: type variables for user-defined input/output payloads
InputType = TypeVar("InputType")
OutputType = TypeVar("OutputType")
StreamOutputType = Any

# OpenAI: raw types from the openai SDK --------------------------------------
OpenAIResponse = openai_types.ChatCompletion
OpenAIChunk = openai_types.ChatCompletionChunk
OpenAIStreamResponse = Stream[OpenAIChunk]  # OpenAI StreamResponse
OpenAIMessages = list[openai_types.ChatCompletionMessageParam]
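

# ----------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of this module's API):
# shows how the aliases above might annotate a chat-style call. The function
# `_example_chat` and the sample messages below are hypothetical placeholders,
# not a real neollm entry point.
def _example_chat(messages: Messages, llm_settings: LLMSettings) -> Response:
    """Hypothetical signature: a Messages list in, a neollm ChatCompletion out."""
    raise NotImplementedError


# A Messages value is a plain list of OpenAI-style message dicts.
_example_messages: Messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]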