# NOTE: removed pasted file-viewer metadata (byte size, commit hash,
# line-number gutter) that made this file invalid Python.
from openai import OpenAI
from typing import Optional, Dict, Any
class AIAssistant:
    """
    A wrapper class for consistent LLM API interactions.

    This class provides:
    - Unified interface for different LLM providers
    - Consistent handling of generation parameters
    - Support for streaming responses

    Attributes:
        client: Initialized API client (OpenAI, Anthropic, etc.)
        model: Name of the model to use
    """

    def __init__(self, client: "OpenAI", model: str):
        # Annotation kept as a string so importing this module does not
        # require the openai package at runtime just for the type hint.
        self.client = client
        self.model = model

    def generate_response(self,
                          prompt_template: Any,
                          generation_params: Optional[Dict[str, Any]] = None,
                          stream: bool = False,
                          **kwargs) -> Any:
        """
        Generate an LLM response using the provided template and parameters.

        Args:
            prompt_template: Template object whose ``format(**kwargs)``
                returns a chat-completions ``messages`` list.
            generation_params: Optional generation parameters (e.g.
                ``temperature``, ``max_tokens``) forwarded verbatim to
                ``create``. Bare ``**kwargs`` are NOT generation params —
                they are substituted into the prompt template.
            stream: Whether to stream the response. When True, content
                deltas are printed to stdout as they arrive; note the
                returned stream object has already been consumed by the
                printing loop.
            **kwargs: Variables for the prompt template.

        Returns:
            API response object (non-streaming) or the streamed response
            object (already iterated by the printing loop).

        Example:
            assistant.generate_response(
                prompt_template=template,
                generation_params={"temperature": 0.7},
                topic="AI safety",
            )
        """
        messages = prompt_template.format(**kwargs)
        params = generation_params or {}
        completion = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            stream=stream,
            **params,
        )
        if stream:
            # Some stream chunks (e.g. the final usage chunk) carry an
            # empty choices list or a None delta — guard both.
            for chunk in completion:
                if chunk.choices and chunk.choices[0].delta.content is not None:
                    print(chunk.choices[0].delta.content, end="")
        return completion