harpreetsahota committed
Commit 4bfba1f · verified · 1 Parent(s): fa529c6

Create assistant.py

Files changed (1):
  assistant.py +61 -0
assistant.py ADDED
@@ -0,0 +1,61 @@
+ from openai import OpenAI
+ from typing import Optional, Dict, Any
+
+ class AIAssistant:
+     """
+     A wrapper class for consistent LLM API interactions.
+
+     This class provides:
+     - Unified interface for different LLM providers
+     - Consistent handling of generation parameters
+     - Support for streaming responses
+
+     Attributes:
+         client: Initialized API client (OpenAI, Anthropic, etc.)
+         model: Name of the model to use
+     """
+     def __init__(self, client: OpenAI, model: str):
+         self.client = client
+         self.model = model
+
+     def generate_response(self,
+                           prompt_template: Any,
+                           generation_params: Optional[Dict] = None,
+                           stream: bool = False,
+                           **kwargs):
+         """
+         Generate an LLM response using the provided template and parameters.
+
+         Args:
+             prompt_template: Template object with a format method
+             generation_params: Optional generation parameters
+             stream: Whether to stream the response
+             **kwargs: Variables for the prompt template
+
+         Returns:
+             API response object or streamed response
+
+         Example:
+             assistant.generate_response(
+                 prompt_template=template,
+                 generation_params={"temperature": 0.7},
+                 topic="AI safety"
+             )
+         """
+         messages = prompt_template.format(**kwargs)
+         params = generation_params or {}
+
+         completion = self.client.chat.completions.create(
+             model=self.model,
+             messages=messages,
+             stream=stream,
+             **params
+         )
+
+         if stream:
+             for chunk in completion:
+                 if chunk.choices[0].delta.content is not None:
+                     print(chunk.choices[0].delta.content, end="")
+             return completion
+
+         return completion
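
For context, a minimal usage sketch (not part of this commit): it assumes an OPENAI_API_KEY is set in the environment and defines a hypothetical SimplePromptTemplate whose format(**kwargs) returns a chat messages list, which is the contract generate_response expects. The model name is only an example.

# Usage sketch, not part of the committed file.
from openai import OpenAI

from assistant import AIAssistant


class SimplePromptTemplate:
    """Hypothetical template: format(**kwargs) must return a chat messages list."""

    def __init__(self, system: str, user: str):
        self.system = system
        self.user = user

    def format(self, **kwargs):
        return [
            {"role": "system", "content": self.system},
            {"role": "user", "content": self.user.format(**kwargs)},
        ]


template = SimplePromptTemplate(
    system="You are a concise technical explainer.",
    user="Explain {topic} in two sentences.",
)

assistant = AIAssistant(client=OpenAI(), model="gpt-4o-mini")

# Generation parameters go in generation_params; **kwargs fill the template.
response = assistant.generate_response(
    prompt_template=template,
    generation_params={"temperature": 0.7},
    topic="AI safety",
)
print(response.choices[0].message.content)

Passing stream=True instead would print chunks as they arrive and return the already-consumed stream object rather than a completed response.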