ciyidogan committed
Commit 7a6c758 · verified · 1 Parent(s): e0ae5ce

Create llm_openai.py

Files changed (1):
  1. llm_openai.py +73 -0
llm_openai.py ADDED
@@ -0,0 +1,73 @@
+ """
+ OpenAI GPT Implementation
+ """
+ from openai import AsyncOpenAI
+ from typing import Dict, List, Any
+ from llm_interface import LLMInterface
+ from utils import log
+ 
+ class OpenAILLM(LLMInterface):
+     """OpenAI GPT integration (GPT-4o, GPT-4o-mini)"""
+ 
+     def __init__(self, api_key: str, model: str, settings: Dict[str, Any] = None):
+         super().__init__(settings)
+         self.client = AsyncOpenAI(api_key=api_key)
+         self.model = self._map_model_name(model)
+         self.temperature = settings.get("temperature", 0.7) if settings else 0.7
+         self.max_tokens = settings.get("max_tokens", 1000) if settings else 1000
+         log(f"🤖 Initialized OpenAI LLM with model: {self.model}")
+ 
+     def _map_model_name(self, model: str) -> str:
+         """Map provider name to actual model name"""
+         mappings = {
+             "gpt4o": "gpt-4o",
+             "gpt4o-mini": "gpt-4o-mini"
+         }
+         return mappings.get(model, model)
+
+     async def generate(self, system_prompt: str, user_input: str, context: List[Dict]) -> str:
+         """Generate response from OpenAI"""
+         try:
+             # Build messages
+             messages = [{"role": "system", "content": system_prompt}]
+ 
+             # Add context
+             for msg in context:
+                 messages.append({
+                     "role": msg.get("role", "user"),
+                     "content": msg.get("content", "")
+                 })
+ 
+             # Add current user input
+             messages.append({"role": "user", "content": user_input})
+ 
+             # Call OpenAI
+             response = await self.client.chat.completions.create(
+                 model=self.model,
+                 messages=messages,
+                 temperature=self.temperature,
+                 max_tokens=self.max_tokens
+             )
+ 
+             return response.choices[0].message.content.strip()
+         except Exception as e:
+             log(f"❌ OpenAI error: {e}")
+             raise
+ 
+     async def startup(self, project_config: Dict) -> bool:
+         """GPT doesn't need startup, always return True"""
+         log("✅ GPT provider ready (no startup needed)")
+         return True
+ 
+     def get_provider_name(self) -> str:
+         """Get provider name"""
+         return self.model
+ 
+     def get_model_info(self) -> Dict[str, Any]:
+         """Get model information"""
+         return {
+             "provider": "openai",
+             "model": self.model,
+             "temperature": self.temperature,
+             "max_tokens": self.max_tokens
+         }
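
For context, a minimal usage sketch of the new class is shown below. It assumes the repo's llm_interface and utils modules are importable alongside llm_openai.py; the environment variable name and the settings values are illustrative and not taken from this commit.

import asyncio
import os

from llm_openai import OpenAILLM

async def main():
    # "gpt4o" is one of the provider aliases handled by _map_model_name();
    # the settings keys mirror the ones read in __init__ (temperature, max_tokens).
    llm = OpenAILLM(
        api_key=os.environ["OPENAI_API_KEY"],  # hypothetical env var name
        model="gpt4o",
        settings={"temperature": 0.3, "max_tokens": 500},
    )

    # context is a list of prior chat turns in {"role", "content"} form,
    # matching what generate() appends after the system prompt.
    context = [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi! How can I help?"},
    ]

    reply = await llm.generate(
        system_prompt="You are a concise assistant.",
        user_input="Summarize our conversation so far.",
        context=context,
    )
    print(reply)

asyncio.run(main())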