File size: 3,550 Bytes
beb52ea
 
 
 
 
5fb84cd
beb52ea
 
 
 
 
 
 
 
 
 
 
02031df
5fb84cd
beb52ea
 
 
 
 
 
 
 
02031df
 
beb52ea
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
02031df
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5fb84cd
02031df
 
 
 
 
beb52ea
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
import os
from typing import Optional
import openai
import anthropic
from duckduckgo_search import DDGS
from perplexity.client import PerplexityClient

class Agent:
    """Route chat messages to one of several LLM backends.

    The backend (OpenAI GPT, Anthropic Claude, or Perplexity Sonar) is chosen
    by substring match on the model name supplied at construction time.
    """

    def __init__(self, base_model: str = "gpt-3.5-turbo", search_engine: str = "duckduckgo"):
        """Initialize the agent and the API client for the chosen backend.

        Args:
            base_model: Model identifier; a substring match selects the
                backend ("gpt" -> OpenAI, "claude" -> Anthropic,
                "sonar" -> Perplexity).
            search_engine: Web-search backend name; "duckduckgo" is the only
                engine currently implemented.
        """
        self.base_model = base_model
        self.search_engine = search_engine

        # Configure only the client the chosen model needs; API keys are
        # read from the environment. Unknown models configure nothing.
        if "gpt" in base_model:
            openai.api_key = os.getenv("OPENAI_API_KEY")
        elif "claude" in base_model:
            self.claude = anthropic.Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))
        elif "sonar" in base_model:
            self.perplexity = PerplexityClient(api_key=os.getenv("PERPLEXITY_API_KEY"))

    def execute(self, message: str, project_name: str) -> Optional[str]:
        """Send *message* to the configured model and return its reply.

        Args:
            message: User prompt to forward to the model.
            project_name: Project identifier (currently unused by the
                backends; kept for interface compatibility with callers).

        Returns:
            The model's text response, "Unsupported model selected" for an
            unrecognized model, or an error string if the backend raised.
        """
        try:
            if "gpt" in self.base_model:
                return self._process_with_gpt(message)
            if "claude" in self.base_model:
                return self._process_with_claude(message)
            if "sonar" in self.base_model:
                return self._process_with_perplexity(message)
            return "Unsupported model selected"
        except Exception as e:
            # Surface backend failures as a string so callers never have to
            # handle a raw exception from any of the SDKs.
            return f"Error processing message: {str(e)}"

    def subsequent_execute(self, message: str, project_name: str) -> Optional[str]:
        """Handle a follow-up message; identical to execute() for now."""
        return self.execute(message, project_name)

    def _process_with_gpt(self, message: str) -> str:
        """Send *message* through the OpenAI chat completions endpoint."""
        response = openai.chat.completions.create(
            model=self.base_model,
            messages=[{"role": "user", "content": message}]
        )
        return response.choices[0].message.content

    def _process_with_claude(self, message: str) -> str:
        """Send *message* through the Anthropic Messages API.

        Request messages are plain dicts — the SDK's ``Message`` class is a
        *response* type, not a request constructor. ``max_tokens`` is a
        required parameter of ``messages.create()``.
        """
        response = self.claude.messages.create(
            model="claude-3-opus-20240229",
            max_tokens=4096,  # required by the Messages API
            messages=[{"role": "user", "content": message}]
        )
        return response.content[0].text

    def _process_with_perplexity(self, message: str) -> str:
        """Send *message* through the Perplexity chat API.

        Unrecognized model names fall back to the base "sonar" model.
        """
        # Map model names to Perplexity models.
        model_mapping = {
            # Current models
            "sonar-reasoning-pro": "sonar-reasoning-pro",  # 127k context, 8k output
            "sonar-reasoning": "sonar-reasoning",          # 127k context
            "sonar-pro": "sonar-pro",                     # 200k context, 8k output
            "sonar": "sonar",                             # 127k context
            # Legacy models (will be deprecated after 2/22/2025)
            "llama-3.1-sonar-small-128k-online": "llama-3.1-sonar-small-128k-online",
            "llama-3.1-sonar-large-128k-online": "llama-3.1-sonar-large-128k-online",
            "llama-3.1-sonar-huge-128k-online": "llama-3.1-sonar-huge-128k-online"
        }

        # Use the mapped model or default to sonar.
        model = model_mapping.get(self.base_model, "sonar")

        response = self.perplexity.chat.create(
            model=model,
            messages=[{"role": "user", "content": message}]
        )
        return response.choices[0].message.content

    def _search_web(self, query: str, num_results: int = 5) -> list:
        """Return up to *num_results* DuckDuckGo text hits for *query*.

        Returns an empty list for any engine other than "duckduckgo".
        """
        if self.search_engine == "duckduckgo":
            with DDGS() as ddgs:
                return list(ddgs.text(query, max_results=num_results))
        # Add support for other search engines as needed.
        return []