# Agent module: routes chat messages to OpenAI GPT or Anthropic Claude models
# and provides a DuckDuckGo web-search helper.
import os
from typing import Optional
import openai
import anthropic
from duckduckgo_search import DDGS
class Agent:
    """Route a user message to an LLM backend and return its text reply.

    Supports OpenAI GPT models (module-level ``openai`` client) and Anthropic
    Claude models (an ``anthropic.Anthropic`` client), plus a DuckDuckGo
    web-search helper.
    """

    def __init__(self, base_model: str = "gpt-3.5-turbo", search_engine: str = "duckduckgo"):
        """Configure the backend model and search engine.

        Args:
            base_model: Model identifier; names containing "gpt" select
                OpenAI, names containing "claude" select Anthropic. Anything
                else is rejected at execute time.
            search_engine: Web-search backend name; only "duckduckgo" is
                currently implemented.
        """
        self.base_model = base_model
        self.search_engine = search_engine
        # Initialize only the client the chosen model needs; API keys come
        # from the environment so no secrets live in code.
        if "gpt" in base_model:
            openai.api_key = os.getenv("OPENAI_API_KEY")
        elif "claude" in base_model:
            self.claude = anthropic.Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))

    def execute(self, message: str, project_name: str) -> Optional[str]:
        """Send *message* to the configured model and return its reply.

        Returns the model's text, the string "Unsupported model selected"
        for unknown models, or an error string — this method never raises.
        """
        try:
            if "gpt" in self.base_model:
                return self._process_with_gpt(message)
            if "claude" in self.base_model:
                return self._process_with_claude(message)
            return "Unsupported model selected"
        except Exception as e:
            # Surface failures as text so the caller never sees a traceback.
            return f"Error processing message: {str(e)}"

    def subsequent_execute(self, message: str, project_name: str) -> Optional[str]:
        """Handle a follow-up turn; currently identical to execute()."""
        return self.execute(message, project_name)

    def _process_with_gpt(self, message: str) -> str:
        """Call the OpenAI chat-completions endpoint with a single user turn."""
        response = openai.chat.completions.create(
            model=self.base_model,
            messages=[{"role": "user", "content": message}]
        )
        return response.choices[0].message.content

    def _process_with_claude(self, message: str) -> str:
        """Call the Anthropic Messages endpoint with a single user turn.

        Fixes vs. the original: request messages must be plain role/content
        dicts (``anthropic.Message`` is the *response* type and cannot be
        constructed from role/content alone), ``max_tokens`` is a required
        parameter of the Messages API, and the call now honors
        ``self.base_model`` instead of a hard-coded model ID.
        """
        response = self.claude.messages.create(
            model=self.base_model,
            max_tokens=1024,  # required by the Anthropic Messages API
            messages=[{"role": "user", "content": message}]
        )
        return response.content[0].text

    def _search_web(self, query: str, num_results: int = 5) -> list:
        """Return up to *num_results* DuckDuckGo text results for *query*."""
        if self.search_engine == "duckduckgo":
            with DDGS() as ddgs:
                return list(ddgs.text(query, max_results=num_results))
        # Other search engines are not implemented; return an empty result set.
        return []