✨ feat(ai.py): add traceable decorator to get_anthropic_response methods for better debugging and tracking
e6f89d5
from anthropic import HUMAN_PROMPT, Anthropic, AsyncAnthropic
from dotenv import load_dotenv
from langsmith.run_helpers import traceable

# Load environment variables (API keys, LangSmith tracing settings) from .env.
load_dotenv()


class AnthropicCustom:
    """Thin wrapper around the Anthropic completions API with LangSmith tracing."""

    def __init__(self, api_key, model, max_tokens=1000, prompt=""):
        self.api_key = api_key
        self.model = model
        self.max_tokens = max_tokens
        self.prompt = prompt

    @traceable(run_type="llm", name="Claude", tags=["ai", "anthropic"])
    def get_anthropic_response(self):
        """Return a single, non-streaming completion using the synchronous client."""
        syncClient = Anthropic(api_key=self.api_key, timeout=5)
        response = syncClient.completions.create(
            prompt=self.prompt,
            model=self.model,
            max_tokens_to_sample=self.max_tokens,
        )
        return response.completion

    @traceable(run_type="llm", name="Claude", tags=["ai", "anthropic"])
    async def get_anthropic_response_async(self):
        """Yield completion chunks from the asynchronous client as they stream in."""
        asyncClient = AsyncAnthropic(api_key=self.api_key, timeout=60)
        async for line in await asyncClient.completions.create(
            prompt=self.prompt,
            model=self.model,
            max_tokens_to_sample=self.max_tokens,
            stop_sequences=[
                HUMAN_PROMPT,
            ],
            stream=True,
        ):
            yield line.completion
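
A minimal usage sketch, not part of the original file: it assumes ANTHROPIC_API_KEY is provided via .env, that the legacy "claude-2" completions model is accessible, and that LangSmith tracing variables (e.g. LANGCHAIN_API_KEY) are set if the @traceable runs should actually be recorded. The prompt must follow the HUMAN_PROMPT/AI_PROMPT format that the legacy completions endpoint expects.

import asyncio
import os

from anthropic import AI_PROMPT, HUMAN_PROMPT

if __name__ == "__main__":
    # Legacy completions format: "\n\nHuman: ...\n\nAssistant:".
    prompt = f"{HUMAN_PROMPT} Summarise LangSmith in one sentence.{AI_PROMPT}"

    claude = AnthropicCustom(
        api_key=os.environ["ANTHROPIC_API_KEY"],  # assumed to be set via .env
        model="claude-2",                         # assumed model name
        max_tokens=256,
        prompt=prompt,
    )

    # Synchronous, non-streaming call.
    print(claude.get_anthropic_response())

    # Asynchronous, streaming call: chunks are printed as they arrive.
    async def stream():
        async for chunk in claude.get_anthropic_response_async():
            print(chunk, end="", flush=True)

    asyncio.run(stream())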