# ReyDev
# 🔧 refactor(ai.py): add RunTree parameter to AnthropicCustom class for better traceability
# commit 4d6ca8c (unverified)
import datetime
import requests
from anthropic import Anthropic, AsyncAnthropic
from dotenv import load_dotenv
from langsmith.run_helpers import traceable
from langsmith.run_trees import RunTree
from claude_space.settings import settings
load_dotenv()
@traceable(run_type="llm")
class AnthropicCustom:
    """Thin wrapper around the Anthropic completions API that reports each
    finished run to LangSmith for traceability.

    The accumulated completion text is kept in ``final_response`` so the
    instance can be rendered (``str``/``repr``) or iterated character by
    character after a completion has been fetched.
    """

    def __init__(
        self,
        api_key,
        model,
        run_tree: RunTree,
        max_tokens: int = 1000,
        prompt: str = "",
    ):
        """Store the request configuration.

        Args:
            api_key: Anthropic API key used for both sync and async clients.
            model: Anthropic model identifier to complete with.
            run_tree: Parent LangSmith run; its id is attached to the
                reported run as ``parent_run_id``.
            max_tokens: Maximum tokens to sample (``max_tokens_to_sample``).
            prompt: Prompt text sent verbatim to the API.
        """
        self.api_key = api_key
        self.model = model
        self.max_tokens = max_tokens
        self.prompt = prompt
        self.run_tree = run_tree
        # Initialize eagerly so __str__/__repr__/__iter__ do not raise
        # AttributeError when called before any completion is requested.
        self.final_response = ""

    def get_anthropic_response(self):
        """Fetch a completion synchronously and return its text.

        Also caches the text on ``self.final_response``. Note: this path
        does NOT report the run to LangSmith (only the async path does).
        """
        sync_client = Anthropic(api_key=self.api_key, timeout=5)
        response = sync_client.completions.create(
            prompt=self.prompt,
            model=self.model,
            max_tokens_to_sample=self.max_tokens,
        )
        self.final_response = response.completion
        return response.completion

    async def get_anthropic_response_async(self):
        """Stream a completion, yielding each chunk of text as it arrives.

        Accumulates the chunks in ``self.final_response`` and always reports
        the (possibly partial) output to LangSmith when the generator is
        exhausted, closed early, or fails.
        """
        async_client = AsyncAnthropic(api_key=self.api_key, timeout=60)
        self.final_response = ""
        try:
            async for chunk in await async_client.completions.create(
                prompt=self.prompt,
                model=self.model,
                max_tokens_to_sample=self.max_tokens,
                stream=True,
            ):
                self.final_response += chunk.completion
                yield chunk.completion
        finally:
            # Runs even if the consumer abandons the generator or the stream
            # errors, so whatever was received is still logged.
            self.get_final_response()

    def get_final_response(self):
        """POST the accumulated completion to the LangSmith runs endpoint.

        Best-effort fire-and-forget logging; the response body is ignored.
        """
        # Timezone-aware replacement for the deprecated datetime.utcnow().
        now = datetime.datetime.now(datetime.timezone.utc).isoformat()
        requests.post(
            url=f"{settings.LANGCHAIN_ENDPOINT}/runs",
            json={
                "name": "Anthropic",
                "outputs": {"text": self.final_response},
                "run_type": "chain",
                "inputs": {"text": self.prompt},
                # NOTE(review): start == end, so the logged run always has
                # zero duration; capture the real start time if latency
                # tracking matters.
                "start_time": now,
                "end_time": now,
                # Tags must be strings; max_tokens is an int, so stringify it.
                "tags": [self.model, str(self.max_tokens)],
                "parent_run_id": str(self.run_tree.id),
                "session_name": settings.LANGCHAIN_PROJECT,
            },
            headers={
                "x-api-key": settings.LANGCHAIN_API_KEY,
            },
            # Without a timeout a hung LangSmith endpoint would block
            # forever — and this runs in the async generator's finally.
            timeout=10,
        )

    def __str__(self):
        return self.final_response

    def __repr__(self):
        return self.final_response

    def __iter__(self):
        # Iterates the completion text character by character.
        return iter(self.final_response)