litellm/llms/anthropic/cost_calculation.py
"""
Helper util for handling anthropic-specific cost calculation
- e.g.: prompt caching
"""

from typing import Tuple

from litellm.litellm_core_utils.llm_cost_calc.utils import generic_cost_per_token
from litellm.types.utils import Usage


def cost_per_token(model: str, usage: Usage) -> Tuple[float, float]:
"""
Calculates the cost per token for a given model, prompt tokens, and completion tokens.
Input:
- model: str, the model name without provider prefix
- usage: LiteLLM Usage block, containing anthropic caching information
Returns:
Tuple[float, float] - prompt_cost_in_usd, completion_cost_in_usd
"""
    # Delegate to the shared generic calculator, which looks up Anthropic's
    # per-token rates (including prompt-caching rates) in litellm's model cost map.
    return generic_cost_per_token(
        model=model, usage=usage, custom_llm_provider="anthropic"
    )
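

# Example (a minimal sketch, not part of the upstream module): build a LiteLLM
# Usage block and ask for the Anthropic per-token cost. The model name and token
# counts are illustrative assumptions; any model listed in litellm's cost map
# under the "anthropic" provider works the same way. On litellm versions that
# support it, cache_creation_input_tokens / cache_read_input_tokens can also be
# set on Usage so the prompt-caching rates mentioned above are applied.
if __name__ == "__main__":
    example_usage = Usage(
        prompt_tokens=1_000,
        completion_tokens=250,
        total_tokens=1_250,
    )
    prompt_cost, completion_cost = cost_per_token(
        model="claude-3-5-sonnet-20240620", usage=example_usage
    )
    print(f"prompt cost: ${prompt_cost:.6f}, completion cost: ${completion_cost:.6f}")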