import {EventEmitter} from 'events';
import {TokenUsage} from '../types';
import {LanguageModelUsage} from 'ai';
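
/**
 * Records the token usage reported by each tool call and aggregates totals.
 * Emits a 'usage' event whenever a usage is tracked; the optional budget is
 * only stored for reporting (see printSummary) and is not enforced here.
 */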
export class TokenTracker extends EventEmitter {
  private usages: TokenUsage[] = [];
  private budget?: number;

  constructor(budget?: number) {
    super();
    this.budget = budget;

    // If the runtime exposes a (non-standard) asyncLocalContext on process,
    // mirror the running token total into the request context on every usage event.
    if ('asyncLocalContext' in process) {
      const asyncLocalContext = process.asyncLocalContext as any;
      this.on('usage', () => {
        if (asyncLocalContext.available()) {
          asyncLocalContext.ctx.chargeAmount = this.getTotalUsage().totalTokens;
        }
      });
    }
  }
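
  /** Record the token usage of a single tool call and emit a 'usage' event. */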
  trackUsage(tool: string, usage: LanguageModelUsage) {
    const u = {tool, usage};
    this.usages.push(u);
    this.emit('usage', usage);
  }
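
  /** Sum prompt, completion and total tokens across all recorded usages. */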
  getTotalUsage(): LanguageModelUsage {
    return this.usages.reduce((acc, {usage}) => {
      acc.promptTokens += usage.promptTokens;
      acc.completionTokens += usage.completionTokens;
      acc.totalTokens += usage.totalTokens;
      return acc;
    }, {promptTokens: 0, completionTokens: 0, totalTokens: 0});
  }
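
  /** Same totals as getTotalUsage, but with snake_case keys (OpenAI-style usage shape). */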
  getTotalUsageSnakeCase(): {prompt_tokens: number, completion_tokens: number, total_tokens: number} {
    return this.usages.reduce((acc, {usage}) => {
      acc.prompt_tokens += usage.promptTokens;
      acc.completion_tokens += usage.completionTokens;
      acc.total_tokens += usage.totalTokens;
      return acc;
    }, {prompt_tokens: 0, completion_tokens: 0, total_tokens: 0});
  }
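
  /** Total tokens consumed per tool, keyed by tool name. */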
  getUsageBreakdown(): Record<string, number> {
    return this.usages.reduce((acc, {tool, usage}) => {
      acc[tool] = (acc[tool] || 0) + usage.totalTokens;
      return acc;
    }, {} as Record<string, number>);
  }
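
  /** Log the budget, aggregate totals and per-tool breakdown to the console. */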
  printSummary() {
    const breakdown = this.getUsageBreakdown();
    console.log('Token Usage Summary:', {
      budget: this.budget,
      total: this.getTotalUsage(),
      breakdown
    });
  }
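
  /** Clear all recorded usages. */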
  reset() {
    this.usages = [];
  }
}
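
// Illustrative usage (a minimal sketch; in practice the LanguageModelUsage
// values would come from an `ai` SDK call result rather than literals):
//
//   const tracker = new TokenTracker(1_000_000);
//   tracker.trackUsage('search', {promptTokens: 120, completionTokens: 40, totalTokens: 160});
//   tracker.printSummary();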