import type { MaxTokensCache } from "./index.js";

const TOGETHER_API_URL = "https://api.together.xyz/v1/models";

/**
 * Fetches per-model context lengths from the Together AI models endpoint.
 * The API key is passed in as an argument; if it is missing, the fetch is skipped.
 */
export async function fetchTogetherData(apiKey: string | undefined): Promise<MaxTokensCache["together"]> {
	if (!apiKey) {
		console.warn("Together AI API key not provided. Skipping Together AI fetch.");
		return {};
	}
	try {
		const response = await fetch(TOGETHER_API_URL, {
			headers: {
				Authorization: `Bearer ${apiKey}`, // Use passed-in apiKey
			},
		});
		if (!response.ok) {
			throw new Error(`Together AI API request failed: ${response.status} ${response.statusText}`);
		}
		// eslint-disable-next-line @typescript-eslint/no-explicit-any
		const data: any[] = await response.json();
		const modelsData: MaxTokensCache["together"] = {};

		if (Array.isArray(data)) {
			for (const model of data) {
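				// Prefer the top-level context_length; fall back to config.max_tokens when present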
				const contextLength = model.context_length ?? model.config?.max_tokens ?? null;
				if (model.id && typeof contextLength === "number") {
					modelsData[model.id] = contextLength;
				}
			}
		}
		return modelsData;
	} catch (error) {
		console.error("Error fetching Together AI data:", error);
		return {};
	}
}
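
// Usage sketch (illustrative, not part of this module): assumes the key is
// exposed via a TOGETHER_API_KEY environment variable and that the caller
// merges the result into the shared MaxTokensCache.
//
// const together = await fetchTogetherData(process.env.TOGETHER_API_KEY);
// console.log(`Fetched context lengths for ${Object.keys(together).length} Together AI models.`);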