import type { MaxTokensCache } from "./index.js";

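// Minimal shape of a Nebius model entry; only the fields read below are declared.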
interface NebiusModel {
	id: string;
	config?: {
		max_tokens?: number;
	};
	context_length?: number;
}

interface NebiusResponse {
	data?: NebiusModel[];
}

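// Models endpoint; `verbose=true` is requested so each entry carries its config (e.g. max_tokens).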
const NEBIUS_API_URL = "https://api.studio.nebius.com/v1/models?verbose=true";

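/**
 * Fetches the Nebius model list and returns a map of model id to context length,
 * preferring `context_length` and falling back to `config.max_tokens`.
 * Resolves to an empty object when no API key is provided or the request fails.
 */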
export async function fetchNebiusData(apiKey: string | undefined): Promise<MaxTokensCache["nebius"]> {
	if (!apiKey) {
		console.warn("Nebius API key not provided. Skipping Nebius fetch.");
		return {};
	}
	try {
		const response = await fetch(NEBIUS_API_URL, {
			headers: {
				Authorization: `Bearer ${apiKey}`,
			},
		});
		if (!response.ok) {
			throw new Error(`Nebius API request failed: ${response.status} ${response.statusText}`);
		}
		const data: NebiusResponse = await response.json();
		const modelsData: MaxTokensCache["nebius"] = {};

		if (data?.data && Array.isArray(data.data)) {
			for (const model of data.data) {
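				// Prefer the top-level context_length; fall back to config.max_tokens when absent.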
				const contextLength = model.context_length ?? model.config?.max_tokens ?? null;
				if (model.id && typeof contextLength === "number") {
					modelsData[model.id] = contextLength;
				}
			}
		} else {
			console.warn("Unexpected response structure from Nebius API:", data);
		}
		return modelsData;
	} catch (error) {
		console.error("Error fetching Nebius data:", error);
		return {};
	}
}
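
// Illustrative usage (the NEBIUS_API_KEY env var is an assumption, not part of this module):
//   const nebiusMaxTokens = await fetchNebiusData(process.env.NEBIUS_API_KEY);
//   // nebiusMaxTokens maps each Nebius model id to its context length.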