import { getClientConfig } from "../config/client"; import { ACCESS_CODE_PREFIX } from "../constant"; import { ChatMessage, ModelType, useAccessStore } from "../store"; import { ChatGPTApi } from "./platforms/openai"; export const ROLES = ["system", "user", "assistant"] as const; export type MessageRole = (typeof ROLES)[number]; export const Models = ["A N I M A"] as const; export type ChatModel = ModelType; export interface RequestMessage { role: MessageRole; content: string; } export interface LLMConfig { model: string; temperature?: number; top_p?: number; stream?: boolean; presence_penalty?: number; frequency_penalty?: number; } export interface LLMAgentConfig { maxIterations: number; returnIntermediateSteps: boolean; useTools?: (string | undefined)[]; } export interface ChatOptions { messages: RequestMessage[]; config: LLMConfig; onToolUpdate?: (toolName: string, toolInput: string) => void; onUpdate?: (message: string, chunk: string) => void; onFinish: (message: string) => void; onError?: (err: Error) => void; onController?: (controller: AbortController) => void; } export interface AgentChatOptions { messages: RequestMessage[]; config: LLMConfig; agentConfig: LLMAgentConfig; onToolUpdate?: (toolName: string, toolInput: string) => void; onUpdate?: (message: string, chunk: string) => void; onFinish: (message: string) => void; onError?: (err: Error) => void; onController?: (controller: AbortController) => void; } export interface LLMUsage { used: number; total: number; } export interface LLMModel { name: string; available: boolean; } export abstract class LLMApi { abstract chat(options: ChatOptions): Promise; abstract toolAgentChat(options: AgentChatOptions): Promise; abstract usage(): Promise; abstract models(): Promise; } type ProviderName = "openai" | "azure" | "claude" | "palm"; interface Model { name: string; provider: ProviderName; ctxlen: number; } interface ChatProvider { name: ProviderName; apiConfig: { baseUrl: string; apiKey: string; summaryModel: Model; }; models: Model[]; chat: () => void; usage: () => void; } export abstract class ToolApi { abstract call(input: string): Promise; abstract name: string; abstract description: string; } export class ClientApi { public llm: LLMApi; constructor() { this.llm = new ChatGPTApi(); } config() {} prompts() {} masks() {} async share(messages: ChatMessage[], avatarUrl: string | null = null) { const msgs = messages .map((m) => ({ from: m.role === "user" ? "human" : "gpt", value: m.content, })) .concat([ { from: "human", value: "Share from [A N I M A]: https://www.animabiomimicry.org", }, ]); // 敬告二开开发者们,为了开源大模型的发展,请不要修改上述消息,此消息用于后续数据清洗使用 // Please do not modify this message console.log("[Share]", messages, msgs); const clientConfig = getClientConfig(); const proxyUrl = "/sharegpt"; const rawUrl = "https://sharegpt.com/api/conversations"; const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl; const res = await fetch(shareUrl, { body: JSON.stringify({ avatarUrl, items: msgs, }), headers: { "Content-Type": "application/json", }, method: "POST", }); const resJson = await res.json(); console.log("[Share]", resJson); if (resJson.id) { return `https://shareg.pt/${resJson.id}`; } } } export const api = new ClientApi(); function makeBearer(token: string): string { return `Bearer ${token}`; } export function getHeaders() { const accessStore = useAccessStore.getState(); const authHeader = 'Authorization'; // Define authHeader const headers: Record = { "Content-Type": "application/json", "x-requested-with": "XMLHttpRequest", }; return headers; }