import { getClientConfig } from "../config/client";
import { ACCESS_CODE_PREFIX } from "../constant";
import { ChatMessage, ModelType, useAccessStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
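
// Chat roles supported by the API; the `as const` tuple doubles as the
// source of the `MessageRole` union type below.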
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
export const Models = ["A N I M A"] as const;
export type ChatModel = ModelType;
export interface RequestMessage {
role: MessageRole;
content: string;
}
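
/**
 * Sampling and transport options forwarded to the model backend. The
 * optional fields mirror the OpenAI chat-completion parameters of the
 * same names.
 */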
export interface LLMConfig {
model: string;
temperature?: number;
top_p?: number;
stream?: boolean;
presence_penalty?: number;
frequency_penalty?: number;
}
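
/** Controls for the tool-using agent loop (see `toolAgentChat` below). */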
export interface LLMAgentConfig {
maxIterations: number;
returnIntermediateSteps: boolean;
useTools?: (string | undefined)[];
}
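
/**
 * Callbacks for a single chat request: `onUpdate` streams partial output,
 * `onFinish` delivers the final message, and `onController` hands the caller
 * the AbortController so the request can be cancelled mid-stream.
 */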
export interface ChatOptions {
messages: RequestMessage[];
config: LLMConfig;
onToolUpdate?: (toolName: string, toolInput: string) => void;
onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
}
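
/** Same shape as `ChatOptions`, plus the agent loop configuration. */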
export interface AgentChatOptions {
messages: RequestMessage[];
config: LLMConfig;
agentConfig: LLMAgentConfig;
onToolUpdate?: (toolName: string, toolInput: string) => void;
onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
}
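
/** Usage quota reported by `usage()`: amount consumed vs. total available. */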
export interface LLMUsage {
used: number;
total: number;
}
export interface LLMModel {
name: string;
available: boolean;
}
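
/**
 * Provider-agnostic chat contract; `ChatGPTApi` is the only implementation
 * wired up in `ClientApi` below. Illustrative call (the model name is a
 * placeholder for whatever your backend accepts):
 *
 *   api.llm.chat({
 *     messages: [{ role: "user", content: "Hello" }],
 *     config: { model: "gpt-3.5-turbo", stream: true },
 *     onUpdate: (_full, chunk) => console.log(chunk),
 *     onFinish: (message) => console.log("[done]", message),
 *     onError: (err) => console.error(err),
 *   });
 */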
export abstract class LLMApi {
abstract chat(options: ChatOptions): Promise<void>;
abstract toolAgentChat(options: AgentChatOptions): Promise<void>;
abstract usage(): Promise<LLMUsage>;
abstract models(): Promise<LLMModel[]>;
}
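
// Multi-provider scaffolding: `ProviderName`, `Model`, and `ChatProvider`
// are not referenced by `ClientApi` yet.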
type ProviderName = "openai" | "azure" | "claude" | "palm";
interface Model {
name: string;
provider: ProviderName;
ctxlen: number;
}
interface ChatProvider {
name: ProviderName;
apiConfig: {
baseUrl: string;
apiKey: string;
summaryModel: Model;
};
models: Model[];
chat: () => void;
usage: () => void;
}
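
/** Contract for a tool the agent can invoke by name with a string input. */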
export abstract class ToolApi {
abstract call(input: string): Promise<string>;
abstract name: string;
abstract description: string;
}
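
/**
 * Facade the UI talks to. `config`, `prompts`, and `masks` are reserved
 * extension points and intentionally no-ops for now.
 */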
export class ClientApi {
public llm: LLMApi;
constructor() {
this.llm = new ChatGPTApi();
}
config() {}
prompts() {}
masks() {}
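  /**
   * Export a conversation to ShareGPT (proxied through `/sharegpt` in the
   * web build) and return the short share link on success.
   */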
async share(messages: ChatMessage[], avatarUrl: string | null = null) {
const msgs = messages
.map((m) => ({
from: m.role === "user" ? "human" : "gpt",
value: m.content,
}))
.concat([
{
from: "human",
value: "Share from [A N I M A]: https://www.animabiomimicry.org",
},
]);
    // Note to developers of forks: for the benefit of open-source LLM
    // development, please do not modify the message above; it is used for
    // downstream data cleaning.
console.log("[Share]", messages, msgs);
const clientConfig = getClientConfig();
const proxyUrl = "/sharegpt";
const rawUrl = "https://sharegpt.com/api/conversations";
const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
const res = await fetch(shareUrl, {
body: JSON.stringify({
avatarUrl,
items: msgs,
}),
headers: {
"Content-Type": "application/json",
},
method: "POST",
});
const resJson = await res.json();
console.log("[Share]", resJson);
if (resJson.id) {
return `https://shareg.pt/${resJson.id}`;
}
}
}
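
// Module-level singleton shared across the app.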
export const api = new ClientApi();
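
// Build an HTTP `Authorization: Bearer <token>` header value.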
function makeBearer(token: string): string {
return `Bearer ${token}`;
}
export function getHeaders() {
  const accessStore = useAccessStore.getState();
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    "x-requested-with": "XMLHttpRequest",
  };
  // Prefer the user's own API key, else the shared access code (the field
  // names `token`, `accessCode`, and `enabledAccessControl` follow the
  // upstream ChatGPT-Next-Web access store).
  if (accessStore.token) {
    headers.Authorization = makeBearer(accessStore.token);
  } else if (accessStore.enabledAccessControl() && accessStore.accessCode) {
    headers.Authorization = makeBearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
  }
  return headers;
}