Thomas G. Lopes committed
Commit ffb2286 · 1 Parent(s): 0bd4051

allow interrupting custom endpoints

src/lib/components/inference-playground/utils.ts CHANGED
@@ -70,7 +70,7 @@ function parseOpenAIMessages(
 	];
 }
 
-function getCompletionMetadata(conversation: Conversation): CompletionMetadata {
+function getCompletionMetadata(conversation: Conversation, signal?: AbortSignal): CompletionMetadata {
 	const { model, systemMessage } = conversation;
 
 	// Handle OpenAI-compatible models
@@ -79,6 +79,9 @@ function getCompletionMetadata(conversation: Conversation): CompletionMetadata {
 		apiKey: model.accessToken,
 		baseURL: model.endpointUrl,
 		dangerouslyAllowBrowser: true,
+		fetch: (...args: Parameters<typeof fetch>) => {
+			return fetch(args[0], { ...args[1], signal });
+		},
 	});
 
 	return {
@@ -114,7 +117,7 @@ export async function handleStreamingResponse(
 	onChunk: (content: string) => void,
 	abortController: AbortController
 ): Promise<void> {
-	const metadata = getCompletionMetadata(conversation);
+	const metadata = getCompletionMetadata(conversation, abortController.signal);
 
 	if (metadata.type === "openai") {
 		const stream = await metadata.client.chat.completions.create({
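The change threads the playground's AbortSignal into the OpenAI SDK by overriding the client's fetch option, so stopping a generation also cancels the in-flight HTTP request to a custom OpenAI-compatible endpoint. Below is a minimal standalone sketch of the same pattern, not code from this repository: the function name streamWithAbort, the model id, the endpoint URL, and the placeholder key are illustrative; only the client options (apiKey, baseURL, dangerouslyAllowBrowser, fetch) and the streaming call mirror the diff above.

// sketch.ts — assumed names; not part of the repo
import OpenAI from "openai";

async function streamWithAbort(baseURL: string, apiKey: string, signal: AbortSignal): Promise<void> {
	const client = new OpenAI({
		apiKey,
		baseURL,
		dangerouslyAllowBrowser: true,
		// Same trick as the commit: forward the caller's AbortSignal into every
		// request the SDK issues, so aborting the controller cancels the fetch.
		fetch: (...args: Parameters<typeof fetch>) => {
			return fetch(args[0], { ...args[1], signal });
		},
	});

	try {
		const stream = await client.chat.completions.create({
			model: "my-model", // hypothetical model id served by the custom endpoint
			messages: [{ role: "user", content: "Hello" }],
			stream: true,
		});

		let text = "";
		for await (const chunk of stream) {
			text += chunk.choices[0]?.delta?.content ?? "";
		}
		console.log(text);
	} catch (err) {
		// If the signal fired, the forwarded fetch rejected mid-request; treat
		// that as a user-initiated cancellation rather than an error.
		if (signal.aborted) return;
		throw err;
	}
}

// Usage: cancel the generation after two seconds.
const controller = new AbortController();
setTimeout(() => controller.abort(), 2000);
streamWithAbort("http://localhost:8080/v1", "placeholder-key", controller.signal).catch(console.error);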