Thomas G. Lopes
committed on
Commit
·
2168f52
1
Parent(s):
a251d41
basic projects structure
Browse files
src/lib/components/InferencePlayground/InferencePlayground.svelte
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
<script lang="ts">
|
2 |
-
import type { Conversation, ConversationMessage, ModelWithTokenizer } from "$lib/types";
|
3 |
|
4 |
import {
|
5 |
handleNonStreamingResponse,
|
@@ -28,6 +28,13 @@
|
|
28 |
|
29 |
const startMessageUser: ConversationMessage = { role: "user", content: "" };
|
30 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
31 |
let viewCode = false;
|
32 |
let viewSettings = false;
|
33 |
let loading = false;
|
@@ -39,15 +46,15 @@
|
|
39 |
latency: number;
|
40 |
generatedTokensCount: number;
|
41 |
}
|
42 |
-
let generationStats =
|
43 |
| [GenerationStatistics]
|
44 |
| [GenerationStatistics, GenerationStatistics];
|
45 |
|
46 |
-
$: systemPromptSupported =
|
47 |
-
$: compareActive =
|
48 |
|
49 |
function addMessage(conversationIdx: number) {
|
50 |
-
const conversation =
|
51 |
if (!conversation) return;
|
52 |
const msgs = conversation.messages.slice();
|
53 |
conversation.messages = [
|
@@ -61,12 +68,12 @@
|
|
61 |
}
|
62 |
|
63 |
function deleteMessage(conversationIdx: number, idx: number) {
|
64 |
-
|
65 |
$session = $session;
|
66 |
}
|
67 |
|
68 |
function reset() {
|
69 |
-
|
70 |
conversation.systemMessage.content = "";
|
71 |
conversation.messages = [{ ...startMessageUser }];
|
72 |
});
|
@@ -136,10 +143,10 @@
|
|
136 |
return;
|
137 |
}
|
138 |
|
139 |
-
for (const [idx, conversation] of
|
140 |
if (conversation.messages.at(-1)?.role === "assistant") {
|
141 |
let prefix = "";
|
142 |
-
if (
|
143 |
prefix = `Error on ${idx === 0 ? "left" : "right"} conversation. `;
|
144 |
}
|
145 |
return alert(`${prefix}Messages must alternate between user/assistant roles.`);
|
@@ -150,10 +157,10 @@
|
|
150 |
loading = true;
|
151 |
|
152 |
try {
|
153 |
-
const promises =
|
154 |
await Promise.all(promises);
|
155 |
} catch (error) {
|
156 |
-
for (const conversation of
|
157 |
if (conversation.messages.at(-1)?.role === "assistant" && !conversation.messages.at(-1)?.content?.trim()) {
|
158 |
conversation.messages.pop();
|
159 |
conversation.messages = [...conversation.messages];
|
@@ -197,16 +204,16 @@
|
|
197 |
|
198 |
function addCompareModel(modelId: ModelWithTokenizer["id"]) {
|
199 |
const model = $models.find(m => m.id === modelId);
|
200 |
-
if (!model ||
|
201 |
return;
|
202 |
}
|
203 |
-
const newConversation = { ...JSON.parse(JSON.stringify(
|
204 |
-
|
205 |
generationStats = [generationStats[0], { latency: 0, generatedTokensCount: 0 }];
|
206 |
}
|
207 |
|
208 |
function removeCompareModal(conversationIdx: number) {
|
209 |
-
|
210 |
$session = $session;
|
211 |
generationStats.splice(conversationIdx, 1)[0];
|
212 |
generationStats = generationStats;
|
@@ -246,9 +253,9 @@
|
|
246 |
placeholder={systemPromptSupported
|
247 |
? "Enter a custom prompt"
|
248 |
: "System prompt is not supported with the chosen model."}
|
249 |
-
value={systemPromptSupported ?
|
250 |
on:input={e => {
|
251 |
-
for (const conversation of
|
252 |
conversation.systemMessage.content = e.currentTarget.value;
|
253 |
}
|
254 |
$session = $session;
|
@@ -261,7 +268,7 @@
|
|
261 |
<div
|
262 |
class="flex h-[calc(100dvh-5rem-120px)] divide-x divide-gray-200 overflow-x-auto overflow-y-hidden *:w-full max-sm:w-dvw md:h-[calc(100dvh-5rem)] md:pt-3 dark:divide-gray-800"
|
263 |
>
|
264 |
-
{#each
|
265 |
<div class="max-sm:min-w-full">
|
266 |
{#if compareActive}
|
267 |
<PlaygroundConversationHeader
|
@@ -331,7 +338,7 @@
|
|
331 |
{#if loading}
|
332 |
<div class="flex flex-none items-center gap-[3px]">
|
333 |
<span class="mr-2">
|
334 |
-
{#if
|
335 |
Stop
|
336 |
{:else}
|
337 |
Cancel
|
@@ -366,7 +373,7 @@
|
|
366 |
class="flex flex-1 flex-col gap-6 overflow-y-hidden rounded-xl border border-gray-200/80 bg-white bg-linear-to-b from-white via-white p-3 shadow-xs dark:border-white/5 dark:bg-gray-900 dark:from-gray-800/40 dark:via-gray-800/40"
|
367 |
>
|
368 |
<div class="flex flex-col gap-2">
|
369 |
-
<ModelSelector bind:conversation={
|
370 |
<div class="flex items-center gap-2 self-end px-2 text-xs whitespace-nowrap">
|
371 |
<button
|
372 |
class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
|
@@ -376,7 +383,7 @@
|
|
376 |
Compare
|
377 |
</button>
|
378 |
<a
|
379 |
-
href="https://huggingface.co/{
|
380 |
.conversations[0].provider}"
|
381 |
target="_blank"
|
382 |
class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
|
@@ -387,7 +394,7 @@
|
|
387 |
</div>
|
388 |
</div>
|
389 |
|
390 |
-
<GenerationConfig bind:conversation={
|
391 |
{#if $token.value}
|
392 |
<button
|
393 |
on:click={token.reset}
|
@@ -444,7 +451,7 @@
|
|
444 |
|
445 |
{#if selectCompareModelOpen}
|
446 |
<ModelSelectorModal
|
447 |
-
conversation={
|
448 |
on:modelSelected={e => addCompareModel(e.detail)}
|
449 |
on:close={() => (selectCompareModelOpen = false)}
|
450 |
/>
|
|
|
1 |
<script lang="ts">
|
2 |
+
import type { Conversation, ConversationMessage, ModelWithTokenizer, Session } from "$lib/types";
|
3 |
|
4 |
import {
|
5 |
handleNonStreamingResponse,
|
|
|
28 |
|
29 |
const startMessageUser: ConversationMessage = { role: "user", content: "" };
|
30 |
|
31 |
+
function getActiveProject(s: Session) {
|
32 |
+
return s.projects.find(p => p.id === s.activeProjectId) ?? s.projects[0]!;
|
33 |
+
}
|
34 |
+
|
35 |
+
$: project = getActiveProject($session);
|
36 |
+
project = getActiveProject($session); // needed, otherwise its undefined on startup (not sure why).
|
37 |
+
|
38 |
let viewCode = false;
|
39 |
let viewSettings = false;
|
40 |
let loading = false;
|
|
|
46 |
latency: number;
|
47 |
generatedTokensCount: number;
|
48 |
}
|
49 |
+
let generationStats = project.conversations.map(_ => ({ latency: 0, generatedTokensCount: 0 })) as
|
50 |
| [GenerationStatistics]
|
51 |
| [GenerationStatistics, GenerationStatistics];
|
52 |
|
53 |
+
$: systemPromptSupported = project.conversations.some(conversation => isSystemPromptSupported(conversation.model));
|
54 |
+
$: compareActive = project.conversations.length === 2;
|
55 |
|
56 |
function addMessage(conversationIdx: number) {
|
57 |
+
const conversation = project.conversations[conversationIdx];
|
58 |
if (!conversation) return;
|
59 |
const msgs = conversation.messages.slice();
|
60 |
conversation.messages = [
|
|
|
68 |
}
|
69 |
|
70 |
function deleteMessage(conversationIdx: number, idx: number) {
|
71 |
+
project.conversations[conversationIdx]?.messages.splice(idx, 1)[0];
|
72 |
$session = $session;
|
73 |
}
|
74 |
|
75 |
function reset() {
|
76 |
+
project.conversations.map(conversation => {
|
77 |
conversation.systemMessage.content = "";
|
78 |
conversation.messages = [{ ...startMessageUser }];
|
79 |
});
|
|
|
143 |
return;
|
144 |
}
|
145 |
|
146 |
+
for (const [idx, conversation] of project.conversations.entries()) {
|
147 |
if (conversation.messages.at(-1)?.role === "assistant") {
|
148 |
let prefix = "";
|
149 |
+
if (project.conversations.length === 2) {
|
150 |
prefix = `Error on ${idx === 0 ? "left" : "right"} conversation. `;
|
151 |
}
|
152 |
return alert(`${prefix}Messages must alternate between user/assistant roles.`);
|
|
|
157 |
loading = true;
|
158 |
|
159 |
try {
|
160 |
+
const promises = project.conversations.map((conversation, idx) => runInference(conversation, idx));
|
161 |
await Promise.all(promises);
|
162 |
} catch (error) {
|
163 |
+
for (const conversation of project.conversations) {
|
164 |
if (conversation.messages.at(-1)?.role === "assistant" && !conversation.messages.at(-1)?.content?.trim()) {
|
165 |
conversation.messages.pop();
|
166 |
conversation.messages = [...conversation.messages];
|
|
|
204 |
|
205 |
function addCompareModel(modelId: ModelWithTokenizer["id"]) {
|
206 |
const model = $models.find(m => m.id === modelId);
|
207 |
+
if (!model || project.conversations.length === 2) {
|
208 |
return;
|
209 |
}
|
210 |
+
const newConversation = { ...JSON.parse(JSON.stringify(project.conversations[0])), model };
|
211 |
+
project.conversations = [...project.conversations, newConversation];
|
212 |
generationStats = [generationStats[0], { latency: 0, generatedTokensCount: 0 }];
|
213 |
}
|
214 |
|
215 |
function removeCompareModal(conversationIdx: number) {
|
216 |
+
project.conversations.splice(conversationIdx, 1)[0];
|
217 |
$session = $session;
|
218 |
generationStats.splice(conversationIdx, 1)[0];
|
219 |
generationStats = generationStats;
|
|
|
253 |
placeholder={systemPromptSupported
|
254 |
? "Enter a custom prompt"
|
255 |
: "System prompt is not supported with the chosen model."}
|
256 |
+
value={systemPromptSupported ? project.conversations[0].systemMessage.content : ""}
|
257 |
on:input={e => {
|
258 |
+
for (const conversation of project.conversations) {
|
259 |
conversation.systemMessage.content = e.currentTarget.value;
|
260 |
}
|
261 |
$session = $session;
|
|
|
268 |
<div
|
269 |
class="flex h-[calc(100dvh-5rem-120px)] divide-x divide-gray-200 overflow-x-auto overflow-y-hidden *:w-full max-sm:w-dvw md:h-[calc(100dvh-5rem)] md:pt-3 dark:divide-gray-800"
|
270 |
>
|
271 |
+
{#each project.conversations as conversation, conversationIdx}
|
272 |
<div class="max-sm:min-w-full">
|
273 |
{#if compareActive}
|
274 |
<PlaygroundConversationHeader
|
|
|
338 |
{#if loading}
|
339 |
<div class="flex flex-none items-center gap-[3px]">
|
340 |
<span class="mr-2">
|
341 |
+
{#if project.conversations[0].streaming || project.conversations[1]?.streaming}
|
342 |
Stop
|
343 |
{:else}
|
344 |
Cancel
|
|
|
373 |
class="flex flex-1 flex-col gap-6 overflow-y-hidden rounded-xl border border-gray-200/80 bg-white bg-linear-to-b from-white via-white p-3 shadow-xs dark:border-white/5 dark:bg-gray-900 dark:from-gray-800/40 dark:via-gray-800/40"
|
374 |
>
|
375 |
<div class="flex flex-col gap-2">
|
376 |
+
<ModelSelector bind:conversation={project.conversations[0]} />
|
377 |
<div class="flex items-center gap-2 self-end px-2 text-xs whitespace-nowrap">
|
378 |
<button
|
379 |
class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
|
|
|
383 |
Compare
|
384 |
</button>
|
385 |
<a
|
386 |
+
href="https://huggingface.co/{project.conversations[0].model.id}?inference_provider={project
|
387 |
.conversations[0].provider}"
|
388 |
target="_blank"
|
389 |
class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
|
|
|
394 |
</div>
|
395 |
</div>
|
396 |
|
397 |
+
<GenerationConfig bind:conversation={project.conversations[0]} />
|
398 |
{#if $token.value}
|
399 |
<button
|
400 |
on:click={token.reset}
|
|
|
451 |
|
452 |
{#if selectCompareModelOpen}
|
453 |
<ModelSelectorModal
|
454 |
+
conversation={project.conversations[0]}
|
455 |
on:modelSelected={e => addCompareModel(e.detail)}
|
456 |
on:close={() => (selectCompareModelOpen = false)}
|
457 |
/>
|
src/lib/stores/session.ts
CHANGED
@@ -6,6 +6,7 @@ import {
|
|
6 |
type Conversation,
|
7 |
type ConversationMessage,
|
8 |
type ModelWithTokenizer,
|
|
|
9 |
type Session,
|
10 |
} from "$lib/types";
|
11 |
import { safeParse } from "$lib/utils/json";
|
@@ -56,9 +57,16 @@ function createSessionStore() {
|
|
56 |
streaming: true,
|
57 |
};
|
58 |
|
|
|
|
|
|
|
|
|
|
|
|
|
59 |
// Get saved session from localStorage if available
|
60 |
let savedSession: Session = {
|
61 |
-
|
|
|
62 |
};
|
63 |
|
64 |
const savedData = localStorage.getItem(LOCAL_STORAGE_KEY);
|
@@ -73,25 +81,25 @@ function createSessionStore() {
|
|
73 |
// Query params models and providers take precedence over savedSession's.
|
74 |
// In any case, we try to merge the two, and the amount of conversations
|
75 |
// is the maximum between the two.
|
76 |
-
const max = Math.max(savedSession.conversations.length, modelsFromSearch.length, searchProviders.length);
|
77 |
-
for (let i = 0; i < max; i++) {
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
}
|
85 |
|
86 |
set(savedSession);
|
87 |
});
|
88 |
|
89 |
// Override update method to sync with localStorage and URL params
|
90 |
const update: typeof store.update = cb => {
|
91 |
-
const prevQuery = window.location.search;
|
92 |
-
const query = new URLSearchParams(window.location.search);
|
93 |
-
query.delete("modelId");
|
94 |
-
query.delete("provider");
|
95 |
|
96 |
store.update($s => {
|
97 |
const s = cb($s);
|
@@ -104,22 +112,22 @@ function createSessionStore() {
|
|
104 |
}
|
105 |
|
106 |
// Update URL query parameters
|
107 |
-
const modelIds = s.conversations.map(c => c.model.id);
|
108 |
-
modelIds.forEach(m => query.append("modelId", m));
|
109 |
-
|
110 |
-
const providers = s.conversations.map(c => c.provider ?? "hf-inference");
|
111 |
-
providers.forEach(p => query.append("provider", p));
|
112 |
-
|
113 |
-
const newQuery = query.toString();
|
114 |
-
if (newQuery !== prevQuery.slice(1)) {
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
}
|
123 |
|
124 |
return s;
|
125 |
});
|
|
|
6 |
type Conversation,
|
7 |
type ConversationMessage,
|
8 |
type ModelWithTokenizer,
|
9 |
+
type Project,
|
10 |
type Session,
|
11 |
} from "$lib/types";
|
12 |
import { safeParse } from "$lib/utils/json";
|
|
|
57 |
streaming: true,
|
58 |
};
|
59 |
|
60 |
+
const defaultProject: Project = {
|
61 |
+
name: "default",
|
62 |
+
id: crypto.randomUUID(),
|
63 |
+
conversations: [defaultConversation],
|
64 |
+
};
|
65 |
+
|
66 |
// Get saved session from localStorage if available
|
67 |
let savedSession: Session = {
|
68 |
+
projects: [defaultProject],
|
69 |
+
activeProjectId: defaultProject.id,
|
70 |
};
|
71 |
|
72 |
const savedData = localStorage.getItem(LOCAL_STORAGE_KEY);
|
|
|
81 |
// Query params models and providers take precedence over savedSession's.
|
82 |
// In any case, we try to merge the two, and the amount of conversations
|
83 |
// is the maximum between the two.
|
84 |
+
// const max = Math.max(savedSession.conversations.length, modelsFromSearch.length, searchProviders.length);
|
85 |
+
// for (let i = 0; i < max; i++) {
|
86 |
+
// const conversation = savedSession.conversations[i] ?? defaultConversation;
|
87 |
+
// savedSession.conversations[i] = {
|
88 |
+
// ...conversation,
|
89 |
+
// model: modelsFromSearch[i] ?? conversation.model,
|
90 |
+
// provider: searchProviders[i] ?? conversation.provider,
|
91 |
+
// };
|
92 |
+
// }
|
93 |
|
94 |
set(savedSession);
|
95 |
});
|
96 |
|
97 |
// Override update method to sync with localStorage and URL params
|
98 |
const update: typeof store.update = cb => {
|
99 |
+
// const prevQuery = window.location.search;
|
100 |
+
// const query = new URLSearchParams(window.location.search);
|
101 |
+
// query.delete("modelId");
|
102 |
+
// query.delete("provider");
|
103 |
|
104 |
store.update($s => {
|
105 |
const s = cb($s);
|
|
|
112 |
}
|
113 |
|
114 |
// Update URL query parameters
|
115 |
+
// const modelIds = s.conversations.map(c => c.model.id);
|
116 |
+
// modelIds.forEach(m => query.append("modelId", m));
|
117 |
+
//
|
118 |
+
// const providers = s.conversations.map(c => c.provider ?? "hf-inference");
|
119 |
+
// providers.forEach(p => query.append("provider", p));
|
120 |
+
|
121 |
+
// const newQuery = query.toString();
|
122 |
+
// if (newQuery !== prevQuery.slice(1)) {
|
123 |
+
// window.parent.postMessage(
|
124 |
+
// {
|
125 |
+
// queryString: query.toString(),
|
126 |
+
// },
|
127 |
+
// "https://huggingface.co"
|
128 |
+
// );
|
129 |
+
// goto(`?${query}`, { replaceState: true });
|
130 |
+
// }
|
131 |
|
132 |
return s;
|
133 |
});
|
src/lib/types.ts
CHANGED
@@ -12,8 +12,15 @@ export type Conversation = {
|
|
12 |
provider?: string;
|
13 |
};
|
14 |
|
15 |
-
export type
|
16 |
conversations: [Conversation] | [Conversation, Conversation];
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
};
|
18 |
|
19 |
interface TokenizerConfig {
|
|
|
12 |
provider?: string;
|
13 |
};
|
14 |
|
15 |
+
export type Project = {
|
16 |
conversations: [Conversation] | [Conversation, Conversation];
|
17 |
+
id: string;
|
18 |
+
name: string;
|
19 |
+
};
|
20 |
+
|
21 |
+
export type Session = {
|
22 |
+
projects: Project[];
|
23 |
+
activeProjectId: string;
|
24 |
};
|
25 |
|
26 |
interface TokenizerConfig {
|