Thomas G. Lopes committed
Commit 39318e7 · 1 Parent(s): b924465

improve search

src/lib/components/InferencePlayground/InferencePlaygroundModelSelectorModal.svelte CHANGED
@@ -1,12 +1,12 @@
 <script lang="ts">
 	import type { Conversation } from "$lib/types";
 
-	import { createEventDispatcher, tick } from "svelte";
+	import { createEventDispatcher, onMount, tick } from "svelte";
 
 	import { models } from "$lib/stores/models";
 	import IconSearch from "../Icons/IconSearch.svelte";
 	import IconStar from "../Icons/IconStar.svelte";
-	import { FEATURED_MODELS_IDS } from "./inferencePlaygroundUtils";
+	import { getTrending } from "$lib/utils/model";
 
 	export let conversation: Conversation;
 
@@ -14,17 +14,26 @@
 	let highlightIdx = 0;
 	let ignoreCursorHighlight = false;
 	let containerEl: HTMLDivElement;
+	let query = "";
 
 	const dispatch = createEventDispatcher<{ modelSelected: string; close: void }>();
 
-	let featuredModels = $models.filter(m => FEATURED_MODELS_IDS.includes(m.id));
-	let otherModels = $models.filter(m => !FEATURED_MODELS_IDS.includes(m.id));
+	$: trendingModels = getTrending($models);
 
-	if (featuredModels.findIndex(model => model.id === conversation.model.id) !== -1) {
-		highlightIdx = featuredModels.findIndex(model => model.id === conversation.model.id);
-	} else {
-		highlightIdx = featuredModels.length + otherModels.findIndex(model => model.id === conversation.model.id);
-	}
+	$: featuredModels = trendingModels.filter(m => {
+		return m.id.toLocaleLowerCase().includes(query.toLocaleLowerCase().trim());
+	});
+	$: otherModels = $models.filter(m => {
+		return m.id.toLocaleLowerCase().includes(query.toLocaleLowerCase().trim());
+	});
+
+	onMount(() => {
+		if (featuredModels.findIndex(model => model.id === conversation.model.id) !== -1) {
+			highlightIdx = featuredModels.findIndex(model => model.id === conversation.model.id);
+		} else {
+			highlightIdx = featuredModels.length + otherModels.findIndex(model => model.id === conversation.model.id);
+		}
+	});
 
 	function handleKeydown(event: KeyboardEvent) {
 		const { key } = event;
@@ -79,20 +88,6 @@
 			dispatch("close");
 		}
 	}
-
-	function filterModels(query: string) {
-		featuredModels = $models.filter(m =>
-			query
-				? FEATURED_MODELS_IDS.includes(m.id) && m.id.toLocaleLowerCase().includes(query.toLocaleLowerCase().trim())
-				: FEATURED_MODELS_IDS.includes(m.id)
-		);
-
-		otherModels = $models.filter(m =>
-			query
-				? !FEATURED_MODELS_IDS.includes(m.id) && m.id.toLocaleLowerCase().includes(query.toLocaleLowerCase().trim())
-				: !FEATURED_MODELS_IDS.includes(m.id)
-		);
-	}
 </script>
 
 <svelte:window on:keydown={handleKeydown} on:mousemove={() => (ignoreCursorHighlight = false)} />
@@ -115,7 +110,7 @@
 			autofocus
 			class="flex h-10 w-full rounded-md bg-transparent py-3 text-sm placeholder-gray-400 outline-hidden"
 			placeholder="Search models ..."
-			on:input={e => filterModels(e.currentTarget.value)}
+			bind:value={query}
 		/>
 	</div>
 	<div class="max-h-[300px] overflow-x-hidden overflow-y-auto">
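With this change the modal no longer mutates featuredModels/otherModels inside an on:input handler; query is bound to the input and both lists are recomputed reactively whenever it changes. A minimal sketch of the same case-insensitive id filter in plain TypeScript (the ModelLike type and sample ids below are illustrative, not taken from the repo):

// Illustrative stand-in for ModelWithTokenizer; only the id matters for the filter.
type ModelLike = { id: string };

// Same predicate the reactive `featuredModels` / `otherModels` statements use:
// lower-case both sides, trim the query, substring-match on the model id.
function matchesQuery(model: ModelLike, query: string): boolean {
	return model.id.toLocaleLowerCase().includes(query.toLocaleLowerCase().trim());
}

// Example: filtering a list the way the `$:` statements do on each `query` change.
const sample: ModelLike[] = [
	{ id: "meta-llama/Llama-3.3-70B-Instruct" },
	{ id: "Qwen/Qwen2.5-72B-Instruct" },
];
const hits = sample.filter(m => matchesQuery(m, "  LLaMA ")); // -> only the Llama entry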
src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts CHANGED
@@ -63,14 +63,6 @@ export function isSystemPromptSupported(model: ModelWithTokenizer) {
 	return model?.tokenizerConfig?.chat_template?.includes("system");
 }
 
-export const FEATURED_MODELS_IDS = [
-	"meta-llama/Llama-3.3-70B-Instruct",
-	"meta-llama/Llama-3.1-8B-Instruct",
-	"meta-llama/Llama-3.2-3B-Instruct",
-	"Qwen/Qwen2.5-72B-Instruct",
-	"Qwen/QwQ-32B-Preview",
-];
-
 export const defaultSystemMessage: { [key: string]: string } = {
 	"Qwen/QwQ-32B-Preview":
 		"You are a helpful and harmless assistant. You are Qwen developed by Alibaba. You should think step-by-step.",
src/lib/stores/session.ts CHANGED
@@ -1,14 +1,12 @@
 import { browser } from "$app/environment";
 import { goto } from "$app/navigation";
 import { defaultGenerationConfig } from "$lib/components/InferencePlayground/generationConfigSettings";
-import {
-	defaultSystemMessage,
-	FEATURED_MODELS_IDS,
-} from "$lib/components/InferencePlayground/inferencePlaygroundUtils";
+import { defaultSystemMessage } from "$lib/components/InferencePlayground/inferencePlaygroundUtils";
 import { PipelineTag, type Conversation, type ConversationMessage, type Session } from "$lib/types";
 
 import { models } from "$lib/stores/models";
 import { get, writable } from "svelte/store";
+import { getTrending } from "$lib/utils/model";
 
 function createSessionStore() {
 	const store = writable<Session>(undefined, (set, update) => {
@@ -25,11 +23,14 @@ function createSessionStore() {
 			content: modelIdsFromSearchParam?.[0] ? (defaultSystemMessage?.[modelIdsFromSearchParam[0]] ?? "") : "",
 		};
 
+		const $models = get(models);
+		const featured = getTrending($models);
+
 		set({
 			conversations: [
 				{
-					model: get(models).find(m => FEATURED_MODELS_IDS.includes(m.id)) ??
-						get(models)[0] ?? {
+					model: featured[0] ??
+						$models[0] ?? {
 							_id: "",
 							inferenceProviderMapping: [],
 							pipeline_tag: PipelineTag.TextGeneration,
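The session seed now picks the default conversation model by falling through the trending list, then the full model list, then an inert placeholder. The chain is plain nullish coalescing; a tiny sketch of the same fallback order (hypothetical helper name, not in the repo):

// Hypothetical illustration of the fallback used above:
// first trending model -> first model overall -> placeholder object.
function pickDefault<T>(trending: T[], all: T[], placeholder: T): T {
	return trending[0] ?? all[0] ?? placeholder;
}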
src/lib/utils/model.ts ADDED
@@ -0,0 +1,5 @@
+import type { Model, ModelWithTokenizer } from "$lib/types";
+
+export function getTrending(models: ModelWithTokenizer[], limit = 5) {
+	return models.toSorted((a, b) => b.trendingScore - a.trendingScore).slice(0, limit);
+}
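getTrending returns the limit highest-trendingScore models without mutating the input, via Array.prototype.toSorted (ES2023, Node 20+). On older targets the same result can be had with a copy-then-sort; a sketch, with an illustrative Trendable type standing in for ModelWithTokenizer:

// Copy-then-sort equivalent for runtimes without Array.prototype.toSorted.
type Trendable = { id: string; trendingScore: number };

function getTrendingCompat<T extends Trendable>(models: T[], limit = 5): T[] {
	return [...models].sort((a, b) => b.trendingScore - a.trendingScore).slice(0, limit);
}

// Example: the two highest-scoring entries, input array left untouched.
const top = getTrendingCompat(
	[
		{ id: "a", trendingScore: 12 },
		{ id: "b", trendingScore: 97 },
		{ id: "c", trendingScore: 40 },
	],
	2
); // -> [{ id: "b", trendingScore: 97 }, { id: "c", trendingScore: 40 }]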