fix reactivity
src/lib/components/InferencePlayground/InferencePlayground.svelte
CHANGED

@@ -29,10 +29,12 @@
 export let models: ModelEntryWithTokenizer[];
 
 const startMessageUser: ChatCompletionInputMessage = { role: "user", content: "" };
-let systemMessage: ChatCompletionInputMessage = { role: "system", content: "" };
-
 const modelIdsFromQueryParam = $page.url.searchParams.get("modelId")?.split(",");
 const modelsFromQueryParam = modelIdsFromQueryParam?.map(id => models.find(model => model.id === id));
+const systemMessage: ChatCompletionInputMessage = {
+	role: "system",
+	content: modelIdsFromQueryParam ? (defaultSystemMessage?.[modelIdsFromQueryParam[0]] ?? "") : "",
+};
 
 let session: Session = {
 	conversations: [

@@ -81,9 +83,6 @@
 const hfTokenLocalStorageKey = "hf_token";
 
 $: systemPromptSupported = session.conversations.some(conversation => isSystemPromptSupported(conversation.model));
-$: if (session.conversations[0].model.id) {
-	session.conversations[0].systemMessage.content = defaultSystemMessage?.[session.conversations[0].model.id] ?? "";
-}
 $: compareActive = session.conversations.length === 2;
 
 function addMessage(conversationIdx: number) {
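Read together with the commit title, the change appears to address a Svelte reactivity pitfall: the removed `$:` block re-ran whenever `session` was updated, so the default system prompt for the first conversation's model could be re-applied and overwrite text the user had already typed. The new code instead builds `systemMessage` once, seeding it from the first model in the `modelId` query parameter when present. A minimal sketch of the two patterns, with hypothetical names (`conversation`, `defaults`) standing in for the playground's actual state:

<script lang="ts">
	// Hypothetical stand-ins for the playground's state and its defaults table.
	let conversation = { model: { id: "model-a" }, systemMessage: { role: "system", content: "" } };
	const defaults: Record<string, string> = { "model-a": "You are a helpful assistant." };

	// Before: a reactive statement. It re-runs on every update to `conversation`,
	// so later edits elsewhere re-apply the default and clobber the user's prompt.
	// $: conversation.systemMessage.content = defaults[conversation.model.id] ?? "";

	// After: an explicit handler. The default is applied only when a model is picked.
	function selectModel(model: { id: string }) {
		conversation.model = model;
		conversation.systemMessage = { role: "system", content: defaults[model.id] ?? "" };
	}
</script>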
src/lib/components/InferencePlayground/InferencePlaygroundModelSelector.svelte
CHANGED

@@ -7,6 +7,7 @@
 import IconCaret from "../Icons/IconCaret.svelte";
 import ModelSelectorModal from "./InferencePlaygroundModelSelectorModal.svelte";
 import Avatar from "../Avatar.svelte";
+import { defaultSystemMessage } from "./InferencePlaygroundGenerationConfig.svelte";
 
 export let models: ModelEntryWithTokenizer[] = [];
 export let conversation: Conversation;

@@ -19,6 +20,7 @@
 		return;
 	}
 	conversation.model = model;
+	conversation.systemMessage = { role: "system", content: defaultSystemMessage?.[modelId] ?? "" };
 
 	const url = new URL($page.url);
 	url.searchParams.set("modelId", model.id);