update
src/lib/components/Playground/Playground.svelte
CHANGED
@@ -18,7 +18,7 @@
 
 	let hfToken: string | null = '';
 	let currentModel = compatibleModels[0];
-	let systemMessage: Message = { role: 'system', content: '' };
+	let systemMessage: Message = { role: 'system', content: 'only answer in uppercase letters' };
 	let messages: Message[] = startMessages;
 	let temperature = 0.5;
 	let maxTokens = 2048;
@@ -38,7 +38,6 @@
 
 	function deleteMessage(i: number) {
 		messages = messages.filter((_, j) => j !== i);
-		// Don't scroll after deleting a message
 	}
 
 	function reset() {
@@ -46,7 +45,6 @@
 	}
 
 	function onKeydown(event: KeyboardEvent) {
-		// check if the user is pressing the enter key + ctrl key or command key
 		if ((event.ctrlKey || event.metaKey) && event.key === 'Enter') {
 			submit();
 		}
@@ -67,7 +65,7 @@
 		try {
 			const hf = new HfInference(hfToken);
 			const requestMessages = [
-
+				systemMessage,
 				...messages.map(({ role, content }) => ({ role, content }))
 			];
 
@@ -80,8 +78,7 @@
 				model: currentModel,
 				messages: requestMessages,
 				temperature,
-				max_tokens: maxTokens,
-				seed: 0
+				max_tokens: maxTokens
 			})) {
 				if (chunk.choices && chunk.choices.length > 0) {
 					if (streamingMessage && chunk.choices[0]?.delta?.content) {
@@ -97,8 +94,7 @@
 				model: currentModel,
 				messages: requestMessages,
 				temperature,
-				max_tokens: maxTokens,
-				seed: 0
+				max_tokens: maxTokens
 			});
 
 			if (response.choices && response.choices.length > 0) {
@@ -118,8 +114,6 @@
 		}
 	}
 
-	$: console.log(messages);
-
 	function scrollToBottom() {
 		if (messageContainer) {
 			messageContainer.scrollTop = messageContainer.scrollHeight;
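For context, the change prepends systemMessage to every request and drops the fixed seed from the completion parameters. Below is a minimal, standalone TypeScript sketch of that request flow using the @huggingface/inference client's chatCompletionStream; the token source, model id, and sample messages are placeholders I introduced for illustration, not values from the component.

// Sketch of the request construction after this change (assumptions noted above).
import { HfInference } from '@huggingface/inference';

type Message = { role: string; content: string };

const hf = new HfInference(process.env.HF_TOKEN); // placeholder token source

// Hypothetical stand-ins for the component's state.
const systemMessage: Message = { role: 'system', content: 'only answer in uppercase letters' };
const messages: Message[] = [{ role: 'user', content: 'hello' }];

// The system message is now prepended to the messages sent with every request.
const requestMessages = [systemMessage, ...messages.map(({ role, content }) => ({ role, content }))];

let reply = '';
for await (const chunk of hf.chatCompletionStream({
	model: 'HuggingFaceH4/zephyr-7b-beta', // placeholder model id
	messages: requestMessages,
	temperature: 0.5,
	max_tokens: 2048 // note: seed: 0 is no longer sent
})) {
	// Each chunk carries an incremental delta, mirroring the component's streaming branch.
	reply += chunk.choices?.[0]?.delta?.content ?? '';
}
console.log(reply);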