conv ui
src/lib/components/Playground/Playground.svelte
CHANGED

@@ -187,37 +187,53 @@
 			</div>
 			<div class="relative divide-y divide-gray-200 dark:divide-gray-800">
 				<div
-					class="
-				>
-					{#
-					<button
-						class="flex px-6 py-6 hover:bg-gray-50 dark:hover:bg-gray-800/50"
-						on:click={addMessage}
-					>
-						<
-						>Add message
-					</div>
-				</button>
-				{:else}
-					<PlaygroundCode {...currentConversation} {...currentConversation.config} />
-				{/if}
-			</div>
+					class="flex h-[calc(100dvh-5rem)] divide-x divide-gray-200 {conversations.length === 2
+						? '*:w-1/2'
+						: conversations.length == 3
+							? '*:w-1/3'
+							: ''} dark:divide-gray-800"
+				>
+					{#each conversations as conversation}
+						<div
+							class="@container flex max-h-[calc(100dvh-5rem)] flex-col divide-y divide-gray-200 overflow-y-auto overflow-x-hidden dark:divide-gray-800"
+							bind:this={messageContainer}
+						>
+							{#if conversations.length > 1}
+								<div
+									class="flex h-10 items-center bg-gradient-to-r from-gray-50 px-6 text-gray-500 dark:from-gray-400/20"
+								>
+									{conversation.model}
+								</div>
+							{/if}
+							{#if !viewCode}
+								{#each messages as message, i}
+									<PlaygroundMessage {message} on:delete={() => deleteMessage(i)} />
+								{/each}
+
+								<button
+									class="flex px-6 py-6 hover:bg-gray-50 dark:hover:bg-gray-800/50"
+									on:click={addMessage}
+								>
+									<div class="flex items-center gap-2 !p-0 text-sm font-semibold">
+										<svg
+											xmlns="http://www.w3.org/2000/svg"
+											width="1em"
+											height="1em"
+											viewBox="0 0 32 32"
+											class="text-lg"
+											><path
+												fill="currentColor"
+												d="M16 2A14.172 14.172 0 0 0 2 16a14.172 14.172 0 0 0 14 14a14.172 14.172 0 0 0 14-14A14.172 14.172 0 0 0 16 2Zm8 15h-7v7h-2v-7H8v-2h7V8h2v7h7Z"
+											/><path fill="none" d="M24 17h-7v7h-2v-7H8v-2h7V8h2v7h7v2z" /></svg
+										>Add message
+									</div>
+								</button>
+							{:else}
+								<PlaygroundCode {...currentConversation} {...currentConversation.config} />
+							{/if}
+						</div>
+					{/each}
+				</div>
 				<div
 					class="inset-x-0 bottom-0 flex h-20 items-center gap-2 overflow-hidden whitespace-nowrap px-5 md:absolute"
 				>
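The new markup iterates over a `conversations` array, labels each pane with `conversation.model` when more than one is open, and splits the viewport with the `*:w-1/2` / `*:w-1/3` child-width utilities. From how the template consumes its data (`conversation.model`, plus `{...currentConversation}` and `{...currentConversation.config}` spread into `PlaygroundCode`), a conversation record plausibly looks like the sketch below; the `Conversation` and `GenerationConfig` names and the example values are assumptions, not part of this commit. Note that the panes still read a shared `messages` array and a single `messageContainer` binding inside the `{#each}` block, so moving both into per-conversation state looks like a likely follow-up.

import type { Message } from './playgroundUtils';

// Hypothetical shapes inferred from the template above; the actual
// component may declare these differently.
interface GenerationConfig {
	temperature: number;
	maxTokens: number;
	jsonMode: boolean;
	streaming: boolean;
}

interface Conversation {
	model: string; // shown in the pane header when conversations.length > 1
	config: GenerationConfig; // spread into <PlaygroundCode ... />
	messages: Message[];
}

// Two panes get *:w-1/2, three get *:w-1/3; a single pane keeps full width.
let conversations: Conversation[] = [
	{
		model: 'mistralai/Mistral-7B-Instruct-v0.2', // placeholder model id
		config: { temperature: 0.7, maxTokens: 512, jsonMode: false, streaming: true },
		messages: []
	}
];
let currentConversation = conversations[0];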
src/lib/components/Playground/playgroundUtils.ts
CHANGED

@@ -1,64 +1,60 @@
 import { HfInference } from '@huggingface/inference';
-import type { ChatCompletionStreamOutput, ChatCompletionOutput } from '@huggingface/inference';

 export interface Message {
+	role: string;
+	content: string;
 }

 export function createHfInference(token: string): HfInference {
+	return new HfInference(token);
 }

 export function prepareRequestMessages(systemMessage: Message, messages: Message[]): Message[] {
-	return [
-		...(systemMessage.content.length ? [systemMessage] : []),
-		...messages
-	];
+	return [...(systemMessage.content.length ? [systemMessage] : []), ...messages];
 }

 export async function handleStreamingResponse(
+	hf: HfInference,
+	model: string,
+	messages: Message[],
+	temperature: number,
+	maxTokens: number,
+	jsonMode: boolean,
+	onChunk: (content: string) => void
 ): Promise<void> {
+	let out = '';
+	for await (const chunk of hf.chatCompletionStream({
+		model: model,
+		messages: messages,
+		temperature: temperature,
+		max_tokens: maxTokens,
+		json_mode: jsonMode
+	})) {
+		if (chunk.choices && chunk.choices.length > 0 && chunk.choices[0]?.delta?.content) {
+			out += chunk.choices[0].delta.content;
+			onChunk(out);
+		}
+	}
 }

 export async function handleNonStreamingResponse(
+	hf: HfInference,
+	model: string,
+	messages: Message[],
+	temperature: number,
+	maxTokens: number,
+	jsonMode: boolean
 ): Promise<Message> {
+	const response = await hf.chatCompletion({
+		model: model,
+		messages: messages,
+		temperature: temperature,
+		max_tokens: maxTokens,
+		json_mode: jsonMode
+	});

+	if (response.choices && response.choices.length > 0) {
+		return response.choices[0].message;
+	}
+	throw new Error('No response from the model');
 }
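Taken together, the reworked helpers cover both transport modes against the same prepared message list: `handleStreamingResponse` accumulates each delta into a growing string and passes the full text so far to `onChunk`, while `handleNonStreamingResponse` resolves with the first choice's message or throws. A minimal usage sketch, assuming a valid token and a reachable model id (both values below are placeholders):

import {
	createHfInference,
	prepareRequestMessages,
	handleStreamingResponse,
	handleNonStreamingResponse
} from './playgroundUtils';
import type { Message } from './playgroundUtils';

const hf = createHfInference('hf_xxx'); // placeholder token
const system: Message = { role: 'system', content: 'Answer briefly.' };
const messages = prepareRequestMessages(system, [{ role: 'user', content: 'Hello!' }]);
const model = 'mistralai/Mistral-7B-Instruct-v0.2'; // placeholder model id

// Streaming: onChunk receives the cumulative text, not just the new delta,
// so a UI can simply replace the rendered message on every call.
await handleStreamingResponse(hf, model, messages, 0.7, 512, false, (text) => {
	console.log(text);
});

// Non-streaming: one round trip, resolves with the assistant Message.
const reply = await handleNonStreamingResponse(hf, model, messages, 0.7, 512, false);
console.log(reply.content);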