Report transformers.js env.

- package.json +1 -1
- src/app/boot-app.js +3 -2
- src/app/worker-connection.js +6 -6
- src/worker/boot-worker.js +1 -1
- src/worker/model-cache.js +5 -1
package.json CHANGED

```diff
@@ -1,6 +1,6 @@
 {
   "name": "localm",
-  "version": "1.1.
+  "version": "1.1.7",
   "description": "",
   "main": "chat-full.js",
   "scripts": {
```
src/app/boot-app.js CHANGED

````diff
@@ -19,8 +19,9 @@ export var worker;
 export async function bootApp() {
   const { chatLog, chatInput } = initHTML();
   worker = workerConnection();
-  worker.loaded.then(async () => {
-    outputMessage(
+  worker.loaded.then(async ({ env }) => {
+    outputMessage(
+      'transformers.js\n\n```JSON\n' + JSON.stringify(env, null, 2) + '\n```');
     const models = await worker.listModels();
     outputMessage('Available models: ' + models.join(', '));
   });
````
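Since `loaded` now resolves with an object, the same report could also be written with `await`; a minimal equivalent sketch, assuming (as the `` ```JSON `` fence in the message string implies) that `outputMessage` renders markdown:

````js
// equivalent await form of the .then(...) chain above
const { env } = await worker.loaded;
outputMessage('transformers.js\n\n```JSON\n' + JSON.stringify(env, null, 2) + '\n```');
````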
src/app/worker-connection.js CHANGED

```diff
@@ -6,7 +6,7 @@ export function workerConnection() {
   const workerLoaded = loadWorker();
 
   const connection = {
-    loaded: workerLoaded.then(
+    loaded: workerLoaded.then(worker => ({ env: worker.env })),
     listModels,
     loadModel,
     runPrompt
@@ -33,7 +33,7 @@ export function workerConnection() {
     const msg = ev.data || {};
     if (msg && msg.type === 'ready') {
       ready = true;
-      resolve({ worker, pending, send });
+      resolve({ worker, pending, send, env: msg.env });
       return;
     }
 
@@ -71,15 +71,15 @@ export function workerConnection() {
 
   async function listModels() {
     await workerLoaded;
-
-
+    const { send } = await workerLoaded;
+    return send({ type: 'listModels' });
   }
 
   /** @param {string} modelName */
   async function loadModel(modelName) {
     await workerLoaded;
-
-
+    const { send } = await workerLoaded;
+    return send({ type: 'loadModel', modelName });
   }
 
   /**
```
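The `send` helper that `listModels` and `loadModel` now delegate to is resolved from `loadWorker()` but not shown in this diff. A minimal sketch of the id-correlated request/reply pattern that the resolved shape `{ worker, pending, send }` suggests; the `id`, `result`, and `error` field names are assumptions, not confirmed by the diff:

```js
// sketch: pair each postMessage request with its eventual reply by id
let nextId = 1;
/** @type {Map<number, { resolve: (v: any) => void, reject: (e: any) => void }>} */
const pending = new Map();

function send(message) {
  return new Promise((resolve, reject) => {
    const id = nextId++;
    pending.set(id, { resolve, reject });
    worker.postMessage({ ...message, id });
  });
}

worker.addEventListener('message', ev => {
  const msg = ev.data || {};
  const entry = pending.get(msg.id);
  if (!entry) return; // not a reply (e.g. the initial 'ready' signal)
  pending.delete(msg.id);
  if (msg.error) entry.reject(new Error(msg.error));
  else entry.resolve(msg.result);
});
```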
src/worker/boot-worker.js CHANGED

```diff
@@ -15,7 +15,7 @@ export function bootWorker() {
 
 
   // signal ready to main thread (worker script loaded; model runtime may still be pending)
-  self.postMessage({ type: 'ready' });
+  self.postMessage({ type: 'ready', env: modelCache.env, backend: modelCache.backend });
 
   // handle incoming requests from the UI thread
   self.addEventListener('message', handleMessage);
```
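One caveat: `postMessage` structured-clones its payload, and structured clone throws a `DataCloneError` on functions and other non-cloneable values. If the transformers.js `env` object ever carries such fields, a JSON round-trip keeps the report clone-safe; a defensive sketch, where `snapshotEnv` is a hypothetical helper, not part of this repo:

```js
// hypothetical helper: reduce env to plain, clone-safe data before posting
function snapshotEnv(env) {
  try {
    // JSON.stringify silently drops functions and undefined values
    return JSON.parse(JSON.stringify(env));
  } catch {
    return { note: 'env not serializable' }; // e.g. circular references
  }
}

self.postMessage({ type: 'ready', env: snapshotEnv(modelCache.env), backend: modelCache.backend });
```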
src/worker/model-cache.js CHANGED

```diff
@@ -1,5 +1,7 @@
 // @ts-check
 
+import { pipeline, env } from '@huggingface/transformers';
+
 import { loadModelCore } from './load-model-core';
 
 export class ModelCache {
@@ -7,9 +9,11 @@ export class ModelCache {
   /** @type {import('@huggingface/transformers').DeviceType | undefined} */
   backend = undefined;
 
+  env = env;
+
   knownModels = [
-    'Xenova/phi-3-mini-4k-instruct',
     'Xenova/llama2.c-stories15M', // nonsense
+    'Xenova/phi-3-mini-4k-instruct', // huge
     'Xenova/all-MiniLM-L6-v2', // unsupported model type: bert
     'Xenova/phi-1.5', // gated
     'Qwen/Qwen2.5-3B', // cannot be loaded
```
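Capturing `env` on the cache is what lets `boot-worker.js` include it in the `ready` message. In transformers.js, `env` is the library's runtime-configuration object; a few commonly available knobs, sketched as a hint at what the reported JSON contains (exact fields vary by library version, so verify against the installed release):

```js
import { env } from '@huggingface/transformers';

// typical env settings in transformers.js (check your installed version)
env.allowLocalModels = false; // don't probe same-origin paths for model files
env.allowRemoteModels = true; // fetch models from the Hugging Face Hub
env.useBrowserCache = true;   // cache downloaded weights via the browser Cache API
```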