Commit · 572e0e2
Parent(s): 552cdd6
update the home page

Files changed:
- index.html +97 -0
- index.ts +64 -22
index.html
ADDED
@@ -0,0 +1,97 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+  <meta charset="UTF-8">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  <title>Inference Proxy</title>
+  <!-- Include Tailwind CSS -->
+  <script src="https://cdn.tailwindcss.com"></script>
+  <!-- Include Prism.js for syntax highlighting -->
+  <link href="https://cdnjs.cloudflare.com/ajax/libs/prism/1.25.0/themes/prism-tomorrow.min.css" rel="stylesheet" />
+  <style>
+    code {
+      font-size: 0.75rem !important;
+    }
+  </style>
+</head>
+<body class="bg-gray-50 text-gray-800 font-sans antialiased">
+  <div class="max-w-4xl mx-auto px-4 py-8 md:py-12">
+    <header class="mb-10">
+      <h1 class="text-3xl md:text-4xl font-bold text-gray-800 mb-2">Inference Proxy</h1>
+      <div class="h-1 w-20 bg-gray-300 rounded"></div>
+    </header>
+
+    <main>
+      <section class="mb-8">
+        <h2 class="text-xl md:text-2xl font-semibold text-gray-800 mb-4">Setup</h2>
+
+        <p class="mb-6 text-gray-700">This proxy captures and stores traces from LLM API requests to your personal Hugging Face dataset.</p>
+
+        <div class="space-y-6">
+          <div class="bg-white rounded-lg shadow-md p-6">
+            <h3 class="text-lg font-medium text-gray-800 mb-3">1. Duplicate Space</h3>
+            <p class="text-gray-600 mb-4">First, duplicate this space to your account to set up your own instance.</p>
+          </div>
+
+          <div class="bg-white rounded-lg shadow-md p-6">
+            <h3 class="text-lg font-medium text-gray-800 mb-3">2. Set Environment Variables</h3>
+            <p class="text-gray-600 mb-4">Configure these required environment variables in your space settings:</p>
+            <div class="bg-gray-100 p-3 rounded-md mb-4">
+              <code class="text-sm text-gray-700">HF_ACCESS_TOKEN=your_huggingface_token</code>
+            </div>
+            <div class="bg-gray-100 p-3 rounded-md">
+              <code class="text-sm text-gray-700">USER_NAME=your_huggingface_username</code>
+            </div>
+          </div>
+        </div>
+      </section>
+
+      <section class="mb-8">
+        <h2 class="text-xl md:text-2xl font-semibold text-gray-800 mb-4">Example Usage</h2>
+        <div class="bg-gray-800 rounded-lg shadow-lg overflow-hidden">
+          <div class="flex items-center px-4 py-2 bg-gray-900">
+            <div class="flex space-x-2 mr-2">
+              <div class="w-3 h-3 rounded-full bg-red-500"></div>
+              <div class="w-3 h-3 rounded-full bg-yellow-500"></div>
+              <div class="w-3 h-3 rounded-full bg-green-500"></div>
+            </div>
+            <p class="text-xs text-gray-400">JavaScript</p>
+          </div>
+          <pre class="p-4 overflow-x-auto text-xs font-mono"><code class="language-javascript">import { OpenAI } from "openai";
+
+const client = new OpenAI({
+  <span class="bg-yellow-700 px-1 rounded">baseURL: "{{HOST_URL}}/fireworks-ai/inference/v1",</span>
+  apiKey: process.env.HF_API_KEY,
+});
+
+let out = "";
+
+const stream = await client.chat.completions.create({
+  model: "accounts/fireworks/models/deepseek-v3",
+  messages: [
+    {
+      role: "user",
+      content: "What is the capital of France?",
+    },
+  ],
+  stream: true,
+  max_tokens: 500,
+});
+
+for await (const chunk of stream) {
+  if (chunk.choices && chunk.choices.length > 0) {
+    const newContent = chunk.choices[0].delta.content;
+    out += newContent;
+    console.log(newContent);
+  }
+}</code></pre>
+        </div>
+      </section>
+    </main>
+  </div>
+
+  <!-- Include Prism.js JavaScript for syntax highlighting -->
+  <script src="https://cdnjs.cloudflare.com/ajax/libs/prism/1.25.0/components/prism-core.min.js"></script>
+  <script src="https://cdnjs.cloudflare.com/ajax/libs/prism/1.25.0/plugins/autoloader/prism-autoloader.min.js"></script>
+</body>
+</html>
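For reference, the page's example relies on the OpenAI SDK. Below is a minimal sketch of the same streaming call made with plain fetch. It is not part of this commit; the Space origin is a hypothetical placeholder, and HF_API_KEY is assumed to hold your Hugging Face token.

// Not part of the commit: plain-fetch equivalent of the page's OpenAI SDK example.
// The Space URL is a hypothetical placeholder; substitute your duplicated Space.
const HOST_URL = "https://your-username-inference-proxy.hf.space";

const res = await fetch(`${HOST_URL}/fireworks-ai/inference/v1/chat/completions`, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${process.env.HF_API_KEY}`,
  },
  body: JSON.stringify({
    model: "accounts/fireworks/models/deepseek-v3",
    messages: [{ role: "user", content: "What is the capital of France?" }],
    stream: true,
    max_tokens: 500,
  }),
});

// Print the raw server-sent-event lines as they arrive; each "data:" line
// carries one JSON chunk like the ones the SDK example iterates over.
const reader = res.body!.getReader();
const decoder = new TextDecoder();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  process.stdout.write(decoder.decode(value, { stream: true }));
}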
index.ts
CHANGED

@@ -230,7 +230,20 @@ function checkAndFlushTraces() {
 }

 app.get('/', (c) => {
-
+  const hostUrl = new URL(c.req.url).origin;
+
+  try {
+    let html = readFileSync('index.html', 'utf8');
+
+    // Replace template variables
+    html = html.replace(/{{TARGET_BASE_URL}}/g, TARGET_BASE_URL)
+      .replace(/{{HOST_URL}}/g, hostUrl);
+
+    return c.html(html);
+  } catch (error) {
+    console.error('Error reading index.html:', error);
+    return c.text('Hono forwarding proxy running!', 500);
+  }
 });

 async function storeStreamedResponse(streamToLog: ReadableStream<Uint8Array>, contentType: string | null, targetUrl: string, traceIndex: number) {
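This handler fills the page's template variables at request time, which is what turns the highlighted {{HOST_URL}} placeholder in index.html into a usable baseURL. A small illustration, not part of the commit; the origin is a hypothetical example:

// Illustration only: what the handler's replace() produces for the highlighted
// line in index.html, assuming a hypothetical Space origin.
const line = 'baseURL: "{{HOST_URL}}/fireworks-ai/inference/v1",';
const rendered = line.replace(/{{HOST_URL}}/g, "https://your-username-inference-proxy.hf.space");
console.log(rendered);
// baseURL: "https://your-username-inference-proxy.hf.space/fireworks-ai/inference/v1",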
@@ -342,11 +355,18 @@ app.all('*', async (c) => {
   const targetPath = url.pathname;
   const targetUrl = `${TARGET_BASE_URL}${targetPath}${url.search}`;

+  // Skip trace creation for favicon and other common browser requests
+  const skipTracePatterns = ['/favicon.ico', '/robots.txt'];
+  const shouldSkipTrace = skipTracePatterns.some(pattern => targetPath.includes(pattern));
+
   // Extract provider from the URL path
   const pathParts = targetPath.split('/');
   const provider = pathParts.length > 1 ? pathParts[1] : 'unknown';

-
+  // Only log if we're not skipping the trace
+  if (!shouldSkipTrace) {
+    console.log(`Forwarding request for ${url.pathname} to ${targetUrl}`);
+  }

   const headers = new Headers(c.req.header());
   headers.delete('host');
@@ -379,8 +399,8 @@
     }
   }

-
-
+  let shouldCreateTrace = !shouldSkipTrace;
+  let traceEntry: typeof requestTraces[0] = {
     timestamp_start: new Date().toISOString(),
     provider
   };
@@ -396,6 +416,11 @@
     }
   }

+  // Skip traces without a valid model
+  if (!traceEntry.model || traceEntry.model === 'unknown') {
+    shouldCreateTrace = false;
+  }
+
   if (parsedRequestBody.messages) {
     traceEntry.messages = parsedRequestBody.messages;

@@ -413,11 +438,18 @@
     } else if (parsedRequestBody.parameters) {
       traceEntry.arguments = parsedRequestBody.parameters;
     }
+  } else {
+    // Skip traces without a request body
+    shouldCreateTrace = false;
   }

-  requestTraces.
-
-
+  const traceIndex = shouldCreateTrace ? requestTraces.length : -1;
+
+  if (shouldCreateTrace) {
+    requestTraces.push(traceEntry);
+    // Check if we need to flush based on batch size
+    checkAndFlushTraces();
+  }

   const response = await fetch(targetUrl, {
     method: c.req.method,
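Taken together, the hunks above gate trace creation on three conditions: the path is not browser noise, the request has a body, and that body names a usable model. A sketch collecting that decision in one place, illustrative only; no such helper exists in index.ts:

// Illustrative helper, not present in index.ts: the trace-gating rules the
// hunks above apply inline in the request handler.
const SKIP_TRACE_PATTERNS = ["/favicon.ico", "/robots.txt"];

function shouldCreateTraceFor(path: string, body?: { model?: string }): boolean {
  if (SKIP_TRACE_PATTERNS.some((p) => path.includes(p))) return false; // browser noise
  if (!body) return false;                                             // no request body to record
  if (!body.model || body.model === "unknown") return false;           // no usable model name
  return true;
}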
@@ -425,7 +457,10 @@
     body: requestBody,
   });

-
+  // Only log if we're not skipping the trace
+  if (!shouldSkipTrace) {
+    console.log(`Received response status ${response.status} from ${targetUrl}`);
+  }

   c.status(response.status as StatusCode);
   response.headers.forEach((value, key) => {
@@ -441,26 +476,33 @@
     const [streamForClient, streamForStorage] = response.body.tee();
     const contentType = response.headers.get('content-type');

-
-
-
+    if (shouldCreateTrace && traceIndex >= 0) {
+      storeStreamedResponse(streamForStorage, contentType, targetUrl, traceIndex).catch(err => {
+        console.error("Error in background stream storage:", err);
+      });
+    }

     return stream(c, async (streamInstance) => {
       await streamInstance.pipe(streamForClient);
     });
   } else {
-
-
-
-
-    if (
-
-
-
+    // Only log if we're not skipping the trace
+    if (!shouldSkipTrace) {
+      console.log(`Received response with no body from ${targetUrl}.`);
+    }
+    if (shouldCreateTrace && traceIndex >= 0) {
+      requestTraces[traceIndex].timestamp_end = new Date().toISOString();
+
+      // Calculate duration if we have both timestamps
+      if (requestTraces[traceIndex].timestamp_start && requestTraces[traceIndex].timestamp_end) {
+        const startTime = new Date(requestTraces[traceIndex].timestamp_start).getTime();
+        const endTime = new Date(requestTraces[traceIndex].timestamp_end).getTime();
+        requestTraces[traceIndex].duration_ms = endTime - startTime;
+      }
+
+      // Check if we need to flush based on batch size
+      checkAndFlushTraces();
     }
-
-    // Check if we need to flush based on batch size
-    checkAndFlushTraces();
     return c.body(null);
   }
 } catch (error) {
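For orientation, the fields this diff reads or writes on a trace suggest roughly the shape below. The authoritative type is the element type of requestTraces declared elsewhere in index.ts; treat this as an inferred sketch.

// Inferred from the fields touched in this diff; the real element type of
// requestTraces is declared elsewhere in index.ts.
interface TraceEntrySketch {
  timestamp_start: string;  // ISO timestamp set when the request arrives
  timestamp_end?: string;   // ISO timestamp set when the response finishes
  duration_ms?: number;     // timestamp_end minus timestamp_start, in milliseconds
  provider: string;         // first path segment of the proxied URL, e.g. "fireworks-ai"
  model?: string;           // model named in the request body
  messages?: unknown[];     // chat messages, when the body has them
  arguments?: unknown;      // parameters, for non-chat request bodies
}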