import { OpenAI } from 'openai'

// The Hugging Face router exposes an OpenAI-compatible endpoint, so the
// official openai SDK works here with a custom baseURL. HF_TOKEN must hold a
// Hugging Face access token; the optional X-HF-Bill-To header bills usage to
// the named organization instead of the token owner.
const client = new OpenAI({
  baseURL: 'https://router.huggingface.co/v1',
  apiKey: process.env.HF_TOKEN,
  defaultHeaders: {
    'X-HF-Bill-To': 'huggingface'
  }
})

export default async function handler(req, res) {
  if (req.method !== 'POST') {
    return res.status(405).json({ error: 'Method not allowed' })
  }

  try {
    const { messages } = req.body

    if (!messages || !Array.isArray(messages)) {
      return res.status(400).json({ error: 'Messages array is required' })
    }

    // Transform messages to the OpenAI chat format: a message carrying an
    // `image` field becomes a multimodal content array (text part plus
    // image_url part); plain text messages pass through unchanged.
    const openAIMessages = messages.map(msg => {
      if (msg.image) {
        return {
          role: msg.role,
          content: [
            {
              type: 'text',
              text: msg.content || 'Analyze this image'
            },
            {
              type: 'image_url',
              image_url: {
                url: msg.image
              }
            }
          ]
        }
      }
      return {
        role: msg.role,
        content: msg.content
      }
    })

    // Switch the response to Server-Sent Events. Headers are flushed here,
    // so any error after this point must be reported in-stream (see the
    // inner try/catch) rather than via an HTTP status code.
    res.writeHead(200, {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive',
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Headers': 'Cache-Control',
    })

    try {
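      // The `:novita` suffix pins the request to the Novita provider on the
      // Hugging Face Inference Providers router; omit the suffix to let the
      // router choose a provider automatically.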
      const stream = await client.chat.completions.create({
        model: 'Qwen/Qwen3-VL-8B-Instruct:novita',
        messages: openAIMessages,
        stream: true,
        max_tokens: 1000,
        temperature: 0.7,
      })

      // Relay each token delta to the client as its own SSE data event
      for await (const chunk of stream) {
        const content = chunk.choices[0]?.delta?.content || ''
        if (content) {
          res.write(`data: ${JSON.stringify({ content })}\n\n`)
        }
      }

      // Mirror OpenAI's end-of-stream sentinel so clients know when to stop
      res.write('data: [DONE]\n\n')
      res.end()
    } catch (streamError) {
      console.error('Streaming error:', streamError)
      res.write(`data: ${JSON.stringify({ error: streamError.message })}\n\n`)
      res.end()
    }

  } catch (error) {
    console.error('API error:', error)
    res.status(500).json({ error: error.message || 'Internal server error' })
  }
}
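
/*
 * Client-side usage sketch (reference only; this code belongs in a browser
 * component, not in this API route). It assumes the file lives at
 * pages/api/chat.js, so the route is POST /api/chat; adjust the path to
 * wherever this handler is mounted. It consumes the SSE framing the handler
 * emits above: `data: {"content": ...}` events terminated by `data: [DONE]`.
 *
 * async function streamChat(messages, onToken) {
 *   const res = await fetch('/api/chat', {
 *     method: 'POST',
 *     headers: { 'Content-Type': 'application/json' },
 *     body: JSON.stringify({ messages }),
 *   })
 *   const reader = res.body.getReader()
 *   const decoder = new TextDecoder()
 *   let buffer = ''
 *   while (true) {
 *     const { done, value } = await reader.read()
 *     if (done) break
 *     buffer += decoder.decode(value, { stream: true })
 *     const events = buffer.split('\n\n')
 *     buffer = events.pop() // keep any partial event for the next chunk
 *     for (const event of events) {
 *       if (!event.startsWith('data: ')) continue
 *       const data = event.slice(6)
 *       if (data === '[DONE]') return
 *       const parsed = JSON.parse(data)
 *       if (parsed.error) throw new Error(parsed.error)
 *       if (parsed.content) onToken(parsed.content)
 *     }
 *   }
 * }
 *
 * Example call, where setReply is a hypothetical React state setter:
 * streamChat([{ role: 'user', content: 'Hi' }], t => setReply(prev => prev + t))
 */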