akhaliq (HF Staff) committed
Commit 4a11dd7 · verified · 1 Parent(s): 01caec1

Upload pages/api/chat.ts with huggingface_hub

Files changed (1):
  pages/api/chat.ts  +55 -0
pages/api/chat.ts ADDED
@@ -0,0 +1,55 @@
+ import type { NextApiRequest, NextApiResponse } from 'next'
+ import OpenAI from 'openai'
+
+ export const config = {
+   api: {
+     responseLimit: false, // allow long streamed responses
+   },
+ }
+
+ export default async function handler(
+   req: NextApiRequest,
+   res: NextApiResponse
+ ) {
+   if (req.method !== 'POST') {
+     return res.status(405).json({ error: 'Method not allowed' })
+   }
+
+   const hfToken = process.env.HF_TOKEN
+   if (!hfToken) {
+     return res.status(500).json({ error: 'HF_TOKEN not configured' })
+   }
+
+   const { messages } = req.body
+   if (!messages || !Array.isArray(messages)) {
+     return res.status(400).json({ error: 'Invalid messages format' })
+   }
+
+   const openai = new OpenAI({ // OpenAI-compatible client pointed at the HF router
+     baseURL: 'https://router.huggingface.co/v1',
+     apiKey: hfToken,
+   })
+
+   try {
+     const stream = await openai.chat.completions.create({
+       model: 'zai-org/GLM-4.6V-Flash:zai-org',
+       messages,
+       stream: true,
+       max_tokens: 1024,
+     })
+
+     res.setHeader('Content-Type', 'text/plain; charset=utf-8')
+     res.setHeader('Cache-Control', 'no-cache')
+     res.setHeader('Connection', 'keep-alive')
+
+     for await (const chunk of stream) {
+       const content = chunk.choices[0]?.delta?.content || '' // forward each streamed delta
+       res.write(content)
+     }
+
+     res.end()
+   } catch (error) {
+     console.error('API error:', error)
+     if (!res.headersSent) res.status(500).json({ error: 'Failed to generate response' })
+   }
+ }
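
For reference, a minimal client-side sketch of how a page might consume this endpoint (hypothetical usage, not part of this commit): it POSTs a messages array to /api/chat and reads the streamed plain-text reply with a ReadableStream reader. The function name streamChat and the message shape are illustrative assumptions.

// Hypothetical client helper, not included in the commit.
async function streamChat(messages: { role: string; content: string }[]): Promise<string> {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  })
  if (!res.ok || !res.body) {
    throw new Error(`Request failed: ${res.status}`)
  }

  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let reply = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    reply += decoder.decode(value, { stream: true }) // append each streamed chunk as it arrives
  }
  return reply
}

Because the route streams raw text rather than Server-Sent Events, the client only needs to decode chunks as they arrive; no event parsing is required. The route itself expects HF_TOKEN to be set in the server environment before it will respond.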