# syntax=docker/dockerfile:1
FROM node:18-alpine AS base

# ---- Install dependencies only when needed ----
FROM base AS deps
# libc6-compat: glibc shim some native npm packages expect on musl/alpine
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY package.json package-lock.json ./
# Full install (including devDependencies): `next build` needs
# typescript/tailwindcss/postcss/autoprefixer, which are devDependencies.
# The runtime stage only copies the pruned standalone output, so dev
# dependencies never reach the production image.
RUN npm ci

# ---- Rebuild the source code only when needed ----
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
ENV NEXT_TELEMETRY_DISABLED=1
RUN npm run build

# ---- Production image: copy build output and run next ----
FROM base AS runner
WORKDIR /app
ENV NODE_ENV=production \
    NEXT_TELEMETRY_DISABLED=1
# Dedicated non-root user; put nextjs in the nodejs group so
# `chown nextjs:nodejs` below matches the user's own group.
RUN addgroup --system --gid 1001 nodejs \
 && adduser --system --uid 1001 --ingroup nodejs nextjs
COPY --from=builder --chown=nextjs:nodejs /app/public ./public
# .next must exist and be writable by the runtime user for the prerender cache
RUN mkdir .next && chown nextjs:nodejs .next
# Leverage output traces (next.config.js `output: 'standalone'`) to keep the
# image small: server.js + a minimal node_modules subset.
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
USER nextjs
# EXPOSE is documentation only; 7860 is the conventional HF Spaces port.
EXPOSE 7860
ENV PORT=7860 \
    HOSTNAME=0.0.0.0
CMD ["node", "server.js"]
=== package.json ===
{
"name": "glm-4.6v-flash",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
},
"dependencies": {
"next": "14.1.0",
"openai": "^4.28.0",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/react": "^18.2.0",
"@types/react-dom": "^18.2.0",
"autoprefixer": "^10.4.16",
"postcss": "^8.4.33",
"tailwindcss": "^3.4.1",
"typescript": "^5.3.3"
}
}
=== next.config.js ===
/** @type {import('next').NextConfig} */
const nextConfig = {
  // 'standalone' makes `next build` emit .next/standalone (server.js plus a
  // minimal node_modules subset) — required by the Dockerfile runtime stage.
  output: 'standalone',
  images: {
    // NOTE(review): `images.domains` is deprecated in Next 14 in favor of
    // `images.remotePatterns`; kept as-is for compatibility.
    domains: ['cdn.britannica.com', 'localhost'],
  },
  // Removed `experimental.appDir`: the flag was stabilized in Next 13.4 and
  // dropped in Next 14, where it only produces an invalid-config warning.
  // With no app/ directory present, the pages router is used regardless.
}

module.exports = nextConfig
=== pages/_app.js ===
import '../styles/globals.css'
function MyApp({ Component, pageProps }) {
return
}
export default MyApp
=== pages/index.js ===
import Head from 'next/head'
import ChatInterface from '../components/ChatInterface'
export default function Home() {
return (
<>
GLM-4.6V-Flash
>
)
}
=== postcss.config.js ===
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}
=== styles/globals.css ===
/* Tailwind entry point: injects the generated base/components/utilities layers. */
@tailwind base;
@tailwind components;
@tailwind utilities;
/* Global element defaults applied in the base layer. */
@layer base {
body {
@apply bg-gray-50 text-gray-900 antialiased;
}
}
/* Reusable chat UI component classes (consumed by ChatInterface). */
@layer components {
/* Full-height centered column that holds the whole chat. */
.chat-container {
@apply flex flex-col h-screen max-w-4xl mx-auto bg-white shadow-lg;
}
/* Base bubble shape; width capped and long words wrapped. */
.message-bubble {
@apply rounded-2xl px-4 py-3 max-w-xs md:max-w-md break-words;
}
/* User messages: blue, right-aligned via ml-auto. */
.user-bubble {
@apply bg-blue-600 text-white ml-auto;
}
/* Assistant messages: gray, left-aligned via mr-auto. */
.assistant-bubble {
@apply bg-gray-200 text-gray-800 mr-auto;
}
}
=== tailwind.config.js ===
/** @type {import('tailwindcss').Config} */
module.exports = {
content: [
'./pages/**/*.{js,ts,jsx,tsx,mdx}',
'./components/**/*.{js,ts,jsx,tsx,mdx}',
],
darkMode: 'class',
theme: {
extend: {
animation: {
'pulse-dots': 'pulse-dots 1.4s infinite ease-in-out both',
},
keyframes: {
'pulse-dots': {
'0%, 80%, 100%': { transform: 'scale(0)', opacity: '0.5' },
'40%': { transform: 'scale(1)', opacity: '1' },
},
},
},
},
plugins: [],
}
=== pages/api/chat.js ===
import { OpenAI } from 'openai';
// OpenAI client for the chat endpoint. The key must come from the
// environment — the previous hardcoded fallback ('sk-proj-12345') silently
// masked a missing OPENAI_API_KEY and is a secrets anti-pattern; with no
// key set, the SDK now fails loudly at request time instead.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});
// POST /api/chat — forwards the chat history (plus an optional uploaded
// image as a data URL) to the vision model and returns the reply text.
// Responds 405 for non-POST, 400 for a malformed body, 500 on upstream error.
export default async function handler(req, res) {
  if (req.method !== 'POST') {
    return res.status(405).json({ error: 'Method not allowed' });
  }
  try {
    const { messages, image } = req.body;
    if (!Array.isArray(messages) || messages.length === 0) {
      return res.status(400).json({ error: 'messages must be a non-empty array' });
    }
    // Bug fix: `image` was previously ignored, so the vision model never saw
    // the upload. Attach it to the final user message as an image_url content
    // part per the vision API's multi-part message format.
    const payload = messages.map((msg, i) => {
      const isLast = i === messages.length - 1;
      if (image && isLast && msg.role === 'user') {
        return {
          role: 'user',
          content: [
            { type: 'text', text: msg.content || '' },
            { type: 'image_url', image_url: { url: image } },
          ],
        };
      }
      return msg;
    });
    const completion = await openai.chat.completions.create({
      model: "gpt-4-vision-preview",
      messages: payload,
      max_tokens: 500,
      temperature: 0.7,
    });
    const response = completion.choices[0]?.message?.content || "I'm sorry, I couldn't process that request.";
    res.status(200).json({ response });
  } catch (error) {
    // Log server-side; return a generic message so API details don't leak.
    console.error('Error:', error);
    res.status(500).json({ error: 'Failed to process request' });
  }
}
=== components/ChatInterface.jsx ===
import React, { useState, useRef, useEffect } from 'react';
export default function ChatInterface() {
const [messages, setMessages] = useState([]);
const [input, setInput] = useState('');
const [isLoading, setIsLoading] = useState(false);
const [image, setImage] = useState(null);
const messagesEndRef = useRef(null);
const fileInputRef = useRef(null);
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
useEffect(() => {
scrollToBottom();
}, [messages]);
const handleImageUpload = (e) => {
const file = e.target.files[0];
if (file && file.type.startsWith('image/')) {
const reader = new FileReader();
reader.onloadend = () => {
setImage(reader.result);
};
reader.readAsDataURL(file);
}
};
const handleSubmit = async (e) => {
e.preventDefault();
if (!input.trim() && !image) return;
const userMessage = {
role: 'user',
content: input,
image: image,
};
setMessages(prev => [...prev, userMessage]);
setInput('');
setIsLoading(true);
try {
const response = await fetch('/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
messages: [...messages, userMessage].map(msg => ({
role: msg.role,
content: msg.content,
})),
image: image,
}),
});
const data = await response.json();
if (response.ok) {
setMessages(prev => [...prev, {
role: 'assistant',
content: data.response,
}]);
} else {
throw new Error(data.error || 'Failed to get response');
}
} catch (error) {
console.error('Error:', error);
setMessages(prev => [...prev, {
role: 'assistant',
content: 'Sorry, I encountered an error. Please try again.',
}]);
} finally {
setIsLoading(false);
setImage(null);
if (fileInputRef.current) {
fileInputRef.current.value = '';
}
}
};
return (
{messages.length === 0 && (
Welcome to GLM-4.6V-Flash
Upload an image and ask me about it!
)}
{messages.map((message, index) => (
{message.image && (

)}
{message.content}
))}
{isLoading && (
)}
{image && (
)}
);
}