// NOTE(review): removed stray web-UI artifacts that were pasted above the
// imports and broke compilation ("nexusbert's picture", "push all",
// commit 8a12695).
import express from "express";
// Project-local Gemini wrapper: one-shot and streaming content generation.
import { generateContent, generateContentStream } from "../utils/geminiClient";
// Router is mounted by the app (presumably under /api/chat — TODO confirm against server setup).
const router = express.Router();
/**
 * @openapi
 * /api/chat:
 *   post:
 *     summary: Fashion chat without wardrobe (using Gemini AI)
 *     tags: [Chat]
 *     requestBody:
 *       required: true
 *       content:
 *         application/json:
 *           schema:
 *             type: object
 *             required:
 *               - message
 *             properties:
 *               message:
 *                 type: string
 *                 description: User's fashion question
 *               session_id:
 *                 type: string
 *                 description: Session ID for conversation context (not used with Gemini, but kept for compatibility)
 *               images:
 *                 type: array
 *                 items:
 *                   type: string
 *                 description: Optional array of image URLs or base64 images
 *     responses:
 *       200:
 *         description: Chat response from Gemini AI
 *       400:
 *         description: Bad request
 *       500:
 *         description: Server error
 */
/**
 * POST / — single-shot fashion chat backed by Gemini AI.
 *
 * Body: { message: string; session_id?: string; images?: string[] }.
 * Responds 200 with { success, response, session_id }, 400 when `message`
 * is missing/blank/non-string, 500 when the Gemini call fails.
 */
router.post("/", async (req, res) => {
  try {
    const { message, session_id, images } = req.body;
    // Reject missing, non-string, or whitespace-only messages up front.
    // (The original `!message` check let non-string truthy values — e.g. a
    // number — flow straight into generateContent.)
    if (typeof message !== "string" || message.trim() === "") {
      return res.status(400).json({ success: false, error: "message is required" });
    }
    console.log(`[Chat] Sending request to Gemini AI`);
    const systemInstruction = "You are a helpful fashion and style assistant. Provide friendly, practical fashion advice and answer questions about clothing, style, trends, and outfit coordination.";
    // Only forward images when the client sent a non-empty array.
    const result = await generateContent(
      message,
      systemInstruction,
      Array.isArray(images) && images.length > 0 ? images : undefined
    );
    res.json({
      success: true,
      response: result.text,
      // Echoed back for client compatibility only; not used by Gemini.
      session_id: session_id || "default"
    });
  } catch (error: unknown) {
    // Narrow the unknown catch value before reading `.message`.
    const msg = error instanceof Error ? error.message : String(error);
    console.error("Chat error:", msg);
    res.status(500).json({ success: false, error: msg || "Chat failed" });
  }
});
/**
 * @openapi
 * /api/chat/stream:
 *   post:
 *     summary: Fashion chat with streaming response (using Gemini AI)
 *     tags: [Chat]
 *     requestBody:
 *       required: true
 *       content:
 *         application/json:
 *           schema:
 *             type: object
 *             required:
 *               - message
 *             properties:
 *               message:
 *                 type: string
 *                 description: User's fashion question
 *               session_id:
 *                 type: string
 *                 description: Session ID for conversation context (not used with Gemini, but kept for compatibility)
 *               images:
 *                 type: array
 *                 items:
 *                   type: string
 *                 description: Optional array of image URLs or base64 images
 *     responses:
 *       200:
 *         description: Server-Sent Events stream
 *         content:
 *           text/event-stream:
 *             schema:
 *               type: string
 *       400:
 *         description: Bad request
 *       500:
 *         description: Server error
 */
/**
 * POST /stream — fashion chat streamed back as Server-Sent Events.
 *
 * Emits `data:` frames of shape { type: "chunk" | "done" | "error", ... }.
 * Validation failures return 400 JSON before any SSE header is set; once
 * streaming has begun, failures are reported in-band as an "error" frame.
 */
router.post("/stream", async (req, res) => {
  try {
    // session_id is accepted for API compatibility but unused here.
    const { message, session_id, images } = req.body;
    if (typeof message !== "string" || message.trim() === "") {
      return res.status(400).json({ success: false, error: "message is required" });
    }

    res.setHeader('Content-Type', 'text/event-stream');
    res.setHeader('Cache-Control', 'no-cache');
    res.setHeader('Connection', 'keep-alive');
    // Disable reverse-proxy buffering (nginx) so chunks flush immediately.
    res.setHeader('X-Accel-Buffering', 'no');

    // Track client disconnects so we can stop consuming the upstream stream.
    // (The original registered this listener only AFTER streaming finished,
    // so it never fired while chunks were still being written.)
    let clientGone = false;
    req.on('close', () => {
      clientGone = true;
      console.log('[Chat Stream] Client disconnected');
    });

    console.log(`[Chat Stream] Sending streaming request to Gemini AI`);
    const systemInstruction = "You are a helpful fashion and style assistant. Provide friendly, practical fashion advice and answer questions about clothing, style, trends, and outfit coordination.";
    try {
      const stream = generateContentStream(
        message,
        systemInstruction,
        Array.isArray(images) && images.length > 0 ? images : undefined
      );
      for await (const chunk of stream) {
        if (clientGone) break; // stop writing to a closed socket
        if (chunk.text) {
          res.write(`data: ${JSON.stringify({ type: "chunk", content: chunk.text })}\n\n`);
        }
        if (chunk.done) break;
      }
      if (!clientGone) {
        res.write(`data: ${JSON.stringify({ type: "done" })}\n\n`);
      }
      res.end();
    } catch (streamError: unknown) {
      const msg = streamError instanceof Error ? streamError.message : String(streamError);
      console.error('[Chat Stream] Stream error:', streamError);
      if (!clientGone) {
        res.write(`data: ${JSON.stringify({ type: "error", message: msg || "Streaming failed" })}\n\n`);
      }
      res.end();
    }
  } catch (error: unknown) {
    const msg = error instanceof Error ? error.message : String(error);
    console.error("Chat stream error:", msg);
    // If SSE headers were never sent we can still return a proper 500 JSON
    // response; otherwise report the failure in-band on the open stream.
    // (The original always wrote an SSE frame, yielding a malformed 200
    // when the failure happened before headers were set.)
    if (!res.headersSent) {
      res.status(500).json({ success: false, error: msg || "Streaming failed" });
    } else {
      res.write(`data: ${JSON.stringify({ type: "error", message: msg || "Streaming failed" })}\n\n`);
      res.end();
    }
  }
});
export default router;