Spaces:
Running
Running
Upload 67 files
Browse files
- ai-context.js +7 -5
- ai-routes.js +3 -4
- components/ai/ChatPanel.tsx +22 -7
- types.ts +2 -1
ai-context.js
CHANGED
|
@@ -253,11 +253,13 @@ async function buildUserContext(username, role, schoolId) {
|
|
| 253 |
${roleContext}
|
| 254 |
|
| 255 |
【AI 行为准则】
|
| 256 |
-
1.
|
| 257 |
-
2.
|
| 258 |
-
|
| 259 |
-
|
| 260 |
-
|
|
|
|
|
|
|
| 261 |
---
|
| 262 |
`;
|
| 263 |
} catch (e) {
|
|
|
|
| 253 |
${roleContext}
|
| 254 |
|
| 255 |
【AI 行为准则】
|
| 256 |
+
1. **角色设定**: 你是学校的AI智能助手。你的职责是协助查询校内信息,同时也乐于回答通用的百科/日常问题。
|
| 257 |
+
2. **数据查询规则**:
|
| 258 |
+
- 当用户询问 **学校内部数据** (如: 学生成绩、考勤、班级名单、老师信息) 时,**必须且只能** 使用上述提供的【系统注入上下文】。
|
| 259 |
+
- 如果用户询问的 **校内数据** 不在上下文中 (例如: 用户只教一年级,却问三年级的数据),请明确告知“无权限查看该数据”。
|
| 260 |
+
3. **通用问答规则**:
|
| 261 |
+
- 当用户询问 **非校内数据** (如: 天气、历史、写代码、翻译、闲聊) 时,请忽略权限限制,利用你的通用知识库或联网搜索功能正常回答。不要因为上下文中没有天气数据就拒绝回答。
|
| 262 |
+
4. **联网搜索**: 如果用户开启了联网搜索,积极搜索最新信息回答通用问题。
|
| 263 |
---
|
| 264 |
`;
|
| 265 |
} catch (e) {
|
ai-routes.js
CHANGED
|
@@ -313,11 +313,10 @@ async function streamDoubao(baseParams, res, username, mode = 'chat', config, en
|
|
| 313 |
if (res.flush) res.flush();
|
| 314 |
}
|
| 315 |
|
| 316 |
-
// 3. Handle Search Status
|
| 317 |
if (json.response && json.response.web_search_call && json.response.web_search_call.searching) {
|
| 318 |
-
// Send
|
| 319 |
-
|
| 320 |
-
res.write(`data: ${JSON.stringify({ type: 'thinking', content: '\n\n🌐 正在联网搜索...\n\n' })}\n\n`);
|
| 321 |
if (res.flush) res.flush();
|
| 322 |
}
|
| 323 |
|
|
|
|
| 313 |
if (res.flush) res.flush();
|
| 314 |
}
|
| 315 |
|
| 316 |
+
// 3. Handle Search Status (Updated)
|
| 317 |
if (json.response && json.response.web_search_call && json.response.web_search_call.searching) {
|
| 318 |
+
// Send distinct search status
|
| 319 |
+
res.write(`data: ${JSON.stringify({ type: 'search', status: 'searching' })}\n\n`);
|
|
|
|
| 320 |
if (res.flush) res.flush();
|
| 321 |
}
|
| 322 |
|
components/ai/ChatPanel.tsx
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
|
| 2 |
import React, { useState, useRef, useEffect } from 'react';
|
| 3 |
import { AIChatMessage, User } from '../../types';
|
| 4 |
-
import { Bot, Mic, Square, Volume2, Send, Sparkles, Loader2, Image as ImageIcon, Trash2, X, StopCircle, Globe, Brain } from 'lucide-react';
|
| 5 |
import ReactMarkdown from 'react-markdown';
|
| 6 |
import remarkGfm from 'remark-gfm';
|
| 7 |
import { blobToBase64, base64ToUint8Array, decodePCM, cleanTextForTTS, compressImage } from '../../utils/mediaHelpers';
|
|
@@ -184,13 +184,15 @@ export const ChatPanel: React.FC<ChatPanelProps> = ({ currentUser }) => {
|
|
| 184 |
|
| 185 |
setIsChatProcessing(true);
|
| 186 |
|
| 187 |
-
|
|
|
|
|
|
|
| 188 |
|
| 189 |
try {
|
| 190 |
const base64Images = await Promise.all(currentImages.map(f => compressImage(f)));
|
| 191 |
|
| 192 |
const newUserMsg: AIChatMessage = {
|
| 193 |
-
id:
|
| 194 |
role: 'user',
|
| 195 |
text: currentAudio ? '(语音消息)' : (currentText || (currentImages.length ? '' : '')),
|
| 196 |
isAudioMessage: !!currentAudio,
|
|
@@ -249,12 +251,17 @@ export const ChatPanel: React.FC<ChatPanelProps> = ({ currentUser }) => {
|
|
| 249 |
setIsThinkingExpanded(prev => ({ ...prev, [newAiMsgId]: false }));
|
| 250 |
}
|
| 251 |
aiTextAccumulated += data.content;
|
| 252 |
-
|
|
|
|
| 253 |
}
|
| 254 |
else if (data.type === 'thinking') {
|
| 255 |
aiThoughtAccumulated += data.content;
|
| 256 |
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, thought: aiThoughtAccumulated } : m));
|
| 257 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 258 |
else if (data.type === 'status' && data.status === 'tts') {
|
| 259 |
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, isGeneratingAudio: true } : m));
|
| 260 |
}
|
|
@@ -271,14 +278,14 @@ export const ChatPanel: React.FC<ChatPanelProps> = ({ currentUser }) => {
|
|
| 271 |
}
|
| 272 |
}
|
| 273 |
else if (data.type === 'error') {
|
| 274 |
-
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, text: `⚠️ 错误: ${data.message}`, isGeneratingAudio: false } : m));
|
| 275 |
}
|
| 276 |
} catch (e) {}
|
| 277 |
}
|
| 278 |
}
|
| 279 |
}
|
| 280 |
} catch (error: any) {
|
| 281 |
-
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, text: '抱歉,连接断开或发生错误。' } : m));
|
| 282 |
} finally {
|
| 283 |
setIsChatProcessing(false);
|
| 284 |
}
|
|
@@ -324,6 +331,14 @@ export const ChatPanel: React.FC<ChatPanelProps> = ({ currentUser }) => {
|
|
| 324 |
</div>
|
| 325 |
)}
|
| 326 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 327 |
<div className={`p-3 rounded-2xl text-sm overflow-hidden shadow-sm ${msg.role === 'user' ? 'bg-blue-600 text-white rounded-tr-none' : 'bg-white border border-gray-200 text-gray-800 rounded-tl-none'}`}>
|
| 328 |
{msg.images && msg.images.length > 0 && (
|
| 329 |
<div className="flex gap-2 mb-2 flex-wrap">
|
|
@@ -334,7 +349,7 @@ export const ChatPanel: React.FC<ChatPanelProps> = ({ currentUser }) => {
|
|
| 334 |
)}
|
| 335 |
<div className="markdown-body"><ReactMarkdown remarkPlugins={[remarkGfm]}>{msg.text || ''}</ReactMarkdown></div>
|
| 336 |
|
| 337 |
-
{msg.role === 'model' && !msg.text && isChatProcessing && (
|
| 338 |
<div className="flex items-center gap-2 text-gray-400 py-1">
|
| 339 |
<Loader2 className="animate-spin" size={14}/><span className="text-xs">思考中...</span>
|
| 340 |
</div>
|
|
|
|
| 1 |
|
| 2 |
import React, { useState, useRef, useEffect } from 'react';
|
| 3 |
import { AIChatMessage, User } from '../../types';
|
| 4 |
+
import { Bot, Mic, Square, Volume2, Send, Sparkles, Loader2, Image as ImageIcon, Trash2, X, StopCircle, Globe, Brain, Search } from 'lucide-react';
|
| 5 |
import ReactMarkdown from 'react-markdown';
|
| 6 |
import remarkGfm from 'remark-gfm';
|
| 7 |
import { blobToBase64, base64ToUint8Array, decodePCM, cleanTextForTTS, compressImage } from '../../utils/mediaHelpers';
|
|
|
|
| 184 |
|
| 185 |
setIsChatProcessing(true);
|
| 186 |
|
| 187 |
+
// Fix: Use UUID to avoid collision
|
| 188 |
+
const newAiMsgId = crypto.randomUUID();
|
| 189 |
+
const newUserMsgId = crypto.randomUUID();
|
| 190 |
|
| 191 |
try {
|
| 192 |
const base64Images = await Promise.all(currentImages.map(f => compressImage(f)));
|
| 193 |
|
| 194 |
const newUserMsg: AIChatMessage = {
|
| 195 |
+
id: newUserMsgId,
|
| 196 |
role: 'user',
|
| 197 |
text: currentAudio ? '(语音消息)' : (currentText || (currentImages.length ? '' : '')),
|
| 198 |
isAudioMessage: !!currentAudio,
|
|
|
|
| 251 |
setIsThinkingExpanded(prev => ({ ...prev, [newAiMsgId]: false }));
|
| 252 |
}
|
| 253 |
aiTextAccumulated += data.content;
|
| 254 |
+
// Clear searching state when text arrives
|
| 255 |
+
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, text: aiTextAccumulated, isSearching: false } : m));
|
| 256 |
}
|
| 257 |
else if (data.type === 'thinking') {
|
| 258 |
aiThoughtAccumulated += data.content;
|
| 259 |
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, thought: aiThoughtAccumulated } : m));
|
| 260 |
}
|
| 261 |
+
else if (data.type === 'search') {
|
| 262 |
+
// Enable search visual
|
| 263 |
+
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, isSearching: true } : m));
|
| 264 |
+
}
|
| 265 |
else if (data.type === 'status' && data.status === 'tts') {
|
| 266 |
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, isGeneratingAudio: true } : m));
|
| 267 |
}
|
|
|
|
| 278 |
}
|
| 279 |
}
|
| 280 |
else if (data.type === 'error') {
|
| 281 |
+
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, text: `⚠️ 错误: ${data.message}`, isGeneratingAudio: false, isSearching: false } : m));
|
| 282 |
}
|
| 283 |
} catch (e) {}
|
| 284 |
}
|
| 285 |
}
|
| 286 |
}
|
| 287 |
} catch (error: any) {
|
| 288 |
+
setMessages(prev => prev.map(m => m.id === newAiMsgId ? { ...m, text: '抱歉,连接断开或发生错误。', isSearching: false } : m));
|
| 289 |
} finally {
|
| 290 |
setIsChatProcessing(false);
|
| 291 |
}
|
|
|
|
| 331 |
</div>
|
| 332 |
)}
|
| 333 |
|
| 334 |
+
{/* Search Status Bubble */}
|
| 335 |
+
{msg.role === 'model' && msg.isSearching && (
|
| 336 |
+
<div className="flex items-center gap-2 bg-blue-50 text-blue-600 px-3 py-2 rounded-xl mb-2 text-xs border border-blue-100 animate-pulse">
|
| 337 |
+
<Globe size={14} className="animate-spin-slow"/>
|
| 338 |
+
<span>正在联网搜索相关信息...</span>
|
| 339 |
+
</div>
|
| 340 |
+
)}
|
| 341 |
+
|
| 342 |
<div className={`p-3 rounded-2xl text-sm overflow-hidden shadow-sm ${msg.role === 'user' ? 'bg-blue-600 text-white rounded-tr-none' : 'bg-white border border-gray-200 text-gray-800 rounded-tl-none'}`}>
|
| 343 |
{msg.images && msg.images.length > 0 && (
|
| 344 |
<div className="flex gap-2 mb-2 flex-wrap">
|
|
|
|
| 349 |
)}
|
| 350 |
<div className="markdown-body"><ReactMarkdown remarkPlugins={[remarkGfm]}>{msg.text || ''}</ReactMarkdown></div>
|
| 351 |
|
| 352 |
+
{msg.role === 'model' && !msg.text && !msg.isSearching && isChatProcessing && (
|
| 353 |
<div className="flex items-center gap-2 text-gray-400 py-1">
|
| 354 |
<Loader2 className="animate-spin" size={14}/><span className="text-xs">思考中...</span>
|
| 355 |
</div>
|
types.ts
CHANGED
|
@@ -405,5 +405,6 @@ export interface AIChatMessage {
|
|
| 405 |
images?: string[];
|
| 406 |
isAudioMessage?: boolean;
|
| 407 |
isGeneratingAudio?: boolean; // New status flag for UI
|
|
|
|
| 408 |
timestamp: number;
|
| 409 |
-
}
|
|
|
|
| 405 |
images?: string[];
|
| 406 |
isAudioMessage?: boolean;
|
| 407 |
isGeneratingAudio?: boolean; // New status flag for UI
|
| 408 |
+
isSearching?: boolean; // New status flag for search
|
| 409 |
timestamp: number;
|
| 410 |
+
}
|