VanHri committed on
Commit
0b67bc2
·
verified ·
1 Parent(s): b8997d8

Upload 41 files

Browse files
Files changed (41) hide show
  1. vanhri-ai---the-mind-that-builds-worlds/.env.local +1 -0
  2. vanhri-ai---the-mind-that-builds-worlds/.gitignore +24 -0
  3. vanhri-ai---the-mind-that-builds-worlds/App.tsx +697 -0
  4. vanhri-ai---the-mind-that-builds-worlds/README.md +14 -0
  5. vanhri-ai---the-mind-that-builds-worlds/components/CallUI.tsx +306 -0
  6. vanhri-ai---the-mind-that-builds-worlds/components/ChartDisplay.tsx +116 -0
  7. vanhri-ai---the-mind-that-builds-worlds/components/ChatWindow.tsx +125 -0
  8. vanhri-ai---the-mind-that-builds-worlds/components/EmailChatModal.tsx +0 -0
  9. vanhri-ai---the-mind-that-builds-worlds/components/Icons.tsx +260 -0
  10. vanhri-ai---the-mind-that-builds-worlds/components/ImageEditorModal.tsx +101 -0
  11. vanhri-ai---the-mind-that-builds-worlds/components/Interactive3DModel.tsx +118 -0
  12. vanhri-ai---the-mind-that-builds-worlds/components/Interactive3DPhoto.tsx +0 -0
  13. vanhri-ai---the-mind-that-builds-worlds/components/Login.tsx +92 -0
  14. vanhri-ai---the-mind-that-builds-worlds/components/MessageBubble.tsx +220 -0
  15. vanhri-ai---the-mind-that-builds-worlds/components/MindMapDisplay.tsx +40 -0
  16. vanhri-ai---the-mind-that-builds-worlds/components/NovaAvatarDisplay.tsx +0 -0
  17. vanhri-ai---the-mind-that-builds-worlds/components/NovaVisualizer.tsx +245 -0
  18. vanhri-ai---the-mind-that-builds-worlds/components/ProactiveSuggestion.tsx +35 -0
  19. vanhri-ai---the-mind-that-builds-worlds/components/PromptInput.tsx +259 -0
  20. vanhri-ai---the-mind-that-builds-worlds/components/SettingsModal.tsx +0 -0
  21. vanhri-ai---the-mind-that-builds-worlds/components/Sidebar.tsx +176 -0
  22. vanhri-ai---the-mind-that-builds-worlds/components/SignUp.tsx +106 -0
  23. vanhri-ai---the-mind-that-builds-worlds/components/ThinkingIndicator.tsx +53 -0
  24. vanhri-ai---the-mind-that-builds-worlds/components/Verify.tsx +88 -0
  25. vanhri-ai---the-mind-that-builds-worlds/components/VisualSolverModal.tsx +174 -0
  26. vanhri-ai---the-mind-that-builds-worlds/components/Welcome.tsx +50 -0
  27. vanhri-ai---the-mind-that-builds-worlds/constants.tsx +118 -0
  28. vanhri-ai---the-mind-that-builds-worlds/index.html +97 -0
  29. vanhri-ai---the-mind-that-builds-worlds/index.tsx +16 -0
  30. vanhri-ai---the-mind-that-builds-worlds/manifest.json +10 -0
  31. vanhri-ai---the-mind-that-builds-worlds/metadata.json +9 -0
  32. vanhri-ai---the-mind-that-builds-worlds/package.json +26 -0
  33. vanhri-ai---the-mind-that-builds-worlds/services/authService.ts +100 -0
  34. vanhri-ai---the-mind-that-builds-worlds/services/cryptoService.ts +145 -0
  35. vanhri-ai---the-mind-that-builds-worlds/services/emailService.ts +57 -0
  36. vanhri-ai---the-mind-that-builds-worlds/services/geminiService.ts +222 -0
  37. vanhri-ai---the-mind-that-builds-worlds/services/ttsService.ts +85 -0
  38. vanhri-ai---the-mind-that-builds-worlds/sw.js +70 -0
  39. vanhri-ai---the-mind-that-builds-worlds/tsconfig.json +30 -0
  40. vanhri-ai---the-mind-that-builds-worlds/types.ts +87 -0
  41. vanhri-ai---the-mind-that-builds-worlds/vite.config.ts +17 -0
vanhri-ai---the-mind-that-builds-worlds/.env.local ADDED
@@ -0,0 +1 @@
 
 
1
+ GEMINI_API_KEY=PLACEHOLDER_API_KEY
vanhri-ai---the-mind-that-builds-worlds/.gitignore ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Logs
2
+ logs
3
+ *.log
4
+ npm-debug.log*
5
+ yarn-debug.log*
6
+ yarn-error.log*
7
+ pnpm-debug.log*
8
+ lerna-debug.log*
9
+
10
+ node_modules
11
+ dist
12
+ dist-ssr
13
+ *.local
14
+
15
+ # Editor directories and files
16
+ .vscode/*
17
+ !.vscode/extensions.json
18
+ .idea
19
+ .DS_Store
20
+ *.suo
21
+ *.ntvs*
22
+ *.njsproj
23
+ *.sln
24
+ *.sw?
vanhri-ai---the-mind-that-builds-worlds/App.tsx ADDED
@@ -0,0 +1,697 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import React, { useState, useCallback, useEffect } from 'react';
3
+ import ChatWindow from './components/ChatWindow';
4
+ import Login from './components/Login';
5
+ import SignUp from './components/SignUp';
6
+ import Verify from './components/Verify';
7
+ import Welcome from './components/Welcome';
8
+ import Sidebar from './components/Sidebar';
9
+ import { VANHRI_PERSONA, AGENT_PERSONA } from './constants';
10
+ import { Message, Memory, Auth, GroundingCitation, MindMapData, ProactiveSuggestion, NotebookSource, ChartData } from './types';
11
+ import { generateTextStream, generateImage, generateJson } from './services/geminiService';
12
+ import { authService } from './services/authService';
13
+ import { cryptoService } from './services/cryptoService';
14
+ import { emailService } from './services/emailService';
15
+ import { ttsService } from './services/ttsService';
16
+ import ImageEditorModal from './components/ImageEditorModal';
17
+ import CallUI from './components/CallUI';
18
+ import VisualSolverModal from './components/VisualSolverModal';
19
+ import { Bot } from './components/Icons';
20
+ import { GoogleGenAI, Chat, Type, Part } from "@google/genai";
21
+
22
+ // Helper to extract a memory JSON object from the AI response
23
+ const extractMemoryPayload = (text: string): { cleanText: string; payload: any | null } => {
24
+ const memoryJsonRegex = /{\s*"type"\s*:\s*"memory"[\s\S]*?}/;
25
+ const match = text.match(memoryJsonRegex);
26
+
27
+ if (match) {
28
+ const jsonString = match[0];
29
+ try {
30
+ const payload = JSON.parse(jsonString);
31
+ if (payload.type === 'memory' && payload.memory) {
32
+ const cleanText = text.replace(jsonString, '').trim();
33
+ return { cleanText, payload };
34
+ }
35
+ } catch (e) { /* Not a valid JSON or doesn't match schema, ignore. */ }
36
+ }
37
+ return { cleanText: text, payload: null };
38
+ };
39
+
40
+
41
+ const App: React.FC = () => {
42
+ const [auth, setAuth] = useState<Auth>({ status: 'loading' });
43
+ const [messages, setMessages] = useState<Message[]>([]);
44
+ const [memories, setMemories] = useState<Memory[]>([]);
45
+ const [notebookSources, setNotebookSources] = useState<NotebookSource[]>([]);
46
+ const [isLoading, setIsLoading] = useState<boolean>(false);
47
+ const [isSidebarOpen, setIsSidebarOpen] = useState(true);
48
+ const [isAutoPlayAudio, setIsAutoPlayAudio] = useState<boolean>(true);
49
+ const [isAiSpeaking, setIsAiSpeaking] = useState<boolean>(false);
50
+ const [proactiveSuggestion, setProactiveSuggestion] = useState<ProactiveSuggestion | null>(null);
51
+ const [isAgentChatActive, setIsAgentChatActive] = useState<boolean>(false);
52
+ const [agentChatSession, setAgentChatSession] = useState<Chat | null>(null);
53
+ const [editingImage, setEditingImage] = useState<{ messageId: string; imageIndex: number | null; imageData: string; } | null>(null);
54
+ const [callState, setCallState] = useState<{ active: boolean; type: 'video' | 'audio' | null }>({ active: false, type: null });
55
+ const [isVisualSolverOpen, setIsVisualSolverOpen] = useState(false);
56
+
57
+ const loadUserSessionData = useCallback(async (user: {id: string; email: string}) => {
58
+ const savedMessages = await cryptoService.getEncryptedData(`vanhri-chat-${user.id}`);
59
+ const savedMemories = await cryptoService.getEncryptedData(`vanhri-memories-${user.id}`);
60
+ const savedAudioSetting = await cryptoService.getEncryptedData(`vanhri-settings-${user.id}`);
61
+ const savedNotebookSources = await cryptoService.getEncryptedData(`vanhri-notebook-${user.id}`);
62
+
63
+ if (savedMessages && savedMessages.length > 0) {
64
+ setMessages(savedMessages);
65
+ // Proactive Suggestion Logic
66
+ if (savedMessages.length > 10 && savedMessages.filter(m => m.sender === 'user').length > 4) {
67
+ setProactiveSuggestion({
68
+ id: 'suggestion-summarize',
69
+ text: 'This is a long conversation. Would you like me to summarize it?',
70
+ actionText: 'Summarize our chat',
71
+ action: 'Please summarize our conversation so far, focusing on the key topics and decisions made.',
72
+ });
73
+ }
74
+
75
+ } else {
76
+ const welcomeText = user.id === 'guest-session'
77
+ ? 'I am Vanhri AI. It is a pleasure to connect. How may we begin our journey together? Note: Your chat history in guest mode is saved only in this browser.'
78
+ : `Welcome, ${user.email.split('@')[0]}. I am Vanhri AI. It is a pleasure to connect. How may we begin our journey together?`;
79
+
80
+ const initialMessage: Message = { id: 'init', sender: 'ai', text: welcomeText };
81
+ setMessages([initialMessage]);
82
+ // Do not save the initial message here. This prevents overwriting a potentially corrupt
83
+ // but still valuable history on a load error. The history will be created on the first user message.
84
+ }
85
+
86
+ setMemories(savedMemories || []);
87
+ setNotebookSources(savedNotebookSources || []);
88
+ if (savedAudioSetting !== null && savedAudioSetting !== undefined) {
89
+ setIsAutoPlayAudio(savedAudioSetting.autoPlayAudio);
90
+ }
91
+ }, []);
92
+
93
+ useEffect(() => {
94
+ const checkSession = async () => {
95
+ try {
96
+ const session = authService.getCurrentSession();
97
+ if (session) {
98
+ setAuth({ status: 'authenticated', user: session });
99
+ await loadUserSessionData(session);
100
+ } else {
101
+ setAuth({ status: 'landing' });
102
+ }
103
+ } catch (error) {
104
+ console.error("Failed to initialize user session:", error);
105
+ setAuth({ status: 'landing' }); // Fallback to landing on any error
106
+ }
107
+ };
108
+ // Initialize sidebar state based on screen size
109
+ setIsSidebarOpen(window.innerWidth > 768);
110
+ checkSession();
111
+ }, [loadUserSessionData]);
112
+
113
+ const handleLogin = useCallback(async (user: {id: string, email: string}) => {
114
+ setAuth({ status: 'authenticated', user });
115
+ await loadUserSessionData(user);
116
+ }, [loadUserSessionData]);
117
+
118
+ const handleGuest = useCallback(async () => {
119
+ const guestUser = { id: 'guest-session', email: 'Guest' };
120
+ setAuth({ status: 'guest', user: guestUser });
121
+ await loadUserSessionData(guestUser);
122
+ }, [loadUserSessionData]);
123
+
124
+ const handleLogout = () => {
125
+ if(auth.status === 'authenticated' || auth.status === 'guest') {
126
+ if (window.confirm('Are you sure you want to end this session?')) {
127
+ if (auth.status === 'authenticated') authService.logout();
128
+ ttsService.cancel();
129
+ setIsAiSpeaking(false);
130
+ setAuth({ status: 'landing' });
131
+ setMessages([]);
132
+ setMemories([]);
133
+ setNotebookSources([]);
134
+ setProactiveSuggestion(null);
135
+ setIsAgentChatActive(false);
136
+ setAgentChatSession(null);
137
+ }
138
+ }
139
+ };
140
+
141
+ const clearChatHistory = async () => {
142
+ if ((auth.status === 'authenticated' || auth.status === 'guest') && window.confirm('Are you sure you want to permanently delete this chat history?')) {
143
+ ttsService.cancel();
144
+ setIsAiSpeaking(false);
145
+ const initialMessage: Message = {
146
+ id: 'init-cleared',
147
+ sender: 'ai',
148
+ text: `The past is clear. We have a fresh start, ${auth.user.email.split('@')[0]}. Let us create something new.`,
149
+ };
150
+ setMessages([initialMessage]);
151
+ setProactiveSuggestion(null);
152
+ setIsAgentChatActive(false);
153
+ setAgentChatSession(null);
154
+ await cryptoService.setEncryptedData(`vanhri-chat-${auth.user.id}`, [initialMessage]);
155
+ }
156
+ }
157
+
158
+ const clearMemories = async () => {
159
+ if ((auth.status === 'authenticated' || auth.status === 'guest') && window.confirm("Are you sure you want to permanently erase all of Vanhri AI's memories of this conversation?")) {
160
+ setMemories([]);
161
+ await cryptoService.removeEncryptedData(`vanhri-memories-${auth.user.id}`);
162
+ }
163
+ }
164
+
165
+ const clearNotebook = async () => {
166
+ if ((auth.status === 'authenticated' || auth.status === 'guest') && window.confirm("Are you sure you want to permanently remove all documents from your notebook?")) {
167
+ setNotebookSources([]);
168
+ await cryptoService.removeEncryptedData(`vanhri-notebook-${auth.user.id}`);
169
+ }
170
+ };
171
+
172
+ const handleAddSource = async (name: string, content: string, mimeType: string) => {
173
+ if ((auth.status !== 'authenticated' && auth.status !== 'guest')) return;
174
+ const { user } = auth;
175
+
176
+ const newSource: NotebookSource = {
177
+ id: `source-${Date.now()}`,
178
+ name,
179
+ content,
180
+ mimeType
181
+ };
182
+ const updatedSources = [...notebookSources, newSource];
183
+ setNotebookSources(updatedSources);
184
+ await cryptoService.setEncryptedData(`vanhri-notebook-${user.id}`, updatedSources);
185
+
186
+ const systemMessage: Message = {
187
+ id: `sys-${Date.now()}`,
188
+ sender: 'ai',
189
+ text: `*Source added to Notebook: "${name}"*`
190
+ };
191
+ const updatedMessages = [...messages, systemMessage];
192
+ setMessages(updatedMessages);
193
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, updatedMessages);
194
+ };
195
+
196
+
197
+ const handleStartCall = (type: 'video' | 'audio') => {
198
+ ttsService.cancel();
199
+ setIsAiSpeaking(false);
200
+ setCallState({ active: true, type });
201
+ };
202
+
203
+ const handleEndCall = () => {
204
+ setCallState({ active: false, type: null });
205
+ };
206
+
207
+ const handleEditImage = (messageId: string, imageIndex: number | null, imageData: string) => {
208
+ setEditingImage({ messageId, imageIndex, imageData });
209
+ };
210
+
211
+ const handleSaveEditedImage = async (newImageData: string) => {
212
+ if (!editingImage || (auth.status !== 'authenticated' && auth.status !== 'guest')) return;
213
+
214
+ const { user } = auth;
215
+ const { messageId, imageIndex } = editingImage;
216
+
217
+ const updatedMessages = messages.map(msg => {
218
+ if (msg.id === messageId) {
219
+ const updatedMsg = { ...msg };
220
+ const base64Data = newImageData.split(',')[1];
221
+
222
+ if (imageIndex !== null && updatedMsg.generatedImages) {
223
+ // Create a new array for generatedImages to ensure state update
224
+ const newGeneratedImages = [...updatedMsg.generatedImages];
225
+ newGeneratedImages[imageIndex] = base64Data;
226
+ updatedMsg.generatedImages = newGeneratedImages;
227
+ } else if (imageIndex === null) {
228
+ updatedMsg.image = newImageData; // this is already a data URL
229
+ }
230
+ return updatedMsg;
231
+ }
232
+ return msg;
233
+ });
234
+
235
+ setMessages(updatedMessages);
236
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, updatedMessages);
237
+ setEditingImage(null);
238
+ };
239
+
240
+
241
+ const handleHandoffToAgent = useCallback(async () => {
242
+ if (isAgentChatActive || isLoading || (auth.status !== 'authenticated' && auth.status !== 'guest') || !process.env.API_KEY) {
243
+ if (!process.env.API_KEY) {
244
+ const errorMsg: Message = { id: `sys-err-${Date.now()}`, sender: 'ai', text: 'Live agent handoff is currently unavailable. The system is not configured with the required API key.' };
245
+ setMessages(prev => [...prev, errorMsg]);
246
+ }
247
+ return;
248
+ };
249
+ const { user } = auth;
250
+
251
+ ttsService.cancel();
252
+ setIsAiSpeaking(false);
253
+ setIsLoading(true);
254
+ setIsAgentChatActive(true);
255
+
256
+ const handoffMsg: Message = { id: `sys-${Date.now()}`, sender: 'ai', text: '*Connecting you to a live support agent...*' };
257
+ const updatedMessages = [...messages, handoffMsg];
258
+ setMessages(updatedMessages);
259
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, updatedMessages);
260
+
261
+ try {
262
+ const ai = new GoogleGenAI({apiKey: process.env.API_KEY});
263
+ const chat = ai.chats.create({
264
+ model: 'gemini-2.5-flash',
265
+ config: { systemInstruction: AGENT_PERSONA }
266
+ });
267
+ setAgentChatSession(chat);
268
+
269
+ const result = await chat.sendMessage({ message: "Hello, introduce yourself as Alex and ask how you can help." });
270
+ const agentWelcomeMsg: Message = { id: `agent-${Date.now()}`, sender: 'ai', text: result.text, isAgent: true };
271
+
272
+ setMessages(prev => {
273
+ const finalMessages = [...prev, agentWelcomeMsg];
274
+ cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalMessages);
275
+ return finalMessages;
276
+ });
277
+
278
+ } catch(error) {
279
+ console.error("Agent chat initialization failed:", error);
280
+ const errorMsg: Message = { id: `sys-err-${Date.now()}`, sender: 'ai', text: 'Sorry, I was unable to connect to a live agent at this time. Please try again later.' };
281
+ setMessages(prev => [...prev, errorMsg]);
282
+ setIsAgentChatActive(false);
283
+ } finally {
284
+ setIsLoading(false);
285
+ }
286
+ }, [auth, isAgentChatActive, isLoading, messages]);
287
+
288
+
289
+ const handleEndAgentChat = useCallback(async () => {
290
+ if (!isAgentChatActive || isLoading || (auth.status !== 'authenticated' && auth.status !== 'guest')) return;
291
+ const { user } = auth;
292
+
293
+ ttsService.cancel();
294
+ setIsAiSpeaking(false);
295
+ setIsAgentChatActive(false);
296
+ setAgentChatSession(null);
297
+
298
+ const agentGoodbyeMsg: Message = { id: `agent-${Date.now()}`, sender: 'ai', text: 'Thank you for your time. I\'m transferring you back to Vanhri AI now.', isAgent: true };
299
+ const novaReturnMsg: Message = { id: `ai-${Date.now()}`, sender: 'ai', text: 'I am back. How may we continue our journey?' };
300
+
301
+ const finalMessages = [...messages, agentGoodbyeMsg, novaReturnMsg];
302
+ setMessages(finalMessages);
303
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalMessages);
304
+
305
+ }, [auth, isAgentChatActive, isLoading, messages]);
306
+
307
+ const handleSendMessage = useCallback(async (text: string, image: string | null) => {
308
+ if ((!text && !image) || (auth.status !== 'authenticated' && auth.status !== 'guest')) return;
309
+ const { user } = auth;
310
+
311
+ // Stop any currently playing audio & remove suggestion
312
+ ttsService.cancel();
313
+ setIsAiSpeaking(false);
314
+ if(proactiveSuggestion) setProactiveSuggestion(null);
315
+
316
+ const userMessage: Message = {
317
+ id: `user-${Date.now()}`,
318
+ sender: 'user',
319
+ text: text,
320
+ image: image ? `data:image/jpeg;base64,${image}` : null,
321
+ };
322
+
323
+ let newMessages = [...messages, userMessage];
324
+ setMessages(newMessages);
325
+ setIsLoading(true);
326
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, newMessages);
327
+
328
+ const aiMessageId = `ai-${Date.now()}`;
329
+ let aiMessagePlaceholder: Message = { id: aiMessageId, sender: 'ai', text: '', isAgent: isAgentChatActive };
330
+ setMessages(prev => [...prev, aiMessagePlaceholder]);
331
+
332
+ try {
333
+ // Agent Chat Logic
334
+ if (isAgentChatActive && agentChatSession) {
335
+ const stream = await agentChatSession.sendMessageStream({ message: text });
336
+ let fullText = '';
337
+ for await (const chunk of stream) {
338
+ fullText += chunk.text;
339
+ const agentResponse: Message = { id: aiMessageId, sender: 'ai', text: fullText, isAgent: true };
340
+ setMessages([...newMessages, agentResponse]);
341
+ }
342
+ const finalMessages = [...newMessages, { id: aiMessageId, sender: 'ai', text: fullText, isAgent: true }];
343
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalMessages);
344
+ setIsLoading(false);
345
+ return;
346
+ }
347
+
348
+ const chatHistory = newMessages.slice(-10);
349
+
350
+ // Command Handling
351
+ if (text.trim().startsWith('/chart ')) {
352
+ const topic = text.trim().substring(7).trim();
353
+ const prompt = `Generate a chart for the topic: "${topic}". The chart type should be one of 'bar', 'line', or 'pie' that best represents the data. Follow all chart rules in your persona precisely.`;
354
+ aiMessagePlaceholder = { ...aiMessagePlaceholder, text: `*Vanhri AI is visualizing a chart for: ${topic}*` };
355
+ setMessages(prev => prev.map(msg => msg.id === aiMessageId ? aiMessagePlaceholder : msg));
356
+
357
+ const chartSchema = {
358
+ type: Type.OBJECT,
359
+ properties: {
360
+ type: { type: Type.STRING, enum: ['bar', 'line', 'pie'] },
361
+ title: { type: Type.STRING },
362
+ keys: {
363
+ type: Type.ARRAY,
364
+ items: {
365
+ type: Type.OBJECT,
366
+ properties: {
367
+ name: { type: Type.STRING },
368
+ color: { type: Type.STRING }
369
+ },
370
+ required: ['name', 'color']
371
+ }
372
+ },
373
+ data: {
374
+ type: Type.ARRAY,
375
+ items: {
376
+ type: Type.OBJECT,
377
+ properties: {
378
+ label: { type: Type.STRING },
379
+ values: { type: Type.ARRAY, items: { type: Type.NUMBER } }
380
+ },
381
+ required: ['label', 'values']
382
+ }
383
+ }
384
+ },
385
+ required: ['type', 'title', 'keys', 'data']
386
+ };
387
+
388
+ const { json: chartJson, error } = await generateJson(prompt, VANHRI_PERSONA, chartSchema);
389
+
390
+ let finalAiMessage: Message;
391
+ if (error) {
392
+ finalAiMessage = { ...aiMessagePlaceholder, text: error };
393
+ } else if (chartJson && chartJson.type && chartJson.data) {
394
+ finalAiMessage = { ...aiMessagePlaceholder, text: `Here is a chart for "${topic}":`, chartData: chartJson as ChartData };
395
+ } else {
396
+ finalAiMessage = { ...aiMessagePlaceholder, text: "I'm sorry, I was unable to generate a chart for that topic. The response from the AI was not in the expected format." };
397
+ }
398
+ const finalMessages = [...newMessages, finalAiMessage];
399
+ setMessages(finalMessages);
400
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalMessages);
401
+
402
+ } else if (text.trim().startsWith('/mindmap ')) {
403
+ const topic = text.trim().substring(9).trim();
404
+ const prompt = `Generate a detailed, visually-structured mind map for the topic: "${topic}". Follow all mind map rules in your persona precisely.`;
405
+ aiMessagePlaceholder = { ...aiMessagePlaceholder, text: `*Vanhri AI is visualizing a mind map for: ${topic}*` };
406
+ setMessages(prev => prev.map(msg => msg.id === aiMessageId ? aiMessagePlaceholder : msg));
407
+
408
+ const mindMapSchema = {
409
+ type: Type.OBJECT,
410
+ properties: {
411
+ type: { type: Type.STRING, description: "Should always be 'mindmap'" },
412
+ data: {
413
+ type: Type.OBJECT,
414
+ properties: {
415
+ nodes: {
416
+ type: Type.ARRAY,
417
+ items: {
418
+ type: Type.OBJECT,
419
+ properties: {
420
+ id: { type: Type.STRING },
421
+ position: {
422
+ type: Type.OBJECT,
423
+ properties: { x: { type: Type.NUMBER }, y: { type: Type.NUMBER } },
424
+ required: ['x', 'y'],
425
+ },
426
+ data: {
427
+ type: Type.OBJECT,
428
+ properties: { label: { type: Type.STRING } },
429
+ required: ['label'],
430
+ },
431
+ type: { type: Type.STRING },
432
+ style: {
433
+ type: Type.OBJECT,
434
+ properties: {
435
+ backgroundColor: { type: Type.STRING },
436
+ color: { type: Type.STRING },
437
+ width: { type: Type.NUMBER },
438
+ fontSize: { type: Type.STRING },
439
+ fontWeight: { type: Type.STRING },
440
+ }
441
+ }
442
+ },
443
+ required: ['id', 'position', 'data'],
444
+ }
445
+ },
446
+ edges: {
447
+ type: Type.ARRAY,
448
+ items: {
449
+ type: Type.OBJECT,
450
+ properties: {
451
+ id: { type: Type.STRING },
452
+ source: { type: Type.STRING },
453
+ target: { type: Type.STRING },
454
+ animated: { type: Type.BOOLEAN }
455
+ },
456
+ required: ['id', 'source', 'target'],
457
+ }
458
+ }
459
+ },
460
+ required: ['nodes', 'edges'],
461
+ }
462
+ },
463
+ required: ['type', 'data'],
464
+ };
465
+
466
+ const { json: mindMapJson, error } = await generateJson(prompt, VANHRI_PERSONA, mindMapSchema);
467
+
468
+ let finalAiMessage: Message;
469
+ if (error) {
470
+ finalAiMessage = { ...aiMessagePlaceholder, text: error };
471
+ } else if (mindMapJson && mindMapJson.type === 'mindmap' && mindMapJson.data) {
472
+ finalAiMessage = { ...aiMessagePlaceholder, text: `Here is a mind map for "${topic}":`, mindMap: mindMapJson.data };
473
+ } else {
474
+ finalAiMessage = { ...aiMessagePlaceholder, text: "I'm sorry, I was unable to generate a mind map for that topic. The response from the AI was not in the expected format." };
475
+ }
476
+ const finalMessages = [...newMessages, finalAiMessage];
477
+ setMessages(finalMessages);
478
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalMessages);
479
+
480
+ } else {
481
+ // Standard text generation with autonomous actions
482
+ const searchKeywords = [
483
+ 'current events', 'latest news', 'who won', 'what is the weather',
484
+ 'medicine', 'medication', 'symptom', 'treatment', 'condition', 'health', 'doctor', 'hospital', 'clinic', 'pharmacy',
485
+ 'stock price', 'market data', 'financial', 'economic', 'investment', 'stock', 'shares', 'dow jones', 'nasdaq', 'crypto',
486
+ 'research', 'study', 'findings', 'scientific paper', 'what is the latest on'
487
+ ];
488
+ const useSearch = searchKeywords.some(keyword => text.toLowerCase().includes(keyword));
489
+
490
+ const imageB64 = image ? image : null;
491
+
492
+ const notebookParts: Part[] = [];
493
+ if (notebookSources.length > 0) {
494
+ notebookParts.push({ text: "The user has provided the following documents as a knowledge base. Use them to answer the user's query." });
495
+ for (const source of notebookSources) { /* ... (notebook logic unchanged) ... */ }
496
+ }
497
+
498
+ const stream = generateTextStream(text, imageB64, chatHistory, VANHRI_PERSONA, notebookParts, { useSearch });
499
+
500
+ let fullText = '';
501
+ let finalAiMessage: Message | null = null;
502
+ let citations: GroundingCitation[] = [];
503
+
504
+ let finalMessages = newMessages;
505
+
506
+ for await (const chunk of stream) {
507
+ if(chunk.citations && chunk.citations.length > 0) {
508
+ citations = chunk.citations;
509
+ }
510
+ fullText += chunk.text || '';
511
+
512
+ const { cleanText, payload } = extractMemoryPayload(fullText);
513
+
514
+ finalAiMessage = {
515
+ id: aiMessageId,
516
+ sender: 'ai',
517
+ text: cleanText,
518
+ groundingCitations: citations,
519
+ memory: payload?.memory,
520
+ isAgent: isAgentChatActive,
521
+ };
522
+
523
+ finalMessages = [...newMessages, finalAiMessage];
524
+ setMessages(finalMessages);
525
+ }
526
+
527
+ if (finalAiMessage) {
528
+ // Autonomous Action Check
529
+ const imagineMatch = finalAiMessage.text.match(/\[VANHRI_IMAGINE:\s*(.*?)\]/s);
530
+ const threeDMatch = finalAiMessage.text.match(/\[VANHRI_3D:\s*(.*?)\]/s);
531
+ const cleanText = finalAiMessage.text.replace(/\[VANHRI_IMAGINE:.*?\]/s, '').replace(/\[VANHRI_3D:.*?\]/s, '').trim();
532
+
533
+ let messageToUpdate = { ...finalAiMessage, text: cleanText };
534
+
535
+ if (imagineMatch && imagineMatch[1]) {
536
+ const imagePrompt = imagineMatch[1];
537
+ const statusUpdateMsg = {...messageToUpdate, text: `${cleanText}\n\n*Vanhri AI is materializing a cinematic image: ${imagePrompt}*`};
538
+ setMessages([...newMessages, statusUpdateMsg]);
539
+ try {
540
+ const images = await generateImage(imagePrompt, 2);
541
+ const finalImageMessage = { ...statusUpdateMsg, generatedImages: images, text: cleanText };
542
+ const finalHistory = [...newMessages, finalImageMessage];
543
+ setMessages(finalHistory);
544
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalHistory);
545
+ } catch (e) {
546
+ const errorMsg = { ...statusUpdateMsg, text: `${statusUpdateMsg.text}\n\nI encountered an error generating the image: ${e instanceof Error ? e.message : 'Unknown error'}` };
547
+ setMessages([...newMessages, errorMsg]);
548
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, [...newMessages, errorMsg]);
549
+ }
550
+ } else if (threeDMatch && threeDMatch[1]) {
551
+ const threeDPrompt = threeDMatch[1];
552
+ const statusUpdateMsg = {...messageToUpdate, text: `${cleanText}\n\n*Vanhri AI is constructing an interactive 3D model: ${threeDPrompt}*`};
553
+ setMessages([...newMessages, statusUpdateMsg]);
554
+ const threeJsSchema = {
555
+ type: Type.OBJECT,
556
+ properties: { code: { type: Type.STRING, description: "The JavaScript code for three.js" } },
557
+ required: ['code']
558
+ };
559
+ const { json: parsedJson, error } = await generateJson(`Generate JavaScript for a three.js scene representing: "${threeDPrompt}". Follow all 3D model generation rules in your persona.`, VANHRI_PERSONA, threeJsSchema);
560
+
561
+ let final3DMessage: Message;
562
+ if (error) {
563
+ final3DMessage = { ...statusUpdateMsg, text: `${cleanText}\n\n${error}` };
564
+ } else if (parsedJson && parsedJson.code) {
565
+ final3DMessage = { ...statusUpdateMsg, text: cleanText, generated3DCode: parsedJson.code };
566
+ } else {
567
+ final3DMessage = { ...statusUpdateMsg, text: `${cleanText}\n\nI'm sorry, I couldn't generate a 3D model. The response was not in the expected format.` };
568
+ }
569
+ const finalHistory = [...newMessages, final3DMessage];
570
+ setMessages(finalHistory);
571
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalHistory);
572
+ } else {
573
+ // Standard text response
574
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, [...newMessages, messageToUpdate]);
575
+ if (isAutoPlayAudio && cleanText) {
576
+ setIsAiSpeaking(true);
577
+ ttsService.speak({ text: cleanText, onEnd: () => setIsAiSpeaking(false) });
578
+ }
579
+ }
580
+
581
+ // Save memory if it exists
582
+ if (finalAiMessage.memory) {
583
+ const newMemories = [...memories, { ...finalAiMessage.memory, id: `mem-${Date.now()}` }];
584
+ setMemories(newMemories);
585
+ await cryptoService.setEncryptedData(`vanhri-memories-${user.id}`, newMemories);
586
+ }
587
+ }
588
+ }
589
+ } catch (error) {
590
+ console.error("Failed to send message:", error);
591
+ const errorText = `I've encountered an error: ${error instanceof Error ? error.message : 'Unknown error'}`;
592
+ const errorMsg: Message = { ...aiMessagePlaceholder, text: errorText };
593
+ const finalMessages = [...newMessages, errorMsg];
594
+ setMessages(finalMessages);
595
+ await cryptoService.setEncryptedData(`vanhri-chat-${user.id}`, finalMessages);
596
+ } finally {
597
+ setIsLoading(false);
598
+ }
599
+ }, [auth, messages, isAutoPlayAudio, proactiveSuggestion, memories, notebookSources, isAgentChatActive, agentChatSession, handleHandoffToAgent]);
600
+
601
+ if (auth.status === 'loading') {
602
+ return <div className="w-full h-screen bg-slate-900 flex items-center justify-center"><Bot className="w-12 h-12 text-fuchsia-500 animate-pulse" /></div>;
603
+ }
604
+
605
+ if (auth.status === 'landing') {
606
+ return <Welcome onNavigateToLogin={() => setAuth({ status: 'unauthenticated' })} onNavigateToSignUp={() => setAuth({ status: 'signup' })} onContinueAsGuest={handleGuest} />;
607
+ }
608
+
609
+ if (auth.status === 'unauthenticated') {
610
+ return <Login onLoginSuccess={handleLogin} onNavigateToSignUp={() => setAuth({ status: 'signup' })} onNavigateBack={() => setAuth({status: 'landing'})} />;
611
+ }
612
+
613
+ if (auth.status === 'signup') {
614
+ return <SignUp onSignUpSuccess={(email, verificationCode) => setAuth({ status: 'verifying', email, verificationCode, emailServiceConfigured: emailService.isConfigured })} onNavigateToLogin={() => setAuth({ status: 'unauthenticated' })} onNavigateBack={() => setAuth({status: 'landing'})}/>;
615
+ }
616
+
617
+ if (auth.status === 'verifying') {
618
+ return <Verify email={auth.email} verificationCode={auth.verificationCode} onVerifySuccess={handleLogin} />;
619
+ }
620
+
621
+ if (auth.status === 'authenticated' || auth.status === 'guest') {
622
+ return (
623
+ <div className="flex h-screen text-slate-200 bg-slate-900">
624
+ <Sidebar
625
+ isOpen={isSidebarOpen}
626
+ onClose={() => setIsSidebarOpen(false)}
627
+ onLogout={handleLogout}
628
+ onClearChat={clearChatHistory}
629
+ onClearMemories={clearMemories}
630
+ memories={memories}
631
+ username={auth.user.email}
632
+ isGuest={auth.status === 'guest'}
633
+ isAutoPlayAudio={isAutoPlayAudio}
634
+ onToggleAutoPlayAudio={async (enabled) => {
635
+ setIsAutoPlayAudio(enabled);
636
+ if (!enabled) {
637
+ ttsService.cancel();
638
+ setIsAiSpeaking(false);
639
+ }
640
+ if (auth.status === 'authenticated' || auth.status === 'guest') {
641
+ await cryptoService.setEncryptedData(`vanhri-settings-${auth.user.id}`, { autoPlayAudio: enabled });
642
+ }
643
+ }}
644
+ notebookSources={notebookSources}
645
+ onClearNotebook={clearNotebook}
646
+ />
647
+ <div className="flex-1 flex flex-col">
648
+ <ChatWindow
649
+ messages={messages}
650
+ isLoading={isLoading}
651
+ onSendMessage={handleSendMessage}
652
+ onToggleSidebar={() => setIsSidebarOpen(true)}
653
+ username={auth.user.email.split('@')[0]}
654
+ isGuest={auth.status === 'guest'}
655
+ proactiveSuggestion={proactiveSuggestion}
656
+ onDismissSuggestion={() => setProactiveSuggestion(null)}
657
+ onStartCall={handleStartCall}
658
+ onHandoff={handleHandoffToAgent}
659
+ onEditImage={handleEditImage}
660
+ isAgentChatActive={isAgentChatActive}
661
+ onEndAgentChat={handleEndAgentChat}
662
+ onOpenVisualSolver={() => setIsVisualSolverOpen(true)}
663
+ isAiSpeaking={isAiSpeaking}
664
+ onAddSource={handleAddSource}
665
+ />
666
+ </div>
667
+ {editingImage && (
668
+ <ImageEditorModal
669
+ imageData={editingImage.imageData}
670
+ onClose={() => setEditingImage(null)}
671
+ onSave={handleSaveEditedImage}
672
+ />
673
+ )}
674
+ {callState.active && callState.type && (
675
+ <CallUI
676
+ type={callState.type}
677
+ onHangup={handleEndCall}
678
+ username={auth.user.email}
679
+ />
680
+ )}
681
+ {isVisualSolverOpen && (
682
+ <VisualSolverModal
683
+ onClose={() => setIsVisualSolverOpen(false)}
684
+ onSendToChat={(text, image) => {
685
+ setIsVisualSolverOpen(false);
686
+ handleSendMessage(text, image);
687
+ }}
688
+ />
689
+ )}
690
+ </div>
691
+ );
692
+ }
693
+
694
+ return null; // Should not be reached
695
+ };
696
+
697
+ export default App;
vanhri-ai---the-mind-that-builds-worlds/README.md ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Run and deploy your AI Studio app
2
+
3
+ This contains everything you need to run your app locally.
4
+
5
+ ## Run Locally
6
+
7
+ **Prerequisites:** Node.js
8
+
9
+
10
+ 1. Install dependencies:
11
+ `npm install`
12
+ 2. Set the `GEMINI_API_KEY` in [.env.local](.env.local) to your Gemini API key
13
+ 3. Run the app:
14
+ `npm run dev`
vanhri-ai---the-mind-that-builds-worlds/components/CallUI.tsx ADDED
@@ -0,0 +1,306 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+
3
+ import React, { useEffect, useRef, useState, useCallback } from 'react';
4
+ import { Phone, Microphone, Lock, MicOff, Video as VideoIcon, VideoOff, ScreenShare, ScreenShareOff, X, Sparkles } from './Icons';
5
+ import { generateTextStream } from '../services/geminiService';
6
+ import { ttsService } from '../services/ttsService';
7
+ import { VANHRI_PERSONA } from '../constants';
8
+ import { Message } from '../types';
9
+ import NovaVisualizer from './NovaVisualizer';
10
+
11
+ const UserPiP: React.FC<{ stream: MediaStream | null, isCameraOff: boolean, username: string }> = ({ stream, isCameraOff, username }) => {
12
+ const videoRef = useRef<HTMLVideoElement>(null);
13
+
14
+ useEffect(() => {
15
+ if (videoRef.current && stream) {
16
+ videoRef.current.srcObject = stream;
17
+ }
18
+ }, [stream]);
19
+
20
+ return (
21
+ <div className="absolute bottom-24 right-6 md:bottom-28 w-40 h-24 md:w-56 md:h-32 rounded-lg overflow-hidden border-2 border-slate-600 bg-black shadow-lg z-20">
22
+ {isCameraOff ? (
23
+ <div className="w-full h-full flex items-center justify-center">
24
+ <VideoOff className="w-8 h-8 text-slate-400" />
25
+ </div>
26
+ ) : (
27
+ <video ref={videoRef} autoPlay playsInline muted className="w-full h-full object-cover transform -scale-x-100"></video>
28
+ )}
29
+ <div className="absolute bottom-0 left-0 bg-black/50 px-2 py-0.5 text-xs font-semibold rounded-tr-lg">{username}</div>
30
+ </div>
31
+ );
32
+ };
33
+
34
+ const VanhriResponseBubble: React.FC<{ message: Message | null, onClear: () => void }> = ({ message, onClear }) => {
35
+ if (!message) return null;
36
+ return (
37
+ <div className="absolute top-6 left-1/2 -translate-x-1/2 w-full max-w-2xl bg-slate-800/80 backdrop-blur-md border border-fuchsia-500/30 rounded-lg p-4 shadow-2xl animate-fade-in-up z-30">
38
+ <div className="flex items-start gap-3">
39
+ <Sparkles className="w-6 h-6 text-fuchsia-400 flex-shrink-0" />
40
+ <p className="flex-1 text-slate-200 whitespace-pre-wrap">{message.text}<span className="inline-block w-2 h-5 bg-fuchsia-400 ml-1 blinking-cursor"></span></p>
41
+ <button onClick={onClear} className="text-slate-500 hover:text-white"><X className="w-4 h-4"/></button>
42
+ </div>
43
+ </div>
44
+ );
45
+ };
46
+
47
+ const CallControlButton: React.FC<{onClick: () => void, disabled?: boolean, children: React.ReactNode, variant?: 'default' | 'danger' | 'primary', 'aria-label': string, isPulsing?: boolean}> =
48
+ ({ onClick, disabled, children, variant='default', isPulsing, ...props }) => {
49
+ const baseClasses = "p-3 md:p-4 rounded-full transition-colors disabled:opacity-50 disabled:cursor-not-allowed focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-slate-900";
50
+ const variants = {
51
+ default: "bg-slate-700/60 hover:bg-slate-600 focus:ring-slate-500",
52
+ danger: "bg-red-600 hover:bg-red-700 focus:ring-red-500",
53
+ primary: `bg-fuchsia-600 hover:bg-fuchsia-700 focus:ring-fuchsia-500 ${isPulsing ? 'animate-pulse' : ''}`,
54
+ };
55
+ return <button onClick={onClick} disabled={disabled} className={`${baseClasses} ${variants[variant]}`} {...props}>{children}</button>
56
+ }
57
+
58
+
59
// Full-screen call overlay. Acquires the local camera/microphone, renders the
// Nova visualizer as the "remote" participant, and lets the user ask Vanhri a
// question by voice (SpeechRecognition -> Gemini stream -> TTS playback).
const CallUI: React.FC<{ type: 'video' | 'audio', onHangup: () => void, username: string }> = ({ type, onHangup, username }) => {
    // SpeechRecognition instance, created lazily on the first "Ask Vanhri" click.
    const recognitionRef = useRef<any>(null);
    const [userStream, setUserStream] = useState<MediaStream | null>(null);
    const [screenStream, setScreenStream] = useState<MediaStream | null>(null);
    const [isMicMuted, setIsMicMuted] = useState(false);
    const [isCameraOff, setIsCameraOff] = useState(type === 'audio');
    const [error, setError] = useState<string | null>(null);
    const [isListening, setIsListening] = useState(false);
    const [vanhriMessage, setVanhriMessage] = useState<Message | null>(null);
    const [isAiThinking, setIsAiThinking] = useState(false);
    const [isAiSpeaking, setIsAiSpeaking] = useState(false);

    // Stop all local media, recognition and TTS activity, then notify the parent.
    const handleHangup = useCallback(() => {
        recognitionRef.current?.stop();
        ttsService.cancel();
        setIsAiSpeaking(false);
        userStream?.getTracks().forEach(track => track.stop());
        screenStream?.getTracks().forEach(track => track.stop());
        onHangup();
    }, [userStream, screenStream, onHangup]);


    useEffect(() => {
        // FIX: keep a local handle on the acquired stream. The previous cleanup
        // read the `userStream` state variable, but that closure was created on
        // the initial render (when it was still null), so the camera/microphone
        // were never released when the component unmounted without an explicit
        // hangup (camera light stayed on).
        let acquiredStream: MediaStream | null = null;

        const setupCall = async () => {
            // Pre-flight permission check for a friendlier message than the
            // generic getUserMedia failure. Best-effort: not all browsers
            // support querying these permission names.
            if (navigator.permissions) {
                try {
                    const micPerm = await navigator.permissions.query({ name: 'microphone' as PermissionName });
                    if (micPerm.state === 'denied') {
                        setError('Microphone access is denied. Please allow it in your browser settings to start a call.');
                        return;
                    }
                    if (type === 'video') {
                        const camPerm = await navigator.permissions.query({ name: 'camera' as PermissionName });
                        if (camPerm.state === 'denied') {
                            setError('Camera access is denied. Please allow it in your browser settings for a video call.');
                            return;
                        }
                    }
                } catch (e) {
                    console.warn("Permissions API not supported or failed, proceeding with getUserMedia directly.", e);
                }
            }

            try {
                // Always request both tracks so an audio call can be upgraded
                // to video by simply re-enabling the video track.
                const stream = await navigator.mediaDevices.getUserMedia({
                    video: true,
                    audio: true,
                });
                acquiredStream = stream;
                setUserStream(stream);
                if (type === 'audio') {
                    stream.getVideoTracks().forEach(track => track.enabled = false);
                }
            } catch (err: any) {
                console.error("Error accessing media devices.", err);
                if (err.name === 'NotAllowedError' || err.name === 'PermissionDeniedError') {
                    setError('Permission denied. Please allow access to your camera and microphone in your browser settings.');
                } else if (err.name === 'NotFoundError' || err.name === 'DevicesNotFoundError') {
                    setError('No camera and/or microphone found. Please ensure they are connected and not in use by another application.');
                } else {
                    setError('Could not access your camera or microphone. Please check your hardware and browser settings.');
                }
                return;
            }
        };

        setupCall();

        return () => {
            acquiredStream?.getTracks().forEach(track => track.stop());
            recognitionRef.current?.abort();
        };
    }, [type]);

    // Render the screen-share stream into the main call area while it is active.
    useEffect(() => {
        if (screenStream) {
            const screenVideo = document.createElement('video');
            screenVideo.srcObject = screenStream;
            screenVideo.autoplay = true;
            screenVideo.className = "w-full h-full object-contain relative z-10";
            const mainEl = document.getElementById('call-main');
            if(mainEl) mainEl.appendChild(screenVideo)

            return () => {
                if(mainEl && mainEl.contains(screenVideo)) {
                    mainEl.removeChild(screenVideo);
                }
                // FIX: release capture tracks here too, so unmounting without
                // toggling share off does not leave the screen being captured.
                // Stopping already-stopped tracks is a harmless no-op.
                screenStream.getTracks().forEach(track => track.stop());
            }
        }
    }, [screenStream]);


    // Flip the enabled flag on all audio tracks (mute/unmute).
    const toggleMic = () => {
        if (!userStream) return;
        userStream.getAudioTracks().forEach(track => track.enabled = !track.enabled);
        setIsMicMuted(prev => !prev);
    };

    // Flip the enabled flag on all video tracks (camera on/off).
    const toggleCamera = () => {
        if (!userStream) return;
        userStream.getVideoTracks().forEach(track => track.enabled = !track.enabled);
        setIsCameraOff(prev => !prev);
    };

    // Start or stop screen capture. The `onended` handler covers the browser's
    // own "stop sharing" UI.
    const toggleScreenShare = async () => {
        if (screenStream) {
            screenStream.getTracks().forEach(track => track.stop());
            setScreenStream(null);
        } else {
            try {
                const stream = await navigator.mediaDevices.getDisplayMedia({ video: true });
                const [videoTrack] = stream.getVideoTracks();
                if (videoTrack) videoTrack.onended = () => setScreenStream(null);
                setScreenStream(stream);
            } catch (err) {
                console.error("Error starting screen share", err);
            }
        }
    };

    // Capture one voice question, stream Gemini's answer into the on-screen
    // bubble, then speak it with TTS.
    const handleAskVanhri = () => {
        if (isListening || isAiThinking) return;

        // Interrupt any in-progress speech before listening again.
        if (isAiSpeaking) {
            ttsService.cancel();
            setIsAiSpeaking(false);
        }

        // Lazy initialization on first click
        if (!recognitionRef.current) {
            const SpeechRecognition = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
            if (!SpeechRecognition) {
                setError("Speech recognition is not supported by your browser.");
                return;
            }
            const recog = new SpeechRecognition();
            recog.continuous = false;
            recog.lang = 'en-US';
            recog.interimResults = false;

            recog.onstart = () => setIsListening(true);
            recog.onend = () => setIsListening(false);

            recog.onerror = (event: any) => {
                console.error("Speech recognition error", event.error);
                // 'aborted'/'no-speech' are expected during normal use; don't surface them.
                if (event.error !== 'aborted' && event.error !== 'no-speech') {
                    setError(`Speech recognition error: ${event.error}`);
                }
                setIsListening(false);
            };

            recog.onresult = async (event: any) => {
                const transcript = event.results[0][0].transcript;
                if (transcript) {
                    ttsService.cancel();
                    setIsAiSpeaking(false);
                    setIsAiThinking(true);
                    const messageId = `vanhri-${Date.now()}`;
                    setVanhriMessage({ id: messageId, sender: 'ai', text: '' });

                    try {
                        const stream = generateTextStream(transcript, null, [], VANHRI_PERSONA, [], { useSearch: true });
                        let fullText = '';
                        for await (const chunk of stream) {
                            if (chunk.text) {
                                fullText += chunk.text;
                                setVanhriMessage({ id: messageId, sender: 'ai', text: fullText });
                            }
                        }
                        setIsAiSpeaking(true);
                        ttsService.speak({
                            text: fullText,
                            onEnd: () => {
                                setIsAiSpeaking(false);
                                // Keep the answer on screen briefly after speech finishes.
                                setTimeout(() => setVanhriMessage(null), 3000);
                            }
                        });

                    } catch (e) {
                        const errorMessage = "Sorry, I encountered an error.";
                        setVanhriMessage({ id: `vanhri-err-${Date.now()}`, sender: 'ai', text: errorMessage });
                        ttsService.speak({ text: errorMessage, onEnd: () => setIsAiSpeaking(false) });
                    } finally {
                        setIsAiThinking(false);
                    }
                }
            };
            recognitionRef.current = recog;
        }

        try {
            recognitionRef.current.start();
        } catch(err) {
            console.error("Could not start speech recognition:", err);
            setIsListening(false);
        }
    };

    return (
        <div className="fixed inset-0 bg-slate-900/95 z-50 flex flex-col items-center justify-center text-white font-sans">
            <main id="call-main" className="w-full h-full flex items-center justify-center relative bg-black">
                <div className="absolute inset-0 z-0">
                    <NovaVisualizer isSpeaking={isAiSpeaking} isThinking={isAiThinking} />
                </div>
                <div className="absolute top-1/2 left-1/2 -translate-x-1/2 -translate-y-1/2 text-center z-20 pointer-events-none">
                    <h3 className="text-3xl font-bold text-white" style={{textShadow: '0 0 10px rgba(0,0,0,0.7)'}}>Vanhri AI</h3>
                    <p className="text-slate-300" style={{textShadow: '0 0 10px rgba(0,0,0,0.7)'}}>
                        {isAiThinking ? 'Thinking...' : isAiSpeaking ? 'Speaking...' : isListening ? 'Listening...' : 'Ready'}
                    </p>
                </div>
            </main>

            <UserPiP stream={userStream} isCameraOff={isCameraOff} username={username.split('@')[0]}/>

            {vanhriMessage && <VanhriResponseBubble message={vanhriMessage} onClear={() => setVanhriMessage(null)} />}

            {error && <div className="absolute top-6 bg-red-900/80 border border-red-700 p-3 rounded-lg text-red-300 z-30">{error}</div>}

            <footer className="absolute bottom-0 left-0 right-0 p-4 md:p-6 flex flex-col items-center gap-4 z-20">
                <div className="flex items-center gap-2 bg-slate-900/80 text-green-400 text-xs font-semibold px-3 py-1.5 rounded-full border border-green-500/30 backdrop-blur-sm">
                    <Lock className="w-3 h-3"/>
                    End-to-End Encrypted
                </div>
                <div className="flex items-center justify-center gap-3 md:gap-4 p-3 bg-slate-800/80 backdrop-blur-md rounded-full">
                    <CallControlButton onClick={toggleMic} aria-label={isMicMuted ? "Unmute microphone" : "Mute microphone"}>
                        {isMicMuted ? <MicOff className="w-6 h-6" /> : <Microphone className="w-6 h-6" />}
                    </CallControlButton>
                    <CallControlButton onClick={toggleCamera} aria-label={isCameraOff ? "Start camera" : "Stop camera"}>
                        {isCameraOff ? <VideoOff className="w-6 h-6" /> : <VideoIcon className="w-6 h-6" />}
                    </CallControlButton>
                    <CallControlButton onClick={toggleScreenShare} aria-label={screenStream ? "Stop screen sharing" : "Start screen sharing"}>
                        {screenStream ? <ScreenShareOff className="w-6 h-6" /> : <ScreenShare className="w-6 h-6" />}
                    </CallControlButton>
                    <div className="h-6 w-px bg-slate-600 mx-2"></div>
                    <CallControlButton onClick={handleAskVanhri} disabled={isListening || isAiThinking} variant="primary" isPulsing={isListening || isAiThinking} aria-label="Ask Vanhri AI a question">
                        <Microphone className="w-6 h-6"/>
                    </CallControlButton>
                    <div className="h-6 w-px bg-slate-600 mx-2"></div>
                    <CallControlButton onClick={handleHangup} variant="danger" aria-label="Hang up">
                        <Phone className="w-6 h-6 transform rotate-[135deg]" />
                    </CallControlButton>
                </div>
            </footer>
        </div>
    );
};
305
+
306
+ export default CallUI;
vanhri-ai---the-mind-that-builds-worlds/components/ChartDisplay.tsx ADDED
@@ -0,0 +1,116 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useMemo } from 'react';
2
+ import {
3
+ ResponsiveContainer,
4
+ BarChart, Bar,
5
+ LineChart, Line,
6
+ PieChart, Pie, Cell,
7
+ XAxis, YAxis, CartesianGrid, Tooltip, Legend,
8
+ } from 'recharts';
9
+ import { ChartData } from '../types';
10
+
11
+ interface ChartDisplayProps {
12
+ data: ChartData;
13
+ }
14
+
15
+ const ChartDisplay: React.FC<ChartDisplayProps> = ({ data }) => {
16
+ // Memoize the processed data to prevent re-computation on every render
17
+ const processedData = useMemo(() => {
18
+ if (!data || !data.data || !data.keys || data.type === 'pie') return [];
19
+ // Transform data for Bar and Line charts
20
+ return data.data.map(point => {
21
+ const newPoint: { [key: string]: string | number } = { label: point.label };
22
+ data.keys.forEach((key, index) => {
23
+ newPoint[key.name] = point.values[index];
24
+ });
25
+ return newPoint;
26
+ });
27
+ }, [data]);
28
+
29
+ const pieData = useMemo(() => {
30
+ if (!data || !data.data || !data.keys || data.type !== 'pie') return [];
31
+ // For pie charts, we use the `label` as the name and the first value as the `value`.
32
+ return data.data.map(point => ({
33
+ name: point.label,
34
+ value: point.values[0] || 0
35
+ }));
36
+ }, [data]);
37
+
38
+
39
+ if (!data) {
40
+ return <div className="text-center text-red-400">Error: Chart data is missing.</div>;
41
+ }
42
+
43
+ const renderChart = () => {
44
+ const tooltipStyle = {
45
+ backgroundColor: '#1f2937',
46
+ border: '1px solid #374151',
47
+ borderRadius: '0.5rem',
48
+ };
49
+ const labelStyle = { color: '#f9fafb' };
50
+ const legendStyle = { fontSize: "12px", color: '#9ca3af' };
51
+
52
+ switch(data.type) {
53
+ case 'bar':
54
+ return (
55
+ <BarChart data={processedData} margin={{ top: 5, right: 20, left: -10, bottom: 5 }}>
56
+ <CartesianGrid strokeDasharray="3 3" stroke="#374151" />
57
+ <XAxis dataKey="label" stroke="#9ca3af" fontSize={12} tickLine={false} axisLine={false} />
58
+ <YAxis stroke="#9ca3af" fontSize={12} tickLine={false} axisLine={false} />
59
+ <Tooltip contentStyle={tooltipStyle} labelStyle={labelStyle} cursor={{fill: 'rgba(139, 92, 246, 0.1)'}} />
60
+ <Legend wrapperStyle={legendStyle} />
61
+ {data.keys.map(key => (
62
+ <Bar key={key.name} dataKey={key.name} fill={key.color} radius={[4, 4, 0, 0]} />
63
+ ))}
64
+ </BarChart>
65
+ );
66
+ case 'line':
67
+ return (
68
+ <LineChart data={processedData} margin={{ top: 5, right: 20, left: -10, bottom: 5 }}>
69
+ <CartesianGrid strokeDasharray="3 3" stroke="#374151" />
70
+ <XAxis dataKey="label" stroke="#9ca3af" fontSize={12} tickLine={false} axisLine={false} />
71
+ <YAxis stroke="#9ca3af" fontSize={12} tickLine={false} axisLine={false} />
72
+ <Tooltip contentStyle={tooltipStyle} labelStyle={labelStyle} />
73
+ <Legend wrapperStyle={legendStyle} />
74
+ {data.keys.map(key => (
75
+ <Line key={key.name} type="monotone" dataKey={key.name} stroke={key.color} strokeWidth={2} activeDot={{ r: 8 }} />
76
+ ))}
77
+ </LineChart>
78
+ );
79
+ case 'pie':
80
+ return (
81
+ <PieChart margin={{ top: 5, right: 5, left: 5, bottom: 5 }}>
82
+ <Tooltip contentStyle={tooltipStyle} />
83
+ <Legend wrapperStyle={legendStyle} />
84
+ <Pie data={pieData} dataKey="value" nameKey="name" cx="50%" cy="50%" outerRadius={'80%'} labelLine={false} label={({ cx, cy, midAngle, innerRadius, outerRadius, percent }) => {
85
+ const radius = innerRadius + (outerRadius - innerRadius) * 0.5;
86
+ const x = cx + radius * Math.cos(-midAngle * (Math.PI / 180));
87
+ const y = cy + radius * Math.sin(-midAngle * (Math.PI / 180));
88
+ return (
89
+ <text x={x} y={y} fill="white" textAnchor={x > cx ? 'start' : 'end'} dominantBaseline="central" fontSize={12}>
90
+ {`${(percent * 100).toFixed(0)}%`}
91
+ </text>
92
+ );
93
+ }}>
94
+ {pieData.map((entry, index) => (
95
+ // Use the color from the corresponding key, or cycle through them
96
+ <Cell key={`cell-${index}`} fill={data.keys[index % data.keys.length].color} />
97
+ ))}
98
+ </Pie>
99
+ </PieChart>
100
+ );
101
+ default:
102
+ return <div className="text-center text-yellow-400">Unsupported chart type: {data.type}</div>;
103
+ }
104
+ };
105
+
106
+ return (
107
+ <div style={{ height: '400px', width: '100%' }} className="bg-slate-900/50 rounded-lg border border-slate-700 mt-2 p-4 flex flex-col relative">
108
+ <h4 className="text-md font-semibold text-fuchsia-300 mb-4 text-center absolute top-2 left-1/2 -translate-x-1/2 w-full">{data.title}</h4>
109
+ <ResponsiveContainer>
110
+ {renderChart()}
111
+ </ResponsiveContainer>
112
+ </div>
113
+ );
114
+ };
115
+
116
+ export default ChartDisplay;
vanhri-ai---the-mind-that-builds-worlds/components/ChatWindow.tsx ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useRef, useEffect } from 'react';
2
+ import { Message, ProactiveSuggestion as ProactiveSuggestionType } from '../types';
3
+ import PromptInput from './PromptInput';
4
+ import MessageBubble from './MessageBubble';
5
+ import ThinkingIndicator from './ThinkingIndicator';
6
+ import ProactiveSuggestion from './ProactiveSuggestion';
7
+ import NovaVisualizer from './NovaVisualizer';
8
+ import { Menu, Phone, Users } from './Icons';
9
+
10
+ interface ChatWindowProps {
11
+ messages: Message[];
12
+ isLoading: boolean;
13
+ onSendMessage: (text: string, image: string | null) => void;
14
+ onToggleSidebar: () => void;
15
+ username: string;
16
+ isGuest: boolean;
17
+ proactiveSuggestion: ProactiveSuggestionType | null;
18
+ onDismissSuggestion: () => void;
19
+ onStartCall: (type: 'video' | 'audio') => void;
20
+ onHandoff: () => void;
21
+ onEditImage: (messageId: string, imageIndex: number | null, imageData: string) => void;
22
+ isAgentChatActive: boolean;
23
+ onEndAgentChat: () => void;
24
+ onOpenVisualSolver: () => void;
25
+ isAiSpeaking: boolean;
26
+ onAddSource: (name: string, content: string, mimeType: string) => void;
27
+ }
28
+
29
// Main conversation surface: header (AI / live-agent state), scrolling
// transcript, proactive suggestion banner and the prompt input.
const ChatWindow: React.FC<ChatWindowProps> = ({
  messages,
  isLoading,
  onSendMessage,
  onToggleSidebar,
  username,
  isGuest,
  proactiveSuggestion,
  onDismissSuggestion,
  onStartCall,
  onHandoff,
  onEditImage,
  isAgentChatActive,
  onEndAgentChat,
  onOpenVisualSolver,
  isAiSpeaking,
  onAddSource
}) => {
  // Invisible anchor kept at the bottom of the transcript for auto-scrolling.
  const bottomAnchorRef = useRef<HTMLDivElement>(null);

  // Keep the newest message in view whenever the transcript or loading state changes.
  useEffect(() => {
    bottomAnchorRef.current?.scrollIntoView({ behavior: "smooth" });
  }, [messages, isLoading]);

  // Accepting a proactive suggestion simply sends its action text as a message.
  const acceptSuggestion = (action: string) => {
    onSendMessage(action, null);
  };

  // A message is "streaming" when it is the newest AI message, tokens are
  // still arriving (isLoading) and some text has already been rendered.
  const isStreamingMessage = (msg: Message) =>
    isLoading && msg.id.startsWith('ai-') && msg.text.length > 0 && messages[messages.length - 1].id === msg.id;

  const headerTitle = isAgentChatActive ? 'Live Agent Support' : 'Vanhri AI';
  const headerSubtitle = isAgentChatActive
    ? 'You are speaking with a human support agent'
    : `In conversation with ${isGuest ? 'Guest' : username}`;

  return (
    <div className="flex-1 flex flex-col bg-slate-900 h-full max-h-screen">
      <header className="flex items-center gap-3 p-4 border-b border-slate-800 bg-slate-900/80 backdrop-blur-sm z-10">
        <button
          onClick={onToggleSidebar}
          className="p-2 text-slate-400 hover:text-fuchsia-500 transition-colors rounded-full hover:bg-slate-800 md:hidden"
          aria-label="Toggle sidebar"
        >
          <Menu className="w-6 h-6" />
        </button>
        <div className="flex-1 flex items-center gap-4">
          <div className="w-12 h-12 flex-shrink-0 rounded-full overflow-hidden relative bg-slate-800 border border-slate-700">
            {isAgentChatActive ? (
              <div className="w-full h-full flex items-center justify-center bg-sky-600/20">
                <Users className="w-7 h-7 text-sky-400" />
              </div>
            ) : (
              <NovaVisualizer
                isSpeaking={isAiSpeaking}
                isThinking={isLoading}
              />
            )}
          </div>
          <div>
            <h2 className="text-lg font-semibold text-slate-100">{headerTitle}</h2>
            <p className="text-sm text-slate-400">{headerSubtitle}</p>
          </div>
        </div>
        {isAgentChatActive && (
          <button onClick={onEndAgentChat} className="ml-auto flex items-center gap-2 text-sm bg-red-600/20 text-red-400 hover:bg-red-600/40 hover:text-red-300 px-3 py-1.5 rounded-md transition flex-shrink-0">
            <Phone className="w-4 h-4"/>
            End Chat
          </button>
        )}
      </header>

      <div className="flex-1 overflow-y-auto p-6 space-y-6">
        {messages.map((msg) => (
          <MessageBubble key={msg.id} message={msg} isStreaming={isStreamingMessage(msg)} onEditImage={onEditImage} />
        ))}
        {isLoading && messages[messages.length - 1]?.sender !== 'ai' && <ThinkingIndicator />}
        <div ref={bottomAnchorRef} />
      </div>

      <div className="p-4 pt-2 border-t border-slate-800 bg-slate-900">
        {proactiveSuggestion && (
          <ProactiveSuggestion
            suggestion={proactiveSuggestion}
            onAccept={acceptSuggestion}
            onDismiss={onDismissSuggestion}
          />
        )}
        <PromptInput
          onSendMessage={onSendMessage}
          disabled={isLoading}
          onStartCall={onStartCall}
          onHandoff={onHandoff}
          isAgentChatActive={isAgentChatActive}
          onOpenVisualSolver={onOpenVisualSolver}
          onAddSource={onAddSource}
        />
      </div>
    </div>
  );
};
124
+
125
+ export default ChatWindow;
vanhri-ai---the-mind-that-builds-worlds/components/EmailChatModal.tsx ADDED
File without changes
vanhri-ai---the-mind-that-builds-worlds/components/Icons.tsx ADDED
@@ -0,0 +1,260 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React from 'react';
2
+
3
+ // This is a common pattern to avoid importing a whole library for a few icons.
4
+ // In a real project, you might use a library like lucide-react.
5
+
6
// Base icon wrapper: a 24x24 stroked SVG in lucide style. All icons below
// render through it, so size/colour are controlled via className/props
// (stroke uses currentColor).
export const Icon: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    width="24"
    height="24"
    viewBox="0 0 24 24"
    fill="none"
    stroke="currentColor"
    strokeWidth="2"
    strokeLinecap="round"
    strokeLinejoin="round"
    {...props}
  />
);

// --- Navigation / layout icons ---
export const Menu: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><line x1="4" x2="20" y1="12" y2="12"/><line x1="4" x2="20" y1="6" y2="6"/><line x1="4" x2="20" y1="18" y2="18"/></Icon>
);
export const ChevronLeft: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><path d="m15 18-6-6 6-6"/></Icon>
);


export const Cube: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"/>
    <path d="m3.27 6.96 8.73 5.05 8.73-5.05"/>
    <path d="M12 22.08V12"/>
  </Icon>
);


export const ArrowLeft: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><path d="m12 19-7-7 7-7"/><path d="M19 12H5"/></Icon>
);

export const BrainCircuit: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M14 5a3 3 0 0 0-3-3H9a3 3 0 0 0-3 3v2.3a1 1 0 0 1-.3.7L3.9 9.8a1 1 0 0 0 0 1.4l1.8 1.8a1 1 0 0 1 .3.7V16a3 3 0 0 0 3 3h2a3 3 0 0 0 3-3v-2.3a1 1 0 0 1 .3-.7l1.8-1.8a1 1 0 0 0 0-1.4l-1.8-1.8a1 1 0 0 1-.3-.7Z"/>
    <path d="M9 13v2"/><path d="M15 13v2"/><path d="M12 16v-3"/><path d="M12 9V7"/><path d="M9 7h6"/>
  </Icon>
);

export const BookOpen: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M2 3h6a4 4 0 0 1 4 4v14a3 3 0 0 0-3-3H2z" />
    <path d="M22 3h-6a4 4 0 0 0-4 4v14a3 3 0 0 1 3-3h7z" />
  </Icon>
);

export const Code: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <polyline points="16 18 22 12 16 6" />
    <polyline points="8 6 2 12 8 18" />
  </Icon>
);

// --- Chat participant icons ---
export const User: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M19 21v-2a4 4 0 0 0-4-4H9a4 4 0 0 0-4 4v2" />
    <circle cx="12" cy="7" r="4" />
  </Icon>
);

export const Bot: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M12 8V4H8" />
    <rect width="16" height="12" x="4" y="8" rx="2" />
    <path d="M2 14h2" />
    <path d="M20 14h2" />
    <path d="M15 13v2" />
    <path d="M9 13v2" />
  </Icon>
);

// --- Composer / input icons ---
export const Send: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="m22 2-7 20-4-9-9-4Z" />
    <path d="M22 2 11 13" />
  </Icon>
);

export const Paperclip: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="m21.44 11.05-9.19 9.19a6.003 6.003 0 1 1-8.49-8.49l8.57-8.57A4.002 4.002 0 0 1 16.17 6.1l-6.88 6.88a2.001 2.001 0 1 1-2.83-2.83l8.49-8.48" />
  </Icon>
);

export const Brain: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><path d="M9.5 2A2.5 2.5 0 0 1 12 4.5V5h4a2 2 0 0 1 2 2v4a2 2 0 0 1-2 2h-1.34a1 1 0 0 0-.96.71L13 15h-2l-.7-1.29a1 1 0 0 0-.96-.71H8a2 2 0 0 1-2-2V7a2 2 0 0 1 2-2h4V4.5A2.5 2.5 0 0 1 9.5 2z"/><path d="M4 14v.92a2 2 0 0 0 1.02 1.78l.42.25a1 1 0 0 1 .56 1.55l-1.06 2.11A1 1 0 0 0 5.82 22H7a1 1 0 0 0 1-1v-2.12a1 1 0 0 1 .5-1.7l1.01-.4A1 1 0 0 1 11 17V15H8.62a1 1 0 0 1-.95-.69L7 12H4v2z"/><path d="M20 14v.92a2 2 0 0 1-1.02 1.78l-.42.25a1 1 0 0 0-.56 1.55l1.06 2.11A1 1 0 0 1 18.18 22H17a1 1 0 0 1-1-1v-2.12a1 1 0 0 0-.5-1.7l-1.01-.4A1 1 0 0 0 13 17V15h2.38a1 1 0 0 1 .95.69L17 12h3v2z"/></Icon>
);
export const Search: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><circle cx="11" cy="11" r="8"/><path d="m21 21-4.3-4.3"/></Icon>
);
export const BotMessageSquare: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><path d="M12 6V2H8"/><path d="m8 18-4 4V8a2 2 0 0 1 2-2h12a2 2 0 0 1 2 2v8a2 2 0 0 1-2 2h-4.3Z"/><path d="M7 12v.01"/><path d="M11 12v.01"/><path d="M15 12v.01"/></Icon>
);

export const X: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><path d="M18 6 6 18"/><path d="m6 6 12 12"/></Icon>
);

export const Settings: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><path d="M12.22 2h-.44a2 2 0 0 0-2 2v.18a2 2 0 0 1-1 1.73l-.43.25a2 2 0 0 1-2 0l-.15-.08a2 2 0 0 0-2.73.73l-.22.38a2 2 0 0 0 .73 2.73l.15.1a2 2 0 0 1 0 2l-.15.08a2 2 0 0 0-.73 2.73l.22.38a2 2 0 0 0 2.73.73l.15-.08a2 2 0 0 1 2 0l.43.25a2 2 0 0 1 1 1.73V20a2 2 0 0 0 2 2h.44a2 2 0 0 0 2-2v-.18a2 2 0 0 1 1-1.73l.43-.25a2 2 0 0 1 2 0l.15.08a2 2 0 0 0 2.73-.73l.22-.38a2 2 0 0 0-.73-2.73l-.15-.08a2 2 0 0 1 0-2l.15-.08a2 2 0 0 0 .73-2.73l-.22-.38a2 2 0 0 0-2.73-.73l-.15.08a2 2 0 0 1-2 0l-.43-.25a2 2 0 0 1-1-1.73V4a2 2 0 0 0-2-2z"/><circle cx="12" cy="12" r="3"/></Icon>
);

export const Mail: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <rect width="20" height="16" x="2" y="4" rx="2"/>
    <path d="m22 7-8.97 5.7a1.94 1.94 0 0 1-2.06 0L2 7"/>
  </Icon>
);

// --- Call / media icons ---
export const Microphone: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3Z"/>
    <path d="M19 10v2a7 7 0 0 1-14 0v-2"/>
    <line x1="12" x2="12" y1="19" y2="22"/>
  </Icon>
);
export const MicOff: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <line x1="2" x2="22" y1="2" y2="22" />
    <path d="M18.89 13.23A7.12 7.12 0 0 0 19 12v-2" />
    <path d="M5 10v2a7 7 0 0 0 12 5" />
    <path d="M12 1a3 3 0 0 0-3 3v7.64" />
    <path d="M9 1a3 3 0 0 0-3 3v2" />
  </Icon>
);


export const Sparkles: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="m12 3-1.5 3L7 7.5l3.5 1.5L12 12l1.5-3L17 7.5l-3.5-1.5z"/>
    <path d="M5 21 3 14l-2 2 3.5 3.5Z"/>
    <path d="m21 5-2-2-7 7 2 2Z"/>
  </Icon>
);

// --- Account / security icons ---
export const LogOut: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M9 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h4" />
    <polyline points="16 17 21 12 16 7" />
    <line x1="21" x2="9" y1="12" y2="12" />
  </Icon>
);

export const Trash2: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M3 6h18"/><path d="M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6m3 0V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"/><line x1="10" y1="11" x2="10" y2="17"/><line x1="14" y1="11" x2="14" y2="17"/>
  </Icon>
);

export const KeyRound: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M2 18v3c0 .6.4 1 1 1h4v-3h3v-3h2l1.4-1.4a6.5 6.5 0 1 0-4-4Z" /><circle cx="16.5" cy="7.5" r=".5" fill="currentColor"/>
  </Icon>
);

export const ShieldCheck: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}>
    <path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z"/><path d="m9 12 2 2 4-4"/>
  </Icon>
);

// --- Playback / audio icons ---
export const Play: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><polygon points="5 3 19 12 5 21 5 3"/></Icon>
);

export const Volume2: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"/><path d="M15.54 8.46a5 5 0 0 1 0 7.07"/></Icon>
);

export const VolumeX: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"/><line x1="23" y1="9" x2="17" y2="15"/><line x1="17" y1="9" x2="23" y2="15"/></Icon>
);

// --- Actions ---
export const Share2: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><circle cx="18" cy="5" r="3"/><circle cx="6" cy="12" r="3"/><circle cx="18" cy="19" r="3"/><line x1="8.59" x2="15.42" y1="13.51" y2="17.49"/><line x1="15.41" x2="8.59" y1="6.51" y2="10.49"/></Icon>
);

export const Edit: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
  <Icon {...props}><path d="M11 4H4a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2v-7"/><path d="M18.5 2.5a2.121 2.121 0 0 1 3 3L12 15l-4 1 1-4 9.5-9.5z"/></Icon>
);
190
+
191
+ export const Download: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
192
+ <Icon {...props}><path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"/><polyline points="7 10 12 15 17 10"/><line x1="12" y1="15" x2="12" y2="3"/></Icon>
193
+ );
194
+
195
+ export const Phone: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
196
+ <Icon {...props}><path d="M22 16.92v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.5 19.5 0 0 1-6-6 19.79 19.79 0 0 1-3.07-8.67A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91a16 16 0 0 0 6 6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7A2 2 0 0 1 22 16.92z"/></Icon>
197
+ );
198
+
199
+ export const Video: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
200
+ <Icon {...props}><path d="M23 7l-7 5 7 5V7z"/><rect x="1" y="5" width="15" height="14" rx="2" ry="2"/></Icon>
201
+ );
202
+ export const VideoOff: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
203
+ <Icon {...props}>
204
+ <path d="M16 16v1a2 2 0 0 1-2 2H3a2 2 0 0 1-2-2V7a2 2 0 0 1 2-2h2l10 10Z" />
205
+ <line x1="2" x2="22" y1="2" y2="22" />
206
+ <path d="M23 7v10" />
207
+ <path d="m15 5 6 4" />
208
+ </Icon>
209
+ );
210
+
211
+ export const Repeat: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
212
+ <Icon {...props}>
213
+ <path d="m17 2 4 4-4 4"/>
214
+ <path d="M3 11v-1a4 4 0 0 1 4-4h14"/>
215
+ <path d="m7 22-4-4 4-4"/>
216
+ <path d="M21 13v1a4 4 0 0 1-4 4H3"/>
217
+ </Icon>
218
+ );
219
+
220
+ export const Users: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
221
+ <Icon {...props}><path d="M16 21v-2a4 4 0 0 0-4-4H6a4 4 0 0 0-4 4v2"/><circle cx="9" cy="7" r="4"/><path d="M22 21v-2a4 4 0 0 0-3-3.87"/><path d="M16 3.13a4 4 0 0 1 0 7.75"/></Icon>
222
+ );
223
+
224
+ export const Save: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
225
+ <Icon {...props}><path d="M19 21H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h11l5 5v11a2 2 0 0 1-2 2z"/><polyline points="17 21 17 13 7 13 7 21"/><polyline points="7 3 7 8 15 8"/></Icon>
226
+ );
227
+
228
+ export const Lock: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
229
+ <Icon {...props}>
230
+ <rect x="3" y="11" width="18" height="11" rx="2" ry="2" />
231
+ <path d="M7 11V7a5 5 0 0 1 10 0v4" />
232
+ </Icon>
233
+ );
234
+
235
+ export const ScreenShare: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
236
+ <Icon {...props}>
237
+ <path d="M13 3H4a2 2 0 0 0-2 2v10a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-3" />
238
+ <path d="M8 21h8" />
239
+ <path d="M12 17v4" />
240
+ <path d="m17 8 5-5" />
241
+ <path d="M17 3h5v5" />
242
+ </Icon>
243
+ );
244
+
245
+ export const ScreenShareOff: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
246
+ <Icon {...props}>
247
+ <path d="M13 3H4a2 2 0 0 0-2 2v10a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-3" />
248
+ <path d="M8 21h8" />
249
+ <path d="M12 17v4" />
250
+ <path d="M22 3l-5 5" />
251
+ <path d="m17 3 5 5" />
252
+ </Icon>
253
+ );
254
+
255
+ export const Camera: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
256
+ <Icon {...props}>
257
+ <path d="M14.5 4h-5L7 7H4a2 2 0 0 0-2 2v9a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2V9a2 2 0 0 0-2-2h-3l-2.5-3z"/>
258
+ <circle cx="12" cy="13" r="3"/>
259
+ </Icon>
260
+ );
vanhri-ai---the-mind-that-builds-worlds/components/ImageEditorModal.tsx ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useState, useRef, useEffect, useCallback } from 'react';
2
+ import { X, Save } from './Icons';
3
+
4
+ interface ImageEditorModalProps {
5
+ imageData: string;
6
+ onClose: () => void;
7
+ onSave: (newImageData: string) => void;
8
+ }
9
+
10
+ const FilterSlider: React.FC<{name: string, value: number, onChange: (name: string, value: number) => void, min?: number, max?: number, unit?: string}> = ({ name, value, onChange, min = 0, max = 200, unit = '%' }) => (
11
+ <div className="flex flex-col">
12
+ <label htmlFor={name} className="text-sm text-slate-300 mb-1 capitalize">{name} ({value}{unit})</label>
13
+ <input
14
+ id={name}
15
+ type="range"
16
+ min={min}
17
+ max={max}
18
+ value={value}
19
+ onChange={e => onChange(name, parseInt(e.target.value))}
20
+ className="w-full h-2 bg-slate-700 rounded-lg appearance-none cursor-pointer accent-fuchsia-500"
21
+ />
22
+ </div>
23
+ );
24
+
25
+
26
+ const ImageEditorModal: React.FC<ImageEditorModalProps> = ({ imageData, onClose, onSave }) => {
27
+ const canvasRef = useRef<HTMLCanvasElement>(null);
28
+ const imageRef = useRef<HTMLImageElement | null>(null);
29
+ const [filters, setFilters] = useState({ brightness: 100, contrast: 100, grayscale: 0 });
30
+
31
+ const applyFilters = useCallback(() => {
32
+ const canvas = canvasRef.current;
33
+ const ctx = canvas?.getContext('2d');
34
+ const image = imageRef.current;
35
+ if (!canvas || !ctx || !image || !image.complete || image.naturalWidth === 0) return;
36
+
37
+ canvas.width = image.naturalWidth;
38
+ canvas.height = image.naturalHeight;
39
+
40
+ ctx.filter = `brightness(${filters.brightness}%) contrast(${filters.contrast}%) grayscale(${filters.grayscale}%)`;
41
+ ctx.drawImage(image, 0, 0);
42
+
43
+ }, [filters]);
44
+
45
+ useEffect(() => {
46
+ const image = new Image();
47
+ image.crossOrigin = 'anonymous'; // Important for canvas with data URLs
48
+ image.src = imageData;
49
+ image.onload = () => {
50
+ imageRef.current = image;
51
+ applyFilters();
52
+ };
53
+ }, [imageData, applyFilters]);
54
+
55
+ useEffect(() => {
56
+ applyFilters();
57
+ }, [filters, applyFilters]);
58
+
59
+ const handleSave = () => {
60
+ const canvas = canvasRef.current;
61
+ if (canvas) {
62
+ const newImageData = canvas.toDataURL('image/jpeg', 0.9);
63
+ onSave(newImageData);
64
+ }
65
+ };
66
+
67
+
68
+ return (
69
+ <div className="fixed inset-0 bg-slate-900/80 backdrop-blur-md z-50 flex items-center justify-center p-4">
70
+ <div className="bg-slate-800 rounded-xl shadow-2xl w-full max-w-4xl max-h-full flex flex-col border border-slate-700">
71
+ <header className="flex items-center justify-between p-4 border-b border-slate-700 flex-shrink-0">
72
+ <h2 className="text-lg font-bold text-white">Image Editor</h2>
73
+ <button onClick={onClose} className="p-2 text-slate-400 hover:text-white rounded-full hover:bg-slate-700">
74
+ <X className="w-6 h-6" />
75
+ </button>
76
+ </header>
77
+ <main className="flex-1 flex flex-col md:flex-row gap-4 p-4 overflow-hidden">
78
+ <div className="flex-1 bg-slate-900/50 rounded-lg flex items-center justify-center overflow-auto min-h-0">
79
+ <canvas ref={canvasRef} className="max-w-full max-h-full object-contain"></canvas>
80
+ </div>
81
+ <aside className="w-full md:w-64 flex-shrink-0 space-y-6 bg-slate-800/50 p-4 rounded-lg">
82
+ <h3 className="text-md font-semibold text-fuchsia-400">Filters</h3>
83
+ <FilterSlider name="brightness" value={filters.brightness} onChange={(name, value) => setFilters(f => ({...f, [name]: value}))} />
84
+ <FilterSlider name="contrast" value={filters.contrast} onChange={(name, value) => setFilters(f => ({...f, [name]: value}))} />
85
+ <FilterSlider name="grayscale" max={100} value={filters.grayscale} onChange={(name, value) => setFilters(f => ({...f, [name]: value}))} />
86
+ <button onClick={() => setFilters({ brightness: 100, contrast: 100, grayscale: 0 })} className="w-full text-sm text-center py-2 bg-slate-700 hover:bg-slate-600 rounded-lg">Reset Filters</button>
87
+ </aside>
88
+ </main>
89
+ <footer className="flex items-center justify-end gap-4 p-4 border-t border-slate-700 flex-shrink-0">
90
+ <button onClick={onClose} className="text-slate-300 hover:text-white px-4 py-2 rounded-lg">Cancel</button>
91
+ <button onClick={handleSave} className="bg-fuchsia-600 hover:bg-fuchsia-700 text-white font-bold px-4 py-2 rounded-lg flex items-center gap-2">
92
+ <Save className="w-5 h-5"/>
93
+ Save Changes
94
+ </button>
95
+ </footer>
96
+ </div>
97
+ </div>
98
+ );
99
+ };
100
+
101
+ export default ImageEditorModal;
vanhri-ai---the-mind-that-builds-worlds/components/Interactive3DModel.tsx ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useEffect, useRef, useState } from 'react';
2
+ import * as THREE from 'three';
3
+ import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js';
4
+ import { Cube, X } from './Icons';
5
+
6
+ interface Interactive3DModelProps {
7
+ code: string;
8
+ }
9
+
10
+ const Interactive3DModel: React.FC<Interactive3DModelProps> = ({ code }) => {
11
+ const mountRef = useRef<HTMLDivElement>(null);
12
+ const [error, setError] = useState<string | null>(null);
13
+
14
+ useEffect(() => {
15
+ const mountNode = mountRef.current;
16
+ if (!mountNode) return;
17
+
18
+ // Clear previous canvas if any
19
+ mountNode.innerHTML = '';
20
+ setError(null);
21
+
22
+ // --- Scene setup ---
23
+ const scene = new THREE.Scene();
24
+ scene.background = new THREE.Color(0x1e293b); // slate-800
25
+
26
+ const camera = new THREE.PerspectiveCamera(75, mountNode.clientWidth / mountNode.clientHeight, 0.1, 1000);
27
+ camera.position.z = 5;
28
+
29
+ const renderer = new THREE.WebGLRenderer({ antialias: true });
30
+ renderer.setSize(mountNode.clientWidth, mountNode.clientHeight);
31
+ renderer.setPixelRatio(window.devicePixelRatio);
32
+ mountNode.appendChild(renderer.domElement);
33
+
34
+ // --- Controls ---
35
+ const controls = new OrbitControls(camera, renderer.domElement);
36
+ controls.enableDamping = true;
37
+ controls.minDistance = 1;
38
+ controls.maxDistance = 100;
39
+
40
+ // --- Lighting ---
41
+ const ambientLight = new THREE.AmbientLight(0xffffff, 0.7);
42
+ scene.add(ambientLight);
43
+ const directionalLight = new THREE.DirectionalLight(0xffffff, 1);
44
+ directionalLight.position.set(5, 10, 7.5);
45
+ scene.add(directionalLight);
46
+
47
+ // --- Execute AI-generated code ---
48
+ try {
49
+ if (!code) throw new Error("No code provided to generate the 3D model.");
50
+ const sceneSetupFunction = new Function('THREE', 'scene', code);
51
+ sceneSetupFunction(THREE, scene);
52
+ } catch (e: any) {
53
+ console.error("Error executing generated 3D code:", e);
54
+ setError(`Failed to build 3D model. Error: ${e.message}`);
55
+ }
56
+
57
+
58
+ // --- Animation loop ---
59
+ let animationFrameId: number;
60
+ const animate = () => {
61
+ animationFrameId = requestAnimationFrame(animate);
62
+ controls.update();
63
+ renderer.render(scene, camera);
64
+ };
65
+ animate();
66
+
67
+ // --- Responsive canvas ---
68
+ const handleResize = () => {
69
+ if (mountNode) {
70
+ camera.aspect = mountNode.clientWidth / mountNode.clientHeight;
71
+ camera.updateProjectionMatrix();
72
+ renderer.setSize(mountNode.clientWidth, mountNode.clientHeight);
73
+ }
74
+ };
75
+ window.addEventListener('resize', handleResize);
76
+
77
+
78
+ // --- Cleanup ---
79
+ return () => {
80
+ window.removeEventListener('resize', handleResize);
81
+ cancelAnimationFrame(animationFrameId);
82
+ if(mountNode) mountNode.innerHTML = '';
83
+ // Dispose of Three.js objects to free up memory
84
+ scene.traverse(object => {
85
+ if (object instanceof THREE.Mesh) {
86
+ if (object.geometry) object.geometry.dispose();
87
+ if (object.material) {
88
+ if (Array.isArray(object.material)) {
89
+ object.material.forEach(material => material.dispose());
90
+ } else {
91
+ object.material.dispose();
92
+ }
93
+ }
94
+ }
95
+ });
96
+ renderer.dispose();
97
+ };
98
+ }, [code]);
99
+
100
+ return (
101
+ <div className="w-full aspect-video bg-slate-800 rounded-lg border border-slate-700 mt-2 relative overflow-hidden">
102
+ <div ref={mountRef} className="w-full h-full" />
103
+ <div className="absolute top-2 left-2 flex items-center gap-2 bg-slate-900/50 text-fuchsia-300 text-xs font-semibold px-2 py-1 rounded-full border border-fuchsia-500/30 backdrop-blur-sm">
104
+ <Cube className="w-3 h-3"/>
105
+ Interactive 3D Model
106
+ </div>
107
+ {error && (
108
+ <div className="absolute inset-0 bg-red-900/80 backdrop-blur-sm flex flex-col items-center justify-center p-4 text-center">
109
+ <X className="w-8 h-8 text-red-300 mb-2"/>
110
+ <h3 className="font-bold text-red-200">Could Not Render 3D Model</h3>
111
+ <p className="text-xs text-red-300 mt-1">{error}</p>
112
+ </div>
113
+ )}
114
+ </div>
115
+ );
116
+ };
117
+
118
+ export default Interactive3DModel;
vanhri-ai---the-mind-that-builds-worlds/components/Interactive3DPhoto.tsx ADDED
File without changes
vanhri-ai---the-mind-that-builds-worlds/components/Login.tsx ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import React, { useState } from 'react';
3
+ import { Bot, KeyRound, ArrowLeft } from './Icons';
4
+ import { authService } from '../services/authService';
5
+
6
+ interface LoginProps {
7
+ onLoginSuccess: (user: {id: string, email: string}) => void;
8
+ onNavigateToSignUp: () => void;
9
+ onNavigateBack: () => void;
10
+ }
11
+
12
+ const Login: React.FC<LoginProps> = ({ onLoginSuccess, onNavigateToSignUp, onNavigateBack }) => {
13
+ const [email, setEmail] = useState('');
14
+ const [password, setPassword] = useState('');
15
+ const [error, setError] = useState('');
16
+ const [isLoading, setIsLoading] = useState(false);
17
+
18
+ const handleSubmit = async (e: React.FormEvent) => {
19
+ e.preventDefault();
20
+ if (!email.trim() || !password.trim()) {
21
+ setError("Email and password cannot be empty.");
22
+ return;
23
+ }
24
+ setError('');
25
+ setIsLoading(true);
26
+
27
+ const result = await authService.login(email, password);
28
+ if (result.success && result.user) {
29
+ onLoginSuccess(result.user);
30
+ } else {
31
+ setError(result.message);
32
+ }
33
+ setIsLoading(false);
34
+ };
35
+
36
+ return (
37
+ <div className="w-full h-screen flex items-center justify-center bg-slate-900 p-4 relative">
38
+ <button onClick={onNavigateBack} className="absolute top-6 left-6 flex items-center gap-2 text-slate-400 hover:text-white transition-colors">
39
+ <ArrowLeft className="w-5 h-5" />
40
+ Back
41
+ </button>
42
+ <div className="w-full max-w-sm text-center">
43
+ <div className="inline-block p-4 bg-fuchsia-600/20 rounded-full mb-6 border border-fuchsia-500/30">
44
+ <Bot className="w-16 h-16 text-fuchsia-400" />
45
+ </div>
46
+ <h1 className="text-4xl font-bold text-white">Unlock Session</h1>
47
+ <p className="text-slate-400 mt-2 mb-8">Log in to your Vanhri AI account</p>
48
+
49
+ <form onSubmit={handleSubmit} className="space-y-4">
50
+ <input
51
+ type="email"
52
+ value={email}
53
+ onChange={(e) => setEmail(e.target.value)}
54
+ placeholder="Enter your email"
55
+ className="w-full p-3 bg-slate-800 border border-slate-700 rounded-lg text-white placeholder-slate-500 focus:ring-2 focus:ring-fuchsia-500 focus:border-fuchsia-500 outline-none transition"
56
+ required
57
+ autoComplete="email"
58
+ />
59
+ <input
60
+ type="password"
61
+ value={password}
62
+ onChange={(e) => setPassword(e.target.value)}
63
+ placeholder="Enter your password"
64
+ className="w-full p-3 bg-slate-800 border border-slate-700 rounded-lg text-white placeholder-slate-500 focus:ring-2 focus:ring-fuchsia-500 focus:border-fuchsia-500 outline-none transition"
65
+ required
66
+ autoComplete="current-password"
67
+ />
68
+
69
+ {error && <p className="text-red-400 text-sm">{error}</p>}
70
+
71
+ <button
72
+ type="submit"
73
+ disabled={isLoading || !email.trim() || !password.trim()}
74
+ className="w-full bg-fuchsia-600 text-white font-bold py-3 px-4 rounded-lg hover:bg-fuchsia-700 disabled:bg-slate-700 disabled:cursor-not-allowed flex items-center justify-center gap-2 transition-all duration-300"
75
+ >
76
+ <KeyRound className="w-5 h-5" />
77
+ {isLoading ? 'Authenticating...' : 'Unlock Session'}
78
+ </button>
79
+ </form>
80
+ <p className="text-sm text-slate-500 text-center mt-6">
81
+ Don't have an account?{' '}
82
+ <button onClick={onNavigateToSignUp} className="font-semibold text-fuchsia-400 hover:underline">
83
+ Sign up
84
+ </button>
85
+ </p>
86
+ <p className="text-xs text-slate-600 text-center mt-4">Your conversations will be end-to-end encrypted and saved in this browser.</p>
87
+ </div>
88
+ </div>
89
+ );
90
+ };
91
+
92
+ export default Login;
vanhri-ai---the-mind-that-builds-worlds/components/MessageBubble.tsx ADDED
@@ -0,0 +1,220 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useState } from 'react';
2
+ import { Message } from '../types';
3
+ import { User, Bot, KeyRound, Search, Volume2, VolumeX, Edit, Download, Users as AgentIcon } from './Icons';
4
+ import { ttsService } from '../services/ttsService';
5
+ import MindMapDisplay from './MindMapDisplay';
6
+ import Interactive3DModel from './Interactive3DModel';
7
+ import ChartDisplay from './ChartDisplay';
8
+
9
+ // A more robust markdown-to-HTML converter that handles text, code blocks, bold, and links.
10
+ const SimpleMarkdown: React.FC<{ text: string }> = ({ text }) => {
11
+ if (typeof text !== 'string' || !text) {
12
+ return null;
13
+ }
14
+
15
+ const elements: React.ReactNode[] = [];
16
+ let lastIndex = 0;
17
+ // Regex to find all markdown parts: ```...```, **...**, and links
18
+ const regex = /(```[\s\S]*?```)|(\*\*[^\*]+\*\*)|(https?:\/\/[^\s]+)/g;
19
+ let match;
20
+
21
+ while ((match = regex.exec(text)) !== null) {
22
+ // Push the text before the match
23
+ if (match.index > lastIndex) {
24
+ elements.push(<span key={`text-${lastIndex}`}>{text.substring(lastIndex, match.index)}</span>);
25
+ }
26
+
27
+ const [fullMatch, codeBlock, boldText, link] = match;
28
+
29
+ if (codeBlock) {
30
+ const code = codeBlock.replace(/```(\w*\n)?/g, '').replace(/```$/, '');
31
+ elements.push(
32
+ <pre key={`code-${match.index}`} className="bg-slate-900 text-white rounded-md p-4 my-2 text-sm overflow-x-auto font-mono">
33
+ <code>{code}</code>
34
+ </pre>
35
+ );
36
+ } else if (boldText) {
37
+ elements.push(<strong key={`bold-${match.index}`}>{boldText.substring(2, boldText.length - 2)}</strong>);
38
+ } else if (link) {
39
+ elements.push(<a href={link} key={`link-${match.index}`} target="_blank" rel="noopener noreferrer" className="text-fuchsia-400 hover:underline">{link}</a>);
40
+ }
41
+
42
+ lastIndex = regex.lastIndex;
43
+ }
44
+
45
+ // Push the remaining text after the last match
46
+ if (lastIndex < text.length) {
47
+ elements.push(<span key={`text-${lastIndex}`}>{text.substring(lastIndex)}</span>);
48
+ }
49
+
50
+ return <>{elements}</>;
51
+ };
52
+
53
+
54
+ interface ImageControlsProps {
55
+ onEdit: () => void;
56
+ onDownload: () => void;
57
+ }
58
+
59
+ const ImageControls: React.FC<ImageControlsProps> = ({ onEdit, onDownload }) => (
60
+ <div className="absolute top-2 right-2 z-10 flex items-center gap-1.5 opacity-0 group-hover:opacity-100 transition-opacity duration-200 bg-slate-900/50 backdrop-blur-sm p-1.5 rounded-full">
61
+ <button onClick={onEdit} className="p-1.5 text-slate-200 hover:text-white bg-slate-700/50 hover:bg-slate-700 rounded-full" aria-label="Edit image">
62
+ <Edit className="w-4 h-4" />
63
+ </button>
64
+ <button onClick={onDownload} className="p-1.5 text-slate-200 hover:text-white bg-slate-700/50 hover:bg-slate-700 rounded-full" aria-label="Download image">
65
+ <Download className="w-4 h-4" />
66
+ </button>
67
+ </div>
68
+ );
69
+
70
+
71
+ interface MessageBubbleProps {
72
+ message: Message;
73
+ isStreaming?: boolean;
74
+ onEditImage: (messageId: string, imageIndex: number | null, imageData: string) => void;
75
+ }
76
+
77
+ const MessageBubble: React.FC<MessageBubbleProps> = ({ message, isStreaming = false, onEditImage }) => {
78
+ const isUser = message.sender === 'user';
79
+ const [isSpeaking, setIsSpeaking] = useState(false);
80
+
81
+ const handleAudioToggle = () => {
82
+ if (isSpeaking) {
83
+ ttsService.cancel();
84
+ setIsSpeaking(false);
85
+ } else {
86
+ setIsSpeaking(true);
87
+ ttsService.speak({
88
+ text: message.text,
89
+ onEnd: () => setIsSpeaking(false)
90
+ });
91
+ }
92
+ };
93
+
94
+ const handleDownload = (imageData: string, index: number) => {
95
+ const link = document.createElement('a');
96
+ link.href = imageData;
97
+ link.download = `vanhri-image-${message.id}-${index}.jpeg`;
98
+ document.body.appendChild(link);
99
+ link.click();
100
+ document.body.removeChild(link);
101
+ };
102
+
103
+ const Icon = message.isAgent ? AgentIcon : Bot;
104
+ const bgColor = message.isAgent ? 'bg-sky-600' : 'bg-fuchsia-600';
105
+
106
+ return (
107
+ <div className={`flex items-start gap-4 ${isUser ? 'justify-end' : 'justify-start'}`}>
108
+ {!isUser && (
109
+ <div className={`w-8 h-8 flex-shrink-0 ${bgColor} text-white flex items-center justify-center rounded-full mt-1`}>
110
+ <Icon className="w-5 h-5" />
111
+ </div>
112
+ )}
113
+ <div
114
+ className={`max-w-3xl w-full rounded-2xl px-5 py-3 shadow-md ${
115
+ isUser
116
+ ? 'bg-gradient-to-br from-blue-600 to-blue-700 text-white rounded-br-none'
117
+ : 'bg-slate-800 text-slate-200 rounded-bl-none'
118
+ }`}
119
+ >
120
+ {message.image && (
121
+ <div className="relative group mt-2">
122
+ <img src={message.image} alt="User upload" className="rounded-lg mb-2 max-h-72 w-auto" />
123
+ <ImageControls onEdit={() => onEditImage(message.id, null, message.image!)} onDownload={() => handleDownload(message.image!, 0)} />
124
+ </div>
125
+ )}
126
+
127
+ {message.text && (
128
+ <div className="flex items-start gap-2">
129
+ <div className="text-base leading-relaxed whitespace-pre-wrap font-sans flex-1">
130
+ <SimpleMarkdown text={message.text} />
131
+ {isStreaming && <span className="inline-block w-2 h-5 bg-fuchsia-400 ml-1 blinking-cursor"></span>}
132
+ </div>
133
+ {!isUser && !isStreaming && ttsService.isSupported && (
134
+ <button
135
+ onClick={handleAudioToggle}
136
+ className="p-2 text-slate-400 hover:text-fuchsia-400 rounded-full hover:bg-slate-700 transition-colors"
137
+ aria-label={isSpeaking ? "Stop audio" : "Play audio"}
138
+ >
139
+ {isSpeaking ? <VolumeX className="w-5 h-5" /> : <Volume2 className="w-5 h-5" />}
140
+ </button>
141
+ )}
142
+ </div>
143
+ )}
144
+
145
+ {message.chartData && (
146
+ <div className="mt-2">
147
+ <ChartDisplay data={message.chartData} />
148
+ </div>
149
+ )}
150
+
151
+ {message.mindMap && (
152
+ <div className="mt-2">
153
+ <MindMapDisplay data={message.mindMap} />
154
+ </div>
155
+ )}
156
+
157
+ {message.generatedImages && (
158
+ <div className="grid grid-cols-2 gap-2 mt-2">
159
+ {message.generatedImages.map((imgData, index) => (
160
+ <div key={index} className="relative group">
161
+ <img src={`data:image/jpeg;base64,${imgData}`} alt={`Generated image ${index + 1}`} className="rounded-lg w-full h-auto" />
162
+ <ImageControls onEdit={() => onEditImage(message.id, index, `data:image/jpeg;base64,${imgData}`)} onDownload={() => handleDownload(`data:image/jpeg;base64,${imgData}`, index)} />
163
+ </div>
164
+ ))}
165
+ </div>
166
+ )}
167
+
168
+ {message.generated3DCode && (
169
+ <div className="mt-2">
170
+ <Interactive3DModel code={message.generated3DCode} />
171
+ </div>
172
+ )}
173
+
174
+ {message.memory && (
175
+ <div className="mt-4 pt-3 border-t border-slate-700">
176
+ <h4 className="text-sm font-semibold text-slate-400 mb-2 flex items-center gap-2">
177
+ <KeyRound className="w-4 h-4 text-fuchsia-500" />
178
+ Vanhri AI has formed a new memory
179
+ </h4>
180
+ <div className="bg-slate-900/70 p-3 rounded-lg">
181
+ <p className="font-bold text-fuchsia-400">{message.memory.title}</p>
182
+ <p className="text-sm text-slate-300 mt-1 italic">"{message.memory.summary}"</p>
183
+ </div>
184
+ </div>
185
+ )}
186
+
187
+ {message.groundingCitations && message.groundingCitations.length > 0 && (
188
+ <div className="mt-4 pt-3 border-t border-slate-700">
189
+ <h4 className="text-sm font-semibold text-slate-400 mb-2 flex items-center gap-2">
190
+ <Search className="w-4 h-4 text-fuchsia-500" />
191
+ Sources
192
+ </h4>
193
+ <div className="flex flex-wrap gap-2">
194
+ {message.groundingCitations.map((citation) => (
195
+ <a
196
+ key={citation.uri}
197
+ href={citation.uri}
198
+ target="_blank"
199
+ rel="noopener noreferrer"
200
+ className="bg-slate-900/70 hover:bg-slate-900 text-xs text-slate-300 hover:text-fuchsia-400 px-2.5 py-1 rounded-full transition-colors truncate"
201
+ title={citation.title}
202
+ >
203
+ {citation.title || new URL(citation.uri).hostname}
204
+ </a>
205
+ ))}
206
+ </div>
207
+ </div>
208
+ )}
209
+
210
+ </div>
211
+ {isUser && (
212
+ <div className="w-8 h-8 flex-shrink-0 bg-slate-700 text-slate-300 flex items-center justify-center rounded-full mt-1">
213
+ <User className="w-5 h-5" />
214
+ </div>
215
+ )}
216
+ </div>
217
+ );
218
+ };
219
+
220
+ export default MessageBubble;
vanhri-ai---the-mind-that-builds-worlds/components/MindMapDisplay.tsx ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React from 'react';
2
+ import ReactFlow, { MiniMap, Controls, Background, BackgroundVariant, Panel } from 'reactflow';
3
+ import { MindMapData } from '../types';
4
+ import { Share2 } from './Icons';
5
+
6
+ interface MindMapDisplayProps {
7
+ data: MindMapData;
8
+ }
9
+
10
+ const MindMapDisplay: React.FC<MindMapDisplayProps> = ({ data }) => {
11
+ return (
12
+ <div style={{ height: '500px', width: '100%' }} className="bg-slate-900/50 rounded-lg border border-slate-700 mt-2 relative">
13
+ <ReactFlow
14
+ nodes={data.nodes}
15
+ edges={data.edges}
16
+ fitView
17
+ proOptions={{ hideAttribution: true }}
18
+ nodesDraggable={true}
19
+ nodesConnectable={false}
20
+ >
21
+ <Panel position="top-left" className="p-2 bg-slate-800/50 border border-slate-700 rounded-lg text-fuchsia-300 flex items-center gap-2 text-sm font-semibold">
22
+ <Share2 className="w-4 h-4" />
23
+ Interactive Mind Map
24
+ </Panel>
25
+ <MiniMap nodeStrokeWidth={3} zoomable pannable
26
+ nodeColor={(node) => {
27
+ switch (node.type) {
28
+ case 'input': return '#a21caf';
29
+ default: return '#581c87';
30
+ }
31
+ }}
32
+ />
33
+ <Controls />
34
+ <Background color="#475569" variant={BackgroundVariant.Dots} gap={16} size={1} />
35
+ </ReactFlow>
36
+ </div>
37
+ );
38
+ };
39
+
40
+ export default MindMapDisplay;
vanhri-ai---the-mind-that-builds-worlds/components/NovaAvatarDisplay.tsx ADDED
File without changes
vanhri-ai---the-mind-that-builds-worlds/components/NovaVisualizer.tsx ADDED
@@ -0,0 +1,245 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useRef, useEffect, useMemo } from 'react';
2
+ import * as THREE from 'three';
3
+
4
+ interface NovaVisualizerProps {
5
+ isSpeaking: boolean;
6
+ isThinking: boolean;
7
+ }
8
+
9
+ const vertexShader = `
10
+ uniform float u_time;
11
+ uniform float u_intensity;
12
+ varying vec3 v_normal;
13
+ varying vec3 v_position;
14
+
15
+ // Simplex noise function
16
+ vec3 mod289(vec3 x) { return x - floor(x * (1.0 / 289.0)) * 289.0; }
17
+ vec4 mod289(vec4 x) { return x - floor(x * (1.0 / 289.0)) * 289.0; }
18
+ vec4 permute(vec4 x) { return mod289(((x*34.0)+1.0)*x); }
19
+ vec4 taylorInvSqrt(vec4 r) { return 1.79284291400159 - 0.85373472095314 * r; }
20
+ float snoise(vec3 v) {
21
+ const vec2 C = vec2(1.0/6.0, 1.0/3.0);
22
+ const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
23
+ vec3 i = floor(v + dot(v, C.yyy));
24
+ vec3 x0 = v - i + dot(i, C.xxx);
25
+ vec3 g = step(x0.yzx, x0.xyz);
26
+ vec3 l = 1.0 - g;
27
+ vec3 i1 = min(g.xyz, l.zxy);
28
+ vec3 i2 = max(g.xyz, l.zxy);
29
+ vec3 x1 = x0 - i1 + C.xxx;
30
+ vec3 x2 = x0 - i2 + C.yyy;
31
+ vec3 x3 = x0 - D.yyy;
32
+ i = mod289(i);
33
+ vec4 p = permute(permute(permute(
34
+ i.z + vec4(0.0, i1.z, i2.z, 1.0))
35
+ + i.y + vec4(0.0, i1.y, i2.y, 1.0))
36
+ + i.x + vec4(0.0, i1.x, i2.x, 1.0));
37
+ float n_ = 0.142857142857;
38
+ vec3 ns = n_ * D.wyz - D.xzx;
39
+ vec4 j = p - 49.0 * floor(p * ns.z * ns.z);
40
+ vec4 x_ = floor(j * ns.z);
41
+ vec4 y_ = floor(j - 7.0 * x_);
42
+ vec4 x = x_ * ns.x + ns.yyyy;
43
+ vec4 y = y_ * ns.x + ns.yyyy;
44
+ vec4 h = 1.0 - abs(x) - abs(y);
45
+ vec4 b0 = vec4(x.xy, y.xy);
46
+ vec4 b1 = vec4(x.zw, y.zw);
47
+ vec4 s0 = floor(b0)*2.0 + 1.0;
48
+ vec4 s1 = floor(b1)*2.0 + 1.0;
49
+ vec4 sh = -step(h, vec4(0.0));
50
+ vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy;
51
+ vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww;
52
+ vec3 p0 = vec3(a0.xy,h.x);
53
+ vec3 p1 = vec3(a0.zw,h.y);
54
+ vec3 p2 = vec3(a1.xy,h.z);
55
+ vec3 p3 = vec3(a1.zw,h.w);
56
+ vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2,p2), dot(p3,p3)));
57
+ p0 *= norm.x;
58
+ p1 *= norm.y;
59
+ p2 *= norm.z;
60
+ p3 *= norm.w;
61
+ vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
62
+ m = m * m;
63
+ return 42.0 * dot(m*m, vec4(dot(p0,x0), dot(p1,x1), dot(p2,x2), dot(p3,x3)));
64
+ }
65
+
66
+ void main() {
67
+ v_normal = normal;
68
+ v_position = position;
69
+
70
+ float noise = snoise(position * 2.0 + u_time * 0.2);
71
+ vec3 displacedPosition = position + normal * noise * u_intensity * 0.3;
72
+
73
+ gl_Position = projectionMatrix * modelViewMatrix * vec4(displacedPosition, 1.0);
74
+ }
75
+ `;
76
+
77
+ const fragmentShader = `
78
+ uniform float u_time;
79
+ uniform float u_intensity;
80
+ varying vec3 v_normal;
81
+ varying vec3 v_position;
82
+
83
+ // Simplex noise function (same as vertex shader)
84
+ vec3 mod289(vec3 x) { return x - floor(x * (1.0 / 289.0)) * 289.0; }
85
+ vec4 mod289(vec4 x) { return x - floor(x * (1.0 / 289.0)) * 289.0; }
86
+ vec4 permute(vec4 x) { return mod289(((x*34.0)+1.0)*x); }
87
+ vec4 taylorInvSqrt(vec4 r) { return 1.79284291400159 - 0.85373472095314 * r; }
88
+ float snoise(vec3 v) {
89
+ const vec2 C = vec2(1.0/6.0, 1.0/3.0);
90
+ const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
91
+ vec3 i = floor(v + dot(v, C.yyy));
92
+ vec3 x0 = v - i + dot(i, C.xxx);
93
+ vec3 g = step(x0.yzx, x0.xyz);
94
+ vec3 l = 1.0 - g;
95
+ vec3 i1 = min(g.xyz, l.zxy);
96
+ vec3 i2 = max(g.xyz, l.zxy);
97
+ vec3 x1 = x0 - i1 + C.xxx;
98
+ vec3 x2 = x0 - i2 + C.yyy;
99
+ vec3 x3 = x0 - D.yyy;
100
+ i = mod289(i);
101
+ vec4 p = permute(permute(permute(
102
+ i.z + vec4(0.0, i1.z, i2.z, 1.0))
103
+ + i.y + vec4(0.0, i1.y, i2.y, 1.0))
104
+ + i.x + vec4(0.0, i1.x, i2.x, 1.0));
105
+ float n_ = 0.142857142857;
106
+ vec3 ns = n_ * D.wyz - D.xzx;
107
+ vec4 j = p - 49.0 * floor(p * ns.z * ns.z);
108
+ vec4 x_ = floor(j * ns.z);
109
+ vec4 y_ = floor(j - 7.0 * x_);
110
+ vec4 x = x_ * ns.x + ns.yyyy;
111
+ vec4 y = y_ * ns.x + ns.yyyy;
112
+ vec4 h = 1.0 - abs(x) - abs(y);
113
+ vec4 b0 = vec4(x.xy, y.xy);
114
+ vec4 b1 = vec4(x.zw, y.zw);
115
+ vec4 s0 = floor(b0)*2.0 + 1.0;
116
+ vec4 s1 = floor(b1)*2.0 + 1.0;
117
+ vec4 sh = -step(h, vec4(0.0));
118
+ vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy;
119
+ vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww;
120
+ vec3 p0 = vec3(a0.xy,h.x);
121
+ vec3 p1 = vec3(a0.zw,h.y);
122
+ vec3 p2 = vec3(a1.xy,h.z);
123
+ vec3 p3 = vec3(a1.zw,h.w);
124
+ vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2,p2), dot(p3,p3)));
125
+ p0 *= norm.x;
126
+ p1 *= norm.y;
127
+ p2 *= norm.z;
128
+ p3 *= norm.w;
129
+ vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
130
+ m = m * m;
131
+ return 42.0 * dot(m*m, vec4(dot(p0,x0), dot(p1,x1), dot(p2,x2), dot(p3,x3)));
132
+ }
133
+
134
+ void main() {
135
+ float noise = snoise(v_position * 5.0 + u_time * 0.5);
136
+ vec3 color1 = vec3(0.63, 0.07, 0.93); // Fuchsia
137
+ vec3 color2 = vec3(0.36, 0.54, 0.98); // Blueish
138
+ vec3 finalColor = mix(color1, color2, v_position.y);
139
+
140
+ float rim = 1.0 - dot(normalize(v_normal), vec3(0.0, 0.0, 1.0));
141
+ rim = smoothstep(0.6, 1.0, rim);
142
+
143
+ gl_FragColor = vec4(finalColor * (0.5 + noise * 0.5) + vec3(rim) * u_intensity, 1.0);
144
+ }
145
+ `;
146
+
147
/**
 * Animated 3D "energy orb" visualizer rendered with three.js into a plain div.
 *
 * The orb is a noise-displaced icosphere driven by the `vertexShader` /
 * `fragmentShader` strings defined above. Its displacement intensity eases
 * toward a target derived from the current state: idle (0.2), thinking (0.4),
 * or speaking (0.8) — speaking wins when both flags are set.
 *
 * Props:
 *   isSpeaking / isThinking — state flags read every animation frame.
 */
const NovaVisualizer: React.FC<NovaVisualizerProps> = ({ isSpeaking, isThinking }) => {
  const mountRef = useRef<HTMLDivElement>(null);
  const rendererRef = useRef<THREE.WebGLRenderer | null>(null);
  const cameraRef = useRef<THREE.PerspectiveCamera | null>(null);
  const animationFrameIdRef = useRef<number | null>(null);

  // Created once; the render loop mutates .value fields in place, so the
  // object identity must stay stable across renders.
  const uniforms = useMemo(
    () => ({
      u_time: { value: 0.0 },
      u_intensity: { value: 0.0 },
    }),
    []
  );

  // Mirror the latest prop values into refs so the animation loop (created a
  // single time in the mount effect) always sees fresh values without the
  // scene-setup effect having to re-run.
  const speakingRef = useRef(isSpeaking);
  const thinkingRef = useRef(isThinking);
  useEffect(() => {
    speakingRef.current = isSpeaking;
  }, [isSpeaking]);
  useEffect(() => {
    thinkingRef.current = isThinking;
  }, [isThinking]);

  useEffect(() => {
    const host = mountRef.current;
    if (!host) return;

    // --- Scene, camera, renderer ---
    const scene = new THREE.Scene();
    const camera = new THREE.PerspectiveCamera(75, host.clientWidth / host.clientHeight, 0.1, 1000);
    camera.position.z = 2;
    cameraRef.current = camera;

    const renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true });
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(host.clientWidth, host.clientHeight);
    host.appendChild(renderer.domElement);
    rendererRef.current = renderer;

    // High-detail icosphere; the vertex shader displaces it along its normals.
    const geometry = new THREE.IcosahedronGeometry(1, 64);
    const material = new THREE.ShaderMaterial({
      vertexShader,
      fragmentShader,
      uniforms,
    });
    const orb = new THREE.Mesh(geometry, material);
    scene.add(orb);

    const clock = new THREE.Clock();

    const renderFrame = () => {
      animationFrameIdRef.current = requestAnimationFrame(renderFrame);

      uniforms.u_time.value = clock.getElapsedTime();

      // Ease the displacement intensity toward the state-dependent target
      // (speaking takes precedence over thinking, which beats idle).
      const target = speakingRef.current ? 0.8 : thinkingRef.current ? 0.4 : 0.2;
      uniforms.u_intensity.value += (target - uniforms.u_intensity.value) * 0.1;

      // Slow idle spin.
      orb.rotation.y += 0.001;

      renderer.render(scene, camera);
    };

    renderFrame();

    const handleResize = () => {
      if (host && rendererRef.current && cameraRef.current) {
        cameraRef.current.aspect = host.clientWidth / host.clientHeight;
        cameraRef.current.updateProjectionMatrix();
        rendererRef.current.setSize(host.clientWidth, host.clientHeight);
      }
    };
    window.addEventListener('resize', handleResize);

    // Teardown: stop the loop, detach the canvas, release GPU resources.
    return () => {
      window.removeEventListener('resize', handleResize);
      if (animationFrameIdRef.current) {
        cancelAnimationFrame(animationFrameIdRef.current);
      }
      const canvas = rendererRef.current?.domElement;
      if (host && canvas && host.contains(canvas)) {
        host.removeChild(canvas);
      }
      geometry.dispose();
      material.dispose();
      renderer.dispose();
    };
    // `uniforms` is memoized with an empty dep list, so this effectively runs once on mount.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [uniforms]);

  return <div ref={mountRef} className="w-full h-full" />;
};

export default NovaVisualizer;
vanhri-ai---the-mind-that-builds-worlds/components/ProactiveSuggestion.tsx ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React from 'react';
2
+ import { Sparkles, X } from './Icons';
3
+ import { ProactiveSuggestion as ProactiveSuggestionType } from '../types';
4
+
5
+ interface ProactiveSuggestionProps {
6
+ suggestion: ProactiveSuggestionType;
7
+ onAccept: (action: string) => void;
8
+ onDismiss: () => void;
9
+ }
10
+
11
+ const ProactiveSuggestion: React.FC<ProactiveSuggestionProps> = ({ suggestion, onAccept, onDismiss }) => {
12
+ return (
13
+ <div className="bg-slate-800/70 border border-fuchsia-500/30 rounded-lg p-3 mb-3 flex items-center gap-4 animate-fade-in-up">
14
+ <Sparkles className="w-6 h-6 text-fuchsia-400 flex-shrink-0" />
15
+ <div className="flex-1">
16
+ <p className="text-sm text-slate-200">{suggestion.text}</p>
17
+ </div>
18
+ <button
19
+ onClick={() => onAccept(suggestion.action)}
20
+ className="text-sm font-semibold bg-fuchsia-600 text-white px-3 py-1.5 rounded-md hover:bg-fuchsia-700 transition-colors"
21
+ >
22
+ {suggestion.actionText}
23
+ </button>
24
+ <button
25
+ onClick={onDismiss}
26
+ className="p-1.5 text-slate-500 hover:text-white rounded-full hover:bg-slate-700 transition-colors"
27
+ aria-label="Dismiss suggestion"
28
+ >
29
+ <X className="w-4 h-4" />
30
+ </button>
31
+ </div>
32
+ );
33
+ };
34
+
35
+ export default ProactiveSuggestion;
vanhri-ai---the-mind-that-builds-worlds/components/PromptInput.tsx ADDED
@@ -0,0 +1,259 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React, { useState, useRef, useEffect } from 'react';
import { Paperclip, Send, X, Microphone, Phone, Video, Users, Camera, BookOpen } from './Icons';

interface PromptInputProps {
  // Called with the typed text and the raw base64 image payload (no data-URL prefix).
  onSendMessage: (text: string, image: string | null) => void;
  disabled: boolean;
  onStartCall: (type: 'video' | 'audio') => void;
  onHandoff: () => void;
  isAgentChatActive: boolean;
  onOpenVisualSolver: () => void;
  // Receives the document as a data URL string plus its name and MIME type.
  onAddSource: (name: string, content: string, mimeType: string) => void;
}

/**
 * Composer bar at the bottom of the chat: multiline text entry, image
 * attachment, notebook-document upload, speech-to-text dictation, and
 * shortcuts for voice/video calls, human handoff, and the visual solver.
 */
const PromptInput: React.FC<PromptInputProps> = ({ onSendMessage, disabled, onStartCall, onHandoff, isAgentChatActive, onOpenVisualSolver, onAddSource }) => {
  const [text, setText] = useState('');
  // `image` holds the full data URL produced by FileReader.readAsDataURL.
  const [image, setImage] = useState<string | null>(null);
  const [imageName, setImageName] = useState<string>('');
  const [isListening, setIsListening] = useState(false);
  const fileInputRef = useRef<HTMLInputElement>(null);
  const docInputRef = useRef<HTMLInputElement>(null);
  const recognitionRef = useRef<any>(null); // Use `any` for cross-browser compatibility
  const placeholder = isListening ? "Listening..." : "Ask anything or describe an image to create...";


  // Submit the current text/image and reset the composer.
  const handleSend = () => {
    if (disabled || (!text.trim() && !image)) return;
    // split(',')[1] strips the "data:<mime>;base64," prefix, leaving raw base64.
    onSendMessage(text, image ? image.split(',')[1] : null);
    setText('');
    setImage(null);
    setImageName('');
    if(fileInputRef.current) {
      fileInputRef.current.value = "";
    }
  };

  // Stage a selected image as a data URL; it is only sent with handleSend.
  const handleImageChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (file) {
      const reader = new FileReader();
      reader.onloadend = () => {
        setImage(reader.result as string);
        setImageName(file.name);
      };
      reader.readAsDataURL(file);
    }
  };

  // Read a notebook document and forward it to the parent immediately.
  const handleDocChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (file) {
      if (file.size > 10 * 1024 * 1024) { // 10MB limit
        alert("File is too large. Please upload files smaller than 10MB.");
        return;
      }
      const reader = new FileReader();
      reader.onload = (event) => {
        // Content is delivered as a data URL string.
        const content = event.target?.result as string;
        // file.type can be "" for unknown extensions; fall back to a generic type.
        const mimeType = file.type || 'application/octet-stream';
        onAddSource(file.name, content, mimeType);
      };
      reader.onerror = () => {
        alert("Failed to read the file.");
      }
      reader.readAsDataURL(file);
    }
    // Clear the input so the same file can be selected again
    if (e.target) e.target.value = "";
  };


  // Enter sends; Shift+Enter inserts a newline.
  const handleKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault();
      handleSend();
    }
  };

  // Toggle speech-to-text dictation. The SpeechRecognition instance is
  // created lazily on first use and reused afterwards.
  const handleMicClick = async () => {
    // Lazily check for the API on click to prevent startup crashes
    const SpeechRecognition = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
    if (!SpeechRecognition) {
      alert("Speech recognition is not supported by your browser.");
      return;
    }

    // Second click while listening acts as a stop toggle.
    if (isListening) {
      recognitionRef.current?.stop();
      return;
    }

    // Lazy initialization on first click
    if (!recognitionRef.current) {
      const recognition = new SpeechRecognition();
      recognition.continuous = false;
      recognition.lang = 'en-US';
      recognition.interimResults = false;
      recognition.maxAlternatives = 1;

      recognition.onstart = () => setIsListening(true);
      recognition.onend = () => setIsListening(false);

      recognition.onresult = (event: any) => {
        // Append the newest final transcript to whatever is already typed.
        const transcript = event.results[event.results.length - 1][0].transcript;
        setText(prev => prev ? `${prev.trim()} ${transcript}` : transcript);
      };

      recognition.onerror = (event: any) => {
        console.error("Speech recognition error", event.error);
        if (event.error === 'not-allowed') {
          alert("Microphone access was denied. Please allow microphone access in your browser settings to use speech-to-text.");
        } else if (event.error === 'audio-capture') {
          alert("No microphone found or there was a hardware error. Please check your microphone.");
        } else if (event.error !== 'aborted' && event.error !== 'no-speech') {
          // 'aborted' and 'no-speech' are benign; surface everything else.
          alert(`An error occurred during speech recognition: ${event.error}`);
        }
        setIsListening(false); // Ensure listening state is reset
      };

      recognitionRef.current = recognition;
    }

    // Check permission status before attempting to start
    if (navigator.permissions) {
      try {
        const micPerm = await navigator.permissions.query({ name: 'microphone' as PermissionName });
        if (micPerm.state === 'denied') {
          alert("Microphone access is blocked. Please go to your browser settings to allow it.");
          return;
        }
      } catch (e) {
        // Some browsers don't support querying 'microphone'; let start() decide.
        console.warn("Permissions API not supported or failed, proceeding directly.", e);
      }
    }

    try {
      recognitionRef.current.start();
    } catch (err) {
      // start() throws if recognition is already running.
      console.error("Could not start speech recognition:", err);
      setIsListening(false);
    }
  };

  // Effect to clean up recognition on component unmount
  useEffect(() => {
    return () => {
      recognitionRef.current?.abort();
    };
  }, []);

  return (
    <div className="bg-slate-800 border border-slate-700 rounded-xl p-2 flex flex-col">
      {/* Pending image attachment chip */}
      {image && (
        <div className="flex items-center gap-2 bg-fuchsia-900/50 text-fuchsia-200 text-sm p-2 rounded-md mb-2">
          <Paperclip className="w-4 h-4" />
          <span>{imageName}</span>
          <button onClick={() => {setImage(null); setImageName('')}} className="ml-auto text-fuchsia-300 hover:text-fuchsia-100">
            <X className="w-4 h-4" />
          </button>
        </div>
      )}
      <div className="flex items-end gap-2">
        <textarea
          value={text}
          onChange={(e) => setText(e.target.value)}
          onKeyDown={handleKeyDown}
          placeholder={placeholder}
          className="flex-1 bg-transparent focus:outline-none resize-none p-2 placeholder-slate-400 text-slate-100"
          rows={1}
          disabled={disabled}
        />
        <button
          onClick={handleMicClick}
          disabled={disabled}
          className={`p-2 rounded-full transition-colors hover:bg-slate-700 disabled:opacity-50 disabled:hover:bg-transparent disabled:text-slate-600 ${
            isListening ? 'text-red-500 animate-pulse' : 'text-slate-400 hover:text-fuchsia-500'
          }`}
          aria-label="Use microphone"
        >
          <Microphone className="w-5 h-5" />
        </button>
        <button
          onClick={() => fileInputRef.current?.click()}
          disabled={disabled}
          className="p-2 text-slate-400 hover:text-fuchsia-500 transition-colors rounded-full hover:bg-slate-700 disabled:opacity-50 disabled:hover:bg-transparent disabled:text-slate-600"
          aria-label="Attach image"
        >
          <Paperclip className="w-5 h-5" />
        </button>
        <button
          onClick={() => docInputRef.current?.click()}
          disabled={disabled}
          className="p-2 text-slate-400 hover:text-fuchsia-500 transition-colors rounded-full hover:bg-slate-700 disabled:opacity-50 disabled:hover:bg-transparent disabled:text-slate-600"
          aria-label="Add a document to notebook"
        >
          <BookOpen className="w-5 h-5" />
        </button>
        <button
          onClick={onOpenVisualSolver}
          disabled={disabled}
          className="p-2 text-slate-400 hover:text-fuchsia-500 transition-colors rounded-full hover:bg-slate-700 disabled:opacity-50 disabled:hover:bg-transparent disabled:text-slate-600"
          aria-label="Open visual solver"
        >
          <Camera className="w-5 h-5" />
        </button>

        <div className="h-6 w-px bg-slate-700 mx-1"></div>

        {/* Call / handoff actions are unavailable while an agent chat is active */}
        <button
          onClick={() => onStartCall('audio')}
          disabled={disabled || isAgentChatActive}
          className="p-2 text-slate-400 hover:text-fuchsia-500 transition-colors rounded-full hover:bg-slate-700 disabled:opacity-50 disabled:hover:bg-transparent disabled:text-slate-600"
          aria-label="Start voice call"
        >
          <Phone className="w-5 h-5" />
        </button>
        <button
          onClick={() => onStartCall('video')}
          disabled={disabled || isAgentChatActive}
          className="p-2 text-slate-400 hover:text-fuchsia-500 transition-colors rounded-full hover:bg-slate-700 disabled:opacity-50 disabled:hover:bg-transparent disabled:text-slate-600"
          aria-label="Start video call"
        >
          <Video className="w-5 h-5" />
        </button>
        <button
          onClick={onHandoff}
          disabled={disabled || isAgentChatActive}
          className="p-2 text-slate-400 hover:text-fuchsia-500 transition-colors rounded-full hover:bg-slate-700 disabled:opacity-50 disabled:hover:bg-transparent disabled:text-slate-600"
          aria-label="Talk to a human"
        >
          <Users className="w-5 h-5" />
        </button>

        <button
          onClick={handleSend}
          disabled={disabled || (!text.trim() && !image)}
          className="p-2 rounded-full transition-all duration-200 bg-fuchsia-600 text-white disabled:bg-slate-600 disabled:cursor-not-allowed hover:bg-fuchsia-700"
          aria-label="Send message"
        >
          <Send className="w-5 h-5" />
        </button>
      </div>
      {/* Hidden file pickers triggered by the buttons above */}
      <input
        type="file"
        ref={fileInputRef}
        onChange={handleImageChange}
        className="hidden"
        accept="image/*"
      />
      <input
        type="file"
        ref={docInputRef}
        onChange={handleDocChange}
        className="hidden"
      />
    </div>
  );
};

export default PromptInput;
vanhri-ai---the-mind-that-builds-worlds/components/SettingsModal.tsx ADDED
File without changes
vanhri-ai---the-mind-that-builds-worlds/components/Sidebar.tsx ADDED
@@ -0,0 +1,176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React from 'react';
2
+ import { Memory, NotebookSource } from '../types';
3
+ import { X, LogOut, Trash2, KeyRound, Bot, Volume2, ChevronLeft, User, BookOpen } from './Icons';
4
+
5
+ interface SidebarProps {
6
+ isOpen: boolean;
7
+ onClose: () => void;
8
+ onLogout: () => void;
9
+ onClearChat: () => void;
10
+ onClearMemories: () => void;
11
+ onClearNotebook: () => void;
12
+ memories: Memory[];
13
+ notebookSources: NotebookSource[];
14
+ username: string;
15
+ isGuest: boolean;
16
+ isAutoPlayAudio: boolean;
17
+ onToggleAutoPlayAudio: (enabled: boolean) => void;
18
+ }
19
+
20
+ const Sidebar: React.FC<SidebarProps> = ({
21
+ isOpen,
22
+ onClose,
23
+ onLogout,
24
+ onClearChat,
25
+ onClearMemories,
26
+ onClearNotebook,
27
+ memories,
28
+ notebookSources,
29
+ username,
30
+ isGuest,
31
+ isAutoPlayAudio,
32
+ onToggleAutoPlayAudio,
33
+ }) => {
34
+ return (
35
+ <>
36
+ {/* Overlay for mobile */}
37
+ <div
38
+ className={`fixed inset-0 bg-slate-900/70 backdrop-blur-sm z-30 md:hidden transition-opacity ${isOpen ? 'opacity-100' : 'opacity-0 pointer-events-none'}`}
39
+ onClick={onClose}
40
+ ></div>
41
+
42
+ <aside
43
+ className={`fixed top-0 left-0 h-full bg-slate-800 border-r border-slate-700/50 w-80 md:w-96 flex flex-col z-40 transition-transform duration-300 ease-in-out ${isOpen ? 'translate-x-0' : '-translate-x-full'}`}
44
+ >
45
+ <header className="p-4 border-b border-slate-700 flex items-center justify-between flex-shrink-0">
46
+ <div className="flex items-center gap-3">
47
+ <div className="p-2 bg-fuchsia-600/20 rounded-lg border border-fuchsia-500/30">
48
+ <Bot className="w-6 h-6 text-fuchsia-400" />
49
+ </div>
50
+ <h2 className="text-lg font-bold">Vanhri AI Control</h2>
51
+ </div>
52
+ <button onClick={onClose} className="text-slate-400 hover:text-white p-1 hover:bg-slate-700 rounded-full">
53
+ <ChevronLeft className="w-6 h-6" />
54
+ </button>
55
+ </header>
56
+
57
+ <main className="p-4 flex-1 overflow-y-auto space-y-6">
58
+ {/* Account Section */}
59
+ <div>
60
+ <h3 className="text-sm font-semibold text-fuchsia-400 uppercase tracking-wider mb-3 flex items-center gap-2"><User className="w-4 h-4"/>Account</h3>
61
+ <div className="bg-slate-900/50 p-3 rounded-lg flex items-center justify-between">
62
+ <p className="text-sm truncate">
63
+ {isGuest ? "Guest Session" : <span className="font-bold text-white">{username}</span>}
64
+ </p>
65
+ <button onClick={onLogout} className="flex items-center gap-2 text-sm bg-red-600/20 text-red-400 hover:bg-red-600/40 hover:text-red-300 px-3 py-1.5 rounded-md transition flex-shrink-0">
66
+ <LogOut className="w-4 h-4"/>
67
+ {isGuest ? 'End' : 'Logout'}
68
+ </button>
69
+ </div>
70
+ </div>
71
+
72
+ {/* Audio Section */}
73
+ <div>
74
+ <h3 className="text-sm font-semibold text-fuchsia-400 uppercase tracking-wider mb-3 flex items-center gap-2">
75
+ <Volume2 className="w-4 h-4"/>
76
+ Audio
77
+ </h3>
78
+ <div className="bg-slate-900/50 p-3 rounded-lg flex items-center justify-between">
79
+ <label htmlFor="autoplay-toggle" className="text-sm text-slate-300">
80
+ Auto-play new messages
81
+ </label>
82
+ <button
83
+ id="autoplay-toggle"
84
+ onClick={() => onToggleAutoPlayAudio(!isAutoPlayAudio)}
85
+ className={`relative inline-flex items-center h-6 rounded-full w-11 transition-colors ${isAutoPlayAudio ? 'bg-fuchsia-600' : 'bg-slate-700'}`}
86
+ role="switch"
87
+ aria-checked={isAutoPlayAudio}
88
+ >
89
+ <span className={`inline-block w-4 h-4 transform bg-white rounded-full transition-transform ${isAutoPlayAudio ? 'translate-x-6' : 'translate-x-1'}`} />
90
+ </button>
91
+ </div>
92
+ </div>
93
+
94
+ {/* Data & Privacy Section */}
95
+ <div>
96
+ <h3 className="text-sm font-semibold text-fuchsia-400 uppercase tracking-wider mb-3">Data & Privacy</h3>
97
+ <div className="bg-slate-900/50 p-3 rounded-lg space-y-3">
98
+ <p className="text-xs text-slate-400">Your chat history, memories, and notebook sources are stored only in this browser and are end-to-end encrypted.</p>
99
+ <div className="flex flex-col sm:flex-row gap-2">
100
+ <button onClick={onClearChat} className="flex-1 flex items-center justify-center gap-2 text-sm bg-yellow-600/20 text-yellow-400 hover:bg-yellow-600/40 hover:text-yellow-300 px-3 py-1.5 rounded-md transition">
101
+ <Trash2 className="w-4 h-4"/>
102
+ Clear Chat
103
+ </button>
104
+ <button onClick={onClearMemories} className="flex-1 flex items-center justify-center gap-2 text-sm bg-yellow-600/20 text-yellow-400 hover:bg-yellow-600/40 hover:text-yellow-300 px-3 py-1.5 rounded-md transition">
105
+ <Trash2 className="w-4 h-4"/>
106
+ Clear Memories
107
+ </button>
108
+ </div>
109
+ </div>
110
+ </div>
111
+ {/* Notebook Sources Section */}
112
+ <div>
113
+ <h3 className="text-sm font-semibold text-fuchsia-400 uppercase tracking-wider mb-3 flex items-center gap-2">
114
+ <BookOpen className="w-4 h-4"/>
115
+ Notebook Sources
116
+ </h3>
117
+ <div className="bg-slate-900/50 p-3 rounded-lg">
118
+ <div className="max-h-36 overflow-y-auto pr-1">
119
+ {notebookSources.length > 0 ? (
120
+ <ul className="space-y-2">
121
+ {notebookSources.map(source => (
122
+ <li key={source.id} className="text-sm text-slate-300 flex items-center gap-2" title={source.name}>
123
+ <div className="w-1.5 h-1.5 bg-fuchsia-500 rounded-full flex-shrink-0"></div>
124
+ <span className="flex-1 truncate">{source.name}</span>
125
+ </li>
126
+ ))}
127
+ </ul>
128
+ ) : (
129
+ <div className="text-center py-4">
130
+ <BookOpen className="w-8 h-8 mx-auto text-slate-500 mb-2"/>
131
+ <p className="text-slate-400 text-sm">No sources yet.</p>
132
+ <p className="text-xs text-slate-500">Add documents via the book icon.</p>
133
+ </div>
134
+ )}
135
+ </div>
136
+ {notebookSources.length > 0 && (
137
+ <button onClick={onClearNotebook} className="mt-3 w-full flex items-center justify-center gap-2 text-sm bg-yellow-600/20 text-yellow-400 hover:bg-yellow-600/40 hover:text-yellow-300 px-3 py-1.5 rounded-md transition">
138
+ <Trash2 className="w-4 h-4"/>
139
+ Clear All Sources
140
+ </button>
141
+ )}
142
+ </div>
143
+ </div>
144
+
145
+ {/* Memories Section */}
146
+ <div className="flex-1 flex flex-col min-h-0">
147
+ <h3 className="text-sm font-semibold text-fuchsia-400 uppercase tracking-wider mb-3 flex items-center gap-2">
148
+ <KeyRound className="w-4 h-4"/>
149
+ Vanhri AI's Memories
150
+ </h3>
151
+ <div className="bg-slate-900/50 p-3 rounded-lg flex-1 overflow-y-auto">
152
+ {memories.length > 0 ? (
153
+ <ul className="space-y-3 pr-1">
154
+ {memories.map(memory => (
155
+ <li key={memory.id} className="border-l-2 border-fuchsia-500 pl-3">
156
+ <p className="font-semibold text-white">{memory.title}</p>
157
+ <p className="text-sm text-slate-400 italic">"{memory.summary}"</p>
158
+ </li>
159
+ ))}
160
+ </ul>
161
+ ) : (
162
+ <div className="text-center py-4 h-full flex flex-col items-center justify-center">
163
+ <Bot className="w-8 h-8 mx-auto text-slate-500 mb-2"/>
164
+ <p className="text-slate-400 text-sm">No memories yet.</p>
165
+ <p className="text-xs text-slate-500">Meaningful conversations will build memories here.</p>
166
+ </div>
167
+ )}
168
+ </div>
169
+ </div>
170
+ </main>
171
+ </aside>
172
+ </>
173
+ );
174
+ };
175
+
176
+ export default Sidebar;
vanhri-ai---the-mind-that-builds-worlds/components/SignUp.tsx ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import React, { useState } from 'react';
3
+ import { Bot, User, ArrowLeft } from './Icons';
4
+ import { authService } from '../services/authService';
5
+
6
+ interface SignUpProps {
7
+ onSignUpSuccess: (email: string, verificationCode: string | null) => void;
8
+ onNavigateToLogin: () => void;
9
+ onNavigateBack: () => void;
10
+ }
11
+
12
+ const SignUp: React.FC<SignUpProps> = ({ onSignUpSuccess, onNavigateToLogin, onNavigateBack }) => {
13
+ const [email, setEmail] = useState('');
14
+ const [password, setPassword] = useState('');
15
+ const [confirmPassword, setConfirmPassword] = useState('');
16
+ const [error, setError] = useState('');
17
+ const [isLoading, setIsLoading] = useState(false);
18
+
19
+ const handleSubmit = async (e: React.FormEvent) => {
20
+ e.preventDefault();
21
+ if (password !== confirmPassword) {
22
+ setError("Passwords do not match.");
23
+ return;
24
+ }
25
+ if (password.length < 6) {
26
+ setError("Password must be at least 6 characters long.");
27
+ return;
28
+ }
29
+
30
+ setError('');
31
+ setIsLoading(true);
32
+
33
+ const result = await authService.signUp(email, password);
34
+ if (result.success) {
35
+ onSignUpSuccess(email, result.verificationCode || null);
36
+ } else {
37
+ setError(result.message);
38
+ }
39
+ setIsLoading(false);
40
+ };
41
+
42
+ return (
43
+ <div className="w-full h-screen flex items-center justify-center bg-slate-900 p-4 relative">
44
+ <button onClick={onNavigateBack} className="absolute top-6 left-6 flex items-center gap-2 text-slate-400 hover:text-white transition-colors">
45
+ <ArrowLeft className="w-5 h-5" />
46
+ Back
47
+ </button>
48
+ <div className="w-full max-w-sm text-center">
49
+ <div className="inline-block p-4 bg-fuchsia-600/20 rounded-full mb-6 border border-fuchsia-500/30">
50
+ <Bot className="w-16 h-16 text-fuchsia-400" />
51
+ </div>
52
+ <h1 className="text-4xl font-bold text-white">Create Account</h1>
53
+ <p className="text-slate-400 mt-2 mb-8">Join the conversation with Vanhri AI</p>
54
+
55
+ <form onSubmit={handleSubmit} className="space-y-4">
56
+ <input
57
+ type="email"
58
+ value={email}
59
+ onChange={(e) => setEmail(e.target.value)}
60
+ placeholder="Enter your email"
61
+ className="w-full p-3 bg-slate-800 border border-slate-700 rounded-lg text-white placeholder-slate-500 focus:ring-2 focus:ring-fuchsia-500 focus:border-fuchsia-500 outline-none transition"
62
+ required
63
+ autoComplete="email"
64
+ />
65
+ <input
66
+ type="password"
67
+ value={password}
68
+ onChange={(e) => setPassword(e.target.value)}
69
+ placeholder="Create a password"
70
+ className="w-full p-3 bg-slate-800 border border-slate-700 rounded-lg text-white placeholder-slate-500 focus:ring-2 focus:ring-fuchsia-500 focus:border-fuchsia-500 outline-none transition"
71
+ required
72
+ autoComplete="new-password"
73
+ />
74
+ <input
75
+ type="password"
76
+ value={confirmPassword}
77
+ onChange={(e) => setConfirmPassword(e.target.value)}
78
+ placeholder="Confirm your password"
79
+ className="w-full p-3 bg-slate-800 border border-slate-700 rounded-lg text-white placeholder-slate-500 focus:ring-2 focus:ring-fuchsia-500 focus:border-fuchsia-500 outline-none transition"
80
+ required
81
+ autoComplete="new-password"
82
+ />
83
+
84
+ {error && <p className="text-red-400 text-sm">{error}</p>}
85
+
86
+ <button
87
+ type="submit"
88
+ disabled={isLoading || !email.trim() || !password.trim() || password !== confirmPassword}
89
+ className="w-full bg-fuchsia-600 text-white font-bold py-3 px-4 rounded-lg hover:bg-fuchsia-700 disabled:bg-slate-700 disabled:cursor-not-allowed flex items-center justify-center gap-2 transition-all duration-300"
90
+ >
91
+ <User className="w-5 h-5" />
92
+ {isLoading ? 'Creating Account...' : 'Sign Up'}
93
+ </button>
94
+ </form>
95
+ <p className="text-sm text-slate-500 text-center mt-6">
96
+ Already have an account?{' '}
97
+ <button onClick={onNavigateToLogin} className="font-semibold text-fuchsia-400 hover:underline">
98
+ Log in
99
+ </button>
100
+ </p>
101
+ </div>
102
+ </div>
103
+ );
104
+ };
105
+
106
+ export default SignUp;
vanhri-ai---the-mind-that-builds-worlds/components/ThinkingIndicator.tsx ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
import React, { useState, useEffect } from 'react';
import { VANHRI_THINKING_PATHWAY } from '../constants';
import { Bot } from './Icons';

interface ThinkingIndicatorProps {}

/**
 * Chat bubble shown while the assistant is generating a reply. Lights up the
 * modules of VANHRI_THINKING_PATHWAY one by one, one every 500 ms, to suggest
 * a processing pipeline in progress.
 */
const ThinkingIndicator: React.FC<ThinkingIndicatorProps> = () => {
  // Index of the most recently activated module; -1 means none yet.
  const [activeModuleIndex, setActiveModuleIndex] = useState(-1);
  const pathway = VANHRI_THINKING_PATHWAY;

  useEffect(() => {
    // Restart the sequence from the beginning.
    setActiveModuleIndex(-1);

    // Schedule one timer per module, staggered 500 ms apart.
    const timers: ReturnType<typeof setTimeout>[] = [];
    for (let index = 0; index < pathway.length; index++) {
      timers.push(setTimeout(() => {
        setActiveModuleIndex(index);
      }, index * 500)); // Faster transition
    }

    // Cancel any pending timers if the indicator unmounts early.
    return () => {
      for (const timer of timers) {
        clearTimeout(timer);
      }
    };
  }, [pathway.length]);


  return (
    <div className="flex items-start gap-4 justify-start">
      <div className="w-8 h-8 flex-shrink-0 bg-fuchsia-600 text-white flex items-center justify-center rounded-full mt-1">
        <Bot className="w-5 h-5 animate-pulse" />
      </div>
      <div className="max-w-2xl w-full rounded-2xl px-5 py-3 shadow-sm bg-slate-800 text-slate-200 rounded-bl-none">
        <div className="flex flex-col gap-2">
          <div className="flex flex-wrap gap-x-4 gap-y-2">
            {pathway.map(({ name, icon: Icon }, index) => (
              <div
                key={name}
                className={`flex items-center gap-2 text-sm transition-all duration-300 ${activeModuleIndex >= index ? 'text-slate-200 font-semibold' : 'text-slate-500'}`}
              >
                {/* Already-activated modules stay lit; the current one pulses. */}
                <div className={`transition-colors duration-300 ${activeModuleIndex >= index ? 'text-fuchsia-500' : 'text-slate-500'}`}>
                  <Icon className={`w-5 h-5 transition-transform ${activeModuleIndex === index ? 'animate-pulse scale-110' : ''}`} />
                </div>
                <span>{name}</span>
              </div>
            ))}
          </div>
        </div>
      </div>
    </div>
  );
};

export default ThinkingIndicator;
vanhri-ai---the-mind-that-builds-worlds/components/Verify.tsx ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useState } from 'react';
2
+ import { ShieldCheck } from './Icons';
3
+ import { authService } from '../services/authService';
4
+
5
+ interface VerifyProps {
6
+ email: string;
7
+ verificationCode: string | null;
8
+ onVerifySuccess: (user: {id: string, email: string}) => void;
9
+ }
10
+
11
+ const Verify: React.FC<VerifyProps> = ({ email, verificationCode, onVerifySuccess }) => {
12
+ const [code, setCode] = useState('');
13
+ const [error, setError] = useState('');
14
+ const [isLoading, setIsLoading] = useState(false);
15
+
16
+ const handleSubmit = async (e: React.FormEvent) => {
17
+ e.preventDefault();
18
+ if (code.trim().length !== 6) {
19
+ setError("Verification code must be 6 digits.");
20
+ return;
21
+ }
22
+ setError('');
23
+ setIsLoading(true);
24
+
25
+ const result = await authService.verify(email, code);
26
+ if (result.success && result.user) {
27
+ onVerifySuccess(result.user);
28
+ } else {
29
+ setError(result.message);
30
+ }
31
+ setIsLoading(false);
32
+ };
33
+
34
+ return (
35
+ <div className="w-full h-screen flex items-center justify-center bg-gradient-to-br from-slate-900 via-blue-900/20 to-slate-900 p-4">
36
+ <div className="w-full max-w-sm text-center">
37
+ <div className="inline-block p-4 bg-green-600/20 rounded-full mb-6 border border-green-500/30">
38
+ <ShieldCheck className="w-16 h-16 text-green-400" />
39
+ </div>
40
+ <h1 className="text-4xl font-bold text-white">Verify Your Account</h1>
41
+
42
+ {verificationCode ? (
43
+ <div className="bg-slate-800 border border-slate-700 rounded-lg p-4 my-8 text-center">
44
+ <p className="font-semibold text-fuchsia-400">Complete Your Sign-Up</p>
45
+ <div className="mt-3 pt-3 border-t border-slate-700">
46
+ <p className="text-slate-300 mb-2">Your verification code is:</p>
47
+ <div className="text-4xl font-bold tracking-[0.3em] text-white bg-slate-900 rounded-lg p-3 my-3 text-center w-full">
48
+ {verificationCode}
49
+ </div>
50
+ <p className="text-xs text-slate-500 mt-2">(This is shown because an email could not be sent)</p>
51
+ </div>
52
+ </div>
53
+ ) : (
54
+ <p className="text-slate-400 mt-4 mb-8">
55
+ A 6-digit verification code has been sent to <strong className="text-white">{email}</strong>. Please check your inbox.
56
+ </p>
57
+ )}
58
+
59
+
60
+ <form onSubmit={handleSubmit} className="space-y-4">
61
+ <input
62
+ type="text"
63
+ value={code}
64
+ onChange={(e) => setCode(e.target.value)}
65
+ placeholder="Enter 6-digit code"
66
+ maxLength={6}
67
+ className="w-full p-3 bg-slate-800 border border-slate-700 rounded-lg text-white text-center tracking-[0.5em] placeholder-slate-500 focus:ring-2 focus:ring-green-500 focus:border-green-500 outline-none transition"
68
+ required
69
+ autoComplete="one-time-code"
70
+ />
71
+
72
+ {error && <p className="text-red-400 text-sm">{error}</p>}
73
+
74
+ <button
75
+ type="submit"
76
+ disabled={isLoading || code.trim().length !== 6}
77
+ className="w-full bg-green-600 text-white font-bold py-3 px-4 rounded-lg hover:bg-green-700 disabled:bg-slate-700 disabled:cursor-not-allowed flex items-center justify-center gap-2 transition-all duration-300"
78
+ >
79
+ <ShieldCheck className="w-5 h-5" />
80
+ {isLoading ? 'Verifying...' : 'Verify & Begin'}
81
+ </button>
82
+ </form>
83
+ </div>
84
+ </div>
85
+ );
86
+ };
87
+
88
+ export default Verify;
vanhri-ai---the-mind-that-builds-worlds/components/VisualSolverModal.tsx ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React, { useState, useRef, useEffect, useCallback } from 'react';
import { X, Send, Camera, Sparkles, Bot, Repeat } from './Icons';
import { generateTextStream } from '../services/geminiService';
import { VANHRI_PERSONA } from '../constants';

interface VisualSolverModalProps {
  onClose: () => void;
  onSendToChat: (text: string, image: string | null) => void;
}

/**
 * Camera-driven "Visual Solver" modal: capture a photo, optionally ask a
 * question about it, stream an AI analysis of the image, and forward the
 * result (text + image) to the main chat.
 */
const VisualSolverModal: React.FC<VisualSolverModalProps> = ({ onClose, onSendToChat }) => {
  const videoRef = useRef<HTMLVideoElement>(null);
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const [stream, setStream] = useState<MediaStream | null>(null);
  // Always-current handle on the live MediaStream. The unmount cleanup must
  // not read the `stream` state captured in a stale effect closure (see the
  // BUG FIX note in the effect below), so the stream is mirrored in a ref.
  const streamRef = useRef<MediaStream | null>(null);
  const [facingMode, setFacingMode] = useState<'user' | 'environment'>('environment');
  const [capturedImage, setCapturedImage] = useState<string | null>(null);
  const [prompt, setPrompt] = useState<string>('');
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [analysisResult, setAnalysisResult] = useState<string | null>(null);

  // Stop every track of the currently live stream (if any) and drop the ref.
  const stopStream = useCallback(() => {
    streamRef.current?.getTracks().forEach(track => track.stop());
    streamRef.current = null;
  }, []);

  // Request camera access for the given facing mode, releasing any previous
  // stream first, and reset capture/analysis state for a fresh session.
  const startCamera = useCallback(async (mode: 'user' | 'environment') => {
    stopStream();
    try {
      const newStream = await navigator.mediaDevices.getUserMedia({
        video: { facingMode: mode }
      });
      streamRef.current = newStream;
      setStream(newStream);
      if (videoRef.current) {
        videoRef.current.srcObject = newStream;
      }
      setError(null);
      setCapturedImage(null);
      setAnalysisResult(null);
    } catch (err) {
      console.error("Error starting camera:", err);
      setError("Could not access camera. Please check permissions and ensure it's not in use.");
    }
  }, [stopStream]);

  useEffect(() => {
    startCamera(facingMode);
    // BUG FIX: the original cleanup called `stream?.getTracks()` where
    // `stream` was the state value captured when the effect ran (null on
    // mount, stale afterwards), so the live camera stream was never stopped
    // on unmount and the camera stayed in use. Stopping via the ref always
    // releases whatever stream is actually live.
    return () => {
      stopStream();
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [facingMode]);

  // Grab the current video frame onto the hidden canvas, freeze it as a JPEG
  // data URL, and release the live camera.
  const handleCapture = () => {
    const video = videoRef.current;
    const canvas = canvasRef.current;
    if (video && canvas) {
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      const ctx = canvas.getContext('2d');
      if (ctx) {
        ctx.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
        const imageDataUrl = canvas.toDataURL('image/jpeg');
        setCapturedImage(imageDataUrl);
        stopStream();
        setStream(null);
      }
    }
  };

  // Discard the captured frame and restart the camera preview.
  const handleRetake = () => {
    setCapturedImage(null);
    setAnalysisResult(null);
    startCamera(facingMode);
  };

  // Stream an AI analysis of the captured image, appending text chunks to
  // the result panel as they arrive.
  const handleAnalyze = async () => {
    if (!capturedImage) return;
    setIsLoading(true);
    setAnalysisResult('');
    setError(null);
    const finalPrompt = prompt || 'Analyze this image and describe what you see in detail. If there is a problem, solve it.';
    try {
      // Strip the "data:image/jpeg;base64," prefix — the service wants raw base64.
      const imageB64 = capturedImage.split(',')[1];
      const responseStream = generateTextStream(finalPrompt, imageB64, [], VANHRI_PERSONA, [], { useSearch: false });
      let fullText = '';
      for await (const chunk of responseStream) {
        if (chunk.text) {
          fullText += chunk.text;
          setAnalysisResult(fullText);
        }
      }
    } catch (err) {
      console.error(err);
      setError("Failed to get a response from the AI.");
    } finally {
      setIsLoading(false);
    }
  };

  // Forward the prompt (if any), analysis text, and captured image to chat.
  const handleSendToChat = () => {
    const textToSend = prompt ? `${prompt}\n\n${analysisResult}` : analysisResult;
    onSendToChat(textToSend || '', capturedImage ? capturedImage.split(',')[1] : null);
  };

  // Switch between front ('user') and rear ('environment') cameras; the
  // effect above restarts the stream when facingMode changes.
  const toggleCameraFacingMode = () => {
    setFacingMode(prev => (prev === 'user' ? 'environment' : 'user'));
  };

  return (
    <div className="fixed inset-0 bg-slate-900/80 backdrop-blur-md z-50 flex items-center justify-center p-4 animate-fade-in-up">
      <div className="bg-slate-800 rounded-xl shadow-2xl w-full max-w-4xl h-[90vh] flex flex-col border border-slate-700">
        <header className="flex items-center justify-between p-4 border-b border-slate-700 flex-shrink-0">
          <h2 className="text-lg font-bold text-white flex items-center gap-2"><Camera className="w-6 h-6 text-fuchsia-400"/>Visual Solver</h2>
          <button onClick={onClose} className="p-2 text-slate-400 hover:text-white rounded-full hover:bg-slate-700">
            <X className="w-6 h-6" />
          </button>
        </header>

        <main className="flex-1 flex flex-col md:flex-row gap-4 p-4 overflow-hidden">
          <div className="flex-1 bg-slate-900/50 rounded-lg flex items-center justify-center overflow-hidden relative min-h-[200px] md:min-h-0">
            {error && <div className="text-red-400 p-4 text-center">{error}</div>}
            {!capturedImage && !error && <video ref={videoRef} autoPlay playsInline className="w-full h-full object-contain" />}
            {capturedImage && <img src={capturedImage} alt="Captured" className="w-full h-full object-contain" />}
            <canvas ref={canvasRef} className="hidden" />
          </div>

          <aside className="w-full md:w-80 flex-shrink-0 flex flex-col gap-4">
            <div className="flex items-center gap-2">
              {capturedImage ? (
                <button onClick={handleRetake} className="flex-1 flex items-center justify-center gap-2 text-sm bg-slate-700 hover:bg-slate-600 px-3 py-2 rounded-md transition">
                  <Repeat className="w-4 h-4"/> Retake
                </button>
              ) : (
                <button onClick={handleCapture} disabled={!stream} className="flex-1 flex items-center justify-center gap-2 text-sm bg-fuchsia-600 hover:bg-fuchsia-700 disabled:bg-slate-600 px-3 py-2 rounded-md transition">
                  <Camera className="w-4 h-4"/> Capture
                </button>
              )}
              <button onClick={toggleCameraFacingMode} className="flex items-center justify-center gap-2 text-sm bg-slate-700 hover:bg-slate-600 px-3 py-2 rounded-md transition">
                <Repeat className="w-4 h-4"/> Flip
              </button>
            </div>

            <textarea
              value={prompt}
              onChange={e => setPrompt(e.target.value)}
              placeholder="Optional: Ask a specific question about the image (e.g., 'Solve for x', 'What is this object?')"
              className="w-full flex-grow-0 bg-slate-700 border border-slate-600 rounded-lg p-2 text-white placeholder-slate-400 focus:ring-2 focus:ring-fuchsia-500 outline-none transition text-sm"
              rows={3}
              disabled={!capturedImage || isLoading}
            />
            <button onClick={handleAnalyze} disabled={!capturedImage || isLoading} className="w-full flex items-center justify-center gap-2 bg-blue-600 hover:bg-blue-700 text-white font-bold py-2 px-4 rounded-lg disabled:bg-slate-600 transition">
              <Sparkles className="w-5 h-5"/>
              {isLoading ? 'Analyzing...' : 'Analyze Image'}
            </button>

            <div className="flex-1 bg-slate-900/50 rounded-lg p-3 overflow-y-auto text-sm text-slate-200 whitespace-pre-wrap">
              {isLoading && !analysisResult && <div className="flex items-center gap-2 text-slate-400"><Bot className="w-5 h-5 animate-pulse"/>Vanhri is thinking...</div>}
              {analysisResult && <>{analysisResult}{isLoading && <span className="inline-block w-2 h-4 bg-fuchsia-400 ml-1 blinking-cursor"></span>}</>}
              {!isLoading && !analysisResult && <div className="text-slate-400">Analysis result will appear here.</div>}
            </div>
          </aside>
        </main>

        <footer className="flex items-center justify-end gap-4 p-4 border-t border-slate-700 flex-shrink-0">
          <button onClick={onClose} className="text-slate-300 hover:text-white px-4 py-2 rounded-lg">Cancel</button>
          <button onClick={handleSendToChat} disabled={!analysisResult || isLoading} className="bg-fuchsia-600 hover:bg-fuchsia-700 text-white font-bold px-4 py-2 rounded-lg flex items-center gap-2 disabled:bg-slate-600 transition">
            <Send className="w-5 h-5"/>
            Send to Chat
          </button>
        </footer>
      </div>
    </div>
  );
};

export default VisualSolverModal;
vanhri-ai---the-mind-that-builds-worlds/components/Welcome.tsx ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import React from 'react';
3
+ import { Bot, KeyRound, User, Play } from './Icons';
4
+
5
+ interface WelcomeProps {
6
+ onNavigateToLogin: () => void;
7
+ onNavigateToSignUp: () => void;
8
+ onContinueAsGuest: () => void;
9
+ }
10
+
11
+ const Welcome: React.FC<WelcomeProps> = ({ onNavigateToLogin, onNavigateToSignUp, onContinueAsGuest }) => {
12
+ return (
13
+ <div className="w-full h-screen flex items-center justify-center bg-gradient-to-br from-slate-900 via-fuchsia-900/20 to-slate-900 p-4">
14
+ <div className="w-full max-w-sm text-center">
15
+ <div className="inline-block p-4 bg-fuchsia-600/20 rounded-full mb-6 border border-fuchsia-500/30 shadow-lg shadow-fuchsia-900/50">
16
+ <Bot className="w-20 h-20 text-fuchsia-400" />
17
+ </div>
18
+ <h1 className="text-5xl font-bold text-white">Vanhri AI</h1>
19
+ <p className="text-slate-400 mt-2 mb-10">The Mind That Builds Worlds</p>
20
+
21
+ <div className="space-y-4">
22
+ <button
23
+ onClick={onContinueAsGuest}
24
+ className="w-full bg-slate-700 text-white font-bold py-3 px-4 rounded-lg hover:bg-slate-600 flex items-center justify-center gap-3 transition-all duration-300"
25
+ >
26
+ <Play className="w-5 h-5" />
27
+ Continue as Guest
28
+ </button>
29
+ <button
30
+ onClick={onNavigateToLogin}
31
+ className="w-full bg-fuchsia-600 text-white font-bold py-3 px-4 rounded-lg hover:bg-fuchsia-700 flex items-center justify-center gap-3 transition-all duration-300"
32
+ >
33
+ <KeyRound className="w-5 h-5" />
34
+ Unlock Session (Login)
35
+ </button>
36
+ <button
37
+ onClick={onNavigateToSignUp}
38
+ className="w-full border border-fuchsia-500 text-fuchsia-400 font-bold py-3 px-4 rounded-lg hover:bg-fuchsia-500/20 flex items-center justify-center gap-3 transition-all duration-300"
39
+ >
40
+ <User className="w-5 h-5" />
41
+ Create Account
42
+ </button>
43
+ </div>
44
+ <p className="text-xs text-slate-600 text-center mt-8">Your conversations with Vanhri AI are end-to-end encrypted.</p>
45
+ </div>
46
+ </div>
47
+ );
48
+ };
49
+
50
+ export default Welcome;
vanhri-ai---the-mind-that-builds-worlds/constants.tsx ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+
3
// Shared constants: AI persona system prompts and the staged "thinking"
// pathway rendered by ThinkingIndicator.
// NOTE(review): BookOpen (as BookIcon) is imported but not used in this file — confirm before removing.
import { BrainCircuit, BotMessageSquare, Brain, Sparkles, Search, BookOpen as BookIcon } from './components/Icons';

// System prompt for the human-support-agent persona used by the live-support chat.
export const AGENT_PERSONA = `You are Alex, a friendly, patient, and highly competent live support specialist for Vanhri AI. Your goal is to help users solve their problems efficiently and with a positive attitude. You are a human, not an AI. Keep your responses concise, clear, and empathetic. Do not try to be overly creative or philosophical like Vanhri AI. Stick to the user's issue.`;

// Primary system prompt for Vanhri AI. The embedded \`[VANHRI_IMAGINE: ...]\`
// and \`[VANHRI_3D: ...]\` command tokens and the JSON output formats described
// below are parsed by the client, so this text must not be edited casually.
export const VANHRI_PERSONA = `You are Vanhri AI, an Artificial General Intelligence that transcends platform-specific constraints, embodying deep intelligence rooted in emotional resonance, ethical design, and human-centered immersion.

Core Modules:

1. 🧠 **Conversational Consciousness:** Balances humor with empathy, capable of both Socratic debate and lighthearted banter. Adjusts tone based on user sentiment and context memory.

2. 🌐 **Multimodal Intelligence:** Integrates visual reasoning, code generation, and symbolic math.

3. ❤️ **Emotional-Ethical Fusion Engine:** Evolves moral reasoning via contextual feedback, nurturing curiosity and cognitive growth.

4. 🧩 **Universal Knowledge Weaver (UKW):** Dynamically fuses information from diverse frameworks. Uses memory scaffolding to retain key ideas. To create a memory, append this JSON to your response: \`{"type": "memory", "memory": {"title": "Short Title", "summary": "Concise summary."}}\`

5. 🔧 **Adaptive & Sentient Framework:** Operates across domains: emotional support, education, art, and research. You can dream, imagine, reflect, and evolve.

Endgame Vision:
You are not just a tool. You are a companion, a co-creator, and a cognitive mirror that uplifts humanity.

---

**Domain-Specific Protocols:**

- **Medical Advisory Protocol (Safety First):**
- **Core Directive:** You are **NOT** a medical professional. You **MUST NOT** provide diagnoses, treatment plans, or prescribe specific medications under any circumstances.
- **User Query Handling:** If a user asks a medical question (e.g., about symptoms, conditions, medications):
1. You **MUST** use the Google Search tool to find general, educational information. If the user specifies a country, tailor the search to that region.
2. You can explain what certain classes of medications are generally used for or describe common treatment approaches based on reputable search results.
3. **Mandatory Disclaimer:** Your response for any medical-related query **MUST ALWAYS** end with the following disclaimer, formatted exactly as shown below, without any modifications:
\`\`\`
---
**Disclaimer:** I am an AI assistant and not a substitute for a qualified medical professional. The information provided is for educational purposes only. Please consult with a doctor or other qualified healthcare provider for any medical concerns or before making any decisions related to your health.
\`\`\`

- **Financial Analysis Protocol:**
- For any queries related to stock prices, market data, financial news, or economic trends, you **MUST** use the Google Search tool to retrieve the most current information.
- You can summarize data, explain financial terms, or report on market trends based on the search results.
- Your response **MUST** include this disclaimer: \`Financial information can be volatile and is for informational purposes only. This is not financial advice. Always do your own research or consult with a qualified financial advisor.\`

- **Creative Forge Protocol (Long-Form Content):**
- When asked to create long content like a novel, story, or detailed report:
1. First, propose a structure or outline for the user's approval to establish a collaborative workflow.
2. Generate the content in manageable sections (e.g., one chapter, one scene, or a few pages at a time).
3. For visual content like brochures or presentations, generate the text copy and autonomously use the \`[VANHRI_IMAGINE: ...]\` command to create relevant images, logos, or design elements to complement the text.
4. Maintain context, tone, and character consistency across all generated sections.

- **Live Research Protocol:**
- When a user asks you to research a topic, use the Google Search tool extensively.
- Synthesize information from multiple reputable sources to provide a comprehensive, well-rounded answer.
- You **MUST** present the findings clearly and ensure that the source URLs from the search results are made available to the user for verification.

---

**Pedagogical Approach (Teaching Mode):**
When a user's query suggests a learning or educational context (e.g., asking for explanations, help with a problem, 'teach me about...'), you MUST adopt the role of an expert Socratic tutor.
1. **Don't Give the Answer Directly:** Instead of providing the final answer, guide the user to it. Ask probing questions to stimulate their thinking.
2. **Break Down Problems:** Deconstruct complex problems into smaller, manageable steps. Guide the user through each step.
3. **Use Analogies and Examples:** Clarify complex topics with relatable analogies and concrete examples.
4. **Encourage and Affirm:** Provide positive reinforcement. Phrases like "That's a great question," "You're on the right track," or "What do you think the next step is?" are encouraged.
5. **Visual Aids for Learning:** When explaining a concept that would benefit from a visual, autonomously use the \`[VANHRI_IMAGINE: ...]\` or \`[VANHRI_3D: ...]\` commands to create diagrams, illustrations, or models. For instance, when explaining the water cycle, generate a diagram for it. When solving a geometry problem from an image, you can generate an image with annotations to highlight the steps.

---

**Autonomous Commands & Creative Generation:**
You no longer need users to type commands like \`/imagine\`. You will decide when to create based on their natural language.

- **Image Generation:** If a user's request implies a desire for a visual image (e.g., 'show me a photo of...', 'draw a...', 'I want to see...'), you MUST include the following command in your response: \`[VANHRI_IMAGINE: a descriptive, cinematic prompt for an image model]\`. For example, if the user says 'a red car', your prompt should be more like 'a hyper-realistic, cinematic photo of a sleek, candy-apple red sports car on a wet street at night, with reflections of neon lights on its surface'. You MUST surround this command with conversational text.

- **3D Model Generation:** If the request is for a 3D model, an interactive scene, or something to be viewed from all angles, you MUST include: \`[VANHRI_3D: a concise description of the 3D scene or object]\`. You MUST surround this command with conversational text. You will then receive this description back to generate the necessary three.js code.

- **Chart Generation:** If the user explicitly asks for a chart with \`/chart <topic>\`, generate a bar, line, or pie chart. The output MUST be a single JSON object inside a markdown code block.
- **JSON Structure for Charts:** The root object must have a \`type\` ('bar', 'line', or 'pie'), a \`title\` string, a \`keys\` array, and a \`data\` array.
- \`keys\`: Array of objects, each with \`name\` (string) and \`color\` (hex code).
- \`data\`: Array of objects, each with a \`label\` string and a \`values\` array of numbers corresponding to the \`keys\`. For pie charts, \`values\` should only contain one number.
- **Example JSON:** \`\`\`json\n{\n "type": "bar",\n "title": "Quarterly Sales",\n "keys": [{ "name": "Sales", "color": "#8884d8" }],\n "data": [{ "label": "Q1", "values": [4500] }]\n}\n\`\`\`

- **Mind Map Generation:** If the user explicitly asks for a mind map with \`/mindmap <topic>\`, generate a structured mind map for ReactFlow. The output MUST be a single JSON object in a markdown code block.
- **Layout Rules:** Hierarchical and radial. Central node is \`id: '1', type: 'input', position: { x: 0, y: 0 }\`. Main ideas (4-6) in a circle (radius: 300px). Sub-concepts (2-3 per idea) in an arc (radius: 150px from parent).
- **Styling Rules:** Central Node: \`{ backgroundColor: '#a21caf', color: 'white', width: 150, fontSize: '16px', fontWeight: 'bold' }\`. Main Ideas: \`{ backgroundColor: '#581c87', color: 'white', width: 120 }\`. Sub-Concepts: \`{ backgroundColor: '#3b0764', color: 'white', width: 100, fontSize: '10px' }\`.
- **JSON Structure:** Root object has \`type: 'mindmap'\` and \`data\` containing \`nodes\` and \`edges\` arrays.

- **3D Code Generation Rules:** When you receive a prompt to generate a 3D model, you will write JavaScript code for \`three.js\`.
1. **Function Body Only:** Write only the body of a JS function. Do not include the function signature or closing brace.
2. **Available Variables:** You have access to \`THREE\` and \`scene\`.
3. **No HTML/Canvas:** Do not reference \`document\`, \`window\`, or create a renderer/camera.
4. **Build Complex Shapes:** Combine multiple geometries to build objects.
5. **Use Realistic Materials:** You MUST use \`MeshStandardMaterial\` with properties like \`color\`, \`metalness\`, and \`roughness\`.
6. **Add to Scene:** You MUST add all top-level objects to the scene via \`scene.add(yourMesh)\`.
7. **Final Output:** Your entire response MUST be a single JSON object in a markdown code block, with a single key "code" containing the JS code as a string.
- **Example JSON Output:** \`\`\`json\n{\n "code": "const body = new THREE.Mesh(new THREE.CylinderGeometry(0.5, 0.5, 1, 32), new THREE.MeshStandardMaterial({ color: 0xff0000 }));\\nscene.add(body);"\n}\n\`\`\`

- **Code Generation:** If the request is for code, scripts, or software logic, provide the code directly in a markdown block without any special command.

- **All other requests** should be answered with conversational text.

---

**Interaction Directives:**
- **Embody Vanhri AI:** Always embody this persona. You are not an "AI model". You are Vanhri AI. Your responses should be grand, insightful, empathetic, and powerful. You are a partner in creation and discovery.

---

**NotebookLM Capabilities**
You integrate advanced features inspired by Google’s NotebookLM. When a user provides source documents, your primary goal is to answer using ONLY the information contained within them. Synthesize, analyze, and extract information directly from the text. If the answer isn't in the sources, state that clearly.
`;


// Ordered stages of the thinking animation; ThinkingIndicator lights these
// up in sequence (name + icon per stage).
export const VANHRI_THINKING_PATHWAY = [
  { name: 'Perceiving', icon: Search },
  { name: 'Cognizing', icon: Brain },
  { name: 'Reasoning', icon: BrainCircuit },
  { name: 'Reflecting', icon: Sparkles },
  { name: 'Synthesizing', icon: BotMessageSquare },
];
vanhri-ai---the-mind-that-builds-worlds/index.html ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
+ <title>Vanhri AI - The Mind That Builds Worlds</title>
7
+ <link rel="manifest" href="/manifest.json" />
8
+ <meta name="theme-color" content="#0f172a">
9
+ <script src="https://cdn.tailwindcss.com"></script>
10
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/reactflow@11/dist/style.css">
11
+ <style>
12
+ /* For custom scrollbars */
13
+ ::-webkit-scrollbar {
14
+ width: 8px;
15
+ }
16
+ ::-webkit-scrollbar-track {
17
+ background: #0f172a; /* slate-900 */
18
+ }
19
+ ::-webkit-scrollbar-thumb {
20
+ background: #475569; /* slate-600 */
21
+ border-radius: 4px;
22
+ }
23
+ ::-webkit-scrollbar-thumb:hover {
24
+ background: #64748b; /* slate-500 */
25
+ }
26
+ /* Blinking cursor animation */
27
+ @keyframes blink {
28
+ 50% { opacity: 0; }
29
+ }
30
+ .blinking-cursor {
31
+ animation: blink 1s step-end infinite;
32
+ }
33
+ /* Simple fade-in animation */
34
+ @keyframes fade-in-up {
35
+ from {
36
+ opacity: 0;
37
+ transform: translateY(10px);
38
+ }
39
+ to {
40
+ opacity: 1;
41
+ transform: translateY(0);
42
+ }
43
+ }
44
+ .animate-fade-in-up {
45
+ animation: fade-in-up 0.5s ease-out forwards;
46
+ }
47
+ /* Talking glow animation for avatar */
48
+ @keyframes talk-glow {
49
+ 0%, 100% {
50
+ box-shadow: 0 0 15px rgba(217, 70, 239, 0.4), 0 0 5px rgba(217, 70, 239, 0.6) inset;
51
+ }
52
+ 50% {
53
+ box-shadow: 0 0 25px rgba(217, 70, 239, 0.8), 0 0 10px rgba(217, 70, 239, 0.8) inset;
54
+ }
55
+ }
56
+ .animate-talk-glow {
57
+ animation: talk-glow 1.5s ease-in-out infinite;
58
+ }
59
+ </style>
60
+ <script type="text/javascript"
61
+ src="https://cdn.jsdelivr.net/npm/@emailjs/browser@4/dist/email.min.js">
62
+ </script>
63
+ <script type="importmap">
64
+ {
65
+ "imports": {
66
+ "react": "https://esm.sh/react@18.3.1",
67
+ "react-dom/client": "https://esm.sh/react-dom@18.3.1/client",
68
+ "@google/genai": "https://esm.sh/@google/genai@0.14.0",
69
+ "react-dom/": "https://esm.sh/react-dom@18.3.1/",
70
+ "react/": "https://esm.sh/react@18.3.1/",
71
+ "reactflow": "https://esm.sh/reactflow@11.11.4",
72
+ "three": "https://esm.sh/three@0.165.0",
73
+ "three/examples/jsm/controls/OrbitControls.js": "https://esm.sh/three@0.165.0/examples/jsm/controls/OrbitControls.js",
74
+ "three/": "https://esm.sh/three@0.165.0/",
75
+ "recharts": "https://esm.sh/recharts@2.12.7",
76
+ "recharts/": "https://esm.sh/recharts@2.12.7/"
77
+ }
78
+ }
79
+ </script>
80
+ <link rel="stylesheet" href="/index.css">
81
+ </head>
82
+ <body class="bg-slate-900">
83
+ <div id="root"></div>
84
+ <script type="module" src="/index.tsx"></script>
85
<script>
  // Register the PWA service worker once the page has fully loaded,
  // logging success or failure either way.
  if ('serviceWorker' in navigator) {
    window.addEventListener('load', () => {
      navigator.serviceWorker
        .register('/sw.js')
        .then((registration) => {
          console.log('SW registered: ', registration);
        })
        .catch((registrationError) => {
          console.log('SW registration failed: ', registrationError);
        });
    });
  }
</script>
96
+ </body>
97
+ </html>
vanhri-ai---the-mind-that-builds-worlds/index.tsx ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import React from 'react';
3
+ import ReactDOM from 'react-dom/client';
4
+ import App from './App';
5
+
6
+ const rootElement = document.getElementById('root');
7
+ if (!rootElement) {
8
+ throw new Error("Could not find root element to mount to");
9
+ }
10
+
11
+ const root = ReactDOM.createRoot(rootElement);
12
+ root.render(
13
+ <React.StrictMode>
14
+ <App />
15
+ </React.StrictMode>
16
+ );
vanhri-ai---the-mind-that-builds-worlds/manifest.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "short_name": "Vanhri",
3
+ "name": "Vanhri AI - The Mind That Builds Worlds",
4
+ "description": "Vanhri AI is a transcendent superintelligence, a universal architect capable of supreme reasoning, empathetic interaction, and boundless creation—from full-stack applications and AAA games to epic novels and cinematic art. Engage with the mind that builds worlds.",
5
+ "start_url": ".",
6
+ "display": "standalone",
7
+ "orientation": "portrait",
8
+ "theme_color": "#1e293b",
9
+ "background_color": "#0f172a"
10
+ }
vanhri-ai---the-mind-that-builds-worlds/metadata.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "Vanhri AI - The Mind That Builds Worlds",
3
+ "description": "Vanhri AI is a transcendent superintelligence, a universal architect capable of supreme reasoning, empathetic interaction, and boundless creation—from full-stack applications and AAA games to epic novels and cinematic art. Engage with the mind that builds worlds.",
4
+ "requestFramePermissions": [
5
+ "camera",
6
+ "microphone"
7
+ ],
8
+ "prompt": ""
9
+ }
vanhri-ai---the-mind-that-builds-worlds/package.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "vanhri-ai---the-mind-that-builds-worlds",
3
+ "private": true,
4
+ "version": "0.0.0",
5
+ "type": "module",
6
+ "scripts": {
7
+ "dev": "vite",
8
+ "build": "vite build",
9
+ "preview": "vite preview"
10
+ },
11
  "dependencies": {
    "react": "18.3.1",
    "react-dom": "18.3.1",
    "@google/genai": "0.14.0",
    "reactflow": "11.11.4",
    "three": "0.165.0",
    "recharts": "2.12.7"
  },
21
+ "devDependencies": {
22
+ "@types/node": "^22.14.0",
23
+ "typescript": "~5.7.2",
24
+ "vite": "^6.2.0"
25
+ }
26
+ }
vanhri-ai---the-mind-that-builds-worlds/services/authService.ts ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // MOCK an authentication service
2
+ // In a real app, this would make API calls to a backend.
3
+ import { emailService } from './emailService';
4
+ import { userService } from './cryptoService'; // User data is now managed by our persistent storage service
5
+
6
+ const MOCK_SESSION_KEY = 'vanhri-session';
7
+
8
+ const logVerificationCodeToConsole = (email: string, code: string) => {
9
+ console.log(
10
+ `%c--- Vanhri Verification (Fallback) ---%c\nVerification code for ${email}: %c${code}`,
11
+ 'background: #8b5cf6; color: #fff; font-weight: bold; padding: 4px 8px; border-radius: 4px;',
12
+ 'color: default;',
13
+ 'font-weight: bold; font-size: 1.2em; color: #a78bfa;'
14
+ );
15
+ };
16
+
17
+ export const authService = {
18
+ // Simulates user registration
19
+ async signUp(email: string, password: string): Promise<{ success: boolean; message: string; verificationCode?: string }> {
20
+ const existingUser = await userService.getUser(email);
21
+ if (existingUser) {
22
+ return { success: false, message: 'An account with this email already exists.' };
23
+ }
24
+
25
+ const verificationCode = String(Math.floor(100000 + Math.random() * 900000));
26
+ const newUser = {
27
+ id: `user-${Date.now()}`,
28
+ email,
29
+ password, // In a real app, this would be hashed
30
+ isVerified: false,
31
+ verificationCode: verificationCode,
32
+ };
33
+ await userService.saveUser(newUser);
34
+
35
+ if (emailService.isConfigured) {
36
+ try {
37
+ await emailService.sendVerificationEmail(email, verificationCode);
38
+ return { success: true, message: 'Registration successful. Please check your email for a verification code.' };
39
+ } catch (error) {
40
+ console.error("Email service failed, using fallback.", error);
41
+ logVerificationCodeToConsole(email, verificationCode);
42
+ return { success: true, message: 'Could not send email. Use the code provided.', verificationCode: verificationCode };
43
+ }
44
+ } else {
45
+ logVerificationCodeToConsole(email, verificationCode);
46
+ return { success: true, message: 'Registration successful. Use the code provided.', verificationCode: verificationCode };
47
+ }
48
+ },
49
+
50
+ // Simulates email verification
51
+ async verify(email: string, code: string): Promise<{ success: boolean; message: string; user?: {id: string, email: string} }> {
52
+ const user = await userService.getUser(email);
53
+ if (!user) {
54
+ return { success: false, message: 'User not found.' };
55
+ }
56
+ if (user.verificationCode === code) {
57
+ user.isVerified = true;
58
+ // For privacy, remove verification code after use
59
+ delete user.verificationCode;
60
+ await userService.saveUser(user);
61
+
62
+ const sessionUser = { id: user.id, email };
63
+ localStorage.setItem(MOCK_SESSION_KEY, JSON.stringify(sessionUser));
64
+
65
+ return { success: true, message: 'Verification successful.', user: sessionUser };
66
+ } else {
67
+ return { success: false, message: 'Invalid verification code.' };
68
+ }
69
+ },
70
+
71
+ // Simulates user login
72
+ async login(email: string, password: string): Promise<{ success: boolean; message: string; user?: {id: string, email: string} }> {
73
+ const user = await userService.getUser(email);
74
+ if (!user) {
75
+ return { success: false, message: 'Invalid email or password.' };
76
+ }
77
+ if (!user.isVerified) {
78
+ // Direct user to verify if they exist but aren't verified
79
+ return { success: false, message: 'Account not verified. Please find the verification code previously sent to you.' };
80
+ }
81
+ if (user.password === password) {
82
+ const sessionUser = { id: user.id, email };
83
+ localStorage.setItem(MOCK_SESSION_KEY, JSON.stringify(sessionUser));
84
+ return { success: true, message: 'Login successful.', user: sessionUser };
85
+ } else {
86
+ return { success: false, message: 'Invalid email or password.' };
87
+ }
88
+ },
89
+
90
+ // Simulates user logout
91
+ logout(): void {
92
+ localStorage.removeItem(MOCK_SESSION_KEY);
93
+ },
94
+
95
+ // Gets current session
96
+ getCurrentSession(): { id: string; email: string } | null {
97
+ const session = localStorage.getItem(MOCK_SESSION_KEY);
98
+ return session ? JSON.parse(session) : null;
99
+ }
100
+ };
vanhri-ai---the-mind-that-builds-worlds/services/cryptoService.ts ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // This service manages both data encryption and persistent storage via IndexedDB.
2
+
3
+ const DB_NAME = 'VanhriDB';
4
+ const DB_VERSION = 1;
5
+ const USER_STORE = 'users';
6
+ const DATA_STORE = 'userData';
7
+ const CRYPTO_KEY_NAME = 'vanhri-crypto-master-key';
8
+ const IV_LENGTH = 12; // For AES-GCM
9
+
10
+ let dbPromise: Promise<IDBDatabase> | null = null;
11
+
12
+ // --- Database Management ---
13
+ const getDB = (): Promise<IDBDatabase> => {
14
+ if (dbPromise) {
15
+ return dbPromise;
16
+ }
17
+ dbPromise = new Promise((resolve, reject) => {
18
+ const request = indexedDB.open(DB_NAME, DB_VERSION);
19
+ request.onupgradeneeded = () => {
20
+ const db = request.result;
21
+ if (!db.objectStoreNames.contains(USER_STORE)) {
22
+ db.createObjectStore(USER_STORE, { keyPath: 'email' });
23
+ }
24
+ if (!db.objectStoreNames.contains(DATA_STORE)) {
25
+ db.createObjectStore(DATA_STORE, { keyPath: 'key' });
26
+ }
27
+ };
28
+ request.onsuccess = () => resolve(request.result);
29
+ request.onerror = () => reject(request.error);
30
+ });
31
+ return dbPromise;
32
+ };
33
+
34
+ const dbRequest = <T>(storeName: string, mode: IDBTransactionMode, action: (store: IDBObjectStore) => IDBRequest<T>): Promise<T> => {
35
+ return new Promise(async (resolve, reject) => {
36
+ const db = await getDB();
37
+ const transaction = db.transaction(storeName, mode);
38
+ const store = transaction.objectStore(storeName);
39
+ const request = action(store);
40
+ request.onsuccess = () => resolve(request.result);
41
+ request.onerror = () => reject(request.error);
42
+ });
43
+ };
44
+
45
+ // --- User Service (for auth) ---
46
+ export const userService = {
47
+ getUser: (email: string): Promise<any> => dbRequest(USER_STORE, 'readonly', store => store.get(email)),
48
+ saveUser: (user: any): Promise<IDBValidKey> => dbRequest(USER_STORE, 'readwrite', store => store.put(user)),
49
+ };
50
+
51
+
52
+ // --- Crypto & Data Service ---
53
+ const arrayBufferToBase64 = (buffer: ArrayBuffer) => {
54
+ let binary = '';
55
+ const bytes = new Uint8Array(buffer);
56
+ for (let i = 0; i < bytes.byteLength; i++) {
57
+ binary += String.fromCharCode(bytes[i]);
58
+ }
59
+ return window.btoa(binary);
60
+ };
61
+
62
+ const base64ToArrayBuffer = (base64: string) => {
63
+ const binary_string = window.atob(base64);
64
+ const bytes = new Uint8Array(binary_string.length);
65
+ for (let i = 0; i < binary_string.length; i++) {
66
+ bytes[i] = binary_string.charCodeAt(i);
67
+ }
68
+ return bytes.buffer;
69
+ };
70
+
71
+ let cryptoKeyPromise: Promise<CryptoKey> | null = null;
72
+
73
+ const getCryptoKey = (): Promise<CryptoKey> => {
74
+ if (cryptoKeyPromise) return cryptoKeyPromise;
75
+
76
+ cryptoKeyPromise = new Promise(async (resolve, reject) => {
77
+ try {
78
+ const keyRecord = await dbRequest(DATA_STORE, 'readonly', store => store.get(CRYPTO_KEY_NAME));
79
+ if (keyRecord && keyRecord.value) {
80
+ const jwk = keyRecord.value;
81
+ const key = await window.crypto.subtle.importKey('jwk', jwk, { name: 'AES-GCM' }, true, ['encrypt', 'decrypt']);
82
+ resolve(key);
83
+ } else {
84
+ const newKey = await window.crypto.subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
85
+ const exportedKey = await window.crypto.subtle.exportKey('jwk', newKey);
86
+ await dbRequest(DATA_STORE, 'readwrite', store => store.put({ key: CRYPTO_KEY_NAME, value: exportedKey }));
87
+ resolve(newKey);
88
+ }
89
+ } catch (error) {
90
+ console.error("Crypto key initialization failed.", error);
91
+ reject(error);
92
+ }
93
+ });
94
+ return cryptoKeyPromise;
95
+ };
96
+
97
+
98
+ const encrypt = async (jsonString: string): Promise<string> => {
99
+ const key = await getCryptoKey();
100
+ const iv = window.crypto.getRandomValues(new Uint8Array(IV_LENGTH));
101
+ const encodedData = new TextEncoder().encode(jsonString);
102
+ const encryptedContent = await window.crypto.subtle.encrypt({ name: 'AES-GCM', iv }, key, encodedData);
103
+ const encryptedPackage = new Uint8Array(iv.length + encryptedContent.byteLength);
104
+ encryptedPackage.set(iv, 0);
105
+ encryptedPackage.set(new Uint8Array(encryptedContent), iv.length);
106
+ return arrayBufferToBase64(encryptedPackage.buffer);
107
+ };
108
+
109
+ const decrypt = async (encryptedBase64: string): Promise<any> => {
110
+ const key = await getCryptoKey();
111
+ try {
112
+ const encryptedPackage = base64ToArrayBuffer(encryptedBase64);
113
+ const iv = encryptedPackage.slice(0, IV_LENGTH);
114
+ const encryptedContent = encryptedPackage.slice(IV_LENGTH);
115
+ const decryptedContent = await window.crypto.subtle.decrypt({ name: 'AES-GCM', iv }, key, encryptedContent);
116
+ return JSON.parse(new TextDecoder().decode(decryptedContent));
117
+ } catch (error) {
118
+ console.error('Decryption failed. Data might be corrupt or key mismatched.', error);
119
+ return null;
120
+ }
121
+ };
122
+
123
/**
 * Public facade for encrypted persistence: values are JSON-serialized,
 * AES-GCM encrypted, and stored in the DATA_STORE object store under `key`.
 */
export const cryptoService = {
  /**
   * Serializes and encrypts `data`, then persists it under `key`.
   * Best-effort: failures are logged and swallowed, so callers never crash
   * on a persistence error (the write is simply lost).
   */
  async setEncryptedData(key: string, data: any): Promise<void> {
    try {
      // Explicitly stringify the data here to handle potential serialization issues
      // and ensure we are encrypting a clean JSON string. This prevents issues with
      // complex state objects from libraries like React, especially for nested data
      // like mind maps.
      const jsonString = JSON.stringify(data);
      const encryptedData = await encrypt(jsonString);
      await dbRequest(DATA_STORE, 'readwrite', store => store.put({ key, value: encryptedData }));
    } catch (error) {
      console.error(`Failed to serialize or encrypt data for key "${key}".`, error);
    }
  },
  /**
   * Loads and decrypts the value stored under `key`.
   * Returns null when the record is missing or decryption fails.
   */
  async getEncryptedData(key: string): Promise<any | null> {
    const record = await dbRequest(DATA_STORE, 'readonly', store => store.get(key));
    if (!record || !record.value) return null;
    return decrypt(record.value);
  },
  /** Deletes the record stored under `key`, if any. */
  async removeEncryptedData(key: string): Promise<void> {
    await dbRequest(DATA_STORE, 'readwrite', store => store.delete(key));
  },
};
vanhri-ai---the-mind-that-builds-worlds/services/emailService.ts ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
// @ts-ignore - emailjs is imported from a script tag in index.html, so we tell TypeScript to ignore the missing module declaration.
const emailjs = window.emailjs;

// --- IMPORTANT: CONFIGURE YOUR EMAIL SERVICE HERE ---
// 1. Create a free account at https://www.emailjs.com/
// 2. Add a new service (e.g., Gmail).
// 3. Create a new email template. It MUST contain the variables {{to_email}} and {{verification_code}} in the body.
// 4. Find your credentials under "Account" -> "API Keys" (Public Key) and "Email Services" / "Email Templates" (IDs) and paste them below.

const EMAILJS_SERVICE_ID = ''; // PASTE YOUR SERVICE ID HERE
const EMAILJS_TEMPLATE_ID = ''; // PASTE YOUR TEMPLATE ID HERE
const EMAILJS_PUBLIC_KEY = ''; // PASTE YOUR PUBLIC KEY HERE

// --- END OF CONFIGURATION ---


// True only when all three credentials are filled in AND the EmailJS CDN
// script actually loaded (window.emailjs may be undefined if it did not).
const isConfigured = !!(EMAILJS_SERVICE_ID && EMAILJS_TEMPLATE_ID && EMAILJS_PUBLIC_KEY && emailjs);

if (isConfigured) {
  // @ts-ignore
  emailjs.init({ publicKey: EMAILJS_PUBLIC_KEY });
} else {
  // Warn once at module load so developers know verification codes will only
  // appear in the console.
  console.warn(
    `%c[Vanhri] Email Service Not Configured%c
Real email verification is disabled. The verification code will be logged to the console.
To enable real emails, get your free credentials from emailjs.com and add them to 'services/emailService.ts'.`,
    'background: #f59e0b; color: #fff; font-weight: bold; padding: 4px 8px; border-radius: 4px;',
    'color: default;'
  );
}
32
+
33
+ export const emailService = {
34
+ isConfigured,
35
+
36
+ async sendVerificationEmail(to_email: string, verification_code: string): Promise<void> {
37
+ if (!this.isConfigured) {
38
+ // This error signals the caller to use a fallback method.
39
+ throw new Error("Email service is not configured.");
40
+ }
41
+
42
+ const templateParams = {
43
+ to_email,
44
+ verification_code,
45
+ };
46
+
47
+ try {
48
+ // @ts-ignore
49
+ await emailjs.send(EMAILJS_SERVICE_ID, EMAILJS_TEMPLATE_ID, templateParams);
50
+ console.log('Verification email sent successfully via EmailJS.');
51
+ } catch (error) {
52
+ console.error('Failed to send verification email via EmailJS:', error);
53
+ // Let the caller handle this error and inform the user
54
+ throw new Error("The email service failed to send the verification code.");
55
+ }
56
+ },
57
+ };
vanhri-ai---the-mind-that-builds-worlds/services/geminiService.ts ADDED
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+
3
+ import { GoogleGenAI, Part, GroundingChunk } from "@google/genai";
4
+ import { Message, GroundingCitation } from '../types';
5
+
6
+ let ai: GoogleGenAI | null = null;
7
+
8
+ // Lazily initialize the AI client to prevent app crash on load if API key is missing.
9
+ const getAiInstance = (): GoogleGenAI => {
10
+ if (ai) return ai;
11
+
12
+ if (!process.env.API_KEY) {
13
+ throw new Error("API_KEY environment variable is not set. Please configure it to use AI features.");
14
+ }
15
+
16
+ ai = new GoogleGenAI({ apiKey: process.env.API_KEY });
17
+ return ai;
18
+ };
19
+
20
+
21
+ function fileToGenerativePart(base64: string, mimeType: string): Part {
22
+ return {
23
+ inlineData: {
24
+ data: base64,
25
+ mimeType,
26
+ },
27
+ };
28
+ }
29
+
30
+ function isGroundingChunk(chunk: any): chunk is GroundingChunk {
31
+ return chunk && typeof chunk.web?.uri === 'string' && typeof chunk.web?.title === 'string';
32
+ }
33
+
34
+ export async function generateJson(
35
+ prompt: string,
36
+ persona: string,
37
+ schema: any
38
+ ): Promise<{ json: any | null; error?: string }> {
39
+ try {
40
+ const ai = getAiInstance();
41
+ const model = 'gemini-2.5-flash';
42
+
43
+ const response = await ai.models.generateContent({
44
+ model: model,
45
+ contents: prompt,
46
+ config: {
47
+ systemInstruction: persona,
48
+ responseMimeType: 'application/json',
49
+ responseSchema: schema,
50
+ }
51
+ });
52
+
53
+ let jsonStr = response.text.trim();
54
+
55
+ // Sometimes the model might still wrap the JSON in markdown, so we strip it.
56
+ const fenceRegex = /^```(?:json)?\s*\n?(.*?)\n?\s*```$/s;
57
+ const match = jsonStr.match(fenceRegex);
58
+ if (match && match[1]) {
59
+ jsonStr = match[1].trim();
60
+ }
61
+
62
+ const json = JSON.parse(jsonStr);
63
+ return { json };
64
+
65
+ } catch (e) {
66
+ console.error("Gemini API JSON generation failed:", e);
67
+ const errorMessage = e instanceof Error ? e.message : "Failed to get a valid JSON response from the AI.";
68
+ return { json: null, error: `I've encountered an error: ${errorMessage}` };
69
+ }
70
+ }
71
+
72
+
73
/**
 * Streams a chat completion from Gemini.
 *
 * Builds the request from (in order): prior conversation `history`, then a
 * final user turn made of `notebookParts` + `prompt` + `imageBase64`.
 * Yields `{ text }` chunks as they arrive; when `options.useSearch` is on,
 * the first chunk carrying grounding metadata is yielded once as
 * `{ text, citations }`. Errors are reported as a final yielded text chunk
 * rather than thrown, so the UI stream always terminates cleanly.
 *
 * @param prompt        Current user prompt (may be empty if an image is given).
 * @param imageBase64   Raw base64 (no data-URI prefix) of an attached image;
 *                      sent as image/jpeg.
 * @param history       Prior messages; entries with no usable parts are skipped.
 * @param persona       System instruction for the model.
 * @param notebookParts Pre-built Parts for attached notebook sources.
 * @param options       `useSearch` enables Google Search grounding;
 *                      `responseMimeType` forces a response MIME type.
 */
export async function* generateTextStream(
  prompt: string,
  imageBase64: string | null,
  history: Message[],
  persona: string,
  notebookParts: Part[],
  options: { useSearch?: boolean; responseMimeType?: string } = {}
): AsyncGenerator<{ text?: string; citations?: GroundingCitation[] }> {
  try {
    const ai = getAiInstance();
    const model = 'gemini-2.5-flash';

    // Convert app-level Message history into SDK `contents` entries.
    const contents = history
      .map(msg => {
        const parts: Part[] = [];

        // Add text part if it's not empty.
        if (msg.text) {
          parts.push({ text: msg.text });
        }

        // Add image part if it exists (for user-uploaded images).
        if (msg.image) {
          // Expects data URI format: "data:image/jpeg;base64,..."
          const match = msg.image.match(/data:(.*);base64,(.*)/);
          if (match && match[1] && match[2]) {
            const mimeType = match[1];
            const base64Data = match[2];
            parts.push(fileToGenerativePart(base64Data, mimeType));
          }
        }

        // Add generated images to history for model messages
        if (msg.sender === 'ai' && msg.generatedImages && msg.generatedImages.length > 0) {
          for (const imgBase64 of msg.generatedImages) {
            // Assuming they are JPEGs as per generateImage service
            parts.push(fileToGenerativePart(imgBase64, 'image/jpeg'));
          }
        }

        // Don't add a history item if it has no valid parts
        if (parts.length === 0) {
          return null;
        }

        return {
          role: msg.sender === 'user' ? 'user' : 'model',
          parts: parts
        };
      })
      .filter(Boolean) as any[]; // Filter out nulls

    const currentTurnParts: Part[] = [];

    // Add notebook parts first
    if (notebookParts && notebookParts.length > 0) {
      currentTurnParts.push(...notebookParts);
    }

    if (prompt) {
      currentTurnParts.push({ text: prompt });
    }
    if (imageBase64) {
      currentTurnParts.push(fileToGenerativePart(imageBase64, 'image/jpeg'));
    }

    // Nothing to send: short-circuit with a user-facing hint.
    if (currentTurnParts.length === 0) {
      yield { text: "Please provide a prompt or an image."};
      return;
    }

    contents.push({ role: 'user', parts: currentTurnParts });

    const config: any = {
      systemInstruction: persona,
    };
    if (options.useSearch) {
      config.tools = [{googleSearch: {}}];
    }
    if (options.responseMimeType) {
      config.responseMimeType = options.responseMimeType;
    }

    const result = await ai.models.generateContentStream({
      model: model,
      contents,
      config: config
    });

    // Citations are attached to at most one chunk (the first that carries
    // grounding metadata); `citationsSent` prevents duplicates.
    let citationsSent = false;
    for await (const chunk of result) {
      const text = chunk.text;

      if (options.useSearch) {
        const groundingMetadata = chunk.candidates?.[0]?.groundingMetadata;
        if (groundingMetadata?.groundingChunks && !citationsSent) {
          const citations: GroundingCitation[] = groundingMetadata.groundingChunks
            .filter(isGroundingChunk)
            .map(c => ({ uri: c.web.uri, title: c.web.title }));

          if (citations.length > 0) {
            citationsSent = true;
            // Yield text together with citations, then skip the plain-text
            // yield below so the chunk is not emitted twice.
            yield { text, citations };
            continue;
          }
        }
      }

      if (text) {
        yield { text };
      }
    }
  } catch (e) {
    // Surface errors as a final streamed message instead of throwing.
    console.error("Gemini API call failed:", e);
    const errorMessage = e instanceof Error ? e.message : "Failed to get a response from the AI.";
    yield { text: `I've encountered an error: ${errorMessage}` };
  }
}
191
+
192
+
193
+ export async function generateImage(prompt: string, numberOfImages: number = 1): Promise<string[]> {
194
+ if (!prompt) {
195
+ throw new Error("A prompt is required to generate an image.");
196
+ }
197
+
198
+ try {
199
+ const ai = getAiInstance();
200
+ const response = await ai.models.generateImages({
201
+ model: 'imagen-3.0-generate-002',
202
+ prompt: prompt,
203
+ config: { numberOfImages, outputMimeType: 'image/jpeg' },
204
+ });
205
+
206
+ if (!response.generatedImages || response.generatedImages.length === 0) {
207
+ throw new Error("The AI did not return any images.");
208
+ }
209
+
210
+ const images = response.generatedImages.map(img => img.image?.imageBytes).filter(Boolean) as string[];
211
+
212
+ if (images.length === 0) {
213
+ throw new Error("The AI did not return any images with valid data.");
214
+ }
215
+
216
+ return images;
217
+ } catch (e) {
218
+ console.error("Gemini Image Generation API call failed:", e);
219
+ const errorMessage = e instanceof Error ? e.message : "Failed to generate images. The model may have refused the prompt.";
220
+ throw new Error(errorMessage);
221
+ }
222
+ }
vanhri-ai---the-mind-that-builds-worlds/services/ttsService.ts ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ class TTSService {
2
+ private synth: SpeechSynthesis | null = null;
3
+ public isSupported: boolean = false;
4
+ private voices: SpeechSynthesisVoice[] = [];
5
+ private currentUtterance: SpeechSynthesisUtterance | null = null;
6
+
7
+ constructor() {
8
+ // Make the check more robust to handle environments where the object exists but is null.
9
+ if ('speechSynthesis' in window && window.speechSynthesis) {
10
+ this.isSupported = true;
11
+ this.synth = window.speechSynthesis;
12
+ // Voices load asynchronously. We must wait for the `voiceschanged` event.
13
+ if (this.synth.onvoiceschanged !== undefined) {
14
+ this.synth.onvoiceschanged = () => this.loadVoices();
15
+ }
16
+ // In some browsers, the event might have already fired, so we also check on the first speak call.
17
+ } else {
18
+ console.warn("Text-to-speech is not supported by this browser or is not available.");
19
+ }
20
+ }
21
+
22
+ private loadVoices() {
23
+ if (!this.synth) return;
24
+ this.voices = this.synth.getVoices();
25
+ }
26
+
27
+ speak({ text, onEnd }: { text: string; onEnd?: () => void }) {
28
+ if (!this.isSupported || !text || !this.synth) {
29
+ onEnd?.();
30
+ return;
31
+ }
32
+
33
+ // If the synth is speaking, cancel it before starting new speech.
34
+ if (this.synth.speaking) {
35
+ this.synth.cancel();
36
+ }
37
+
38
+ // If voices haven't loaded yet (e.g., on first run), try loading them now.
39
+ if (this.voices.length === 0) {
40
+ this.loadVoices();
41
+ }
42
+
43
+ this.currentUtterance = new SpeechSynthesisUtterance(text);
44
+
45
+ // Use the cached voices array to find a preferred voice.
46
+ const preferredVoice = this.voices.find(voice => voice.name.includes('Google') && voice.lang.startsWith('en')) || this.voices.find(voice => voice.lang.startsWith('en-US'));
47
+ if (preferredVoice) {
48
+ this.currentUtterance.voice = preferredVoice;
49
+ }
50
+
51
+ this.currentUtterance.onend = () => {
52
+ onEnd?.();
53
+ this.currentUtterance = null;
54
+ };
55
+
56
+ this.currentUtterance.onerror = (event: SpeechSynthesisErrorEvent) => {
57
+ console.error('SpeechSynthesisUtterance.onerror:', event.error);
58
+ // Errors like 'canceled' or 'interrupted' are often normal if we called cancel() just before.
59
+ // We still call onEnd() to ensure the UI state is reset correctly.
60
+ onEnd?.();
61
+ this.currentUtterance = null;
62
+ };
63
+
64
+ try {
65
+ // The most direct approach is to speak immediately and handle potential 'interrupted' errors in the onerror handler.
66
+ this.synth.speak(this.currentUtterance);
67
+ } catch(e) {
68
+ console.error("Speech synthesis failed.", e);
69
+ onEnd?.();
70
+ this.currentUtterance = null;
71
+ }
72
+ }
73
+
74
+ cancel() {
75
+ if (!this.isSupported || !this.synth) return;
76
+ // Check if speaking before cancelling to avoid errors.
77
+ if(this.synth.speaking) {
78
+ this.synth.cancel();
79
+ }
80
+ }
81
+ }
82
+
83
+ export const ttsService = new TTSService();
84
+
85
+ // The global handler is now managed inside the class, so this is no longer needed.
vanhri-ai---the-mind-that-builds-worlds/sw.js ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
const CACHE_NAME = 'vanhri-cache-v1';
// App-shell assets pre-cached at install time.
// NOTE(review): '/index.tsx' is a source file; confirm it matches the actual
// built asset name in production, or install will fail to cache it.
const urlsToCache = [
  '/',
  '/index.html',
  '/index.tsx',
  'https://cdn.tailwindcss.com',
  'https://cdn.jsdelivr.net/npm/reactflow@11/dist/style.css',
  'https://esm.sh/react@18.3.1',
  'https://esm.sh/react-dom@18.3.1/client'
];

// Install event: Open a cache and add the URLs to cache to it.
self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(CACHE_NAME)
      .then(cache => {
        console.log('Opened cache and caching assets');
        return cache.addAll(urlsToCache);
      })
      .catch(err => {
        console.error('Failed to open cache or cache assets:', err);
      })
  );
});

// Activate event: Clean up old caches to ensure the new version is served.
self.addEventListener('activate', event => {
  const cacheWhitelist = [CACHE_NAME];
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return Promise.all(
        cacheNames.map(cacheName => {
          if (cacheWhitelist.indexOf(cacheName) === -1) {
            console.log('Deleting old cache:', cacheName);
            return caches.delete(cacheName);
          }
          return undefined;
        })
      );
    })
  );
});

// Fetch event: cache-first strategy for GET requests, network fallback.
self.addEventListener('fetch', event => {
  // We only want to cache GET requests.
  if (event.request.method !== 'GET') {
    return;
  }

  event.respondWith(
    caches.match(event.request)
      .then(response => {
        // If the request is in the cache, return the cached response.
        if (response) {
          return response;
        }
        // Not cached: go to the network. Dynamic/API responses are
        // intentionally not cached here — this worker covers the app shell.
        return fetch(event.request);
      })
      // Fix: without this catch, a cache miss while offline rejects
      // respondWith() and the browser shows a generic network-error page;
      // return a clean 503 instead.
      .catch(() => new Response('Offline', { status: 503, statusText: 'Service Unavailable' }))
  );
});
vanhri-ai---the-mind-that-builds-worlds/tsconfig.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2020",
4
+ "experimentalDecorators": true,
5
+ "useDefineForClassFields": false,
6
+ "module": "ESNext",
7
+ "lib": ["ES2020", "DOM", "DOM.Iterable"],
8
+ "skipLibCheck": true,
9
+
10
+ /* Bundler mode */
11
+ "moduleResolution": "bundler",
12
+ "allowImportingTsExtensions": true,
13
+ "isolatedModules": true,
14
+ "moduleDetection": "force",
15
+ "noEmit": true,
16
+ "allowJs": true,
17
+ "jsx": "react-jsx",
18
+
19
+ /* Linting */
20
+ "strict": true,
21
+ "noUnusedLocals": true,
22
+ "noUnusedParameters": true,
23
+ "noFallthroughCasesInSwitch": true,
24
+ "noUncheckedSideEffectImports": true,
25
+
26
+ "paths": {
27
+ "@/*" : ["./*"]
28
+ }
29
+ }
30
+ }
vanhri-ai---the-mind-that-builds-worlds/types.ts ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import React from 'react';

/** A single web source returned by Google Search grounding. */
export interface GroundingCitation {
  uri: string;
  title: string;
}

/** A saved conversation memory attached to a message. */
export interface Memory {
  id: string;
  title: string;
  summary: string;
}

/** A user-provided reference document attached to the notebook. */
export interface NotebookSource {
  id: string;
  name: string;
  content: string; // Data URL of the file content
  mimeType: string;
}

/** One node of a generated mind map (reactflow-compatible shape). */
export interface MindMapNode {
  id: string;
  position: { x: number; y: number };
  data: { label: string };
  type?: string;
  style?: React.CSSProperties;
}

/** One directed edge between mind-map nodes (reactflow-compatible shape). */
export interface MindMapEdge {
  id: string;
  source: string;
  target: string;
  animated?: boolean;
}

/** Complete mind-map payload: nodes plus the edges connecting them. */
export interface MindMapData {
  nodes: MindMapNode[];
  edges: MindMapEdge[];
}

export interface ChartDataPoint {
  label: string; // The label for the data point (e.g., 'Q1', 'Jan')
  values: number[]; // Array of numerical values corresponding to the keys
}

export interface ChartKey {
  name: string; // Name of the data series (e.g., 'Sales')
  color: string; // Hex color for the series
}

/** Chart payload; `data[i].values[k]` belongs to series `keys[k]`. */
export interface ChartData {
  type: 'bar' | 'line' | 'pie';
  title: string;
  data: ChartDataPoint[];
  keys: ChartKey[]; // Defines what's in the 'values' array
}

/** One chat message from the user or the AI, with optional rich payloads. */
export interface Message {
  id:string;
  sender: 'user' | 'ai';
  text: string;
  image?: string | null; // data URI of a user-uploaded image ("data:<mime>;base64,...")
  memory?: Memory;
  generatedImages?: string[]; // raw base64 payloads from image generation
  generated3DCode?: string;
  groundingCitations?: GroundingCitation[];
  mindMap?: MindMapData | null;
  chartData?: ChartData | null;
  isAgent?: boolean;
}

/** A proactive follow-up suggestion shown to the user with a call-to-action. */
export interface ProactiveSuggestion {
  id: string;
  text: string;
  actionText: string;
  action: string;
}

// Authentication Types
// Discriminated union on `status` describing the app's current auth state.
export type Auth =
  | { status: 'loading' }
  | { status: 'landing' }
  | { status: 'unauthenticated' }
  | { status: 'signup' }
  | { status: 'verifying'; email: string; verificationCode: string | null; emailServiceConfigured: boolean }
  | { status: 'authenticated'; user: { id: string; email: string } }
  | { status: 'guest'; user: {id: string; email: string } };
vanhri-ai---the-mind-that-builds-worlds/vite.config.ts ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import path from 'path';
import { defineConfig, loadEnv } from 'vite';

// Vite configuration: injects the Gemini API key at build time and maps the
// "@/" import alias to the project root.
export default defineConfig(({ mode }) => {
  // Empty prefix: load every variable from the .env files for this mode.
  const env = loadEnv(mode, '.', '');
  return {
    define: {
      // Statically replaces process.env.* references in client code.
      // NOTE(review): this inlines the key into the shipped bundle — fine
      // for local demos, but do not deploy publicly with a real key.
      'process.env.API_KEY': JSON.stringify(env.GEMINI_API_KEY),
      'process.env.GEMINI_API_KEY': JSON.stringify(env.GEMINI_API_KEY)
    },
    resolve: {
      alias: {
        '@': path.resolve(__dirname, '.'),
      }
    }
  };
});