Hunterout committed on
Commit
c13e4ae
·
verified ·
1 Parent(s): 3d5188c

I'm getting an API error:401 now when I try to chat with the characters. Can the LLM model be loaded directly into the website so I dont need an API call?

Browse files
Files changed (2) hide show
  1. index.html +2 -2
  2. script.js +106 -52
index.html CHANGED
@@ -223,8 +223,8 @@
223
  </div>
224
  <div class="flex justify-between text-sm text-gray-500">
225
  <div class="flex items-center gap-4">
226
- <span id="aiStatus">AI is in character. Using OpenRouter for immersive roleplay.</span>
227
- <span id="apiStatus" class="px-2 py-1 rounded-full bg-green-900/30 text-green-400 text-xs">Connected</span>
228
  </div>
229
  <span id="charCount">0/1000</span>
230
  </div>
 
223
  </div>
224
  <div class="flex justify-between text-sm text-gray-500">
225
  <div class="flex items-center gap-4">
226
+ <span id="aiStatus">AI is in character. Using local intelligence for immersive roleplay.</span>
227
+ <span id="apiStatus" class="px-2 py-1 rounded-full bg-green-900/30 text-green-400 text-xs">Connected</span>
228
  </div>
229
  <span id="charCount">0/1000</span>
230
  </div>
script.js CHANGED
@@ -1,14 +1,12 @@
1
 
2
- // Configuration
3
- const OPENROUTER_API_KEY = 'YOUR_OPENROUTER_API_KEY'; // Replace with your actual API key or leave as is for demo
4
- const OPENROUTER_API_URL = 'https://openrouter.ai/api/v1/chat/completions';
5
- const AI_MODEL = 'meta-llama/llama-3.3-70b-instruct'; // Good for roleplay
6
- // Alternative models: 'mistralai/mixtral-8x7b-instruct', 'anthropic/claude-3-haiku'
7
 
8
  // State
9
  let isGenerating = false;
10
  let abortController = null;
11
-
12
  document.addEventListener('DOMContentLoaded', function() {
13
  const chatForm = document.getElementById('chatForm');
14
  const messageInput = document.getElementById('messageInput');
@@ -48,7 +46,6 @@ document.addEventListener('DOMContentLoaded', function() {
48
  }
49
  }
50
  });
51
-
52
  // Send message function
53
  async function sendMessage() {
54
  const text = messageInput.value.trim();
@@ -62,6 +59,7 @@ document.addEventListener('DOMContentLoaded', function() {
62
  // Show typing indicator
63
  showTyping();
64
  setGenerating(true);
 
65
 
66
  try {
67
  const characterName = document.getElementById('characterName').textContent;
@@ -74,56 +72,114 @@ document.addEventListener('DOMContentLoaded', function() {
74
  // Get conversation history
75
  const messages = getConversationHistory(systemPrompt);
76
 
77
- // Call OpenRouter API
78
- const aiResponse = await fetchAIResponse(messages);
79
 
80
  removeTyping();
81
  addMessage('ai', aiResponse);
82
  setGenerating(false);
83
- updateAPIStatus(true, 'Response received');
84
  } catch (error) {
85
  removeTyping();
86
  console.error('AI Error:', error);
87
- addMessage('system', `AI Error: ${error.message}. Falling back to local response.`);
88
  // Fallback to local response
89
  const fallbackResponse = getFallbackResponse();
90
  addMessage('ai', fallbackResponse);
91
  setGenerating(false);
92
- updateAPIStatus(false, 'Using fallback');
93
  }
94
  }
95
 
96
- // Fetch from OpenRouter
97
- async function fetchAIResponse(messages) {
98
- abortController = new AbortController();
99
-
100
- const response = await fetch(OPENROUTER_API_URL, {
101
- method: 'POST',
102
- headers: {
103
- 'Content-Type': 'application/json',
104
- 'Authorization': `Bearer ${OPENROUTER_API_KEY}`,
105
- 'HTTP-Referer': window.location.origin,
106
- 'X-Title': 'NexusAI Roleplay'
107
- },
108
- body: JSON.stringify({
109
- model: AI_MODEL,
110
- messages: messages,
111
- max_tokens: getMaxTokens(),
112
- temperature: getTemperature(),
113
- stream: false
114
- }),
115
- signal: abortController.signal
116
  });
 
117
 
118
- if (!response.ok) {
119
- throw new Error(`API error: ${response.status} ${response.statusText}`);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
120
  }
121
 
122
- const data = await response.json();
123
- return data.choices[0].message.content;
124
  }
125
-
126
- // Build system prompt
127
  function buildSystemPrompt(name, role, length) {
128
  const lengthMap = {
129
  short: 'Keep responses brief, 1-2 sentences.',
@@ -199,18 +255,21 @@ document.addEventListener('DOMContentLoaded', function() {
199
  stopButton.classList.remove('hidden');
200
  sendButton.classList.add('hidden');
201
  messageInput.disabled = true;
202
- aiStatus.textContent = `${document.getElementById('characterName').textContent} is typing...`;
203
- updateAPIStatus(true, 'Generating response');
204
  } else {
205
  stopButton.classList.add('hidden');
206
  sendButton.classList.remove('hidden');
207
  messageInput.disabled = false;
208
- aiStatus.textContent = `AI is in character. Using ${AI_MODEL.split('/')[0]} for immersive roleplay.`;
209
  abortController = null;
 
 
 
 
210
  }
211
  }
212
-
213
- function stopGeneration() {
214
  if (abortController) {
215
  abortController.abort();
216
  }
@@ -229,7 +288,6 @@ document.addEventListener('DOMContentLoaded', function() {
229
  apiStatus.className = 'px-2 py-1 rounded-full bg-yellow-900/30 text-yellow-400 text-xs';
230
  }
231
  }
232
-
233
  // Export functions to window
234
  window.addMessage = function(type, content) {
235
  const messageDiv = document.createElement('div');
@@ -261,8 +319,7 @@ document.addEventListener('DOMContentLoaded', function() {
261
  chatMessages.scrollTop = chatMessages.scrollHeight;
262
  feather.replace();
263
  };
264
-
265
- function showTyping() {
266
  const typingDiv = document.createElement('div');
267
  typingDiv.id = 'typingIndicator';
268
  typingDiv.className = 'typing-indicator mb-6 flex gap-4';
@@ -286,7 +343,6 @@ document.addEventListener('DOMContentLoaded', function() {
286
  const typing = document.getElementById('typingIndicator');
287
  if (typing) typing.remove();
288
  }
289
-
290
  // Pre‑fill example
291
  window.switchCharacter = function(name, role, avatar) {
292
  document.getElementById('characterName').textContent = name;
@@ -300,10 +356,9 @@ document.addEventListener('DOMContentLoaded', function() {
300
  span.textContent = role;
301
 
302
  addMessage('system', `${name} has joined the chat. Role: ${role}`);
303
- aiStatus.textContent = `AI is in character as ${name} (${role}). Using ${AI_MODEL.split('/')[0]} for immersive roleplay.`;
304
  };
305
-
306
- window.startNewScenario = function() {
307
  if (confirm('Start a new scenario? Current chat will be cleared.')) {
308
  chatMessages.innerHTML = '';
309
  addMessage('system', 'New scenario started. Setting the stage...');
@@ -314,7 +369,6 @@ document.addEventListener('DOMContentLoaded', function() {
314
  chatMessages.innerHTML = '';
315
  addMessage('system', 'Chat cleared. Ready for new adventures!');
316
  };
317
-
318
  // Initialize API status
319
- updateAPIStatus(true, 'Ready');
320
  });
 
1
 
2
// Configuration - Now using local AI simulation (no API required)
const USE_LOCAL_AI = true;  // NOTE(review): flag is declared but not referenced in the visible hunks — presumably kept for a future remote-API toggle; confirm before removing
const LOCAL_AI_DELAY = 800; // ms delay to simulate AI thinking

// State
let isGenerating = false;    // true while a reply is being produced (input is disabled in setGenerating)
let abortController = null;  // cancellation handle; stopGeneration() calls .abort() on it
let localAiInterval = null;  // cleared defensively in setGenerating(false); no assignment visible in this view — TODO confirm it is still needed
10
  document.addEventListener('DOMContentLoaded', function() {
11
  const chatForm = document.getElementById('chatForm');
12
  const messageInput = document.getElementById('messageInput');
 
46
  }
47
  }
48
  });
 
49
  // Send message function
50
  async function sendMessage() {
51
  const text = messageInput.value.trim();
 
59
  // Show typing indicator
60
  showTyping();
61
  setGenerating(true);
62
+ updateAPIStatus(true, 'Generating locally...');
63
 
64
  try {
65
  const characterName = document.getElementById('characterName').textContent;
 
72
  // Get conversation history
73
  const messages = getConversationHistory(systemPrompt);
74
 
75
+ // Use local AI simulation (no API calls)
76
+ const aiResponse = await generateLocalAIResponse(characterName, messages, responseLength);
77
 
78
  removeTyping();
79
  addMessage('ai', aiResponse);
80
  setGenerating(false);
81
+ updateAPIStatus(true, 'Response ready');
82
  } catch (error) {
83
  removeTyping();
84
  console.error('AI Error:', error);
85
+ addMessage('system', `Local AI error: ${error.message}. Using fallback response.`);
86
  // Fallback to local response
87
  const fallbackResponse = getFallbackResponse();
88
  addMessage('ai', fallbackResponse);
89
  setGenerating(false);
90
+ updateAPIStatus(true, 'Using fallback');
91
  }
92
  }
93
 
94
// Local AI response generation (simulated, no network access).
// Resolves with a canned in-character reply after LOCAL_AI_DELAY ms.
//
// Fix: wire up the shared `abortController` so the Stop button actually
// cancels a pending local reply. Previously stopGeneration() aborted a
// controller this function never created, so stopping had no effect and the
// reply still appeared after the delay. On abort we clear the timer and
// reject with an AbortError-named error, mirroring how the old fetch-based
// path rejected when aborted (the caller's catch block handles it).
//
// Params:
//   characterName - name used to pick a reply template set
//   messages      - conversation history; only the last entry's .content is used
//   length        - response-length hint forwarded to generateCharacterResponse
// Returns: Promise<string> resolving to the reply, rejecting on user abort.
async function generateLocalAIResponse(characterName, messages, length) {
    abortController = new AbortController();
    return new Promise((resolve, reject) => {
        // Simulate AI thinking time
        const timer = setTimeout(() => {
            const lastUserMessage = messages[messages.length - 1]?.content || '';
            resolve(generateCharacterResponse(characterName, lastUserMessage, length));
        }, LOCAL_AI_DELAY);
        abortController.signal.addEventListener('abort', () => {
            clearTimeout(timer);
            const err = new Error('Generation stopped by user');
            err.name = 'AbortError';
            reject(err);
        });
    });
}
105
 
106
// Character-specific response generation.
// Picks a random canned template for the given character (generic templates
// for unknown names), interpolates the user's message, then adjusts the
// result for the requested length.
//
// Params:
//   characterName - display name selecting a template set
//   userMessage   - raw user text interpolated into the reply
//   length        - 'short' (first sentence), 'detailed' (extra sentence),
//                   anything else returns the template unchanged
// Returns: the reply string.
function generateCharacterResponse(characterName, userMessage, length) {
    const characterResponses = {
        'Astrid': [
            `*adjusts her robe thoughtfully* Your words, "${userMessage}", resonate with the ancient prophecies. The stars whisper of similar patterns in the Crystal Archives.`,
            `Ah, a curious mind indeed! ${userMessage} reminds me of the time when the Moonstone Amulet revealed its secrets to the chosen one.`,
            `*eyes twinkle with arcane energy* In Eldoria, such questions are pondered by the wisest sages. Let me share what the scrolls reveal about this matter.`,
            `The mystical winds carry echoes of your query. ${userMessage}... yes, I recall an enchantment that dealt with similar concepts in the Whispering Woods.`,
            `*gestures with a glowing staff* By the old gods, your inquiry touches upon forbidden lore. But for you, traveler, I shall reveal what I know.`
        ],
        'Kael': [
            `*takes a drag from his virtual cigarette* "${userMessage}"... that's a loaded question in Neo‑Tokyo. The data points to several possibilities, none of them pretty.`,
            `Hmm. ${userMessage}. Let me check my neural implant's database. Yeah, there's a case file from '48 that matches this pattern.`,
            `*checks his wrist‑holo* You're asking about ${userMessage}? That's corporate‑level intel. But for the right price... I might have some leads.`,
            `In this city, every byte has a price. Your query about "${userMessage}" is no exception. Let me dig through the encrypted channels.`,
            `*cyber‑eye flickers* ${userMessage}... that triggers a security alert. But I know a backdoor into the mainframe that might give us answers.`
        ],
        'Lyra': [
            `*checks star chart* Captain's log: our guest asks, "${userMessage}". This aligns with our recent discovery in the Andromeda sector.`,
            `Fascinating! ${userMessage} is precisely what we encountered near the quantum nebula. The alien flora there exhibited similar properties.`,
            `*adjusts comms headset* On the Aether, we've documented phenomena related to "${userMessage}". Let me pull up the holographic records.`,
            `Your curiosity about ${userMessage} reminds me of the Silicate Entities we met on Kepler‑186f. Their communication patterns were remarkably similar.`,
            `*gestures to the viewport* See that pulsar? It's emitting signals that correlate with your query about "${userMessage}". Coincidence? I think not.`
        ],
        'Ragnar': [
            `*grins, sharpening his axe* By Odin's beard! "${userMessage}" is a question worthy of a true warrior! Let me tell you a tale from the frozen north.`,
            `HA! ${userMessage} reminds me of the time I faced the Ice Giant Jörmund! His roars shook the very mountains with similar intent!`,
            `*drinks from a horn* Your words, "${userMessage}", echo in the great halls of Valhalla! The All‑Father himself would approve of such curiosity!`,
            `A warrior's mind is as sharp as his blade! ${userMessage}... let me consult the rune stones for their ancient wisdom on this matter.`,
            `*slams fist on table* ${userMessage}! A bold query! The skalds will sing of this day when wisdom was sought with such courage!`
        ],
        'Elara': [
            `*a leaf drifts into her hand* The forest whispers of your question: "${userMessage}". The ancient trees have dreamed of similar concepts.`,
            `Gentle one, ${userMessage}... let me consult the spirit of the river. Its flowing waters carry memories of such mysteries.`,
            `*birds gather nearby* Your curiosity about "${userMessage}" is known to the woodland creatures. The fox has seen similar patterns in the moonlit glades.`,
            `The moss on the standing stones tells stories related to ${userMessage}. Let me translate their silent language for you.`,
            `*breathes in the forest air* ${userMessage}... yes, the mycelium network beneath us pulses with knowledge of this. The mushrooms will guide us.`
        ],
        'Victor': [
            `*tinkers with a brass device* "${userMessage}" you say? That's precisely what my latest invention, the Aether‑Oscillograph, was designed to measure!`,
            `Fascinating! ${userMessage} aligns perfectly with the theoretical principles I outlined in my monograph on quantum‑steam dynamics!`,
            `*adjusts his goggles* Your query about "${userMessage}" reminds me of the incident with the Phase‑Shift Engine last Tuesday! Nearly vaporized my laboratory!`,
            `Ah, ${userMessage}! That's elementary, my dear friend! Let me demonstrate with this pocket‑sized Tesla coil and some copper wiring...`,
            `*consults a blueprint* "${userMessage}"... yes, yes! I have schematics for a device that could potentially address that very conundrum!`
        ]
    };

    // Get character-specific responses or generic ones
    const responses = characterResponses[characterName] || [
        `*considers thoughtfully* "${userMessage}"... that's an interesting perspective. Let me reflect on this.`,
        `Ah, your question about ${userMessage} touches upon deep matters. Allow me to share my thoughts.`,
        `*nods slowly* ${userMessage}. Yes, I have experience with similar situations. Here's what I've learned.`,
        `Fascinating inquiry! "${userMessage}" reminds me of something I encountered before. Let me elaborate.`,
        `*pauses for a moment* Your words, "${userMessage}", resonate with me. I believe I can offer some insight.`
    ];

    // Adjust response length
    let response = responses[Math.floor(Math.random() * responses.length)];

    if (length === 'short') {
        // Fix: the old `response.split('.')[0] + '.'` could cut inside the
        // interpolated user text (which may itself contain '.'), mangling the
        // quoted message and leaving unbalanced quotes/asterisks; it also
        // ignored '!'/'?' terminators and appended a stray '.' after them.
        response = truncateToFirstSentence(response, userMessage);
    } else if (length === 'detailed') {
        // Make it more detailed
        const details = [
            ' The implications of this are far‑reaching, affecting multiple dimensions of our current situation.',
            ' I recall an ancient text that elaborates further on this very subject, suggesting deeper connections.',
            ' This aligns with the broader patterns we have observed throughout our journey together.',
            ' There are nuances here that warrant careful consideration, as they may reveal hidden truths.',
            ' Let me expand upon this with additional context from my own experiences and observations.'
        ];
        response += details[Math.floor(Math.random() * details.length)];
    }

    return response;
}

// Helper: cut `text` at the first sentence terminator ('.', '!' or '?') that
// appears at or after the end of the embedded `userMessage`, so the user's
// quoted words are never split in half. Returns `text` unchanged when no
// terminator is found past that point.
function truncateToFirstSentence(text, userMessage) {
    const msgStart = userMessage ? text.indexOf(userMessage) : -1;
    const searchFrom = msgStart === -1 ? 0 : msgStart + userMessage.length;
    const terminator = text.slice(searchFrom).match(/[.!?]/);
    return terminator ? text.slice(0, searchFrom + terminator.index + 1) : text;
}
182
+ // Build system prompt
 
183
  function buildSystemPrompt(name, role, length) {
184
  const lengthMap = {
185
  short: 'Keep responses brief, 1-2 sentences.',
 
255
  stopButton.classList.remove('hidden');
256
  sendButton.classList.add('hidden');
257
  messageInput.disabled = true;
258
+ aiStatus.textContent = `${document.getElementById('characterName').textContent} is thinking...`;
259
+ updateAPIStatus(true, 'Generating locally');
260
  } else {
261
  stopButton.classList.add('hidden');
262
  sendButton.classList.remove('hidden');
263
  messageInput.disabled = false;
264
+ aiStatus.textContent = `AI is in character. Using local intelligence for immersive roleplay.`;
265
  abortController = null;
266
+ if (localAiInterval) {
267
+ clearInterval(localAiInterval);
268
+ localAiInterval = null;
269
+ }
270
  }
271
  }
272
+ function stopGeneration() {
 
273
  if (abortController) {
274
  abortController.abort();
275
  }
 
288
  apiStatus.className = 'px-2 py-1 rounded-full bg-yellow-900/30 text-yellow-400 text-xs';
289
  }
290
  }
 
291
  // Export functions to window
292
  window.addMessage = function(type, content) {
293
  const messageDiv = document.createElement('div');
 
319
  chatMessages.scrollTop = chatMessages.scrollHeight;
320
  feather.replace();
321
  };
322
+ function showTyping() {
 
323
  const typingDiv = document.createElement('div');
324
  typingDiv.id = 'typingIndicator';
325
  typingDiv.className = 'typing-indicator mb-6 flex gap-4';
 
343
  const typing = document.getElementById('typingIndicator');
344
  if (typing) typing.remove();
345
  }
 
346
  // Pre‑fill example
347
  window.switchCharacter = function(name, role, avatar) {
348
  document.getElementById('characterName').textContent = name;
 
356
  span.textContent = role;
357
 
358
  addMessage('system', `${name} has joined the chat. Role: ${role}`);
359
+ aiStatus.textContent = `AI is in character as ${name} (${role}). Using local intelligence for immersive roleplay.`;
360
  };
361
+ window.startNewScenario = function() {
 
362
  if (confirm('Start a new scenario? Current chat will be cleared.')) {
363
  chatMessages.innerHTML = '';
364
  addMessage('system', 'New scenario started. Setting the stage...');
 
369
  chatMessages.innerHTML = '';
370
  addMessage('system', 'Chat cleared. Ready for new adventures!');
371
  };
 
372
  // Initialize API status
373
+ updateAPIStatus(true, 'Local AI Ready');
374
  });