LuckyEnforceAgent committed on
Commit
e3197b5
·
verified ·
1 Parent(s): 5beea39

NO IT NEEDS TO BE ABLE TO USE OLLAMA OPEN SOURCE LLM AI

Browse files
Files changed (1) hide show
  1. chat.html +13 -7
chat.html CHANGED
@@ -143,8 +143,8 @@ waypoint.SetType(EWaypointType.MOVE);</pre>
143
  <script>
144
  // Initialize feather icons
145
  feather.replace();
146
- // Connect to AnythingLLM API
147
- const API_URL = 'http://localhost:3001/api/v1/workspace/default/chat'; // Default AnythingLLM endpoint
148
  const chatContainer = document.querySelector('.chat-container');
149
  const chatForm = document.getElementById('chatForm');
150
  const userInput = document.getElementById('userInput');
@@ -177,18 +177,24 @@ const chatContainer = document.querySelector('.chat-container');
177
  'Content-Type': 'application/json'
178
  },
179
  body: JSON.stringify({
180
- message: prompt,
181
- mode: "query" // Can be "query" or "chat"
 
 
 
 
 
 
182
  })
183
  });
184
 
185
  if (!response.ok) throw new Error('Network response was not ok');
186
  const data = await response.json();
187
- return data.textResponse;
188
  } catch (error) {
189
  console.error('Error:', error);
190
- return "Sorry, I encountered an error. Please ensure LM Studio is running and the API is accessible at http://localhost:1234";
191
- } finally {
192
  typingIndicator.classList.add('hidden');
193
  }
194
  }
 
143
  <script>
144
  // Initialize feather icons
145
  feather.replace();
146
+ // Connect to Ollama API
147
+ const API_URL = 'http://localhost:11434/api/chat'; // Default Ollama endpoint
148
  const chatContainer = document.querySelector('.chat-container');
149
  const chatForm = document.getElementById('chatForm');
150
  const userInput = document.getElementById('userInput');
 
177
  'Content-Type': 'application/json'
178
  },
179
  body: JSON.stringify({
180
+ model: "llama2", // or any other model you have installed
181
+ messages: [
182
+ {
183
+ role: "user",
184
+ content: prompt
185
+ }
186
+ ],
187
+ stream: false
188
  })
189
  });
190
 
191
  if (!response.ok) throw new Error('Network response was not ok');
192
  const data = await response.json();
193
+ return data.message?.content || "Sorry, I couldn't process the response.";
194
  } catch (error) {
195
  console.error('Error:', error);
196
+ return "Sorry, I encountered an error. Please ensure OLLAMA is running and accessible at http://localhost:11434";
197
+ } finally {
198
  typingIndicator.classList.add('hidden');
199
  }
200
  }