NO IT NEEDS TO BE ABLE TO USE OLLAMA OPEN SOURCE LLM AI
Browse files
chat.html
CHANGED
|
@@ -143,8 +143,8 @@ waypoint.SetType(EWaypointType.MOVE);</pre>
|
|
| 143 |
<script>
|
| 144 |
// Initialize feather icons
|
| 145 |
feather.replace();
|
| 146 |
-
// Connect to
|
| 147 |
-
const API_URL = 'http://localhost:
|
| 148 |
const chatContainer = document.querySelector('.chat-container');
|
| 149 |
const chatForm = document.getElementById('chatForm');
|
| 150 |
const userInput = document.getElementById('userInput');
|
|
@@ -177,18 +177,24 @@ const chatContainer = document.querySelector('.chat-container');
|
|
| 177 |
'Content-Type': 'application/json'
|
| 178 |
},
|
| 179 |
body: JSON.stringify({
|
| 180 |
-
|
| 181 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 182 |
})
|
| 183 |
});
|
| 184 |
|
| 185 |
if (!response.ok) throw new Error('Network response was not ok');
|
| 186 |
const data = await response.json();
|
| 187 |
-
return data.
|
| 188 |
} catch (error) {
|
| 189 |
console.error('Error:', error);
|
| 190 |
-
return "Sorry, I encountered an error. Please ensure
|
| 191 |
-
|
| 192 |
typingIndicator.classList.add('hidden');
|
| 193 |
}
|
| 194 |
}
|
|
|
|
| 143 |
<script>
|
| 144 |
// Initialize feather icons
|
| 145 |
feather.replace();
|
| 146 |
+
// Connect to OLLAMA API
|
| 147 |
+
const API_URL = 'http://localhost:11434/api/chat'; // Default OLLAMA endpoint
|
| 148 |
const chatContainer = document.querySelector('.chat-container');
|
| 149 |
const chatForm = document.getElementById('chatForm');
|
| 150 |
const userInput = document.getElementById('userInput');
|
|
|
|
| 177 |
'Content-Type': 'application/json'
|
| 178 |
},
|
| 179 |
body: JSON.stringify({
|
| 180 |
+
model: "llama2", // or any other model you have installed
|
| 181 |
+
messages: [
|
| 182 |
+
{
|
| 183 |
+
role: "user",
|
| 184 |
+
content: prompt
|
| 185 |
+
}
|
| 186 |
+
],
|
| 187 |
+
stream: false
|
| 188 |
})
|
| 189 |
});
|
| 190 |
|
| 191 |
if (!response.ok) throw new Error('Network response was not ok');
|
| 192 |
const data = await response.json();
|
| 193 |
+
return data.message?.content || "Sorry, I couldn't process the response.";
|
| 194 |
} catch (error) {
|
| 195 |
console.error('Error:', error);
|
| 196 |
+
return "Sorry, I encountered an error. Please ensure OLLAMA is running and accessible at http://localhost:11434";
|
| 197 |
+
} finally {
|
| 198 |
typingIndicator.classList.add('hidden');
|
| 199 |
}
|
| 200 |
}
|