// Source: Medieval-Village-AI / src/ai/llmHandler.js
// Commit: a32dc8b — "Add Medieval Village AI Emulator"
// llmHandler.js - Module for handling LLM interactions

/**
 * Handles queries to free Hugging Face hosted LLMs via the Inference API,
 * with an offline simulated-response mode for use when no API token is set.
 */
class LLMHandler {
  /**
   * @param {string|null} [apiToken=null] - Hugging Face API token. When
   *   omitted, the handler starts in simulated-response mode.
   */
  constructor(apiToken = null) {
    // List of free Hugging Face models
    this.freeLLMs = [
      "meta-llama/Llama-3.1-8B-Instruct",
      "google/gemma-3-270m-it",
      "google/gemma-3-4b-it",
      "google/gemma-3-27b-it",
      "Qwen/Qwen3-4B-Instruct-2507",
      "Qwen/Qwen3-8B",
      "mistralai/Mistral-7B-Instruct-v0.3",
      "HuggingFaceH4/zephyr-7b-beta",
      "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
      "microsoft/Phi-3-mini-4k-instruct",
      "stabilityai/stablelm-2-1_6b",
      "NousResearch/Hermes-2-Pro-Llama-3-8B",
      "CohereForAI/c4ai-command-r-v01",
      "nvidia/Nemotron-Research-Reasoning-Qwen-1.5B",
      "inclusionAI/AReaL-boba-2-8B"
    ];
    // Default selected model
    this.selectedModel = this.freeLLMs[0];
    // Hugging Face Inference API endpoint
    this.apiEndpoint = "https://api-inference.huggingface.co/models/";
    // API token (can be set in constructor or externally)
    this.apiToken = apiToken;
    // Flag to indicate if we should use simulated responses.
    // If we have a token, we'll use real responses by default.
    this.useSimulatedResponses = !apiToken;
    // Artificial latency (ms) applied to simulated responses; configurable
    // so tests and demos can opt out of the delay.
    this.simulatedDelayMs = 1000;
  }

  /**
   * Set the API token for Hugging Face Inference API.
   * A truthy token disables simulated responses; clearing it re-enables them.
   * @param {string} token - The API token
   */
  setApiToken(token) {
    this.apiToken = token;
    // When a real API token is set, disable simulated responses
    this.useSimulatedResponses = !token;
  }

  /**
   * Set whether to use simulated responses.
   * @param {boolean} useSimulated - Whether to use simulated responses
   */
  setUseSimulatedResponses(useSimulated) {
    this.useSimulatedResponses = useSimulated;
  }

  /**
   * Set the selected model. Unknown models are rejected with a warning and
   * the current selection is kept.
   * @param {string} model - The model identifier
   */
  setSelectedModel(model) {
    if (this.freeLLMs.includes(model)) {
      this.selectedModel = model;
    } else {
      console.warn(`Model ${model} is not in the list of free LLMs`);
    }
  }

  /**
   * Send a query to the selected LLM.
   * In simulated mode a canned response is returned and no token is needed;
   * otherwise the query is POSTed to the Hugging Face Inference API.
   * @param {string} query - The query to send to the LLM
   * @returns {Promise<string>} - The response from the LLM
   * @throws {Error} If no token is set in real mode, the HTTP request fails,
   *   or the API response has an unexpected shape.
   */
  async sendQuery(query) {
    // Simulated mode must be checked BEFORE the token check: it works
    // without a token by design (the constructor enables it when no token
    // is given). Checking the token first made simulated mode unreachable.
    if (this.useSimulatedResponses) {
      console.log("Using simulated response for query:", query);
      // Simulate API delay
      await new Promise((resolve) => setTimeout(resolve, this.simulatedDelayMs));
      return this.simulateLLMResponse(query);
    }
    // Real mode requires a token
    if (!this.apiToken) {
      throw new Error("API token is not set. Please set your Hugging Face API token to use the LLM functionality.");
    }
    // Prepare the API request
    const url = this.apiEndpoint + this.selectedModel;
    const payload = {
      inputs: query,
      parameters: {
        max_new_tokens: 200,
        temperature: 0.7,
        top_p: 0.9,
        do_sample: true
      }
    };
    // Make the API request
    try {
      const response = await fetch(url, {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${this.apiToken}`,
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(payload)
      });
      // Check if the request was successful
      if (!response.ok) {
        // Guard the body parse: error responses are not guaranteed to be
        // JSON, and an unguarded .json() here would mask the HTTP status
        // with a parse error.
        let detail = 'Unknown error';
        try {
          const errorData = await response.json();
          detail = errorData.error || detail;
        } catch {
          // Non-JSON error body; keep the generic detail.
        }
        throw new Error(`API request failed with status ${response.status}: ${detail}`);
      }
      // Parse the response and extract the generated text. The API may
      // return either an array of generations or a single object.
      const data = await response.json();
      if (Array.isArray(data) && data.length > 0 && data[0].generated_text) {
        return data[0].generated_text;
      } else if (data.generated_text) {
        return data.generated_text;
      } else {
        throw new Error("Unexpected response format from the API");
      }
    } catch (error) {
      console.error("Error sending query to LLM:", error);
      throw error;
    }
  }

  /**
   * Simulate an LLM response (for testing purposes).
   * Keyword-matches the query against a few canned topics.
   * @param {string} query - The query to simulate a response for
   * @returns {string} - A simulated response
   */
  simulateLLMResponse(query) {
    const lowerQuery = query.toLowerCase();
    if (lowerQuery.includes("villager") && lowerQuery.includes("behavior")) {
      return "Villagers in the medieval village simulation exhibit complex behaviors based on their needs and the time of day. They cycle through states like sleeping, working, eating, and socializing. Their decisions are influenced by factors such as energy levels, hunger, and social needs.";
    } else if (lowerQuery.includes("resource") && lowerQuery.includes("management")) {
      return "Resource management in the village is critical for sustainability. Villagers collect resources like wood and stone from designated areas. Proper allocation of resources to buildings and villagers ensures the village's growth and resilience against disasters.";
    } else if (lowerQuery.includes("disaster") || lowerQuery.includes("emergency")) {
      return "The village simulation includes various disasters like fires, floods, and plagues. These events test the village's resilience and require strategic planning to mitigate their effects. Warriors can be dispatched to help protect the village from certain threats.";
    } else if (lowerQuery.includes("ai") || lowerQuery.includes("artificial intelligence")) {
      return "This simulation uses several AI techniques including finite state machines for villager behavior, pathfinding algorithms for navigation, and rule-based systems for decision making. The emergent behaviors arise from the interaction of these systems.";
    } else if (lowerQuery.includes("building") || lowerQuery.includes("structure")) {
      return "The village features various building types, each with unique functions: houses for living, workshops for crafting, markets for trading, and specialized buildings like universities and hospitals. Buildings are placed strategically to optimize villager workflows.";
    } else {
      return `I've received your query about "${query}". In a full implementation with API access, I would provide a detailed response based on the selected LLM model. For now, try asking about villagers, resources, disasters, AI systems, or buildings in the village.`;
    }
  }

  /**
   * Get the list of free LLMs.
   * Returns a defensive copy so callers cannot mutate the internal list.
   * @returns {string[]} - Array of free LLM model identifiers
   */
  getFreeLLMs() {
    return [...this.freeLLMs];
  }

  /**
   * Get the currently selected model.
   * @returns {string} - The currently selected model identifier
   */
  getSelectedModel() {
    return this.selectedModel;
  }

  /**
   * Check if the API token is set.
   * @returns {boolean} - Whether the API token is set
   */
  isApiTokenSet() {
    return !!this.apiToken;
  }

  /**
   * Get the API token (for debugging purposes).
   * @returns {string|null} - The API token or null if not set
   */
  getApiToken() {
    return this.apiToken;
  }
}

// Export the LLMHandler class
export default LLMHandler;