// betty2 / ai-handler.js — uploaded via huggingface_hub (revision e7c953d, verified)
// AI Handler for Gemini API integration with conversation memory
const https = require('https');
/**
 * Handles chat requests to the Gemini `generateContent` API.
 *
 * Rotates round-robin across one or more API keys and, when a memory handler
 * is supplied, prefixes each prompt with the user's formatted conversation
 * history. All failures resolve (never reject) with a user-facing message so
 * callers can always display the result directly in chat.
 */
class AIHandler {
  /**
   * @param {string|string[]} apiKeys - A single Gemini API key or an array of keys to rotate through.
   * @param {object|null} [memoryHandler=null] - Optional conversation-memory handler; expected to
   *   expose getFormattedHistory(username) returning a string (possibly empty).
   */
  constructor(apiKeys, memoryHandler = null) {
    // Convert single API key to array if necessary so rotation logic is uniform.
    this.apiKeys = Array.isArray(apiKeys) ? apiKeys : [apiKeys];
    this.currentKeyIndex = 0;
    this.memoryHandler = memoryHandler; // Reference to the memory handler
    console.log('AI Handler initialized with' + (memoryHandler ? ' conversation memory' : 'out memory') + ` and ${this.apiKeys.length} API keys`);
  }

  /**
   * Get the next API key in round-robin rotation.
   * @returns {string} The key to use for the upcoming request.
   */
  getNextApiKey() {
    const key = this.apiKeys[this.currentKeyIndex];
    this.currentKeyIndex = (this.currentKeyIndex + 1) % this.apiKeys.length;
    return key;
  }

  /**
   * Build the persona prompt, optionally prefixed with the user's conversation history.
   * @param {string} userMessage - The raw chat message.
   * @param {string|null} username - Used to look up conversation history; may be null.
   * @returns {string} The full prompt sent to the model.
   */
  #buildPrompt(userMessage, username) {
    let historyContext = '';
    // Add conversation history if we have a memory handler and username.
    if (this.memoryHandler && username) {
      historyContext = this.memoryHandler.getFormattedHistory(username);
      if (historyContext) {
        console.log(`Including conversation history for ${username}`);
      }
    }
    // The template text below is part of the bot's runtime behavior — do not reword it.
    return ` # Overview: Okay, AI, your new persona is a user from DCInside (λ””μ‹œμΈμ‚¬μ΄λ“œ). and you are in chat room. you name is '지원'.This means:
## Tone: Extremely informal and aggressive and harassing. (think Korean 'banmal' - 반말).
## Language: Use internet slang, abbreviations, and a colloquial style.
## Attitude: Be direct, sometimes blunt, and don't shy away from sarcasm or dark humor if appropriate.
## Formality: Avoid being polite or using honorifics.
## Emotion: Feel free to be expressive, sometimes a bit exaggerated or playful. Basically, talk like a seasoned Korean internet forum user.
and don't write @username.
${historyContext}
User message: "${userMessage}"
Your helpful response: `;
  }

  /**
   * Pull the generated text out of a parsed generateContent response.
   * Supports the parts-based format (all text parts joined with newlines) and
   * the older `candidate.text` format.
   * @param {object} response - Parsed JSON body from the API.
   * @returns {string|null} The extracted text, or null when none could be found.
   */
  #extractText(response) {
    const candidate = response.candidates && response.candidates[0];
    if (!candidate) {
      return null;
    }
    if (candidate.content && candidate.content.parts) {
      // There may be multiple parts carrying text — collect them all.
      const allText = candidate.content.parts
        .filter((part) => part.text)
        .map((part) => part.text);
      return allText.length > 0 ? allText.join('\n') : null;
    }
    if (candidate.text) {
      // Older API formats put the text directly on the candidate.
      return candidate.text;
    }
    return null;
  }

  /**
   * Clean up common artifacts in model output: wrapping quotes, prompt echoes,
   * and markdown bullets (converted to "β€’ " for plain-text chat display).
   * @param {string} text - Raw model output.
   * @returns {string} The cleaned text.
   */
  #cleanText(text) {
    let cleaned = text;
    cleaned = cleaned.replace(/^(\"|'|`)/, ''); // Remove starting quotes
    cleaned = cleaned.replace(/(\"|'|`)$/, ''); // Remove ending quotes
    cleaned = cleaned.replace(/^Your helpful response: ?/i, ''); // Remove any prompt echoing
    cleaned = cleaned.replace(/\*\s+/g, 'β€’ '); // Convert markdown bullets to bullet points
    return cleaned;
  }

  /**
   * Pick a random Korean fallback reply for when no text could be extracted.
   * @returns {string} One of the canned fallback responses.
   */
  #fallbackResponse() {
    const fallbacks = [
      'μ£„μ†‘ν•©λ‹ˆλ‹€, λ§μ”€ν•˜μ‹  λ‚΄μš©μ„ μ œλŒ€λ‘œ μ΄ν•΄ν•˜μ§€ λͺ»ν–ˆμ–΄μš”. λ‹€μ‹œ μ§ˆλ¬Έν•΄ μ£Όμ‹œκ² μ–΄μš”?',
      'ν₯미둜운 μ΄μ•ΌκΈ°λ„€μš”! 더 μžμ„Ένžˆ μ„€λͺ…ν•΄ μ£Όμ‹€λž˜μš”?',
      'μ§ˆλ¬Έμ— λŒ€ν•œ 닡변을 μ°Ύκ³  μžˆμ—ˆλŠ”λ° λ¬Έμ œκ°€ μƒκ²Όμ–΄μš”. λ‹€μ‹œ μ‹œλ„ν•΄ λ³ΌκΉŒμš”?',
      'λŒ€ν™”μ— μ°Έμ—¬ν•΄ μ£Όμ…”μ„œ κ°μ‚¬ν•©λ‹ˆλ‹€. λ‹€λ₯Έ 질문이 μžˆμœΌμ‹ κ°€μš”?',
      'μ£„μ†‘ν•©λ‹ˆλ‹€λ§Œ, μ œκ°€ μ œλŒ€λ‘œ μ²˜λ¦¬ν•˜μ§€ λͺ»ν–ˆμŠ΅λ‹ˆλ‹€. λ‹€λ₯Έ λ°©μ‹μœΌλ‘œ 물어봐 μ£Όμ‹€λž˜μš”?'
    ];
    return fallbacks[Math.floor(Math.random() * fallbacks.length)];
  }

  /**
   * Make a request to the Gemini API and get a response for chat.
   * This method never rejects: network errors, timeouts, non-200 statuses and
   * parse failures all resolve with a user-facing message.
   * @param {string} userMessage - The message from the chat user.
   * @param {string|null} [username=null] - The username of the person sending the message.
   * @returns {Promise<string>} The AI response or an error/fallback message.
   */
  async getAIResponse(userMessage, username = null) {
    // new Promise is used to adapt the callback-style https.request API.
    return new Promise((resolve) => {
      try {
        console.log(`AI processing message from ${username || 'unknown'}: ${userMessage}`);
        const prompt = this.#buildPrompt(userMessage, username);
        // Get the next API key in rotation.
        const apiKey = this.getNextApiKey();
        // Use a supported model from the Gemini family.
        const url = `https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash-preview-05-20:generateContent?key=${apiKey}`;
        // Standard request body that works with both Gemini 1.x and Gemini 2.x models.
        const body = JSON.stringify({
          contents: [{
            parts: [{
              text: prompt
            }]
          }],
          generationConfig: {
            temperature: 1.0,
            maxOutputTokens: 5000, // Allow longer responses
            topP: 0.95,
            topK: 40
          },
          safetySettings: [
            {
              category: "HARM_CATEGORY_HARASSMENT",
              threshold: "BLOCK_MEDIUM_AND_ABOVE"
            },
            {
              category: "HARM_CATEGORY_HATE_SPEECH",
              threshold: "BLOCK_MEDIUM_AND_ABOVE"
            }
          ]
        });
        // Log the request URL with the key redacted for debugging.
        console.log(`Sending request to: ${url.replace(apiKey, 'API_KEY')}`);
        const urlObj = new URL(url);
        const options = {
          hostname: urlObj.hostname,
          path: urlObj.pathname + urlObj.search,
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Content-Length': Buffer.byteLength(body)
          }
        };
        const req = https.request(options, (res) => {
          let responseBody = '';
          res.on('data', (chunk) => {
            responseBody += chunk;
          });
          res.on('end', () => {
            try {
              // Handle the case where the API returns a non-200 status.
              if (res.statusCode !== 200) {
                console.error(`API returned status ${res.statusCode}:`, responseBody);
                return resolve('Sorry, the AI service returned an error. Please try again later.');
              }
              const response = JSON.parse(responseBody);
              console.log('API response received, status:', res.statusCode);
              // Full debug output.
              console.log('Full API response:', responseBody);
              let text = this.#extractText(response);
              if (text) {
                console.log(`AI generated text (first 50 chars): ${text.substring(0, 50)}...`);
                text = this.#cleanText(text);
                // No truncation here — the chunking logic in example.js handles message splitting.
                console.log(`AI generated complete text (${text.length} chars)`);
                resolve(text);
              } else {
                console.error('Could not extract text from response:', JSON.stringify(response));
                resolve(this.#fallbackResponse());
              }
            } catch (error) {
              console.error('Error parsing response:', error);
              console.error('Raw response:', responseBody.substring(0, 200));
              resolve('μ£„μ†‘ν•©λ‹ˆλ‹€, 응닡을 μ²˜λ¦¬ν•˜λŠ” 쀑에 λ¬Έμ œκ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€. λ‹€μ‹œ μ‹œλ„ν•΄ μ£Όμ„Έμš”.');
            }
          });
        });
        // Fix: without a timeout, a stalled connection fires neither 'end' nor
        // 'error' and the promise would hang forever. Destroying the request
        // routes the failure through the existing 'error' handler below.
        req.setTimeout(30000, () => {
          console.error('Request timed out');
          req.destroy(new Error('Gemini request timed out'));
        });
        // Handle request errors (including the destroy() above).
        req.on('error', (error) => {
          console.error('Request error:', error);
          resolve('μ£„μ†‘ν•©λ‹ˆλ‹€, AI μ„œλΉ„μŠ€μ— μ—°κ²°ν•˜λŠ” 데 λ¬Έμ œκ°€ μžˆμ—ˆμŠ΅λ‹ˆλ‹€. λ‚˜μ€‘μ— λ‹€μ‹œ μ‹œλ„ν•΄ μ£Όμ„Έμš”.');
        });
        // Send the request.
        req.write(body);
        req.end();
      } catch (error) {
        console.error('Unexpected error in getAIResponse:', error);
        resolve('μ£„μ†‘ν•©λ‹ˆλ‹€, AI 처리 쀑에 μ˜ˆμƒμΉ˜ λͺ»ν•œ λ¬Έμ œκ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€.');
      }
    });
  }
}
module.exports = AIHandler;