|
|
const fs = require('fs') |
|
|
const path = require('path') |
|
|
const logger = require('../utils/logger') |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Manages the catalog of supported LLM models backed by a JSON
 * configuration file (data/supported_models.json). Falls back to a
 * built-in default catalog when the file is missing or unreadable,
 * and hot-reloads the catalog when the file changes on disk.
 */
class ModelService {
  constructor() {
    // Absolute path to the catalog file, resolved against the process CWD.
    this.modelsFile = path.join(process.cwd(), 'data', 'supported_models.json')
    // Parsed catalog keyed by service name; null until loadModels() has run.
    this.supportedModels = null
    // Handle exposing close() to stop the fs.watchFile polling; null when inactive.
    this.fileWatcher = null
  }

  /**
   * Load the model catalog and start watching the config file for changes.
   * Never throws: failures are logged, and loadModels() itself falls back
   * to the default catalog so the service remains usable.
   */
  async initialize() {
    try {
      this.loadModels()
      this.setupFileWatcher()
      logger.success('✅ Model service initialized successfully')
    } catch (error) {
      logger.error('❌ Failed to initialize model service:', error)
    }
  }

  /**
   * Read and parse the catalog file into this.supportedModels.
   * If the file is absent, installs the built-in defaults and persists them;
   * on any read/parse error, falls back to the defaults in memory only.
   */
  loadModels() {
    try {
      if (fs.existsSync(this.modelsFile)) {
        const data = fs.readFileSync(this.modelsFile, 'utf8')
        this.supportedModels = JSON.parse(data)

        const totalModels = Object.values(this.supportedModels).reduce(
          // Tolerate entries without a models array instead of throwing,
          // which would discard the whole (partially valid) user config.
          (sum, config) => sum + (config.models?.length ?? 0),
          0
        )

        logger.info(`📋 Loaded ${totalModels} supported models from configuration`)
      } else {
        logger.warn('⚠️ Supported models file not found, using defaults')
        this.supportedModels = this.getDefaultModels()

        // Persist the defaults so operators have a file to edit and the
        // file watcher has something to observe.
        this.saveDefaultConfig()
      }
    } catch (error) {
      logger.error('❌ Failed to load supported models:', error)
      this.supportedModels = this.getDefaultModels()
    }
  }

  /**
   * Built-in fallback catalog used when no config file exists or it
   * cannot be parsed. Returns a fresh object on every call so callers
   * may safely mutate their copy.
   *
   * @returns {Object<string, {provider: string, description: string, models: string[]}>}
   */
  getDefaultModels() {
    return {
      claude: {
        provider: 'anthropic',
        description: 'Claude models from Anthropic',
        models: [
          'claude-sonnet-4-5-20250929',
          'claude-opus-4-1-20250805',
          'claude-sonnet-4-20250514',
          'claude-opus-4-20250514',
          'claude-3-7-sonnet-20250219',
          'claude-3-5-sonnet-20241022',
          'claude-3-5-haiku-20241022',
          'claude-3-opus-20240229',
          'claude-3-haiku-20240307'
        ]
      },
      openai: {
        provider: 'openai',
        description: 'OpenAI GPT models',
        models: [
          'gpt-4o',
          'gpt-4o-mini',
          'gpt-4.1',
          'gpt-4.1-mini',
          'gpt-4.1-nano',
          'gpt-4-turbo',
          'gpt-4',
          'gpt-3.5-turbo',
          'o3',
          'o4-mini',
          'chatgpt-4o-latest'
        ]
      },
      gemini: {
        provider: 'google',
        description: 'Google Gemini models',
        models: [
          'gemini-1.5-pro',
          'gemini-1.5-flash',
          'gemini-2.0-flash',
          'gemini-2.0-flash-exp',
          'gemini-2.0-flash-thinking',
          'gemini-2.0-flash-thinking-exp',
          'gemini-2.0-pro',
          'gemini-2.5-flash',
          'gemini-2.5-flash-lite',
          'gemini-2.5-pro'
        ]
      }
    }
  }

  /**
   * Write the current catalog to this.modelsFile, creating the data
   * directory if needed. Errors are logged, never thrown.
   */
  saveDefaultConfig() {
    try {
      const dataDir = path.dirname(this.modelsFile)
      if (!fs.existsSync(dataDir)) {
        fs.mkdirSync(dataDir, { recursive: true })
      }

      fs.writeFileSync(this.modelsFile, JSON.stringify(this.supportedModels, null, 2))
      logger.info('💾 Created default supported_models.json configuration')
    } catch (error) {
      logger.error('❌ Failed to save default config:', error)
    }
  }

  /**
   * Flatten the catalog into OpenAI-style model descriptors, sorted by
   * provider then model id.
   *
   * @returns {Array<{id: string, object: 'model', created: number, owned_by: string}>}
   *          Empty array when the catalog has not been loaded yet.
   */
  getAllModels() {
    // Guard: the service may be queried before initialize()/loadModels()
    // has populated the catalog (getStatus() guards the same way).
    if (!this.supportedModels) {
      return []
    }

    const models = []
    // `created` is a Unix timestamp (seconds), matching the OpenAI schema.
    const now = Math.floor(Date.now() / 1000)

    for (const [_service, config] of Object.entries(this.supportedModels)) {
      for (const modelId of config.models ?? []) {
        models.push({
          id: modelId,
          object: 'model',
          created: now,
          owned_by: config.provider
        })
      }
    }

    // Stable, human-friendly ordering: group by provider, then by id.
    return models.sort((a, b) => {
      if (a.owned_by !== b.owned_by) {
        return a.owned_by.localeCompare(b.owned_by)
      }
      return a.id.localeCompare(b.id)
    })
  }

  /**
   * All model descriptors belonging to one provider (e.g. 'openai').
   *
   * @param {string} provider - value matched against each model's owned_by
   * @returns {Array<Object>} possibly empty list of model descriptors
   */
  getModelsByProvider(provider) {
    return this.getAllModels().filter((m) => m.owned_by === provider)
  }

  /**
   * Whether the given model id appears anywhere in the catalog.
   *
   * @param {string} modelId
   * @returns {boolean} false for falsy ids or unknown models
   */
  isModelSupported(modelId) {
    if (!modelId) {
      return false
    }
    return this.getAllModels().some((m) => m.id === modelId)
  }

  /**
   * Look up the provider that owns a model id.
   *
   * @param {string} modelId
   * @returns {string|null} provider name, or null when the id is unknown
   */
  getModelProvider(modelId) {
    const model = this.getAllModels().find((m) => m.id === modelId)
    return model ? model.owned_by : null
  }

  /**
   * Re-read the catalog file (invoked by the file watcher and available
   * for manual refreshes).
   */
  reloadModels() {
    logger.info('🔄 Reloading supported models configuration...')
    this.loadModels()
  }

  /**
   * Watch the catalog file for modification-time changes and reload on
   * change. Uses fs.watchFile polling (60s interval) rather than
   * fs.watch; the stored handle's close() calls fs.unwatchFile.
   * Errors are logged, never thrown.
   */
  setupFileWatcher() {
    try {
      // Tear down any previous watcher so repeated calls don't stack.
      if (this.fileWatcher) {
        this.fileWatcher.close()
        this.fileWatcher = null
      }

      if (!fs.existsSync(this.modelsFile)) {
        logger.debug('📋 Models file does not exist yet, skipping file watcher setup')
        return
      }

      const watchOptions = {
        persistent: true,
        interval: 60000
      }

      // Track mtime ourselves so spurious stat callbacks don't trigger reloads.
      let lastMtime = fs.statSync(this.modelsFile).mtimeMs

      fs.watchFile(this.modelsFile, watchOptions, (curr, _prev) => {
        if (curr.mtimeMs !== lastMtime) {
          lastMtime = curr.mtimeMs
          logger.info('📋 Detected change in supported_models.json, reloading...')
          this.reloadModels()
        }
      })

      // Wrap unwatchFile in a close() method so cleanup() can treat this
      // like an fs.FSWatcher handle.
      this.fileWatcher = {
        close: () => fs.unwatchFile(this.modelsFile)
      }

      logger.info('👁️ File watcher set up for supported_models.json')
    } catch (error) {
      logger.error('❌ Failed to setup file watcher:', error)
    }
  }

  /**
   * Diagnostic snapshot of the service.
   *
   * @returns {{initialized: boolean, totalModels: number, providers: string[], fileExists: boolean}}
   */
  getStatus() {
    const totalModels = this.supportedModels
      ? Object.values(this.supportedModels).reduce(
          // Same malformed-entry tolerance as loadModels().
          (sum, config) => sum + (config.models?.length ?? 0),
          0
        )
      : 0

    return {
      initialized: this.supportedModels !== null,
      totalModels,
      providers: this.supportedModels ? Object.keys(this.supportedModels) : [],
      fileExists: fs.existsSync(this.modelsFile)
    }
  }

  /**
   * Stop the file watcher (idempotent). Call on shutdown to release the
   * fs.watchFile polling timer.
   */
  cleanup() {
    if (this.fileWatcher) {
      this.fileWatcher.close()
      this.fileWatcher = null
      logger.debug('📋 Model service file watcher closed')
    }
  }
}
|
|
|
|
|
// Export a shared singleton instance (CommonJS module caching means every
// require() observes the same catalog state and file watcher). The instance
// still needs initialize() called once at startup to load the catalog.
module.exports = new ModelService()
|
|
|