| | #!/usr/bin/env node |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | const path = require('path'); |
| | const fs = require('fs'); |
| |
|
| | |
// Load environment variables from the repo-root .env before reading them below.
require('dotenv').config({ path: path.join(__dirname, '..', '.env') });

// Redis-related settings; all optional. When neither USE_REDIS nor REDIS_URI
// is set, the script falls back to flushing the file-based cache only.
const {
  USE_REDIS,
  REDIS_URI,
  REDIS_USERNAME,
  REDIS_PASSWORD,
  REDIS_CA,
  REDIS_KEY_PREFIX,
  USE_REDIS_CLUSTER,
  REDIS_USE_ALTERNATIVE_DNS_LOOKUP,
} = process.env;
| |
|
| | |
| | const isEnabled = (value) => value === 'true' || value === true; |
| |
|
| | |
/**
 * Read the Redis CA certificate pointed at by the REDIS_CA env var.
 *
 * @returns {string|null} Certificate file contents, or null when REDIS_CA is
 *   unset, the file does not exist, or it cannot be read.
 */
const getRedisCA = () => {
  if (!REDIS_CA) {
    return null;
  }
  try {
    if (!fs.existsSync(REDIS_CA)) {
      console.warn(`⚠️ Redis CA certificate file not found: ${REDIS_CA}`);
      return null;
    }
    return fs.readFileSync(REDIS_CA, 'utf8');
  } catch (error) {
    console.error(`❌ Failed to read Redis CA certificate file '${REDIS_CA}':`, error.message);
    return null;
  }
};
| |
|
/**
 * Print CLI usage/help text for this utility to stdout.
 * Triggered by --help / -h; performs no cache operations.
 */
async function showHelp() {
  console.log(`
LibreChat Cache Flush Utility

DESCRIPTION:
  Flushes the cache store used by LibreChat. Automatically detects
  whether Redis or file-based cache is being used and flushes accordingly.

USAGE:
  npm run flush-cache
  node config/flush-cache.js [options]

OPTIONS:
  --help, -h     Show this help message
  --dry-run      Show what would be flushed without actually doing it
  --verbose, -v  Show detailed output

CACHE TYPES:
  • Redis Cache: Flushes all keys with the configured Redis prefix
  • File Cache: Removes ./data/logs.json and ./data/violations.json

WHAT GETS FLUSHED:
  • User sessions and authentication tokens
  • Configuration cache
  • Model queries cache
  • Rate limiting data
  • Conversation titles cache
  • File upload progress
  • SharePoint tokens
  • And more...

NOTE: This will log out all users and may require them to re-authenticate.
`);
}
| |
|
/**
 * Flush the Redis cache (standalone or cluster).
 *
 * Connects using the same env configuration as the app (REDIS_URI,
 * REDIS_USERNAME/PASSWORD, optional CA certificate, cluster settings), then
 * runs FLUSHDB on the database / every master node.
 * NOTE(review): this flushes the *entire* database, not only keys under
 * REDIS_KEY_PREFIX, despite what the help text suggests — confirm intent.
 *
 * @param {boolean} [dryRun=false] - Only report what would be deleted.
 * @param {boolean} [verbose=false] - Print connection details and sample keys.
 * @returns {Promise<boolean>} true on success (or completed dry run), false on error.
 */
async function flushRedisCache(dryRun = false, verbose = false) {
  // Hoisted so the catch block can tear the connection down on failure.
  let redis;
  try {
    console.log('🔍 Redis cache detected');

    if (verbose) {
      // Mask credentials embedded in the URI before logging it.
      console.log(`   URI: ${REDIS_URI ? REDIS_URI.replace(/\/\/.*@/, '//***:***@') : 'Not set'}`);
      console.log(`   Prefix: ${REDIS_KEY_PREFIX || 'None'}`);
    }

    const IoRedis = require('ioredis');

    // Credentials may come from the URI itself or from dedicated env vars;
    // the first URI wins for cluster deployments.
    const urls = (REDIS_URI || '').split(',').map((uri) => new URL(uri));
    const username = urls[0]?.username || REDIS_USERNAME;
    const password = urls[0]?.password || REDIS_PASSWORD;
    const ca = getRedisCA();

    const redisOptions = {
      username,
      password,
      tls: ca ? { ca } : undefined,
      connectTimeout: 10000,
      maxRetriesPerRequest: 3,
      enableOfflineQueue: true,
      lazyConnect: false,
    };

    // Multiple URIs, or an explicit flag, means cluster mode.
    const useCluster = urls.length > 1 || isEnabled(USE_REDIS_CLUSTER);

    if (useCluster) {
      const clusterOptions = {
        redisOptions,
        enableOfflineQueue: true,
      };

      // Some managed clusters need hostname passthrough instead of DNS resolution.
      if (isEnabled(REDIS_USE_ALTERNATIVE_DNS_LOOKUP)) {
        clusterOptions.dnsLookup = (address, callback) => callback(null, address);
      }

      redis = new IoRedis.Cluster(
        urls.map((url) => ({ host: url.hostname, port: parseInt(url.port, 10) || 6379 })),
        clusterOptions,
      );
    } else {
      redis = new IoRedis(REDIS_URI, redisOptions);
    }

    // Wait for the connection to become ready, or fail fast after 10s.
    await new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        reject(new Error('Connection timeout'));
      }, 10000);

      redis.once('ready', () => {
        clearTimeout(timeout);
        resolve(undefined);
      });

      redis.once('error', (err) => {
        clearTimeout(timeout);
        reject(err);
      });
    });

    if (dryRun) {
      console.log('📋 [DRY RUN] Would flush Redis cache');
      try {
        let allKeys = [];
        if (useCluster) {
          // KEYS must be issued per master node in a cluster.
          const nodes = redis.nodes('master');
          console.log(`   Cluster detected: ${nodes.length} master nodes`);
          for (const node of nodes) {
            const keys = await node.keys('*');
            allKeys = allKeys.concat(keys);
          }
        } else {
          allKeys = await redis.keys('*');
        }
        console.log(`   Would delete ${allKeys.length} keys`);
        if (verbose && allKeys.length > 0) {
          console.log(
            '   Sample keys:',
            allKeys.slice(0, 10).join(', ') + (allKeys.length > 10 ? '...' : ''),
          );
        }
      } catch (error) {
        console.log('   Could not fetch keys for preview:', error.message);
      }
      await redis.disconnect();
      return true;
    }

    // Count keys first so we can report how many were removed (best effort).
    let keyCount = 0;
    try {
      if (useCluster) {
        const nodes = redis.nodes('master');
        for (const node of nodes) {
          const keys = await node.keys('*');
          keyCount += keys.length;
        }
      } else {
        const keys = await redis.keys('*');
        keyCount = keys.length;
      }
    } catch (_error) {
      // Counting is informational only; flush regardless of failures here.
    }

    if (useCluster) {
      const nodes = redis.nodes('master');
      await Promise.all(nodes.map((node) => node.flushdb()));
      console.log(`✅ Redis cluster cache flushed successfully (${nodes.length} master nodes)`);
    } else {
      await redis.flushdb();
      console.log('✅ Redis cache flushed successfully');
    }

    if (keyCount > 0) {
      console.log(`   Deleted ${keyCount} keys`);
    }

    await redis.disconnect();
    return true;
  } catch (error) {
    console.error('❌ Error flushing Redis cache:', error.message);
    if (verbose) {
      console.error('   Full error:', error);
    }
    // Fix: previously the connection was never closed on this path, leaving an
    // open socket that kept the process alive after a post-connect failure.
    if (redis) {
      try {
        await redis.disconnect();
      } catch (_disconnectError) {
        // Best effort — the connection may never have been established.
      }
    }
    return false;
  }
}
| |
|
/**
 * Flush the file-based cache by deleting known cache files under ./data.
 *
 * @param {boolean} [dryRun=false] - Only report which files would be deleted.
 * @param {boolean} [verbose=false] - Print a line per deleted file.
 * @returns {Promise<boolean>} true when every existing cache file was removed
 *   (or none existed, or dry run); false when any deletion failed.
 */
async function flushFileCache(dryRun = false, verbose = false) {
  const dataDir = path.join(__dirname, '..', 'data');
  const filesToClear = [path.join(dataDir, 'logs.json'), path.join(dataDir, 'violations.json')];

  console.log('📂 Checking file-based cache');

  if (dryRun) {
    console.log('📋 [DRY RUN] Would flush file cache');
    for (const filePath of filesToClear) {
      if (fs.existsSync(filePath)) {
        const stats = fs.statSync(filePath);
        console.log(
          `   Would delete: ${path.basename(filePath)} (${(stats.size / 1024).toFixed(1)} KB)`,
        );
      }
    }
    return true;
  }

  let deletedCount = 0;
  let totalSize = 0;
  let failureCount = 0;

  for (const filePath of filesToClear) {
    try {
      if (fs.existsSync(filePath)) {
        // Capture size before unlinking so the summary can report KB freed.
        const stats = fs.statSync(filePath);
        totalSize += stats.size;
        fs.unlinkSync(filePath);
        deletedCount++;
        if (verbose) {
          console.log(
            `   ✅ Deleted ${path.basename(filePath)} (${(stats.size / 1024).toFixed(1)} KB)`,
          );
        }
      }
    } catch (error) {
      // Fix: failures were previously only logged under --verbose and the
      // function still returned true; now they are always reported and
      // reflected in the return value so main() can exit non-zero.
      failureCount++;
      console.log(`   ❌ Failed to delete ${path.basename(filePath)}: ${error.message}`);
    }
  }

  if (deletedCount > 0) {
    console.log('✅ File cache flushed successfully');
    console.log(`   Deleted ${deletedCount} cache files (${(totalSize / 1024).toFixed(1)} KB)`);
  } else if (failureCount === 0) {
    console.log('ℹ️  No file cache to flush');
  }

  return failureCount === 0;
}
| |
|
/**
 * Print a reminder that a backend restart is needed to clear in-memory caches
 * that this script cannot reach.
 */
async function restartRecommendation() {
  const advice = [
    '\n💡 RECOMMENDATION:',
    '   For complete cache clearing, especially for in-memory caches,',
    '   consider restarting the LibreChat backend:',
    '',
    '   npm run backend:stop',
    '   npm run backend:dev',
    '',
  ];
  for (const line of advice) {
    console.log(line);
  }
}
| |
|
/**
 * CLI entry point: parse flags, flush the Redis and/or file caches, and
 * print a summary. Exits with code 1 when any flush step reported failure.
 */
async function main() {
  const args = process.argv.slice(2);
  const dryRun = args.includes('--dry-run');
  const verbose = args.includes('--verbose') || args.includes('-v');
  const help = args.includes('--help') || args.includes('-h');

  // --help short-circuits everything else.
  if (help) {
    await showHelp();
    return;
  }

  console.log('🧹 LibreChat Cache Flush Utility');
  console.log('================================');

  if (dryRun) {
    console.log('📋 DRY RUN MODE - No actual changes will be made\n');
  }

  let success = true;
  // Redis counts as enabled when explicitly flagged OR when a URI is present.
  const isRedisEnabled = isEnabled(USE_REDIS) || (REDIS_URI != null && REDIS_URI !== '');

  if (isRedisEnabled) {
    success = (await flushRedisCache(dryRun, verbose)) && success;
  } else {
    console.log('ℹ️  Redis not configured, using file-based cache only');
  }

  // The file cache is flushed in every configuration, even when Redis is used.
  success = (await flushFileCache(dryRun, verbose)) && success;

  console.log('\n' + '='.repeat(50));

  if (success) {
    if (dryRun) {
      console.log('✅ Cache flush preview completed');
      console.log('   Run without --dry-run to actually flush the cache');
    } else {
      console.log('✅ Cache flush completed successfully');
      console.log('⚠️  Note: All users will need to re-authenticate');
    }

    // In-memory caches survive this script; suggest a restart when Redis
    // (which can be flushed remotely) is not in play.
    if (!isRedisEnabled) {
      await restartRecommendation();
    }
  } else {
    console.log('❌ Cache flush completed with errors');
    console.log('   Check the output above for details');
    process.exit(1);
  }
}
| |
|
| | |
// Fail loudly on any error that escapes the async flow: this is a one-shot
// CLI tool, so exiting non-zero is the correct response.
process.on('unhandledRejection', (error) => {
  console.error('❌ Unhandled error:', error);
  process.exit(1);
});

process.on('uncaughtException', (error) => {
  console.error('❌ Uncaught exception:', error);
  process.exit(1);
});

// Run only when invoked directly (`node config/flush-cache.js`), not when
// this file is require()'d by tests or other scripts.
if (require.main === module) {
  main().catch((error) => {
    console.error('❌ Fatal error:', error);
    process.exit(1);
  });
}

module.exports = { flushRedisCache, flushFileCache };
| |
|