| | const fs = require('fs'); |
| | const fetch = require('node-fetch'); |
| | const { initializeS3 } = require('@librechat/api'); |
| | const { logger } = require('@librechat/data-schemas'); |
| | const { FileSources } = require('librechat-data-provider'); |
| | const { getSignedUrl } = require('@aws-sdk/s3-request-presigner'); |
| | const { |
| | PutObjectCommand, |
| | GetObjectCommand, |
| | HeadObjectCommand, |
| | DeleteObjectCommand, |
| | } = require('@aws-sdk/client-s3'); |
| |
|
/** S3 bucket and signed-URL expiry configuration, read once at module load. */
const bucketName = process.env.AWS_BUCKET_NAME;
const defaultBasePath = 'images';

// Signed-URL lifetime in seconds (default 2 minutes).
let s3UrlExpirySeconds = 2 * 60;
// Optional fixed URL age (ms) after which URLs are refreshed; null means
// "refresh when close to actual expiry" (see needsRefresh).
let s3RefreshExpiryMs = null;

if (process.env.S3_URL_EXPIRY_SECONDS !== undefined) {
  const parsed = parseInt(process.env.S3_URL_EXPIRY_SECONDS, 10);

  if (!Number.isNaN(parsed) && parsed > 0) {
    // Cap at 7 days, the maximum lifetime AWS allows for presigned URLs.
    s3UrlExpirySeconds = Math.min(parsed, 7 * 24 * 60 * 60);
  } else {
    logger.warn(
      `[S3] Invalid S3_URL_EXPIRY_SECONDS value: "${process.env.S3_URL_EXPIRY_SECONDS}". Using 2-minute expiry.`,
    );
  }
}

// Environment variables are never `null` (only a string or `undefined`), so the
// previous `!== null` guard was always true; a plain truthiness check also
// skips the empty string, preserving the original behavior.
if (process.env.S3_REFRESH_EXPIRY_MS) {
  const parsed = parseInt(process.env.S3_REFRESH_EXPIRY_MS, 10);

  if (!Number.isNaN(parsed) && parsed > 0) {
    s3RefreshExpiryMs = parsed;
    logger.info(`[S3] Using custom refresh expiry time: ${s3RefreshExpiryMs}ms`);
  } else {
    logger.warn(
      `[S3] Invalid S3_REFRESH_EXPIRY_MS value: "${process.env.S3_REFRESH_EXPIRY_MS}". Using default refresh logic.`,
    );
  }
}
| |
|
/**
 * Builds the canonical S3 object key: `<basePath>/<userId>/<fileName>`.
 *
 * @param {string} basePath - Top-level folder (e.g. 'images').
 * @param {string} userId - Id of the user that owns the file.
 * @param {string} fileName - File name within the user's folder.
 * @returns {string} The S3 object key.
 */
const getS3Key = (basePath, userId, fileName) => [basePath, userId, fileName].join('/');
| |
|
/**
 * Uploads a raw buffer to S3 under the user's folder and returns a signed URL.
 *
 * @param {Object} params
 * @param {string} params.userId - Id of the user that owns the file.
 * @param {Buffer} params.buffer - File contents to upload.
 * @param {string} params.fileName - Destination file name.
 * @param {string} [params.basePath='images'] - Top-level S3 folder.
 * @returns {Promise<string>} Signed URL of the uploaded object.
 * @throws Re-throws any S3 upload/signing error after logging it.
 */
async function saveBufferToS3({ userId, buffer, fileName, basePath = defaultBasePath }) {
  try {
    const s3 = initializeS3();
    await s3.send(
      new PutObjectCommand({
        Bucket: bucketName,
        Key: getS3Key(basePath, userId, fileName),
        Body: buffer,
      }),
    );
    return await getS3URL({ userId, fileName, basePath });
  } catch (error) {
    logger.error('[saveBufferToS3] Error uploading buffer to S3:', error.message);
    throw error;
  }
}
| |
|
/**
 * Generates a presigned GET URL for a stored S3 object.
 *
 * @param {Object} params
 * @param {string} params.userId - Id of the user that owns the file.
 * @param {string} params.fileName - File name within the user's folder.
 * @param {string} [params.basePath='images'] - Top-level S3 folder.
 * @param {string|null} [params.customFilename=null] - When set, forces a
 *   download with this filename via Content-Disposition.
 * @param {string|null} [params.contentType=null] - When set, overrides the
 *   served Content-Type.
 * @returns {Promise<string>} Presigned URL valid for `s3UrlExpirySeconds`.
 * @throws Re-throws any signing error after logging it.
 */
async function getS3URL({
  userId,
  fileName,
  basePath = defaultBasePath,
  customFilename = null,
  contentType = null,
}) {
  const params = {
    Bucket: bucketName,
    Key: getS3Key(basePath, userId, fileName),
  };

  // Force a download with a friendly filename when requested.
  if (customFilename) {
    params.ResponseContentDisposition = `attachment; filename="${customFilename}"`;
  }

  // Override the served Content-Type when requested.
  if (contentType) {
    params.ResponseContentType = contentType;
  }

  try {
    const command = new GetObjectCommand(params);
    return await getSignedUrl(initializeS3(), command, { expiresIn: s3UrlExpirySeconds });
  } catch (error) {
    logger.error('[getS3URL] Error getting signed URL from S3:', error.message);
    throw error;
  }
}
| |
|
/**
 * Downloads a file from a URL and stores it in S3 under the user's folder.
 *
 * @param {Object} params
 * @param {string} params.userId - Id of the user that owns the file.
 * @param {string} params.URL - Source URL to download from.
 * @param {string} params.fileName - Destination file name.
 * @param {string} [params.basePath='images'] - Top-level S3 folder.
 * @returns {Promise<string>} Signed URL of the uploaded object.
 * @throws If the download returns a non-2xx status, or the upload fails.
 */
async function saveURLToS3({ userId, URL, fileName, basePath = defaultBasePath }) {
  try {
    const response = await fetch(URL);
    // fetch does not reject on HTTP errors; without this check an error page
    // (e.g. a 404 body) would be silently stored as the file's contents.
    if (!response.ok) {
      throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);
    }
    // arrayBuffer() works on node-fetch v2/v3 and the global fetch alike,
    // unlike the node-fetch-only, deprecated buffer() helper.
    const buffer = Buffer.from(await response.arrayBuffer());
    return await saveBufferToS3({ userId, buffer, fileName, basePath });
  } catch (error) {
    logger.error('[saveURLToS3] Error uploading file from URL to S3:', error.message);
    throw error;
  }
}
| |
|
/**
 * Deletes a user's file from S3, verifying ownership before and deletion after.
 *
 * Flow: extract the object key from the stored filepath, confirm the key
 * contains the requesting user's id, HEAD the object (skip deletion if it is
 * already gone), DELETE it, then HEAD again to verify removal.
 *
 * @param {Object} req - Express-style request; only `req.user.id` is read.
 * @param {Object} file - File record; only `file.filepath` (S3 URL or key) is read.
 * @returns {Promise<void>} Resolves when deletion (or a no-op) completes.
 * @throws {Error} If the key does not contain the user's id, or deletion fails.
 */
async function deleteFileFromS3(req, file) {
  const key = extractKeyFromS3Url(file.filepath);
  const params = { Bucket: bucketName, Key: key };
  // Ownership guard: keys are `<basePath>/<userId>/<fileName>`, so a key that
  // does not contain the requester's id is not theirs to delete.
  if (!key.includes(req.user.id)) {
    const message = `[deleteFileFromS3] User ID mismatch: ${req.user.id} vs ${key}`;
    logger.error(message);
    throw new Error(message);
  }

  try {
    const s3 = initializeS3();

    // Pre-check: if the object is already absent, treat deletion as a no-op.
    // Any HEAD error other than NotFound is intentionally ignored here and we
    // proceed with the delete attempt anyway (best effort).
    try {
      const headCommand = new HeadObjectCommand(params);
      await s3.send(headCommand);
      logger.debug('[deleteFileFromS3] File exists, proceeding with deletion');
    } catch (headErr) {
      if (headErr.name === 'NotFound') {
        logger.warn(`[deleteFileFromS3] File does not exist: ${key}`);
        return;
      }
    }

    const deleteResult = await s3.send(new DeleteObjectCommand(params));
    logger.debug('[deleteFileFromS3] Delete command response:', JSON.stringify(deleteResult));
    // Post-check: verify the object is gone; NotFound here means success.
    try {
      await s3.send(new HeadObjectCommand(params));
      logger.error('[deleteFileFromS3] File still exists after deletion!');
    } catch (verifyErr) {
      if (verifyErr.name === 'NotFound') {
        logger.debug(`[deleteFileFromS3] Verified file is deleted: ${key}`);
      } else {
        logger.error('[deleteFileFromS3] Error verifying deletion:', verifyErr);
      }
    }

    logger.debug('[deleteFileFromS3] S3 File deletion completed');
  } catch (error) {
    logger.error(`[deleteFileFromS3] Error deleting file from S3: ${error.message}`);
    logger.error(error.stack);

    // NOTE(review): AWS SDK v3 errors typically expose the error name via
    // `error.name` (as checked above for 'NotFound'); confirm `'NoSuchKey'`
    // ever actually appears on `error.code` here — this branch may be dead.
    if (error.code === 'NoSuchKey') {
      return;
    }
    throw error;
  }
}
| |
|
/**
 * Streams a multer temp file from disk into S3 and returns its signed URL.
 *
 * @param {Object} params
 * @param {Object} params.req - Express-style request; only `req.user.id` is read.
 * @param {Object} params.file - Multer file object (`path`, `originalname`).
 * @param {string} params.file_id - Unique id prefixed to the stored file name.
 * @param {string} [params.basePath='images'] - Top-level S3 folder.
 * @returns {Promise<{ filepath: string, bytes: number }>} Signed URL and size.
 * @throws Re-throws the upload error after best-effort temp-file cleanup.
 */
async function uploadFileToS3({ req, file, file_id, basePath = defaultBasePath }) {
  try {
    const userId = req.user.id;
    const fileName = `${file_id}__${file.originalname}`;

    const { size: bytes } = await fs.promises.stat(file.path);

    const s3 = initializeS3();
    await s3.send(
      new PutObjectCommand({
        Bucket: bucketName,
        Key: getS3Key(basePath, userId, fileName),
        Body: fs.createReadStream(file.path),
      }),
    );

    const filepath = await getS3URL({ userId, fileName, basePath });
    return { filepath, bytes };
  } catch (error) {
    logger.error('[uploadFileToS3] Error streaming file to S3:', error);
    // Best-effort cleanup of the temp file; it may already be gone.
    try {
      if (file && file.path) {
        await fs.promises.unlink(file.path);
      }
    } catch (unlinkError) {
      logger.error(
        '[uploadFileToS3] Error deleting temporary file, likely already deleted:',
        unlinkError.message,
      );
    }
    throw error;
  }
}
| |
|
/**
 * Extracts the S3 object key from a full URL or a bare key.
 *
 * - Full URL (signed or not): returns the URL path without its leading '/'.
 * - Bare key (e.g. 'images/<userId>/<file>'): returned unchanged.
 * - Key with a leading '/': returned with the slash stripped.
 *
 * @param {string} fileUrlOrKey - Full S3 URL or S3 key.
 * @returns {string} The S3 object key.
 * @throws {Error} If the input is empty.
 */
function extractKeyFromS3Url(fileUrlOrKey) {
  if (!fileUrlOrKey) {
    throw new Error('Invalid input: URL or key is empty');
  }

  try {
    // Valid absolute URL: the key is the pathname minus its leading '/'
    // (query-string signature parameters are excluded automatically).
    const url = new URL(fileUrlOrKey);
    return url.pathname.substring(1);
  } catch {
    // Not a URL: treat as a raw key. The original `parts.length >= 3` branch
    // was redundant — every non-URL input reduced to this single expression.
    return fileUrlOrKey.startsWith('/') ? fileUrlOrKey.substring(1) : fileUrlOrKey;
  }
}
| |
|
/**
 * Retrieves a readable stream for a stored S3 object.
 *
 * @param {Object} _req - Unused request parameter (kept for interface parity).
 * @param {string} filePath - Full S3 URL or bare key of the object.
 * @returns {Promise<import('stream').Readable>} The object's body stream.
 * @throws Re-throws any retrieval error after logging it.
 */
async function getS3FileStream(_req, filePath) {
  try {
    const command = new GetObjectCommand({
      Bucket: bucketName,
      Key: extractKeyFromS3Url(filePath),
    });
    const { Body } = await initializeS3().send(command);
    return Body;
  } catch (error) {
    logger.error('[getS3FileStream] Error retrieving S3 file stream:', error);
    throw error;
  }
}
| |
|
/**
 * Determines whether a presigned S3 URL should be refreshed.
 *
 * Non-presigned URLs (no `X-Amz-Signature`) never need refreshing. Presigned
 * URLs missing expiry metadata are refreshed defensively. Otherwise the URL's
 * creation time and lifetime are parsed from its SigV4 query parameters and
 * compared against the current time.
 *
 * @param {string} signedUrl - URL to inspect.
 * @param {number} bufferSeconds - Refresh when within this many seconds of expiry.
 * @returns {boolean} True when the URL should be re-signed.
 */
function needsRefresh(signedUrl, bufferSeconds) {
  try {
    const url = new URL(signedUrl);

    // Not a presigned URL — nothing expires, nothing to refresh.
    if (!url.searchParams.has('X-Amz-Signature')) {
      return false;
    }

    const expiresParam = url.searchParams.get('X-Amz-Expires');
    const dateParam = url.searchParams.get('X-Amz-Date');

    // Signed but missing expiry info: refresh to be safe.
    if (!expiresParam || !dateParam) {
      return true;
    }

    // X-Amz-Date uses ISO 8601 "basic" format: YYYYMMDD'T'HHMMSS'Z'.
    const year = dateParam.substring(0, 4);
    const month = dateParam.substring(4, 6);
    const day = dateParam.substring(6, 8);
    const hour = dateParam.substring(9, 11);
    const minute = dateParam.substring(11, 13);
    const second = dateParam.substring(13, 15);

    const dateObj = new Date(`${year}-${month}-${day}T${hour}:${minute}:${second}Z`);
    // Explicit radix 10 (the original parseInt omitted it).
    const expiresAtDate = new Date(dateObj.getTime() + parseInt(expiresParam, 10) * 1000);

    const now = new Date();

    // Fixed-age mode: refresh once the URL is older than the configured age,
    // regardless of its remaining signed lifetime.
    if (s3RefreshExpiryMs !== null) {
      const urlAge = now.getTime() - dateObj.getTime();
      return urlAge >= s3RefreshExpiryMs;
    }

    // Default mode: refresh when within `bufferSeconds` of actual expiry.
    const bufferTime = new Date(now.getTime() + bufferSeconds * 1000);
    return expiresAtDate <= bufferTime;
  } catch (error) {
    logger.error('Error checking URL expiration:', error);
    // On any parse failure, err on the side of refreshing.
    return true;
  }
}
| |
|
/**
 * Re-signs an existing S3 URL by re-deriving its key components.
 *
 * @param {string} currentURL - Current (possibly expired) S3 URL or key.
 * @returns {Promise<string|undefined>} Fresh signed URL, or undefined when the
 *   key cannot be parsed into `<basePath>/<userId>/<fileName>` or signing fails.
 */
async function getNewS3URL(currentURL) {
  try {
    const s3Key = extractKeyFromS3Url(currentURL);
    if (!s3Key) {
      return;
    }

    const [basePath, userId, ...fileParts] = s3Key.split('/');
    // Require at least basePath/userId/fileName.
    if (fileParts.length === 0) {
      return;
    }

    return await getS3URL({
      userId,
      fileName: fileParts.join('/'),
      basePath,
    });
  } catch (error) {
    logger.error('Error getting new S3 URL:', error);
  }
}
| |
|
/**
 * Refreshes expiring S3 URLs for a batch of file records.
 *
 * Mutates each qualifying record's `filepath` in place and persists the new
 * URLs via `batchUpdateFiles` in a single call.
 *
 * @param {Array<Object>} files - File records (may contain non-S3 entries).
 * @param {Function} batchUpdateFiles - Persists `[{ file_id, filepath }]` updates.
 * @param {number} [bufferSeconds=3600] - Refresh when within this many seconds of expiry.
 * @returns {Promise<Array<Object>>} The same array, with refreshed filepaths.
 */
async function refreshS3FileUrls(files, batchUpdateFiles, bufferSeconds = 3600) {
  if (!files || !Array.isArray(files) || files.length === 0) {
    return files;
  }

  const filesToUpdate = [];

  for (const file of files) {
    // Only S3-sourced records with an id and a filepath qualify.
    const eligible = file?.file_id && file.source === FileSources.s3 && file.filepath;
    if (!eligible || !needsRefresh(file.filepath, bufferSeconds)) {
      continue;
    }
    try {
      const newURL = await getNewS3URL(file.filepath);
      if (!newURL) {
        continue;
      }
      filesToUpdate.push({ file_id: file.file_id, filepath: newURL });
      file.filepath = newURL;
    } catch (error) {
      logger.error(`Error refreshing S3 URL for file ${file.file_id}:`, error);
    }
  }

  if (filesToUpdate.length > 0) {
    await batchUpdateFiles(filesToUpdate);
  }

  return files;
}
| |
|
/**
 * Returns a fresh signed URL for a single file record when its current URL is
 * close to expiring; otherwise returns the existing filepath unchanged.
 *
 * @param {Object} fileObj - File record; reads `source` and `filepath`.
 * @param {number} [bufferSeconds=3600] - Refresh when within this many seconds of expiry.
 * @returns {Promise<string>} A usable URL, or '' when no filepath is available.
 */
async function refreshS3Url(fileObj, bufferSeconds = 3600) {
  const currentPath = fileObj?.filepath;
  // Non-S3 records (or records without a path) pass through untouched.
  if (!fileObj || fileObj.source !== FileSources.s3 || !currentPath) {
    return currentPath || '';
  }

  if (!needsRefresh(currentPath, bufferSeconds)) {
    return currentPath;
  }

  try {
    const s3Key = extractKeyFromS3Url(currentPath);
    if (!s3Key) {
      logger.warn(`Unable to extract S3 key from URL: ${currentPath}`);
      return currentPath;
    }

    const [basePath, userId, ...fileParts] = s3Key.split('/');
    // Require at least basePath/userId/fileName.
    if (fileParts.length === 0) {
      logger.warn(`Invalid S3 key format: ${s3Key}`);
      return currentPath;
    }

    const newUrl = await getS3URL({
      userId,
      fileName: fileParts.join('/'),
      basePath,
    });

    logger.debug(`Refreshed S3 URL for key: ${s3Key}`);
    return newUrl;
  } catch (error) {
    logger.error(`Error refreshing S3 URL: ${error.message}`);
    return currentPath;
  }
}
| |
|
/** Public CRUD surface of the S3 file strategy. */
module.exports = {
  saveBufferToS3,
  saveURLToS3,
  getS3URL,
  deleteFileFromS3,
  uploadFileToS3,
  getS3FileStream,
  refreshS3FileUrls,
  refreshS3Url,
  needsRefresh,
  getNewS3URL,
};
| |
|