| import fs from 'fs'; |
| import path from 'path'; |
| import { promisify } from 'util'; |
| import { format } from 'date-fns'; |
| import mongoose from 'mongoose'; |
| import ExcelJS from 'exceljs'; |
| import { d as private_env } from './shared-server-49TKSBDM.js'; |
| import { E as ExcelColumnsName } from './constants-PEBBwOxo.js'; |
|
|
// Mongoose schema for a single request/response log entry.
// Field semantics are inferred from names where not evident from usage.
const LogSchema = new mongoose.Schema({
  llmPrompt: { type: String },
  llmTemplate: { type: String },
  llmResponse: { type: String },
  searchResults: { type: [String] },
  selectedSearchResults: { type: [String] },
  uiSettings: { type: Object },
  explanations: { type: [String] },
  consultations: { type: [String] },
  userRequest: { type: String },
  userScore: { type: String },
  userComment: { type: String },
  dateCreated: { type: Date, default: Date.now },
  dateUpdated: { type: Date, default: Date.now },
  // default -1 presumably means "not measured" — TODO confirm with callers
  requestOriginalTokenCount: { type: Number, default: -1 },
  requestSlicedTokenCount: { type: Number, default: -1 },
  searchMetrics: { type: Object }
});
// Non-unique index: log queries/exports filter and sort on dateCreated.
LogSchema.index({ dateCreated: 1 }, { unique: false });
// Reuse an already-compiled model when present (e.g. across hot reloads)
// to avoid mongoose's OverwriteModelError on re-registration.
const LogModel = mongoose.models.Log || mongoose.model("Log", LogSchema);
/**
 * Thin lifecycle wrapper around the (singleton) mongoose connection.
 * Connection failures are logged and tolerated so that logging stays
 * best-effort and never takes the application down.
 */
class MongooseService {
  uri;
  options;
  connection;

  /**
   * @param {string} uri - MongoDB connection string; falls back to the
   *   MONGODB_URI environment variable when empty.
   * @param {object} [options] - options forwarded to mongoose.connect().
   */
  constructor(uri, options = {}) {
    // The original wrapped these plain assignments in try/catch with a
    // misleading catch message; none of them can throw, so the guard on
    // the missing URI is the only check needed.
    this.uri = uri || process.env.MONGODB_URI || "";
    this.options = options;
    this.connection = null;
    if (!this.uri) {
      console.warn("MongoDB URI is not defined. Logs wont be saved.");
    }
  }

  /**
   * Connect to MongoDB, reusing an existing connection when available.
   * @returns {Promise<mongoose.Connection|null>} the connection, or null
   *   when connecting failed (best-effort: the error is logged, not thrown).
   */
  async connect() {
    if (this.connection) {
      return this.connection;
    }
    try {
      await mongoose.connect(this.uri, this.options);
      this.connection = mongoose.connection;
      console.log("Connected to MongoDB");
      return this.connection;
    } catch (error) {
      // Report the real failure; the original printed an unrelated
      // "URI is not defined" message here and hid the actual error.
      console.error("Could not connect to MongoDB:", error);
      return null;
    }
  }

  /**
   * Disconnect from MongoDB. No-op when there is no active connection.
   * @throws {Error} when mongoose fails to disconnect.
   */
  async disconnect() {
    if (!this.connection) {
      console.log("No active MongoDB connection to disconnect");
      return;
    }
    try {
      await mongoose.disconnect();
      this.connection = null;
      console.log("Disconnected from MongoDB");
    } catch (error) {
      console.error("Error disconnecting from MongoDB:", error);
      throw new Error("Could not disconnect from MongoDB");
    }
  }

  /**
   * Return the active connection, (re)connecting first when needed.
   * @returns {Promise<mongoose.Connection|null>}
   */
  async getConnection() {
    if (!this.isConnected()) {
      await this.connect();
    }
    return this.connection;
  }

  /**
   * @returns {boolean} true when a connection exists and mongoose reports
   *   readyState 1 (connected).
   */
  isConnected() {
    return this.connection !== null && mongoose.connection.readyState === 1;
  }
}
// Promise-returning wrappers for the callback-style fs APIs used below.
const [writeFile, readDir, readFile] = [
  fs.writeFile,
  fs.readdir,
  fs.readFile,
].map(promisify);
/**
 * Application logging service. Persists log documents to MongoDB (when
 * private_env.ENABLE_DB_SUPPORT is set) and/or JSON files, and exports
 * collected logs as JSON text or an Excel workbook buffer.
 */
class LogService {
  mongooseService;
  logModel;

  constructor() {
    // DB-backed members stay undefined when DB support is disabled; every
    // public method guards on ENABLE_DB_SUPPORT before touching them.
    if (private_env.ENABLE_DB_SUPPORT) {
      this.mongooseService = new MongooseService(private_env.MONGODB_URI || "");
      this.logModel = LogModel;
    }
  }

  /**
   * Save a log document to MongoDB.
   * @param {object} document - fields matching LogSchema.
   * @returns {Promise<string>} the created document's _id, or "" when DB
   *   support is disabled or saving failed (logging is best-effort).
   */
  async log(document) {
    if (!private_env.ENABLE_DB_SUPPORT) {
      return "";
    }
    try {
      await this.mongooseService.getConnection();
      const logEntry = new this.logModel(document);
      await logEntry.save();
      console.log("Log entry saved to MongoDB");
      return logEntry._id;
    } catch (err) {
      // Covers both connection and save failures; the original message
      // blamed the connection even when save() was what threw.
      console.error("Failed to save log entry to MongoDB:", err);
    }
    return "";
  }

  /**
   * Attach a user score and comment to an existing log entry.
   * @param {string} id - _id of the log document to update.
   * @param {string} score
   * @param {string} comment
   * @returns {Promise<number|string>} the number of modified documents, or
   *   "" when DB support is disabled.
   * @throws when the update fails (errors are logged, then rethrown).
   */
  async logUserScore(id, score, comment) {
    if (!private_env.ENABLE_DB_SUPPORT) {
      return "";
    }
    try {
      await this.mongooseService.getConnection();
      // Use the model this service already holds instead of re-resolving
      // it through mongoose.model("Log") on every call.
      const result = await this.logModel.updateOne(
        { _id: id },
        { $set: { userScore: score, userComment: comment } }
      );
      return result.modifiedCount;
    } catch (error) {
      console.error("Error while updating userScore:", error);
      throw error;
    }
  }

  /**
   * Write a log document to <LOGS_ROOT_FOLDER>/log/<timestamp>.json.
   * NOTE(review): the timestamp contains ":", which is not a legal
   * file-name character on Windows — confirm this only runs on POSIX.
   * @param {object} document - serialized verbatim as pretty-printed JSON.
   */
  async saveLogToJsonFile(document) {
    const timestamp = format(new Date(), "dd-MM-yyyy:HH:mm:ss:SSS");
    const logDir = path.join(private_env.LOGS_ROOT_FOLDER, "log");
    const logFile = path.join(logDir, `${timestamp}.json`);
    if (!fs.existsSync(logDir)) {
      fs.mkdirSync(logDir, { recursive: true });
    }
    await writeFile(logFile, JSON.stringify(document, null, 2));
    console.log(`Log entry saved to file: ${logFile}`);
  }

  /**
   * Export logs stored in MongoDB within an optional date range.
   * @param {"json"|"excel"} type - output format.
   * @param {Date} [dateFrom] - inclusive lower bound on dateCreated.
   * @param {Date} [dateTo] - inclusive upper bound on dateCreated.
   * @param {string[]} [fields] - projection; all fields when omitted.
   * @returns {Promise<string|Buffer|undefined>} JSON text, an xlsx buffer,
   *   "" when DB support is disabled, or undefined for an unknown type.
   */
  async export(type, dateFrom, dateTo, fields) {
    if (!private_env.ENABLE_DB_SUPPORT) {
      return "";
    }
    const allLogs = await this.fetchLogsFromMongo(dateFrom, dateTo, fields);
    if (type === "json") {
      return await this.exportToJson(allLogs);
    } else if (type === "excel") {
      return await this.exportToExcel(allLogs);
    }
  }

  /**
   * Serialize logs as pretty-printed JSON.
   * Added because export("json", ...) previously called a method that did
   * not exist on this class and threw a TypeError.
   * @param {object[]} logs
   * @returns {Promise<string>}
   */
  async exportToJson(logs) {
    return JSON.stringify(logs, null, 2);
  }

  /**
   * Query MongoDB for log entries, oldest first.
   * @param {Date} [dateFrom] @param {Date} [dateTo] @param {string[]} [fields]
   * @returns {Promise<object[]>} plain objects (lean), [] when DB disabled.
   */
  async fetchLogsFromMongo(dateFrom, dateTo, fields) {
    if (!private_env.ENABLE_DB_SUPPORT) {
      return [];
    }
    const query = {};
    if (dateFrom || dateTo) {
      query.dateCreated = {};
      if (dateFrom) query.dateCreated.$gte = dateFrom;
      if (dateTo) query.dateCreated.$lte = dateTo;
    }
    await this.mongooseService.getConnection();
    return await this.logModel
      .find(query)
      .select(fields ? fields.join(" ") : "")
      .sort({ dateCreated: 1 })
      .lean()
      .exec();
  }

  /**
   * Parse a "dd-MM-yyyy:HH:mm:ss:SSS" file stem (the naming scheme used by
   * saveLogToJsonFile) into a Date. The original replaced "-" with ":" and
   * handed the result to new Date(), which always produced Invalid Date.
   * @param {string} fileName
   * @returns {Date}
   */
  static parseLogFileTimestamp(fileName) {
    const stem = fileName.split(".")[0];
    const [datePart, ...timeParts] = stem.split(":");
    const [day, month, year] = datePart.split("-").map(Number);
    const [hours = 0, minutes = 0, seconds = 0, millis = 0] =
      timeParts.map(Number);
    // Date months are 0-indexed.
    return new Date(year, month - 1, day, hours, minutes, seconds, millis);
  }

  /**
   * Read log entries from the JSON files written by saveLogToJsonFile.
   * Fixes from the original: read the same directory saveLogToJsonFile
   * writes to (the old path used __dirname, which is undefined in an ES
   * module and pointed elsewhere anyway), and parse file timestamps with
   * parseLogFileTimestamp instead of the broken new Date() round-trip.
   * @param {Date} [dateFrom] @param {Date} [dateTo] - inclusive bounds.
   * @returns {Promise<object[]>} parsed entries sorted by dateCreated.
   */
  async fetchLogsFromFiles(dateFrom, dateTo) {
    const logDir = path.join(private_env.LOGS_ROOT_FOLDER, "log");
    const files = await readDir(logDir);
    const logs = [];
    for (const file of files) {
      if (!file.endsWith(".json")) {
        continue; // ignore stray non-log files
      }
      const content = await readFile(path.join(logDir, file), "utf-8");
      let logEntry;
      try {
        logEntry = JSON.parse(content);
      } catch (err) {
        console.warn(`Skipping unparseable log file ${file}:`, err);
        continue;
      }
      const fileDate = LogService.parseLogFileTimestamp(file);
      if ((!dateFrom || fileDate >= dateFrom) && (!dateTo || fileDate <= dateTo)) {
        logs.push(logEntry);
      }
    }
    logs.sort(
      (a, b) => new Date(a.dateCreated).getTime() - new Date(b.dateCreated).getTime()
    );
    return logs;
  }

  /**
   * Build an Excel workbook from the logs.
   *
   * Each log becomes one row; the array-valued fields (consultations,
   * explanations, searchResults, selectedSearchResults) and the entries of
   * searchMetrics are spread over continuation rows so that element i of
   * each collection lands on row i of the log's row group.
   *
   * @param {object[]} logs
   * @returns {Promise<Buffer>} xlsx file contents.
   */
  async exportToExcel(logs) {
    const workbook = new ExcelJS.Workbook();
    const worksheet = workbook.addWorksheet("Logs");
    if (private_env.ENABLE_DB_SUPPORT) {
      worksheet.columns = Object.keys(ExcelColumnsName).map((key) => ({
        header: ExcelColumnsName[key] ?? key,
        key,
        width: 30
      }));
      logs.forEach((log) => {
        const consultations = (log.consultations ?? []).map((v) => JSON.stringify(v));
        const explanations = (log.explanations ?? []).map((v) => JSON.stringify(v));
        const searchResults = (log.searchResults ?? []).map((v) => JSON.stringify(v));
        // Object.entries never returns null, so no ?? [] fallback is needed.
        const searchMetrics = Object.entries(log.searchMetrics ?? {}).map((v) =>
          JSON.stringify(v)
        );
        const selectedSearchResults = (log.selectedSearchResults ?? []).map((v) =>
          JSON.stringify(v)
        );
        // Stringify nested objects so ExcelJS renders them legibly.
        const serializedLog = Object.fromEntries(
          Object.entries(log).map(([key, value]) => [
            key,
            typeof value === "object" && value !== null ? JSON.stringify(value) : value
          ])
        );
        worksheet.addRow({
          ...serializedLog,
          consultations: consultations[0] ?? "",
          explanations: explanations[0] ?? "",
          searchResults: searchResults[0] ?? "",
          selectedSearchResults: selectedSearchResults[0] ?? "",
          searchMetrics: searchMetrics[0] ?? ""
        });
        // Include searchMetrics here too — the original omitted it and
        // silently dropped metrics beyond the longest of the other arrays.
        const maxLength = Math.max(
          consultations.length,
          explanations.length,
          searchResults.length,
          selectedSearchResults.length,
          searchMetrics.length
        );
        for (let index = 1; index < maxLength; index++) {
          worksheet.addRow({
            consultations: consultations[index] ?? "",
            explanations: explanations[index] ?? "",
            searchResults: searchResults[index] ?? "",
            selectedSearchResults: selectedSearchResults[index] ?? "",
            searchMetrics: searchMetrics[index] ?? ""
          });
        }
      });
    }
    const buffer = await workbook.xlsx.writeBuffer();
    return buffer;
  }
}
|
|
| export { LogService as L }; |
| |
|
|