const { v4: uuidv4 } = require('uuid');
const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint, Constants, openAISettings } = require('librechat-data-provider');
const { bulkIncrementTagCounts } = require('~/models/ConversationTag');
const { bulkSaveConvos } = require('~/models/Conversation');
const { bulkSaveMessages } = require('~/models/Message');
/**
 * Factory function for creating an instance of ImportBatchBuilder.
 * @param {string} requestUserId - The ID of the user making the request.
 * @returns {ImportBatchBuilder} The newly created ImportBatchBuilder instance.
 */
function createImportBatchBuilder(requestUserId) {
  const builder = new ImportBatchBuilder(requestUserId);
  return builder;
}
/**
 * Class for building a batch of conversations and messages and pushing them to DB for Conversation Import functionality
 */
class ImportBatchBuilder {
  /**
   * Creates an instance of ImportBatchBuilder.
   * @param {string} requestUserId - The ID of the user making the import request.
   */
  constructor(requestUserId) {
    this.requestUserId = requestUserId;
    // Conversations accumulated across startConversation/finishConversation cycles
    this.conversations = [];
    // Messages accumulated across all conversations in this batch
    this.messages = [];
  }
  /**
   * Starts a new conversation in the batch.
   * @param {string} [endpoint=EModelEndpoint.openAI] - The endpoint for the conversation. Defaults to EModelEndpoint.openAI.
   * @returns {void}
   */
  startConversation(endpoint) {
    // we are simplifying by using a single model for the entire conversation
    this.endpoint = endpoint || EModelEndpoint.openAI;
    this.conversationId = uuidv4();
    // The first message added after this has no parent
    this.lastMessageId = Constants.NO_PARENT;
  }
  /**
   * Adds a user message to the current conversation.
   * @param {string} text - The text of the user message.
   * @returns {object} The saved message object.
   */
  addUserMessage(text) {
    const message = this.saveMessage({ text, sender: 'user', isCreatedByUser: true });
    return message;
  }
  /**
   * Adds a GPT message to the current conversation.
   * @param {string} text - The text of the GPT message.
   * @param {string} [model] - The model used for generating the GPT message. Defaults to the openAI default model.
   * @param {string} [sender='GPT-3.5'] - The sender of the GPT message. Defaults to 'GPT-3.5'.
   * @returns {object} The saved message object.
   */
  addGptMessage(text, model, sender = 'GPT-3.5') {
    const message = this.saveMessage({
      text,
      sender,
      isCreatedByUser: false,
      model: model || openAISettings.model.default,
    });
    return message;
  }
  /**
   * Finishes the current conversation and adds it to the batch.
   * @param {string} [title='Imported Chat'] - The title of the conversation. Defaults to 'Imported Chat'.
   * @param {Date} [createdAt] - The creation date of the conversation.
   * @param {TConversation} [originalConvo] - The original conversation.
   * @returns {{ conversation: TConversation, messages: TMessage[] }} The resulting conversation and messages.
   */
  finishConversation(title, createdAt, originalConvo = {}) {
    const convo = {
      ...originalConvo,
      user: this.requestUserId,
      conversationId: this.conversationId,
      title: title || 'Imported Chat',
      createdAt: createdAt,
      // Imported conversations keep their original timestamp on update as well
      updatedAt: createdAt,
      overrideTimestamp: true,
      endpoint: this.endpoint,
      model: originalConvo.model ?? openAISettings.model.default,
    };
    // Strip any Mongo _id carried over from originalConvo so bulk save inserts fresh docs
    if (convo._id) {
      delete convo._id;
    }
    this.conversations.push(convo);
    return { conversation: convo, messages: this.messages };
  }
  /**
   * Saves the batch of conversations and messages to the DB.
   * Also increments tag counts for any existing tags.
   * @returns {Promise<void>} A promise that resolves when the batch is saved.
   * @throws {Error} If there is an error saving the batch.
   */
  async saveBatch() {
    try {
      const promises = [];
      promises.push(bulkSaveConvos(this.conversations));
      promises.push(bulkSaveMessages(this.messages, true));
      promises.push(
        bulkIncrementTagCounts(
          this.requestUserId,
          // `?? []` guards conversations without tags: flatMap does not flatten
          // non-array returns, so a bare `convo.tags` would inject `undefined` entries
          this.conversations.flatMap((convo) => convo.tags ?? []),
        ),
      );
      await Promise.all(promises);
      logger.debug(
        `user: ${this.requestUserId} | Added ${this.conversations.length} conversations and ${this.messages.length} messages to the DB.`,
      );
    } catch (error) {
      logger.error('Error saving batch', error);
      throw error;
    }
  }
  /**
   * Saves a message to the current conversation.
   * @param {object} messageDetails - The details of the message.
   * @param {string} messageDetails.text - The text of the message.
   * @param {string} messageDetails.sender - The sender of the message.
   * @param {string} [messageDetails.messageId] - The ID of the current message; a new UUID is generated when absent.
   * @param {boolean} messageDetails.isCreatedByUser - Indicates whether the message is created by the user.
   * @param {string} [messageDetails.model] - The model used for generating the message.
   * @param {string} [messageDetails.endpoint] - The endpoint used for generating the message.
   * @param {string} [messageDetails.parentMessageId=this.lastMessageId] - The ID of the parent message.
   * @param {Partial<TMessage>} messageDetails.rest - Additional properties that may be included in the message.
   * @returns {object} The saved message object.
   */
  saveMessage({
    text,
    sender,
    isCreatedByUser,
    model,
    messageId,
    parentMessageId = this.lastMessageId,
    endpoint,
    ...rest
  }) {
    const newMessageId = messageId ?? uuidv4();
    const message = {
      ...rest,
      parentMessageId,
      messageId: newMessageId,
      conversationId: this.conversationId,
      isCreatedByUser: isCreatedByUser,
      model: model || this.model,
      user: this.requestUserId,
      endpoint: endpoint ?? this.endpoint,
      unfinished: false,
      isEdited: false,
      error: false,
      sender,
      text,
    };
    // Strip any stray _id from spread-in `rest` so DB assigns fresh ids on insert
    if (message._id) {
      delete message._id;
    }
    // Chain the next saved message to this one by default
    this.lastMessageId = newMessageId;
    this.messages.push(message);
    return message;
  }
}
module.exports = { ImportBatchBuilder, createImportBatchBuilder };