// Source: chat-v4/chat-ui/src/lib/buildPrompt.ts
// Imported from GitHub (commit 39a2f10, Матенков Андрей — "Chat moved from github").
import { SAIGA_PROMPT_WEB_SEARCH_ANALYZE } from "$env/static/private";
import type { BackendModel } from "./server/models";
import type { Message } from "./types/Message";
import { format } from "date-fns";
import type { WebSearch } from "./types/WebSearch";
/**
 * Build the message list handed to the model backend. When web-search
 * context is available, the conversation is collapsed into a single user
 * message produced from the SAIGA web-search analysis template; otherwise
 * the messages are returned unchanged.
 */
interface buildPromptOptions {
// Conversation history; only "from" and "content" are needed here.
messages: Pick<Message, "from" | "content">[];
// Backend model whose prompt template/parameters apply.
model: BackendModel;
// Request-scoped locals — not read by buildPrompt in this file.
locals?: App.Locals;
// Optional web-search results; a non-empty `context` triggers the
// search-analysis prompt rewrite.
webSearch?: WebSearch;
// Optional system pre-prompt forwarded to the model's template.
preprompt?: string;
}
/**
 * Assemble the messages that will be sent to the model.
 *
 * If `webSearch.context` is present, the whole conversation is replaced by a
 * single user message built from the SAIGA web-search analysis template, with
 * the last user query and the retrieved context substituted in. Otherwise the
 * input messages are returned as-is.
 *
 * NOTE(review): `model` and `preprompt` are currently unused — the previous
 * `model.chatPromptRender(...)` path was unreachable (it sat after an
 * unconditional `return` and returned a string, contradicting the declared
 * return type) and has been removed.
 *
 * @returns the (possibly rewritten) message list.
 */
export async function buildPrompt({
	messages,
	model,
	webSearch,
	preprompt,
}: buildPromptOptions): Promise<Pick<Message, "from" | "content">[]> {
	if (webSearch?.context) {
		// Guard against an empty conversation (the old `messages.slice(-1)[0]`
		// would throw on `.content` of `undefined`).
		const lastUserMsg = messages[messages.length - 1];
		if (lastUserMsg) {
			// NOTE(review): only the first occurrence of each placeholder is
			// substituted — the template is assumed to contain "{{query}}" and
			// "{{context}}" exactly once; confirm against the env prompt.
			return [
				{
					from: "user",
					content: SAIGA_PROMPT_WEB_SEARCH_ANALYZE.replace(
						"{{query}}",
						lastUserMsg.content
					).replace("{{context}}", webSearch.context),
				},
			];
		}
	}
	return messages;
}