index
int64 0
0
| repo_id
stringclasses 596
values | file_path
stringlengths 31
168
| content
stringlengths 1
6.2M
|
|---|---|---|---|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/typesense.ts
|
import type { Client } from "typesense";
import type { MultiSearchRequestSchema } from "typesense/lib/Typesense/MultiSearch.js";
import type {
SearchResponseHit,
DocumentSchema,
} from "typesense/lib/Typesense/Documents.js";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { VectorStore } from "@langchain/core/vectorstores";
import { Document } from "@langchain/core/documents";
import {
AsyncCaller,
AsyncCallerParams,
} from "@langchain/core/utils/async_caller";
/**
* Interface for the response hit from a vector search in Typesense.
*/
interface VectorSearchResponseHit<T extends DocumentSchema>
  extends SearchResponseHit<T> {
  // Distance between the query vector and this hit's vector, as reported by
  // Typesense. Optional because hits from non-vector queries do not carry it.
  vector_distance?: number;
}
/**
* Typesense vector store configuration.
*/
export interface TypesenseConfig extends AsyncCallerParams {
  /**
   * Typesense client.
   */
  typesenseClient: Client;
  /**
   * Typesense schema name in which documents will be stored and searched.
   */
  schemaName: string;
  /**
   * Typesense search parameters.
   * Merged into every vector search request.
   * @default { q: '*', per_page: 5, query_by: '' }
   */
  searchParams?: MultiSearchRequestSchema;
  /**
   * Column names.
   */
  columnNames?: {
    /**
     * Vector column name.
     * @default 'vec'
     */
    vector?: string;
    /**
     * Page content column name.
     * @default 'text'
     */
    pageContent?: string;
    /**
     * Metadata column names.
     * Only keys listed here are copied between Document.metadata and the
     * Typesense record.
     * @default []
     */
    metadataColumnNames?: string[];
  };
  /**
   * Replace default import function.
   * Default import function will update documents if there is a document with the same id.
   * @param data records to import (already in Typesense column layout)
   * @param collectionName name of the target Typesense collection
   */
  import?<T extends Record<string, unknown> = Record<string, unknown>>(
    data: T[],
    collectionName: string
  ): Promise<void>;
}
/**
* Typesense vector store.
*/
/**
 * Typesense vector store.
 *
 * Stores document embeddings in a Typesense collection and performs
 * vector similarity search through Typesense's multi-search API.
 */
export class Typesense extends VectorStore {
  declare FilterType: Partial<MultiSearchRequestSchema>;

  private client: Client;

  private schemaName: string;

  private searchParams: MultiSearchRequestSchema;

  private vectorColumnName: string;

  private pageContentColumnName: string;

  private metadataColumnNames: string[];

  private caller: AsyncCaller;

  private import: (
    data: Record<string, unknown>[],
    collectionName: string
  ) => Promise<void>;

  _vectorstoreType(): string {
    return "typesense";
  }

  constructor(embeddings: EmbeddingsInterface, config: TypesenseConfig) {
    super(embeddings, config);

    // Assign config values to class properties.
    this.client = config.typesenseClient;
    this.schemaName = config.schemaName;
    this.searchParams = config.searchParams || {
      q: "*",
      per_page: 5,
      query_by: "",
    };
    this.vectorColumnName = config.columnNames?.vector || "vec";
    this.pageContentColumnName = config.columnNames?.pageContent || "text";
    this.metadataColumnNames = config.columnNames?.metadataColumnNames || [];

    // Use the caller-supplied import function when present, otherwise the
    // default chunked "emplace" import below.
    this.import = config.import || this.importToTypesense.bind(this);

    this.caller = new AsyncCaller(config);
  }

  /**
   * Default function to import data to typesense.
   * Uses the "emplace" action so documents with an existing id are updated
   * in place, and imports in chunks of 2000 to bound request size.
   * @param data records to import
   * @param collectionName target Typesense collection
   */
  private async importToTypesense<
    T extends Record<string, unknown> = Record<string, unknown>
  >(data: T[], collectionName: string) {
    const chunkSize = 2000;
    for (let i = 0; i < data.length; i += chunkSize) {
      const chunk = data.slice(i, i + chunkSize);

      await this.caller.call(async () => {
        await this.client
          .collections<T>(collectionName)
          .documents()
          .import(chunk, { action: "emplace", dirty_values: "drop" });
      });
    }
  }

  /**
   * Transform documents to Typesense records.
   * Only metadata keys listed in `metadataColumnNames` are copied.
   * @param documents documents to transform
   * @param vectors embedding for each document, parallel to `documents`
   * @returns Typesense records.
   */
  _documentsToTypesenseRecords(
    documents: Document[],
    vectors: number[][]
  ): Record<string, unknown>[] {
    const metadatas = documents.map((doc) => doc.metadata);

    const typesenseDocuments = documents.map((doc, index) => {
      const metadata = metadatas[index];
      const objectWithMetadatas: Record<string, unknown> = {};

      this.metadataColumnNames.forEach((metadataColumnName) => {
        objectWithMetadatas[metadataColumnName] = metadata[metadataColumnName];
      });

      return {
        [this.pageContentColumnName]: doc.pageContent,
        [this.vectorColumnName]: vectors[index],
        ...objectWithMetadatas,
      };
    });

    return typesenseDocuments;
  }

  /**
   * Transform the Typesense records to documents.
   * @param typesenseRecords hits returned by a Typesense vector search
   * @returns documents paired with their vector distance
   */
  _typesenseRecordsToDocuments(
    typesenseRecords:
      | { document?: Record<string, unknown>; vector_distance: number }[]
      | undefined
  ): [Document, number][] {
    const documents: [Document, number][] =
      typesenseRecords?.map((hit) => {
        const objectWithMetadatas: Record<string, unknown> = {};
        const hitDoc = hit.document || {};
        this.metadataColumnNames.forEach((metadataColumnName) => {
          objectWithMetadatas[metadataColumnName] = hitDoc[metadataColumnName];
        });

        const document: Document = {
          pageContent: (hitDoc[this.pageContentColumnName] as string) || "",
          metadata: objectWithMetadatas,
        };
        return [document, hit.vector_distance];
      }) || [];

    return documents;
  }

  /**
   * Add documents to the vector store.
   * Will be updated if in the metadata there is a document with the same id
   * when using the default import function.
   * Metadata will be added in the columns of the schema based on metadataColumnNames.
   * @param documents Documents to add.
   */
  async addDocuments(documents: Document[]) {
    const typesenseDocuments = this._documentsToTypesenseRecords(
      documents,
      await this.embeddings.embedDocuments(
        documents.map((doc) => doc.pageContent)
      )
    );
    await this.import(typesenseDocuments, this.schemaName);
  }

  /**
   * Adds vectors to the vector store.
   * @param vectors Vectors to add.
   * @param documents Documents associated with the vectors.
   */
  async addVectors(vectors: number[][], documents: Document[]) {
    const typesenseDocuments = this._documentsToTypesenseRecords(
      documents,
      vectors
    );
    await this.import(typesenseDocuments, this.schemaName);
  }

  /**
   * Search for similar documents with their similarity score.
   * @param vectorPrompt vector to search for
   * @param k amount of results to return
   * @param filter extra search parameters merged into the request
   * @returns similar documents with their similarity score
   */
  async similaritySearchVectorWithScore(
    vectorPrompt: number[],
    k?: number,
    filter: this["FilterType"] = {}
  ) {
    const amount = k || this.searchParams.per_page || 5;
    const vector_query = `${this.vectorColumnName}:([${vectorPrompt}], k:${amount})`;
    const typesenseResponse = await this.client.multiSearch.perform(
      {
        searches: [
          {
            ...this.searchParams,
            ...filter,
            per_page: amount,
            vector_query,
            collection: this.schemaName,
          },
        ],
      },
      {}
    );
    // Guard against an empty results array instead of throwing on [0].hits.
    const results = typesenseResponse.results[0]?.hits;

    const hits = results?.map((hit: VectorSearchResponseHit<object>) => ({
      document: hit?.document ?? {},
      // BUG FIX: use ?? instead of || so a perfect match (distance 0, which
      // is falsy) is preserved rather than being replaced by the fallback 2.
      vector_distance: hit?.vector_distance ?? 2,
    })) as
      | { document: Record<string, unknown>; vector_distance: number }[]
      | undefined;

    return this._typesenseRecordsToDocuments(hits);
  }

  /**
   * Delete documents from the vector store.
   * @param documentIds ids of the documents to delete
   */
  async deleteDocuments(documentIds: string[]) {
    await this.client
      .collections(this.schemaName)
      .documents()
      .delete({
        filter_by: `id:=${documentIds.join(",")}`,
      });
  }

  /**
   * Create a vector store from documents.
   * @param docs documents
   * @param embeddings embeddings
   * @param config Typesense configuration
   * @returns Typesense vector store
   * @warning You can omit this method, and only use the constructor and addDocuments.
   */
  static async fromDocuments(
    docs: Document[],
    embeddings: EmbeddingsInterface,
    config: TypesenseConfig
  ): Promise<Typesense> {
    const instance = new Typesense(embeddings, config);
    await instance.addDocuments(docs);

    return instance;
  }

  /**
   * Create a vector store from texts.
   * @param texts texts to store
   * @param metadatas metadata for each text, parallel to `texts`
   * @param embeddings embeddings model
   * @param config Typesense configuration
   * @returns Typesense vector store
   */
  static async fromTexts(
    texts: string[],
    metadatas: object[],
    embeddings: EmbeddingsInterface,
    config: TypesenseConfig
  ) {
    const instance = new Typesense(embeddings, config);
    const documents: Document[] = texts.map((text, i) => ({
      pageContent: text,
      metadata: metadatas[i] || {},
    }));
    await instance.addDocuments(documents);

    return instance;
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/usearch.ts
|
import usearch from "usearch";
import * as uuid from "uuid";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { SaveableVectorStore } from "@langchain/core/vectorstores";
import { Document } from "@langchain/core/documents";
import { SynchronousInMemoryDocstore } from "../stores/doc/in_memory.js";
/**
* Interface that defines the arguments that can be passed to the
* `USearch` constructor. It includes optional properties for a
* `docstore`, `index`, and `mapping`.
*/
export interface USearchArgs {
  // Document store; defaults to a fresh SynchronousInMemoryDocstore.
  docstore?: SynchronousInMemoryDocstore;
  // Pre-built usearch index; if omitted, one is created lazily on first addVectors call.
  index?: usearch.Index;
  // Mapping from numeric usearch keys to docstore document ids.
  mapping?: Record<number, string>;
}
/**
* Class that extends `SaveableVectorStore` and provides methods for
* adding documents and vectors to a `usearch` index, performing
* similarity searches, and saving the index.
*/
/**
 * Class that extends `SaveableVectorStore` and provides methods for
 * adding documents and vectors to a `usearch` index, performing
 * similarity searches, and saving the index.
 */
export class USearch extends SaveableVectorStore {
  _index?: usearch.Index;

  // Maps numeric usearch keys to docstore document ids.
  _mapping: Record<number, string>;

  docstore: SynchronousInMemoryDocstore;

  args: USearchArgs;

  _vectorstoreType(): string {
    return "usearch";
  }

  constructor(embeddings: EmbeddingsInterface, args: USearchArgs) {
    super(embeddings, args);
    this.args = args;
    this._index = args.index;
    this._mapping = args.mapping ?? {};
    this.embeddings = embeddings;
    this.docstore = args?.docstore ?? new SynchronousInMemoryDocstore();
  }

  /**
   * Method that adds documents to the `usearch` index. It generates
   * embeddings for the documents and adds them to the index.
   * @param documents An array of `Document` instances to be added to the index.
   * @returns A promise that resolves with an array of document IDs.
   */
  async addDocuments(documents: Document[]) {
    const texts = documents.map(({ pageContent }) => pageContent);
    return this.addVectors(
      await this.embeddings.embedDocuments(texts),
      documents
    );
  }

  public get index(): usearch.Index {
    if (!this._index) {
      throw new Error(
        "Vector store not initialised yet. Try calling `fromTexts` or `fromDocuments` first."
      );
    }
    return this._index;
  }

  private set index(index: usearch.Index) {
    this._index = index;
  }

  /**
   * Method that adds vectors to the `usearch` index. It also updates the
   * mapping between vector IDs and document IDs.
   * @param vectors An array of vectors to be added to the index.
   * @param documents An array of `Document` instances corresponding to the vectors.
   * @returns A promise that resolves with an array of document IDs.
   */
  async addVectors(vectors: number[][], documents: Document[]) {
    if (vectors.length === 0) {
      return [];
    }
    if (vectors.length !== documents.length) {
      throw new Error(`Vectors and documents must have the same length`);
    }
    const dv = vectors[0].length;
    // Lazily create the index with the dimensionality of the first batch.
    if (!this._index) {
      this._index = new usearch.Index({
        metric: "l2sq",
        connectivity: BigInt(16),
        dimensions: BigInt(dv),
      });
    }
    const d = this.index.dimensions();
    if (BigInt(dv) !== d) {
      throw new Error(
        `Vectors must have the same length as the number of dimensions (${d})`
      );
    }

    // Numeric keys continue from the current index size.
    const docstoreSize = this.index.size();
    const documentIds = [];
    for (let i = 0; i < vectors.length; i += 1) {
      const documentId = uuid.v4();
      documentIds.push(documentId);
      const id = Number(docstoreSize) + i;
      this.index.add(BigInt(id), new Float32Array(vectors[i]));
      this._mapping[id] = documentId;
      this.docstore.add({ [documentId]: documents[i] });
    }
    return documentIds;
  }

  /**
   * Method that performs a similarity search in the `usearch` index. It
   * returns the `k` most similar documents to a given query vector, along
   * with their similarity scores.
   * @param query The query vector.
   * @param k The number of most similar documents to return.
   * @returns A promise that resolves with an array of tuples, each containing a `Document` and its similarity score.
   */
  async similaritySearchVectorWithScore(query: number[], k: number) {
    const d = this.index.dimensions();
    if (BigInt(query.length) !== d) {
      throw new Error(
        `Query vector must have the same length as the number of dimensions (${d})`
      );
    }
    if (k > this.index.size()) {
      const total = this.index.size();
      console.warn(
        `k (${k}) is greater than the number of elements in the index (${total}), setting k to ${total}`
      );
      // eslint-disable-next-line no-param-reassign
      k = Number(total);
    }

    const result = this.index.search(new Float32Array(query), BigInt(k));

    const return_list: [Document, number][] = [];
    for (let i = 0; i < result.count; i += 1) {
      // Renamed from `uuid`: the local previously shadowed the imported
      // `uuid` module, which was confusing and error-prone.
      const documentId = this._mapping[Number(result.keys[i])];
      return_list.push([
        this.docstore.search(documentId),
        result.distances[i],
      ]);
    }

    return return_list;
  }

  /**
   * Method that saves the `usearch` index and the document store to disk.
   * @param directory The directory where the index and document store should be saved.
   * @returns A promise that resolves when the save operation is complete.
   */
  async save(directory: string) {
    const fs = await import("node:fs/promises");
    const path = await import("node:path");
    await fs.mkdir(directory, { recursive: true });
    // BUG FIX: the docstore write was previously `await`ed inside the
    // Promise.all array literal, which forced it to complete before
    // Promise.all even started. Start both operations, then await together.
    await Promise.all([
      this.index.save(path.join(directory, "usearch.index")),
      fs.writeFile(
        path.join(directory, "docstore.json"),
        JSON.stringify([
          Array.from(this.docstore._docs.entries()),
          this._mapping,
        ])
      ),
    ]);
  }

  /**
   * Static method that creates a new `USearch` instance from a list of
   * texts. It generates embeddings for the texts and adds them to the
   * `usearch` index.
   * @param texts An array of texts to be added to the index.
   * @param metadatas Metadata associated with the texts.
   * @param embeddings An instance of `Embeddings` used to generate embeddings for the texts.
   * @param dbConfig Optional configuration for the document store.
   * @returns A promise that resolves with a new `USearch` instance.
   */
  static async fromTexts(
    texts: string[],
    metadatas: object[] | object,
    embeddings: EmbeddingsInterface,
    dbConfig?: {
      docstore?: SynchronousInMemoryDocstore;
    }
  ): Promise<USearch> {
    const docs: Document[] = [];
    for (let i = 0; i < texts.length; i += 1) {
      const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
      const newDoc = new Document({
        pageContent: texts[i],
        metadata,
      });
      docs.push(newDoc);
    }
    return this.fromDocuments(docs, embeddings, dbConfig);
  }

  /**
   * Static method that creates a new `USearch` instance from a list of
   * documents. It generates embeddings for the documents and adds them to
   * the `usearch` index.
   * @param docs An array of `Document` instances to be added to the index.
   * @param embeddings An instance of `Embeddings` used to generate embeddings for the documents.
   * @param dbConfig Optional configuration for the document store.
   * @returns A promise that resolves with a new `USearch` instance.
   */
  static async fromDocuments(
    docs: Document[],
    embeddings: EmbeddingsInterface,
    dbConfig?: {
      docstore?: SynchronousInMemoryDocstore;
    }
  ): Promise<USearch> {
    const args: USearchArgs = {
      docstore: dbConfig?.docstore,
    };
    const instance = new this(embeddings, args);
    await instance.addDocuments(docs);
    return instance;
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/typeorm.ts
|
import { Metadata } from "@opensearch-project/opensearch/api/types.js";
import { DataSource, DataSourceOptions, EntitySchema } from "typeorm";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { VectorStore } from "@langchain/core/vectorstores";
import { Document } from "@langchain/core/documents";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
/**
* Interface that defines the arguments required to create a
* `TypeORMVectorStore` instance. It includes Postgres connection options,
* table name, filter, and verbosity level.
*/
export interface TypeORMVectorStoreArgs {
postgresConnectionOptions: DataSourceOptions;
tableName?: string;
filter?: Metadata;
verbose?: boolean;
}
/**
* Class that extends the `Document` base class and adds an `embedding`
* property. It represents a document in the vector store.
*/
export class TypeORMVectorStoreDocument extends Document {
  // Embedding serialized as a Postgres vector literal string, e.g. "[1,2,3]".
  embedding: string;
}

// Table used when `TypeORMVectorStoreArgs.tableName` is not provided.
const defaultDocumentTableName = "documents";
/**
* Class that provides an interface to a Postgres vector database. It
* extends the `VectorStore` base class and implements methods for adding
* documents and vectors, performing similarity searches, and ensuring the
* existence of a table in the database.
*/
/**
 * Class that provides an interface to a Postgres vector database. It
 * extends the `VectorStore` base class and implements methods for adding
 * documents and vectors, performing similarity searches, and ensuring the
 * existence of a table in the database.
 */
export class TypeORMVectorStore extends VectorStore {
  declare FilterType: Metadata;

  tableName: string;

  documentEntity: EntitySchema;

  filter?: Metadata;

  appDataSource: DataSource;

  _verbose?: boolean;

  _vectorstoreType(): string {
    return "typeorm";
  }

  private constructor(
    embeddings: EmbeddingsInterface,
    fields: TypeORMVectorStoreArgs
  ) {
    super(embeddings, fields);
    this.tableName = fields.tableName || defaultDocumentTableName;
    this.filter = fields.filter;

    const TypeORMDocumentEntity = new EntitySchema<TypeORMVectorStoreDocument>({
      name: fields.tableName ?? defaultDocumentTableName,
      columns: {
        id: {
          generated: "uuid",
          type: "uuid",
          primary: true,
        },
        pageContent: {
          type: String,
        },
        metadata: {
          type: "jsonb",
        },
        embedding: {
          type: String,
        },
      },
    });
    const appDataSource = new DataSource({
      entities: [TypeORMDocumentEntity],
      ...fields.postgresConnectionOptions,
    });
    this.appDataSource = appDataSource;
    this.documentEntity = TypeORMDocumentEntity;

    // BUG FIX: `=== "true"` always yields a boolean, so the original
    // `?? fields.verbose ?? false` chain was dead code and `fields.verbose`
    // was silently ignored. Enable verbosity from either source.
    this._verbose =
      getEnvironmentVariable("LANGCHAIN_VERBOSE") === "true" ||
      (fields.verbose ?? false);
  }

  /**
   * Static method to create a new `TypeORMVectorStore` instance from a
   * `DataSource`. It initializes the `DataSource` if it is not already
   * initialized.
   * @param embeddings Embeddings instance.
   * @param fields `TypeORMVectorStoreArgs` instance.
   * @returns A new instance of `TypeORMVectorStore`.
   */
  static async fromDataSource(
    embeddings: EmbeddingsInterface,
    fields: TypeORMVectorStoreArgs
  ): Promise<TypeORMVectorStore> {
    const postgresqlVectorStore = new TypeORMVectorStore(embeddings, fields);

    if (!postgresqlVectorStore.appDataSource.isInitialized) {
      await postgresqlVectorStore.appDataSource.initialize();
    }

    return postgresqlVectorStore;
  }

  /**
   * Method to add documents to the vector store. It ensures the existence
   * of the table in the database, converts the documents into vectors, and
   * adds them to the store.
   * @param documents Array of `Document` instances.
   * @returns Promise that resolves when the documents have been added.
   */
  async addDocuments(documents: Document[]): Promise<void> {
    const texts = documents.map(({ pageContent }) => pageContent);
    // This will create the table if it does not exist. We can call it every time as it doesn't
    // do anything if the table already exists, and it is not expensive in terms of performance
    await this.ensureTableInDatabase();
    return this.addVectors(
      await this.embeddings.embedDocuments(texts),
      documents
    );
  }

  /**
   * Method to add vectors to the vector store. It converts the vectors into
   * rows and inserts them into the database in chunks of 500.
   * @param vectors Array of vectors.
   * @param documents Array of `Document` instances.
   * @returns Promise that resolves when the vectors have been added.
   */
  async addVectors(vectors: number[][], documents: Document[]): Promise<void> {
    const rows = vectors.map((embedding, idx) => {
      // pgvector expects the literal text form "[x,y,z]".
      const embeddingString = `[${embedding.join(",")}]`;
      const documentRow = {
        pageContent: documents[idx].pageContent,
        embedding: embeddingString,
        metadata: documents[idx].metadata,
      };
      return documentRow;
    });

    const documentRepository = this.appDataSource.getRepository(
      this.documentEntity
    );

    const chunkSize = 500;
    for (let i = 0; i < rows.length; i += chunkSize) {
      const chunk = rows.slice(i, i + chunkSize);

      try {
        await documentRepository.save(chunk);
      } catch (e) {
        console.error(e);
        throw new Error(`Error inserting: ${chunk[0].pageContent}`);
      }
    }
  }

  /**
   * Method to perform a similarity search in the vector store. It returns
   * the `k` most similar documents to the query vector, along with their
   * similarity scores.
   * @param query Query vector.
   * @param k Number of most similar documents to return.
   * @param filter Optional jsonb containment filter applied to `metadata`.
   * @returns Promise that resolves with an array of tuples, each containing a `TypeORMVectorStoreDocument` and its similarity score.
   */
  async similaritySearchVectorWithScore(
    query: number[],
    k: number,
    filter?: this["FilterType"]
  ): Promise<[TypeORMVectorStoreDocument, number][]> {
    const embeddingString = `[${query.join(",")}]`;
    const _filter = filter ?? "{}";

    // NOTE(review): tableName is interpolated, not parameterized — it must
    // come from trusted configuration, never from user input.
    const queryString = `
      SELECT *, embedding <=> $1 as "_distance"
      FROM ${this.tableName}
      WHERE metadata @> $2
      ORDER BY "_distance" ASC
      LIMIT $3;`;

    const documents = await this.appDataSource.query(queryString, [
      embeddingString,
      _filter,
      k,
    ]);

    const results = [] as [TypeORMVectorStoreDocument, number][];
    for (const doc of documents) {
      if (doc._distance != null && doc.pageContent != null) {
        const document = new Document(doc) as TypeORMVectorStoreDocument;
        document.id = doc.id;
        results.push([document, doc._distance]);
      }
    }

    return results;
  }

  /**
   * Method to ensure the existence of the table in the database. It creates
   * the table if it does not already exist.
   * @returns Promise that resolves when the table has been ensured.
   */
  async ensureTableInDatabase(): Promise<void> {
    await this.appDataSource.query("CREATE EXTENSION IF NOT EXISTS vector;");
    await this.appDataSource.query(
      'CREATE EXTENSION IF NOT EXISTS "uuid-ossp";'
    );

    await this.appDataSource.query(`
      CREATE TABLE IF NOT EXISTS ${this.tableName} (
        "id" uuid NOT NULL DEFAULT uuid_generate_v4() PRIMARY KEY,
        "pageContent" text,
        metadata jsonb,
        embedding vector
      );
    `);
  }

  /**
   * Static method to create a new `TypeORMVectorStore` instance from an
   * array of texts and their metadata. It converts the texts into
   * `Document` instances and adds them to the store.
   * @param texts Array of texts.
   * @param metadatas Array of metadata objects or a single metadata object.
   * @param embeddings Embeddings instance.
   * @param dbConfig `TypeORMVectorStoreArgs` instance.
   * @returns Promise that resolves with a new instance of `TypeORMVectorStore`.
   */
  static async fromTexts(
    texts: string[],
    metadatas: object[] | object,
    embeddings: EmbeddingsInterface,
    dbConfig: TypeORMVectorStoreArgs
  ): Promise<TypeORMVectorStore> {
    const docs = [];
    for (let i = 0; i < texts.length; i += 1) {
      const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
      const newDoc = new Document({
        pageContent: texts[i],
        metadata,
      });
      docs.push(newDoc);
    }

    return TypeORMVectorStore.fromDocuments(docs, embeddings, dbConfig);
  }

  /**
   * Static method to create a new `TypeORMVectorStore` instance from an
   * array of `Document` instances. It adds the documents to the store.
   * @param docs Array of `Document` instances.
   * @param embeddings Embeddings instance.
   * @param dbConfig `TypeORMVectorStoreArgs` instance.
   * @returns Promise that resolves with a new instance of `TypeORMVectorStore`.
   */
  static async fromDocuments(
    docs: Document[],
    embeddings: EmbeddingsInterface,
    dbConfig: TypeORMVectorStoreArgs
  ): Promise<TypeORMVectorStore> {
    const instance = await TypeORMVectorStore.fromDataSource(
      embeddings,
      dbConfig
    );
    await instance.addDocuments(docs);

    return instance;
  }

  /**
   * Static method to create a new `TypeORMVectorStore` instance from an
   * existing index.
   * @param embeddings Embeddings instance.
   * @param dbConfig `TypeORMVectorStoreArgs` instance.
   * @returns Promise that resolves with a new instance of `TypeORMVectorStore`.
   */
  static async fromExistingIndex(
    embeddings: EmbeddingsInterface,
    dbConfig: TypeORMVectorStoreArgs
  ): Promise<TypeORMVectorStore> {
    const instance = await TypeORMVectorStore.fromDataSource(
      embeddings,
      dbConfig
    );
    return instance;
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/neon.ts
|
import { neon } from "@neondatabase/serverless";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { VectorStore } from "@langchain/core/vectorstores";
import { Document } from "@langchain/core/documents";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
type Metadata = Record<string, string | number | Record<"in", string[]>>;
/**
* Interface that defines the arguments required to create a
* `NeonPostgres` instance. It includes Postgres connection options,
* table name, filter, and verbosity level.
*/
export interface NeonPostgresArgs {
  // Neon Postgres connection string (passed to the `neon` serverless driver).
  connectionString: string;
  // Table to store documents in. @default 'vectorstore_documents'
  tableName?: string;
  columns?: {
    // @default 'id'
    idColumnName?: string;
    // @default 'embedding'
    vectorColumnName?: string;
    // @default 'text'
    contentColumnName?: string;
    // @default 'metadata'
    metadataColumnName?: string;
  };
  // Default metadata filter.
  filter?: Metadata;
  // Verbosity flag.
  verbose?: boolean;
}
/**
* Class that provides an interface to a Neon Postgres database. It
* extends the `VectorStore` base class and implements methods for adding
* documents and vectors, performing similarity searches, and ensuring the
* existence of a table in the database.
*/
export class NeonPostgres extends VectorStore {
  declare FilterType: Metadata;

  // Table documents are stored in (default "vectorstore_documents").
  tableName: string;

  // Column names, configurable through `NeonPostgresArgs.columns`.
  idColumnName: string;

  vectorColumnName: string;

  contentColumnName: string;

  metadataColumnName: string;

  // NOTE(review): stored from config but not referenced in this part of the
  // file — presumably a default search filter; verify against callers.
  filter?: Metadata;

  _verbose?: boolean;

  // Connection string handed to the `neon` serverless driver per query.
  neonConnectionString: string;

  _vectorstoreType(): string {
    return "neon-postgres";
  }
/**
 * Creates a NeonPostgres vector store. Does not touch the database;
 * call `initialize` / `ensureTableInDatabase` to create the table.
 * @param embeddings Embeddings instance used to vectorize documents.
 * @param config Connection string, table/column names, filter and verbosity.
 */
constructor(embeddings: EmbeddingsInterface, config: NeonPostgresArgs) {
  super(embeddings, config);
  // BUG FIX: `=== "true"` always produces a boolean, so the original
  // `?? !!config.verbose` fallback was unreachable and `config.verbose`
  // was silently ignored. Enable verbosity from either source.
  this._verbose =
    getEnvironmentVariable("LANGCHAIN_VERBOSE") === "true" ||
    !!config.verbose;
  this.neonConnectionString = config.connectionString;
  this.tableName = config.tableName ?? "vectorstore_documents";
  this.filter = config.filter;
  this.vectorColumnName = config.columns?.vectorColumnName ?? "embedding";
  this.contentColumnName = config.columns?.contentColumnName ?? "text";
  this.idColumnName = config.columns?.idColumnName ?? "id";
  this.metadataColumnName = config.columns?.metadataColumnName ?? "metadata";
}
/**
* Static method to create a new `NeonPostgres` instance from a
* connection. It creates a table if one does not exist.
*
* @param embeddings - Embeddings instance.
* @param config - `NeonPostgresArgs` instance.
* @returns A new instance of `NeonPostgres`.
*/
/**
 * Build a `NeonPostgres` store and make sure its backing table exists
 * before returning it.
 * @param embeddings Embeddings instance.
 * @param config `NeonPostgresArgs` instance.
 * @returns A ready-to-use `NeonPostgres` instance.
 */
static async initialize(
  embeddings: EmbeddingsInterface,
  config: NeonPostgresArgs
): Promise<NeonPostgres> {
  const store = new NeonPostgres(embeddings, config);
  await store.ensureTableInDatabase();
  return store;
}
/**
 * Builds and executes the upsert query for the given rows against the
 * configured table.
 *
 * @param rows - The rows of data to insert: one array of positional values per row.
 * @param useIdColumn - Whether each row starts with an explicit id value (enables upsert by id).
 * @returns The rows returned by the query (the ids of the inserted/updated records).
 */
protected async runInsertQuery(
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  rows: (string | Record<string, any>)[][],
  useIdColumn: boolean
) {
  // Build one "($1, $2, ...)" placeholder tuple per row. All rows in a
  // chunk have the same length, so indices advance by row.length per row.
  const placeholders = rows.map((row, index) => {
    const base = index * row.length;
    return `(${row.map((_, j) => `$${base + 1 + j}`)})`;
  });
  // Upsert on the id column: conflicting rows have their content, vector
  // and metadata overwritten (EXCLUDED = the row proposed for insertion).
  const queryString = `
    INSERT INTO ${this.tableName} (
      ${useIdColumn ? `${this.idColumnName},` : ""}
      ${this.contentColumnName},
      ${this.vectorColumnName},
      ${this.metadataColumnName}
    ) VALUES ${placeholders.join(", ")}
    ON CONFLICT (${this.idColumnName})
    DO UPDATE
    SET
      ${this.contentColumnName} = EXCLUDED.${this.contentColumnName},
      ${this.vectorColumnName} = EXCLUDED.${this.vectorColumnName},
      ${this.metadataColumnName} = EXCLUDED.${this.metadataColumnName}
    RETURNING ${this.idColumnName}
  `;
  // Flatten row tuples into the positional parameter list matching the
  // placeholders generated above.
  const flatValues = rows.flat();
  const sql = neon(this.neonConnectionString);
  return await sql(queryString, flatValues);
}
/**
* Method to add vectors to the vector store. It converts the vectors into
* rows and inserts them into the database.
*
* @param vectors - Array of vectors.
* @param documents - Array of `Document` instances.
* @param options - Optional arguments for adding documents
* @returns Promise that resolves when the vectors have been added.
*/
async addVectors(
vectors: number[][],
documents: Document[],
options?: { ids?: string[] }
): Promise<string[]> {
if (options?.ids !== undefined && options?.ids.length !== vectors.length) {
throw new Error(
`If provided, the length of "ids" must be the same as the number of vectors.`
);
}
const rows = vectors.map((embedding, idx) => {
const embeddingString = `[${embedding.join(",")}]`;
const row = [
documents[idx].pageContent,
embeddingString,
documents[idx].metadata,
];
if (options?.ids) {
return [options.ids[idx], ...row];
}
return row;
});
const chunkSize = 500;
const ids = [];
for (let i = 0; i < rows.length; i += chunkSize) {
const chunk = rows.slice(i, i + chunkSize);
try {
const result = await this.runInsertQuery(
chunk,
options?.ids !== undefined
);
ids.push(...result.map((row) => row[this.idColumnName]));
} catch (e) {
console.error(e);
throw new Error(`Error inserting: ${(e as Error).message}`);
}
}
return ids;
}
/**
* Method to perform a similarity search in the vector store. It returns
* the `k` most similar documents to the query vector, along with their
* similarity scores.
*
* @param query - Query vector.
* @param k - Number of most similar documents to return.
* @param filter - Optional filter to apply to the search.
* @returns Promise that resolves with an array of tuples, each containing a `Document` and its similarity score.
*/
async similaritySearchVectorWithScore(
  query: number[],
  k: number,
  filter?: this["FilterType"]
): Promise<[Document, number][]> {
  // pgvector literal text form "[x,y,z]".
  const embeddingString = `[${query.join(",")}]`;
  const _filter: this["FilterType"] = filter ?? {};
  const whereClauses = [];
  const parameters = [embeddingString, k];
  let paramCount = parameters.length;
  // The vector to query with, and the num of results are the first
  // two parameters. The rest of the parameters are the filter values
  for (const [key, value] of Object.entries(_filter)) {
    if (typeof value === "object" && value !== null) {
      // `{ in: [...] }` membership filter: one placeholder per list item.
      const currentParamCount = paramCount;
      const placeholders = value.in
        .map((_, index) => `$${currentParamCount + index + 1}`)
        .join(",");
      whereClauses.push(
        `${this.metadataColumnName}->>'${key}' IN (${placeholders})`
      );
      parameters.push(...value.in);
      paramCount += value.in.length;
    } else {
      // Scalar equality filter: single placeholder.
      paramCount += 1;
      whereClauses.push(
        `${this.metadataColumnName}->>'${key}' = $${paramCount}`
      );
      parameters.push(value);
    }
  }
  const whereClause = whereClauses.length
    ? `WHERE ${whereClauses.join(" AND ")}`
    : "";
  // `<=>` is pgvector's cosine-distance operator; smaller = more similar.
  const queryString = `
    SELECT *, ${this.vectorColumnName} <=> $1 as "_distance"
    FROM ${this.tableName}
    ${whereClause}
    ORDER BY "_distance" ASC
    LIMIT $2;`;
  const sql = neon(this.neonConnectionString);
  const documents = await sql(queryString, parameters);
  const results = [] as [Document, number][];
  for (const doc of documents) {
    // Skip rows that have no distance or no content.
    if (doc._distance != null && doc[this.contentColumnName] != null) {
      const document = new Document({
        pageContent: doc[this.contentColumnName],
        metadata: doc[this.metadataColumnName],
      });
      results.push([document, doc._distance]);
    }
  }
  return results;
}
/**
* Method to add documents to the vector store. It converts the documents into
* vectors, and adds them to the store.
*
* @param documents - Array of `Document` instances.
* @param options - Optional arguments for adding documents
* @returns Promise that resolves when the documents have been added.
*/
/**
 * Embeds the given documents and stores them in the table.
 *
 * @param documents - Array of `Document` instances.
 * @param options - Optional explicit row ids forwarded to `addVectors`.
 * @returns Promise resolving to the ids of the stored rows.
 */
async addDocuments(
  documents: Document[],
  options?: { ids?: string[] }
): Promise<string[]> {
  const contents = documents.map((doc) => doc.pageContent);
  const embeddings = await this.embeddings.embedDocuments(contents);
  return this.addVectors(embeddings, documents, options);
}
/**
* Method to delete documents from the vector store. It deletes the
* documents that match the provided ids.
*
* @param ids - Array of document ids.
* @param deleteAll - Boolean to delete all documents.
* @returns Promise that resolves when the documents have been deleted.
*/
async delete(params: { ids?: string[]; deleteAll?: boolean }): Promise<void> {
  const sql = neon(this.neonConnectionString);
  if (params.ids !== undefined) {
    // Delete by id list; ids are bound as positional parameters $1..$n.
    await sql(
      `DELETE FROM ${this.tableName}
      WHERE ${this.idColumnName}
      IN (${params.ids.map((_, idx) => `$${idx + 1}`)})`,
      params.ids
    );
  } else if (params.deleteAll) {
    // No ids given: wipe the whole table.
    await sql(`TRUNCATE TABLE ${this.tableName}`);
  }
}
/**
* Method to ensure the existence of the table to store vectors in
* the database. It creates the table if it does not already exist.
*
* @returns Promise that resolves when the table has been ensured.
*/
async ensureTableInDatabase(): Promise<void> {
const sql = neon(this.neonConnectionString);
await sql(`CREATE EXTENSION IF NOT EXISTS vector;`);
await sql(`CREATE EXTENSION IF NOT EXISTS "uuid-ossp";`);
await sql(`
CREATE TABLE IF NOT EXISTS ${this.tableName} (
${this.idColumnName} uuid NOT NULL DEFAULT uuid_generate_v4() PRIMARY KEY,
${this.contentColumnName} text,
${this.metadataColumnName} jsonb,
${this.vectorColumnName} vector
);
`);
}
/**
* Static method to create a new `NeonPostgres` instance from an
* array of texts and their metadata. It converts the texts into
* `Document` instances and adds them to the store.
*
* @param texts - Array of texts.
* @param metadatas - Array of metadata objects or a single metadata object.
* @param embeddings - Embeddings instance.
* @param dbConfig - `NeonPostgresArgs` instance.
* @returns Promise that resolves with a new instance of `NeonPostgresArgs`.
*/
static async fromTexts(
texts: string[],
metadatas: object[] | object,
embeddings: EmbeddingsInterface,
dbConfig: NeonPostgresArgs
): Promise<NeonPostgres> {
const docs = [];
for (let i = 0; i < texts.length; i += 1) {
const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
const newDoc = new Document({
pageContent: texts[i],
metadata,
});
docs.push(newDoc);
}
return this.fromDocuments(docs, embeddings, dbConfig);
}
/**
* Static method to create a new `NeonPostgres` instance from an
* array of `Document` instances. It adds the documents to the store.
*
* @param docs - Array of `Document` instances.
* @param embeddings - Embeddings instance.
* @param dbConfig - `NeonPostgreseArgs` instance.
* @returns Promise that resolves with a new instance of `NeonPostgres`.
*/
static async fromDocuments(
docs: Document[],
embeddings: EmbeddingsInterface,
dbConfig: NeonPostgresArgs
): Promise<NeonPostgres> {
const instance = await this.initialize(embeddings, dbConfig);
await instance.addDocuments(docs);
return instance;
}
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/pgvector.ts
|
import pg, { type Pool, type PoolClient, type PoolConfig } from "pg";
import { VectorStore } from "@langchain/core/vectorstores";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { Document } from "@langchain/core/documents";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
// Arbitrary JSON-serializable metadata attached to each stored document.
type Metadata = Record<string, unknown>;
// Distance metric used when comparing vectors; maps to the pgvector
// operators <=> (cosine), <#> (inner product) and <-> (euclidean).
export type DistanceStrategy = "cosine" | "innerProduct" | "euclidean";
/**
 * Interface that defines the arguments required to create a
 * `PGVectorStore` instance. It includes Postgres connection options,
 * table name, filter, and verbosity level.
 */
export interface PGVectorStoreArgs {
  /** Options forwarded to `pg.Pool`; required unless `pool` is given. */
  postgresConnectionOptions?: PoolConfig;
  /** An existing `pg` pool to reuse instead of creating one. */
  pool?: Pool;
  /** Name of the table holding the vectors. */
  tableName: string;
  /** Table storing named collections; required when `collectionName` is set. */
  collectionTableName?: string;
  /** Logical collection to scope reads/writes to (default "langchain"). */
  collectionName?: string;
  /** Metadata stored alongside the collection row. */
  collectionMetadata?: Metadata | null;
  /** Postgres schema containing the tables (default: search path). */
  schemaName?: string | null;
  /** Schema where the pgvector / uuid-ossp extensions are installed. */
  extensionSchemaName?: string | null;
  /** Overrides for the default column names. */
  columns?: {
    idColumnName?: string;
    vectorColumnName?: string;
    contentColumnName?: string;
    metadataColumnName?: string;
  };
  /** Default metadata filter applied to searches. */
  filter?: Metadata;
  /** Enables verbose logging (also controlled by LANGCHAIN_VERBOSE). */
  verbose?: boolean;
  /**
   * The amount of documents to chunk by when
   * adding vectors.
   * @default 500
   */
  chunkSize?: number;
  /** Explicit row ids used by the `fromDocuments` / `fromTexts` helpers. */
  ids?: string[];
  /** Distance metric to search with (default "cosine"). */
  distanceStrategy?: DistanceStrategy;
}
/**
* PGVector vector store integration.
*
* Setup:
* Install `@langchain/community` and `pg`.
*
* If you wish to generate ids, you should also install the `uuid` package.
*
* ```bash
* npm install @langchain/community pg uuid
* ```
*
* ## [Constructor args](https://api.js.langchain.com/classes/_langchain_community.vectorstores_pgvector.PGVectorStore.html#constructor)
*
* <details open>
* <summary><strong>Instantiate</strong></summary>
*
* ```typescript
* import {
* PGVectorStore,
* DistanceStrategy,
* } from "@langchain/community/vectorstores/pgvector";
*
* // Or other embeddings
* import { OpenAIEmbeddings } from "@langchain/openai";
* import { PoolConfig } from "pg";
*
* const embeddings = new OpenAIEmbeddings({
* model: "text-embedding-3-small",
* });
*
* // Sample config
* const config = {
* postgresConnectionOptions: {
* type: "postgres",
* host: "127.0.0.1",
* port: 5433,
* user: "myuser",
* password: "ChangeMe",
* database: "api",
* } as PoolConfig,
* tableName: "testlangchainjs",
* columns: {
* idColumnName: "id",
* vectorColumnName: "vector",
* contentColumnName: "content",
* metadataColumnName: "metadata",
* },
* // supported distance strategies: cosine (default), innerProduct, or euclidean
* distanceStrategy: "cosine" as DistanceStrategy,
* };
*
* const vectorStore = await PGVectorStore.initialize(embeddings, config);
* ```
* </details>
*
* <br />
*
* <details>
* <summary><strong>Add documents</strong></summary>
*
* ```typescript
* import type { Document } from '@langchain/core/documents';
*
* const document1 = { pageContent: "foo", metadata: { baz: "bar" } };
* const document2 = { pageContent: "thud", metadata: { bar: "baz" } };
* const document3 = { pageContent: "i will be deleted :(", metadata: {} };
*
* const documents: Document[] = [document1, document2, document3];
* const ids = ["1", "2", "3"];
* await vectorStore.addDocuments(documents, { ids });
* ```
* </details>
*
* <br />
*
* <details>
* <summary><strong>Delete documents</strong></summary>
*
* ```typescript
* await vectorStore.delete({ ids: ["3"] });
* ```
* </details>
*
* <br />
*
* <details>
* <summary><strong>Similarity search</strong></summary>
*
* ```typescript
* const results = await vectorStore.similaritySearch("thud", 1);
* for (const doc of results) {
* console.log(`* ${doc.pageContent} [${JSON.stringify(doc.metadata, null)}]`);
* }
* // Output: * thud [{"baz":"bar"}]
* ```
* </details>
*
* <br />
*
*
* <details>
* <summary><strong>Similarity search with filter</strong></summary>
*
* ```typescript
* const resultsWithFilter = await vectorStore.similaritySearch("thud", 1, { baz: "bar" });
*
* for (const doc of resultsWithFilter) {
* console.log(`* ${doc.pageContent} [${JSON.stringify(doc.metadata, null)}]`);
* }
* // Output: * foo [{"baz":"bar"}]
* ```
* </details>
*
* <br />
*
*
* <details>
* <summary><strong>Similarity search with score</strong></summary>
*
* ```typescript
* const resultsWithScore = await vectorStore.similaritySearchWithScore("qux", 1);
* for (const [doc, score] of resultsWithScore) {
* console.log(`* [SIM=${score.toFixed(6)}] ${doc.pageContent} [${JSON.stringify(doc.metadata, null)}]`);
* }
* // Output: * [SIM=0.000000] qux [{"bar":"baz","baz":"bar"}]
* ```
* </details>
*
* <br />
*
* <details>
* <summary><strong>As a retriever</strong></summary>
*
* ```typescript
* const retriever = vectorStore.asRetriever({
* searchType: "mmr", // Leave blank for standard similarity search
* k: 1,
* });
* const resultAsRetriever = await retriever.invoke("thud");
* console.log(resultAsRetriever);
*
* // Output: [Document({ metadata: { "baz":"bar" }, pageContent: "thud" })]
* ```
* </details>
*
* <br />
*/
export class PGVectorStore extends VectorStore {
  declare FilterType: Metadata;
  // Name of the table holding id/content/metadata/vector columns.
  tableName: string;
  // Optional table that stores named collections (multi-tenant scoping).
  collectionTableName?: string;
  collectionName = "langchain";
  collectionMetadata: Metadata | null;
  schemaName: string | null;
  idColumnName: string;
  vectorColumnName: string;
  contentColumnName: string;
  // Schema where the pgvector / uuid-ossp extensions live, if non-default.
  extensionSchemaName: string | null;
  metadataColumnName: string;
  // Default metadata filter for searches.
  filter?: Metadata;
  _verbose?: boolean;
  pool: Pool;
  // Dedicated client checked out by _initializeClient; released by end().
  client?: PoolClient;
  // Rows are inserted in batches of this size.
  chunkSize = 500;
  distanceStrategy?: DistanceStrategy = "cosine";
  _vectorstoreType(): string {
    return "pgvector";
  }
  /**
   * @param embeddings - Embeddings implementation used for all vectorization.
   * @param config - Connection, table and column configuration.
   * @throws If `collectionName` is set without `collectionTableName`, or if
   *   neither `postgresConnectionOptions` nor `pool` is provided.
   */
  constructor(embeddings: EmbeddingsInterface, config: PGVectorStoreArgs) {
    super(embeddings, config);
    this.tableName = config.tableName;
    // A collection name is meaningless without a table to store it in.
    if (
      config.collectionName !== undefined &&
      config.collectionTableName === undefined
    ) {
      throw new Error(
        `If supplying a "collectionName", you must also supply a "collectionTableName".`
      );
    }
    this.collectionTableName = config.collectionTableName;
    this.collectionName = config.collectionName ?? "langchain";
    this.collectionMetadata = config.collectionMetadata ?? null;
    this.schemaName = config.schemaName ?? null;
    this.extensionSchemaName = config.extensionSchemaName ?? null;
    this.filter = config.filter;
    this.vectorColumnName = config.columns?.vectorColumnName ?? "embedding";
    this.contentColumnName = config.columns?.contentColumnName ?? "text";
    this.idColumnName = config.columns?.idColumnName ?? "id";
    this.metadataColumnName = config.columns?.metadataColumnName ?? "metadata";
    if (!config.postgresConnectionOptions && !config.pool) {
      throw new Error(
        "You must provide either a `postgresConnectionOptions` object or a `pool` instance."
      );
    }
    const pool = config.pool ?? new pg.Pool(config.postgresConnectionOptions);
    this.pool = pool;
    this.chunkSize = config.chunkSize ?? 500;
    this.distanceStrategy = config.distanceStrategy ?? this.distanceStrategy;
    // The LANGCHAIN_VERBOSE env var overrides the config flag when it is
    // explicitly "true" or "false".
    const langchainVerbose = getEnvironmentVariable("LANGCHAIN_VERBOSE");
    if (langchainVerbose === "true") {
      this._verbose = true;
    } else if (langchainVerbose === "false") {
      this._verbose = false;
    } else {
      this._verbose = config.verbose;
    }
  }
get computedTableName() {
return this.schemaName == null
? `${this.tableName}`
: `"${this.schemaName}"."${this.tableName}"`;
}
get computedCollectionTableName() {
return this.schemaName == null
? `${this.collectionTableName}`
: `"${this.schemaName}"."${this.collectionTableName}"`;
}
get computedOperatorString() {
let operator: string;
switch (this.distanceStrategy) {
case "cosine":
operator = "<=>";
break;
case "innerProduct":
operator = "<#>";
break;
case "euclidean":
operator = "<->";
break;
default:
throw new Error(`Unknown distance strategy: ${this.distanceStrategy}`);
}
return this.extensionSchemaName !== null
? `OPERATOR(${this.extensionSchemaName}.${operator})`
: operator;
}
/**
* Static method to create a new `PGVectorStore` instance from a
* connection. It creates a table if one does not exist, and calls
* `connect` to return a new instance of `PGVectorStore`.
*
* @param embeddings - Embeddings instance.
* @param fields - `PGVectorStoreArgs` instance
* @param fields.dimensions Number of dimensions in your vector data type. For example, use 1536 for OpenAI's `text-embedding-3-small`. If not set, indexes like HNSW might not be used during query time.
* @returns A new instance of `PGVectorStore`.
*/
static async initialize(
embeddings: EmbeddingsInterface,
config: PGVectorStoreArgs & { dimensions?: number }
): Promise<PGVectorStore> {
const { dimensions, ...rest } = config;
const postgresqlVectorStore = new PGVectorStore(embeddings, rest);
await postgresqlVectorStore._initializeClient();
await postgresqlVectorStore.ensureTableInDatabase(dimensions);
if (postgresqlVectorStore.collectionTableName) {
await postgresqlVectorStore.ensureCollectionTableInDatabase();
}
return postgresqlVectorStore;
}
protected async _initializeClient() {
this.client = await this.pool.connect();
}
/**
* Method to add documents to the vector store. It converts the documents into
* vectors, and adds them to the store.
*
* @param documents - Array of `Document` instances.
* @param options - Optional arguments for adding documents
* @returns Promise that resolves when the documents have been added.
*/
async addDocuments(
documents: Document[],
options?: { ids?: string[] }
): Promise<void> {
const texts = documents.map(({ pageContent }) => pageContent);
return this.addVectors(
await this.embeddings.embedDocuments(texts),
documents,
options
);
}
/**
* Inserts a row for the collectionName provided at initialization if it does not
* exist and returns the collectionId.
*
* @returns The collectionId for the given collectionName.
*/
async getOrCreateCollection(): Promise<string> {
const queryString = `
SELECT uuid from ${this.computedCollectionTableName}
WHERE name = $1;
`;
const queryResult = await this.pool.query(queryString, [
this.collectionName,
]);
let collectionId = queryResult.rows[0]?.uuid;
if (!collectionId) {
const insertString = `
INSERT INTO ${this.computedCollectionTableName}(
uuid,
name,
cmetadata
)
VALUES (
uuid_generate_v4(),
$1,
$2
)
RETURNING uuid;
`;
const insertResult = await this.pool.query(insertString, [
this.collectionName,
this.collectionMetadata,
]);
collectionId = insertResult.rows[0]?.uuid;
}
return collectionId;
}
/**
* Generates the SQL placeholders for a specific row at the provided index.
*
* @param index - The index of the row for which placeholders need to be generated.
* @param numOfColumns - The number of columns we are inserting data into.
* @returns The SQL placeholders for the row values.
*/
private generatePlaceholderForRowAt(
index: number,
numOfColumns: number
): string {
const placeholders = [];
for (let i = 0; i < numOfColumns; i += 1) {
placeholders.push(`$${index * numOfColumns + i + 1}`);
}
return `(${placeholders.join(", ")})`;
}
  /**
   * Constructs the SQL query for inserting rows into the specified table.
   * Placeholders are numbered per chunk, so the caller must bind the
   * flattened values of exactly the rows passed here.
   *
   * @param rows - The rows of data to be inserted, consisting of values and records.
   * @returns The complete SQL INSERT INTO query string.
   */
  private async buildInsertQuery(rows: (string | Record<string, unknown>)[][]) {
    let collectionId;
    if (this.collectionTableName) {
      collectionId = await this.getOrCreateCollection();
    }
    const columns = [
      this.contentColumnName,
      this.vectorColumnName,
      this.metadataColumnName,
    ];
    if (collectionId) {
      columns.push("collection_id");
    }
    // Check if we have added ids to the rows.
    if (rows.length !== 0 && columns.length === rows[0].length - 1) {
      columns.push(this.idColumnName);
    }
    const valuesPlaceholders = rows
      .map((_, j) => this.generatePlaceholderForRowAt(j, columns.length))
      .join(", ");
    const text = `
    INSERT INTO ${this.computedTableName}(
      ${columns.map((column) => `"${column}"`).join(", ")}
    )
    VALUES ${valuesPlaceholders}
    `;
    return text;
  }
  /**
   * Method to add vectors to the vector store. It converts the vectors into
   * rows and inserts them into the database in batches of `chunkSize`.
   *
   * @param vectors - Array of vectors, aligned with `documents`.
   * @param documents - Array of `Document` instances.
   * @param options - Optional arguments; `ids` assigns explicit row ids.
   * @returns Promise that resolves when the vectors have been added.
   * @throws If `ids` is provided but its length differs from `vectors`.
   */
  async addVectors(
    vectors: number[][],
    documents: Document[],
    options?: { ids?: string[] }
  ): Promise<void> {
    const ids = options?.ids;
    // Either all documents have ids or none of them do to avoid confusion.
    if (ids !== undefined && ids.length !== vectors.length) {
      throw new Error(
        "The number of ids must match the number of vectors provided."
      );
    }
    const rows = [];
    let collectionId;
    if (this.collectionTableName) {
      collectionId = await this.getOrCreateCollection();
    }
    for (let i = 0; i < vectors.length; i += 1) {
      const values = [];
      const embedding = vectors[i];
      // pgvector accepts vectors as a "[v1,v2,...]" string literal.
      const embeddingString = `[${embedding.join(",")}]`;
      // NUL bytes are not valid in Postgres text values; strip them.
      values.push(
        documents[i].pageContent.replace(/\0/g, ""),
        embeddingString.replace(/\0/g, ""),
        documents[i].metadata
      );
      if (collectionId) {
        values.push(collectionId);
      }
      if (ids) {
        values.push(ids[i]);
      }
      rows.push(values);
    }
    // Insert in chunks so very large batches stay within parameter limits.
    for (let i = 0; i < rows.length; i += this.chunkSize) {
      const chunk = rows.slice(i, i + this.chunkSize);
      const insertQuery = await this.buildInsertQuery(chunk);
      const flatValues = chunk.flat();
      try {
        await this.pool.query(insertQuery, flatValues);
      } catch (e) {
        console.error(e);
        throw new Error(`Error inserting: ${(e as Error).message}`);
      }
    }
  }
/**
* Method to delete documents from the vector store. It deletes the
* documents that match the provided ids.
*
* @param ids - Array of document ids.
* @returns Promise that resolves when the documents have been deleted.
*/
private async deleteById(ids: string[]) {
let collectionId;
if (this.collectionTableName) {
collectionId = await this.getOrCreateCollection();
}
// Set parameters of dynamically generated query
const params = collectionId ? [ids, collectionId] : [ids];
const queryString = `
DELETE FROM ${this.computedTableName}
WHERE ${collectionId ? "collection_id = $2 AND " : ""}${
this.idColumnName
} = ANY($1::uuid[])
`;
await this.pool.query(queryString, params);
}
/**
* Method to delete documents from the vector store. It deletes the
* documents whose metadata contains the filter.
*
* @param filter - An object representing the Metadata filter.
* @returns Promise that resolves when the documents have been deleted.
*/
private async deleteByFilter(filter: Metadata) {
let collectionId;
if (this.collectionTableName) {
collectionId = await this.getOrCreateCollection();
}
// Set parameters of dynamically generated query
const params = collectionId ? [filter, collectionId] : [filter];
const queryString = `
DELETE FROM ${this.computedTableName}
WHERE ${collectionId ? "collection_id = $2 AND " : ""}${
this.metadataColumnName
}::jsonb @> $1
`;
return await this.pool.query(queryString, params);
}
/**
* Method to delete documents from the vector store. It deletes the
* documents that match the provided ids or metadata filter. Matches ids
* exactly and metadata filter according to postgres jsonb containment. Ids and filter
* are mutually exclusive.
*
* @param params - Object containing either an array of ids or a metadata filter object.
* @returns Promise that resolves when the documents have been deleted.
* @throws Error if neither ids nor filter are provided, or if both are provided.
* @example <caption>Delete by ids</caption>
* await vectorStore.delete({ ids: ["id1", "id2"] });
* @example <caption>Delete by filter</caption>
* await vectorStore.delete({ filter: { a: 1, b: 2 } });
*/
async delete(params: { ids?: string[]; filter?: Metadata }): Promise<void> {
const { ids, filter } = params;
if (!(ids || filter)) {
throw new Error(
"You must specify either ids or a filter when deleting documents."
);
}
if (ids && filter) {
throw new Error(
"You cannot specify both ids and a filter when deleting documents."
);
}
if (ids) {
await this.deleteById(ids);
} else if (filter) {
await this.deleteByFilter(filter);
}
}
/**
* Method to perform a similarity search in the vector store. It returns
* the `k` most similar documents to the query vector, along with their
* similarity scores.
*
* @param query - Query vector.
* @param k - Number of most similar documents to return.
* @param filter - Optional filter to apply to the search.
* @returns Promise that resolves with an array of tuples, each containing a `Document` and its similarity score.
*/
async similaritySearchVectorWithScore(
query: number[],
k: number,
filter?: this["FilterType"]
): Promise<[Document, number][]> {
const embeddingString = `[${query.join(",")}]`;
const _filter: this["FilterType"] = filter ?? {};
let collectionId;
if (this.collectionTableName) {
collectionId = await this.getOrCreateCollection();
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const parameters: unknown[] = [embeddingString, k];
const whereClauses = [];
if (collectionId) {
whereClauses.push("collection_id = $3");
parameters.push(collectionId);
}
let paramCount = parameters.length;
for (const [key, value] of Object.entries(_filter)) {
if (typeof value === "object" && value !== null) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const _value: Record<string, any> = value;
const currentParamCount = paramCount;
if (Array.isArray(_value.in)) {
const placeholders = _value.in
.map(
(_: unknown, index: number) => `$${currentParamCount + index + 1}`
)
.join(",");
whereClauses.push(
`${this.metadataColumnName}->>'${key}' IN (${placeholders})`
);
parameters.push(..._value.in);
paramCount += _value.in.length;
}
if (Array.isArray(_value.arrayContains)) {
const placeholders = _value.arrayContains
.map(
(_: unknown, index: number) => `$${currentParamCount + index + 1}`
)
.join(",");
whereClauses.push(
`${this.metadataColumnName}->'${key}' ?| array[${placeholders}]`
);
parameters.push(..._value.arrayContains);
paramCount += _value.arrayContains.length;
}
} else {
paramCount += 1;
whereClauses.push(
`${this.metadataColumnName}->>'${key}' = $${paramCount}`
);
parameters.push(value);
}
}
const whereClause = whereClauses.length
? `WHERE ${whereClauses.join(" AND ")}`
: "";
const queryString = `
SELECT *, "${this.vectorColumnName}" ${this.computedOperatorString} $1 as "_distance"
FROM ${this.computedTableName}
${whereClause}
ORDER BY "_distance" ASC
LIMIT $2;
`;
const documents = (await this.pool.query(queryString, parameters)).rows;
const results = [] as [Document, number][];
for (const doc of documents) {
if (doc._distance != null && doc[this.contentColumnName] != null) {
const document = new Document({
pageContent: doc[this.contentColumnName],
metadata: doc[this.metadataColumnName],
id: doc[this.idColumnName],
});
results.push([document, doc._distance]);
}
}
return results;
}
/**
* Method to ensure the existence of the table in the database. It creates
* the table if it does not already exist.
* @param dimensions Number of dimensions in your vector data type. For example, use 1536 for OpenAI's `text-embedding-3-small`. If not set, indexes like HNSW might not be used during query time.
* @returns Promise that resolves when the table has been ensured.
*/
async ensureTableInDatabase(dimensions?: number): Promise<void> {
const vectorQuery =
this.extensionSchemaName == null
? "CREATE EXTENSION IF NOT EXISTS vector;"
: `CREATE EXTENSION IF NOT EXISTS vector WITH SCHEMA "${this.extensionSchemaName}";`;
const uuidQuery =
this.extensionSchemaName == null
? 'CREATE EXTENSION IF NOT EXISTS "uuid-ossp";'
: `CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA "${this.extensionSchemaName}";`;
const extensionName =
this.extensionSchemaName == null
? "vector"
: `"${this.extensionSchemaName}"."vector"`;
const vectorColumnType = dimensions
? `${extensionName}(${dimensions})`
: extensionName;
const tableQuery = `
CREATE TABLE IF NOT EXISTS ${this.computedTableName} (
"${this.idColumnName}" uuid NOT NULL DEFAULT uuid_generate_v4() PRIMARY KEY,
"${this.contentColumnName}" text,
"${this.metadataColumnName}" jsonb,
"${this.vectorColumnName}" ${vectorColumnType}
);
`;
await this.pool.query(vectorQuery);
await this.pool.query(uuidQuery);
await this.pool.query(tableQuery);
}
  /**
   * Creates the collection table if needed and wires the vector table to it
   * via a `collection_id` foreign key. ALTER TABLE has no IF NOT EXISTS
   * form, so reruns raise "already exists" errors, which are deliberately
   * ignored.
   *
   * @returns Promise that resolves when the collection table has been ensured.
   */
  async ensureCollectionTableInDatabase(): Promise<void> {
    try {
      const queryString = `
        CREATE TABLE IF NOT EXISTS ${this.computedCollectionTableName} (
          uuid uuid NOT NULL DEFAULT uuid_generate_v4() PRIMARY KEY,
          name character varying,
          cmetadata jsonb
        );
        CREATE INDEX IF NOT EXISTS idx_${this.collectionTableName}_name ON ${this.computedCollectionTableName}(name);
        ALTER TABLE ${this.computedTableName}
          ADD COLUMN collection_id uuid;
        ALTER TABLE ${this.computedTableName}
          ADD CONSTRAINT ${this.tableName}_collection_id_fkey
          FOREIGN KEY (collection_id)
          REFERENCES ${this.computedCollectionTableName}(uuid)
          ON DELETE CASCADE;
      `;
      await this.pool.query(queryString);
    } catch (e) {
      // Only "already exists" (column/constraint) errors are expected here;
      // anything else is surfaced to the caller.
      if (!(e as Error).message.includes("already exists")) {
        console.error(e);
        throw new Error(
          `Error adding column or creating index: ${(e as Error).message}`
        );
      }
    }
  }
/**
* Static method to create a new `PGVectorStore` instance from an
* array of texts and their metadata. It converts the texts into
* `Document` instances and adds them to the store.
*
* @param texts - Array of texts.
* @param metadatas - Array of metadata objects or a single metadata object.
* @param embeddings - Embeddings instance.
* @param dbConfig - `PGVectorStoreArgs` instance.
* @returns Promise that resolves with a new instance of `PGVectorStore`.
*/
static async fromTexts(
texts: string[],
metadatas: object[] | object,
embeddings: EmbeddingsInterface,
dbConfig: PGVectorStoreArgs & { dimensions?: number }
): Promise<PGVectorStore> {
const docs = [];
for (let i = 0; i < texts.length; i += 1) {
const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
const newDoc = new Document({
pageContent: texts[i],
metadata,
});
docs.push(newDoc);
}
return PGVectorStore.fromDocuments(docs, embeddings, dbConfig);
}
/**
* Static method to create a new `PGVectorStore` instance from an
* array of `Document` instances. It adds the documents to the store.
*
* @param docs - Array of `Document` instances.
* @param embeddings - Embeddings instance.
* @param dbConfig - `PGVectorStoreArgs` instance.
* @returns Promise that resolves with a new instance of `PGVectorStore`.
*/
static async fromDocuments(
docs: Document[],
embeddings: EmbeddingsInterface,
dbConfig: PGVectorStoreArgs & { dimensions?: number }
): Promise<PGVectorStore> {
const instance = await PGVectorStore.initialize(embeddings, dbConfig);
await instance.addDocuments(docs, { ids: dbConfig.ids });
return instance;
}
/**
* Closes all the clients in the pool and terminates the pool.
*
* @returns Promise that resolves when all clients are closed and the pool is terminated.
*/
async end(): Promise<void> {
this.client?.release();
return this.pool.end();
}
  /**
   * Creates an HNSW index on the vector column (no-op if it already exists).
   *
   * @param config.dimensions - Dimensionality of the vector data, up to 2000
   *   (e.g. 1536 for OpenAI's text-embedding-ada-002).
   * @param config.m - Max connections per layer (default 16); smaller values
   *   build faster, larger values can speed up search.
   * @param config.efConstruction - Candidate-list size while building the
   *   graph (default 64); higher values trade build time for index quality.
   * @param config.distanceFunction - Requested operator class. NOTE(review):
   *   the switch below unconditionally overwrites this value from
   *   `distanceStrategy`, so the parameter is currently ignored — confirm
   *   whether honoring an explicit override was intended.
   * @param config.namespace - Prefix so multiple indexes can coexist within
   *   one schema (index names must be unique per schema).
   * @returns Promise that resolves when the index statement has run; failures
   *   are logged rather than rethrown.
   */
  async createHnswIndex(config: {
    dimensions: number;
    m?: number;
    efConstruction?: number;
    distanceFunction?: string;
    namespace?: string;
  }): Promise<void> {
    let idxDistanceFunction = config?.distanceFunction || "vector_cosine_ops";
    const prefix = config?.namespace ? `${config.namespace}_` : "";
    // Operator class is derived from the store's distance strategy.
    switch (this.distanceStrategy) {
      case "cosine":
        idxDistanceFunction = "vector_cosine_ops";
        break;
      case "innerProduct":
        idxDistanceFunction = "vector_ip_ops";
        break;
      case "euclidean":
        idxDistanceFunction = "vector_l2_ops";
        break;
      default:
        throw new Error(`Unknown distance strategy: ${this.distanceStrategy}`);
    }
    const createIndexQuery = `CREATE INDEX IF NOT EXISTS ${prefix}${
      this.vectorColumnName
    }_embedding_hnsw_idx
      ON ${this.computedTableName} USING hnsw ((${
      this.vectorColumnName
    }::vector(${config.dimensions})) ${idxDistanceFunction})
      WITH (
        m=${config?.m || 16},
        ef_construction=${config?.efConstruction || 64}
      );`;
    try {
      await this.pool.query(createIndexQuery);
    } catch (e) {
      // Index creation failures are logged but do not fail the caller.
      console.error(
        `Failed to create HNSW index on table ${this.computedTableName}, error: ${e}`
      );
    }
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/redis.ts
|
import type {
createCluster,
createClient,
RediSearchSchema,
SearchOptions,
} from "redis";
import { SchemaFieldTypes, VectorAlgorithms } from "redis";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { VectorStore } from "@langchain/core/vectorstores";
import { Document } from "@langchain/core/documents";
// Adapted from internal redis types which aren't exported
/**
* @deprecated Install and import from the "@langchain/redis" integration package instead.
* Type for creating a schema vector field. It includes the algorithm,
* distance metric, and initial capacity.
*/
export type CreateSchemaVectorField<
T extends VectorAlgorithms,
A extends Record<string, unknown>
> = {
ALGORITHM: T;
DISTANCE_METRIC: "L2" | "IP" | "COSINE";
INITIAL_CAP?: number;
} & A;
/**
* @deprecated Install and import from the "@langchain/redis" integration package instead.
* Type for creating a flat schema vector field. It extends
* CreateSchemaVectorField with a block size property.
*/
export type CreateSchemaFlatVectorField = CreateSchemaVectorField<
VectorAlgorithms.FLAT,
{
BLOCK_SIZE?: number;
}
>;
/**
* @deprecated Install and import from the "@langchain/redis" integration package instead.
* Type for creating a HNSW schema vector field. It extends
* CreateSchemaVectorField with M, EF_CONSTRUCTION, and EF_RUNTIME
* properties.
*/
export type CreateSchemaHNSWVectorField = CreateSchemaVectorField<
VectorAlgorithms.HNSW,
{
M?: number;
EF_CONSTRUCTION?: number;
EF_RUNTIME?: number;
}
>;
// Options accepted by the redis client's FT.CREATE call (its 4th parameter),
// extracted from the client's own typings because they are not exported.
type CreateIndexOptions = NonNullable<
  Parameters<ReturnType<typeof createClient>["ft"]["create"]>[3]
>;
/** @deprecated Install and import from the "@langchain/redis" integration package instead. */
export type RedisSearchLanguages = `${NonNullable<
CreateIndexOptions["LANGUAGE"]
>}`;
/** @deprecated Install and import from the "@langchain/redis" integration package instead. */
export type RedisVectorStoreIndexOptions = Omit<
CreateIndexOptions,
"LANGUAGE"
> & { LANGUAGE?: RedisSearchLanguages };
/**
* @deprecated Install and import from the "@langchain/redis" integration package instead.
* Interface for the configuration of the RedisVectorStore. It includes
* the Redis client, index name, index options, key prefix, content key,
* metadata key, vector key, and filter.
*/
export interface RedisVectorStoreConfig {
redisClient:
| ReturnType<typeof createClient>
| ReturnType<typeof createCluster>;
indexName: string;
indexOptions?: CreateSchemaFlatVectorField | CreateSchemaHNSWVectorField;
createIndexOptions?: Omit<RedisVectorStoreIndexOptions, "PREFIX">; // PREFIX must be set with keyPrefix
keyPrefix?: string;
contentKey?: string;
metadataKey?: string;
vectorKey?: string;
filter?: RedisVectorStoreFilterType;
}
/**
* @deprecated Install and import from the "@langchain/redis" integration package instead.
* Interface for the options when adding documents to the
* RedisVectorStore. It includes keys and batch size.
*/
export interface RedisAddOptions {
keys?: string[];
batchSize?: number;
}
/**
* @deprecated Install and import from the "@langchain/redis" integration package instead.
* Type for the filter used in the RedisVectorStore. It is an array of
* strings.
*/
export type RedisVectorStoreFilterType = string[];
/**
* @deprecated Install and import from the "@langchain/redis" integration package instead.
* Class representing a RedisVectorStore. It extends the VectorStore class
* and includes methods for adding documents and vectors, performing
* similarity searches, managing the index, and more.
*/
export class RedisVectorStore extends VectorStore {
  declare FilterType: RedisVectorStoreFilterType;
  // Underlying redis connection; may be a single client or a cluster.
  private redisClient:
    | ReturnType<typeof createClient>
    | ReturnType<typeof createCluster>;
  indexName: string;
  indexOptions: CreateSchemaFlatVectorField | CreateSchemaHNSWVectorField;
  createIndexOptions: CreateIndexOptions;
  // Every stored hash key starts with this prefix (ties keys to the index).
  keyPrefix: string;
  contentKey: string;
  metadataKey: string;
  vectorKey: string;
  filter?: RedisVectorStoreFilterType;
  _vectorstoreType(): string {
    return "redis";
  }
  /**
   * @param embeddings - Embeddings implementation used for all vectorization.
   * @param _dbConfig - Redis client, index name and optional key/index
   *   settings. Defaults: HNSW index with COSINE metric, keys prefixed with
   *   `doc:<indexName>:`, fields `content` / `metadata` / `content_vector`.
   */
  constructor(
    embeddings: EmbeddingsInterface,
    _dbConfig: RedisVectorStoreConfig
  ) {
    super(embeddings, _dbConfig);
    this.redisClient = _dbConfig.redisClient;
    this.indexName = _dbConfig.indexName;
    this.indexOptions = _dbConfig.indexOptions ?? {
      ALGORITHM: VectorAlgorithms.HNSW,
      DISTANCE_METRIC: "COSINE",
    };
    this.keyPrefix = _dbConfig.keyPrefix ?? `doc:${this.indexName}:`;
    this.contentKey = _dbConfig.contentKey ?? "content";
    this.metadataKey = _dbConfig.metadataKey ?? "metadata";
    this.vectorKey = _dbConfig.vectorKey ?? "content_vector";
    this.filter = _dbConfig.filter;
    // PREFIX is forced from keyPrefix so the index and stored keys agree.
    this.createIndexOptions = {
      ON: "HASH",
      PREFIX: this.keyPrefix,
      ...(_dbConfig.createIndexOptions as CreateIndexOptions),
    };
  }
/**
* Method for adding documents to the RedisVectorStore. It first converts
* the documents to texts and then adds them as vectors.
* @param documents The documents to add.
* @param options Optional parameters for adding the documents.
* @returns A promise that resolves when the documents have been added.
*/
async addDocuments(documents: Document[], options?: RedisAddOptions) {
const texts = documents.map(({ pageContent }) => pageContent);
return this.addVectors(
await this.embeddings.embedDocuments(texts),
documents,
options
);
}
/**
* Method for adding vectors to the RedisVectorStore. It checks if the
* index exists and creates it if it doesn't, then adds the vectors in
* batches.
* @param vectors The vectors to add.
* @param documents The documents associated with the vectors.
* @param keys Optional keys for the vectors.
* @param batchSize The size of the batches in which to add the vectors. Defaults to 1000.
* @returns A promise that resolves when the vectors have been added.
*/
async addVectors(
vectors: number[][],
documents: Document[],
{ keys, batchSize = 1000 }: RedisAddOptions = {}
) {
if (!vectors.length || !vectors[0].length) {
throw new Error("No vectors provided");
}
// check if the index exists and create it if it doesn't
await this.createIndex(vectors[0].length);
const info = await this.redisClient.ft.info(this.indexName);
const lastKeyCount = parseInt(info.numDocs, 10) || 0;
const multi = this.redisClient.multi();
vectors.map(async (vector, idx) => {
const key =
keys && keys.length
? keys[idx]
: `${this.keyPrefix}${idx + lastKeyCount}`;
const metadata =
documents[idx] && documents[idx].metadata
? documents[idx].metadata
: {};
multi.hSet(key, {
[this.vectorKey]: this.getFloat32Buffer(vector),
[this.contentKey]: documents[idx].pageContent,
[this.metadataKey]: this.escapeSpecialChars(JSON.stringify(metadata)),
});
// write batch
if (idx % batchSize === 0) {
await multi.exec();
}
});
// insert final batch
await multi.exec();
}
/**
* Method for performing a similarity search in the RedisVectorStore. It
* returns the documents and their scores.
* @param query The query vector.
* @param k The number of nearest neighbors to return.
* @param filter Optional filter to apply to the search.
* @returns A promise that resolves to an array of documents and their scores.
*/
async similaritySearchVectorWithScore(
query: number[],
k: number,
filter?: RedisVectorStoreFilterType
): Promise<[Document, number][]> {
if (filter && this.filter) {
throw new Error("cannot provide both `filter` and `this.filter`");
}
const _filter = filter ?? this.filter;
const results = await this.redisClient.ft.search(
this.indexName,
...this.buildQuery(query, k, _filter)
);
const result: [Document, number][] = [];
if (results.total) {
for (const res of results.documents) {
if (res.value) {
const document = res.value;
if (document.vector_score) {
result.push([
new Document({
pageContent: (document[this.contentKey] ?? "") as string,
metadata: JSON.parse(
this.unEscapeSpecialChars(
(document.metadata ?? "{}") as string
)
),
}),
Number(document.vector_score),
]);
}
}
}
}
return result;
}
/**
* Static method for creating a new instance of RedisVectorStore from
* texts. It creates documents from the texts and metadata, then adds them
* to the RedisVectorStore.
* @param texts The texts to add.
* @param metadatas The metadata associated with the texts.
* @param embeddings The embeddings to use.
* @param dbConfig The configuration for the RedisVectorStore.
* @returns A promise that resolves to a new instance of RedisVectorStore.
*/
static fromTexts(
texts: string[],
metadatas: object[] | object,
embeddings: EmbeddingsInterface,
dbConfig: RedisVectorStoreConfig
): Promise<RedisVectorStore> {
const docs: Document[] = [];
for (let i = 0; i < texts.length; i += 1) {
const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
const newDoc = new Document({
pageContent: texts[i],
metadata,
});
docs.push(newDoc);
}
return RedisVectorStore.fromDocuments(docs, embeddings, dbConfig);
}
/**
* Static method for creating a new instance of RedisVectorStore from
* documents. It adds the documents to the RedisVectorStore.
* @param docs The documents to add.
* @param embeddings The embeddings to use.
* @param dbConfig The configuration for the RedisVectorStore.
* @returns A promise that resolves to a new instance of RedisVectorStore.
*/
static async fromDocuments(
docs: Document[],
embeddings: EmbeddingsInterface,
dbConfig: RedisVectorStoreConfig
): Promise<RedisVectorStore> {
const instance = new this(embeddings, dbConfig);
await instance.addDocuments(docs);
return instance;
}
/**
* Method for checking if an index exists in the RedisVectorStore.
* @returns A promise that resolves to a boolean indicating whether the index exists.
*/
async checkIndexExists() {
try {
await this.redisClient.ft.info(this.indexName);
} catch (err) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((err as any)?.message.includes("unknown command")) {
throw new Error(
"Failed to run FT.INFO command. Please ensure that you are running a RediSearch-capable Redis instance: https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/redis#setup"
);
}
// index doesn't exist
return false;
}
return true;
}
/**
* Method for creating an index in the RedisVectorStore. If the index
* already exists, it does nothing.
* @param dimensions The dimensions of the index
* @returns A promise that resolves when the index has been created.
*/
async createIndex(dimensions = 1536): Promise<void> {
if (await this.checkIndexExists()) {
return;
}
const schema: RediSearchSchema = {
[this.vectorKey]: {
type: SchemaFieldTypes.VECTOR,
TYPE: "FLOAT32",
DIM: dimensions,
...this.indexOptions,
},
[this.contentKey]: SchemaFieldTypes.TEXT,
[this.metadataKey]: SchemaFieldTypes.TEXT,
};
await this.redisClient.ft.create(
this.indexName,
schema,
this.createIndexOptions
);
}
/**
* Method for dropping an index from the RedisVectorStore.
* @param deleteDocuments Optional boolean indicating whether to drop the associated documents.
* @returns A promise that resolves to a boolean indicating whether the index was dropped.
*/
async dropIndex(deleteDocuments?: boolean): Promise<boolean> {
try {
const options = deleteDocuments ? { DD: deleteDocuments } : undefined;
await this.redisClient.ft.dropIndex(this.indexName, options);
return true;
} catch (err) {
return false;
}
}
/**
* Deletes vectors from the vector store.
* @param params The parameters for deleting vectors.
* @returns A promise that resolves when the vectors have been deleted.
*/
async delete(params: { deleteAll: boolean }): Promise<void> {
if (params.deleteAll) {
await this.dropIndex(true);
} else {
throw new Error(`Invalid parameters passed to "delete".`);
}
}
private buildQuery(
query: number[],
k: number,
filter?: RedisVectorStoreFilterType
): [string, SearchOptions] {
const vectorScoreField = "vector_score";
let hybridFields = "*";
// if a filter is set, modify the hybrid query
if (filter && filter.length) {
// `filter` is a list of strings, then it's applied using the OR operator in the metadata key
// for example: filter = ['foo', 'bar'] => this will filter all metadata containing either 'foo' OR 'bar'
hybridFields = `@${this.metadataKey}:(${this.prepareFilter(filter)})`;
}
const baseQuery = `${hybridFields} => [KNN ${k} @${this.vectorKey} $vector AS ${vectorScoreField}]`;
const returnFields = [this.metadataKey, this.contentKey, vectorScoreField];
const options: SearchOptions = {
PARAMS: {
vector: this.getFloat32Buffer(query),
},
RETURN: returnFields,
SORTBY: vectorScoreField,
DIALECT: 2,
LIMIT: {
from: 0,
size: k,
},
};
return [baseQuery, options];
}
private prepareFilter(filter: RedisVectorStoreFilterType) {
return filter.map(this.escapeSpecialChars).join("|");
}
/**
* Escapes all '-' characters.
* RediSearch considers '-' as a negative operator, hence we need
* to escape it
* @see https://redis.io/docs/stack/search/reference/query_syntax
*
* @param str
* @returns
*/
private escapeSpecialChars(str: string) {
return str.replaceAll("-", "\\-");
}
/**
* Unescapes all '-' characters, returning the original string
*
* @param str
* @returns
*/
private unEscapeSpecialChars(str: string) {
return str.replaceAll("\\-", "-");
}
/**
* Converts the vector to the buffer Redis needs to
* correctly store an embedding
*
* @param vector
* @returns Buffer
*/
private getFloat32Buffer(vector: number[]) {
return Buffer.from(new Float32Array(vector).buffer);
}
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/clickhouse.ts
|
import * as uuid from "uuid";
import { ClickHouseClient, createClient } from "@clickhouse/client";
import { format } from "mysql2";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { VectorStore } from "@langchain/core/vectorstores";
import { Document } from "@langchain/core/documents";
/**
* Arguments for the ClickHouseStore class, which include the host, port,
* protocol, username, password, index type, index parameters,
* index query params, column map, database, table.
*/
export interface ClickHouseLibArgs {
  /** ClickHouse server hostname (without protocol prefix). */
  host: string;
  /** ClickHouse HTTP(S) interface port. */
  port: string | number;
  /** Protocol prefix including separator, e.g. "https://". Defaults to "https://" in the store. */
  protocol?: string;
  /** Username for authentication. */
  username: string;
  /** Password for authentication. */
  password: string;
  /** Vector index type, e.g. "annoy". */
  indexType?: string;
  /** Parameters passed to the vector index, keyed by parameter name. */
  indexParam?: Record<string, number>;
  /** Per-query settings appended to search queries. */
  indexQueryParams?: Record<string, string>;
  /** Custom mapping of logical column names to physical column names. */
  columnMap?: ColumnMap;
  /** Database name; defaults to "default". */
  database?: string;
  /** Table name; defaults to "vector_table". */
  table?: string;
}
/**
* Mapping of columns in the ClickHouse database.
*/
export interface ColumnMap {
  /** Physical column holding the caller-assigned document id. */
  id: string;
  /** Physical column holding the auto-generated row UUID (has a DEFAULT expression). */
  uuid: string;
  /** Physical column holding the document text. */
  document: string;
  /** Physical column holding the embedding vector (Array(Float32)). */
  embedding: string;
  /** Physical column holding the document metadata (JSON). */
  metadata: string;
}
/**
* Type for filtering search results in the ClickHouse database.
*/
export interface ClickHouseFilter {
  // Raw SQL condition injected verbatim into the PREWHERE clause of search
  // queries — callers must ensure it is trusted/escaped.
  whereStr: string;
}
/**
* Class for interacting with the ClickHouse database. It extends the
* VectorStore class and provides methods for adding vectors and
* documents, searching for similar vectors, and creating instances from
* texts or documents.
*/
export class ClickHouseStore extends VectorStore {
  declare FilterType: ClickHouseFilter;

  private client: ClickHouseClient;

  private indexType: string;

  private indexParam: Record<string, number>;

  private indexQueryParams: Record<string, string>;

  private columnMap: ColumnMap;

  private database: string;

  private table: string;

  // Flipped to true once the backing table has been created (lazy init).
  private isInitialized = false;

  _vectorstoreType(): string {
    return "clickhouse";
  }

  constructor(embeddings: EmbeddingsInterface, args: ClickHouseLibArgs) {
    super(embeddings, args);
    this.indexType = args.indexType || "annoy";
    this.indexParam = args.indexParam || { L2Distance: 100 };
    this.indexQueryParams = args.indexQueryParams || {};
    this.columnMap = args.columnMap || {
      id: "id",
      document: "document",
      embedding: "embedding",
      metadata: "metadata",
      uuid: "uuid",
    };
    this.database = args.database || "default";
    this.table = args.table || "vector_table";
    this.client = createClient({
      host: `${args.protocol ?? "https://"}${args.host}:${args.port}`,
      username: args.username,
      password: args.password,
      session_id: uuid.v4(),
    });
  }

  /**
   * Method to add vectors to the ClickHouse database.
   * @param vectors The vectors to add.
   * @param documents The documents associated with the vectors.
   * @returns Promise that resolves when the vectors have been added.
   */
  async addVectors(vectors: number[][], documents: Document[]): Promise<void> {
    if (vectors.length === 0) {
      return;
    }
    if (!this.isInitialized) {
      await this.initialize(vectors[0].length);
    }
    const queryStr = this.buildInsertQuery(vectors, documents);
    await this.client.exec({ query: queryStr });
  }

  /**
   * Method to add documents to the ClickHouse database.
   * @param documents The documents to add.
   * @returns Promise that resolves when the documents have been added.
   */
  async addDocuments(documents: Document[]): Promise<void> {
    return this.addVectors(
      await this.embeddings.embedDocuments(documents.map((d) => d.pageContent)),
      documents
    );
  }

  /**
   * Method to search for vectors that are similar to a given query vector.
   * @param query The query vector.
   * @param k The number of similar vectors to return.
   * @param filter Optional filter for the search results.
   * @returns Promise that resolves with an array of tuples, each containing a Document and a score.
   */
  async similaritySearchVectorWithScore(
    query: number[],
    k: number,
    filter?: this["FilterType"]
  ): Promise<[Document, number][]> {
    if (!this.isInitialized) {
      await this.initialize(query.length);
    }
    const queryStr = this.buildSearchQuery(query, k, filter);
    const queryResultSet = await this.client.query({ query: queryStr });
    const queryResult: {
      data: { document: string; metadata: object; dist: number }[];
    } = await queryResultSet.json();
    const result: [Document, number][] = queryResult.data.map((item) => [
      new Document({ pageContent: item.document, metadata: item.metadata }),
      item.dist,
    ]);
    return result;
  }

  /**
   * Static method to create an instance of ClickHouseStore from texts.
   * @param texts The texts to use.
   * @param metadatas The metadata associated with the texts.
   * @param embeddings The embeddings to use.
   * @param args The arguments for the ClickHouseStore.
   * @returns Promise that resolves with a new instance of ClickHouseStore.
   */
  static async fromTexts(
    texts: string[],
    metadatas: object | object[],
    embeddings: EmbeddingsInterface,
    args: ClickHouseLibArgs
  ): Promise<ClickHouseStore> {
    const docs: Document[] = [];
    for (let i = 0; i < texts.length; i += 1) {
      const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
      const newDoc = new Document({
        pageContent: texts[i],
        metadata,
      });
      docs.push(newDoc);
    }
    return ClickHouseStore.fromDocuments(docs, embeddings, args);
  }

  /**
   * Static method to create an instance of ClickHouseStore from documents.
   * @param docs The documents to use.
   * @param embeddings The embeddings to use.
   * @param args The arguments for the ClickHouseStore.
   * @returns Promise that resolves with a new instance of ClickHouseStore.
   */
  static async fromDocuments(
    docs: Document[],
    embeddings: EmbeddingsInterface,
    args: ClickHouseLibArgs
  ): Promise<ClickHouseStore> {
    const instance = new this(embeddings, args);
    await instance.addDocuments(docs);
    return instance;
  }

  /**
   * Static method to create an instance of ClickHouseStore from an existing
   * index.
   * @param embeddings The embeddings to use.
   * @param args The arguments for the ClickHouseStore.
   * @returns Promise that resolves with a new instance of ClickHouseStore.
   */
  static async fromExistingIndex(
    embeddings: EmbeddingsInterface,
    args: ClickHouseLibArgs
  ): Promise<ClickHouseStore> {
    const instance = new this(embeddings, args);
    await instance.initialize();
    return instance;
  }

  /**
   * Method to initialize the ClickHouse database (creates the table and the
   * vector index if they do not exist).
   * @param dimension Optional dimension of the vectors; probed from the embeddings when omitted.
   * @returns Promise that resolves when the database has been initialized.
   */
  private async initialize(dimension?: number): Promise<void> {
    const dim = dimension ?? (await this.embeddings.embedQuery("test")).length;
    const indexParamStr = this.indexParam
      ? Object.entries(this.indexParam)
          .map(([key, value]) => `'${key}', ${value}`)
          .join(", ")
      : "";
    const query = `
      CREATE TABLE IF NOT EXISTS ${this.database}.${this.table}(
        ${this.columnMap.id} Nullable(String),
        ${this.columnMap.document} Nullable(String),
        ${this.columnMap.embedding} Array(Float32),
        ${this.columnMap.metadata} JSON,
        ${this.columnMap.uuid} UUID DEFAULT generateUUIDv4(),
        CONSTRAINT cons_vec_len CHECK length(${this.columnMap.embedding}) = ${dim},
        INDEX vec_idx ${this.columnMap.embedding} TYPE ${this.indexType}(${indexParamStr}) GRANULARITY 1000
      ) ENGINE = MergeTree ORDER BY ${this.columnMap.uuid} SETTINGS index_granularity = 8192;`;
    await this.client.exec({
      query,
      clickhouse_settings: {
        allow_experimental_object_type: 1,
        allow_experimental_annoy_index: 1,
      },
    });
    this.isInitialized = true;
  }

  /**
   * Method to build an SQL query for inserting vectors and documents into
   * the ClickHouse database.
   * @param vectors The vectors to insert.
   * @param documents The documents to insert.
   * @returns The SQL query string.
   */
  private buildInsertQuery(vectors: number[][], documents: Document[]): string {
    // Insert into every mapped column except `uuid`, which is populated by its
    // DEFAULT generateUUIDv4() expression. The column list is spelled out
    // explicitly so its order always matches the VALUES tuples below;
    // previously it was derived by filtering Object.entries(this.columnMap)
    // on `key !== this.columnMap.uuid` (an entry *key* compared against a
    // column *value*), which broke — and could misorder columns — whenever a
    // custom columnMap was supplied.
    const columnsStr = [
      this.columnMap.id,
      this.columnMap.document,
      this.columnMap.embedding,
      this.columnMap.metadata,
    ].join(", ");
    const placeholders = vectors.map(() => "(?, ?, ?, ?)").join(", ");
    const values = [];
    for (let i = 0; i < vectors.length; i += 1) {
      const vector = vectors[i];
      const document = documents[i];
      // Tuple order must match columnsStr: id, document, embedding, metadata.
      values.push(
        uuid.v4(),
        // NOTE(review): pageContent is pre-escaped here and then escaped again
        // by mysql2's `format` below — possible double-escaping; confirm
        // against ClickHouse string-literal rules before changing.
        this.escapeString(document.pageContent),
        JSON.stringify(vector),
        JSON.stringify(document.metadata)
      );
    }
    const insertQueryStr = `
      INSERT INTO TABLE ${this.database}.${this.table}(${columnsStr})
      VALUES ${placeholders}
    `;
    const insertQuery = format(insertQueryStr, values);
    return insertQuery;
  }

  // Escapes backslashes and single quotes for embedding in a SQL string literal.
  private escapeString(str: string): string {
    return str.replace(/\\/g, "\\\\").replace(/'/g, "\\'");
  }

  /**
   * Method to build an SQL query for searching for similar vectors in the
   * ClickHouse database.
   * @param query The query vector.
   * @param k The number of similar vectors to return.
   * @param filter Optional filter for the search results.
   * @returns The SQL query string.
   */
  private buildSearchQuery(
    query: number[],
    k: number,
    filter?: ClickHouseFilter
  ): string {
    const order = "ASC";
    const whereStr = filter ? `PREWHERE ${filter.whereStr}` : "";
    const placeholders = query.map(() => "?").join(", ");
    const settingStrings: string[] = [];
    if (this.indexQueryParams) {
      for (const [key, value] of Object.entries(this.indexQueryParams)) {
        // NOTE(review): emits one `SETTING k=v` clause per entry; ClickHouse
        // documents the keyword as `SETTINGS k=v, ...` — verify before relying
        // on multiple indexQueryParams.
        settingStrings.push(`SETTING ${key}=${value}`);
      }
    }
    const searchQueryStr = `
      SELECT ${this.columnMap.document} AS document, ${
      this.columnMap.metadata
    } AS metadata, dist
      FROM ${this.database}.${this.table}
      ${whereStr}
      ORDER BY L2Distance(${
        this.columnMap.embedding
      }, [${placeholders}]) AS dist ${order}
      LIMIT ${k} ${settingStrings.join(" ")}
    `;
    // Format the query with actual values
    const searchQuery = format(searchQueryStr, query);
    return searchQuery;
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/closevector/web.ts
|
import {
CloseVectorHNSWWeb,
HierarchicalNSWT,
CloseVectorHNSWLibArgs,
CloseVectorCredentials,
HnswlibModule,
} from "closevector-web";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { Document } from "@langchain/core/documents";
import { CloseVector } from "./common.js";
/**
* package closevector-node is largely based on hnswlib.ts in the current folder with the following exceptions:
* 1. It uses a modified version of hnswlib-node to ensure the generated index can be loaded by closevector_web.ts.
* 2. It adds features to upload and download the index to/from the CDN provided by CloseVector.
*
* For more information, check out https://closevector-docs.getmegaportal.com/
*/
/**
* Arguments for creating a CloseVectorWeb instance, extending CloseVectorHNSWLibArgs.
*/
export interface CloseVectorWebArgs
  extends CloseVectorHNSWLibArgs<HierarchicalNSWT> {
  /** Distance metric used by the HNSW index. */
  space: "cosine" | "l2" | "ip";
  /** Optional pre-built backing store to reuse instead of constructing a new one. */
  instance?: CloseVectorHNSWWeb;
}
/**
* Class that implements a vector store using CloseVector, It extends the SaveableVectorStore class and
* provides methods for adding documents and vectors, performing
* similarity searches, and saving and loading the vector store.
*/
export class CloseVectorWeb extends CloseVector<CloseVectorHNSWWeb> {
  declare FilterType: (doc: Document) => boolean;

  constructor(
    embeddings: EmbeddingsInterface,
    args: CloseVectorWebArgs,
    credentials?: CloseVectorCredentials
  ) {
    super(embeddings, args, credentials);
    // Reuse a caller-supplied backing store when provided; otherwise build a
    // fresh CloseVectorHNSWWeb, forwarding args merged with any credentials.
    this.instance = args.instance
      ? args.instance
      : new CloseVectorHNSWWeb(embeddings, { ...args, ...credentials });
  }

  /** Returns the uuid of the underlying CloseVector index. */
  uuid() {
    return this.instance.uuid;
  }

  /**
   * Uploads the index to the CloseVector CDN.
   * @param options.uuid after uploading, the index uuid is available via `instance.uuid`
   * @param options.credentials credentials for the CloseVector API; falls back to the ones given at construction
   * @param options.onProgress upload progress callback
   * @param options.public whether the index is publicly readable by anyone holding the uuid; private by default
   * @param options.description free-form description of the index
   */
  async saveToCloud(options: {
    uuid?: string;
    public?: boolean;
    description?: string;
    credentials?: CloseVectorCredentials;
    onProgress?: (progress: { loaded: number; total: number }) => void;
  }) {
    const credentials = options.credentials || this.credentials;
    await this.instance.saveToCloud({ ...options, credentials });
  }

  /**
   * Downloads an index from the CloseVector CDN and wraps it in a new
   * CloseVectorWeb instance.
   * @param options.uuid uuid of the index to fetch
   * @param options.credentials credentials for the CloseVector API
   * @param options.onProgress download progress callback
   * @param options.embeddings embeddings used by the resulting instance
   */
  static async loadFromCloud(options: {
    embeddings: EmbeddingsInterface;
    uuid: string;
    credentials?: CloseVectorCredentials;
    onProgress?: (progress: { loaded: number; total: number }) => void;
  }) {
    const loaded = await CloseVectorHNSWWeb.loadFromCloud(options);
    return new this(options.embeddings, loaded.args, options.credentials);
  }

  /**
   * Loads a saved vector store (HNSW index, args, and document store) from a
   * directory and wraps it in a new CloseVectorWeb instance.
   * @param directory The directory to load from.
   * @param embeddings The embeddings used by the resulting instance.
   * @param credentials Optional CloseVector API credentials.
   * @returns A Promise resolving to a new CloseVectorWeb instance.
   */
  static async load(
    directory: string,
    embeddings: EmbeddingsInterface,
    credentials?: CloseVectorCredentials
  ) {
    const loaded = await CloseVectorHNSWWeb.load(directory, embeddings);
    return new this(embeddings, loaded.args, credentials);
  }

  /**
   * Builds a CloseVectorWeb instance from raw texts: each text is wrapped in
   * a Document (paired with its metadata) and handed to `fromDocuments`.
   * @param texts Source texts.
   * @param metadatas A metadata object per text, or one object shared by all.
   * @param embeddings The embeddings used by the instance.
   * @param args Optional configuration for the instance.
   * @param credential Optional CloseVector API credentials.
   * @returns A Promise resolving to a new CloseVectorWeb instance.
   */
  static async fromTexts(
    texts: string[],
    metadatas: object[] | object,
    embeddings: EmbeddingsInterface,
    args?: Record<string, unknown>,
    credential?: CloseVectorCredentials
  ): Promise<CloseVectorWeb> {
    const docs = texts.map(
      (pageContent, i) =>
        new Document({
          pageContent,
          metadata: Array.isArray(metadatas) ? metadatas[i] : metadatas,
        })
    );
    return CloseVectorWeb.fromDocuments(docs, embeddings, args, credential);
  }

  /**
   * Builds a CloseVectorWeb instance and indexes the given documents into it.
   * @param docs Documents to index.
   * @param embeddings The embeddings used by the instance.
   * @param args Optional configuration; defaults to cosine space.
   * @param credentials Optional CloseVector API credentials.
   * @returns A Promise resolving to a new CloseVectorWeb instance.
   */
  static async fromDocuments(
    docs: Document[],
    embeddings: EmbeddingsInterface,
    args?: Record<string, unknown>,
    credentials?: CloseVectorCredentials
  ): Promise<CloseVectorWeb> {
    const resolvedArgs: Record<string, unknown> = args || { space: "cosine" };
    const store = new this(
      embeddings,
      resolvedArgs as unknown as CloseVectorWebArgs,
      credentials
    );
    await store.addDocuments(docs);
    return store;
  }

  /** Exposes the underlying hnswlib WASM module loader. */
  static async imports(): Promise<HnswlibModule> {
    return CloseVectorHNSWWeb.imports();
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/closevector/common.ts
|
import type { CloseVectorSaveableVectorStore } from "closevector-common";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { Document } from "@langchain/core/documents";
import { SaveableVectorStore } from "@langchain/core/vectorstores";
type CloseVectorCredentials = {
  /** CloseVector API key. */
  key?: string;
  /** CloseVector API secret. */
  secret?: string;
};
/**
* package closevector is largely based on hnswlib.ts in the current folder with the following exceptions:
* 1. It uses a modified version of hnswlib-node to ensure the generated index can be loaded by closevector_web.ts.
* 2. It adds features to upload and download the index to/from the CDN provided by CloseVector.
*
* For more information, check out https://closevector-docs.getmegaportal.com/
*/
/**
* Class that implements a vector store using Hierarchical Navigable Small
* World (HNSW) graphs. It extends the SaveableVectorStore class and
* provides methods for adding documents and vectors, performing
* similarity searches, and saving and loading the vector store.
*/
export abstract class CloseVector<
  CloseVectorHNSWImplementation extends CloseVectorSaveableVectorStore
> extends SaveableVectorStore {
  declare FilterType: (doc: Document) => boolean;
  // Backing closevector store; remains undefined until assigned through the
  // `instance` setter (subclass constructors do this).
  _instance?: CloseVectorHNSWImplementation;
  // credentials will not be saved to disk
  credentials?: CloseVectorCredentials;
  _vectorstoreType(): string {
    return "closevector";
  }
  constructor(
    embeddings: EmbeddingsInterface,
    args: {
      space: "l2" | "ip" | "cosine";
      numDimensions?: number;
      maxElements?: number;
    },
    credentials?: CloseVectorCredentials
  ) {
    super(embeddings, args);
    this.credentials = credentials;
  }
  // Guarded accessor: throws a descriptive error until a concrete
  // implementation has been assigned, instead of failing later with an
  // "undefined" access.
  public get instance(): CloseVectorHNSWImplementation {
    if (!this._instance) {
      throw new Error(
        "Vector store not initialised yet. Try calling `addTexts` first."
      );
    }
    return this._instance;
  }
  protected set instance(instance: CloseVectorHNSWImplementation) {
    this._instance = instance;
  }
  /**
   * Method to add documents to the vector store. It first converts the
   * documents to vectors using the embeddings, then adds the vectors to the
   * vector store. Delegates entirely to the backing closevector instance.
   * @param documents The documents to be added to the vector store.
   * @returns A Promise that resolves when the documents have been added.
   */
  async addDocuments(documents: Document[]): Promise<void> {
    await this.instance.addDocuments(documents);
  }
  // Subclasses implement the CDN upload; options differ per platform.
  abstract saveToCloud(_options: Record<string, unknown>): Promise<void>;
  /**
   * Method to save the vector store to a directory. It saves the HNSW
   * index, the arguments, and the document store to the directory.
   * @param directory The directory to which to save the vector store. In CloseVector, we use IndexedDB to mock the file system. Therefore, this parameter is can be treated as a key to the contents stored.
   * @returns A Promise that resolves when the vector store has been saved.
   */
  async save(directory: string): Promise<void> {
    await this.instance.save(directory);
  }
  /**
   * Method to add vectors to the vector store. It first initializes the
   * index if it hasn't been initialized yet, then adds the vectors to the
   * index and the documents to the document store.
   * @param vectors The vectors to be added to the vector store.
   * @param documents The documents corresponding to the vectors.
   * @returns A Promise that resolves when the vectors and documents have been added.
   */
  async addVectors(vectors: number[][], documents: Document[]) {
    await this.instance.addVectors(vectors, documents);
  }
  /**
   * Method to perform a similarity search in the vector store using a query
   * vector. It returns the k most similar documents along with their
   * similarity scores. An optional filter function can be provided to
   * filter the documents.
   * @param query The query vector.
   * @param k The number of most similar documents to return.
   * @param filter An optional filter function to filter the documents.
   * @returns A Promise that resolves to an array of tuples, where each tuple contains a document and its similarity score.
   */
  async similaritySearchVectorWithScore(
    query: number[],
    k: number,
    filter?: this["FilterType"]
  ) {
    // Adapt the Document-predicate filter into the plain-object predicate the
    // closevector library expects; `|| false` coerces an undefined result
    // from the optional call into a definite boolean.
    const resp = await this.instance.similaritySearchVectorWithScore(
      query,
      k,
      filter
        ? (x: { pageContent: string; metadata: Record<string, unknown> }) =>
            filter?.({
              pageContent: x.pageContent,
              metadata: x.metadata || {},
            }) || false
        : undefined
    );
    // NOTE(review): scores are reported as `1 - rawScore` — presumably
    // converting a distance into a similarity; confirm against the
    // closevector documentation before relying on the exact scale.
    const mapped: [Document<Record<string, unknown>>, number][] = resp.map(
      (x) => [
        new Document({
          pageContent: x[0].pageContent,
          metadata: x[0].metadata || {},
        }),
        1 - x[1],
      ]
    );
    return mapped;
  }
  /**
   * Method to delete the vector store from a directory. It deletes the
   * hnswlib.index file, the docstore.json file, and the args.json file from
   * the directory.
   * @param params An object with a directory property that specifies the directory from which to delete the vector store.
   * @returns A Promise that resolves when the vector store has been deleted.
   */
  async delete(params: { directory: string }) {
    return await this.instance.delete(params);
  }
  // Shared helper: pairs each text with its metadata (per-item array or a
  // single shared object) and wraps them in Document instances.
  static textsToDocuments(texts: string[], metadatas: object[] | object) {
    const docs: Document[] = [];
    for (let i = 0; i < texts.length; i += 1) {
      const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
      const newDoc = new Document({
        pageContent: texts[i],
        metadata,
      });
      docs.push(newDoc);
    }
    return docs;
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/closevector/node.ts
|
import {
CloseVectorHNSWNode,
HierarchicalNSWT,
CloseVectorHNSWLibArgs,
CloseVectorCredentials,
} from "closevector-node";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { Document } from "@langchain/core/documents";
import { CloseVector } from "./common.js";
/**
* package closevector-node is largely based on hnswlib.ts in the current folder with the following exceptions:
* 1. It uses a modified version of hnswlib-node to ensure the generated index can be loaded by closevector_web.ts.
* 2. It adds features to upload and download the index to/from the CDN provided by CloseVector.
*
* For more information, check out https://closevector-docs.getmegaportal.com/
*/
/**
* Arguments for creating a CloseVectorNode instance, extending CloseVectorHNSWLibArgs.
*/
export interface CloseVectorNodeArgs
  extends CloseVectorHNSWLibArgs<HierarchicalNSWT> {
  /** Distance metric used by the HNSW index. */
  space: "cosine" | "l2" | "ip";
  /** Optional pre-built backing store to reuse instead of constructing a new one. */
  instance?: CloseVectorHNSWNode;
}
/**
 * Vector store backed by a Hierarchical Navigable Small World (HNSW) index
 * running on Node.js via CloseVector. Wraps a CloseVectorHNSWNode instance
 * and adds helpers for persisting the index locally or on the CloseVector CDN.
 */
export class CloseVectorNode extends CloseVector<CloseVectorHNSWNode> {
  declare FilterType: (doc: Document) => boolean;

  constructor(
    embeddings: EmbeddingsInterface,
    args: CloseVectorNodeArgs,
    credentials?: CloseVectorCredentials
  ) {
    super(embeddings, args, credentials);
    // Reuse a caller-supplied instance when present, otherwise build a fresh one.
    this.instance = args.instance ?? new CloseVectorHNSWNode(embeddings, args);
  }

  /** UUID of the underlying index (set after an upload to the CloseVector CDN). */
  uuid() {
    return this.instance.uuid;
  }

  /**
   * Save the index to the CloseVector CDN.
   * @param options.uuid existing index uuid to overwrite; after upload the uuid is available via `this.uuid()`
   * @param options.public if true the index is readable by anyone holding the uuid; defaults to private
   * @param options.description human-readable description of the index
   * @param options.credentials CloseVector API credentials; falls back to the credentials given at construction
   * @param options.onProgress callback for tracking upload progress
   */
  async saveToCloud(options: {
    uuid?: string;
    public?: boolean;
    description?: string;
    credentials?: CloseVectorCredentials;
    onProgress?: (progress: { loaded: number; total: number }) => void;
  }) {
    const credentials = options.credentials ?? this.credentials;
    await this.instance.saveToCloud({ ...options, credentials });
  }

  /**
   * Load an index from the CloseVector CDN and wrap it in a new store.
   * @param options.embeddings embeddings to be used by the new instance
   * @param options.uuid uuid of the index to download
   * @param options.credentials CloseVector API credentials
   * @param options.onProgress callback for tracking download progress
   */
  static async loadFromCloud(options: {
    embeddings: EmbeddingsInterface;
    uuid: string;
    credentials?: CloseVectorCredentials;
    onProgress?: (progress: { loaded: number; total: number }) => void;
  }) {
    const loaded = await CloseVectorHNSWNode.loadFromCloud(options);
    return new this(options.embeddings, loaded.args, options.credentials);
  }

  /**
   * Load a vector store from a local directory (index, arguments and
   * document store) and wrap it in a new CloseVectorNode.
   * @param directory directory the store was previously saved to
   * @param embeddings embeddings to be used by the new instance
   * @param credentials optional CloseVector API credentials
   * @returns a Promise resolving to the new CloseVectorNode
   */
  static async load(
    directory: string,
    embeddings: EmbeddingsInterface,
    credentials?: CloseVectorCredentials
  ) {
    const loaded = await CloseVectorHNSWNode.load(directory, embeddings);
    return new this(embeddings, loaded.args, credentials);
  }

  /**
   * Build a store from raw texts: each text/metadata pair becomes a Document,
   * then fromDocuments does the actual indexing.
   * @param texts texts to index
   * @param metadatas metadata per text (or one object shared by all)
   * @param embeddings embeddings to be used by the new instance
   * @param args optional store configuration
   * @param credential optional CloseVector API credentials
   * @returns a Promise resolving to the new CloseVectorNode
   */
  static async fromTexts(
    texts: string[],
    metadatas: object[] | object,
    embeddings: EmbeddingsInterface,
    args?: Record<string, unknown>,
    credential?: CloseVectorCredentials
  ): Promise<CloseVectorNode> {
    const documents = CloseVector.textsToDocuments(texts, metadatas);
    return CloseVectorNode.fromDocuments(documents, embeddings, args, credential);
  }

  /**
   * Build a store from documents: create the instance, add the documents,
   * return it.
   * @param docs documents to index
   * @param embeddings embeddings to be used by the new instance
   * @param args optional store configuration (defaults to cosine space)
   * @param credentials optional CloseVector API credentials
   * @returns a Promise resolving to the new CloseVectorNode
   */
  static async fromDocuments(
    docs: Document[],
    embeddings: EmbeddingsInterface,
    args?: Record<string, unknown>,
    credentials?: CloseVectorCredentials
  ): Promise<CloseVectorNode> {
    const resolvedArgs: Record<string, unknown> = args ?? { space: "cosine" };
    const store = new this(
      embeddings,
      resolvedArgs as unknown as CloseVectorNodeArgs,
      credentials
    );
    await store.addDocuments(docs);
    return store;
  }

  /** Expose the dynamic import of the hnswlib binding used by the node backend. */
  static async imports(): Promise<{
    HierarchicalNSW: typeof HierarchicalNSWT;
  }> {
    return CloseVectorHNSWNode.imports();
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/zep.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any, no-new, @typescript-eslint/no-misused-promises */
import { expect, jest, test } from "@jest/globals";
import {
DocumentCollection,
IDocument,
NotFoundError,
ZepClient,
} from "@getzep/zep-js";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { IZepConfig, ZepVectorStore } from "../zep.js";
jest.mock("@getzep/zep-js");
// Input fixtures: plain LangChain-style documents added to the store in tests.
const mockDocuments = [
  {
    pageContent: "foo bar baz",
    metadata: { bar: "baz" },
  },
  {
    pageContent: "foo qux baz",
    metadata: { qux: "bar" },
  },
  {
    pageContent: "foo bar baz",
    metadata: { foo: "bar" },
  },
];
// The same fixtures in Zep's IDocument shape, as returned by the mocked
// search calls (uuid/embeddings/score values are synthetic).
const mockZepDocuments: IDocument[] = mockDocuments.map((doc, index) => ({
  uuid: `uuid${index}`,
  content: doc.pageContent,
  metadata: doc.metadata,
  embeddings: new Float32Array([0.0, 0.1]),
  score: Math.random(),
}));
// Mocked Zep DocumentCollection with canned results for add/search/delete.
const mockCollection = {
  addDocuments: jest
    .fn<DocumentCollection["addDocuments"]>()
    .mockResolvedValue(["uuid1", "uuid2", "uuid3"]),
  search: jest
    .fn<DocumentCollection["search"]>()
    .mockResolvedValue(mockZepDocuments as any),
  deleteDocument: jest
    .fn<DocumentCollection["deleteDocument"]>()
    .mockResolvedValue(undefined as any),
  searchReturnQueryVector: jest
    .fn<DocumentCollection["searchReturnQueryVector"]>()
    .mockResolvedValue([mockZepDocuments, new Float32Array([0.0, 0.1])] as any),
  name: "testCollection",
  is_auto_embedded: true,
} as any;
// Mocked ZepClient whose document API always resolves to mockCollection.
const mockClient = {
  document: {
    getCollection: jest.fn<any>().mockResolvedValue(mockCollection),
    addCollection: jest.fn<any>().mockResolvedValue(mockCollection),
  },
} as any;
/**
 * Runtime type guard: treats any object exposing `content`, `metadata` and
 * `embedding` properties as a Zep IDocument.
 */
function isADocument(obj: any): obj is IDocument {
  const requiredKeys = ["content", "metadata", "embedding"];
  return requiredKeys.every((key) => key in obj);
}
// Unit tests for ZepVectorStore with the Zep client fully mocked (no network).
describe("ZepVectorStore", () => {
  let zepConfig: IZepConfig;
  let embeddings: EmbeddingsInterface;
  beforeEach(() => {
    // Fresh config and embeddings per test; ZepClient.init always resolves to mockClient.
    zepConfig = {
      apiUrl: "http://api.zep.com",
      apiKey: "123456",
      collectionName: "testCollection",
      description: "Test Description",
      metadata: {},
      embeddingDimensions: 100,
      isAutoEmbedded: true,
    };
    embeddings = new FakeEmbeddings();
    jest
      .spyOn(ZepClient, "init")
      .mockImplementation(() => Promise.resolve(mockClient));
  });
  test("should instantiate class successfully when a Collection exists", async () => {
    new ZepVectorStore(embeddings, zepConfig);
    // Wait for any promises in constructor to resolve
    await new Promise(setImmediate);
    expect(ZepClient.init).toHaveBeenCalledWith(
      zepConfig.apiUrl,
      zepConfig.apiKey
    );
    expect(mockClient.document.getCollection).toHaveBeenCalledWith(
      zepConfig.collectionName
    );
  });
  test("should instantiate class successfully when a Collection does not exist", async () => {
    // First getCollection call rejects, which should trigger addCollection.
    mockClient.document.getCollection.mockRejectedValueOnce(
      new NotFoundError("Collection not found")
    );
    new ZepVectorStore(embeddings, zepConfig);
    // Wait for any promises in constructor to resolve
    await new Promise(setImmediate);
    expect(ZepClient.init).toHaveBeenCalledWith(
      zepConfig.apiUrl,
      zepConfig.apiKey
    );
    expect(mockClient.document.getCollection).toHaveBeenCalledWith(
      zepConfig.collectionName
    );
    expect(mockClient.document.addCollection).toHaveBeenCalledWith({
      name: zepConfig.collectionName,
      description: zepConfig.description,
      metadata: zepConfig.metadata,
      embeddingDimensions: zepConfig.embeddingDimensions,
      isAutoEmbedded: zepConfig.isAutoEmbedded,
    });
  });
  test("should add documents successfully", async () => {
    const zepVectorStore = new ZepVectorStore(embeddings, zepConfig);
    // Inject mockCollection so addDocuments skips the async collection lookup.
    (zepVectorStore as any).collection = mockCollection;
    const result = await zepVectorStore.addDocuments(mockDocuments);
    // Each LangChain document should be converted to a Zep document shape.
    expect(mockCollection.addDocuments).toHaveBeenCalledWith(
      expect.arrayContaining([
        expect.objectContaining({
          content: "foo bar baz",
          metadata: { bar: "baz" },
        }),
        expect.objectContaining({
          content: "foo qux baz",
          metadata: { qux: "bar" },
        }),
        expect.objectContaining({
          content: "foo bar baz",
          metadata: { foo: "bar" },
        }),
      ])
    );
    expect(result).toEqual(["uuid1", "uuid2", "uuid3"]);
  });
  test("should delete documents successfully", async () => {
    const zepVectorStore = new ZepVectorStore(embeddings, zepConfig);
    // Inject mockCollection into zepVectorStore
    (zepVectorStore as any).collection = mockCollection;
    const uuidsToDelete = ["uuid1", "uuid2", "uuid3"];
    await zepVectorStore.delete({ uuids: uuidsToDelete });
    // delete() is expected to fan out to one deleteDocument call per uuid.
    uuidsToDelete.forEach((uuid) => {
      expect(mockCollection.deleteDocument).toHaveBeenCalledWith(uuid);
    });
  });
  test("should create ZepVectorStore from texts successfully", async () => {
    const texts = ["text1", "text2", "text3"];
    const metadatas = [{ foo: "bar" }, { baz: "qux" }, { quux: "corge" }];
    // Mock ZepVectorStore.fromDocuments to inject mockCollection
    const originalFromDocuments = ZepVectorStore.fromDocuments;
    ZepVectorStore.fromDocuments = jest.fn(
      async (docs, embeddings, zepConfig) => {
        const zepVectorStore = await originalFromDocuments.call(
          ZepVectorStore,
          docs as Document[],
          embeddings as EmbeddingsInterface,
          zepConfig as IZepConfig
        );
        (zepVectorStore as any).collection = mockCollection;
        return zepVectorStore;
      }
    );
    const zepVectorStore = await ZepVectorStore.fromTexts(
      texts,
      metadatas,
      embeddings,
      zepConfig
    );
    expect(zepVectorStore).toBeInstanceOf(ZepVectorStore);
    // Did we receive an array of 3 documents?
    expect(mockCollection.addDocuments).toHaveBeenCalledWith(
      expect.arrayContaining([
        expect.anything(),
        expect.anything(),
        expect.anything(),
      ])
    );
    // Check that each object is a valid IDocument
    mockCollection.addDocuments.mock.calls[0][0].forEach((obj: any) => {
      expect(isADocument(obj)).toBe(true);
    });
    // Restore the original ZepVectorStore.fromDocuments
    ZepVectorStore.fromDocuments = originalFromDocuments;
  });
  test("should perform similarity search with score successfully", async () => {
    const zepVectorStore = new ZepVectorStore(embeddings, zepConfig);
    // Inject mockCollection into zepVectorStore
    (zepVectorStore as any).collection = mockCollection;
    const query = [0.1, 0.2, 0.3, 0.4, 0.5];
    const k = 3;
    const filter = { foo: "bar" };
    const result = await zepVectorStore.similaritySearchVectorWithScore(
      query,
      k,
      filter
    );
    // The query vector must be passed as a Float32Array with the filter as metadata.
    expect(mockCollection.search).toHaveBeenCalledWith(
      expect.objectContaining({
        embedding: new Float32Array(query),
        metadata: filter,
      }),
      k
    );
    // Expected shape: [Document, score] tuples mirroring the mocked Zep results.
    const docsAndScores = mockZepDocuments.map((doc) => [
      new Document({
        pageContent: doc.content,
        metadata: doc.metadata,
      }),
      doc.score,
    ]);
    expect(result).toEqual(docsAndScores);
  });
  test("should perform similarity search successfully", async () => {
    const zepVectorStore = new ZepVectorStore(embeddings, zepConfig);
    // Inject mockCollection into zepVectorStore
    (zepVectorStore as any).collection = mockCollection;
    const query = "foo bar";
    const k = 3;
    const filter = { foo: "bar" };
    const result = await zepVectorStore.similaritySearch(query, k, filter);
    // Text queries are sent as-is (Zep embeds them server-side when auto-embedded).
    expect(mockCollection.search).toHaveBeenCalledWith(
      expect.objectContaining({
        text: query,
        metadata: filter,
      }),
      k
    );
    const docs = mockZepDocuments.map(
      (doc) =>
        new Document({
          pageContent: doc.content,
          metadata: doc.metadata,
        })
    );
    expect(result).toEqual(docs);
  });
  test("should perform max marginal relevance search successfully", async () => {
    const zepVectorStore = new ZepVectorStore(embeddings, zepConfig);
    (zepVectorStore as any).collection = mockCollection;
    const query = "foo bar";
    const options = {
      k: 2,
      fetchK: 3,
      lambda: 0.5,
      filter: { foo: "bar" },
    };
    await zepVectorStore.maxMarginalRelevanceSearch(query, options);
    // MMR should over-fetch fetchK candidates before re-ranking down to k.
    expect(mockCollection.search).toHaveBeenCalledWith(
      expect.objectContaining({
        text: query,
        metadata: options.filter,
      }),
      options.fetchK
    );
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/vercel_postgres.int.test.ts
|
import { expect, test } from "@jest/globals";
import { OpenAIEmbeddings } from "@langchain/openai";
import { VercelPostgres } from "../vercel_postgres.js";
// Store instance shared across tests; created per test and cleaned up in afterEach.
let vercelPostgresStore: VercelPostgres;
// Table/column layout used for every VercelPostgres.initialize call below.
const config = {
  tableName: "testvercelvectorstorelangchain2",
  columns: {
    idColumnName: "id",
    vectorColumnName: "vector",
    contentColumnName: "content",
    metadataColumnName: "metadata",
  },
};
// Integration tests — presumably require Vercel Postgres credentials in the
// environment and a valid OpenAI API key; verify before running locally.
describe("Test VercelPostgres store", () => {
  afterEach(async () => {
    // Wipe all rows and close the connection so tests stay isolated.
    await vercelPostgresStore?.delete({ deleteAll: true });
    await vercelPostgresStore?.end();
  });
  test("Test embeddings creation", async () => {
    vercelPostgresStore = await VercelPostgres.initialize(
      new OpenAIEmbeddings(),
      config
    );
    expect(vercelPostgresStore).toBeDefined();
    const docHello = {
      pageContent: "hello",
      metadata: { a: 1 },
    };
    const docCat = {
      pageContent: "Cat drinks milk",
      metadata: { a: 2 },
    };
    const docHi = { pageContent: "hi", metadata: { a: 1 } };
    const ids = await vercelPostgresStore.addDocuments([
      docHello,
      docHi,
      docCat,
    ]);
    // Filter { a: 2 } matches only docCat.
    const results = await vercelPostgresStore.similaritySearch("hello", 2, {
      a: 2,
    });
    expect(results).toHaveLength(1);
    expect(results[0].pageContent).toEqual(docCat.pageContent);
    // Re-adding with an existing id should upsert (replace) that row.
    await vercelPostgresStore.addDocuments(
      [{ pageContent: "Dog drinks milk", metadata: { a: 2 } }],
      { ids: [ids[2]] }
    );
    const results2 = await vercelPostgresStore.similaritySearch("hello", 2, {
      a: 2,
    });
    expect(results2).toHaveLength(1);
    expect(results2[0].pageContent).toEqual("Dog drinks milk");
    // Deleting by id removes the only { a: 2 } row.
    await vercelPostgresStore.delete({ ids: [ids[2]] });
    const results3 = await vercelPostgresStore.similaritySearch("hello", 2, {
      a: 2,
    });
    expect(results3).toHaveLength(0);
  });
  test("Test metadata filtering", async () => {
    vercelPostgresStore = await VercelPostgres.initialize(
      new OpenAIEmbeddings(),
      config
    );
    const docGreen = {
      pageContent: "Hi, I am the color green.",
      metadata: { color: "green" },
    };
    const docBlue = {
      pageContent: "Hi, I am the color blue.",
      metadata: { color: "blue" },
    };
    const docYellow = {
      pageContent: "Hi, I am the color yellow.",
      metadata: { color: "yellow" },
    };
    const docIrrelevant = {
      pageContent: "Hi, I am an irrelevant doc without metadata.",
      metadata: {},
    };
    await vercelPostgresStore.addDocuments([
      docGreen,
      docBlue,
      docYellow,
      docIrrelevant,
    ]);
    // Exact-match filter: only the blue doc qualifies.
    const results1 = await vercelPostgresStore.similaritySearch("color", 5, {
      color: "blue",
    });
    expect(results1).toHaveLength(1);
    // "in" operator filter: blue and yellow, never green.
    const results2 = await vercelPostgresStore.similaritySearch(
      "irrelevant query",
      5,
      {
        color: { in: ["blue", "yellow"] },
      }
    );
    expect(results2).toHaveLength(2);
    const results2WithColorGreen = results2.filter(
      (result) => result.metadata.color === "green"
    );
    expect(results2WithColorGreen).toHaveLength(0);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/chroma.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { beforeEach, describe, expect, test } from "@jest/globals";
import { ChromaClient } from "chromadb";
import { faker } from "@faker-js/faker";
import * as uuid from "uuid";
import { Document } from "@langchain/core/documents";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Chroma } from "../chroma.js";
// Entire suite is skipped — presumably it needs a Chroma server on
// localhost:8000 and an OpenAI API key; confirm before un-skipping.
describe.skip("Chroma", () => {
  let chromaStore: Chroma;
  beforeEach(async () => {
    const embeddings = new OpenAIEmbeddings();
    chromaStore = new Chroma(embeddings, {
      url: "http://localhost:8000",
      collectionName: "test-collection",
    });
  });
  test.skip("auto-generated ids", async () => {
    // No ids supplied: the store must generate them and still round-trip docs.
    const pageContent = faker.lorem.sentence(5);
    await chromaStore.addDocuments([{ pageContent, metadata: { foo: "bar" } }]);
    const results = await chromaStore.similaritySearch(pageContent, 1);
    expect(results).toEqual([
      new Document({ metadata: { foo: "bar" }, pageContent }),
    ]);
  });
  test.skip("metadata filtering", async () => {
    const pageContent = faker.lorem.sentence(5);
    const id = uuid.v4();
    await chromaStore.addDocuments([
      { pageContent, metadata: { foo: "bar" } },
      { pageContent, metadata: { foo: id } },
      { pageContent, metadata: { foo: "qux" } },
    ]);
    // If the filter wasn't working, we'd get all 3 documents back
    const results = await chromaStore.similaritySearch(pageContent, 3, {
      foo: id,
    });
    expect(results).toEqual([
      new Document({ metadata: { foo: id }, pageContent }),
    ]);
  });
  test.skip("upsert", async () => {
    const pageContent = faker.lorem.sentence(5);
    const id = uuid.v4();
    const ids = await chromaStore.addDocuments([
      { pageContent, metadata: { foo: id } },
      { pageContent, metadata: { foo: id } },
    ]);
    const results = await chromaStore.similaritySearch(pageContent, 4, {
      foo: id,
    });
    expect(results.length).toEqual(2);
    // Re-adding with the same ids must replace rows, not create duplicates.
    const ids2 = await chromaStore.addDocuments(
      [
        { pageContent, metadata: { foo: id } },
        { pageContent, metadata: { foo: id } },
      ],
      { ids }
    );
    expect(ids).toEqual(ids2);
    const newResults = await chromaStore.similaritySearch(pageContent, 4, {
      foo: id,
    });
    expect(newResults.length).toEqual(2);
  });
  test.skip("delete by ids", async () => {
    const pageContent = faker.lorem.sentence(5);
    const id = uuid.v4();
    const ids = await chromaStore.addDocuments([
      { pageContent, metadata: { foo: id } },
      { pageContent, metadata: { foo: id } },
    ]);
    const results = await chromaStore.similaritySearch(pageContent, 2, {
      foo: id,
    });
    expect(results.length).toEqual(2);
    // Deleting one of the two ids should leave exactly one match.
    await chromaStore.delete({ ids: ids.slice(0, 1) });
    const newResults = await chromaStore.similaritySearch(pageContent, 2, {
      foo: id,
    });
    expect(newResults.length).toEqual(1);
  });
  test.skip("delete by filter", async () => {
    const pageContent = faker.lorem.sentence(5);
    const id = uuid.v4();
    const id2 = uuid.v4();
    await chromaStore.addDocuments([
      { pageContent, metadata: { foo: id } },
      { pageContent, metadata: { foo: id, bar: id2 } },
    ]);
    const results = await chromaStore.similaritySearch(pageContent, 2, {
      foo: id,
    });
    expect(results.length).toEqual(2);
    // Filter-based delete removes only the doc carrying bar === id2.
    await chromaStore.delete({
      filter: {
        bar: id2,
      },
    });
    const newResults = await chromaStore.similaritySearch(pageContent, 2, {
      foo: id,
    });
    expect(newResults.length).toEqual(1);
  });
  test.skip("load from client instance", async () => {
    // Construct the store from a pre-built ChromaClient instead of a url.
    const pageContent = faker.lorem.sentence(5);
    const id = uuid.v4();
    const chromaStoreFromClient = new Chroma(new OpenAIEmbeddings(), {
      index: new ChromaClient({
        path: "http://localhost:8000",
      }),
      collectionName: "test-collection",
    });
    await chromaStoreFromClient.addDocuments([
      { pageContent, metadata: { foo: "bar" } },
      { pageContent, metadata: { foo: id } },
      { pageContent, metadata: { foo: "qux" } },
    ]);
    const results = await chromaStoreFromClient.similaritySearch(
      pageContent,
      3
    );
    expect(results.length).toEqual(3);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/upstash.int.test.ts
|
/* eslint-disable no-process-env */
import { Index } from "@upstash/vector";
import { Document } from "@langchain/core/documents";
import {
SyntheticEmbeddings,
FakeEmbeddings,
} from "@langchain/core/utils/testing";
import { EmbeddingsInterface } from "@langchain/core/embeddings";
import { UpstashVectorStore } from "../upstash.js";
import { sleep } from "../../utils/time.js";
// Integration tests against a live Upstash Vector index; requires
// UPSTASH_VECTOR_REST_URL / UPSTASH_VECTOR_REST_TOKEN in the environment.
describe("UpstashVectorStore", () => {
  let store: UpstashVectorStore;
  let embeddings: EmbeddingsInterface;
  let index: Index;
  beforeEach(async () => {
    index = new Index({
      url: process.env.UPSTASH_VECTOR_REST_URL,
      token: process.env.UPSTASH_VECTOR_REST_TOKEN,
    });
    // Empty the remote index so each test starts from a clean slate.
    await index.reset();
    embeddings = new SyntheticEmbeddings({
      vectorSize: 384,
    });
    store = new UpstashVectorStore(embeddings, {
      index,
    });
    expect(store).toBeDefined();
  });
  test("basic operations with documents", async () => {
    // Timestamp keeps metadata unique per run.
    const createdAt = new Date().getTime();
    const ids = await store.addDocuments([
      { pageContent: "hello", metadata: { a: createdAt + 1 } },
      { pageContent: "car", metadata: { a: createdAt } },
      { pageContent: "adjective", metadata: { a: createdAt } },
      { pageContent: "hi", metadata: { a: createdAt } },
    ]);
    // Sleeping for a second to make sure that all the indexing operations are finished.
    await sleep(1000);
    const results1 = await store.similaritySearchWithScore("hello!", 1);
    expect(results1).toHaveLength(1);
    expect([results1[0][0]]).toEqual([
      new Document({ metadata: { a: createdAt + 1 }, pageContent: "hello" }),
    ]);
    // Asking for more results than stored returns only what exists.
    const results2 = await store.similaritySearchWithScore("testing!", 6);
    expect(results2).toHaveLength(4);
    // Deleting the last two ids should leave two documents.
    await store.delete({ ids: ids.slice(2) });
    const results3 = await store.similaritySearchWithScore("testing again!", 6);
    expect(results3).toHaveLength(2);
  });
  test("UpstashVectorStore.fromText", async () => {
    const vectorStore = await UpstashVectorStore.fromTexts(
      ["hello there!", "what are you building?", "vectors are great!"],
      [
        { id: 1, name: "text1" },
        { id: 2, name: "text2" },
        { id: 3, name: "text3" },
      ],
      embeddings,
      { index }
    );
    // Sleeping for a second to make sure that all the indexing operations are finished.
    await sleep(1000);
    const results1 = await vectorStore.similaritySearch("vectors are great", 1);
    expect(results1).toEqual([
      new Document({
        pageContent: "vectors are great!",
        metadata: { id: 3, name: "text3" },
      }),
    ]);
  });
  test("UpstashVectorStore metadata filtering", async () => {
    const createdAt = new Date().getTime();
    await store.addDocuments([
      { pageContent: "banana", metadata: { creationTime: createdAt + 1 } },
      { pageContent: "car", metadata: { creationTime: createdAt } },
      { pageContent: "apple", metadata: { creationTime: createdAt } },
      { pageContent: "yellow", metadata: { time: createdAt } },
    ]);
    // Sleeping for a second to make sure that all the indexing operations are finished.
    await sleep(1000);
    // Upstash filters are SQL-like strings evaluated against metadata.
    const results1 = await store.similaritySearchWithScore(
      "banana",
      3,
      `creationTime = ${createdAt + 1}`
    );
    expect(results1).toHaveLength(1);
    expect([results1[0][0]]).toEqual([
      new Document({
        metadata: { creationTime: createdAt + 1 },
        pageContent: "banana",
      }),
    ]);
    // A filter matching no metadata should yield an empty result set.
    const results2 = await store.similaritySearchWithScore(
      "car",
      4,
      `creationTime = ${createdAt - 1}`
    );
    expect(results2).toHaveLength(0);
  });
  test("UpstashVectorStore with Upstash Embedding configuration, the embeddings will be created by Upstash's service", async () => {
    // FakeEmbeddings here: the raw text is sent and embedded server-side.
    const vectorStoreWithUpstashEmbeddings = new UpstashVectorStore(
      new FakeEmbeddings(),
      { index }
    );
    const createdAt = new Date().getTime();
    const ids = await vectorStoreWithUpstashEmbeddings.addDocuments([
      { pageContent: "hello", metadata: { a: createdAt + 1 } },
      { pageContent: "car", metadata: { a: createdAt } },
      { pageContent: "adjective", metadata: { a: createdAt } },
      { pageContent: "hi", metadata: { a: createdAt } },
    ]);
    // Sleeping for a second to make sure that all the indexing operations are finished.
    await sleep(1000);
    // NOTE(review): a raw string is passed where a vector is normally
    // expected — presumably supported in Upstash-embedding mode; confirm.
    const results1 =
      await vectorStoreWithUpstashEmbeddings.similaritySearchVectorWithScore(
        "hello!",
        1
      );
    expect(results1).toHaveLength(1);
    expect([results1[0][0]]).toEqual([
      new Document({ metadata: { a: createdAt + 1 }, pageContent: "hello" }),
    ]);
    const results2 =
      await vectorStoreWithUpstashEmbeddings.similaritySearchVectorWithScore(
        "testing!",
        6
      );
    expect(results2).toHaveLength(4);
    await vectorStoreWithUpstashEmbeddings.delete({ ids: ids.slice(2) });
    const results3 =
      await vectorStoreWithUpstashEmbeddings.similaritySearchVectorWithScore(
        "testing again!",
        6
      );
    expect(results3).toHaveLength(2);
  });
  test("Should upsert the documents to target namespace", async () => {
    index = new Index({
      url: process.env.UPSTASH_VECTOR_REST_URL,
      token: process.env.UPSTASH_VECTOR_REST_TOKEN,
    });
    await index.reset();
    embeddings = new SyntheticEmbeddings({
      vectorSize: 384,
    });
    // Two stores over the same index but isolated namespaces.
    const storeNamespace1 = new UpstashVectorStore(embeddings, {
      index,
      namespace: "namespace-1",
    });
    const storeNamespace2 = new UpstashVectorStore(embeddings, {
      index,
      namespace: "namespace-2",
    });
    await storeNamespace1.addDocuments([
      {
        pageContent: "namespace-test-original",
        metadata: { namespace: "namespace-1" },
      },
    ]);
    // Sleeping for a second to make sure that all the indexing operations are finished.
    await sleep(1000);
    // The document must be invisible from the other namespace...
    const resultsNamespace2 = await storeNamespace2.similaritySearchWithScore(
      "namespace-test-original",
      1,
      "namespace = 'namespace-1'"
    );
    expect(resultsNamespace2).toHaveLength(0);
    // ...but visible from the namespace it was written to.
    const resultsNamespace1 = await storeNamespace1.similaritySearchWithScore(
      "namespace-test-original",
      1,
      "namespace = 'namespace-1'"
    );
    expect(resultsNamespace1).toHaveLength(1);
    expect([resultsNamespace1[0][0]]).toEqual([
      new Document({
        metadata: { namespace: "namespace-1" },
        pageContent: "namespace-test-original",
      }),
    ]);
  });
  test("Should delete the documents from target namespace", async () => {
    index = new Index({
      url: process.env.UPSTASH_VECTOR_REST_URL,
      token: process.env.UPSTASH_VECTOR_REST_TOKEN,
    });
    await index.reset();
    embeddings = new SyntheticEmbeddings({
      vectorSize: 384,
    });
    const storeNamespace1 = new UpstashVectorStore(embeddings, {
      index,
      namespace: "namespace-1",
    });
    const storeNamespace2 = new UpstashVectorStore(embeddings, {
      index,
      namespace: "namespace-2",
    });
    // Same document written to both namespaces.
    const idNamespace1 = await storeNamespace1.addDocuments([
      {
        pageContent: "namespace-test-original",
        metadata: { namespace: "namespace-test" },
      },
    ]);
    await storeNamespace2.addDocuments([
      {
        pageContent: "namespace-test-original",
        metadata: { namespace: "namespace-test" },
      },
    ]);
    // Sleeping for a second to make sure that all the indexing operations are finished.
    await sleep(1000);
    // Deleting from namespace-1 must not touch the copy in namespace-2.
    await storeNamespace1.delete({ ids: idNamespace1 });
    const resultsNamespace1 = await storeNamespace1.similaritySearchWithScore(
      "namespace-test-original",
      1,
      "namespace = 'namespace-test'"
    );
    expect(resultsNamespace1).toHaveLength(0);
    const resultsNamespace2 = await storeNamespace2.similaritySearchWithScore(
      "namespace-test-original",
      1,
      "namespace = 'namespace-test'"
    );
    expect(resultsNamespace2).toHaveLength(1);
    expect([resultsNamespace2[0][0]]).toEqual([
      new Document({
        metadata: { namespace: "namespace-test" },
        pageContent: "namespace-test-original",
      }),
    ]);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/azure_aisearch.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import { jest, test, expect } from "@jest/globals";
import { SearchIndexingBufferedSender } from "@azure/search-documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import {
AzureAISearchQueryType,
AzureAISearchVectorStore,
} from "../azure_aisearch.js";
// Spies on FakeEmbeddings and Azure's SearchIndexingBufferedSender so tests
// can count how often embedding and upload machinery is invoked.
const embedMock = jest.spyOn(FakeEmbeddings.prototype, "embedDocuments");
const uploadDocumentsMock = jest.spyOn(
  SearchIndexingBufferedSender.prototype,
  "uploadDocuments"
);
const onMock = jest.spyOn(SearchIndexingBufferedSender.prototype, "on");
const flushMock = jest.spyOn(SearchIndexingBufferedSender.prototype, "flush");
const disposeMock = jest.spyOn(
  SearchIndexingBufferedSender.prototype,
  "dispose"
);
// Reset call counts between tests so per-test assertions stay meaningful.
beforeEach(() => {
  embedMock.mockClear();
  uploadDocumentsMock.mockClear();
  onMock.mockClear();
  flushMock.mockClear();
  disposeMock.mockClear();
});
test("AzureAISearchVectorStore addVectors should upload documents in batches", async () => {
  const embeddings = new FakeEmbeddings();
  const client = {
    indexDocuments: jest.fn(),
  };
  const store = new AzureAISearchVectorStore(embeddings, {
    client: client as any,
    search: {
      type: "similarity",
    },
  });
  expect(store).toBeDefined();
  const documents = [];
  const vectors: number[][] = [];
  // 1500 documents: large enough to force multiple index batches.
  for (let i = 0; i < 1500; i += 1) {
    vectors.push(await embeddings.embedQuery(`hello ${i}`));
    documents.push({
      pageContent: `hello ${i}`,
      metadata: {
        source: `doc-${i}`,
        attributes: [],
      },
    });
  }
  await store.addVectors(vectors, documents);
  // One buffered upload + flush, but three underlying indexDocuments calls
  // (1500 docs split into batches).
  expect(uploadDocumentsMock).toHaveBeenCalledTimes(1);
  expect(flushMock).toHaveBeenCalledTimes(1);
  expect(client.indexDocuments).toHaveBeenCalledTimes(3);
});
test("AzureAISearchVectorStore addDocuments should embed and upload documents in batches", async () => {
  const embeddings = new FakeEmbeddings();
  const client = {
    indexDocuments: jest.fn(),
  };
  const store = new AzureAISearchVectorStore(embeddings, {
    client: client as any,
    search: {
      type: "similarity",
    },
  });
  expect(store).toBeDefined();
  const documents = [];
  for (let i = 0; i < 1500; i += 1) {
    documents.push({
      pageContent: `hello ${i}`,
      metadata: {
        source: `doc-${i}`,
        attributes: [],
      },
    });
  }
  await store.addDocuments(documents);
  // Embedding happens once for the whole set; upload is batched as above.
  expect(embedMock).toHaveBeenCalledTimes(1);
  expect(uploadDocumentsMock).toHaveBeenCalledTimes(1);
  expect(flushMock).toHaveBeenCalledTimes(1);
  expect(client.indexDocuments).toHaveBeenCalledTimes(3);
});
test("AzureAISearchVectorStore addDocuments should use specified IDs", async () => {
  const embeddings = new FakeEmbeddings();
  const client = {
    indexDocuments: jest.fn(),
  };
  const store = new AzureAISearchVectorStore(embeddings, {
    client: client as any,
    search: {
      type: "similarity",
    },
  });
  expect(store).toBeDefined();
  // Caller-supplied ids must be used verbatim and returned unchanged.
  const result = await store.addDocuments(
    [
      {
        pageContent: "hello",
        metadata: {
          source: "test",
          attributes: [],
        },
      },
    ],
    {
      ids: ["id1"],
    }
  );
  expect(uploadDocumentsMock).toHaveBeenCalledTimes(1);
  expect(result).toEqual(["id1"]);
});
test("AzureAISearchVectorStore similarity search works", async () => {
  const search = "test-query";
  const embeddings = new FakeEmbeddings();
  const client = {
    search: jest.fn<any>().mockResolvedValue({
      results: [],
    }),
  };
  const store = new AzureAISearchVectorStore(embeddings, {
    client: client as any,
    search: {
      type: AzureAISearchQueryType.Similarity,
    },
  });
  await store.similaritySearch(search, 1);
  expect(store).toBeDefined();
  // Pure vector similarity: the text query is replaced with "*" and no
  // special queryType is sent.
  expect(client.search.mock.calls[0][0]).toBe("*");
  expect((client.search.mock.calls[0][1] as any).queryType).toBeUndefined();
});
test("AzureAISearchVectorStore similarity hybrid search works", async () => {
  const search = "test-query";
  const embeddings = new FakeEmbeddings();
  const client = {
    search: jest.fn<any>().mockResolvedValue({
      results: [],
    }),
  };
  const store = new AzureAISearchVectorStore(embeddings, {
    client: client as any,
    search: {
      type: AzureAISearchQueryType.SimilarityHybrid,
    },
  });
  await store.similaritySearch(search, 1);
  expect(store).toBeDefined();
  // Hybrid: the raw text query is forwarded alongside the vector query.
  expect(client.search.mock.calls[0][0]).toBe(search);
  expect((client.search.mock.calls[0][1] as any).queryType).toBeUndefined();
});
test("AzureAISearchVectorStore semantic hybrid search works", async () => {
  const search = "test-query";
  const embeddings = new FakeEmbeddings();
  const client = {
    search: jest.fn<any>().mockResolvedValue({
      results: [],
    }),
  };
  const store = new AzureAISearchVectorStore(embeddings, {
    client: client as any,
    search: {
      type: AzureAISearchQueryType.SemanticHybrid,
    },
  });
  await store.similaritySearch(search, 1);
  expect(store).toBeDefined();
  // Semantic hybrid: text query forwarded and queryType set to "semantic".
  expect(client.search.mock.calls[0][0]).toBe(search);
  expect((client.search.mock.calls[0][1] as any).queryType).toBe("semantic");
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/voy.test.ts
|
import { test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { VoyVectorStore, VoyClient } from "../voy.js";
// Minimal in-memory stand-in for the Voy client. `index` echoes the ids of
// the embeddings it receives; `search` always reports the records with ids
// "0" and "1" as the nearest neighbours; `add`/`clear` are no-ops.
const fakeClient: VoyClient = {
  index({ embeddings }) {
    return embeddings.map((item) => item.id).join(",");
  },
  add(_) {},
  search() {
    const neighbors = [
      { id: "0", title: "", url: "" },
      { id: "1", title: "", url: "" },
    ];
    return { neighbors };
  },
  clear() {},
};
test("it can create index using Voy.from text, add new elements to the index and get queried documents", async () => {
  const vectorStore = await VoyVectorStore.fromTexts(
    ["initial first page", "initial second page"],
    [{ id: 1 }, { id: 2 }],
    new FakeEmbeddings(),
    fakeClient
  );
  // the number of dimensions is produced by fake embeddings
  expect(vectorStore.numDimensions).toBe(4);
  // Add three pre-computed 4-dimensional vectors with their documents.
  await vectorStore.addVectors(
    [
      [0, 1, 0, 0],
      [1, 0, 0, 0],
      [0.5, 0.5, 0.5, 0.5],
    ],
    [
      new Document({
        pageContent: "added first page",
        metadata: { id: 5 },
      }),
      new Document({
        pageContent: "added second page",
        metadata: { id: 4 },
      }),
      new Document({
        pageContent: "added third page",
        metadata: { id: 6 },
      }),
    ]
  );
  // 2 documents from fromTexts + 3 from addVectors.
  expect(vectorStore.docstore.length).toBe(5);
  const results = await vectorStore.similaritySearchVectorWithScore(
    [1, 0, 0, 0],
    3
  );
  // fakeClient.search always returns neighbour ids "0" and "1", which map to
  // the first two docstore entries (metadata ids 1 and 2).
  expect(results[0][0].metadata.id).toBe(1);
  expect(results[1][0].metadata.id).toBe(2);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/qdrant.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { describe, expect, test } from "@jest/globals";
import { QdrantClient } from "@qdrant/js-client-rest";
import { faker } from "@faker-js/faker";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { QdrantVectorStore } from "../qdrant.js";
import { OllamaEmbeddings } from "../../embeddings/ollama.js";
// Integration tests for QdrantVectorStore (skipped by default; they need a
// reachable Qdrant instance plus OpenAI/Ollama credentials).
describe.skip("QdrantVectorStore testcase", () => {
  test("base usage", async () => {
    const embeddings = new OpenAIEmbeddings({});
    const qdrantVectorStore = new QdrantVectorStore(embeddings, {
      url: process.env.QDRANT_URL || "http://localhost:6333",
      collectionName: process.env.QDRANT_COLLECTION || "documents",
    });
    const pageContent = faker.lorem.sentence(5);
    await qdrantVectorStore.addDocuments([{ pageContent, metadata: {} }]);
    const results = await qdrantVectorStore.similaritySearch(pageContent, 1);
    expect(results[0]).toEqual(new Document({ metadata: {}, pageContent }));
  });
  test("passing client directly with a local model that creates embeddings with a different number of dimensions", async () => {
    const embeddings = new OllamaEmbeddings({});
    const pageContent = faker.lorem.sentence(5);
    // A dedicated collection is used because Ollama embeddings have a
    // different dimensionality than the default OpenAI ones.
    const qdrantVectorStore = await QdrantVectorStore.fromDocuments(
      [{ pageContent, metadata: {} }],
      embeddings,
      {
        collectionName: "different_dimensions",
        client: new QdrantClient({
          url: process.env.QDRANT_URL,
          apiKey: process.env.QDRANT_API_KEY,
        }),
      }
    );
    const results = await qdrantVectorStore.similaritySearch(pageContent, 1);
    expect(results[0]).toEqual(new Document({ metadata: {}, pageContent }));
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/neo4j_vector.fixtures.ts
|
import { Document } from "@langchain/core/documents";
// Shape of the metadata attached to each fixture document. `height` and
// `happiness` are nullable and `sadness` may be absent so the filtering
// tests can exercise null/missing-value handling.
interface Metadata {
  name: string;
  date: string;
  count: number;
  is_active: boolean;
  tags: string[];
  location: number[];
  id: number;
  height: number | null;
  happiness: number | null;
  sadness?: number;
}
// Three fixture records; note "jane" has a null happiness and no sadness
// key, to cover null/absent metadata in the filter tests.
const metadatas: Metadata[] = [
  {
    name: "adam",
    date: "2021-01-01",
    count: 1,
    is_active: true,
    tags: ["a", "b"],
    location: [1.0, 2.0],
    id: 1,
    height: 10.0,
    happiness: 0.9,
    sadness: 0.1,
  },
  {
    name: "bob",
    date: "2021-01-02",
    count: 2,
    is_active: false,
    tags: ["b", "c"],
    location: [2.0, 3.0],
    id: 2,
    height: 5.7,
    happiness: 0.8,
    sadness: 0.1,
  },
  {
    name: "jane",
    date: "2021-01-01",
    count: 3,
    is_active: true,
    tags: ["b", "d"],
    location: [3.0, 4.0],
    id: 3,
    height: 2.4,
    happiness: null,
  },
];
// Page content is derived from the id (e.g. "id 1 ") so matched results can
// be traced back to their metadata entry by id alone.
const texts: string[] = metadatas.map((metadata) => `id ${metadata.id} `);
// Fixture documents pairing each generated text with its metadata record.
export const DOCUMENTS: Document[] = texts.map(
  (text, index) =>
    new Document({ pageContent: text, metadata: metadatas[index] })
);
// A filter expression plus the ids of the documents expected to match it.
interface TestCase {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  filter: Record<string, any>;
  expected: number[];
}
// Exact-match filters on one or several fields.
export const TYPE_1_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { id: 1 }, expected: [1] },
  { filter: { name: "adam" }, expected: [1] },
  { filter: { is_active: true }, expected: [1, 3] },
  { filter: { is_active: false }, expected: [2] },
  { filter: { id: 1, is_active: true }, expected: [1] },
  { filter: { id: 1, is_active: false }, expected: [] },
];
// Comparison operators ($eq/$ne/$gt/$gte/$lt/$lte) over numbers, strings
// and booleans.
export const TYPE_2_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { id: 1 }, expected: [1] },
  { filter: { id: { $ne: 1 } }, expected: [2, 3] },
  { filter: { id: { $gt: 1 } }, expected: [2, 3] },
  { filter: { id: { $gte: 1 } }, expected: [1, 2, 3] },
  { filter: { id: { $lt: 1 } }, expected: [] },
  { filter: { id: { $lte: 1 } }, expected: [1] },
  { filter: { name: "adam" }, expected: [1] },
  { filter: { name: "bob" }, expected: [2] },
  { filter: { name: { $eq: "adam" } }, expected: [1] },
  { filter: { name: { $ne: "adam" } }, expected: [2, 3] },
  { filter: { name: { $gt: "jane" } }, expected: [] },
  { filter: { name: { $gte: "jane" } }, expected: [3] },
  { filter: { name: { $lt: "jane" } }, expected: [1, 2] },
  { filter: { name: { $lte: "jane" } }, expected: [1, 2, 3] },
  { filter: { is_active: { $eq: true } }, expected: [1, 3] },
  { filter: { is_active: { $ne: true } }, expected: [2] },
  { filter: { height: { $gt: 5.0 } }, expected: [1, 2] },
  { filter: { height: { $gte: 5.0 } }, expected: [1, 2] },
  { filter: { height: { $lt: 5.0 } }, expected: [3] },
  { filter: { height: { $lte: 5.8 } }, expected: [2, 3] },
];
// Logical combinators ($and, $or).
export const TYPE_3_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { $or: [{ id: 1 }, { id: 2 }] }, expected: [1, 2] },
  { filter: { $or: [{ id: 1 }, { name: "bob" }] }, expected: [1, 2] },
  { filter: { $and: [{ id: 1 }, { id: 2 }] }, expected: [] },
  { filter: { $or: [{ id: 1 }, { id: 2 }, { id: 3 }] }, expected: [1, 2, 3] },
];
// Range ($between) and membership ($in) operators.
export const TYPE_4_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { id: { $between: [1, 2] } }, expected: [1, 2] },
  { filter: { id: { $between: [1, 1] } }, expected: [1] },
  { filter: { name: { $in: ["adam", "bob"] } }, expected: [1, 2] },
];
// SQL-LIKE pattern matching ($like).
export const TYPE_5_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { name: { $like: "a%" } }, expected: [1] },
  { filter: { name: { $like: "%a%" } }, expected: [1, 3] },
];
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/azure_cosmosdb.int.test.ts
|
/* eslint-disable no-process-env */
import { test, expect } from "@jest/globals";
import { MongoClient } from "mongodb";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { AzureCosmosDBVectorStore } from "../azure_cosmosdb.js";
// Names of the Cosmos DB database, collection and vector index shared by
// all tests in this file.
const DATABASE_NAME = "langchain";
const COLLECTION_NAME = "test";
const INDEX_NAME = "vectorSearchIndex";
/*
* To run this test, you need have an Azure Cosmos DB for vCore instance
* running. You can deploy a free version on Azure Portal without any cost,
* following this guide:
* https://learn.microsoft.com/azure/cosmos-db/mongodb/vcore/quickstart-portal
*
* You do not need to create a database or collection, it will be created
* automatically by the test.
*
* Once you have the instance running, you need to set the following environment
* variables before running the test:
* - AZURE_COSMOSDB_CONNECTION_STRING
* - AZURE_OPENAI_API_KEY
* - AZURE_OPENAI_API_INSTANCE_NAME
* - AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
* - AZURE_OPENAI_API_VERSION
*
* A regular OpenAI key can also be used instead of Azure OpenAI.
*/
describe.skip("AzureCosmosDBVectorStore", () => {
  // Validate required env vars, then empty the test collection and drop any
  // stale vector index so every test starts from a clean slate.
  beforeEach(async () => {
    expect(process.env.AZURE_COSMOSDB_CONNECTION_STRING).toBeDefined();
    // Note: when using Azure OpenAI, you have to also set these variables
    // in addition to the API key:
    // - AZURE_OPENAI_API_INSTANCE_NAME
    // - AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
    // - AZURE_OPENAI_API_VERSION
    expect(
      process.env.OPENAI_API_KEY || process.env.AZURE_OPENAI_API_KEY
    ).toBeDefined();
    const client = new MongoClient(
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      process.env.AZURE_COSMOSDB_CONNECTION_STRING!
    );
    await client.connect();
    const db = client.db(DATABASE_NAME);
    const collection = await db.createCollection(COLLECTION_NAME);
    // Make sure the database is empty
    await collection.deleteMany({});
    // Delete any existing index
    try {
      await collection.dropIndex(INDEX_NAME);
    } catch {
      // Ignore error if the index does not exist
    }
    await client.close();
  });
  // Insert documents, then retrieve the best match both directly and
  // through the retriever interface.
  test("performs similarity search", async () => {
    const vectorStore = new AzureCosmosDBVectorStore(new OpenAIEmbeddings(), {
      databaseName: DATABASE_NAME,
      collectionName: COLLECTION_NAME,
      indexName: INDEX_NAME,
      indexOptions: {
        numLists: 1,
      },
    });
    expect(vectorStore).toBeDefined();
    await vectorStore.addDocuments([
      { pageContent: "This book is about politics", metadata: { a: 1 } },
      { pageContent: "Cats sleeps a lot.", metadata: { b: 1 } },
      { pageContent: "Sandwiches taste good.", metadata: { c: 1 } },
      { pageContent: "The house is open", metadata: { d: 1, e: 2 } },
    ]);
    const results: Document[] = await vectorStore.similaritySearch(
      "sandwich",
      1
    );
    expect(results.length).toEqual(1);
    expect(results).toMatchObject([
      { pageContent: "Sandwiches taste good.", metadata: { c: 1 } },
    ]);
    const retriever = vectorStore.asRetriever({});
    const docs = await retriever.getRelevantDocuments("house");
    expect(docs).toBeDefined();
    expect(docs[0]).toMatchObject({
      pageContent: "The house is open",
      metadata: { d: 1, e: 2 },
    });
    await vectorStore.close();
  });
test("performs max marginal relevance search", async () => {
const texts = ["foo", "foo", "fox"];
const vectorStore = await AzureCosmosDBVectorStore.fromTexts(
texts,
{},
new OpenAIEmbeddings(),
{
databaseName: DATABASE_NAME,
collectionName: COLLECTION_NAME,
indexName: INDEX_NAME,
indexOptions: {
numLists: 1,
},
}
);
const output = await vectorStore.maxMarginalRelevanceSearch("foo", {
k: 10,
fetchK: 20,
lambda: 0.1,
});
expect(output).toHaveLength(texts.length);
const actual = output.map((doc) => doc.pageContent);
const expected = ["foo", "fox", "foo"];
expect(actual).toEqual(expected);
const standardRetriever = await vectorStore.asRetriever();
const standardRetrieverOutput =
await standardRetriever.getRelevantDocuments("foo");
expect(output).toHaveLength(texts.length);
const standardRetrieverActual = standardRetrieverOutput.map(
(doc) => doc.pageContent
);
const standardRetrieverExpected = ["foo", "foo", "fox"];
expect(standardRetrieverActual).toEqual(standardRetrieverExpected);
const retriever = await vectorStore.asRetriever({
searchType: "mmr",
searchKwargs: {
fetchK: 20,
lambda: 0.1,
},
});
const retrieverOutput = await retriever.getRelevantDocuments("foo");
expect(output).toHaveLength(texts.length);
const retrieverActual = retrieverOutput.map((doc) => doc.pageContent);
const retrieverExpected = ["foo", "fox", "foo"];
expect(retrieverActual).toEqual(retrieverExpected);
const similarity = await vectorStore.similaritySearchWithScore("foo", 1);
expect(similarity.length).toBe(1);
await vectorStore.close();
});
  // Deleting by explicit ids should leave only the non-deleted document.
  test("deletes documents by id", async () => {
    const vectorStore = new AzureCosmosDBVectorStore(new OpenAIEmbeddings(), {
      databaseName: DATABASE_NAME,
      collectionName: COLLECTION_NAME,
      indexName: INDEX_NAME,
      indexOptions: {
        numLists: 1,
      },
    });
    const ids = await vectorStore.addDocuments([
      { pageContent: "This book is about politics", metadata: { a: 1 } },
      {
        pageContent: "The is the house of parliament",
        metadata: { d: 1, e: 2 },
      },
    ]);
    // Delete document matching specified ids
    await vectorStore.delete({ ids: ids.slice(0, 1) });
    const results = await vectorStore.similaritySearch("politics", 10);
    expect(results.length).toEqual(1);
    expect(results[0].pageContent).toEqual("The is the house of parliament");
    await vectorStore.close();
  });
  // Deleting by a metadata filter should remove only matching documents.
  test("deletes documents by filter", async () => {
    const vectorStore = new AzureCosmosDBVectorStore(new OpenAIEmbeddings(), {
      databaseName: DATABASE_NAME,
      collectionName: COLLECTION_NAME,
      indexName: INDEX_NAME,
      indexOptions: {
        numLists: 1,
      },
    });
    await vectorStore.addDocuments([
      { pageContent: "This book is about politics", metadata: { a: 1 } },
      {
        pageContent: "The is the house of parliament",
        metadata: { d: 1, e: 2 },
      },
    ]);
    // Delete document matching the filter
    await vectorStore.delete({ filter: { a: 1 } });
    const results = await vectorStore.similaritySearch("politics", 10);
    expect(results.length).toEqual(1);
    expect(results[0].pageContent).toEqual("The is the house of parliament");
    await vectorStore.close();
  });
  // Calling delete() with no arguments should remove every document.
  test("deletes all documents", async () => {
    const vectorStore = new AzureCosmosDBVectorStore(new OpenAIEmbeddings(), {
      databaseName: DATABASE_NAME,
      collectionName: COLLECTION_NAME,
      indexName: INDEX_NAME,
      indexOptions: {
        numLists: 1,
      },
    });
    await vectorStore.addDocuments([
      { pageContent: "This book is about politics", metadata: { a: 1 } },
      {
        pageContent: "The is the house of parliament",
        metadata: { d: 1, e: 2 },
      },
    ]);
    // Delete all documents
    await vectorStore.delete();
    const results = await vectorStore.similaritySearch("politics", 10);
    expect(results.length).toEqual(0);
    await vectorStore.close();
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/analyticdb.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable import/no-extraneous-dependencies */
import { test } from "@jest/globals";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { AnalyticDBVectorStore } from "../analyticdb.js";
// AnalyticDB connection settings, taken from the environment with
// local-development fallbacks.
const connectionOptions = {
  host: process.env.ANALYTICDB_HOST || "localhost",
  port: Number(process.env.ANALYTICDB_PORT) || 5432,
  database: process.env.ANALYTICDB_DATABASE || "your_database",
  user: process.env.ANALYTICDB_USERNAME || "username",
  password: process.env.ANALYTICDB_PASSWORD || "password",
};
const embeddings = new OpenAIEmbeddings();
// Dimensionality of OpenAI's default text embeddings.
const _LANGCHAIN_DEFAULT_EMBEDDING_DIM = 1536;
// Fail fast when the AnalyticDB connection settings are not configured.
beforeAll(async () => {
  expect(process.env.ANALYTICDB_HOST).toBeDefined();
  expect(process.env.ANALYTICDB_PORT).toBeDefined();
  expect(process.env.ANALYTICDB_DATABASE).toBeDefined();
  expect(process.env.ANALYTICDB_USERNAME).toBeDefined();
  // BUG FIX: previously checked ANALYTICDB_USERNAME twice; the password
  // variable was never validated.
  expect(process.env.ANALYTICDB_PASSWORD).toBeDefined();
});
// Basic round-trip: add documents and retrieve the closest match.
test.skip("test analyticdb", async () => {
  const vectorStore = new AnalyticDBVectorStore(embeddings, {
    connectionOptions,
    collectionName: "test_collection",
    preDeleteCollection: true,
  });
  expect(vectorStore).toBeDefined();
  // Timestamp makes each run's metadata unique.
  const createdAt = new Date().getTime();
  await vectorStore.addDocuments([
    { pageContent: "hi", metadata: { a: createdAt } },
    { pageContent: "bye", metadata: { a: createdAt } },
    { pageContent: "what's this", metadata: { a: createdAt } },
    { pageContent: createdAt.toString(), metadata: { a: createdAt } },
  ]);
  const results = await vectorStore.similaritySearch("what's this", 1);
  expect(results).toHaveLength(1);
  expect(results).toEqual([
    new Document({
      pageContent: "what's this",
      metadata: { a: createdAt },
    }),
  ]);
  await vectorStore.end();
});
// Similarity search restricted by a metadata filter should only return the
// single document whose metadata matches both keys.
test.skip("test analyticdb using filter", async () => {
  const vectorStore = new AnalyticDBVectorStore(embeddings, {
    connectionOptions,
    collectionName: "test_collection",
    embeddingDimension: _LANGCHAIN_DEFAULT_EMBEDDING_DIM,
    preDeleteCollection: true,
  });
  expect(vectorStore).toBeDefined();
  const createdAt = new Date().getTime();
  await vectorStore.addDocuments([
    { pageContent: "foo", metadata: { a: createdAt, b: createdAt + 6 } },
    { pageContent: "bar", metadata: { a: createdAt + 1, b: createdAt + 7 } },
    { pageContent: "baz", metadata: { a: createdAt + 2, b: createdAt + 8 } },
    { pageContent: "foo", metadata: { a: createdAt + 3, b: createdAt + 9 } },
    { pageContent: "bar", metadata: { a: createdAt + 4, b: createdAt + 10 } },
    { pageContent: "baz", metadata: { a: createdAt + 5, b: createdAt + 11 } },
  ]);
  const results = await vectorStore.similaritySearch("bar", 1, {
    a: createdAt + 4,
    b: createdAt + 10,
  });
  expect(results).toHaveLength(1);
  expect(results).toEqual([
    new Document({
      pageContent: "bar",
      metadata: { a: createdAt + 4, b: createdAt + 10 },
    }),
  ]);
  await vectorStore.end();
});
// fromTexts should pair each text with its metadata entry by position.
test.skip("test analyticdb from texts", async () => {
  const vectorStore = await AnalyticDBVectorStore.fromTexts(
    ["Bye bye", "Hello world", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    embeddings,
    {
      connectionOptions,
      collectionName: "test_collection",
      embeddingDimension: _LANGCHAIN_DEFAULT_EMBEDDING_DIM,
      preDeleteCollection: true,
    }
  );
  expect(vectorStore).toBeDefined();
  const results = await vectorStore.similaritySearch("hello world", 1);
  expect(results).toHaveLength(1);
  expect(results).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 1, name: "1" },
    }),
  ]);
  await vectorStore.end();
});
// Reattaching to an existing collection (preDeleteCollection: false) must
// see previously-inserted rows and accept new ones.
test.skip("test analyticdb from existing index", async () => {
  await AnalyticDBVectorStore.fromTexts(
    ["Bye bye", "Hello world", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    embeddings,
    {
      connectionOptions,
      collectionName: "test_collection",
      embeddingDimension: _LANGCHAIN_DEFAULT_EMBEDDING_DIM,
      preDeleteCollection: true,
    }
  );
  const vectorStore = await AnalyticDBVectorStore.fromExistingIndex(
    embeddings,
    {
      connectionOptions,
      collectionName: "test_collection",
      embeddingDimension: _LANGCHAIN_DEFAULT_EMBEDDING_DIM,
      preDeleteCollection: false,
    }
  );
  const result1 = await vectorStore.similaritySearch("hello world", 1);
  expect(result1).toHaveLength(1);
  expect(result1).toEqual([
    { pageContent: "Hello world", metadata: { id: 1, name: "1" } },
  ]);
  await vectorStore.addDocuments([
    { pageContent: "bar", metadata: { id: 4, name: "4" } },
    { pageContent: "baz", metadata: { id: 5, name: "5" } },
  ]);
  const result2 = await vectorStore.similaritySearch("bar", 2);
  expect(result2).toHaveLength(2);
  expect(result2).toEqual([
    { pageContent: "bar", metadata: { id: 4, name: "4" } },
    { pageContent: "baz", metadata: { id: 5, name: "5" } },
  ]);
  await vectorStore.end();
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/closevector_node.test.ts
|
import { test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { CloseVectorNode } from "../closevector/node.js";
// fromTexts seeds the index with one vector; addVectors should grow the
// index and similaritySearchVectorWithScore should rank by similarity.
test("Test CloseVectorNode.fromTexts + addVectors", async () => {
  const vectorStore = await CloseVectorNode.fromTexts(
    ["Hello world"],
    [{ id: 2 }],
    new FakeEmbeddings()
  );
  expect(vectorStore.instance.index?.getMaxElements()).toBe(1);
  expect(vectorStore.instance.index?.getCurrentCount()).toBe(1);
  await vectorStore.addVectors(
    [
      [0, 1, 0, 0],
      [1, 0, 0, 0],
      [0.5, 0.5, 0.5, 0.5],
    ],
    [
      new Document({
        pageContent: "hello bye",
        metadata: { id: 5 },
      }),
      new Document({
        pageContent: "hello worlddwkldnsk",
        metadata: { id: 4 },
      }),
      new Document({
        pageContent: "hello you",
        metadata: { id: 6 },
      }),
    ]
  );
  // 1 initial + 3 appended vectors.
  expect(vectorStore.instance.index?.getMaxElements()).toBe(4);
  const resultTwo = await vectorStore.similaritySearchVectorWithScore(
    [1, 0, 0, 0],
    3
  );
  const resultTwoMetadatas = resultTwo.map(([{ metadata }]) => metadata);
  expect(resultTwoMetadatas).toEqual([{ id: 4 }, { id: 6 }, { id: 2 }]);
});
// A metadata filter should narrow three identical documents down to the
// single one whose id matches.
test("Test CloseVectorNode metadata filtering", async () => {
  const pageContent = "Hello world";
  const store = await CloseVectorNode.fromTexts(
    [pageContent, pageContent, pageContent],
    [{ id: 2 }, { id: 3 }, { id: 4 }],
    new FakeEmbeddings()
  );
  // If the filter wasn't working, we'd get all 3 documents back
  const results = await store.similaritySearch(pageContent, 3, (doc) => {
    return doc.metadata.id === 3;
  });
  expect(results).toEqual([new Document({ metadata: { id: 3 }, pageContent })]);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/hdb.d.ts
|
declare module "hdb";
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/hanavector.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-explicit-any */
import hdbClient from "hdb";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { test, expect } from "@jest/globals";
import { HanaDB, HanaDBArgs } from "../hanavector.js";
import {
DOCUMENTS,
TYPE_1_FILTERING_TEST_CASES,
TYPE_2_FILTERING_TEST_CASES,
TYPE_3_FILTERING_TEST_CASES,
TYPE_4_FILTERING_TEST_CASES,
TYPE_5_FILTERING_TEST_CASES,
TYPE_6_FILTERING_TEST_CASES,
} from "./hanavector.fixtures.js";
// Connection parameters
// SAP HANA connection parameters, taken from the environment.
const connectionParams = {
  host: process.env.HANA_HOST,
  port: process.env.HANA_PORT,
  user: process.env.HANA_UID,
  password: process.env.HANA_PWD,
  // useCesu8 : false
};
// Fake normalized embeddings which remember all the texts seen so far to return consistent vectors for the same texts.
// Fake embeddings that are deterministic per text and L2-normalized: each
// previously unseen text is assigned the next integer index, and its raw
// vector is `dimensionality - 1` ones followed by that index.
class NormalizedConsistentFakeEmbeddings extends FakeEmbeddings {
  // Texts seen so far; a text's position here determines its vector.
  private knownTexts: string[];

  private dimensionality: number;

  constructor(dimensionality = 10) {
    super();
    this.knownTexts = [];
    this.dimensionality = dimensionality;
  }

  // Scale a vector to unit (L2) length.
  private normalize(vector: number[]): number[] {
    let sumOfSquares = 0;
    for (const component of vector) {
      sumOfSquares += component * component;
    }
    const norm = Math.sqrt(sumOfSquares);
    return vector.map((component) => component / norm);
  }

  public async embedDocuments(texts: string[]): Promise<number[][]> {
    const vectors: number[][] = [];
    for (const text of texts) {
      let index = this.knownTexts.indexOf(text);
      if (index === -1) {
        // First time we see this text: remember it and use its position.
        this.knownTexts.push(text);
        index = this.knownTexts.length - 1;
      }
      const raw = new Array(this.dimensionality - 1).fill(1.0).concat(index);
      vectors.push(this.normalize(raw));
    }
    return vectors;
  }

  public async embedQuery(text: string): Promise<number[]> {
    const [embedding] = await this.embedDocuments([text]);
    return embedding;
  }
}
// Shared embeddings instance and HANA client used by every test below.
const embeddings = new NormalizedConsistentFakeEmbeddings();
const client = hdbClient.createClient(connectionParams);
// Open the shared HANA connection, resolving once the driver reports
// success. Connection failures are intentionally swallowed so setup stays
// best-effort; individual tests will fail loudly if the connection is bad.
async function connectToHANA() {
  const connected = new Promise<void>((resolve, reject) => {
    client.connect((err: Error) => (err ? reject(err) : resolve()));
  });
  try {
    await connected;
  } catch (error) {
    // Ignored on purpose (see note above).
  }
}
// Run a SQL string through the callback-based `client.exec`, surfacing the
// result (or error) as a Promise.
function executeQuery(client: any, query: string): Promise<any> {
  return new Promise((resolve, reject) => {
    client.exec(query, (err: Error, result: any) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(result);
    });
  });
}
// Prepare a SQL statement via the callback-based `client.prepare`,
// resolving with the prepared statement handle.
function prepareQuery(client: any, query: string): Promise<any> {
  return new Promise((resolve, reject) => {
    client.prepare(query, (err: Error, statement: any) =>
      err ? reject(err) : resolve(statement)
    );
  });
}
// Execute a prepared statement with the given parameters, resolving with
// the driver's result set.
function executeStatement(statement: any, params: any): Promise<any> {
  return new Promise((resolve, reject) => {
    statement.exec(params, (err: Error, res: any) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(res);
    });
  });
}
// Validate connection env vars and open the shared HANA connection once
// for the whole suite.
beforeAll(async () => {
  expect(process.env.HANA_HOST).toBeDefined();
  expect(process.env.HANA_PORT).toBeDefined();
  expect(process.env.HANA_UID).toBeDefined();
  expect(process.env.HANA_PWD).toBeDefined();
  await connectToHANA();
});
// Release the shared connection when the suite finishes.
afterAll(async () => {
  client.disconnect();
});
// Best-effort drop of `tableName` so each test can start from a clean
// slate; "table does not exist" errors are deliberately swallowed.
async function dropTable(client: any, tableName: string) {
  const statement = `DROP TABLE "${tableName}"`;
  try {
    await executeQuery(client, statement);
    // console.log(`Table ${tableName} dropped successfully.`);
  } catch (error) {
    // console.error(`Error dropping table ${tableName}:`, error);
  }
}
// The store should lazily create its backing table: absent before
// initialize(), present afterwards.
test("test initialization and table non-exist", async () => {
  const tableNameTest = "TABLE_INITIALIZE";
  const args: HanaDBArgs = {
    connection: client,
    tableName: tableNameTest,
  };
  const vectorStore = new HanaDB(embeddings, args);
  expect(vectorStore).toBeDefined();
  await dropTable(client, tableNameTest);
  let result = await vectorStore.tableExists(tableNameTest);
  expect(result).toEqual(false);
  await vectorStore.initialize();
  result = await vectorStore.tableExists(tableNameTest);
  expect(result).toEqual(true);
});
describe("add documents and similarity search tests", () => {
  // Round-trip: build a store from raw texts and retrieve the best match.
  test("test fromText and default similarity search", async () => {
    const tableNameTest = "TEST_ADD_TEXT";
    await dropTable(client, tableNameTest);
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    const vectorStore = await HanaDB.fromTexts(
      ["Bye bye", "Hello world", "hello nice world"],
      [
        { id: 2, name: "2" },
        { id: 1, name: "1" },
        { id: 3, name: "3" },
      ],
      embeddings,
      args
    );
    expect(vectorStore).toBeDefined();
    const results = await vectorStore.similaritySearch("Hello world", 1);
    // console.log(results)
    expect(results).toHaveLength(1);
    expect(results).toEqual([
      new Document({
        pageContent: "Hello world",
        metadata: { id: 1, name: "1" },
      }),
    ]);
  });
  // addVectors with caller-supplied embeddings should create the table and
  // store the rows without invoking the embedding model.
  test("test addVector with provided embedding", async () => {
    const tableNameTest = "TEST_ADD_VEC_WITH_EMBEDDING";
    await dropTable(client, tableNameTest);
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    const vectorStore = new HanaDB(embeddings, args);
    await vectorStore.initialize();
    expect(vectorStore).toBeDefined();
    await vectorStore.addVectors(
      [
        [1, 2],
        [3, 4],
        [3, 5],
      ],
      [
        {
          pageContent: "Bye bye",
          metadata: {
            id: 2,
            name: "2",
          },
        },
        {
          pageContent: "Hello world",
          metadata: {
            id: 1,
            name: "1",
          },
        },
        {
          pageContent: "hello nice world",
          metadata: {
            id: 3,
            name: "3",
          },
        },
      ]
    );
    expect(await vectorStore.tableExists(tableNameTest)).toBe(true);
  });
  // addDocuments with a euclidean distance strategy, checked both via
  // direct similarity search and via the retriever interface.
  test("performs addDocument and user defined similarity search", async () => {
    const tableNameTest = "TEST_ADD_DOC";
    await dropTable(client, tableNameTest);
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
      distanceStrategy: "euclidean",
    };
    const vectorStore = new HanaDB(embeddings, args);
    await vectorStore.initialize();
    expect(vectorStore).toBeDefined();
    await vectorStore.addDocuments([
      {
        pageContent: "This book is about politics",
        metadata: {
          source: "doc1",
          attributes: [{ key: "a", value: "1" }],
        },
      },
      {
        pageContent: "Cats sleeps a lot.",
        metadata: {
          source: "doc2",
          attributes: [{ key: "b", value: "1" }],
        },
      },
      {
        pageContent: "Sandwiches taste good.",
        metadata: {
          source: "doc3",
          attributes: [{ key: "c", value: "1" }],
        },
      },
      {
        pageContent: "The house is open",
        metadata: {
          source: "doc4",
          attributes: [
            { key: "d", value: "1" },
            { key: "e", value: "2" },
          ],
        },
      },
    ]);
    const results: Document[] = await vectorStore.similaritySearch(
      "Sandwiches taste good.",
      1
    );
    expect(results.length).toEqual(1);
    expect(results).toMatchObject([
      {
        pageContent: "Sandwiches taste good.",
        metadata: {
          source: "doc3",
          attributes: [{ key: "c", value: "1" }],
        },
      },
    ]);
    const retriever = vectorStore.asRetriever({});
    const docs = await retriever.getRelevantDocuments("house");
    expect(docs).toBeDefined();
    expect(docs[0]).toMatchObject({
      pageContent: "The house is open",
      metadata: {
        source: "doc4",
        attributes: [
          { key: "d", value: "1" },
          { key: "e", value: "2" },
        ],
      },
    });
  });
test("performs max marginal relevance search", async () => {
const tableNameTest = "TEST_MRR";
await dropTable(client, tableNameTest);
const args: HanaDBArgs = {
connection: client,
tableName: tableNameTest,
};
const texts = ["foo", "foo", "fox"];
const vectorStore = await HanaDB.fromTexts(texts, {}, embeddings, args);
const output = await vectorStore.maxMarginalRelevanceSearch("foo", {
k: 3,
fetchK: 20,
lambda: 0,
});
expect(output).toHaveLength(3);
const actual = output.map((doc) => doc.pageContent);
// console.log(actual);
const expected = ["foo", "fox", "foo"];
expect(actual).toEqual(expected);
const standardRetriever = vectorStore.asRetriever();
const standardRetrieverOutput =
await standardRetriever.getRelevantDocuments("foo");
expect(output).toHaveLength(texts.length);
const standardRetrieverActual = standardRetrieverOutput.map(
(doc) => doc.pageContent
);
const standardRetrieverExpected = ["foo", "foo", "fox"];
expect(standardRetrieverActual).toEqual(standardRetrieverExpected);
const retriever = vectorStore.asRetriever({
searchType: "mmr",
searchKwargs: {
fetchK: 20,
lambda: 0.1,
},
});
const retrieverOutput = await retriever.getRelevantDocuments("foo");
expect(output).toHaveLength(texts.length);
const retrieverActual = retrieverOutput.map((doc) => doc.pageContent);
const retrieverExpected = ["foo", "fox", "foo"];
expect(retrieverActual).toEqual(retrieverExpected);
const similarity = await vectorStore.similaritySearchWithScore("foo", 1);
expect(similarity.length).toBe(1);
});
  // Metadata filtering over string, number and boolean fields; each filter
  // should select the single matching document.
  test("test query documents with specific metadata", async () => {
    const tableNameTest = "TEST_FILTER";
    await dropTable(client, tableNameTest);
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    // client.connect(connectionParams);
    const vectorStore = new HanaDB(embeddings, args);
    await vectorStore.initialize();
    expect(vectorStore).toBeDefined();
    const docs: Document[] = [
      {
        pageContent: "foo",
        metadata: {
          start: 100,
          end: 150,
          docName: "foo.txt",
          quality: "bad",
          ready: true,
        },
      },
      {
        pageContent: "bar",
        metadata: {
          start: 200,
          end: 250,
          docName: "bar.txt",
          quality: "good",
          ready: false,
        },
      },
    ];
    await vectorStore.addDocuments(docs);
    // Filter on a string-valued metadata field.
    const filterString = { quality: "bad" };
    const query = "foo";
    const resultsString = await vectorStore.similaritySearch(
      query,
      1,
      filterString
    );
    expect(resultsString.length).toEqual(1);
    expect(resultsString).toMatchObject([
      {
        pageContent: "foo",
        metadata: {
          start: 100,
          end: 150,
          docName: "foo.txt",
          quality: "bad",
          ready: true,
        },
      },
    ]);
    // Filter on numeric fields (both must match).
    const filterNumber = { start: 100, end: 150 };
    const resultsNumber = await vectorStore.similaritySearch(
      query,
      1,
      filterNumber
    );
    expect(resultsNumber.length).toEqual(1);
    expect(resultsNumber).toMatchObject([
      {
        pageContent: "foo",
        metadata: {
          start: 100,
          end: 150,
          docName: "foo.txt",
          quality: "bad",
          ready: true,
        },
      },
    ]);
    // Filter on a boolean field.
    const filterBool = { ready: true };
    const resultsBool = await vectorStore.similaritySearch(
      query,
      1,
      filterBool
    );
    expect(resultsBool.length).toEqual(1);
    expect(resultsBool).toMatchObject([
      {
        pageContent: "foo",
        metadata: {
          start: 100,
          end: 150,
          docName: "foo.txt",
          quality: "bad",
          ready: true,
        },
      },
    ]);
  });
  // With the default (cosine) strategy an exact match scores 1.0 and scores
  // decrease monotonically down the result list, bounded below by 0.
  test("test similarity search with score", async () => {
    const tableNameTest = "TEST_TABLE_SCORE";
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    await dropTable(client, tableNameTest);
    const texts = ["foo", "bar", "baz"];
    const vectorDB = await HanaDB.fromTexts(texts, {}, embeddings, args);
    const searchResult = await vectorDB.similaritySearchWithScore(texts[0], 3);
    expect(searchResult[0][0].pageContent).toEqual(texts[0]);
    expect(searchResult[0][1]).toEqual(1.0);
    expect(searchResult[1][1]).toBeLessThanOrEqual(searchResult[0][1]);
    expect(searchResult[2][1]).toBeLessThanOrEqual(searchResult[1][1]);
    expect(searchResult[2][1]).toBeGreaterThanOrEqual(0.0);
  });
  // With the euclidean strategy an exact match scores 0.0 (distance) and
  // scores increase monotonically down the result list.
  test("test similarity search with score with euclidian distance", async () => {
    const tableNameTest = "TEST_TABLE_SCORE_DISTANCE";
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
      distanceStrategy: "euclidean",
    };
    await dropTable(client, tableNameTest);
    const texts = ["foo", "bar", "baz"];
    const vectorDB = await HanaDB.fromTexts(texts, {}, embeddings, args);
    const searchResult = await vectorDB.similaritySearchWithScore(texts[0], 3);
    expect(searchResult[0][0].pageContent).toEqual(texts[0]);
    expect(searchResult[0][1]).toEqual(0.0);
    expect(searchResult[1][1]).toBeGreaterThanOrEqual(searchResult[0][1]);
    expect(searchResult[2][1]).toBeGreaterThanOrEqual(searchResult[1][1]);
  });
  // Searching with a raw query vector (bypassing query embedding) should
  // return the document whose text produced that vector.
  test("test similarity search by vector", async () => {
    const tableNameTest = "TEST_TABLE_SEARCH_SIMPLE_VECTOR";
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    await dropTable(client, tableNameTest);
    const texts = ["foo", "bar", "baz"];
    const vectorDB = await HanaDB.fromTexts(texts, {}, embeddings, args);
    const vector = await embeddings.embedQuery(texts[0]);
    const searchResult = await vectorDB.similaritySearchVectorWithScore(
      vector,
      1
    );
    expect(searchResult[0][0].pageContent).toEqual(texts[0]);
    expect(texts[1]).not.toEqual(searchResult[0][0].pageContent);
  });
});
describe("Deletion tests", () => {
  // delete() requires exactly one selector: calling it with neither `ids`
  // nor `filter`, or with both at once, must raise.
  test("test hanavector delete called wrong", async () => {
    const tableNameTest = "TEST_TABLE_DELETE_FILTER_WRONG";
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    const texts = ["foo", "foo", "fox"];
    await dropTable(client, tableNameTest);
    const vectorStore = await HanaDB.fromTexts(texts, {}, embeddings, args);
    let exceptionOccurred = false;
    try {
      // No ids and no filter -> expected to throw.
      await vectorStore.delete({});
    } catch (error) {
      exceptionOccurred = true;
      // console.log(error);
    }
    expect(exceptionOccurred).toBe(true);
    // Delete with ids parameter
    exceptionOccurred = false;
    try {
      // ids and filter supplied together -> also expected to throw.
      await vectorStore.delete({
        ids: ["id1", "id"],
        filter: { start: 100, end: 200 },
      });
    } catch (error) {
      exceptionOccurred = true;
      // console.log(error);
    }
    expect(exceptionOccurred).toBe(true);
  });
  // A metadata filter should delete every row matching it; here the filter
  // { end: 250 } matches only the "good"-quality row, so no "good" rows
  // remain afterwards.
  test("test delete documents with specific metadata", async () => {
    const tableNameTest = "DELETE_WITH_META";
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    // client.connect(connectionParams);
    const vectorStore = new HanaDB(embeddings, args);
    await dropTable(client, tableNameTest);
    await vectorStore.initialize();
    expect(vectorStore).toBeDefined();
    const docs: Document[] = [
      {
        pageContent: "foo",
        metadata: { start: 100, end: 150, docName: "foo.txt", quality: "bad" },
      },
      {
        pageContent: "bar",
        metadata: { start: 200, end: 250, docName: "bar.txt", quality: "good" },
      },
    ];
    await vectorStore.addDocuments(docs);
    const filterTest = { end: 250 };
    await vectorStore.delete({ filter: filterTest });
    // Verify directly via SQL that the deleted row's metadata is gone.
    const sql = `SELECT COUNT(*) AS ROW_COUNT FROM "${args.tableName}" WHERE JSON_VALUE(VEC_META, '$.quality') = ?`;
    const statement = await prepareQuery(client, sql);
    const result = await executeStatement(statement, ["good"]);
    expect(result[0].ROW_COUNT).toEqual(0);
  });
  // An empty (but present) filter object means "match everything": the table
  // must be fully emptied.
  test("test delete with empty filter", async () => {
    const tableNameTest = "TEST_DELETE_ALL";
    const texts = ["foo", "bar", "baz"];
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    // // client.connect(connectionParams);
    await dropTable(client, tableNameTest);
    const vectorStore = await HanaDB.fromTexts(texts, [], embeddings, args);
    const filterTest = {};
    await vectorStore.delete({ filter: filterTest });
    const sql = `SELECT COUNT(*) AS ROW_COUNT FROM "${args.tableName}"`;
    const result = await executeQuery(client, sql);
    expect(result[0].ROW_COUNT).toEqual(0);
  });
});
describe("Tests on HANA side", () => {
  // initialize() must create the backing table when it does not exist yet.
  test("hanavector non existing table", async () => {
    const tableNameTest = "NON_EXISTING";
    await dropTable(client, tableNameTest);
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    const vectordb = new HanaDB(embeddings, args);
    await vectordb.initialize();
    expect(await vectordb.tableExists(tableNameTest)).toBe(true);
  });
  // A pre-existing table that lacks the expected columns must be rejected
  // during initialize() rather than silently reused.
  test("hanavector table with missing columns", async () => {
    const tableNameTest = "EXISTING_MISSING_COLS";
    // Drop the table if it exists and create a new one with a wrong column
    // try {
    await dropTable(client, tableNameTest);
    const sqlStr = `CREATE TABLE ${tableNameTest} (WRONG_COL NVARCHAR(500));`;
    await executeQuery(client, sqlStr);
    // } catch (error) {
    //   console.error("Error while setting up the table:", error);
    //   throw error;
    // }
    // Check if an error is raised when trying to create HanaDB instance
    let exceptionOccurred = false;
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    try {
      // eslint-disable-next-line no-new
      const vectordb = new HanaDB(embeddings, args);
      await vectordb.initialize();
    } catch (error) {
      // An Error is expected here
      // console.log(error);
      exceptionOccurred = true;
    }
    // Assert that an exception occurred
    expect(exceptionOccurred).toBe(true);
  });
  // Columns that exist but carry the wrong SQL types (INTEGER instead of
  // text/JSON/vector) must likewise cause initialize() to fail.
  test("hanavector table with wrong typed columns", async () => {
    const tableNameTest = "EXISTING_WRONG_TYPES";
    const contentColumnTest = "DOC_TEXT";
    const metadataColumnTest = "DOC_META";
    const vectorColumnTest = "DOC_VECTOR";
    // Drop the table if it exists and create a new one with a wrong column
    await dropTable(client, tableNameTest);
    const sqlStr = `CREATE TABLE ${tableNameTest} (${contentColumnTest} INTEGER,
    ${metadataColumnTest} INTEGER, ${vectorColumnTest} INTEGER);`;
    await executeQuery(client, sqlStr);
    // Check if an error is raised when trying to create HanaDB instance
    let exceptionOccurred = false;
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
      contentColumn: contentColumnTest,
      metadataColumn: metadataColumnTest,
      vectorColumn: vectorColumnTest,
    };
    try {
      // eslint-disable-next-line no-new
      const vectordb = new HanaDB(embeddings, args);
      await vectordb.initialize();
    } catch (error) {
      // An Error is expected here
      // console.log(error);
      exceptionOccurred = true;
    }
    // Assert that an exception occurred
    expect(exceptionOccurred).toBe(true);
  });
  // When a fixed vector length is configured, the created table's vector
  // column must be declared REAL_VECTOR with exactly that dimension.
  test("hanavector non existing table fixed vector length", async () => {
    const tableNameTest = "NON_EXISTING";
    const vectorColumnTest = "MY_VECTOR";
    const vectorColumnLengthTest = 42;
    // Drop the table if it exists and create a new one with a wrong column
    await dropTable(client, tableNameTest);
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
      vectorColumn: vectorColumnTest,
      vectorColumnLength: vectorColumnLengthTest,
    };
    const vectorStore = new HanaDB(embeddings, args);
    await vectorStore.initialize();
    expect(await vectorStore.tableExists(tableNameTest)).toBe(true);
    await vectorStore.checkColumn(
      tableNameTest,
      vectorColumnTest,
      "REAL_VECTOR",
      vectorColumnLengthTest
    );
  });
  // Sanity-checks HANA's JSON_VALUE filtering both with inlined literals
  // and with prepared-statement parameters, across number/string/boolean
  // metadata values. NOTE(review): booleans are compared via the strings
  // "true"/"false" when bound as parameters — confirm this matches how the
  // store serializes boolean metadata.
  test("test hanavector filter prepared statement params", async () => {
    const tableNameTest = "TEST_TABLE_FILTER_PARAM";
    // Delete table if it exists
    await dropTable(client, tableNameTest); // Assuming dropTable function is defined elsewhere
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    const docs: Document[] = [
      {
        pageContent: "foo",
        metadata: { start: 0, end: 100, quality: "good", ready: true },
      },
      {
        pageContent: "bar",
        metadata: { start: 100, end: 200, quality: "bad", ready: false },
      },
      {
        pageContent: "baz",
        metadata: { start: 200, end: 300, quality: "ugly", ready: true },
      },
    ];
    await HanaDB.fromDocuments(docs, embeddings, args);
    // Query for JSON_VALUE(VEC_META, '$.start') = '100'
    let sqlStr = `SELECT * FROM ${tableNameTest} WHERE JSON_VALUE(VEC_META, '$.start') = '100'`;
    let result = await executeQuery(client, sqlStr);
    expect(result.length).toBe(1);
    // let stm = client.prepare(sqlStr);
    // let resultSet = stm.execQuery();
    // let rowCount = resultSet.getRowCount();
    // expect(rowCount).toBe(1);
    // Using prepared statement parameter for query_value = 100
    const queryValue1 = 100;
    sqlStr = `SELECT * FROM ${tableNameTest} WHERE JSON_VALUE(VEC_META, '$.start') = ?`;
    // stm = client.prepare(sqlStr);
    // resultSet = stm.execQuery([queryValue1]);
    // rowCount = resultSet.getRowCount();
    // expect(rowCount).toBe(1);
    let stm = await prepareQuery(client, sqlStr);
    result = await executeStatement(stm, [queryValue1.toString()]);
    expect(result.length).toBe(1);
    // Query for JSON_VALUE(VEC_META, '$.quality') = 'good'
    sqlStr = `SELECT * FROM ${tableNameTest} WHERE JSON_VALUE(VEC_META, '$.quality') = 'good'`;
    // stm = client.prepare(sqlStr);
    // resultSet = stm.execQuery();
    // rowCount = resultSet.getRowCount();
    // expect(rowCount).toBe(1);
    result = await executeQuery(client, sqlStr);
    expect(result.length).toBe(1);
    // Using prepared statement parameter for query_value = "good"
    const queryValue2 = "good";
    sqlStr = `SELECT * FROM ${tableNameTest} WHERE JSON_VALUE(VEC_META, '$.quality') = ?`;
    stm = await prepareQuery(client, sqlStr);
    result = await executeStatement(stm, [queryValue2]);
    expect(result.length).toBe(1);
    // Query for JSON_VALUE(VEC_META, '$.ready') = false
    sqlStr = `SELECT * FROM ${tableNameTest} WHERE JSON_VALUE(VEC_META, '$.ready') = false`;
    result = await executeQuery(client, sqlStr);
    expect(result.length).toBe(1);
    // Using prepared statement parameter for query_value = "true"
    const queryValue3 = "true";
    sqlStr = `SELECT * FROM ${tableNameTest} WHERE JSON_VALUE(VEC_META, '$.ready') = ?`;
    stm = await prepareQuery(client, sqlStr);
    result = await executeStatement(stm, [queryValue3]);
    expect(result.length).toBe(2);
    // Using prepared statement parameter for query_value = "false"
    const queryValue4 = "false";
    sqlStr = `SELECT * FROM ${tableNameTest} WHERE JSON_VALUE(VEC_META, '$.ready') = ?`;
    stm = await prepareQuery(client, sqlStr);
    result = await executeStatement(stm, [queryValue4]);
    expect(result.length).toBe(1);
  });
  // Mixed-case identifiers require quoting in HANA; the store must create
  // and populate such a table correctly.
  test("test hanavector table mixed case names", async () => {
    const tableNameTest = "MyTableName";
    const contentColumnTest = "TextColumn";
    const metadataColumnTest = "MetaColumn";
    const vectorColumnTest = "VectorColumn";
    await dropTable(client, tableNameTest);
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
      metadataColumn: metadataColumnTest,
      contentColumn: contentColumnTest,
      vectorColumn: vectorColumnTest,
    };
    const texts = ["foo", "foo", "fox"];
    await HanaDB.fromTexts(texts, [], embeddings, args);
    // Check that embeddings have been created in the table
    const numberOfTexts = texts.length;
    const sqlStr = `SELECT COUNT(*) AS COUNT FROM "${tableNameTest}"`;
    const result = await executeQuery(client, sqlStr);
    expect(result[0].COUNT).toBe(numberOfTexts);
    // const stm = client.prepare(sqlStr);
    // const resultSet = stm.execQuery();
    // while (resultSet.next()) {
    //   numberOfRows = resultSet.getValue(0);
    //   expect(numberOfRows).toBe(numberOfTexts);
    // }
  });
  // Metadata keys containing whitespace or other illegal characters must be
  // rejected when documents are inserted.
  test("test invalid metadata keys", async () => {
    const tableNameTest = "TEST_TABLE_INVALID_METADATA";
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    await dropTable(client, tableNameTest);
    const invalidMetadatas1 = [
      { "sta rt": 0, end: 100, quality: "good", ready: true },
    ];
    let exceptionOccurred = false;
    try {
      await HanaDB.fromTexts(
        ["foo", "bar", "baz"],
        invalidMetadatas1,
        embeddings,
        args
      );
    } catch (error) {
      // console.log(error);
      exceptionOccurred = true;
    }
    expect(exceptionOccurred).toBe(true);
    const invalidMetadatas2 = [
      { "sta/nrt": 0, end: 100, quality: "good", ready: true },
    ];
    exceptionOccurred = false;
    try {
      await HanaDB.fromTexts(
        ["foo", "bar", "baz"],
        invalidMetadatas2,
        embeddings,
        args
      );
    } catch (error) {
      // console.log(error);
      exceptionOccurred = true;
    }
    expect(exceptionOccurred).toBe(true);
  });
  // Filter values of unsupported types (here a float used as a direct
  // equality value) must raise during similaritySearch.
  test("test hanavector similarity search with metadata filter invalid type", async () => {
    const tableNameTest = "TEST_TABLE_FILTER_INVALID_TYPE";
    const args: HanaDBArgs = {
      connection: client,
      tableName: tableNameTest,
    };
    await dropTable(client, tableNameTest);
    let exceptionOccurred = false;
    const vector = await HanaDB.fromTexts(
      ["foo", "bar", "baz"],
      {},
      embeddings,
      args
    );
    try {
      await vector.similaritySearch("foo", 3, { wrong_type: 0.1 });
    } catch (error) {
      // console.log(error);
      exceptionOccurred = true;
    }
    expect(exceptionOccurred).toBe(true);
  });
});
describe("HNSW Index Creation Tests", () => {
  test("test HNSW index creation with default values", async () => {
    /**
     * Description:
     * This test verifies that the HNSW index can be successfully created with default values
     * when no parameters are passed to the createHnswIndex function.
     */
    const tableNameTest = "TEST_TABLE_HNSW_DEFAULT";
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Cleanup: Drop table if exists
    await dropTable(client, tableNameTest);
    // Create HanaDB instance and add data
    const vector = await HanaDB.fromTexts(
      ["foo", "bar", "baz"],
      {},
      embeddings,
      args
    );
    let exceptionOccurred = false;
    try {
      // Call the createHnswIndex function with no parameters (default values)
      await vector.createHnswIndex();
    } catch (error) {
      console.log(error);
      exceptionOccurred = true;
    }
    // Assert that no exception occurred
    expect(exceptionOccurred).toBe(false);
  });
  test("test HNSW index creation with specific values", async () => {
    /**
     * Description:
     * This test verifies that the HNSW index can be created with specific values for m, efConstruction,
     * efSearch, and a custom indexName.
     */
    const tableNameTest = "TEST_TABLE_HNSW_DEFINED";
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Cleanup: Drop table if exists
    await dropTable(client, tableNameTest);
    // Create HanaDB instance and add data
    const vector = await HanaDB.fromTexts(
      ["foo", "bar", "baz"],
      {},
      embeddings,
      args
    );
    let exceptionOccurred = false;
    try {
      // Call the createHnswIndex function with specific values
      await vector.createHnswIndex({
        m: 50,
        efConstruction: 150,
        efSearch: 300,
        indexName: "custom_index",
      });
    } catch (error) {
      console.log(error);
      exceptionOccurred = true;
    }
    // Assert that no exception occurred
    expect(exceptionOccurred).toBe(false);
  });
  // Creating the index on an empty table and then adding documents must
  // still yield correct search results afterwards.
  test("test HNSW index creation after initialization", async () => {
    const tableNameTest = "TEST_TABLE_HNSW_INDEX_AFTER_INIT";
    // Clean up: drop the table if it exists
    await dropTable(client, tableNameTest);
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Initialize HanaDB without adding documents yet
    const vectorDB = new HanaDB(embeddings, args);
    await vectorDB.initialize();
    expect(vectorDB).toBeDefined();
    // Create HNSW index before adding any documents
    await vectorDB.createHnswIndex({
      indexName: "index_pre_add",
      efSearch: 400,
      m: 50,
      efConstruction: 150,
    });
    // Add texts after index creation
    await vectorDB.addDocuments([
      {
        pageContent: "Bye bye",
        metadata: { id: 2, name: "2" },
      },
      {
        pageContent: "Hello world",
        metadata: { id: 1, name: "1" },
      },
      {
        pageContent: "hello nice world",
        metadata: { id: 3, name: "3" },
      },
    ]);
    const results = await vectorDB.similaritySearch("Hello world", 1);
    expect(results).toHaveLength(1);
    expect(results).toEqual([
      new Document({
        pageContent: "Hello world",
        metadata: { id: 1, name: "1" },
      }),
    ]);
  });
  // Re-creating an index that already exists on the table must reject.
  test("test duplicate HNSW index creation", async () => {
    const tableNameTest = "TEST_TABLE_HNSW_DUPLICATE_INDEX";
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Clean up: drop the table if it exists
    await dropTable(client, tableNameTest);
    // Create HanaDB instance and add data
    const vectorDB = await HanaDB.fromTexts(
      ["foo", "bar", "baz"],
      {},
      embeddings,
      args
    );
    // Create HNSW index for the first time
    await vectorDB.createHnswIndex({
      indexName: "index_cosine",
      efSearch: 300,
      m: 80,
      efConstruction: 100,
    });
    // Trying to create the same index again should raise an exception
    await expect(
      vectorDB.createHnswIndex({
        efSearch: 300,
        m: 80,
        efConstruction: 100,
      })
    ).rejects.toThrow();
  });
  test("test HNSW index creation with invalid m value", async () => {
    /**
     * Description:
     * This test ensures that the HNSW index creation throws an error when an invalid value for m is passed
     * (e.g., m < 4 or m > 1000).
     */
    const tableNameTest = "TEST_TABLE_HNSW_INVALID_M";
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Cleanup: Drop table if exists
    await dropTable(client, tableNameTest);
    // Create HanaDB instance and add data
    const vector = await HanaDB.fromTexts(
      ["foo", "bar", "baz"],
      {},
      embeddings,
      args
    );
    let exceptionOccurred = false;
    try {
      // Call the createHnswIndex function with invalid m value
      await vector.createHnswIndex({
        m: 2, // Invalid value for m (should be >= 4)
      });
    } catch (error) {
      exceptionOccurred = true;
    }
    // Assert that exception occurred
    expect(exceptionOccurred).toBe(true);
  });
  test("test HNSW index creation with invalid efConstruction value", async () => {
    /**
     * Description:
     * This test ensures that the HNSW index creation throws an error when an invalid efConstruction value is passed
     * (e.g., efConstruction > 100000).
     */
    const tableNameTest = "TEST_TABLE_HNSW_INVALID_EF_CONSTRUCTION";
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Cleanup: Drop table if exists
    await dropTable(client, tableNameTest);
    // Create HanaDB instance and add data
    const vector = await HanaDB.fromTexts(
      ["foo", "bar", "baz"],
      {},
      embeddings,
      args
    );
    let exceptionOccurred = false;
    try {
      // Call the createHnswIndex function with invalid efConstruction value
      await vector.createHnswIndex({
        efConstruction: 100001, // Invalid value for efConstruction (should be <= 100000)
      });
    } catch (error) {
      exceptionOccurred = true;
    }
    // Assert that exception occurred
    expect(exceptionOccurred).toBe(true);
  });
  test("test HNSW index creation with invalid efSearch value", async () => {
    /**
     * Description:
     * This test ensures that the HNSW index creation throws an error when an invalid efSearch value is passed
     * (e.g., efSearch < 1 or efSearch > 100000).
     */
    const tableNameTest = "TEST_TABLE_HNSW_INVALID_EF_SEARCH";
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Cleanup: Drop table if exists
    await dropTable(client, tableNameTest);
    // Create HanaDB instance and add data
    const vector = await HanaDB.fromTexts(
      ["foo", "bar", "baz"],
      {},
      embeddings,
      args
    );
    let exceptionOccurred = false;
    try {
      // Call the createHnswIndex function with invalid efSearch value
      await vector.createHnswIndex({
        efSearch: 0, // Invalid value for efSearch (should be >= 1)
      });
    } catch (error) {
      exceptionOccurred = true;
    }
    // Assert that exception occurred
    expect(exceptionOccurred).toBe(true);
  });
});
describe("Filter Tests", () => {
  // Element type shared by all six TYPE_*_FILTERING_TEST_CASES arrays:
  // a metadata filter plus the ids of the fixture DOCUMENTS expected to match.
  type FilterTestCase = (typeof TYPE_1_FILTERING_TEST_CASES)[number];

  /**
   * Shared driver for every filtering scenario.
   *
   * Recreates `tableNameTest` from scratch, loads the shared DOCUMENTS
   * fixture, runs a similarity search for "Foo" (k=5) restricted by the
   * case's filter, and asserts that the returned documents' metadata ids
   * equal the expected id set (order-insensitive).
   *
   * Fixes vs. the previous per-type copies: the six near-identical bodies
   * are deduplicated, the stray debug `console.log(docs)` in the type-6
   * block is removed, and the mislabeled "Filter Test 5: Testing TYPE_4..."
   * comment is corrected.
   */
  const runFilterTestCase = async (
    tableNameTest: string,
    testCase: FilterTestCase
  ) => {
    const { filter, expected } = testCase;
    const args = {
      connection: client,
      tableName: tableNameTest,
    };
    // Always start from an empty table so stale rows cannot match.
    await dropTable(client, tableNameTest);
    const vectorDB = new HanaDB(embeddings, args);
    await vectorDB.initialize();
    expect(vectorDB).toBeDefined();
    await vectorDB.addDocuments(DOCUMENTS);
    const docs = await vectorDB.similaritySearch("Foo", 5, filter);
    const ids = docs.map((doc) => doc.metadata.id);
    // Same cardinality and every returned id is expected -> sets are equal.
    expect(ids.length).toBe(expected.length);
    expect(ids.every((id) => expected.includes(id))).toBe(true);
  };

  // Filter Test 1: plain key/value equality filters (TYPE_1_FILTERING_TEST_CASES)
  it.each(TYPE_1_FILTERING_TEST_CASES)(
    "should apply type 1 filtering correctly with filter %j",
    async (testCase) => {
      await runFilterTestCase("TEST_TABLE_ENHANCED_FILTER_1", testCase);
    }
  );
  // Filter Test 2: comparison operators ($eq/$ne/$gt/$gte/$lt/$lte) (TYPE_2_FILTERING_TEST_CASES)
  it.each(TYPE_2_FILTERING_TEST_CASES)(
    "should apply type 2 filtering correctly with filter %j",
    async (testCase) => {
      await runFilterTestCase("TEST_TABLE_ENHANCED_FILTER_2", testCase);
    }
  );
  // Filter Test 3: logical combinators ($and/$or) (TYPE_3_FILTERING_TEST_CASES)
  it.each(TYPE_3_FILTERING_TEST_CASES)(
    "should apply type 3 filtering correctly with filter %j",
    async (testCase) => {
      await runFilterTestCase("TEST_TABLE_ENHANCED_FILTER_3", testCase);
    }
  );
  // Filter Test 4: range/set operators ($between/$in/$nin) (TYPE_4_FILTERING_TEST_CASES)
  it.each(TYPE_4_FILTERING_TEST_CASES)(
    "should apply type 4 filtering correctly with filter %j",
    async (testCase) => {
      await runFilterTestCase("TEST_TABLE_ENHANCED_FILTER_4", testCase);
    }
  );
  // Filter Test 5: pattern matching ($like) (TYPE_5_FILTERING_TEST_CASES)
  it.each(TYPE_5_FILTERING_TEST_CASES)(
    "should apply type 5 filtering correctly with filter %j",
    async (testCase) => {
      await runFilterTestCase("TEST_TABLE_ENHANCED_FILTER_5", testCase);
    }
  );
  // Filter Test 6: nested combinations of the above (TYPE_6_FILTERING_TEST_CASES)
  it.each(TYPE_6_FILTERING_TEST_CASES)(
    "should apply type 6 filtering correctly with filter %j",
    async (testCase) => {
      await runFilterTestCase("TEST_TABLE_ENHANCED_FILTER_6", testCase);
    }
  );
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/qdrant.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import { jest, test, expect } from "@jest/globals";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { QdrantVectorStore } from "../qdrant.js";
test("QdrantVectorStore works", async () => {
const client = {
upsert: jest.fn(),
search: jest.fn<any>().mockResolvedValue([]),
getCollections: jest.fn<any>().mockResolvedValue({ collections: [] }),
createCollection: jest.fn(),
};
const embeddings = new FakeEmbeddings();
const store = new QdrantVectorStore(embeddings, {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
client: client as any,
});
expect(store).toBeDefined();
await store.addDocuments([
{
pageContent: "hello",
metadata: {},
},
]);
expect(client.upsert).toHaveBeenCalledTimes(1);
const results = await store.similaritySearch("hello", 1);
expect(results).toHaveLength(0);
});
// A single customPayload entry must be merged into the upserted point's
// payload alongside the standard content/metadata fields.
test("QdrantVectorStore adds vectors with custom payload", async () => {
  // Mock Qdrant client
  const client = {
    upsert: jest.fn(),
    search: jest.fn<any>().mockResolvedValue([]),
    getCollections: jest.fn<any>().mockResolvedValue({ collections: [] }),
    createCollection: jest.fn(),
  };
  // Mock embeddings
  const embeddings = new FakeEmbeddings();
  // Create QdrantVectorStore instance with the mock client
  const qdrantVectorStore = new QdrantVectorStore(embeddings, {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    client: client as any,
  });
  // Define a custom payload
  const customPayload = {
    customPayload: [
      {
        customField1: "value1",
        customField2: "value2",
      },
    ],
  };
  // Add documents with custom payload
  await qdrantVectorStore.addDocuments(
    [
      {
        pageContent: "hello",
        metadata: {},
      },
    ],
    customPayload
  );
  // Verify that the Qdrant client's upsert method was called with the correct arguments
  expect(client.upsert).toHaveBeenCalledTimes(1);
  expect(client.upsert).toHaveBeenCalledWith("documents", {
    wait: true,
    points: [
      expect.objectContaining({
        payload: expect.objectContaining({
          content: "hello",
          metadata: {},
          customPayload: customPayload.customPayload[0],
        }),
      }),
    ],
  });
});
// customPayload entries are paired with documents positionally; documents
// beyond the payload list (the third one here) get no customPayload field.
test("QdrantVectorStore adds vectors with multiple custom payload", async () => {
  // Mock Qdrant client
  const client = {
    upsert: jest.fn(),
    search: jest.fn<any>().mockResolvedValue([]),
    getCollections: jest.fn<any>().mockResolvedValue({ collections: [] }),
    createCollection: jest.fn(),
  };
  // Mock embeddings
  const embeddings = new FakeEmbeddings();
  // Create QdrantVectorStore instance with the mock client
  const qdrantVectorStore = new QdrantVectorStore(embeddings, {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    client: client as any,
  });
  // Define a custom payload
  const customPayload = {
    customPayload: [
      {
        customField1: "value1",
        customField2: "value2",
      },
      {
        customField3: "value3",
      },
    ],
  };
  // Add documents with custom payload (3 docs, only 2 payload entries)
  await qdrantVectorStore.addDocuments(
    [
      {
        pageContent: "hello",
        metadata: {},
      },
      {
        pageContent: "Goodbye",
        metadata: {},
      },
      {
        pageContent: "D01",
        metadata: {},
      },
    ],
    customPayload
  );
  // Verify that the Qdrant client's upsert method was called with the correct arguments
  expect(client.upsert).toHaveBeenCalledTimes(1);
  expect(client.upsert).toHaveBeenCalledWith("documents", {
    wait: true,
    points: [
      expect.objectContaining({
        payload: expect.objectContaining({
          content: "hello",
          metadata: {},
          customPayload: customPayload.customPayload[0],
        }),
      }),
      expect.objectContaining({
        payload: expect.objectContaining({
          content: "Goodbye",
          metadata: {},
          customPayload: customPayload.customPayload[1],
        }),
      }),
      expect.objectContaining({
        payload: expect.objectContaining({
          content: "D01",
          metadata: {},
        }),
      }),
    ],
  });
});
test("QdrantVectorStore adds vectors with no custom payload", async () => {
// Mock Qdrant client
const client = {
upsert: jest.fn(),
search: jest.fn<any>().mockResolvedValue([]),
getCollections: jest.fn<any>().mockResolvedValue({ collections: [] }),
createCollection: jest.fn(),
};
// Mock embeddings
const embeddings = new FakeEmbeddings();
// Create QdrantVectorStore instance with the mock client
const qdrantVectorStore = new QdrantVectorStore(embeddings, {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
client: client as any,
});
// Add documents with custom payload
await qdrantVectorStore.addDocuments([
{
pageContent: "hello",
metadata: {},
},
]);
// Verify that the Qdrant client's upsert method was called with the correct arguments
expect(client.upsert).toHaveBeenCalledTimes(1);
expect(client.upsert).toHaveBeenCalledWith("documents", {
wait: true,
points: [
expect.objectContaining({
payload: expect.objectContaining({
content: "hello",
metadata: {},
}),
}),
],
});
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/usearch.int.test.ts
|
import { test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { USearch } from "../usearch.js";
// fromTexts seeds the index with one entry; addVectors appends raw vectors,
// and the index size (a bigint from the usearch binding) tracks the total.
test("Test USearch.fromTexts + addVectors", async () => {
  const vectorStore = await USearch.fromTexts(
    ["Hello world"],
    [{ id: 2 }],
    new FakeEmbeddings()
  );
  expect(vectorStore.index?.size()).toBe(1n);
  await vectorStore.addVectors(
    [
      [0, 1, 0, 0],
      [0.5, 0.5, 0.5, 0.5],
    ],
    [
      new Document({
        pageContent: "hello bye",
        metadata: { id: 5 },
      }),
      new Document({
        pageContent: "hello you",
        metadata: { id: 6 },
      }),
    ]
  );
  expect(vectorStore.index?.size()).toBe(3n);
  // The original seeded text should still be the best hit for its own query.
  const result = await vectorStore.similaritySearch("hello world", 2);
  expect(result[0].metadata).toEqual({ id: 2 });
});
// fromDocuments embeds three docs; two raw vectors are then appended and a
// raw-vector query must rank them by distance (exact match first, score 0).
test("Test USearch.fromDocuments + addVectors", async () => {
  const vectorStore = await USearch.fromDocuments(
    [
      new Document({
        pageContent: "hello bye",
        metadata: { id: 5 },
      }),
      new Document({
        pageContent: "hello world",
        metadata: { id: 4 },
      }),
      new Document({
        pageContent: "hello you",
        metadata: { id: 6 },
      }),
    ],
    new FakeEmbeddings()
  );
  expect(vectorStore.index?.size()).toBe(3n);
  await vectorStore.addVectors(
    [
      [1, 0, 0, 0],
      [1, 0, 0, 1],
    ],
    [
      new Document({
        pageContent: "my world",
        metadata: { id: 7 },
      }),
      new Document({
        pageContent: "our world",
        metadata: { id: 8 },
      }),
    ]
  );
  expect(vectorStore.index?.size()).toBe(5n);
  const results = await vectorStore.similaritySearchVectorWithScore(
    [1, 0, 0, 0],
    2
  );
  expect(results).toHaveLength(2);
  // Exact vector match scores 0; the next-closest appended vector scores 1.
  expect(results).toEqual([
    [new Document({ metadata: { id: 7 }, pageContent: "my world" }), 0],
    [new Document({ metadata: { id: 8 }, pageContent: "our world" }), 1],
  ]);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/hanavector.fixtures.ts
|
import { Document } from "@langchain/core/documents";
// Shape of the metadata attached to each fixture document. `height` and
// `happiness` are nullable and `sadness` is optional so the filter tests can
// exercise missing/null metadata values.
interface Metadata {
  name: string;
  date: string;
  count: number;
  is_active: boolean;
  tags: string[];
  location: number[];
  id: number;
  height: number | null;
  happiness: number | null;
  sadness?: number;
}
// Three fixture rows (ids 1-3). Values are chosen so each filter test case
// below has a deterministic expected id set — do not change them without
// updating every TYPE_*_FILTERING_TEST_CASES expectation.
const metadatas: Metadata[] = [
  {
    name: "adam",
    date: "2021-01-01",
    count: 1,
    is_active: true,
    tags: ["a", "b"],
    location: [1.0, 2.0],
    id: 1,
    height: 10.0,
    happiness: 0.9,
    sadness: 0.1,
  },
  {
    name: "bob",
    date: "2021-01-02",
    count: 2,
    is_active: false,
    tags: ["b", "c"],
    location: [2.0, 3.0],
    id: 2,
    height: 5.7,
    happiness: 0.8,
    sadness: 0.1,
  },
  {
    // jane intentionally omits `sadness` and has null `happiness` to cover
    // missing-value filtering.
    name: "jane",
    date: "2021-01-01",
    count: 3,
    is_active: true,
    tags: ["b", "d"],
    location: [3.0, 4.0],
    id: 3,
    height: 2.4,
    happiness: null,
  },
];
// Page content encodes the row id ("id 1 ", "id 2 ", ...) so search results
// can be traced back to their metadata row.
const texts: string[] = metadatas.map((metadata) => `id ${metadata.id} `);
// The shared document fixture consumed by the filter integration tests.
export const DOCUMENTS: Document[] = texts.map(
  (text, index) =>
    new Document({ pageContent: text, metadata: metadatas[index] })
);
// One filtering scenario: a metadata filter plus the ids of the DOCUMENTS
// expected to match it.
interface TestCase {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  filter: Record<string, any>;
  expected: number[];
}
// Type 1: plain key/value equality filters (implicit $eq, AND across keys).
export const TYPE_1_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { id: 1 }, expected: [1] },
  { filter: { name: "adam" }, expected: [1] },
  { filter: { is_active: true }, expected: [1, 3] },
  { filter: { is_active: false }, expected: [2] },
  { filter: { id: 1, is_active: true }, expected: [1] },
  { filter: { id: 1, is_active: false }, expected: [] },
];
// Type 2: comparison operators ($eq/$ne/$gt/$gte/$lt/$lte) over numbers,
// strings, booleans, and dates.
export const TYPE_2_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { id: 1 }, expected: [1] },
  { filter: { id: { $ne: 1 } }, expected: [2, 3] },
  { filter: { id: { $gt: 1 } }, expected: [2, 3] },
  { filter: { id: { $gte: 1 } }, expected: [1, 2, 3] },
  { filter: { id: { $lt: 1 } }, expected: [] },
  { filter: { id: { $lte: 1 } }, expected: [1] },
  { filter: { name: "adam" }, expected: [1] },
  { filter: { name: "bob" }, expected: [2] },
  { filter: { name: { $eq: "adam" } }, expected: [1] },
  { filter: { name: { $ne: "adam" } }, expected: [2, 3] },
  // String comparisons are lexicographic.
  { filter: { name: { $gt: "jane" } }, expected: [] },
  { filter: { name: { $gte: "jane" } }, expected: [3] },
  { filter: { name: { $lt: "jane" } }, expected: [1, 2] },
  { filter: { name: { $lte: "jane" } }, expected: [1, 2, 3] },
  { filter: { is_active: { $eq: true } }, expected: [1, 3] },
  { filter: { is_active: { $ne: true } }, expected: [2] },
  { filter: { height: { $gt: 5.0 } }, expected: [1, 2] },
  { filter: { height: { $gte: 5.0 } }, expected: [1, 2] },
  { filter: { height: { $lt: 5.0 } }, expected: [3] },
  { filter: { height: { $lte: 5.8 } }, expected: [2, 3] },
  // New date-related test cases
  {
    // NOTE(review): this case uses an explicit { type: "date", date: ... }
    // wrapper while the ones below pass bare strings — presumably both forms
    // are accepted by the filter translator; confirm against HanaDB.
    filter: { date: { $eq: { type: "date", date: "2021-01-01" } } },
    expected: [1, 3],
  },
  { filter: { date: { $ne: "2021-01-01" } }, expected: [2] },
  { filter: { date: { $gt: "2021-01-01" } }, expected: [2] },
  { filter: { date: { $gte: "2021-01-01" } }, expected: [1, 2, 3] },
  { filter: { date: { $lt: "2021-01-02" } }, expected: [1, 3] },
  { filter: { date: { $lte: "2021-01-02" } }, expected: [1, 2, 3] },
];
// Type 3: logical combinators ($and/$or) over sub-filters.
export const TYPE_3_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { $or: [{ id: 1 }, { id: 2 }] }, expected: [1, 2] },
  { filter: { $or: [{ id: 1 }, { name: "bob" }] }, expected: [1, 2] },
  { filter: { $and: [{ id: 1 }, { id: 2 }] }, expected: [] },
  { filter: { $or: [{ id: 1 }, { id: 2 }, { id: 3 }] }, expected: [1, 2, 3] },
];
// Type 4: range and set-membership operators ($between/$in/$nin).
export const TYPE_4_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { id: { $between: [1, 2] } }, expected: [1, 2] },
  { filter: { id: { $between: [1, 1] } }, expected: [1] },
  { filter: { name: { $in: ["adam", "bob"] } }, expected: [1, 2] },
  { filter: { name: { $nin: ["adam", "bob"] } }, expected: [3] },
];
// Type 5: SQL LIKE pattern matching ($like, % wildcard).
export const TYPE_5_FILTERING_TEST_CASES: TestCase[] = [
  { filter: { name: { $like: "a%" } }, expected: [1] },
  { filter: { name: { $like: "%a%" } }, expected: [1, 3] },
];
// Type 6: nested combinations of comparison, set, and logical operators.
export const TYPE_6_FILTERING_TEST_CASES: TestCase[] = [
  {
    filter: {
      $and: [
        {
          $or: [{ id: { $eq: 1 } }, { id: { $in: [2, 3] } }],
        },
        { height: { $gte: 5.0 } },
      ],
    },
    expected: [1, 2],
  },
  { filter: { id: 3, height: { $gte: 5.0 } }, expected: [] },
  { filter: { $and: [{ id: 1 }, { height: { $gte: 5.0 } }] }, expected: [1] },
];
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/momento_vector_index.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { describe, expect } from "@jest/globals";
import { faker } from "@faker-js/faker";
import {
PreviewVectorIndexClient,
VectorIndexConfigurations,
CredentialProvider,
} from "@gomomento/sdk";
import * as uuid from "uuid";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { sleep } from "../../utils/time.js";
import { MomentoVectorIndex } from "../momento_vector_index.js";
/**
 * Runs `block` against a MomentoVectorIndex backed by a freshly created,
 * uniquely named index, then deletes that index regardless of outcome.
 * Requires the MOMENTO_API_KEY environment variable to be set.
 */
async function withVectorStore(
  block: (vectorStore: MomentoVectorIndex) => Promise<void>
): Promise<void> {
  const name = uuid.v4();
  const momentoClient = new PreviewVectorIndexClient({
    configuration: VectorIndexConfigurations.Laptop.latest(),
    credentialProvider: CredentialProvider.fromEnvironmentVariable({
      environmentVariableName: "MOMENTO_API_KEY",
    }),
  });
  const store = new MomentoVectorIndex(new OpenAIEmbeddings(), {
    client: momentoClient,
    indexName: name,
  });
  try {
    await block(store);
  } finally {
    // Always clean up the index, even if the block throws.
    await store.getClient().deleteIndex(name);
  }
}
// Integration tests for MomentoVectorIndex. Skipped by default; each test
// provisions (and tears down) a throwaway index via withVectorStore and
// waits with sleep() for server-side indexing before querying.
describe.skip("MomentoVectorIndex", () => {
  it("stores user-provided ids", async () => {
    await withVectorStore(async (vectorStore: MomentoVectorIndex) => {
      const pageContent = faker.lorem.sentence(5);
      const documentId = "foo";
      await vectorStore.addDocuments([{ pageContent, metadata: {} }], {
        ids: [documentId],
      });
      await sleep();
      const results = await vectorStore.similaritySearch(pageContent, 1);
      expect(results).toEqual([new Document({ metadata: {}, pageContent })]);
    });
  });
  it("stores uuids when no ids are provided", async () => {
    await withVectorStore(async (vectorStore: MomentoVectorIndex) => {
      const pageContent = faker.lorem.sentence(5);
      await vectorStore.addDocuments([{ pageContent, metadata: {} }]);
      await sleep();
      const results = await vectorStore.similaritySearch(pageContent, 1);
      expect(results).toEqual([new Document({ metadata: {}, pageContent })]);
    });
  });
  it("stores metadata", async () => {
    await withVectorStore(async (vectorStore: MomentoVectorIndex) => {
      const pageContent = faker.lorem.sentence(5);
      // Covers string, int, float, boolean, and string-array metadata values.
      const metadata = {
        foo: "bar",
        page: 1,
        pi: 3.14,
        isTrue: true,
        tags: ["a", "b"],
      };
      await vectorStore.addDocuments([{ pageContent, metadata }]);
      await sleep();
      const results = await vectorStore.similaritySearch(pageContent, 1);
      expect(results).toEqual([new Document({ metadata, pageContent })]);
    });
  });
  it("fails with fromTexts when texts length doesn't match metadatas length", async () => {
    const pageContent = faker.lorem.sentence(5);
    const metadata = { foo: "bar" };
    await expect(
      MomentoVectorIndex.fromTexts(
        [pageContent],
        [metadata, metadata],
        new OpenAIEmbeddings(),
        {
          client: new PreviewVectorIndexClient({
            configuration: VectorIndexConfigurations.Laptop.latest(),
            credentialProvider: CredentialProvider.fromEnvironmentVariable({
              environmentVariableName: "MOMENTO_API_KEY",
            }),
          }),
        }
      )
    ).rejects.toThrow(
      "Number of texts (1) does not equal number of metadatas (2)"
    );
  });
  it("deletes documents by id", async () => {
    await withVectorStore(async (vectorStore: MomentoVectorIndex) => {
      const pageContent1 = faker.lorem.sentence(5);
      const documentId1 = "pageContent1";
      const pageContent2 = faker.lorem.sentence(5);
      const documentId2 = "pageContent2";
      await vectorStore.addDocuments(
        [
          { pageContent: pageContent1, metadata: {} },
          { pageContent: pageContent2, metadata: {} },
        ],
        {
          ids: [documentId1, documentId2],
        }
      );
      await sleep();
      const searchResults = await vectorStore.similaritySearch(pageContent1, 1);
      expect(searchResults).toEqual([
        new Document({ metadata: {}, pageContent: pageContent1 }),
      ]);
      await vectorStore.delete({ ids: [documentId1] });
      await sleep();
      // After deletion only the second document should remain, even with k=2.
      const results = await vectorStore.similaritySearch(pageContent1, 2);
      expect(results).toEqual([
        new Document({ metadata: {}, pageContent: pageContent2 }),
      ]);
    });
  });
  it("re-ranks when using max marginal relevance search", async () => {
    await withVectorStore(async (vectorStore: MomentoVectorIndex) => {
      const pepperoniPizza = "pepperoni pizza";
      const cheesePizza = "cheese pizza";
      const hotDog = "hot dog";
      await vectorStore.addDocuments([
        { pageContent: pepperoniPizza, metadata: {} },
        { pageContent: cheesePizza, metadata: {} },
        { pageContent: hotDog, metadata: {} },
      ]);
      await sleep();
      // Plain similarity returns the two pizzas...
      const searchResults = await vectorStore.similaritySearch("pizza", 2);
      expect(searchResults).toEqual([
        new Document({ metadata: {}, pageContent: pepperoniPizza }),
        new Document({ metadata: {}, pageContent: cheesePizza }),
      ]);
      const searchResults2 = await vectorStore.maxMarginalRelevanceSearch(
        "pizza",
        {
          k: 2,
          fetchK: 3,
          lambda: 0.5,
        }
      );
      // ...while MMR trades relevance for diversity and surfaces the hot dog.
      expect(searchResults2).toEqual([
        new Document({ metadata: {}, pageContent: pepperoniPizza }),
        new Document({ metadata: {}, pageContent: hotDog }),
      ]);
    });
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/lancedb.int.test.ts
|
import { beforeEach, describe, expect, test } from "@jest/globals";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import * as os from "node:os";
import { connect, Table } from "@lancedb/lancedb";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { LanceDB } from "../lancedb.js";
// Integration tests against a LanceDB table created in a temp directory.
describe("LanceDB", () => {
  let lanceDBTable: Table;
  beforeEach(async () => {
    const dir = await fs.mkdtemp(path.join(os.tmpdir(), "lcjs-lancedb-"));
    const db = await connect(dir);
    // Seed one placeholder row so the table schema (vector/text/id) exists.
    lanceDBTable = await db.createTable("vectors", [
      { vector: Array(1536), text: "sample", id: 1 },
    ]);
  });
  test("Test fromTexts + addDocuments", async () => {
    const embeddings = new OpenAIEmbeddings();
    const vectorStore = await LanceDB.fromTexts(
      ["hello bye", "hello world", "bye bye"],
      [{ id: 1 }, { id: 2 }, { id: 3 }],
      embeddings,
      {
        table: lanceDBTable,
      }
    );
    // 3 inserted texts + the 1 seed row from beforeEach.
    const results = await vectorStore.similaritySearch("hello bye", 10);
    expect(results.length).toBe(4);
    await vectorStore.addDocuments([
      new Document({
        pageContent: "a new world",
        metadata: { id: 4 },
      }),
    ]);
    const resultsTwo = await vectorStore.similaritySearch("hello bye", 10);
    expect(resultsTwo.length).toBe(5);
  });
});
// Same flow as above, but lets LanceDB infer the schema (no pre-created
// table is passed), so there is no seed row in the counts.
describe("LanceDB empty schema", () => {
  test("Test fromTexts + addDocuments", async () => {
    const embeddings = new OpenAIEmbeddings();
    const vectorStore = await LanceDB.fromTexts(
      ["hello bye", "hello world", "bye bye"],
      [{ id: 1 }, { id: 2 }, { id: 3 }],
      embeddings
    );
    const results = await vectorStore.similaritySearch("hello bye", 10);
    expect(results.length).toBe(3);
    await vectorStore.addDocuments([
      new Document({
        pageContent: "a new world",
        metadata: { id: 4 },
      }),
    ]);
    const resultsTwo = await vectorStore.similaritySearch("hello bye", 10);
    expect(resultsTwo.length).toBe(4);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/weaviate.test.ts
|
import { test, expect } from "@jest/globals";
import { flattenObjectForWeaviate } from "../weaviate.js";
// Verifies that nested objects are flattened with underscore-joined keys,
// that mixed-type arrays (array2, deep.array) are dropped, and that
// homogeneous arrays and empty arrays are kept.
test("flattenObjectForWeaviate", () => {
  expect(
    flattenObjectForWeaviate({
      array2: [{}, "a"],
      deep: {
        string: "deep string",
        array: ["1", 2],
        array3: [1, 3],
        deepdeep: {
          string: "even a deeper string",
        },
      },
      emptyArray: [],
    })
  ).toMatchInlineSnapshot(`
    {
      "deep_array3": [
        1,
        3,
      ],
      "deep_deepdeep_string": "even a deeper string",
      "deep_string": "deep string",
      "emptyArray": [],
    }
  `);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/azure_cosmosdb.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import { jest, test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { AzureCosmosDBVectorStore } from "../azure_cosmosdb.js";
// Mock mongodb client
// Builds a minimal stub of the mongodb client surface used by
// AzureCosmosDBVectorStore. db() and collection() always return the same
// mock objects, so call-count assertions can target them via
// client.db().collection().
const createMockClient = () => {
  const collection = {
    listIndexes: jest.fn().mockReturnValue({
      toArray: jest.fn().mockReturnValue([
        {
          name: "vectorSearchIndex",
        },
      ]),
    }),
    dropIndex: jest.fn(),
    deleteMany: jest.fn(),
    // Echo back one generated id per inserted document.
    insertMany: jest.fn().mockImplementation((docs: any) => ({
      insertedIds: docs.map((_: any, i: any) => `id${i}`),
    })),
    // Similarity search pipeline: always yields a single (doc, score) pair.
    aggregate: jest.fn().mockReturnValue({
      map: jest.fn().mockReturnValue({
        toArray: jest
          .fn()
          .mockReturnValue([
            [new Document({ pageContent: "test", metadata: { a: 1 } }), 0.5],
          ]),
      }),
    }),
  };
  const db = {
    collectionName: "documents",
    collection: jest.fn<any>().mockReturnValue(collection),
    command: jest.fn(),
  };
  return {
    db: jest.fn<any>().mockReturnValue(db),
    connect: jest.fn(),
    close: jest.fn(),
  };
};
// Spy on embedDocuments so tests can assert how often embedding happens;
// reset call counts between tests.
const embedMock = jest.spyOn(FakeEmbeddings.prototype, "embedDocuments");
beforeEach(() => {
  embedMock.mockClear();
});
// End-to-end flow against the mock client: add documents then search.
test("AzureCosmosDBVectorStore works", async () => {
  const client = createMockClient();
  const embeddings = new FakeEmbeddings();
  const store = new AzureCosmosDBVectorStore(embeddings, {
    client: client as any,
  });
  expect(store).toBeDefined();
  await store.addDocuments([
    {
      pageContent: "test",
      metadata: { a: 1 },
    },
  ]);
  const mockCollection = client.db().collection();
  expect(mockCollection.insertMany).toHaveBeenCalledTimes(1);
  expect(embedMock).toHaveBeenCalledTimes(1);
  const results = await store.similaritySearch("test", 1);
  expect(mockCollection.aggregate).toHaveBeenCalledTimes(1);
  expect(results).toHaveLength(1);
});
// Index lifecycle: existence check (via listIndexes) and deletion.
test("AzureCosmosDBVectorStore manages its index", async () => {
  const client = createMockClient();
  const embeddings = new FakeEmbeddings();
  const store = new AzureCosmosDBVectorStore(embeddings, {
    client: client as any,
  });
  const indexExists = await store.checkIndexExists();
  const mockDb = client.db();
  const mockCollection = mockDb.collection();
  expect(mockDb.command).toHaveBeenCalledTimes(1);
  expect(mockCollection.listIndexes).toHaveBeenCalledTimes(1);
  expect(indexExists).toBe(true);
  await store.deleteIndex();
  expect(mockCollection.dropIndex).toHaveBeenCalledTimes(1);
});
// delete() with no args clears everything; with ids/filter it scopes
// the underlying deleteMany call accordingly.
test("AzureCosmosDBVectorStore deletes documents", async () => {
  const client = createMockClient();
  const embeddings = new FakeEmbeddings();
  const store = new AzureCosmosDBVectorStore(embeddings, {
    client: client as any,
  });
  await store.delete();
  const mockCollection = client.db().collection();
  expect(mockCollection.deleteMany).toHaveBeenCalledTimes(1);
  expect(mockCollection.deleteMany).toHaveBeenCalledWith({});
  await store.delete({ ids: ["id1234567890", "id2345678901"] });
  expect(mockCollection.deleteMany).toHaveBeenCalledTimes(2);
  expect(mockCollection.deleteMany.mock.calls[1][0]).toMatchObject({ _id: {} });
  await store.delete({ filter: { a: 1 } });
  expect(mockCollection.deleteMany).toHaveBeenCalledTimes(3);
  expect(mockCollection.deleteMany.mock.calls[2][0]).toMatchObject({ a: 1 });
});
// addVectors takes precomputed vectors, so embedDocuments is never called.
test("AzureCosmosDBVectorStore adds vectors", async () => {
  const client = createMockClient();
  const embeddings = new FakeEmbeddings();
  const store = new AzureCosmosDBVectorStore(embeddings, {
    client: client as any,
  });
  await store.addVectors(
    [[1, 2, 5]],
    [
      {
        pageContent: "test",
        metadata: { a: 1 },
      },
    ]
  );
  const mockCollection = client.db().collection();
  expect(embedMock).toHaveBeenCalledTimes(0);
  expect(mockCollection.insertMany).toHaveBeenCalledTimes(1);
});
// fromTexts embeds each text once (FakeEmbeddings yields a fixed vector)
// and inserts them in a single batch.
test("AzureCosmosDBVectorStore initializes from texts", async () => {
  const client = createMockClient();
  const embeddings = new FakeEmbeddings();
  const store = await AzureCosmosDBVectorStore.fromTexts(
    ["test", "hello", "world"],
    {},
    embeddings,
    { client: client as any }
  );
  expect(store).toBeDefined();
  const mockCollection = client.db().collection();
  expect(mockCollection.insertMany).toHaveBeenCalledTimes(1);
  expect(mockCollection.insertMany).toHaveBeenCalledWith([
    {
      textContent: "test",
      vectorContent: [0.1, 0.2, 0.3, 0.4],
    },
    {
      textContent: "hello",
      vectorContent: [0.1, 0.2, 0.3, 0.4],
    },
    {
      textContent: "world",
      vectorContent: [0.1, 0.2, 0.3, 0.4],
    },
  ]);
  expect(embedMock).toHaveBeenCalledTimes(1);
});
// fromDocuments follows the same insert path as fromTexts.
test("AzureCosmosDBVectorStore initializes from documents", async () => {
  const client = createMockClient();
  const embeddings = new FakeEmbeddings();
  const store = await AzureCosmosDBVectorStore.fromDocuments(
    [
      new Document({ pageContent: "house" }),
      new Document({ pageContent: "pool" }),
    ],
    embeddings,
    { client: client as any }
  );
  expect(store).toBeDefined();
  const mockCollection = client.db().collection();
  expect(mockCollection.insertMany).toHaveBeenCalledTimes(1);
  expect(mockCollection.insertMany).toHaveBeenCalledWith([
    {
      textContent: "house",
      vectorContent: [0.1, 0.2, 0.3, 0.4],
    },
    {
      textContent: "pool",
      vectorContent: [0.1, 0.2, 0.3, 0.4],
    },
  ]);
  expect(embedMock).toHaveBeenCalledTimes(1);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/voy.int.test.ts
|
import { expect, test } from "@jest/globals";
import { Voy as VoyOriginClient } from "voy-search";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { VoyVectorStore } from "../voy.js";
// Shared in-memory Voy client reused across the whole test.
const client = new VoyOriginClient();
test("it can create index using Voy.from text, add new elements to the index and get queried documents", async () => {
  const vectorStore = await VoyVectorStore.fromTexts(
    ["initial first page", "initial second page"],
    [{ id: 1 }, { id: 2 }],
    new OpenAIEmbeddings(),
    client
  );
  // the number of dimensions is produced by OpenAI
  expect(vectorStore.numDimensions).toBe(1536);
  await vectorStore.addDocuments([
    new Document({
      pageContent: "added first page",
      metadata: { id: 5 },
    }),
    new Document({
      pageContent: "added second page",
      metadata: { id: 4 },
    }),
    new Document({
      pageContent: "added third page",
      metadata: { id: 6 },
    }),
  ]);
  // 2 initial + 3 added documents.
  expect(vectorStore.docstore.length).toBe(5);
  await vectorStore.addDocuments([
    new Document({
      pageContent: "added another first page",
      metadata: { id: 7 },
    }),
  ]);
  const results = await vectorStore.similaritySearchWithScore("added first", 6);
  expect(results.length).toBe(6);
  // deleteAll wipes the index; a subsequent search must return nothing.
  await vectorStore.delete({
    deleteAll: true,
  });
  const results2 = await vectorStore.similaritySearchWithScore(
    "added first",
    6
  );
  expect(results2.length).toBe(0);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/typesense.test.ts
|
import { Client } from "typesense";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { Typesense } from "../typesense.js";
// Maps LangChain Documents to Typesense records: pageContent goes to the
// configured "text" column, configured metadata columns are copied over,
// and the embedding lands in "vec". Non-configured metadata (id) is dropped.
test("documentsToTypesenseRecords should return the correct typesense records", async () => {
  const embeddings = new FakeEmbeddings();
  const vectorstore = new Typesense(embeddings, {
    schemaName: "test",
    typesenseClient: {} as unknown as Client,
    columnNames: {
      vector: "vec",
      pageContent: "text",
      metadataColumnNames: ["foo", "bar", "baz"],
    },
  });
  const documents: Document[] = [
    {
      metadata: {
        id: "1",
        foo: "fooo",
        bar: "barr",
        baz: "bazz",
      },
      pageContent: "hello world",
    },
    {
      metadata: {
        id: "2",
        foo: "foooo",
        bar: "barrr",
        baz: "bazzz",
      },
      pageContent: "hello world 2",
    },
  ];
  const expected = [
    {
      text: "hello world",
      foo: "fooo",
      bar: "barr",
      baz: "bazz",
      vec: await embeddings.embedQuery("hello world"),
    },
    {
      text: "hello world 2",
      foo: "foooo",
      bar: "barrr",
      baz: "bazzz",
      vec: await embeddings.embedQuery("hello world 2"),
    },
  ];
  expect(
    await vectorstore._documentsToTypesenseRecords(
      documents,
      await embeddings.embedDocuments(["hello world", "hello world 2"])
    )
  ).toEqual(expected);
});
// Inverse mapping: Typesense hits become [Document, vector_distance] pairs;
// the vector column is stripped and only configured metadata columns remain.
test("typesenseRecordsToDocuments should return the correct langchain documents", async () => {
  const embeddings = new FakeEmbeddings();
  const vectorstore = new Typesense(embeddings, {
    schemaName: "test",
    typesenseClient: {} as unknown as Client,
    columnNames: {
      vector: "vec",
      pageContent: "text",
      metadataColumnNames: ["foo", "bar", "baz"],
    },
  });
  const typesenseRecords = [
    {
      document: {
        text: "hello world",
        foo: "fooo",
        bar: "barr",
        baz: "bazz",
        vec: await embeddings.embedQuery("hello world"),
      },
      vector_distance: 0.2342145,
    },
    {
      document: {
        text: "hello world 2",
        foo: "foooo",
        bar: "barrr",
        baz: "bazzz",
        vec: await embeddings.embedQuery("hello world 2"),
      },
      vector_distance: 0.4521355,
    },
  ];
  const expected = [
    [
      {
        metadata: {
          foo: "fooo",
          bar: "barr",
          baz: "bazz",
        },
        pageContent: "hello world",
      },
      0.2342145,
    ],
    [
      {
        metadata: {
          foo: "foooo",
          bar: "barrr",
          baz: "bazzz",
        },
        pageContent: "hello world 2",
      },
      0.4521355,
    ],
  ];
  expect(vectorstore._typesenseRecordsToDocuments(typesenseRecords)).toEqual(
    expected
  );
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/chroma.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import { jest, test, expect } from "@jest/globals";
import { type Collection } from "chromadb";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { Chroma } from "../chroma.js";
// Mocked chromadb Collection: only the methods the tests exercise are
// stubbed; the commented-out entries document the rest of the surface.
const mockCollection = {
  count: jest.fn<Collection["count"]>().mockResolvedValue(5),
  upsert: jest.fn<Collection["upsert"]>().mockResolvedValue(undefined as any),
  delete: jest.fn<Collection["delete"]>().mockResolvedValue(undefined as any),
  // add: jest.fn<Collection["add"]>().mockResolvedValue(undefined as any),
  // modify: jest.fn<Collection["modify"]>().mockResolvedValue(undefined as any),
  // get: jest.fn<Collection["get"]>().mockResolvedValue(undefined as any),
  // update: jest.fn<Collection["update"]>().mockResolvedValue({ success: true }),
  // query: jest.fn<Collection["query"]>().mockResolvedValue(undefined as any),
  // peek: jest.fn<Collection["peek"]>().mockResolvedValue(undefined as any),
} as any;
// Mocked ChromaClient that always hands back the shared mockCollection.
const mockClient = {
  getOrCreateCollection: jest.fn<any>().mockResolvedValue(mockCollection),
} as any;
// Unit tests for the Chroma vector store driven entirely by the mocks above.
describe("Chroma", () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });
  test("imports correctly", async () => {
    const { ChromaClient } = await Chroma.imports();
    expect(ChromaClient).toBeDefined();
  });
  test("constructor works", async () => {
    const chromaStore = new Chroma(new FakeEmbeddings(), {
      index: mockClient,
      collectionName: "test-collection",
    });
    expect(chromaStore).toBeDefined();
  });
  test("should add vectors to the collection", async () => {
    const expectedPageContents = ["Document 1", "Document 2"];
    const embeddings = new FakeEmbeddings();
    jest.spyOn(embeddings, "embedDocuments");
    const args = {
      collectionName: "testCollection",
      index: mockClient,
      collectionMetadata: { "hnsw:space": "cosine" },
    };
    const documents = expectedPageContents.map((pc) => ({ pageContent: pc }));
    const chroma = new Chroma(embeddings, args);
    await chroma.addDocuments(documents as any);
    expect(mockClient.getOrCreateCollection).toHaveBeenCalled();
    expect(embeddings.embedDocuments).toHaveBeenCalledWith(
      expectedPageContents
    );
    expect(mockCollection.upsert).toHaveBeenCalled();
    // Documents without metadata should be upserted with empty metadatas.
    const { metadatas } = mockCollection.upsert.mock.calls[0][0];
    expect(metadatas).toEqual([{}, {}]);
  });
  // Nested loc.lines metadata is flattened to scalar locFrom/locTo keys
  // (Chroma metadata values must be scalars).
  test("should override loc.lines with locFrom/locTo", async () => {
    const expectedPageContents = ["Document 1"];
    const embeddings = new FakeEmbeddings();
    jest.spyOn(embeddings, "embedDocuments");
    const args = { collectionName: "testCollection", index: mockClient };
    const documents = expectedPageContents.map((pc) => ({
      pageContent: pc,
      metadata: { source: "source.html", loc: { lines: { from: 0, to: 4 } } },
    }));
    const chroma = new Chroma(embeddings, args);
    await chroma.addDocuments(documents as any);
    const { metadatas } = mockCollection.upsert.mock.calls[0][0];
    expect(metadatas[0]).toEqual({
      source: "source.html",
      locFrom: 0,
      locTo: 4,
    });
  });
  test("should throw an error for mismatched vector lengths", async () => {
    const args = { collectionName: "testCollection" };
    const vectors = [
      [1, 2],
      [3, 4],
    ];
    const documents = [
      { metadata: { id: 1 }, pageContent: "Document 1" },
      { metadata: { id: 2 }, pageContent: "Document 2" },
    ];
    const chroma = new Chroma(new FakeEmbeddings(), args);
    chroma.numDimensions = 3; // Mismatched numDimensions
    await expect(chroma.addVectors(vectors, documents)).rejects.toThrowError();
  });
  test("should perform similarity search and return results", async () => {
    const args = { collectionName: "testCollection" };
    const query = [1, 2];
    const expectedResultCount = 5;
    mockCollection.query = jest.fn<Collection["query"]>().mockResolvedValue({
      ids: [["0", "1", "2", "3", "4"]],
      distances: [[0.1, 0.2, 0.3, 0.4, 0.5]],
      documents: [
        ["Document 1", "Document 2", "Document 3", "Document 4", "Document 5"],
      ],
      metadatas: [[{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]],
    } as any);
    const chroma = new Chroma(new FakeEmbeddings(), args);
    chroma.collection = mockCollection;
    const results = await chroma.similaritySearchVectorWithScore(
      query,
      expectedResultCount
    );
    expect(mockCollection.query).toHaveBeenCalledWith({
      queryEmbeddings: query,
      nResults: expectedResultCount,
      where: undefined,
    });
    expect(results).toHaveLength(5);
  });
  test("should return id properly when adding documents", async () => {
    const document1 = {
      pageContent: "Document 1",
      metadata: { source: "https://example.com" },
    };
    const documents = [document1];
    const chroma = new Chroma(new FakeEmbeddings(), {
      collectionName: "new-test-collection",
      index: mockClient,
    });
    await chroma.addDocuments(documents, { ids: ["0"] });
    const result = await chroma.similaritySearch(document1.pageContent, 1);
    expect(result[0]).toHaveProperty("id", "0");
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/azure_aisearch.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { jest, test, expect } from "@jest/globals";
import { setTimeout } from "timers/promises";
import { SearchIndexClient, AzureKeyCredential } from "@azure/search-documents";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import {
AzureAISearchVectorStore,
AzureAISearchQueryType,
AzureAISearchDocumentMetadata,
} from "../azure_aisearch.js";
// Shared index name and document ids used by both describe blocks below.
const INDEX_NAME = "vectorsearch";
const DOCUMENT_IDS: string[] = ["1", "2", "3", "4"];
/*
* To run these tests, you need have an Azure AI Search instance running.
* You can deploy a free version on Azure Portal without any cost, following
* this guide:
* https://learn.microsoft.com/azure/search/search-create-service-portal
*
* Once you have the instance running, you need to set the following environment
* variables before running the test:
* - AZURE_AISEARCH_ENDPOINT
* - AZURE_AISEARCH_KEY
* - AZURE_OPENAI_API_KEY
* - AZURE_OPENAI_API_INSTANCE_NAME
* - AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
* - AZURE_OPENAI_API_VERSION
*
* A regular OpenAI key can also be used instead of Azure OpenAI.
*/
// End-to-end tests against a real Azure AI Search instance (skipped by
// default; see the setup comment above for required environment variables).
describe.skip("AzureAISearchVectorStore e2e integration tests", () => {
  let indexClient: SearchIndexClient;
  beforeEach(async () => {
    expect(process.env.AZURE_AISEARCH_ENDPOINT).toBeDefined();
    expect(process.env.AZURE_AISEARCH_KEY).toBeDefined();
    // Note: when using Azure OpenAI, you have to also set these variables
    // in addition to the API key:
    // - AZURE_OPENAI_API_INSTANCE_NAME
    // - AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME
    // - AZURE_OPENAI_API_VERSION
    expect(
      process.env.OPENAI_API_KEY || process.env.AZURE_OPENAI_API_KEY
    ).toBeDefined();
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const endpoint = process.env.AZURE_AISEARCH_ENDPOINT!;
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const credential = new AzureKeyCredential(process.env.AZURE_AISEARCH_KEY!);
    indexClient = new SearchIndexClient(endpoint, credential);
    // Start each test from a clean slate.
    try {
      await indexClient.deleteIndex(INDEX_NAME);
    } catch (e) {
      // Ignore if documents or index do not exist
    }
  });
  afterAll(async () => {
    try {
      await indexClient.deleteIndex(INDEX_NAME);
    } catch (e) {
      // Ignore
    }
  });
  test("performs similarity search", async () => {
    const vectorStore = new AzureAISearchVectorStore(new OpenAIEmbeddings(), {
      indexName: INDEX_NAME,
      search: {
        type: AzureAISearchQueryType.SemanticHybrid,
      },
    });
    expect(vectorStore).toBeDefined();
    await vectorStore.addDocuments(
      [
        {
          pageContent: "This book is about politics",
          metadata: {
            source: "doc1",
            attributes: [{ key: "a", value: "1" }],
          },
        },
        {
          pageContent: "Cats sleeps a lot.",
          metadata: {
            source: "doc2",
            attributes: [{ key: "b", value: "1" }],
          },
        },
        {
          pageContent: "Sandwiches taste good.",
          metadata: {
            source: "doc3",
            attributes: [{ key: "c", value: "1" }],
          },
        },
        {
          pageContent: "The house is open",
          metadata: {
            source: "doc4",
            attributes: [
              { key: "d", value: "1" },
              { key: "e", value: "2" },
            ],
          },
        },
      ],
      { ids: DOCUMENT_IDS }
    );
    // Wait for the documents to be indexed
    await setTimeout(1000);
    const results: Document[] = await vectorStore.similaritySearch(
      "sandwich",
      1
    );
    expect(results.length).toEqual(1);
    expect(results).toMatchObject([
      {
        pageContent: "Sandwiches taste good.",
        metadata: {
          source: "doc3",
          attributes: [{ key: "c", value: "1" }],
        },
      },
    ]);
    // The retriever interface should surface the same store contents.
    const retriever = vectorStore.asRetriever({});
    const docs = await retriever.getRelevantDocuments("house");
    expect(docs).toBeDefined();
    expect(docs[0]).toMatchObject({
      pageContent: "The house is open",
      metadata: {
        source: "doc4",
        attributes: [
          { key: "d", value: "1" },
          { key: "e", value: "2" },
        ],
      },
    });
  });
  test("performs max marginal relevance search", async () => {
    const texts = ["foo", "foo", "fox"];
    const vectorStore = await AzureAISearchVectorStore.fromTexts(
      texts,
      {},
      new OpenAIEmbeddings(),
      {
        indexName: INDEX_NAME,
        search: {
          type: "similarity",
        },
      }
    );
    // Wait for the documents to be indexed
    await setTimeout(1000);
    const output = await vectorStore.maxMarginalRelevanceSearch("foo", {
      k: 10,
      fetchK: 20,
      lambda: 0.1,
    });
    expect(output).toHaveLength(texts.length);
    // MMR with a low lambda favors diversity, pulling "fox" up to 2nd place.
    const actual = output.map((doc) => doc.pageContent);
    const expected = ["foo", "fox", "foo"];
    expect(actual).toEqual(expected);
    const standardRetriever = await vectorStore.asRetriever();
    const standardRetrieverOutput =
      await standardRetriever.getRelevantDocuments("foo");
    expect(output).toHaveLength(texts.length);
    // Plain similarity keeps the two "foo" docs first.
    const standardRetrieverActual = standardRetrieverOutput.map(
      (doc) => doc.pageContent
    );
    const standardRetrieverExpected = ["foo", "foo", "fox"];
    expect(standardRetrieverActual).toEqual(standardRetrieverExpected);
    const retriever = await vectorStore.asRetriever({
      searchType: "mmr",
      searchKwargs: {
        fetchK: 20,
        lambda: 0.1,
      },
    });
    const retrieverOutput = await retriever.getRelevantDocuments("foo");
    expect(output).toHaveLength(texts.length);
    const retrieverActual = retrieverOutput.map((doc) => doc.pageContent);
    const retrieverExpected = ["foo", "fox", "foo"];
    expect(retrieverActual).toEqual(retrieverExpected);
    const similarity = await vectorStore.similaritySearchWithScore("foo", 1);
    expect(similarity.length).toBe(1);
  });
});
describe.skip("AzureAISearchVectorStore integration tests", () => {
const embeddings = new FakeEmbeddings();
let indexClient: SearchIndexClient;
const embedMock = jest
.spyOn(FakeEmbeddings.prototype, "embedDocuments")
.mockImplementation(async (documents: string[]) =>
documents.map(() => Array(1536).fill(0.2))
);
const queryMock = jest
.spyOn(FakeEmbeddings.prototype, "embedQuery")
.mockImplementation(async () => Array(1536).fill(0.2));
beforeEach(() => {
embedMock.mockClear();
queryMock.mockClear();
});
beforeAll(async () => {
expect(process.env.AZURE_AISEARCH_ENDPOINT).toBeDefined();
expect(process.env.AZURE_AISEARCH_KEY).toBeDefined();
indexClient = new SearchIndexClient(
process.env.AZURE_AISEARCH_ENDPOINT!,
new AzureKeyCredential(process.env.AZURE_AISEARCH_KEY!)
);
try {
await indexClient.deleteIndex(INDEX_NAME);
} catch (e) {
// Ignore
}
});
afterAll(async () => {
try {
await indexClient.deleteIndex(INDEX_NAME);
} catch (e) {
// Ignore
}
});
test("test index creation if not exists", async () => {
const newName = "index-undefined";
try {
await indexClient.deleteIndex(newName);
} catch (e) {
// Ignore
}
const store = new AzureAISearchVectorStore(embeddings, {
indexName: newName,
search: {
type: AzureAISearchQueryType.Similarity,
},
});
await store.addDocuments([
{
pageContent: "foo",
metadata: {
source: "bar",
},
},
]);
const index = await indexClient.getIndex(newName);
expect(index).toBeDefined();
// Cleanup
try {
await indexClient.deleteIndex(newName);
} catch (e) {
// Ignore
}
});
test("test add document", async () => {
const id = new Date().getTime().toString();
const store = new AzureAISearchVectorStore(embeddings, {
indexName: INDEX_NAME,
search: {
type: AzureAISearchQueryType.Similarity,
},
});
const result = await store.addDocuments(
[
new Document<AzureAISearchDocumentMetadata>({
pageContent: "test index document upload text",
metadata: {
source: "test",
},
}),
],
{
ids: [id],
}
);
expect(result).toHaveLength(1);
});
test("test search document", async () => {
const store = await AzureAISearchVectorStore.fromTexts(
["test index document upload text"],
[],
embeddings,
{
indexName: INDEX_NAME,
}
);
// Need to wait a bit for the document to be indexed
await setTimeout(1000);
const docs = await store.similaritySearch("test", 1);
expect(docs).toHaveLength(1);
expect(docs[0].metadata.embeddings).not.toBeDefined();
});
test("test search document with included embeddings", async () => {
const store = await AzureAISearchVectorStore.fromTexts(
["test index document upload text"],
[],
embeddings,
{
indexName: INDEX_NAME,
}
);
// Need to wait a bit for the document to be indexed
await setTimeout(1000);
const docs = await store.similaritySearch("test", 1, {
includeEmbeddings: true,
});
expect(docs).toHaveLength(1);
expect(docs[0].metadata.embedding).toBeDefined();
});
test("test search document with filter", async () => {
const store = await AzureAISearchVectorStore.fromTexts(
["test index document upload text"],
[
{
source: "filter-test",
attributes: [{ key: "abc", value: "def" }],
},
],
embeddings,
{
indexName: INDEX_NAME,
}
);
// Need to wait a bit for the document to be indexed
await setTimeout(1000);
const bySource = await store.similaritySearch("test", 1, {
filterExpression: "metadata/source eq 'filter-test'",
});
const byAttr = await store.similaritySearch("test", 1, {
filterExpression:
"metadata/attributes/any(t: t/key eq 'abc' and t/value eq 'def')",
});
expect(bySource).toHaveLength(1);
expect(byAttr).toHaveLength(1);
});
test("test search document with query key", async () => {
  const store = new AzureAISearchVectorStore(embeddings, {
    indexName: INDEX_NAME,
  });
  // Need to wait a bit for the document to be indexed.
  // (Previously this wait ran *after* the search it was meant to protect,
  // making it useless; it now runs before the query so documents added by
  // earlier tests are visible to the search.)
  await setTimeout(1000);
  const result = await store.similaritySearch("test", 1);
  expect(result).toBeDefined();
});
test("test delete documents by id", async () => {
  const id = new Date().getTime().toString();
  const store = new AzureAISearchVectorStore(embeddings, {
    indexName: INDEX_NAME,
  });

  const doc = new Document<AzureAISearchDocumentMetadata>({
    pageContent: "test index document upload text",
    metadata: {
      source: "deleteById",
    },
  });
  await store.addDocuments([doc], { ids: [id] });

  // Need to wait a bit for the document to be indexed
  await setTimeout(1000);

  await store.delete({ ids: id });

  // Wait a bit for the index to be updated
  await setTimeout(1000);

  // Searching by the marker source should now come back empty.
  const docs = await store.similaritySearch("test", 1, {
    filterExpression: "metadata/source eq 'deleteById'",
  });
  expect(docs).toHaveLength(0);
});
test("test delete documents by filter", async () => {
  const id = new Date().getTime().toString();
  const source = `test-${id}`;
  const store = new AzureAISearchVectorStore(embeddings, {
    indexName: INDEX_NAME,
  });
  // Same OData expression is used to delete and to verify deletion.
  const sourceFilter = `metadata/source eq '${source}'`;

  await store.addDocuments([
    new Document<AzureAISearchDocumentMetadata>({
      pageContent: "test index document upload text",
      metadata: {
        source,
      },
    }),
  ]);

  // Need to wait a bit for the document to be indexed
  await setTimeout(1000);

  await store.delete({
    filter: {
      filterExpression: sourceFilter,
    },
  });

  // Wait a bit for the index to be updated
  await setTimeout(1000);

  const docs = await store.similaritySearch("test", 1, {
    filterExpression: sourceFilter,
  });
  expect(docs).toHaveLength(0);
});
test("test connect with custom credentials", async () => {
  const store = await AzureAISearchVectorStore.fromTexts(
    ["test index document upload text"],
    [],
    embeddings,
    {
      indexName: INDEX_NAME,
      credentials: new AzureKeyCredential(process.env.AZURE_AISEARCH_KEY!),
    }
  );
  // Need to wait a bit for the document to be indexed
  await setTimeout(1000);
  const docs = await store.similaritySearch("test", 1);
  expect(docs).toHaveLength(1);
  // Vectors are surfaced as `metadata.embedding` (singular) when requested;
  // the previous assertion on the non-existent `embeddings` key was vacuous.
  expect(docs[0].metadata.embedding).not.toBeDefined();
});
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/vectara.int.test.ts
|
/* eslint-disable @typescript-eslint/no-unused-vars */
/* eslint-disable no-process-env */
import fs from "fs";
import { expect, beforeAll } from "@jest/globals";
import { insecureHash } from "@langchain/core/utils/hash";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import {
VectaraFile,
VectaraLibArgs,
VectaraStore,
VectaraSummary,
} from "../vectara.js";
import { VectaraSummaryRetriever } from "../../retrievers/vectara_summary.js";
/**
 * Builds the fixture documents for the Vectara integration tests: two
 * English passages from Lord of the Rings and one French passage from
 * The Hitchhiker's Guide to the Galaxy. Each document carries a
 * deterministic `document_id` derived from a hash of its own text.
 */
const getDocs = (): Document[] => {
  // Some text from Lord of the Rings
  const englishOne = `It all depends on what you want. You can trust us to stick to you through thick and thin to the
bitter end. And you can trust us to keep any secret of yours - closer than you keep it yourself.
But you cannot trust us to let you face trouble alone, and go off without a word. We are your
friends, Frodo. Anyway: there it is. We know most of what Gandalf has told you. We know a good
deal about the Ring. We are horribly afraid - but we are coming with you; or following you
like hounds.`;
  const englishTwo = `Sam lay back, and stared with open mouth, and for a moment, between bewilderment and great joy,
he could not answer. At last he gasped: “Gandalf! I thought you were dead! But then I thought I
was dead myself. Is everything sad going to come untrue? What's happened to the world?`;
  const frenchOne = `Par exemple, sur la planète Terre, l'homme a toujours supposé qu'il était plus intelligent que les dauphins
parce qu'il avait accompli tant de choses - la roue, New York, les guerres, etc. passer du
bon temps. Mais à l'inverse, les dauphins ont toujours cru qu'ils étaient bien plus
intelligents que l'homme, pour les mêmes raisons précisément.`;

  // All fixtures share the same genre; everything else varies per document.
  const makeDoc = (
    pageContent: string,
    title: string,
    author: string,
    lang: string
  ): Document =>
    new Document({
      pageContent,
      metadata: {
        document_id: insecureHash(pageContent), // Generate a hashcode for document id based on the text
        title,
        author,
        genre: "fiction",
        lang,
      },
    });

  return [
    makeDoc(englishOne, "Lord of the Rings", "Tolkien", "eng"),
    makeDoc(englishTwo, "Lord of the Rings", "Tolkien", "eng"),
    makeDoc(
      frenchOne,
      "The hitchhiker's guide to the galaxy",
      "Douglas Adams",
      "fra"
    ),
  ];
};
// Parse VECTARA_CORPUS_ID as a comma-separated list of numeric corpus ids,
// falling back to [0] when the variable is unset or contains a non-numeric
// entry.
//
// Bug fixed: the previous version assigned `corpusId = [0]` inside the
// `.map()` callback when it saw a NaN, but that assignment was immediately
// clobbered by the result of `.map()` itself, leaving NaN entries in the
// array that later flow into the Vectara request.
let corpusId: number[] = [0];
const envValue = process.env.VECTARA_CORPUS_ID;
if (envValue) {
  const parsed = envValue.split(",").map((id) => Number(id));
  if (parsed.length > 0 && parsed.every((num) => !Number.isNaN(num))) {
    corpusId = parsed;
  }
}
describe("VectaraStore", () => {
// Fail fast with a clear message if any required Vectara credential is missing.
for (const requiredVar of [
  "VECTARA_CUSTOMER_ID",
  "VECTARA_CORPUS_ID",
  "VECTARA_API_KEY",
]) {
  if (!process.env[requiredVar]) {
    throw new Error(`${requiredVar} not set`);
  }
}
describe("fromTexts", () => {
  // Connection settings sourced from the environment.
  const args: VectaraLibArgs = {
    customerId: Number(process.env.VECTARA_CUSTOMER_ID) || 0,
    corpusId,
    apiKey: process.env.VECTARA_API_KEY || "",
  };

  test.skip("with fakeEmbeddings doesn't throw error", () => {
    // Vectara computes embeddings server-side, so fake embeddings are fine.
    const build = () =>
      VectaraStore.fromTexts([], [], new FakeEmbeddings(), args);
    expect(build).not.toThrow();
  });
});
describe("fromDocuments", () => {
  const args: VectaraLibArgs = {
    customerId: Number(process.env.VECTARA_CUSTOMER_ID) || 0,
    corpusId,
    apiKey: process.env.VECTARA_API_KEY || "",
  };

  test.skip("with fakeEmbeddings doesn't throw error", async () => {
    // Uploading the fixtures should resolve to a usable store instance.
    const pending = VectaraStore.fromDocuments(
      getDocs(),
      new FakeEmbeddings(),
      args
    );
    await expect(pending).resolves.toBeDefined();
  });
});
describe("access operations", () => {
let store: VectaraStore;
let doc_ids: string[] = [];
beforeAll(async () => {
store = new VectaraStore({
customerId: Number(process.env.VECTARA_CUSTOMER_ID) || 0,
corpusId,
apiKey: process.env.VECTARA_API_KEY || "",
});
doc_ids = await store.addDocuments(getDocs());
});
// The tests below run against a live Vectara corpus. They share the `store`
// instance and accumulated `doc_ids` populated in beforeAll, and are skipped
// by default since they require real credentials.

test.skip("similaritySearchWithScore", async () => {
  const resultsWithScore = await store.similaritySearchWithScore(
    "What did Sam do?",
    10, // Number of results needed
    { lambda: 0.025 } // lambda tunes the neural/keyword hybrid-search mix
  );
  expect(resultsWithScore.length).toBeGreaterThan(0);
  expect(resultsWithScore[0][0].pageContent.length).toBeGreaterThan(0);
  expect(resultsWithScore[0][0].metadata.title).toBe("Lord of the Rings");
  // The relevance score of the top hit should be strictly positive.
  expect(resultsWithScore[0][1]).toBeGreaterThan(0);
});

test.skip("similaritySearch", async () => {
  const results = await store.similaritySearch(
    "Was Gandalf dead?",
    10, // Number of results needed
    {
      lambda: 0.025,
      // Return one sentence of surrounding context on each side of the hit.
      contextConfig: {
        sentencesAfter: 1,
        sentencesBefore: 1,
      },
    }
  );
  expect(results.length).toBeGreaterThan(0);
  expect(results[0].pageContent.length).toBeGreaterThan(0);
  expect(results[0].metadata.title).toBe("Lord of the Rings");
});

test.skip("similaritySearch with filter", async () => {
  const results = await store.similaritySearch(
    "Was Gandalf dead?",
    10, // Number of results needed
    { filter: "part.lang = 'fra'", lambda: 0.025 } // Filter on the language of the document
  );
  expect(results.length).toBeGreaterThan(0);
  expect(results[0].pageContent.length).toBeGreaterThan(0);
  // Query filtered on French, so we expect only French results
  const hasEnglish = results.some(
    (result) =>
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      result.metadata.lang === "eng"
  );
  expect(hasEnglish).toBe(false);
});

test.skip("similaritySearch with contextConfig", async () => {
  const results = await store.similaritySearch(
    "Was Gandalf dead?",
    10, // Number of results needed
    {
      lambda: 0.025,
      // Request both character- and sentence-based context windows, with the
      // matched span wrapped in <b>…</b> tags in the returned text.
      contextConfig: {
        charsBefore: 30,
        charsAfter: 30,
        sentencesBefore: 3,
        sentencesAfter: 3,
        startTag: "<b>",
        endTag: "</b>",
      },
    }
  );
  expect(results.length).toBeGreaterThan(0);
  expect(results[0].pageContent.length).toBeGreaterThan(0);
});

test.skip("similaritySearch with MMR", async () => {
  const results = await store.similaritySearch(
    "Was Gandalf dead?",
    10, // Number of results needed
    {
      lambda: 0.025,
      // diversityBias 1.0 requests maximal diversification of the results.
      mmrConfig: {
        diversityBias: 1.0,
      },
    }
  );
  expect(results.length).toBeGreaterThan(0);
  expect(results[0].pageContent.length).toBeGreaterThan(0);
});

test.skip("RAG retrieval with generative summarization", async () => {
  // Ask Vectara to generate an Italian summary over the top 3 hits.
  const summaryConfig: VectaraSummary = {
    enabled: true,
    summarizerPromptName: "vectara-summary-ext-v1.2.0",
    maxSummarizedResults: 3,
    responseLang: "ita",
  };
  const topK = 3;
  const retriever = new VectaraSummaryRetriever({
    vectara: store,
    topK,
    summaryConfig,
    filter: {
      lambda: 0.025,
    },
  });
  // NOTE(review): getRelevantDocuments is deprecated in newer
  // @langchain/core releases in favor of invoke — confirm target version.
  const result = await retriever.getRelevantDocuments("Was Gandalf dead?");
  expect(result.length).toBeGreaterThan(0);
  expect(result.length).toBe(topK + 1); // +1 for the summary
  expect(result[0].pageContent.length).toBeGreaterThan(0);
});

test.skip("addFiles", async () => {
  // Round-trips two fixture texts through real files on disk (written to the
  // current working directory, deleted at the end) plus a sample PDF.
  const docs = getDocs();
  const englishOneContent = docs[0].pageContent;
  const frenchOneContent = docs[2].pageContent;
  const files = [
    { filename: "englishOne.txt", content: englishOneContent },
    { filename: "frenchOne.txt", content: frenchOneContent },
  ];
  const vectaraFiles: VectaraFile[] = [];
  for (const file of files) {
    fs.writeFileSync(file.filename, file.content);
    const buffer = fs.readFileSync(file.filename);
    vectaraFiles.push({
      blob: new Blob([buffer], { type: "text/plain" }),
      fileName: file.filename,
    });
  }
  // Relative path — assumes the tests run from the package root directory.
  const bitcoinBuffer = fs.readFileSync(
    "../examples/src/document_loaders/example_data/bitcoin.pdf"
  );
  vectaraFiles.push({
    blob: new Blob([bitcoinBuffer], { type: "application/pdf" }),
    fileName: "bitcoin.pdf",
  });
  const file_doc_ids = await store.addFiles(vectaraFiles);
  // Record the uploaded ids so afterAll can clean them up too.
  doc_ids = [...doc_ids, ...file_doc_ids];
  for (const file of files) {
    fs.unlinkSync(file.filename);
  }
  expect(file_doc_ids.length).toEqual(3);
  const searchResults = await store.similaritySearch("What is bitcoin");
  expect(searchResults.length).toBeGreaterThan(0);
  expect(searchResults[0].pageContent).toContain(
    "A Peer-to-Peer Electronic Cash System"
  );
});

// delete documents added in the test
afterAll(async () => {
  // Builds a fresh store instance rather than reusing the beforeAll one —
  // presumably so cleanup still works if the shared instance was left in a
  // bad state; TODO confirm intent.
  store = new VectaraStore({
    customerId: Number(process.env.VECTARA_CUSTOMER_ID) || 0,
    corpusId,
    apiKey: process.env.VECTARA_API_KEY || "",
  });
  await store.deleteDocuments(doc_ids);
});
});
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/typeorm.int.test.ts
|
import { expect, test } from "@jest/globals";
import { DataSourceOptions } from "typeorm";
import { OpenAIEmbeddings } from "@langchain/openai";
import { TypeORMVectorStore } from "../typeorm.js";
test.skip("Test embeddings creation", async () => {
  // Connection settings for a local Postgres instance with pgvector.
  const connection = {
    type: "postgres",
    host: "localhost",
    port: 5432,
    username: "myuser",
    password: "ChangeMe",
    database: "api",
  } as DataSourceOptions;

  const typeormVectorStore = await TypeORMVectorStore.fromDataSource(
    new OpenAIEmbeddings(),
    {
      postgresConnectionOptions: connection,
      tableName: "testlangchain",
    }
  );
  expect(typeormVectorStore).toBeDefined();

  await typeormVectorStore.addDocuments([
    { pageContent: "hello", metadata: { a: 1 } },
    { pageContent: "hi", metadata: { a: 1 } },
    { pageContent: "Cat drinks milk", metadata: { a: 2 } },
  ]);

  // Filtering on { a: 2 } should only match the "Cat drinks milk" row.
  const results = await typeormVectorStore.similaritySearch("hello", 2, {
    a: 2,
  });
  expect(results).toHaveLength(1);
  expect(results[0].pageContent).toEqual("Cat drinks milk");

  // Clean up: empty the table and close the connection pool.
  await typeormVectorStore.appDataSource.query(
    'TRUNCATE TABLE "testlangchain"'
  );
  await typeormVectorStore.appDataSource.destroy();
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/supabase.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { test, expect } from "@jest/globals";
import { createClient } from "@supabase/supabase-js";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { SupabaseVectorStore, SupabaseFilterRPCCall } from "../supabase.js";
test("SupabaseVectorStore with external ids", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  const store = new SupabaseVectorStore(new OpenAIEmbeddings(), { client });
  expect(store).toBeDefined();

  // Use a timestamp both as unique page content and as a metadata marker so
  // rows from this run don't collide with data left by earlier runs.
  const createdAt = new Date().getTime();
  await store.addDocuments([
    { pageContent: createdAt.toString(), metadata: { a: createdAt } },
    { pageContent: "hi", metadata: { a: createdAt } },
    { pageContent: "bye", metadata: { a: createdAt } },
    { pageContent: "what's this", metadata: { a: createdAt } },
  ]);

  const results = await store.similaritySearch(createdAt.toString(), 1);
  expect(results).toHaveLength(1);
  expect(results).toEqual([
    new Document({
      metadata: { a: createdAt },
      pageContent: createdAt.toString(),
    }),
  ]);
});
test("Search a SupabaseVectorStore using a metadata filter", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  const store = new SupabaseVectorStore(new OpenAIEmbeddings(), {
    client,
    tableName: "documents",
  });
  expect(store).toBeDefined();

  // Insert four rows whose created_at values are consecutive, so a single
  // row can be picked out with an exact-match metadata filter below.
  const createdAt = new Date().getTime();
  await store.addDocuments(
    [0, 1, 2, 3].map((offset) => ({
      pageContent: `hello ${offset}`,
      metadata: { created_at: createdAt + offset },
    }))
  );

  const results = await store.similaritySearch("hello", 1, {
    created_at: createdAt + 2,
  });
  expect(results).toHaveLength(1);
  expect(results).toEqual([
    new Document({
      metadata: { created_at: createdAt + 2 },
      pageContent: "hello 2",
    }),
  ]);
});
test("Search a SupabaseVectorStore with a functional metadata filter", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  const embeddings = new OpenAIEmbeddings();
  const store = new SupabaseVectorStore(embeddings, {
    client,
    tableName: "documents",
  });
  expect(store).toBeDefined();
  // created_at tags this run's rows so the filters below exclude data left
  // over from earlier test runs against the same shared table.
  const createdAt = new Date().getTime();
  const docs = [
    {
      pageContent:
        "This is a long text, but it actually means something because vector database does not understand Lorem Ipsum. So I would need to expand upon the notion of quantum fluff, a theorectical concept where subatomic particles coalesce to form transient multidimensional spaces. Yet, this abstraction holds no real-world application or comprehensible meaning, reflecting a cosmic puzzle.",
      metadata: { b: 1, c: 10, stuff: "right", created_at: createdAt },
    },
    {
      pageContent:
        "This is a long text, but it actually means something because vector database does not understand Lorem Ipsum. So I would need to proceed by discussing the echo of virtual tweets in the binary corridors of the digital universe. Each tweet, like a pixelated canary, hums in an unseen frequency, a fascinatingly perplexing phenomenon that, while conjuring vivid imagery, lacks any concrete implication or real-world relevance, portraying a paradox of multidimensional spaces in the age of cyber folklore.",
      metadata: { b: 2, c: 9, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "hello",
      metadata: { b: 1, c: 9, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "hello",
      metadata: { b: 1, c: 9, stuff: "wrong", created_at: createdAt },
    },
    {
      pageContent: "hi",
      metadata: { b: 2, c: 8, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "bye",
      metadata: { b: 3, c: 7, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "what's this",
      metadata: { b: 4, c: 6, stuff: "right", created_at: createdAt },
    },
  ];
  await store.addDocuments(docs);
  // Functional filter: receives the PostgREST query builder for the match
  // RPC. `metadata->b::int` casts the JSONB field to int for comparison;
  // textSearch adds a full-text predicate on the content column.
  const funcFilterA: SupabaseFilterRPCCall = (rpc) =>
    rpc
      .filter("metadata->b::int", "lt", 3)
      .filter("metadata->c::int", "gt", 7)
      .filter("metadata->created_at::int", "eq", createdAt)
      .textSearch("content", `'multidimensional' & 'spaces'`, {
        config: "english",
      });
  // Only the two long "multidimensional spaces" documents satisfy both the
  // metadata constraints and the text-search predicate.
  const resultA = await store.similaritySearch("quantum", 4, funcFilterA);
  const gibberish = resultA.map((doc) => doc.pageContent);
  expect(gibberish).toEqual([docs[0].pageContent, docs[1].pageContent]);
  // Second filter swaps the text search for an exact string match;
  // `->>` extracts the JSONB value as text rather than JSON.
  const funcFilterB: SupabaseFilterRPCCall = (rpc) =>
    rpc
      .filter("metadata->b::int", "lt", 3)
      .filter("metadata->c::int", "gt", 7)
      .filter("metadata->>stuff", "eq", "right")
      .filter("metadata->created_at::int", "eq", createdAt);
  const resultB = await store.similaritySearch("hello", 2, funcFilterB);
  expect(resultB).toEqual([
    new Document({
      pageContent: "hello",
      metadata: {
        b: 1,
        c: 9,
        stuff: "right",
        created_at: createdAt,
      },
    }),
    new Document({
      pageContent: "hi",
      metadata: {
        b: 2,
        c: 8,
        stuff: "right",
        created_at: createdAt,
      },
    }),
  ]);
});
test("Search a SupabaseVectorStore with MMR", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  const store = new SupabaseVectorStore(new OpenAIEmbeddings(), { client });
  expect(store).toBeDefined();

  // Tag every row with the current timestamp so the filter below only sees
  // the documents inserted by this test run.
  const createdAt = new Date().getTime();
  const contents = ["hi", "greetings", "bye", "what's this"];
  await store.addDocuments(
    contents.map((pageContent) => ({
      pageContent,
      metadata: { a: createdAt },
    }))
  );

  // Fetch 20 candidates, then rerank for diversity down to 2.
  const results = await store.maxMarginalRelevanceSearch("hello world", {
    k: 2,
    fetchK: 20,
    filter: { a: createdAt },
  });

  expect(results).toHaveLength(2);
  expect(results).toEqual([
    new Document({
      metadata: { a: createdAt },
      pageContent: "greetings",
    }),
    new Document({
      metadata: { a: createdAt },
      pageContent: "what's this",
    }),
  ]);
});
test("Search a SupabaseVectorStore with MMR and a functional metadata filter", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  const embeddings = new OpenAIEmbeddings();
  const store = new SupabaseVectorStore(embeddings, {
    client,
    tableName: "documents",
  });
  expect(store).toBeDefined();
  // created_at tags this run's rows so the filter below excludes data left
  // over from earlier test runs against the same shared table.
  const createdAt = new Date().getTime();
  const docs = [
    {
      pageContent:
        "This is a long text, but it actually means something because vector database does not understand Lorem Ipsum. So I would need to expand upon the notion of quantum fluff, a theorectical concept where subatomic particles coalesce to form transient multidimensional spaces. Yet, this abstraction holds no real-world application or comprehensible meaning, reflecting a cosmic puzzle.",
      metadata: { b: 1, c: 10, stuff: "right", created_at: createdAt },
    },
    {
      pageContent:
        "This is a long text, but it actually means something because vector database does not understand Lorem Ipsum. So I would need to proceed by discussing the echo of virtual tweets in the binary corridors of the digital universe. Each tweet, like a pixelated canary, hums in an unseen frequency, a fascinatingly perplexing phenomenon that, while conjuring vivid imagery, lacks any concrete implication or real-world relevance, portraying a paradox of multidimensional spaces in the age of cyber folklore.",
      metadata: { b: 2, c: 9, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "hello",
      metadata: { b: 1, c: 9, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "hello",
      metadata: { b: 1, c: 9, stuff: "wrong", created_at: createdAt },
    },
    {
      pageContent: "hi",
      metadata: { b: 2, c: 8, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "bye",
      metadata: { b: 3, c: 7, stuff: "right", created_at: createdAt },
    },
    {
      pageContent: "what's this",
      metadata: { b: 4, c: 6, stuff: "right", created_at: createdAt },
    },
  ];
  await store.addDocuments(docs);
  // Functional filter combining JSONB int casts with a full-text predicate;
  // only the two long "multidimensional spaces" documents satisfy it.
  const funcFilter: SupabaseFilterRPCCall = (rpc) =>
    rpc
      .filter("metadata->b::int", "lt", 3)
      .filter("metadata->c::int", "gt", 7)
      .filter("metadata->created_at::int", "eq", createdAt)
      .textSearch("content", `'multidimensional' & 'spaces'`, {
        config: "english",
      });
  // MMR rerank over the filtered candidates.
  const result = await store.maxMarginalRelevanceSearch("quantum", {
    k: 4,
    filter: funcFilter,
  });
  const gibberish = result.map((doc) => doc.pageContent);
  expect(gibberish).toEqual([docs[0].pageContent, docs[1].pageContent]);
});
test("Upsert on a SupabaseVectorStore", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  const store = new SupabaseVectorStore(new OpenAIEmbeddings(), {
    client,
    tableName: "documents",
  });
  expect(store).toBeDefined();

  const createdAt = new Date().getTime();
  const runFilter = { created_at: createdAt };

  // First insert returns the generated row ids.
  const ids = await store.addDocuments([
    { pageContent: "hello 0", metadata: { created_at: createdAt } },
  ]);
  const firstPass = await store.similaritySearch("hello", 2, runFilter);
  expect(firstPass).toHaveLength(1);

  // Re-adding with the same ids must overwrite rather than duplicate.
  const ids2 = await store.addDocuments(
    [{ pageContent: "hello 1", metadata: { created_at: createdAt } }],
    { ids }
  );
  expect(ids).toEqual(ids2);

  const secondPass = await store.similaritySearch("hello", 2, runFilter);
  expect(secondPass).toHaveLength(1);
  expect(secondPass[0].pageContent).toEqual("hello 1");
});
test("Delete on a SupabaseVectorStore", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  const store = new SupabaseVectorStore(new OpenAIEmbeddings(), {
    client,
    tableName: "documents",
  });
  expect(store).toBeDefined();

  const createdAt = new Date().getTime();
  // The last two rows share the same created_at so the filtered search
  // below returns exactly that pair.
  const ids = await store.addDocuments([
    { pageContent: "hello 0", metadata: { created_at: createdAt } },
    { pageContent: "hello 1", metadata: { created_at: createdAt + 1 } },
    { pageContent: "hello 2", metadata: { created_at: createdAt + 2 } },
    { pageContent: "hello 3", metadata: { created_at: createdAt + 2 } },
  ]);

  const before = await store.similaritySearch("hello", 2, {
    created_at: createdAt + 2,
  });
  expect(before).toHaveLength(2);
  expect(before).toEqual([
    new Document({
      metadata: { created_at: createdAt + 2 },
      pageContent: "hello 2",
    }),
    new Document({
      metadata: { created_at: createdAt + 2 },
      pageContent: "hello 3",
    }),
  ]);

  // Deleting the last inserted row ("hello 3") should leave only "hello 2".
  await store.delete({ ids: ids.slice(-1) });
  const after = await store.similaritySearch("hello", 1, {
    created_at: createdAt + 2,
  });
  expect(after).toEqual([
    new Document({
      metadata: { created_at: createdAt + 2 },
      pageContent: "hello 2",
    }),
  ]);
});
test("Add documents with manual ids", async () => {
  const client = createClient(
    process.env.SUPABASE_VECTOR_STORE_URL!,
    process.env.SUPABASE_VECTOR_STORE_PRIVATE_KEY!
  );
  // A small batch size forces the upsert to be split across requests.
  const store = new SupabaseVectorStore(new OpenAIEmbeddings(), {
    client,
    tableName: "documents",
    upsertBatchSize: 2,
  });
  expect(store).toBeDefined();

  const createdAt = new Date().getTime();
  const ids = [1, 2, 3, 4].map((offset) => (createdAt + offset).toString());

  const returnedIds = await store.addDocuments(
    [
      { pageContent: "hello 0", metadata: { created_at: createdAt } },
      { pageContent: "hello 1", metadata: { created_at: createdAt + 1 } },
      { pageContent: "hello 2", metadata: { created_at: createdAt + 2 } },
      { pageContent: "hello 3", metadata: { created_at: createdAt + 2 } },
    ],
    { ids }
  );
  // The store should echo back exactly the ids that were supplied.
  expect(ids).toEqual(returnedIds.map((id) => id.toString()));

  await store.delete({ ids });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/opensearch.int.test.ts
|
/* eslint-disable no-process-env */
import { test, expect } from "@jest/globals";
import { Client } from "@opensearch-project/opensearch";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { OpenSearchVectorStore } from "../opensearch.js";
test.skip("OpenSearchVectorStore integration", async () => {
  const { OPENSEARCH_URL } = process.env;
  if (!OPENSEARCH_URL) {
    throw new Error("OPENSEARCH_URL not set");
  }

  const client = new Client({ nodes: [OPENSEARCH_URL] });
  const store = new OpenSearchVectorStore(new OpenAIEmbeddings(), {
    client,
    indexName: "test_index",
  });
  // Start from a clean index so earlier runs don't affect the assertions.
  await store.deleteIfExists();
  expect(store).toBeDefined();

  await store.addDocuments([
    { pageContent: "hello", metadata: { a: 2 } },
    { pageContent: "car", metadata: { a: 1 } },
    { pageContent: "adjective", metadata: { a: 1 } },
    { pageContent: "hi", metadata: { a: 1 } },
  ]);

  const results1 = await store.similaritySearch("hello!", 1);
  expect(results1).toHaveLength(1);
  expect(results1).toEqual([
    new Document({ metadata: { a: 2 }, pageContent: "hello" }),
  ]);

  // With a metadata filter, a single scored match is still returned.
  const results2 = await store.similaritySearchWithScore("hello!", 1, {
    a: 1,
  });
  expect(results2).toHaveLength(1);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/clickhouse.int.test.ts
|
/* eslint-disable no-process-env */
import { test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { ClickHouseStore } from "../clickhouse.js";
// Import OpenAIEmbeddings if you have a valid OpenAI API key
import { HuggingFaceInferenceEmbeddings } from "../../embeddings/hf.js";
test.skip("ClickHouseStore.fromText", async () => {
  // Connection settings with local-development fallbacks.
  const connection = {
    host: process.env.CLICKHOUSE_HOST || "localhost",
    port: process.env.CLICKHOUSE_PORT || "8443",
    username: process.env.CLICKHOUSE_USERNAME || "username",
    password: process.env.CLICKHOUSE_PASSWORD || "password",
  };
  const vectorStore = await ClickHouseStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    new HuggingFaceInferenceEmbeddings(),
    connection
  );

  // Sleep 1 second to ensure that the search occurs after the successful insertion of data.
  // eslint-disable-next-line no-promise-executor-return
  await new Promise((resolve) => setTimeout(resolve, 1000));

  const results = await vectorStore.similaritySearch("hello world", 1);
  expect(results).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2" },
    }),
  ]);

  // A whereStr filter narrows the candidate rows before the vector search.
  const filteredResults = await vectorStore.similaritySearch("hello world", 1, {
    whereStr: "metadata.name = '1'",
  });
  expect(filteredResults).toEqual([
    new Document({
      pageContent: "Bye bye",
      metadata: { id: 1, name: "1" },
    }),
  ]);
});
test.skip("ClickHouseStore.fromExistingIndex", async () => {
  const config = {
    host: process.env.CLICKHOUSE_HOST || "localhost",
    port: process.env.CLICKHOUSE_PORT || "8443",
    username: process.env.CLICKHOUSE_USERNAME || "username",
    password: process.env.CLICKHOUSE_PASSWORD || "password",
    table: "test_table",
  };

  // Populate the table first...
  await ClickHouseStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    new HuggingFaceInferenceEmbeddings(),
    config
  );

  // ...then reconnect to the same table through fromExistingIndex.
  const vectorStore = await ClickHouseStore.fromExistingIndex(
    new HuggingFaceInferenceEmbeddings(),
    config
  );

  // Sleep 1 second to ensure that the search occurs after the successful insertion of data.
  // eslint-disable-next-line no-promise-executor-return
  await new Promise((resolve) => setTimeout(resolve, 1000));

  const results = await vectorStore.similaritySearch("hello world", 1);
  expect(results).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2" },
    }),
  ]);

  const filteredResults = await vectorStore.similaritySearch("hello world", 1, {
    whereStr: "metadata.name = '1'",
  });
  expect(filteredResults).toEqual([
    new Document({
      pageContent: "Bye bye",
      metadata: { id: 1, name: "1" },
    }),
  ]);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/singlestore.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable import/no-extraneous-dependencies */
import { test, expect } from "@jest/globals";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { SingleStoreVectorStore, SearchStrategy } from "../singlestore.js";
/**
 * Deterministic stand-in for OpenAIEmbeddings used by the SingleStore tests.
 * Each successive embed call returns the next point on the unit circle
 * (stepping by pi/10), so vectors are reproducible without any API calls.
 */
class MockEmbeddings extends OpenAIEmbeddings {
  // Counts how many embeddings have been produced; drives the angle.
  queryIndex: number;

  constructor() {
    super();
    this.queryIndex = 0;
  }

  async embedDocuments(documents: string[]): Promise<number[][]> {
    return documents.map((text) => this.embed(text));
  }

  embed(_: string): number[] {
    this.queryIndex += 1;
    const angle = (this.queryIndex * Math.PI) / 10.0;
    return [Math.cos(angle), Math.sin(angle)];
  }

  async embedQuery(document: string): Promise<number[]> {
    return this.embed(document);
  }
}
// Fixture corpus for the SingleStore tests: six short weather vignettes,
// three about rain and three about snow.
const weatherTexts: string[] = [
  "In the parched desert, a sudden rainstorm brought relief, as the droplets danced upon the thirsty earth, rejuvenating the landscape with the sweet scent of petrichor.",
  "Amidst the bustling cityscape, the rain fell relentlessly, creating a symphony of pitter-patter on the pavement, while umbrellas bloomed like colorful flowers in a sea of gray.",
  "High in the mountains, the rain transformed into a delicate mist, enveloping the peaks in a mystical veil, where each droplet seemed to whisper secrets to the ancient rocks below.",
  "Blanketing the countryside in a soft, pristine layer, the snowfall painted a serene tableau, muffling the world in a tranquil hush as delicate flakes settled upon the branches of trees like nature's own lacework.",
  "In the urban landscape, snow descended, transforming bustling streets into a winter wonderland, where the laughter of children echoed amidst the flurry of snowballs and the twinkle of holiday lights.",
  "Atop the rugged peaks, snow fell with an unyielding intensity, sculpting the landscape into a pristine alpine paradise, where the frozen crystals shimmered under the moonlight, casting a spell of enchantment over the wilderness below.",
];
// Index-aligned metadata for weatherTexts. NOTE(review): these fixtures are
// presumably consumed by filtering tests beyond this excerpt — verify
// against the full file.
const weatherMetadata: object[] = [
  { count: "1", category: "rain", group: "a" },
  { count: "2", category: "rain", group: "a" },
  { count: "3", category: "rain", group: "b" },
  { count: "1", category: "snow", group: "b" },
  { count: "2", category: "snow", group: "a" },
  { count: "3", category: "snow", group: "a" },
];
test.skip("SingleStoreVectorStore", async () => {
  // All connection parameters must be provided via the environment.
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();

  const vectorStore = await SingleStoreVectorStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    new OpenAIEmbeddings(),
    {
      connectionOptions: {
        host: process.env.SINGLESTORE_HOST,
        port: Number(process.env.SINGLESTORE_PORT),
        user: process.env.SINGLESTORE_USERNAME,
        password: process.env.SINGLESTORE_PASSWORD,
        database: process.env.SINGLESTORE_DATABASE,
      },
      // Exercise the custom column-name configuration.
      contentColumnName: "cont",
      metadataColumnName: "met",
      vectorColumnName: "vec",
    }
  );
  expect(vectorStore).toBeDefined();

  const helloResults = await vectorStore.similaritySearch("hello world", 1);
  expect(helloResults).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2" },
    }),
  ]);

  // Incremental inserts should be searchable immediately afterwards.
  await vectorStore.addDocuments([
    new Document({
      pageContent: "Green forest",
      metadata: { id: 4, name: "4" },
    }),
    new Document({
      pageContent: "Green field",
      metadata: { id: 5, name: "5" },
    }),
  ]);

  const forestResults = await vectorStore.similaritySearch("forest", 1);
  expect(forestResults).toEqual([
    new Document({
      pageContent: "Green forest",
      metadata: { id: 4, name: "4" },
    }),
  ]);

  // Close the connection pool so jest can exit cleanly.
  await vectorStore.end();
});
// Same smoke test but with the EUCLIDEAN_DISTANCE metric and a connection
// URI instead of discrete connection options.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore euclidean_distance", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    new OpenAIEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "euclidean_distance_test",
      distanceMetric: "EUCLIDEAN_DISTANCE",
    }
  );
  expect(vectorStore).toBeDefined();
  const results = await vectorStore.similaritySearch("hello world", 1);
  expect(results).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2" },
    }),
  ]);
  await vectorStore.end();
});
// Verifies metadata filtering on similarity search: top-level keys, nested
// object keys, multiple keys combined (AND semantics), and a filter that
// matches nothing. Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore filtering", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [
      { id: 2, name: "2", sub: { sub2: { idx: 1 } } },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    new OpenAIEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "filtering_test",
    }
  );
  expect(vectorStore).toBeDefined();
  // Filter by a top-level metadata key.
  const results1 = await vectorStore.similaritySearch("hello world", 1, {
    id: 3,
  });
  expect(results1).toEqual([
    new Document({
      pageContent: "hello nice world",
      metadata: { id: 3, name: "3" },
    }),
  ]);
  const results2 = await vectorStore.similaritySearch("hello nice world", 1, {
    name: "2",
  });
  expect(results2).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2", sub: { sub2: { idx: 1 } } },
    }),
  ]);
  // Filter by a nested metadata key.
  const results3 = await vectorStore.similaritySearch("hello nice world", 1, {
    sub: { sub2: { idx: 1 } },
  });
  expect(results3).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2", sub: { sub2: { idx: 1 } } },
    }),
  ]);
  // Multiple filter keys are combined (both must match).
  const results4 = await vectorStore.similaritySearch("hello nice world", 1, {
    name: "2",
    id: 2,
  });
  expect(results4).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2", sub: { sub2: { idx: 1 } } },
    }),
  ]);
  // Contradictory filter values match no document.
  const results5 = await vectorStore.similaritySearch("hello nice world", 1, {
    name: "3",
    sub: { sub2: { idx: 1 } },
  });
  expect(results5).toEqual([]);
  await vectorStore.end();
});
// Verifies that every search strategy requiring full-text support is rejected
// when the table was created with `useFullTextIndex: false`.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
// Fixes: test title typo ("SingleStorevectorStore") and table name typo
// ("wrong_serch_type_test").
test.skip("SingleStoreVectorStore wrong search type", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    [],
    [],
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "wrong_search_type_test",
      useVectorIndex: true,
      useFullTextIndex: false,
    }
  );
  // Each of these strategies is expected to throw without a full text index.
  for (const searchType of [
    "TEXT_ONLY",
    "FILTER_BY_TEXT",
    "FILTER_BY_VECTOR",
    "WEIGHTED_SUM",
  ]) {
    await vectorStore.setSearchConfig({
      searchStrategy: searchType as SearchStrategy,
    });
    await expect(
      vectorStore.similaritySearch("hello world", 1)
    ).rejects.toThrow(
      "Full text index is required for text-based search strategies."
    );
  }
  await vectorStore.end();
});
// FILTER_BY_TEXT with a metadata filter but no `filterThreshold` must throw.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore no filter threshold type 1", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    [],
    [],
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "no_filter_threshold_type_test",
      useVectorIndex: true,
      useFullTextIndex: true,
      searchConfig: {
        searchStrategy: "FILTER_BY_TEXT",
      },
    }
  );
  await expect(
    vectorStore.similaritySearch("hello world", 1, { id: 1 })
  ).rejects.toThrow(
    "Filter threshold is required for filter-based search strategies."
  );
  await vectorStore.end();
});
// Same as "type 1" but for the FILTER_BY_VECTOR strategy: a metadata filter
// without `filterThreshold` must throw.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore no filter threshold type 2", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    [],
    [],
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "no_filter_threshold_type_test",
      useVectorIndex: true,
      useFullTextIndex: true,
      searchConfig: {
        searchStrategy: "FILTER_BY_VECTOR",
      },
    }
  );
  await expect(
    vectorStore.similaritySearch("hello world", 1, { id: 1 })
  ).rejects.toThrow(
    "Filter threshold is required for filter-based search strategies."
  );
  await vectorStore.end();
});
// WEIGHTED_SUM requires textWeight, vectorWeight AND
// vectorselectCountMultiplier; here the multiplier is missing, so the
// search must throw. Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore no weight coefs 1", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    [],
    [],
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "no_weighted_sum_params",
      useVectorIndex: true,
      useFullTextIndex: true,
      searchConfig: {
        searchStrategy: "WEIGHTED_SUM",
        vectorWeight: 1,
        textWeight: 1,
      },
    }
  );
  await expect(
    vectorStore.similaritySearch("hello world", 1, { id: 1 })
  ).rejects.toThrow(
    "Text and vector weight and vector select count multiplier are required for weighted sum search strategy."
  );
  await vectorStore.end();
});
// WEIGHTED_SUM with `vectorWeight` missing must throw.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore no weight coefs 2", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    [],
    [],
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "no_weighted_sum_params",
      useVectorIndex: true,
      useFullTextIndex: true,
      searchConfig: {
        searchStrategy: "WEIGHTED_SUM",
        textWeight: 1,
        vectorselectCountMultiplier: 10,
      },
    }
  );
  await expect(
    vectorStore.similaritySearch("hello world", 1, { id: 1 })
  ).rejects.toThrow(
    "Text and vector weight and vector select count multiplier are required for weighted sum search strategy."
  );
  await vectorStore.end();
});
// WEIGHTED_SUM with `textWeight` missing must throw.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore no weight coefs 3", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    [],
    [],
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "no_weighted_sum_params",
      useVectorIndex: true,
      useFullTextIndex: true,
      searchConfig: {
        searchStrategy: "WEIGHTED_SUM",
        vectorWeight: 1,
        vectorselectCountMultiplier: 10,
      },
    }
  );
  await expect(
    vectorStore.similaritySearch("hello world", 1, { id: 1 })
  ).rejects.toThrow(
    "Text and vector weight and vector select count multiplier are required for weighted sum search strategy."
  );
  await vectorStore.end();
});
// TEXT_ONLY strategy over the weather fixture, combined with a metadata
// filter (count === "1"); expects the two matching documents in relevance
// order. Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore text only search", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    weatherTexts,
    weatherMetadata,
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "text_only_search",
      useVectorIndex: false,
      useFullTextIndex: true,
      searchConfig: {
        searchStrategy: "TEXT_ONLY",
      },
    }
  );
  const output = await vectorStore.similaritySearch(
    "rainstorm in parched desert",
    3,
    { count: "1" }
  );
  await vectorStore.end();
  expect(output.length).toEqual(2);
  expect(output[0].pageContent).toContain(
    "In the parched desert, a sudden rainstorm brought relief,"
  );
  expect(output[1].pageContent).toContain(
    "Blanketing the countryside in a soft, pristine layer"
  );
});
// FILTER_BY_TEXT strategy with a threshold set via `searchConfig` at
// construction time. Requires a live SingleStore instance (SINGLESTORE_* env vars).
test.skip("SingleStoreVectorStore filter by text search", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = await SingleStoreVectorStore.fromTexts(
    weatherTexts,
    weatherMetadata,
    new MockEmbeddings(),
    {
      connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
      tableName: "filter_by_text_search",
      useVectorIndex: false,
      useFullTextIndex: true,
      searchConfig: {
        searchStrategy: "FILTER_BY_TEXT",
        filterThreshold: 0.0001,
      },
    }
  );
  const output = await vectorStore.similaritySearch(
    "rainstorm in parched desert",
    1
  );
  await vectorStore.end();
  expect(output.length).toEqual(1);
  expect(output[0].pageContent).toContain(
    "In the parched desert, a sudden rainstorm brought relief,"
  );
});
// FILTER_BY_VECTOR strategy: documents are added one by one, then the
// threshold is supplied via setSearchConfig before searching with a metadata
// filter. Requires a live SingleStore instance (SINGLESTORE_* env vars).
// Fix: dropped the pointless `await` in front of the constructor —
// `new SingleStoreVectorStore(...)` returns an instance, not a Promise.
test.skip("SingleStoreVectorStore filter by vector search", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = new SingleStoreVectorStore(new MockEmbeddings(), {
    connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
    tableName: "filter_by_vector_search",
    useVectorIndex: false,
    vectorSize: 2,
    useFullTextIndex: true,
    searchConfig: {
      searchStrategy: "FILTER_BY_VECTOR",
    },
  });
  for (let i = 0; i < weatherTexts.length; i += 1) {
    await vectorStore.addDocuments([
      new Document({
        pageContent: weatherTexts[i],
        metadata: weatherMetadata[i],
      }),
    ]);
  }
  // The threshold was intentionally omitted at construction; it is provided
  // here to make the subsequent filtered search valid.
  await vectorStore.setSearchConfig({
    searchStrategy: "FILTER_BY_VECTOR",
    filterThreshold: -0.2,
  });
  const output = await vectorStore.similaritySearch(
    "rainstorm in parched desert, rain",
    1,
    { group: "b" }
  );
  await vectorStore.end();
  expect(output.length).toEqual(1);
  expect(output[0].pageContent).toContain(
    "High in the mountains, the rain transformed into a delicate"
  );
});
// FILTER_BY_TEXT strategy, this time configured after construction via
// setSearchConfig. Requires a live SingleStore instance (SINGLESTORE_* env vars).
// Fixes: the title duplicated the earlier "filter by text search" test
// (Jest reports duplicate test names ambiguously) and reused its table name,
// so both are disambiguated; also dropped the pointless `await` in front of
// the constructor.
test.skip("SingleStoreVectorStore filter by text search with updated config", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = new SingleStoreVectorStore(new MockEmbeddings(), {
    connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
    tableName: "filter_by_text_search_set_config",
    useVectorIndex: false,
    vectorSize: 2,
    useFullTextIndex: true,
  });
  for (let i = 0; i < weatherTexts.length; i += 1) {
    await vectorStore.addDocuments([
      new Document({
        pageContent: weatherTexts[i],
        metadata: weatherMetadata[i],
      }),
    ]);
  }
  await vectorStore.setSearchConfig({
    searchStrategy: "FILTER_BY_TEXT",
    filterThreshold: 0,
  });
  const output = await vectorStore.similaritySearch(
    "rainstorm in parched desert",
    1
  );
  await vectorStore.end();
  expect(output.length).toEqual(1);
  expect(output[0].pageContent).toContain(
    "In the parched desert, a sudden rainstorm brought relief"
  );
});
// WEIGHTED_SUM must be rejected when the distance metric is not DOT_PRODUCT.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
// Fixes: typo in the table name ("unsuported") and the pointless `await`
// in front of the constructor.
test.skip("SingleStoreVectorStore weighted sum search unsupported strategy", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = new SingleStoreVectorStore(new MockEmbeddings(), {
    connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
    tableName: "filter_by_weighted_sum_unsupported",
    useVectorIndex: true,
    vectorSize: 2,
    useFullTextIndex: true,
    distanceMetric: "EUCLIDEAN_DISTANCE",
    searchConfig: {
      searchStrategy: "WEIGHTED_SUM",
      textWeight: 1,
      vectorWeight: 1,
      vectorselectCountMultiplier: 10,
    },
  });
  await expect(vectorStore.similaritySearch("some text", 1)).rejects.toThrow(
    "Weighted sum search strategy is only available for DOT_PRODUCT distance metric."
  );
  await vectorStore.end();
});
// Happy-path WEIGHTED_SUM search (DOT_PRODUCT metric) with a metadata filter.
// Requires a live SingleStore instance (SINGLESTORE_* env vars).
// Fix: dropped the pointless `await` in front of the constructor.
test.skip("SingleStoreVectorStore weighted sum search", async () => {
  expect(process.env.SINGLESTORE_HOST).toBeDefined();
  expect(process.env.SINGLESTORE_PORT).toBeDefined();
  expect(process.env.SINGLESTORE_USERNAME).toBeDefined();
  expect(process.env.SINGLESTORE_PASSWORD).toBeDefined();
  expect(process.env.SINGLESTORE_DATABASE).toBeDefined();
  const vectorStore = new SingleStoreVectorStore(new MockEmbeddings(), {
    connectionURI: `http://${process.env.SINGLESTORE_USERNAME}:${process.env.SINGLESTORE_PASSWORD}@${process.env.SINGLESTORE_HOST}:${process.env.SINGLESTORE_PORT}/${process.env.SINGLESTORE_DATABASE}`,
    tableName: "filter_by_weighted_sum",
    useVectorIndex: true,
    vectorSize: 2,
    useFullTextIndex: true,
    distanceMetric: "DOT_PRODUCT",
    searchConfig: {
      searchStrategy: "WEIGHTED_SUM",
      textWeight: 1,
      vectorWeight: 1,
      vectorselectCountMultiplier: 10,
    },
  });
  for (let i = 0; i < weatherTexts.length; i += 1) {
    await vectorStore.addDocuments([
      new Document({
        pageContent: weatherTexts[i],
        metadata: weatherMetadata[i],
      }),
    ]);
  }
  const output = await vectorStore.similaritySearch(
    "rainstorm in parched desert, rain",
    1,
    { category: "snow" }
  );
  await vectorStore.end();
  expect(output.length).toEqual(1);
  expect(output[0].pageContent).toContain(
    "Atop the rugged peaks, snow fell with an unyielding"
  );
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/libsql.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { expect, test } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { OpenAIEmbeddings } from "@langchain/openai";
import { createClient } from "@libsql/client";
import { SyntheticEmbeddings } from "@langchain/core/utils/testing";
import fs from "node:fs";
import { LibSQLVectorStore, LibSQLVectorStoreArgs } from "../libsql.js";
// Integration test against a remote libSQL database; requires LIBSQL_URL
// (and optionally LIBSQL_AUTH_TOKEN) plus an OpenAI API key for embeddings.
test("can create and query (cloud)", async () => {
  const client = createClient({
    url: process.env.LIBSQL_URL!,
    authToken: process.env.LIBSQL_AUTH_TOKEN,
  });
  const vectorStore = new LibSQLVectorStore(
    new OpenAIEmbeddings({
      model: "text-embedding-3-small",
      dimensions: 1536,
    }),
    {
      db: client,
      table: "documents",
      column: "embeddings",
    }
  );
  const ids = await vectorStore.addDocuments([
    new Document({
      pageContent: "added first page",
    }),
    new Document({
      pageContent: "added second page",
    }),
    new Document({
      pageContent: "added third page",
    }),
  ]);
  const nextId = await vectorStore.addDocuments([
    new Document({
      pageContent: "added another first page",
    }),
  ]);
  // NOTE(review): `ids` is never read after this push — looks like leftover
  // bookkeeping; confirm before removing.
  ids.push(nextId[0]);
  const results = await vectorStore.similaritySearchWithScore("added first", 4);
  expect(results.length).toBe(4);
});
// Local (file-backed) LibSQLVectorStore test suite.
// Improvement: the identical DROP/CREATE/INDEX setup was copy-pasted into all
// seven tests; it is extracted into `recreateVectorsTable`, with a flag for
// the one test that needs an explicit `id` primary key column.
describe("LibSQLVectorStore (local)", () => {
  const client = createClient({
    url: "file:store.db",
  });
  const config: LibSQLVectorStoreArgs = {
    db: client,
  };
  const embeddings = new SyntheticEmbeddings({
    vectorSize: 1024,
  });

  /**
   * Drops and recreates the `vectors` table plus its vector index so each
   * test starts from an empty, known schema.
   *
   * @param withIdColumn when true, include an autoincrementing integer `id`
   *   primary key column (used to verify custom id column handling).
   */
  async function recreateVectorsTable(withIdColumn = false): Promise<void> {
    const idColumn = withIdColumn
      ? "id INTEGER PRIMARY KEY AUTOINCREMENT,"
      : "";
    await client.batch([
      `DROP TABLE IF EXISTS vectors;`,
      `CREATE TABLE IF NOT EXISTS vectors (
      ${idColumn}
      content TEXT,
      metadata JSON,
      embedding F32_BLOB(1024)
    );`,
      `CREATE INDEX IF NOT EXISTS idx_vectors_embedding
    ON vectors (libsql_vector_idx(embedding));`,
    ]);
  }

  afterAll(async () => {
    await client.close();
    // Clean up the on-disk database file created by the suite.
    if (fs.existsSync("store.db")) {
      fs.unlinkSync("store.db");
    }
  });

  test("a document with content can be added", async () => {
    await recreateVectorsTable();
    const store = new LibSQLVectorStore(embeddings, config);
    const ids = await store.addDocuments([
      new Document({
        pageContent: "hello",
        metadata: { a: 1 },
      }),
    ]);
    expect(ids).toHaveLength(1);
    const [id] = ids;
    expect(typeof id).toBe("string");
    // Verify the row landed in the table with content and metadata intact.
    const resultSet = await client.execute(`SELECT * FROM vectors`);
    expect(resultSet.rows).toHaveLength(1);
    const [row] = resultSet.rows;
    expect(row.content).toBe("hello");
    expect(JSON.parse(row.metadata as string)).toEqual({ a: 1 });
  });

  test("a document with spaces in the content can be added", async () => {
    await recreateVectorsTable();
    const store = new LibSQLVectorStore(embeddings, config);
    const ids = await store.addDocuments([
      new Document({
        pageContent: "hello world",
        metadata: { a: 1 },
      }),
    ]);
    expect(ids).toHaveLength(1);
    const [id] = ids;
    expect(typeof id).toBe("string");
    const resultSet = await client.execute(`SELECT * FROM vectors`);
    expect(resultSet.rows).toHaveLength(1);
    const [row] = resultSet.rows;
    expect(row.content).toBe("hello world");
    expect(JSON.parse(row.metadata as string)).toEqual({ a: 1 });
  });

  test("a similarity search can be performed", async () => {
    await recreateVectorsTable();
    const store = new LibSQLVectorStore(embeddings, config);
    const ids = await store.addDocuments([
      new Document({
        pageContent: "the quick brown fox",
      }),
      new Document({
        pageContent: "jumped over the lazy dog",
      }),
      new Document({
        pageContent: "hello world",
      }),
    ]);
    expect(ids).toHaveLength(3);
    expect(ids.every((id) => typeof id === "string")).toBe(true);
    const results1 = await store.similaritySearch("the quick brown dog", 2);
    expect(results1).toHaveLength(2);
    expect(
      results1.map((result) => result.id).every((id) => typeof id === "string")
    ).toBe(true);
    // Default k should return every stored document here.
    const results2 = await store.similaritySearch("hello");
    expect(results2).toHaveLength(3);
    expect(
      results2.map((result) => result.id).every((id) => typeof id === "string")
    ).toBe(true);
  });

  test("a similarity search with a filter can be performed", async () => {
    await recreateVectorsTable();
    const store = new LibSQLVectorStore(embeddings, config);
    const ids = await store.addDocuments([
      new Document({
        pageContent: "the quick brown fox",
        metadata: {
          label: "1",
        },
      }),
      new Document({
        pageContent: "jumped over the lazy dog",
        metadata: {
          label: "2",
        },
      }),
      new Document({
        pageContent: "hello world",
        metadata: {
          label: "1",
        },
      }),
    ]);
    expect(ids).toHaveLength(3);
    expect(ids.every((id) => typeof id === "string")).toBe(true);
    // Only the two documents with label "1" should come back.
    const results = await store.similaritySearch("the quick brown dog", 10, {
      label: {
        operator: "=",
        value: "1",
      },
    });
    expect(results).toHaveLength(2);
    expect(results.map((result) => result.pageContent)).toEqual([
      "the quick brown fox",
      "hello world",
    ]);
    expect(
      results.map((result) => result.id).every((id) => typeof id === "string")
    ).toBe(true);
  });

  test("a document can be deleted by id", async () => {
    await recreateVectorsTable();
    const store = new LibSQLVectorStore(embeddings, config);
    const ids = await store.addDocuments([
      new Document({
        pageContent: "the quick brown fox",
      }),
      new Document({
        pageContent: "jumped over the lazy dog",
        metadata: { a: 2 },
      }),
      new Document({
        pageContent: "hello world",
        metadata: { a: 3 },
      }),
    ]);
    expect(ids).toHaveLength(3);
    expect(ids.every((id) => typeof id === "string")).toBe(true);
    const [id1, id2] = ids;
    await store.delete({ ids: [id1, id2] });
    // Only the third document should survive.
    const resultSet = await client.execute(`SELECT * FROM vectors`);
    expect(resultSet.rows).toHaveLength(1);
    const [row] = resultSet.rows;
    expect(row.content).toBe("hello world");
    expect(JSON.parse(row.metadata as string)).toEqual({ a: 3 });
  });

  test("all documents can be deleted", async () => {
    await recreateVectorsTable();
    const store = new LibSQLVectorStore(embeddings, config);
    const ids = await store.addDocuments([
      new Document({
        pageContent: "the quick brown fox",
      }),
      new Document({
        pageContent: "jumped over the lazy dog",
      }),
      new Document({
        pageContent: "hello world",
      }),
    ]);
    expect(ids).toHaveLength(3);
    expect(ids.every((id) => typeof id === "string")).toBe(true);
    await store.delete({
      deleteAll: true,
    });
    const resultSet = await client.execute(`SELECT * FROM vectors`);
    expect(resultSet.rows).toHaveLength(0);
  });

  test("the table can have a custom id column name", async () => {
    // This variant includes an autoincrementing integer primary key.
    await recreateVectorsTable(true);
    const store = new LibSQLVectorStore(embeddings, config);
    const ids = await store.addDocuments([
      new Document({
        pageContent: "the quick brown fox",
        metadata: { a: 1 },
      }),
      new Document({
        pageContent: "jumped over the lazy dog",
        metadata: { a: 2 },
      }),
      new Document({
        pageContent: "hello world",
        metadata: { a: 3 },
      }),
    ]);
    expect(ids).toHaveLength(3);
    // Autoincrement ids are returned as strings in insertion order.
    expect(ids).toEqual(["1", "2", "3"]);
    const results = await store.similaritySearch("the quick brown dog", 2);
    expect(results).toHaveLength(2);
    expect(results.map((result) => result.pageContent)).toEqual([
      "the quick brown fox",
      "jumped over the lazy dog",
    ]);
    expect(
      results.map((result) => result.id).every((id) => typeof id === "string")
    ).toBe(true);
    const [id1, id2] = ids;
    await store.delete({ ids: [id1, id2] });
    const resultSet = await client.execute(`SELECT * FROM vectors`);
    expect(resultSet.rows).toHaveLength(1);
    const [row] = resultSet.rows;
    expect(row.content).toBe("hello world");
    expect(JSON.parse(row.metadata as string)).toEqual({ a: 3 });
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/elasticsearch.int.test.ts
|
/* eslint-disable no-process-env */
import { test, expect } from "@jest/globals";
import { Client, ClientOptions } from "@elastic/elasticsearch";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { ElasticVectorSearch } from "../elasticsearch.js";
// Elasticsearch integration suite. `beforeEach` builds a client from
// ELASTIC_URL plus either an API key or username/password, then wipes the
// test index. The individual tests are skipped by default because they need
// a live cluster and an OpenAI key.
describe("ElasticVectorSearch", () => {
  let store: ElasticVectorSearch;
  beforeEach(async () => {
    if (!process.env.ELASTIC_URL) {
      throw new Error("ELASTIC_URL not set");
    }
    const config: ClientOptions = {
      node: process.env.ELASTIC_URL,
    };
    // API key auth takes precedence over basic auth when both are present.
    if (process.env.ELASTIC_API_KEY) {
      config.auth = {
        apiKey: process.env.ELASTIC_API_KEY,
      };
    } else if (process.env.ELASTIC_USERNAME && process.env.ELASTIC_PASSWORD) {
      config.auth = {
        username: process.env.ELASTIC_USERNAME,
        password: process.env.ELASTIC_PASSWORD,
      };
    }
    const client = new Client(config);
    const indexName = "test_index";
    const embeddings = new OpenAIEmbeddings();
    store = new ElasticVectorSearch(embeddings, { client, indexName });
    // Start each test from a clean index.
    await store.deleteIfExists();
    expect(store).toBeDefined();
  });
  // Add, search, filtered search-with-score, upsert by id, and delete.
  test.skip("ElasticVectorSearch integration", async () => {
    const createdAt = new Date().getTime();
    const ids = await store.addDocuments([
      { pageContent: "hello", metadata: { a: createdAt + 1 } },
      { pageContent: "car", metadata: { a: createdAt } },
      { pageContent: "adjective", metadata: { a: createdAt } },
      { pageContent: "hi", metadata: { a: createdAt } },
    ]);
    const results1 = await store.similaritySearch("hello!", 1);
    expect(results1).toHaveLength(1);
    expect(results1).toEqual([
      new Document({ metadata: { a: createdAt + 1 }, pageContent: "hello" }),
    ]);
    const results2 = await store.similaritySearchWithScore("testing!", 6, {
      a: createdAt,
    });
    expect(results2).toHaveLength(3);
    // Re-adding with the same ids should upsert, not duplicate.
    const ids2 = await store.addDocuments(
      [
        { pageContent: "hello upserted", metadata: { a: createdAt + 1 } },
        { pageContent: "car upserted", metadata: { a: createdAt } },
        { pageContent: "adjective upserted", metadata: { a: createdAt } },
        { pageContent: "hi upserted", metadata: { a: createdAt } },
      ],
      { ids }
    );
    expect(ids).toEqual(ids2);
    const results3 = await store.similaritySearchWithScore("testing!", 6, {
      a: createdAt,
    });
    expect(results3).toHaveLength(3);
    // console.log(`Upserted:`, results3);
    await store.delete({ ids: ids.slice(2) });
    const results4 = await store.similaritySearchWithScore("testing!", 3, {
      a: createdAt,
    });
    expect(results4).toHaveLength(1);
  });
  // Exercises structured filters (exclude by value, exclude by list,
  // not_exists) with more than 10 documents.
  test.skip("ElasticVectorSearch integration with more than 10 documents", async () => {
    const createdAt = new Date().getTime();
    await store.addDocuments([
      { pageContent: "pretty", metadata: { a: createdAt + 1 } },
      { pageContent: "intelligent", metadata: { a: createdAt } },
      { pageContent: "creative", metadata: { a: createdAt } },
      { pageContent: "courageous", metadata: { a: createdAt } },
      { pageContent: "energetic", metadata: { a: createdAt } },
      { pageContent: "patient", metadata: { a: createdAt } },
      { pageContent: "responsible", metadata: { a: createdAt } },
      { pageContent: "friendly", metadata: { a: createdAt } },
      { pageContent: "confident", metadata: { a: createdAt } },
      { pageContent: "generous", metadata: { a: null } },
      { pageContent: "compassionate", metadata: {} },
    ]);
    const results = await store.similaritySearch("*", 11);
    expect(results).toHaveLength(11);
    const results2 = await store.similaritySearch("*", 11, [
      {
        field: "a",
        value: createdAt,
        operator: "exclude",
      },
    ]);
    expect(results2).toHaveLength(3);
    const results3 = await store.similaritySearch("*", 11, [
      {
        field: "a",
        value: [createdAt],
        operator: "exclude",
      },
    ]);
    expect(results3).toHaveLength(3);
    const results4 = await store.similaritySearch("*", 11, [
      {
        field: "a",
        operator: "not_exists",
      },
    ]);
    expect(results4).toHaveLength(2);
  });
  // Nested metadata produced by text splitters (loc.lines) must round-trip.
  test.skip("ElasticVectorSearch integration with text splitting metadata", async () => {
    const createdAt = new Date().getTime();
    const documents = [
      new Document({
        pageContent: "hello",
        metadata: { a: createdAt, loc: { lines: { from: 1, to: 1 } } },
      }),
      new Document({
        pageContent: "car",
        metadata: { a: createdAt, loc: { lines: { from: 2, to: 2 } } },
      }),
    ];
    await store.addDocuments(documents);
    const results1 = await store.similaritySearch("hello!", 1);
    expect(results1).toHaveLength(1);
    expect(results1).toEqual([
      new Document({
        metadata: { a: createdAt, loc: { lines: { from: 1, to: 1 } } },
        pageContent: "hello",
      }),
    ]);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/supabase.test.ts
|
import { test, expect, jest } from "@jest/globals";
import { SupabaseClient } from "@supabase/supabase-js";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { SupabaseVectorStore } from "../supabase.js";
test("similaritySearchVectorWithScore should call RPC with the vectorstore filters", async () => {
const supabaseClientMock = {
rpc: jest.fn().mockReturnValue(Promise.resolve({ data: [] })),
} as Partial<SupabaseClient>;
const embeddings = new FakeEmbeddings();
const vectorStore = new SupabaseVectorStore(embeddings, {
client: supabaseClientMock as SupabaseClient,
tableName: "documents",
queryName: "match_documents",
filter: { a: 2 },
});
await vectorStore.similaritySearchVectorWithScore([1, 2, 3], 5);
expect(supabaseClientMock.rpc).toHaveBeenCalledWith("match_documents", {
filter: { a: 2 },
query_embedding: [1, 2, 3],
match_count: 5,
});
});
test("similaritySearchVectorWithScore should call RPC with the passed filters", async () => {
const supabaseClientMock = {
rpc: jest.fn().mockReturnValue(Promise.resolve({ data: [] })),
} as Partial<SupabaseClient>;
const embeddings = new FakeEmbeddings();
const vectorStore = new SupabaseVectorStore(embeddings, {
client: supabaseClientMock as SupabaseClient,
tableName: "documents",
queryName: "match_documents",
});
await vectorStore.similaritySearchVectorWithScore([1, 2, 3], 5, { b: 3 });
expect(supabaseClientMock.rpc).toHaveBeenCalledWith("match_documents", {
filter: { b: 3 },
query_embedding: [1, 2, 3],
match_count: 5,
});
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex.int.test.ts
|
/* eslint-disable no-process-env */
import { ConvexHttpClient } from "convex/browser";
import { expect, test } from "@jest/globals";
// eslint-disable-next-line import/no-relative-packages
import { api } from "./convex/convex/_generated/api.js";
// To run these tests at least once, follow these steps:
//
// 1. `cd langchain/src/vectorstores/tests/convex`
// 2. `npx convex dev --once`
// 3. `cd ../../../..`
// 3. `cp src/vectorstores/tests/convex/.env.local .env`
// 4. Add your OpenAI key to `.env` (see `.env.example`)
// 5. `yarn test:single src/vectorstores/tests/convex.int.test.ts`
//
// If you're developing these tests, after you've done the above:
//
// In `langchain/src/vectorstores/tests/convex` run `npx convex dev`
// In `langchain` run `yarn test:single src/vectorstores/tests/convex.int.test.ts`
describe.skip("Convex Vectorstore", () => {
test("Convex ingest, similaritySearch", async () => {
const client = new ConvexHttpClient(process.env.CONVEX_URL as string);
const openAIApiKey = process.env.OPENAI_API_KEY as string;
await client.mutation(api.lib.reset);
await client.action(api.lib.ingest, {
openAIApiKey,
texts: ["Hello world", "Bye bye", "hello nice world"],
metadatas: [{ id: 2 }, { id: 1 }, { id: 3 }],
});
const metadatas = await client.action(api.lib.similaritySearch, {
openAIApiKey,
query: "hello world",
});
expect(metadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
});
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/couchbase.int.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable no-process-env */
import { describe, test } from "@jest/globals";
import { Cluster } from "couchbase";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import {
CouchbaseVectorStore,
CouchbaseVectorStoreArgs,
} from "../couchbase.js";
describe.skip("Couchbase vector store", () => {
const connectionString =
process.env.COUCHBASE_DB_CONN_STR ?? "couchbase://localhost";
const databaseUsername = process.env.COUCHBASE_DB_USERNAME ?? "Administrator";
const databasePassword = process.env.COUCHBASE_DB_PASSWORD ?? "Password";
const bucketName = process.env.COUCHBASE_DB_BUCKET_NAME ?? "testing";
const scopeName = process.env.COUCHBASE_DB_SCOPE_NAME ?? "_default";
const collectionName = process.env.COUCHBASE_DB_COLLECTION_NAME ?? "_default";
const indexName = process.env.COUCHBASE_DB_INDEX_NAME ?? "vector-index";
const textFieldKey = "text";
const embeddingFieldKey = "embedding";
const isScopedIndex = true;
let couchbaseClient: Cluster;
let embeddings: OpenAIEmbeddings;
const texts = [
"Couchbase, built on a key-value store, offers efficient data operations.",
"As a NoSQL database, Couchbase provides scalability and flexibility to handle diverse data types.",
"Couchbase supports N1QL, a SQL-like language, easing the transition for developers familiar with SQL.",
"Couchbase ensures high availability with built-in fault tolerance and automatic multi-master replication.",
"With its memory-first architecture, Couchbase delivers high performance and low latency data access.",
];
const metadata = [
{ id: "101", name: "Efficient Operator" },
{ id: "102", name: "Flexible Storer" },
{ id: "103", name: "Quick Performer" },
{ id: "104", name: "Reliable Guardian" },
{ id: "105", name: "Adaptable Navigator" },
];
beforeEach(async () => {
couchbaseClient = await Cluster.connect(connectionString, {
username: databaseUsername,
password: databasePassword,
configProfile: "wanDevelopment",
});
embeddings = new OpenAIEmbeddings({
openAIApiKey: process.env.OPENAI_API_KEY,
});
});
test("from Texts to vector store", async () => {
const couchbaseConfig: CouchbaseVectorStoreArgs = {
cluster: couchbaseClient,
bucketName,
scopeName,
collectionName,
indexName,
textKey: textFieldKey,
embeddingKey: embeddingFieldKey,
scopedIndex: isScopedIndex,
};
const store = await CouchbaseVectorStore.fromTexts(
texts,
metadata,
embeddings,
couchbaseConfig
);
const results = await store.similaritySearchWithScore(texts[0], 1);
expect(results.length).toEqual(1);
expect(results[0][0].pageContent).toEqual(texts[0]);
expect(results[0][0].metadata.name).toEqual(metadata[0].name);
expect(results[0][0].metadata.id).toEqual(metadata[0].id);
});
test("Add and delete Documents to vector store", async () => {
const couchbaseConfig: CouchbaseVectorStoreArgs = {
cluster: couchbaseClient,
bucketName,
scopeName,
collectionName,
indexName,
textKey: textFieldKey,
embeddingKey: embeddingFieldKey,
scopedIndex: isScopedIndex,
};
const documents: Document[] = [];
for (let i = 0; i < texts.length; i += 1) {
documents.push({
pageContent: texts[i],
metadata: {},
});
}
const store = await CouchbaseVectorStore.initialize(
embeddings,
couchbaseConfig
);
const ids = await store.addDocuments(documents, {
ids: metadata.map((val) => val.id),
metadata: metadata.map((val) => {
const metadataObj = {
name: val.name,
};
return metadataObj;
}),
});
expect(ids.length).toEqual(texts.length);
for (let i = 0; i < ids.length; i += 1) {
expect(ids[i]).toEqual(metadata[i].id);
}
const results = await store.similaritySearch(texts[1], 1);
expect(results.length).toEqual(1);
expect(results[0].pageContent).toEqual(texts[1]);
expect(results[0].metadata.name).toEqual(metadata[1].name);
await store.delete(ids);
const cbCollection = couchbaseClient
.bucket(bucketName)
.scope(scopeName)
.collection(collectionName);
expect((await cbCollection.exists(ids[0])).exists).toBe(false);
expect((await cbCollection.exists(ids[4])).exists).toBe(false);
const resultsDeleted = await store.similaritySearch(texts[1], 1);
expect(resultsDeleted.length).not.toEqual(1);
});
test("hybrid search", async () => {
const couchbaseConfig: CouchbaseVectorStoreArgs = {
cluster: couchbaseClient,
bucketName,
scopeName,
collectionName,
indexName,
textKey: textFieldKey,
embeddingKey: embeddingFieldKey,
scopedIndex: isScopedIndex,
};
const query = `Couchbase offers impressive memory-first performance for your important applications.`;
const hybridSearchMetadata: { [key: string]: any }[] = [];
// Add More Metadata
for (let i = 0; i < texts.length; i += 1) {
const doc: { [key: string]: any } = {};
doc.date = `${2020 + (i % 10)}-01-01`;
doc.rating = 1 + (i % 5);
doc.author = ["John Doe", "Jane Doe"][(i + 1) % 2];
doc.id = (i + 100).toString();
hybridSearchMetadata.push(doc);
}
const store = await CouchbaseVectorStore.fromTexts(
texts,
hybridSearchMetadata,
embeddings,
couchbaseConfig
);
const resultsSimilaritySearch = await store.similaritySearch(query, 1);
expect(resultsSimilaritySearch.length).toEqual(1);
expect(resultsSimilaritySearch[0].metadata.date).not.toEqual(undefined);
// search by exact value in metadata
const exactValueResult = await store.similaritySearch(query, 4, {
fields: ["metadata.author"],
searchOptions: {
query: { field: "metadata.author", match: "John Doe" },
},
});
expect(exactValueResult.length).toEqual(4);
expect(exactValueResult[0].metadata.author).toEqual("John Doe");
// search by partial match in metadata
const partialMatchResult = await store.similaritySearch(query, 4, {
fields: ["metadata.author"],
searchOptions: {
query: { field: "metadata.author", match: "Johny", fuzziness: 1 },
},
});
expect(partialMatchResult.length).toEqual(4);
expect(partialMatchResult[0].metadata.author).toEqual("John Doe");
// search by date range
const dateRangeResult = await store.similaritySearch(query, 4, {
fields: ["metadata.date", "metadata.author"],
searchOptions: {
query: {
start: "2022-12-31",
end: "2023-01-02",
inclusiveStart: true,
inclusiveEnd: false,
field: "metadata.date",
},
},
});
expect(dateRangeResult.length).toEqual(4);
// search by rating range
const ratingRangeResult = await store.similaritySearch(texts[0], 4, {
fields: ["metadata.rating"],
searchOptions: {
query: {
min: 3,
max: 5,
inclusiveMin: false,
inclusiveMax: true,
field: "metadata.rating",
},
},
});
expect(ratingRangeResult.length).toEqual(4);
// multiple search conditions
const multipleConditionsResult = await store.similaritySearch(texts[0], 4, {
fields: ["metadata.rating", "metadata.date"],
searchOptions: {
query: {
conjuncts: [
{ min: 3, max: 4, inclusive_max: true, field: "metadata.rating" },
{ start: "2022-12-31", end: "2023-01-02", field: "metadata.date" },
],
},
},
});
expect(multipleConditionsResult.length).toEqual(4);
});
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/hnswlib.int.test.ts
|
import { test, expect } from "@jest/globals";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import * as os from "node:os";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { HNSWLib } from "../hnswlib.js";
test("Test HNSWLib.fromTexts", async () => {
const vectorStore = await HNSWLib.fromTexts(
["Hello world", "Bye bye", "hello nice world"],
[{ id: 2 }, { id: 1 }, { id: 3 }],
new OpenAIEmbeddings()
);
expect(vectorStore.index?.getCurrentCount()).toBe(3);
const resultOne = await vectorStore.similaritySearch("hello world", 1);
const resultOneMetadatas = resultOne.map(({ metadata }) => metadata);
expect(resultOneMetadatas).toEqual([{ id: 2 }]);
const resultTwo = await vectorStore.similaritySearch("hello world", 3);
const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
});
test("Test HNSWLib.fromTexts + addDocuments", async () => {
const vectorStore = await HNSWLib.fromTexts(
["Hello world", "Bye bye", "hello nice world"],
[{ id: 2 }, { id: 1 }, { id: 3 }],
new OpenAIEmbeddings()
);
expect(vectorStore.index?.getMaxElements()).toBe(3);
expect(vectorStore.index?.getCurrentCount()).toBe(3);
await vectorStore.addDocuments([
new Document({
pageContent: "hello worldklmslksmn",
metadata: { id: 4 },
}),
]);
expect(vectorStore.index?.getMaxElements()).toBe(4);
const resultTwo = await vectorStore.similaritySearch("hello world", 3);
const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 4 }]);
});
test("Test HNSWLib.load, HNSWLib.save, and HNSWLib.delete", async () => {
const vectorStore = await HNSWLib.fromTexts(
["Hello world", "Bye bye", "hello nice world"],
[{ id: 2 }, { id: 1 }, { id: 3 }],
new OpenAIEmbeddings()
);
expect(vectorStore.index?.getCurrentCount()).toBe(3);
const resultOne = await vectorStore.similaritySearch("hello world", 1);
const resultOneMetadatas = resultOne.map(({ metadata }) => metadata);
expect(resultOneMetadatas).toEqual([{ id: 2 }]);
const resultTwo = await vectorStore.similaritySearch("hello world", 3);
const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
const tempDirectory = await fs.mkdtemp(path.join(os.tmpdir(), "lcjs-"));
// console.log(tempDirectory);
await vectorStore.save(tempDirectory);
const loadedVectorStore = await HNSWLib.load(
tempDirectory,
new OpenAIEmbeddings()
);
const resultThree = await loadedVectorStore.similaritySearch(
"hello world",
1
);
const resultThreeMetadatas = resultThree.map(({ metadata }) => metadata);
expect(resultThreeMetadatas).toEqual([{ id: 2 }]);
const resultFour = await loadedVectorStore.similaritySearch("hello world", 3);
const resultFourMetadatas = resultFour.map(({ metadata }) => metadata);
expect(resultFourMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
await loadedVectorStore.delete({
directory: tempDirectory,
});
await expect(async () => {
await HNSWLib.load(tempDirectory, new OpenAIEmbeddings());
}).rejects.toThrow();
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/closevector_node.int.test.ts
|
import { test, expect } from "@jest/globals";
import { OpenAIEmbeddings } from "@langchain/openai";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import { CloseVectorNode } from "../closevector/node.js";
test.skip("Test CloseVectorNode.fromTexts + addVectors", async () => {
const key = getEnvironmentVariable("CLOSEVECTOR_API_KEY");
const secret = getEnvironmentVariable("CLOSEVECTOR_API_SECRET");
if (!key || !secret) {
throw new Error("CLOSEVECTOR_API_KEY or CLOSEVECTOR_API_SECRET not set");
}
const vectorStore = await CloseVectorNode.fromTexts(
["Hello world"],
[{ id: 2 }],
new OpenAIEmbeddings(),
undefined,
{
key,
secret,
}
);
expect(vectorStore.instance.index?.getMaxElements()).toBe(1);
expect(vectorStore.instance.index?.getCurrentCount()).toBe(1);
await vectorStore.saveToCloud({
description: "test",
public: true,
});
const { uuid } = vectorStore.instance;
const vectorStore2 = await CloseVectorNode.loadFromCloud({
uuid,
embeddings: new OpenAIEmbeddings(),
credentials: {
key,
secret,
},
});
expect(vectorStore2.instance.index?.getMaxElements()).toBe(1);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/cassandra.int.test.ts
|
/* eslint-disable no-process-env */
import { test, expect, describe } from "@jest/globals";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { CassandraClientFactory } from "../../utils/cassandra.js";
import { CassandraLibArgs, CassandraStore } from "../cassandra.js";
// Astra connection settings; credentials come from the environment.
// Switch serviceProviderArgs to plain Cassandra settings to test against
// a self-hosted cluster (see the comment block below).
const cassandraConfig = {
  serviceProviderArgs: {
    astra: {
      token: process.env.ASTRA_TOKEN as string,
      endpoint: process.env.ASTRA_DB_ENDPOINT as string,
    },
  },
  keyspace: "default_keyspace",
  table: "test",
};
// Shared client handle, (re)initialized in each suite's setup hooks.
let client;
// Store config without an explicit partition key: a single int primary key
// plus two metadata columns; dimensions=1536 matches the OpenAI embeddings
// used by these tests.
const noPartitionConfig = {
  ...cassandraConfig,
  dimensions: 1536,
  primaryKey: {
    name: "id",
    type: "int",
  },
  metadataColumns: [
    {
      name: "name",
      type: "text",
    },
    {
      name: "seq",
      type: "int",
    },
  ],
};
// These tests are configured to run against an Astra database. You can run against Cassandra by
// updating the cassandraConfig above, and adjusting the environment variables as appropriate.
//
// Note there are multiple describe functions that need to be un-skipped for internal testing.
// To run these tests:
// 1. switch "describe.skip(" to "describe("
// 2. Export OPENAI_API_KEY, ASTRA_DB_ENDPOINT, and ASTRA_TOKEN
// 3. cd langchainjs/libs/langchain-community
// 4. yarn test:single src/vectorstores/tests/cassandra.int.test.ts
// Once manual testing is complete, re-instate the ".skip"
describe("CassandraStore - no explicit partition key", () => {
beforeAll(async () => {
client = await CassandraClientFactory.getClient(cassandraConfig);
await client.execute("DROP TABLE IF EXISTS default_keyspace.test;");
});
beforeEach(async () => {
try {
client = await CassandraClientFactory.getClient(cassandraConfig);
await client.execute("TRUNCATE default_keyspace.test;");
} catch (err) {
// Ignore error if table does not exist
}
});
test("CassandraStore.fromText", async () => {
const vectorStore = await CassandraStore.fromTexts(
["I am blue", "Green yellow purple", "Hello there hello"],
[
{ id: 2, name: "Alex" },
{ id: 1, name: "Scott" },
{ id: 3, name: "Bubba" },
],
new OpenAIEmbeddings(),
noPartitionConfig
);
const results = await vectorStore.similaritySearch(
"Green yellow purple",
1
);
expect(results).toEqual([
new Document({
pageContent: "Green yellow purple",
metadata: { id: 1, name: "Scott" },
}),
]);
});
test("CassandraStore.fromExistingIndex", async () => {
await CassandraStore.fromTexts(
["Hey", "Whats up", "Hello"],
[
{ id: 2, name: "Alex" },
{ id: 1, name: "Scott" },
{ id: 3, name: "Bubba" },
],
new OpenAIEmbeddings(),
noPartitionConfig
);
const vectorStore = await CassandraStore.fromExistingIndex(
new OpenAIEmbeddings(),
noPartitionConfig
);
const results = await vectorStore.similaritySearch("Whats up", 1);
expect(results).toEqual([
new Document({
pageContent: "Whats up",
metadata: { id: 1, name: "Scott" },
}),
]);
});
test("CassandraStore.fromExistingIndex (with filter)", async () => {
const testConfig = {
...noPartitionConfig,
indices: [
{
name: "name",
value: "(name)",
},
],
};
await CassandraStore.fromTexts(
["Hey", "Whats up", "Hello"],
[
{ id: 2, name: "Alex" },
{ id: 1, name: "Scott" },
{ id: 3, name: "Bubba" },
],
new OpenAIEmbeddings(),
testConfig
);
const vectorStore = await CassandraStore.fromExistingIndex(
new OpenAIEmbeddings(),
testConfig
);
const results = await vectorStore.similaritySearch("Hey", 1, {
name: "Bubba",
});
expect(results).toEqual([
new Document({
pageContent: "Hello",
metadata: { id: 3, name: "Bubba" },
}),
]);
});
test("CassandraStore.fromExistingIndex (with inequality filter)", async () => {
const testConfig = {
...noPartitionConfig,
indices: [
{
name: "seq",
value: "(seq)",
},
],
};
await CassandraStore.fromTexts(
["Hey", "Whats up", "Hello"],
[
{ id: 2, name: "Alex", seq: 99 },
{ id: 1, name: "Scott", seq: 88 },
{ id: 3, name: "Bubba", seq: 77 },
],
new OpenAIEmbeddings(),
testConfig
);
const vectorStore = await CassandraStore.fromExistingIndex(
new OpenAIEmbeddings(),
testConfig
);
// With out the filter this would match on Scott, but we are using > filter
const results = await vectorStore.similaritySearch("Whats up", 1, [
{ name: "seq", operator: ">", value: "88" },
]);
expect(results).toEqual([
new Document({
pageContent: "Hey",
metadata: { id: 2, name: "Alex", seq: 99 },
}),
]);
});
test("CassandraStore.addDocuments (with batch))", async () => {
const testConfig = {
...noPartitionConfig,
maxConcurrency: 1,
batchSize: 5,
};
const docs: Document[] = [];
docs.push(
new Document({
pageContent: "Hello Muddah, hello Faddah",
metadata: { id: 1, name: "Alex" },
})
);
docs.push(
new Document({
pageContent: "Here I am at Camp Granada",
metadata: { id: 2, name: "Blair" },
})
);
docs.push(
new Document({
pageContent: "Camp is very entertaining",
metadata: { id: 3, name: "Casey" },
})
);
docs.push(
new Document({
pageContent: "And they say we'll have some fun if it stops raining",
metadata: { id: 4, name: "Dana" },
})
);
docs.push(
new Document({
pageContent: "I went hiking with Joe Spivey",
metadata: { id: 5, name: "Amber" },
})
);
docs.push(
new Document({
pageContent: "He developed poison ivy",
metadata: { id: 6, name: "Blair" },
})
);
docs.push(
new Document({
pageContent: "You remember Leonard Skinner",
metadata: { id: 7, name: "Casey" },
})
);
docs.push(
new Document({
pageContent: "He got Ptomaine poisoning last night after dinner",
metadata: { id: 8, name: "Dana" },
})
);
docs.push(
new Document({
pageContent: "All the counsellors hate the waiters",
metadata: { id: 9, name: "Amber" },
})
);
docs.push(
new Document({
pageContent: "And the lake has alligators",
metadata: { id: 10, name: "Blair" },
})
);
docs.push(
new Document({
pageContent: "And the head coach wants no sissies",
metadata: { id: 11, name: "Casey" },
})
);
docs.push(
new Document({
pageContent: "So he reads to us from something called Ulysses",
metadata: { id: 12, name: "Dana" },
})
);
const vectorStore = await CassandraStore.fromExistingIndex(
new OpenAIEmbeddings(),
testConfig
);
await vectorStore.addDocuments(docs);
const results = await vectorStore.similaritySearch(
"something called Ulysses",
1
);
expect(results).toEqual([
new Document({
pageContent: "So he reads to us from something called Ulysses",
metadata: { id: 12, name: "Dana" },
}),
]);
});
test("CassandraStore.mmr", async () => {
const vectorStore = await CassandraStore.fromTexts(
["I am blue!", "I am blue!", "I am yellow"],
[
{ id: 2, name: "Alex" },
{ id: 1, name: "Scott" },
{ id: 3, name: "Bubba" },
],
new OpenAIEmbeddings(),
noPartitionConfig
);
const results = await vectorStore.maxMarginalRelevanceSearch("I am blue!", {
k: 2,
fetchK: 3,
lambda: 0,
});
// Check that the results array has exactly two documents.
expect(results.length).toEqual(2);
// Check if one of the documents has id=3.
const hasId3 = results.some((doc) => doc.metadata.id === 3);
expect(hasId3).toBeTruthy();
});
});
describe.skip("CassandraStore - no explicit partition key", () => {
beforeAll(async () => {
client = await CassandraClientFactory.getClient(cassandraConfig);
await client.execute("DROP TABLE IF EXISTS default_keyspace.test;");
});
test("CassandraStore.fromExistingIndex (with geo_distance filter)", async () => {
const testConfig = {
...noPartitionConfig,
metadataColumns: [
{
name: "name",
type: "text",
},
{
name: "coord",
type: "VECTOR<FLOAT,2>",
},
],
indices: [
{
name: "coord",
value: "(coord)",
options: "{'similarity_function': 'euclidean' }",
},
],
};
await CassandraStore.fromTexts(
// This is a little Easter Egg for you physics people!
["Heavy", "Complex", "Intertwined"],
[
{
id: 1,
name: "Newton",
coord: new Float32Array([51.5080181, -0.0776972]),
},
{
id: 3,
name: "Hamilton",
coord: new Float32Array([53.3870814, -6.3375127]),
},
{
id: 2,
name: "Maxwell",
coord: new Float32Array([52.2069212, 0.116913]),
},
],
new OpenAIEmbeddings(),
testConfig
);
const vectorStore = await CassandraStore.fromExistingIndex(
new OpenAIEmbeddings(),
testConfig
);
const results = await vectorStore.similaritySearch("Heavy", 1, [
{
name: "GEO_DISTANCE(coord, ?)",
operator: "<",
value: [new Float32Array([53.3730617, -6.3000515]), 10000],
},
]);
expect(results).toEqual([
new Document({
pageContent: "Complex",
metadata: {
id: 3,
name: "Hamilton",
coord: new Float32Array([53.3870814, -6.3375127]),
},
}),
]);
});
});
// Store config with a composite primary key: "group" is the partition key,
// "ts" and "id" are clustering columns (newest-first via the WITH clause).
const partitionConfig = {
  ...noPartitionConfig,
  primaryKey: [
    {
      name: "group",
      type: "int",
      partition: true,
    },
    {
      name: "ts",
      type: "timestamp",
    },
    {
      name: "id",
      type: "int",
    },
  ],
  withClause: "CLUSTERING ORDER BY (ts DESC)",
};
describe.skip("CassandraStore - with explicit partition key", () => {
beforeAll(async () => {
client = await CassandraClientFactory.getClient(cassandraConfig);
await client.execute("DROP TABLE IF EXISTS default_keyspace.test;");
});
test("CassandraStore.partitionKey", async () => {
const vectorStore = await CassandraStore.fromTexts(
["Hey", "Hey"],
[
{ group: 1, ts: new Date(1655377200000), id: 1, name: "Alex" },
{ group: 2, ts: new Date(1655377200000), id: 1, name: "Alice" },
],
new OpenAIEmbeddings(),
partitionConfig
);
const results = await vectorStore.similaritySearch("Hey", 1, {
group: 2,
});
expect(results).toEqual([
new Document({
pageContent: "Hey",
metadata: {
group: 2,
ts: new Date(1655377200000),
id: 1,
name: "Alice",
},
}),
]);
});
test("CassandraStore.partition with cluster filter", async () => {
const vectorStore = await CassandraStore.fromTexts(
["Apple", "Banana", "Cherry", "Date", "Elderberry"],
[
{ group: 3, ts: new Date(1655377200000), id: 1, name: "Alex" },
{ group: 3, ts: new Date(1655377201000), id: 2, name: "Alex" },
{ group: 3, ts: new Date(1655377202000), id: 3, name: "Alex" },
{ group: 3, ts: new Date(1655377203000), id: 4, name: "Alex" },
{ group: 3, ts: new Date(1655377204000), id: 5, name: "Alex" },
],
new OpenAIEmbeddings(),
partitionConfig
);
await expect(
vectorStore.similaritySearch("Banana", 1, [
{ name: "group", value: 1 },
{ name: "ts", value: new Date(1655377202000), operator: ">" },
])
).rejects.toThrow();
// Once Cassandra supports filtering against non-indexed cluster columns, the following should work
// expect(results).toEqual([
// new Document({
// pageContent: "Elderberry",
// metadata: { group: 1, ts: new Date(1655377204000), id: 5, name: "Alex", seq: null}
// }),
// ]);
});
});
describe("CassandraStore - with explicit partition key", () => {
beforeAll(async () => {
client = await CassandraClientFactory.getClient(cassandraConfig);
await client.execute("DROP TABLE IF EXISTS default_keyspace.test;");
});
test("no metadata and no primary keys", async () => {
const store = await CassandraStore.fromTexts(
["I am blue", "Green yellow purple", "Hello there hello"],
[],
new OpenAIEmbeddings(),
{
...cassandraConfig,
dimensions: 1536,
} as CassandraLibArgs
);
const result = await store.similaritySearch("Green yellow purple", 1);
const content = result[0].pageContent;
expect(content).toEqual("Green yellow purple");
}, 60000);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/xata.int.test.ts
|
/* eslint-disable no-process-env */
// eslint-disable-next-line import/no-extraneous-dependencies
import { BaseClient } from "@xata.io/client";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { XataVectorSearch } from "../xata.js";
// Tests require a DB with a table called "docs" with:
// * a column name content of type Text
// * a column named embedding of type Vector
// * a column named a of type Integer
test.skip("XataVectorSearch integration", async () => {
if (!process.env.XATA_API_KEY) {
throw new Error("XATA_API_KEY not set");
}
if (!process.env.XATA_DB_URL) {
throw new Error("XATA_DB_URL not set");
}
const xata = new BaseClient({
databaseURL: process.env.XATA_DB_URL,
apiKey: process.env.XATA_API_KEY,
branch: process.env.XATA_BRANCH || "main",
});
const table = "docs";
const embeddings = new OpenAIEmbeddings();
const store = new XataVectorSearch(embeddings, { client: xata, table });
expect(store).toBeDefined();
const createdAt = new Date().getTime();
const ids1 = await store.addDocuments([
{ pageContent: "hello", metadata: { a: createdAt + 1 } },
{ pageContent: "car", metadata: { a: createdAt } },
{ pageContent: "adjective", metadata: { a: createdAt } },
{ pageContent: "hi", metadata: { a: createdAt } },
]);
let results1 = await store.similaritySearch("hello!", 1);
// search store is eventually consistent so we need to retry if nothing is
// returned
for (let i = 0; i < 5 && results1.length === 0; i += 1) {
results1 = await store.similaritySearch("hello!", 1);
// eslint-disable-next-line no-promise-executor-return
await new Promise((r) => setTimeout(r, 1000));
}
expect(results1).toHaveLength(1);
expect(results1).toEqual([
new Document({ metadata: { a: createdAt + 1 }, pageContent: "hello" }),
]);
const results2 = await store.similaritySearchWithScore("testing!", 6, {
a: createdAt,
});
expect(results2).toHaveLength(3);
const ids2 = await store.addDocuments(
[
{ pageContent: "hello upserted", metadata: { a: createdAt + 1 } },
{ pageContent: "car upserted", metadata: { a: createdAt } },
{ pageContent: "adjective upserted", metadata: { a: createdAt } },
{ pageContent: "hi upserted", metadata: { a: createdAt } },
],
{ ids: ids1 }
);
expect(ids1).toEqual(ids2);
const results3 = await store.similaritySearchWithScore("testing!", 6, {
a: createdAt,
});
expect(results3).toHaveLength(3);
await store.delete({ ids: ids1.slice(2) });
let results4 = await store.similaritySearchWithScore("testing!", 3, {
a: createdAt,
});
for (let i = 0; i < 5 && results4.length > 1; i += 1) {
results4 = await store.similaritySearchWithScore("testing!", 3, {
a: createdAt,
});
// eslint-disable-next-line no-promise-executor-return
await new Promise((r) => setTimeout(r, 1000));
}
expect(results4).toHaveLength(1);
await store.delete({ ids: ids1 });
let results5 = await store.similaritySearch("hello!", 1);
for (let i = 0; i < 5 && results1.length > 0; i += 1) {
results5 = await store.similaritySearch("hello!", 1);
// eslint-disable-next-line no-promise-executor-return
await new Promise((r) => setTimeout(r, 1000));
}
expect(results5).toHaveLength(0);
});
test.skip("Search a XataVectorSearch using a metadata filter", async () => {
if (!process.env.XATA_API_KEY) {
throw new Error("XATA_API_KEY not set");
}
if (!process.env.XATA_DB_URL) {
throw new Error("XATA_DB_URL not set");
}
const xata = new BaseClient({
databaseURL: process.env.XATA_DB_URL,
apiKey: process.env.XATA_API_KEY,
branch: process.env.XATA_BRANCH || "main",
});
const table = "docs";
const embeddings = new OpenAIEmbeddings();
const store = new XataVectorSearch(embeddings, { client: xata, table });
expect(store).toBeDefined();
const createdAt = new Date().getTime();
const ids = await store.addDocuments([
{ pageContent: "hello 0", metadata: { a: createdAt } },
{ pageContent: "hello 1", metadata: { a: createdAt + 1 } },
{ pageContent: "hello 2", metadata: { a: createdAt + 2 } },
{ pageContent: "hello 3", metadata: { a: createdAt + 3 } },
]);
// search store is eventually consistent so we need to retry if nothing is
// returned
let results1 = await store.similaritySearch("hello!", 1);
for (let i = 0; i < 5 && results1.length < 4; i += 1) {
results1 = await store.similaritySearch("hello", 6);
// eslint-disable-next-line no-promise-executor-return
await new Promise((r) => setTimeout(r, 1000));
}
expect(results1).toHaveLength(4);
const results = await store.similaritySearch("hello", 1, {
a: createdAt + 2,
});
expect(results).toHaveLength(1);
expect(results).toEqual([
new Document({
metadata: { a: createdAt + 2 },
pageContent: "hello 2",
}),
]);
await store.delete({ ids });
let results5 = await store.similaritySearch("hello!", 1);
for (let i = 0; i < 5 && results1.length > 0; i += 1) {
results5 = await store.similaritySearch("hello", 1);
// eslint-disable-next-line no-promise-executor-return
await new Promise((r) => setTimeout(r, 1000));
}
expect(results5).toHaveLength(0);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/hnswlib.test.ts
|
import { test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { HNSWLib } from "../hnswlib.js";
test("Test HNSWLib.fromTexts + addVectors", async () => {
const vectorStore = await HNSWLib.fromTexts(
["Hello world"],
[{ id: 2 }],
new FakeEmbeddings()
);
expect(vectorStore.index?.getMaxElements()).toBe(1);
expect(vectorStore.index?.getCurrentCount()).toBe(1);
await vectorStore.addVectors(
[
[0, 1, 0, 0],
[1, 0, 0, 0],
[0.5, 0.5, 0.5, 0.5],
],
[
new Document({
pageContent: "hello bye",
metadata: { id: 5 },
}),
new Document({
pageContent: "hello worlddwkldnsk",
metadata: { id: 4 },
}),
new Document({
pageContent: "hello you",
metadata: { id: 6 },
}),
]
);
expect(vectorStore.index?.getMaxElements()).toBe(4);
const resultTwo = await vectorStore.similaritySearchVectorWithScore(
[1, 0, 0, 0],
3
);
const resultTwoMetadatas = resultTwo.map(([{ metadata }]) => metadata);
expect(resultTwoMetadatas).toEqual([{ id: 4 }, { id: 6 }, { id: 2 }]);
});
test("Test HNSWLib metadata filtering", async () => {
const pageContent = "Hello world";
const vectorStore = await HNSWLib.fromTexts(
[pageContent, pageContent, pageContent],
[{ id: 2 }, { id: 3 }, { id: 4 }],
new FakeEmbeddings()
);
// If the filter wasn't working, we'd get all 3 documents back
const results = await vectorStore.similaritySearch(
pageContent,
3,
(document) => document.metadata.id === 3
);
expect(results).toEqual([new Document({ metadata: { id: 3 }, pageContent })]);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/googlevertexai.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { beforeEach, expect, test } from "@jest/globals";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { SyntheticEmbeddings } from "@langchain/core/utils/testing";
import { InMemoryDocstore } from "../../stores/doc/in_memory.js";
import { MatchingEngineArgs, MatchingEngine } from "../googlevertexai.js";
// Unit tests for MatchingEngine's metadata helpers (cleanMetadata /
// metadataToRestrictions). No network calls are made here.
describe("Vertex AI matching", () => {
  let embeddings: EmbeddingsInterface;
  let store: InMemoryDocstore;
  let config: MatchingEngineArgs;
  let engine: MatchingEngine;
  // Build a fresh engine per test; index identifiers come from env vars
  // (non-null-asserted — assumed set, or unused by these pure helpers).
  beforeEach(() => {
    embeddings = new SyntheticEmbeddings({
      // Embedding dimensionality defaults to 768 unless overridden via env.
      vectorSize: Number.parseInt(
        process.env.SYNTHETIC_EMBEDDINGS_VECTOR_SIZE ?? "768",
        10
      ),
    });
    store = new InMemoryDocstore();
    config = {
      index: process.env.GOOGLE_VERTEXAI_MATCHINGENGINE_INDEX!,
      indexEndpoint: process.env.GOOGLE_VERTEXAI_MATCHINGENGINE_INDEXENDPOINT!,
      apiVersion: "v1beta1",
      docstore: store,
    };
    engine = new MatchingEngine(embeddings, config);
  });
  // cleanMetadata flattens nested objects into dot-joined keys
  // ("bravo.cinco.prime"); non-string elements of nested arrays come back
  // stringified (1 -> "1", true -> "true"), per the cinco expectations below.
  test("clean metadata", () => {
    const m = {
      alpha: "a",
      bravo: {
        uno: 1,
        dos: "two",
        tres: false,
        quatro: ["a", "b", "c", "d"],
        cinco: {
          prime: [1, 2],
          doublePrime: ["g", true],
        },
      },
      charlie: ["e", "f"],
    };
    const flat = engine.cleanMetadata(m);
    // console.log("flatten metadata", flat);
    expect(flat.alpha).toEqual("a");
    expect(flat["bravo.uno"]).toEqual(1);
    expect(flat["bravo.dos"]).toEqual("two");
    expect(flat["bravo.tres"]).toEqual(false);
    expect(flat["bravo.quatro"]).toEqual(["a", "b", "c", "d"]);
    expect(flat["bravo.cinco.prime"]).toEqual(["1", "2"]);
    expect(flat["bravo.cinco.doublePrime"]).toEqual(["g", "true"]);
    expect(flat.charlie).toEqual(["e", "f"]);
  });
  // metadataToRestrictions turns each (flattened) metadata entry into a
  // restriction object carrying a namespace and an allowList of values;
  // only entries 0 and 4 are spot-checked here.
  test("restrictions", () => {
    const m = {
      alpha: "a",
      bravo: {
        uno: 1,
        dos: "two",
        tres: false,
        quatro: ["a", "b", "c", "d"],
        cinco: {
          prime: [1, 2],
          doublePrime: ["g", true],
        },
      },
      charlie: ["e", "f"],
    };
    const r = engine.metadataToRestrictions(m);
    // console.log("restrictions", r);
    expect(r[0].namespace).toEqual("alpha");
    expect(r[0].allowList).toEqual(["a"]);
    expect(r[4].namespace).toEqual("bravo.quatro");
    expect(r[4].allowList).toEqual(["a", "b", "c", "d"]);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/myscale.int.test.ts
|
/* eslint-disable no-process-env */
import { test, expect } from "@jest/globals";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { MyScaleStore } from "../myscale.js";
// Integration test (skipped by default): needs a reachable MyScale cluster
// plus OpenAI credentials. Connection settings come from env vars with
// local-development fallbacks.
test.skip("MyScaleStore.fromText", async () => {
  const vectorStore = await MyScaleStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    new OpenAIEmbeddings(),
    {
      host: process.env.MYSCALE_HOST || "localhost",
      port: process.env.MYSCALE_PORT || "8443",
      username: process.env.MYSCALE_USERNAME || "username",
      password: process.env.MYSCALE_PASSWORD || "password",
    }
  );
  // Unfiltered: nearest neighbor should be the near-exact-match text.
  const results = await vectorStore.similaritySearch("hello world", 1);
  expect(results).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2" },
    }),
  ]);
  // whereStr applies a SQL-style predicate over stored metadata, so a
  // semantically worse match ("Bye bye") wins once the filter excludes others.
  const filteredResults = await vectorStore.similaritySearch("hello world", 1, {
    whereStr: "metadata.name = '1'",
  });
  expect(filteredResults).toEqual([
    new Document({
      pageContent: "Bye bye",
      metadata: { id: 1, name: "1" },
    }),
  ]);
});
// Same assertions as above, but the data is written under an explicit table
// name and the store is reopened via fromExistingIndex before querying.
test.skip("MyScaleStore.fromExistingIndex", async () => {
  await MyScaleStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [
      { id: 2, name: "2" },
      { id: 1, name: "1" },
      { id: 3, name: "3" },
    ],
    new OpenAIEmbeddings(),
    {
      host: process.env.MYSCALE_HOST || "localhost",
      port: process.env.MYSCALE_PORT || "8443",
      username: process.env.MYSCALE_USERNAME || "username",
      password: process.env.MYSCALE_PASSWORD || "password",
      table: "test_table",
    }
  );
  const vectorStore = await MyScaleStore.fromExistingIndex(
    new OpenAIEmbeddings(),
    {
      host: process.env.MYSCALE_HOST || "localhost",
      port: process.env.MYSCALE_PORT || "8443",
      username: process.env.MYSCALE_USERNAME || "username",
      password: process.env.MYSCALE_PASSWORD || "password",
      table: "test_table",
    }
  );
  const results = await vectorStore.similaritySearch("hello world", 1);
  expect(results).toEqual([
    new Document({
      pageContent: "Hello world",
      metadata: { id: 2, name: "2" },
    }),
  ]);
  const filteredResults = await vectorStore.similaritySearch("hello world", 1, {
    whereStr: "metadata.name = '1'",
  });
  expect(filteredResults).toEqual([
    new Document({
      pageContent: "Bye bye",
      metadata: { id: 1, name: "1" },
    }),
  ]);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/tigris.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import { jest, test, expect } from "@jest/globals";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { TigrisVectorStore } from "../tigris.js";
// Builds a stubbed Tigris index: writes are recorded by a spy and every
// similarity search resolves to an empty result set.
const makeMockIndex = () => ({
  addDocumentsWithVectors: jest.fn(),
  similaritySearchVectorWithScore: jest
    .fn()
    .mockImplementation(async () => []),
});
// When ids are supplied by the caller, they must be forwarded verbatim to the
// index alongside FakeEmbeddings' fixed vector.
test("TigrisVectorStore with external ids", async () => {
  const index = makeMockIndex();
  const store = new TigrisVectorStore(new FakeEmbeddings(), {
    index: index as any,
  });
  expect(store).toBeDefined();
  const nestedMetadata = {
    a: 1,
    b: { nested: [1, { a: 4 }] },
  };
  await store.addDocuments(
    [
      {
        pageContent: "hello",
        metadata: nestedMetadata,
      },
    ],
    ["id1"]
  );
  expect(index.addDocumentsWithVectors).toHaveBeenCalledTimes(1);
  expect(index.addDocumentsWithVectors).toHaveBeenCalledWith({
    ids: ["id1"],
    embeddings: [[0.1, 0.2, 0.3, 0.4]],
    documents: [
      {
        content: "hello",
        metadata: nestedMetadata,
      },
    ],
  });
  // The stubbed search always yields no matches.
  const hits = await store.similaritySearch("hello", 1);
  expect(hits).toHaveLength(0);
});
// Without explicit ids the store still performs exactly one write.
test("TigrisVectorStore with generated ids", async () => {
  const index = makeMockIndex();
  const store = new TigrisVectorStore(new FakeEmbeddings(), {
    index: index as any,
  });
  expect(store).toBeDefined();
  await store.addDocuments([{ pageContent: "hello", metadata: { a: 1 } }]);
  expect(index.addDocumentsWithVectors).toHaveBeenCalledTimes(1);
  const hits = await store.similaritySearch("hello", 1);
  expect(hits).toHaveLength(0);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/googlevertexai.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { beforeAll, expect, test } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import type { EmbeddingsInterface } from "@langchain/core/embeddings";
import { SyntheticEmbeddings } from "@langchain/core/utils/testing";
import { InMemoryDocstore } from "../../stores/doc/in_memory.js";
import {
  MatchingEngineArgs,
  MatchingEngine,
  IdDocument,
  Restriction,
} from "../googlevertexai.js";
// Integration suite (every test is skipped by default): exercises a live
// Vertex AI Matching Engine index identified via environment variables.
describe("Vertex AI matching", () => {
  let embeddings: EmbeddingsInterface;
  let store: InMemoryDocstore;
  let config: MatchingEngineArgs;
  let engine: MatchingEngine;
  // One shared engine for the whole suite (beforeAll, not beforeEach, since
  // tests build on data added by earlier ones).
  beforeAll(() => {
    embeddings = new SyntheticEmbeddings({
      // Embedding dimensionality defaults to 768 unless overridden via env.
      vectorSize: Number.parseInt(
        process.env.SYNTHETIC_EMBEDDINGS_VECTOR_SIZE ?? "768",
        10
      ),
    });
    store = new InMemoryDocstore();
    config = {
      index: process.env.GOOGLE_VERTEXAI_MATCHINGENGINE_INDEX!,
      indexEndpoint: process.env.GOOGLE_VERTEXAI_MATCHINGENGINE_INDEXENDPOINT!,
      apiVersion: "v1beta1",
      docstore: store,
    };
    engine = new MatchingEngine(embeddings, config);
  });
  // Endpoint discovery returns an object exposing at least apiEndpoint and
  // deployedIndexId.
  test.skip("public endpoint", async () => {
    const apiendpoint = await engine.determinePublicAPIEndpoint();
    // console.log(apiendpoint);
    expect(apiendpoint).toHaveProperty("apiEndpoint");
    expect(apiendpoint).toHaveProperty("deployedIndexId");
  });
  // Smoke test: adding a document should not throw.
  test.skip("store", async () => {
    const doc = new Document({ pageContent: "this" });
    await engine.addDocuments([doc]);
    // console.log(store._docs);
  });
  // A basic query should surface at least one previously stored document.
  test.skip("query", async () => {
    const results = await engine.similaritySearch("that");
    // console.log("query", results);
    expect(results?.length).toBeGreaterThanOrEqual(1);
  });
  // An allowList restriction on a namespace no stored document carries is
  // expected to exclude everything (assumes no "color: red" docs exist yet).
  test.skip("query filter exclude", async () => {
    const filters: Restriction[] = [
      {
        namespace: "color",
        allowList: ["red"],
      },
    ];
    const results = await engine.similaritySearch("that", 4, filters);
    // console.log("query", results);
    expect(results?.length).toEqual(0);
  });
  // Deleting by id should change subsequent search results for the same query.
  test.skip("delete", async () => {
    const newDoc = new Document({ pageContent: "this" });
    await engine.addDocuments([newDoc]);
    // console.log("added", newDoc);
    const oldResults: IdDocument[] = await engine.similaritySearch("this", 10);
    expect(oldResults?.length).toBeGreaterThanOrEqual(1);
    // console.log(oldResults);
    const oldIds = oldResults.map((doc) => doc.id!);
    await engine.delete({ ids: oldIds });
    // console.log("deleted", oldIds);
    const newResults: IdDocument[] = await engine.similaritySearch("this", 10);
    expect(newResults).not.toEqual(oldResults);
    // console.log(newResults);
  });
  // Restriction filtering over a small corpus with color/category metadata;
  // seeded in beforeAll and removed again in afterAll.
  describe("restrictions", () => {
    let documents: IdDocument[];
    beforeAll(async () => {
      documents = [
        new IdDocument({
          id: "1",
          pageContent: "this apple",
          metadata: {
            color: "red",
            category: "edible",
          },
        }),
        new IdDocument({
          id: "2",
          pageContent: "this blueberry",
          metadata: {
            color: "blue",
            category: "edible",
          },
        }),
        new IdDocument({
          id: "3",
          pageContent: "this firetruck",
          metadata: {
            color: "red",
            category: "machine",
          },
        }),
      ];
      // Add all our documents
      await engine.addDocuments(documents);
    });
    test.skip("none", async () => {
      // A general query to make sure we can read everything
      const allResults = await engine.similaritySearch("this", 4);
      expect(allResults).toHaveLength(3);
    });
    test.skip("red things", async () => {
      // Just get red things
      const redFilter: Restriction[] = [
        {
          namespace: "color",
          allowList: ["red"],
        },
      ];
      const redResults = await engine.similaritySearch("this", 4, redFilter);
      expect(redResults).toHaveLength(2);
    });
    // Combining an allowList with a denyList narrows to a single document.
    test.skip("red, not edible", async () => {
      const filter: Restriction[] = [
        {
          namespace: "color",
          allowList: ["red"],
        },
        {
          namespace: "category",
          denyList: ["edible"],
        },
      ];
      const results = await engine.similaritySearch("thing", 4, filter);
      expect(results).toHaveLength(1);
      expect(results[0].pageContent).toEqual("this firetruck");
    });
    afterAll(async () => {
      // Cleanup
      const ids = documents.map((doc) => doc.id!);
      await engine.delete({ ids });
    });
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/prisma.test.ts
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { jest, test, expect } from "@jest/globals";
import { PrismaVectorStore } from "../prisma.js";
// Minimal stand-in for Prisma's Sql class; only its identity matters to the
// store, so the body stays empty apart from the field Prisma's Sql exposes.
class Sql {
  strings: string[];
}
// Column markers the store uses to map model fields to id/content.
const mockColumns = {
  id: PrismaVectorStore.IdColumn as typeof PrismaVectorStore.IdColumn,
  content:
    PrismaVectorStore.ContentColumn as typeof PrismaVectorStore.ContentColumn,
};
// Stubs for the Prisma namespace helpers the store may call to build queries.
const sql = jest.fn<(params: unknown) => Sql>();
const raw = jest.fn<(params: unknown) => Sql>();
const join = jest.fn<(params: unknown) => Sql>();
const mockPrismaNamespace = {
  ModelName: {},
  Sql,
  raw,
  join,
  sql,
};
// Stubs for the PrismaClient query surface (never actually awaited here
// because similaritySearchVectorWithScore is spied out below).
const $queryRaw = jest.fn<(params: unknown) => Promise<any>>();
const $executeRaw = jest.fn<(params: unknown) => Promise<any>>();
const $transaction = jest.fn<(params: unknown) => Promise<any>>();
const mockPrismaClient = {
  $queryRaw,
  $executeRaw,
  $transaction,
};
describe("Prisma", () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });
  // Fix: test title previously misspelled "similaritySearch" as
  // "simiaritySearch".
  test("passes provided filters with similaritySearch", async () => {
    const embeddings = new FakeEmbeddings();
    const store = new PrismaVectorStore(new FakeEmbeddings(), {
      db: mockPrismaClient,
      prisma: mockPrismaNamespace,
      tableName: "test",
      vectorColumnName: "vector",
      columns: mockColumns,
    });
    // Spy on the lower-level vector search so we can assert that the filter
    // argument is forwarded untouched.
    const similaritySearchVectorWithScoreSpy = jest
      .spyOn(store, "similaritySearchVectorWithScore")
      .mockResolvedValue([]);
    const filter = { id: { equals: "123" } };
    await store.similaritySearch("hello", 1, filter);
    // FakeEmbeddings is deterministic, so re-embedding "hello" here yields
    // the same vector the store computed internally.
    const embeddedQuery = await embeddings.embedQuery("hello");
    expect(similaritySearchVectorWithScoreSpy).toHaveBeenCalledTimes(1);
    expect(similaritySearchVectorWithScoreSpy).toHaveBeenCalledWith(
      embeddedQuery,
      1,
      filter
    );
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/faiss.int.test.ts
|
import { test, expect } from "@jest/globals";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import * as os from "node:os";
import { fileURLToPath } from "node:url";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { FaissStore } from "../faiss.js";
// Integration tests for FaissStore; require OpenAI credentials and the
// faiss-node native binding.
test("Test FaissStore.fromTexts", async () => {
  const vectorStore = await FaissStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [{ id: 2 }, { id: 1 }, { id: 3 }],
    new OpenAIEmbeddings()
  );
  // ntotal() reports how many vectors the underlying FAISS index holds.
  expect(vectorStore.index?.ntotal()).toBe(3);
  const resultOne = await vectorStore.similaritySearch("hello world", 1);
  const resultOneMetadatas = resultOne.map(({ metadata }) => metadata);
  expect(resultOneMetadatas).toEqual([{ id: 2 }]);
  // Full-k search returns all docs ordered by similarity to the query.
  const resultTwo = await vectorStore.similaritySearch("hello world", 3);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
});
// addDocuments after construction must grow the index and participate in
// subsequent searches.
test("Test FaissStore.fromTexts + addDocuments", async () => {
  const vectorStore = await FaissStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [{ id: 2 }, { id: 1 }, { id: 3 }],
    new OpenAIEmbeddings()
  );
  expect(vectorStore.index?.ntotal()).toBe(3);
  await vectorStore.addDocuments([
    new Document({
      pageContent: "hello worldklmslksmn",
      metadata: { id: 4 },
    }),
  ]);
  expect(vectorStore.index?.ntotal()).toBe(4);
  const resultTwo = await vectorStore.similaritySearch("hello world", 3);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 4 }]);
});
// Round-trip: save to a temp directory, reload, and verify that search
// results are unchanged.
test("Test FaissStore.load and FaissStore.save", async () => {
  const vectorStore = await FaissStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [{ id: 2 }, { id: 1 }, { id: 3 }],
    new OpenAIEmbeddings()
  );
  expect(vectorStore.index?.ntotal()).toBe(3);
  const resultOne = await vectorStore.similaritySearch("hello world", 1);
  const resultOneMetadatas = resultOne.map(({ metadata }) => metadata);
  expect(resultOneMetadatas).toEqual([{ id: 2 }]);
  const resultTwo = await vectorStore.similaritySearch("hello world", 3);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
  // mkdtemp gives each run an isolated directory (never cleaned up here).
  const tempDirectory = await fs.mkdtemp(path.join(os.tmpdir(), "lcjs-"))
  // console.log(tempDirectory);
  await vectorStore.save(tempDirectory);
  const loadedVectorStore = await FaissStore.load(
    tempDirectory,
    new OpenAIEmbeddings()
  );
  const resultThree = await loadedVectorStore.similaritySearch(
    "hello world",
    1
  );
  const resultThreeMetadatas = resultThree.map(({ metadata }) => metadata);
  expect(resultThreeMetadatas).toEqual([{ id: 2 }]);
  const resultFour = await loadedVectorStore.similaritySearch("hello world", 3);
  const resultFourMetadatas = resultFour.map(({ metadata }) => metadata);
  expect(resultFourMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
});
// Loads an index saved by the Python version of LangChain (pickle + faiss
// files checked into faiss.int.test.data), then mixes in new documents and
// round-trips the combined index through save/load.
test("Test FaissStore.loadFromPython", async () => {
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = path.dirname(__filename);
  const loadedFromPythonVectorStore = await FaissStore.loadFromPython(
    path.join(__dirname, "faiss.int.test.data/faiss_index"),
    new OpenAIEmbeddings()
  );
  // The fixture index was built with 42 chunks of the State of the Union.
  expect(loadedFromPythonVectorStore.index?.ntotal()).toBe(42);
  const results0 = await loadedFromPythonVectorStore.similaritySearch(
    "What did the president say about Ketanji Brown Jackson"
  );
  const expectedResultofPythonSaved = new Document({
    metadata: { source: "../../../../../examples/state_of_the_union.txt" },
    pageContent: `Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections.
Tonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service.
One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court.
And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence.`,
  });
  expect(results0).toHaveLength(4);
  expect(results0[0]).toEqual(expectedResultofPythonSaved);
  // Add multilingual docs to confirm new vectors coexist with the imported ones.
  await loadedFromPythonVectorStore.addDocuments([
    new Document({
      metadata: {
        source: "addDocuments_0",
      },
      pageContent: "hello",
    }),
    new Document({
      metadata: {
        source: "addDocuments_1",
      },
      pageContent: "你好吗?",
    }),
    new Document({
      metadata: {
        source: "addDocuments_2",
      },
      pageContent: "おはようございます。",
    }),
    new Document({
      metadata: {
        source: "addDocuments_3",
      },
      pageContent: "こんにちは!",
    }),
  ]);
  const results1 = await loadedFromPythonVectorStore.similaritySearch("hello");
  expect(results1).toEqual([
    new Document({
      pageContent: "hello",
      metadata: { source: "addDocuments_0" },
    }),
    new Document({
      pageContent: "こんにちは!",
      metadata: { source: "addDocuments_3" },
    }),
    new Document({
      pageContent: "你好吗?",
      metadata: { source: "addDocuments_1" },
    }),
    new Document({
      pageContent: "おはようございます。",
      metadata: { source: "addDocuments_2" },
    }),
  ]);
  const tempDirectory = await fs.mkdtemp(path.join(os.tmpdir(), "lcjs-"));
  // console.log(tempDirectory);
  await loadedFromPythonVectorStore.save(tempDirectory);
  const loadedVectorStore = await FaissStore.load(
    tempDirectory,
    new OpenAIEmbeddings()
  );
  // Cross-lingual query ("morning" in Chinese) should hit the Japanese greeting.
  const results2 = await loadedVectorStore.similaritySearch("早上", 1);
  expect(results2).toHaveLength(1);
  expect(results2[0]).toEqual(
    new Document({
      pageContent: "おはようございます。",
      metadata: { source: "addDocuments_2" },
    })
  );
  const results3 = await loadedVectorStore.similaritySearch(
    "What did the president say about Ketanji Brown Jackson",
    1
  );
  expect(results3).toHaveLength(1);
  expect(results3[0]).toEqual(expectedResultofPythonSaved);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/milvus.int.test.ts
|
import { test, expect, afterAll, beforeAll } from "@jest/globals";
import { ErrorCode, MilvusClient } from "@zilliz/milvus2-sdk-node";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { Milvus } from "../milvus.js";
let collectionName: string;
let embeddings: OpenAIEmbeddings;
// https://docs.zilliz.com/docs/quick-start-1#create-a-collection
// Fill these in (or wire env vars) before un-skipping the tests below.
const MILVUS_ADDRESS = "";
const MILVUS_TOKEN = "";
const OPEN_AI_API_KEY = "";
// Each run targets a fresh randomly-named collection to avoid collisions.
beforeAll(async () => {
  embeddings = new OpenAIEmbeddings({
    openAIApiKey: OPEN_AI_API_KEY,
  });
  collectionName = `test_collection_${Math.random().toString(36).substring(7)}`;
});
// Ingest five texts with explicit primary ids (autoId: false) and verify
// ranked retrieval plus a string filter expression.
test.skip("Test Milvus.fromtext with token", async () => {
  const texts = [
    `Tortoise: Labyrinth? Labyrinth? Could it Are we in the notorious Little
            Harmonic Labyrinth of the dreaded Majotaur?`,
    "Achilles: Yiikes! What is that?",
    `Tortoise: They say-although I person never believed it myself-that an I
            Majotaur has created a tiny labyrinth sits in a pit in the middle of
            it, waiting innocent victims to get lost in its fears complexity.
            Then, when they wander and dazed into the center, he laughs and
            laughs at them-so hard, that he laughs them to death!`,
    "Achilles: Oh, no!",
    "Tortoise: But it's only a myth. Courage, Achilles.",
  ];
  const objA = { A: { B: "some string" } };
  const objB = { A: { B: "some other string" } };
  const metadatas: object[] = [
    { id: 2, other: objA },
    { id: 1, other: objB },
    { id: 3, other: objA },
    { id: 4, other: objB },
    { id: 5, other: objA },
  ];
  const milvus = await Milvus.fromTexts(texts, metadatas, embeddings, {
    collectionName,
    autoId: false,
    primaryField: "id",
    clientConfig: {
      address: MILVUS_ADDRESS,
      token: MILVUS_TOKEN,
    },
  });
  const query = "who is achilles?";
  const result = await milvus.similaritySearch(query, 1);
  const resultMetadatas = result.map(({ metadata }) => metadata);
  expect(resultMetadatas).toEqual([{ id: 1, other: objB }]);
  const resultTwo = await milvus.similaritySearch(query, 3);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas).toEqual([
    { id: 1, other: objB },
    { id: 4, other: objB },
    { id: 5, other: objA },
  ]);
  // Milvus boolean expression filter restricts results by primary key.
  const resultThree = await milvus.similaritySearch(query, 1, 'id == "1"');
  const resultThreeMetadatas = resultThree.map(({ metadata }) => metadata);
  expect(resultThreeMetadatas).toEqual([{ id: 1, other: objB }]);
});
// Same scenario as above; kept as a separate case (identical body) in the
// original suite.
test.skip("Test Milvus.fromtext", async () => {
  const texts = [
    `Tortoise: Labyrinth? Labyrinth? Could it Are we in the notorious Little
            Harmonic Labyrinth of the dreaded Majotaur?`,
    "Achilles: Yiikes! What is that?",
    `Tortoise: They say-although I person never believed it myself-that an I
            Majotaur has created a tiny labyrinth sits in a pit in the middle of
            it, waiting innocent victims to get lost in its fears complexity.
            Then, when they wander and dazed into the center, he laughs and
            laughs at them-so hard, that he laughs them to death!`,
    "Achilles: Oh, no!",
    "Tortoise: But it's only a myth. Courage, Achilles.",
  ];
  const objA = { A: { B: "some string" } };
  const objB = { A: { B: "some other string" } };
  const metadatas: object[] = [
    { id: 2, other: objA },
    { id: 1, other: objB },
    { id: 3, other: objA },
    { id: 4, other: objB },
    { id: 5, other: objA },
  ];
  const milvus = await Milvus.fromTexts(texts, metadatas, embeddings, {
    collectionName,
    autoId: false,
    primaryField: "id",
    clientConfig: {
      address: MILVUS_ADDRESS,
      token: MILVUS_TOKEN,
    },
  });
  const query = "who is achilles?";
  const result = await milvus.similaritySearch(query, 1);
  const resultMetadatas = result.map(({ metadata }) => metadata);
  expect(resultMetadatas).toEqual([{ id: 1, other: objB }]);
  const resultTwo = await milvus.similaritySearch(query, 3);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas).toEqual([
    { id: 1, other: objB },
    { id: 4, other: objB },
    { id: 5, other: objA },
  ]);
  const resultThree = await milvus.similaritySearch(query, 1, 'id == "1"');
  const resultThreeMetadatas = resultThree.map(({ metadata }) => metadata);
  expect(resultThreeMetadatas).toEqual([{ id: 1, other: objB }]);
});
// Re-attach to the collection written above and repeat the same queries.
test.skip("Test Milvus.fromExistingCollection", async () => {
  const milvus = await Milvus.fromExistingCollection(embeddings, {
    collectionName,
    autoId: false,
    primaryField: "id",
    clientConfig: {
      address: MILVUS_ADDRESS,
      token: MILVUS_TOKEN,
    },
  });
  const query = "who is achilles?";
  const result = await milvus.similaritySearch(query, 1);
  const resultMetadatas = result.map(({ metadata }) => metadata);
  expect(resultMetadatas.length).toBe(1);
  expect(resultMetadatas[0].id).toEqual(1);
  const resultTwo = await milvus.similaritySearch(query, 3);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas.length).toBe(3);
  expect(resultTwoMetadatas[0].id).toEqual(1);
  expect(resultTwoMetadatas[1].id).toEqual(4);
  expect(resultTwoMetadatas[2].id).toEqual(5);
  const resultThree = await milvus.similaritySearch(query, 1, 'id == "1"');
  const resultThreeMetadatas = resultThree.map(({ metadata }) => metadata);
  expect(resultThreeMetadatas.length).toBe(1);
  expect(resultThreeMetadatas[0].id).toEqual(1);
});
// Deleting via a filter expression must remove the top hit for the query.
test.skip("Test Milvus.deleteData with filter", async () => {
  const milvus = await Milvus.fromExistingCollection(embeddings, {
    collectionName,
    autoId: false,
    primaryField: "id",
    clientConfig: {
      address: MILVUS_ADDRESS,
      token: MILVUS_TOKEN,
    },
  });
  const query = "who is achilles?";
  const result = await milvus.similaritySearch(query, 1);
  const resultMetadatas = result.map(({ metadata }) => metadata);
  const primaryId = resultMetadatas[0].id;
  expect(resultMetadatas.length).toBe(1);
  expect(resultMetadatas[0].id).toEqual(1);
  await milvus.delete({ filter: `id in ["${primaryId}"]` });
  const resultTwo = await milvus.similaritySearch(query, 1);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas[0].id).not.toEqual(primaryId);
});
// Deleting by explicit id list must remove all three former top hits.
test.skip("Test Milvus.deleteData with ids", async () => {
  const milvus = await Milvus.fromExistingCollection(embeddings, {
    collectionName,
    autoId: false,
    primaryField: "id",
    clientConfig: {
      address: MILVUS_ADDRESS,
      token: MILVUS_TOKEN,
    },
  });
  const query = "who is tortoise?";
  const result = await milvus.similaritySearch(query, 3);
  const resultMetadatas = result.map(({ metadata }) => metadata);
  const primaryIds = resultMetadatas.map((rm) => rm.id);
  expect(resultMetadatas.length).toBe(3);
  expect(resultMetadatas[0].id).toEqual(3);
  expect(resultMetadatas[1].id).toEqual(2);
  expect(resultMetadatas[2].id).toEqual(5);
  await milvus.delete({ ids: primaryIds });
  const resultTwo = await milvus.similaritySearch(query, 3);
  const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  expect(resultTwoMetadatas[0].id).not.toEqual(3);
  expect(resultTwoMetadatas[0].id).not.toEqual(2);
  expect(resultTwoMetadatas[0].id).not.toEqual(5);
});
// Without autoId: false, Milvus assigns primary keys itself; the document
// must still be retrievable afterwards. Uses a throwaway collection.
test.skip("Test Milvus.addDocuments with auto ID", async () => {
  const vectorstore = new Milvus(embeddings, {
    collectionName: `test_collection_${Math.random()
      .toString(36)
      .substring(7)}`,
    clientConfig: {
      address: MILVUS_ADDRESS,
      token: MILVUS_TOKEN,
    },
  });
  await vectorstore.addDocuments([
    new Document({
      pageContent: "test",
      metadata: { test: "a" },
    }),
  ]);
  const result = await vectorstore.similaritySearch("test", 1);
  const resultMetadatas = result.map(({ metadata }) => metadata);
  expect(resultMetadatas.length).toBe(1);
});
// Drop the shared collection after the suite (only when MILVUS_URL is set,
// i.e. the tests actually ran against a server).
afterAll(async () => {
  // eslint-disable-next-line no-process-env
  if (!process.env.MILVUS_URL) return;
  // eslint-disable-next-line no-process-env
  const client = new MilvusClient(process.env.MILVUS_URL as string);
  const dropRes = await client.dropCollection({
    collection_name: collectionName,
  });
  // console.log("Drop collection response: ", dropRes)
  expect(dropRes.error_code).toBe(ErrorCode.SUCCESS);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/neo4j_vector.int.test.ts
|
/* eslint-disable no-process-env */
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import {
DOCUMENTS,
TYPE_1_FILTERING_TEST_CASES,
TYPE_2_FILTERING_TEST_CASES,
TYPE_3_FILTERING_TEST_CASES,
TYPE_4_FILTERING_TEST_CASES,
} from "./neo4j_vector.fixtures.js";
import { Neo4jVectorStore } from "../neo4j_vector.js";
// Dimensionality used by the fake embedder below (matches OpenAI-style
// 1536-dimensional vectors).
const OS_TOKEN_COUNT = 1536;
// Known corpus: FakeEmbeddingsWithOsDimension.embedQuery only accepts these
// exact strings.
const texts = ["foo", "bar", "baz", "This is the end of the world!"];
/**
 * Deterministic 1536-dimensional fake embedder: every vector is all ones
 * except the final component, which encodes the item's 1-based position.
 * Queries must be one of the strings in `texts`, otherwise an error is thrown.
 */
class FakeEmbeddingsWithOsDimension extends FakeEmbeddings {
  async embedDocuments(documents: string[]): Promise<number[][]> {
    return documents.map((_, position) => this.vectorFor(position + 1));
  }

  async embedQuery(text: string): Promise<number[]> {
    const position = texts.indexOf(text);
    if (position === -1) {
      throw new Error(`Text '${text}' not found in the 'texts' array.`);
    }
    return this.vectorFor(position + 1);
  }

  // Builds [1, 1, ..., 1, marker] of length OS_TOKEN_COUNT.
  private vectorFor(marker: number): number[] {
    return Array(OS_TOKEN_COUNT - 1)
      .fill(1.0)
      .concat([marker]);
  }
}
/**
 * Removes every VECTOR index from the database backing `store`,
 * leaving a clean slate for the next test run.
 */
async function dropVectorIndexes(store: Neo4jVectorStore) {
  const vectorIndexes = await store.query(`
    SHOW INDEXES YIELD name, type
    WHERE type = "VECTOR"
    RETURN name
  `);
  if (!vectorIndexes) {
    return;
  }
  for (const { name } of vectorIndexes) {
    await store.query(`DROP INDEX ${name}`);
  }
}
describe("Neo4j Vector", () => {
  // Basic ingest + query. With the fake embedder, "foo" (index 0) is closest
  // to itself, then "bar"; metadata stays empty.
  test.skip("Test fromTexts", async () => {
    const url = process.env.NEO4J_URI as string;
    const username = process.env.NEO4J_USERNAME as string;
    const password = process.env.NEO4J_PASSWORD as string;
    expect(url).toBeDefined();
    expect(username).toBeDefined();
    expect(password).toBeDefined();
    const embeddings = new FakeEmbeddingsWithOsDimension();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const metadatas: any[] = [];
    const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
      texts,
      metadatas,
      embeddings,
      {
        url,
        username,
        password,
        preDeleteCollection: true,
      }
    );
    const output = await neo4jVectorStore.similaritySearch("foo", 2);
    const expectedResult = [
      new Document({
        pageContent: "foo",
        metadata: {},
      }),
      new Document({
        pageContent: "bar",
        metadata: {},
      }),
    ];
    expect(output).toStrictEqual(expectedResult);
    await dropVectorIndexes(neo4jVectorStore);
    await neo4jVectorStore.close();
  });
  // Same as above but with searchType "hybrid" (vector + keyword search).
  test.skip("Test fromTexts Hybrid", async () => {
    const url = process.env.NEO4J_URI as string;
    const username = process.env.NEO4J_USERNAME as string;
    const password = process.env.NEO4J_PASSWORD as string;
    expect(url).toBeDefined();
    expect(username).toBeDefined();
    expect(password).toBeDefined();
    const embeddings = new FakeEmbeddingsWithOsDimension();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const metadatas: any[] = [];
    const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
      texts,
      metadatas,
      embeddings,
      {
        url,
        username,
        password,
        preDeleteCollection: true,
        searchType: "hybrid",
      }
    );
    const output = await neo4jVectorStore.similaritySearch("foo", 2);
    const expectedResult = [
      new Document({
        pageContent: "foo",
        metadata: {},
      }),
      new Document({
        pageContent: "bar",
        metadata: {},
      }),
    ];
    expect(output).toStrictEqual(expectedResult);
    await dropVectorIndexes(neo4jVectorStore);
    await neo4jVectorStore.close();
  });
  // Write under a named index, then reopen it via fromExistingIndex and
  // verify identical query results.
  test.skip("Test fromExistingIndex", async () => {
    const url = process.env.NEO4J_URI as string;
    const username = process.env.NEO4J_USERNAME as string;
    const password = process.env.NEO4J_PASSWORD as string;
    expect(url).toBeDefined();
    expect(username).toBeDefined();
    expect(password).toBeDefined();
    const embeddings = new FakeEmbeddingsWithOsDimension();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const metadatas: any[] = [];
    const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
      texts,
      metadatas,
      embeddings,
      {
        url,
        username,
        password,
        indexName: "vector",
        preDeleteCollection: true,
      }
    );
    const existingIndex = await Neo4jVectorStore.fromExistingIndex(embeddings, {
      url,
      username,
      password,
      indexName: "vector",
    });
    const output = await existingIndex.similaritySearch("foo", 2);
    const expectedResult = [
      new Document({
        pageContent: "foo",
        metadata: {},
      }),
      new Document({
        pageContent: "bar",
        metadata: {},
      }),
    ];
    expect(output).toStrictEqual(expectedResult);
    await dropVectorIndexes(neo4jVectorStore);
    await neo4jVectorStore.close();
    await existingIndex.close();
  });
  // Hybrid variant of fromExistingIndex: both the vector and keyword index
  // names must be supplied when reopening.
  test.skip("Test fromExistingIndex Hybrid", async () => {
    const url = process.env.NEO4J_URI as string;
    const username = process.env.NEO4J_USERNAME as string;
    const password = process.env.NEO4J_PASSWORD as string;
    expect(url).toBeDefined();
    expect(username).toBeDefined();
    expect(password).toBeDefined();
    const embeddings = new FakeEmbeddingsWithOsDimension();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const metadatas: any[] = [];
    const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
      texts,
      metadatas,
      embeddings,
      {
        url,
        username,
        password,
        indexName: "vector",
        keywordIndexName: "keyword",
        searchType: "hybrid",
        preDeleteCollection: true,
      }
    );
    const existingIndex = await Neo4jVectorStore.fromExistingIndex(embeddings, {
      url,
      username,
      password,
      indexName: "vector",
      keywordIndexName: "keyword",
      searchType: "hybrid",
    });
    const output = await existingIndex.similaritySearch("foo", 2);
    const expectedResult = [
      new Document({
        pageContent: "foo",
        metadata: {},
      }),
      new Document({
        pageContent: "bar",
        metadata: {},
      }),
    ];
    expect(output).toStrictEqual(expectedResult);
    await dropVectorIndexes(neo4jVectorStore);
    await neo4jVectorStore.close();
    await existingIndex.close();
  });
  // A custom retrievalQuery reshapes what comes back: here it injects a
  // constant {foo:'bar'} metadata map into every hit.
  test.skip("Test retrievalQuery", async () => {
    const url = process.env.NEO4J_URI as string;
    const username = process.env.NEO4J_USERNAME as string;
    const password = process.env.NEO4J_PASSWORD as string;
    expect(url).toBeDefined();
    expect(username).toBeDefined();
    expect(password).toBeDefined();
    const embeddings = new FakeEmbeddingsWithOsDimension();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const metadatas: any[] = [];
    const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
      texts,
      metadatas,
      embeddings,
      {
        url,
        username,
        password,
        indexName: "vector",
        preDeleteCollection: true,
        retrievalQuery:
          "RETURN node.text AS text, score, {foo:'bar'} AS metadata",
      }
    );
    const output = await neo4jVectorStore.similaritySearch("foo", 2);
    const expectedResult = [
      new Document({
        pageContent: "foo",
        metadata: { foo: "bar" },
      }),
      new Document({
        pageContent: "bar",
        metadata: { foo: "bar" },
      }),
    ];
    expect(output).toStrictEqual(expectedResult);
    await dropVectorIndexes(neo4jVectorStore);
    await neo4jVectorStore.close();
  });
  // fromExistingGraph embeds pre-existing nodes: text is assembled from the
  // listed textNodeProperties ("\nname: …"), and remaining node properties
  // (e.g. foo) surface as metadata.
  test.skip("Test fromExistingGraph", async () => {
    const url = process.env.NEO4J_URI as string;
    const username = process.env.NEO4J_USERNAME as string;
    const password = process.env.NEO4J_PASSWORD as string;
    expect(url).toBeDefined();
    expect(username).toBeDefined();
    expect(password).toBeDefined();
    const embeddings = new FakeEmbeddingsWithOsDimension();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const metadatas: any[] = [];
    const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
      texts,
      metadatas,
      embeddings,
      {
        url,
        username,
        password,
        indexName: "vector",
        preDeleteCollection: true,
      }
    );
    // Wipe the graph and seed two plain Test nodes to index from scratch.
    await neo4jVectorStore.query("MATCH (n) DETACH DELETE n");
    await neo4jVectorStore.query(
      "CREATE (:Test {name:'Foo'}), (:Test {name:'Bar', foo:'bar'})"
    );
    const existingGraph = await Neo4jVectorStore.fromExistingGraph(embeddings, {
      url,
      username,
      password,
      indexName: "vector1",
      nodeLabel: "Test",
      textNodeProperties: ["name"],
      embeddingNodeProperty: "embedding",
    });
    const output = await existingGraph.similaritySearch("foo", 2);
    const expectedResult = [
      new Document({
        pageContent: "\nname: Foo",
        metadata: {},
      }),
      new Document({
        pageContent: "\nname: Bar",
        metadata: { foo: "bar" },
      }),
    ];
    expect(output).toStrictEqual(expectedResult);
    await dropVectorIndexes(neo4jVectorStore);
    await neo4jVectorStore.close();
    await existingGraph.close();
  });
// Same scenario as above but indexing two text properties; a node missing
// one of them renders that property with an empty value in pageContent.
test.skip("Test fromExistingGraph multiple properties", async () => {
  const url = process.env.NEO4J_URI as string;
  const username = process.env.NEO4J_USERNAME as string;
  const password = process.env.NEO4J_PASSWORD as string;
  expect(url).toBeDefined();
  expect(username).toBeDefined();
  expect(password).toBeDefined();
  const embeddings = new FakeEmbeddingsWithOsDimension();
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const metadatas: any[] = [];
  const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
    texts,
    metadatas,
    embeddings,
    {
      url,
      username,
      password,
      indexName: "vector",
      preDeleteCollection: true,
    }
  );
  await neo4jVectorStore.query("MATCH (n) DETACH DELETE n");
  await neo4jVectorStore.query(
    "CREATE (:Test {name:'Foo', name2:'Fooz'}), (:Test {name:'Bar', foo:'bar'})"
  );
  const existingGraph = await Neo4jVectorStore.fromExistingGraph(embeddings, {
    url,
    username,
    password,
    indexName: "vector1",
    nodeLabel: "Test",
    textNodeProperties: ["name", "name2"],
    embeddingNodeProperty: "embedding",
  });
  const output = await existingGraph.similaritySearch("foo", 2);
  // The second node lacks name2, so it renders as "name2: " (empty).
  const expectedResult = [
    new Document({
      pageContent: "\nname: Foo\nname2: Fooz",
      metadata: {},
    }),
    new Document({
      pageContent: "\nname: Bar\nname2: ",
      metadata: { foo: "bar" },
    }),
  ];
  expect(output).toStrictEqual(expectedResult);
  await dropVectorIndexes(neo4jVectorStore);
  await neo4jVectorStore.close();
  await existingGraph.close();
});

// Identical multi-property scenario with searchType "hybrid" (vector +
// keyword index); expected documents match the pure-vector variant.
test.skip("Test fromExistingGraph multiple properties hybrid", async () => {
  const url = process.env.NEO4J_URI as string;
  const username = process.env.NEO4J_USERNAME as string;
  const password = process.env.NEO4J_PASSWORD as string;
  expect(url).toBeDefined();
  expect(username).toBeDefined();
  expect(password).toBeDefined();
  const embeddings = new FakeEmbeddingsWithOsDimension();
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const metadatas: any[] = [];
  const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
    texts,
    metadatas,
    embeddings,
    {
      url,
      username,
      password,
      indexName: "vector",
      preDeleteCollection: true,
    }
  );
  await neo4jVectorStore.query("MATCH (n) DETACH DELETE n");
  await neo4jVectorStore.query(
    "CREATE (:Test {name:'Foo', name2:'Fooz'}), (:Test {name:'Bar', foo:'bar'})"
  );
  const existingGraph = await Neo4jVectorStore.fromExistingGraph(embeddings, {
    url,
    username,
    password,
    indexName: "vector1",
    nodeLabel: "Test",
    textNodeProperties: ["name", "name2"],
    embeddingNodeProperty: "embedding",
    searchType: "hybrid",
  });
  const output = await existingGraph.similaritySearch("foo", 2);
  const expectedResult = [
    new Document({
      pageContent: "\nname: Foo\nname2: Fooz",
      metadata: {},
    }),
    new Document({
      pageContent: "\nname: Bar\nname2: ",
      metadata: { foo: "bar" },
    }),
  ];
  expect(output).toStrictEqual(expectedResult);
  await dropVectorIndexes(neo4jVectorStore);
  await neo4jVectorStore.close();
  await existingGraph.close();
});
// Hybrid search feeds the query to a Lucene keyword index; this checks that
// special characters (e.g. "!") are escaped instead of breaking the parser.
test.skip("Test escape lucene characters", async () => {
  const url = process.env.NEO4J_URI as string;
  const username = process.env.NEO4J_USERNAME as string;
  const password = process.env.NEO4J_PASSWORD as string;
  expect(url).toBeDefined();
  expect(username).toBeDefined();
  expect(password).toBeDefined();
  const embeddings = new FakeEmbeddingsWithOsDimension();
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const metadatas: any[] = [];
  const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
    texts,
    metadatas,
    embeddings,
    {
      url,
      username,
      password,
      preDeleteCollection: true,
      searchType: "hybrid",
    }
  );
  const output = await neo4jVectorStore.similaritySearch(
    "This is the end of the world!",
    2
  );
  const expectedResult = [
    new Document({
      pageContent: "This is the end of the world!",
      metadata: {},
    }),
    new Document({
      pageContent: "baz",
      metadata: {},
    }),
  ];
  expect(output).toStrictEqual(expectedResult);
  await dropVectorIndexes(neo4jVectorStore);
  await neo4jVectorStore.close();
});
// Two stores with distinct index names / node labels must stay isolated:
// a fromExistingIndex handle should only surface its own documents.
test.skip("Test multiple index", async () => {
  const url = process.env.NEO4J_URI as string;
  const username = process.env.NEO4J_USERNAME as string;
  const password = process.env.NEO4J_PASSWORD as string;
  expect(url).toBeDefined();
  expect(username).toBeDefined();
  expect(password).toBeDefined();
  const embeddings = new FakeEmbeddingsWithOsDimension();
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const metadatas: any[] = [];
  const foo = await Neo4jVectorStore.fromTexts(
    ["foo"],
    metadatas,
    embeddings,
    {
      url,
      username,
      password,
      indexName: "Foo",
      nodeLabel: "Foo",
    }
  );
  const bar = await Neo4jVectorStore.fromTexts(
    ["bar"],
    metadatas,
    embeddings,
    {
      url,
      username,
      password,
      indexName: "Bar",
      nodeLabel: "Bar",
    }
  );
  const fooExistingIndex = await Neo4jVectorStore.fromExistingIndex(
    embeddings,
    {
      url,
      username,
      password,
      indexName: "Foo",
    }
  );
  // The "Foo" index only contains the single "foo" document.
  const fooOutput = await fooExistingIndex.similaritySearch(
    "This is the end of the world!",
    1
  );
  const fooExpectedResult = [
    new Document({
      pageContent: "foo",
      metadata: {},
    }),
  ];
  expect(fooOutput).toStrictEqual(fooExpectedResult);
  const barExistingIndex = await Neo4jVectorStore.fromExistingIndex(
    embeddings,
    {
      url,
      username,
      password,
      indexName: "Bar",
    }
  );
  // The "Bar" index only contains the single "bar" document.
  const barOutput = await barExistingIndex.similaritySearch(
    "This is the end of the world!",
    1
  );
  const barExpectedResult = [
    new Document({
      pageContent: "bar",
      metadata: {},
    }),
  ];
  expect(barOutput).toStrictEqual(barExpectedResult);
  await dropVectorIndexes(barExistingIndex);
  await foo.close();
  await bar.close();
  await barExistingIndex.close();
  await fooExistingIndex.close();
});
// Extra keys on the third similaritySearch argument are forwarded as Cypher
// parameters ($test, $test1) usable inside a custom retrievalQuery.
test.skip("Test retrievalQuery with params", async () => {
  const url = process.env.NEO4J_URI as string;
  const username = process.env.NEO4J_USERNAME as string;
  const password = process.env.NEO4J_PASSWORD as string;
  expect(url).toBeDefined();
  expect(username).toBeDefined();
  expect(password).toBeDefined();
  const embeddings = new FakeEmbeddingsWithOsDimension();
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const metadatas: any[] = [];
  const neo4jVectorStore = await Neo4jVectorStore.fromTexts(
    texts,
    metadatas,
    embeddings,
    {
      url,
      username,
      password,
      indexName: "vector",
      preDeleteCollection: true,
      retrievalQuery: "RETURN $test AS text, score, {foo:$test1} AS metadata",
    }
  );
  const output = await neo4jVectorStore.similaritySearch("foo", 2, {
    test: "test",
    test1: "test1",
  });
  // Both hits echo the parameter values, proving they reached the query.
  const expectedResult = [
    new Document({
      pageContent: "test",
      metadata: { foo: "test1" },
    }),
    new Document({
      pageContent: "test",
      metadata: { foo: "test1" },
    }),
  ];
  expect(output).toStrictEqual(expectedResult);
  await dropVectorIndexes(neo4jVectorStore);
  await neo4jVectorStore.close();
});
// Runs the shared metadata-filtering test matrix (four filter syntax types)
// against documents stored in Neo4j.
test.skip("Test metadata filters", async () => {
  const url = process.env.NEO4J_URI as string;
  const username = process.env.NEO4J_USERNAME as string;
  const password = process.env.NEO4J_PASSWORD as string;
  expect(url).toBeDefined();
  expect(username).toBeDefined();
  expect(password).toBeDefined();
  const docsearch = await Neo4jVectorStore.fromDocuments(
    DOCUMENTS,
    new FakeEmbeddings(),
    {
      url,
      username,
      password,
      indexName: "vector",
      preDeleteCollection: true,
    }
  );
  const examples = [
    ...TYPE_1_FILTERING_TEST_CASES,
    ...TYPE_2_FILTERING_TEST_CASES,
    ...TYPE_3_FILTERING_TEST_CASES,
    ...TYPE_4_FILTERING_TEST_CASES,
  ];
  for (const example of examples) {
    const { filter, expected } = example;
    const output = await docsearch.similaritySearch("Foo", 4, { filter });
    // Expected indices in the test-case tables are 1-based.
    const adjustedIndices = expected.map((index) => index - 1);
    const expectedOutput = adjustedIndices.map((index) => DOCUMENTS[index]);
    // We don't return id properties from similarity search by default
    // Also remove any key where the value is null
    // NOTE(review): the deletes below mutate the shared DOCUMENTS fixtures in
    // place, so later iterations/tests see stripped metadata — TODO confirm
    // this is intended; cloning each doc first would be safer.
    for (const doc of expectedOutput) {
      if ("id" in doc.metadata) {
        delete doc.metadata.id;
      }
      const keysWithNull = Object.keys(doc.metadata).filter(
        (key) => doc.metadata[key] === null
      );
      for (const key of keysWithNull) {
        delete doc.metadata[key];
      }
    }
    // console.log("OUTPUT:", output);
    // console.log("EXPECTED OUTPUT:", expectedOutput);
    expect(output.length).toEqual(expectedOutput.length);
    expect(output).toEqual(expect.arrayContaining(expectedOutput));
  }
});
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/astradb.int.test.ts
|
/* eslint-disable no-process-env */
import { describe, expect, test } from "@jest/globals";
import { DataAPIClient, Db } from "@datastax/astra-db-ts";
import { faker } from "@faker-js/faker";
import { Document } from "@langchain/core/documents";
import { OpenAIEmbeddings } from "@langchain/openai";
import { FakeEmbeddings } from "closevector-common/dist/fake.js";
import { AstraDBVectorStore, AstraLibArgs } from "../astradb.js";
// Integration suite for AstraDBVectorStore against a live Astra DB instance.
describe.skip("AstraDBVectorStore", () => {
  let db: Db;
  let astraConfig: AstraLibArgs;

  beforeAll(() => {
    // "dummy" fallbacks let the file be loaded without env configuration.
    const clientConfig = {
      token: process.env.ASTRA_DB_APPLICATION_TOKEN ?? "dummy",
      endpoint: process.env.ASTRA_DB_ENDPOINT ?? "dummy",
      namespace: process.env.ASTRA_DB_NAMESPACE ?? "default_keyspace",
    };
    const dataAPIClient = new DataAPIClient(clientConfig.token);
    db = dataAPIClient.db(clientConfig.endpoint);
    astraConfig = {
      ...clientConfig,
      collection: process.env.ASTRA_DB_COLLECTION ?? "langchain_test",
      collectionOptions: {
        vector: {
          // 1536 matches the OpenAI embedding size used by most tests below.
          dimension: 1536,
          metric: "cosine",
        },
      },
    };
  });

  beforeEach(async () => {
    // Drop the collection so every test starts clean; a missing collection
    // on the first run is expected and ignored.
    try {
      await db.dropCollection(astraConfig.collection);
    } catch (e) {
      // console.debug("Collection doesn't exist yet, skipping drop");
    }
  });

  // Round-trip: add documents, then retrieve the closest match with metadata.
  test("addDocuments", async () => {
    const store = new AstraDBVectorStore(new OpenAIEmbeddings(), astraConfig);
    await store.initialize();
    const pageContent: string[] = [
      faker.lorem.sentence(5),
      faker.lorem.sentence(5),
    ];
    const metadata = [{ foo: "bar" }, { foo: "baz" }];
    await store.addDocuments(
      pageContent.map(
        (content, idx) =>
          new Document({ pageContent: content, metadata: metadata[idx] })
      )
    );
    const results = await store.similaritySearch(pageContent[0], 1);
    expect(results.length).toEqual(1);
    expect(results[0].pageContent).toEqual(pageContent[0]);
    expect(results[0].metadata.foo).toEqual(metadata[0].foo);
  });

  // fromTexts seeds texts + metadata in one call.
  test("fromText", async () => {
    const store = await AstraDBVectorStore.fromTexts(
      [
        "AstraDB is built on Apache Cassandra",
        "AstraDB is a NoSQL DB",
        "AstraDB supports vector search",
      ],
      [{ id: 123 }, { id: 456 }, { id: 789 }],
      new OpenAIEmbeddings(),
      astraConfig
    );
    const results = await store.similaritySearch("Apache Cassandra", 1);
    expect(results.length).toEqual(1);
    expect(results[0].pageContent).toEqual(
      "AstraDB is built on Apache Cassandra"
    );
    expect(results[0].metadata.id).toEqual(123);
  });

  // Seed the collection, then attach a second store to the same index.
  test("fromExistingIndex", async () => {
    await AstraDBVectorStore.fromTexts(
      [
        "AstraDB is built on Apache Cassandra",
        "AstraDB is a NoSQL DB",
        "AstraDB supports vector search",
      ],
      [{ id: 123 }, { id: 456 }, { id: 789 }],
      new OpenAIEmbeddings(),
      astraConfig
    );
    const store2 = await AstraDBVectorStore.fromExistingIndex(
      new OpenAIEmbeddings(),
      astraConfig
    );
    const results = await store2.similaritySearch("Apache Cassandra", 1);
    expect(results.length).toEqual(1);
    expect(results[0].pageContent).toEqual(
      "AstraDB is built on Apache Cassandra"
    );
    expect(results[0].metadata.id).toEqual(123);
  });

  // Deleting by the Astra _id (returned in metadata) removes the document.
  test("delete", async () => {
    const store = await AstraDBVectorStore.fromTexts(
      [
        "AstraDB is built on Apache Cassandra",
        "AstraDB is a NoSQL DB",
        "AstraDB supports vector search",
      ],
      [{ id: 123 }, { id: 456 }, { id: 789 }],
      new OpenAIEmbeddings(),
      astraConfig
    );
    const results = await store.similaritySearch("Apache Cassandra", 1);
    expect(results.length).toEqual(1);
    expect(results[0].pageContent).toEqual(
      "AstraDB is built on Apache Cassandra"
    );
    expect(results[0].metadata.id).toEqual(123);
    await store.delete({ ids: [results[0].metadata._id] });
    const results2 = await store.similaritySearch("Apache Cassandra", 1);
    expect(results2[0].pageContent).not.toBe(
      "AstraDB is built on Apache Cassandra"
    );
  });

  // Re-initializing with matching options is a no-op, but re-initializing
  // with different vector options against an existing collection must fail.
  test("collection exists", async () => {
    let store = new AstraDBVectorStore(new FakeEmbeddings(), astraConfig);
    await store.initialize();
    await store.initialize();
    try {
      store = new AstraDBVectorStore(new FakeEmbeddings(), {
        ...astraConfig,
        collectionOptions: {
          vector: {
            dimension: 8,
            metric: "cosine",
          },
        },
      });
      await store.initialize();
      fail("Should have thrown error");
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
    } catch (e: any) {
      expect(e.message).toContain(
        "already exists with different collection options"
      );
    }
  }, 60000);

  // With provisioning skipped and the collection absent, queries must error
  // with a message naming the missing collection.
  test("skipCollectionProvisioning", async () => {
    let store = new AstraDBVectorStore(new FakeEmbeddings(), {
      ...astraConfig,
      skipCollectionProvisioning: true,
      collectionOptions: undefined,
    });
    await store.initialize();
    try {
      await store.similaritySearch("test");
      fail("Should have thrown error");
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
    } catch (e: any) {
      expect(e.message).toContain("langchain_test");
    }
    store = new AstraDBVectorStore(new FakeEmbeddings(), {
      ...astraConfig,
      skipCollectionProvisioning: false,
      collectionOptions: {
        checkExists: false,
        vector: {
          dimension: 4,
          metric: "cosine",
        },
      },
    });
    await store.initialize();
    await store.similaritySearch("test");
  });

  // Re-adding a document with an existing _id overwrites it in place.
  test("upsert", async () => {
    const store = new AstraDBVectorStore(new FakeEmbeddings(), {
      ...astraConfig,
      collectionOptions: {
        vector: {
          dimension: 4,
          metric: "cosine",
        },
      },
    });
    await store.initialize();
    await store.addDocuments([
      { pageContent: "Foo bar baz.", metadata: { a: 1, _id: "123456789" } },
      { pageContent: "Bar baz foo.", metadata: { a: 2, _id: "987654321" } },
      { pageContent: "Baz foo bar.", metadata: { a: 3, _id: "234567891" } },
    ]);
    await store.addDocuments([
      { pageContent: "upserted", metadata: { a: 1, _id: "123456789" } },
    ]);
    // Verify directly through the raw Astra collection client.
    const collection = await db.collection(astraConfig.collection);
    const doc = await collection.findOne({ _id: "123456789" });
    expect(doc?.text).toEqual("upserted");
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/weaviate.int.test.ts
|
/* eslint-disable no-process-env */
import { test, expect } from "@jest/globals";
import weaviate from "weaviate-ts-client";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { WeaviateStore } from "../weaviate.js";
// Basic store round-trip, a metadata "where" filter, and flattening of
// nested metadata objects into underscore-joined keys.
test("WeaviateStore", async () => {
  // Something wrong with the weaviate-ts-client types, so we need to disable
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const client = (weaviate as any).client({
    scheme:
      process.env.WEAVIATE_SCHEME ||
      (process.env.WEAVIATE_HOST ? "https" : "http"),
    host: process.env.WEAVIATE_HOST || "localhost:8080",
    apiKey: process.env.WEAVIATE_API_KEY
      ? // eslint-disable-next-line @typescript-eslint/no-explicit-any
        new (weaviate as any).ApiKey(process.env.WEAVIATE_API_KEY)
      : undefined,
  });
  const store = await WeaviateStore.fromTexts(
    ["hello world", "hi there", "how are you", "bye now"],
    [{ foo: "bar" }, { foo: "baz" }, { foo: "qux" }, { foo: "bar" }],
    new OpenAIEmbeddings(),
    {
      client,
      indexName: "Test",
      textKey: "text",
      metadataKeys: ["foo"],
    }
  );
  // Unfiltered top-1 search returns the exact-match document.
  const results = await store.similaritySearch("hello world", 1);
  expect(results).toEqual([
    new Document({ pageContent: "hello world", metadata: { foo: "bar" } }),
  ]);
  // A Weaviate "where" filter restricts results by metadata value.
  const results2 = await store.similaritySearch("hello world", 1, {
    where: {
      operator: "Equal",
      path: ["foo"],
      valueText: "baz",
    },
  });
  expect(results2).toEqual([
    new Document({ pageContent: "hi there", metadata: { foo: "baz" } }),
  ]);
  // Nested metadata is flattened: deep.string -> deep_string, etc.
  const testDocumentWithObjectMetadata = new Document({
    pageContent: "this is the deep document world!",
    metadata: {
      deep: {
        string: "deep string",
        deepdeep: {
          string: "even a deeper string",
        },
      },
    },
  });
  const documentStore = await WeaviateStore.fromDocuments(
    [testDocumentWithObjectMetadata],
    new OpenAIEmbeddings(),
    {
      client,
      indexName: "DocumentTest",
      textKey: "text",
      metadataKeys: ["deep_string", "deep_deepdeep_string"],
    }
  );
  const result3 = await documentStore.similaritySearch(
    "this is the deep document world!",
    1,
    {
      where: {
        operator: "Equal",
        path: ["deep_string"],
        valueText: "deep string",
      },
    }
  );
  expect(result3).toEqual([
    new Document({
      pageContent: "this is the deep document world!",
      metadata: {
        deep_string: "deep string",
        deep_deepdeep_string: "even a deeper string",
      },
    }),
  ]);
});
// Verifies that addDocuments with explicit ids upserts (overwrites) existing
// objects, and that delete({ ids }) removes only the listed documents.
test("WeaviateStore upsert + delete", async () => {
  // Something wrong with the weaviate-ts-client types, so we need to disable
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const client = (weaviate as any).client({
    scheme:
      process.env.WEAVIATE_SCHEME ||
      (process.env.WEAVIATE_HOST ? "https" : "http"),
    host: process.env.WEAVIATE_HOST || "localhost:8080",
    apiKey: process.env.WEAVIATE_API_KEY
      ? // eslint-disable-next-line @typescript-eslint/no-explicit-any
        new (weaviate as any).ApiKey(process.env.WEAVIATE_API_KEY)
      : undefined,
  });
  // Per-run timestamp tag keeps reruns against a shared index from colliding.
  const createdAt = new Date().getTime();
  const store = await WeaviateStore.fromDocuments(
    [
      new Document({
        pageContent: "testing",
        metadata: { deletionTest: createdAt.toString() },
      }),
    ],
    new OpenAIEmbeddings(),
    {
      client,
      indexName: "DocumentTest",
      textKey: "pageContent",
      metadataKeys: ["deletionTest"],
    }
  );
  const ids = await store.addDocuments([
    {
      pageContent: "hello world",
      metadata: { deletionTest: (createdAt + 1).toString() },
    },
    {
      pageContent: "hello world",
      metadata: { deletionTest: (createdAt + 1).toString() },
    },
  ]);
  const results = await store.similaritySearch("hello world", 4, {
    where: {
      operator: "Equal",
      path: ["deletionTest"],
      valueText: (createdAt + 1).toString(),
    },
  });
  expect(results).toEqual([
    new Document({
      pageContent: "hello world",
      metadata: { deletionTest: (createdAt + 1).toString() },
    }),
    new Document({
      pageContent: "hello world",
      metadata: { deletionTest: (createdAt + 1).toString() },
    }),
  ]);
  // Re-adding with the same ids should upsert in place and return those ids.
  const ids2 = await store.addDocuments(
    [
      {
        pageContent: "hello world upserted",
        metadata: { deletionTest: (createdAt + 1).toString() },
      },
      {
        pageContent: "hello world upserted",
        metadata: { deletionTest: (createdAt + 1).toString() },
      },
    ],
    { ids }
  );
  expect(ids2).toEqual(ids);
  const results2 = await store.similaritySearch("hello world", 4, {
    where: {
      operator: "Equal",
      path: ["deletionTest"],
      valueText: (createdAt + 1).toString(),
    },
  });
  expect(results2).toEqual([
    new Document({
      pageContent: "hello world upserted",
      metadata: { deletionTest: (createdAt + 1).toString() },
    }),
    new Document({
      pageContent: "hello world upserted",
      metadata: { deletionTest: (createdAt + 1).toString() },
    }),
  ]);
  // Deleting one of the two ids leaves a single matching document.
  await store.delete({ ids: ids.slice(0, 1) });
  const results3 = await store.similaritySearch("hello world", 1, {
    where: {
      operator: "Equal",
      path: ["deletionTest"],
      valueText: (createdAt + 1).toString(),
    },
  });
  expect(results3).toEqual([
    new Document({
      pageContent: "hello world upserted",
      metadata: { deletionTest: (createdAt + 1).toString() },
    }),
  ]);
});
// delete({ filter }) must remove every object matching the "where" clause,
// so a subsequent filtered search returns no results.
test("WeaviateStore delete with filter", async () => {
  // Resolve connection settings up front so the client config reads linearly.
  const scheme =
    process.env.WEAVIATE_SCHEME ||
    (process.env.WEAVIATE_HOST ? "https" : "http");
  const host = process.env.WEAVIATE_HOST || "localhost:8080";
  const weaviateApiKey = process.env.WEAVIATE_API_KEY;
  // Something wrong with the weaviate-ts-client types, so we need to disable
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const client = (weaviate as any).client({
    scheme,
    host,
    apiKey: weaviateApiKey
      ? // eslint-disable-next-line @typescript-eslint/no-explicit-any
        new (weaviate as any).ApiKey(weaviateApiKey)
      : undefined,
  });
  // Seed the index with four texts; two of them share foo === "bar".
  const seededStore = await WeaviateStore.fromTexts(
    ["hello world", "hi there", "how are you", "bye now"],
    [{ foo: "bar" }, { foo: "baz" }, { foo: "qux" }, { foo: "bar" }],
    new OpenAIEmbeddings(),
    {
      client,
      indexName: "FilterDeletionTest",
      textKey: "text",
      metadataKeys: ["foo"],
    }
  );
  // Sanity check: the closest match before deletion is a "bar" document.
  const beforeDeletion = await seededStore.similaritySearch("hello world", 1);
  expect(beforeDeletion).toEqual([
    new Document({ pageContent: "hello world", metadata: { foo: "bar" } }),
  ]);
  // Remove every document whose foo metadata equals "bar".
  await seededStore.delete({
    filter: {
      where: {
        operator: "Equal",
        path: ["foo"],
        valueText: "bar",
      },
    },
  });
  // The same filtered search must now come back empty.
  const afterDeletion = await seededStore.similaritySearch("hello world", 1, {
    where: {
      operator: "Equal",
      path: ["foo"],
      valueText: "bar",
    },
  });
  expect(afterDeletion).toEqual([]);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/rockset.int.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import rockset from "@rockset/client";
import { test, expect } from "@jest/globals";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Document } from "@langchain/core/documents";
import { RocksetStore, SimilarityMetric } from "../rockset.js";
/** Collect the pageContent string of each document, preserving input order. */
const getPageContents = (documents: Document[]): string[] => {
  const contents: string[] = [];
  for (const doc of documents) {
    contents.push(doc.pageContent);
  }
  return contents;
};
// Shared embeddings instance for all Rockset tests below.
const embeddings = new OpenAIEmbeddings();
// Populated by the collection-creation test and reused by the query tests;
// undefined until that test has run.
let store: RocksetStore | undefined;
// Fixture documents: two about tomatoes, one about apples (for filter tests).
const docs = [
  new Document({
    pageContent: "Tomatoes are red.",
    metadata: { subject: "tomatoes" },
  }),
  new Document({
    pageContent: "Tomatoes are small.",
    metadata: { subject: "tomatoes" },
  }),
  new Document({
    pageContent: "Apples are juicy.",
    metadata: { subject: "apples" },
  }),
];
// Creates the backing Rockset collection and stashes the store for later
// tests — these tests are order-dependent and share module-level state.
test.skip("create new collection as a RocksetVectorStore", async () => {
  store = await RocksetStore.withNewCollection(embeddings, {
    collectionName: "langchain_demo",
    client: rockset.default(
      process.env.ROCKSET_API_KEY ?? "",
      `https://api.${process.env.ROCKSET_API_REGION ?? "usw2a1"}.rockset.com`
    ),
  });
});

test.skip("add to RocksetVectorStore", async () => {
  expect(store).toBeDefined();
  expect((await store!.addDocuments(docs))?.length).toBe(docs.length);
});

// NOTE(review): the next three tests compare getPageContents(relevantDocs)
// against itself — a tautology that can never fail. They only verify the
// query completes without throwing; consider asserting against `docs`.
test.skip("query RocksetVectorStore with cosine sim", async () => {
  expect(store).toBeDefined();
  const relevantDocs = await store!.similaritySearch(
    "What color are tomatoes?"
  );
  expect(getPageContents(relevantDocs)).toEqual(getPageContents(relevantDocs));
});

test.skip("query RocksetVectorStore with dot product", async () => {
  expect(store).toBeDefined();
  store!.similarityMetric = SimilarityMetric.DotProduct;
  const relevantDocs = await store!.similaritySearch(
    "What color are tomatoes?"
  );
  expect(getPageContents(relevantDocs)).toEqual(getPageContents(relevantDocs));
});

test.skip("query RocksetVectorStore with euclidean distance", async () => {
  expect(store).toBeDefined();
  store!.similarityMetric = SimilarityMetric.EuclideanDistance;
  const relevantDocs = await store!.similaritySearch(
    "What color are tomatoes?"
  );
  expect(getPageContents(relevantDocs)).toEqual(getPageContents(relevantDocs));
});

// The third argument is a raw SQL-style predicate applied to metadata.
test.skip("query RocksetVectorStore with metadata filter", async () => {
  expect(store).toBeDefined();
  const relevantDocs = await store!.similaritySearch(
    "What color are tomatoes?",
    undefined,
    "subject='apples'"
  );
  expect(relevantDocs.length).toBe(1);
  expect(getPageContents(relevantDocs)).toEqual(getPageContents([docs[2]]));
});

test.skip("query RocksetVectorStore with k", async () => {
  expect(store).toBeDefined();
  const relevantDocs = await store!.similaritySearch(
    "What color are tomatoes?",
    1
  );
  expect(relevantDocs.length).toBe(1);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/turbopuffer.int.test.ts
|
import { test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { OpenAIEmbeddings } from "@langchain/openai";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import { TurbopufferVectorStore } from "../turbopuffer.js";
// Reset the shared test namespace before every test so runs are independent.
beforeEach(async () => {
  const embeddings = new OpenAIEmbeddings();
  const store = new TurbopufferVectorStore(embeddings, {
    apiKey: getEnvironmentVariable("TURBOPUFFER_API_KEY"),
    namespace: "langchain-js-testing",
  });
  // deleteIndex drops the whole namespace, not individual documents.
  await store.delete({
    deleteIndex: true,
  });
});
// Round-trips documents and checks top-1 retrieval of an exact-match query.
test("similaritySearchVectorWithScore", async () => {
  const embeddings = new OpenAIEmbeddings();
  const store = new TurbopufferVectorStore(embeddings, {
    apiKey: getEnvironmentVariable("TURBOPUFFER_API_KEY"),
    namespace: "langchain-js-testing",
  });
  expect(store).toBeDefined();
  // Timestamp string makes the seeded content unique per run.
  const createdAt = new Date().toString();
  await store.addDocuments([
    { pageContent: createdAt.toString(), metadata: { a: createdAt } },
    { pageContent: "hi", metadata: { a: createdAt } },
    { pageContent: "bye", metadata: { a: createdAt } },
    { pageContent: "what's this", metadata: { a: createdAt } },
  ]);
  // console.log("added docs");
  const results = await store.similaritySearch(createdAt.toString(), 1);
  expect(results).toHaveLength(1);
  expect(results).toEqual([
    new Document({
      metadata: { a: createdAt },
      pageContent: createdAt.toString(),
    }),
  ]);
});

// Turbopuffer filters are expressed as [["Eq", value]] tuples keyed by
// metadata field name.
test("similaritySearch with a passed filter", async () => {
  const embeddings = new OpenAIEmbeddings();
  const store = new TurbopufferVectorStore(embeddings, {
    apiKey: getEnvironmentVariable("TURBOPUFFER_API_KEY"),
    namespace: "langchain-js-testing",
  });
  expect(store).toBeDefined();
  const createdAt = new Date().getTime();
  await store.addDocuments([
    { pageContent: "hello 0", metadata: { created_at: createdAt.toString() } },
    {
      pageContent: "hello 1",
      metadata: { created_at: (createdAt + 1).toString() },
    },
    {
      pageContent: "hello 2",
      metadata: { created_at: (createdAt + 2).toString() },
    },
    {
      pageContent: "hello 3",
      metadata: { created_at: (createdAt + 3).toString() },
    },
  ]);
  // Only the document tagged createdAt + 2 should satisfy the filter.
  const results = await store.similaritySearch("hello", 1, {
    created_at: [["Eq", (createdAt + 2).toString()]],
  });
  expect(results).toHaveLength(1);
  expect(results).toEqual([
    new Document({
      metadata: { created_at: (createdAt + 2).toString() },
      pageContent: "hello 2",
    }),
  ]);
});

// Non-string metadata values are dropped on write: the filtered search finds
// nothing, and the stored doc comes back with created_at nulled out.
test("Should drop metadata keys from docs with non-string metadata", async () => {
  const embeddings = new OpenAIEmbeddings();
  const store = new TurbopufferVectorStore(embeddings, {
    apiKey: getEnvironmentVariable("TURBOPUFFER_API_KEY"),
    namespace: "langchain-js-testing",
  });
  expect(store).toBeDefined();
  const createdAt = new Date().getTime();
  await store.addDocuments([
    {
      pageContent: "hello 0",
      // Object-valued metadata — expected to be stripped by the store.
      metadata: { created_at: { time: createdAt.toString() } },
    },
    {
      pageContent: "goodbye",
      metadata: { created_at: (createdAt + 1).toString() },
    },
  ]);
  const results = await store.similaritySearch("hello", 1, {
    created_at: [["Eq", createdAt.toString()]],
  });
  expect(results).toHaveLength(0);
  const results2 = await store.similaritySearch("hello", 1);
  expect(results2).toEqual([
    new Document({
      metadata: {
        created_at: null,
      },
      pageContent: "hello 0",
    }),
  ]);
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/hanavector.test.ts
|
import { expect } from "@jest/globals";
import { HanaDB } from "../hanavector.js";
// Unit tests for HanaDB's static input-sanitization helpers.
describe("Sanity check tests", () => {
  it("should sanitize int with illegal value", () => {
    // expect(...).toThrow asserts the error is actually raised; the previous
    // try/catch form passed silently when sanitizeInt did not throw.
    expect(() => HanaDB.sanitizeInt("HUGO")).toThrow(
      "must not be smaller than 0"
    );
  });

  it("should sanitize int with legal values", () => {
    // Both numbers and numeric strings are accepted and normalized.
    expect(HanaDB.sanitizeInt(42)).toBe(42);
    expect(HanaDB.sanitizeInt("21")).toBe(21);
  });

  it("should sanitize int with negative values", () => {
    // The second argument is the lower bound; -1 is legal when the bound is -1.
    expect(HanaDB.sanitizeInt(-1, -1)).toBe(-1);
    expect(HanaDB.sanitizeInt("-1", -1)).toBe(-1);
  });

  it("should sanitize int with illegal negative value", () => {
    // Values below the explicit lower bound must be rejected (see note above
    // about the silent-pass defect in the original try/catch version).
    expect(() => HanaDB.sanitizeInt(-2, -1)).toThrow(
      "must not be smaller than -1"
    );
  });

  it("should parse float array from string", () => {
    const arrayAsString = "[0.1, 0.2, 0.3]";
    expect(HanaDB.parseFloatArrayFromString(arrayAsString)).toEqual([
      0.1, 0.2, 0.3,
    ]);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/faiss.test.ts
|
import { test, expect } from "@jest/globals";
import { Document } from "@langchain/core/documents";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { FaissStore } from "../faiss.js";
// fromTexts seeds one embedded text; addVectors appends raw 4-dim vectors
// paired with documents. Results come back ordered by similarity to the
// query vector.
test("Test FaissStore.fromTexts + addVectors", async () => {
  const vectorStore = await FaissStore.fromTexts(
    ["Hello world"],
    [{ id: 2 }],
    new FakeEmbeddings()
  );
  expect(vectorStore.index?.ntotal()).toBe(1);
  await vectorStore.addVectors(
    [
      [0, 1, 0, 0],
      [1, 0, 0, 0],
      [0.5, 0.5, 0.5, 0.5],
    ],
    [
      new Document({
        pageContent: "hello bye",
        metadata: { id: 5 },
      }),
      new Document({
        pageContent: "hello worlddwkldnsk",
        metadata: { id: 4 },
      }),
      new Document({
        pageContent: "hello you",
        metadata: { id: 6 },
      }),
    ]
  );
  // 1 seeded + 3 appended vectors.
  expect(vectorStore.index?.ntotal()).toBe(4);
  const resultTwo = await vectorStore.similaritySearchVectorWithScore(
    [1, 0, 0, 0],
    3
  );
  const resultTwoMetadatas = resultTwo.map(([{ metadata }]) => metadata);
  // id 4 is the exact vector match; ids 6 and 2 follow in similarity order.
  expect(resultTwoMetadatas).toEqual([{ id: 4 }, { id: 6 }, { id: 2 }]);
});

// Same flow starting from documents. Scores here are 0 for the exact vector
// match and 1 for a vector differing by one unit component (squared L2).
test("Test FaissStore.fromDocuments + addVectors", async () => {
  const vectorStore = await FaissStore.fromDocuments(
    [
      new Document({
        pageContent: "hello bye",
        metadata: { id: 5 },
      }),
      new Document({
        pageContent: "hello world",
        metadata: { id: 4 },
      }),
      new Document({
        pageContent: "hello you",
        metadata: { id: 6 },
      }),
    ],
    new FakeEmbeddings()
  );
  expect(vectorStore.index?.ntotal()).toBe(3);
  await vectorStore.addVectors(
    [
      [1, 0, 0, 0],
      [1, 0, 0, 1],
    ],
    [
      new Document({
        pageContent: "my world",
        metadata: { id: 7 },
      }),
      new Document({
        pageContent: "our world",
        metadata: { id: 8 },
      }),
    ]
  );
  expect(vectorStore.index?.ntotal()).toBe(5);
  const results = await vectorStore.similaritySearchVectorWithScore(
    [1, 0, 0, 0],
    2
  );
  expect(results).toHaveLength(2);
  expect(results).toEqual([
    [new Document({ metadata: { id: 7 }, pageContent: "my world" }), 0],
    [new Document({ metadata: { id: 8 }, pageContent: "our world" }), 1],
  ]);
});
test("Test FaissStore.fromIndex + mergeFrom", async () => {
  // Build a source store with one embedded document plus two raw vectors.
  const sourceStore = await FaissStore.fromDocuments(
    [new Document({ pageContent: "hello world", metadata: { id: 1 } })],
    new FakeEmbeddings()
  );
  await sourceStore.addVectors(
    [
      [1, 0, 0, 0],
      [1, 0, 0, 1],
    ],
    [
      new Document({ pageContent: "my world", metadata: { id: 1 } }),
      new Document({ pageContent: "our world", metadata: { id: 2 } }),
    ]
  );
  expect(sourceStore.index?.ntotal()).toBe(3);
  // Merge the source store into a freshly created target store.
  const targetStore = await FaissStore.fromDocuments(
    [new Document({ pageContent: "hello world", metadata: { id: 3 } })],
    new FakeEmbeddings()
  );
  await targetStore.mergeFrom(sourceStore);
  expect(targetStore.index?.ntotal()).toBe(4);
  const mergedResults = await targetStore.similaritySearchVectorWithScore(
    [1, 0, 0, 0],
    2
  );
  expect(mergedResults).toHaveLength(2);
  expect(mergedResults).toEqual([
    [new Document({ metadata: { id: 1 }, pageContent: "my world" }), 0],
    [new Document({ metadata: { id: 2 }, pageContent: "our world" }), 1],
  ]);
  // A store constructed from an existing index must return identical results.
  const clonedStore = await FaissStore.fromIndex(
    targetStore,
    new FakeEmbeddings()
  );
  const clonedResults = await clonedStore.similaritySearchVectorWithScore(
    [1, 0, 0, 0],
    2
  );
  expect(clonedResults).toHaveLength(2);
  expect(clonedResults).toEqual([
    [new Document({ metadata: { id: 1 }, pageContent: "my world" }), 0],
    [new Document({ metadata: { id: 2 }, pageContent: "our world" }), 1],
  ]);
});
test("Test FaissStore.addDocuments", async () => {
  const store = new FaissStore(new FakeEmbeddings(), {});
  // Without explicit ids the store generates one per document.
  const generatedIds = await store.addDocuments([
    { pageContent: "bar", metadata: { id: 4, name: "4" } },
    { pageContent: "baz", metadata: { id: 5, name: "5" } },
  ]);
  expect(generatedIds.length).toEqual(2);
  // Explicit ids are echoed back verbatim; after this call the store holds
  // four entries in the index, the id mapping, and the docstore alike.
  const explicitIds = ["2", "1", "4"];
  const returnedIds = await store.addDocuments(
    [
      { pageContent: "bar", metadata: { id: 4, name: "4" } },
      { pageContent: "baz", metadata: { id: 5, name: "5" } },
    ],
    { ids: explicitIds }
  );
  expect(returnedIds).toStrictEqual(explicitIds);
  expect(store.index?.ntotal()).toBe(4);
  expect(Object.keys(store._mapping).length).toBe(4);
  expect(store.docstore._docs.size).toBe(4);
});
test("Test FaissStore.delete", async () => {
  const store = new FaissStore(new FakeEmbeddings(), {});
  const assignedIds = ["2", "1", "4"];
  const returnedIds = await store.addVectors(
    [
      [1, 0, 0, 0],
      [1, 0, 0, 1],
      [1, 1, 0, 1],
    ],
    [
      new Document({ pageContent: "my world", metadata: { tag: 2 } }),
      new Document({ pageContent: "our world", metadata: { tag: 1 } }),
      new Document({ pageContent: "your world", metadata: { tag: 4 } }),
    ],
    { ids: assignedIds }
  );
  expect(returnedIds).toStrictEqual(assignedIds);
  expect(store.index?.ntotal()).toBe(3);
  expect(Object.keys(store._mapping).length).toBe(3);
  expect(store.docstore._docs.size).toBe(3);
  // The exact-match vector is the closest hit before any deletion.
  const [[closest]] = await store.similaritySearchVectorWithScore(
    [1, 1, 0, 1],
    1
  );
  expect(closest.metadata.tag).toEqual(4);
  // Delete that best match; the runner-up becomes the closest hit and all
  // three internal structures shrink in lockstep.
  await store.delete({ ids: assignedIds.slice(2) });
  expect(store.index?.ntotal()).toBe(2);
  expect(Object.keys(store._mapping).length).toBe(2);
  expect(store.docstore._docs.size).toBe(2);
  const [[nextClosest]] = await store.similaritySearchVectorWithScore(
    [1, 1, 0, 1],
    1
  );
  expect(nextClosest.metadata.tag).toEqual(1);
  // Adding vectors after a delete must still work and produce fresh ids.
  const freshIds = await store.addVectors(
    [
      [1, 0, 0, 0],
      [1, 1, 0, 1],
    ],
    [
      new Document({ pageContent: "my world 1", metadata: { tag: 7 } }),
      new Document({ pageContent: "our world 2", metadata: { tag: 8 } }),
    ]
  );
  expect(freshIds.length).toStrictEqual(2);
  const [[afterAdd]] = await store.similaritySearchVectorWithScore(
    [1, 1, 0, 1],
    1
  );
  expect(afterAdd.metadata.tag).toEqual(8);
  // Deleting an unrelated id must not disturb the remaining best match.
  await store.delete({ ids: [freshIds[0]] });
  const [[afterDelete]] = await store.similaritySearchVectorWithScore(
    [1, 1, 0, 1],
    1
  );
  expect(afterDelete.metadata.tag).toEqual(8);
});
test("Test FaissStore Exceptions", async () => {
  const store = new FaissStore(new FakeEmbeddings(), {});
  // Accessing the index before any vectors exist must throw.
  expect(() => store.index).toThrow(
    "Vector store not initialised yet. Try calling `fromTexts`, `fromDocuments` or `fromIndex` first."
  );
  // Seed a 2-dimensional index.
  await store.addVectors(
    [[1, 1]],
    [new Document({ pageContent: "our world", metadata: { id: 8 } })]
  );
  // Mismatched vector/document counts are rejected.
  await expect(async () => {
    await store.addVectors(
      [
        [1, 1],
        [1, 2],
      ],
      [new Document({ pageContent: "our world", metadata: { id: 8 } })]
    );
  }).rejects.toThrow("Vectors and documents must have the same length");
  // Vectors of the wrong dimensionality are rejected.
  await expect(async () => {
    await store.addVectors(
      [[1, 1, 1]],
      [new Document({ pageContent: "our world", metadata: { id: 8 } })]
    );
  }).rejects.toThrow(
    "Vectors must have the same length as the number of dimensions (2)"
  );
  // Query vectors of the wrong dimensionality are rejected too.
  await expect(async () => {
    await store.similaritySearchVectorWithScore([1, 1, 1], 1);
  }).rejects.toThrow(
    "Query vector must have the same length as the number of dimensions (2)"
  );
  // Stores whose indexes have different dimensions cannot be merged.
  const threeDimStore = new FaissStore(new FakeEmbeddings(), {});
  await threeDimStore.addVectors(
    [[1, 1, 1]],
    [
      new Document({
        pageContent: "different dimensions",
        metadata: { id: 9 },
      }),
    ]
  );
  await expect(async () => {
    await threeDimStore.mergeFrom(store);
  }).rejects.toThrow("Cannot merge indexes with different dimensions.");
  // Loading from a nonexistent path surfaces the filesystem error.
  await expect(async () => {
    await FaissStore.load("_fake_path", new FakeEmbeddings());
  }).rejects.toThrow(/No such file or directory$/);
  // delete() validates its arguments before touching the index.
  const emptyStore = new FaissStore(new FakeEmbeddings(), {});
  await expect(async () => {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    await emptyStore.delete({ ids: null as any });
  }).rejects.toThrow("No documentIds provided to delete.");
  await expect(async () => {
    await emptyStore.delete({ ids: ["123"] });
  }).rejects.toThrow(
    "Some specified documentIds do not exist in the current store. DocumentIds not found: 123"
  );
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/pgvector/docker-compose.yml
|
# Run this command to start the database:
# docker-compose up --build
version: "3"
services:
  db:
    # NOTE(review): `hostname` sets the container's hostname, not the bind
    # address; the tests reach the DB via the published port on 127.0.0.1.
    hostname: 127.0.0.1
    # Postgres image with the pgvector extension preinstalled.
    image: ankane/pgvector
    ports:
      - 5432:5432
    restart: always
    environment:
      # Credentials must match postgresConnectionOptions in pgvector.int.test.ts.
      - POSTGRES_DB=api
      - POSTGRES_USER=myuser
      - POSTGRES_PASSWORD=ChangeMe
    volumes:
      # Seed script executed by the image on first container start.
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/pgvector/pgvector.int.test.ts
|
import { expect, test } from "@jest/globals";
import pg, { PoolConfig } from "pg";
import { OpenAIEmbeddings } from "@langchain/openai";
import { PGVectorStore, PGVectorStoreArgs } from "../../pgvector.js";
// import { BedrockEmbeddings } from "../../../embeddings/bedrock.js";
// Embeddings engine shared by every PGVectorStore suite below. Swap in
// BedrockEmbeddings (see commented import above) to exercise other providers.
const embeddingsEngine = new OpenAIEmbeddings();
// const embeddingsEngine = new BedrockEmbeddings({
//   region: "us-east-1",
// });
// Connection options matching the docker-compose pgvector setup.
// Declared with an explicit type instead of an `as PoolConfig` assertion so the
// compiler validates every property; the previous `type: "postgres"` field was
// not a pg PoolConfig property (a TypeORM leftover ignored by pg) and the
// assertion was hiding that excess property.
const postgresConnectionOptions: PoolConfig = {
  host: "127.0.0.1",
  port: 5432,
  user: "myuser",
  password: "ChangeMe",
  database: "api",
};
// Integration suite for PGVectorStore against the docker-compose pgvector
// database, using a plain table (no collection support).
describe("PGVectorStore", () => {
  let pgvectorVectorStore: PGVectorStore;
  const tableName = "testlangchain";
  // Initialize the store once; each test shares the same pool/table config.
  beforeAll(async () => {
    const config: PGVectorStoreArgs = {
      postgresConnectionOptions,
      tableName: "testlangchain",
      // collectionTableName: "langchain_pg_collection",
      // collectionName: "langchain",
      columns: {
        idColumnName: "id",
        vectorColumnName: "vector",
        contentColumnName: "content",
        metadataColumnName: "metadata",
      },
    };
    pgvectorVectorStore = await PGVectorStore.initialize(
      embeddingsEngine,
      config
    );
  });
  afterEach(async () => {
    // Drop table, then recreate it for the next test.
    await pgvectorVectorStore.pool.query(`DROP TABLE "${tableName}"`);
    await pgvectorVectorStore.ensureTableInDatabase();
  });
  afterAll(async () => {
    await pgvectorVectorStore.end();
  });
  // Documents round-trip through embedding + metadata filtering.
  test("Test embeddings creation", async () => {
    const documents = [
      {
        pageContent: "hello",
        metadata: { a: 1 },
      },
      {
        pageContent: "Cat drinks milk",
        metadata: { a: 2 },
      },
      { pageContent: "hi", metadata: { a: 1 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const results = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: 2,
    });
    expect(results).toHaveLength(1);
    expect(results[0].pageContent).toEqual("Cat drinks milk");
  });
  test("PGvector can save documents with a list greater than default chunk size", async () => {
    // Extract the default chunk size and add one.
    const docsToGenerate = pgvectorVectorStore.chunkSize + 1;
    const documents = [];
    for (let i = 1; i <= docsToGenerate; i += 1) {
      documents.push({ pageContent: "Lorem Ipsum", metadata: { a: i } });
    }
    await pgvectorVectorStore.addDocuments(documents);
    // Query the table to check the number of rows
    const result = await pgvectorVectorStore.pool.query(
      `SELECT COUNT(*) FROM "${tableName}"`
    );
    const rowCount = parseInt(result.rows[0].count, 10);
    // Check if the number of rows is equal to the number of documents added
    expect(rowCount).toEqual(docsToGenerate);
  });
  // Caller-supplied UUIDs are used as primary keys verbatim.
  test("PGvector can save documents with ids", async () => {
    const id1 = "d8e70e98-19ab-4438-9c14-4bb2bb21a1f9";
    const id2 = "2bbb4b73-efec-4d5e-80ea-df94a4ed3aa3";
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2 } },
    ];
    await pgvectorVectorStore.addDocuments(documents, { ids: [id1, id2] });
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}" WHERE id = $1`,
      [id1]
    );
    expect(result.rowCount).toEqual(1);
  });
  // Metadata filters: `in` operator, exact match, and no filter at all.
  test("PGvector supports different filter types", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 100 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 200 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 300 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: {
        in: [100, 300],
      },
    });
    expect(result.length).toEqual(2);
    expect(result).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: 100 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 300 } },
    ]);
    const result2 = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: 200,
    });
    expect(result2.length).toEqual(1);
    expect(result2).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: 200 } },
    ]);
    const result3 = await pgvectorVectorStore.similaritySearch("hello", 3);
    expect(result3.length).toEqual(3);
  });
  // `arrayContains` filter: matches rows whose metadata array shares a tag.
  test("PGvector supports arrayContains (?|) in metadata filter ", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: ["tag1", "tag2"] } },
      { pageContent: "Lorem Ipsum", metadata: { a: ["tag2"] } },
      { pageContent: "Lorem Ipsum", metadata: { a: ["tag1"] } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: {
        arrayContains: ["tag1"],
      },
    });
    expect(result.length).toEqual(2);
    expect(result).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: ["tag1", "tag2"] } },
      { pageContent: "Lorem Ipsum", metadata: { a: ["tag1"] } },
    ]);
    const result2 = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: {
        arrayContains: ["tag2"],
      },
    });
    expect(result2.length).toEqual(2);
    expect(result2).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: ["tag1", "tag2"] } },
      { pageContent: "Lorem Ipsum", metadata: { a: ["tag2"] } },
    ]);
    const result3 = await pgvectorVectorStore.similaritySearch("hello", 3);
    expect(result3.length).toEqual(3);
    expect(result3).toEqual(documents);
  });
  test("PGvector can delete document by id", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 3 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    const initialIds = result.rows.map((row) => row.id);
    const firstIdToDelete = initialIds[0];
    const secondIdToDelete = initialIds[1];
    const idToKeep = initialIds[2];
    await pgvectorVectorStore.delete({
      ids: [firstIdToDelete, secondIdToDelete],
    });
    const result2 = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    // Only one row should be left
    expect(result2.rowCount).toEqual(1);
    // The deleted ids should not be in the result
    const idsAfterDelete = result2.rows.map((row) => row.id);
    expect(idsAfterDelete).not.toContain(firstIdToDelete);
    expect(idsAfterDelete).not.toContain(secondIdToDelete);
    expect(idsAfterDelete).toContain(idToKeep);
  });
  test("PGvector can delete document by metadata", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1, b: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2, b: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 1, c: 1 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    const initialIds = result.rows.map((row) => row.id);
    // Filter Matches 1st document
    await pgvectorVectorStore.delete({ filter: { a: 1, b: 1 } });
    const result2 = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    // Two rows should be left
    expect(result2.rowCount).toEqual(2);
    const idsAfterDelete = result2.rows.map((row) => row.id);
    // The document with matching metadata should not be in the database
    expect(idsAfterDelete).not.toContainEqual(initialIds[0]);
    // All other documents should still be in database
    expect(idsAfterDelete).toContainEqual(initialIds[1]);
    expect(idsAfterDelete).toContainEqual(initialIds[2]);
  });
  // Skipped: mutates shared store state (distanceStrategy/extensionSchemaName),
  // which would leak into other tests in this suite.
  test.skip("PGvector supports different vector types", async () => {
    // verify by asserting different pgvector operators based on vector type
    pgvectorVectorStore.distanceStrategy = "cosine";
    expect(pgvectorVectorStore.computedOperatorString).toEqual("<=>");
    pgvectorVectorStore.distanceStrategy = "innerProduct";
    expect(pgvectorVectorStore.computedOperatorString).toEqual("<#>");
    pgvectorVectorStore.distanceStrategy = "euclidean";
    expect(pgvectorVectorStore.computedOperatorString).toEqual("<->");
    // verify with extensionSchemaName
    pgvectorVectorStore.distanceStrategy = "cosine";
    pgvectorVectorStore.extensionSchemaName = "schema1";
    expect(pgvectorVectorStore.computedOperatorString).toEqual(
      "OPERATOR(schema1.<=>)"
    );
    pgvectorVectorStore.distanceStrategy = "innerProduct";
    pgvectorVectorStore.extensionSchemaName = "schema2";
    expect(pgvectorVectorStore.computedOperatorString).toEqual(
      "OPERATOR(schema2.<#>)"
    );
    pgvectorVectorStore.distanceStrategy = "euclidean";
    pgvectorVectorStore.extensionSchemaName = "schema3";
    expect(pgvectorVectorStore.computedOperatorString).toEqual(
      "OPERATOR(schema3.<->)"
    );
  });
});
// Same behaviors as the base suite, but documents are scoped to a named
// collection stored in a separate collection table.
describe("PGVectorStore with collection", () => {
  let pgvectorVectorStore: PGVectorStore;
  const tableName = "testlangchain_collection";
  const collectionTableName = "langchain_pg_collection";
  beforeAll(async () => {
    const config = {
      postgresConnectionOptions,
      tableName,
      collectionTableName,
      collectionName: "langchain",
      columns: {
        idColumnName: "id",
        vectorColumnName: "vector",
        contentColumnName: "content",
        metadataColumnName: "metadata",
      },
    };
    pgvectorVectorStore = await PGVectorStore.initialize(
      embeddingsEngine,
      config
    );
  });
  afterEach(async () => {
    // Drop table, then recreate it for the next test.
    await pgvectorVectorStore.pool.query(`DROP TABLE "${tableName}"`);
    await pgvectorVectorStore.pool.query(
      `DROP TABLE ${pgvectorVectorStore.computedCollectionTableName}`
    );
    await pgvectorVectorStore.ensureTableInDatabase();
    await pgvectorVectorStore.ensureCollectionTableInDatabase();
  });
  afterAll(async () => {
    await pgvectorVectorStore.end();
  });
  // The collection table gets an index on its 'name' column at creation time.
  test("'name' column is indexed", async () => {
    const result = await pgvectorVectorStore.pool.query(
      `SELECT * FROM pg_indexes WHERE tablename = '${pgvectorVectorStore.computedCollectionTableName}'`
    );
    const expectedIndexName = `idx_${pgvectorVectorStore.computedCollectionTableName}_name`;
    const index = result.rows.find(
      (row) => row.indexname === expectedIndexName
    );
    expect(index).toBeDefined();
  });
  test("Test embeddings creation", async () => {
    const documents = [
      {
        pageContent: "hello",
        metadata: { a: 1 },
      },
      {
        pageContent: "Cat drinks milk",
        metadata: { a: 2 },
      },
      { pageContent: "hi", metadata: { a: 1 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const results = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: 2,
    });
    expect(results).toHaveLength(1);
    expect(results[0].pageContent).toEqual("Cat drinks milk");
  });
  test("PGvector can save documents with a list greater than default chunk size", async () => {
    // Extract the default chunk size and add one.
    const docsToGenerate = pgvectorVectorStore.chunkSize + 1;
    const documents = [];
    for (let i = 1; i <= docsToGenerate; i += 1) {
      documents.push({ pageContent: "Lorem Ipsum", metadata: { a: i } });
    }
    await pgvectorVectorStore.addDocuments(documents);
    // Query the table to check the number of rows
    const result = await pgvectorVectorStore.pool.query(
      `SELECT COUNT(*) FROM "${tableName}"`
    );
    const rowCount = parseInt(result.rows[0].count, 10);
    // Check if the number of rows is equal to the number of documents added
    expect(rowCount).toEqual(docsToGenerate);
  });
  test("PGvector can save documents with ids", async () => {
    const id1 = "d8e70e98-19ab-4438-9c14-4bb2bb21a1f9";
    const id2 = "2bbb4b73-efec-4d5e-80ea-df94a4ed3aa3";
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2 } },
    ];
    await pgvectorVectorStore.addDocuments(documents, { ids: [id1, id2] });
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}" WHERE id = $1`,
      [id1]
    );
    expect(result.rowCount).toEqual(1);
  });
  test("PGvector supports different filter types", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 100 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 200 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 300 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: {
        in: [100, 300],
      },
    });
    expect(result.length).toEqual(2);
    expect(result).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: 100 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 300 } },
    ]);
    const result2 = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: 200,
    });
    expect(result2.length).toEqual(1);
    expect(result2).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: 200 } },
    ]);
    const result3 = await pgvectorVectorStore.similaritySearch("hello", 3);
    expect(result3.length).toEqual(3);
  });
  test("PGvector can delete document by id", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 3 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    const initialIds = result.rows.map((row) => row.id);
    const firstIdToDelete = initialIds[0];
    const secondIdToDelete = initialIds[1];
    const idToKeep = initialIds[2];
    await pgvectorVectorStore.delete({
      ids: [firstIdToDelete, secondIdToDelete],
    });
    const result2 = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    // Only one row should be left
    expect(result2.rowCount).toEqual(1);
    // The deleted ids should not be in the result
    const idsAfterDelete = result2.rows.map((row) => row.id);
    expect(idsAfterDelete).not.toContain(firstIdToDelete);
    expect(idsAfterDelete).not.toContain(secondIdToDelete);
    expect(idsAfterDelete).toContain(idToKeep);
  });
  test("PGvector can delete document by metadata", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1, b: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2, b: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 1, c: 1 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    const initialIds = result.rows.map((row) => row.id);
    // Filter Matches 1st document
    await pgvectorVectorStore.delete({ filter: { a: 1, b: 1 } });
    const result2 = await pgvectorVectorStore.pool.query(
      `SELECT id FROM "${tableName}"`
    );
    // Two rows should be left
    expect(result2.rowCount).toEqual(2);
    const idsAfterDelete = result2.rows.map((row) => row.id);
    // The document with matching metadata should not be in the database
    expect(idsAfterDelete).not.toContainEqual(initialIds[0]);
    // All other documents should still be in database
    expect(idsAfterDelete).toContainEqual(initialIds[1]);
    expect(idsAfterDelete).toContainEqual(initialIds[2]);
  });
});
// Same behaviors again, but all tables live in a dedicated Postgres schema and
// queries use the schema-qualified (computed) table names.
describe("PGVectorStore with schema", () => {
  let pgvectorVectorStore: PGVectorStore;
  const tableName = "testlangchain_schema";
  const schema = "test_schema";
  const collectionTableName = "langchain_pg_collection_schema";
  let computedTableName: string;
  let computedCollectionTableName: string;
  let pool: pg.Pool;
  beforeAll(async () => {
    // This suite supplies its own pool so it can create/drop the schema.
    pool = new pg.Pool(postgresConnectionOptions);
    const config: PGVectorStoreArgs = {
      pool,
      tableName,
      columns: {
        idColumnName: "id",
        vectorColumnName: "vector",
        contentColumnName: "content",
        metadataColumnName: "metadata",
      },
      collectionTableName,
      collectionName: "langchain",
      schemaName: schema,
    };
    await pool.query(`CREATE SCHEMA IF NOT EXISTS ${schema}`);
    pgvectorVectorStore = await PGVectorStore.initialize(
      embeddingsEngine,
      config
    );
    computedTableName = pgvectorVectorStore.computedTableName;
    computedCollectionTableName =
      pgvectorVectorStore.computedCollectionTableName;
  });
  afterEach(async () => {
    // Drop table, then recreate it for the next test.
    await pgvectorVectorStore.pool.query(`DROP TABLE ${computedTableName}`);
    await pgvectorVectorStore.pool.query(
      `DROP TABLE ${computedCollectionTableName}`
    );
    await pgvectorVectorStore.ensureTableInDatabase();
    await pgvectorVectorStore.ensureCollectionTableInDatabase();
  });
  afterAll(async () => {
    // Remove the whole schema (and its tables) before closing the pool.
    await pool.query(`DROP SCHEMA ${schema} CASCADE`);
    await pgvectorVectorStore.end();
  });
  // Both tables must have been created inside the requested schema.
  test("Test table creation with schema", async () => {
    const result = await pgvectorVectorStore.pool.query(
      `SELECT table_schema FROM information_schema.tables WHERE table_name = '${tableName}' AND table_schema = '${schema}'`
    );
    expect(result.rowCount).toEqual(1);
    const result2 = await pgvectorVectorStore.pool.query(
      `SELECT table_schema FROM information_schema.tables WHERE table_name = '${collectionTableName}' AND table_schema = '${schema}'`
    );
    expect(result2.rowCount).toEqual(1);
  });
  test("Test embeddings creation", async () => {
    const documents = [
      {
        pageContent: "hello",
        metadata: { a: 1 },
      },
      {
        pageContent: "Cat drinks milk",
        metadata: { a: 2 },
      },
      { pageContent: "hi", metadata: { a: 1 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const results = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: 2,
    });
    expect(results).toHaveLength(1);
    expect(results[0].pageContent).toEqual("Cat drinks milk");
  });
  test("PGvector can save documents with a list greater than default chunk size", async () => {
    // Extract the default chunk size and add one.
    const docsToGenerate = pgvectorVectorStore.chunkSize + 1;
    const documents = [];
    for (let i = 1; i <= docsToGenerate; i += 1) {
      documents.push({ pageContent: "Lorem Ipsum", metadata: { a: i } });
    }
    await pgvectorVectorStore.addDocuments(documents);
    // Query the table to check the number of rows
    const result = await pgvectorVectorStore.pool.query(
      `SELECT COUNT(*) FROM ${computedTableName}`
    );
    const rowCount = parseInt(result.rows[0].count, 10);
    // Check if the number of rows is equal to the number of documents added
    expect(rowCount).toEqual(docsToGenerate);
  });
  test("PGvector can save documents with ids", async () => {
    const id1 = "d8e70e98-19ab-4438-9c14-4bb2bb21a1f9";
    const id2 = "2bbb4b73-efec-4d5e-80ea-df94a4ed3aa3";
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2 } },
    ];
    await pgvectorVectorStore.addDocuments(documents, { ids: [id1, id2] });
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM ${computedTableName} WHERE id = $1`,
      [id1]
    );
    expect(result.rowCount).toEqual(1);
  });
  test("PGvector supports different filter types", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 100 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 200 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 300 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: {
        in: [100, 300],
      },
    });
    expect(result.length).toEqual(2);
    expect(result).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: 100 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 300 } },
    ]);
    const result2 = await pgvectorVectorStore.similaritySearch("hello", 2, {
      a: 200,
    });
    expect(result2.length).toEqual(1);
    expect(result2).toEqual([
      { pageContent: "Lorem Ipsum", metadata: { a: 200 } },
    ]);
    const result3 = await pgvectorVectorStore.similaritySearch("hello", 3);
    expect(result3.length).toEqual(3);
  });
  test("PGvector can delete document by id", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 3 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM ${computedTableName}`
    );
    const initialIds = result.rows.map((row) => row.id);
    const firstIdToDelete = initialIds[0];
    const secondIdToDelete = initialIds[1];
    const idToKeep = initialIds[2];
    await pgvectorVectorStore.delete({
      ids: [firstIdToDelete, secondIdToDelete],
    });
    const result2 = await pgvectorVectorStore.pool.query(
      `SELECT id FROM ${computedTableName}`
    );
    // Only one row should be left
    expect(result2.rowCount).toEqual(1);
    // The deleted ids should not be in the result
    const idsAfterDelete = result2.rows.map((row) => row.id);
    expect(idsAfterDelete).not.toContain(firstIdToDelete);
    expect(idsAfterDelete).not.toContain(secondIdToDelete);
    expect(idsAfterDelete).toContain(idToKeep);
  });
  test("PGvector can delete document by metadata", async () => {
    const documents = [
      { pageContent: "Lorem Ipsum", metadata: { a: 1, b: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 2, b: 1 } },
      { pageContent: "Lorem Ipsum", metadata: { a: 1, c: 1 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const result = await pgvectorVectorStore.pool.query(
      `SELECT id FROM ${computedTableName}`
    );
    const initialIds = result.rows.map((row) => row.id);
    // Filter Matches 1st document
    await pgvectorVectorStore.delete({ filter: { a: 1, b: 1 } });
    const result2 = await pgvectorVectorStore.pool.query(
      `SELECT id FROM ${computedTableName}`
    );
    // Two rows should be left
    expect(result2.rowCount).toEqual(2);
    const idsAfterDelete = result2.rows.map((row) => row.id);
    // The document with matching metadata should not be in the database
    expect(idsAfterDelete).not.toContainEqual(initialIds[0]);
    // All other documents should still be in database
    expect(idsAfterDelete).toContainEqual(initialIds[1]);
    expect(idsAfterDelete).toContainEqual(initialIds[2]);
  });
});
// Verifies that an HNSW approximate-nearest-neighbor index can be created on
// the vector column and that searches still return correct results through it.
describe("PGVectorStore with HNSW index", () => {
  let pgvectorVectorStore: PGVectorStore;
  const tableName = "testlangchain";
  beforeAll(async () => {
    const config: PGVectorStoreArgs = {
      postgresConnectionOptions,
      tableName: "testlangchain",
      columns: {
        idColumnName: "id",
        vectorColumnName: "vector",
        contentColumnName: "content",
        metadataColumnName: "metadata",
      },
      distanceStrategy: "cosine",
    };
    pgvectorVectorStore = await PGVectorStore.initialize(
      embeddingsEngine,
      config
    );
    // Create the index
    await pgvectorVectorStore.createHnswIndex({ dimensions: 1536 });
  });
  afterEach(async () => {
    // Drop table, then recreate it for the next test.
    await pgvectorVectorStore.pool.query(`DROP TABLE "${tableName}"`);
    await pgvectorVectorStore.ensureTableInDatabase();
    // Recreate the HNSW index since dropping the table removed it.
    await pgvectorVectorStore.createHnswIndex({ dimensions: 1536 });
  });
  afterAll(async () => {
    await pgvectorVectorStore.end();
  });
  // Confirm via pg_indexes that the index exists and uses the hnsw method.
  test("Ensure table has HNSW index", async () => {
    const result = await pgvectorVectorStore.pool.query(
      `SELECT indexname, tablename, indexdef FROM pg_indexes where indexname='vector_embedding_hnsw_idx';`
    );
    const { indexdef } = result.rows[0];
    expect(result.rowCount).toBe(1);
    expect(indexdef.includes("USING hnsw")).toBe(true);
  });
  // Searches through the HNSW index still return the expected nearest match.
  test("Test embeddings creation", async () => {
    const documents = [
      {
        pageContent: "hello",
        metadata: { a: 1 },
      },
      {
        pageContent: "Cat drinks milk",
        metadata: { a: 2 },
      },
      { pageContent: "hi", metadata: { a: 1 } },
    ];
    await pgvectorVectorStore.addDocuments(documents);
    const query = await embeddingsEngine.embedQuery("milk");
    const results = await pgvectorVectorStore.similaritySearchVectorWithScore(
      query,
      1
    );
    expect(results).toHaveLength(1);
    expect(results[0][0].pageContent).toEqual("Cat drinks milk");
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/package.json
|
{
"name": "langchain-convex-tests",
"version": "0.0.1",
"type": "module",
"dependencies": {
"convex": "1.4.1",
"@langchain/core": ">=0.3.0 <0.4.0"
}
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/schema.ts
|
/* eslint-disable import/no-extraneous-dependencies */
import { defineSchema, defineTable } from "convex/server";
import { v } from "convex/values";
// Convex schema for the ConvexVectorStore integration tests: a single
// "documents" table holding the embedding vector, the raw text, and arbitrary
// metadata, with a vector index over the embedding field.
export default defineSchema({
  documents: defineTable({
    embedding: v.array(v.number()),
    text: v.string(),
    metadata: v.any(),
  }).vectorIndex("byEmbedding", {
    vectorField: "embedding",
    // Dimensionality of the stored embedding vectors.
    dimensions: 1536,
  }),
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/lib.ts
|
// eslint-disable-next-line import/no-extraneous-dependencies
import { v } from "convex/values";
import { FakeEmbeddings } from "@langchain/core/utils/testing";
import { ConvexVectorStore } from "../../../convex.js";
import { action, mutation } from "./_generated/server.js";
// Test helper: deletes every row from the "documents" table so each test run
// starts from an empty vector store.
export const reset = mutation({
  args: {},
  handler: async (ctx) => {
    const documents = await ctx.db.query("documents").collect();
    await Promise.all(documents.map((document) => ctx.db.delete(document._id)));
  },
});
// Test helper: stores `texts` with their `metadatas` via
// ConvexVectorStore.fromTexts, embedding them with FakeEmbeddings.
// NOTE(review): `openAIApiKey` is declared but never read — FakeEmbeddings
// needs no key. It is kept so existing callers that pass it keep validating.
export const ingest = action({
  args: {
    openAIApiKey: v.string(),
    texts: v.array(v.string()),
    metadatas: v.array(v.any()),
  },
  handler: async (ctx, { texts, metadatas }) => {
    await ConvexVectorStore.fromTexts(
      texts,
      metadatas,
      new FakeEmbeddings({}),
      { ctx }
    );
  },
});
// Test helper: runs a top-3 similarity search for `query` and returns only the
// metadata of each hit.
// NOTE(review): `openAIApiKey` is declared but never read (FakeEmbeddings);
// kept for caller compatibility.
export const similaritySearch = action({
  args: {
    openAIApiKey: v.string(),
    query: v.string(),
  },
  handler: async (ctx, { query }) => {
    const vectorStore = new ConvexVectorStore(new FakeEmbeddings({}), { ctx });
    const result = await vectorStore.similaritySearch(query, 3);
    return result.map(({ metadata }) => metadata);
  },
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/langchain/db.ts
|
// Re-export the shared Convex DB helpers so they are reachable at the
// convex/langchain/db module path this test project expects.
export * from "../../../../../utils/convex.js";
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/_generated/server.d.ts
|
/* eslint-disable */
/**
 * Generated utilities for implementing server-side Convex query and mutation functions.
 *
 * THIS CODE IS AUTOMATICALLY GENERATED.
 * Do not edit by hand — changes are lost when the file is regenerated.
 *
 * Generated by convex@1.3.1.
 * To regenerate, run `npx convex dev`.
 * @generated
 * @module
 */
import {
  ActionBuilder,
  HttpActionBuilder,
  MutationBuilder,
  QueryBuilder,
  GenericActionCtx,
  GenericMutationCtx,
  GenericQueryCtx,
  GenericDatabaseReader,
  GenericDatabaseWriter,
} from "convex/server";
import type { DataModel } from "./dataModel.js";
/**
 * Define a query in this Convex app's public API.
 *
 * This function will be allowed to read your Convex database and will be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export declare const query: QueryBuilder<DataModel, "public">;
/**
 * Define a query that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to read from your Convex database. It will not be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export declare const internalQuery: QueryBuilder<DataModel, "internal">;
/**
 * Define a mutation in this Convex app's public API.
 *
 * This function will be allowed to modify your Convex database and will be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export declare const mutation: MutationBuilder<DataModel, "public">;
/**
 * Define a mutation that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to modify your Convex database. It will not be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export declare const internalMutation: MutationBuilder<DataModel, "internal">;
/**
 * Define an action in this Convex app's public API.
 *
 * An action is a function which can execute any JavaScript code, including non-deterministic
 * code and code with side-effects, like calling third-party services.
 * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
 * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
 *
 * @param func - The action. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
 */
export declare const action: ActionBuilder<DataModel, "public">;
/**
 * Define an action that is only accessible from other Convex functions (but not from the client).
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
 */
export declare const internalAction: ActionBuilder<DataModel, "internal">;
/**
 * Define an HTTP action.
 *
 * This function will be used to respond to HTTP requests received by a Convex
 * deployment if the requests matches the path and method where this action
 * is routed. Be sure to route your action in `convex/http.js`.
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
 */
export declare const httpAction: HttpActionBuilder;
/**
 * A set of services for use within Convex query functions.
 *
 * The query context is passed as the first argument to any Convex query
 * function run on the server.
 *
 * This differs from the {@link MutationCtx} because all of the services are
 * read-only.
 */
export type QueryCtx = GenericQueryCtx<DataModel>;
/**
 * A set of services for use within Convex mutation functions.
 *
 * The mutation context is passed as the first argument to any Convex mutation
 * function run on the server.
 */
export type MutationCtx = GenericMutationCtx<DataModel>;
/**
 * A set of services for use within Convex action functions.
 *
 * The action context is passed as the first argument to any Convex action
 * function run on the server.
 */
export type ActionCtx = GenericActionCtx<DataModel>;
/**
 * An interface to read from the database within Convex query functions.
 *
 * The two entry points are {@link DatabaseReader.get}, which fetches a single
 * document by its {@link Id}, or {@link DatabaseReader.query}, which starts
 * building a query.
 */
export type DatabaseReader = GenericDatabaseReader<DataModel>;
/**
 * An interface to read from and write to the database within Convex mutation
 * functions.
 *
 * Convex guarantees that all writes within a single mutation are
 * executed atomically, so you never have to worry about partial writes leaving
 * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control)
 * for the guarantees Convex provides your functions.
 */
export type DatabaseWriter = GenericDatabaseWriter<DataModel>;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/_generated/api.js
|
/* eslint-disable */
/**
 * Generated `api` utility.
 *
 * THIS CODE IS AUTOMATICALLY GENERATED.
 * Do not edit by hand — changes are lost when the file is regenerated.
 *
 * Generated by convex@1.3.1.
 * To regenerate, run `npx convex dev`.
 * @generated
 * @module
 */
import { anyApi } from "convex/server";
/**
 * A utility for referencing Convex functions in your app's API.
 *
 * Usage:
 * ```js
 * const myFunctionReference = api.myModule.myFunction;
 * ```
 */
export const api = anyApi;
// Internal (non-public) function references share the same untyped proxy.
export const internal = anyApi;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/_generated/server.js
|
/* eslint-disable */
/**
 * Generated utilities for implementing server-side Convex query and mutation functions.
 *
 * THIS CODE IS AUTOMATICALLY GENERATED.
 * Do not edit by hand — changes are lost when the file is regenerated.
 *
 * Generated by convex@1.3.1.
 * To regenerate, run `npx convex dev`.
 * @generated
 * @module
 */
import {
  actionGeneric,
  httpActionGeneric,
  queryGeneric,
  mutationGeneric,
  internalActionGeneric,
  internalMutationGeneric,
  internalQueryGeneric,
} from "convex/server";
/**
 * Define a query in this Convex app's public API.
 *
 * This function will be allowed to read your Convex database and will be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export const query = queryGeneric;
/**
 * Define a query that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to read from your Convex database. It will not be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export const internalQuery = internalQueryGeneric;
/**
 * Define a mutation in this Convex app's public API.
 *
 * This function will be allowed to modify your Convex database and will be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export const mutation = mutationGeneric;
/**
 * Define a mutation that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to modify your Convex database. It will not be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export const internalMutation = internalMutationGeneric;
/**
 * Define an action in this Convex app's public API.
 *
 * An action is a function which can execute any JavaScript code, including non-deterministic
 * code and code with side-effects, like calling third-party services.
 * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
 * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
 *
 * @param func - The action. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
 */
export const action = actionGeneric;
/**
 * Define an action that is only accessible from other Convex functions (but not from the client).
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
 */
export const internalAction = internalActionGeneric;
/**
 * Define a Convex HTTP action.
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument, and a `Request` object
 * as its second.
 * @returns The wrapped endpoint function. Route a URL path to this function in `convex/http.js`.
 */
export const httpAction = httpActionGeneric;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/_generated/dataModel.d.ts
|
/* eslint-disable */
/**
 * Generated data model types.
 *
 * THIS CODE IS AUTOMATICALLY GENERATED.
 * Do not edit by hand — changes are lost when the file is regenerated.
 *
 * Generated by convex@1.3.1.
 * To regenerate, run `npx convex dev`.
 * @generated
 * @module
 */
import type { DataModelFromSchemaDefinition } from "convex/server";
import type { DocumentByName, TableNamesInDataModel } from "convex/server";
import type { GenericId } from "convex/values";
import schema from "../schema";
/**
 * The names of all of your Convex tables.
 */
export type TableNames = TableNamesInDataModel<DataModel>;
/**
 * The type of a document stored in Convex.
 *
 * @typeParam TableName - A string literal type of the table name (like "users").
 */
export type Doc<TableName extends TableNames> = DocumentByName<
  DataModel,
  TableName
>;
/**
 * An identifier for a document in Convex.
 *
 * Convex documents are uniquely identified by their `Id`, which is accessible
 * on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids).
 *
 * Documents can be loaded using `db.get(id)` in query and mutation functions.
 *
 * IDs are just strings at runtime, but this type can be used to distinguish them from other
 * strings when type checking.
 *
 * @typeParam TableName - A string literal type of the table name (like "users").
 */
export type Id<TableName extends TableNames> = GenericId<TableName>;
/**
 * A type describing your Convex data model.
 *
 * This type includes information about what tables you have, the type of
 * documents stored in those tables, and the indexes defined on them.
 *
 * This type is used to parameterize methods like `queryGeneric` and
 * `mutationGeneric` to make them type-safe.
 */
export type DataModel = DataModelFromSchemaDefinition<typeof schema>;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/convex/convex/_generated/api.d.ts
|
/* eslint-disable */
/**
 * Generated `api` utility.
 *
 * THIS CODE IS AUTOMATICALLY GENERATED.
 * Do not edit by hand — changes are lost when the file is regenerated.
 *
 * Generated by convex@1.3.1.
 * To regenerate, run `npx convex dev`.
 * @generated
 * @module
 */
import type {
  ApiFromModules,
  FilterApi,
  FunctionReference,
} from "convex/server";
import type * as langchain_db from "../langchain/db";
import type * as lib from "../lib";
/**
 * A utility for referencing Convex functions in your app's API.
 *
 * Usage:
 * ```js
 * const myFunctionReference = api.myModule.myFunction;
 * ```
 */
declare const fullApi: ApiFromModules<{
  "langchain/db": typeof langchain_db;
  lib: typeof lib;
}>;
// `api` exposes only public functions; `internal` only internal ones.
export declare const api: FilterApi<
  typeof fullApi,
  FunctionReference<any, "public">
>;
export declare const internal: FilterApi<
  typeof fullApi,
  FunctionReference<any, "internal">
>;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/faiss.int.test.data/requirements.txt
|
langchain==0.3.0
langchain-community==0.3.0
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests
|
lc_public_repos/langchainjs/libs/langchain-community/src/vectorstores/tests/faiss.int.test.data/faiss.int.test.py
|
# Reference script: builds a FAISS index from the state-of-the-union text and
# saves it to disk so the JS integration test can load a Python-generated index.
# NOTE(review): assumes an OpenAI API key is available in the environment for
# OpenAIEmbeddings — confirm before running.
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain_community.vectorstores import FAISS
from langchain_community.document_loaders import TextLoader
# Load the source document and split it into ~1000-character chunks.
loader = TextLoader('../../../../../../examples/state_of_the_union.txt')
documents = loader.load()
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
docs = text_splitter.split_documents(documents)
# Embed the chunks and build the in-memory FAISS vector store.
embeddings = OpenAIEmbeddings()
db = FAISS.from_documents(docs, embeddings)
# Sanity-check a similarity search, then persist the index fixture.
query = "What did the president say about Ketanji Brown Jackson"
docs = db.similarity_search(query)
print(docs)
db.save_local("faiss_index")
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/document_compressors/ibm.ts
|
import { DocumentInterface } from "@langchain/core/documents";
import { BaseDocumentCompressor } from "@langchain/core/retrievers/document_compressors";
import { WatsonXAI } from "@ibm-cloud/watsonx-ai";
import { AsyncCaller } from "@langchain/core/utils/async_caller";
import { WatsonxAuth, WatsonxParams } from "../types/ibm.js";
import { authenticateAndSetInstance } from "../utils/ibm.js";
/**
 * Constructor options for {@link WatsonxRerank}.
 */
export interface WatsonxInputRerank extends Omit<WatsonxParams, "idOrName"> {
  /** Token budget per input document before server-side truncation. */
  truncateInputTokens?: number;
  /** Controls what the rerank endpoint includes in each result. */
  returnOptions?: {
    /** Return only the N most relevant results. */
    topN?: number;
    /** Echo each input text back in its result. */
    inputs?: boolean;
  };
}
/**
 * Document compressor backed by the IBM watsonx.ai rerank service.
 */
export class WatsonxRerank
  extends BaseDocumentCompressor
  implements WatsonxInputRerank
{
  /** Retry attempts for failed service calls (0 = fail immediately). */
  maxRetries = 0;

  /** watsonx.ai API version date; overridden by `fields.version` when given. */
  version = "2024-05-31";

  /** Token budget per input document before server-side truncation. */
  truncateInputTokens?: number | undefined;

  /** Instance-level defaults for what the rerank endpoint returns. */
  returnOptions?:
    | { topN?: number; inputs?: boolean; query?: boolean }
    | undefined;

  /** Rerank model id, e.g. "cross-encoder/ms-marco-minilm-l-12-v2". */
  model: string;

  /** watsonx.ai space id — mutually exclusive with `projectId`. */
  spaceId?: string | undefined;

  /** watsonx.ai project id — mutually exclusive with `spaceId`. */
  projectId?: string | undefined;

  /** Maximum number of concurrent requests issued by the AsyncCaller. */
  maxConcurrency?: number | undefined;

  /** Base URL of the watsonx.ai service. */
  serviceUrl: string;

  /** Authenticated watsonx.ai SDK client. */
  service: WatsonXAI;

  constructor(fields: WatsonxInputRerank & WatsonxAuth) {
    super();
    // Exactly one scoping id (projectId XOR spaceId) must be supplied.
    if (fields.projectId && fields.spaceId)
      throw new Error("Maximum 1 id type can be specified per instance");
    if (!fields.projectId && !fields.spaceId)
      throw new Error(
        "No id specified! At least id of 1 type has to be specified"
      );
    this.model = fields.model;
    this.serviceUrl = fields.serviceUrl;
    // Keep the class default instead of clobbering it with undefined.
    this.version = fields.version ?? this.version;
    this.projectId = fields?.projectId;
    this.spaceId = fields?.spaceId;
    this.maxRetries = fields.maxRetries ?? this.maxRetries;
    this.maxConcurrency = fields.maxConcurrency;
    this.truncateInputTokens = fields.truncateInputTokens;
    this.returnOptions = fields.returnOptions;
    const {
      watsonxAIApikey,
      watsonxAIAuthType,
      watsonxAIBearerToken,
      watsonxAIUsername,
      watsonxAIPassword,
      watsonxAIUrl,
      version,
      serviceUrl,
    } = fields;
    const auth = authenticateAndSetInstance({
      watsonxAIApikey,
      watsonxAIAuthType,
      watsonxAIBearerToken,
      watsonxAIUsername,
      watsonxAIPassword,
      watsonxAIUrl,
      version,
      serviceUrl,
    });
    if (auth) this.service = auth;
    else throw new Error("You have not provided one type of authentication");
  }

  /** Scope ({projectId|spaceId} + modelId) attached to every request. */
  scopeId() {
    if (this.projectId)
      return { projectId: this.projectId, modelId: this.model };
    else return { spaceId: this.spaceId, modelId: this.model };
  }

  /**
   * Build the request `parameters` payload, letting per-call `options`
   * override the instance-level defaults.
   */
  invocationParams(options?: Partial<WatsonxInputRerank>) {
    return {
      truncate_input_tokens:
        options?.truncateInputTokens ?? this.truncateInputTokens,
      return_options: {
        top_n: options?.returnOptions?.topN ?? this.returnOptions?.topN,
        inputs: options?.returnOptions?.inputs ?? this.returnOptions?.inputs,
      },
    };
  }

  /**
   * Rerank `documents` against `query` and return them (in the order the
   * service reports) with `metadata.relevanceScore` attached.
   *
   * Unlike the previous implementation, the input documents are NOT mutated;
   * shallow copies with updated metadata are returned instead.
   *
   * @param documents Documents to score against the query.
   * @param query Query text to rank against.
   * @returns One document per service result, scored via `relevanceScore`.
   */
  async compressDocuments(
    documents: DocumentInterface[],
    query: string
  ): Promise<DocumentInterface[]> {
    const caller = new AsyncCaller({
      maxConcurrency: this.maxConcurrency,
      maxRetries: this.maxRetries,
    });
    const inputs = documents.map((document) => ({
      text: document.pageContent,
    }));
    const { result } = await caller.call(() =>
      this.service.textRerank({
        ...this.scopeId(),
        inputs,
        query,
        // Only truncation applies here; top-N/inputs options would drop
        // documents, while the integration tests expect one result per input.
        parameters: {
          truncate_input_tokens: this.truncateInputTokens,
        },
      })
    );
    // Attach the score without mutating the caller's documents.
    return result.results.map(({ index, score }) => {
      const ranked = documents[index];
      return {
        ...ranked,
        metadata: { ...ranked.metadata, relevanceScore: score },
      };
    });
  }

  /**
   * Rerank documents (or raw strings) against `query`.
   *
   * @param documents Documents, plain strings, or `{ pageContent }` objects.
   * @param query Query text to rank against.
   * @param options Per-call overrides for truncation and return options.
   * @returns One entry per result: its original `index`, the
   *   `relevanceScore`, and — when `returnOptions.inputs` is enabled — the
   *   echoed `input`.
   */
  async rerank(
    documents: Array<
      DocumentInterface | string | Record<"pageContent", string>
    >,
    query: string,
    options?: Partial<WatsonxInputRerank>
  ): Promise<Array<{ index: number; relevanceScore: number; input?: string }>> {
    const inputs = documents.map((document) =>
      typeof document === "string"
        ? { text: document }
        : { text: document.pageContent }
    );
    const caller = new AsyncCaller({
      maxConcurrency: this.maxConcurrency,
      maxRetries: this.maxRetries,
    });
    const { result } = await caller.call(() =>
      this.service.textRerank({
        ...this.scopeId(),
        inputs,
        query,
        parameters: this.invocationParams(options),
      })
    );
    // Omit `input` entirely when the service did not echo it back, so the
    // key is absent (not `undefined`) in the returned objects.
    return result.results.map((document) =>
      document?.input
        ? {
            index: document.index,
            relevanceScore: document.score,
            input: document?.input,
          }
        : {
            index: document.index,
            relevanceScore: document.score,
          }
    );
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/document_compressors
|
lc_public_repos/langchainjs/libs/langchain-community/src/document_compressors/tests/ibm.test.ts
|
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-explicit-any */
import { WatsonxRerank, WatsonxInputRerank } from "../ibm.js";
// Identity helper: returns its argument unchanged, used purely to give a
// string key a narrower `keyof` type at the call site.
const getKey = <K>(key: K): K => key;
const testProperties = (
instance: WatsonxRerank,
testProps: WatsonxInputRerank,
notExTestProps?: { [key: string]: any }
) => {
const checkProperty = <T extends { [key: string]: any }>(
testProps: T,
instance: T,
existing = true
) => {
Object.keys(testProps).forEach((key) => {
const keys = getKey<keyof T>(key);
type Type = Pick<T, typeof keys>;
if (typeof testProps[key as keyof T] === "object")
checkProperty<Type>(testProps[key as keyof T], instance[key], existing);
else {
if (existing)
expect(instance[key as keyof T]).toBe(testProps[key as keyof T]);
else if (instance) expect(instance[key as keyof T]).toBeUndefined();
}
});
};
checkProperty<typeof testProps>(testProps, instance);
if (notExTestProps)
checkProperty<typeof notExTestProps>(notExTestProps, instance, false);
};
// Fake IAM credentials: these unit tests only construct the client and never
// hit the network, so any non-empty values are sufficient.
const fakeAuthProp = {
  watsonxAIAuthType: "iam",
  watsonxAIApikey: "fake_key",
};
// Suite was mis-titled "Embeddings unit tests" (copy-paste); it tests
// WatsonxRerank construction and validation.
describe("Rerank unit tests", () => {
  describe("Positive tests", () => {
    test("Basic properties", () => {
      const testProps = {
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        version: "2024-05-31",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        projectId: process.env.WATSONX_AI_PROJECT_ID || "testString",
      };
      const instance = new WatsonxRerank({ ...testProps, ...fakeAuthProp });
      testProperties(instance, testProps);
    });
    // Renamed from a duplicated "Basic properties" title so test reporters
    // can distinguish the two cases.
    test("Extended properties", () => {
      const testProps: WatsonxInputRerank = {
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        version: "2024-05-31",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        projectId: process.env.WATSONX_AI_PROJECT_ID || "testString",
        truncateInputTokens: 10,
        maxConcurrency: 2,
        maxRetries: 2,
        returnOptions: {
          topN: 5,
          inputs: false,
        },
      };
      const instance = new WatsonxRerank({ ...testProps, ...fakeAuthProp });
      testProperties(instance, testProps);
    });
  });
  describe("Negative tests", () => {
    test("Missing id", async () => {
      const testProps = {
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        version: "2024-05-31",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
      };
      expect(
        () =>
          new WatsonxRerank({
            ...testProps,
            ...fakeAuthProp,
          })
      ).toThrowError();
    });
    test("Missing other props", async () => {
      // @ts-expect-error Intentionally passing wrong value
      const testPropsProjectId: WatsonxInputLLM = {
        projectId: process.env.WATSONX_AI_PROJECT_ID || "testString",
      };
      expect(
        () =>
          new WatsonxRerank({
            ...testPropsProjectId,
          })
      ).toThrowError();
      // @ts-expect-error //Intentionally passing wrong value
      const testPropsServiceUrl: WatsonxInputLLM = {
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
      };
      expect(
        () =>
          new WatsonxRerank({
            ...testPropsServiceUrl,
          })
      ).toThrowError();
      const testPropsVersion = {
        version: "2024-05-31",
      };
      expect(
        () =>
          new WatsonxRerank({
            // @ts-expect-error Intentionally passing wrong props
            testPropsVersion,
          })
      ).toThrowError();
    });
    test("Passing more than one id", async () => {
      const testProps = {
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        version: "2024-05-31",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        projectId: process.env.WATSONX_AI_PROJECT_ID || "testString",
        spaceId: process.env.WATSONX_AI_PROJECT_ID || "testString",
      };
      expect(
        () =>
          new WatsonxRerank({
            ...testProps,
            ...fakeAuthProp,
          })
      ).toThrowError();
    });
    test("Invalid properties", () => {
      const testProps = {
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        version: "2024-05-31",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        projectId: process.env.WATSONX_AI_PROJECT_ID || "testString",
      };
      const notExTestProps = {
        notExisting: 12,
        notExObj: {
          notExProp: 12,
        },
      };
      const instance = new WatsonxRerank({
        ...testProps,
        ...notExTestProps,
        ...fakeAuthProp,
      });
      testProperties(instance, testProps, notExTestProps);
    });
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/document_compressors
|
lc_public_repos/langchainjs/libs/langchain-community/src/document_compressors/tests/ibm.int.test.ts
|
/* eslint-disable no-process-env */
import { Document } from "@langchain/core/documents";
import { WatsonxRerank } from "../ibm.js";
// Query and fixture documents shared by every integration case below.
// These tests require live watsonx.ai credentials (WATSONX_AI_SERVICE_URL /
// WATSONX_AI_PROJECT_ID plus auth env vars) and make real network calls.
const query = "What is the capital of the United States?";
const docs = [
  new Document({
    pageContent:
      "Carson City is the capital city of the American state of Nevada. At the 2010 United States Census, Carson City had a population of 55,274.",
  }),
  new Document({
    pageContent:
      "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that are a political division controlled by the United States. Its capital is Saipan.",
  }),
  new Document({
    pageContent:
      "Charlotte Amalie is the capital and largest city of the United States Virgin Islands. It has about 20,000 people. The city is on the island of Saint Thomas.",
  }),
  new Document({
    pageContent:
      "Washington, D.C. (also known as simply Washington or D.C., and officially as the District of Columbia) is the capital of the United States. It is a federal district. The President of the USA and many major national government offices are in the territory. This makes it the political center of the United States of America.",
  }),
  new Document({
    pageContent:
      "Capital punishment (the death penalty) has existed in the United States since before the United States was a country. As of 2017, capital punishment is legal in 30 of the 50 states. The federal government (including the United States military) also uses capital punishment.",
  }),
];
describe("Integration tests on WatsonxRerank", () => {
  describe(".compressDocuments() method", () => {
    test("Basic call", async () => {
      const instance = new WatsonxRerank({
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        version: "2024-05-31",
        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
      });
      const result = await instance.compressDocuments(docs, query);
      // compressDocuments must return one scored document per input.
      expect(result.length).toBe(docs.length);
      result.forEach((item) =>
        expect(typeof item.metadata.relevanceScore).toBe("number")
      );
    });
    test("Basic call with truncation", async () => {
      const instance = new WatsonxRerank({
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        version: "2024-05-31",
        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
        truncateInputTokens: 512,
      });
      // Repeat content to exceed the token budget and exercise truncation.
      const longerDocs: Document[] = docs.map((item) => ({
        pageContent: item.pageContent.repeat(100),
        metadata: {},
      }));
      const result = await instance.compressDocuments(longerDocs, query);
      expect(result.length).toBe(docs.length);
      result.forEach((item) =>
        expect(typeof item.metadata.relevanceScore).toBe("number")
      );
    });
  });
  describe(".rerank() method", () => {
    test("Basic call", async () => {
      const instance = new WatsonxRerank({
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        version: "2024-05-31",
        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
      });
      const result = await instance.rerank(docs, query);
      expect(result.length).toBe(docs.length);
      result.forEach((item) => {
        expect(typeof item.relevanceScore).toBe("number");
        // `input` is only present when returnOptions.inputs is enabled.
        expect(item.input).toBeUndefined();
      });
    });
    test("Basic call with options", async () => {
      const instance = new WatsonxRerank({
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        version: "2024-05-31",
        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
      });
      const result = await instance.rerank(docs, query, {
        returnOptions: {
          topN: 3,
          inputs: true,
        },
      });
      // topN caps the result count; inputs echoes each document text back.
      expect(result.length).toBe(3);
      result.forEach((item) => {
        expect(typeof item.relevanceScore).toBe("number");
        expect(item.input).toBeDefined();
      });
    });
    test("Basic call with truncation", async () => {
      const instance = new WatsonxRerank({
        model: "cross-encoder/ms-marco-minilm-l-12-v2",
        serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
        version: "2024-05-31",
        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
      });
      const longerDocs = docs.map((item) => ({
        pageContent: item.pageContent.repeat(100),
      }));
      const result = await instance.rerank(longerDocs, query, {
        truncateInputTokens: 512,
      });
      result.forEach((item) => {
        expect(typeof item.relevanceScore).toBe("number");
        expect(item.input).toBeUndefined();
      });
    });
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/vercel_kv.ts
|
import { kv, type VercelKV } from "@vercel/kv";
import { BaseStore } from "@langchain/core/stores";
/**
 * Class that extends the BaseStore class to interact with a Vercel KV
 * database. It provides methods for getting, setting, and deleting data,
 * as well as yielding keys from the database.
 * @example
 * ```typescript
 * const store = new VercelKVStore({
 *   client: getClient(),
 * });
 * await store.mset([
 *   { key: "message:id:0", value: "encoded message 0" },
 *   { key: "message:id:1", value: "encoded message 1" },
 * ]);
 * const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]);
 * const yieldedKeys = [];
 * for await (const key of store.yieldKeys("message:id:")) {
 *   yieldedKeys.push(key);
 * }
 * await store.mdelete(yieldedKeys);
 * ```
 */
export class VercelKVStore extends BaseStore<string, Uint8Array> {
  lc_namespace = ["langchain", "storage"];

  protected client: VercelKV;

  /** Optional TTL in seconds applied to every key written by `mset`. */
  protected ttl?: number;

  /** Optional namespace; keys are stored as `<namespace>/<key>`. */
  protected namespace?: string;

  /** SCAN `count` hint used when iterating keys in `yieldKeys`. */
  protected yieldKeysScanBatchSize = 1000;

  constructor(fields?: {
    client?: VercelKV;
    ttl?: number;
    namespace?: string;
    yieldKeysScanBatchSize?: number;
  }) {
    super(fields);
    // Fall back to the default client exported by @vercel/kv.
    this.client = fields?.client ?? kv;
    this.ttl = fields?.ttl;
    this.namespace = fields?.namespace;
    this.yieldKeysScanBatchSize =
      fields?.yieldKeysScanBatchSize ?? this.yieldKeysScanBatchSize;
  }

  /** Prepend the configured namespace (if any) to `key`. */
  _getPrefixedKey(key: string) {
    if (this.namespace) {
      const delimiter = "/";
      return `${this.namespace}${delimiter}${key}`;
    }
    return key;
  }

  /** Strip the configured namespace (if any) from a stored key. */
  _getDeprefixedKey(key: string) {
    if (this.namespace) {
      const delimiter = "/";
      return key.slice(this.namespace.length + delimiter.length);
    }
    return key;
  }

  /**
   * Gets multiple keys from the Vercel KV (Redis) database.
   * @param keys Array of keys to be retrieved.
   * @returns An array of retrieved values, `undefined` for missing keys.
   */
  async mget(keys: string[]) {
    // Redis MGET requires at least one key; short-circuit on empty input.
    if (keys.length === 0) {
      return [];
    }
    const prefixedKeys = keys.map(this._getPrefixedKey.bind(this));
    const retrievedValues = await this.client.mget<(string | undefined)[]>(
      ...prefixedKeys
    );
    const encoder = new TextEncoder();
    return retrievedValues.map((value) => {
      if (value === undefined || value === null) {
        return undefined;
      } else if (typeof value === "object") {
        // The client may deserialize stored JSON; re-serialize before encoding.
        return encoder.encode(JSON.stringify(value));
      } else {
        return encoder.encode(value);
      }
    });
  }

  /**
   * Sets multiple keys in the Vercel KV (Redis) database.
   * @param keyValuePairs Array of key-value pairs to be set.
   * @returns Promise that resolves when all keys have been set.
   */
  async mset(keyValuePairs: [string, Uint8Array][]): Promise<void> {
    // Nothing to write; avoid executing an empty pipeline.
    if (keyValuePairs.length === 0) {
      return;
    }
    const decoder = new TextDecoder();
    const decodedKeyValuePairs = keyValuePairs.map(([key, value]) => [
      this._getPrefixedKey(key),
      decoder.decode(value),
    ]);
    // Batch all writes into a single pipeline round trip.
    const pipeline = this.client.pipeline();
    for (const [key, value] of decodedKeyValuePairs) {
      if (this.ttl) {
        pipeline.setex(key, this.ttl, value);
      } else {
        pipeline.set(key, value);
      }
    }
    await pipeline.exec();
  }

  /**
   * Deletes multiple keys from the Vercel KV (Redis) database.
   * @param keys Array of keys to be deleted.
   * @returns Promise that resolves when all keys have been deleted.
   */
  async mdelete(keys: string[]): Promise<void> {
    // Redis DEL with zero arguments is an error; skip the round trip.
    if (keys.length === 0) {
      return;
    }
    await this.client.del(...keys.map(this._getPrefixedKey.bind(this)));
  }

  /**
   * Yields keys from the Vercel KV (Redis) database via SCAN.
   * @param prefix Optional prefix to filter the keys (a trailing `*` is
   *   appended if not already present).
   * @returns An AsyncGenerator that yields de-namespaced keys.
   */
  async *yieldKeys(prefix?: string): AsyncGenerator<string> {
    let pattern;
    if (prefix) {
      const wildcardPrefix = prefix.endsWith("*") ? prefix : `${prefix}*`;
      pattern = this._getPrefixedKey(wildcardPrefix);
    } else {
      pattern = this._getPrefixedKey("*");
    }
    let [cursor, batch] = await this.client.scan(0, {
      match: pattern,
      count: this.yieldKeysScanBatchSize,
    });
    for (const key of batch) {
      yield this._getDeprefixedKey(key);
    }
    // SCAN is complete once the server returns cursor 0.
    while (cursor !== 0) {
      [cursor, batch] = await this.client.scan(cursor, {
        match: pattern,
        count: this.yieldKeysScanBatchSize,
      });
      for (const key of batch) {
        yield this._getDeprefixedKey(key);
      }
    }
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/convex.ts
|
// eslint-disable-next-line import/no-extraneous-dependencies
import {
FieldPaths,
FunctionReference,
GenericActionCtx,
GenericDataModel,
NamedTableInfo,
TableNamesInDataModel,
VectorIndexNames,
makeFunctionReference,
} from "convex/server";
// eslint-disable-next-line import/no-extraneous-dependencies
import { Value } from "convex/values";
import { BaseStore } from "@langchain/core/stores";
/**
 * Type that defines the config required to initialize the
 * ConvexKVStore class. It includes the table name,
 * index name, field name.
 *
 * All fields except `ctx` are optional; the documented defaults mirror the
 * fallbacks applied in the ConvexKVStore constructor.
 */
export type ConvexKVStoreConfig<
  DataModel extends GenericDataModel,
  TableName extends TableNamesInDataModel<DataModel>,
  IndexName extends VectorIndexNames<NamedTableInfo<DataModel, TableName>>,
  KeyFieldName extends FieldPaths<NamedTableInfo<DataModel, TableName>>,
  ValueFieldName extends FieldPaths<NamedTableInfo<DataModel, TableName>>,
  UpsertMutation extends FunctionReference<
    "mutation",
    "internal",
    { table: string; document: object }
  >,
  LookupQuery extends FunctionReference<
    "query",
    "internal",
    { table: string; index: string; keyField: string; key: string },
    object[]
  >,
  DeleteManyMutation extends FunctionReference<
    "mutation",
    "internal",
    { table: string; index: string; keyField: string; key: string }
  >
> = {
  readonly ctx: GenericActionCtx<DataModel>;
  /**
   * Defaults to "cache"
   */
  readonly table?: TableName;
  /**
   * Defaults to "byKey"
   */
  readonly index?: IndexName;
  /**
   * Defaults to "key"
   */
  readonly keyField?: KeyFieldName;
  /**
   * Defaults to "value"
   */
  readonly valueField?: ValueFieldName;
  /**
   * Defaults to `internal.langchain.db.upsert`
   */
  readonly upsert?: UpsertMutation;
  /**
   * Defaults to `internal.langchain.db.lookup`
   */
  readonly lookup?: LookupQuery;
  /**
   * Defaults to `internal.langchain.db.deleteMany`
   */
  readonly deleteMany?: DeleteManyMutation;
};
/**
 * Class that extends the BaseStore class to interact with a Convex
 * database. It provides methods for getting, setting, and deleting key value pairs,
 * as well as yielding keys from the database.
 *
 * Keys are stored as documents of shape `{ [keyField]: key, [valueField]: value }`
 * in a single table, looked up through a Convex index on the key field.
 */
export class ConvexKVStore<
  T extends Value,
  DataModel extends GenericDataModel,
  TableName extends TableNamesInDataModel<DataModel>,
  // NOTE(review): this constraint uses VectorIndexNames, but the lookup below
  // is a plain key-equality query — a non-vector index name type may be what
  // was intended; confirm against ConvexKVStoreConfig and the Convex helpers.
  IndexName extends VectorIndexNames<NamedTableInfo<DataModel, TableName>>,
  KeyFieldName extends FieldPaths<NamedTableInfo<DataModel, TableName>>,
  ValueFieldName extends FieldPaths<NamedTableInfo<DataModel, TableName>>,
  // Mutation that inserts or replaces a whole document keyed by `keyField`.
  UpsertMutation extends FunctionReference<
    "mutation",
    "internal",
    { table: string; document: object }
  >,
  // Query returning all documents whose `keyField` equals `key`.
  LookupQuery extends FunctionReference<
    "query",
    "internal",
    { table: string; index: string; keyField: string; key: string },
    object[]
  >,
  // Mutation deleting every document whose `keyField` equals `key`.
  DeleteManyMutation extends FunctionReference<
    "mutation",
    "internal",
    { table: string; index: string; keyField: string; key: string }
  >
> extends BaseStore<string, T> {
  lc_namespace = ["langchain", "storage", "convex"];
  // Convex action context used to run the queries/mutations below.
  private readonly ctx: GenericActionCtx<DataModel>;
  // Table/index/field names, all defaulted in the constructor.
  private readonly table: TableName;
  private readonly index: IndexName;
  private readonly keyField: KeyFieldName;
  private readonly valueField: ValueFieldName;
  // Function references; default to the generated `langchain/db:*` functions.
  private readonly upsert: UpsertMutation;
  private readonly lookup: LookupQuery;
  private readonly deleteMany: DeleteManyMutation;
  /**
   * @param config Convex action context plus optional table/index/field names
   *   and optional overrides for the upsert/lookup/deleteMany functions.
   */
  constructor(
    config: ConvexKVStoreConfig<
      DataModel,
      TableName,
      IndexName,
      KeyFieldName,
      ValueFieldName,
      UpsertMutation,
      LookupQuery,
      DeleteManyMutation
    >
  ) {
    super(config);
    this.ctx = config.ctx;
    this.table = config.table ?? ("cache" as TableName);
    this.index = config.index ?? ("byKey" as IndexName);
    this.keyField = config.keyField ?? ("key" as KeyFieldName);
    this.valueField = config.valueField ?? ("value" as ValueFieldName);
    // makeFunctionReference is untyped relative to the generics, hence the casts.
    this.upsert =
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      config.upsert ?? (makeFunctionReference("langchain/db:upsert") as any)
    this.lookup =
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      config.lookup ?? (makeFunctionReference("langchain/db:lookup") as any);
    this.deleteMany =
      config.deleteMany ??
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      (makeFunctionReference("langchain/db:deleteMany") as any);
  }
  /**
   * Gets multiple keys from the Convex database.
   * Issues one lookup query per key, all in parallel.
   * @param keys Array of keys to be retrieved.
   * @returns An array of retrieved values (undefined for missing keys),
   *   in the same order as `keys`.
   */
  async mget(keys: string[]) {
    return (await Promise.all(
      keys.map(async (key) => {
        const found = (await this.ctx.runQuery(this.lookup, {
          table: this.table,
          index: this.index,
          keyField: this.keyField,
          key,
          // eslint-disable-next-line @typescript-eslint/no-explicit-any
        } as any)) as any;
        // Lookup returns all matching documents; only the first is used.
        return found.length > 0 ? found[0][this.valueField] : undefined;
      })
    )) as (T | undefined)[];
  }
  /**
   * Sets multiple keys in the Convex database.
   * Writes are issued in chunks of PAGE_SIZE parallel mutations.
   * @param keyValuePairs Array of key-value pairs to be set.
   * @returns Promise that resolves when all keys have been set.
   */
  async mset(keyValuePairs: [string, T][]): Promise<void> {
    // TODO: Remove chunking when Convex handles the concurrent requests correctly
    const PAGE_SIZE = 16;
    for (let i = 0; i < keyValuePairs.length; i += PAGE_SIZE) {
      await Promise.all(
        keyValuePairs.slice(i, i + PAGE_SIZE).map(([key, value]) =>
          this.ctx.runMutation(this.upsert, {
            table: this.table,
            index: this.index,
            keyField: this.keyField,
            key,
            // The stored document carries both the key and the value fields.
            document: { [this.keyField]: key, [this.valueField]: value },
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
          } as any)
        )
      );
    }
  }
  /**
   * Deletes multiple keys from the Convex database.
   * All deletions run in parallel (no chunking, unlike mset).
   * @param keys Array of keys to be deleted.
   * @returns Promise that resolves when all keys have been deleted.
   */
  async mdelete(keys: string[]): Promise<void> {
    await Promise.all(
      keys.map((key) =>
        this.ctx.runMutation(this.deleteMany, {
          table: this.table,
          index: this.index,
          keyField: this.keyField,
          key,
          // eslint-disable-next-line @typescript-eslint/no-explicit-any
        } as any)
      )
    );
  }
  /**
   * Yields keys from the Convex database.
   * @param prefix Optional prefix to filter the keys.
   * @returns An AsyncGenerator that yields keys from the Convex database.
   * @throws Always — not implemented yet for ConvexKVStore.
   */
  // eslint-disable-next-line require-yield
  async *yieldKeys(_prefix?: string): AsyncGenerator<string> {
    throw new Error("yieldKeys not implemented yet for ConvexKVStore");
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/cassandra.ts
|
import { BaseStore } from "@langchain/core/stores";
import {
CassandraClientArgs,
Column,
Filter,
CassandraTable,
} from "../utils/cassandra.js";
/**
 * Configuration options for initializing a CassandraKVStore.
 * These options extend generic Cassandra client arguments with specific settings
 * for key-value store operations.
 *
 * @interface CassandraKVOptions
 * @extends CassandraClientArgs Custom arguments for the Cassandra client, such as connection settings.
 *
 * @property {string} keyspace The name of the Cassandra keyspace to be used by the key-value store.
 *                             The keyspace must exist.
 *
 * @property {string} table The name of the table within the specified keyspace dedicated to storing
 *                          key-value pairs. The table will be created if it does not exist.
 *
 * @property {string} [keyDelimiter="/"] An optional delimiter used to structure complex keys. Defaults to '/'.
 *                    This delimiter is used for parsing complex keys (e.g., hierarchical keys) when performing
 *                    operations that involve key prefixes or segmentation.
 */
export interface CassandraKVOptions extends CassandraClientArgs {
  // Existing keyspace holding the KV table.
  keyspace: string;
  // Table name; created lazily on first use if absent.
  table: string;
  // Segment separator for hierarchical keys; defaults to "/".
  keyDelimiter?: string;
}
/**
 * A concrete implementation of BaseStore for interacting with a Cassandra database.
 * It provides methods to get, set, delete, and yield keys based on specified criteria.
 *
 * Storage layout: one row per key with columns `key` (text, partition key),
 * `key_map` (map<tinyint,text> of key segments, indexed for prefix queries),
 * and `val` (blob).
 */
export class CassandraKVStore extends BaseStore<string, Uint8Array> {
  lc_namespace = ["langchain", "storage"];
  // Lazily initialized in ensureTable(); undefined until first operation.
  private cassandraTable: CassandraTable;
  private options: CassandraKVOptions;
  // Column descriptors used throughout the class.
  private colKey: Column;
  private colKeyMap: Column;
  private colVal: Column;
  private keyDelimiter: string;
  // Max keys per IN(...) clause; mget/mdelete are chunked to this size.
  protected inClauseSize = 1000;
  // Cassandra fetch size (page size) used when yielding keys.
  protected yieldKeysFetchSize = 5000;
  constructor(options: CassandraKVOptions) {
    super(options);
    this.options = options;
    this.colKey = { name: "key", type: "text", partition: true };
    this.colKeyMap = { name: "key_map", type: "map<tinyint,text>" };
    this.colVal = { name: "val", type: "blob" };
    this.keyDelimiter = options.keyDelimiter || "/";
  }
  /**
   * Retrieves the values associated with an array of keys from the Cassandra database.
   * It chunks requests for large numbers of keys to manage performance and Cassandra limitations.
   * @param keys An array of keys for which to retrieve values.
   * @returns A promise that resolves with an array of Uint8Array or undefined, corresponding to each key.
   */
  async mget(keys: string[]): Promise<(Uint8Array | undefined)[]> {
    await this.ensureTable();
    const processFunction = async (
      chunkKeys: string[]
    ): Promise<(Uint8Array | undefined)[]> => {
      const chunkResults = await this.cassandraTable.select(
        [this.colKey, this.colVal],
        [{ name: this.colKey.name, operator: "IN", value: chunkKeys }]
      );
      // For larger chunks, build a Map for O(1) row lookup instead of a
      // linear find() per key; 25 is the crossover heuristic used here.
      const useMap = chunkKeys.length > 25;
      const rowsMap = useMap
        ? new Map(chunkResults.rows.map((row) => [row[this.colKey.name], row]))
        : null;
      // Preserve input order; missing keys map to undefined.
      return chunkKeys.map((key) => {
        const row =
          useMap && rowsMap
            ? rowsMap.get(key)
            : chunkResults.rows.find((row) => row[this.colKey.name] === key);
        if (row && row[this.colVal.name]) {
          // Wrap the driver's Buffer without copying the underlying bytes.
          const buffer = row[this.colVal.name];
          return new Uint8Array(
            buffer.buffer,
            buffer.byteOffset,
            buffer.byteLength
          );
        }
        return undefined;
      });
    };
    const result = await this.processInChunks<Uint8Array | undefined>(
      keys,
      processFunction
    );
    return result || [];
  }
  /**
   * Sets multiple key-value pairs in the Cassandra database.
   * Each key-value pair is processed to ensure compatibility with Cassandra's storage requirements.
   * @param keyValuePairs An array of key-value pairs to set in the database.
   * @returns A promise that resolves when all key-value pairs have been set.
   */
  async mset(keyValuePairs: [string, Uint8Array][]): Promise<void> {
    await this.ensureTable();
    const values = keyValuePairs.map(([key, value]) => {
      // Split the key on the delimiter and record segment index -> segment
      // in key_map; yieldKeys() filters on these map entries for prefixes.
      const keySegments = key.split(this.keyDelimiter);
      const keyMap = keySegments.reduce<Record<number, string>>(
        (acc, segment, index) => {
          acc[index] = segment;
          return acc;
        },
        {}
      );
      // View the Uint8Array as a Buffer (no copy) for the blob column.
      const bufferValue = Buffer.from(
        value.buffer,
        value.byteOffset,
        value.byteLength
      );
      return [key, keyMap, bufferValue];
    });
    await this.cassandraTable.upsert(values, [
      this.colKey,
      this.colKeyMap,
      this.colVal,
    ]);
  }
  /**
   * Deletes multiple keys and their associated values from the Cassandra database.
   * Deletions are chunked to respect the IN-clause size limit.
   * @param keys An array of keys to delete from the database.
   * @returns A promise that resolves when all specified keys have been deleted.
   */
  async mdelete(keys: string[]): Promise<void> {
    if (keys.length > 0) {
      await this.ensureTable();
      const processFunction = async (chunkKeys: string[]): Promise<void> => {
        const filter: Filter = {
          name: this.colKey.name,
          operator: "IN",
          value: chunkKeys,
        };
        await this.cassandraTable.delete(filter);
      };
      await this.processInChunks(keys, processFunction);
    }
  }
  /**
   * Yields keys from the Cassandra database optionally based on a prefix, based
   * on the store's keyDelimiter. This method pages through results efficiently
   * for large datasets.
   * @param prefix An optional prefix to filter the keys to be yielded.
   * @returns An async generator that yields keys from the database.
   */
  async *yieldKeys(prefix?: string): AsyncGenerator<string> {
    await this.ensureTable();
    const filter: Filter[] = [];
    if (prefix) {
      let segments = prefix.split(this.keyDelimiter);
      // Remove the last segment only if it is empty (due to a trailing delimiter)
      if (segments[segments.length - 1] === "") {
        segments = segments.slice(0, -1);
      }
      // One equality filter per key_map entry: key_map[i] = segment_i.
      segments.forEach((segment, index) => {
        filter.push({
          name: `${this.colKeyMap.name}[${index}]`,
          operator: "=",
          value: segment,
        });
      });
    }
    // Page through results using Cassandra's pageState token.
    let currentPageState;
    do {
      const results = await this.cassandraTable.select(
        [this.colKey],
        filter,
        undefined, // orderBy
        undefined, // limit
        false, // allowFiltering
        this.yieldKeysFetchSize,
        currentPageState
      );
      for (const row of results.rows) {
        yield row[this.colKey.name];
      }
      currentPageState = results.pageState;
    } while (currentPageState);
  }
  /**
   * Ensures the Cassandra table is initialized and ready for operations.
   * This method is called internally before database operations.
   *
   * NOTE(review): concurrent first calls can each construct a CassandraTable
   * (no in-flight-promise memoization) — confirm CassandraTable tolerates
   * concurrent creation. Also, `await new CassandraTable(...)` only awaits
   * if the instance is thenable; presumably setup happens elsewhere — verify.
   * @returns A promise that resolves when the table is ensured to exist and be accessible.
   */
  private async ensureTable(): Promise<void> {
    if (this.cassandraTable) {
      return;
    }
    const tableConfig = {
      ...this.options,
      primaryKey: [this.colKey],
      nonKeyColumns: [this.colKeyMap, this.colVal],
      indices: [
        {
          name: this.colKeyMap.name,
          value: `( ENTRIES (${this.colKeyMap.name}))`,
        },
      ],
    };
    this.cassandraTable = await new CassandraTable(tableConfig);
  }
  /**
   * Processes an array of keys in chunks, applying a given processing function to each chunk.
   * This method is designed to handle large sets of keys by breaking them down into smaller
   * manageable chunks, applying the processing function to each chunk sequentially. This approach
   * helps in managing resource utilization and adhering to database query limitations.
   *
   * The method is generic, allowing for flexible processing functions that can either perform actions
   * without returning a result (e.g., deletion operations) or return a result (e.g., data retrieval).
   * This design enables the method to be used across a variety of batch processing scenarios.
   *
   * @template T The type of elements in the result array when the processFunction returns data.
   * @param keys The complete array of keys to be processed, chunked by CHUNK_SIZE.
   * @param processFunction Applied to each chunk; may return data (T[]) or void.
   * @param CHUNK_SIZE (optional) Maximum chunk size; defaults to this.inClauseSize.
   *
   * @returns The concatenated results of all chunks, or undefined when no chunk
   *          produced data (including the empty-keys case — callers such as
   *          mget() normalize this with `|| []`).
   */
  private async processInChunks<T>(
    keys: string[],
    processFunction: (chunkKeys: string[]) => Promise<T[] | void>,
    CHUNK_SIZE: number = this.inClauseSize
  ): Promise<T[] | void> {
    let results: T[] = [];
    for (let i = 0; i < keys.length; i += CHUNK_SIZE) {
      const chunkKeys = keys.slice(i, i + CHUNK_SIZE);
      const chunkResult: T[] | void = await processFunction(chunkKeys);
      if (Array.isArray(chunkResult)) {
        results = results.concat(chunkResult);
      }
    }
    return results.length > 0 ? results : undefined;
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/upstash_redis.ts
|
import { Redis as UpstashRedis, type RedisConfigNodejs } from "@upstash/redis";
import { BaseStore } from "@langchain/core/stores";
/**
 * Type definition for the input parameters required to initialize an
 * instance of the UpstashStoreInput class.
 * Exactly one of `config` or `client` must be provided.
 */
export interface UpstashRedisStoreInput {
  // Optional TTL (seconds) applied to every key written via mset.
  sessionTTL?: number;
  // Connection config used to construct a client when `client` is absent.
  config?: RedisConfigNodejs;
  // Pre-configured Upstash Redis client; takes precedence over `config`.
  client?: UpstashRedis;
  /**
   * The amount of keys to retrieve per batch when yielding keys.
   * @default 1000
   */
  yieldKeysScanBatchSize?: number;
  /**
   * The namespace to use for the keys in the database.
   */
  namespace?: string;
}
/**
* Class that extends the BaseStore class to interact with an Upstash Redis
* database. It provides methods for getting, setting, and deleting data,
* as well as yielding keys from the database.
* @example
* ```typescript
* const store = new UpstashRedisStore({
* client: new Redis({
* url: "your-upstash-redis-url",
* token: "your-upstash-redis-token",
* }),
* });
* await store.mset([
* ["message:id:0", "encoded-ai-message"],
* ["message:id:1", "encoded-human-message"],
* ]);
* const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]);
* const yieldedKeys = [];
* for await (const key of store.yieldKeys("message:id")) {
* yieldedKeys.push(key);
* }
* await store.mdelete(yieldedKeys);
* ```
*/
export class UpstashRedisStore extends BaseStore<string, Uint8Array> {
  lc_namespace = ["langchain", "storage"];

  protected client: UpstashRedis;

  /** Optional key namespace; prefixed onto every key with a "/" delimiter. */
  protected namespace?: string;

  /** SCAN batch size used by yieldKeys. */
  protected yieldKeysScanBatchSize = 1000;

  /** Optional TTL (seconds) applied to keys written by mset. */
  private sessionTTL?: number;

  constructor(fields: UpstashRedisStoreInput) {
    super(fields);
    if (fields.client) {
      this.client = fields.client;
    } else if (fields.config) {
      this.client = new UpstashRedis(fields.config);
    } else {
      throw new Error(
        `Upstash Redis store requires either a config object or a pre-configured client.`
      );
    }
    this.sessionTTL = fields.sessionTTL;
    this.yieldKeysScanBatchSize =
      fields.yieldKeysScanBatchSize ?? this.yieldKeysScanBatchSize;
    this.namespace = fields.namespace;
  }

  /** Prepends the namespace (if any) to a key. */
  _getPrefixedKey(key: string) {
    if (this.namespace) {
      const delimiter = "/";
      return `${this.namespace}${delimiter}${key}`;
    }
    return key;
  }

  /** Strips the namespace prefix (if any) from a key read back from Redis. */
  _getDeprefixedKey(key: string) {
    if (this.namespace) {
      const delimiter = "/";
      return key.slice(this.namespace.length + delimiter.length);
    }
    return key;
  }

  /**
   * Gets multiple keys from the Upstash Redis database.
   * Values come back from the REST client as strings (or parsed JSON
   * objects) and are re-encoded to UTF-8 bytes here.
   * @param keys Array of keys to be retrieved.
   * @returns An array of retrieved values (undefined for missing keys).
   */
  async mget(keys: string[]) {
    const encoder = new TextEncoder();
    const prefixedKeys = keys.map(this._getPrefixedKey.bind(this));
    const retrievedValues = await this.client.mget<Uint8Array[]>(
      ...prefixedKeys
    );
    return retrievedValues.map((value) => {
      if (!value) {
        return undefined;
      } else if (typeof value === "object") {
        // The Upstash client auto-deserializes JSON; serialize it back.
        return encoder.encode(JSON.stringify(value));
      } else {
        return encoder.encode(value);
      }
    });
  }

  /**
   * Sets multiple keys in the Upstash Redis database.
   * Values are stored as UTF-8 strings (the REST API is text-based),
   * honoring sessionTTL when configured.
   * @param keyValuePairs Array of key-value pairs to be set.
   * @returns Promise that resolves when all keys have been set.
   */
  async mset(keyValuePairs: [string, Uint8Array][]): Promise<void> {
    const decoder = new TextDecoder();
    const encodedKeyValuePairs = keyValuePairs.map(([key, value]) => [
      this._getPrefixedKey(key),
      decoder.decode(value),
    ]);
    const pipeline = this.client.pipeline();
    for (const [key, value] of encodedKeyValuePairs) {
      if (this.sessionTTL) {
        pipeline.setex(key, this.sessionTTL, value);
      } else {
        pipeline.set(key, value);
      }
    }
    await pipeline.exec();
  }

  /**
   * Deletes multiple keys from the Upstash Redis database.
   * @param keys Array of keys to be deleted.
   * @returns Promise that resolves when all keys have been deleted.
   */
  async mdelete(keys: string[]): Promise<void> {
    await this.client.del(...keys.map(this._getPrefixedKey.bind(this)));
  }

  /**
   * Yields keys from the Upstash Redis database.
   * @param prefix Optional prefix to filter the keys. A wildcard (*) is always appended to the end.
   * @returns An AsyncGenerator that yields keys from the Upstash Redis database.
   */
  async *yieldKeys(prefix?: string): AsyncGenerator<string> {
    let pattern;
    if (prefix) {
      // Append a single wildcard unless the caller already supplied one.
      // (Previously a second "*" was appended unconditionally, producing
      // "prefix**"; "**" matches the same keys as "*" in Redis glob syntax,
      // so this only aligns the pattern with the sibling RedisByteStore.)
      const wildcardPrefix = prefix.endsWith("*") ? prefix : `${prefix}*`;
      pattern = this._getPrefixedKey(wildcardPrefix);
    } else {
      pattern = this._getPrefixedKey("*");
    }
    let [cursor, batch] = await this.client.scan(0, {
      match: pattern,
      count: this.yieldKeysScanBatchSize,
    });
    for (const key of batch) {
      yield this._getDeprefixedKey(key);
    }
    // Cursor "0" signals the end of a full SCAN iteration.
    while (cursor !== "0") {
      [cursor, batch] = await this.client.scan(cursor, {
        match: pattern,
        count: this.yieldKeysScanBatchSize,
      });
      for (const key of batch) {
        yield this._getDeprefixedKey(key);
      }
    }
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/ioredis.ts
|
import type { Redis } from "ioredis";
import { BaseStore } from "@langchain/core/stores";
/**
* Class that extends the BaseStore class to interact with a Redis
* database. It provides methods for getting, setting, and deleting data,
* as well as yielding keys from the database.
* @example
* ```typescript
* const store = new RedisByteStore({ client: new Redis({}) });
* await store.mset([
* [
* "message:id:0",
* new TextEncoder().encode(JSON.stringify(new AIMessage("ai stuff..."))),
* ],
* [
* "message:id:1",
* new TextEncoder().encode(
* JSON.stringify(new HumanMessage("human stuff...")),
* ),
* ],
* ]);
* const retrievedMessages = await store.mget(["message:id:0", "message:id:1"]);
* console.log(retrievedMessages.map((v) => new TextDecoder().decode(v)));
* const yieldedKeys = [];
* for await (const key of store.yieldKeys("message:id:")) {
* yieldedKeys.push(key);
* }
* console.log(yieldedKeys);
* await store.mdelete(yieldedKeys);
* ```
*/
export class RedisByteStore extends BaseStore<string, Uint8Array> {
  lc_namespace = ["langchain", "storage"];

  protected client: Redis;

  /** Optional TTL (seconds) applied to keys written by mset. */
  protected ttl?: number;

  /** Optional key namespace; prefixed onto every key with a "/" delimiter. */
  protected namespace?: string;

  /** SCAN batch size used by yieldKeys. */
  protected yieldKeysScanBatchSize = 1000;

  constructor(fields: {
    client: Redis;
    ttl?: number;
    namespace?: string;
    yieldKeysScanBatchSize?: number;
  }) {
    super(fields);
    this.client = fields.client;
    this.ttl = fields.ttl;
    this.namespace = fields.namespace;
    this.yieldKeysScanBatchSize =
      fields.yieldKeysScanBatchSize ?? this.yieldKeysScanBatchSize;
  }

  /** Prepends the namespace (if any) to a key. */
  _getPrefixedKey(key: string) {
    if (this.namespace) {
      const delimiter = "/";
      return `${this.namespace}${delimiter}${key}`;
    }
    return key;
  }

  /** Strips the namespace prefix (if any) from a key read back from Redis. */
  _getDeprefixedKey(key: string) {
    if (this.namespace) {
      const delimiter = "/";
      return key.slice(this.namespace.length + delimiter.length);
    }
    return key;
  }

  /**
   * Gets multiple keys from the Redis database.
   * @param keys Array of keys to be retrieved.
   * @returns An array of retrieved values (undefined for missing keys).
   */
  async mget(keys: string[]) {
    const prefixedKeys = keys.map(this._getPrefixedKey.bind(this));
    const retrievedValues = await this.client.mgetBuffer(prefixedKeys);
    return retrievedValues.map((value) => {
      if (!value) {
        return undefined;
      } else {
        return value;
      }
    });
  }

  /**
   * Sets multiple keys in the Redis database.
   * Values are written as raw bytes: the previous implementation decoded
   * each Uint8Array through TextDecoder before storing, which corrupts
   * non-UTF-8 binary data (invalid sequences become U+FFFD) and breaks the
   * byte round-trip with mgetBuffer. Passing a Buffer to ioredis preserves
   * the bytes exactly; valid UTF-8 payloads are stored identically.
   * @param keyValuePairs Array of key-value pairs to be set.
   * @returns Promise that resolves when all keys have been set.
   */
  async mset(keyValuePairs: [string, Uint8Array][]): Promise<void> {
    const pipeline = this.client.pipeline();
    for (const [key, value] of keyValuePairs) {
      const prefixedKey = this._getPrefixedKey(key);
      // View (not copy) the Uint8Array's bytes as a Buffer.
      const buffer = Buffer.from(value.buffer, value.byteOffset, value.byteLength);
      if (this.ttl) {
        pipeline.set(prefixedKey, buffer, "EX", this.ttl);
      } else {
        pipeline.set(prefixedKey, buffer);
      }
    }
    await pipeline.exec();
  }

  /**
   * Deletes multiple keys from the Redis database.
   * @param keys Array of keys to be deleted.
   * @returns Promise that resolves when all keys have been deleted.
   */
  async mdelete(keys: string[]): Promise<void> {
    await this.client.del(...keys.map(this._getPrefixedKey.bind(this)));
  }

  /**
   * Yields keys from the Redis database.
   * @param prefix Optional prefix to filter the keys.
   * @returns An AsyncGenerator that yields keys from the Redis database.
   */
  async *yieldKeys(prefix?: string): AsyncGenerator<string> {
    let pattern;
    if (prefix) {
      // Append a wildcard unless the caller already supplied one.
      const wildcardPrefix = prefix.endsWith("*") ? prefix : `${prefix}*`;
      pattern = this._getPrefixedKey(wildcardPrefix);
    } else {
      pattern = this._getPrefixedKey("*");
    }
    let [cursor, batch] = await this.client.scan(
      0,
      "MATCH",
      pattern,
      "COUNT",
      this.yieldKeysScanBatchSize
    );
    for (const key of batch) {
      yield this._getDeprefixedKey(key);
    }
    // Cursor "0" signals the end of a full SCAN iteration.
    while (cursor !== "0") {
      [cursor, batch] = await this.client.scan(
        cursor,
        "MATCH",
        pattern,
        "COUNT",
        this.yieldKeysScanBatchSize
      );
      for (const key of batch) {
        yield this._getDeprefixedKey(key);
      }
    }
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/tests/vercel_kv.int.test.ts
|
/* eslint-disable @typescript-eslint/no-non-null-assertion */
/* eslint-disable no-process-env */
import { test } from "@jest/globals";
import { createClient } from "@vercel/kv";
import { VercelKVStore } from "../vercel_kv.js";
// Builds a Vercel KV client from environment variables, failing fast when
// either credential is unset.
const getClient = () => {
  const { VERCEL_KV_API_URL: url, VERCEL_KV_API_TOKEN: token } = process.env;
  if (!url || !token) {
    throw new Error(
      "VERCEL_KV_API_URL and VERCEL_KV_API_TOKEN must be set in the environment"
    );
  }
  return createClient({ url, token });
};
describe("VercelKVStore", () => {
  const client = getClient();
  // Wipe the database between tests so runs are independent.
  afterEach(async () => await client.flushall());
  // Fix: test title previously misspelled "perform" as "preform".
  test("VercelKVStore can perform all operations", async () => {
    const store = new VercelKVStore({
      client,
    });
    const value1 = new Date().toISOString();
    const value2 = new Date().toISOString() + new Date().toISOString();
    const encoder = new TextEncoder();
    await store.mset([
      ["key1", encoder.encode(value1)],
      ["key2", encoder.encode(value2)],
    ]);
    const retrievedValues = await store.mget(["key1", "key2"]);
    expect(retrievedValues).toEqual([
      encoder.encode(value1),
      encoder.encode(value2),
    ]);
    // Drain the generator to ensure yieldKeys iterates without throwing.
    // @ts-expect-error unused var
    for await (const key of store.yieldKeys()) {
      // console.log(key);
    }
    await store.mdelete(["key1", "key2"]);
    const retrievedValues2 = await store.mget(["key1", "key2"]);
    expect(retrievedValues2).toEqual([undefined, undefined]);
  });
  test("VercelKVStore can yield keys with prefix", async () => {
    const prefix = "prefix_";
    const prefixedKeys = [`${prefix}key1`, `${prefix}key2`];
    const store = new VercelKVStore({
      client,
    });
    const value1 = new Date().toISOString();
    const value2 = new Date().toISOString() + new Date().toISOString();
    const encoder = new TextEncoder();
    await store.mset([
      [prefixedKeys[0], encoder.encode(value1)],
      [prefixedKeys[1], encoder.encode(value2)],
    ]);
    const retrievedValues = await store.mget(prefixedKeys);
    expect(retrievedValues).toEqual([
      encoder.encode(value1),
      encoder.encode(value2),
    ]);
    const yieldedKeys = [];
    for await (const key of store.yieldKeys(prefix)) {
      yieldedKeys.push(key);
    }
    // console.log(yieldedKeys);
    expect(yieldedKeys).toEqual(expect.arrayContaining(prefixedKeys));
    await store.mdelete(prefixedKeys);
    const retrievedValues2 = await store.mget(prefixedKeys);
    expect(retrievedValues2).toEqual([undefined, undefined]);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/tests/upstash_redis.int.test.ts
|
/* eslint-disable no-process-env */
import { test } from "@jest/globals";
import { Redis as UpstashRedis } from "@upstash/redis";
import { UpstashRedisStore } from "../upstash_redis.js";
// Builds an Upstash Redis REST client from environment variables, failing
// fast when either credential is unset.
const getClient = () => {
  const url = process.env.UPSTASH_REDIS_REST_URL;
  const token = process.env.UPSTASH_REDIS_REST_TOKEN;
  if (!url || !token) {
    throw new Error("Missing Upstash Redis env variables.");
  }
  return new UpstashRedis({ url, token });
};
// NOTE: suite is skipped by default; remove ".skip" to run against live Upstash.
describe.skip("UpstashRedisStore", () => {
  const keys = ["key1", "key2"];
  const client = getClient();
  // Clean up the shared keys after each test.
  afterEach(async () => {
    await client.del(...keys);
  });
  test("UpstashRedis can write & read values", async () => {
    const encoder = new TextEncoder();
    const decoder = new TextDecoder();
    const store = new UpstashRedisStore({
      client,
    });
    const value1 = new Date().toISOString();
    const value2 = new Date().toISOString() + new Date().toISOString();
    await store.mset([
      [keys[0], encoder.encode(value1)],
      [keys[1], encoder.encode(value2)],
    ]);
    const retrievedValues = await store.mget([keys[0], keys[1]]);
    const everyValueDefined = retrievedValues.every((v) => v !== undefined);
    expect(everyValueDefined).toBe(true);
    // Values round-trip through UTF-8 encode/decode.
    expect(retrievedValues.map((v) => decoder.decode(v))).toEqual([
      value1,
      value2,
    ]);
  });
  test("UpstashRedis can delete values", async () => {
    const encoder = new TextEncoder();
    const store = new UpstashRedisStore({
      client,
    });
    const value1 = new Date().toISOString();
    const value2 = new Date().toISOString() + new Date().toISOString();
    await store.mset([
      [keys[0], encoder.encode(value1)],
      [keys[1], encoder.encode(value2)],
    ]);
    await store.mdelete(keys);
    const retrievedValues = await store.mget([keys[0], keys[1]]);
    const everyValueUndefined = retrievedValues.every((v) => v === undefined);
    expect(everyValueUndefined).toBe(true);
  });
  test("UpstashRedis can yield keys with prefix", async () => {
    const prefix = "prefix_";
    const keysWithPrefix = keys.map((key) => `${prefix}${key}`);
    const encoder = new TextEncoder();
    const store = new UpstashRedisStore({
      client,
    });
    const value = new Date().toISOString();
    await store.mset(keysWithPrefix.map((key) => [key, encoder.encode(value)]));
    const yieldedKeys = [];
    for await (const key of store.yieldKeys(prefix)) {
      yieldedKeys.push(key);
    }
    // console.log("Yielded keys:", yieldedKeys);
    expect(yieldedKeys.sort()).toEqual(keysWithPrefix.sort())
    // afterEach won't automatically delete these since we're applying a prefix.
    await store.mdelete(keysWithPrefix);
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/tests/ioredis.int.test.ts
|
/* eslint-disable no-promise-executor-return */
import { test } from "@jest/globals";
import { Redis } from "ioredis";
import { RedisByteStore } from "../ioredis.js";
describe("RedisByteStore", () => {
  // Connects to a local Redis with default options.
  const client = new Redis({});
  // Wipe the database between tests so runs are independent.
  afterEach(async () => await client.flushall());
  afterAll(() => client.disconnect());
  test("RedisByteStore", async () => {
    const store = new RedisByteStore({
      client,
    });
    const encoder = new TextEncoder();
    const decoder = new TextDecoder();
    const value1 = new Date().toISOString();
    const value2 = new Date().toISOString() + new Date().toISOString();
    await store.mset([
      ["key1", encoder.encode(value1)],
      ["key2", encoder.encode(value2)],
    ]);
    const retrievedValues = await store.mget(["key1", "key2"]);
    expect(retrievedValues.map((v) => decoder.decode(v))).toEqual([
      value1,
      value2,
    ]);
    // Drain the generator to ensure yieldKeys iterates without throwing.
    // @ts-expect-error unused var
    for await (const key of store.yieldKeys()) {
      // console.log(key);
    }
    await store.mdelete(["key1", "key2"]);
    const retrievedValues2 = await store.mget(["key1", "key2"]);
    expect(retrievedValues2).toEqual([undefined, undefined]);
  });
  test("RedisByteStore yield keys with prefix", async () => {
    const prefix = "prefix_";
    const prefixedKeys = [`${prefix}key1`, `${prefix}key2`];
    const store = new RedisByteStore({
      client,
    });
    const encoder = new TextEncoder();
    const value1 = new Date().toISOString();
    const value2 = new Date().toISOString() + new Date().toISOString();
    await store.mset([
      [prefixedKeys[0], encoder.encode(value1)],
      [prefixedKeys[1], encoder.encode(value2)],
    ]);
    const yieldedKeys = [];
    for await (const key of store.yieldKeys(prefix)) {
      yieldedKeys.push(key);
    }
    // console.log(yieldedKeys);
    expect(yieldedKeys).toEqual(expect.arrayContaining(prefixedKeys));
  });
});
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage
|
lc_public_repos/langchainjs/libs/langchain-community/src/storage/tests/cassandra.int.test.ts
|
// /* eslint-disable no-process-env */
// Hangs when run with other tests, uncomment for development
// import { test, expect, describe } from "@jest/globals";
// import { CassandraClientFactory } from "../../utils/cassandra.js";
// import { CassandraKVStore } from "../cassandra.js";
test("Empty test to prevent runner from complaining", async () => {});
// const cassandraConfig = {
// serviceProviderArgs: {
// astra: {
// token: process.env.ASTRA_TOKEN as string,
// endpoint: process.env.ASTRA_DB_ENDPOINT as string,
// },
// },
// keyspace: "test",
// table: "test_kv",
// };
// let client;
// // For internal testing:
// // 1. switch "describe.skip(" to "describe("
// // 2. Export ASTRA_DB_ENDPOINT and ASTRA_TOKEN; "test" keyspace should exist
// // 3. cd langchainjs/libs/langchain-community
// // 4. yarn test:single src/storage/tests/cassandra.int.test.ts
// // Once manual testing is complete, re-instate the ".skip"
// describe.skip("CassandraKVStore", () => {
// let store: CassandraKVStore;
// beforeAll(async () => {
// client = await CassandraClientFactory.getClient(cassandraConfig);
// await client.execute("DROP TABLE IF EXISTS test.test_kv;");
// store = new CassandraKVStore(cassandraConfig);
// });
// test("CassandraKVStore can perform all operations", async () => {
// // Using TextEncoder to simulate encoding of string data to binary format
// const encoder = new TextEncoder();
// const decoder = new TextDecoder();
// const value1 = encoder.encode(new Date().toISOString());
// const value2 = encoder.encode(
// new Date().toISOString() + new Date().toISOString()
// );
// // mset
// await store.mset([
// ["key1", value1],
// ["key2", value2],
// ]);
// // mget
// const retrievedValues = await store.mget(["key1", "key2"]);
// expect(retrievedValues.map((v) => decoder.decode(v))).toEqual([
// decoder.decode(value1),
// decoder.decode(value2),
// ]);
// // yieldKeys
// const keys = [];
// for await (const key of store.yieldKeys()) {
// keys.push(key);
// }
// expect(keys).toContain("key1");
// expect(keys).toContain("key2");
// // mdelete
// await store.mdelete(["key1", "key2"]);
// const retrievedValuesAfterDelete = await store.mget(["key1", "key2"]);
// expect(retrievedValuesAfterDelete).toEqual([undefined, undefined]);
// });
// describe.skip("CassandraKVStore key prefix retrieval", () => {
// beforeAll(async () => {
// client = await CassandraClientFactory.getClient(cassandraConfig);
// await client.execute("DROP TABLE IF EXISTS test.test_kv;");
// store = new CassandraKVStore(cassandraConfig);
// await store.mset([
// ["a/b/c", new TextEncoder().encode("value abc")],
// ["a/b/d", new TextEncoder().encode("value abd")],
// ["a/e/f", new TextEncoder().encode("value aef")],
// ]);
// });
// test.each([
// ["a", ["a/b/c", "a/b/d", "a/e/f"]],
// ["a/", ["a/b/c", "a/b/d", "a/e/f"]],
// ["a/b", ["a/b/c", "a/b/d"]],
// ["a/b/", ["a/b/c", "a/b/d"]],
// ["a/e", ["a/e/f"]],
// ["a/e/", ["a/e/f"]],
// ["b", []],
// ])(
// "yieldKeys with prefix '%s' returns expected keys",
// async (prefix, expectedKeys) => {
// const retrievedKeys = [];
// for await (const key of store.yieldKeys(prefix)) {
// retrievedKeys.push(key);
// }
// expect(retrievedKeys.sort()).toEqual(expectedKeys.sort());
// }
// );
// });
// });
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/types/assemblyai-types.ts
|
import { BaseServiceParams } from "assemblyai";
import { Optional } from "./type-utils.js";
// Re-export every type from the AssemblyAI SDK for consumer convenience.
export type * from "assemblyai";
// SDK service params with `apiKey` made optional (it may be resolved from
// the environment by the caller instead of being passed explicitly).
export type AssemblyAIOptions = Optional<BaseServiceParams, "apiKey">;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/types/type-utils.ts
|
/**
 * Marks only the keys listed in `K` as optional on `T`, leaving every other
 * key exactly as declared. Contrast with `Partial<T>`, which relaxes all keys.
 *
 * Implemented as: keep the non-`K` keys verbatim, and intersect with a
 * `Partial` of just the `K` keys. (`Omit<T, K>` is defined in the standard
 * library as `Pick<T, Exclude<keyof T, K>>`, so this is type-identical to the
 * `Omit`-based spelling.)
 */
export type Optional<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>> &
  Partial<Pick<T, K>>;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/types/pdf-parse.d.ts
|
/**
* Type definitions adapted from pdfjs-dist
* https://github.com/mozilla/pdfjs-dist/blob/master/types/src/display/api.d.ts
*/
declare module "pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js" {
  // Any of the standard JavaScript typed-array views; used to hand raw PDF
  // bytes to the API.
  export type TypedArray =
    | Int8Array
    | Uint8Array
    | Uint8ClampedArray
    | Int16Array
    | Uint16Array
    | Int32Array
    | Uint32Array
    | Float32Array
    | Float64Array;
  // Accepted binary representations of a PDF document. Per the
  // `DocumentInitParameters.data` docs below, a `string` is treated as a
  // binary string (e.g. the result of `atob()`), not as a URL.
  export type BinaryData = TypedArray | ArrayBuffer | Array<number> | string;
  // Proxy for an indirect object reference inside the PDF file.
  // NOTE(review): `num`/`gen` presumably map to the PDF object number and
  // generation number — confirm against the pdfjs-dist docs.
  export type RefProxy = {
    num: number;
    gen: number;
  };
/**
* Document initialization / loading parameters object.
*/
export type DocumentInitParameters = {
/**
* - The URL of the PDF.
*/
url?: string | URL | undefined;
/**
* - Binary PDF data.
* Use TypedArrays (Uint8Array) to improve the memory usage. If PDF data is
* BASE64-encoded, use `atob()` to convert it to a binary string first.
*
* NOTE: If TypedArrays are used they will generally be transferred to the
* worker-thread. This will help reduce main-thread memory usage, however
* it will take ownership of the TypedArrays.
*/
data?: BinaryData | undefined;
/**
* - Basic authentication headers.
*/
httpHeaders?: Object | undefined;
/**
* - Indicates whether or not
* cross-site Access-Control requests should be made using credentials such
* as cookies or authorization headers. The default is `false`.
*/
withCredentials?: boolean | undefined;
/**
* - For decrypting password-protected PDFs.
*/
password?: string | undefined;
/**
* - The PDF file length. It's used for progress
* reports and range requests operations.
*/
length?: number | undefined;
/**
* - Allows for using a custom range
* transport implementation.
*/
range?: PDFDataRangeTransport | undefined;
/**
* - Specify maximum number of bytes fetched
* per range request. The default value is {@link DEFAULT_RANGE_CHUNK_SIZE }.
*/
rangeChunkSize?: number | undefined;
/**
* - The worker that will be used for loading and
* parsing the PDF data.
*/
worker?: PDFWorker | undefined;
/**
* - Controls the logging level; the constants
* from {@link VerbosityLevel } should be used.
*/
verbosity?: number | undefined;
/**
* - The base URL of the document, used when
* attempting to recover valid absolute URLs for annotations, and outline
* items, that (incorrectly) only specify relative URLs.
*/
docBaseUrl?: string | undefined;
/**
* - The URL where the predefined Adobe CMaps are
* located. Include the trailing slash.
*/
cMapUrl?: string | undefined;
/**
* - Specifies if the Adobe CMaps are binary
* packed or not. The default value is `true`.
*/
cMapPacked?: boolean | undefined;
/**
* - The factory that will be used when
* reading built-in CMap files. Providing a custom factory is useful for
* environments without Fetch API or `XMLHttpRequest` support, such as
* Node.js. The default value is {DOMCMapReaderFactory}.
*/
CMapReaderFactory?: Object | undefined;
/**
* - When `true`, fonts that aren't
* embedded in the PDF document will fallback to a system font.
* The default value is `true` in web environments and `false` in Node.js;
* unless `disableFontFace === true` in which case this defaults to `false`
* regardless of the environment (to prevent completely broken fonts).
*/
useSystemFonts?: boolean | undefined;
/**
* - The URL where the standard font
* files are located. Include the trailing slash.
*/
standardFontDataUrl?: string | undefined;
/**
* - The factory that will be used
* when reading the standard font files. Providing a custom factory is useful
* for environments without Fetch API or `XMLHttpRequest` support, such as
* Node.js. The default value is {DOMStandardFontDataFactory}.
*/
StandardFontDataFactory?: Object | undefined;
/**
* - Enable using the Fetch API in the
* worker-thread when reading CMap and standard font files. When `true`,
* the `CMapReaderFactory` and `StandardFontDataFactory` options are ignored.
* The default value is `true` in web environments and `false` in Node.js.
*/
useWorkerFetch?: boolean | undefined;
/**
* - Reject certain promises, e.g.
* `getOperatorList`, `getTextContent`, and `RenderTask`, when the associated
* PDF data cannot be successfully parsed, instead of attempting to recover
* whatever possible of the data. The default value is `false`.
*/
stopAtErrors?: boolean | undefined;
/**
* - The maximum allowed image size in total
* pixels, i.e. width * height. Images above this value will not be rendered.
* Use -1 for no limit, which is also the default value.
*/
maxImageSize?: number | undefined;
/**
* - Determines if we can evaluate strings
* as JavaScript. Primarily used to improve performance of font rendering, and
* when parsing PDF functions. The default value is `true`.
*/
isEvalSupported?: boolean | undefined;
/**
* - Determines if we can use
* `OffscreenCanvas` in the worker. Primarily used to improve performance of
* image conversion/rendering.
* The default value is `true` in web environments and `false` in Node.js.
*/
isOffscreenCanvasSupported?: boolean | undefined;
/**
* - The integer value is used to
* know when an image must be resized (uses `OffscreenCanvas` in the worker).
* If it's -1 then a possibly slow algorithm is used to guess the max value.
*/
canvasMaxAreaInBytes?: boolean | undefined;
/**
* - By default fonts are converted to
* OpenType fonts and loaded via the Font Loading API or `@font-face` rules.
* If disabled, fonts will be rendered using a built-in font renderer that
* constructs the glyphs with primitive path commands.
* The default value is `false` in web environments and `true` in Node.js.
*/
disableFontFace?: boolean | undefined;
/**
* - Include additional properties,
* which are unused during rendering of PDF documents, when exporting the
* parsed font data from the worker-thread. This may be useful for debugging
* purposes (and backwards compatibility), but note that it will lead to
* increased memory usage. The default value is `false`.
*/
fontExtraProperties?: boolean | undefined;
/**
* - Render Xfa forms if any.
* The default value is `false`.
*/
enableXfa?: boolean | undefined;
/**
* - Specify an explicit document
* context to create elements with and to load resources, such as fonts,
* into. Defaults to the current document.
*/
ownerDocument?: HTMLDocument | undefined;
/**
* - Disable range request loading of PDF
* files. When enabled, and if the server supports partial content requests,
* then the PDF will be fetched in chunks. The default value is `false`.
*/
disableRange?: boolean | undefined;
/**
* - Disable streaming of PDF file data.
* By default PDF.js attempts to load PDF files in chunks. The default value
* is `false`.
*/
disableStream?: boolean | undefined;
/**
* - Disable pre-fetching of PDF file
* data. When range requests are enabled PDF.js will automatically keep
* fetching more data even if it isn't needed to display the current page.
* The default value is `false`.
*
* NOTE: It is also necessary to disable streaming, see above, in order for
* disabling of pre-fetching to work correctly.
*/
disableAutoFetch?: boolean | undefined;
/**
* - Enables special hooks for debugging PDF.js
* (see `web/debugger.js`). The default value is `false`.
*/
pdfBug?: boolean | undefined;
/**
* - The factory instance that will be used
* when creating canvases. The default value is {new DOMCanvasFactory()}.
*/
canvasFactory?: Object | undefined;
/**
* - A factory instance that will be used
* to create SVG filters when rendering some images on the main canvas.
*/
filterFactory?: Object | undefined;
};
export type OnProgressParameters = {
/**
* - Currently loaded number of bytes.
*/
loaded: number;
/**
* - Total number of bytes in the PDF file.
*/
total: number;
};
/**
* Page getViewport parameters.
*/
export type GetViewportParameters = {
/**
* - The desired scale of the viewport.
*/
scale: number;
/**
* - The desired rotation, in degrees, of
* the viewport. If omitted it defaults to the page rotation.
*/
rotation?: number | undefined;
/**
* - The horizontal, i.e. x-axis, offset.
* The default value is `0`.
*/
offsetX?: number | undefined;
/**
* - The vertical, i.e. y-axis, offset.
* The default value is `0`.
*/
offsetY?: number | undefined;
/**
* - If true, the y-axis will not be
* flipped. The default value is `false`.
*/
dontFlip?: boolean | undefined;
};
/**
* Page getTextContent parameters.
*/
export type getTextContentParameters = {
/**
* - When true include marked
* content items in the items array of TextContent. The default is `false`.
*/
includeMarkedContent?: boolean | undefined;
};
/**
* Page text content.
*/
export type TextContent = {
/**
* - Array of
* {@link TextItem } and {@link TextMarkedContent } objects. TextMarkedContent
* items are included when includeMarkedContent is true.
*/
items: Array<TextItem | TextMarkedContent>;
/**
* - {@link TextStyle } objects,
* indexed by font name.
*/
styles: {
[x: string]: TextStyle;
};
};
/**
* Page text content part.
*/
export type TextItem = {
/**
* - Text content.
*/
str: string;
/**
* - Text direction: 'ttb', 'ltr' or 'rtl'.
*/
dir: string;
/**
* - Transformation matrix.
*/
transform: Array<any>;
/**
* - Width in device space.
*/
width: number;
/**
* - Height in device space.
*/
height: number;
/**
* - Font name used by PDF.js for converted font.
*/
fontName: string;
/**
* - Indicating if the text content is followed by a
* line-break.
*/
hasEOL: boolean;
};
/**
* Page text marked content part.
*/
export type TextMarkedContent = {
/**
* - Either 'beginMarkedContent',
* 'beginMarkedContentProps', or 'endMarkedContent'.
*/
type: string;
/**
* - The marked content identifier. Only used for type
* 'beginMarkedContentProps'.
*/
id: string;
};
/**
* Text style.
*/
export type TextStyle = {
/**
* - Font ascent.
*/
ascent: number;
/**
* - Font descent.
*/
descent: number;
/**
* - Whether or not the text is in vertical mode.
*/
vertical: boolean;
/**
* - The possible font family.
*/
fontFamily: string;
};
/**
* Page annotation parameters.
*/
export type GetAnnotationsParameters = {
/**
* - Determines the annotations that are fetched,
* can be 'display' (viewable annotations), 'print' (printable annotations),
* or 'any' (all annotations). The default value is 'display'.
*/
intent?: string | undefined;
};
/**
* Page render parameters.
*/
export type RenderParameters = {
/**
* - A 2D context of a DOM
* Canvas object.
*/
canvasContext: CanvasRenderingContext2D;
/**
* - Rendering viewport obtained by calling
* the `PDFPageProxy.getViewport` method.
*/
viewport: PageViewport;
/**
* - Rendering intent, can be 'display', 'print',
* or 'any'. The default value is 'display'.
*/
intent?: string | undefined;
/**
* Controls which annotations are rendered
* onto the canvas, for annotations with appearance-data; the values from
* {@link AnnotationMode } should be used. The following values are supported:
* - `AnnotationMode.DISABLE`, which disables all annotations.
* - `AnnotationMode.ENABLE`, which includes all possible annotations (thus
* it also depends on the `intent`-option, see above).
* - `AnnotationMode.ENABLE_FORMS`, which excludes annotations that contain
* interactive form elements (those will be rendered in the display layer).
* - `AnnotationMode.ENABLE_STORAGE`, which includes all possible annotations
* (as above) but where interactive form elements are updated with data
* from the {@link AnnotationStorage }-instance; useful e.g. for printing.
* The default value is `AnnotationMode.ENABLE`.
*/
annotationMode?: number | undefined;
/**
* - Additional transform, applied just
* before viewport transform.
*/
transform?: any[] | undefined;
/**
* - Background
* to use for the canvas.
* Any valid `canvas.fillStyle` can be used: a `DOMString` parsed as CSS
* <color> value, a `CanvasGradient` object (a linear or radial gradient) or
* a `CanvasPattern` object (a repetitive image). The default value is
* 'rgb(255,255,255)'.
*
* NOTE: This option may be partially, or completely, ignored when the
* `pageColors`-option is used.
*/
background?: string | CanvasGradient | CanvasPattern | undefined;
/**
* - Overwrites background and foreground colors
* with user defined ones in order to improve readability in high contrast
* mode.
*/
pageColors?: Object | undefined;
/**
* -
* A promise that should resolve with an {@link OptionalContentConfig }created from `PDFDocumentProxy.getOptionalContentConfig`. If `null`,
* the configuration will be fetched automatically with the default visibility
* states set.
*/
optionalContentConfigPromise?: Promise<OptionalContentConfig> | undefined;
/**
* - Map some
* annotation ids with canvases used to render them.
*/
annotationCanvasMap?: Map<string, HTMLCanvasElement> | undefined;
printAnnotationStorage?: PrintAnnotationStorage | undefined;
};
/**
* Page getOperatorList parameters.
*/
export type GetOperatorListParameters = {
/**
* - Rendering intent, can be 'display', 'print',
* or 'any'. The default value is 'display'.
*/
intent?: string | undefined;
/**
* Controls which annotations are included
* in the operatorList, for annotations with appearance-data; the values from
* {@link AnnotationMode } should be used. The following values are supported:
* - `AnnotationMode.DISABLE`, which disables all annotations.
* - `AnnotationMode.ENABLE`, which includes all possible annotations (thus
* it also depends on the `intent`-option, see above).
* - `AnnotationMode.ENABLE_FORMS`, which excludes annotations that contain
* interactive form elements (those will be rendered in the display layer).
* - `AnnotationMode.ENABLE_STORAGE`, which includes all possible annotations
* (as above) but where interactive form elements are updated with data
* from the {@link AnnotationStorage }-instance; useful e.g. for printing.
* The default value is `AnnotationMode.ENABLE`.
*/
annotationMode?: number | undefined;
printAnnotationStorage?: PrintAnnotationStorage | undefined;
};
/**
* Structure tree node. The root node will have a role "Root".
*/
export type StructTreeNode = {
/**
* - Array of
* {@link StructTreeNode } and {@link StructTreeContent } objects.
*/
children: Array<StructTreeNode | StructTreeContent>;
/**
* - element's role, already mapped if a role map exists
* in the PDF.
*/
role: string;
};
/**
* Structure tree content.
*/
export type StructTreeContent = {
/**
* - either "content" for page and stream structure
* elements or "object" for object references.
*/
type: string;
/**
* - unique id that will map to the text layer.
*/
id: string;
};
/**
* PDF page operator list.
*/
export type PDFOperatorList = {
/**
* - Array containing the operator functions.
*/
fnArray: Array<number>;
/**
* - Array containing the arguments of the
* functions.
*/
argsArray: Array<any>;
};
export type PDFWorkerParameters = {
/**
* - The name of the worker.
*/
name?: string | undefined;
/**
* - The `workerPort` object.
*/
port?: Worker | undefined;
/**
* - Controls the logging level;
* the constants from {@link VerbosityLevel } should be used.
*/
verbosity?: number | undefined;
};
  // NOTE(review): presumably the PDF.js build identifier string — confirm
  // against pdfjs-dist.
  /** @type {string} */
  export const build: string;
  // DOM-based default factory implementations. The corresponding
  // `DocumentInitParameters` options (`canvasFactory`, `CMapReaderFactory`,
  // `filterFactory`, `StandardFontDataFactory`) override these per document.
  export let DefaultCanvasFactory: typeof DOMCanvasFactory;
  export let DefaultCMapReaderFactory: typeof DOMCMapReaderFactory;
  export let DefaultFilterFactory: typeof DOMFilterFactory;
  export let DefaultStandardFontDataFactory: typeof DOMStandardFontDataFactory;
/**
* @typedef { Int8Array | Uint8Array | Uint8ClampedArray |
* Int16Array | Uint16Array |
* Int32Array | Uint32Array | Float32Array |
* Float64Array
* } TypedArray
*/
/**
* @typedef { TypedArray | ArrayBuffer | Array<number> | string } BinaryData
*/
/**
* @typedef {Object} RefProxy
* @property {number} num
* @property {number} gen
*/
/**
* Document initialization / loading parameters object.
*
* @typedef {Object} DocumentInitParameters
* @property {string | URL} [url] - The URL of the PDF.
* @property {BinaryData} [data] - Binary PDF data.
* Use TypedArrays (Uint8Array) to improve the memory usage. If PDF data is
* BASE64-encoded, use `atob()` to convert it to a binary string first.
*
* NOTE: If TypedArrays are used they will generally be transferred to the
* worker-thread. This will help reduce main-thread memory usage, however
* it will take ownership of the TypedArrays.
* @property {Object} [httpHeaders] - Basic authentication headers.
* @property {boolean} [withCredentials] - Indicates whether or not
* cross-site Access-Control requests should be made using credentials such
* as cookies or authorization headers. The default is `false`.
* @property {string} [password] - For decrypting password-protected PDFs.
* @property {number} [length] - The PDF file length. It's used for progress
* reports and range requests operations.
* @property {PDFDataRangeTransport} [range] - Allows for using a custom range
* transport implementation.
* @property {number} [rangeChunkSize] - Specify maximum number of bytes fetched
* per range request. The default value is {@link DEFAULT_RANGE_CHUNK_SIZE}.
* @property {PDFWorker} [worker] - The worker that will be used for loading and
* parsing the PDF data.
* @property {number} [verbosity] - Controls the logging level; the constants
* from {@link VerbosityLevel} should be used.
* @property {string} [docBaseUrl] - The base URL of the document, used when
* attempting to recover valid absolute URLs for annotations, and outline
* items, that (incorrectly) only specify relative URLs.
* @property {string} [cMapUrl] - The URL where the predefined Adobe CMaps are
* located. Include the trailing slash.
* @property {boolean} [cMapPacked] - Specifies if the Adobe CMaps are binary
* packed or not. The default value is `true`.
* @property {Object} [CMapReaderFactory] - The factory that will be used when
* reading built-in CMap files. Providing a custom factory is useful for
* environments without Fetch API or `XMLHttpRequest` support, such as
* Node.js. The default value is {DOMCMapReaderFactory}.
* @property {boolean} [useSystemFonts] - When `true`, fonts that aren't
* embedded in the PDF document will fallback to a system font.
* The default value is `true` in web environments and `false` in Node.js;
* unless `disableFontFace === true` in which case this defaults to `false`
* regardless of the environment (to prevent completely broken fonts).
* @property {string} [standardFontDataUrl] - The URL where the standard font
* files are located. Include the trailing slash.
* @property {Object} [StandardFontDataFactory] - The factory that will be used
* when reading the standard font files. Providing a custom factory is useful
* for environments without Fetch API or `XMLHttpRequest` support, such as
* Node.js. The default value is {DOMStandardFontDataFactory}.
* @property {boolean} [useWorkerFetch] - Enable using the Fetch API in the
* worker-thread when reading CMap and standard font files. When `true`,
* the `CMapReaderFactory` and `StandardFontDataFactory` options are ignored.
* The default value is `true` in web environments and `false` in Node.js.
* @property {boolean} [stopAtErrors] - Reject certain promises, e.g.
* `getOperatorList`, `getTextContent`, and `RenderTask`, when the associated
* PDF data cannot be successfully parsed, instead of attempting to recover
* whatever possible of the data. The default value is `false`.
* @property {number} [maxImageSize] - The maximum allowed image size in total
* pixels, i.e. width * height. Images above this value will not be rendered.
* Use -1 for no limit, which is also the default value.
* @property {boolean} [isEvalSupported] - Determines if we can evaluate strings
* as JavaScript. Primarily used to improve performance of font rendering, and
* when parsing PDF functions. The default value is `true`.
* @property {boolean} [isOffscreenCanvasSupported] - Determines if we can use
* `OffscreenCanvas` in the worker. Primarily used to improve performance of
* image conversion/rendering.
* The default value is `true` in web environments and `false` in Node.js.
* @property {boolean} [canvasMaxAreaInBytes] - The integer value is used to
* know when an image must be resized (uses `OffscreenCanvas` in the worker).
* If it's -1 then a possibly slow algorithm is used to guess the max value.
* @property {boolean} [disableFontFace] - By default fonts are converted to
* OpenType fonts and loaded via the Font Loading API or `@font-face` rules.
* If disabled, fonts will be rendered using a built-in font renderer that
* constructs the glyphs with primitive path commands.
* The default value is `false` in web environments and `true` in Node.js.
* @property {boolean} [fontExtraProperties] - Include additional properties,
* which are unused during rendering of PDF documents, when exporting the
* parsed font data from the worker-thread. This may be useful for debugging
* purposes (and backwards compatibility), but note that it will lead to
* increased memory usage. The default value is `false`.
* @property {boolean} [enableXfa] - Render Xfa forms if any.
* The default value is `false`.
* @property {HTMLDocument} [ownerDocument] - Specify an explicit document
* context to create elements with and to load resources, such as fonts,
* into. Defaults to the current document.
* @property {boolean} [disableRange] - Disable range request loading of PDF
* files. When enabled, and if the server supports partial content requests,
* then the PDF will be fetched in chunks. The default value is `false`.
* @property {boolean} [disableStream] - Disable streaming of PDF file data.
* By default PDF.js attempts to load PDF files in chunks. The default value
* is `false`.
* @property {boolean} [disableAutoFetch] - Disable pre-fetching of PDF file
* data. When range requests are enabled PDF.js will automatically keep
* fetching more data even if it isn't needed to display the current page.
* The default value is `false`.
*
* NOTE: It is also necessary to disable streaming, see above, in order for
* disabling of pre-fetching to work correctly.
* @property {boolean} [pdfBug] - Enables special hooks for debugging PDF.js
* (see `web/debugger.js`). The default value is `false`.
* @property {Object} [canvasFactory] - The factory instance that will be used
* when creating canvases. The default value is {new DOMCanvasFactory()}.
* @property {Object} [filterFactory] - A factory instance that will be used
* to create SVG filters when rendering some images on the main canvas.
*/
/**
* This is the main entry point for loading a PDF and interacting with it.
*
* NOTE: If a URL is used to fetch the PDF data a standard Fetch API call (or
* XHR as fallback) is used, which means it must follow same origin rules,
* e.g. no cross-domain requests without CORS.
*
* @param {string | URL | TypedArray | ArrayBuffer | DocumentInitParameters}
* src - Can be a URL where a PDF file is located, a typed array (Uint8Array)
* already populated with data, or a parameter object.
* @returns {PDFDocumentLoadingTask}
*/
export function getDocument(
src: string | URL | TypedArray | ArrayBuffer | DocumentInitParameters
): PDFDocumentLoadingTask;
  /**
   * In-process message port exposing the `Worker` messaging surface
   * (`postMessage` / event listeners / `terminate`).
   * NOTE(review): presumably used as a stand-in when no real Web Worker is
   * available — confirm against the pdfjs-dist docs.
   */
  export class LoopbackPort {
    /** Delivers `obj` to registered listeners; mirrors `Worker.postMessage`. */
    postMessage(obj: any, transfer: any): void;
    /** Registers `listener` for events named `name`. */
    addEventListener(name: any, listener: any): void;
    /** Removes a previously registered `listener` for `name`. */
    removeEventListener(name: any, listener: any): void;
    /** Shuts the port down; mirrors `Worker.terminate`. */
    terminate(): void;
    #private;
  }
/**
* @typedef {Object} OnProgressParameters
* @property {number} loaded - Currently loaded number of bytes.
* @property {number} total - Total number of bytes in the PDF file.
*/
/**
* The loading task controls the operations required to load a PDF document
* (such as network requests) and provides a way to listen for completion,
* after which individual pages can be rendered.
*/
  export class PDFDocumentLoadingTask {
    // Compiler-mangled emitted name of the private static `#docId` field;
    // internal, not part of the public API.
    static "__#16@#docId": number;
    // Internal promise plumbing backing the `promise` getter below;
    // not part of the public API.
    _capability: import("../shared/util.js").PromiseCapability;
    // Internal transport/worker handles; not part of the public API.
    _transport: any;
    _worker: any;
    /**
     * Unique identifier for the document loading task.
     * @type {string}
     */
    docId: string;
    /**
     * Whether the loading task is destroyed or not.
     * @type {boolean}
     */
    destroyed: boolean;
    /**
     * Callback to request a password if a wrong or no password was provided.
     * The callback receives two parameters: a function that should be called
     * with the new password, and a reason (see {@link PasswordResponses}).
     * @type {function}
     */
    onPassword: Function;
    /**
     * Callback to be able to monitor the loading progress of the PDF file
     * (necessary to implement e.g. a loading bar).
     * The callback receives an {@link OnProgressParameters} argument.
     * @type {function}
     */
    onProgress: Function;
    /**
     * Promise for document loading task completion.
     * @type {Promise<PDFDocumentProxy>}
     */
    get promise(): Promise<PDFDocumentProxy>;
    /**
     * Abort all network requests and destroy the worker.
     * @returns {Promise<void>} A promise that is resolved when destruction is
     * completed.
     */
    destroy(): Promise<void>;
  }
/**
* Proxy to a `PDFDocument` in the worker thread.
*/
export class PDFDocumentProxy {
  // `pdfInfo`: static document data received from the worker at load time;
  // `transport`: the message channel used for all further worker calls.
  constructor(pdfInfo: any, transport: any);
  // Raw document info from the worker, backing the simple getters below.
  _pdfInfo: any;
  // Worker message transport backing every async method below.
  _transport: any;
  /**
   * @type {AnnotationStorage} Storage for annotation data in forms.
   */
  get annotationStorage(): AnnotationStorage;
  /**
   * @type {Object} The filter factory instance.
   */
  get filterFactory(): Object;
  /**
   * @type {number} Total number of pages in the PDF file.
   */
  get numPages(): number;
  /**
   * @type {Array<string, string|null>} A (not guaranteed to be) unique ID to
   * identify the PDF document.
   * NOTE: The first element will always be defined for all PDF documents,
   * whereas the second element is only defined for *modified* PDF documents.
   */
  get fingerprints(): string[];
  /**
   * @type {boolean} True if only XFA form.
   */
  get isPureXfa(): boolean;
  /**
   * NOTE: This is (mostly) intended to support printing of XFA forms.
   *
   * @type {Object | null} An object representing a HTML tree structure
   * to render the XFA, or `null` when no XFA form exists.
   */
  get allXfaHtml(): Object | null;
  /**
   * @param {number} pageNumber - The page number to get. The first page is 1.
   * @returns {Promise<PDFPageProxy>} A promise that is resolved with
   * a {@link PDFPageProxy} object.
   */
  getPage(pageNumber: number): Promise<PDFPageProxy>;
  /**
   * @param {RefProxy} ref - The page reference.
   * @returns {Promise<number>} A promise that is resolved with the page index,
   * starting from zero, that is associated with the reference.
   */
  getPageIndex(ref: RefProxy): Promise<number>;
  /**
   * @returns {Promise<Object<string, Array<any>>>} A promise that is resolved
   * with a mapping from named destinations to references.
   *
   * This can be slow for large documents. Use `getDestination` instead.
   */
  getDestinations(): Promise<{
    [x: string]: Array<any>;
  }>;
  /**
   * @param {string} id - The named destination to get.
   * @returns {Promise<Array<any> | null>} A promise that is resolved with all
   * information of the given named destination, or `null` when the named
   * destination is not present in the PDF file.
   */
  getDestination(id: string): Promise<Array<any> | null>;
  /**
   * @returns {Promise<Array<string> | null>} A promise that is resolved with
   * an {Array} containing the page labels that correspond to the page
   * indexes, or `null` when no page labels are present in the PDF file.
   */
  getPageLabels(): Promise<Array<string> | null>;
  /**
   * @returns {Promise<string>} A promise that is resolved with a {string}
   * containing the page layout name.
   */
  getPageLayout(): Promise<string>;
  /**
   * @returns {Promise<string>} A promise that is resolved with a {string}
   * containing the page mode name.
   */
  getPageMode(): Promise<string>;
  /**
   * @returns {Promise<Object | null>} A promise that is resolved with an
   * {Object} containing the viewer preferences, or `null` when no viewer
   * preferences are present in the PDF file.
   */
  getViewerPreferences(): Promise<Object | null>;
  /**
   * @returns {Promise<any | null>} A promise that is resolved with an {Array}
   * containing the destination, or `null` when no open action is present
   * in the PDF.
   */
  getOpenAction(): Promise<any | null>;
  /**
   * @returns {Promise<any>} A promise that is resolved with a lookup table
   * for mapping named attachments to their content.
   */
  getAttachments(): Promise<any>;
  /**
   * @returns {Promise<Array<string> | null>} A promise that is resolved with
   * an {Array} of all the JavaScript strings in the name tree, or `null`
   * if no JavaScript exists.
   */
  getJavaScript(): Promise<Array<string> | null>;
  /**
   * @returns {Promise<Object | null>} A promise that is resolved with
   * an {Object} with the JavaScript actions:
   *  - from the name tree (like getJavaScript);
   *  - from A or AA entries in the catalog dictionary.
   * , or `null` if no JavaScript exists.
   */
  getJSActions(): Promise<Object | null>;
  /**
   * @typedef {Object} OutlineNode
   * @property {string} title
   * @property {boolean} bold
   * @property {boolean} italic
   * @property {Uint8ClampedArray} color - The color in RGB format to use for
   * display purposes.
   * @property {string | Array<any> | null} dest
   * @property {string | null} url
   * @property {string | undefined} unsafeUrl
   * @property {boolean | undefined} newWindow
   * @property {number | undefined} count
   * @property {Array<OutlineNode>} items
   */
  /**
   * @returns {Promise<Array<OutlineNode>>} A promise that is resolved with an
   * {Array} that is a tree outline (if it has one) of the PDF file.
   */
  getOutline(): Promise<
    {
      title: string;
      bold: boolean;
      italic: boolean;
      /**
       * - The color in RGB format to use for
       * display purposes.
       */
      color: Uint8ClampedArray;
      dest: string | Array<any> | null;
      url: string | null;
      unsafeUrl: string | undefined;
      newWindow: boolean | undefined;
      count: number | undefined;
      items: any[];
    }[]
  >;
  /**
   * @returns {Promise<OptionalContentConfig>} A promise that is resolved with
   * an {@link OptionalContentConfig} that contains all the optional content
   * groups (assuming that the document has any).
   */
  getOptionalContentConfig(): Promise<OptionalContentConfig>;
  /**
   * @returns {Promise<Array<number> | null>} A promise that is resolved with
   * an {Array} that contains the permission flags for the PDF document, or
   * `null` when no permissions are present in the PDF file.
   */
  getPermissions(): Promise<Array<number> | null>;
  /**
   * @returns {Promise<{ info: Object, metadata: Metadata }>} A promise that is
   * resolved with an {Object} that has `info` and `metadata` properties.
   * `info` is an {Object} filled with anything available in the information
   * dictionary and similarly `metadata` is a {Metadata} object with
   * information from the metadata section of the PDF.
   */
  getMetadata(): Promise<{
    info: Object;
    metadata: Metadata;
  }>;
  /**
   * @typedef {Object} MarkInfo
   * Properties correspond to Table 321 of the PDF 32000-1:2008 spec.
   * @property {boolean} Marked
   * @property {boolean} UserProperties
   * @property {boolean} Suspects
   */
  /**
   * @returns {Promise<MarkInfo | null>} A promise that is resolved with
   * a {MarkInfo} object that contains the MarkInfo flags for the PDF
   * document, or `null` when no MarkInfo values are present in the PDF file.
   */
  getMarkInfo(): Promise<{
    Marked: boolean;
    UserProperties: boolean;
    Suspects: boolean;
  } | null>;
  /**
   * @returns {Promise<Uint8Array>} A promise that is resolved with a
   * {Uint8Array} containing the raw data of the PDF document.
   */
  getData(): Promise<Uint8Array>;
  /**
   * @returns {Promise<Uint8Array>} A promise that is resolved with a
   * {Uint8Array} containing the full data of the saved document.
   */
  saveDocument(): Promise<Uint8Array>;
  /**
   * @returns {Promise<{ length: number }>} A promise that is resolved when the
   * document's data is loaded. It is resolved with an {Object} that contains
   * the `length` property that indicates size of the PDF data in bytes.
   */
  getDownloadInfo(): Promise<{
    length: number;
  }>;
  /**
   * Cleans up resources allocated by the document on both the main and worker
   * threads.
   *
   * NOTE: Do not, under any circumstances, call this method when rendering is
   * currently ongoing since that may lead to rendering errors.
   *
   * @param {boolean} [keepLoadedFonts] - Let fonts remain attached to the DOM.
   * NOTE: This will increase persistent memory usage, hence don't use this
   * option unless absolutely necessary. The default value is `false`.
   * @returns {Promise} A promise that is resolved when clean-up has finished.
   */
  cleanup(keepLoadedFonts?: boolean | undefined): Promise<any>;
  /**
   * Destroys the current document instance and terminates the worker.
   */
  destroy(): Promise<void>;
  /**
   * @type {DocumentInitParameters} A subset of the current
   * {DocumentInitParameters}, which are needed in the viewer.
   */
  get loadingParams(): DocumentInitParameters;
  /**
   * @type {PDFDocumentLoadingTask} The loadingTask for the current document.
   */
  get loadingTask(): PDFDocumentLoadingTask;
  /**
   * @returns {Promise<Object<string, Array<Object>> | null>} A promise that is
   * resolved with an {Object} containing /AcroForm field data for the JS
   * sandbox, or `null` when no field data is present in the PDF file.
   */
  getFieldObjects(): Promise<{
    [x: string]: Array<Object>;
  } | null>;
  /**
   * @returns {Promise<boolean>} A promise that is resolved with `true`
   * if some /AcroForm fields have JavaScript actions.
   */
  hasJSActions(): Promise<boolean>;
  /**
   * @returns {Promise<Array<string> | null>} A promise that is resolved with an
   * {Array<string>} containing IDs of annotations that have a calculation
   * action, or `null` when no such annotations are present in the PDF file.
   */
  getCalculationOrderIds(): Promise<Array<string> | null>;
}
/**
* Page getViewport parameters.
*
* @typedef {Object} GetViewportParameters
* @property {number} scale - The desired scale of the viewport.
* @property {number} [rotation] - The desired rotation, in degrees, of
* the viewport. If omitted it defaults to the page rotation.
* @property {number} [offsetX] - The horizontal, i.e. x-axis, offset.
* The default value is `0`.
* @property {number} [offsetY] - The vertical, i.e. y-axis, offset.
* The default value is `0`.
* @property {boolean} [dontFlip] - If true, the y-axis will not be
* flipped. The default value is `false`.
*/
/**
* Page getTextContent parameters.
*
* @typedef {Object} getTextContentParameters
* @property {boolean} [includeMarkedContent] - When true include marked
* content items in the items array of TextContent. The default is `false`.
*/
/**
* Page text content.
*
* @typedef {Object} TextContent
* @property {Array<TextItem | TextMarkedContent>} items - Array of
* {@link TextItem} and {@link TextMarkedContent} objects. TextMarkedContent
* items are included when includeMarkedContent is true.
* @property {Object<string, TextStyle>} styles - {@link TextStyle} objects,
* indexed by font name.
*/
/**
* Page text content part.
*
* @typedef {Object} TextItem
* @property {string} str - Text content.
* @property {string} dir - Text direction: 'ttb', 'ltr' or 'rtl'.
* @property {Array<any>} transform - Transformation matrix.
* @property {number} width - Width in device space.
* @property {number} height - Height in device space.
* @property {string} fontName - Font name used by PDF.js for converted font.
* @property {boolean} hasEOL - Indicating if the text content is followed by a
* line-break.
*/
/**
* Page text marked content part.
*
* @typedef {Object} TextMarkedContent
* @property {string} type - Either 'beginMarkedContent',
* 'beginMarkedContentProps', or 'endMarkedContent'.
* @property {string} id - The marked content identifier. Only used for type
* 'beginMarkedContentProps'.
*/
/**
* Text style.
*
* @typedef {Object} TextStyle
* @property {number} ascent - Font ascent.
* @property {number} descent - Font descent.
* @property {boolean} vertical - Whether or not the text is in vertical mode.
* @property {string} fontFamily - The possible font family.
*/
/**
* Page annotation parameters.
*
* @typedef {Object} GetAnnotationsParameters
* @property {string} [intent] - Determines the annotations that are fetched,
* can be 'display' (viewable annotations), 'print' (printable annotations),
* or 'any' (all annotations). The default value is 'display'.
*/
/**
* Page render parameters.
*
* @typedef {Object} RenderParameters
* @property {CanvasRenderingContext2D} canvasContext - A 2D context of a DOM
* Canvas object.
* @property {PageViewport} viewport - Rendering viewport obtained by calling
* the `PDFPageProxy.getViewport` method.
* @property {string} [intent] - Rendering intent, can be 'display', 'print',
* or 'any'. The default value is 'display'.
* @property {number} [annotationMode] Controls which annotations are rendered
* onto the canvas, for annotations with appearance-data; the values from
* {@link AnnotationMode} should be used. The following values are supported:
* - `AnnotationMode.DISABLE`, which disables all annotations.
* - `AnnotationMode.ENABLE`, which includes all possible annotations (thus
* it also depends on the `intent`-option, see above).
* - `AnnotationMode.ENABLE_FORMS`, which excludes annotations that contain
* interactive form elements (those will be rendered in the display layer).
* - `AnnotationMode.ENABLE_STORAGE`, which includes all possible annotations
* (as above) but where interactive form elements are updated with data
* from the {@link AnnotationStorage}-instance; useful e.g. for printing.
* The default value is `AnnotationMode.ENABLE`.
* @property {Array<any>} [transform] - Additional transform, applied just
* before viewport transform.
* @property {CanvasGradient | CanvasPattern | string} [background] - Background
* to use for the canvas.
* Any valid `canvas.fillStyle` can be used: a `DOMString` parsed as CSS
* <color> value, a `CanvasGradient` object (a linear or radial gradient) or
* a `CanvasPattern` object (a repetitive image). The default value is
* 'rgb(255,255,255)'.
*
* NOTE: This option may be partially, or completely, ignored when the
* `pageColors`-option is used.
* @property {Object} [pageColors] - Overwrites background and foreground colors
* with user defined ones in order to improve readability in high contrast
* mode.
* @property {Promise<OptionalContentConfig>} [optionalContentConfigPromise] -
* A promise that should resolve with an {@link OptionalContentConfig}
* created from `PDFDocumentProxy.getOptionalContentConfig`. If `null`,
* the configuration will be fetched automatically with the default visibility
* states set.
* @property {Map<string, HTMLCanvasElement>} [annotationCanvasMap] - Map some
* annotation ids with canvases used to render them.
* @property {PrintAnnotationStorage} [printAnnotationStorage]
*/
/**
* Page getOperatorList parameters.
*
* @typedef {Object} GetOperatorListParameters
* @property {string} [intent] - Rendering intent, can be 'display', 'print',
* or 'any'. The default value is 'display'.
* @property {number} [annotationMode] Controls which annotations are included
* in the operatorList, for annotations with appearance-data; the values from
* {@link AnnotationMode} should be used. The following values are supported:
* - `AnnotationMode.DISABLE`, which disables all annotations.
* - `AnnotationMode.ENABLE`, which includes all possible annotations (thus
* it also depends on the `intent`-option, see above).
* - `AnnotationMode.ENABLE_FORMS`, which excludes annotations that contain
* interactive form elements (those will be rendered in the display layer).
* - `AnnotationMode.ENABLE_STORAGE`, which includes all possible annotations
* (as above) but where interactive form elements are updated with data
* from the {@link AnnotationStorage}-instance; useful e.g. for printing.
* The default value is `AnnotationMode.ENABLE`.
* @property {PrintAnnotationStorage} [printAnnotationStorage]
*/
/**
* Structure tree node. The root node will have a role "Root".
*
* @typedef {Object} StructTreeNode
* @property {Array<StructTreeNode | StructTreeContent>} children - Array of
* {@link StructTreeNode} and {@link StructTreeContent} objects.
* @property {string} role - element's role, already mapped if a role map exists
* in the PDF.
*/
/**
* Structure tree content.
*
* @typedef {Object} StructTreeContent
* @property {string} type - either "content" for page and stream structure
* elements or "object" for object references.
* @property {string} id - unique id that will map to the text layer.
*/
/**
* PDF page operator list.
*
* @typedef {Object} PDFOperatorList
* @property {Array<number>} fnArray - Array containing the operator functions.
* @property {Array<any>} argsArray - Array containing the arguments of the
* functions.
*/
/**
* Proxy to a `PDFPage` in the worker thread.
*/
export class PDFPageProxy {
  // `pdfBug` enables collection of timing stats (see `_stats`).
  constructor(
    pageIndex: any,
    pageInfo: any,
    transport: any,
    pdfBug?: boolean
  );
  // Zero-based index of this page within the document.
  _pageIndex: any;
  // Static page data received from the worker.
  _pageInfo: any;
  // Message channel to the worker owning the underlying page.
  _transport: any;
  // Timing stats; `null` unless `pdfBug` was enabled at construction.
  _stats: StatTimer | null;
  _pdfBug: boolean;
  /** @type {PDFObjects} */
  commonObjs: PDFObjects;
  // Page-local resolved objects (as opposed to the shared `commonObjs`).
  objs: PDFObjects;
  // NOTE(review): presumably triggers automatic cleanup once rendering
  // finishes — confirm against the pdf.js implementation.
  _maybeCleanupAfterRender: boolean;
  // Per-rendering-intent bookkeeping for in-flight render/operator-list work.
  _intentStates: Map<any, any>;
  destroyed: boolean;
  /**
   * @type {number} Page number of the page. First page is 1.
   */
  get pageNumber(): number;
  /**
   * @type {number} The number of degrees the page is rotated clockwise.
   */
  get rotate(): number;
  /**
   * @type {RefProxy | null} The reference that points to this page.
   */
  get ref(): RefProxy | null;
  /**
   * @type {number} The default size of units in 1/72nds of an inch.
   */
  get userUnit(): number;
  /**
   * @type {Array<number>} An array of the visible portion of the PDF page in
   * user space units [x1, y1, x2, y2].
   */
  get view(): number[];
  /**
   * @param {GetViewportParameters} params - Viewport parameters.
   * @returns {PageViewport} Contains 'width' and 'height' properties
   * along with transforms required for rendering.
   */
  getViewport({
    scale,
    rotation,
    offsetX,
    offsetY,
    dontFlip,
  }?: GetViewportParameters): PageViewport;
  /**
   * @param {GetAnnotationsParameters} params - Annotation parameters.
   * @returns {Promise<Array<any>>} A promise that is resolved with an
   * {Array} of the annotation objects.
   */
  getAnnotations({ intent }?: GetAnnotationsParameters): Promise<Array<any>>;
  /**
   * @returns {Promise<Object>} A promise that is resolved with an
   * {Object} with JS actions.
   */
  getJSActions(): Promise<Object>;
  /**
   * @type {boolean} True if only XFA form.
   */
  get isPureXfa(): boolean;
  /**
   * @returns {Promise<Object | null>} A promise that is resolved with
   * an {Object} with a fake DOM object (a tree structure where elements
   * are {Object} with a name, attributes (class, style, ...), value and
   * children, very similar to a HTML DOM tree), or `null` if no XFA exists.
   */
  getXfa(): Promise<Object | null>;
  /**
   * Begins the process of rendering a page to the desired context.
   *
   * @param {RenderParameters} params - Page render parameters.
   * @returns {RenderTask} An object that contains a promise that is
   * resolved when the page finishes rendering.
   */
  render(
    {
      canvasContext,
      viewport,
      intent,
      annotationMode,
      transform,
      background,
      optionalContentConfigPromise,
      annotationCanvasMap,
      pageColors,
      printAnnotationStorage,
    }: RenderParameters,
    ...args: any[]
  ): RenderTask;
  /**
   * @param {GetOperatorListParameters} params - Page getOperatorList
   * parameters.
   * @returns {Promise<PDFOperatorList>} A promise resolved with an
   * {@link PDFOperatorList} object that represents the page's operator list.
   */
  getOperatorList({
    intent,
    annotationMode,
    printAnnotationStorage,
  }?: GetOperatorListParameters): Promise<PDFOperatorList>;
  /**
   * NOTE: All occurrences of whitespace will be replaced by
   * standard spaces (0x20).
   *
   * @param {getTextContentParameters} params - getTextContent parameters.
   * @returns {ReadableStream} Stream for reading text content chunks.
   */
  streamTextContent({
    includeMarkedContent,
  }?: getTextContentParameters): ReadableStream;
  /**
   * NOTE: All occurrences of whitespace will be replaced by
   * standard spaces (0x20).
   *
   * @param {getTextContentParameters} params - getTextContent parameters.
   * @returns {Promise<TextContent>} A promise that is resolved with a
   * {@link TextContent} object that represents the page's text content.
   */
  getTextContent(params?: getTextContentParameters): Promise<TextContent>;
  /**
   * @returns {Promise<StructTreeNode>} A promise that is resolved with a
   * {@link StructTreeNode} object that represents the page's structure tree,
   * or `null` when no structure tree is present for the current page.
   */
  getStructTree(): Promise<StructTreeNode>;
  /**
   * Destroys the page object.
   * @private
   */
  private _destroy;
  /**
   * Cleans up resources allocated by the page.
   *
   * @param {boolean} [resetStats] - Reset page stats, if enabled.
   * The default value is `false`.
   * @returns {boolean} Indicates if clean-up was successfully run.
   */
  cleanup(resetStats?: boolean | undefined): boolean;
  /**
   * @private
   */
  private _startRenderPage;
  /**
   * @private
   */
  private _renderPageChunk;
  /**
   * @private
   */
  private _pumpOperatorList;
  /**
   * @private
   */
  private _abortOperatorList;
  /**
   * @type {StatTimer | null} Returns page stats, if enabled; returns `null`
   * otherwise.
   */
  get stats(): StatTimer | null;
  #private;
}
/**
* PDF.js web worker abstraction that controls the instantiation of PDF
* documents. Message handlers are used to pass information from the main
* thread to the worker thread and vice versa. If the creation of a web
* worker is not possible, a "fake" worker will be used instead.
*
* @param {PDFWorkerParameters} params - The worker initialization parameters.
*/
export class PDFWorker {
  // Compiler-mangled private static map of ports to their PDFWorker, so
  // `fromPort` can reuse an existing instance for the same port.
  static "__#19@#workerPorts": WeakMap<object, any>;
  /**
   * @param {PDFWorkerParameters} params - The worker initialization parameters.
   */
  static fromPort(params: PDFWorkerParameters): any;
  /**
   * The current `workerSrc`, when it exists.
   * @type {string}
   */
  static get workerSrc(): string;
  static get _mainThreadWorkerMessageHandler(): any;
  static get _setupFakeWorkerGlobal(): any;
  constructor({
    name,
    port,
    verbosity,
  }?: {
    name?: null | undefined;
    port?: null | undefined;
    verbosity?: number | undefined;
  });
  // Optional worker name (mainly useful for debugging).
  name: any;
  destroyed: boolean;
  // Logging verbosity level for the worker.
  verbosity: number;
  // Capability resolving the public `promise` once the worker is ready.
  _readyCapability: import("../shared/util.js").PromiseCapability;
  _port: any;
  // Real web worker instance, or `null` when a "fake" worker is used
  // (see the class doc: a fake worker runs when creation is not possible).
  _webWorker: Worker | null;
  _messageHandler: MessageHandler | null;
  /**
   * Promise for worker initialization completion.
   * @type {Promise<void>}
   */
  get promise(): Promise<void>;
  /**
   * The current `workerPort`, when it exists.
   * @type {Worker}
   */
  get port(): Worker;
  /**
   * The current MessageHandler-instance.
   * @type {MessageHandler}
   */
  get messageHandler(): MessageHandler;
  // Attaches this instance to an externally supplied message port.
  _initializeFromPort(port: any): void;
  // Sets up the worker; per the class doc, falls back to a fake worker
  // when a web worker cannot be created.
  _initialize(): void;
  // Runs the worker code on the main thread ("fake" worker mode).
  _setupFakeWorker(): void;
  /**
   * Destroys the worker instance.
   */
  destroy(): void;
}
/**
 * Internal bookkeeping shared by PDFWorker instances: worker availability,
 * the fallback worker source, and fake-worker id allocation.
 * NOTE(review): semantics inferred from member names — confirm in pdf.js.
 */
export namespace PDFWorkerUtil {
  const isWorkerDisabled: boolean;
  const fallbackWorkerSrc: null;
  const fakeWorkerId: number;
}
/**
* Allows controlling of the rendering tasks.
*/
export class RenderTask {
  // Wraps the internal task object that actually drives rendering.
  constructor(internalRenderTask: any);
  /**
   * Callback for incremental rendering -- a function that will be called
   * each time the rendering is paused. To continue rendering call the
   * function that is the first argument to the callback.
   * @type {function}
   */
  onContinue: Function;
  /**
   * Promise for rendering task completion.
   * @type {Promise<void>}
   */
  get promise(): Promise<void>;
  /**
   * Cancels the rendering task. If the task is currently rendering it will
   * not be cancelled until graphics pauses with a timeout. The promise that
   * this object extends will be rejected when cancelled.
   *
   * @param {number} [extraDelay]
   */
  cancel(extraDelay?: number | undefined): void;
  /**
   * Whether form fields are rendered separately from the main operatorList.
   * @type {boolean}
   */
  get separateAnnots(): boolean;
  #private;
}
/** @type {string} Version identifier of this pdf.js build. */
export const version: string;
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/types/expression-parser.d.ts
|
/**
 * Options accepted by the ts-pegjs-generated parser. The index signature
 * lets grammar-specific extras pass through untouched.
 */
declare interface ParseOptions {
  // Used in error messages to identify the input source.
  filename?: string;
  // "Start" is the only rule this grammar exposes.
  startRule?: "Start";
  // Optional peg.js tracer hook.
  tracer?: any;
  [key: string]: any;
}
/**
 * Signature of the generated parser entry point. The conditional type
 * inspects `options.startRule`, but since "Start" is the only permitted
 * rule every branch resolves to `Start` — the result is always `Start`.
 */
declare type ParseFunction = <Options extends ParseOptions>(
  input: string,
  options?: Options
) => Options extends { startRule: infer StartRule }
  ? StartRule extends "Start"
    ? Start
    : Start
  : Start;
// These types were autogenerated by ts-pegjs
//
// AST node shapes for the parsed expression language: literals, identifiers,
// array/object expressions, member accesses and calls, rooted at `Program`.
declare type Start = Program;
declare type Identifier = IdentifierName;
declare type IdentifierName = { type: "Identifier"; name: string };
declare type Literal =
  | NullLiteral
  | BooleanLiteral
  | NumericLiteral
  | StringLiteral;
declare type NullLiteral = { type: "NullLiteral"; value: null };
declare type BooleanLiteral =
  | { type: "BooleanLiteral"; value: true }
  | { type: "BooleanLiteral"; value: false };
declare type NumericLiteral = DecimalLiteral;
declare type DecimalLiteral = { type: "NumericLiteral"; value: number };
declare type StringLiteral = { type: "StringLiteral"; value: string };
declare type PrimaryExpression =
  | Identifier
  | Literal
  | ArrayExpression
  | ObjectExpression
  | Expression;
declare type ArrayExpression = {
  type: "ArrayExpression";
  elements: ElementList;
};
declare type ElementList = PrimaryExpression[];
declare type ObjectExpression =
  | { type: "ObjectExpression"; properties: [] }
  | { type: "ObjectExpression"; properties: PropertyNameAndValueList };
declare type PropertyNameAndValueList = PrimaryExpression[];
declare type PropertyAssignment = {
  type: "PropertyAssignment";
  key: PropertyName;
  value: Expression;
  kind: "init";
};
declare type PropertyName = IdentifierName | StringLiteral | NumericLiteral;
// Per the two arms: `computed: true` carries a StringLiteral property
// (bracket access) and `computed: false` an Identifier (dot access) —
// following the usual ESTree convention.
declare type MemberExpression =
  | {
      type: "MemberExpression";
      property: StringLiteral;
      computed: true;
      object: MemberExpression | Identifier | StringLiteral;
    }
  | {
      type: "MemberExpression";
      property: Identifier;
      computed: false;
      object: MemberExpression | Identifier | StringLiteral;
    };
declare type CallExpression = {
  type: "CallExpression";
  arguments: Arguments;
  callee: MemberExpression | Identifier;
};
declare type Arguments = PrimaryExpression[];
declare type Expression = CallExpression | MemberExpression;
declare type ExpressionStatement = {
  type: "ExpressionStatement";
  expression: Expression;
};
declare type Program = { type: "Program"; body: ExpressionStatement };
// Union of every node kind; convenient for generic visitors.
declare type ExpressionNode =
  | Program
  | ExpressionStatement
  | ArrayExpression
  | BooleanLiteral
  | CallExpression
  | Identifier
  | MemberExpression
  | NumericLiteral
  | ObjectExpression
  | PropertyAssignment
  | NullLiteral
  | StringLiteral;
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/types/ibm.ts
|
/** Token accounting returned by watsonx.ai generation calls. */
export interface TokenUsage {
  generated_token_count: number;
  input_token_count: number;
}
/**
 * Credential options for IBM watsonx.ai. All fields are optional here;
 * NOTE(review): which combination is required presumably depends on
 * `watsonxAIAuthType` — confirm against the auth helper that consumes this.
 */
export interface WatsonxAuth {
  watsonxAIApikey?: string;
  watsonxAIBearerToken?: string;
  watsonxAIUsername?: string;
  watsonxAIPassword?: string;
  watsonxAIUrl?: string;
  watsonxAIAuthType?: string;
}
/** Service location/version settings needed to initialize a client. */
export interface WatsonxInit {
  authenticator?: string;
  serviceUrl: string;
  version: string;
}
/** Model selection plus scoping (space/project/deployment) and client limits. */
export interface WatsonxParams extends WatsonxInit {
  model: string;
  spaceId?: string;
  projectId?: string;
  idOrName?: string;
  maxConcurrency?: number;
  maxRetries?: number;
}
/** Per-generation result metadata (text, stop reason, token counts). */
export interface GenerationInfo {
  text: string;
  stop_reason: string | undefined;
  generated_token_count: number;
  input_token_count: number;
}
/**
 * One streamed chunk. The id/event/data shape mirrors a server-sent event
 * carrying partial generation results.
 */
export interface ResponseChunk {
  id: number;
  event: string;
  data: {
    results: (TokenUsage & {
      stop_reason?: string;
      generated_text: string;
    })[];
  };
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/types/googlevertexai-types.ts
|
import type { BaseLLMParams } from "@langchain/core/language_models/llms";
/** Authentication-related parameters shared by all Google connections. */
export interface GoogleConnectionParams<AuthOptions> {
  // Options forwarded to the auth client; the shape depends on the
  // concrete client used (hence the type parameter).
  authOptions?: AuthOptions;
}
/** Connection-level parameters for reaching a Vertex AI endpoint. */
export interface GoogleVertexAIConnectionParams<AuthOptions>
  extends GoogleConnectionParams<AuthOptions> {
  /** Hostname for the API call */
  endpoint?: string;
  /** Region where the LLM is stored */
  location?: string;
  /** The version of the API functions. Part of the path. */
  apiVersion?: string;
  /**
   * If you are planning to connect to a model that lives under a custom endpoint
   * provide the "customModelURL" which will override the automatic URL building
   *
   * This is necessary in cases when you want to point to a fine-tuned model or
   * a model that has been hidden under VertexAI Endpoints.
   *
   * In those cases, specifying the `GoogleVertexAIModelParams.model` param
   * will not be necessary and will be ignored.
   *
   * @see GoogleVertexAILLMConnection.buildUrl
   * */
  customModelURL?: string;
}
/** Model selection and sampling parameters for a Vertex AI model. */
export interface GoogleVertexAIModelParams {
  /** Model to use */
  model?: string;
  /** Sampling temperature to use */
  temperature?: number;
  /**
   * Maximum number of tokens to generate in the completion.
   */
  maxOutputTokens?: number;
  /**
   * Top-p changes how the model selects tokens for output.
   *
   * Tokens are selected from most probable to least until the sum
   * of their probabilities equals the top-p value.
   *
   * For example, if tokens A, B, and C have a probability of
   * .3, .2, and .1 and the top-p value is .5, then the model will
   * select either A or B as the next token (using temperature).
   */
  topP?: number;
  /**
   * Top-k changes how the model selects tokens for output.
   *
   * A top-k of 1 means the selected token is the most probable among
   * all tokens in the model’s vocabulary (also called greedy decoding),
   * while a top-k of 3 means that the next token is selected from
   * among the 3 most probable tokens (using temperature).
   */
  topK?: number;
}
/**
 * Everything needed to construct a Vertex AI LLM: base LLM options plus
 * connection and model parameters.
 */
export interface GoogleVertexAIBaseLLMInput<AuthOptions>
  extends BaseLLMParams,
    GoogleVertexAIConnectionParams<AuthOptions>,
    GoogleVertexAIModelParams {}
/** Minimal wrapper around a raw API response body. */
export interface GoogleResponse {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  data: any;
}
/** Base shape of a single prediction returned by Vertex AI. */
export interface GoogleVertexAIBasePrediction {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  safetyAttributes?: any;
}
/** Response payload carrying one prediction per input instance. */
export interface GoogleVertexAILLMPredictions<
  PredictionType extends GoogleVertexAIBasePrediction
> {
  predictions: PredictionType[];
}
// HTTP verbs supported by the abstracted client.
export type GoogleAbstractedClientOpsMethod = "GET" | "POST";
// Whether the response body is parsed as JSON or consumed as a stream.
export type GoogleAbstractedClientOpsResponseType = "json" | "stream";
/** Request descriptor passed to {@link GoogleAbstractedClient.request}. */
export type GoogleAbstractedClientOps = {
  url?: string;
  method?: GoogleAbstractedClientOpsMethod;
  data?: unknown;
  responseType?: GoogleAbstractedClientOpsResponseType;
};
/**
 * Minimal HTTP-client abstraction, presumably so different auth/transport
 * implementations can be swapped in — confirm against the implementations.
 */
export interface GoogleAbstractedClient {
  request: (opts: GoogleAbstractedClientOps) => unknown;
  getProjectId: () => Promise<string>;
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/indexes/postgres.ts
|
import pg, { PoolConfig, Pool } from "pg";
import {
ListKeyOptions,
RecordManagerInterface,
UpdateOptions,
} from "./base.js";
/** Construction options for {@link PostgresRecordManager}. */
export type PostgresRecordManagerOptions = {
  // Used to create a new pg.Pool when `pool` is not supplied.
  postgresConnectionOptions: PoolConfig;
  // Pre-existing pool to reuse instead of creating one.
  pool?: Pool;
  // Defaults to "upsertion_records".
  tableName?: string;
  // Optional schema to qualify the table name with.
  schema?: string;
};
export class PostgresRecordManager implements RecordManagerInterface {
  // LangChain namespace path identifying this record-manager implementation.
  lc_namespace = ["langchain", "recordmanagers", "postgres"];

  // Connection pool used for every query.
  pool: Pool;

  // Unquoted table name (without schema).
  tableName: string;

  // Logical namespace stored alongside every key; scopes all operations.
  namespace: string;

  // Fully-quoted `"schema"."table"` (or `"table"`) used in SQL text.
  finalTableName: string;
constructor(namespace: string, config: PostgresRecordManagerOptions) {
const { postgresConnectionOptions, tableName, pool } = config;
this.namespace = namespace;
this.pool = pool || new pg.Pool(postgresConnectionOptions);
this.tableName = tableName || "upsertion_records";
this.finalTableName = config.schema
? `"${config.schema}"."${this.tableName}"`
: `"${this.tableName}"`;
}
  /**
   * Creates the upsertion-records table and its supporting indexes if they
   * do not already exist. Safe to call repeatedly.
   */
  async createSchema(): Promise<void> {
    try {
      await this.pool.query(`
CREATE TABLE IF NOT EXISTS ${this.finalTableName} (
  uuid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  key TEXT NOT NULL,
  namespace TEXT NOT NULL,
  updated_at Double PRECISION NOT NULL,
  group_id TEXT,
  UNIQUE (key, namespace)
);
CREATE INDEX IF NOT EXISTS updated_at_index ON ${this.finalTableName} (updated_at);
CREATE INDEX IF NOT EXISTS key_index ON ${this.finalTableName} (key);
CREATE INDEX IF NOT EXISTS namespace_index ON ${this.finalTableName} (namespace);
CREATE INDEX IF NOT EXISTS group_id_index ON ${this.finalTableName} (group_id);`);
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
    } catch (e: any) {
      // 23505 is PostgreSQL's unique_violation. Two callers racing through
      // CREATE TABLE IF NOT EXISTS can both pass the existence check and
      // collide on the catalog's unique index; the table exists either way,
      // so the error can be safely ignored.
      if ("code" in e && e.code === "23505") {
        return;
      }
      throw e;
    }
  }
async getTime(): Promise<number> {
const res = await this.pool.query(
"SELECT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP)"
);
return Number.parseFloat(res.rows[0].extract);
}
/**
* Generates the SQL placeholders for a specific row at the provided index.
*
* @param index - The index of the row for which placeholders need to be generated.
* @param numOfColumns - The number of columns we are inserting data into.
* @returns The SQL placeholders for the row values.
*/
private generatePlaceholderForRowAt(
index: number,
numOfColumns: number
): string {
const placeholders = [];
for (let i = 0; i < numOfColumns; i += 1) {
placeholders.push(`$${index * numOfColumns + i + 1}`);
}
return `(${placeholders.join(", ")})`;
}
/**
 * Upserts the given keys into the record table, stamping each with the
 * current database time. Existing (key, namespace) rows only get their
 * updated_at refreshed.
 *
 * @param keys - Record keys to insert or refresh. Empty list is a no-op.
 * @param updateOptions - Optional `timeAtLeast` clock sanity bound and
 *   per-key `groupIds` (must be the same length as `keys`).
 * @throws If the database clock is behind `timeAtLeast`, or if the
 *   `groupIds` length does not match the `keys` length.
 */
async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
  if (keys.length === 0) {
    return;
  }
  const updatedAt = await this.getTime();
  const { timeAtLeast, groupIds: _groupIds } = updateOptions ?? {};
  if (timeAtLeast && updatedAt < timeAtLeast) {
    throw new Error(
      `Time sync issue with database ${updatedAt} < ${timeAtLeast}`
    );
  }
  const groupIds = _groupIds ?? keys.map(() => null);
  if (groupIds.length !== keys.length) {
    // Fixed: the group_ids count is now wrapped in balanced parentheses
    // (the message previously read "... group_ids N)" with no opening "(").
    throw new Error(
      `Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`
    );
  }
  // One [key, namespace, updated_at, group_id] tuple per input key.
  const recordsToUpsert = keys.map((key, i) => [
    key,
    this.namespace,
    updatedAt,
    groupIds[i],
  ]);
  const valuesPlaceholders = recordsToUpsert
    .map((_, j) =>
      this.generatePlaceholderForRowAt(j, recordsToUpsert[0].length)
    )
    .join(", ");
  const query = `INSERT INTO ${this.finalTableName} (key, namespace, updated_at, group_id) VALUES ${valuesPlaceholders} ON CONFLICT (key, namespace) DO UPDATE SET updated_at = EXCLUDED.updated_at;`;
  await this.pool.query(query, recordsToUpsert.flat());
}
/**
 * Checks, in input order, whether each key exists within this namespace.
 *
 * @param keys - Keys to probe. Empty list short-circuits to [].
 * @returns One boolean per input key, in the same order as `keys`.
 */
async exists(keys: string[]): Promise<boolean[]> {
  if (keys.length === 0) {
    return [];
  }
  // $1 is reserved for the namespace, so key placeholders start at $2.
  const keyPlaceholders = keys.map((_, i) => `$${i + 2}`).join(", ");
  // unnest ... WITH ORDINALITY preserves the caller's key ordering.
  const query = `
WITH ordered_keys AS (
  SELECT * FROM unnest(ARRAY[${keyPlaceholders}]) WITH ORDINALITY as t(key, o)
)
SELECT ok.key, (r.key IS NOT NULL) ex
FROM ordered_keys ok
LEFT JOIN ${this.finalTableName} r
ON r.key = ok.key
AND namespace = $1
ORDER BY ok.o;
`;
  const res = await this.pool.query(query, [this.namespace, ...keys]);
  return res.rows.map(({ ex }: { ex: boolean }) => ex);
}
/**
 * Lists keys in this namespace, optionally filtered by update-time bounds
 * and group ids, and optionally capped by `limit`.
 *
 * @param options - `before`/`after` bound updated_at (exclusive), `groupIds`
 *   restricts to matching group_id values, `limit` caps the result count.
 * @returns The matching keys.
 */
async listKeys(options?: ListKeyOptions): Promise<string[]> {
  const { before, after, limit, groupIds } = options ?? {};
  let query = `SELECT key FROM ${this.finalTableName} WHERE namespace = $1`;
  const values: (string | number | (string | null)[])[] = [this.namespace];
  let index = 2;
  if (before) {
    values.push(before);
    query += ` AND updated_at < $${index}`;
    index += 1;
  }
  if (after) {
    values.push(after);
    query += ` AND updated_at > $${index}`;
    index += 1;
  }
  // The group_id filter must be appended BEFORE the LIMIT clause. The
  // previous order produced invalid SQL ("... LIMIT $3 AND group_id = ...")
  // whenever both `limit` and `groupIds` were supplied.
  if (groupIds) {
    values.push(groupIds);
    query += ` AND group_id = ANY($${index})`;
    index += 1;
  }
  if (limit) {
    values.push(limit);
    query += ` LIMIT $${index}`;
    index += 1;
  }
  query += ";";
  const res = await this.pool.query(query, values);
  return res.rows.map((row: { key: string }) => row.key);
}
/**
 * Deletes the given keys from this namespace. An empty key list is a no-op.
 *
 * @param keys - Keys to remove.
 */
async deleteKeys(keys: string[]): Promise<void> {
  if (keys.length === 0) {
    return;
  }
  const sql = `DELETE FROM ${this.finalTableName} WHERE namespace = $1 AND key = ANY($2);`;
  const params = [this.namespace, keys];
  await this.pool.query(sql, params);
}
/**
 * Terminates the connection pool.
 *
 * After this resolves the manager can no longer issue queries; construct a
 * new instance to reconnect.
 * @returns {Promise<void>}
 */
async end(): Promise<void> {
  await this.pool.end();
}
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/indexes/memory.ts
|
import { ListKeyOptions, RecordManager, UpdateOptions } from "./base.js";
/** Internal bookkeeping for a single indexed key. */
interface MemoryRecord {
  // Timestamp (epoch milliseconds) of the most recent update for the key.
  updatedAt: number;
  // Optional grouping tag; set when the key is first inserted.
  groupId: string | null;
}

/**
 * Ephemeral, process-local RecordManager backed by a Map.
 * Nothing is persisted — intended for tests, examples, and short-lived
 * indexing runs.
 */
export class InMemoryRecordManager extends RecordManager {
  lc_namespace = ["langchain", "recordmanagers", "memory"];

  records: Map<string, MemoryRecord>;

  constructor() {
    super();
    this.records = new Map();
  }

  async createSchema(): Promise<void> {
    // Nothing to do here; present for compatibility with other record managers.
    return Promise.resolve();
  }

  async getTime(): Promise<number> {
    return Promise.resolve(Date.now());
  }

  /**
   * Upserts the given keys, refreshing timestamps for existing records and
   * inserting new ones with their group id.
   *
   * @param keys - Record keys to insert or refresh.
   * @param updateOptions - Optional `timeAtLeast` clock bound and per-key
   *   `groupIds` (must match `keys` in length).
   * @throws If the clock is behind `timeAtLeast`, or the `groupIds` length
   *   does not match the `keys` length.
   */
  async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
    const updatedAt = await this.getTime();
    const { timeAtLeast, groupIds: _groupIds } = updateOptions ?? {};
    if (timeAtLeast && updatedAt < timeAtLeast) {
      throw new Error(
        `Time sync issue with database ${updatedAt} < ${timeAtLeast}`
      );
    }
    const groupIds = _groupIds ?? keys.map(() => null);
    if (groupIds.length !== keys.length) {
      // Fixed: the group_ids count is now wrapped in balanced parentheses
      // (the message previously had an unmatched closing ")").
      throw new Error(
        `Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`
      );
    }
    keys.forEach((key, i) => {
      const old = this.records.get(key);
      if (old) {
        // Existing records only get their timestamp refreshed; the
        // originally-recorded groupId is kept.
        old.updatedAt = updatedAt;
      } else {
        this.records.set(key, { updatedAt, groupId: groupIds[i] });
      }
    });
  }

  async exists(keys: string[]): Promise<boolean[]> {
    return Promise.resolve(keys.map((key) => this.records.has(key)));
  }

  /**
   * Lists keys filtered by update-time bounds and group ids, capped by
   * `limit` when provided.
   */
  async listKeys(options?: ListKeyOptions): Promise<string[]> {
    const { before, after, limit, groupIds } = options ?? {};
    const filteredRecords = Array.from(this.records).filter(([_key, doc]) => {
      // Inclusive bounds for before and after (i.e. <= and >=).
      // This is technically incorrect, but because there is no
      // latency, it is not garanteed that after an update the
      // timestamp on subsequent listKeys calls will be different.
      const isBefore = !before || doc.updatedAt <= before;
      const isAfter = !after || doc.updatedAt >= after;
      const belongsToGroup = !groupIds || groupIds.includes(doc.groupId);
      return isBefore && isAfter && belongsToGroup;
    });
    return Promise.resolve(
      filteredRecords
        .map(([key]) => key)
        .slice(0, limit ?? filteredRecords.length)
    );
  }

  async deleteKeys(keys: string[]): Promise<void> {
    keys.forEach((key) => this.records.delete(key));
    return Promise.resolve();
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/indexes/sqlite.ts
|
// eslint-disable-next-line import/no-extraneous-dependencies
import Database, { Database as DatabaseType, Statement } from "better-sqlite3";
import {
ListKeyOptions,
RecordManagerInterface,
UpdateOptions,
} from "./base.js";
/** Shape of the row returned by the SQLite epoch-time query in getTime(). */
interface TimeRow {
  epoch: number;
}
/** Shape of rows returned by key-listing and existence queries. */
interface KeyRecord {
  key: string;
}
/**
 * Options for configuring the SQLiteRecordManager class.
 */
export type SQLiteRecordManagerOptions = {
  /**
   * The file path of the SQLite database.
   * One of either `localPath` or `connectionString` is required.
   */
  localPath?: string;
  /**
   * The connection string of the SQLite database.
   * One of either `localPath` or `connectionString` is required.
   */
  connectionString?: string;
  /**
   * The name of the table in the SQLite database.
   */
  tableName: string;
};
/**
 * RecordManager backed by a local SQLite database (better-sqlite3).
 * Stores one row per (key, namespace) pair with an update timestamp and an
 * optional group id, mirroring the Postgres record manager's schema.
 */
export class SQLiteRecordManager implements RecordManagerInterface {
  lc_namespace = ["langchain", "recordmanagers", "sqlite"];

  tableName: string;

  db: DatabaseType;

  namespace: string;

  /**
   * @param namespace - Logical namespace that scopes every operation.
   * @param config - Exactly one of `localPath`/`connectionString`, plus the
   *   table name.
   * @throws If neither or both of `localPath`/`connectionString` are given.
   */
  constructor(namespace: string, config: SQLiteRecordManagerOptions) {
    const { localPath, connectionString, tableName } = config;
    if (!connectionString && !localPath) {
      throw new Error(
        "One of either `localPath` or `connectionString` is required."
      );
    }
    if (connectionString && localPath) {
      throw new Error(
        "Only one of either `localPath` or `connectionString` is allowed."
      );
    }
    this.namespace = namespace;
    this.tableName = tableName;
    this.db = new Database(connectionString ?? localPath);
  }

  /**
   * Creates the record table and its indexes if they do not already exist.
   */
  async createSchema(): Promise<void> {
    try {
      this.db.exec(`
CREATE TABLE IF NOT EXISTS "${this.tableName}" (
  uuid TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(16)))),
  key TEXT NOT NULL,
  namespace TEXT NOT NULL,
  updated_at REAL NOT NULL,
  group_id TEXT,
  UNIQUE (key, namespace)
);
CREATE INDEX IF NOT EXISTS updated_at_index ON "${this.tableName}" (updated_at);
CREATE INDEX IF NOT EXISTS key_index ON "${this.tableName}" (key);
CREATE INDEX IF NOT EXISTS namespace_index ON "${this.tableName}" (namespace);
CREATE INDEX IF NOT EXISTS group_id_index ON "${this.tableName}" (group_id);`);
    } catch (error) {
      console.error("Error creating schema");
      throw error; // Re-throw the error to let the caller handle it
    }
  }

  /**
   * Current time from SQLite as whole epoch seconds (strftime('%s')).
   * NOTE(review): second-level granularity — successive calls within the
   * same second return the same value.
   */
  async getTime(): Promise<number> {
    try {
      const statement: Statement<[]> = this.db.prepare(
        "SELECT strftime('%s', 'now') AS epoch"
      );
      const { epoch } = statement.get() as TimeRow;
      return Number(epoch);
    } catch (error) {
      console.error("Error getting time in SQLiteRecordManager:");
      throw error;
    }
  }

  /**
   * Upserts the given keys inside a single transaction, stamping each with
   * the current database time.
   *
   * @param keys - Record keys to insert or refresh. Empty list is a no-op.
   * @param updateOptions - Optional `timeAtLeast` clock bound and per-key
   *   `groupIds` (must match `keys` in length).
   * @throws If the clock is behind `timeAtLeast`, or the `groupIds` length
   *   does not match the `keys` length.
   */
  async update(keys: string[], updateOptions?: UpdateOptions): Promise<void> {
    if (keys.length === 0) {
      return;
    }
    const updatedAt = await this.getTime();
    const { timeAtLeast, groupIds: _groupIds } = updateOptions ?? {};
    if (timeAtLeast && updatedAt < timeAtLeast) {
      throw new Error(
        `Time sync issue with database ${updatedAt} < ${timeAtLeast}`
      );
    }
    const groupIds = _groupIds ?? keys.map(() => null);
    if (groupIds.length !== keys.length) {
      throw new Error(
        `Number of keys (${keys.length}) does not match number of group_ids (${groupIds.length})`
      );
    }
    const recordsToUpsert = keys.map((key, i) => [
      key,
      this.namespace,
      updatedAt,
      groupIds[i] ?? null, // Ensure groupIds[i] is null if undefined
    ]);
    // Wrap the batch in a transaction so a partial failure rolls back all rows.
    const updateTransaction = this.db.transaction(() => {
      for (const row of recordsToUpsert) {
        this.db
          .prepare(
            `
INSERT INTO "${this.tableName}" (key, namespace, updated_at, group_id)
VALUES (?, ?, ?, ?)
ON CONFLICT (key, namespace) DO UPDATE SET updated_at = excluded.updated_at`
          )
          .run(...row);
      }
    });
    updateTransaction();
  }

  /**
   * Checks, in input order, whether each key exists within this namespace.
   *
   * @param keys - Keys to probe. Empty list short-circuits to [].
   * @returns One boolean per input key.
   */
  async exists(keys: string[]): Promise<boolean[]> {
    if (keys.length === 0) {
      return [];
    }
    // Prepare the placeholders and the query
    const placeholders = keys.map(() => `?`).join(", ");
    const sql = `
SELECT key
FROM "${this.tableName}"
WHERE namespace = ? AND key IN (${placeholders})`;
    // Initialize an array to fill with the existence checks
    const existsArray = new Array(keys.length).fill(false);
    try {
      // Execute the query
      const rows = this.db
        .prepare(sql)
        .all(this.namespace, ...keys) as KeyRecord[];
      // Create a set of existing keys for faster lookup
      const existingKeysSet = new Set(rows.map((row) => row.key));
      // Map the input keys to booleans indicating if they exist
      keys.forEach((key, index) => {
        existsArray[index] = existingKeysSet.has(key);
      });
      return existsArray;
    } catch (error) {
      console.error("Error checking existence of keys");
      throw error; // Allow the caller to handle the error
    }
  }

  /**
   * Lists keys in this namespace, optionally filtered by update-time bounds
   * and group ids, and optionally capped by `limit`.
   */
  async listKeys(options?: ListKeyOptions): Promise<string[]> {
    const { before, after, limit, groupIds } = options ?? {};
    let query = `SELECT key FROM "${this.tableName}" WHERE namespace = ?`;
    const values: (string | number | string[])[] = [this.namespace];
    if (before) {
      query += ` AND updated_at < ?`;
      values.push(before);
    }
    if (after) {
      query += ` AND updated_at > ?`;
      values.push(after);
    }
    // The group_id filter must be appended BEFORE the LIMIT clause. The
    // previous order produced invalid SQL ("... LIMIT ? AND group_id IN ...")
    // whenever both `limit` and `groupIds` were supplied.
    if (groupIds && Array.isArray(groupIds)) {
      query += ` AND group_id IN (${groupIds
        .filter((gid) => gid !== null)
        .map(() => "?")
        .join(", ")})`;
      values.push(...groupIds.filter((gid): gid is string => gid !== null));
    }
    if (limit) {
      query += ` LIMIT ?`;
      values.push(limit);
    }
    query += ";";
    // Directly using try/catch with async/await for cleaner flow
    try {
      const result = this.db.prepare(query).all(...values) as { key: string }[];
      return result.map((row) => row.key);
    } catch (error) {
      console.error("Error listing keys.");
      throw error; // Re-throw the error to be handled by the caller
    }
  }

  /**
   * Deletes the given keys from this namespace. An empty key list is a no-op.
   */
  async deleteKeys(keys: string[]): Promise<void> {
    if (keys.length === 0) {
      return;
    }
    const placeholders = keys.map(() => "?").join(", ");
    const query = `DELETE FROM "${this.tableName}" WHERE namespace = ? AND key IN (${placeholders});`;
    // better-sqlite3 only binds primitives; stringify anything unexpected.
    const values = [this.namespace, ...keys].map((v) =>
      typeof v !== "string" ? `${v}` : v
    );
    // Directly using try/catch with async/await for cleaner flow
    try {
      this.db.prepare(query).run(...values);
    } catch (error) {
      console.error("Error deleting keys");
      throw error; // Re-throw the error to be handled by the caller
    }
  }
}
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src
|
lc_public_repos/langchainjs/libs/langchain-community/src/indexes/base.ts
|
// Re-export the record-manager primitives from @langchain/core so existing
// imports from this community package's `indexes/base.js` keep working.
export {
  UUIDV5_NAMESPACE,
  type UpdateOptions,
  type ListKeyOptions,
  type RecordManagerInterface,
  RecordManager,
} from "@langchain/core/indexing";
|
0
|
lc_public_repos/langchainjs/libs/langchain-community/src/indexes
|
lc_public_repos/langchainjs/libs/langchain-community/src/indexes/tests/indexing.milvus.int.test.ts
|
import { Document } from "@langchain/core/documents";
import { index } from "@langchain/core/indexing";
import { BaseDocumentLoader } from "@langchain/core/document_loaders/base";
import { OpenAIEmbeddings } from "@langchain/openai";
import { InMemoryRecordManager } from "../memory.js";
import { sleep } from "../../utils/time.js";
import { Milvus } from "../../vectorstores/milvus.js";
// Populated in beforeAll and shared by every test in this file.
let collectionName: string;
let embeddings: OpenAIEmbeddings;
// https://docs.zilliz.com/docs/quick-start-1#create-a-collection
// Fill these in locally before running this (skipped) integration suite.
const MILVUS_ADDRESS = "";
const MILVUS_TOKEN = "";
const OPEN_AI_API_KEY = "";
/**
 * Minimal document loader that resolves with a fixed, in-memory document
 * list. Used to exercise the loader-based `docsSource` path of `index()`.
 */
class MockLoader extends BaseDocumentLoader {
  constructor(public docs: Document[]) {
    super();
  }

  // Resolves immediately with the documents supplied at construction.
  async load(): Promise<Document[]> {
    return this.docs;
  }
}
describe.skip("Indexing API", () => {
let recordManager: InMemoryRecordManager;
let vectorstore: Milvus;
beforeAll(async () => {
embeddings = new OpenAIEmbeddings({
openAIApiKey: OPEN_AI_API_KEY,
});
collectionName = `test_collection_${Math.random()
.toString(36)
.substring(7)}`;
recordManager = new InMemoryRecordManager();
await recordManager.createSchema();
vectorstore = await new Milvus(embeddings, {
collectionName,
autoId: false,
clientConfig: {
address: MILVUS_ADDRESS,
token: MILVUS_TOKEN,
},
});
});
afterEach(async () => {
recordManager.records.clear();
await index({
docsSource: [],
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
},
});
// Because the indexing API relies on timestamps, without this the tests are flaky.
await sleep(1000);
});
test("Test indexing sanity", async () => {
const docs = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
const initialIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
});
expect(initialIndexingResult.numAdded).toEqual(3);
const secondIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
});
expect(secondIndexingResult.numAdded).toEqual(0);
expect(secondIndexingResult.numSkipped).toEqual(3);
const query = "Document";
const result = await vectorstore.similaritySearch(query, 3);
expect(recordManager.records.size).toEqual(3);
const resultMetadatas = result.map(({ metadata }) => metadata);
expect(resultMetadatas.length).toBe(3);
});
test("Test indexing with cleanup full", async () => {
const docs = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: { cleanup: "full" },
});
const secondIndexingResult = await index({
docsSource: [],
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
},
});
expect(secondIndexingResult.numAdded).toEqual(0);
expect(secondIndexingResult.numSkipped).toEqual(0);
expect(secondIndexingResult.numDeleted).toEqual(3);
expect(recordManager.records.size).toEqual(0);
});
test("Test indexing with updated page content (full)", async () => {
const docs = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
},
});
docs[0].pageContent = "Document 0 Content";
const secondIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
},
});
expect(secondIndexingResult.numAdded).toEqual(1);
expect(secondIndexingResult.numDeleted).toEqual(1);
expect(secondIndexingResult.numSkipped).toEqual(2);
const query = "Document";
const result = await vectorstore.similaritySearch(query, 3);
expect(recordManager.records.size).toEqual(3);
const resultMetadatas = result.map(({ metadata }) => metadata);
expect(resultMetadatas.length).toBe(3);
});
test("Test indexing with updated metadata (full)", async () => {
const docs: Document[] = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
},
});
docs[0].metadata.field = "value";
const secondIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
},
});
expect(secondIndexingResult.numAdded).toEqual(1);
expect(secondIndexingResult.numDeleted).toEqual(1);
expect(secondIndexingResult.numSkipped).toEqual(2);
});
test("Test indexing with updated page content (incremental)", async () => {
const docs = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "incremental",
sourceIdKey: "source",
},
});
docs[0].pageContent = "Document 0 Content";
const secondIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "incremental",
sourceIdKey: "source",
},
});
expect(secondIndexingResult.numAdded).toEqual(1);
expect(secondIndexingResult.numDeleted).toEqual(1);
expect(secondIndexingResult.numSkipped).toEqual(2);
const query = "Document";
const result = await vectorstore.similaritySearch(query, 3);
expect(recordManager.records.size).toEqual(3);
const resultMetadatas = result.map(({ metadata }) => metadata);
expect(resultMetadatas.length).toBe(3);
});
test("Test indexing with updated metadata (incremental)", async () => {
const docs: Document[] = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "incremental",
sourceIdKey: "source",
},
});
docs[0].metadata.field = "value";
const secondIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "incremental",
sourceIdKey: "source",
},
});
expect(secondIndexingResult.numAdded).toEqual(1);
expect(secondIndexingResult.numDeleted).toEqual(1);
expect(secondIndexingResult.numSkipped).toEqual(2);
});
test("Test indexing with updated page content without cleanup", async () => {
const docs: Document[] = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
await index({ docsSource: docs, recordManager, vectorStore: vectorstore });
docs[0].pageContent = "Document 0 Content";
const secondIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
});
expect(secondIndexingResult.numAdded).toEqual(1);
expect(secondIndexingResult.numDeleted).toEqual(0);
expect(secondIndexingResult.numSkipped).toEqual(2);
});
test("Test indexing with forced update", async () => {
const docs: Document[] = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
];
await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
},
});
// Force update is mostly useful when you are re-indexing with updated embeddings.
// Some vector stores (such as Milvus) do not support overwriting records
// and will throw an error if you try to do so. We must therefore delete the records
// before re-indexing.
await vectorstore.delete({ ids: Array.from(recordManager.records.keys()) });
const secondIndexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
options: {
cleanup: "full",
forceUpdate: true,
},
});
expect(secondIndexingResult.numAdded).toEqual(0);
expect(secondIndexingResult.numDeleted).toEqual(0);
expect(secondIndexingResult.numUpdated).toEqual(3);
});
test("Test indexing with duplicate documents", async () => {
const docs: Document[] = [
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
];
const indexingResult = await index({
docsSource: docs,
recordManager,
vectorStore: vectorstore,
});
expect(indexingResult.numAdded).toEqual(1);
expect(indexingResult.numSkipped).toEqual(0);
});
test("Test indexing with doc loader", async () => {
const mockLoader = new MockLoader([
{
pageContent: "Document 1 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 2 Content",
metadata: { source: "test" },
},
{
pageContent: "Document 3 Content",
metadata: { source: "test" },
},
]);
const indexingResult = await index({
docsSource: mockLoader,
recordManager,
vectorStore: vectorstore,
});
expect(indexingResult.numAdded).toEqual(3);
});
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.