index
int64
0
0
repo_id
stringclasses
596 values
file_path
stringlengths
31
168
content
stringlengths
1
6.2M
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/.prettierrc
{ "$schema": "https://json.schemastore.org/prettierrc", "printWidth": 80, "tabWidth": 2, "useTabs": false, "semi": true, "singleQuote": false, "quoteProps": "as-needed", "jsxSingleQuote": false, "trailingComma": "es5", "bracketSpacing": true, "arrowParens": "always", "requirePragma": false, "insertPragma": false, "proseWrap": "preserve", "htmlWhitespaceSensitivity": "css", "vueIndentScriptAndStyle": false, "endOfLine": "lf" }
0
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/src/index.ts
// Public entrypoint for the @langchain/baidu-qianfan package:
// re-exports the chat model and embeddings integrations.
export * from "./chat_models.js";
export * from "./embeddings.js";
0
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/src/chat_models.ts
import {
  BaseChatModel,
  type BaseChatModelParams,
} from "@langchain/core/language_models/chat_models";
import {
  AIMessage,
  AIMessageChunk,
  BaseMessage,
  ChatMessage,
} from "@langchain/core/messages";
import {
  ChatGeneration,
  ChatGenerationChunk,
  ChatResult,
} from "@langchain/core/outputs";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import { ChatCompletion } from "@baiducloud/qianfan";

/**
 * Type representing the role of a message in the Qianfan chat model.
 */
export type QianfanRole = "assistant" | "user";

/**
 * Interface representing a message in the Qianfan chat model.
 */
interface Qianfan {
  role: QianfanRole;
  content: string;
}

/**
 * Interface representing the usage of tokens in a chat completion.
 */
interface TokenUsage {
  completionTokens?: number;
  promptTokens?: number;
  totalTokens?: number;
}

/**
 * Interface representing a request for a chat completion.
 */
interface ChatCompletionRequest {
  messages: Qianfan[];
  stream?: boolean;
  user_id?: string;
  temperature?: number;
  top_p?: number;
  penalty_score?: number;
  system?: string;
}

/**
 * Interface representing a response from a chat completion.
 */
interface ChatCompletionResponse {
  id: string;
  object: string;
  created: number;
  sentence_id: number;
  is_end: boolean;
  is_truncated: boolean;
  result: string;
  need_clear_history: boolean;
  finish_reason: string;
  usage: TokenUsage;
}

/**
 * Interface defining the input to the ChatBaiduQianfan class.
 */
declare interface BaiduQianfanChatInput {
  /**
   * Model name to use. Available options are: ERNIE-Bot, ERNIE-Lite-8K, ERNIE-Bot-4
   * Alias for `model`
   * @default "ERNIE-Bot-turbo"
   */
  modelName: string;

  /** Model name to use. Available options are: ERNIE-Bot, ERNIE-Lite-8K, ERNIE-Bot-4
   * @default "ERNIE-Bot-turbo"
   */
  model: string;

  /** Whether to stream the results or not. Defaults to false. */
  streaming?: boolean;

  /** Messages to pass as a prefix to the prompt */
  prefixMessages?: Qianfan[];

  /**
   * ID of the end-user who made requests.
   */
  userId?: string;

  /**
   * Access key to use when making requests by Qianfan SDK. Defaults to the value of
   * the `QIANFAN_AK` environment variable.
   */
  qianfanAK?: string;

  /**
   * Secret key to use when making requests by Qianfan SDK. Defaults to the value of
   * the `QIANFAN_SK` environment variable.
   */
  qianfanSK?: string;

  /**
   * Access key to use when making requests by Qianfan SDK with auth. Defaults to the value of
   * `QIANFAN_ACCESS_KEY` environment variable.
   */
  qianfanAccessKey?: string;

  /**
   * Secret key to use when making requests by Qianfan SDK with auth. Defaults to the value of
   * `QIANFAN_SECRET_KEY` environment variable.
   */
  qianfanSecretKey?: string;

  /** Amount of randomness injected into the response. Ranges
   * from 0 to 1 (0 is not included). Use temp closer to 0 for analytical /
   * multiple choice, and temp closer to 1 for creative
   * and generative tasks. Defaults to 0.95.
   */
  temperature?: number;

  /** Total probability mass of tokens to consider at each step. Range
   * from 0 to 1.0. Defaults to 0.8.
   */
  topP?: number;

  /** Penalizes repeated tokens according to frequency. Range
   * from 1.0 to 2.0. Defaults to 1.0.
   */
  penaltyScore?: number;
}

/**
 * Function that extracts the custom role of a generic chat message.
 * Unknown roles are passed through with a console warning rather than
 * rejected, so callers still get a (best-effort) role value.
 * @param message Chat message from which to extract the custom role.
 * @returns The custom role of the chat message.
 */
function extractGenericMessageCustomRole(message: ChatMessage) {
  if (message.role !== "assistant" && message.role !== "user") {
    console.warn(`Unknown message role: ${message.role}`);
  }
  return message.role as QianfanRole;
}

/**
 * Function that converts a base message to a Qianfan message role.
 * System messages are handled separately (hoisted into the request's
 * `system` field by the callers), so they must never reach this mapping.
 * @param message Base message to convert.
 * @returns The Qianfan message role.
 * @throws If the message is a system/function message or an unknown type.
 */
function messageToQianfanRole(message: BaseMessage): QianfanRole {
  const type = message._getType();
  switch (type) {
    case "ai":
      return "assistant";
    case "human":
      return "user";
    case "system":
      throw new Error("System messages should not be here");
    case "function":
      throw new Error("Function messages not supported");
    case "generic": {
      if (!ChatMessage.isInstance(message))
        throw new Error("Invalid generic chat message");
      return extractGenericMessageCustomRole(message);
    }
    default:
      throw new Error(`Unknown message type: ${type}`);
  }
}

/**
 * Wrapper around Baidu ERNIE large language models that use the Chat endpoint.
 *
 * To use you should have the `QIANFAN_AK` and `QIANFAN_SK`
 * environment variables set (or `QIANFAN_ACCESS_KEY` / `QIANFAN_SECRET_KEY`
 * for IAM-based authentication, which takes precedence).
 *
 * @augments BaseChatModel
 * @implements BaiduQianfanChatInput
 */
export class ChatBaiduQianfan
  extends BaseChatModel
  implements BaiduQianfanChatInput
{
  static lc_name() {
    return "ChatBaiduQianfan";
  }

  // Extra keys accepted at call time in addition to constructor params.
  get callKeys(): string[] {
    return ["stop", "signal", "options"];
  }

  // Maps credential fields to env-var names for serialization redaction.
  get lc_secrets(): { [key: string]: string } | undefined {
    return {
      qianfanAK: "QIANFAN_AK",
      qianfanSK: "QIANFAN_SK",
      qianfanAccessKey: "QIANFAN_ACCESS_KEY",
      qianfanSecretKey: "QIANFAN_SECRET_KEY",
    };
  }

  get lc_aliases(): { [key: string]: string } | undefined {
    return undefined;
  }

  lc_serializable = true;

  streaming = false;

  prefixMessages?: Qianfan[];

  userId?: string;

  modelName = "ERNIE-Bot-turbo";

  model = "ERNIE-Bot-turbo";

  temperature?: number | undefined;

  topP?: number | undefined;

  penaltyScore?: number | undefined;

  // Underlying @baiducloud/qianfan ChatCompletion client (untyped SDK surface).
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  client?: any;

  qianfanAK?: string;

  qianfanSK?: string;

  qianfanAccessKey?: string;

  qianfanSecretKey?: string;

  constructor(fields?: Partial<BaiduQianfanChatInput> & BaseChatModelParams) {
    super(fields ?? {});

    // `model` wins over its alias `modelName`; both end up equal afterwards.
    this.modelName = fields?.model ?? fields?.modelName ?? this.model;
    this.model = this.modelName;

    if (!this.model) {
      throw new Error(`Please provide modelName`);
    }

    this.qianfanAK = fields?.qianfanAK ?? getEnvironmentVariable("QIANFAN_AK");

    this.qianfanSK = fields?.qianfanSK ?? getEnvironmentVariable("QIANFAN_SK");

    this.qianfanAccessKey =
      fields?.qianfanAccessKey ?? getEnvironmentVariable("QIANFAN_ACCESS_KEY");

    this.qianfanSecretKey =
      fields?.qianfanSecretKey ?? getEnvironmentVariable("QIANFAN_SECRET_KEY");

    // Prefer IAM access-key/secret-key authentication when both are present;
    // fall back to classic AK/SK; otherwise fail fast.
    if (this.qianfanAccessKey && this.qianfanSecretKey) {
      this.client = new ChatCompletion({
        QIANFAN_ACCESS_KEY: this.qianfanAccessKey,
        QIANFAN_SECRET_KEY: this.qianfanSecretKey,
      });
    } else if (this.qianfanAK && this.qianfanSK) {
      this.client = new ChatCompletion({
        QIANFAN_AK: this.qianfanAK,
        QIANFAN_SK: this.qianfanSK,
      });
    } else {
      throw new Error("Please provide AK/SK");
    }

    this.streaming = fields?.streaming ?? this.streaming;
    this.prefixMessages = fields?.prefixMessages ?? this.prefixMessages;
    this.userId = fields?.userId ?? this.userId;
    this.temperature = fields?.temperature ?? this.temperature;
    this.topP = fields?.topP ?? this.topP;
    this.penaltyScore = fields?.penaltyScore ?? this.penaltyScore;
  }

  /**
   * Get the parameters used to invoke the model
   */
  invocationParams(): Omit<ChatCompletionRequest, "messages"> {
    return {
      stream: this.streaming,
      user_id: this.userId,
      temperature: this.temperature,
      top_p: this.topP,
      penalty_score: this.penaltyScore,
    };
  }

  /**
   * Get the identifying parameters for the model
   */
  identifyingParams() {
    return {
      model_name: this.model,
      ...this.invocationParams(),
    };
  }

  // Converts LangChain messages into the SDK's {role, content} shape.
  private _ensureMessages(messages: BaseMessage[]): Qianfan[] {
    return messages.map((message) => ({
      role: messageToQianfanRole(message),
      content: message.content.toString(),
    }));
  }

  /** @ignore */
  async _generate(
    messages: BaseMessage[],
    options: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): Promise<ChatResult> {
    if (this.streaming) {
      // Streaming mode: accumulate all chunks into one final generation.
      let finalChunk: ChatGenerationChunk | undefined;
      const stream = this._streamResponseChunks(messages, options, runManager);
      for await (const chunk of stream) {
        if (finalChunk === undefined) {
          finalChunk = chunk;
        } else {
          finalChunk = finalChunk.concat(chunk);
        }
      }

      if (finalChunk === undefined) {
        throw new Error("No chunks returned from BaiduQianFan API.");
      }

      return {
        generations: [
          {
            text: finalChunk.text,
            message: finalChunk.message,
          },
        ],
        llmOutput: finalChunk.generationInfo?.usage ?? {},
      };
    } else {
      const params = this.invocationParams();

      // Qianfan takes the system prompt as a dedicated request field,
      // so pull the first system message out of the conversation.
      const systemMessage = messages.find(
        (message) => message._getType() === "system"
      );
      if (systemMessage) {
        // eslint-disable-next-line no-param-reassign
        messages = messages.filter((message) => message !== systemMessage);
        params.system = systemMessage.content.toString();
      }
      const messagesMapped = this._ensureMessages(messages);

      const data = (await this.completionWithRetry(
        {
          ...params,
          messages: messagesMapped,
        },
        false
      )) as ChatCompletionResponse;

      const tokenUsage = data.usage || {};

      const generations: ChatGeneration[] = [
        {
          text: data.result || "",
          message: new AIMessage(data.result || ""),
        },
      ];

      return {
        generations,
        llmOutput: { tokenUsage },
      };
    }
  }

  /** @ignore */
  async completionWithRetry(
    request: ChatCompletionRequest,
    stream: boolean
  ): Promise<
    ChatCompletionResponse | AsyncIterableIterator<ChatCompletionResponse>
  > {
    // The SDK returns either a full response or an async iterator depending
    // on `request.stream`; the cast only narrows the streaming case.
    const makeCompletionRequest = async () => {
      const response = await this.client.chat(request, this.model);
      if (!stream) {
        return response;
      } else {
        return response as AsyncIterableIterator<ChatCompletionResponse>;
      }
    };

    return this.caller.call(makeCompletionRequest);
  }

  async *_streamResponseChunks(
    messages: BaseMessage[],
    _options?: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): AsyncGenerator<ChatGenerationChunk> {
    const parameters = {
      ...this.invocationParams(),
      stream: true,
    };

    // Same system-message hoisting as the non-streaming path in _generate.
    const systemMessage = messages.find(
      (message) => message._getType() === "system"
    );
    if (systemMessage) {
      // eslint-disable-next-line no-param-reassign
      messages = messages.filter((message) => message !== systemMessage);
      parameters.system = systemMessage.content.toString();
    }
    const messagesMapped = this._ensureMessages(messages);

    const stream = (await this.caller.call(async () =>
      this.completionWithRetry(
        {
          ...parameters,
          messages: messagesMapped,
        },
        true
      )
    )) as AsyncIterableIterator<ChatCompletionResponse>;

    for await (const chunk of stream) {
      const { result, is_end, id } = chunk;
      // Usage/request metadata is only attached to the terminal chunk.
      yield new ChatGenerationChunk({
        text: result,
        message: new AIMessageChunk({ content: result }),
        generationInfo: is_end
          ? {
              is_end,
              request_id: id,
              usage: chunk.usage,
            }
          : undefined,
      });
      await runManager?.handleLLMNewToken(result);
    }
  }

  _llmType() {
    return "baiduqianfan";
  }
}
0
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/src/embeddings.ts
import { Embeddings, type EmbeddingsParams } from "@langchain/core/embeddings";
import { chunkArray } from "@langchain/core/utils/chunk_array";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import { Embedding } from "@baiducloud/qianfan";

export interface BaiduQianfanEmbeddingsParams extends EmbeddingsParams {
  /** Model name to use */
  modelName: "Embedding-V1" | "bge-large-zh" | "bge-large-en" | "tao-8k";

  /**
   * Timeout to use when making requests to BaiduQianfan.
   */
  timeout?: number;

  /**
   * The maximum number of characters allowed for embedding in a single request varies by model:
   * - Embedding-V1 model: up to 1000 characters
   * - bge-large-zh model: up to 2000 characters
   * - bge-large-en model: up to 2000 characters
   * - tao-8k model: up to 28000 characters
   *
   * Note: These limits are model-specific and should be adhered to for optimal performance.
   */
  batchSize?: number;

  /**
   * Whether to strip new lines from the input text.
   */
  stripNewLines?: boolean;
}

// Request body for the Qianfan embedding endpoint.
interface EmbeddingCreateParams {
  input: string[];
}

// Successful response: one embedding vector per input, plus token usage.
interface EmbeddingResponse {
  data: { object: "embedding"; index: number; embedding: number[] }[];

  usage: {
    prompt_tokens: number;
    total_tokens: number;
  };

  id: string;
}

// Error response shape returned by the Qianfan API.
interface EmbeddingErrorResponse {
  error_code: number | string;
  error_msg: string;
}

export class BaiduQianfanEmbeddings
  extends Embeddings
  implements BaiduQianfanEmbeddingsParams
{
  modelName: BaiduQianfanEmbeddingsParams["modelName"] = "Embedding-V1";

  batchSize = 16;

  stripNewLines = true;

  qianfanAK: string | undefined;

  qianfanSK: string | undefined;

  qianfanAccessKey: string | undefined;

  qianfanSecretKey: string | undefined;

  // NOTE(review): declared but never assigned in this file — presumably a
  // leftover from an older token-based auth flow; verify before relying on it.
  accessToken: string;

  // Underlying @baiducloud/qianfan Embedding client (untyped SDK surface).
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  embeddings: any;

  constructor(
    fields?: Partial<BaiduQianfanEmbeddingsParams> & {
      verbose?: boolean;
      qianfanAK?: string;
      qianfanSK?: string;
      qianfanAccessKey?: string;
      qianfanSecretKey?: string;
    }
  ) {
    // Default to a low request concurrency unless the caller overrides it.
    const fieldsWithDefaults = { maxConcurrency: 2, ...fields };
    super(fieldsWithDefaults);

    this.qianfanAK =
      fieldsWithDefaults?.qianfanAK ?? getEnvironmentVariable("QIANFAN_AK");

    this.qianfanSK =
      fieldsWithDefaults?.qianfanSK ?? getEnvironmentVariable("QIANFAN_SK");

    this.qianfanAccessKey =
      fieldsWithDefaults?.qianfanAccessKey ??
      getEnvironmentVariable("QIANFAN_ACCESS_KEY");

    this.qianfanSecretKey =
      fieldsWithDefaults?.qianfanSecretKey ??
      getEnvironmentVariable("QIANFAN_SECRET_KEY");

    // Prefer IAM access-key/secret-key authentication when both are present;
    // fall back to classic AK/SK; otherwise fail fast.
    if (this.qianfanAccessKey && this.qianfanSecretKey) {
      this.embeddings = new Embedding({
        QIANFAN_ACCESS_KEY: this.qianfanAccessKey,
        QIANFAN_SECRET_KEY: this.qianfanSecretKey,
      });
    } else if (this.qianfanAK && this.qianfanSK) {
      this.embeddings = new Embedding({
        QIANFAN_AK: this.qianfanAK,
        QIANFAN_SK: this.qianfanSK,
      });
    } else {
      throw new Error("Please provide AK/SK");
    }

    this.modelName = fieldsWithDefaults?.modelName ?? this.modelName;

    // tao-8k only accepts a single input per request.
    if (this.modelName === "tao-8k") {
      if (fieldsWithDefaults?.batchSize && fieldsWithDefaults.batchSize !== 1) {
        throw new Error(
          "tao-8k model supports only a batchSize of 1. Please adjust your batchSize accordingly"
        );
      }
      this.batchSize = 1;
    } else {
      this.batchSize = fieldsWithDefaults?.batchSize ?? this.batchSize;
    }

    this.stripNewLines =
      fieldsWithDefaults?.stripNewLines ?? this.stripNewLines;
  }

  /**
   * Method to generate embeddings for an array of documents. Splits the
   * documents into batches and makes requests to the BaiduQianFan API to generate
   * embeddings.
   * @param texts Array of documents to generate embeddings for.
   * @returns Promise that resolves to a 2D array of embeddings for each document.
   */
  async embedDocuments(texts: string[]): Promise<number[][]> {
    const batches = chunkArray(
      this.stripNewLines ? texts.map((t) => t.replace(/\n/g, " ")) : texts,
      this.batchSize
    );

    // All batch requests fire concurrently; the caller-level rate limiter
    // (maxConcurrency) throttles actual parallelism.
    const batchRequests = batches.map((batch) => {
      const params = this.getParams(batch);

      return this.embeddingWithRetry(params);
    });

    const batchResponses = await Promise.all(batchRequests);
    const embeddings: number[][] = [];

    for (let i = 0; i < batchResponses.length; i += 1) {
      const batch = batches[i];
      const batchResponse = batchResponses[i] || [];
      for (let j = 0; j < batch.length; j += 1) {
        // NOTE(review): if the API returns fewer vectors than inputs,
        // batchResponse[j] is undefined and is pushed as-is — verify the
        // API guarantees one vector per input.
        embeddings.push(batchResponse[j]);
      }
    }

    return embeddings;
  }

  /**
   * Method to generate an embedding for a single document. Calls the
   * embeddingWithRetry method with the document as the input.
   * @param text Document to generate an embedding for.
   * @returns Promise that resolves to an embedding for the document.
   */
  async embedQuery(text: string): Promise<number[]> {
    const params = this.getParams([
      this.stripNewLines ? text.replace(/\n/g, " ") : text,
    ]);

    const embeddings = (await this.embeddingWithRetry(params)) || [[]];
    return embeddings[0];
  }

  /**
   * Method to generate an embedding params.
   * @param texts Array of documents to generate embeddings for.
   * @returns an embedding params.
   */
  private getParams(
    texts: EmbeddingCreateParams["input"]
  ): EmbeddingCreateParams {
    return {
      input: texts,
    };
  }

  /**
   * Private method to make a request to the BaiduQianfan API to generate
   * embeddings. Handles the retry logic and returns the response from the
   * API.
   * @param request Request to send to the BaiduQianfan API.
   * @returns Promise that resolves to the response from the API.
   * @throws If the API responds with a non-empty `error_code`.
   */
  private async embeddingWithRetry(body: EmbeddingCreateParams) {
    const embeddingData: EmbeddingResponse | EmbeddingErrorResponse =
      await this.embeddings.embedding(body, this.modelName);

    if ("error_code" in embeddingData && embeddingData.error_code) {
      throw new Error(
        `${embeddingData.error_code}: ${embeddingData.error_msg}`
      );
    }

    return (embeddingData as EmbeddingResponse).data.map(
      ({ embedding }) => embedding
    );
  }
}
0
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/src
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/src/tests/chat_models.int.test.ts
import { test } from "@jest/globals"; import { HumanMessage } from "@langchain/core/messages"; import { ChatBaiduQianfan } from "../chat_models.js"; test("invoke", async () => { const chat = new ChatBaiduQianfan({ model: "ERNIE-Lite-8K", }); const message = new HumanMessage("北京天气"); const res = await chat.invoke([message]); // console.log(res.content); expect(res.content.length).toBeGreaterThan(10); }); test("invokeWithStream", async () => { const chat = new ChatBaiduQianfan({ model: "ERNIE-Lite-8K", streaming: true, }); const message = new HumanMessage("等额本金和等额本息有什么区别?"); const res = await chat.invoke([message]); // console.log({ res }); expect(res.content.length).toBeGreaterThan(10); });
0
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/src
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/src/tests/embeddings.int.test.ts
import { test } from "@jest/globals"; import { BaiduQianfanEmbeddings } from "../embeddings.js"; test("embedQuery", async () => { const embeddings = new BaiduQianfanEmbeddings(); const res = await embeddings.embedQuery("Introduce the city Beijing"); // console.log({ res }); expect(res.length).toBeGreaterThan(10); }); test("embedDocuments", async () => { const embeddings = new BaiduQianfanEmbeddings(); const res = await embeddings.embedDocuments(["Hello world", "Bye bye"]); // console.log({ res }); expect(res.length).toBe(2); });
0
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan
lc_public_repos/langchainjs/libs/langchain-baidu-qianfan/scripts/jest-setup-after-env.js
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
import { afterAll, jest } from "@jest/globals";

// Allow console.log to be disabled in tests
if (process.env.DISABLE_CONSOLE_LOGS === "true") {
  console.log = jest.fn();
}

// Drain any pending callback-manager promises before jest tears down.
afterAll(awaitAllCallbacks);
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/tsconfig.json
{ "extends": "@tsconfig/recommended", "compilerOptions": { "outDir": "../dist", "rootDir": "./src", "target": "ES2021", "lib": ["ES2021", "ES2022.Object", "DOM"], "module": "ES2020", "moduleResolution": "nodenext", "esModuleInterop": true, "declaration": true, "noImplicitReturns": true, "noFallthroughCasesInSwitch": true, "noUnusedLocals": true, "noUnusedParameters": true, "useDefineForClassFields": true, "strictPropertyInitialization": false, "allowJs": true, "strict": true }, "include": ["src/**/*"], "exclude": ["node_modules", "dist", "docs"] }
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/LICENSE
The MIT License Copyright (c) 2023 LangChain Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/jest.config.cjs
/** @type {import('ts-jest').JestConfigWithTsJest} */ module.exports = { preset: "ts-jest/presets/default-esm", testEnvironment: "./jest.env.cjs", modulePathIgnorePatterns: ["dist/", "docs/"], moduleNameMapper: { "^(\\.{1,2}/.*)\\.js$": "$1", }, transform: { "^.+\\.tsx?$": ["@swc/jest"], }, transformIgnorePatterns: [ "/node_modules/", "\\.pnp\\.[^\\/]+$", "./scripts/jest-setup-after-env.js", ], setupFiles: ["dotenv/config"], testTimeout: 20_000, passWithNoTests: true, collectCoverageFrom: ["src/**/*.ts"], };
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/jest.env.cjs
const { TestEnvironment } = require("jest-environment-node");

/**
 * Node test environment that shares the host realm's Float32Array with the
 * test sandbox.
 */
class AdjustedTestEnvironmentToSupportFloat32Array extends TestEnvironment {
  constructor(config, context) {
    super(config, context);
    // Make `instanceof Float32Array` return true in tests, avoiding
    // https://github.com/xenova/transformers.js/issues/57 and
    // https://github.com/jestjs/jest/issues/2549
    this.global.Float32Array = Float32Array;
  }
}

module.exports = AdjustedTestEnvironmentToSupportFloat32Array;
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/README.md
# @langchain/aws This package contains the LangChain.js integrations for AWS through their SDK. ## Installation ```bash npm install @langchain/aws ``` This package, along with the main LangChain package, depends on [`@langchain/core`](https://npmjs.com/package/@langchain/core/). If you are using this package with other LangChain packages, you should make sure that all of the packages depend on the same instance of @langchain/core. You can do so by adding appropriate fields to your project's `package.json` like this: ```json { "name": "your-project", "version": "0.0.0", "dependencies": { "@langchain/aws": "^0.0.1", "@langchain/core": "^0.3.0" }, "resolutions": { "@langchain/core": "^0.3.0" }, "overrides": { "@langchain/core": "^0.3.0" }, "pnpm": { "overrides": { "@langchain/core": "^0.3.0" } } } ``` The field you need depends on the package manager you're using, but we recommend adding a field for the common `yarn`, `npm`, and `pnpm` to maximize compatibility. ## Chat Models This package contains the `ChatBedrockConverse` class, which is the recommended way to interface with the AWS Bedrock Converse series of models. To use, install the requirements, and configure your environment. ```bash export BEDROCK_AWS_REGION= export BEDROCK_AWS_SECRET_ACCESS_KEY= export BEDROCK_AWS_ACCESS_KEY_ID= ``` Then initialize ```typescript import { ChatBedrockConverse } from "@langchain/aws"; const model = new ChatBedrockConverse({ region: process.env.BEDROCK_AWS_REGION ?? "us-east-1", credentials: { secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY, accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID, }, }); const response = await model.invoke(new HumanMessage("Hello world!")); ``` ### Streaming ```typescript import { ChatBedrockConverse } from "@langchain/aws"; const model = new ChatBedrockConverse({ region: process.env.BEDROCK_AWS_REGION ?? 
"us-east-1", credentials: { secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY, accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID, }, }); const response = await model.stream(new HumanMessage("Hello world!")); ``` ## Development To develop the AWS package, you'll need to follow these instructions: ### Install dependencies ```bash yarn install ``` ### Build the package ```bash yarn build ``` Or from the repo root: ```bash yarn build --filter=@langchain/aws ``` ### Run tests Test files should live within a `tests/` file in the `src/` folder. Unit tests should end in `.test.ts` and integration tests should end in `.int.test.ts`: ```bash $ yarn test $ yarn test:int ``` ### Lint & Format Run the linter & formatter to ensure your code is up to standard: ```bash yarn lint && yarn format ``` ### Adding new entrypoints If you add a new file to be exported, either import & re-export from `src/index.ts`, or add it to the `entrypoints` field in the `config` variable located inside `langchain.config.js` and run `yarn build` to generate the new entrypoint. ## Publishing After running `yarn build`, publish a new version with: ```bash $ npm publish ```
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/.release-it.json
{ "github": { "release": true, "autoGenerate": true, "tokenRef": "GITHUB_TOKEN_RELEASE" }, "npm": { "versionArgs": ["--workspaces-update=false"] } }
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/.eslintrc.cjs
module.exports = {
  extends: [
    "airbnb-base",
    "eslint:recommended",
    "prettier",
    "plugin:@typescript-eslint/recommended",
  ],
  parserOptions: {
    ecmaVersion: 12,
    parser: "@typescript-eslint/parser",
    // Type-aware rules (no-floating-promises etc.) need the project tsconfig.
    project: "./tsconfig.json",
    sourceType: "module",
  },
  plugins: ["@typescript-eslint", "no-instanceof"],
  // Only lint TypeScript sources; build output and plain JS are skipped.
  ignorePatterns: [
    ".eslintrc.cjs",
    "scripts",
    "node_modules",
    "dist",
    "dist-cjs",
    "*.js",
    "*.cjs",
    "*.d.ts",
  ],
  rules: {
    // Force env access through helpers rather than process.env directly.
    "no-process-env": 2,
    "no-instanceof/no-instanceof": 2,
    "@typescript-eslint/explicit-module-boundary-types": 0,
    "@typescript-eslint/no-empty-function": 0,
    "@typescript-eslint/no-shadow": 0,
    "@typescript-eslint/no-empty-interface": 0,
    "@typescript-eslint/no-use-before-define": ["error", "nofunc"],
    "@typescript-eslint/no-unused-vars": ["warn", { args: "none" }],
    "@typescript-eslint/no-floating-promises": "error",
    "@typescript-eslint/no-misused-promises": "error",
    camelcase: 0,
    "class-methods-use-this": 0,
    "import/extensions": [2, "ignorePackages"],
    "import/no-extraneous-dependencies": [
      "error",
      { devDependencies: ["**/*.test.ts"] },
    ],
    "import/no-unresolved": 0,
    "import/prefer-default-export": 0,
    "keyword-spacing": "error",
    "max-classes-per-file": 0,
    "max-len": 0,
    "no-await-in-loop": 0,
    "no-bitwise": 0,
    "no-console": 0,
    "no-restricted-syntax": 0,
    "no-shadow": 0,
    "no-continue": 0,
    "no-void": 0,
    "no-underscore-dangle": 0,
    "no-use-before-define": 0,
    "no-useless-constructor": 0,
    "no-return-await": 0,
    "consistent-return": 0,
    "no-else-return": 0,
    "func-names": 0,
    "no-lonely-if": 0,
    "prefer-rest-params": 0,
    "new-cap": ["error", { properties: false, capIsNew: false }],
  },
  overrides: [
    {
      // Test files may keep unused helpers around.
      files: ["**/*.test.ts"],
      rules: {
        "@typescript-eslint/no-unused-vars": "off",
      },
    },
  ],
};
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/langchain.config.js
import { resolve, dirname } from "node:path";
import { fileURLToPath } from "node:url";

// Directory containing this config file (import.meta.url is constant, so
// computing it once at module load is equivalent to computing it per call).
const packageDir = dirname(fileURLToPath(import.meta.url));

/**
 * Resolve a path relative to this config file's directory.
 * @param {string} relativePath
 * @returns {string}
 */
function abs(relativePath) {
  return resolve(packageDir, relativePath);
}

export const config = {
  internals: [/node\:/, /@langchain\/core\//],
  entrypoints: {
    index: "index",
  },
  requiresOptionalDependency: [],
  tsConfigPath: resolve("./tsconfig.json"),
  cjsSource: "./dist-cjs",
  cjsDestination: "./dist",
  abs,
};
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/package.json
{ "name": "@langchain/aws", "version": "0.1.2", "description": "LangChain AWS integration", "type": "module", "engines": { "node": ">=18" }, "main": "./index.js", "types": "./index.d.ts", "repository": { "type": "git", "url": "git@github.com:langchain-ai/langchainjs.git" }, "homepage": "https://github.com/langchain-ai/langchainjs/tree/main/libs/langchain-aws/", "scripts": { "build": "yarn turbo:command build:internal --filter=@langchain/aws", "build:internal": "yarn lc_build --create-entrypoints --pre --tree-shaking", "lint:eslint": "NODE_OPTIONS=--max-old-space-size=4096 eslint --cache --ext .ts,.js src/", "lint:dpdm": "dpdm --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts", "lint": "yarn lint:eslint && yarn lint:dpdm", "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm", "clean": "rm -rf .turbo dist/", "prepack": "yarn build", "test": "NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%", "test:watch": "NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts", "test:single": "NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", "test:int": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", "format": "prettier --config .prettierrc --write \"src\"", "format:check": "prettier --config .prettierrc --check \"src\"" }, "author": "LangChain", "license": "MIT", "dependencies": { "@aws-sdk/client-bedrock-agent-runtime": "^3.616.0", "@aws-sdk/client-bedrock-runtime": "^3.602.0", "@aws-sdk/client-kendra": "^3.352.0", "@aws-sdk/credential-provider-node": "^3.600.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.22.5" }, "peerDependencies": { "@langchain/core": ">=0.2.21 <0.4.0" }, "devDependencies": { "@aws-sdk/types": "^3.609.0", "@jest/globals": "^29.5.0", "@langchain/core": "workspace:*", "@langchain/scripts": ">=0.1.0 <0.2.0", 
"@langchain/standard-tests": "0.0.0", "@smithy/types": "^3.2.0", "@swc/core": "^1.3.90", "@swc/jest": "^0.2.29", "@tsconfig/recommended": "^1.0.3", "@typescript-eslint/eslint-plugin": "^6.12.0", "@typescript-eslint/parser": "^6.12.0", "dotenv": "^16.3.1", "dpdm": "^3.12.0", "eslint": "^8.33.0", "eslint-config-airbnb-base": "^15.0.0", "eslint-config-prettier": "^8.6.0", "eslint-plugin-import": "^2.27.5", "eslint-plugin-no-instanceof": "^1.0.1", "eslint-plugin-prettier": "^4.2.1", "jest": "^29.5.0", "jest-environment-node": "^29.6.4", "prettier": "^2.8.3", "release-it": "^17.6.0", "rollup": "^4.5.2", "ts-jest": "^29.1.0", "typescript": "<5.2.0", "zod": "^3.22.4" }, "publishConfig": { "access": "public" }, "exports": { ".": { "types": { "import": "./index.d.ts", "require": "./index.d.cts", "default": "./index.d.ts" }, "import": "./index.js", "require": "./index.cjs" }, "./package.json": "./package.json" }, "files": [ "dist/", "index.cjs", "index.js", "index.d.ts", "index.d.cts" ] }
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/tsconfig.cjs.json
{ "extends": "./tsconfig.json", "compilerOptions": { "module": "commonjs", "declaration": false }, "exclude": ["node_modules", "dist", "docs", "**/tests"] }
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/turbo.json
{ "extends": ["//"], "pipeline": { "build": { "outputs": ["**/dist/**"] }, "build:internal": { "dependsOn": ["^build:internal"] } } }
0
lc_public_repos/langchainjs/libs
lc_public_repos/langchainjs/libs/langchain-aws/.prettierrc
{ "$schema": "https://json.schemastore.org/prettierrc", "printWidth": 80, "tabWidth": 2, "useTabs": false, "semi": true, "singleQuote": false, "quoteProps": "as-needed", "jsxSingleQuote": false, "trailingComma": "es5", "bracketSpacing": true, "arrowParens": "always", "requirePragma": false, "insertPragma": false, "proseWrap": "preserve", "htmlWhitespaceSensitivity": "css", "vueIndentScriptAndStyle": false, "endOfLine": "lf" }
0
lc_public_repos/langchainjs/libs/langchain-aws
lc_public_repos/langchainjs/libs/langchain-aws/src/types.ts
import type { ToolChoice, Tool as BedrockTool, } from "@aws-sdk/client-bedrock-runtime"; import type { AwsCredentialIdentity, Provider } from "@aws-sdk/types"; import { ConverseCommand } from "@aws-sdk/client-bedrock-runtime"; import { BindToolsInput } from "@langchain/core/language_models/chat_models"; export type CredentialType = | AwsCredentialIdentity | Provider<AwsCredentialIdentity>; export type ConverseCommandParams = ConstructorParameters< typeof ConverseCommand >[0]; export type BedrockToolChoice = | ToolChoice.AnyMember | ToolChoice.AutoMember | ToolChoice.ToolMember; export type ChatBedrockConverseToolType = BindToolsInput | BedrockTool;
0
lc_public_repos/langchainjs/libs/langchain-aws
lc_public_repos/langchainjs/libs/langchain-aws/src/index.ts
export * from "./chat_models.js"; export * from "./types.js"; export * from "./retrievers/index.js"; export * from "./embeddings.js";
0
lc_public_repos/langchainjs/libs/langchain-aws
lc_public_repos/langchainjs/libs/langchain-aws/src/chat_models.ts
import type { BaseMessage } from "@langchain/core/messages"; import { AIMessageChunk } from "@langchain/core/messages"; import type { BaseLanguageModelInput, StructuredOutputMethodOptions, ToolDefinition, } from "@langchain/core/language_models/base"; import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager"; import { type BaseChatModelParams, BaseChatModel, LangSmithParams, BaseChatModelCallOptions, } from "@langchain/core/language_models/chat_models"; import type { ToolConfiguration, GuardrailConfiguration, } from "@aws-sdk/client-bedrock-runtime"; import { BedrockRuntimeClient, ConverseCommand, ConverseStreamCommand, } from "@aws-sdk/client-bedrock-runtime"; import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs"; import { getEnvironmentVariable } from "@langchain/core/utils/env"; import { defaultProvider, DefaultProviderInit, } from "@aws-sdk/credential-provider-node"; import type { DocumentType as __DocumentType } from "@smithy/types"; import { Runnable, RunnableLambda, RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables"; import { zodToJsonSchema } from "zod-to-json-schema"; import { isZodSchema } from "@langchain/core/utils/types"; import { z } from "zod"; import { convertToConverseTools, convertToBedrockToolChoice, convertToConverseMessages, convertConverseMessageToLangChainMessage, handleConverseStreamContentBlockDelta, handleConverseStreamMetadata, handleConverseStreamContentBlockStart, BedrockConverseToolChoice, } from "./common.js"; import { ChatBedrockConverseToolType, ConverseCommandParams, CredentialType, } from "./types.js"; /** * Inputs for ChatBedrockConverse. */ export interface ChatBedrockConverseInput extends BaseChatModelParams, Partial<DefaultProviderInit> { /** * The BedrockRuntimeClient to use. * It gives ability to override the default client with a custom one, allowing you to pass requestHandler {NodeHttpHandler} parameter * in case it is not provided here. 
*/ client?: BedrockRuntimeClient; /** * Whether or not to stream responses */ streaming?: boolean; /** * Model to use. * For example, "anthropic.claude-3-haiku-20240307-v1:0", this is equivalent to the modelId property in the * list-foundation-models api. * See the below link for a full list of models. * @link https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns * * @default anthropic.claude-3-haiku-20240307-v1:0 */ model?: string; /** * The AWS region e.g. `us-west-2`. * Fallback to AWS_DEFAULT_REGION env variable or region specified in ~/.aws/config * in case it is not provided here. */ region?: string; /** * AWS Credentials. If no credentials are provided, the default credentials from * `@aws-sdk/credential-provider-node` will be used. */ credentials?: CredentialType; /** * Temperature. */ temperature?: number; /** * Max tokens. */ maxTokens?: number; /** * Override the default endpoint hostname. */ endpointHost?: string; /** * The percentage of most-likely candidates that the model considers for the next token. For * example, if you choose a value of 0.8 for `topP`, the model selects from the top 80% of the * probability distribution of tokens that could be next in the sequence. * The default value is the default value for the model that you are using. * For more information, see the inference parameters for foundation models link below. * @link https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html */ topP?: number; /** * Additional inference parameters that the model supports, beyond the * base set of inference parameters that the Converse API supports in the `inferenceConfig` * field. For more information, see the model parameters link below. * @link https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html */ additionalModelRequestFields?: __DocumentType; /** * Whether or not to include usage data, like token counts * in the streamed response chunks. 
Passing as a call option will * take precedence over the class-level setting. * @default true */ streamUsage?: boolean; /** * Configuration information for a guardrail that you want to use in the request. */ guardrailConfig?: GuardrailConfiguration; /** * Which types of `tool_choice` values the model supports. * * Inferred if not specified. Inferred as ['auto', 'any', 'tool'] if a 'claude-3' * model is used, ['auto', 'any'] if a 'mistral-large' model is used, empty otherwise. */ supportsToolChoiceValues?: Array<"auto" | "any" | "tool">; } export interface ChatBedrockConverseCallOptions extends BaseChatModelCallOptions, Pick< ChatBedrockConverseInput, "additionalModelRequestFields" | "streamUsage" > { /** * A list of stop sequences. A stop sequence is a sequence of characters that causes * the model to stop generating the response. */ stop?: string[]; tools?: ChatBedrockConverseToolType[]; /** * Tool choice for the model. If passing a string, it must be "any", "auto" or the * name of the tool to use. Or, pass a BedrockToolChoice object. * * If "any" is passed, the model must request at least one tool. * If "auto" is passed, the model automatically decides if a tool should be called * or whether to generate text instead. * If a tool name is passed, it will force the model to call that specific tool. */ tool_choice?: BedrockConverseToolChoice; } /** * AWS Bedrock Converse chat model integration. 
* * Setup: * Install `@langchain/aws` and set the following environment variables: * * ```bash * npm install @langchain/aws * export BEDROCK_AWS_REGION="your-aws-region" * export BEDROCK_AWS_SECRET_ACCESS_KEY="your-aws-secret-access-key" * export BEDROCK_AWS_ACCESS_KEY_ID="your-aws-access-key-id" * ``` * * ## [Constructor args](https://api.js.langchain.com/classes/langchain_aws.ChatBedrockConverse.html#constructor) * * ## [Runtime args](https://api.js.langchain.com/interfaces/langchain_aws.ChatBedrockConverseCallOptions.html) * * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc. * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below: * * ```typescript * // When calling `.bind`, call options should be passed via the first argument * const llmWithArgsBound = llm.bind({ * stop: ["\n"], * tools: [...], * }); * * // When calling `.bindTools`, call options should be passed via the second argument * const llmWithTools = llm.bindTools( * [...], * { * stop: ["\n"], * } * ); * ``` * * ## Examples * * <details open> * <summary><strong>Instantiate</strong></summary> * * ```typescript * import { ChatBedrockConverse } from '@langchain/aws'; * * const llm = new ChatBedrockConverse({ * model: "anthropic.claude-3-5-sonnet-20240620-v1:0", * temperature: 0, * maxTokens: undefined, * timeout: undefined, * maxRetries: 2, * region: process.env.BEDROCK_AWS_REGION, * credentials: { * secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY!, * accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID!, * }, * // other params... 
* }); * ``` * </details> * * <br /> * * <details> * <summary><strong>Invoking</strong></summary> * * ```typescript * const input = `Translate "I love programming" into French.`; * * // Models also accept a list of chat messages or a formatted prompt * const result = await llm.invoke(input); * console.log(result); * ``` * * ```txt * AIMessage { * "id": "81a27f7a-550c-473d-8307-c2fbb9c74956", * "content": "Here's the translation to French:\n\nJ'adore la programmation.", * "response_metadata": { * "$metadata": { * "httpStatusCode": 200, * "requestId": "81a27f7a-550c-473d-8307-c2fbb9c74956", * "attempts": 1, * "totalRetryDelay": 0 * }, * "metrics": { * "latencyMs": 1109 * }, * "stopReason": "end_turn", * "usage": { * "inputTokens": 25, * "outputTokens": 19, * "totalTokens": 44 * } * }, * "usage_metadata": { * "input_tokens": 25, * "output_tokens": 19, * "total_tokens": 44 * } * } * ``` * </details> * * <br /> * * <details> * <summary><strong>Streaming Chunks</strong></summary> * * ```typescript * for await (const chunk of await llm.stream(input)) { * console.log(chunk); * } * ``` * * ```txt * AIMessageChunk { * "content": "" * "response_metadata": { * "messageStart": { * "p": "abcdefghijk", * "role": "assistant" * } * } * } * AIMessageChunk { * "content": "Here" * } * AIMessageChunk { * "content": "'s" * } * AIMessageChunk { * "content": " the translation" * } * AIMessageChunk { * "content": " to" * } * AIMessageChunk { * "content": " French:\n\nJ" * } * AIMessageChunk { * "content": "'adore la" * } * AIMessageChunk { * "content": " programmation." 
* } * AIMessageChunk { * "content": "" * "response_metadata": { * "contentBlockStop": { * "contentBlockIndex": 0, * "p": "abcdefghijk" * } * } * } * AIMessageChunk { * "content": "" * "response_metadata": { * "messageStop": { * "stopReason": "end_turn" * } * } * } * AIMessageChunk { * "content": "" * "response_metadata": { * "metadata": { * "metrics": { * "latencyMs": 838 * }, * "p": "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123", * "usage": { * "inputTokens": 25, * "outputTokens": 19, * "totalTokens": 44 * } * } * } * "usage_metadata": { * "input_tokens": 25, * "output_tokens": 19, * "total_tokens": 44 * } * } * ``` * </details> * * <br /> * * <details> * <summary><strong>Aggregate Streamed Chunks</strong></summary> * * ```typescript * import { AIMessageChunk } from '@langchain/core/messages'; * import { concat } from '@langchain/core/utils/stream'; * * const stream = await llm.stream(input); * let full: AIMessageChunk | undefined; * for await (const chunk of stream) { * full = !full ? 
chunk : concat(full, chunk); * } * console.log(full); * ``` * * ```txt * AIMessageChunk { * "content": "Here's the translation to French:\n\nJ'adore la programmation.", * "response_metadata": { * "messageStart": { * "p": "ab", * "role": "assistant" * }, * "contentBlockStop": { * "contentBlockIndex": 0, * "p": "abcdefghijklmnopqrstuvwxyzABCDEFGHIJK" * }, * "messageStop": { * "stopReason": "end_turn" * }, * "metadata": { * "metrics": { * "latencyMs": 838 * }, * "p": "abcdefghijklmnopqrstuvwxyz", * "usage": { * "inputTokens": 25, * "outputTokens": 19, * "totalTokens": 44 * } * } * }, * "usage_metadata": { * "input_tokens": 25, * "output_tokens": 19, * "total_tokens": 44 * } * } * ``` * </details> * * <br /> * * <details> * <summary><strong>Bind tools</strong></summary> * * ```typescript * import { z } from 'zod'; * * const GetWeather = { * name: "GetWeather", * description: "Get the current weather in a given location", * schema: z.object({ * location: z.string().describe("The city and state, e.g. San Francisco, CA") * }), * } * * const GetPopulation = { * name: "GetPopulation", * description: "Get the current population in a given location", * schema: z.object({ * location: z.string().describe("The city and state, e.g. San Francisco, CA") * }), * } * * const llmWithTools = llm.bindTools( * [GetWeather, GetPopulation], * { * // strict: true // enforce tool args schema is respected * } * ); * const aiMsg = await llmWithTools.invoke( * "Which city is hotter today and which is bigger: LA or NY?" 
* ); * console.log(aiMsg.tool_calls); * ``` * * ```txt * [ * { * id: 'tooluse_hIaiqfweRtSiJyi6J4naJA', * name: 'GetWeather', * args: { location: 'Los Angeles, CA' }, * type: 'tool_call' * }, * { * id: 'tooluse_nOS8B0UlTd2FdpH4MSHw9w', * name: 'GetWeather', * args: { location: 'New York, NY' }, * type: 'tool_call' * }, * { * id: 'tooluse_XxMpZiETQ5aVS5opVDyIaw', * name: 'GetPopulation', * args: { location: 'Los Angeles, CA' }, * type: 'tool_call' * }, * { * id: 'tooluse_GpYvAfldT2aR8VQfH-p4PQ', * name: 'GetPopulation', * args: { location: 'New York, NY' }, * type: 'tool_call' * } * ] * ``` * </details> * * <br /> * * <details> * <summary><strong>Structured Output</strong></summary> * * ```typescript * import { z } from 'zod'; * * const Joke = z.object({ * setup: z.string().describe("The setup of the joke"), * punchline: z.string().describe("The punchline to the joke"), * rating: z.number().optional().describe("How funny the joke is, from 1 to 10") * }).describe('Joke to tell user.'); * * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" }); * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats"); * console.log(jokeResult); * ``` * * ```txt * { * setup: "Why don't cats play poker in the jungle?", * punchline: 'Too many cheetahs!', * rating: 7 * } * ``` * </details> * * <br /> * * <details> * <summary><strong>Multimodal</strong></summary> * * ```typescript * import { HumanMessage } from '@langchain/core/messages'; * * const imageUrl = "https://example.com/image.jpg"; * const imageData = await fetch(imageUrl).then(res => res.arrayBuffer()); * const base64Image = Buffer.from(imageData).toString('base64'); * * const message = new HumanMessage({ * content: [ * { type: "text", text: "describe the weather in this image" }, * { * type: "image_url", * image_url: { url: `data:image/jpeg;base64,${base64Image}` }, * }, * ] * }); * * const imageDescriptionAiMsg = await llm.invoke([message]); * console.log(imageDescriptionAiMsg.content); * 
``` * * ```txt * The weather in this image appears to be clear and pleasant. The sky is a vibrant blue with scattered white clouds, suggesting a sunny day with good visibility. The clouds are light and wispy, indicating fair weather conditions. There's no sign of rain, storm, or any adverse weather patterns. The lush green grass on the rolling hills looks well-watered and healthy, which could indicate recent rainfall or generally favorable weather conditions. Overall, the image depicts a beautiful, calm day with blue skies and sunshine - perfect weather for enjoying the outdoors. * ``` * </details> * * <br /> * * <details> * <summary><strong>Usage Metadata</strong></summary> * * ```typescript * const aiMsgForMetadata = await llm.invoke(input); * console.log(aiMsgForMetadata.usage_metadata); * ``` * * ```txt * { input_tokens: 25, output_tokens: 19, total_tokens: 44 } * ``` * </details> * * <br /> * * <details> * <summary><strong>Stream Usage Metadata</strong></summary> * * ```typescript * const streamForMetadata = await llm.stream(input); * let fullForMetadata: AIMessageChunk | undefined; * for await (const chunk of streamForMetadata) { * fullForMetadata = !fullForMetadata ? 
chunk : concat(fullForMetadata, chunk); * } * console.log(fullForMetadata?.usage_metadata); * ``` * * ```txt * { input_tokens: 25, output_tokens: 19, total_tokens: 44 } * ``` * </details> * * <br /> * * <details> * <summary><strong>Response Metadata</strong></summary> * * ```typescript * const aiMsgForResponseMetadata = await llm.invoke(input); * console.log(aiMsgForResponseMetadata.response_metadata); * ``` * * ```txt * { * '$metadata': { * httpStatusCode: 200, * requestId: '5de2a2e5-d1dc-4dff-bb02-31361f4107bc', * extendedRequestId: undefined, * cfId: undefined, * attempts: 1, * totalRetryDelay: 0 * }, * metrics: { latencyMs: 1163 }, * stopReason: 'end_turn', * usage: { inputTokens: 25, outputTokens: 19, totalTokens: 44 } * } * ``` * </details> * * <br /> */ export class ChatBedrockConverse extends BaseChatModel<ChatBedrockConverseCallOptions, AIMessageChunk> implements ChatBedrockConverseInput { // Used for tracing, replace with the same name as your class static lc_name() { return "ChatBedrockConverse"; } /** * Replace with any secrets this class passes to `super`. * See {@link ../../langchain-cohere/src/chat_model.ts} for * an example. */ get lc_secrets(): { [key: string]: string } | undefined { return { apiKey: "API_KEY_NAME", }; } get lc_aliases(): { [key: string]: string } | undefined { return { apiKey: "API_KEY_NAME", }; } model = "anthropic.claude-3-haiku-20240307-v1:0"; streaming = false; region: string; temperature?: number | undefined = undefined; maxTokens?: number | undefined = undefined; endpointHost?: string; topP?: number; additionalModelRequestFields?: __DocumentType; streamUsage = true; guardrailConfig?: GuardrailConfiguration; client: BedrockRuntimeClient; /** * Which types of `tool_choice` values the model supports. * * Inferred if not specified. Inferred as ['auto', 'any', 'tool'] if a 'claude-3' * model is used, ['auto', 'any'] if a 'mistral-large' model is used, empty otherwise. 
*/ supportsToolChoiceValues?: Array<"auto" | "any" | "tool">; constructor(fields?: ChatBedrockConverseInput) { super(fields ?? {}); const { profile, filepath, configFilepath, ignoreCache, mfaCodeProvider, roleAssumer, roleArn, webIdentityTokenFile, roleAssumerWithWebIdentity, ...rest } = fields ?? {}; const credentials = rest?.credentials ?? defaultProvider({ profile, filepath, configFilepath, ignoreCache, mfaCodeProvider, roleAssumer, roleArn, webIdentityTokenFile, roleAssumerWithWebIdentity, }); const region = rest?.region ?? getEnvironmentVariable("AWS_DEFAULT_REGION"); if (!region) { throw new Error( "Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field." ); } this.client = fields?.client ?? new BedrockRuntimeClient({ region, credentials, }); this.region = region; this.model = rest?.model ?? this.model; this.streaming = rest?.streaming ?? this.streaming; this.temperature = rest?.temperature; this.maxTokens = rest?.maxTokens; this.endpointHost = rest?.endpointHost; this.topP = rest?.topP; this.additionalModelRequestFields = rest?.additionalModelRequestFields; this.streamUsage = rest?.streamUsage ?? this.streamUsage; this.guardrailConfig = rest?.guardrailConfig; if (rest?.supportsToolChoiceValues === undefined) { if (this.model.includes("claude-3")) { this.supportsToolChoiceValues = ["auto", "any", "tool"]; } else if (this.model.includes("mistral-large")) { this.supportsToolChoiceValues = ["auto", "any"]; } else { this.supportsToolChoiceValues = undefined; } } else { this.supportsToolChoiceValues = rest.supportsToolChoiceValues; } } getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { const params = this.invocationParams(options); return { ls_provider: "amazon_bedrock", ls_model_name: this.model, ls_model_type: "chat", ls_temperature: params.inferenceConfig?.temperature ?? this.temperature, ls_max_tokens: params.inferenceConfig?.maxTokens ?? 
undefined, ls_stop: options.stop, }; } override bindTools( tools: ChatBedrockConverseToolType[], kwargs?: Partial<this["ParsedCallOptions"]> ): Runnable< BaseLanguageModelInput, AIMessageChunk, this["ParsedCallOptions"] > { return this.bind({ tools: convertToConverseTools(tools), ...kwargs }); } // Replace _llmType() { return "chat_bedrock_converse"; } invocationParams( options?: this["ParsedCallOptions"] ): Partial<ConverseCommandParams> { let toolConfig: ToolConfiguration | undefined; if (options?.tools && options.tools.length) { const tools = convertToConverseTools(options.tools); toolConfig = { tools, toolChoice: options.tool_choice ? convertToBedrockToolChoice(options.tool_choice, tools, { model: this.model, supportsToolChoiceValues: this.supportsToolChoiceValues, }) : undefined, }; } return { inferenceConfig: { maxTokens: this.maxTokens, temperature: this.temperature, topP: this.topP, stopSequences: options?.stop, }, toolConfig, additionalModelRequestFields: this.additionalModelRequestFields ?? options?.additionalModelRequestFields, guardrailConfig: this.guardrailConfig, }; } async _generate( messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun ): Promise<ChatResult> { if (this.streaming) { const stream = this._streamResponseChunks(messages, options, runManager); let finalResult: ChatGenerationChunk | undefined; for await (const chunk of stream) { if (finalResult === undefined) { finalResult = chunk; } else { finalResult = finalResult.concat(chunk); } } if (finalResult === undefined) { throw new Error( "Could not parse final output from Bedrock streaming call." 
); } return { generations: [finalResult], llmOutput: finalResult.generationInfo, }; } return this._generateNonStreaming(messages, options, runManager); } async _generateNonStreaming( messages: BaseMessage[], options: Partial<this["ParsedCallOptions"]>, _runManager?: CallbackManagerForLLMRun ): Promise<ChatResult> { const { converseMessages, converseSystem } = convertToConverseMessages(messages); const params = this.invocationParams(options); const command = new ConverseCommand({ modelId: this.model, messages: converseMessages, system: converseSystem, ...params, }); const response = await this.client.send(command, { abortSignal: options.signal, }); const { output, ...responseMetadata } = response; if (!output?.message) { throw new Error("No message found in Bedrock response."); } const message = convertConverseMessageToLangChainMessage( output.message, responseMetadata ); return { generations: [ { text: typeof message.content === "string" ? message.content : "", message, }, ], }; } async *_streamResponseChunks( messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun ): AsyncGenerator<ChatGenerationChunk> { const { converseMessages, converseSystem } = convertToConverseMessages(messages); const params = this.invocationParams(options); let { streamUsage } = this; if (options.streamUsage !== undefined) { streamUsage = options.streamUsage; } const command = new ConverseStreamCommand({ modelId: this.model, messages: converseMessages, system: converseSystem, ...params, }); const response = await this.client.send(command, { abortSignal: options.signal, }); if (response.stream) { for await (const chunk of response.stream) { if (chunk.contentBlockStart) { yield handleConverseStreamContentBlockStart(chunk.contentBlockStart); } else if (chunk.contentBlockDelta) { const textChatGeneration = handleConverseStreamContentBlockDelta( chunk.contentBlockDelta ); yield textChatGeneration; await 
runManager?.handleLLMNewToken(textChatGeneration.text); } else if (chunk.metadata) { yield handleConverseStreamMetadata(chunk.metadata, { streamUsage, }); } else { yield new ChatGenerationChunk({ text: "", message: new AIMessageChunk({ content: "", response_metadata: chunk, }), }); } } } } withStructuredOutput< // eslint-disable-next-line @typescript-eslint/no-explicit-any RunOutput extends Record<string, any> = Record<string, any> // eslint-disable-next-line @typescript-eslint/no-explicit-any >( outputSchema: | z.ZodType<RunOutput> // eslint-disable-next-line @typescript-eslint/no-explicit-any | Record<string, any>, config?: StructuredOutputMethodOptions<false> ): Runnable<BaseLanguageModelInput, RunOutput>; withStructuredOutput< // eslint-disable-next-line @typescript-eslint/no-explicit-any RunOutput extends Record<string, any> = Record<string, any> >( outputSchema: | z.ZodType<RunOutput> // eslint-disable-next-line @typescript-eslint/no-explicit-any | Record<string, any>, config?: StructuredOutputMethodOptions<true> ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>; withStructuredOutput< // eslint-disable-next-line @typescript-eslint/no-explicit-any RunOutput extends Record<string, any> = Record<string, any> >( outputSchema: | z.ZodType<RunOutput> // eslint-disable-next-line @typescript-eslint/no-explicit-any | Record<string, any>, config?: StructuredOutputMethodOptions<boolean> ): | Runnable<BaseLanguageModelInput, RunOutput> | Runnable< BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput; } > { // eslint-disable-next-line @typescript-eslint/no-explicit-any const schema: z.ZodType<RunOutput> | Record<string, any> = outputSchema; const name = config?.name; const description = schema.description ?? "A function available to call."; const method = config?.method; const includeRaw = config?.includeRaw; if (method === "jsonMode") { throw new Error(`ChatBedrockConverse does not support 'jsonMode'.`); } let functionName = name ?? 
"extract"; let tools: ToolDefinition[]; if (isZodSchema(schema)) { tools = [ { type: "function", function: { name: functionName, description, parameters: zodToJsonSchema(schema), }, }, ]; } else { if ("name" in schema) { functionName = schema.name; } tools = [ { type: "function", function: { name: functionName, description, parameters: schema, }, }, ]; } const supportsToolChoiceValues = this.supportsToolChoiceValues ?? []; let toolChoiceObj: { tool_choice: string } | undefined; if (supportsToolChoiceValues.includes("tool")) { toolChoiceObj = { tool_choice: tools[0].function.name, }; } else if (supportsToolChoiceValues.includes("any")) { toolChoiceObj = { tool_choice: "any", }; } const llm = this.bindTools(tools, toolChoiceObj); const outputParser = RunnableLambda.from<AIMessageChunk, RunOutput>( (input: AIMessageChunk): RunOutput => { if (!input.tool_calls || input.tool_calls.length === 0) { throw new Error("No tool calls found in the response."); } const toolCall = input.tool_calls.find( (tc) => tc.name === functionName ); if (!toolCall) { throw new Error(`No tool call found with name ${functionName}.`); } return toolCall.args as RunOutput; } ); if (!includeRaw) { return llm.pipe(outputParser).withConfig({ runName: "StructuredOutput", }) as Runnable<BaseLanguageModelInput, RunOutput>; } const parserAssign = RunnablePassthrough.assign({ // eslint-disable-next-line @typescript-eslint/no-explicit-any parsed: (input: any, config) => outputParser.invoke(input.raw, config), }); const parserNone = RunnablePassthrough.assign({ parsed: () => null, }); const parsedWithFallback = parserAssign.withFallbacks({ fallbacks: [parserNone], }); return RunnableSequence.from< BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput } >([ { raw: llm, }, parsedWithFallback, ]).withConfig({ runName: "StructuredOutputRunnable", }); } }
0
lc_public_repos/langchainjs/libs/langchain-aws
lc_public_repos/langchainjs/libs/langchain-aws/src/embeddings.ts
import { BedrockRuntimeClient, InvokeModelCommand, } from "@aws-sdk/client-bedrock-runtime"; import { Embeddings, EmbeddingsParams } from "@langchain/core/embeddings"; import { CredentialType } from "./types.js"; /** * Interface that extends EmbeddingsParams and defines additional * parameters specific to the BedrockEmbeddings class. */ export interface BedrockEmbeddingsParams extends EmbeddingsParams { /** * Model Name to use. Defaults to `amazon.titan-embed-text-v1` if not provided * */ model?: string; /** * A client provided by the user that allows them to customze any * SDK configuration options. */ client?: BedrockRuntimeClient; region?: string; credentials?: CredentialType; } /** * Class that extends the Embeddings class and provides methods for * generating embeddings using the Bedrock API. * @example * ```typescript * const embeddings = new BedrockEmbeddings({ * region: "your-aws-region", * credentials: { * accessKeyId: "your-access-key-id", * secretAccessKey: "your-secret-access-key", * }, * model: "amazon.titan-embed-text-v1", * }); * * // Embed a query and log the result * const res = await embeddings.embedQuery( * "What would be a good company name for a company that makes colorful socks?" * ); * console.log({ res }); * ``` */ export class BedrockEmbeddings extends Embeddings implements BedrockEmbeddingsParams { model: string; client: BedrockRuntimeClient; batchSize = 512; constructor(fields?: BedrockEmbeddingsParams) { super(fields ?? {}); this.model = fields?.model ?? "amazon.titan-embed-text-v1"; this.client = fields?.client ?? new BedrockRuntimeClient({ region: fields?.region, credentials: fields?.credentials, }); } /** * Protected method to make a request to the Bedrock API to generate * embeddings. Handles the retry logic and returns the response from the * API. * @param request Request to send to the Bedrock API. * @returns Promise that resolves to the response from the API. 
*/ protected async _embedText(text: string): Promise<number[]> { return this.caller.call(async () => { try { // replace newlines, which can negatively affect performance. const cleanedText = text.replace(/\n/g, " "); const res = await this.client.send( new InvokeModelCommand({ modelId: this.model, body: JSON.stringify({ inputText: cleanedText, }), contentType: "application/json", accept: "application/json", }) ); const body = new TextDecoder().decode(res.body); return JSON.parse(body).embedding; } catch (e) { console.error({ error: e, }); // eslint-disable-next-line no-instanceof/no-instanceof if (e instanceof Error) { throw new Error( `An error occurred while embedding documents with Bedrock: ${e.message}` ); } throw new Error( "An error occurred while embedding documents with Bedrock" ); } }); } /** * Method that takes a document as input and returns a promise that * resolves to an embedding for the document. It calls the _embedText * method with the document as the input. * @param document Document for which to generate an embedding. * @returns Promise that resolves to an embedding for the input document. */ embedQuery(document: string): Promise<number[]> { return this.caller.callWithOptions( {}, this._embedText.bind(this), document ); } /** * Method to generate embeddings for an array of texts. Calls _embedText * method which batches and handles retry logic when calling the AWS Bedrock API. * @param documents Array of texts for which to generate embeddings. * @returns Promise that resolves to a 2D array of embeddings for each input document. */ async embedDocuments(documents: string[]): Promise<number[][]> { return Promise.all(documents.map((document) => this._embedText(document))); } }
0
lc_public_repos/langchainjs/libs/langchain-aws
lc_public_repos/langchainjs/libs/langchain-aws/src/common.ts
import type { MessageContentComplex, BaseMessage, UsageMetadata, } from "@langchain/core/messages"; import { AIMessage, AIMessageChunk, ToolMessage, } from "@langchain/core/messages"; import type { ToolCall } from "@langchain/core/messages/tool"; import { isOpenAITool } from "@langchain/core/language_models/base"; import type { Message as BedrockMessage, SystemContentBlock as BedrockSystemContentBlock, Tool as BedrockTool, ContentBlock, ImageFormat, ConverseResponse, ContentBlockDeltaEvent, ConverseStreamMetadataEvent, ContentBlockStartEvent, } from "@aws-sdk/client-bedrock-runtime"; import type { DocumentType as __DocumentType } from "@smithy/types"; import { isLangChainTool } from "@langchain/core/utils/function_calling"; import { zodToJsonSchema } from "zod-to-json-schema"; import { ChatGenerationChunk } from "@langchain/core/outputs"; import { ChatBedrockConverseToolType, BedrockToolChoice } from "./types.js"; export function extractImageInfo(base64: string): ContentBlock.ImageMember { // Extract the format from the base64 string const formatMatch = base64.match(/^data:image\/(\w+);base64,/); let format: ImageFormat | undefined; if (formatMatch) { const extractedFormat = formatMatch[1].toLowerCase(); if (["gif", "jpeg", "png", "webp"].includes(extractedFormat)) { format = extractedFormat as ImageFormat; } } // Remove the data URL prefix if present const base64Data = base64.replace(/^data:image\/\w+;base64,/, ""); // Convert base64 to Uint8Array const binaryString = atob(base64Data); const bytes = new Uint8Array(binaryString.length); for (let i = 0; i < binaryString.length; i += 1) { bytes[i] = binaryString.charCodeAt(i); } return { image: { format, source: { bytes, }, }, }; } export function convertToConverseMessages(messages: BaseMessage[]): { converseMessages: BedrockMessage[]; converseSystem: BedrockSystemContentBlock[]; } { const converseSystem: BedrockSystemContentBlock[] = messages .filter((msg) => msg._getType() === "system") .map((msg) => { if (typeof 
msg.content === "string") { return { text: msg.content }; } else if (msg.content.length === 1 && msg.content[0].type === "text") { return { text: msg.content[0].text }; } throw new Error( "System message content must be either a string, or a content array containing a single text object." ); }); const converseMessages: BedrockMessage[] = messages .filter((msg) => msg._getType() !== "system") .map((msg) => { if (msg._getType() === "ai") { const castMsg = msg as AIMessage; const assistantMsg: BedrockMessage = { role: "assistant", content: [], }; if (castMsg.tool_calls && castMsg.tool_calls.length) { assistantMsg.content = castMsg.tool_calls.map((tc) => ({ toolUse: { toolUseId: tc.id, name: tc.name, input: tc.args, }, })); } if (typeof castMsg.content === "string" && castMsg.content !== "") { assistantMsg.content?.push({ text: castMsg.content, }); } else if (Array.isArray(castMsg.content)) { const contentBlocks: ContentBlock[] = castMsg.content.map((block) => { if (block.type === "text" && block.text !== "") { return { text: block.text, }; } else { const blockValues = Object.fromEntries( Object.values(block).filter(([key]) => key !== "type") ); throw new Error( `Unsupported content block type: ${ block.type } with content of ${JSON.stringify(blockValues, null, 2)}` ); } }); assistantMsg.content = [ ...(assistantMsg.content ? assistantMsg.content : []), ...contentBlocks, ]; } return assistantMsg; } else if (msg._getType() === "human" || msg._getType() === "generic") { if (typeof msg.content === "string" && msg.content !== "") { return { role: "user" as const, content: [ { text: msg.content, }, ], }; } else if (Array.isArray(msg.content)) { const contentBlocks: ContentBlock[] = msg.content.flatMap((block) => { if (block.type === "image_url") { const base64: string = typeof block.image_url === "string" ? 
block.image_url : block.image_url.url; return extractImageInfo(base64); } else if (block.type === "text") { return { text: block.text, }; } else if ( block.type === "document" && block.document !== undefined ) { return { document: block.document, }; } else if (block.type === "image" && block.image !== undefined) { return { image: block.image, }; } else { throw new Error(`Unsupported content block type: ${block.type}`); } }); return { role: "user" as const, content: contentBlocks, }; } else { throw new Error( `Invalid message content: empty string. '${msg._getType()}' must contain non-empty content.` ); } } else if (msg._getType() === "tool") { const castMsg = msg as ToolMessage; if (typeof castMsg.content === "string") { return { // Tool use messages are always from the user role: "user" as const, content: [ { toolResult: { toolUseId: castMsg.tool_call_id, content: [ { text: castMsg.content, }, ], }, }, ], }; } else { return { // Tool use messages are always from the user role: "user" as const, content: [ { toolResult: { toolUseId: castMsg.tool_call_id, content: [ { json: castMsg.content, }, ], }, }, ], }; } } else { throw new Error(`Unsupported message type: ${msg._getType()}`); } }); // Combine consecutive user tool result messages into a single message const combinedConverseMessages = converseMessages.reduce<BedrockMessage[]>( (acc, curr) => { const lastMessage = acc[acc.length - 1]; if ( lastMessage && lastMessage.role === "user" && lastMessage.content?.some((c) => "toolResult" in c) && curr.role === "user" && curr.content?.some((c) => "toolResult" in c) ) { lastMessage.content = lastMessage.content.concat(curr.content); } else { acc.push(curr); } return acc; }, [] ); return { converseMessages: combinedConverseMessages, converseSystem }; } export function isBedrockTool(tool: unknown): tool is BedrockTool { if (typeof tool === "object" && tool && "toolSpec" in tool) { return true; } return false; } export function convertToConverseTools( tools: 
ChatBedrockConverseToolType[] ): BedrockTool[] { if (tools.every(isOpenAITool)) { return tools.map((tool) => ({ toolSpec: { name: tool.function.name, description: tool.function.description, inputSchema: { json: tool.function.parameters as __DocumentType, }, }, })); } else if (tools.every(isLangChainTool)) { return tools.map((tool) => ({ toolSpec: { name: tool.name, description: tool.description, inputSchema: { json: zodToJsonSchema(tool.schema) as __DocumentType, }, }, })); } else if (tools.every(isBedrockTool)) { return tools; } throw new Error( "Invalid tools passed. Must be an array of StructuredToolInterface, ToolDefinition, or BedrockTool." ); } export type BedrockConverseToolChoice = | "any" | "auto" | string | BedrockToolChoice; export function convertToBedrockToolChoice( toolChoice: BedrockConverseToolChoice, tools: BedrockTool[], fields: { model: string; supportsToolChoiceValues?: Array<"auto" | "any" | "tool">; } ): BedrockToolChoice { const supportsToolChoiceValues = fields.supportsToolChoiceValues ?? []; let bedrockToolChoice: BedrockToolChoice; if (typeof toolChoice === "string") { switch (toolChoice) { case "any": bedrockToolChoice = { any: {}, }; break; case "auto": bedrockToolChoice = { auto: {}, }; break; default: { const foundTool = tools.find( (tool) => tool.toolSpec?.name === toolChoice ); if (!foundTool) { throw new Error( `Tool with name ${toolChoice} not found in tools list.` ); } bedrockToolChoice = { tool: { name: toolChoice, }, }; } } } else { bedrockToolChoice = toolChoice; } const toolChoiceType = Object.keys(bedrockToolChoice)[0] as | "auto" | "any" | "tool"; if (!supportsToolChoiceValues.includes(toolChoiceType)) { let supportedTxt = ""; if (supportsToolChoiceValues.length) { supportedTxt = `Model ${fields.model} does not currently support 'tool_choice' ` + `of type ${toolChoiceType}. 
The following 'tool_choice' types ` + `are supported: ${supportsToolChoiceValues.join(", ")}.`; } else { supportedTxt = `Model ${fields.model} does not currently support 'tool_choice'.`; } throw new Error( `${supportedTxt} Please see` + "https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ToolChoice.html" + "for the latest documentation on models that support tool choice." ); } return bedrockToolChoice; } export function convertConverseMessageToLangChainMessage( message: BedrockMessage, responseMetadata: Omit<ConverseResponse, "output"> ): BaseMessage { if (!message.content) { throw new Error("No message content found in response."); } if (message.role !== "assistant") { throw new Error( `Unsupported message role received in ChatBedrockConverse response: ${message.role}` ); } let requestId: string | undefined; if ( "$metadata" in responseMetadata && responseMetadata.$metadata && typeof responseMetadata.$metadata === "object" && "requestId" in responseMetadata.$metadata ) { requestId = responseMetadata.$metadata.requestId as string; } let tokenUsage: UsageMetadata | undefined; if (responseMetadata.usage) { const input_tokens = responseMetadata.usage.inputTokens ?? 0; const output_tokens = responseMetadata.usage.outputTokens ?? 0; tokenUsage = { input_tokens, output_tokens, total_tokens: responseMetadata.usage.totalTokens ?? 
input_tokens + output_tokens, }; } if ( message.content?.length === 1 && "text" in message.content[0] && typeof message.content[0].text === "string" ) { return new AIMessage({ content: message.content[0].text, response_metadata: responseMetadata, usage_metadata: tokenUsage, id: requestId, }); } else { const toolCalls: ToolCall[] = []; const content: MessageContentComplex[] = []; message.content.forEach((c) => { if ( "toolUse" in c && c.toolUse && c.toolUse.name && c.toolUse.input && typeof c.toolUse.input === "object" ) { toolCalls.push({ id: c.toolUse.toolUseId, name: c.toolUse.name, args: c.toolUse.input, type: "tool_call", }); } else if ("text" in c && typeof c.text === "string") { content.push({ type: "text", text: c.text }); } else { content.push(c); } }); return new AIMessage({ content: content.length ? content : "", tool_calls: toolCalls.length ? toolCalls : undefined, response_metadata: responseMetadata, usage_metadata: tokenUsage, id: requestId, }); } } export function handleConverseStreamContentBlockDelta( contentBlockDelta: ContentBlockDeltaEvent ): ChatGenerationChunk { if (!contentBlockDelta.delta) { throw new Error("No delta found in content block."); } if (typeof contentBlockDelta.delta.text === "string") { return new ChatGenerationChunk({ text: contentBlockDelta.delta.text, message: new AIMessageChunk({ content: contentBlockDelta.delta.text, }), }); } else if (contentBlockDelta.delta.toolUse) { const index = contentBlockDelta.contentBlockIndex; return new ChatGenerationChunk({ text: "", message: new AIMessageChunk({ content: "", tool_call_chunks: [ { args: contentBlockDelta.delta.toolUse.input, index, type: "tool_call_chunk", }, ], }), }); } else { throw new Error( `Unsupported content block type(s): ${JSON.stringify( contentBlockDelta.delta, null, 2 )}` ); } } export function handleConverseStreamContentBlockStart( contentBlockStart: ContentBlockStartEvent ): ChatGenerationChunk { const index = contentBlockStart.contentBlockIndex; if 
(contentBlockStart.start?.toolUse) { return new ChatGenerationChunk({ text: "", message: new AIMessageChunk({ content: "", tool_call_chunks: [ { name: contentBlockStart.start.toolUse.name, id: contentBlockStart.start.toolUse.toolUseId, index, type: "tool_call_chunk", }, ], }), }); } throw new Error("Unsupported content block start event."); } export function handleConverseStreamMetadata( metadata: ConverseStreamMetadataEvent, extra: { streamUsage: boolean; } ): ChatGenerationChunk { const inputTokens = metadata.usage?.inputTokens ?? 0; const outputTokens = metadata.usage?.outputTokens ?? 0; const usage_metadata: UsageMetadata = { input_tokens: inputTokens, output_tokens: outputTokens, total_tokens: metadata.usage?.totalTokens ?? inputTokens + outputTokens, }; return new ChatGenerationChunk({ text: "", message: new AIMessageChunk({ content: "", usage_metadata: extra.streamUsage ? usage_metadata : undefined, response_metadata: { // Use the same key as returned from the Converse API metadata, }, }), }); }
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/tests/chat_models.standard.int.test.ts
/* eslint-disable no-process-env */ import { test, expect } from "@jest/globals"; import { ChatModelIntegrationTests } from "@langchain/standard-tests"; import { AIMessageChunk } from "@langchain/core/messages"; import { ChatBedrockConverse, ChatBedrockConverseCallOptions, } from "../chat_models.js"; class ChatBedrockConverseStandardIntegrationTests extends ChatModelIntegrationTests< ChatBedrockConverseCallOptions, AIMessageChunk > { constructor() { const region = process.env.BEDROCK_AWS_REGION ?? "us-east-1"; super({ Cls: ChatBedrockConverse, chatModelHasToolCalling: true, chatModelHasStructuredOutput: true, supportsParallelToolCalls: true, constructorArgs: { region, model: "anthropic.claude-3-sonnet-20240229-v1:0", credentials: { secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY, accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID, }, }, }); } async testToolMessageHistoriesStringContent() { this.skipTestMessage( "testToolMessageHistoriesStringContent", "ChatBedrockConverse", "Not properly implemented." ); } async testToolMessageHistoriesListContent() { this.skipTestMessage( "testToolMessageHistoriesListContent", "ChatBedrockConverse", "Not properly implemented." ); } async testStructuredFewShotExamples() { this.skipTestMessage( "testStructuredFewShotExamples", "ChatBedrockConverse", "Not properly implemented." ); } async testParallelToolCalling() { // Pass `true` in the second argument to only verify it can support parallel tool calls in the message history. // This is because the model struggles to actually call parallel tools. await super.testParallelToolCalling(undefined, true); } } const testClass = new ChatBedrockConverseStandardIntegrationTests(); test("ChatBedrockConverseStandardIntegrationTests", async () => { const testResults = await testClass.runTests(); expect(testResults).toBe(true); });
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/tests/chat_models.standard.test.ts
/* eslint-disable no-process-env */
import { test, expect } from "@jest/globals";
import { ChatModelUnitTests } from "@langchain/standard-tests";
import { AIMessageChunk } from "@langchain/core/messages";
import {
  ChatBedrockConverse,
  ChatBedrockConverseCallOptions,
} from "../chat_models.js";

/**
 * Standard unit-test suite for ChatBedrockConverse. No network access is
 * needed; dummy credentials are planted in the environment so that the
 * model's environment-based construction path succeeds.
 */
class ChatBedrockConverseStandardUnitTests extends ChatModelUnitTests<
  ChatBedrockConverseCallOptions,
  AIMessageChunk
> {
  constructor() {
    super({
      Cls: ChatBedrockConverse,
      chatModelHasToolCalling: true,
      chatModelHasStructuredOutput: true,
      constructorArgs: {},
    });
    // Fake AWS credentials/region so construction-from-env works offline.
    const fakeEnv: Record<string, string> = {
      BEDROCK_AWS_SECRET_ACCESS_KEY: "test",
      BEDROCK_AWS_ACCESS_KEY_ID: "test",
      BEDROCK_AWS_SESSION_TOKEN: "test",
      AWS_DEFAULT_REGION: "us-east-1",
    };
    for (const [key, value] of Object.entries(fakeEnv)) {
      process.env[key] = value;
    }
  }

  testChatModelInitApiKey() {
    // Multiple keys are required here, which the single-key standard test
    // cannot express.
    this.skipTestMessage(
      "testChatModelInitApiKey",
      "BedrockChat",
      this.multipleApiKeysRequiredMessage
    );
  }
}

const suite = new ChatBedrockConverseStandardUnitTests();

test("ChatBedrockConverseStandardUnitTests", () => {
  expect(suite.runTests()).toBe(true);
});
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/tests/chat_models.int.test.ts
/* eslint-disable no-process-env */
// Integration tests for ChatBedrockConverse. These hit the live Bedrock API
// and require BEDROCK_AWS_REGION / BEDROCK_AWS_ACCESS_KEY_ID /
// BEDROCK_AWS_SECRET_ACCESS_KEY in the environment.
import { test, expect } from "@jest/globals";
import {
  AIMessage,
  AIMessageChunk,
  HumanMessage,
  SystemMessage,
  ToolMessage,
} from "@langchain/core/messages";
import { tool } from "@langchain/core/tools";
import { z } from "zod";
import { ChatBedrockConverse } from "../chat_models.js";

// Save the original value of the 'LANGCHAIN_CALLBACKS_BACKGROUND' environment variable
const originalBackground = process.env.LANGCHAIN_CALLBACKS_BACKGROUND;

// Shared constructor fields for every model instance created below.
const baseConstructorArgs: Partial<
  ConstructorParameters<typeof ChatBedrockConverse>[0]
> = {
  region: process.env.BEDROCK_AWS_REGION ?? "us-east-1",
  credentials: {
    secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY!,
    accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID!,
  },
  maxRetries: 1,
};

// Basic invoke: a single human message returns non-empty string content.
test("Test ChatBedrockConverse can invoke", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
    maxTokens: 5,
  });
  const res = await model.invoke([new HumanMessage("Print hello world")]);
  // console.log({ res });
  expect(typeof res.content).toBe("string");
  expect(res.content.length).toBeGreaterThan(1);
});

// Streaming returns more than one chunk for a short completion.
test("Test ChatBedrockConverse stream method", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
    maxTokens: 50,
  });
  const stream = await model.stream("Print hello world.");
  const chunks = [];
  for await (const chunk of stream) {
    chunks.push(chunk);
  }
  // @eslint-disable-next-line/@typescript-eslint/ban-ts-comment
  // @ts-expect-error unused var
  const finalMessage = chunks.map((c) => c.content).join("");
  // console.log(finalMessage);
  expect(chunks.length).toBeGreaterThan(1);
});

// With `streaming: true`, invoke() should still deliver tokens via the
// handleLLMNewToken callback, and the concatenation of those tokens should
// equal the final content.
test("Test ChatBedrockConverse in streaming mode", async () => {
  // Running LangChain callbacks in the background will sometimes cause the callbackManager to execute
  // after the test/llm call has already finished & returned. Set that environment variable to false
  // to prevent that from happening.
  process.env.LANGCHAIN_CALLBACKS_BACKGROUND = "false";

  try {
    let nrNewTokens = 0;
    let streamedCompletion = "";

    const model = new ChatBedrockConverse({
      ...baseConstructorArgs,
      streaming: true,
      maxTokens: 10,
      callbacks: [
        {
          async handleLLMNewToken(token: string) {
            nrNewTokens += 1;
            streamedCompletion += token;
          },
        },
      ],
    });
    const message = new HumanMessage("Hello!");
    const result = await model.invoke([message]);
    // console.log(result);

    expect(nrNewTokens > 0).toBe(true);
    expect(result.content).toBe(streamedCompletion);
  } finally {
    // Reset the environment variable
    process.env.LANGCHAIN_CALLBACKS_BACKGROUND = originalBackground;
  }
}, 10000);

// Stop sequences: the stop word must not appear in the output.
test("Test ChatBedrockConverse with stop", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
    maxTokens: 5,
  });
  const res = await model.invoke([new HumanMessage("Print hello world")], {
    stop: ["world"],
  });
  // console.log({ res });
  expect(typeof res.content).toBe("string");
  expect(res.content.length).toBeGreaterThan(1);
  expect(res.content).not.toContain("world");
});

// Consuming only part of a stream and breaking out early must not hang/throw.
test("Test ChatBedrockConverse stream method with early break", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
    maxTokens: 50,
  });
  const stream = await model.stream(
    "How is your day going? Be extremely verbose."
  );
  let i = 0;
  // @eslint-disable-next-line/@typescript-eslint/ban-ts-comment
  // @ts-expect-error unused var
  for await (const chunk of stream) {
    // console.log(chunk);
    i += 1;
    if (i > 10) {
      break;
    }
  }
});

// Concatenating all streamed chunks should yield populated usage_metadata.
test("Streaming tokens can be found in usage_metadata field", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
    maxTokens: 5,
  });
  const response = await model.stream("Hello, how are you?");
  let finalResult: AIMessageChunk | undefined;
  for await (const chunk of response) {
    if (finalResult) {
      finalResult = finalResult.concat(chunk);
    } else {
      finalResult = chunk;
    }
  }
  // console.log({
  //   usage_metadata: finalResult?.usage_metadata,
  // });
  expect(finalResult).toBeTruthy();
  expect(finalResult?.usage_metadata).toBeTruthy();
  expect(finalResult?.usage_metadata?.input_tokens).toBeGreaterThan(0);
  expect(finalResult?.usage_metadata?.output_tokens).toBeGreaterThan(0);
  expect(finalResult?.usage_metadata?.total_tokens).toBeGreaterThan(0);
});

// Non-streaming responses should carry an id (the AWS request id).
test("populates ID field on AIMessage", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
    maxTokens: 5,
  });
  const response = await model.invoke("Hell");
  // console.log({
  //   invokeId: response.id,
  // });
  expect(response.id?.length).toBeGreaterThan(1);

  /**
   * Bedrock Converse does not include an ID in
   * the response of a streaming call.
   */
  // Streaming
  // let finalChunk: AIMessageChunk | undefined;
  // for await (const chunk of await model.stream("Hell")) {
  //   if (!finalChunk) {
  //     finalChunk = chunk;
  //   } else {
  //     finalChunk = finalChunk.concat(chunk);
  //   }
  // }
  // console.log({
  //   streamId: finalChunk?.id,
  // });
  // expect(finalChunk?.id?.length).toBeGreaterThan(1);
});

// Tool calling via bindTools: the model should emit exactly one call to the
// bound tool with an id.
test("Test ChatBedrockConverse can invoke tools", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
  });
  const tools = [
    tool(
      (_input) =>
        // console.log("tool", input);
        "Hello",
      {
        name: "get_weather",
        description: "Get the weather",
        schema: z.object({
          location: z.string().describe("Location to get the weather for"),
        }),
      }
    ),
  ];
  const modelWithTools = model.bindTools(tools);
  const result = await modelWithTools.invoke([
    new HumanMessage("Get the weather for London"),
  ]);
  expect(result.tool_calls).toBeDefined();
  expect(result.tool_calls).toHaveLength(1);
  // console.log("result.tool_calls?.[0]", result.tool_calls?.[0]);
  expect(result.tool_calls?.[0].name).toBe("get_weather");
  expect(result.tool_calls?.[0].id).toBeDefined();
});

// Same tool-calling check against a non-Anthropic (Cohere) model.
test("Test ChatBedrockConverse can invoke tools with non anthropic model", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
    model: "cohere.command-r-v1:0",
  });
  const tools = [
    tool(
      (_input) =>
        // console.log("tool", input);
        "Hello",
      {
        name: "get_weather",
        description: "Get the weather",
        schema: z.object({
          location: z.string().describe("Location to get the weather for"),
        }),
      }
    ),
  ];
  const modelWithTools = model.bindTools(tools);
  const result = await modelWithTools.invoke([
    new HumanMessage("Get the weather for London"),
  ]);
  expect(result.tool_calls).toBeDefined();
  expect(result.tool_calls).toHaveLength(1);
  // console.log("result.tool_calls?.[0]", result.tool_calls?.[0]);
  expect(result.tool_calls?.[0].name).toBe("get_weather");
  expect(result.tool_calls?.[0].id).toBeDefined();
});

// Tool calls should also survive streaming: concatenated chunks must carry a
// complete tool_calls array.
test("Test ChatBedrockConverse can stream tools", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
  });
  const tools = [
    tool(
      (_input) =>
        // console.log("tool", input);
        "Hello",
      {
        name: "get_weather",
        description: "Get the weather",
        schema: z.object({
          location: z.string().describe("Location to get the weather for"),
        }),
      }
    ),
  ];
  const modelWithTools = model.bindTools(tools);
  const stream = await modelWithTools.stream([
    new HumanMessage("Get the weather for London"),
  ]);
  let finalChunk: AIMessageChunk | undefined;
  for await (const chunk of stream) {
    if (!finalChunk) {
      finalChunk = chunk;
    } else {
      finalChunk = finalChunk.concat(chunk);
    }
  }
  expect(finalChunk?.tool_calls).toBeDefined();
  expect(finalChunk?.tool_calls).toHaveLength(1);
  // console.log("result.tool_calls?.[0]", finalChunk?.tool_calls?.[0]);
  expect(finalChunk?.tool_calls?.[0].name).toBe("get_weather");
  expect(finalChunk?.tool_calls?.[0].id).toBeDefined();
});

// Forcing tool_choice to a specific tool should override the model's natural
// preference (the prompt clearly suggests the calculator instead).
test("Test ChatBedrockConverse tool_choice works", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
  });
  const tools = [
    tool(
      (_input) =>
        // console.log("tool", input);
        "Hello",
      {
        name: "get_weather",
        description: "Get the weather",
        schema: z.object({
          location: z.string().describe("Location to get the weather for"),
        }),
      }
    ),
    tool(
      (_input) =>
        // console.log("tool", input);
        "Hello",
      {
        name: "calculator",
        description: "Sum two numbers",
        schema: z.object({
          a: z.number().describe("First number to sum"),
          b: z.number().describe("Second number to sum"),
        }),
      }
    ),
  ];
  const modelWithTools = model.bindTools(tools, {
    tool_choice: "get_weather",
  });
  const result = await modelWithTools.invoke([
    new HumanMessage(
      "What is 261319136 plus 81863183? It is VERY important you tell me the answer to that math problem."
    ),
  ]);
  expect(result.tool_calls).toBeDefined();
  expect(result.tool_calls).toHaveLength(1);
  // console.log("result.tool_calls?.[0]", result.tool_calls?.[0]);
  expect(result.tool_calls?.[0].name).toBe("get_weather");
  expect(result.tool_calls?.[0].id).toBeDefined();
});

// A history containing an AIMessage with empty string content (tool-call
// only) followed by a ToolMessage must round-trip through the Converse
// message conversion without errors.
test("Model can handle empty content messages", async () => {
  const model = new ChatBedrockConverse({
    ...baseConstructorArgs,
  });

  const retrieverTool = tool((_) => "Success", {
    name: "retrieverTool",
    schema: z.object({
      url: z.string().describe("The URL to fetch"),
    }),
    description: "A tool to fetch data from a URL",
  });

  const messages = [
    new SystemMessage("You're an advanced AI assistant."),
    new HumanMessage(
      "What's the weather like today in Berkeley, CA? Use weather.com to check."
    ),
    new AIMessage({
      content: "",
      tool_calls: [
        {
          name: "retrieverTool",
          args: {
            url: "https://weather.com",
          },
          id: "123_retriever_tool",
        },
      ],
    }),
    new ToolMessage({
      tool_call_id: "123_retriever_tool",
      content: "The weather in Berkeley, CA is 70 degrees and sunny.",
    }),
  ];

  const result = await model.bindTools([retrieverTool]).invoke(messages);
  expect(result.content).toBeDefined();
  expect(typeof result.content).toBe("string");
  expect(result.content.length).toBeGreaterThan(1);
});
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/tests/embeddings.int.test.ts
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
// Integration tests for BedrockEmbeddings. These hit the live Bedrock API and
// require BEDROCK_AWS_REGION / BEDROCK_AWS_ACCESS_KEY_ID /
// BEDROCK_AWS_SECRET_ACCESS_KEY in the environment.
import { expect, test } from "@jest/globals";
import { BedrockRuntimeClient } from "@aws-sdk/client-bedrock-runtime";
// import { MemoryVectorStore } from "langchain/vectorstores/memory";
import { BedrockEmbeddings } from "../embeddings.js";

// Builds a BedrockRuntimeClient from environment variables, failing fast with
// a clear message when any of them is missing.
const getClient = () => {
  if (
    !process.env.BEDROCK_AWS_REGION ||
    !process.env.BEDROCK_AWS_ACCESS_KEY_ID ||
    !process.env.BEDROCK_AWS_SECRET_ACCESS_KEY
  ) {
    throw new Error("Missing environment variables for AWS");
  }

  const client = new BedrockRuntimeClient({
    region: process.env.BEDROCK_AWS_REGION,
    credentials: {
      accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY,
    },
  });
  return client;
};

// embedQuery should return a numeric vector for a single string.
test("Test BedrockEmbeddings.embedQuery", async () => {
  const client = getClient();
  const embeddings = new BedrockEmbeddings({
    maxRetries: 1,
    client,
  });
  const res = await embeddings.embedQuery("Hello world");
  // console.log(res);
  expect(typeof res[0]).toBe("number");
});

// embedDocuments should return one numeric vector per input document.
test("Test BedrockEmbeddings.embedDocuments with passed region and credentials", async () => {
  const client = getClient();
  const embeddings = new BedrockEmbeddings({
    maxRetries: 1,
    client,
  });
  const res = await embeddings.embedDocuments([
    "Hello world",
    "Bye bye",
    "we need",
    "at least",
    "six documents",
    "to test pagination",
  ]);
  // console.log(res);
  expect(res).toHaveLength(6);
  res.forEach((r) => {
    expect(typeof r[0]).toBe("number");
  });
});

// TODO: langchain dependency breaks CI. Should add a `FakeVectorStore` in core & import here to fix.
// Body kept commented out until the FakeVectorStore exists.
test.skip("Test end to end with MemoryVectorStore", async () => {
  // const client = getClient();
  // const vectorStore = await MemoryVectorStore.fromTexts(
  //   ["Hello world", "Bye bye", "hello nice world"],
  //   [{ id: 2 }, { id: 1 }, { id: 3 }],
  //   new BedrockEmbeddings({
  //     maxRetries: 1,
  //     client,
  //   })
  // );
  // expect(vectorStore.memoryVectors).toHaveLength(3);
  // const resultOne = await vectorStore.similaritySearch("hello world", 1);
  // const resultOneMetadatas = resultOne.map(({ metadata }) => metadata);
  // expect(resultOneMetadatas).toEqual([{ id: 2 }]);
  // const resultTwo = await vectorStore.similaritySearch("hello world", 2);
  // const resultTwoMetadatas = resultTwo.map(({ metadata }) => metadata);
  // expect(resultTwoMetadatas).toEqual([{ id: 2 }, { id: 3 }]);
  // const resultThree = await vectorStore.similaritySearch("hello world", 3);
  // const resultThreeMetadatas = resultThree.map(({ metadata }) => metadata);
  // expect(resultThreeMetadatas).toEqual([{ id: 2 }, { id: 3 }, { id: 1 }]);
});
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/tests/chat_models.test.ts
// Unit tests for ChatBedrockConverse: LangChain -> Bedrock Converse message
// conversion, streaming chunk aggregation, and tool_choice support across
// Bedrock model families.
import {
  SystemMessage,
  HumanMessage,
  AIMessage,
  ToolMessage,
  AIMessageChunk,
  BaseMessage,
} from "@langchain/core/messages";
import { concat } from "@langchain/core/utils/stream";
import type {
  Message as BedrockMessage,
  SystemContentBlock as BedrockSystemContentBlock,
} from "@aws-sdk/client-bedrock-runtime";
import { z } from "zod";
import { describe, expect, test } from "@jest/globals";
import {
  convertToConverseMessages,
  handleConverseStreamContentBlockDelta,
} from "../common.js";
import { ChatBedrockConverse } from "../chat_models.js";

describe("convertToConverseMessages", () => {
  // Table-driven cases: each entry pairs a LangChain message array with the
  // exact Converse-API payload (messages + system blocks) it must map to.
  const testCases: {
    name: string;
    input: BaseMessage[];
    output: {
      converseMessages: BedrockMessage[];
      converseSystem: BedrockSystemContentBlock[];
    };
  }[] = [
    {
      name: "empty input",
      input: [],
      output: {
        converseMessages: [],
        converseSystem: [],
      },
    },
    {
      // System message goes to converseSystem; tool call/result pairs become
      // assistant toolUse blocks followed by user toolResult blocks.
      name: "simple messages",
      input: [
        new SystemMessage("You're an advanced AI assistant."),
        new HumanMessage(
          "What's the weather like today in Berkeley, CA? Use weather.com to check."
        ),
        new AIMessage({
          content: "",
          tool_calls: [
            {
              name: "retrieverTool",
              args: {
                url: "https://weather.com",
              },
              id: "123_retriever_tool",
            },
          ],
        }),
        new ToolMessage({
          tool_call_id: "123_retriever_tool",
          content: "The weather in Berkeley, CA is 70 degrees and sunny.",
        }),
      ],
      output: {
        converseMessages: [
          {
            role: "user",
            content: [
              {
                text: "What's the weather like today in Berkeley, CA? Use weather.com to check.",
              },
            ],
          },
          {
            role: "assistant",
            content: [
              {
                toolUse: {
                  name: "retrieverTool",
                  toolUseId: "123_retriever_tool",
                  input: {
                    url: "https://weather.com",
                  },
                },
              },
            ],
          },
          {
            role: "user",
            content: [
              {
                toolResult: {
                  toolUseId: "123_retriever_tool",
                  content: [
                    {
                      text: "The weather in Berkeley, CA is 70 degrees and sunny.",
                    },
                  ],
                },
              },
            ],
          },
        ],
        converseSystem: [
          {
            text: "You're an advanced AI assistant.",
          },
        ],
      },
    },
    {
      // Consecutive tool results from parallel tool calls must be merged into
      // a single "user" message containing multiple toolResult blocks.
      name: "consecutive user tool messages",
      input: [
        new SystemMessage("You're an advanced AI assistant."),
        new HumanMessage(
          "What's the weather like today in Berkeley, CA and in Paris, France? Use weather.com to check."
        ),
        new AIMessage({
          content: "",
          tool_calls: [
            {
              name: "retrieverTool",
              args: {
                url: "https://weather.com",
              },
              id: "123_retriever_tool",
            },
            {
              name: "retrieverTool",
              args: {
                url: "https://weather.com",
              },
              id: "456_retriever_tool",
            },
          ],
        }),
        new ToolMessage({
          tool_call_id: "123_retriever_tool",
          content: "The weather in Berkeley, CA is 70 degrees and sunny.",
        }),
        new ToolMessage({
          tool_call_id: "456_retriever_tool",
          content: "The weather in Paris, France is perfect.",
        }),
        new HumanMessage(
          "What's the weather like today in Berkeley, CA and in Paris, France? Use meteofrance.com to check."
        ),
        new AIMessage({
          content: "",
          tool_calls: [
            {
              name: "retrieverTool",
              args: {
                url: "https://meteofrance.com",
              },
              id: "321_retriever_tool",
            },
            {
              name: "retrieverTool",
              args: {
                url: "https://meteofrance.com",
              },
              id: "654_retriever_tool",
            },
          ],
        }),
        new ToolMessage({
          tool_call_id: "321_retriever_tool",
          content: "Why don't you check yourself?",
        }),
        new ToolMessage({
          tool_call_id: "654_retriever_tool",
          content: "The weather in Paris, France is horrible.",
        }),
      ],
      output: {
        converseSystem: [
          {
            text: "You're an advanced AI assistant.",
          },
        ],
        converseMessages: [
          {
            role: "user",
            content: [
              {
                text: "What's the weather like today in Berkeley, CA and in Paris, France? Use weather.com to check.",
              },
            ],
          },
          {
            role: "assistant",
            content: [
              {
                toolUse: {
                  name: "retrieverTool",
                  toolUseId: "123_retriever_tool",
                  input: {
                    url: "https://weather.com",
                  },
                },
              },
              {
                toolUse: {
                  name: "retrieverTool",
                  toolUseId: "456_retriever_tool",
                  input: {
                    url: "https://weather.com",
                  },
                },
              },
            ],
          },
          {
            role: "user",
            content: [
              {
                toolResult: {
                  toolUseId: "123_retriever_tool",
                  content: [
                    {
                      text: "The weather in Berkeley, CA is 70 degrees and sunny.",
                    },
                  ],
                },
              },
              {
                toolResult: {
                  toolUseId: "456_retriever_tool",
                  content: [
                    {
                      text: "The weather in Paris, France is perfect.",
                    },
                  ],
                },
              },
            ],
          },
          {
            role: "user",
            content: [
              {
                text: "What's the weather like today in Berkeley, CA and in Paris, France? Use meteofrance.com to check.",
              },
            ],
          },
          {
            role: "assistant",
            content: [
              {
                toolUse: {
                  name: "retrieverTool",
                  toolUseId: "321_retriever_tool",
                  input: {
                    url: "https://meteofrance.com",
                  },
                },
              },
              {
                toolUse: {
                  name: "retrieverTool",
                  toolUseId: "654_retriever_tool",
                  input: {
                    url: "https://meteofrance.com",
                  },
                },
              },
            ],
          },
          {
            role: "user",
            content: [
              {
                toolResult: {
                  toolUseId: "321_retriever_tool",
                  content: [
                    {
                      text: "Why don't you check yourself?",
                    },
                  ],
                },
              },
              {
                toolResult: {
                  toolUseId: "654_retriever_tool",
                  content: [
                    {
                      text: "The weather in Paris, France is horrible.",
                    },
                  ],
                },
              },
            ],
          },
        ],
      },
    },
  ];

  it.each(testCases.map((tc) => [tc.name, tc]))(
    "convertToConverseMessages: case %s",
    (_, tc) => {
      const { converseMessages, converseSystem } = convertToConverseMessages(
        tc.input
      );
      expect(converseMessages).toEqual(tc.output.converseMessages);
      expect(converseSystem).toEqual(tc.output.converseSystem);
    }
  );
});

// Empty-string deltas must not break chunk concatenation — the aggregated
// content should be the plain concatenation of all text deltas.
test("Streaming supports empty string chunks", async () => {
  const contentBlocks = [
    {
      contentBlockIndex: 0,
      delta: {
        text: "Hello ",
      },
    },
    {
      contentBlockIndex: 0,
      delta: {
        text: "",
      },
    },
    {
      contentBlockIndex: 0,
      delta: {
        text: "world!",
      },
    },
  ];

  let finalChunk: AIMessageChunk | undefined;
  for (const block of contentBlocks) {
    const chunk = handleConverseStreamContentBlockDelta(block).message;
    finalChunk = !finalChunk ? chunk : concat(finalChunk, chunk);
  }

  expect(finalChunk).toBeDefined();
  if (!finalChunk) return;
  expect(finalChunk.content).toBe("Hello world!");
});

describe("tool_choice works for supported models", () => {
  const tool = {
    name: "weather",
    schema: z.object({
      location: z.string(),
    }),
  };
  // Dummy credentials — these tests never hit the network; unsupported
  // combinations throw client-side before any request is made.
  const baseConstructorArgs = {
    region: "us-east-1",
    credentials: {
      secretAccessKey: "process.env.BEDROCK_AWS_SECRET_ACCESS_KEY",
      accessKeyId: "process.env.BEDROCK_AWS_ACCESS_KEY_ID",
    },
  };
  const supportsToolChoiceValuesClaude3: Array<"auto" | "any" | "tool"> = [
    "auto",
    "any",
    "tool",
  ];
  const supportsToolChoiceValuesMistralLarge: Array<"auto" | "any" | "tool"> = [
    "auto",
    "any",
  ];

  it("throws an error if passing tool_choice with unsupported models", async () => {
    // Claude 2 should throw
    const claude2Model = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "anthropic.claude-v2",
    });
    const claude2WithTool = claude2Model.bindTools([tool], {
      tool_choice: tool.name,
    });
    await expect(claude2WithTool.invoke("foo")).rejects.toThrow();

    // Cohere should throw
    const cohereModel = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "cohere.command-text-v14",
    });
    const cohereModelWithTool = cohereModel.bindTools([tool], {
      tool_choice: tool.name,
    });
    await expect(cohereModelWithTool.invoke("foo")).rejects.toThrow();

    // Mistral (not mistral large) should throw
    const mistralModel = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "mistral.mistral-7b-instruct-v0:2",
    });
    const mistralModelWithTool = mistralModel.bindTools([tool], {
      tool_choice: tool.name,
    });
    await expect(mistralModelWithTool.invoke("foo")).rejects.toThrow();
  });

  it("does NOT throw and binds tool_choice when calling bindTools with supported models", async () => {
    // Claude 3 should NOT throw
    const claude3Model = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "anthropic.claude-3-5-sonnet-20240620-v1:0",
      supportsToolChoiceValues: supportsToolChoiceValuesClaude3,
    });
    const claude3ModelWithTool = claude3Model.bindTools([tool], {
      tool_choice: tool.name,
    });
    expect(claude3ModelWithTool).toBeDefined();
    // Inspect the serialized runnable to confirm tool_choice was bound.
    const claude3ModelWithToolAsJSON = claude3ModelWithTool.toJSON();
    if (!("kwargs" in claude3ModelWithToolAsJSON)) {
      throw new Error("kwargs not found in claude3ModelWithToolAsJSON");
    }
    expect(claude3ModelWithToolAsJSON.kwargs.kwargs).toHaveProperty(
      "tool_choice"
    );
    expect(claude3ModelWithToolAsJSON.kwargs.kwargs.tool_choice).toBe(
      tool.name
    );

    // Mistral large should NOT throw
    const mistralModel = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "mistral.mistral-large-2407-v1:0",
      supportsToolChoiceValues: supportsToolChoiceValuesMistralLarge,
    });
    const mistralModelWithTool = mistralModel.bindTools([tool], {
      tool_choice: tool.name,
    });
    expect(mistralModelWithTool).toBeDefined();
    const mistralModelWithToolAsJSON = mistralModelWithTool.toJSON();
    if (!("kwargs" in mistralModelWithToolAsJSON)) {
      throw new Error("kwargs not found in mistralModelWithToolAsJSON");
    }
    expect(mistralModelWithToolAsJSON.kwargs.kwargs).toHaveProperty(
      "tool_choice"
    );
    expect(mistralModelWithToolAsJSON.kwargs.kwargs.tool_choice).toBe(
      tool.name
    );
  });

  it("should NOT bind and NOT throw when using WSO with unsupported models", async () => {
    // Claude 2 should NOT throw is using WSO
    const claude2Model = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "anthropic.claude-v2",
    });
    const claude2ModelWSO = claude2Model.withStructuredOutput(tool.schema, {
      name: tool.name,
    });
    expect(claude2ModelWSO).toBeDefined();
    const claude2ModelWSOAsJSON = claude2ModelWSO.toJSON();
    if (!("kwargs" in claude2ModelWSOAsJSON)) {
      throw new Error("kwargs not found in claude2ModelWSOAsJSON");
    }
    expect(claude2ModelWSOAsJSON.kwargs.bound.first.kwargs).not.toHaveProperty(
      "tool_choice"
    );

    // Cohere should NOT throw is using WSO
    const cohereModel = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "cohere.command-text-v14",
    });
    const cohereModelWSO = cohereModel.withStructuredOutput(tool.schema, {
      name: tool.name,
    });
    expect(cohereModelWSO).toBeDefined();
    const cohereModelWSOAsJSON = cohereModelWSO.toJSON();
    if (!("kwargs" in cohereModelWSOAsJSON)) {
      throw new Error("kwargs not found in cohereModelWSOAsJSON");
    }
    expect(cohereModelWSOAsJSON.kwargs.bound.first.kwargs).not.toHaveProperty(
      "tool_choice"
    );

    // Mistral (not mistral large) should NOT throw is using WSO
    const mistralModel = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "mistral.mistral-7b-instruct-v0:2",
    });
    const mistralModelWSO = mistralModel.withStructuredOutput(tool.schema, {
      name: tool.name,
    });
    expect(mistralModelWSO).toBeDefined();
    const mistralModelWSOAsJSON = mistralModelWSO.toJSON();
    if (!("kwargs" in mistralModelWSOAsJSON)) {
      throw new Error("kwargs not found in mistralModelWSOAsJSON");
    }
    expect(mistralModelWSOAsJSON.kwargs.bound.first.kwargs).not.toHaveProperty(
      "tool_choice"
    );
  });

  it("should bind tool_choice when using WSO with supported models", async () => {
    // Claude 3 should NOT throw is using WSO & it should have `tool_choice` bound.
    const claude3Model = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "anthropic.claude-3-5-sonnet-20240620-v1:0",
      // We are not passing the `supportsToolChoiceValues` arg here as
      // it should be inferred from the model name.
    });
    const claude3ModelWSO = claude3Model.withStructuredOutput(tool.schema, {
      name: tool.name,
    });
    expect(claude3ModelWSO).toBeDefined();
    const claude3ModelWSOAsJSON = claude3ModelWSO.toJSON();
    if (!("kwargs" in claude3ModelWSOAsJSON)) {
      throw new Error("kwargs not found in claude3ModelWSOAsJSON");
    }
    expect(claude3ModelWSOAsJSON.kwargs.bound.first.kwargs).toHaveProperty(
      "tool_choice"
    );
    expect(claude3ModelWSOAsJSON.kwargs.bound.first.kwargs.tool_choice).toBe(
      tool.name
    );

    // Mistral (not mistral large) should NOT throw is using WSO
    const mistralModel = new ChatBedrockConverse({
      ...baseConstructorArgs,
      model: "mistral.mistral-large-2407-v1:0",
      // We are not passing the `supportsToolChoiceValues` arg here as
      // it should be inferred from the model name.
    });
    const mistralModelWSO = mistralModel.withStructuredOutput(tool.schema, {
      name: tool.name,
    });
    expect(mistralModelWSO).toBeDefined();
    const mistralModelWSOAsJSON = mistralModelWSO.toJSON();
    if (!("kwargs" in mistralModelWSOAsJSON)) {
      throw new Error("kwargs not found in mistralModelWSOAsJSON");
    }
    expect(mistralModelWSOAsJSON.kwargs.bound.first.kwargs).toHaveProperty(
      "tool_choice"
    );
    // Mistral large only supports "auto" and "any" for tool_choice, not the actual tool name
    expect(mistralModelWSOAsJSON.kwargs.bound.first.kwargs.tool_choice).toBe(
      "any"
    );
  });
});
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/retrievers/bedrock.ts
import {
  RetrieveCommand,
  BedrockAgentRuntimeClient,
  type BedrockAgentRuntimeClientConfig,
  type SearchType,
  type RetrievalFilter,
} from "@aws-sdk/client-bedrock-agent-runtime";
import { BaseRetriever } from "@langchain/core/retrievers";
import { Document } from "@langchain/core/documents";

/**
 * Interface for the arguments required to initialize an
 * AmazonKnowledgeBaseRetriever instance.
 */
export interface AmazonKnowledgeBaseRetrieverArgs {
  knowledgeBaseId: string;
  topK: number;
  region: string;
  clientOptions?: BedrockAgentRuntimeClientConfig;
  filter?: RetrievalFilter;
  overrideSearchType?: SearchType;
}

/**
 * Retriever backed by Amazon Bedrock Knowledge Bases, the RAG-oriented
 * managed service from AWS. Extends the BaseRetriever class.
 * @example
 * ```typescript
 * const retriever = new AmazonKnowledgeBaseRetriever({
 *   topK: 10,
 *   knowledgeBaseId: "YOUR_KNOWLEDGE_BASE_ID",
 *   region: "us-east-2",
 *   clientOptions: {
 *     credentials: {
 *       accessKeyId: "YOUR_ACCESS_KEY_ID",
 *       secretAccessKey: "YOUR_SECRET_ACCESS_KEY",
 *     },
 *   },
 * });
 *
 * const docs = await retriever.getRelevantDocuments("How are clouds formed?");
 * ```
 */
export class AmazonKnowledgeBaseRetriever extends BaseRetriever {
  static lc_name() {
    return "AmazonKnowledgeBaseRetriever";
  }

  lc_namespace = ["langchain", "retrievers", "amazon_bedrock_knowledge_base"];

  knowledgeBaseId: string;

  topK: number;

  bedrockAgentRuntimeClient: BedrockAgentRuntimeClient;

  filter?: RetrievalFilter;

  overrideSearchType?: SearchType;

  constructor({
    knowledgeBaseId,
    topK = 10,
    clientOptions,
    region,
    filter,
    overrideSearchType,
  }: AmazonKnowledgeBaseRetrieverArgs) {
    super();

    this.topK = topK;
    this.filter = filter;
    this.overrideSearchType = overrideSearchType;
    this.bedrockAgentRuntimeClient = new BedrockAgentRuntimeClient({
      region,
      ...clientOptions,
    });
    this.knowledgeBaseId = knowledgeBaseId;
  }

  /**
   * Normalizes retrieved text: collapses whitespace runs into single
   * spaces and strips ellipses.
   * @param resText The result text to clean.
   * @returns The cleaned result text.
   */
  cleanResult(resText: string) {
    return resText.replace(/\s+/g, " ").replace(/\.\.\./g, "");
  }

  /**
   * Issues a Retrieve call against the knowledge base and maps each hit
   * to a document-shaped object with source/score metadata.
   */
  async queryKnowledgeBase(
    query: string,
    topK: number,
    filter?: RetrievalFilter,
    overrideSearchType?: SearchType
  ) {
    const command = new RetrieveCommand({
      knowledgeBaseId: this.knowledgeBaseId,
      retrievalQuery: {
        text: query,
      },
      retrievalConfiguration: {
        vectorSearchConfiguration: {
          numberOfResults: topK,
          overrideSearchType,
          filter,
        },
      },
    });

    const response = await this.bedrockAgentRuntimeClient.send(command);
    const hits = response.retrievalResults ?? ([] as Array<Document>);

    return hits.map((hit) => {
      const location = hit.location;
      // Resolve the provenance URL/URI for the hit's storage backend;
      // unknown location types fall back to the S3 URI.
      let source;
      if (location?.type === "CONFLUENCE") {
        source = location?.confluenceLocation?.url;
      } else if (location?.type === "S3") {
        source = location?.s3Location?.uri;
      } else if (location?.type === "SALESFORCE") {
        source = location?.salesforceLocation?.url;
      } else if (location?.type === "SHAREPOINT") {
        source = location?.sharePointLocation?.url;
      } else if (location?.type === "WEB") {
        source = location?.webLocation?.url;
      } else {
        source = location?.s3Location?.uri;
      }

      return {
        pageContent: this.cleanResult(hit.content?.text || ""),
        metadata: {
          source,
          score: hit.score,
          ...hit.metadata,
        },
      };
    });
  }

  async _getRelevantDocuments(query: string): Promise<Document[]> {
    return this.queryKnowledgeBase(
      query,
      this.topK,
      this.filter,
      this.overrideSearchType
    );
  }
}
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/retrievers/kendra.ts
import type { AttributeFilter, DocumentAttribute, DocumentAttributeValue, KendraClientConfig, QueryCommandOutput, QueryResultItem, RetrieveCommandOutput, RetrieveResultItem, } from "@aws-sdk/client-kendra"; import { KendraClient, QueryCommand, RetrieveCommand, } from "@aws-sdk/client-kendra"; import { BaseRetriever } from "@langchain/core/retrievers"; import { Document } from "@langchain/core/documents"; /** * Interface for the arguments required to initialize an * AmazonKendraRetriever instance. */ export interface AmazonKendraRetrieverArgs { indexId: string; topK: number; region: string; attributeFilter?: AttributeFilter; clientOptions?: KendraClientConfig; } /** * Class for interacting with Amazon Kendra, an intelligent search service * provided by AWS. Extends the BaseRetriever class. * @example * ```typescript * const retriever = new AmazonKendraRetriever({ * topK: 10, * indexId: "YOUR_INDEX_ID", * region: "us-east-2", * clientOptions: { * credentials: { * accessKeyId: "YOUR_ACCESS_KEY_ID", * secretAccessKey: "YOUR_SECRET_ACCESS_KEY", * }, * }, * }); * * const docs = await retriever.getRelevantDocuments("How are clouds formed?"); * ``` */ export class AmazonKendraRetriever extends BaseRetriever { static lc_name() { return "AmazonKendraRetriever"; } lc_namespace = ["langchain", "retrievers", "amazon_kendra"]; indexId: string; topK: number; kendraClient: KendraClient; attributeFilter?: AttributeFilter; constructor({ indexId, topK = 10, clientOptions, attributeFilter, region, }: AmazonKendraRetrieverArgs) { super(); if (!region) { throw new Error("Please pass regionName field to the constructor!"); } if (!indexId) { throw new Error("Please pass Kendra Index Id to the constructor"); } this.topK = topK; this.kendraClient = new KendraClient({ region, ...clientOptions, }); this.attributeFilter = attributeFilter; this.indexId = indexId; } // A method to combine title and excerpt into a single string. /** * Combines title and excerpt into a single string. 
* @param title The title of the document. * @param excerpt An excerpt from the document. * @returns A single string combining the title and excerpt. */ combineText(title?: string, excerpt?: string): string { let text = ""; if (title) { text += `Document Title: ${title}\n`; } if (excerpt) { text += `Document Excerpt: \n${excerpt}\n`; } return text; } // A method to clean the result text by replacing sequences of whitespace with a single space and removing ellipses. /** * Cleans the result text by replacing sequences of whitespace with a * single space and removing ellipses. * @param resText The result text to clean. * @returns The cleaned result text. */ cleanResult(resText: string) { const res = resText.replace(/\s+/g, " ").replace(/\.\.\./g, ""); return res; } // A method to extract the attribute value from a DocumentAttributeValue object. /** * Extracts the attribute value from a DocumentAttributeValue object. * @param value The DocumentAttributeValue object to extract the value from. * @returns The extracted attribute value. */ getDocAttributeValue(value: DocumentAttributeValue) { if (value.DateValue) { return value.DateValue; } if (value.LongValue) { return value.LongValue; } if (value.StringListValue) { return value.StringListValue; } if (value.StringValue) { return value.StringValue; } return ""; } // A method to extract the attribute key-value pairs from an array of DocumentAttribute objects. /** * Extracts the attribute key-value pairs from an array of * DocumentAttribute objects. * @param documentAttributes The array of DocumentAttribute objects to extract the key-value pairs from. * @returns An object containing the extracted attribute key-value pairs. 
*/ getDocAttributes(documentAttributes?: DocumentAttribute[]): { [key: string]: unknown; } { const attributes: { [key: string]: unknown } = {}; if (documentAttributes) { for (const attr of documentAttributes) { if (attr.Key && attr.Value) { attributes[attr.Key] = this.getDocAttributeValue(attr.Value); } } } return attributes; } // A method to convert a RetrieveResultItem object into a Document object. /** * Converts a RetrieveResultItem object into a Document object. * @param item The RetrieveResultItem object to convert. * @returns A Document object. */ convertRetrieverItem(item: RetrieveResultItem) { const title = item.DocumentTitle || ""; const excerpt = item.Content ? this.cleanResult(item.Content) : ""; const pageContent = this.combineText(title, excerpt); const source = item.DocumentURI; const attributes = this.getDocAttributes(item.DocumentAttributes); const metadata = { source, title, excerpt, document_attributes: attributes, }; return new Document({ pageContent, metadata }); } // A method to extract the top-k documents from a RetrieveCommandOutput object. /** * Extracts the top-k documents from a RetrieveCommandOutput object. * @param response The RetrieveCommandOutput object to extract the documents from. * @param pageSize The number of documents to extract. * @returns An array of Document objects. */ getRetrieverDocs( response: RetrieveCommandOutput, pageSize: number ): Document[] { if (!response.ResultItems) return []; const { length } = response.ResultItems; const count = length < pageSize ? length : pageSize; return response.ResultItems.slice(0, count).map((item) => this.convertRetrieverItem(item) ); } // A method to extract the excerpt text from a QueryResultItem object. /** * Extracts the excerpt text from a QueryResultItem object. * @param item The QueryResultItem object to extract the excerpt text from. * @returns The extracted excerpt text. 
*/ getQueryItemExcerpt(item: QueryResultItem) { if ( item.AdditionalAttributes && item.AdditionalAttributes.length && item.AdditionalAttributes[0].Key === "AnswerText" ) { if (!item.AdditionalAttributes) { return ""; } if (!item.AdditionalAttributes[0]) { return ""; } return this.cleanResult( item.AdditionalAttributes[0].Value?.TextWithHighlightsValue?.Text || "" ); } else if (item.DocumentExcerpt) { return this.cleanResult(item.DocumentExcerpt.Text || ""); } else { return ""; } } // A method to convert a QueryResultItem object into a Document object. /** * Converts a QueryResultItem object into a Document object. * @param item The QueryResultItem object to convert. * @returns A Document object. */ convertQueryItem(item: QueryResultItem) { const title = item.DocumentTitle?.Text || ""; const excerpt = this.getQueryItemExcerpt(item); const pageContent = this.combineText(title, excerpt); const source = item.DocumentURI; const attributes = this.getDocAttributes(item.DocumentAttributes); const metadata = { source, title, excerpt, document_attributes: attributes, }; return new Document({ pageContent, metadata }); } // A method to extract the top-k documents from a QueryCommandOutput object. /** * Extracts the top-k documents from a QueryCommandOutput object. * @param response The QueryCommandOutput object to extract the documents from. * @param pageSize The number of documents to extract. * @returns An array of Document objects. */ getQueryDocs(response: QueryCommandOutput, pageSize: number) { if (!response.ResultItems) return []; const { length } = response.ResultItems; const count = length < pageSize ? length : pageSize; return response.ResultItems.slice(0, count).map((item) => this.convertQueryItem(item) ); } // A method to send a retrieve or query request to Kendra and return the top-k documents. /** * Sends a retrieve or query request to Kendra and returns the top-k * documents. * @param query The query to send to Kendra. 
* @param topK The number of top documents to return. * @param attributeFilter Optional filter to apply when retrieving documents. * @returns A Promise that resolves to an array of Document objects. */ async queryKendra( query: string, topK: number, attributeFilter?: AttributeFilter ) { const retrieveCommand = new RetrieveCommand({ IndexId: this.indexId, QueryText: query, PageSize: topK, AttributeFilter: attributeFilter, }); const retrieveResponse = await this.kendraClient.send(retrieveCommand); const retriveLength = retrieveResponse.ResultItems?.length; if (retriveLength === 0) { // Retrieve API returned 0 results, call query API const queryCommand = new QueryCommand({ IndexId: this.indexId, QueryText: query, PageSize: topK, AttributeFilter: attributeFilter, }); const queryResponse = await this.kendraClient.send(queryCommand); return this.getQueryDocs(queryResponse, this.topK); } else { return this.getRetrieverDocs(retrieveResponse, this.topK); } } async _getRelevantDocuments(query: string): Promise<Document[]> { const docs = await this.queryKendra(query, this.topK, this.attributeFilter); return docs; } }
0
lc_public_repos/langchainjs/libs/langchain-aws/src
lc_public_repos/langchainjs/libs/langchain-aws/src/retrievers/index.ts
export * from "./bedrock.js"; export * from "./kendra.js";
0
lc_public_repos/langchainjs/libs/langchain-aws/src/retrievers
lc_public_repos/langchainjs/libs/langchain-aws/src/retrievers/tests/bedrock.int.test.ts
/* eslint-disable no-process-env */ /* eslint-disable @typescript-eslint/no-non-null-assertion */ import { test } from "@jest/globals"; import { AmazonKnowledgeBaseRetriever } from "../bedrock.js"; test.skip("AmazonKnowledgeBaseRetriever", async () => { if ( !process.env.BEDROCK_AWS_REGION || !process.env.BEDROCK_AWS_ACCESS_KEY_ID || !process.env.BEDROCK_AWS_SECRET_ACCESS_KEY ) { throw new Error("Missing environment variables for AWS"); } const retriever = new AmazonKnowledgeBaseRetriever({ topK: 10, knowledgeBaseId: process.env.AMAZON_KNOWLEDGE_BASE_ID || "", region: process.env.BEDROCK_AWS_REGION, overrideSearchType: "HYBRID", filter: undefined, clientOptions: { credentials: { accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID, secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY, // sessionToken: process.env.AWS_SESSION_TOKEN!, }, }, }); const docs = await retriever.invoke("How are clouds formed?"); expect(docs.length).toBeGreaterThan(0); });
0
lc_public_repos/langchainjs/libs/langchain-aws/src/retrievers
lc_public_repos/langchainjs/libs/langchain-aws/src/retrievers/tests/kendra.int.test.ts
/* eslint-disable no-process-env */ /* eslint-disable @typescript-eslint/no-non-null-assertion */ import { test } from "@jest/globals"; import { AmazonKendraRetriever } from "../kendra.js"; test.skip("AmazonKendraRetriever", async () => { if ( !process.env.BEDROCK_AWS_REGION || !process.env.BEDROCK_AWS_ACCESS_KEY_ID || !process.env.BEDROCK_AWS_SECRET_ACCESS_KEY ) { throw new Error("Missing environment variables for AWS"); } const retriever = new AmazonKendraRetriever({ topK: 10, indexId: "5c0fcb10-9573-42df-8846-e30d69004ec5", region: process.env.BEDROCK_AWS_REGION, clientOptions: { credentials: { accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID, secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY, }, }, }); const docs = await retriever.invoke("How are clouds formed?"); expect(docs.length).toBeGreaterThan(0); // console.log(docs); });
0
lc_public_repos/langchainjs/libs/langchain-aws
lc_public_repos/langchainjs/libs/langchain-aws/scripts/jest-setup-after-env.js
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
import { afterAll, jest } from "@jest/globals";

// Flush any pending LangChain callback handlers once each test file finishes.
afterAll(awaitAllCallbacks);

// Silence console.log (e.g. in CI) when DISABLE_CONSOLE_LOGS=true.
const logsDisabled = process.env.DISABLE_CONSOLE_LOGS === "true";
if (logsDisabled) {
  console.log = jest.fn();
}
0
lc_public_repos/langchainjs
lc_public_repos/langchainjs/dependency_range_tests/docker-compose.yml
version: "3" services: # LangChain langchain-latest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../langchain:/langchain - ./scripts:/scripts command: bash /scripts/langchain/test-with-latest-deps.sh langchain-lowest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../langchain:/langchain - ./scripts:/scripts command: bash /scripts/langchain/test-with-lowest-deps.sh # Community community-latest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/community/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-community:/libs/langchain-community - ./scripts:/scripts command: bash /scripts/with_standard_tests/community/test-with-latest-deps.sh community-lowest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/community/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-community:/libs/langchain-community - ./scripts:/scripts command: bash /scripts/with_standard_tests/community/test-with-lowest-deps.sh community-npm-install: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/community/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - 
../libs/langchain-community:/libs/langchain-community - ./scripts:/scripts command: bash /scripts/with_standard_tests/community/npm-install.sh # OpenAI openai-latest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/openai/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-openai:/libs/langchain-openai - ./scripts:/scripts command: bash /scripts/with_standard_tests/openai/test-with-latest-deps.sh openai-lowest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/openai/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-openai:/libs/langchain-openai - ./scripts:/scripts command: bash /scripts/with_standard_tests/openai/test-with-lowest-deps.sh # Anthropic anthropic-latest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/anthropic/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-anthropic:/libs/langchain-anthropic - ./scripts:/scripts command: bash /scripts/with_standard_tests/anthropic/test-with-latest-deps.sh anthropic-lowest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/anthropic/node/package.json:/package.json - 
../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-anthropic:/libs/langchain-anthropic - ./scripts:/scripts command: bash /scripts/with_standard_tests/anthropic/test-with-lowest-deps.sh # Google VertexAI google-vertexai-latest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/google-vertexai/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-google-vertexai:/libs/langchain-google-vertexai - ./scripts:/scripts command: bash /scripts/with_standard_tests/google-vertexai/test-with-latest-deps.sh google-vertexai-lowest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/google-vertexai/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-google-vertexai:/libs/langchain-google-vertexai - ./scripts:/scripts command: bash /scripts/with_standard_tests/google-vertexai/test-with-lowest-deps.sh # Cohere cohere-latest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - ../turbo.json:/turbo.json - ./scripts/with_standard_tests/cohere/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-cohere:/libs/langchain-cohere - ./scripts:/scripts command: bash /scripts/with_standard_tests/cohere/test-with-latest-deps.sh cohere-lowest-deps: image: node:20 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: - 
../turbo.json:/turbo.json - ./scripts/with_standard_tests/cohere/node/package.json:/package.json - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-cohere:/libs/langchain-cohere - ./scripts:/scripts command: bash /scripts/with_standard_tests/cohere/test-with-lowest-deps.sh
0
lc_public_repos/langchainjs/dependency_range_tests/scripts
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain/test-with-latest-deps.sh
#!/usr/bin/env bash

# Runs the langchain test suite against the LATEST published @langchain/core,
# inside the dependency-range-test container (see docker-compose.yml).

set -euxo pipefail

export CI=true

# enable extended globbing for omitting build artifacts
shopt -s extglob
# avoid copying build artifacts from the host
cp -r ../langchain/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) ./
mkdir -p /updater_script
cp -r /scripts/langchain/node/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) /updater_script/
cd /updater_script
yarn
cd /app
# Rewrite package.json so @langchain/core resolves to "latest"
node /updater_script/update_resolutions_latest.js
yarn
yarn add @langchain/core
# Check the test command completes successfully
NODE_OPTIONS=--experimental-vm-modules yarn run jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%
0
lc_public_repos/langchainjs/dependency_range_tests/scripts
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain/test-with-lowest-deps.sh
#!/usr/bin/env bash

# Runs the langchain test suite against the LOWEST @langchain/core version
# allowed by its declared peer-dependency range.

set -euxo pipefail

export CI=true

# enable extended globbing for omitting build artifacts
shopt -s extglob
# avoid copying build artifacts from the host
cp -r ../langchain/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) ./
mkdir -p /updater_script
cp -r /scripts/langchain/node/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) /updater_script/
cd /updater_script
yarn
cd /app
# Pin peer/dep ranges down to their minimum satisfying versions
node /updater_script/update_resolutions_lowest.js
# Read the @langchain/core version from peerDependencies
core_version=$(node -p "require('./package.json').peerDependencies['@langchain/core']")
yarn
yarn add @langchain/core@$core_version
# Check the test command completes successfully
NODE_OPTIONS=--experimental-vm-modules yarn run jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain/node/update_resolutions_latest.js
const fs = require("fs");

// package.json of the copied langchain workspace (script runs from /app).
const communityPackageJsonPath = "package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Move @langchain/core from devDependencies to a "latest" peerDependency so
// the dependency-range test installs the newest published core.
// Optional chaining + spread guard against a package.json that has no
// devDependencies/peerDependencies section (the original threw a TypeError).
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
  currentPackageJson.peerDependencies = {
    ...currentPackageJson.peerDependencies,
    "@langchain/core": "latest",
  };
}

// Stupid hack: pin jackspeak to sidestep a broken transitive release.
currentPackageJson.resolutions = {
  ...currentPackageJson.resolutions,
  "jackspeak": "2.1.1",
};

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain/node/yarn.lock
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain/node/package.json
{ "name": "dependency-range-tests", "version": "0.0.0", "private": true, "description": "Tests dependency ranges for LangChain.", "dependencies": { "semver": "^7.5.4" } }
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain
lc_public_repos/langchainjs/dependency_range_tests/scripts/langchain/node/update_resolutions_lowest.js
const fs = require("fs");
const semver = require("semver");

// package.json of the copied langchain workspace (script runs from /app).
const communityPackageJsonPath = "package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Pin `name` inside pkg[section] ("dependencies"/"peerDependencies") to the
// minimum version satisfying its declared range; release-candidate ranges and
// missing sections/entries are left untouched.
function pinToMinVersion(pkg, section, name) {
  const range = pkg[section]?.[name];
  if (range && !range.includes("rc")) {
    pkg[section] = {
      ...pkg[section],
      [name]: semver.minVersion(range).version,
    };
  }
}

pinToMinVersion(currentPackageJson, "peerDependencies", "@langchain/core");

// Drop the workspace devDependency so the pinned peer version wins at install.
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
}

pinToMinVersion(currentPackageJson, "dependencies", "@langchain/openai");
pinToMinVersion(currentPackageJson, "dependencies", "@langchain/textsplitters");

// Stupid hack: pin jackspeak to sidestep a broken transitive release.
currentPackageJson.resolutions = {
  ...currentPackageJson.resolutions,
  "jackspeak": "2.1.1",
};

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/shared.sh
#!/usr/bin/env bash

# Shared setup for the per-package dependency-range test scripts: copies the
# package under test plus @langchain/standard-tests into a minimal monorepo
# at /app/monorepo, then rewrites standard-tests' workspace dependencies.

# Extract the package name from the first argument
package_name=$1

# New monorepo directory paths
monorepo_dir="/app/monorepo"
monorepo_libs_dir="$monorepo_dir/libs"
monorepo_package_dir="$monorepo_libs_dir/langchain-$package_name"
monorepo_standard_tests_dir="$monorepo_libs_dir/langchain-standard-tests"
# Updater script will not live inside the monorepo
standard_tests_updater_script_dir="/app/with_standard_script"

# Original directory paths
original_package_dir="/libs/langchain-$package_name"
original_standard_tests_dir="/libs/langchain-standard-tests"
original_package_json_dir="/package.json"
original_turbo_json_dir="/turbo.json"
original_standard_tests_updater_script_dir="/scripts/with_standard_tests/node"

# enable extended globbing for omitting build artifacts
shopt -s extglob

# Create the top level monorepo directory
mkdir -p "$monorepo_dir"

# Copy `@langchain/standard-tests` WITH build artifacts from the host.
# This is because we build @langchain/standard-tests before running this script.
mkdir -p "$monorepo_standard_tests_dir/"
cp -r "$original_standard_tests_dir"/* "$monorepo_standard_tests_dir/"

# Copy `@langchain/package` WITHOUT build artifacts from the host.
mkdir -p "$monorepo_package_dir/"
cp -r "$original_package_dir"/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) "$monorepo_package_dir/"

# Copy the turbo and package.json files for monorepo
cp "$original_turbo_json_dir" "$monorepo_dir/"
cp "$original_package_json_dir" "$monorepo_dir/"

# Replace any workspace dependencies in `@langchain/standard-tests`
# with "latest" for the version.
mkdir -p "$standard_tests_updater_script_dir"
cp "$original_standard_tests_updater_script_dir"/* "$standard_tests_updater_script_dir/"
cd "$standard_tests_updater_script_dir"
# Run the updater script
node "update_workspace_dependencies.js"

# Navigate back to root
cd "/app"
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic/test-with-latest-deps.sh
#!/usr/bin/env bash

# Runs @langchain/anthropic tests against the LATEST published @langchain/core.

set -euxo pipefail

export CI=true

# New monorepo directory paths
monorepo_dir="/app/monorepo"
monorepo_anthropic_dir="/app/monorepo/libs/langchain-anthropic"
# Updater script will not live inside the monorepo
updater_script_dir="/app/updater_script"
# Original directory paths
original_updater_script_dir="/scripts/with_standard_tests/anthropic/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh anthropic

mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"
cd "$updater_script_dir"
# Update any workspace dep to the latest version since not all workspaces are
# available in the test environment.
node "update_workspace_deps.js"
node "update_resolutions_latest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/anthropic` to build and run tests
# We need to run inside the anthropic directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_anthropic_dir"
yarn add @langchain/core
yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic/test-with-lowest-deps.sh
#!/usr/bin/env bash

# Runs @langchain/anthropic tests against the LOWEST @langchain/core version
# allowed by its declared peer-dependency range.

set -euxo pipefail

export CI=true

monorepo_dir="/app/monorepo"
monorepo_anthropic_dir="/app/monorepo/libs/langchain-anthropic"
updater_script_dir="/app/updater_script"
original_updater_script_dir="/scripts/with_standard_tests/anthropic/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh anthropic

# Copy the updater script to the monorepo
mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"

# Install deps (e.g semver) for the updater script
cd "$updater_script_dir"
yarn

# Run the updater script
node "update_workspace_deps.js"
node "update_resolutions_lowest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/anthropic` to build and run tests
# We need to run inside the package directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_anthropic_dir"

# Read the @langchain/core version from peerDependencies
core_version=$(node -p "require('./package.json').peerDependencies['@langchain/core']")

# Install @langchain/core at the specified version
yarn add @langchain/core@$core_version

yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic/node/update_resolutions_latest.js
const fs = require("fs");

// package.json of the package under test inside the monorepo copy.
const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-anthropic/package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Move @langchain/core from devDependencies to a "latest" peerDependency so
// the dependency-range test installs the newest published core.
// Optional chaining + spread guard against a package.json missing either
// section (the original threw a TypeError in that case).
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
  currentPackageJson.peerDependencies = {
    ...currentPackageJson.peerDependencies,
    "@langchain/core": "latest",
  };
}

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic/node/yarn.lock
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic/node/package.json
{ "name": "dependency-range-tests", "version": "0.0.0", "private": true, "workspaces": [ "libs/*" ], "description": "Tests dependency ranges for LangChain.", "dependencies": { "semver": "^7.5.4" } }
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic/node/update_resolutions_lowest.js
const fs = require("fs");
const semver = require("semver");

// package.json of the package under test inside the monorepo copy.
const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-anthropic/package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Pin @langchain/core to the minimum version satisfying the declared peer
// range (release-candidate ranges are skipped) so tests run against the
// lowest supported core release. Optional chaining guards packages without a
// peerDependencies section (the original threw a TypeError).
const coreRange = currentPackageJson.peerDependencies?.["@langchain/core"];
if (coreRange && !coreRange.includes("rc")) {
  currentPackageJson.peerDependencies = {
    ...currentPackageJson.peerDependencies,
    "@langchain/core": semver.minVersion(coreRange).version,
  };
}

// Drop the workspace devDependency so the pinned peer version wins at install.
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
}

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/anthropic/node/update_workspace_deps.js
const fs = require("fs");

// package.json of the package under test inside the monorepo copy.
const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-anthropic/package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Anthropic has other workspaces as devDependencies, but tagged as `workspace:*`
// for the version. Update these to be `latest` for the test, since the sibling
// workspaces are not present in the test environment. Optional chaining guards
// a package.json without a devDependencies section (the original threw).
if (currentPackageJson.devDependencies?.["@langchain/community"]) {
  currentPackageJson.devDependencies = {
    ...currentPackageJson.devDependencies,
    "@langchain/community": "latest",
  };
}

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai/test-with-latest-deps.sh
#!/usr/bin/env bash

# Runs @langchain/openai tests against the LATEST published @langchain/core.

set -euxo pipefail

export CI=true

# New monorepo directory paths
monorepo_dir="/app/monorepo"
monorepo_openai_dir="/app/monorepo/libs/langchain-openai"
# Updater script will not live inside the monorepo
updater_script_dir="/app/updater_script"
# Original directory paths
original_updater_script_dir="/scripts/with_standard_tests/openai/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh openai

mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"
cd "$updater_script_dir"
# Update any workspace dep to the latest version since not all workspaces are
# available in the test environment.
node "update_resolutions_latest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/openai` to build and run tests
# We need to run inside the openai directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_openai_dir"
yarn add @langchain/core
yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai/test-with-lowest-deps.sh
#!/usr/bin/env bash

# Runs @langchain/openai tests against the LOWEST @langchain/core version
# allowed by its declared peer-dependency range.

set -euxo pipefail

export CI=true

monorepo_dir="/app/monorepo"
monorepo_openai_dir="/app/monorepo/libs/langchain-openai"
# NOTE: the original script assigned updater_script_dir twice; the duplicate
# (harmless but redundant) assignment has been removed.
updater_script_dir="/app/updater_script"
original_updater_script_dir="/scripts/with_standard_tests/openai/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh openai

# Copy the updater script to the monorepo
mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"

# Install deps (e.g semver) for the updater script
cd "$updater_script_dir"
yarn

# Run the updater script
node "update_resolutions_lowest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/openai` to build and run tests
# We need to run inside the package directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_openai_dir"

# Read the @langchain/core version from peerDependencies
core_version=$(node -p "require('./package.json').peerDependencies['@langchain/core']")

# Install @langchain/core at the specified version
yarn add @langchain/core@$core_version

yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai/node/update_resolutions_latest.js
const fs = require("fs");

// package.json of the package under test inside the monorepo copy.
const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-openai/package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Move @langchain/core from devDependencies to a "latest" peerDependency so
// the dependency-range test installs the newest published core.
// Optional chaining + spread guard against a package.json missing either
// section (the original threw a TypeError in that case).
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
  currentPackageJson.peerDependencies = {
    ...currentPackageJson.peerDependencies,
    "@langchain/core": "latest",
  };
}

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai/node/yarn.lock
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai/node/package.json
{ "name": "dependency-range-tests", "version": "0.0.0", "private": true, "workspaces": [ "libs/*" ], "description": "Tests dependency ranges for LangChain.", "dependencies": { "semver": "^7.5.4" } }
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/openai/node/update_resolutions_lowest.js
const fs = require("fs");
const semver = require("semver");

// package.json of the package under test inside the monorepo copy.
const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-openai/package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Pin @langchain/core to the minimum version satisfying the declared peer
// range (release-candidate ranges are skipped) so tests run against the
// lowest supported core release. Optional chaining guards packages without a
// peerDependencies section (the original threw a TypeError).
const coreRange = currentPackageJson.peerDependencies?.["@langchain/core"];
if (coreRange && !coreRange.includes("rc")) {
  currentPackageJson.peerDependencies = {
    ...currentPackageJson.peerDependencies,
    "@langchain/core": semver.minVersion(coreRange).version,
  };
}

// Drop the workspace devDependency so the pinned peer version wins at install.
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
}

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere/test-with-latest-deps.sh
#!/usr/bin/env bash

# Runs @langchain/cohere tests against the LATEST published @langchain/core.

set -euxo pipefail

export CI=true

# New monorepo directory paths
monorepo_dir="/app/monorepo"
monorepo_cohere_dir="/app/monorepo/libs/langchain-cohere"
# Updater script will not live inside the monorepo
updater_script_dir="/app/updater_script"
# Original directory paths
original_updater_script_dir="/scripts/with_standard_tests/cohere/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh cohere

mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"
cd "$updater_script_dir"
# Update any workspace dep to the latest version since not all workspaces are
# available in the test environment.
node "update_resolutions_latest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/cohere` to build and run tests
# We need to run inside the cohere directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_cohere_dir"
yarn add @langchain/core
yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere/test-with-lowest-deps.sh
#!/usr/bin/env bash

# Runs @langchain/cohere tests against the LOWEST @langchain/core version
# allowed by its declared peer-dependency range.

set -euxo pipefail

export CI=true

monorepo_dir="/app/monorepo"
monorepo_cohere_dir="/app/monorepo/libs/langchain-cohere"
# NOTE: the original script assigned updater_script_dir twice; the duplicate
# (harmless but redundant) assignment has been removed.
updater_script_dir="/app/updater_script"
original_updater_script_dir="/scripts/with_standard_tests/cohere/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh cohere

# Copy the updater script to the monorepo
mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"

# Install deps (e.g semver) for the updater script
cd "$updater_script_dir"
yarn

# Run the updater script
node "update_resolutions_lowest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/cohere` to build and run tests
# We need to run inside the cohere directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_cohere_dir"

# Read the @langchain/core version from peerDependencies
core_version=$(node -p "require('./package.json').peerDependencies['@langchain/core']")

# Install @langchain/core at the specified version
yarn add @langchain/core@$core_version

yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere/node/update_resolutions_latest.js
const fs = require("fs");

// package.json of the package under test inside the monorepo copy.
const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-cohere/package.json";

const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// Move @langchain/core from devDependencies to a "latest" peerDependency so
// the dependency-range test installs the newest published core.
// Optional chaining + spread guard against a package.json missing either
// section (the original threw a TypeError in that case).
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
  currentPackageJson.peerDependencies = {
    ...currentPackageJson.peerDependencies,
    "@langchain/core": "latest",
  };
}

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(currentPackageJson, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere/node/yarn.lock
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere/node/package.json
{ "name": "dependency-range-tests", "version": "0.0.0", "private": true, "workspaces": [ "libs/*" ], "description": "Tests dependency ranges for LangChain.", "dependencies": { "semver": "^7.5.4" } }
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/cohere/node/update_resolutions_lowest.js
// Pins @langchain/core to the minimum version satisfying the Cohere package's
// declared peerDependency range, so tests exercise the lowest supported
// release. Release-candidate ranges are left untouched.
const fs = require("fs");
const semver = require("semver");

const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-cohere/package.json";
const pkg = JSON.parse(fs.readFileSync(communityPackageJsonPath));

const coreRange = pkg.peerDependencies["@langchain/core"];
if (coreRange && !coreRange.includes("rc")) {
  // Resolve the smallest concrete version allowed by the range.
  pkg.peerDependencies = {
    ...pkg.peerDependencies,
    "@langchain/core": semver.minVersion(coreRange).version,
  };
}

// The workspace devDependency would shadow the pinned peer version.
if (pkg.devDependencies["@langchain/core"]) {
  delete pkg.devDependencies["@langchain/core"];
}

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(pkg, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/test-with-latest-deps.sh
#!/usr/bin/env bash

# Runs the @langchain/community test suite against the latest published
# @langchain/core inside the test container.
set -euxo pipefail

export CI=true

# New monorepo directory paths
monorepo_dir="/app/monorepo"
monorepo_community_dir="/app/monorepo/libs/langchain-community"

# Updater script will not live inside the monorepo
updater_script_dir="/app/updater_script"

# Original directory paths
original_updater_script_dir="/scripts/with_standard_tests/community/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh community

mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"
cd "$updater_script_dir"
# Update any workspace dep to the latest version since not all workspaces are
# available in the test environment.
node "update_resolutions_latest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/community` to build and run tests
# We need to run inside the community directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_community_dir"
yarn add @langchain/core
yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/npm-install.sh
#!/usr/bin/env bash

# Smoke-tests that the @langchain/community package installs cleanly with npm
# in production mode (no devDependencies).
set -euxo pipefail

export CI=true

# New monorepo directory paths
monorepo_dir="/app/monorepo"
# NOTE(review): unused below; presumably kept for parity with sibling scripts.
monorepo_community_dir="/app/monorepo/libs/langchain-community"

# Updater script will not live inside the monorepo
updater_script_dir="/app/updater_script"

# Original directory paths
original_updater_script_dir="/scripts/with_standard_tests/community/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh community

mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"
cd "$updater_script_dir"
# Strip devDependencies so `npm install --production` resolves only runtime deps.
node "update_resolutions_npm.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
npm install @langchain/core --production
npm install --production
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/test-with-lowest-deps.sh
#!/usr/bin/env bash

# Runs the @langchain/community test suite against the LOWEST @langchain/core
# version permitted by its peerDependency range.
set -euxo pipefail

export CI=true

monorepo_dir="/app/monorepo"
monorepo_community_dir="/app/monorepo/libs/langchain-community"
# Updater script will not live inside the monorepo.
# (Fixed: this assignment was previously duplicated.)
updater_script_dir="/app/updater_script"
original_updater_script_dir="/scripts/with_standard_tests/community/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh community

# Copy the updater script to the monorepo
mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"

# Install deps (e.g. semver) for the updater script
cd "$updater_script_dir"
yarn
# Run the updater script
node "update_resolutions_lowest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/package` to build and run tests
# We need to run inside the package directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_community_dir"

# Read the @langchain/core version from peerDependencies
core_version=$(node -p "require('./package.json').peerDependencies['@langchain/core']")

# Install @langchain/core at the specified version.
# Quoted to prevent word splitting if the resolved range contains spaces.
yarn add "@langchain/core@$core_version"
yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/node/update_resolutions_latest.js
// Rewrites the community package's package.json so dependency-range tests run
// against the latest published @langchain/core and @langchain/openai instead
// of the local workspace copies.
const fs = require("fs");

const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-community/package.json";
const currentPackageJson = JSON.parse(
  fs.readFileSync(communityPackageJsonPath, "utf8")
);

// Optional chaining guards against package.json files that lack a
// devDependencies/dependencies section, which would otherwise throw.
if (currentPackageJson.devDependencies?.["@langchain/core"]) {
  delete currentPackageJson.devDependencies["@langchain/core"];
  currentPackageJson.peerDependencies = {
    ...currentPackageJson.peerDependencies,
    "@langchain/core": "latest",
  };
}

if (currentPackageJson.dependencies?.["@langchain/openai"]) {
  // Delete-then-reassign (rather than plain assignment) moves the key to the
  // end of the object, matching the original script's output ordering.
  delete currentPackageJson.dependencies["@langchain/openai"];
  currentPackageJson.dependencies["@langchain/openai"] = "latest";
}

fs.writeFileSync(
  communityPackageJsonPath,
  JSON.stringify(currentPackageJson, null, 2)
);
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/node/yarn.lock
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/node/package.json
{ "name": "dependency-range-tests", "version": "0.0.0", "private": true, "workspaces": [ "libs/*" ], "description": "Tests dependency ranges for LangChain.", "dependencies": { "semver": "^7.5.4" } }
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/node/update_resolutions_lowest.js
// Pins @langchain/core (peerDependencies) and @langchain/openai (dependencies)
// to the minimum versions satisfying their declared ranges, and removes the
// workspace devDependencies that would shadow them. Release-candidate ranges
// are left untouched. The duplicated per-package logic of the original is
// factored into two helpers.
const fs = require("fs");
const semver = require("semver");

const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-community/package.json";
const currentPackageJson = JSON.parse(
  fs.readFileSync(communityPackageJsonPath)
);

// Pin `name` within `section` ("dependencies" | "peerDependencies") to the
// smallest version satisfying its range, skipping "rc" pre-release ranges.
function pinToMinVersion(section, name) {
  const range = currentPackageJson[section]?.[name];
  if (range && !range.includes("rc")) {
    // Spread-copy preserves existing key order (the overridden key keeps
    // its original position), matching the original script's output.
    currentPackageJson[section] = {
      ...currentPackageJson[section],
      [name]: semver.minVersion(range).version,
    };
  }
}

// Remove the workspace devDependency on `name`, if present.
function dropDevDependency(name) {
  if (currentPackageJson.devDependencies?.[name]) {
    delete currentPackageJson.devDependencies[name];
  }
}

pinToMinVersion("peerDependencies", "@langchain/core");
dropDevDependency("@langchain/core");
pinToMinVersion("dependencies", "@langchain/openai");
dropDevDependency("@langchain/openai");

fs.writeFileSync(
  communityPackageJsonPath,
  JSON.stringify(currentPackageJson, null, 2)
);
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/community/node/update_resolutions_npm.js
// Strips the devDependencies section entirely so `npm install --production`
// resolves only runtime dependencies for the community package.
const fs = require("fs");

const communityPackageJsonPath =
  "/app/monorepo/libs/langchain-community/package.json";
const pkg = JSON.parse(fs.readFileSync(communityPackageJsonPath));

// `delete` on an absent property is a no-op, so no guard is required.
delete pkg.devDependencies;

fs.writeFileSync(communityPackageJsonPath, JSON.stringify(pkg, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-latest-deps.sh
#!/usr/bin/env bash

# Runs the @langchain/google-vertexai test suite against the latest published
# @langchain/core inside the test container.
set -euxo pipefail

export CI=true

# New monorepo directory paths
monorepo_dir="/app/monorepo"
monorepo_vertexai_dir="/app/monorepo/libs/langchain-google-vertexai"

# Updater script will not live inside the monorepo
updater_script_dir="/app/updater_script"

# Original directory paths
original_updater_script_dir="/scripts/with_standard_tests/google-vertexai/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh google-vertexai

mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"
cd "$updater_script_dir"
# Update any workspace dep to the latest version since not all workspaces are
# available in the test environment.
node "update_resolutions_latest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
# Echo the manifest for debugging in CI logs.
cat ./package.json
yarn

# Navigate into `@langchain/google-vertexai` to build and run tests
# We need to run inside the google-vertexai directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_vertexai_dir"
yarn add @langchain/core @langchain/google-gauth
yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-lowest-deps.sh
#!/usr/bin/env bash

# Runs the @langchain/google-vertexai test suite against the LOWEST
# @langchain/core version permitted by its peerDependency range.
set -euxo pipefail

export CI=true

monorepo_dir="/app/monorepo"
monorepo_vertexai_dir="/app/monorepo/libs/langchain-google-vertexai"
# Updater script will not live inside the monorepo.
# (Fixed: this assignment was previously duplicated.)
updater_script_dir="/app/updater_script"
original_updater_script_dir="/scripts/with_standard_tests/google-vertexai/node"

# Run the shared script to copy all necessary folders/files
bash /scripts/with_standard_tests/shared.sh google-vertexai

# Copy the updater script to the monorepo
mkdir -p "$updater_script_dir"
cp "$original_updater_script_dir"/* "$updater_script_dir/"

# Install deps (e.g. semver) for the updater script
cd "$updater_script_dir"
yarn
# Run the updater script
node "update_resolutions_lowest.js"

# Navigate back to monorepo root and install dependencies
cd "$monorepo_dir"
touch yarn.lock
yarn

# Navigate into `@langchain/package` to build and run tests
# We need to run inside the package directory so turbo repo does
# not try to build the package/its workspace dependencies.
cd "$monorepo_vertexai_dir"

# Read the @langchain/core version from peerDependencies
core_version=$(node -p "require('./package.json').peerDependencies['@langchain/core']")

# Install @langchain/core at the specified version.
# Quoted to prevent word splitting if the resolved range contains spaces.
yarn add "@langchain/core@$core_version"
yarn test
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/update_resolutions_latest.js
// Points the Vertex AI package's @langchain/core dependency at the latest
// published release instead of the local workspace copy: the workspace
// devDependency is dropped and the peerDependency is set to "latest".
const fs = require("fs");

const packageJsonPath =
  "/app/monorepo/libs/langchain-google-vertexai/package.json";
const pkg = JSON.parse(fs.readFileSync(packageJsonPath));

if (pkg.devDependencies["@langchain/core"]) {
  delete pkg.devDependencies["@langchain/core"];
  pkg.peerDependencies["@langchain/core"] = "latest";
}

fs.writeFileSync(packageJsonPath, JSON.stringify(pkg, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/yarn.lock
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/package.json
{ "name": "dependency-range-tests", "version": "0.0.0", "private": true, "workspaces": [ "libs/*" ], "description": "Tests dependency ranges for LangChain.", "dependencies": { "semver": "^7.5.4" }, "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e" }
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/update_resolutions_lowest.js
// Pins @langchain/core (peerDependencies) and @langchain/google-gauth
// (dependencies) to the minimum versions satisfying their declared ranges,
// and drops the workspace devDependency on @langchain/core. Ranges containing
// "rc" (release candidates) are left untouched.
const fs = require("fs");
const semver = require("semver");

const packageJsonPath =
  "/app/monorepo/libs/langchain-google-vertexai/package.json";
const pkg = JSON.parse(fs.readFileSync(packageJsonPath));

const coreRange = pkg.peerDependencies["@langchain/core"];
if (coreRange && !coreRange.includes("rc")) {
  pkg.peerDependencies = {
    ...pkg.peerDependencies,
    "@langchain/core": semver.minVersion(coreRange).version,
  };
}

// The workspace devDependency would shadow the pinned peer version.
if (pkg.devDependencies["@langchain/core"]) {
  delete pkg.devDependencies["@langchain/core"];
}

const gauthRange = pkg.dependencies["@langchain/google-gauth"];
if (gauthRange && !gauthRange.includes("rc")) {
  pkg.dependencies = {
    ...pkg.dependencies,
    "@langchain/google-gauth": semver.minVersion(gauthRange).version,
  };
}

fs.writeFileSync(packageJsonPath, JSON.stringify(pkg, null, 2));
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/node/yarn.lock
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. # yarn lockfile v1 lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/node/update_workspace_dependencies.js
// Replaces workspace-only versions in the standard-tests package with
// registry-resolvable ones: @langchain/core -> "latest",
// @langchain/scripts -> "*" (any version).
const fs = require("fs");

const standardTestsPackageJsonPath =
  "/app/monorepo/libs/langchain-standard-tests/package.json";
const pkg = JSON.parse(fs.readFileSync(standardTestsPackageJsonPath));

if (pkg.dependencies["@langchain/core"]) {
  pkg.dependencies = {
    ...pkg.dependencies,
    "@langchain/core": "latest",
  };
}

if (pkg.devDependencies["@langchain/scripts"]) {
  pkg.devDependencies = {
    ...pkg.devDependencies,
    "@langchain/scripts": "*",
  };
}

fs.writeFileSync(
  standardTestsPackageJsonPath,
  JSON.stringify(pkg, null, 2)
);
0
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests
lc_public_repos/langchainjs/dependency_range_tests/scripts/with_standard_tests/node/package.json
{ "name": "dependency-range-tests", "version": "0.0.0", "private": true, "workspaces": [ "libs/*" ], "description": "Tests dependency ranges for LangChain.", "dependencies": {} }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/tsconfig.json
{ "compilerOptions": { "target": "es5", "lib": ["dom", "dom.iterable", "esnext"], "allowJs": true, "skipLibCheck": true, "strict": true, "noEmit": true, "esModuleInterop": true, "module": "esnext", "moduleResolution": "bundler", "resolveJsonModule": true, "isolatedModules": true, "jsx": "preserve", "incremental": true, "plugins": [ { "name": "next" } ], "paths": { "@/*": ["./src/*"] } }, "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], "exclude": ["node_modules"] }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/.eslintrc.json
{ "extends": "next/core-web-vitals" }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/blacklisted-entrypoints.json
[ "../../langchain/src/load.ts", "../../langchain/src/load/serializable.ts", "../../langchain/src/agents/toolkits/connery.ts", "../../langchain/src/tools/aws_lambda.ts", "../../langchain/src/tools/aws_sfn.ts", "../../langchain/src/tools/connery.ts", "../../langchain/src/tools/gmail.ts", "../../langchain/src/tools/google_places.ts", "../../langchain/src/embeddings/bedrock.ts", "../../langchain/src/embeddings/cloudflare_workersai.ts", "../../langchain/src/embeddings/ollama.ts", "../../langchain/src/embeddings/cohere.ts", "../../langchain/src/embeddings/tensorflow.ts", "../../langchain/src/embeddings/hf.ts", "../../langchain/src/embeddings/hf_transformers.ts", "../../langchain/src/embeddings/googlevertexai.ts", "../../langchain/src/embeddings/googlepalm.ts", "../../langchain/src/embeddings/minimax.ts", "../../langchain/src/embeddings/voyage.ts", "../../langchain/src/embeddings/llama_cpp.ts", "../../langchain/src/embeddings/gradient_ai.ts", "../../langchain/src/llms/ai21.ts", "../../langchain/src/llms/aleph_alpha.ts", "../../langchain/src/llms/cloudflare_workersai.ts", "../../langchain/src/llms/cohere.ts", "../../langchain/src/llms/hf.ts", "../../langchain/src/llms/raycast.ts", "../../langchain/src/llms/ollama.ts", "../../langchain/src/llms/replicate.ts", "../../langchain/src/llms/fireworks.ts", "../../langchain/src/llms/googlevertexai.ts", "../../langchain/src/llms/googlevertexai/web.ts", "../../langchain/src/llms/googlepalm.ts", "../../langchain/src/llms/gradient_ai.ts", "../../langchain/src/llms/sagemaker_endpoint.ts", "../../langchain/src/llms/watsonx_ai.ts", "../../langchain/src/llms/bedrock.ts", "../../langchain/src/llms/bedrock/web.ts", "../../langchain/src/llms/llama_cpp.ts", "../../langchain/src/llms/writer.ts", "../../langchain/src/llms/portkey.ts", "../../langchain/src/llms/yandex.ts", "../../langchain/src/vectorstores/clickhouse.ts", "../../langchain/src/vectorstores/analyticdb.ts", "../../langchain/src/vectorstores/cassandra.ts", 
"../../langchain/src/vectorstores/convex.ts", "../../langchain/src/vectorstores/elasticsearch.ts", "../../langchain/src/vectorstores/cloudflare_vectorize.ts", "../../langchain/src/vectorstores/closevector/web.ts", "../../langchain/src/vectorstores/closevector/node.ts", "../../langchain/src/vectorstores/chroma.ts", "../../langchain/src/vectorstores/googlevertexai.ts", "../../langchain/src/vectorstores/hnswlib.ts", "../../langchain/src/vectorstores/hanavector.ts", "../../langchain/src/vectorstores/faiss.ts", "../../langchain/src/vectorstores/weaviate.ts", "../../langchain/src/vectorstores/lancedb.ts", "../../langchain/src/vectorstores/momento_vector_index.ts", "../../langchain/src/vectorstores/mongodb_atlas.ts", "../../langchain/src/vectorstores/pinecone.ts", "../../langchain/src/vectorstores/qdrant.ts", "../../langchain/src/vectorstores/supabase.ts", "../../langchain/src/vectorstores/opensearch.ts", "../../langchain/src/vectorstores/pgvector.ts", "../../langchain/src/vectorstores/milvus.ts", "../../langchain/src/vectorstores/neo4j_vector.ts", "../../langchain/src/vectorstores/prisma.ts", "../../langchain/src/vectorstores/typeorm.ts", "../../langchain/src/vectorstores/myscale.ts", "../../langchain/src/vectorstores/redis.ts", "../../langchain/src/vectorstores/rockset.ts", "../../langchain/src/vectorstores/typesense.ts", "../../langchain/src/vectorstores/singlestore.ts", "../../langchain/src/vectorstores/tigris.ts", "../../langchain/src/vectorstores/usearch.ts", "../../langchain/src/vectorstores/vectara.ts", "../../langchain/src/vectorstores/vercel_postgres.ts", "../../langchain/src/vectorstores/voy.ts", "../../langchain/src/vectorstores/xata.ts", "../../langchain/src/vectorstores/zep.ts", "../../langchain/src/memory/zep.ts", "../../langchain/src/document_transformers/html_to_text.ts", "../../langchain/src/document_transformers/mozilla_readability.ts", "../../langchain/src/chat_models/portkey.ts", "../../langchain/src/chat_models/bedrock.ts", 
"../../langchain/src/chat_models/bedrock/web.ts", "../../langchain/src/chat_models/cloudflare_workersai.ts", "../../langchain/src/chat_models/googlevertexai.ts", "../../langchain/src/chat_models/googlevertexai/web.ts", "../../langchain/src/chat_models/googlepalm.ts", "../../langchain/src/chat_models/fireworks.ts", "../../langchain/src/chat_models/baiduwenxin.ts", "../../langchain/src/chat_models/iflytek_xinghuo.ts", "../../langchain/src/chat_models/iflytek_xinghuo/web.ts", "../../langchain/src/chat_models/ollama.ts", "../../langchain/src/chat_models/minimax.ts", "../../langchain/src/chat_models/llama_cpp.ts", "../../langchain/src/chat_models/yandex.ts", "../../langchain/src/callbacks/handlers/llmonitor.ts", "../../langchain/src/retrievers/amazon_kendra.ts", "../../langchain/src/retrievers/supabase.ts", "../../langchain/src/retrievers/zep.ts", "../../langchain/src/retrievers/metal.ts", "../../langchain/src/retrievers/chaindesk.ts", "../../langchain/src/retrievers/databerry.ts", "../../langchain/src/retrievers/vectara_summary.ts", "../../langchain/src/retrievers/tavily_search_api.ts", "../../langchain/src/retrievers/vespa.ts", "../../langchain/src/stores/doc/in_memory.ts", "../../langchain/src/stores/message/cassandra.ts", "../../langchain/src/stores/message/convex.ts", "../../langchain/src/stores/message/cloudflare_d1.ts", "../../langchain/src/stores/message/in_memory.ts", "../../langchain/src/stores/message/dynamodb.ts", "../../langchain/src/stores/message/firestore.ts", "../../langchain/src/stores/message/momento.ts", "../../langchain/src/stores/message/mongodb.ts", "../../langchain/src/stores/message/redis.ts", "../../langchain/src/stores/message/ioredis.ts", "../../langchain/src/stores/message/upstash_redis.ts", "../../langchain/src/stores/message/planetscale.ts", "../../langchain/src/stores/message/xata.ts", "../../langchain/src/storage/convex.ts", "../../langchain/src/storage/ioredis.ts", "../../langchain/src/storage/vercel_kv.ts", 
"../../langchain/src/storage/upstash_redis.ts", "../../langchain/src/graphs/neo4j_graph.ts", "../../langchain/src/util/convex.ts", "../../langchain/src/runnables.ts", "../../libs/langchain-community/src/chat_models/yandex.ts", "../../libs/langchain-community/src/llms/yandex.ts", "../../langchain/src/schema/output_parser.ts", "../../langchain/src/document.ts", "../../langchain/src/callbacks/index.ts" ]
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/README.md
# Auto-generated API documentation for LangChainJS Do not edit the contents of this directory directly. ## Usage To build the API refs run `yarn build` from the root of this directory, then `yarn dev` or `yarn start` to serve the docs locally. This app uses [Typedoc](https://typedoc.org/) to generate API references from the source code. The generated HTML is then placed inside the `/public` directory, which is served by [Next.js](https://nextjs.org/). There is a default redirect when requests are made to `/` which redirects to `/index.html`. The API references are gitignored by default, so they will not be committed to the repo.
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/package.json
{ "name": "api_refs", "version": "0.1.0", "private": true, "scripts": { "dev": "next dev -p 3001", "typedoc:build": "npx typedoc --options typedoc.json", "build:scripts": "node ./scripts/create-entrypoints.js && yarn typedoc:build && node ./scripts/update-typedoc-css.js", "build": "yarn clean && yarn build:scripts && next build", "start": "yarn build && next start -p 3001", "lint": "next lint", "format": "prettier --write \"**/*.{js,jsx,ts,tsx}\"", "format:check": "prettier --check \"**/*.{js,jsx,ts,tsx}\"", "clean": "rm -rf .next .turbo public/ && mkdir public" }, "dependencies": { "next": "14.0.1", "react": "^18", "react-dom": "^18" }, "devDependencies": { "@types/node": "^20", "@types/react": "^18", "@types/react-dom": "^18", "autoprefixer": "^10.0.1", "eslint": "^8", "eslint-config-next": "14.0.1", "glob": "^10.3.10", "postcss": "^8", "prettier": "^2.8.3", "tailwindcss": "^3.3.0", "ts-morph": "^23.0.0", "typedoc": "^0.26.1", "typedoc-plugin-expand-object-like-types": "^0.1.2", "typescript": "~5.1.6" } }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/tailwind.config.ts
import type { Config } from "tailwindcss";

// Tailwind configuration for the API-refs Next.js app.
const config: Config = {
  // Files scanned for class names so unused styles can be purged.
  content: [
    "./src/pages/**/*.{js,ts,jsx,tsx,mdx}",
    "./src/components/**/*.{js,ts,jsx,tsx,mdx}",
    "./src/app/**/*.{js,ts,jsx,tsx,mdx}",
  ],
  theme: {
    extend: {
      // Custom gradient utilities (bg-gradient-radial / bg-gradient-conic).
      backgroundImage: {
        "gradient-radial": "radial-gradient(var(--tw-gradient-stops))",
        "gradient-conic":
          "conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))",
      },
    },
  },
  plugins: [],
};
export default config;
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/vercel.json
{ "buildCommand": "yarn build", "trailingSlash": false, "redirects": [ { "source": "/:path*/langchain_load:rest", "destination": "/:path*/langchain.load:rest" }, { "source": "/:path*/langchain_agents:rest", "destination": "/:path*/langchain.agents:rest" }, { "source": "/:path*/langchain_tools:rest", "destination": "/:path*/langchain.tools:rest" }, { "source": "/:path*/langchain_chains:rest", "destination": "/:path*/langchain.chains:rest" }, { "source": "/:path*/langchain_chat_models:rest", "destination": "/:path*/langchain.chat_models:rest" }, { "source": "/:path*/langchain_embeddings:rest", "destination": "/:path*/langchain.embeddings:rest" }, { "source": "/:path*/langchain_vectorstores:rest", "destination": "/:path*/langchain.vectorstores:rest" }, { "source": "/:path*/langchain_text_splitter:rest", "destination": "/:path*/langchain.text_splitter:rest" }, { "source": "/:path*/langchain_memory:rest", "destination": "/:path*/langchain.memory:rest" }, { "source": "/:path*/langchain_document:rest", "destination": "/:path*/langchain.document:rest" }, { "source": "/:path*/langchain_document_loaders:rest", "destination": "/:path*/langchain.document_loaders:rest" }, { "source": "/:path*/langchain_document_transformers:rest", "destination": "/:path*/langchain.document_transformers:rest" }, { "source": "/:path*/langchain_sql_db:rest", "destination": "/:path*/langchain.sql_db:rest" }, { "source": "/:path*/langchain_callbacks:rest", "destination": "/:path*/langchain.callbacks:rest" }, { "source": "/:path*/langchain_output_parsers:rest", "destination": "/:path*/langchain.output_parsers:rest" }, { "source": "/:path*/langchain_retrievers:rest", "destination": "/:path*/langchain.retrievers:rest" }, { "source": "/:path*/langchain_cache:rest", "destination": "/:path*/langchain.cache:rest" }, { "source": "/:path*/langchain_stores:rest", "destination": "/:path*/langchain.stores:rest" }, { "source": "/:path*/langchain_storage:rest", "destination": "/:path*/langchain.storage:rest" }, { 
"source": "/:path*/langchain_hub:rest", "destination": "/:path*/langchain.hub:rest" }, { "source": "/:path*/langchain_util:rest", "destination": "/:path*/langchain.util:rest" }, { "source": "/:path*/langchain_experimental:rest", "destination": "/:path*/langchain.experimental:rest" }, { "source": "/:path*/langchain_evaluation:rest", "destination": "/:path*/langchain.evaluation:rest" }, { "source": "/:path*/langchain_smith:rest", "destination": "/:path*/langchain.smith:rest" }, { "source": "/:path*/langchain_runnables:rest", "destination": "/:path*/langchain.runnables:rest" }, { "source": "/:path*/langchain_indexes:rest", "destination": "/:path*/langchain.indexes:rest" }, { "source": "/:path*/langchain_schema:rest", "destination": "/:path*/langchain.schema:rest" }, { "source": "/:path*/langchain_core_:rest", "destination": "/:path*/_langchain_core.:rest" }, { "source": "/:path*/langchain_core.:rest", "destination": "/:path*/_langchain_core.:rest" }, { "source": "/:path*/langchain_anthropic_experimental(_|\\.):rest", "destination": "/:path*/_langchain_anthropic.experimental.:rest" }, { "source": "/:path*/langchain_anthropic.ChatAnthropic.:rest", "destination": "/:path*/_langchain_anthropic.index.ChatAnthropic.:rest" }, { "source": "/:path*/langchain_anthropic.ChatAnthropicMessages.:rest", "destination": "/:path*/_langchain_anthropic.index.ChatAnthropicMessages.:rest" }, { "source": "/:path*/langchain_anthropic.AnthropicInput.:rest", "destination": "/:path*/_langchain_anthropic.index.AnthropicInput.:rest" }, { "source": "/:path*/langchain_anthropic.ChatAnthropicCallOptions.:rest", "destination": "/:path*/_langchain_anthropic.index.ChatAnthropicCallOptions.:rest" }, { "source": "/:path*/langchain_aws(_|\\.):rest", "destination": "/:path*/_langchain_aws.:rest" }, { "source": "/:path*/langchain_azure_cosmosdb(_|\\.):rest", "destination": "/:path*/_langchain_azure_cosmosdb.:rest" }, { "source": "/:path*/langchain_azure_dynamic_sessions(_|\\.):rest", "destination": 
"/:path*/_langchain_azure_dynamic_sessions.:rest" }, { "source": "/:path*/langchain_baidu_qianfan(_|\\.):rest", "destination": "/:path*/_langchain_baidu_qianfan.:rest" }, { "source": "/:path*/langchain_cloudflare_langgraph_checkpointers(_|\\.):rest", "destination": "/:path*/_langchain_cloudflare.langgraph_checkpointers.:rest" }, { "source": "/:path*/langchain_cloudflare.ChatCloudflareWorkersAI:rest", "destination": "/:path*/_langchain_cloudflare.index.ChatCloudflareWorkersAI:rest" }, { "source": "/:path*/langchain_cloudflare.CloudflareD1MessageHistory:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareD1MessageHistory:rest" }, { "source": "/:path*/langchain_cloudflare.CloudflareKVCache:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareKVCache:rest" }, { "source": "/:path*/langchain_cloudflare.CloudflareVectorizeStore:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareVectorizeStore:rest" }, { "source": "/:path*/langchain_cloudflare.CloudflareWorkersAI:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareWorkersAI:rest" }, { "source": "/:path*/langchain_cloudflare.CloudflareWorkersAIEmbeddings:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareWorkersAIEmbeddings:rest" }, { "source": "/:path*/langchain_cloudflare.ChatCloudflareWorkersAICallOptions:rest", "destination": "/:path*/_langchain_cloudflare.index.ChatCloudflareWorkersAICallOptions:rest" }, { "source": "/:path*/langchain_cloudflare.CloudflareWorkersAIEmbeddingsParams:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareWorkersAIEmbeddingsParams:rest" }, { "source": "/:path*/langchain_cloudflare.CloudflareWorkersAIInput:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareWorkersAIInput:rest" }, { "source": "/:path*/langchain_cloudflare.VectorizeLibArgs:rest", "destination": "/:path*/_langchain_cloudflare.index.VectorizeLibArgs:rest" }, { "source": 
"/:path*/langchain_cloudflare.CloudflareD1MessageHistoryInput:rest", "destination": "/:path*/_langchain_cloudflare.index.CloudflareD1MessageHistoryInput:rest" }, { "source": "/:path*/langchain_cloudflare.VectorizeDeleteParams:rest", "destination": "/:path*/_langchain_cloudflare.index.VectorizeDeleteParams:rest" }, { "source": "/:path*/langchain_cohere(_|\\.):rest", "destination": "/:path*/_langchain_cohere.:rest" }, { "source": "/:path*/langchain_community_:rest", "destination": "/:path*/_langchain_community.:rest" }, { "source": "/:path*/langchain_exa(_|\\.):rest", "destination": "/:path*/_langchain_exa.:rest" }, { "source": "/:path*/langchain_google_common_types(_|\\.):rest", "destination": "/:path*/_langchain_google_common.types.:rest" }, { "source": "/:path*/langchain_google_common_utils(_|\\.):rest", "destination": "/:path*/_langchain_google_common.utils.:rest" }, { "source": "/:path*/langchain_google_common.AbstractGoogleLLMConnection.:rest", "destination": "/:path*/_langchain_google_common.index.AbstractGoogleLLMConnection.:rest" }, { "source": "/:path*/langchain_google_common.ApiKeyGoogleAuth.:rest", "destination": "/:path*/_langchain_google_common.index.ApiKeyGoogleAuth.:rest" }, { "source": "/:path*/langchain_google_common.BaseGoogleEmbeddings.:rest", "destination": "/:path*/_langchain_google_common.index.BaseGoogleEmbeddings.:rest" }, { "source": "/:path*/langchain_google_common.ChatGoogleBase.:rest", "destination": "/:path*/_langchain_google_common.index.ChatGoogleBase.:rest" }, { "source": "/:path*/langchain_google_common.ComplexJsonStream.:rest", "destination": "/:path*/_langchain_google_common.index.ComplexJsonStream.:rest" }, { "source": "/:path*/langchain_google_common.GoogleAIConnection.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleAIConnection.:rest" }, { "source": "/:path*/langchain_google_common.GoogleAbstractedFetchClient.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleAbstractedFetchClient.:rest" }, 
{ "source": "/:path*/langchain_google_common.GoogleBaseLLM.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleBaseLLM.:rest" }, { "source": "/:path*/langchain_google_common.GoogleConnection.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleConnection.:rest" }, { "source": "/:path*/langchain_google_common.GoogleHostConnection.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleHostConnection.:rest" }, { "source": "/:path*/langchain_google_common.JsonStream.:rest", "destination": "/:path*/_langchain_google_common.index.JsonStream.:rest" }, { "source": "/:path*/langchain_google_common.ReadableJsonStream.:rest", "destination": "/:path*/_langchain_google_common.index.ReadableJsonStream.:rest" }, { "source": "/:path*/langchain_google_common.BaseGoogleEmbeddingsOptions.:rest", "destination": "/:path*/_langchain_google_common.index.BaseGoogleEmbeddingsOptions.:rest" }, { "source": "/:path*/langchain_google_common.BaseGoogleEmbeddingsParams.:rest", "destination": "/:path*/_langchain_google_common.index.BaseGoogleEmbeddingsParams.:rest" }, { "source": "/:path*/langchain_google_common.ChatGoogleBaseInput.:rest", "destination": "/:path*/_langchain_google_common.index.ChatGoogleBaseInput.:rest" }, { "source": "/:path*/langchain_google_common.GoogleAbstractedClient.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleAbstractedClient.:rest" }, { "source": "/:path*/langchain_google_common.GoogleEmbeddingsInstance.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleEmbeddingsInstance.:rest" }, { "source": "/:path*/langchain_google_common.GoogleEmbeddingsResponse.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleEmbeddingsResponse.:rest" }, { "source": "/:path*/langchain_google_common.GoogleAbstractedClientOps.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleAbstractedClientOps.:rest" }, { "source": 
"/:path*/langchain_google_common.GoogleAbstractedClientOpsMethod.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleAbstractedClientOpsMethod.:rest" }, { "source": "/:path*/langchain_google_common.GoogleAbstractedClientOpsResponseType.:rest", "destination": "/:path*/_langchain_google_common.index.GoogleAbstractedClientOpsResponseType.:rest" }, { "source": "/:path*/langchain_google_common.aiPlatformScope.:rest", "destination": "/:path*/_langchain_google_common.index.aiPlatformScope.:rest" }, { "source": "/:path*/langchain_google_common.complexValue.:rest", "destination": "/:path*/_langchain_google_common.index.complexValue.:rest" }, { "source": "/:path*/langchain_google_common.convertToGeminiTools.:rest", "destination": "/:path*/_langchain_google_common.index.convertToGeminiTools.:rest" }, { "source": "/:path*/langchain_google_common.copyAIModelParams.:rest", "destination": "/:path*/_langchain_google_common.index.copyAIModelParams.:rest" }, { "source": "/:path*/langchain_google_common.copyAIModelParamsInto.:rest", "destination": "/:path*/_langchain_google_common.index.copyAIModelParamsInto.:rest" }, { "source": "/:path*/langchain_google_common.copyAndValidateModelParamsInto.:rest", "destination": "/:path*/_langchain_google_common.index.copyAndValidateModelParamsInto.:rest" }, { "source": "/:path*/langchain_google_common.ensureAuthOptionScopes.:rest", "destination": "/:path*/_langchain_google_common.index.ensureAuthOptionScopes.:rest" }, { "source": "/:path*/langchain_google_common.jsonSchemaToGeminiParameters.:rest", "destination": "/:path*/_langchain_google_common.index.jsonSchemaToGeminiParameters.:rest" }, { "source": "/:path*/langchain_google_common.modelToFamily.:rest", "destination": "/:path*/_langchain_google_common.index.modelToFamily.:rest" }, { "source": "/:path*/langchain_google_common.removeAdditionalProperties.:rest", "destination": "/:path*/_langchain_google_common.index.removeAdditionalProperties.:rest" }, { "source": 
"/:path*/langchain_google_common.simpleValue.:rest", "destination": "/:path*/_langchain_google_common.index.simpleValue.:rest" }, { "source": "/:path*/langchain_google_common.validateModelParams.:rest", "destination": "/:path*/_langchain_google_common.index.validateModelParams.:rest" }, { "source": "/:path*/langchain_google_common.zodToGeminiParameters.:rest", "destination": "/:path*/_langchain_google_common.index.zodToGeminiParameters.:rest" }, { "source": "/:path*/langchain_google_genai(_|\\.):rest", "destination": "/:path*/_langchain_google_genai.:rest" }, { "source": "/:path*/langchain_google_vertexai_types(_|\\.):rest", "destination": "/:path*/_langchain_google_vertexai.types.:rest" }, { "source": "/:path*/langchain_google_vertexai_utils(_|\\.):rest", "destination": "/:path*/_langchain_google_vertexai.utils.:rest" }, { "source": "/:path*/langchain_google_vertexai.ChatVertexAI.:rest", "destination": "/:path*/_langchain_google_vertexai.index.ChatVertexAI.:rest" }, { "source": "/:path*/langchain_google_vertexai.VertexAI.:rest", "destination": "/:path*/_langchain_google_vertexai.index.VertexAI.:rest" }, { "source": "/:path*/langchain_google_vertexai.VertexAIEmbeddings.:rest", "destination": "/:path*/_langchain_google_vertexai.index.VertexAIEmbeddings.:rest" }, { "source": "/:path*/langchain_google_vertexai.ChatVertexAIInput.:rest", "destination": "/:path*/_langchain_google_vertexai.index.ChatVertexAIInput.:rest" }, { "source": "/:path*/langchain_google_vertexai.GoogleVertexAIEmbeddingsInput.:rest", "destination": "/:path*/_langchain_google_vertexai.index.GoogleVertexAIEmbeddingsInput.:rest" }, { "source": "/:path*/langchain_google_vertexai.VertexAIInput.:rest", "destination": "/:path*/_langchain_google_vertexai.index.VertexAIInput.:rest" }, { "source": "/:path*/langchain_google_vertexai_web_types(_|\\.):rest", "destination": "/:path*/_langchain_google_vertexai_web.types.:rest" }, { "source": "/:path*/langchain_google_vertexai_web_utils(_|\\.):rest", "destination": 
"/:path*/_langchain_google_vertexai_web.utils.:rest" }, { "source": "/:path*/langchain_google_vertexai_web.ChatVertexAI.:rest", "destination": "/:path*/_langchain_google_vertexai_web.index.ChatVertexAI.:rest" }, { "source": "/:path*/langchain_google_vertexai_web.VertexAI.:rest", "destination": "/:path*/_langchain_google_vertexai_web.index.VertexAI.:rest" }, { "source": "/:path*/langchain_google_vertexai_web.VertexAIEmbeddings.:rest", "destination": "/:path*/_langchain_google_vertexai_web.index.VertexAIEmbeddings.:rest" }, { "source": "/:path*/langchain_google_vertexai_web.ChatVertexAIInput.:rest", "destination": "/:path*/_langchain_google_vertexai_web.index.ChatVertexAIInput.:rest" }, { "source": "/:path*/langchain_google_vertexai_web.GoogleVertexAIEmbeddingsInput.:rest", "destination": "/:path*/_langchain_google_vertexai_web.index.GoogleVertexAIEmbeddingsInput.:rest" }, { "source": "/:path*/langchain_google_vertexai_web.VertexAIInput.:rest", "destination": "/:path*/_langchain_google_vertexai_web.index.VertexAIInput.:rest" }, { "source": "/:path*/langchain_groq(_|\\.):rest", "destination": "/:path*/_langchain_groq.:rest" }, { "source": "/:path*/langchain_mistralai(_|\\.):rest", "destination": "/:path*/_langchain_mistralai.:rest" }, { "source": "/:path*/langchain_mixedbread_ai(_|\\.):rest", "destination": "/:path*/_langchain_mixedbread_ai.:rest" }, { "source": "/:path*/langchain_mongodb(_|\\.):rest", "destination": "/:path*/_langchain_mongodb.:rest" }, { "source": "/:path*/langchain_nomic(_|\\.):rest", "destination": "/:path*/_langchain_nomic.:rest" }, { "source": "/:path*/langchain_ollama(_|\\.):rest", "destination": "/:path*/_langchain_ollama.:rest" }, { "source": "/:path*/langchain_openai(_|\\.):rest", "destination": "/:path*/_langchain_openai.:rest" }, { "source": "/:path*/langchain_pinecone(_|\\.):rest", "destination": "/:path*/_langchain_pinecone.:rest" }, { "source": "/:path*/langchain_qdrant(_|\\.):rest", "destination": "/:path*/_langchain_qdrant.:rest" }, { 
"source": "/:path*/langchain_redis(_|\\.):rest", "destination": "/:path*/_langchain_redis.:rest" }, { "source": "/:path*/langchain_textsplitters(_|\\.):rest", "destination": "/:path*/_langchain_textsplitters.:rest" }, { "source": "/:path*/langchain_weaviate(_|\\.):rest", "destination": "/:path*/_langchain_weaviate.:rest" }, { "source": "/:path*/langchain_yandex(_|\\.):rest", "destination": "/:path*/_langchain_yandex.:rest" } ] }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/postcss.config.js
// PostCSS pipeline: run Tailwind first, then vendor-prefix the output.
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/typedoc.base.json
{ "$schema": "https://typedoc.org/schema.json", "includeVersion": true }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/api_refs/next.config.js
/** @type {import('next').NextConfig} */
const nextConfig = {
  // The site root has no page of its own; send `/` to the static
  // `/index.html` generated by Typedoc in `public/`.
  // `permanent: false` issues a 307 so the target can change later
  // without browsers caching the redirect.
  async redirects() {
    return [
      {
        source: "/",
        destination: "/index.html",
        permanent: false,
      },
    ];
  },
};

module.exports = nextConfig;
0
lc_public_repos/langchainjs/docs/api_refs
lc_public_repos/langchainjs/docs/api_refs/scripts/create-entrypoints.js
// Generates the per-workspace `typedoc.json` files plus the root
// `./typedoc.json` consumed by the API-reference build. All relative paths
// assume the script runs from `docs/api_refs`.
const fs = require("fs");
const path = require("path");
const { execSync } = require("child_process");

// Root Typedoc configuration written verbatim into `./typedoc.json`;
// `entryPoints` is filled in later with the resolved workspace paths.
const BASE_TYPEDOC_CONFIG = {
  $schema: "https://typedoc.org/schema.json",
  out: "public",
  sort: [
    "kind",
    "visibility",
    "instance-first",
    "required-first",
    "alphabetical",
  ],
  plugin: [
    "./scripts/typedoc-plugin.js",
    "typedoc-plugin-expand-object-like-types",
  ],
  tsconfig: "../../tsconfig.json",
  excludePrivate: true,
  excludeInternal: true,
  excludeExternals: false,
  excludeNotDocumented: false,
  includeVersion: true,
  sourceLinkTemplate:
    "https://github.com/langchain-ai/langchainjs/blob/{gitRevision}/{path}#L{line}",
  logLevel: "Error",
  name: "LangChain.js",
  skipErrorChecking: true,
  exclude: ["dist"],
  hostedBaseUrl: "https://v03.api.js.langchain.com/",
  entryPointStrategy: "packages",
};

/**
 * Reads a JSON file, passes the parsed value through `updateFunction`, and
 * writes the result back pretty-printed (2-space indent) with a trailing
 * newline.
 *
 * @param {string} relativePath Path of the JSON file to rewrite.
 * @param {any} updateFunction Maps the parsed JSON to the new file contents.
 */
const updateJsonFile = (relativePath, updateFunction) => {
  const contents = fs.readFileSync(relativePath).toString();
  const res = updateFunction(JSON.parse(contents));
  fs.writeFileSync(relativePath, JSON.stringify(res, null, 2) + "\n");
};

// `yarn workspaces list --json` emits a stream of JSON objects rather than a
// JSON array; replacing the object separator lets us wrap the whole output in
// `[...]` and parse it in one go.
// NOTE(review): this assumes consecutive objects are separated by exactly
// `"} {"` — confirm against the yarn version in use.
const workspacesListBreakStr = `"} {"`;
const workspacesListJoinStr = `"},{"`;

// Workspaces that must never appear in the generated API refs.
const BLACKLISTED_WORKSPACES = [
  "@langchain/azure-openai",
  "@langchain/google-gauth",
  "@langchain/google-webauth",
];

/**
 * @returns {Array<string>} An array of paths to all workspaces in the monorepo.
 */
function getYarnWorkspaces() {
  const stdout = execSync("yarn workspaces list --json");
  // Stitch the emitted JSON objects into a single parseable JSON array.
  const workspaces = JSON.parse(
    `[${stdout
      .toString()
      .split(workspacesListBreakStr)
      .join(workspacesListJoinStr)}]`
  );
  // Keep only the core `langchain` package and non-blacklisted
  // `@langchain/*` partner packages.
  const cleanedWorkspaces = workspaces.filter(
    (ws) =>
      ws.name === "langchain" ||
      (ws.name.startsWith("@langchain/") &&
        !BLACKLISTED_WORKSPACES.find((blacklisted) => ws.name === blacklisted))
  );
  // Workspace locations are relative to the repo root, two levels up.
  return cleanedWorkspaces.map((ws) => `../../${ws.location}`);
}

/**
 * Creates/updates a `typedoc.json` for every workspace that ships a
 * `langchain.config.js`, then regenerates the root `./typedoc.json` from
 * `BASE_TYPEDOC_CONFIG` plus the resolved workspace list.
 */
async function main() {
  // Partner packages under ../../libs that ship a langchain.config.js.
  const workspaces = fs
    .readdirSync("../../libs/")
    .filter((dir) => dir.startsWith("langchain-"))
    .map((dir) => path.join("../../libs/", dir, "/langchain.config.js"))
    .filter((configPath) => fs.existsSync(configPath));
  // Core packages first, then partner packages; langchain-scripts is a build
  // tool, not a documented package, so it is excluded.
  const configFiles = [
    "../../langchain/langchain.config.js",
    "../../langchain-core/langchain.config.js",
    ...workspaces,
  ]
    .map((configFile) => path.resolve(configFile))
    .filter((configFile) => !configFile.includes("/langchain-scripts/"));

  // Individual entrypoint files (paths relative to the repo root) that must
  // not be documented even if listed in a workspace config.
  /** @type {Array<string>} */
  const blacklistedEntrypoints = JSON.parse(
    fs.readFileSync("./blacklisted-entrypoints.json")
  );

  // NOTE(review): `configFiles` is a plain array of strings, so `for await`
  // behaves like a regular `for...of` here; the `await` that matters is the
  // dynamic `import()` in the body.
  for await (const configFile of configFiles) {
    const langChainConfig = await import(configFile);
    if (!("entrypoints" in langChainConfig.config)) {
      throw new Error(
        `The config file "${configFile}" does not contain any entrypoints.`
      );
    } else if (
      langChainConfig.config.entrypoints === null ||
      langChainConfig.config.entrypoints === undefined
    ) {
      // Explicit null/undefined entrypoints means "nothing to document".
      continue;
    }
    const { config } = langChainConfig;

    // Directory containing this workspace's langchain.config.js, relative to
    // the current working directory (docs/api_refs).
    const entrypointDir = path.relative(
      process.cwd(),
      configFile.split("/langchain.config.js")[0]
    );

    const deprecatedNodeOnly =
      "deprecatedNodeOnly" in config ? config.deprecatedNodeOnly : [];

    // Entrypoint source files to document: drop deprecated node-only entries
    // and globally blacklisted files, then map to workspace-relative paths.
    const workspaceEntrypoints = Object.values(config.entrypoints)
      .filter((key) => !deprecatedNodeOnly.includes(key))
      .filter(
        (key) =>
          !blacklistedEntrypoints.find(
            (blacklistedItem) =>
              blacklistedItem === `${entrypointDir}/src/${key}.ts`
          )
      )
      .map((key) => `src/${key}.ts`);

    const typedocPath = path.join(entrypointDir, "typedoc.json");

    // Seed an empty JSON object so updateJsonFile can parse it.
    if (!fs.existsSync(typedocPath)) {
      fs.writeFileSync(typedocPath, "{}\n");
    }

    // Merge entryPoints into any existing config; the `extends` path differs
    // because partner packages live one directory deeper than core packages.
    updateJsonFile(typedocPath, (existingConfig) => ({
      ...existingConfig,
      entryPoints: workspaceEntrypoints,
      extends: typedocPath.includes("/libs/")
        ? ["../../docs/api_refs/typedoc.base.json"]
        : ["../docs/api_refs/typedoc.base.json"],
    }));
  }

  // Check if the `./typedoc.json` file exists, since it is gitignored by default
  if (!fs.existsSync("./typedoc.json")) {
    fs.writeFileSync("./typedoc.json", "{}\n");
  }

  const yarnWorkspaces = getYarnWorkspaces();

  // The root config is rebuilt from scratch (previous contents discarded).
  updateJsonFile("./typedoc.json", () => ({
    ...BASE_TYPEDOC_CONFIG,
    entryPoints: yarnWorkspaces,
  }));
}

/**
 * Entry point: runs `main` and logs a short context message before
 * rethrowing so the process still exits non-zero on failure.
 */
async function runMain() {
  try {
    await main();
  } catch (error) {
    console.error("An error occurred while creating the entrypoints.");
    throw error;
  }
}
runMain();
0
lc_public_repos/langchainjs/docs/api_refs
lc_public_repos/langchainjs/docs/api_refs/scripts/typedoc-plugin.js
// Typedoc plugin that (1) strips unwanted reflections (internal members and
// non-whitelisted chat-model methods) from the generated docs and (2)
// post-processes every rendered HTML page to inject a search hotkey script,
// a version-switcher dropdown, and prominent deprecation banners.
const {
  Application,
  Converter,
  Context,
  ReflectionKind,
  DeclarationReflection,
  RendererEvent,
  UrlMapping,
  Reflection,
} = require("typedoc");
const fs = require("fs");
const path = require("path");
const { glob } = require("glob");
const { Project, ClassDeclaration } = require("ts-morph");

// Chat model methods which _should_ be included in the documentation
const WHITELISTED_CHAT_MODEL_INHERITED_METHODS = [
  "invoke",
  "stream",
  "batch",
  "streamLog",
  "streamEvents",
  "bind",
  "bindTools",
  "asTool",
  "pipe",
  "withConfig",
  "withRetry",
  "assign",
  "getNumTokens",
  "getGraph",
  "pick",
  "withFallbacks",
  "withStructuredOutput",
  "withListeners",
  "transform",
];

// Reflection types to check for methods that should not be documented.
// e.g methods prefixed with `_` or `lc_`
const REFLECTION_KINDS_TO_HIDE = [
  ReflectionKind.Property,
  ReflectionKind.Accessor,
  ReflectionKind.Variable,
  ReflectionKind.Method,
  ReflectionKind.Function,
  ReflectionKind.Class,
  ReflectionKind.Interface,
  ReflectionKind.Enum,
  ReflectionKind.TypeAlias,
];

// Directory where Typedoc writes the rendered HTML pages.
const BASE_OUTPUT_DIR = "./public";

// Script to inject into the HTML to add a CMD/CTRL + K 'hotkey' which focuses
// on the search input element.
const SCRIPT_HTML = `<script>
  document.addEventListener('keydown', (e) => {
    if ((e.metaKey || e.ctrlKey) && e.keyCode === 75) { // Check for CMD + K or CTRL + K
      const input = document.getElementById('tsd-search-field'); // Get the search input element by ID
      input.focus(); // Focus on the search input element
      document.getElementById('tsd-search').style.display = 'block'; // Show the div wrapper with ID tsd-search
    }
  }, false); // Add event listener for keydown events
</script>`;

// Injected into each page's HTML to add a dropdown to switch between versions.
const VERSION_DROPDOWN_HTML = `<div class="version-select">
  <select id="version-dropdown" onchange="window.location.href=this.value;">
    <option selected value="">v0.3</option>
    <option value="https://v02.api.js.langchain.com/">v0.2</option>
    <option value="https://v01.api.js.langchain.com/">v0.1</option>
  </select>
</div>`;

/**
 * HTML injected into sections where there is a `@deprecated` JSDoc tag.
 * This provides a far more visible warning to the user that the feature is
 * deprecated.
 *
 * @param {string | undefined} deprecationText
 * @returns {string}
 */
const DEPRECATION_HTML = (deprecationText) => `<div class="deprecation-warning">
<h2>⚠️ Deprecated ⚠️</h2>
${deprecationText ? `<p>${deprecationText}</p>` : ""}
<p>This feature is deprecated and will be removed in the future.</p>
<p>It is not recommended for use.</p>
</div>`;

/**
 * Uses ts-morph to check if the class is a subclass of `BaseChatModel` or
 * `SimpleChatModel`.
 *
 * @param {ClassDeclaration} classDeclaration
 * @returns {boolean}
 */
function isBaseChatModelOrSimpleChatModel(classDeclaration) {
  // Walk up the inheritance chain until a matching base class is found
  // or the chain ends.
  let currentClass = classDeclaration;
  while (currentClass) {
    const baseClassName = currentClass.getBaseClass()?.getName();
    if (
      baseClassName === "BaseChatModel" ||
      baseClassName === "SimpleChatModel"
    ) {
      return true;
    }
    currentClass = currentClass.getBaseClass();
  }
  return false;
}

/**
 * Uses ts-morph to load all chat model files, and extract the names of the
 * classes. This is then used to remove unwanted properties from showing up
 * in the documentation of those classes.
 *
 * @returns {Array<string>}
 */
function getAllChatModelNames() {
  // Community chat models live under src/chat_models (possibly nested);
  // partner packages each expose a single chat_models.ts.
  const communityChatModelPath =
    "../../libs/langchain-community/src/chat_models/*";
  const communityChatModelNestedPath =
    "../../libs/langchain-community/src/chat_models/**/*";
  const partnerPackageGlob =
    "../../libs/!(langchain-community)/**/chat_models.ts";
  const partnerPackageFiles = glob.globSync(partnerPackageGlob);

  const tsMorphProject = new Project();
  const sourceFiles = tsMorphProject.addSourceFilesAtPaths([
    communityChatModelPath,
    communityChatModelNestedPath,
    ...partnerPackageFiles,
  ]);

  const chatModelNames = [];
  for (const sourceFile of sourceFiles) {
    const exportedClasses = sourceFile.getClasses();
    for (const exportedClass of exportedClasses) {
      if (isBaseChatModelOrSimpleChatModel(exportedClass)) {
        chatModelNames.push(exportedClass.getName());
      }
    }
  }
  // getName() can return undefined for anonymous classes; drop those.
  return chatModelNames.flatMap((n) => (n ? [n] : []));
}

/**
 * Takes in a reflection and an array of all chat model class names.
 * Then performs checks to see if the given reflection should be removed
 * from the documentation.
 * E.g a class method on chat models which is
 * not intended to be documented.
 *
 * Note: returns undefined (falsy) when the reflection should be kept.
 *
 * @param {DeclarationReflection} reflection
 * @param {Array<string>} chatModelNames
 * @returns {boolean} Whether or not the reflection should be removed
 */
function shouldRemoveReflection(reflection, chatModelNames) {
  const kind = reflection.kind;

  // Members of chat model classes (except constructors) are pruned unless
  // they are in the whitelist of inherited methods.
  if (
    reflection.parent &&
    chatModelNames.find((name) => name === reflection.parent.name) &&
    reflection.name !== "constructor"
  ) {
    if (kind === ReflectionKind.Property) {
      return true;
    }
    if (
      !WHITELISTED_CHAT_MODEL_INHERITED_METHODS.find(
        (n) => n === reflection.name
      )
    ) {
      return true;
    }
    if (kind === ReflectionKind.Accessor && reflection.name === "callKeys") {
      return true;
    }
  }

  if (REFLECTION_KINDS_TO_HIDE.find((kindToHide) => kindToHide === kind)) {
    if (reflection.name.startsWith("_") || reflection.name.startsWith("lc_")) {
      // Remove all reflections which start with an `_` or `lc_`
      return true;
    }
  }
}

/**
 * Typedoc plugin entry point: registers the reflection-pruning hook on the
 * converter and the HTML post-processing hook on the renderer.
 *
 * @param {Application} application
 * @returns {void}
 */
function load(application) {
  let allChatModelNames = [];
  try {
    allChatModelNames = getAllChatModelNames();
  } catch (err) {
    console.error("Error while getting all chat model names");
    throw err;
  }

  application.converter.on(
    Converter.EVENT_CREATE_DECLARATION,
    resolveReflection
  );
  application.renderer.on(RendererEvent.END, onEndRenderEvent);

  /**
   * Removes unwanted reflections from the project as they are created.
   *
   * @param {Context} context
   * @param {DeclarationReflection} reflection
   * @returns {void}
   */
  function resolveReflection(context, reflection) {
    const { project } = context;
    if (shouldRemoveReflection(reflection, allChatModelNames)) {
      project.removeReflection(reflection);
    }
  }

  /**
   * After rendering, rewrites every emitted HTML page: expands deprecation
   * notices into banners, injects the search hotkey script, and adds the
   * version dropdown.
   *
   * @param {Context} context
   */
  function onEndRenderEvent(context) {
    const htmlToSplitAtSearchScript = `<div class="tsd-toolbar-contents container">`;
    const htmlToSplitAtVersionDropdown = `<div id="tsd-toolbar-links">`;
    const deprecatedHTML = "<h4>Deprecated</h4>";
    const { urls } = context;
    for (const { url } of urls) {
      const indexFilePath = path.join(BASE_OUTPUT_DIR, url);
      let htmlFileContent = fs.readFileSync(indexFilePath, "utf-8");

      if (htmlFileContent.includes(deprecatedHTML)) {
        // If any comments are added to the `@deprecated` JSDoc, they'll
        // be inside the following <p> tag.
        const deprecationTextRegex = new RegExp(
          `${deprecatedHTML}<p>(.*?)</p>`
        );
        const deprecationTextMatch =
          htmlFileContent.match(deprecationTextRegex);
        /** @type {string | undefined} */
        let textInsidePTag;
        if (deprecationTextMatch) {
          textInsidePTag = deprecationTextMatch[1];
          const newTextToReplace = `${deprecatedHTML}<p>${textInsidePTag}</p>`;
          htmlFileContent = htmlFileContent.replace(
            newTextToReplace,
            DEPRECATION_HTML(textInsidePTag)
          );
        } else {
          htmlFileContent = htmlFileContent.replace(
            deprecatedHTML,
            DEPRECATION_HTML(undefined)
          );
        }
      }

      // NOTE(review): split() discards the delimiter, so the opening
      // toolbar <div> is replaced by SCRIPT_HTML rather than preceded by
      // it; also, if the marker is absent, part2 is undefined and the
      // string "undefined" is appended. Confirm both are intentional.
      const [part1, part2] = htmlFileContent.split(htmlToSplitAtSearchScript);
      const htmlWithScript = part1 + SCRIPT_HTML + part2;

      const htmlWithDropdown = htmlWithScript.replace(
        htmlToSplitAtVersionDropdown,
        htmlToSplitAtVersionDropdown + VERSION_DROPDOWN_HTML
      );
      fs.writeFileSync(indexFilePath, htmlWithDropdown);
    }
  }
}

module.exports = { load };
0
lc_public_repos/langchainjs/docs/api_refs
lc_public_repos/langchainjs/docs/api_refs/scripts/update-typedoc-css.js
const { readFile, writeFile } = require("fs/promises"); const CSS = `\n.tsd-navigation { word-break: break-word; } .page-menu { display: none; } .deprecation-warning { background-color: #ef4444; border-radius: 0.375rem; display: flex; flex-direction: column; padding: 12px; text-align: left; } .version-select { display: inline-block; margin-left: 10px; z-index: 1; } .version-select select { padding: 2.5px 5px; font-size: 14px; border: 1px solid #ccc; border-radius: 4px; background-color: #fff; color: #333; cursor: pointer; } .version-select select:hover { border-color: #999; } .version-select select:focus { outline: none; box-shadow: 0 0 3px rgba(0, 0, 0, 0.2); } `; async function main() { let cssContents = await readFile("./public/assets/style.css", "utf-8"); cssContents += CSS; await writeFile("./public/assets/style.css", cssContents); } main();
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/.eslintrc.js
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @format */ const OFF = 0; const WARNING = 1; const ERROR = 2; module.exports = { root: true, env: { browser: true, commonjs: true, jest: true, node: true, }, parser: "@babel/eslint-parser", parserOptions: { allowImportExportEverywhere: true, }, extends: ["airbnb", "prettier"], plugins: ["react-hooks", "header"], ignorePatterns: [ "build", "docs/api", "node_modules", "docs/_static", "static", ], rules: { // Ignore certain webpack alias because it can't be resolved "import/no-unresolved": [ ERROR, { ignore: ["^@theme", "^@docusaurus", "^@generated"] }, ], "import/extensions": OFF, "react/jsx-filename-extension": OFF, "react-hooks/rules-of-hooks": ERROR, "react/jsx-props-no-spreading": OFF, "react/prop-types": OFF, // PropTypes aren't used much these days. "react/function-component-definition": [ WARNING, { namedComponents: "function-declaration", unnamedComponents: "arrow-function", }, ], }, };
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/sidebars.js
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @format */ /** * Creating a sidebar enables you to: - create an ordered group of docs - render a sidebar for each doc of that group - provide next/previous navigation The sidebars can be generated from the filesystem, or explicitly defined here. Create as many sidebars as you want. */ module.exports = { docs: [ "introduction", { type: "category", link: { type: "doc", id: "tutorials/index" }, label: "Tutorials", collapsible: false, items: [ { type: "autogenerated", dirName: "tutorials", className: "hidden", }, ], }, { type: "category", link: { type: "doc", id: "how_to/index" }, label: "How-to guides", collapsible: false, items: [ { type: "autogenerated", dirName: "how_to", className: "hidden", }, ], }, { type: "category", link: { type: "doc", id: "concepts/index" }, label: "Conceptual Guide", collapsible: false, items: [ { type: "autogenerated", dirName: "concepts", className: "hidden", }, ], }, { type: "category", label: "Ecosystem", collapsed: false, collapsible: false, items: [ { type: "link", href: "https://docs.smith.langchain.com/", label: "🦜🛠️ LangSmith", }, { type: "link", href: "https://langchain-ai.github.io/langgraphjs/", label: "🦜🕸️ LangGraph.js", }, ], }, { type: "category", label: "Versions", collapsed: false, collapsible: false, items: [ { type: "doc", id: "versions/v0_3/index", label: "v0.3", }, { type: "category", label: "v0.2", items: [ { type: "autogenerated", dirName: "versions/v0_2", }, ], }, { type: "category", label: "Migrating from v0.0 memory", link: { type: "doc", id: "versions/migrating_memory/index" }, collapsible: false, collapsed: false, items: [ { type: "autogenerated", dirName: "versions/migrating_memory", className: "hidden", }, ], }, "versions/release_policy", ], }, "security", ], integrations: [ { type: "category", label: "Providers", collapsible: 
false, items: [ { type: "autogenerated", dirName: "integrations/platforms", }, { type: "category", label: "More", collapsed: true, items: [ { type: "autogenerated", dirName: "integrations/providers", }, ], link: { type: "generated-index", slug: "integrations/providers", }, }, ], link: { type: "doc", id: "integrations/platforms/index", }, }, { type: "category", label: "Components", collapsible: false, items: [ { type: "category", label: "Chat models", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/chat", className: "hidden", }, ], link: { type: "doc", id: "integrations/chat/index", }, }, { type: "category", label: "LLMs", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/llms", className: "hidden", }, ], link: { type: "doc", id: "integrations/llms/index", }, }, { type: "category", label: "Embedding models", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/text_embedding", className: "hidden", }, ], link: { type: "doc", id: "integrations/text_embedding/index", }, }, { type: "category", label: "Document loaders", collapsed: true, items: [ { type: "category", label: "File loaders", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/document_loaders/file_loaders", className: "hidden", }, ], link: { type: "doc", id: "integrations/document_loaders/file_loaders/index", }, }, { type: "category", label: "Web loaders", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/document_loaders/web_loaders", className: "hidden", }, ], link: { type: "doc", id: "integrations/document_loaders/web_loaders/index", }, }, ], link: { type: "doc", id: "integrations/document_loaders/index", }, }, { type: "category", label: "Vector stores", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/vectorstores", className: "hidden", }, ], link: { type: "doc", id: "integrations/vectorstores/index", }, }, { type: "category", label: 
"Retrievers", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/retrievers", className: "hidden", }, ], link: { type: "doc", id: "integrations/retrievers/index", }, }, { type: "category", label: "Tools/Toolkits", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/tools", className: "hidden", }, ], link: { type: "doc", id: "integrations/tools/index", }, }, { type: "category", label: "Toolkits", collapsible: false, className: "hidden", items: [ { type: "autogenerated", dirName: "integrations/toolkits", className: "hidden", }, ], link: { type: "doc", id: "integrations/tools/index", }, }, { type: "category", label: "Key-value stores", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/stores", className: "hidden", }, ], link: { type: "doc", id: "integrations/stores/index", }, }, { type: "category", label: "Other", collapsed: true, items: [ { type: "category", label: "Document transformers", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/document_transformers", className: "hidden", }, ], link: { type: "generated-index", slug: "integrations/document_transformers", }, }, { type: "category", label: "Document rerankers", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/document_compressors", className: "hidden", }, ], link: { type: "generated-index", slug: "integrations/document_compressors", }, }, { type: "category", label: "Model caches", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/llm_caching", className: "hidden", }, ], link: { type: "doc", id: "integrations/llm_caching/index", }, }, { type: "category", label: "Graphs", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/graphs", className: "hidden", }, ], link: { type: "generated-index", slug: "integrations/graphs", }, }, { type: "category", label: "Memory", collapsible: false, items: [ { type: "autogenerated", dirName: 
"integrations/memory", className: "hidden", }, ], link: { type: "generated-index", slug: "integrations/memory", }, }, { type: "category", label: "Callbacks", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/callbacks", className: "hidden", }, ], link: { type: "generated-index", slug: "integrations/callbacks", }, }, { type: "category", label: "Chat loaders", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/chat_loaders", className: "hidden", }, ], link: { type: "generated-index", slug: "integrations/chat_loaders", }, }, { type: "category", label: "Adapters", collapsible: false, items: [ { type: "autogenerated", dirName: "integrations/adapters", className: "hidden", }, ], link: { type: "generated-index", slug: "integrations/adapters", }, }, ], }, ], link: { type: "generated-index", slug: "integrations/components", }, }, ], contributing: [ { type: "category", label: "Contributing", items: [ { type: "autogenerated", dirName: "contributing", }, ], }, ], };
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/docusaurus.config.js
/* eslint-disable global-require,import/no-extraneous-dependencies */ // @ts-check // Note: type annotations allow type checking and IDEs autocompletion // eslint-disable-next-line import/no-extraneous-dependencies const { ProvidePlugin } = require("webpack"); const path = require("path"); require("dotenv").config(); const examplesPath = path.resolve(__dirname, "..", "..", "examples", "src"); const mdxComponentsPath = path.resolve(__dirname, "docs", "mdx_components"); const baseLightCodeBlockTheme = require("prism-react-renderer/themes/vsLight"); const baseDarkCodeBlockTheme = require("prism-react-renderer/themes/vsDark"); const baseUrl = "/"; /** @type {import('@docusaurus/types').Config} */ const config = { title: "🦜️🔗 Langchain", tagline: "LangChain.js Docs", favicon: "img/brand/favicon.png", // Set the production url of your site here url: "https://js.langchain.com", // Set the /<baseUrl>/ pathname under which your site is served // For GitHub pages deployment, it is often '/<projectName>/' baseUrl, onBrokenLinks: "throw", onBrokenMarkdownLinks: "throw", plugins: [ () => ({ name: "custom-webpack-config", configureWebpack: () => ({ plugins: [ new ProvidePlugin({ process: require.resolve("process/browser"), }), ], resolve: { fallback: { path: false, url: false, }, alias: { "@examples": examplesPath, "@mdx_components": mdxComponentsPath, react: path.resolve("../../node_modules/react"), }, }, module: { rules: [ { test: examplesPath, use: ["json-loader", "./scripts/code-block-loader.js"], }, { test: /\.ya?ml$/, use: "yaml-loader", }, { test: /\.m?js/, resolve: { fullySpecified: false, }, }, ], }, }), }), ], presets: [ [ "classic", /** @type {import('@docusaurus/preset-classic').Options} */ ({ docs: { sidebarPath: require.resolve("./sidebars.js"), remarkPlugins: [ [require("@docusaurus/remark-plugin-npm2yarn"), { sync: true }], ], async sidebarItemsGenerator({ defaultSidebarItemsGenerator, ...args }) { const sidebarItems = await defaultSidebarItemsGenerator(args); 
sidebarItems.forEach((subItem) => { // This allows breaking long sidebar labels into multiple lines // by inserting a zero-width space after each slash. if ( "label" in subItem && subItem.label && subItem.label.includes("/") ) { // eslint-disable-next-line no-param-reassign subItem.label = subItem.label.replace(/\//g, "/\u200B"); } if (args.item.className) { subItem.className = args.item.className; } }); return sidebarItems; }, }, pages: { remarkPlugins: [require("@docusaurus/remark-plugin-npm2yarn")], }, theme: { customCss: require.resolve("./src/css/custom.css"), }, }), ], ], webpack: { jsLoader: (isServer) => ({ loader: require.resolve("swc-loader"), options: { jsc: { parser: { syntax: "typescript", tsx: true, }, target: "es2017", }, module: { type: isServer ? "commonjs" : "es6", }, }, }), }, themeConfig: /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ ({ prism: { theme: { ...baseLightCodeBlockTheme, plain: { ...baseLightCodeBlockTheme.plain, backgroundColor: "#F5F5F5", }, }, darkTheme: { ...baseDarkCodeBlockTheme, plain: { ...baseDarkCodeBlockTheme.plain, backgroundColor: "#222222", }, }, }, image: "img/brand/theme-image.png", navbar: { logo: { src: "img/brand/wordmark.png", srcDark: "img/brand/wordmark-dark.png", }, items: [ { type: "docSidebar", position: "left", sidebarId: "integrations", label: "Integrations", }, { href: "https://v03.api.js.langchain.com", label: "API Reference", position: "left", }, { type: "dropdown", label: "More", position: "left", items: [ { to: "/docs/people/", label: "People", }, { to: "/docs/community", label: "Community", }, { to: "/docs/troubleshooting/errors", label: "Error reference", }, { to: "/docs/additional_resources/tutorials", label: "External guides", }, { to: "/docs/contributing", label: "Contributing", }, ], }, { type: "dropdown", label: "v0.3", position: "right", items: [ { label: "v0.3", href: "/docs/introduction", }, { label: "v0.2", href: "https://js.langchain.com/v0.2/docs/introduction", }, { label: 
"v0.1", href: "https://js.langchain.com/v0.1/docs/get_started/introduction", }, ], }, { type: "dropdown", label: "🦜🔗", position: "right", items: [ { href: "https://smith.langchain.com", label: "LangSmith", }, { href: "https://docs.smith.langchain.com", label: "LangSmith Docs", }, { href: "https://smith.langchain.com/hub", label: "LangChain Hub", }, { href: "https://github.com/langchain-ai/langserve", label: "LangServe", }, { href: "https://python.langchain.com/", label: "Python Docs", }, ], }, { href: "https://chatjs.langchain.com", label: "Chat", position: "right", }, // Please keep GitHub link to the right for consistency. { href: "https://github.com/langchain-ai/langchainjs", className: "header-github-link", position: "right", "aria-label": "GitHub repository", }, ], }, footer: { style: "light", links: [ { title: "Community", items: [ { label: "Twitter", href: "https://twitter.com/LangChainAI", }, ], }, { title: "GitHub", items: [ { label: "Python", href: "https://github.com/langchain-ai/langchain", }, { label: "JS/TS", href: "https://github.com/langchain-ai/langchainjs", }, ], }, { title: "More", items: [ { label: "Homepage", href: "https://langchain.com", }, { label: "Blog", href: "https://blog.langchain.dev", }, ], }, ], copyright: `Copyright © ${new Date().getFullYear()} LangChain, Inc.`, }, algolia: { // The application ID provided by Algolia appId: "3EZV6U1TYC", // Public API key: it is safe to commit it // this is linked to erick@langchain.dev currently apiKey: "180851bbb9ba0ef6be9214849d6efeaf", indexName: "js-langchain-latest", contextualSearch: false, }, }), scripts: [ baseUrl + "js/google_analytics.js", { src: "https://www.googletagmanager.com/gtag/js?id=G-TVSL7JBE9Y", async: true, }, ], customFields: { supabasePublicKey: process.env.NEXT_PUBLIC_SUPABASE_PUBLIC_KEY, supabaseUrl: process.env.NEXT_PUBLIC_SUPABASE_URL, }, }; module.exports = config;
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/README.md
# Website This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator. ### Installation ``` $ yarn ``` ### Local Development ``` $ yarn start ``` This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. ### Build ``` $ yarn build ``` This command generates static content into the `build` directory and can be served using any static contents hosting service. ### Deployment Using SSH: ``` $ USE_SSH=true yarn deploy ``` Not using SSH: ``` $ GIT_USER=<Your GitHub username> yarn deploy ``` If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. ### Continuous Integration Some common defaults for linting/formatting have been set for you. If you integrate your project with an open source Continuous Integration system (e.g. Travis CI, CircleCI), you may check for issues using the following command. ``` $ yarn ci ``` ### Validating Notebooks You can validate that notebooks build and compile TypeScript using the following command: ```bash $ yarn validate <PATH_TO_FILE> ```
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/.prettierignore
node_modules build .docusaurus docs/api docs/_static static
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/package.json
{ "name": "core_docs", "version": "0.0.0", "private": true, "scripts": { "docusaurus": "docusaurus", "start": "yarn quarto && rimraf ./docs/api && NODE_OPTIONS=--max-old-space-size=7168 docusaurus start", "build": "yarn clean && yarn quarto && rimraf ./build && NODE_OPTIONS=--max-old-space-size=7168 DOCUSAURUS_SSR_CONCURRENCY=4 docusaurus build", "build:vercel": "yarn clean && bash ./scripts/vercel_build.sh && yarn quarto:vercel && rimraf ./build && NODE_OPTIONS=--max-old-space-size=7168 DOCUSAURUS_SSR_CONCURRENCY=4 docusaurus build", "swizzle": "docusaurus swizzle", "deploy": "docusaurus deploy", "clear": "docusaurus clear", "serve": "docusaurus serve", "write-translations": "docusaurus write-translations", "write-heading-ids": "docusaurus write-heading-ids", "lint:eslint": "NODE_OPTIONS=--max-old-space-size=4096 eslint --cache --ext .ts,.js src/", "lint": "yarn lint:eslint", "lint:fix": "yarn lint --fix", "precommit": "lint-staged", "format": "prettier --write \"**/*.{js,jsx,ts,tsx,md,mdx}\"", "format:check": "prettier --check \"**/*.{js,jsx,ts,tsx,md,mdx}\"", "clean": "rm -rf .docusaurus/ .turbo/ .build/", "quarto": "quarto render docs/ && node ./scripts/quarto-build.js && python3 ./scripts/append_related_links.py ./docs", "quarto:vercel": "node ./scripts/quarto-build.js && python3 ./scripts/append_related_links.py ./docs", "gen": "yarn gen:supabase", "gen:supabase": "npx supabase gen types typescript --project-id 'xsqpnijvmbodcxyapnyq' --schema public > ./src/supabase.d.ts", "broken-links": "node ./scripts/check-broken-links.js", "check:broken-links": "yarn quarto && yarn broken-links", "check:broken-links:ci": "yarn quarto:vercel && yarn broken-links", "validate": "yarn notebook_validate" }, "dependencies": { "@anthropic-ai/vertex-sdk": "^0.4.1", "@docusaurus/core": "2.4.3", "@docusaurus/preset-classic": "2.4.3", "@docusaurus/remark-plugin-npm2yarn": "2.4.3", "@docusaurus/theme-mermaid": "2.4.3", "@mdx-js/react": "^1.6.22", "@supabase/supabase-js": "^2.45.0", 
"clsx": "^1.2.1", "cookie": "^0.6.0", "isomorphic-dompurify": "^2.9.0", "json-loader": "^0.5.7", "marked": "^12.0.2", "process": "^0.11.10", "react": "^17.0.2", "react-dom": "^17.0.2", "uuid": "^10.0.0", "webpack": "^5.75.0" }, "devDependencies": { "@babel/eslint-parser": "^7.18.2", "@langchain/langgraph": "0.2.3", "@langchain/scripts": "workspace:*", "@microsoft/fetch-event-source": "^2.0.1", "@swc/core": "^1.3.62", "@types/cookie": "^0", "docusaurus-plugin-typedoc": "1.0.0-next.5", "dotenv": "^16.4.5", "eslint": "^8.19.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-prettier": "^8.5.0", "eslint-plugin-header": "^3.1.1", "eslint-plugin-import": "^2.26.0", "eslint-plugin-jsx-a11y": "^6.6.0", "eslint-plugin-react": "^7.30.1", "eslint-plugin-react-hooks": "^4.6.0", "glob": "^10.3.10", "prettier": "^2.8.3", "rimraf": "^5.0.1", "supabase": "^1.148.6", "swc-loader": "^0.2.3", "ts-morph": "^23.0.0", "tsx": "^3.12.3", "typedoc": "^0.24.4", "typedoc-plugin-markdown": "next", "typescript": "~5.1.6", "yaml-loader": "^0.8.0" }, "packageManager": "yarn@3.4.1", "browserslist": { "production": [ ">0.5%", "not dead", "not op_mini all" ], "development": [ "last 1 chrome version", "last 1 firefox version", "last 1 safari version" ] }, "resolutions": { "typedoc-plugin-markdown@next": "patch:typedoc-plugin-markdown@npm%3A4.0.0-next.6#./.yarn/patches/typedoc-plugin-markdown-npm-4.0.0-next.6-96b4b47746.patch" }, "engines": { "node": ">=18" } }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/vercel.json
{ "buildCommand": "yarn build:vercel", "outputDirectory": "build", "trailingSlash": true, "rewrites": [ { "source": "/v0.1/:path(.*/?)*", "destination": "https://langchainjs-v01.vercel.app/v0.1/:path*" }, { "source": "/v0.2/:path(.*/?)*", "destination": "https://langchainjs-v02.vercel.app/v0.2/:path*" } ], "redirects": [ { "source": "/docs/how_to/callbacks_backgrounding(/?)", "destination": "/docs/how_to/callbacks_serverless/" }, { "source": "/docs/get_started/introduction(/?)", "destination": "/docs/introduction/" }, { "source": "/docs(/?)", "destination": "/docs/introduction/" }, { "source": "/docs/get_started/introduction(/?)", "destination": "/docs/introduction/" }, { "source": "/docs/how_to/tool_calls_multi_modal(/?)", "destination": "/docs/how_to/multimodal_inputs/" }, { "source": "/docs/langgraph(/?)", "destination": "https://langchain-ai.github.io/langgraphjs/" }, { "source": "/docs/langsmith(/?)", "destination": "https://docs.smith.langchain.com/" }, { "source": "/docs/integrations/chat/chrome_ai(/?)", "destination": "/docs/integrations/llms/chrome_ai/" }, { "source": "/docs/integrations/retrievers/vectorstore(/?)", "destination": "/docs/how_to/vectorstore_retriever/" }, { "source": "/docs/integrations/chat_memory(/?)", "destination": "/docs/integrations/memory/" }, { "source": "/docs/integrations/chat_memory/:path(.*/?)*", "destination": "/docs/integrations/memory/:path*" }, { "source": "/docs/integrations/llms/togetherai(/?)", "destination": "/docs/integrations/llms/together/" }, { "source": "/docs/tutorials/agents(/?)", "destination": "https://langchain-ai.github.io/langgraphjs/tutorials/quickstart/" }, { "source": "/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT(/?)", "destination": "https://langchain-ai.github.io/langgraphjs/troubleshooting/errors/GRAPH_RECURSION_LIMIT/" }, { "source": "/docs/troubleshooting/errors/INVALID_CONCURRENT_GRAPH_UPDATE(/?)", "destination": 
"https://langchain-ai.github.io/langgraphjs/troubleshooting/errors/INVALID_CONCURRENT_GRAPH_UPDATE/" }, { "source": "/docs/troubleshooting/errors/INVALID_GRAPH_NODE_RETURN_VALUE(/?)", "destination": "https://langchain-ai.github.io/langgraphjs/troubleshooting/errors/INVALID_GRAPH_NODE_RETURN_VALUE/" }, { "source": "/docs/troubleshooting/errors/MULTIPLE_SUBGRAPHS(/?)", "destination": "https://langchain-ai.github.io/langgraphjs/troubleshooting/errors/MULTIPLE_SUBGRAPHS/" } ] }
0
lc_public_repos/langchainjs/docs
lc_public_repos/langchainjs/docs/core_docs/babel.config.js
/** * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @format */ module.exports = { presets: [require.resolve("@docusaurus/core/lib/babel/preset")], };