|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import { RunnableConfig } from '../../../../src//core/context.js';
|
|
|
import {Runnable} from '../../../../src/index.js';
|
|
|
import {BaseCallback} from '../../../../src/utils/callbacks.js';
|
|
|
|
|
|
/**
 * A Runnable that applies simple text transformations — optional prefix,
 * optional uppercasing, and length truncation — where every setting has a
 * constructor-time default that can be overridden per invocation via
 * `config.configurable`.
 */
class TextProcessorRunnable extends Runnable {
  /**
   * @param {object} [options] - Constructor-time defaults.
   * @param {number} [options.maxLength=50] - Default truncation threshold.
   * @param {boolean} [options.uppercase=false] - Default uppercase flag.
   * @param {string} [options.prefix=''] - Default prefix string.
   */
  constructor(options = {}) {
    super();
    this.defaultMaxLength = options.maxLength ?? 50;
    this.defaultUppercase = options.uppercase ?? false;
    this.defaultPrefix = options.prefix ?? '';
  }

  /**
   * Transform `input`, preferring runtime overrides from
   * `config.configurable` over the constructor defaults.
   *
   * Transformation order: prefix, then uppercase, then truncate — so the
   * prefix counts toward `maxLength` and is uppercased too.
   *
   * @param {string} input - The text to process.
   * @param {object} [config] - Invocation config; `config.configurable` may
   *   carry `maxLength`, `uppercase`, and/or `prefix` overrides.
   * @returns {Promise<string>} The processed text, with '...' appended when
   *   truncation occurred.
   */
  async _call(input, config) {
    // Bug fix: guard `config` itself with optional chaining — the original
    // threw a TypeError whenever `_call` ran without a config object.
    const maxLength = config?.configurable?.maxLength ?? this.defaultMaxLength;
    const uppercase = config?.configurable?.uppercase ?? this.defaultUppercase;
    const prefix = config?.configurable?.prefix ?? this.defaultPrefix;

    let result = input;

    if (prefix) {
      result = prefix + result;
    }

    if (uppercase) {
      result = result.toUpperCase();
    }

    if (result.length > maxLength) {
      result = result.substring(0, maxLength) + '...';
    }

    return result;
  }
}
|
|
|
|
|
|
|
|
|
/**
 * Callback that logs the per-invocation `configurable` overrides, if any,
 * each time a runnable starts.
 */
class ConfigLoggerCallback extends BaseCallback {
  /**
   * @param {object} runnable - The runnable being invoked (unused here).
   * @param {*} input - The invocation input (unused here).
   * @param {object} [config] - Invocation config; overrides live on
   *   `config.configurable`.
   */
  async onStart(runnable, input, config) {
    // Bug fix: guard `config` with optional chaining — the original threw a
    // TypeError when onStart was fired without a config object.
    const overrides = config?.configurable;
    // Only log when at least one override is actually present.
    if (overrides && Object.keys(overrides).length > 0) {
      // NOTE(review): the 'π' glyph looks like a mis-encoded emoji from the
      // original source — confirm the intended character. Kept byte-identical
      // here to preserve runtime output.
      console.log(`π Runtime config:`, overrides);
    }
  }
}
|
|
|
|
|
|
/**
 * Exercise 16 driver: demonstrates overriding a Runnable's configuration at
 * invocation time via `config.configurable` — single and multiple overrides,
 * reusable RunnableConfig objects for A/B variants, and an LLM-style
 * temperature override.
 *
 * All output goes to the console; the function resolves with no value.
 */
async function exercise() {
  console.log('=== Exercise 16: Runtime Configuration Override ===\n');

  // Processor with a constructor-time default of maxLength = 50.
  const processor = new TextProcessorRunnable({ maxLength: 50 });

  // Callback that prints whichever `configurable` overrides each call uses.
  const logger = new ConfigLoggerCallback();

  const longText = "The quick brown fox jumps over the lazy dog. This is a longer sentence to test truncation and various configuration options.";

  // Test 1: no `configurable` key — the constructor defaults apply.
  console.log('--- Test 1: Using Defaults ---');
  const result1 = await processor.invoke(longText, { callbacks: [logger] });
  console.log('Result:', result1);
  console.log();

  // Test 2: override a single setting for this call only; the processor's
  // own defaults are untouched.
  console.log('--- Test 2: Override maxLength ---');
  const result2 = await processor.invoke(longText, {
    callbacks: [logger],
    configurable: { maxLength: 20 }
  });
  console.log('Result:', result2);
  console.log();

  // Test 3: several overrides at once in the same `configurable` object.
  console.log('--- Test 3: Override Multiple Settings ---');
  const result3 = await processor.invoke(longText, {
    callbacks: [logger],
    configurable: { uppercase: true, maxLength: 30 }
  });
  console.log('Result:', result3);
  console.log();

  // Test 4: inject a prefix that the constructor never configured.
  console.log('--- Test 4: Add Prefix at Runtime ---');
  const result4 = await processor.invoke(longText, {
    callbacks: [logger],
    configurable: { prefix: '[PREFIX] ', maxLength: 40 }
  });
  console.log('Result:', result4);
  console.log();

  // Test 5: build two reusable RunnableConfig objects (with metadata tagging
  // the experiment variant) and run the same input through both.
  console.log('--- Test 5: A/B Testing Different Configs ---');

  const configA = new RunnableConfig({
    callbacks: [logger],
    configurable: { maxLength: 25, uppercase: false },
    metadata: { variant: 'A', experiment: 'text-processing' }
  });

  const configB = new RunnableConfig({
    callbacks: [logger],
    configurable: { maxLength: 40, uppercase: true },
    metadata: { variant: 'B', experiment: 'text-processing' }
  });

  const testText = "Testing A/B configuration variants";

  const resultA = await processor.invoke(testText, configA);
  const resultB = await processor.invoke(testText, configB);

  console.log('Variant A:', resultA);
  console.log('Variant B:', resultB);
  console.log();

  // Test 6: the same override pattern applied to an LLM-style knob.
  console.log('--- Test 6: LLM-Style Temperature Override ---');

  /**
   * Mock LLM whose response style depends on a `temperature` setting that can
   * be overridden per call via `config.configurable.temperature`.
   */
  class MockLLMRunnable extends Runnable {
    // defaultTemp: temperature used when no runtime override is supplied.
    constructor(defaultTemp = 0.7) {
      super();
      this.defaultTemperature = defaultTemp;
    }

    /**
     * Return a canned response whose flavor is bucketed by temperature:
     * < 0.3 deterministic, > 0.8 creative, otherwise balanced.
     *
     * @param {string} input - Prompt text echoed into the response.
     * @param {object} config - Invocation config (override under
     *   `config.configurable.temperature`).
     * @returns {Promise<string>} The mock response.
     */
    async _call(input, config) {
      const temperature = config.configurable?.temperature ?? this.defaultTemperature;

      if (temperature < 0.3) {
        return `[temp=${temperature}] Deterministic response: ${input}`;
      } else if (temperature > 0.8) {
        return `[temp=${temperature}] Creative response about ${input}!!!`;
      } else {
        return `[temp=${temperature}] Balanced response: ${input}.`;
      }
    }
  }

  const llm = new MockLLMRunnable();

  // Exercise all three temperature buckets.
  console.log('Low temp (0.1):');
  const low = await llm.invoke("AI", { configurable: { temperature: 0.1 } });
  console.log(low);

  console.log('\nMedium temp (0.7):');
  const med = await llm.invoke("AI", { configurable: { temperature: 0.7 } });
  console.log(med);

  console.log('\nHigh temp (1.0):');
  const high = await llm.invoke("AI", { configurable: { temperature: 1.0 } });
  console.log(high);

  // NOTE(review): the 'β' glyph below looks like a mis-encoded emoji from the
  // original source — confirm the intended character.
  console.log('\nβ Exercise 16 complete!');
}
|
|
|
|
|
|
|
|
|
// Entry point: run the exercise and surface any unhandled failure on stderr.
exercise().catch((err) => console.error(err));
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|