{
"nodes": [
{
"id": "llmChain_0",
"position": {
"x": 521.6088857691359,
"y": 156.66399870319754
},
"type": "customNode",
"data": {
"id": "llmChain_0",
"label": "LLM Chain",
"version": 3,
"name": "llmChain",
"type": "LLMChain",
"baseClasses": [
"LLMChain",
"BaseChain",
"Runnable"
],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Name Your Chain",
"optional": true,
"id": "llmChain_0-input-chainName-string"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_0-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_0-input-prompt-BasePromptTemplate"
},
{
"label": "Output Parser",
"name": "outputParser",
"type": "BaseLLMOutputParser",
"optional": true,
"id": "llmChain_0-input-outputParser-BaseLLMOutputParser"
},
{
"label": "Input Moderation",
"description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
"name": "inputModeration",
"type": "Moderation",
"optional": true,
"list": true,
"id": "llmChain_0-input-inputModeration-Moderation"
}
],
"inputs": {
"model": "{{chatDeepseek_0.data.instance}}",
"prompt": "{{promptTemplate_1.data.instance}}",
"outputParser": "",
"inputModeration": "",
"chainName": "chef"
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"description": "",
"options": [
{
"id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable",
"name": "llmChain",
"label": "LLM Chain",
"description": "",
"type": "LLMChain | BaseChain | Runnable"
},
{
"id": "llmChain_0-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"description": "",
"type": "string | json"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "outputPrediction"
},
"selected": false
},
"width": 300,
"height": 507,
"selected": false,
"positionAbsolute": {
"x": 521.6088857691359,
"y": 156.66399870319754
},
"dragging": false
},
{
"id": "promptTemplate_1",
"position": {
"x": -46.341231729058606,
"y": 499.6383856394611
},
"type": "customNode",
"data": {
"id": "promptTemplate_1",
"label": "Prompt Template",
"version": 1,
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": [
"PromptTemplate",
"BaseStringPromptTemplate",
"BasePromptTemplate",
"Runnable"
],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_1-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_1-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "you are an expert web research assistant {title}\n\n",
"promptValues": "{}"
},
"outputAnchors": [
{
"id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable",
"name": "promptTemplate",
"label": "PromptTemplate",
"description": "Schema to represent a basic prompt for an LLM",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 512,
"selected": false,
"positionAbsolute": {
"x": -46.341231729058606,
"y": 499.6383856394611
},
"dragging": false
},
{
"id": "chatDeepseek_0",
"position": {
"x": 79.45481135797655,
"y": -248.91281901063368
},
"type": "customNode",
"data": {
"id": "chatDeepseek_0",
"label": "ChatDeepseek",
"version": 1,
"name": "chatDeepseek",
"type": "chatDeepseek",
"baseClasses": [
"chatDeepseek",
"BaseChatModel",
"BaseLanguageModel",
"Runnable"
],
"category": "Chat Models",
"description": "Wrapper around Deepseek large language models that use the Chat endpoint",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": [
"deepseekApi"
],
"id": "chatDeepseek_0-input-credential-credential"
},
{
"label": "Model Name",
"name": "modelName",
"type": "asyncOptions",
"loadMethod": "listModels",
"default": "deepseek-chat",
"id": "chatDeepseek_0-input-modelName-asyncOptions"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"step": 0.1,
"default": 0.7,
"optional": true,
"id": "chatDeepseek_0-input-temperature-number"
},
{
"label": "Streaming",
"name": "streaming",
"type": "boolean",
"default": true,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_0-input-streaming-boolean"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"step": 1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_0-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"step": 0.1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_0-input-topP-number"
},
{
"label": "Frequency Penalty",
"name": "frequencyPenalty",
"type": "number",
"step": 0.1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_0-input-frequencyPenalty-number"
},
{
"label": "Presence Penalty",
"name": "presencePenalty",
"type": "number",
"step": 0.1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_0-input-presencePenalty-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"step": 1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_0-input-timeout-number"
},
{
"label": "Stop Sequence",
"name": "stopSequence",
"type": "string",
"rows": 4,
"optional": true,
"description": "List of stop words to use when generating. Use comma to separate multiple stop words.",
"additionalParams": true,
"id": "chatDeepseek_0-input-stopSequence-string"
},
{
"label": "Base Options",
"name": "baseOptions",
"type": "json",
"optional": true,
"additionalParams": true,
"description": "Additional options to pass to the Deepseek client. This should be a JSON object.",
"id": "chatDeepseek_0-input-baseOptions-json"
}
],
"inputAnchors": [
{
"label": "Cache",
"name": "cache",
"type": "BaseCache",
"optional": true,
"id": "chatDeepseek_0-input-cache-BaseCache"
}
],
"inputs": {
"cache": "",
"modelName": "deepseek-chat",
"temperature": 0.7,
"streaming": true,
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": "",
"stopSequence": "",
"baseOptions": ""
},
"outputAnchors": [
{
"id": "chatDeepseek_0-output-chatDeepseek-chatDeepseek|BaseChatModel|BaseLanguageModel|Runnable",
"name": "chatDeepseek",
"label": "chatDeepseek",
"description": "Wrapper around Deepseek large language models that use the Chat endpoint",
"type": "chatDeepseek | BaseChatModel | BaseLanguageModel | Runnable"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 573,
"positionAbsolute": {
"x": 79.45481135797655,
"y": -248.91281901063368
},
"selected": false,
"dragging": false
},
{
"id": "chatDeepseek_1",
"position": {
"x": 963.4890932888779,
"y": -209.560438365845
},
"type": "customNode",
"data": {
"id": "chatDeepseek_1",
"label": "ChatDeepseek",
"version": 1,
"name": "chatDeepseek",
"type": "chatDeepseek",
"baseClasses": [
"chatDeepseek",
"BaseChatModel",
"BaseLanguageModel",
"Runnable"
],
"category": "Chat Models",
"description": "Wrapper around Deepseek large language models that use the Chat endpoint",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": [
"deepseekApi"
],
"id": "chatDeepseek_1-input-credential-credential"
},
{
"label": "Model Name",
"name": "modelName",
"type": "asyncOptions",
"loadMethod": "listModels",
"default": "deepseek-chat",
"id": "chatDeepseek_1-input-modelName-asyncOptions"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"step": 0.1,
"default": 0.7,
"optional": true,
"id": "chatDeepseek_1-input-temperature-number"
},
{
"label": "Streaming",
"name": "streaming",
"type": "boolean",
"default": true,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_1-input-streaming-boolean"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"step": 1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_1-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"step": 0.1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_1-input-topP-number"
},
{
"label": "Frequency Penalty",
"name": "frequencyPenalty",
"type": "number",
"step": 0.1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_1-input-frequencyPenalty-number"
},
{
"label": "Presence Penalty",
"name": "presencePenalty",
"type": "number",
"step": 0.1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_1-input-presencePenalty-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"step": 1,
"optional": true,
"additionalParams": true,
"id": "chatDeepseek_1-input-timeout-number"
},
{
"label": "Stop Sequence",
"name": "stopSequence",
"type": "string",
"rows": 4,
"optional": true,
"description": "List of stop words to use when generating. Use comma to separate multiple stop words.",
"additionalParams": true,
"id": "chatDeepseek_1-input-stopSequence-string"
},
{
"label": "Base Options",
"name": "baseOptions",
"type": "json",
"optional": true,
"additionalParams": true,
"description": "Additional options to pass to the Deepseek client. This should be a JSON object.",
"id": "chatDeepseek_1-input-baseOptions-json"
}
],
"inputAnchors": [
{
"label": "Cache",
"name": "cache",
"type": "BaseCache",
"optional": true,
"id": "chatDeepseek_1-input-cache-BaseCache"
}
],
"inputs": {
"cache": "",
"modelName": "deepseek-chat",
"temperature": 0.7,
"streaming": true,
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": "",
"stopSequence": "",
"baseOptions": ""
},
"outputAnchors": [
{
"id": "chatDeepseek_1-output-chatDeepseek-chatDeepseek|BaseChatModel|BaseLanguageModel|Runnable",
"name": "chatDeepseek",
"label": "chatDeepseek",
"description": "Wrapper around Deepseek large language models that use the Chat endpoint",
"type": "chatDeepseek | BaseChatModel | BaseLanguageModel | Runnable"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 573,
"positionAbsolute": {
"x": 963.4890932888779,
"y": -209.560438365845
},
"selected": false,
"dragging": false
},
{
"id": "llmChain_1",
"position": {
"x": 1397.4322987499522,
"y": 391.07025959430916
},
"type": "customNode",
"data": {
"id": "llmChain_1",
"label": "LLM Chain",
"version": 3,
"name": "llmChain",
"type": "LLMChain",
"baseClasses": [
"LLMChain",
"BaseChain",
"Runnable"
],
"category": "Chains",
"description": "Chain to run queries against LLMs",
"inputParams": [
{
"label": "Chain Name",
"name": "chainName",
"type": "string",
"placeholder": "Name Your Chain",
"optional": true,
"id": "llmChain_1-input-chainName-string"
}
],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "llmChain_1-input-model-BaseLanguageModel"
},
{
"label": "Prompt",
"name": "prompt",
"type": "BasePromptTemplate",
"id": "llmChain_1-input-prompt-BasePromptTemplate"
},
{
"label": "Output Parser",
"name": "outputParser",
"type": "BaseLLMOutputParser",
"optional": true,
"id": "llmChain_1-input-outputParser-BaseLLMOutputParser"
},
{
"label": "Input Moderation",
"description": "Detect text that could generate harmful output and prevent it from being sent to the language model",
"name": "inputModeration",
"type": "Moderation",
"optional": true,
"list": true,
"id": "llmChain_1-input-inputModeration-Moderation"
}
],
"inputs": {
"model": "{{chatDeepseek_1.data.instance}}",
"prompt": "{{promptTemplate_0.data.instance}}",
"outputParser": "",
"inputModeration": "",
"chainName": "critic"
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"description": "",
"options": [
{
"id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable",
"name": "llmChain",
"label": "LLM Chain",
"description": "",
"type": "LLMChain | BaseChain | Runnable"
},
{
"id": "llmChain_1-output-outputPrediction-string|json",
"name": "outputPrediction",
"label": "Output Prediction",
"description": "",
"type": "string | json"
}
],
"default": "llmChain"
}
],
"outputs": {
"output": "llmChain"
},
"selected": false
},
"width": 300,
"height": 507,
"selected": false,
"positionAbsolute": {
"x": 1397.4322987499522,
"y": 391.07025959430916
},
"dragging": false
},
{
"id": "promptTemplate_0",
"position": {
"x": 931.1065881092055,
"y": 430.4226402390982
},
"type": "customNode",
"data": {
"id": "promptTemplate_0",
"label": "Prompt Template",
"version": 1,
"name": "promptTemplate",
"type": "PromptTemplate",
"baseClasses": [
"PromptTemplate",
"BaseStringPromptTemplate",
"BasePromptTemplate",
"Runnable"
],
"category": "Prompts",
"description": "Schema to represent a basic prompt for an LLM",
"inputParams": [
{
"label": "Template",
"name": "template",
"type": "string",
"rows": 4,
"placeholder": "What is a good name for a company that makes {product}?",
"id": "promptTemplate_0-input-template-string"
},
{
"label": "Format Prompt Values",
"name": "promptValues",
"type": "json",
"optional": true,
"acceptVariable": true,
"list": true,
"id": "promptTemplate_0-input-promptValues-json"
}
],
"inputAnchors": [],
"inputs": {
"template": "https://www.scholastic.com/teachers/teaching-tools/articles/high-school-book-report-template.html\nhas template for book report, write a book report for {title} at say a given grade level, and make it legit so it does not sound ai created. {grade}",
"promptValues": "{\"title\":\"{{llmChain_0.data.instance}}\"}"
},
"outputAnchors": [
{
"id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable",
"name": "promptTemplate",
"label": "PromptTemplate",
"description": "Schema to represent a basic prompt for an LLM",
"type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 512,
"selected": false,
"positionAbsolute": {
"x": 931.1065881092055,
"y": 430.4226402390982
},
"dragging": false
}
],
"edges": [
{
"source": "promptTemplate_1",
"sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate"
},
{
"source": "chatDeepseek_0",
"sourceHandle": "chatDeepseek_0-output-chatDeepseek-chatDeepseek|BaseChatModel|BaseLanguageModel|Runnable",
"target": "llmChain_0",
"targetHandle": "llmChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatDeepseek_0-chatDeepseek_0-output-chatDeepseek-chatDeepseek|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel"
},
{
"source": "chatDeepseek_1",
"sourceHandle": "chatDeepseek_1-output-chatDeepseek-chatDeepseek|BaseChatModel|BaseLanguageModel|Runnable",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatDeepseek_1-chatDeepseek_1-output-chatDeepseek-chatDeepseek|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel"
},
{
"source": "promptTemplate_0",
"sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable",
"target": "llmChain_1",
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate",
"type": "buttonedge",
"id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate"
},
{
"source": "llmChain_0",
"sourceHandle": "llmChain_0-output-outputPrediction-string|json",
"target": "promptTemplate_0",
"targetHandle": "promptTemplate_0-input-promptValues-json",
"type": "buttonedge",
"id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-promptTemplate_0-promptTemplate_0-input-promptValues-json"
}
]
}