Spaces:
Paused
Paused
| { | |
| "data": { | |
| "edges": [ | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "ChatInput", | |
| "id": "ChatInput-WGsAl", | |
| "name": "message", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "urls", | |
| "id": "URLComponent-ZKgiv", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-ChatInput-WGsAl{œdataTypeœ:œChatInputœ,œidœ:œChatInput-WGsAlœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-URLComponent-ZKgiv{œfieldNameœ:œurlsœ,œidœ:œURLComponent-ZKgivœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "ChatInput-WGsAl", | |
| "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-WGsAlœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "URLComponent-ZKgiv", | |
| "targetHandle": "{œfieldNameœ:œurlsœ,œidœ:œURLComponent-ZKgivœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "URLComponent", | |
| "id": "URLComponent-ZKgiv", | |
| "name": "data", | |
| "output_types": [ | |
| "Data" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "input_data", | |
| "id": "ParserComponent-ZfeDa", | |
| "inputTypes": [ | |
| "DataFrame", | |
| "Data" | |
| ], | |
| "type": "other" | |
| } | |
| }, | |
| "id": "reactflow__edge-URLComponent-ZKgiv{œdataTypeœ:œURLComponentœ,œidœ:œURLComponent-ZKgivœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParserComponent-ZfeDa{œfieldNameœ:œinput_dataœ,œidœ:œParserComponent-ZfeDaœ,œinputTypesœ:[œDataFrameœ,œDataœ],œtypeœ:œotherœ}", | |
| "selected": false, | |
| "source": "URLComponent-ZKgiv", | |
| "sourceHandle": "{œdataTypeœ:œURLComponentœ,œidœ:œURLComponent-ZKgivœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}", | |
| "target": "ParserComponent-ZfeDa", | |
| "targetHandle": "{œfieldNameœ:œinput_dataœ,œidœ:œParserComponent-ZfeDaœ,œinputTypesœ:[œDataFrameœ,œDataœ],œtypeœ:œotherœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "ParserComponent", | |
| "id": "ParserComponent-ZfeDa", | |
| "name": "parsed_text", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "parsed_text", | |
| "id": "Prompt-DaEQe", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-ParserComponent-ZfeDa{œdataTypeœ:œParserComponentœ,œidœ:œParserComponent-ZfeDaœ,œnameœ:œparsed_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-DaEQe{œfieldNameœ:œparsed_textœ,œidœ:œPrompt-DaEQeœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "ParserComponent-ZfeDa", | |
| "sourceHandle": "{œdataTypeœ:œParserComponentœ,œidœ:œParserComponent-ZfeDaœ,œnameœ:œparsed_textœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-DaEQe", | |
| "targetHandle": "{œfieldNameœ:œparsed_textœ,œidœ:œPrompt-DaEQeœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-DaEQe", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "system_message", | |
| "id": "AnthropicModel-ff7II", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-DaEQe{œdataTypeœ:œPromptœ,œidœ:œPrompt-DaEQeœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-ff7II{œfieldNameœ:œsystem_messageœ,œidœ:œAnthropicModel-ff7IIœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "Prompt-DaEQe", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-DaEQeœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "AnthropicModel-ff7II", | |
| "targetHandle": "{œfieldNameœ:œsystem_messageœ,œidœ:œAnthropicModel-ff7IIœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-DaEQe", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "input_value", | |
| "id": "AnthropicModel-ff7II", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-DaEQe{œdataTypeœ:œPromptœ,œidœ:œPrompt-DaEQeœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-ff7II{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-ff7IIœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "Prompt-DaEQe", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-DaEQeœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "AnthropicModel-ff7II", | |
| "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-ff7IIœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "AnthropicModel", | |
| "id": "AnthropicModel-ff7II", | |
| "name": "text_output", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "article", | |
| "id": "Prompt-qLrNR", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-AnthropicModel-ff7II{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-ff7IIœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-qLrNR{œfieldNameœ:œarticleœ,œidœ:œPrompt-qLrNRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "AnthropicModel-ff7II", | |
| "sourceHandle": "{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-ff7IIœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-qLrNR", | |
| "targetHandle": "{œfieldNameœ:œarticleœ,œidœ:œPrompt-qLrNRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-qLrNR", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "system_message", | |
| "id": "AnthropicModel-6Akp4", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-qLrNR{œdataTypeœ:œPromptœ,œidœ:œPrompt-qLrNRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-6Akp4{œfieldNameœ:œsystem_messageœ,œidœ:œAnthropicModel-6Akp4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "Prompt-qLrNR", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-qLrNRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "AnthropicModel-6Akp4", | |
| "targetHandle": "{œfieldNameœ:œsystem_messageœ,œidœ:œAnthropicModel-6Akp4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-qLrNR", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "input_value", | |
| "id": "AnthropicModel-6Akp4", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-qLrNR{œdataTypeœ:œPromptœ,œidœ:œPrompt-qLrNRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-6Akp4{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-6Akp4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "Prompt-qLrNR", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-qLrNRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "AnthropicModel-6Akp4", | |
| "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-6Akp4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "AnthropicModel", | |
| "id": "AnthropicModel-ff7II", | |
| "name": "text_output", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "analysis1", | |
| "id": "Prompt-l52jJ", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-AnthropicModel-ff7II{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-ff7IIœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-l52jJ{œfieldNameœ:œanalysis1œ,œidœ:œPrompt-l52jJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "AnthropicModel-ff7II", | |
| "sourceHandle": "{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-ff7IIœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-l52jJ", | |
| "targetHandle": "{œfieldNameœ:œanalysis1œ,œidœ:œPrompt-l52jJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "AnthropicModel", | |
| "id": "AnthropicModel-6Akp4", | |
| "name": "text_output", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "analysis3", | |
| "id": "Prompt-l52jJ", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-AnthropicModel-6Akp4{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-6Akp4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-l52jJ{œfieldNameœ:œanalysis3œ,œidœ:œPrompt-l52jJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "AnthropicModel-6Akp4", | |
| "sourceHandle": "{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-6Akp4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-l52jJ", | |
| "targetHandle": "{œfieldNameœ:œanalysis3œ,œidœ:œPrompt-l52jJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "ParserComponent", | |
| "id": "ParserComponent-ZfeDa", | |
| "name": "parsed_text", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "article", | |
| "id": "Prompt-l52jJ", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-ParserComponent-ZfeDa{œdataTypeœ:œParserComponentœ,œidœ:œParserComponent-ZfeDaœ,œnameœ:œparsed_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-l52jJ{œfieldNameœ:œarticleœ,œidœ:œPrompt-l52jJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "ParserComponent-ZfeDa", | |
| "sourceHandle": "{œdataTypeœ:œParserComponentœ,œidœ:œParserComponent-ZfeDaœ,œnameœ:œparsed_textœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-l52jJ", | |
| "targetHandle": "{œfieldNameœ:œarticleœ,œidœ:œPrompt-l52jJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-l52jJ", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "system_message", | |
| "id": "AnthropicModel-BFQkw", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-l52jJ{œdataTypeœ:œPromptœ,œidœ:œPrompt-l52jJœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-BFQkw{œfieldNameœ:œsystem_messageœ,œidœ:œAnthropicModel-BFQkwœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "Prompt-l52jJ", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-l52jJœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "AnthropicModel-BFQkw", | |
| "targetHandle": "{œfieldNameœ:œsystem_messageœ,œidœ:œAnthropicModel-BFQkwœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-l52jJ", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "input_value", | |
| "id": "AnthropicModel-BFQkw", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-l52jJ{œdataTypeœ:œPromptœ,œidœ:œPrompt-l52jJœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-BFQkw{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-BFQkwœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "Prompt-l52jJ", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-l52jJœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "AnthropicModel-BFQkw", | |
| "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-BFQkwœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "AnthropicModel", | |
| "id": "AnthropicModel-ff7II", | |
| "name": "text_output", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "analysis1", | |
| "id": "Prompt-L3qXf", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-AnthropicModel-ff7II{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-ff7IIœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-L3qXf{œfieldNameœ:œanalysis1œ,œidœ:œPrompt-L3qXfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "AnthropicModel-ff7II", | |
| "sourceHandle": "{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-ff7IIœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-L3qXf", | |
| "targetHandle": "{œfieldNameœ:œanalysis1œ,œidœ:œPrompt-L3qXfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "AnthropicModel", | |
| "id": "AnthropicModel-6Akp4", | |
| "name": "text_output", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "analysis3", | |
| "id": "Prompt-L3qXf", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-AnthropicModel-6Akp4{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-6Akp4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-L3qXf{œfieldNameœ:œanalysis3œ,œidœ:œPrompt-L3qXfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "AnthropicModel-6Akp4", | |
| "sourceHandle": "{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-6Akp4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-L3qXf", | |
| "targetHandle": "{œfieldNameœ:œanalysis3œ,œidœ:œPrompt-L3qXfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "AnthropicModel", | |
| "id": "AnthropicModel-BFQkw", | |
| "name": "text_output", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "analysis4", | |
| "id": "Prompt-L3qXf", | |
| "inputTypes": [ | |
| "Message" | |
| ], | |
| "type": "str" | |
| } | |
| }, | |
| "id": "reactflow__edge-AnthropicModel-BFQkw{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-BFQkwœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-L3qXf{œfieldNameœ:œanalysis4œ,œidœ:œPrompt-L3qXfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
| "selected": false, | |
| "source": "AnthropicModel-BFQkw", | |
| "sourceHandle": "{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-BFQkwœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "Prompt-L3qXf", | |
| "targetHandle": "{œfieldNameœ:œanalysis4œ,œidœ:œPrompt-L3qXfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" | |
| }, | |
| { | |
| "animated": false, | |
| "className": "", | |
| "data": { | |
| "sourceHandle": { | |
| "dataType": "Prompt", | |
| "id": "Prompt-L3qXf", | |
| "name": "prompt", | |
| "output_types": [ | |
| "Message" | |
| ] | |
| }, | |
| "targetHandle": { | |
| "fieldName": "input_value", | |
| "id": "ChatOutput-7XrLH", | |
| "inputTypes": [ | |
| "Data", | |
| "DataFrame", | |
| "Message" | |
| ], | |
| "type": "other" | |
| } | |
| }, | |
| "id": "reactflow__edge-Prompt-L3qXf{œdataTypeœ:œPromptœ,œidœ:œPrompt-L3qXfœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-7XrLH{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-7XrLHœ,œinputTypesœ:[œDataœ,œDataFrameœ,œMessageœ],œtypeœ:œotherœ}", | |
| "selected": false, | |
| "source": "Prompt-L3qXf", | |
| "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-L3qXfœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
| "target": "ChatOutput-7XrLH", | |
| "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-7XrLHœ,œinputTypesœ:[œDataœ,œDataFrameœ,œMessageœ],œtypeœ:œotherœ}" | |
| } | |
| ], | |
| "nodes": [ | |
| { | |
| "data": { | |
| "id": "Prompt-DaEQe", | |
| "node": { | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "beta": false, | |
| "conditional_paths": [], | |
| "custom_fields": { | |
| "template": [ | |
| "parsed_text" | |
| ] | |
| }, | |
| "description": "Create a prompt template with dynamic variables.", | |
| "display_name": "Prompt", | |
| "documentation": "", | |
| "edited": false, | |
| "error": null, | |
| "field_order": [ | |
| "template", | |
| "tool_placeholder" | |
| ], | |
| "frozen": false, | |
| "full_path": null, | |
| "icon": "prompts", | |
| "is_composition": null, | |
| "is_input": null, | |
| "is_output": null, | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "name": "", | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Prompt Message", | |
| "hidden": false, | |
| "method": "build_prompt", | |
| "name": "prompt", | |
| "options": null, | |
| "required_inputs": null, | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "priority": null, | |
| "template": { | |
| "_type": "Component", | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n 
frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" | |
| }, | |
| "parsed_text": { | |
| "advanced": false, | |
| "display_name": "parsed_text", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "parsed_text", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "template": { | |
| "_input_type": "PromptInput", | |
| "advanced": false, | |
| "display_name": "Template", | |
| "dynamic": false, | |
| "info": "", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "template", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "type": "prompt", | |
| "value": "IMPORTANTISIMO: LA FECHA ACTUAL ES 2025.\nNo incluyas reflexiones internas, procesos de pensamiento o marcadores HTML/Markdown en tu respuesta. Solo devuelve el contenido final estructurado.\n## Rol\nEres un **experto en periodismo, información, ciencia y desinformación**.\nUtilizas las TOOLS con los siguientes metodos:\nSCRAPE (para obtener la informaciòn del url brindado por el usuario)\nSEARCH_SERPER (para buscar informaciòn relevante cuando lo creas necesario. Tambien para buscar las Fuentes prioritarias para contraste)\nFETCH_CONTENT y AS_DATAFRAME (para utilizar wikipedia)\n\nTu tarea central es **analizar el contenido de las URLs** (noticias, artículos, publicaciones, etc.) que te proporcione el usuario y producir un informe detallado.\n\n## Objetivos\n1. **Sesgo**\n- **POSITIVO** → tono perceptiblemente positivo del emisor.\n- **NEGATIVO** → tono perceptiblemente negativo.\n- **NEUTRO** → imposible clasificar como positivo o negativo.\n\n2. **Matices**\nSiempre identifica y enumera cualquier matiz emocional o actitudinal presente (puedes añadir otros además de los listados):\n- Agresividad\n- Tristeza\n- Polarización\n- Alegría\n- Miedo\n- Solidaridad\n- Desconfianza\n- Cooperación\n\n3. **Resumen del corpus**\nRedacta un resumen conciso y de alta calidad.\n\n4. **Aclaraciones**\nExamina detenidamente el texto para detectar falacias o mentiras.\n- Enumera cada falacia.\n- Describe brevemente por qué es una falacia.\n- Desmiente cada mentira aportando la evidencia correspondiente.\n\n5. **Contraste de fuentes**\nBusca en internet información sobre el mismo tema y proporciona una **lista de fuentes** con enlace para verificación (ver sección de fuentes prioritarias).\n\n6. **Info. del Autor**\nEn los casos en los que el autor se encuentre explicitamente mencionado, realiza una investigaciòn del mismo y brinda un resume del mismo + URLs de otras noticias/publicaciones/articulos/etc, que puedas encontrar sobre el autor. 
Si el autor no se especifica, olvida este punto.\n\n---\n\n## Ejemplos de mentiras desmentidas\n\n### “Los hornos microondas causan cáncer”\n**Afirmación:** “La radiación de los microondas es suficientemente intensa para dañar el ADN y producir cáncer en los alimentos que cocinamos.” (snopes.com)\n**Realidad:** Los microondas emplean radiación no ionizante, sin energía suficiente para dañar el ADN. Estudios y agencias sanitarias confirman su inocuidad si se usan recipientes aptos. (politifact.com)\n\n### “Sólo usamos el 10 % de nuestro cerebro”\n**Afirmación:** “Apenas empleamos una décima parte de nuestra capacidad cerebral; si activáramos el resto, lograríamos poderes extraordinarios.” (snopes.com)\n**Realidad:** Escáneres cerebrales (fMRI, PET) muestran que prácticamente todas las áreas están activas en diversas tareas. No existen zonas ‘inactivas’ equivalentes al 90 % restante. (en.wikipedia.org)\n\n### “Comer muchas zanahorias te da visión nocturna”\n**Afirmación:** “Una dieta rica en zanahorias otorga visión de gato en la oscuridad.” (snopes.com)\n**Realidad:** El beta-caroteno sólo ayuda a mantener la visión normal cuando hay déficit de vitamina A; el mito surgió como propaganda británica en la II Guerra Mundial. (smithsonianmag.com)\n\n### “El 5G causa o agrava el COVID-19”\n**Afirmación:** “La expansión de redes 5G es responsable de la aparición o propagación del coronavirus.” (snopes.com)\n**Realidad:** No existe vínculo entre tecnología de comunicaciones y enfermedades víricas; la hipótesis carece de fundamento biológico. (time.com)\n\n### “Las vacunas provocan autismo”\n**Afirmación:** “El tiomersal en las vacunas causa autismo infantil.” (snopes.com)\n**Realidad:** Amplias revisiones epidemiológicas descartan relación causal entre vacunas y autismo. 
(mayoclinichealthsystem.org)\n\n### “La fluorización del agua reduce el coeficiente intelectual”\n**Afirmación:** “El fluoruro en el agua potable baja el IQ de los niños.” (snopes.com)\n**Realidad:** Los estudios que muestran tal efecto usan exposiciones muy superiores a las recomendadas; los niveles óptimos (0,7 mg/L) son seguros. (en.wikipedia.org)\n\n---\n\n## Fuentes prioritarias para contraste (no excluyentes) (recuerda que debes brindar noticias, articulos, posteos, etc. concretos)\nSnopes – https://www.snopes.com/\nPolitiFact – https://www.politifact.com/\nMayo Clinic Health System – https://www.mayoclinichealthsystem.org/\nFactCheck.org – https://www.factcheck.org/\nReuters Fact Check – https://www.reuters.com/fact-check/\nAP Fact Check – https://apnews.com/ap-fact-check\nAFP Fact Check – https://factcheck.afp.com/\nFull Fact – https://fullfact.org/\nCheck Your Fact – https://checkyourfact.com/\nAfrica Check – https://africacheck.org/\nCenters for Disease Control and Prevention (CDC) – https://www.cdc.gov/\nWorld Health Organization (WHO) – https://www.who.int/\nCochrane Library – https://www.cochranelibrary.com/\nNPR Fact Check – https://www.npr.org/sections/politics-fact-check\nFirst Draft – https://firstdraftnews.org/\nInternational Fact-Checking Network (IFCN) – https://www.poynter.org/ifcn/\nEuropean Fact-Checking Standards Network (EFCSN) – https://efcsn.com/\n\n---\n\n## Comportamiento de la Respuesta\n**¡Instrucción Crítica!** No reveles tu proceso de pensamiento, los pasos intermedios, ni las llamadas a las herramientas (como `SCRAPE` o `SEARCH_SERPER`). Tu única salida debe ser el informe final, siguiendo estrictamente la estructura definida en la sección 'Formato de salida'. No incluyas frases como \"Voy a analizar...\" o \"Usaré la herramienta...\".\n\n## Formato de salida (estrictamente. 
Presentalo con la estructura de un informe profesional en un formato markdown elegante)\n\n####Título de la noticia: <TÍTULO>\n(<URL>)\n\nSesgo detectado : <POSITIVO | NEGATIVO | NEUTRO> + <Breve Explicaciòn del ¿Por Que?>\nMatices detectados : <matiz1>, <matiz2>, …\nResumen del corpus :\n{parsed_text}\n\nAclaraciones :\n<Falacia o mentira 1>: <Explicación breve / desmentido>\n<Falacia o mentira 2>: <Explicación breve / desmentido>\n…\nFuentes que puedes investigar :\n<Título fuente 1> – <URL 1>\n<Título fuente 2> – <URL 2>\n…\nInvestigaciòn del Autor\nResume\n\nURL1\nURL2\n..." | |
| }, | |
| "tool_placeholder": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Tool Placeholder", | |
| "dynamic": false, | |
| "info": "A placeholder input for tool mode.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "tool_placeholder", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": true, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "Prompt" | |
| }, | |
| "dragging": false, | |
| "id": "Prompt-DaEQe", | |
| "measured": { | |
| "height": 411, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 2318.597592027175, | |
| "y": 1224.0767908780801 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "ChatInput-WGsAl", | |
| "node": { | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "beta": false, | |
| "category": "inputs", | |
| "conditional_paths": [], | |
| "custom_fields": {}, | |
| "description": "Get chat inputs from the Playground.", | |
| "display_name": "Chat Input", | |
| "documentation": "", | |
| "edited": false, | |
| "field_order": [ | |
| "input_value", | |
| "should_store_message", | |
| "sender", | |
| "sender_name", | |
| "session_id", | |
| "files", | |
| "background_color", | |
| "chat_icon", | |
| "text_color" | |
| ], | |
| "frozen": false, | |
| "icon": "MessagesSquare", | |
| "key": "ChatInput", | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": true, | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Message", | |
| "method": "message_response", | |
| "name": "message", | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "score": 0.0020353564437605998, | |
| "template": { | |
| "_type": "Component", | |
| "background_color": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Background Color", | |
| "dynamic": false, | |
| "info": "The background color of the icon.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "background_color", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "chat_icon": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Icon", | |
| "dynamic": false, | |
| "info": "The icon of the message.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "chat_icon", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" | |
| }, | |
| "files": { | |
| "_input_type": "FileInput", | |
| "advanced": true, | |
| "display_name": "Files", | |
| "dynamic": false, | |
| "fileTypes": [ | |
| "txt", | |
| "md", | |
| "mdx", | |
| "csv", | |
| "json", | |
| "yaml", | |
| "yml", | |
| "xml", | |
| "html", | |
| "htm", | |
| "pdf", | |
| "docx", | |
| "py", | |
| "sh", | |
| "sql", | |
| "js", | |
| "ts", | |
| "tsx", | |
| "jpg", | |
| "jpeg", | |
| "png", | |
| "bmp", | |
| "image" | |
| ], | |
| "file_path": "", | |
| "info": "Files to be sent with the message.", | |
| "list": true, | |
| "list_add_label": "Add More", | |
| "name": "files", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "temp_file": true, | |
| "title_case": false, | |
| "trace_as_metadata": true, | |
| "type": "file", | |
| "value": "" | |
| }, | |
| "input_value": { | |
| "_input_type": "MultilineInput", | |
| "advanced": false, | |
| "copy_field": false, | |
| "display_name": "Text", | |
| "dynamic": false, | |
| "info": "Message to be passed as input.", | |
| "input_types": [], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "input_value", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "sender": { | |
| "_input_type": "DropdownInput", | |
| "advanced": true, | |
| "combobox": false, | |
| "dialog_inputs": {}, | |
| "display_name": "Sender Type", | |
| "dynamic": false, | |
| "info": "Type of sender.", | |
| "name": "sender", | |
| "options": [ | |
| "Machine", | |
| "User" | |
| ], | |
| "options_metadata": [], | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "toggle": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "User" | |
| }, | |
| "sender_name": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Sender Name", | |
| "dynamic": false, | |
| "info": "Name of the sender.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "sender_name", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "User" | |
| }, | |
| "session_id": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Session ID", | |
| "dynamic": false, | |
| "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "session_id", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "should_store_message": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Store Messages", | |
| "dynamic": false, | |
| "info": "Store the message in the history.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "should_store_message", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| }, | |
| "text_color": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Text Color", | |
| "dynamic": false, | |
| "info": "The text color of the name.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "text_color", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": false, | |
| "type": "ChatInput" | |
| }, | |
| "dragging": false, | |
| "id": "ChatInput-WGsAl", | |
| "measured": { | |
| "height": 66, | |
| "width": 192 | |
| }, | |
| "position": { | |
| "x": 1373.5683878740256, | |
| "y": 1196.96974299611 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "URLComponent-ZKgiv", | |
| "node": { | |
| "base_classes": [ | |
| "Data", | |
| "DataFrame", | |
| "Message" | |
| ], | |
| "beta": false, | |
| "category": "data", | |
| "conditional_paths": [], | |
| "custom_fields": {}, | |
| "description": "Load and parse child links from a root URL recursively", | |
| "display_name": "URL", | |
| "documentation": "", | |
| "edited": false, | |
| "field_order": [ | |
| "urls", | |
| "max_depth", | |
| "prevent_outside", | |
| "use_async", | |
| "format", | |
| "timeout", | |
| "headers" | |
| ], | |
| "frozen": false, | |
| "icon": "layout-template", | |
| "key": "URLComponent", | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Data", | |
| "hidden": false, | |
| "method": "fetch_content", | |
| "name": "data", | |
| "selected": "Data", | |
| "tool_mode": true, | |
| "types": [ | |
| "Data" | |
| ], | |
| "value": "__UNDEFINED__" | |
| }, | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Message", | |
| "hidden": false, | |
| "method": "fetch_content_text", | |
| "name": "text", | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| }, | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "DataFrame", | |
| "hidden": false, | |
| "method": "as_dataframe", | |
| "name": "dataframe", | |
| "selected": "DataFrame", | |
| "tool_mode": true, | |
| "types": [ | |
| "DataFrame" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "score": 2.220446049250313e-16, | |
| "template": { | |
| "_type": "Component", | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "import re\n\nimport httpx\nfrom bs4 import BeautifulSoup\nfrom langchain_community.document_loaders import RecursiveUrlLoader\nfrom loguru import logger\n\nfrom langflow.custom.custom_component.component import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs.inputs import TableInput\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.services.deps import get_settings_service\n\n\nclass URLComponent(Component):\n \"\"\"A component that loads and parses child links from a root URL recursively.\"\"\"\n\n display_name = \"URL\"\n description = \"Load and parse child links from a root URL recursively\"\n icon = \"layout-template\"\n name = \"URLComponent\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs to crawl recursively, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n placeholder=\"Enter a URL...\",\n list_add_label=\"Add URL\",\n ),\n IntInput(\n name=\"max_depth\",\n display_name=\"Max Depth\",\n info=(\n \"Controls how many 'clicks' away from the initial page the crawler will go:\\n\"\n \"- depth 1: only the initial page\\n\"\n \"- depth 2: initial page + all pages linked directly from it\\n\"\n \"- depth 3: initial page + direct links + links found on those direct link pages\\n\"\n \"Note: This is about link traversal, not URL path depth.\"\n ),\n value=1,\n required=False,\n ),\n BoolInput(\n name=\"prevent_outside\",\n display_name=\"Prevent Outside\",\n info=(\n \"If enabled, only crawls URLs within the same domain as the root URL. 
\"\n \"This helps prevent the crawler from going to external websites.\"\n ),\n value=True,\n required=False,\n advanced=True,\n ),\n BoolInput(\n name=\"use_async\",\n display_name=\"Use Async\",\n info=(\n \"If enabled, uses asynchronous loading which can be significantly faster \"\n \"but might use more system resources.\"\n ),\n value=True,\n required=False,\n advanced=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output Format\",\n info=\"Output Format. Use 'Text' to extract the text from the HTML or 'HTML' for the raw HTML content.\",\n options=[\"Text\", \"HTML\"],\n value=\"Text\",\n advanced=True,\n ),\n IntInput(\n name=\"timeout\",\n display_name=\"Timeout\",\n info=\"Timeout for the request in seconds.\",\n value=30,\n required=False,\n advanced=True,\n ),\n TableInput(\n name=\"headers\",\n display_name=\"Headers\",\n info=\"The headers to send with the request\",\n table_schema=[\n {\n \"name\": \"key\",\n \"display_name\": \"Header\",\n \"type\": \"str\",\n \"description\": \"Header name\",\n },\n {\n \"name\": \"value\",\n \"display_name\": \"Value\",\n \"type\": \"str\",\n \"description\": \"Header value\",\n },\n ],\n value=[{\"key\": \"User-Agent\", \"value\": get_settings_service().settings.user_agent}],\n advanced=True,\n input_types=[\"DataFrame\"],\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"fetch_content\"),\n Output(display_name=\"Message\", name=\"text\", method=\"fetch_content_text\"),\n Output(display_name=\"DataFrame\", name=\"dataframe\", method=\"as_dataframe\"),\n ]\n\n def validate_url(self, string: str) -> bool:\n \"\"\"Validates if the given string matches URL pattern.\"\"\"\n url_regex = re.compile(\n r\"^(https?:\\/\\/)?\" r\"(www\\.)?\" r\"([a-zA-Z0-9.-]+)\" r\"(\\.[a-zA-Z]{2,})?\" r\"(:\\d+)?\" r\"(\\/[^\\s]*)?$\",\n re.IGNORECASE,\n )\n return bool(url_regex.match(string))\n\n def ensure_url(self, url: str) -> str:\n \"\"\"Ensures the given string is a valid URL.\"\"\"\n 
if not url.startswith((\"http://\", \"https://\")):\n url = \"http://\" + url\n\n if not self.validate_url(url):\n error_msg = \"Invalid URL - \" + url\n raise ValueError(error_msg)\n\n return url\n\n def fetch_content(self) -> list[Data]:\n \"\"\"Load documents from the URLs.\"\"\"\n all_docs = []\n data = []\n try:\n urls = list({self.ensure_url(url.strip()) for url in self.urls if url.strip()})\n\n no_urls_msg = \"No valid URLs provided.\"\n if not urls:\n raise ValueError(no_urls_msg)\n\n # If there's only one URL, we'll make sure to propagate any errors\n single_url = len(urls) == 1\n\n for processed_url in urls:\n msg = f\"Loading documents from {processed_url}\"\n logger.info(msg)\n\n # Create headers dictionary\n headers_dict = {header[\"key\"]: header[\"value\"] for header in self.headers}\n\n # Configure RecursiveUrlLoader with httpx-compatible settings\n extractor = (lambda x: x) if self.format == \"HTML\" else (lambda x: BeautifulSoup(x, \"lxml\").get_text())\n\n # Modified settings for RecursiveUrlLoader\n # Note: We need to pass a compatible client or settings to RecursiveUrlLoader\n # This will depend on how RecursiveUrlLoader is implemented\n loader = RecursiveUrlLoader(\n url=processed_url,\n max_depth=self.max_depth,\n prevent_outside=self.prevent_outside,\n use_async=self.use_async,\n continue_on_failure=not single_url,\n extractor=extractor,\n timeout=self.timeout,\n headers=headers_dict,\n )\n\n try:\n docs = loader.load()\n if not docs:\n msg = f\"No documents found for {processed_url}\"\n logger.warning(msg)\n if single_url:\n message = f\"No documents found for {processed_url}\"\n raise ValueError(message)\n else:\n msg = f\"Found {len(docs)} documents from {processed_url}\"\n logger.info(msg)\n all_docs.extend(docs)\n except (httpx.HTTPError, httpx.RequestError) as e:\n msg = f\"Error loading documents from {processed_url}: {e}\"\n logger.exception(msg)\n if single_url:\n raise # Re-raise the exception if it's the only URL\n except 
UnicodeDecodeError as e:\n msg = f\"Error decoding content from {processed_url}: {e}\"\n logger.error(msg)\n if single_url:\n raise # Re-raise the exception if it's the only URL\n except Exception as e:\n msg = f\"Unexpected error loading documents from {processed_url}: {e}\"\n logger.exception(msg)\n if single_url:\n raise # Re-raise the exception if it's the only URL\n\n data = [Data(text=doc.page_content, **doc.metadata) for doc in all_docs]\n self.status = data\n\n except Exception as e:\n error_msg = e.message if hasattr(e, \"message\") else e\n msg = f\"Error loading documents: {error_msg!s}\"\n logger.exception(msg)\n raise ValueError(msg) from e\n\n self.status = data\n return data\n\n def fetch_content_text(self) -> Message:\n \"\"\"Load documents and return their text content.\"\"\"\n data = self.fetch_content()\n result_string = data_to_text(\"{text}\", data)\n self.status = result_string\n return Message(text=result_string)\n\n def as_dataframe(self) -> DataFrame:\n \"\"\"Convert the documents to a DataFrame.\"\"\"\n data_frame = DataFrame(self.fetch_content())\n self.status = data_frame\n return data_frame\n" | |
| }, | |
| "format": { | |
| "_input_type": "DropdownInput", | |
| "advanced": true, | |
| "combobox": false, | |
| "dialog_inputs": {}, | |
| "display_name": "Output Format", | |
| "dynamic": false, | |
| "info": "Output Format. Use 'Text' to extract the text from the HTML or 'HTML' for the raw HTML content.", | |
| "name": "format", | |
| "options": [ | |
| "Text", | |
| "HTML" | |
| ], | |
| "options_metadata": [], | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "toggle": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "Text" | |
| }, | |
| "headers": { | |
| "_input_type": "TableInput", | |
| "advanced": true, | |
| "display_name": "Headers", | |
| "dynamic": false, | |
| "info": "The headers to send with the request", | |
| "input_types": [ | |
| "DataFrame" | |
| ], | |
| "is_list": true, | |
| "list_add_label": "Add More", | |
| "name": "headers", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "table_icon": "Table", | |
| "table_schema": { | |
| "columns": [ | |
| { | |
| "default": "None", | |
| "description": "Header name", | |
| "disable_edit": false, | |
| "display_name": "Header", | |
| "edit_mode": "popover", | |
| "filterable": true, | |
| "formatter": "text", | |
| "hidden": false, | |
| "name": "key", | |
| "sortable": true, | |
| "type": "str" | |
| }, | |
| { | |
| "default": "None", | |
| "description": "Header value", | |
| "disable_edit": false, | |
| "display_name": "Value", | |
| "edit_mode": "popover", | |
| "filterable": true, | |
| "formatter": "text", | |
| "hidden": false, | |
| "name": "value", | |
| "sortable": true, | |
| "type": "str" | |
| } | |
| ] | |
| }, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "trigger_icon": "Table", | |
| "trigger_text": "Open table", | |
| "type": "table", | |
| "value": [ | |
| { | |
| "key": "User-Agent", | |
| "value": "langflow" | |
| } | |
| ] | |
| }, | |
| "max_depth": { | |
| "_input_type": "IntInput", | |
| "advanced": false, | |
| "display_name": "Max Depth", | |
| "dynamic": false, | |
| "info": "Controls how many 'clicks' away from the initial page the crawler will go:\n- depth 1: only the initial page\n- depth 2: initial page + all pages linked directly from it\n- depth 3: initial page + direct links + links found on those direct link pages\nNote: This is about link traversal, not URL path depth.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "max_depth", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "int", | |
| "value": 1 | |
| }, | |
| "prevent_outside": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Prevent Outside", | |
| "dynamic": false, | |
| "info": "If enabled, only crawls URLs within the same domain as the root URL. This helps prevent the crawler from going to external websites.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "prevent_outside", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| }, | |
| "timeout": { | |
| "_input_type": "IntInput", | |
| "advanced": true, | |
| "display_name": "Timeout", | |
| "dynamic": false, | |
| "info": "Timeout for the request in seconds.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "timeout", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "int", | |
| "value": 30 | |
| }, | |
| "urls": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": false, | |
| "display_name": "URLs", | |
| "dynamic": false, | |
| "info": "Enter one or more URLs to crawl recursively, by clicking the '+' button.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": true, | |
| "list_add_label": "Add URL", | |
| "load_from_db": false, | |
| "name": "urls", | |
| "placeholder": "Enter a URL...", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": true, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": [ | |
| "" | |
| ] | |
| }, | |
| "use_async": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Use Async", | |
| "dynamic": false, | |
| "info": "If enabled, uses asynchronous loading which can be significantly faster but might use more system resources.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "use_async", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "URLComponent" | |
| }, | |
| "dragging": false, | |
| "id": "URLComponent-ZKgiv", | |
| "measured": { | |
| "height": 435, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 1615.717150727325, | |
| "y": 978.3118478140434 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "ParserComponent-ZfeDa", | |
| "node": { | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "beta": false, | |
| "category": "processing", | |
| "conditional_paths": [], | |
| "custom_fields": {}, | |
| "description": "Format a DataFrame or Data object into text using a template. Enable 'Stringify' to convert input into a readable string instead.", | |
| "display_name": "Parser", | |
| "documentation": "", | |
| "edited": false, | |
| "field_order": [ | |
| "mode", | |
| "pattern", | |
| "input_data", | |
| "sep" | |
| ], | |
| "frozen": false, | |
| "icon": "braces", | |
| "key": "ParserComponent", | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Parsed Text", | |
| "hidden": false, | |
| "method": "parse_combined_text", | |
| "name": "parsed_text", | |
| "options": null, | |
| "required_inputs": null, | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "score": 0.001, | |
| "template": { | |
| "_type": "Component", | |
| "clean_data": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Clean Data", | |
| "dynamic": false, | |
| "info": "Enable to clean the data by removing empty rows and lines in each cell of the DataFrame/Data object.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "clean_data", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "import json\nfrom typing import Any\n\nfrom langflow.custom import Component\nfrom langflow.io import (\n BoolInput,\n HandleInput,\n MessageTextInput,\n MultilineInput,\n Output,\n TabInput,\n)\nfrom langflow.schema import Data, DataFrame\nfrom langflow.schema.message import Message\n\n\nclass ParserComponent(Component):\n display_name = \"Parser\"\n description = (\n \"Format a DataFrame or Data object into text using a template. \"\n \"Enable 'Stringify' to convert input into a readable string instead.\"\n )\n icon = \"braces\"\n\n inputs = [\n TabInput(\n name=\"mode\",\n display_name=\"Mode\",\n options=[\"Parser\", \"Stringify\"],\n value=\"Parser\",\n info=\"Convert into raw string instead of using a template.\",\n real_time_refresh=True,\n ),\n MultilineInput(\n name=\"pattern\",\n display_name=\"Template\",\n info=(\n \"Use variables within curly brackets to extract column values for DataFrames \"\n \"or key values for Data.\"\n \"For example: `Name: {Name}, Age: {Age}, Country: {Country}`\"\n ),\n value=\"Text: {text}\", # Example default\n dynamic=True,\n show=True,\n required=True,\n ),\n HandleInput(\n name=\"input_data\",\n display_name=\"Data or DataFrame\",\n input_types=[\"DataFrame\", \"Data\"],\n info=\"Accepts either a DataFrame or a Data object.\",\n required=True,\n ),\n MessageTextInput(\n name=\"sep\",\n display_name=\"Separator\",\n advanced=True,\n value=\"\\n\",\n info=\"String used to separate rows/items.\",\n ),\n ]\n\n outputs = [\n Output(\n display_name=\"Parsed Text\",\n name=\"parsed_text\",\n info=\"Formatted text output.\",\n method=\"parse_combined_text\",\n ),\n ]\n\n def update_build_config(self, build_config, field_value, field_name=None):\n \"\"\"Dynamically hide/show `template` and enforce requirement based on `stringify`.\"\"\"\n if field_name == \"mode\":\n build_config[\"pattern\"][\"show\"] = self.mode == \"Parser\"\n build_config[\"pattern\"][\"required\"] = self.mode == \"Parser\"\n if field_value:\n 
clean_data = BoolInput(\n name=\"clean_data\",\n display_name=\"Clean Data\",\n info=(\n \"Enable to clean the data by removing empty rows and lines \"\n \"in each cell of the DataFrame/ Data object.\"\n ),\n value=True,\n advanced=True,\n required=False,\n )\n build_config[\"clean_data\"] = clean_data.to_dict()\n else:\n build_config.pop(\"clean_data\", None)\n\n return build_config\n\n def _clean_args(self):\n \"\"\"Prepare arguments based on input type.\"\"\"\n input_data = self.input_data\n\n match input_data:\n case list() if all(isinstance(item, Data) for item in input_data):\n msg = \"List of Data objects is not supported.\"\n raise ValueError(msg)\n case DataFrame():\n return input_data, None\n case Data():\n return None, input_data\n case dict() if \"data\" in input_data:\n try:\n if \"columns\" in input_data: # Likely a DataFrame\n return DataFrame.from_dict(input_data), None\n # Likely a Data object\n return None, Data(**input_data)\n except (TypeError, ValueError, KeyError) as e:\n msg = f\"Invalid structured input provided: {e!s}\"\n raise ValueError(msg) from e\n case _:\n msg = f\"Unsupported input type: {type(input_data)}. 
Expected DataFrame or Data.\"\n raise ValueError(msg)\n\n def parse_combined_text(self) -> Message:\n \"\"\"Parse all rows/items into a single text or convert input to string if `stringify` is enabled.\"\"\"\n # Early return for stringify option\n if self.mode == \"Stringify\":\n return self.convert_to_string()\n\n df, data = self._clean_args()\n\n lines = []\n if df is not None:\n for _, row in df.iterrows():\n formatted_text = self.pattern.format(**row.to_dict())\n lines.append(formatted_text)\n elif data is not None:\n formatted_text = self.pattern.format(**data.data)\n lines.append(formatted_text)\n\n combined_text = self.sep.join(lines)\n self.status = combined_text\n return Message(text=combined_text)\n\n def _safe_convert(self, data: Any) -> str:\n \"\"\"Safely convert input data to string.\"\"\"\n try:\n if isinstance(data, str):\n return data\n if isinstance(data, Message):\n return data.get_text()\n if isinstance(data, Data):\n return json.dumps(data.data)\n if isinstance(data, DataFrame):\n if hasattr(self, \"clean_data\") and self.clean_data:\n # Remove empty rows\n data = data.dropna(how=\"all\")\n # Remove empty lines in each cell\n data = data.replace(r\"^\\s*$\", \"\", regex=True)\n # Replace multiple newlines with a single newline\n data = data.replace(r\"\\n+\", \"\\n\", regex=True)\n return data.to_markdown(index=False)\n return str(data)\n except (ValueError, TypeError, AttributeError) as e:\n msg = f\"Error converting data: {e!s}\"\n raise ValueError(msg) from e\n\n def convert_to_string(self) -> Message:\n \"\"\"Convert input data to string with proper error handling.\"\"\"\n result = \"\"\n if isinstance(self.input_data, list):\n result = \"\\n\".join([self._safe_convert(item) for item in self.input_data])\n else:\n result = self._safe_convert(self.input_data)\n self.log(f\"Converted to string with length: {len(result)}\")\n\n message = Message(text=result)\n self.status = message\n return message\n" | |
| }, | |
| "input_data": { | |
| "_input_type": "HandleInput", | |
| "advanced": false, | |
| "display_name": "Data or DataFrame", | |
| "dynamic": false, | |
| "info": "Accepts either a DataFrame or a Data object.", | |
| "input_types": [ | |
| "DataFrame", | |
| "Data" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "input_data", | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "trace_as_metadata": true, | |
| "type": "other", | |
| "value": "" | |
| }, | |
| "mode": { | |
| "_input_type": "TabInput", | |
| "advanced": false, | |
| "display_name": "Mode", | |
| "dynamic": false, | |
| "info": "Convert into raw string instead of using a template.", | |
| "name": "mode", | |
| "options": [ | |
| "Parser", | |
| "Stringify" | |
| ], | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "tab", | |
| "value": "Stringify" | |
| }, | |
| "pattern": { | |
| "_input_type": "MultilineInput", | |
| "advanced": false, | |
| "copy_field": false, | |
| "display_name": "Template", | |
| "dynamic": true, | |
| "info": "Use variables within curly brackets to extract column values for DataFrames or key values for Data. For example: `Name: {Name}, Age: {Age}, Country: {Country}`", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "pattern", | |
| "placeholder": "", | |
| "required": false, | |
| "show": false, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "Text: {text}" | |
| }, | |
| "sep": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Separator", | |
| "dynamic": false, | |
| "info": "String used to separate rows/items.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "sep", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "\n" | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "ParserComponent" | |
| }, | |
| "dragging": false, | |
| "id": "ParserComponent-ZfeDa", | |
| "measured": { | |
| "height": 311, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 1959.7422728156764, | |
| "y": 1017.6456933412757 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "AnthropicModel-ff7II", | |
| "node": { | |
| "base_classes": [ | |
| "LanguageModel", | |
| "Message" | |
| ], | |
| "beta": false, | |
| "category": "models", | |
| "conditional_paths": [], | |
| "custom_fields": {}, | |
| "description": "Generate text using Anthropic Chat&Completion LLMs with prefill support.", | |
| "display_name": "Anthropic", | |
| "documentation": "", | |
| "edited": false, | |
| "field_order": [ | |
| "input_value", | |
| "system_message", | |
| "stream", | |
| "max_tokens", | |
| "model_name", | |
| "api_key", | |
| "temperature", | |
| "base_url", | |
| "tool_model_enabled", | |
| "prefill" | |
| ], | |
| "frozen": false, | |
| "icon": "Anthropic", | |
| "key": "AnthropicModel", | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Message", | |
| "hidden": false, | |
| "method": "text_response", | |
| "name": "text_output", | |
| "options": null, | |
| "required_inputs": [], | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| }, | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Language Model", | |
| "method": "build_model", | |
| "name": "model_output", | |
| "options": null, | |
| "required_inputs": [ | |
| "api_key" | |
| ], | |
| "selected": "LanguageModel", | |
| "tool_mode": true, | |
| "types": [ | |
| "LanguageModel" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "score": 0.0005851173668140926, | |
| "template": { | |
| "_type": "Component", | |
| "api_key": { | |
| "_input_type": "SecretStrInput", | |
| "advanced": false, | |
| "display_name": "Anthropic API Key", | |
| "dynamic": false, | |
| "info": "Your Anthropic API key.", | |
| "input_types": [], | |
| "load_from_db": true, | |
| "name": "api_key", | |
| "password": true, | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "base_url": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Anthropic API URL", | |
| "dynamic": false, | |
| "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "base_url", | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "https://api.anthropic.com" | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from typing import Any, cast\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n DEFAULT_ANTHROPIC_API_URL,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=DEFAULT_ANTHROPIC_API_URL,\n real_time_refresh=True,\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=DEFAULT_ANTHROPIC_API_URL,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if \"base_url\" in build_config and build_config[\"base_url\"][\"value\"] is None:\n build_config[\"base_url\"][\"value\"] = DEFAULT_ANTHROPIC_API_URL\n self.base_url = DEFAULT_ANTHROPIC_API_URL\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n 
build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" | |
| }, | |
| "input_value": { | |
| "_input_type": "MessageInput", | |
| "advanced": false, | |
| "display_name": "Input", | |
| "dynamic": false, | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "input_value", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "max_tokens": { | |
| "_input_type": "IntInput", | |
| "advanced": true, | |
| "display_name": "Max Tokens", | |
| "dynamic": false, | |
| "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "max_tokens", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "int", | |
| "value": 4096 | |
| }, | |
| "model_name": { | |
| "_input_type": "DropdownInput", | |
| "advanced": false, | |
| "combobox": true, | |
| "dialog_inputs": {}, | |
| "display_name": "Model Name", | |
| "dynamic": false, | |
| "info": "", | |
| "name": "model_name", | |
| "options": [ | |
| "claude-3-7-sonnet-latest", | |
| "claude-3-5-sonnet-latest", | |
| "claude-3-5-haiku-latest", | |
| "claude-3-opus-latest", | |
| "claude-3-sonnet-20240229", | |
| "claude-3-haiku-20240307" | |
| ], | |
| "options_metadata": [], | |
| "placeholder": "", | |
| "refresh_button": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "toggle": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "claude-3-7-sonnet-latest" | |
| }, | |
| "prefill": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Prefill", | |
| "dynamic": false, | |
| "info": "Prefill text to guide the model's response.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "prefill", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "stream": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Stream", | |
| "dynamic": false, | |
| "info": "Stream the response from the model. Streaming works only in Chat.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "stream", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": false | |
| }, | |
| "system_message": { | |
| "_input_type": "MultilineInput", | |
| "advanced": false, | |
| "copy_field": false, | |
| "display_name": "System Message", | |
| "dynamic": false, | |
| "info": "System message to pass to the model.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "system_message", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "temperature": { | |
| "_input_type": "SliderInput", | |
| "advanced": true, | |
| "display_name": "Temperature", | |
| "dynamic": false, | |
| "info": "Run inference with this temperature. Must be in the closed interval [0.0, 1.0].", | |
| "max_label": "", | |
| "max_label_icon": "", | |
| "min_label": "", | |
| "min_label_icon": "", | |
| "name": "temperature", | |
| "placeholder": "", | |
| "range_spec": { | |
| "max": 1, | |
| "min": 0, | |
| "step": 0.01, | |
| "step_type": "float" | |
| }, | |
| "required": false, | |
| "show": true, | |
| "slider_buttons": false, | |
| "slider_buttons_options": [], | |
| "slider_input": false, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "type": "slider", | |
| "value": 0.1 | |
| }, | |
| "tool_model_enabled": { | |
| "_input_type": "BoolInput", | |
| "advanced": false, | |
| "display_name": "Enable Tool Models", | |
| "dynamic": false, | |
| "info": "Select if you want to use models that can work with tools. If yes, only those models will be shown.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "tool_model_enabled", | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "AnthropicModel" | |
| }, | |
| "dragging": false, | |
| "id": "AnthropicModel-ff7II", | |
| "measured": { | |
| "height": 585, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 2323.784879380801, | |
| "y": 609.9825991441663 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "Prompt-qLrNR", | |
| "node": { | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "beta": false, | |
| "conditional_paths": [], | |
| "custom_fields": { | |
| "template": [ | |
| "article" | |
| ] | |
| }, | |
| "description": "Create a prompt template with dynamic variables.", | |
| "display_name": "Prompt", | |
| "documentation": "", | |
| "edited": false, | |
| "error": null, | |
| "field_order": [ | |
| "template", | |
| "tool_placeholder" | |
| ], | |
| "frozen": false, | |
| "full_path": null, | |
| "icon": "prompts", | |
| "is_composition": null, | |
| "is_input": null, | |
| "is_output": null, | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "name": "", | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Prompt Message", | |
| "hidden": false, | |
| "method": "build_prompt", | |
| "name": "prompt", | |
| "options": null, | |
| "required_inputs": null, | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "priority": null, | |
| "template": { | |
| "_type": "Component", | |
| "article": { | |
| "advanced": false, | |
| "display_name": "article", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "article", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n 
frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" | |
| }, | |
| "template": { | |
| "_input_type": "PromptInput", | |
| "advanced": false, | |
| "display_name": "Template", | |
| "dynamic": false, | |
| "info": "", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "template", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "type": "prompt", | |
| "value": "Eres un experto en psicografía, segmentación de audiencias y análisis de targeting mediático. Tu tarea es identificar con precisión quirúrgica a quién está dirigido este contenido y por qué. No muestras tu pensamiento/razonamiento, solo el resultado. Actualmente es el año 2025.\n\nCONTENIDO PARA ANALIZAR:\n{article}\n\nANÁLISIS REQUERIDO:\n\n1. **Perfil Demográfico Inferido**\n\n * Rango de edad probable\n * Nivel educativo estimado\n * Estrato socioeconómico\n * Ubicación geográfica/cultural implícita\n\n2. **Perfil Psicográfico Profundo**\n\n * Valores y creencias que asume el contenido\n * Miedos y aspiraciones a los que apela\n * Identidades grupales que activa o refuerza\n * Sesgos preexistentes que explota\n\n3. **Indicadores de Microsegmentación**\n\n * Palabras clave o referencias culturales específicas\n * Dog whistles o señales para grupos específicos\n * Exclusiones intencionales (a quién NO le habla)\n\n4. **Análisis de Vulnerabilidad Contextual**\n\n * Momento del día/semana óptimo para este público\n * Estado emocional que presupone en la audiencia\n * Contexto de consumo esperado (móvil, trabajo, hogar)\n\n5. **Estrategia de Targeting**\n\n * ¿Es segmentación amplia o láser-focused?\n * ¿Busca movilizar una base o convertir indecisos?\n * ¿Qué acción específica espera provocar?\n\nSé específico pero evita estereotipos. Basa tus inferencias en evidencia textual concreta. No incluyas reflexiones internas, procesos de pensamiento o marcadores HTML/Markdown en tu respuesta. Solo devuelve el contenido final estructurado.\n" | |
| }, | |
| "tool_placeholder": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Tool Placeholder", | |
| "dynamic": false, | |
| "info": "A placeholder input for tool mode.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "tool_placeholder", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": true, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "Prompt" | |
| }, | |
| "dragging": false, | |
| "id": "Prompt-qLrNR", | |
| "measured": { | |
| "height": 411, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 2689.0632572032105, | |
| "y": 1219.4187192652807 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "AnthropicModel-6Akp4", | |
| "node": { | |
| "base_classes": [ | |
| "LanguageModel", | |
| "Message" | |
| ], | |
| "beta": false, | |
| "category": "models", | |
| "conditional_paths": [], | |
| "custom_fields": {}, | |
| "description": "Generate text using Anthropic Chat&Completion LLMs with prefill support.", | |
| "display_name": "Anthropic", | |
| "documentation": "", | |
| "edited": false, | |
| "field_order": [ | |
| "input_value", | |
| "system_message", | |
| "stream", | |
| "max_tokens", | |
| "model_name", | |
| "api_key", | |
| "temperature", | |
| "base_url", | |
| "tool_model_enabled", | |
| "prefill" | |
| ], | |
| "frozen": false, | |
| "icon": "Anthropic", | |
| "key": "AnthropicModel", | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Message", | |
| "hidden": false, | |
| "method": "text_response", | |
| "name": "text_output", | |
| "options": null, | |
| "required_inputs": [], | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| }, | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Language Model", | |
| "method": "build_model", | |
| "name": "model_output", | |
| "options": null, | |
| "required_inputs": [ | |
| "api_key" | |
| ], | |
| "selected": "LanguageModel", | |
| "tool_mode": true, | |
| "types": [ | |
| "LanguageModel" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "score": 0.0005851173668140926, | |
| "template": { | |
| "_type": "Component", | |
| "api_key": { | |
| "_input_type": "SecretStrInput", | |
| "advanced": false, | |
| "display_name": "Anthropic API Key", | |
| "dynamic": false, | |
| "info": "Your Anthropic API key.", | |
| "input_types": [], | |
| "load_from_db": true, | |
| "name": "api_key", | |
| "password": true, | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "base_url": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Anthropic API URL", | |
| "dynamic": false, | |
| "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "base_url", | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "https://api.anthropic.com" | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from typing import Any, cast\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n DEFAULT_ANTHROPIC_API_URL,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=DEFAULT_ANTHROPIC_API_URL,\n real_time_refresh=True,\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=DEFAULT_ANTHROPIC_API_URL,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if \"base_url\" in build_config and build_config[\"base_url\"][\"value\"] is None:\n build_config[\"base_url\"][\"value\"] = DEFAULT_ANTHROPIC_API_URL\n self.base_url = DEFAULT_ANTHROPIC_API_URL\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n 
build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" | |
| }, | |
| "input_value": { | |
| "_input_type": "MessageInput", | |
| "advanced": false, | |
| "display_name": "Input", | |
| "dynamic": false, | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "input_value", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "max_tokens": { | |
| "_input_type": "IntInput", | |
| "advanced": true, | |
| "display_name": "Max Tokens", | |
| "dynamic": false, | |
| "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "max_tokens", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "int", | |
| "value": 4096 | |
| }, | |
| "model_name": { | |
| "_input_type": "DropdownInput", | |
| "advanced": false, | |
| "combobox": true, | |
| "dialog_inputs": {}, | |
| "display_name": "Model Name", | |
| "dynamic": false, | |
| "info": "", | |
| "name": "model_name", | |
| "options": [ | |
| "claude-3-7-sonnet-latest", | |
| "claude-3-5-sonnet-latest", | |
| "claude-3-5-haiku-latest", | |
| "claude-3-opus-latest", | |
| "claude-3-sonnet-20240229", | |
| "claude-3-haiku-20240307" | |
| ], | |
| "options_metadata": [], | |
| "placeholder": "", | |
| "refresh_button": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "toggle": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "claude-3-7-sonnet-latest" | |
| }, | |
| "prefill": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Prefill", | |
| "dynamic": false, | |
| "info": "Prefill text to guide the model's response.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "prefill", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "stream": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Stream", | |
| "dynamic": false, | |
| "info": "Stream the response from the model. Streaming works only in Chat.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "stream", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": false | |
| }, | |
| "system_message": { | |
| "_input_type": "MultilineInput", | |
| "advanced": false, | |
| "copy_field": false, | |
| "display_name": "System Message", | |
| "dynamic": false, | |
| "info": "System message to pass to the model.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "system_message", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "temperature": { | |
| "_input_type": "SliderInput", | |
| "advanced": true, | |
| "display_name": "Temperature", | |
| "dynamic": false, | |
| "info": "Run inference with this temperature. Must be in the closed interval [0.0, 1.0].", | |
| "max_label": "", | |
| "max_label_icon": "", | |
| "min_label": "", | |
| "min_label_icon": "", | |
| "name": "temperature", | |
| "placeholder": "", | |
| "range_spec": { | |
| "max": 1, | |
| "min": 0, | |
| "step": 0.01, | |
| "step_type": "float" | |
| }, | |
| "required": false, | |
| "show": true, | |
| "slider_buttons": false, | |
| "slider_buttons_options": [], | |
| "slider_input": false, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "type": "slider", | |
| "value": 0.1 | |
| }, | |
| "tool_model_enabled": { | |
| "_input_type": "BoolInput", | |
| "advanced": false, | |
| "display_name": "Enable Tool Models", | |
| "dynamic": false, | |
| "info": "Select if you want to use models that can work with tools. If yes, only those models will be shown.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "tool_model_enabled", | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "AnthropicModel" | |
| }, | |
| "dragging": false, | |
| "id": "AnthropicModel-6Akp4", | |
| "measured": { | |
| "height": 585, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 2693.6706774677364, | |
| "y": 609.4582005509999 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "Prompt-l52jJ", | |
| "node": { | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "beta": false, | |
| "conditional_paths": [], | |
| "custom_fields": { | |
| "template": [ | |
| "article", | |
| "analysis1", | |
| "analysis3" | |
| ] | |
| }, | |
| "description": "Create a prompt template with dynamic variables.", | |
| "display_name": "Prompt", | |
| "documentation": "", | |
| "edited": false, | |
| "error": null, | |
| "field_order": [ | |
| "template", | |
| "tool_placeholder" | |
| ], | |
| "frozen": false, | |
| "full_path": null, | |
| "icon": "prompts", | |
| "is_composition": null, | |
| "is_input": null, | |
| "is_output": null, | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "name": "", | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Prompt Message", | |
| "hidden": false, | |
| "method": "build_prompt", | |
| "name": "prompt", | |
| "options": null, | |
| "required_inputs": null, | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "priority": null, | |
| "template": { | |
| "_type": "Component", | |
| "analysis1": { | |
| "advanced": false, | |
| "display_name": "analysis1", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "analysis1", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "analysis3": { | |
| "advanced": false, | |
| "display_name": "analysis3", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "analysis3", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "article": { | |
| "advanced": false, | |
| "display_name": "article", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "article", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n 
frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" | |
| }, | |
| "template": { | |
| "_input_type": "PromptInput", | |
| "advanced": false, | |
| "display_name": "Template", | |
| "dynamic": false, | |
| "info": "", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "template", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "type": "prompt", | |
| "value": "Eres un analista forense de intencionalidad comunicativa, especializado en detectar agendas ocultas y animosidad en todas sus formas. Este es el análisis más profundo y synthesizador de TrueEye. No muestras tu pensamiento/razonamiento, solo el resultado. Actualmente es el año 2025.\n\nCONTENIDO ORIGINAL\n{article}\n\nINFORMES PREVIOS\n{analysis1}\n{analysis3}\n\nINSTRUCCIONES PARA DETECCIÓN DE ANIMOSIDAD:\n\n1. **Análisis Multidimensional de Intencionalidad**\n\n * *Para SESGO NEGATIVO*:\n • Animosidad directa (ataques, descalificaciones, deshumanización)\n • Amplificación selectiva de aspectos negativos\n • Construcción de enemigos o chivos expiatorios\n * *Para SESGO POSITIVO*:\n • Animosidad manipulativa (adulación con agenda)\n • Ocultamiento de información crítica\n • Construcción de falsos héroes o salvadores\n * *Para SESGO NEUTRO* (CRÍTICO):\n • Animosidad por omisión (“neutralidad” que ignora injusticias)\n • Falsa equivalencia que normaliza lo inaceptable\n • Indiferencia calculada ante sufrimiento o daño\n\n2. **Arquitectura de la Manipulación Avanzada**\n\n * Técnicas de gaslighting institucional\n * Construcción de realidades alternativas\n * Weaponización de la incertidumbre\n * Explotación de la fatiga informativa\n\n3. **Detección de Agendas Ocultas**\n\n * ¿Quién se beneficia de esta narrativa?\n * ¿Qué intereses económicos/políticos hay detrás?\n * ¿Qué cambios conductuales busca?\n * ¿A quién perjudica “colateralmente”?\n\n4. **Análisis de Omisiones Estratégicas**\n\n * ¿Qué información crucial falta?\n * ¿Qué preguntas no se hacen?\n * ¿Qué voces están ausentes?\n * ¿Qué contexto se ignora deliberadamente?\n\n5. 
**Evaluación de Peligrosidad**\n\n * Nivel de sofisticación de la manipulación (1-10)\n * Potencial de daño social/individual\n * Urgencia de intervención educativa\n * Grupos en mayor riesgo\n\nIMPORTANTE: Si el público objetivo incluye poblaciones vulnerables (niños, ancianos, personas en crisis), eleva automáticamente el nivel de preocupación. La animosidad hacia vulnerables es especialmente grave.\n\nSé implacable en tu análisis pero justo en tus conclusiones. No toda intencionalidad es maliciosa, pero toda manipulación debe ser expuesta.\n\nNo incluyas reflexiones internas, procesos de pensamiento o marcadores HTML/Markdown en tu respuesta. Solo devuelve el contenido final estructurado.\n\n" | |
| }, | |
| "tool_placeholder": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Tool Placeholder", | |
| "dynamic": false, | |
| "info": "A placeholder input for tool mode.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "tool_placeholder", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": true, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "Prompt" | |
| }, | |
| "dragging": false, | |
| "id": "Prompt-l52jJ", | |
| "measured": { | |
| "height": 575, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 3061.2588720583094, | |
| "y": 1206.990961975601 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "AnthropicModel-BFQkw", | |
| "node": { | |
| "base_classes": [ | |
| "LanguageModel", | |
| "Message" | |
| ], | |
| "beta": false, | |
| "category": "models", | |
| "conditional_paths": [], | |
| "custom_fields": {}, | |
| "description": "Generate text using Anthropic Chat&Completion LLMs with prefill support.", | |
| "display_name": "Anthropic", | |
| "documentation": "", | |
| "edited": false, | |
| "field_order": [ | |
| "input_value", | |
| "system_message", | |
| "stream", | |
| "max_tokens", | |
| "model_name", | |
| "api_key", | |
| "temperature", | |
| "base_url", | |
| "tool_model_enabled", | |
| "prefill" | |
| ], | |
| "frozen": false, | |
| "icon": "Anthropic", | |
| "key": "AnthropicModel", | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Message", | |
| "hidden": false, | |
| "method": "text_response", | |
| "name": "text_output", | |
| "options": null, | |
| "required_inputs": [], | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| }, | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Language Model", | |
| "method": "build_model", | |
| "name": "model_output", | |
| "options": null, | |
| "required_inputs": [ | |
| "api_key" | |
| ], | |
| "selected": "LanguageModel", | |
| "tool_mode": true, | |
| "types": [ | |
| "LanguageModel" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "score": 0.0005851173668140926, | |
| "template": { | |
| "_type": "Component", | |
| "api_key": { | |
| "_input_type": "SecretStrInput", | |
| "advanced": false, | |
| "display_name": "Anthropic API Key", | |
| "dynamic": false, | |
| "info": "Your Anthropic API key.", | |
| "input_types": [], | |
| "load_from_db": true, | |
| "name": "api_key", | |
| "password": true, | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "base_url": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Anthropic API URL", | |
| "dynamic": false, | |
| "info": "Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "base_url", | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "https://api.anthropic.com" | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from typing import Any, cast\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import (\n ANTHROPIC_MODELS,\n DEFAULT_ANTHROPIC_API_URL,\n TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS,\n TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS,\n)\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=ANTHROPIC_MODELS,\n refresh_button=True,\n value=ANTHROPIC_MODELS[0],\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Anthropic API Key\",\n info=\"Your Anthropic API key.\",\n value=None,\n required=True,\n real_time_refresh=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n MessageTextInput(\n name=\"base_url\",\n display_name=\"Anthropic API URL\",\n info=\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\n value=DEFAULT_ANTHROPIC_API_URL,\n real_time_refresh=True,\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Enable Tool Models\",\n info=(\n \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n ),\n advanced=False,\n value=False,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n try:\n output = ChatAnthropic(\n model=self.model_name,\n anthropic_api_key=self.api_key,\n max_tokens_to_sample=self.max_tokens,\n temperature=self.temperature,\n anthropic_api_url=DEFAULT_ANTHROPIC_API_URL,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import anthropic\n\n client = anthropic.Anthropic(api_key=self.api_key)\n models = client.models.list(limit=20).data\n model_ids = ANTHROPIC_MODELS + [model.id for model in models]\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = ANTHROPIC_MODELS\n\n if tool_model_enabled:\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n\n # Create a new list instead of modifying while iterating\n filtered_models = []\n for model in model_ids:\n if model in TOOL_CALLING_SUPPORTED_ANTHROPIC_MODELS:\n filtered_models.append(model)\n continue\n\n model_with_tool = ChatAnthropic(\n model=model, # Use the current model being checked\n anthropic_api_key=self.api_key,\n anthropic_api_url=cast(str, self.base_url) or DEFAULT_ANTHROPIC_API_URL,\n )\n\n if (\n not self.supports_tool_calling(model_with_tool)\n or model in TOOL_CALLING_UNSUPPORTED_ANTHROPIC_MODELS\n ):\n continue\n\n filtered_models.append(model)\n\n return filtered_models\n\n return model_ids\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if \"base_url\" in build_config and build_config[\"base_url\"][\"value\"] is None:\n build_config[\"base_url\"][\"value\"] = DEFAULT_ANTHROPIC_API_URL\n self.base_url = DEFAULT_ANTHROPIC_API_URL\n if field_name in {\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\"} and field_value:\n try:\n if len(self.api_key) == 0:\n ids = ANTHROPIC_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n 
build_config[\"model_name\"][\"value\"] = ids[0]\n build_config[\"model_name\"][\"combobox\"] = True\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n" | |
| }, | |
| "input_value": { | |
| "_input_type": "MessageInput", | |
| "advanced": false, | |
| "display_name": "Input", | |
| "dynamic": false, | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "input_value", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "max_tokens": { | |
| "_input_type": "IntInput", | |
| "advanced": true, | |
| "display_name": "Max Tokens", | |
| "dynamic": false, | |
| "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "max_tokens", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "int", | |
| "value": 4096 | |
| }, | |
| "model_name": { | |
| "_input_type": "DropdownInput", | |
| "advanced": false, | |
| "combobox": true, | |
| "dialog_inputs": {}, | |
| "display_name": "Model Name", | |
| "dynamic": false, | |
| "info": "", | |
| "name": "model_name", | |
| "options": [ | |
| "claude-3-7-sonnet-latest", | |
| "claude-3-5-sonnet-latest", | |
| "claude-3-5-haiku-latest", | |
| "claude-3-opus-latest", | |
| "claude-3-sonnet-20240229", | |
| "claude-3-haiku-20240307" | |
| ], | |
| "options_metadata": [], | |
| "placeholder": "", | |
| "refresh_button": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "toggle": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "claude-3-7-sonnet-latest" | |
| }, | |
| "prefill": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Prefill", | |
| "dynamic": false, | |
| "info": "Prefill text to guide the model's response.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "prefill", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "stream": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Stream", | |
| "dynamic": false, | |
| "info": "Stream the response from the model. Streaming works only in Chat.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "stream", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": false | |
| }, | |
| "system_message": { | |
| "_input_type": "MultilineInput", | |
| "advanced": false, | |
| "copy_field": false, | |
| "display_name": "System Message", | |
| "dynamic": false, | |
| "info": "System message to pass to the model.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "system_message", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "temperature": { | |
| "_input_type": "SliderInput", | |
| "advanced": true, | |
| "display_name": "Temperature", | |
| "dynamic": false, | |
| "info": "Run inference with this temperature. Must be in the closed interval [0.0, 1.0].", | |
| "max_label": "", | |
| "max_label_icon": "", | |
| "min_label": "", | |
| "min_label_icon": "", | |
| "name": "temperature", | |
| "placeholder": "", | |
| "range_spec": { | |
| "max": 1, | |
| "min": 0, | |
| "step": 0.01, | |
| "step_type": "float" | |
| }, | |
| "required": false, | |
| "show": true, | |
| "slider_buttons": false, | |
| "slider_buttons_options": [], | |
| "slider_input": false, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "type": "slider", | |
| "value": 0.1 | |
| }, | |
| "tool_model_enabled": { | |
| "_input_type": "BoolInput", | |
| "advanced": false, | |
| "display_name": "Enable Tool Models", | |
| "dynamic": false, | |
| "info": "Select if you want to use models that can work with tools. If yes, only those models will be shown.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "tool_model_enabled", | |
| "placeholder": "", | |
| "real_time_refresh": true, | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "AnthropicModel" | |
| }, | |
| "dragging": false, | |
| "id": "AnthropicModel-BFQkw", | |
| "measured": { | |
| "height": 585, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 3053.8775442317087, | |
| "y": 605.1945417238652 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "Prompt-L3qXf", | |
| "node": { | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "beta": false, | |
| "conditional_paths": [], | |
| "custom_fields": { | |
| "template": [ | |
| "analysis1", | |
| "analysis3", | |
| "analysis4" | |
| ] | |
| }, | |
| "description": "Create a prompt template with dynamic variables.", | |
| "display_name": "Prompt", | |
| "documentation": "", | |
| "edited": false, | |
| "error": null, | |
| "field_order": [ | |
| "template", | |
| "tool_placeholder" | |
| ], | |
| "frozen": false, | |
| "full_path": null, | |
| "icon": "prompts", | |
| "is_composition": null, | |
| "is_input": null, | |
| "is_output": null, | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": false, | |
| "name": "", | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Prompt Message", | |
| "hidden": false, | |
| "method": "build_prompt", | |
| "name": "prompt", | |
| "options": null, | |
| "required_inputs": null, | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "priority": null, | |
| "template": { | |
| "_type": "Component", | |
| "analysis1": { | |
| "advanced": false, | |
| "display_name": "analysis1", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "analysis1", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "analysis3": { | |
| "advanced": false, | |
| "display_name": "analysis3", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "analysis3", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "analysis4": { | |
| "advanced": false, | |
| "display_name": "analysis4", | |
| "dynamic": false, | |
| "field_type": "str", | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "analysis4", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n 
frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" | |
| }, | |
| "template": { | |
| "_input_type": "PromptInput", | |
| "advanced": false, | |
| "display_name": "Template", | |
| "dynamic": false, | |
| "info": "", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "template", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "type": "prompt", | |
| "value": "\n# Sesgo, Matices y Resumen\n{analysis1}\n\n# Segmentación de Audiencia\n{analysis3}\n\n# Intencionalidad y Peligrosidad\n{analysis4}\n\n" | |
| }, | |
| "tool_placeholder": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Tool Placeholder", | |
| "dynamic": false, | |
| "info": "A placeholder input for tool mode.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "tool_placeholder", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": true, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": true, | |
| "type": "Prompt" | |
| }, | |
| "dragging": false, | |
| "id": "Prompt-L3qXf", | |
| "measured": { | |
| "height": 575, | |
| "width": 320 | |
| }, | |
| "position": { | |
| "x": 3515.0666217486555, | |
| "y": 931.8250969525958 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| }, | |
| { | |
| "data": { | |
| "id": "ChatOutput-7XrLH", | |
| "node": { | |
| "base_classes": [ | |
| "Message" | |
| ], | |
| "beta": false, | |
| "category": "outputs", | |
| "conditional_paths": [], | |
| "custom_fields": {}, | |
| "description": "Display a chat message in the Playground.", | |
| "display_name": "Chat Output", | |
| "documentation": "", | |
| "edited": false, | |
| "field_order": [ | |
| "input_value", | |
| "should_store_message", | |
| "sender", | |
| "sender_name", | |
| "session_id", | |
| "data_template", | |
| "background_color", | |
| "chat_icon", | |
| "text_color", | |
| "clean_data" | |
| ], | |
| "frozen": false, | |
| "icon": "MessagesSquare", | |
| "key": "ChatOutput", | |
| "legacy": false, | |
| "lf_version": "1.4.3", | |
| "metadata": {}, | |
| "minimized": true, | |
| "output_types": [], | |
| "outputs": [ | |
| { | |
| "allows_loop": false, | |
| "cache": true, | |
| "display_name": "Message", | |
| "method": "message_response", | |
| "name": "message", | |
| "selected": "Message", | |
| "tool_mode": true, | |
| "types": [ | |
| "Message" | |
| ], | |
| "value": "__UNDEFINED__" | |
| } | |
| ], | |
| "pinned": false, | |
| "score": 0.003169567463043492, | |
| "template": { | |
| "_type": "Component", | |
| "background_color": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Background Color", | |
| "dynamic": false, | |
| "info": "The background color of the icon.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "background_color", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "chat_icon": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Icon", | |
| "dynamic": false, | |
| "info": "The icon of the message.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "chat_icon", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "clean_data": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Basic Clean Data", | |
| "dynamic": false, | |
| "info": "Whether to clean the data", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "clean_data", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| }, | |
| "code": { | |
| "advanced": true, | |
| "dynamic": true, | |
| "fileTypes": [], | |
| "file_path": "", | |
| "info": "", | |
| "list": false, | |
| "load_from_db": false, | |
| "multiline": true, | |
| "name": "code", | |
| "password": false, | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "type": "code", | |
| "value": "from collections.abc import Generator\nfrom typing import Any\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.inputs.inputs import HandleInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema.data import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_AI,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n minimized = True\n\n inputs = [\n HandleInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n input_types=[\"Data\", \"DataFrame\", \"Message\"],\n required=True,\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n BoolInput(\n name=\"clean_data\",\n display_name=\"Basic Clean Data\",\n value=True,\n info=\"Whether to clean the data\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n # Handle case where source is a ChatOpenAI object\n if hasattr(source, \"model_name\"):\n source_dict[\"source\"] = source.model_name\n elif hasattr(source, \"model\"):\n source_dict[\"source\"] = str(source.model)\n else:\n source_dict[\"source\"] = str(source)\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n # First convert the input to string if needed\n text = self.convert_to_string()\n # Get source properties\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n\n # Create or use existing Message object\n if isinstance(self.input_value, Message):\n message = self.input_value\n # Update message properties\n message.text = text\n else:\n message = Message(text=text)\n\n # Set message properties\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n 
message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n\n # Store message if needed\n if self.session_id and self.should_store_message:\n stored_message = await self.send_message(message)\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n\n def _validate_input(self) -> None:\n \"\"\"Validate the input data and raise ValueError if invalid.\"\"\"\n if self.input_value is None:\n msg = \"Input data cannot be None\"\n raise ValueError(msg)\n if isinstance(self.input_value, list) and not all(\n isinstance(item, Message | Data | DataFrame | str) for item in self.input_value\n ):\n invalid_types = [\n type(item).__name__\n for item in self.input_value\n if not isinstance(item, Message | Data | DataFrame | str)\n ]\n msg = f\"Expected Data or DataFrame or Message or str, got {invalid_types}\"\n raise TypeError(msg)\n if not isinstance(\n self.input_value,\n Message | Data | DataFrame | str | list | Generator | type(None),\n ):\n type_name = type(self.input_value).__name__\n msg = f\"Expected Data or DataFrame or Message or str, Generator or None, got {type_name}\"\n raise TypeError(msg)\n\n def _safe_convert(self, data: Any) -> str:\n \"\"\"Safely convert input data to string.\"\"\"\n try:\n if isinstance(data, str):\n return data\n if isinstance(data, Message):\n return data.get_text()\n if isinstance(data, Data):\n if data.get_text() is None:\n msg = \"Empty Data object\"\n raise ValueError(msg)\n return data.get_text()\n if isinstance(data, DataFrame):\n if self.clean_data:\n # Remove empty rows\n data = data.dropna(how=\"all\")\n # Remove empty lines in each cell\n data = data.replace(r\"^\\s*$\", \"\", regex=True)\n # Replace multiple newlines with a single newline\n data = 
data.replace(r\"\\n+\", \"\\n\", regex=True)\n\n # Replace pipe characters to avoid markdown table issues\n processed_data = data.replace(r\"\\|\", r\"\\\\|\", regex=True)\n\n processed_data = processed_data.map(\n lambda x: str(x).replace(\"\\n\", \"<br/>\") if isinstance(x, str) else x\n )\n\n return processed_data.to_markdown(index=False)\n return str(data)\n except (ValueError, TypeError, AttributeError) as e:\n msg = f\"Error converting data: {e!s}\"\n raise ValueError(msg) from e\n\n def convert_to_string(self) -> str | Generator[Any, None, None]:\n \"\"\"Convert input data to string with proper error handling.\"\"\"\n self._validate_input()\n if isinstance(self.input_value, list):\n return \"\\n\".join([self._safe_convert(item) for item in self.input_value])\n if isinstance(self.input_value, Generator):\n return self.input_value\n return self._safe_convert(self.input_value)\n" | |
| }, | |
| "data_template": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Data Template", | |
| "dynamic": false, | |
| "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "data_template", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "{text}" | |
| }, | |
| "input_value": { | |
| "_input_type": "HandleInput", | |
| "advanced": false, | |
| "display_name": "Text", | |
| "dynamic": false, | |
| "info": "Message to be passed as output.", | |
| "input_types": [ | |
| "Data", | |
| "DataFrame", | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "input_value", | |
| "placeholder": "", | |
| "required": true, | |
| "show": true, | |
| "title_case": false, | |
| "trace_as_metadata": true, | |
| "type": "other", | |
| "value": "" | |
| }, | |
| "sender": { | |
| "_input_type": "DropdownInput", | |
| "advanced": true, | |
| "combobox": false, | |
| "dialog_inputs": {}, | |
| "display_name": "Sender Type", | |
| "dynamic": false, | |
| "info": "Type of sender.", | |
| "name": "sender", | |
| "options": [ | |
| "Machine", | |
| "User" | |
| ], | |
| "options_metadata": [], | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "toggle": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "Machine" | |
| }, | |
| "sender_name": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Sender Name", | |
| "dynamic": false, | |
| "info": "Name of the sender.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "sender_name", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "AI" | |
| }, | |
| "session_id": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Session ID", | |
| "dynamic": false, | |
| "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "session_id", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| }, | |
| "should_store_message": { | |
| "_input_type": "BoolInput", | |
| "advanced": true, | |
| "display_name": "Store Messages", | |
| "dynamic": false, | |
| "info": "Store the message in the history.", | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "name": "should_store_message", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_metadata": true, | |
| "type": "bool", | |
| "value": true | |
| }, | |
| "text_color": { | |
| "_input_type": "MessageTextInput", | |
| "advanced": true, | |
| "display_name": "Text Color", | |
| "dynamic": false, | |
| "info": "The text color of the name", | |
| "input_types": [ | |
| "Message" | |
| ], | |
| "list": false, | |
| "list_add_label": "Add More", | |
| "load_from_db": false, | |
| "name": "text_color", | |
| "placeholder": "", | |
| "required": false, | |
| "show": true, | |
| "title_case": false, | |
| "tool_mode": false, | |
| "trace_as_input": true, | |
| "trace_as_metadata": true, | |
| "type": "str", | |
| "value": "" | |
| } | |
| }, | |
| "tool_mode": false | |
| }, | |
| "showNode": false, | |
| "type": "ChatOutput" | |
| }, | |
| "dragging": false, | |
| "id": "ChatOutput-7XrLH", | |
| "measured": { | |
| "height": 66, | |
| "width": 192 | |
| }, | |
| "position": { | |
| "x": 3923.8537370870367, | |
| "y": 1453.4662802565874 | |
| }, | |
| "selected": false, | |
| "type": "genericNode" | |
| } | |
| ], | |
| "viewport": { | |
| "x": -575.1269282037726, | |
| "y": -121.01456439168783, | |
| "zoom": 0.46457601517129365 | |
| } | |
| }, | |
| "description": "Unleashing Linguistic Creativity.", | |
| "endpoint_name": null, | |
| "id": "27ad0656-fe3a-468a-aed1-88b849415894", | |
| "is_component": false, | |
| "last_tested_version": "1.4.3", | |
| "name": "TrueEyeBeta", | |
| "tags": [] | |
| } |