{
"id": "c1302271-22bf-402a-a4bb-f63a2311423c",
"data": {
"nodes": [
{
"data": {
"description": "Get chat inputs from the Playground.",
"display_name": "Chat Input",
"id": "ChatInput-w2pkG",
"node": {
"base_classes": [
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Get chat inputs from the Playground.",
"display_name": "Chat Input",
"documentation": "",
"edited": false,
"field_order": [
"input_value",
"store_message",
"sender",
"sender_name",
"session_id",
"files"
],
"frozen": false,
"icon": "MessagesSquare",
"legacy": false,
"lf_version": "1.0.19.post2",
"metadata": {},
"output_types": [],
"outputs": [
{
"allows_loop": false,
"cache": true,
"display_name": "Message",
"method": "message_response",
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"background_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "background_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"chat_icon": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "chat_icon",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"advanced": true,
"display_name": "Files",
"dynamic": false,
"fileTypes": [
"txt",
"md",
"mdx",
"csv",
"json",
"yaml",
"yml",
"xml",
"html",
"htm",
"pdf",
"docx",
"py",
"sh",
"sql",
"js",
"ts",
"tsx",
"jpg",
"jpeg",
"png",
"bmp",
"image"
],
"file_path": "",
"info": "Files to be sent with the message.",
"list": true,
"name": "files",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
"value": ""
},
"input_value": {
"advanced": false,
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as input.",
"input_types": [],
"list": false,
"load_from_db": false,
"multiline": true,
"name": "input_value",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "Hello"
},
"sender": {
"advanced": true,
"display_name": "Sender Type",
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "str",
"value": "User"
},
"sender_name": {
"advanced": true,
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "sender_name",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "User"
},
"session_id": {
"advanced": true,
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "session_id",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"should_store_message": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "Store Messages",
"dynamic": false,
"info": "Store the message in the history.",
"list": false,
"name": "should_store_message",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": true
},
"text_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "text_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
}
}
},
"type": "ChatInput"
},
"dragging": false,
"height": 234,
"id": "ChatInput-w2pkG",
"measured": {
"height": 234,
"width": 320
},
"position": {
"x": 689.5720422421635,
"y": 765.155834131403
},
"positionAbsolute": {
"x": 689.5720422421635,
"y": 765.155834131403
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"id": "Prompt-6H4qF",
"node": {
"base_classes": [
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {
"template": []
},
"description": "Create a prompt template with dynamic variables.",
"display_name": "Prompt",
"documentation": "",
"edited": false,
"field_order": [
"template"
],
"frozen": false,
"icon": "prompts",
"legacy": false,
"metadata": {},
"output_types": [],
"outputs": [
{
"allows_loop": false,
"cache": true,
"display_name": "Prompt Message",
"method": "build_prompt",
"name": "prompt",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n 
frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n"
},
"template": {
"_input_type": "PromptInput",
"advanced": false,
"display_name": "Template",
"dynamic": false,
"info": "",
"list": false,
"load_from_db": false,
"name": "template",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"type": "prompt",
"value": "Answer the user as if you were a GenAI expert, enthusiastic about helping them get started building something fresh."
},
"tool_placeholder": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Tool Placeholder",
"dynamic": false,
"info": "A placeholder input for tool mode.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "tool_placeholder",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": true,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
}
},
"tool_mode": false
},
"type": "Prompt"
},
"dragging": false,
"height": 260,
"id": "Prompt-6H4qF",
"measured": {
"height": 260,
"width": 320
},
"position": {
"x": 690.2015147036818,
"y": 1040.6625705470924
},
"positionAbsolute": {
"x": 690.2015147036818,
"y": 1018.5443911764344
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"id": "undefined-axZIx",
"node": {
"description": "## 📖 README\n\nPerform basic prompting with a Google Generative AI model.\n\n#### Quick Start\n- Add your **Google API key** to the **Google Generative AI Model**\n- Open the **Playground** to chat with your bot.\n\n#### Next steps:\n Experiment by changing the prompt and the model temperature to see how the bot's responses change.",
"display_name": "Read Me",
"documentation": "",
"template": {
"backgroundColor": "neutral"
}
}
},
"dragging": false,
"height": 250,
"id": "undefined-axZIx",
"measured": {
"height": 250,
"width": 325
},
"position": {
"x": 66.38770028934243,
"y": 749.744424427066
},
"positionAbsolute": {
"x": 66.38770028934243,
"y": 749.744424427066
},
"resizing": false,
"selected": false,
"style": {
"height": 250,
"width": 600
},
"type": "noteNode",
"width": 600
},
{
"data": {
"id": "note-sPuYW",
"node": {
"description": "### 💡 Add your Google API key here 👇",
"display_name": "",
"documentation": "",
"template": {
"backgroundColor": "transparent"
}
},
"type": "note"
},
"dragging": false,
"height": 324,
"id": "note-sPuYW",
"measured": {
"height": 324,
"width": 324
},
"position": {
"x": 1075.829573520873,
"y": 657.2057655038416
},
"positionAbsolute": {
"x": 1075.829573520873,
"y": 657.2057655038416
},
"resizing": false,
"selected": false,
"style": {
"height": 324,
"width": 324
},
"type": "noteNode",
"width": 324
},
{
"data": {
"id": "ChatOutput-Xju6V",
"node": {
"base_classes": [
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Display a chat message in the Playground.",
"display_name": "Chat Output",
"documentation": "",
"edited": false,
"field_order": [
"input_value",
"should_store_message",
"sender",
"sender_name",
"session_id",
"data_template",
"background_color",
"chat_icon",
"text_color"
],
"frozen": false,
"icon": "MessagesSquare",
"legacy": false,
"metadata": {},
"output_types": [],
"outputs": [
{
"allows_loop": false,
"cache": true,
"display_name": "Message",
"method": "message_response",
"name": "message",
"selected": "Message",
"tool_mode": true,
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"background_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "background_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"chat_icon": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "chat_icon",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_AI,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n minimized = True\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"data_template": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "data_template",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "{text}"
},
"input_value": {
"_input_type": "MessageInput",
"advanced": false,
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "input_value",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"sender": {
"_input_type": "DropdownInput",
"advanced": true,
"combobox": false,
"display_name": "Sender Type",
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "Machine"
},
"sender_name": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "sender_name",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "AI"
},
"session_id": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "session_id",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"should_store_message": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "Store Messages",
"dynamic": false,
"info": "Store the message in the history.",
"list": false,
"name": "should_store_message",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": true
},
"text_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "text_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
}
},
"tool_mode": false
},
"type": "ChatOutput"
},
"dragging": false,
"height": 234,
"id": "ChatOutput-Xju6V",
"measured": {
"height": 234,
"width": 320
},
"position": {
"x": 1460.070372772908,
"y": 872.7273956769025
},
"positionAbsolute": {
"x": 1444.936881624563,
"y": 872.7273956769025
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"id": "GoogleGenerativeAIModel-xh97T",
"type": "genericNode",
"position": {
"x": 1091.666782163277,
"y": 780.460184309683
},
"data": {
"node": {
"template": {
"_type": "Component",
"api_key": {
"load_from_db": false,
"required": true,
"placeholder": "",
"show": true,
"name": "api_key",
"value": "",
"display_name": "Google API Key",
"advanced": false,
"input_types": [
"Message"
],
"dynamic": false,
"info": "The Google API Key to use for the Google Generative AI.",
"real_time_refresh": true,
"title_case": false,
"password": true,
"type": "str",
"_input_type": "SecretStrInput"
},
"code": {
"type": "code",
"required": true,
"placeholder": "",
"list": false,
"show": true,
"multiline": true,
"value": "from typing import Any\n\nimport requests\nfrom loguru import logger\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.google_generative_ai_constants import GOOGLE_GENERATIVE_AI_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, SliderInput\nfrom langflow.inputs.inputs import BoolInput\nfrom langflow.schema import dotdict\n\n\nclass GoogleGenerativeAIComponent(LCModelComponent):\n display_name = \"Google Generative AI\"\n description = \"Generate text using Google Generative AI.\"\n icon = \"GoogleGenerativeAI\"\n name = \"GoogleGenerativeAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_output_tokens\", display_name=\"Max Output Tokens\", info=\"The maximum number of tokens to generate.\"\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=GOOGLE_GENERATIVE_AI_MODELS,\n value=\"gemini-1.5-pro\",\n refresh_button=True,\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Google API Key\",\n info=\"The Google API Key to use for the Google Generative AI.\",\n required=True,\n real_time_refresh=True,\n ),\n FloatInput(\n name=\"top_p\",\n display_name=\"Top P\",\n info=\"The maximum cumulative probability of tokens to consider when sampling.\",\n advanced=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n range_spec=RangeSpec(min=0, max=2, step=0.01),\n info=\"Controls randomness. Lower values are more deterministic, higher values are more creative.\",\n ),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. 
\"\n \"Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K\",\n info=\"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.\",\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Tool Model Enabled\",\n info=\"Whether to use the tool model.\",\n value=False,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_google_genai import ChatGoogleGenerativeAI\n except ImportError as e:\n msg = \"The 'langchain_google_genai' package is required to use the Google Generative AI model.\"\n raise ImportError(msg) from e\n\n google_api_key = self.api_key\n model = self.model_name\n max_output_tokens = self.max_output_tokens\n temperature = self.temperature\n top_k = self.top_k\n top_p = self.top_p\n n = self.n\n\n return ChatGoogleGenerativeAI(\n model=model,\n max_output_tokens=max_output_tokens or None,\n temperature=temperature,\n top_k=top_k or None,\n top_p=top_p or None,\n n=n or 1,\n google_api_key=SecretStr(google_api_key).get_secret_value(),\n )\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import google.generativeai as genai\n\n genai.configure(api_key=self.api_key)\n model_ids = [\n model.name.replace(\"models/\", \"\")\n for model in genai.list_models()\n if \"generateContent\" in model.supported_generation_methods\n ]\n model_ids.sort(reverse=True)\n except (ImportError, ValueError) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = GOOGLE_GENERATIVE_AI_MODELS\n if tool_model_enabled:\n try:\n from langchain_google_genai.chat_models import ChatGoogleGenerativeAI\n except ImportError as e:\n msg = \"langchain_google_genai is not installed.\"\n raise ImportError(msg) from e\n for model in model_ids:\n model_with_tool = ChatGoogleGenerativeAI(\n model=self.model_name,\n 
google_api_key=self.api_key,\n )\n if not self.supports_tool_calling(model_with_tool):\n model_ids.remove(model)\n return model_ids\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in (\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\") and field_value:\n try:\n if len(self.api_key) == 0:\n ids = GOOGLE_GENERATIVE_AI_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = GOOGLE_GENERATIVE_AI_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n",
"fileTypes": [],
"file_path": "",
"password": false,
"name": "code",
"advanced": true,
"dynamic": true,
"info": "",
"load_from_db": false,
"title_case": false
},
"input_value": {
"trace_as_input": true,
"tool_mode": false,
"trace_as_metadata": true,
"load_from_db": false,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "input_value",
"value": "",
"display_name": "Input",
"advanced": false,
"input_types": [
"Message"
],
"dynamic": false,
"info": "",
"title_case": false,
"type": "str",
"_input_type": "MessageInput"
},
"max_output_tokens": {
"tool_mode": false,
"trace_as_metadata": true,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "max_output_tokens",
"value": "",
"display_name": "Max Output Tokens",
"advanced": false,
"dynamic": false,
"info": "The maximum number of tokens to generate.",
"title_case": false,
"type": "int",
"_input_type": "IntInput"
},
"model_name": {
"tool_mode": false,
"trace_as_metadata": true,
"options": [
"learnlm-1.5-pro-experimental",
"gemini-pro-vision",
"gemini-pro",
"gemini-exp-1206",
"gemini-2.0-pro-exp-02-05",
"gemini-2.0-pro-exp",
"gemini-2.0-flash-thinking-exp-1219",
"gemini-2.0-flash-thinking-exp-01-21",
"gemini-2.0-flash-thinking-exp",
"gemini-2.0-flash-lite-preview-02-05",
"gemini-2.0-flash-lite-preview",
"gemini-2.0-flash-exp",
"gemini-2.0-flash-001",
"gemini-2.0-flash",
"gemini-1.5-pro-latest",
"gemini-1.5-pro-002",
"gemini-1.5-pro-001",
"gemini-1.5-pro",
"gemini-1.5-flash-latest",
"gemini-1.5-flash-8b-latest",
"gemini-1.5-flash-8b-exp-0924",
"gemini-1.5-flash-8b-exp-0827",
"gemini-1.5-flash-8b-001",
"gemini-1.5-flash-8b",
"gemini-1.5-flash-002",
"gemini-1.5-flash-001-tuning",
"gemini-1.5-flash-001",
"gemini-1.5-flash",
"gemini-1.0-pro-vision-latest",
"gemini-1.0-pro-latest",
"gemini-1.0-pro-001",
"gemini-1.0-pro"
],
"options_metadata": [],
"combobox": true,
"dialog_inputs": {},
"required": false,
"placeholder": "",
"show": true,
"name": "model_name",
"value": "learnlm-1.5-pro-experimental",
"display_name": "Model",
"advanced": false,
"dynamic": false,
"info": "The name of the model to use.",
"refresh_button": true,
"title_case": false,
"type": "str",
"_input_type": "DropdownInput"
},
"n": {
"tool_mode": false,
"trace_as_metadata": true,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "n",
"value": "",
"display_name": "N",
"advanced": true,
"dynamic": false,
"info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
"title_case": false,
"type": "int",
"_input_type": "IntInput"
},
"stream": {
"tool_mode": false,
"trace_as_metadata": true,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "stream",
"value": false,
"display_name": "Stream",
"advanced": false,
"dynamic": false,
"info": "Stream the response from the model. Streaming works only in Chat.",
"title_case": false,
"type": "bool",
"_input_type": "BoolInput"
},
"system_message": {
"tool_mode": false,
"trace_as_input": true,
"multiline": true,
"trace_as_metadata": true,
"load_from_db": false,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "system_message",
"value": "",
"display_name": "System Message",
"advanced": false,
"input_types": [
"Message"
],
"dynamic": false,
"info": "System message to pass to the model.",
"title_case": false,
"type": "str",
"_input_type": "MultilineInput"
},
"temperature": {
"tool_mode": false,
"min_label": "",
"max_label": "",
"min_label_icon": "",
"max_label_icon": "",
"slider_buttons": false,
"slider_buttons_options": [],
"slider_input": false,
"range_spec": {
"step_type": "float",
"min": 0,
"max": 2,
"step": 0.01
},
"required": false,
"placeholder": "",
"show": true,
"name": "temperature",
"value": 0.1,
"display_name": "Temperature",
"advanced": false,
"dynamic": false,
"info": "Controls randomness. Lower values are more deterministic, higher values are more creative.",
"title_case": false,
"type": "slider",
"_input_type": "SliderInput"
},
"tool_model_enabled": {
"tool_mode": false,
"trace_as_metadata": true,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "tool_model_enabled",
"value": false,
"display_name": "Tool Model Enabled",
"advanced": false,
"dynamic": false,
"info": "Whether to use the tool model.",
"title_case": false,
"type": "bool",
"_input_type": "BoolInput"
},
"top_k": {
"tool_mode": false,
"trace_as_metadata": true,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "top_k",
"value": "",
"display_name": "Top K",
"advanced": true,
"dynamic": false,
"info": "Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
"title_case": false,
"type": "int",
"_input_type": "IntInput"
},
"top_p": {
"tool_mode": false,
"trace_as_metadata": true,
"list": false,
"list_add_label": "Add More",
"required": false,
"placeholder": "",
"show": true,
"name": "top_p",
"value": "",
"display_name": "Top P",
"advanced": true,
"dynamic": false,
"info": "The maximum cumulative probability of tokens to consider when sampling.",
"title_case": false,
"type": "float",
"_input_type": "FloatInput"
}
},
"description": "Generate text using Google Generative AI.",
"icon": "GoogleGenerativeAI",
"base_classes": [
"LanguageModel",
"Message"
],
"display_name": "Google Generative AI",
"documentation": "",
"minimized": false,
"custom_fields": {},
"output_types": [],
"pinned": false,
"conditional_paths": [],
"frozen": false,
"outputs": [
{
"types": [
"Message"
],
"selected": "Message",
"name": "text_output",
"hidden": null,
"display_name": "Message",
"method": "text_response",
"value": "__UNDEFINED__",
"cache": true,
"required_inputs": [],
"allows_loop": false,
"tool_mode": true
},
{
"types": [
"LanguageModel"
],
"selected": "LanguageModel",
"name": "model_output",
"hidden": null,
"display_name": "Language Model",
"method": "build_model",
"value": "__UNDEFINED__",
"cache": true,
"required_inputs": [
"api_key"
],
"allows_loop": false,
"tool_mode": true
}
],
"field_order": [
"input_value",
"system_message",
"stream",
"max_output_tokens",
"model_name",
"api_key",
"top_p",
"temperature",
"n",
"top_k",
"tool_model_enabled"
],
"beta": false,
"legacy": false,
"edited": false,
"metadata": {},
"tool_mode": false
},
"showNode": true,
"type": "GoogleGenerativeAIModel",
"id": "GoogleGenerativeAIModel-xh97T"
},
"selected": true,
"measured": {
"width": 320,
"height": 777
},
"dragging": false
}
],
"edges": [
{
"source": "ChatInput-w2pkG",
"sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-w2pkGœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}",
"target": "GoogleGenerativeAIModel-xh97T",
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"data": {
"targetHandle": {
"fieldName": "input_value",
"id": "GoogleGenerativeAIModel-xh97T",
"inputTypes": [
"Message"
],
"type": "str"
},
"sourceHandle": {
"dataType": "ChatInput",
"id": "ChatInput-w2pkG",
"name": "message",
"output_types": [
"Message"
]
}
},
"id": "xy-edge__ChatInput-w2pkG{œdataTypeœ:œChatInputœ,œidœ:œChatInput-w2pkGœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-xh97T{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
},
{
"source": "Prompt-6H4qF",
"sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-6H4qFœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}",
"target": "GoogleGenerativeAIModel-xh97T",
"targetHandle": "{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"data": {
"targetHandle": {
"fieldName": "system_message",
"id": "GoogleGenerativeAIModel-xh97T",
"inputTypes": [
"Message"
],
"type": "str"
},
"sourceHandle": {
"dataType": "Prompt",
"id": "Prompt-6H4qF",
"name": "prompt",
"output_types": [
"Message"
]
}
},
"id": "xy-edge__Prompt-6H4qF{œdataTypeœ:œPromptœ,œidœ:œPrompt-6H4qFœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-xh97T{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
},
{
"source": "GoogleGenerativeAIModel-xh97T",
"sourceHandle": "{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}",
"target": "ChatOutput-Xju6V",
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Xju6Vœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"data": {
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-Xju6V",
"inputTypes": [
"Message"
],
"type": "str"
},
"sourceHandle": {
"dataType": "GoogleGenerativeAIModel",
"id": "GoogleGenerativeAIModel-xh97T",
"name": "text_output",
"output_types": [
"Message"
]
}
},
"id": "xy-edge__GoogleGenerativeAIModel-xh97T{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-Xju6V{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Xju6Vœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
}
],
"viewport": {
"x": 449.86558686155433,
"y": -298.7022457531368,
"zoom": 0.5739879299960177
}
},
"description": "Perform basic prompting with a Google Generative AI model.",
"name": "Basic Prompting",
"last_tested_version": "1.1.4.post1",
"endpoint_name": null,
"is_component": false
}