Xavier Antonio Ochoa Chehab committed on
Commit
fa213dd
·
1 Parent(s): a7389a1

New commit

Browse files
Files changed (1) hide show
  1. Basic Prompting.json +1224 -0
Basic Prompting.json ADDED
@@ -0,0 +1,1224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "c1302271-22bf-402a-a4bb-f63a2311423c",
3
+ "data": {
4
+ "nodes": [
5
+ {
6
+ "data": {
7
+ "description": "Get chat inputs from the Playground.",
8
+ "display_name": "Chat Input",
9
+ "id": "ChatInput-w2pkG",
10
+ "node": {
11
+ "base_classes": [
12
+ "Message"
13
+ ],
14
+ "beta": false,
15
+ "conditional_paths": [],
16
+ "custom_fields": {},
17
+ "description": "Get chat inputs from the Playground.",
18
+ "display_name": "Chat Input",
19
+ "documentation": "",
20
+ "edited": false,
21
+ "field_order": [
22
+ "input_value",
23
+ "store_message",
24
+ "sender",
25
+ "sender_name",
26
+ "session_id",
27
+ "files"
28
+ ],
29
+ "frozen": false,
30
+ "icon": "MessagesSquare",
31
+ "legacy": false,
32
+ "lf_version": "1.0.19.post2",
33
+ "metadata": {},
34
+ "output_types": [],
35
+ "outputs": [
36
+ {
37
+ "allows_loop": false,
38
+ "cache": true,
39
+ "display_name": "Message",
40
+ "method": "message_response",
41
+ "name": "message",
42
+ "selected": "Message",
43
+ "tool_mode": true,
44
+ "types": [
45
+ "Message"
46
+ ],
47
+ "value": "__UNDEFINED__"
48
+ }
49
+ ],
50
+ "pinned": false,
51
+ "template": {
52
+ "_type": "Component",
53
+ "background_color": {
54
+ "_input_type": "MessageTextInput",
55
+ "advanced": true,
56
+ "display_name": "Background Color",
57
+ "dynamic": false,
58
+ "info": "The background color of the icon.",
59
+ "input_types": [
60
+ "Message"
61
+ ],
62
+ "list": false,
63
+ "load_from_db": false,
64
+ "name": "background_color",
65
+ "placeholder": "",
66
+ "required": false,
67
+ "show": true,
68
+ "title_case": false,
69
+ "trace_as_input": true,
70
+ "trace_as_metadata": true,
71
+ "type": "str",
72
+ "value": ""
73
+ },
74
+ "chat_icon": {
75
+ "_input_type": "MessageTextInput",
76
+ "advanced": true,
77
+ "display_name": "Icon",
78
+ "dynamic": false,
79
+ "info": "The icon of the message.",
80
+ "input_types": [
81
+ "Message"
82
+ ],
83
+ "list": false,
84
+ "load_from_db": false,
85
+ "name": "chat_icon",
86
+ "placeholder": "",
87
+ "required": false,
88
+ "show": true,
89
+ "title_case": false,
90
+ "trace_as_input": true,
91
+ "trace_as_metadata": true,
92
+ "type": "str",
93
+ "value": ""
94
+ },
95
+ "code": {
96
+ "advanced": true,
97
+ "dynamic": true,
98
+ "fileTypes": [],
99
+ "file_path": "",
100
+ "info": "",
101
+ "list": false,
102
+ "load_from_db": false,
103
+ "multiline": true,
104
+ "name": "code",
105
+ "password": false,
106
+ "placeholder": "",
107
+ "required": true,
108
+ "show": true,
109
+ "title_case": false,
110
+ "type": "code",
111
+ "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
112
+ },
113
+ "files": {
114
+ "advanced": true,
115
+ "display_name": "Files",
116
+ "dynamic": false,
117
+ "fileTypes": [
118
+ "txt",
119
+ "md",
120
+ "mdx",
121
+ "csv",
122
+ "json",
123
+ "yaml",
124
+ "yml",
125
+ "xml",
126
+ "html",
127
+ "htm",
128
+ "pdf",
129
+ "docx",
130
+ "py",
131
+ "sh",
132
+ "sql",
133
+ "js",
134
+ "ts",
135
+ "tsx",
136
+ "jpg",
137
+ "jpeg",
138
+ "png",
139
+ "bmp",
140
+ "image"
141
+ ],
142
+ "file_path": "",
143
+ "info": "Files to be sent with the message.",
144
+ "list": true,
145
+ "name": "files",
146
+ "placeholder": "",
147
+ "required": false,
148
+ "show": true,
149
+ "title_case": false,
150
+ "trace_as_metadata": true,
151
+ "type": "file",
152
+ "value": ""
153
+ },
154
+ "input_value": {
155
+ "advanced": false,
156
+ "display_name": "Text",
157
+ "dynamic": false,
158
+ "info": "Message to be passed as input.",
159
+ "input_types": [],
160
+ "list": false,
161
+ "load_from_db": false,
162
+ "multiline": true,
163
+ "name": "input_value",
164
+ "placeholder": "",
165
+ "required": false,
166
+ "show": true,
167
+ "title_case": false,
168
+ "trace_as_input": true,
169
+ "trace_as_metadata": true,
170
+ "type": "str",
171
+ "value": "Hello"
172
+ },
173
+ "sender": {
174
+ "advanced": true,
175
+ "display_name": "Sender Type",
176
+ "dynamic": false,
177
+ "info": "Type of sender.",
178
+ "name": "sender",
179
+ "options": [
180
+ "Machine",
181
+ "User"
182
+ ],
183
+ "placeholder": "",
184
+ "required": false,
185
+ "show": true,
186
+ "title_case": false,
187
+ "trace_as_metadata": true,
188
+ "type": "str",
189
+ "value": "User"
190
+ },
191
+ "sender_name": {
192
+ "advanced": true,
193
+ "display_name": "Sender Name",
194
+ "dynamic": false,
195
+ "info": "Name of the sender.",
196
+ "input_types": [
197
+ "Message"
198
+ ],
199
+ "list": false,
200
+ "load_from_db": false,
201
+ "name": "sender_name",
202
+ "placeholder": "",
203
+ "required": false,
204
+ "show": true,
205
+ "title_case": false,
206
+ "trace_as_input": true,
207
+ "trace_as_metadata": true,
208
+ "type": "str",
209
+ "value": "User"
210
+ },
211
+ "session_id": {
212
+ "advanced": true,
213
+ "display_name": "Session ID",
214
+ "dynamic": false,
215
+ "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
216
+ "input_types": [
217
+ "Message"
218
+ ],
219
+ "list": false,
220
+ "load_from_db": false,
221
+ "name": "session_id",
222
+ "placeholder": "",
223
+ "required": false,
224
+ "show": true,
225
+ "title_case": false,
226
+ "trace_as_input": true,
227
+ "trace_as_metadata": true,
228
+ "type": "str",
229
+ "value": ""
230
+ },
231
+ "should_store_message": {
232
+ "_input_type": "BoolInput",
233
+ "advanced": true,
234
+ "display_name": "Store Messages",
235
+ "dynamic": false,
236
+ "info": "Store the message in the history.",
237
+ "list": false,
238
+ "name": "should_store_message",
239
+ "placeholder": "",
240
+ "required": false,
241
+ "show": true,
242
+ "title_case": false,
243
+ "trace_as_metadata": true,
244
+ "type": "bool",
245
+ "value": true
246
+ },
247
+ "text_color": {
248
+ "_input_type": "MessageTextInput",
249
+ "advanced": true,
250
+ "display_name": "Text Color",
251
+ "dynamic": false,
252
+ "info": "The text color of the name",
253
+ "input_types": [
254
+ "Message"
255
+ ],
256
+ "list": false,
257
+ "load_from_db": false,
258
+ "name": "text_color",
259
+ "placeholder": "",
260
+ "required": false,
261
+ "show": true,
262
+ "title_case": false,
263
+ "trace_as_input": true,
264
+ "trace_as_metadata": true,
265
+ "type": "str",
266
+ "value": ""
267
+ }
268
+ }
269
+ },
270
+ "type": "ChatInput"
271
+ },
272
+ "dragging": false,
273
+ "height": 234,
274
+ "id": "ChatInput-w2pkG",
275
+ "measured": {
276
+ "height": 234,
277
+ "width": 320
278
+ },
279
+ "position": {
280
+ "x": 689.5720422421635,
281
+ "y": 765.155834131403
282
+ },
283
+ "positionAbsolute": {
284
+ "x": 689.5720422421635,
285
+ "y": 765.155834131403
286
+ },
287
+ "selected": false,
288
+ "type": "genericNode",
289
+ "width": 320
290
+ },
291
+ {
292
+ "data": {
293
+ "description": "Create a prompt template with dynamic variables.",
294
+ "display_name": "Prompt",
295
+ "id": "Prompt-6H4qF",
296
+ "node": {
297
+ "base_classes": [
298
+ "Message"
299
+ ],
300
+ "beta": false,
301
+ "conditional_paths": [],
302
+ "custom_fields": {
303
+ "template": []
304
+ },
305
+ "description": "Create a prompt template with dynamic variables.",
306
+ "display_name": "Prompt",
307
+ "documentation": "",
308
+ "edited": false,
309
+ "field_order": [
310
+ "template"
311
+ ],
312
+ "frozen": false,
313
+ "icon": "prompts",
314
+ "legacy": false,
315
+ "metadata": {},
316
+ "output_types": [],
317
+ "outputs": [
318
+ {
319
+ "allows_loop": false,
320
+ "cache": true,
321
+ "display_name": "Prompt Message",
322
+ "method": "build_prompt",
323
+ "name": "prompt",
324
+ "selected": "Message",
325
+ "tool_mode": true,
326
+ "types": [
327
+ "Message"
328
+ ],
329
+ "value": "__UNDEFINED__"
330
+ }
331
+ ],
332
+ "pinned": false,
333
+ "template": {
334
+ "_type": "Component",
335
+ "code": {
336
+ "advanced": true,
337
+ "dynamic": true,
338
+ "fileTypes": [],
339
+ "file_path": "",
340
+ "info": "",
341
+ "list": false,
342
+ "load_from_db": false,
343
+ "multiline": true,
344
+ "name": "code",
345
+ "password": false,
346
+ "placeholder": "",
347
+ "required": true,
348
+ "show": true,
349
+ "title_case": false,
350
+ "type": "code",
351
+ "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n 
frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n"
352
+ },
353
+ "template": {
354
+ "_input_type": "PromptInput",
355
+ "advanced": false,
356
+ "display_name": "Template",
357
+ "dynamic": false,
358
+ "info": "",
359
+ "list": false,
360
+ "load_from_db": false,
361
+ "name": "template",
362
+ "placeholder": "",
363
+ "required": false,
364
+ "show": true,
365
+ "title_case": false,
366
+ "tool_mode": false,
367
+ "trace_as_input": true,
368
+ "type": "prompt",
369
+ "value": "Answer the user as if you were a GenAI expert, enthusiastic about helping them get started building something fresh."
370
+ },
371
+ "tool_placeholder": {
372
+ "_input_type": "MessageTextInput",
373
+ "advanced": true,
374
+ "display_name": "Tool Placeholder",
375
+ "dynamic": false,
376
+ "info": "A placeholder input for tool mode.",
377
+ "input_types": [
378
+ "Message"
379
+ ],
380
+ "list": false,
381
+ "load_from_db": false,
382
+ "name": "tool_placeholder",
383
+ "placeholder": "",
384
+ "required": false,
385
+ "show": true,
386
+ "title_case": false,
387
+ "tool_mode": true,
388
+ "trace_as_input": true,
389
+ "trace_as_metadata": true,
390
+ "type": "str",
391
+ "value": ""
392
+ }
393
+ },
394
+ "tool_mode": false
395
+ },
396
+ "type": "Prompt"
397
+ },
398
+ "dragging": false,
399
+ "height": 260,
400
+ "id": "Prompt-6H4qF",
401
+ "measured": {
402
+ "height": 260,
403
+ "width": 320
404
+ },
405
+ "position": {
406
+ "x": 690.2015147036818,
407
+ "y": 1040.6625705470924
408
+ },
409
+ "positionAbsolute": {
410
+ "x": 690.2015147036818,
411
+ "y": 1018.5443911764344
412
+ },
413
+ "selected": false,
414
+ "type": "genericNode",
415
+ "width": 320
416
+ },
417
+ {
418
+ "data": {
419
+ "id": "undefined-axZIx",
420
+ "node": {
421
+ "description": "## 📖 README\n\nPerform basic prompting with an OpenAI model.\n\n#### Quick Start\n- Add your **OpenAI API key** to the **OpenAI Model**\n- Open the **Playground** to chat with your bot.\n\n#### Next steps:\n Experiment by changing the prompt and the OpenAI model temperature to see how the bot's responses change.",
422
+ "display_name": "Read Me",
423
+ "documentation": "",
424
+ "template": {
425
+ "backgroundColor": "neutral"
426
+ }
427
+ }
428
+ },
429
+ "dragging": false,
430
+ "height": 250,
431
+ "id": "undefined-axZIx",
432
+ "measured": {
433
+ "height": 250,
434
+ "width": 325
435
+ },
436
+ "position": {
437
+ "x": 66.38770028934243,
438
+ "y": 749.744424427066
439
+ },
440
+ "positionAbsolute": {
441
+ "x": 66.38770028934243,
442
+ "y": 749.744424427066
443
+ },
444
+ "resizing": false,
445
+ "selected": false,
446
+ "style": {
447
+ "height": 250,
448
+ "width": 600
449
+ },
450
+ "type": "noteNode",
451
+ "width": 600
452
+ },
453
+ {
454
+ "data": {
455
+ "id": "note-sPuYW",
456
+ "node": {
457
+ "description": "### 💡 Add your OpenAI API key here 👇",
458
+ "display_name": "",
459
+ "documentation": "",
460
+ "template": {
461
+ "backgroundColor": "transparent"
462
+ }
463
+ },
464
+ "type": "note"
465
+ },
466
+ "dragging": false,
467
+ "height": 324,
468
+ "id": "note-sPuYW",
469
+ "measured": {
470
+ "height": 324,
471
+ "width": 324
472
+ },
473
+ "position": {
474
+ "x": 1075.829573520873,
475
+ "y": 657.2057655038416
476
+ },
477
+ "positionAbsolute": {
478
+ "x": 1075.829573520873,
479
+ "y": 657.2057655038416
480
+ },
481
+ "resizing": false,
482
+ "selected": false,
483
+ "style": {
484
+ "height": 324,
485
+ "width": 324
486
+ },
487
+ "type": "noteNode",
488
+ "width": 324
489
+ },
490
+ {
491
+ "data": {
492
+ "id": "ChatOutput-Xju6V",
493
+ "node": {
494
+ "base_classes": [
495
+ "Message"
496
+ ],
497
+ "beta": false,
498
+ "conditional_paths": [],
499
+ "custom_fields": {},
500
+ "description": "Display a chat message in the Playground.",
501
+ "display_name": "Chat Output",
502
+ "documentation": "",
503
+ "edited": false,
504
+ "field_order": [
505
+ "input_value",
506
+ "should_store_message",
507
+ "sender",
508
+ "sender_name",
509
+ "session_id",
510
+ "data_template",
511
+ "background_color",
512
+ "chat_icon",
513
+ "text_color"
514
+ ],
515
+ "frozen": false,
516
+ "icon": "MessagesSquare",
517
+ "legacy": false,
518
+ "metadata": {},
519
+ "output_types": [],
520
+ "outputs": [
521
+ {
522
+ "allows_loop": false,
523
+ "cache": true,
524
+ "display_name": "Message",
525
+ "method": "message_response",
526
+ "name": "message",
527
+ "selected": "Message",
528
+ "tool_mode": true,
529
+ "types": [
530
+ "Message"
531
+ ],
532
+ "value": "__UNDEFINED__"
533
+ }
534
+ ],
535
+ "pinned": false,
536
+ "template": {
537
+ "_type": "Component",
538
+ "background_color": {
539
+ "_input_type": "MessageTextInput",
540
+ "advanced": true,
541
+ "display_name": "Background Color",
542
+ "dynamic": false,
543
+ "info": "The background color of the icon.",
544
+ "input_types": [
545
+ "Message"
546
+ ],
547
+ "list": false,
548
+ "load_from_db": false,
549
+ "name": "background_color",
550
+ "placeholder": "",
551
+ "required": false,
552
+ "show": true,
553
+ "title_case": false,
554
+ "tool_mode": false,
555
+ "trace_as_input": true,
556
+ "trace_as_metadata": true,
557
+ "type": "str",
558
+ "value": ""
559
+ },
560
+ "chat_icon": {
561
+ "_input_type": "MessageTextInput",
562
+ "advanced": true,
563
+ "display_name": "Icon",
564
+ "dynamic": false,
565
+ "info": "The icon of the message.",
566
+ "input_types": [
567
+ "Message"
568
+ ],
569
+ "list": false,
570
+ "load_from_db": false,
571
+ "name": "chat_icon",
572
+ "placeholder": "",
573
+ "required": false,
574
+ "show": true,
575
+ "title_case": false,
576
+ "tool_mode": false,
577
+ "trace_as_input": true,
578
+ "trace_as_metadata": true,
579
+ "type": "str",
580
+ "value": ""
581
+ },
582
+ "code": {
583
+ "advanced": true,
584
+ "dynamic": true,
585
+ "fileTypes": [],
586
+ "file_path": "",
587
+ "info": "",
588
+ "list": false,
589
+ "load_from_db": false,
590
+ "multiline": true,
591
+ "name": "code",
592
+ "password": false,
593
+ "placeholder": "",
594
+ "required": true,
595
+ "show": true,
596
+ "title_case": false,
597
+ "type": "code",
598
+ "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_AI,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n minimized = True\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
599
+ },
600
+ "data_template": {
601
+ "_input_type": "MessageTextInput",
602
+ "advanced": true,
603
+ "display_name": "Data Template",
604
+ "dynamic": false,
605
+ "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
606
+ "input_types": [
607
+ "Message"
608
+ ],
609
+ "list": false,
610
+ "load_from_db": false,
611
+ "name": "data_template",
612
+ "placeholder": "",
613
+ "required": false,
614
+ "show": true,
615
+ "title_case": false,
616
+ "tool_mode": false,
617
+ "trace_as_input": true,
618
+ "trace_as_metadata": true,
619
+ "type": "str",
620
+ "value": "{text}"
621
+ },
622
+ "input_value": {
623
+ "_input_type": "MessageInput",
624
+ "advanced": false,
625
+ "display_name": "Text",
626
+ "dynamic": false,
627
+ "info": "Message to be passed as output.",
628
+ "input_types": [
629
+ "Message"
630
+ ],
631
+ "list": false,
632
+ "load_from_db": false,
633
+ "name": "input_value",
634
+ "placeholder": "",
635
+ "required": false,
636
+ "show": true,
637
+ "title_case": false,
638
+ "trace_as_input": true,
639
+ "trace_as_metadata": true,
640
+ "type": "str",
641
+ "value": ""
642
+ },
643
+ "sender": {
644
+ "_input_type": "DropdownInput",
645
+ "advanced": true,
646
+ "combobox": false,
647
+ "display_name": "Sender Type",
648
+ "dynamic": false,
649
+ "info": "Type of sender.",
650
+ "name": "sender",
651
+ "options": [
652
+ "Machine",
653
+ "User"
654
+ ],
655
+ "placeholder": "",
656
+ "required": false,
657
+ "show": true,
658
+ "title_case": false,
659
+ "tool_mode": false,
660
+ "trace_as_metadata": true,
661
+ "type": "str",
662
+ "value": "Machine"
663
+ },
664
+ "sender_name": {
665
+ "_input_type": "MessageTextInput",
666
+ "advanced": true,
667
+ "display_name": "Sender Name",
668
+ "dynamic": false,
669
+ "info": "Name of the sender.",
670
+ "input_types": [
671
+ "Message"
672
+ ],
673
+ "list": false,
674
+ "load_from_db": false,
675
+ "name": "sender_name",
676
+ "placeholder": "",
677
+ "required": false,
678
+ "show": true,
679
+ "title_case": false,
680
+ "tool_mode": false,
681
+ "trace_as_input": true,
682
+ "trace_as_metadata": true,
683
+ "type": "str",
684
+ "value": "AI"
685
+ },
686
+ "session_id": {
687
+ "_input_type": "MessageTextInput",
688
+ "advanced": true,
689
+ "display_name": "Session ID",
690
+ "dynamic": false,
691
+ "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
692
+ "input_types": [
693
+ "Message"
694
+ ],
695
+ "list": false,
696
+ "load_from_db": false,
697
+ "name": "session_id",
698
+ "placeholder": "",
699
+ "required": false,
700
+ "show": true,
701
+ "title_case": false,
702
+ "tool_mode": false,
703
+ "trace_as_input": true,
704
+ "trace_as_metadata": true,
705
+ "type": "str",
706
+ "value": ""
707
+ },
708
+ "should_store_message": {
709
+ "_input_type": "BoolInput",
710
+ "advanced": true,
711
+ "display_name": "Store Messages",
712
+ "dynamic": false,
713
+ "info": "Store the message in the history.",
714
+ "list": false,
715
+ "name": "should_store_message",
716
+ "placeholder": "",
717
+ "required": false,
718
+ "show": true,
719
+ "title_case": false,
720
+ "trace_as_metadata": true,
721
+ "type": "bool",
722
+ "value": true
723
+ },
724
+ "text_color": {
725
+ "_input_type": "MessageTextInput",
726
+ "advanced": true,
727
+ "display_name": "Text Color",
728
+ "dynamic": false,
729
+ "info": "The text color of the name",
730
+ "input_types": [
731
+ "Message"
732
+ ],
733
+ "list": false,
734
+ "load_from_db": false,
735
+ "name": "text_color",
736
+ "placeholder": "",
737
+ "required": false,
738
+ "show": true,
739
+ "title_case": false,
740
+ "tool_mode": false,
741
+ "trace_as_input": true,
742
+ "trace_as_metadata": true,
743
+ "type": "str",
744
+ "value": ""
745
+ }
746
+ },
747
+ "tool_mode": false
748
+ },
749
+ "type": "ChatOutput"
750
+ },
751
+ "dragging": false,
752
+ "height": 234,
753
+ "id": "ChatOutput-Xju6V",
754
+ "measured": {
755
+ "height": 234,
756
+ "width": 320
757
+ },
758
+ "position": {
759
+ "x": 1460.070372772908,
760
+ "y": 872.7273956769025
761
+ },
762
+ "positionAbsolute": {
763
+ "x": 1444.936881624563,
764
+ "y": 872.7273956769025
765
+ },
766
+ "selected": false,
767
+ "type": "genericNode",
768
+ "width": 320
769
+ },
770
+ {
771
+ "id": "GoogleGenerativeAIModel-xh97T",
772
+ "type": "genericNode",
773
+ "position": {
774
+ "x": 1091.666782163277,
775
+ "y": 780.460184309683
776
+ },
777
+ "data": {
778
+ "node": {
779
+ "template": {
780
+ "_type": "Component",
781
+ "api_key": {
782
+ "load_from_db": false,
783
+ "required": true,
784
+ "placeholder": "",
785
+ "show": true,
786
+ "name": "api_key",
787
+ "value": "",
788
+ "display_name": "Google API Key",
789
+ "advanced": false,
790
+ "input_types": [
791
+ "Message"
792
+ ],
793
+ "dynamic": false,
794
+ "info": "The Google API Key to use for the Google Generative AI.",
795
+ "real_time_refresh": true,
796
+ "title_case": false,
797
+ "password": true,
798
+ "type": "str",
799
+ "_input_type": "SecretStrInput"
800
+ },
801
+ "code": {
802
+ "type": "code",
803
+ "required": true,
804
+ "placeholder": "",
805
+ "list": false,
806
+ "show": true,
807
+ "multiline": true,
808
+ "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.google_generative_ai_constants import GOOGLE_GENERATIVE_AI_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, SliderInput\nfrom langflow.inputs.inputs import BoolInput\nfrom langflow.schema import dotdict\n\n\nclass GoogleGenerativeAIComponent(LCModelComponent):\n display_name = \"Google Generative AI\"\n description = \"Generate text using Google Generative AI.\"\n icon = \"GoogleGenerativeAI\"\n name = \"GoogleGenerativeAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_output_tokens\", display_name=\"Max Output Tokens\", info=\"The maximum number of tokens to generate.\"\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model\",\n info=\"The name of the model to use.\",\n options=GOOGLE_GENERATIVE_AI_MODELS,\n value=\"gemini-1.5-pro\",\n refresh_button=True,\n combobox=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Google API Key\",\n info=\"The Google API Key to use for the Google Generative AI.\",\n required=True,\n real_time_refresh=True,\n ),\n FloatInput(\n name=\"top_p\",\n display_name=\"Top P\",\n info=\"The maximum cumulative probability of tokens to consider when sampling.\",\n advanced=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n range_spec=RangeSpec(min=0, max=2, step=0.01),\n info=\"Controls randomness. Lower values are more deterministic, higher values are more creative.\",\n ),\n IntInput(\n name=\"n\",\n display_name=\"N\",\n info=\"Number of chat completions to generate for each prompt. 
\"\n \"Note that the API may not return the full n completions if duplicates are generated.\",\n advanced=True,\n ),\n IntInput(\n name=\"top_k\",\n display_name=\"Top K\",\n info=\"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.\",\n advanced=True,\n ),\n BoolInput(\n name=\"tool_model_enabled\",\n display_name=\"Tool Model Enabled\",\n info=\"Whether to use the tool model.\",\n value=False,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_google_genai import ChatGoogleGenerativeAI\n except ImportError as e:\n msg = \"The 'langchain_google_genai' package is required to use the Google Generative AI model.\"\n raise ImportError(msg) from e\n\n google_api_key = self.api_key\n model = self.model_name\n max_output_tokens = self.max_output_tokens\n temperature = self.temperature\n top_k = self.top_k\n top_p = self.top_p\n n = self.n\n\n return ChatGoogleGenerativeAI(\n model=model,\n max_output_tokens=max_output_tokens or None,\n temperature=temperature,\n top_k=top_k or None,\n top_p=top_p or None,\n n=n or 1,\n google_api_key=SecretStr(google_api_key).get_secret_value(),\n )\n\n def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n try:\n import google.generativeai as genai\n\n genai.configure(api_key=self.api_key)\n model_ids = [\n model.name.replace(\"models/\", \"\")\n for model in genai.list_models()\n if \"generateContent\" in model.supported_generation_methods\n ]\n model_ids.sort(reverse=True)\n except (ImportError, ValueError) as e:\n logger.exception(f\"Error getting model names: {e}\")\n model_ids = GOOGLE_GENERATIVE_AI_MODELS\n if tool_model_enabled:\n try:\n from langchain_google_genai.chat_models import ChatGoogleGenerativeAI\n except ImportError as e:\n msg = \"langchain_google_genai is not installed.\"\n raise ImportError(msg) from e\n for model in model_ids:\n model_with_tool = ChatGoogleGenerativeAI(\n model=self.model_name,\n 
google_api_key=self.api_key,\n )\n if not self.supports_tool_calling(model_with_tool):\n model_ids.remove(model)\n return model_ids\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n if field_name in (\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\") and field_value:\n try:\n if len(self.api_key) == 0:\n ids = GOOGLE_GENERATIVE_AI_MODELS\n else:\n try:\n ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n logger.exception(f\"Error getting model names: {e}\")\n ids = GOOGLE_GENERATIVE_AI_MODELS\n build_config[\"model_name\"][\"options\"] = ids\n build_config[\"model_name\"][\"value\"] = ids[0]\n except Exception as e:\n msg = f\"Error getting model names: {e}\"\n raise ValueError(msg) from e\n return build_config\n",
809
+ "fileTypes": [],
810
+ "file_path": "",
811
+ "password": false,
812
+ "name": "code",
813
+ "advanced": true,
814
+ "dynamic": true,
815
+ "info": "",
816
+ "load_from_db": false,
817
+ "title_case": false
818
+ },
819
+ "input_value": {
820
+ "trace_as_input": true,
821
+ "tool_mode": false,
822
+ "trace_as_metadata": true,
823
+ "load_from_db": false,
824
+ "list": false,
825
+ "list_add_label": "Add More",
826
+ "required": false,
827
+ "placeholder": "",
828
+ "show": true,
829
+ "name": "input_value",
830
+ "value": "",
831
+ "display_name": "Input",
832
+ "advanced": false,
833
+ "input_types": [
834
+ "Message"
835
+ ],
836
+ "dynamic": false,
837
+ "info": "",
838
+ "title_case": false,
839
+ "type": "str",
840
+ "_input_type": "MessageInput"
841
+ },
842
+ "max_output_tokens": {
843
+ "tool_mode": false,
844
+ "trace_as_metadata": true,
845
+ "list": false,
846
+ "list_add_label": "Add More",
847
+ "required": false,
848
+ "placeholder": "",
849
+ "show": true,
850
+ "name": "max_output_tokens",
851
+ "value": "",
852
+ "display_name": "Max Output Tokens",
853
+ "advanced": false,
854
+ "dynamic": false,
855
+ "info": "The maximum number of tokens to generate.",
856
+ "title_case": false,
857
+ "type": "int",
858
+ "_input_type": "IntInput"
859
+ },
860
+ "model_name": {
861
+ "tool_mode": false,
862
+ "trace_as_metadata": true,
863
+ "options": [
864
+ "learnlm-1.5-pro-experimental",
865
+ "gemini-pro-vision",
866
+ "gemini-pro",
867
+ "gemini-exp-1206",
868
+ "gemini-2.0-pro-exp-02-05",
869
+ "gemini-2.0-pro-exp",
870
+ "gemini-2.0-flash-thinking-exp-1219",
871
+ "gemini-2.0-flash-thinking-exp-01-21",
872
+ "gemini-2.0-flash-thinking-exp",
873
+ "gemini-2.0-flash-lite-preview-02-05",
874
+ "gemini-2.0-flash-lite-preview",
875
+ "gemini-2.0-flash-exp",
876
+ "gemini-2.0-flash-001",
877
+ "gemini-2.0-flash",
878
+ "gemini-1.5-pro-latest",
879
+ "gemini-1.5-pro-002",
880
+ "gemini-1.5-pro-001",
881
+ "gemini-1.5-pro",
882
+ "gemini-1.5-flash-latest",
883
+ "gemini-1.5-flash-8b-latest",
884
+ "gemini-1.5-flash-8b-exp-0924",
885
+ "gemini-1.5-flash-8b-exp-0827",
886
+ "gemini-1.5-flash-8b-001",
887
+ "gemini-1.5-flash-8b",
888
+ "gemini-1.5-flash-002",
889
+ "gemini-1.5-flash-001-tuning",
890
+ "gemini-1.5-flash-001",
891
+ "gemini-1.5-flash",
892
+ "gemini-1.0-pro-vision-latest",
893
+ "gemini-1.0-pro-latest",
894
+ "gemini-1.0-pro-001",
895
+ "gemini-1.0-pro"
896
+ ],
897
+ "options_metadata": [],
898
+ "combobox": true,
899
+ "dialog_inputs": {},
900
+ "required": false,
901
+ "placeholder": "",
902
+ "show": true,
903
+ "name": "model_name",
904
+ "value": "learnlm-1.5-pro-experimental",
905
+ "display_name": "Model",
906
+ "advanced": false,
907
+ "dynamic": false,
908
+ "info": "The name of the model to use.",
909
+ "refresh_button": true,
910
+ "title_case": false,
911
+ "type": "str",
912
+ "_input_type": "DropdownInput"
913
+ },
914
+ "n": {
915
+ "tool_mode": false,
916
+ "trace_as_metadata": true,
917
+ "list": false,
918
+ "list_add_label": "Add More",
919
+ "required": false,
920
+ "placeholder": "",
921
+ "show": true,
922
+ "name": "n",
923
+ "value": "",
924
+ "display_name": "N",
925
+ "advanced": true,
926
+ "dynamic": false,
927
+ "info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.",
928
+ "title_case": false,
929
+ "type": "int",
930
+ "_input_type": "IntInput"
931
+ },
932
+ "stream": {
933
+ "tool_mode": false,
934
+ "trace_as_metadata": true,
935
+ "list": false,
936
+ "list_add_label": "Add More",
937
+ "required": false,
938
+ "placeholder": "",
939
+ "show": true,
940
+ "name": "stream",
941
+ "value": false,
942
+ "display_name": "Stream",
943
+ "advanced": false,
944
+ "dynamic": false,
945
+ "info": "Stream the response from the model. Streaming works only in Chat.",
946
+ "title_case": false,
947
+ "type": "bool",
948
+ "_input_type": "BoolInput"
949
+ },
950
+ "system_message": {
951
+ "tool_mode": false,
952
+ "trace_as_input": true,
953
+ "multiline": true,
954
+ "trace_as_metadata": true,
955
+ "load_from_db": false,
956
+ "list": false,
957
+ "list_add_label": "Add More",
958
+ "required": false,
959
+ "placeholder": "",
960
+ "show": true,
961
+ "name": "system_message",
962
+ "value": "",
963
+ "display_name": "System Message",
964
+ "advanced": false,
965
+ "input_types": [
966
+ "Message"
967
+ ],
968
+ "dynamic": false,
969
+ "info": "System message to pass to the model.",
970
+ "title_case": false,
971
+ "type": "str",
972
+ "_input_type": "MultilineInput"
973
+ },
974
+ "temperature": {
975
+ "tool_mode": false,
976
+ "min_label": "",
977
+ "max_label": "",
978
+ "min_label_icon": "",
979
+ "max_label_icon": "",
980
+ "slider_buttons": false,
981
+ "slider_buttons_options": [],
982
+ "slider_input": false,
983
+ "range_spec": {
984
+ "step_type": "float",
985
+ "min": 0,
986
+ "max": 2,
987
+ "step": 0.01
988
+ },
989
+ "required": false,
990
+ "placeholder": "",
991
+ "show": true,
992
+ "name": "temperature",
993
+ "value": 0.1,
994
+ "display_name": "Temperature",
995
+ "advanced": false,
996
+ "dynamic": false,
997
+ "info": "Controls randomness. Lower values are more deterministic, higher values are more creative.",
998
+ "title_case": false,
999
+ "type": "slider",
1000
+ "_input_type": "SliderInput"
1001
+ },
1002
+ "tool_model_enabled": {
1003
+ "tool_mode": false,
1004
+ "trace_as_metadata": true,
1005
+ "list": false,
1006
+ "list_add_label": "Add More",
1007
+ "required": false,
1008
+ "placeholder": "",
1009
+ "show": true,
1010
+ "name": "tool_model_enabled",
1011
+ "value": false,
1012
+ "display_name": "Tool Model Enabled",
1013
+ "advanced": false,
1014
+ "dynamic": false,
1015
+ "info": "Whether to use the tool model.",
1016
+ "title_case": false,
1017
+ "type": "bool",
1018
+ "_input_type": "BoolInput"
1019
+ },
1020
+ "top_k": {
1021
+ "tool_mode": false,
1022
+ "trace_as_metadata": true,
1023
+ "list": false,
1024
+ "list_add_label": "Add More",
1025
+ "required": false,
1026
+ "placeholder": "",
1027
+ "show": true,
1028
+ "name": "top_k",
1029
+ "value": "",
1030
+ "display_name": "Top K",
1031
+ "advanced": true,
1032
+ "dynamic": false,
1033
+ "info": "Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.",
1034
+ "title_case": false,
1035
+ "type": "int",
1036
+ "_input_type": "IntInput"
1037
+ },
1038
+ "top_p": {
1039
+ "tool_mode": false,
1040
+ "trace_as_metadata": true,
1041
+ "list": false,
1042
+ "list_add_label": "Add More",
1043
+ "required": false,
1044
+ "placeholder": "",
1045
+ "show": true,
1046
+ "name": "top_p",
1047
+ "value": "",
1048
+ "display_name": "Top P",
1049
+ "advanced": true,
1050
+ "dynamic": false,
1051
+ "info": "The maximum cumulative probability of tokens to consider when sampling.",
1052
+ "title_case": false,
1053
+ "type": "float",
1054
+ "_input_type": "FloatInput"
1055
+ }
1056
+ },
1057
+ "description": "Generate text using Google Generative AI.",
1058
+ "icon": "GoogleGenerativeAI",
1059
+ "base_classes": [
1060
+ "LanguageModel",
1061
+ "Message"
1062
+ ],
1063
+ "display_name": "Google Generative AI",
1064
+ "documentation": "",
1065
+ "minimized": false,
1066
+ "custom_fields": {},
1067
+ "output_types": [],
1068
+ "pinned": false,
1069
+ "conditional_paths": [],
1070
+ "frozen": false,
1071
+ "outputs": [
1072
+ {
1073
+ "types": [
1074
+ "Message"
1075
+ ],
1076
+ "selected": "Message",
1077
+ "name": "text_output",
1078
+ "hidden": null,
1079
+ "display_name": "Message",
1080
+ "method": "text_response",
1081
+ "value": "__UNDEFINED__",
1082
+ "cache": true,
1083
+ "required_inputs": [],
1084
+ "allows_loop": false,
1085
+ "tool_mode": true
1086
+ },
1087
+ {
1088
+ "types": [
1089
+ "LanguageModel"
1090
+ ],
1091
+ "selected": "LanguageModel",
1092
+ "name": "model_output",
1093
+ "hidden": null,
1094
+ "display_name": "Language Model",
1095
+ "method": "build_model",
1096
+ "value": "__UNDEFINED__",
1097
+ "cache": true,
1098
+ "required_inputs": [
1099
+ "api_key"
1100
+ ],
1101
+ "allows_loop": false,
1102
+ "tool_mode": true
1103
+ }
1104
+ ],
1105
+ "field_order": [
1106
+ "input_value",
1107
+ "system_message",
1108
+ "stream",
1109
+ "max_output_tokens",
1110
+ "model_name",
1111
+ "api_key",
1112
+ "top_p",
1113
+ "temperature",
1114
+ "n",
1115
+ "top_k",
1116
+ "tool_model_enabled"
1117
+ ],
1118
+ "beta": false,
1119
+ "legacy": false,
1120
+ "edited": false,
1121
+ "metadata": {},
1122
+ "tool_mode": false
1123
+ },
1124
+ "showNode": true,
1125
+ "type": "GoogleGenerativeAIModel",
1126
+ "id": "GoogleGenerativeAIModel-xh97T"
1127
+ },
1128
+ "selected": true,
1129
+ "measured": {
1130
+ "width": 320,
1131
+ "height": 777
1132
+ },
1133
+ "dragging": false
1134
+ }
1135
+ ],
1136
+ "edges": [
1137
+ {
1138
+ "source": "ChatInput-w2pkG",
1139
+ "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-w2pkGœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}",
1140
+ "target": "GoogleGenerativeAIModel-xh97T",
1141
+ "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
1142
+ "data": {
1143
+ "targetHandle": {
1144
+ "fieldName": "input_value",
1145
+ "id": "GoogleGenerativeAIModel-xh97T",
1146
+ "inputTypes": [
1147
+ "Message"
1148
+ ],
1149
+ "type": "str"
1150
+ },
1151
+ "sourceHandle": {
1152
+ "dataType": "ChatInput",
1153
+ "id": "ChatInput-w2pkG",
1154
+ "name": "message",
1155
+ "output_types": [
1156
+ "Message"
1157
+ ]
1158
+ }
1159
+ },
1160
+ "id": "xy-edge__ChatInput-w2pkG{œdataTypeœ:œChatInputœ,œidœ:œChatInput-w2pkGœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-xh97T{œfieldNameœ:œinput_valueœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
1161
+ },
1162
+ {
1163
+ "source": "Prompt-6H4qF",
1164
+ "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-6H4qFœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}",
1165
+ "target": "GoogleGenerativeAIModel-xh97T",
1166
+ "targetHandle": "{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
1167
+ "data": {
1168
+ "targetHandle": {
1169
+ "fieldName": "system_message",
1170
+ "id": "GoogleGenerativeAIModel-xh97T",
1171
+ "inputTypes": [
1172
+ "Message"
1173
+ ],
1174
+ "type": "str"
1175
+ },
1176
+ "sourceHandle": {
1177
+ "dataType": "Prompt",
1178
+ "id": "Prompt-6H4qF",
1179
+ "name": "prompt",
1180
+ "output_types": [
1181
+ "Message"
1182
+ ]
1183
+ }
1184
+ },
1185
+ "id": "xy-edge__Prompt-6H4qF{œdataTypeœ:œPromptœ,œidœ:œPrompt-6H4qFœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-GoogleGenerativeAIModel-xh97T{œfieldNameœ:œsystem_messageœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
1186
+ },
1187
+ {
1188
+ "source": "GoogleGenerativeAIModel-xh97T",
1189
+ "sourceHandle": "{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}",
1190
+ "target": "ChatOutput-Xju6V",
1191
+ "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Xju6Vœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
1192
+ "data": {
1193
+ "targetHandle": {
1194
+ "fieldName": "input_value",
1195
+ "id": "ChatOutput-Xju6V",
1196
+ "inputTypes": [
1197
+ "Message"
1198
+ ],
1199
+ "type": "str"
1200
+ },
1201
+ "sourceHandle": {
1202
+ "dataType": "GoogleGenerativeAIModel",
1203
+ "id": "GoogleGenerativeAIModel-xh97T",
1204
+ "name": "text_output",
1205
+ "output_types": [
1206
+ "Message"
1207
+ ]
1208
+ }
1209
+ },
1210
+ "id": "xy-edge__GoogleGenerativeAIModel-xh97T{œdataTypeœ:œGoogleGenerativeAIModelœ,œidœ:œGoogleGenerativeAIModel-xh97Tœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-Xju6V{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Xju6Vœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
1211
+ }
1212
+ ],
1213
+ "viewport": {
1214
+ "x": 449.86558686155433,
1215
+ "y": -298.7022457531368,
1216
+ "zoom": 0.5739879299960177
1217
+ }
1218
+ },
1219
+ "description": "Perform basic prompting with a Google Generative AI model.",
1220
+ "name": "Basic Prompting",
1221
+ "last_tested_version": "1.1.4.post1",
1222
+ "endpoint_name": null,
1223
+ "is_component": false
1224
+ }