n8n-docs-datasets / docs-dataset / fullstack_training.jsonl
DavidrPatton's picture
Add docs-dataset dataset
4484c0f verified
{"instruction": "How do I implement JWT authentication in n8n workflows?", "output": "# JWT Authentication in n8n\n\n## Generate JWT Token\n```javascript\n// Code node - Generate JWT\nconst crypto = require('crypto');\n\nconst header = { alg: 'HS256', typ: 'JWT' };\nconst payload = {\n sub: $json.userId,\n email: $json.email,\n iat: Math.floor(Date.now() / 1000),\n exp: Math.floor(Date.now() / 1000) + 3600 // 1 hour\n};\n\nconst base64Header = Buffer.from(JSON.stringify(header)).toString('base64url');\nconst base64Payload = Buffer.from(JSON.stringify(payload)).toString('base64url');\n\nconst signature = crypto\n .createHmac('sha256', $env.JWT_SECRET)\n .update(`${base64Header}.${base64Payload}`)\n .digest('base64url');\n\nreturn { token: `${base64Header}.${base64Payload}.${signature}` };\n```\n\n## Verify JWT Token\n```javascript\n// Code node - Verify JWT\nconst [header, payload, signature] = $json.token.split('.');\n\nconst expectedSig = crypto\n .createHmac('sha256', $env.JWT_SECRET)\n .update(`${header}.${payload}`)\n .digest('base64url');\n\nif (signature !== expectedSig) throw new Error('Invalid token');\n\nconst decoded = JSON.parse(Buffer.from(payload, 'base64url').toString());\nif (decoded.exp < Date.now() / 1000) throw new Error('Token expired');\n\nreturn { valid: true, user: decoded };\n```\n\n## Workflow Pattern\n```\nLogin Request \u2192 Validate Credentials \u2192 Generate JWT \u2192 Return Token\nProtected Endpoint \u2192 Extract Token \u2192 Verify JWT \u2192 Process Request\n```", "metadata": {"topic": "jwt_auth", "type": "implementation"}}
{"instruction": "How do I implement OAuth 2.0 flows in n8n?", "output": "# OAuth 2.0 Implementation in n8n\n\n## Authorization Code Flow\n\n### Step 1: Redirect to Auth Provider\n```javascript\nconst authUrl = new URL('https://provider.com/oauth/authorize');\nauthUrl.searchParams.set('client_id', $env.CLIENT_ID);\nauthUrl.searchParams.set('redirect_uri', 'https://yourapp.com/callback');\nauthUrl.searchParams.set('response_type', 'code');\nauthUrl.searchParams.set('scope', 'read write');\nauthUrl.searchParams.set('state', crypto.randomBytes(16).toString('hex'));\n\nreturn { redirectUrl: authUrl.toString() };\n```\n\n### Step 2: Exchange Code for Token\n```json\n{\n \"name\": \"Exchange Code\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"https://provider.com/oauth/token\",\n \"method\": \"POST\",\n \"body\": {\n \"grant_type\": \"authorization_code\",\n \"code\": \"={{ $json.code }}\",\n \"client_id\": \"={{ $env.CLIENT_ID }}\",\n \"client_secret\": \"={{ $env.CLIENT_SECRET }}\",\n \"redirect_uri\": \"https://yourapp.com/callback\"\n }\n }\n}\n```\n\n### Step 3: Refresh Token\n```json\n{\n \"name\": \"Refresh Token\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"https://provider.com/oauth/token\",\n \"method\": \"POST\",\n \"body\": {\n \"grant_type\": \"refresh_token\",\n \"refresh_token\": \"={{ $json.refresh_token }}\",\n \"client_id\": \"={{ $env.CLIENT_ID }}\"\n }\n }\n}\n```\n\n## Token Storage\nStore tokens securely in database with encryption:\n```javascript\nconst encrypted = crypto.createCipheriv('aes-256-gcm', key, iv)\n .update(token, 'utf8', 'hex');\nawait db.upsert('oauth_tokens', { user_id, encrypted_token });\n```", "metadata": {"topic": "oauth", "type": "implementation"}}
{"instruction": "How do I implement real-time data sync with Supabase in n8n?", "output": "# Real-Time Supabase Sync in n8n\n\n## Supabase Realtime Triggers\n```javascript\n// Subscribe to table changes\nconst { createClient } = require('@supabase/supabase-js');\nconst supabase = createClient(SUPABASE_URL, SUPABASE_KEY);\n\nsupabase\n .channel('table-changes')\n .on('postgres_changes', \n { event: '*', schema: 'public', table: 'orders' },\n (payload) => {\n // Trigger n8n webhook with payload\n fetch('https://n8n.example.com/webhook/orders', {\n method: 'POST',\n body: JSON.stringify(payload)\n });\n }\n )\n .subscribe();\n```\n\n## n8n Webhook Handler\n```json\n{\n \"nodes\": [\n {\n \"name\": \"Supabase Webhook\",\n \"type\": \"n8n-nodes-base.webhook\",\n \"parameters\": {\"path\": \"orders\"}\n },\n {\n \"name\": \"Route by Event\",\n \"type\": \"n8n-nodes-base.switch\",\n \"parameters\": {\n \"rules\": [\n {\"value\": \"INSERT\"},\n {\"value\": \"UPDATE\"},\n {\"value\": \"DELETE\"}\n ]\n }\n },\n {\n \"name\": \"Handle New Order\",\n \"type\": \"n8n-nodes-base.code\",\n \"parameters\": {\n \"jsCode\": \"// Process new order\\nconst order = $json.new;\\nreturn { order, action: 'notify_warehouse' };\"\n }\n }\n ]\n}\n```\n\n## Bidirectional Sync Pattern\n```\nSupabase Change \u2192 Webhook \u2192 n8n Process \u2192 Update External System\nExternal Change \u2192 n8n Poll/Webhook \u2192 Supabase Upsert\n```", "metadata": {"topic": "realtime_sync", "type": "workflow"}}
{"instruction": "How do I use WebSockets with n8n for real-time updates?", "output": "# WebSocket Integration with n8n\n\n## WebSocket Server (External)\n```javascript\n// Node.js WebSocket server\nconst WebSocket = require('ws');\nconst wss = new WebSocket.Server({ port: 8080 });\n\nwss.on('connection', (ws) => {\n ws.on('message', async (message) => {\n // Forward to n8n webhook\n await fetch('https://n8n.example.com/webhook/ws-message', {\n method: 'POST',\n body: message,\n headers: { 'Content-Type': 'application/json' }\n });\n });\n});\n\n// Broadcast from n8n\napp.post('/broadcast', (req, res) => {\n wss.clients.forEach(client => {\n if (client.readyState === WebSocket.OPEN) {\n client.send(JSON.stringify(req.body));\n }\n });\n res.json({ sent: true });\n});\n```\n\n## n8n Workflow\n```json\n{\n \"nodes\": [\n {\"name\": \"WS Message Received\", \"type\": \"n8n-nodes-base.webhook\"},\n {\"name\": \"Process Message\", \"type\": \"n8n-nodes-base.code\"},\n {\"name\": \"Broadcast Response\", \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"http://ws-server:8080/broadcast\",\n \"method\": \"POST\"\n }}\n ]\n}\n```\n\n## Use Cases\n- Live dashboards\n- Chat applications\n- Real-time notifications\n- Collaborative editing\n- Live order tracking", "metadata": {"topic": "websockets", "type": "integration"}}
{"instruction": "How do I trigger AWS Lambda functions from n8n?", "output": "# AWS Lambda Integration with n8n\n\n## Direct Invocation\n```json\n{\n \"name\": \"Invoke Lambda\",\n \"type\": \"n8n-nodes-base.awsLambda\",\n \"parameters\": {\n \"function\": \"my-function-name\",\n \"payload\": \"={{ JSON.stringify($json) }}\",\n \"invocationType\": \"RequestResponse\"\n },\n \"credentials\": {\"aws\": {\"accessKeyId\": \"\", \"secretAccessKey\": \"\", \"region\": \"us-east-1\"}}\n}\n```\n\n## Via API Gateway\n```json\n{\n \"name\": \"Lambda via API Gateway\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"https://abc123.execute-api.us-east-1.amazonaws.com/prod/function\",\n \"method\": \"POST\",\n \"headers\": {\"x-api-key\": \"={{ $env.API_GATEWAY_KEY }}\"},\n \"body\": \"={{ JSON.stringify($json) }}\"\n }\n}\n```\n\n## Async Invocation\n```json\n{\n \"parameters\": {\n \"invocationType\": \"Event\",\n \"payload\": \"={{ JSON.stringify({taskId: $json.id, callback: 'https://n8n.example.com/webhook/lambda-complete'}) }}\"\n }\n}\n```\n\n## Lambda Function Pattern\n```javascript\n// Lambda handler\nexports.handler = async (event) => {\n const result = await processData(event);\n \n // Callback to n8n\n await fetch(event.callback, {\n method: 'POST',\n body: JSON.stringify({ taskId: event.taskId, result })\n });\n \n return { statusCode: 200 };\n};\n```", "metadata": {"topic": "aws_lambda", "type": "integration"}}
{"instruction": "How do I use Google Cloud Functions with n8n?", "output": "# Google Cloud Functions with n8n\n\n## HTTP Trigger\n```json\n{\n \"name\": \"Call Cloud Function\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"https://REGION-PROJECT.cloudfunctions.net/function-name\",\n \"method\": \"POST\",\n \"headers\": {\n \"Authorization\": \"Bearer {{ $json.idToken }}\"\n },\n \"body\": \"={{ JSON.stringify($json.payload) }}\"\n }\n}\n```\n\n## Get ID Token for Auth\n```json\n{\n \"name\": \"Get ID Token\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/identity?audience=https://REGION-PROJECT.cloudfunctions.net/function-name\",\n \"headers\": {\"Metadata-Flavor\": \"Google\"}\n }\n}\n```\n\n## Cloud Function Example\n```javascript\nconst functions = require('@google-cloud/functions-framework');\n\nfunctions.http('processData', async (req, res) => {\n const data = req.body;\n const result = await heavyProcessing(data);\n \n // Optional: callback to n8n\n if (data.callbackUrl) {\n await fetch(data.callbackUrl, {\n method: 'POST',\n body: JSON.stringify(result)\n });\n }\n \n res.json(result);\n});\n```\n\n## Pub/Sub Trigger Pattern\n```\nn8n \u2192 Publish to Pub/Sub \u2192 Cloud Function \u2192 Process \u2192 Webhook back to n8n\n```", "metadata": {"topic": "gcp_functions", "type": "integration"}}
{"instruction": "How do I build ETL pipelines in n8n?", "output": "# ETL Pipeline Implementation in n8n\n\n## Extract Phase\n```json\n{\n \"nodes\": [\n {\n \"name\": \"Extract from Source DB\",\n \"type\": \"n8n-nodes-base.postgres\",\n \"parameters\": {\n \"operation\": \"executeQuery\",\n \"query\": \"SELECT * FROM orders WHERE updated_at > '{{ $json.lastSync }}'\"\n }\n },\n {\n \"name\": \"Extract from API\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"https://api.source.com/data\",\n \"qs\": {\"since\": \"={{ $json.lastSync }}\"}\n }\n }\n ]\n}\n```\n\n## Transform Phase\n```javascript\n// Code node - Transform data\nconst transformed = items.map(item => {\n return {\n json: {\n id: item.json.id,\n customer_name: `${item.json.first_name} ${item.json.last_name}`.trim(),\n total_amount: parseFloat(item.json.amount) || 0,\n order_date: new Date(item.json.created_at).toISOString().split('T')[0],\n status: item.json.status.toUpperCase(),\n source_system: 'legacy_db'\n }\n };\n});\nreturn transformed;\n```\n\n## Load Phase\n```json\n{\n \"name\": \"Load to Data Warehouse\",\n \"type\": \"n8n-nodes-base.postgres\",\n \"parameters\": {\n \"operation\": \"insert\",\n \"table\": \"fact_orders\",\n \"columns\": \"id, customer_name, total_amount, order_date, status, source_system\",\n \"options\": {\"onConflict\": \"id\", \"updateOnConflict\": true}\n }\n}\n```\n\n## Full ETL Workflow\n```\nSchedule Trigger (daily) \u2192 Get Last Sync Time \u2192 Extract Sources (parallel) \u2192\nMerge Data \u2192 Transform/Clean \u2192 Validate \u2192 Load to Warehouse \u2192 Update Sync Time\n```\n\n## Error Handling\n- Log failed records to error table\n- Continue on fail for non-critical fields\n- Retry transient failures\n- Alert on threshold exceeded", "metadata": {"topic": "etl", "type": "workflow"}}
{"instruction": "How do I implement CDC (Change Data Capture) with n8n?", "output": "# Change Data Capture with n8n\n\n## Database Triggers Approach\n```sql\n-- PostgreSQL trigger\nCREATE OR REPLACE FUNCTION notify_n8n()\nRETURNS trigger AS $$\nBEGIN\n PERFORM pg_notify('table_changes', \n json_build_object(\n 'table', TG_TABLE_NAME,\n 'operation', TG_OP,\n 'old', OLD,\n 'new', NEW\n )::text\n );\n RETURN NEW;\nEND;\n$$ LANGUAGE plpgsql;\n\nCREATE TRIGGER orders_cdc\nAFTER INSERT OR UPDATE OR DELETE ON orders\nFOR EACH ROW EXECUTE FUNCTION notify_n8n();\n```\n\n## n8n Listener Service\n```javascript\n// External service listening to Postgres NOTIFY\nconst { Client } = require('pg');\nconst client = new Client(connectionString);\n\nawait client.connect();\nawait client.query('LISTEN table_changes');\n\nclient.on('notification', async (msg) => {\n const payload = JSON.parse(msg.payload);\n await fetch('https://n8n.example.com/webhook/cdc', {\n method: 'POST',\n body: JSON.stringify(payload)\n });\n});\n```\n\n## Debezium Integration\n```json\n{\n \"name\": \"Kafka CDC Consumer\",\n \"nodes\": [\n {\"name\": \"Poll Kafka\", \"type\": \"n8n-nodes-base.kafka\",\n \"parameters\": {\"topic\": \"dbserver1.public.orders\"}},\n {\"name\": \"Parse CDC Event\", \"type\": \"n8n-nodes-base.code\",\n \"parameters\": {\"jsCode\": \"const event = $json;\\nreturn {op: event.op, before: event.before, after: event.after};\"}}\n ]\n}\n```\n\n## CDC Event Routing\n```javascript\nswitch ($json.operation) {\n case 'INSERT': await handleInsert($json.new); break;\n case 'UPDATE': await handleUpdate($json.old, $json.new); break;\n case 'DELETE': await handleDelete($json.old); break;\n}\n```", "metadata": {"topic": "cdc", "type": "integration"}}
{"instruction": "How do I integrate MongoDB with n8n?", "output": "# MongoDB Integration with n8n\n\n## Basic Operations\n\n### Find Documents\n```json\n{\n \"name\": \"Find Orders\",\n \"type\": \"n8n-nodes-base.mongoDb\",\n \"parameters\": {\n \"operation\": \"find\",\n \"collection\": \"orders\",\n \"query\": {\"status\": \"pending\", \"created_at\": {\"$gte\": \"={{ $json.startDate }}\"}},\n \"options\": {\"sort\": {\"created_at\": -1}, \"limit\": 100}\n }\n}\n```\n\n### Insert Document\n```json\n{\n \"name\": \"Insert Order\",\n \"type\": \"n8n-nodes-base.mongoDb\",\n \"parameters\": {\n \"operation\": \"insert\",\n \"collection\": \"orders\",\n \"fields\": {\n \"customer_id\": \"={{ $json.customerId }}\",\n \"items\": \"={{ $json.items }}\",\n \"total\": \"={{ $json.total }}\",\n \"created_at\": \"={{ new Date().toISOString() }}\"\n }\n }\n}\n```\n\n### Update Document\n```json\n{\n \"name\": \"Update Order Status\",\n \"type\": \"n8n-nodes-base.mongoDb\",\n \"parameters\": {\n \"operation\": \"update\",\n \"collection\": \"orders\",\n \"query\": {\"_id\": {\"$oid\": \"={{ $json.orderId }}\"}},\n \"update\": {\"$set\": {\"status\": \"shipped\", \"shipped_at\": \"={{ new Date().toISOString() }}\"}}\n }\n}\n```\n\n### Aggregation Pipeline\n```json\n{\n \"parameters\": {\n \"operation\": \"aggregate\",\n \"collection\": \"orders\",\n \"pipeline\": [\n {\"$match\": {\"status\": \"completed\"}},\n {\"$group\": {\"_id\": \"$customer_id\", \"total_spent\": {\"$sum\": \"$total\"}}},\n {\"$sort\": {\"total_spent\": -1}},\n {\"$limit\": 10}\n ]\n }\n}\n```\n\n## Connection String\n```\nmongodb+srv://user:pass@cluster.mongodb.net/database?retryWrites=true\n```", "metadata": {"topic": "mongodb", "type": "integration"}}
{"instruction": "How do I use Firebase/Firestore with n8n?", "output": "# Firebase/Firestore Integration with n8n\n\n## Firestore Operations\n\n### Get Document\n```json\n{\n \"name\": \"Get User\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"https://firestore.googleapis.com/v1/projects/PROJECT/databases/(default)/documents/users/{{ $json.userId }}\",\n \"headers\": {\"Authorization\": \"Bearer {{ $json.accessToken }}\"}\n }\n}\n```\n\n### Create Document\n```json\n{\n \"name\": \"Create Order\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"https://firestore.googleapis.com/v1/projects/PROJECT/databases/(default)/documents/orders\",\n \"method\": \"POST\",\n \"body\": {\n \"fields\": {\n \"customer\": {\"stringValue\": \"={{ $json.customer }}\"},\n \"total\": {\"doubleValue\": \"={{ $json.total }}\"},\n \"items\": {\"arrayValue\": {\"values\": \"={{ $json.items.map(i => ({stringValue: i})) }}\"}}\n }\n }\n }\n}\n```\n\n### Query Collection\n```json\n{\n \"parameters\": {\n \"url\": \"https://firestore.googleapis.com/v1/projects/PROJECT/databases/(default)/documents:runQuery\",\n \"method\": \"POST\",\n \"body\": {\n \"structuredQuery\": {\n \"from\": [{\"collectionId\": \"orders\"}],\n \"where\": {\n \"fieldFilter\": {\n \"field\": {\"fieldPath\": \"status\"},\n \"op\": \"EQUAL\",\n \"value\": {\"stringValue\": \"pending\"}\n }\n },\n \"orderBy\": [{\"field\": {\"fieldPath\": \"created_at\"}, \"direction\": \"DESCENDING\"}],\n \"limit\": 50\n }\n }\n }\n}\n```\n\n## Firebase Admin SDK (Code Node)\n```javascript\nconst admin = require('firebase-admin');\nadmin.initializeApp({ credential: admin.credential.cert(serviceAccount) });\nconst db = admin.firestore();\n\nconst snapshot = await db.collection('users').where('active', '==', true).get();\nreturn snapshot.docs.map(doc => ({ id: doc.id, ...doc.data() }));\n```", "metadata": {"topic": "firebase", "type": "integration"}}
{"instruction": "How do I trigger n8n workflows from Go microservices?", "output": "# Go Microservices + n8n Integration\n\n## Trigger n8n Webhook from Go\n```go\npackage main\n\nimport (\n \"bytes\"\n \"encoding/json\"\n \"net/http\"\n)\n\ntype WorkflowPayload struct {\n OrderID string `json:\"order_id\"`\n CustomerID string `json:\"customer_id\"`\n Amount float64 `json:\"amount\"`\n Action string `json:\"action\"`\n}\n\nfunc triggerN8NWorkflow(payload WorkflowPayload) error {\n jsonData, _ := json.Marshal(payload)\n \n resp, err := http.Post(\n \"https://n8n.example.com/webhook/order-created\",\n \"application/json\",\n bytes.NewBuffer(jsonData),\n )\n if err != nil {\n return err\n }\n defer resp.Body.Close()\n \n return nil\n}\n\n// Usage in your service\nfunc handleNewOrder(order Order) {\n // Process order...\n \n // Trigger n8n workflow\n triggerN8NWorkflow(WorkflowPayload{\n OrderID: order.ID,\n CustomerID: order.CustomerID,\n Amount: order.Total,\n Action: \"new_order\",\n })\n}\n```\n\n## n8n Calling Go Service\n```json\n{\n \"name\": \"Call Go Service\",\n \"type\": \"n8n-nodes-base.httpRequest\",\n \"parameters\": {\n \"url\": \"http://go-service:8080/api/process\",\n \"method\": \"POST\",\n \"headers\": {\"Authorization\": \"Bearer {{ $env.SERVICE_TOKEN }}\"},\n \"body\": \"={{ JSON.stringify($json) }}\"\n }\n}\n```\n\n## Go Service Endpoint\n```go\nfunc processHandler(w http.ResponseWriter, r *http.Request) {\n var data map[string]interface{}\n json.NewDecoder(r.Body).Decode(&data)\n \n result := processData(data)\n \n json.NewEncoder(w).Encode(result)\n}\n```\n\n## Async Pattern with Queue\n```go\n// Publish to queue, n8n consumes\nfunc publishToQueue(msg Message) {\n ch.Publish(\"n8n-tasks\", \"\", false, false, amqp.Publishing{\n ContentType: \"application/json\",\n Body: msgBytes,\n })\n}\n```", "metadata": {"topic": "go_microservices", "type": "integration"}}
{"instruction": "How do I implement parallel execution in n8n for better performance?", "output": "# Parallel Execution in n8n\n\n## Split in Batches with Parallel Processing\n```json\n{\n \"nodes\": [\n {\"name\": \"Get All Items\", \"type\": \"n8n-nodes-base.httpRequest\"},\n {\"name\": \"Split Batches\", \"type\": \"n8n-nodes-base.splitInBatches\",\n \"parameters\": {\"batchSize\": 10}},\n {\"name\": \"Process Item\", \"type\": \"n8n-nodes-base.httpRequest\",\n \"executeOnce\": false}\n ]\n}\n```\n\n## Parallel HTTP Requests\n```javascript\n// Code node - parallel fetch\nconst urls = $json.urls;\nconst results = await Promise.all(\n urls.map(url => \n fetch(url).then(r => r.json()).catch(e => ({ error: e.message }))\n )\n);\nreturn results.map(r => ({ json: r }));\n```\n\n## Merge Parallel Branches\n```json\n{\n \"nodes\": [\n {\"name\": \"Start\", \"type\": \"n8n-nodes-base.manualTrigger\"},\n {\"name\": \"Branch 1\", \"type\": \"n8n-nodes-base.httpRequest\", \"parameters\": {\"url\": \"https://api1.com\"}},\n {\"name\": \"Branch 2\", \"type\": \"n8n-nodes-base.httpRequest\", \"parameters\": {\"url\": \"https://api2.com\"}},\n {\"name\": \"Branch 3\", \"type\": \"n8n-nodes-base.httpRequest\", \"parameters\": {\"url\": \"https://api3.com\"}},\n {\"name\": \"Merge All\", \"type\": \"n8n-nodes-base.merge\", \"parameters\": {\"mode\": \"append\"}}\n ],\n \"connections\": {\n \"Start\": {\"main\": [[{\"node\": \"Branch 1\"}, {\"node\": \"Branch 2\"}, {\"node\": \"Branch 3\"}]]},\n \"Branch 1\": {\"main\": [[{\"node\": \"Merge All\"}]]},\n \"Branch 2\": {\"main\": [[{\"node\": \"Merge All\"}]]},\n \"Branch 3\": {\"main\": [[{\"node\": \"Merge All\"}]]}\n }\n}\n```\n\n## Worker Queue Pattern\n```\nMain Workflow \u2192 Push to Queue \u2192 Multiple Workers Process \u2192 Results to Queue \u2192 Aggregate Results\n```\n\n## Performance Tips\n1. Use `executeOnce: false` for parallel item processing\n2. Batch external API calls (respect rate limits)\n3. 
Use merge node to combine parallel branches\n4. Consider queue-based processing for heavy loads", "metadata": {"topic": "parallel_execution", "type": "optimization"}}
{"instruction": "How do I monitor n8n workflows with Grafana and Prometheus?", "output": "# n8n Monitoring with Grafana/Prometheus\n\n## n8n Metrics Endpoint\nEnable in n8n environment:\n```bash\nN8N_METRICS=true\nN8N_METRICS_PREFIX=n8n_\n```\n\n## Prometheus Scrape Config\n```yaml\nscrape_configs:\n - job_name: 'n8n'\n static_configs:\n - targets: ['n8n:5678']\n metrics_path: /metrics\n```\n\n## Key Metrics to Monitor\n\n### Workflow Execution\n```promql\n# Execution count by workflow\nsum(rate(n8n_workflow_executions_total[5m])) by (workflow_name)\n\n# Error rate\nsum(rate(n8n_workflow_executions_total{status=\"error\"}[5m])) / \nsum(rate(n8n_workflow_executions_total[5m]))\n\n# Execution duration\nhistogram_quantile(0.95, rate(n8n_workflow_execution_duration_seconds_bucket[5m]))\n```\n\n### System Health\n```promql\n# Active executions\nn8n_active_executions\n\n# Memory usage\nprocess_resident_memory_bytes{job=\"n8n\"}\n\n# CPU usage\nrate(process_cpu_seconds_total{job=\"n8n\"}[5m])\n```\n\n## Grafana Dashboard JSON\n```json\n{\n \"panels\": [\n {\n \"title\": \"Workflow Executions/min\",\n \"type\": \"graph\",\n \"targets\": [{\"expr\": \"sum(rate(n8n_workflow_executions_total[1m])) by (workflow_name)\"}]\n },\n {\n \"title\": \"Error Rate %\",\n \"type\": \"gauge\",\n \"targets\": [{\"expr\": \"sum(rate(n8n_workflow_executions_total{status='error'}[5m])) / sum(rate(n8n_workflow_executions_total[5m])) * 100\"}]\n }\n ]\n}\n```\n\n## Alerting Rules\n```yaml\ngroups:\n - name: n8n\n rules:\n - alert: HighErrorRate\n expr: sum(rate(n8n_workflow_executions_total{status=\"error\"}[5m])) / sum(rate(n8n_workflow_executions_total[5m])) > 0.1\n for: 5m\n labels: {severity: critical}\n annotations: {summary: \"n8n error rate above 10%\"}\n```", "metadata": {"topic": "monitoring", "type": "integration"}}
{"instruction": "How do I encrypt sensitive data in n8n workflows?", "output": "# Data Encryption in n8n Workflows\n\n## AES-256-GCM Encryption\n```javascript\n// Code node - Encrypt\nconst crypto = require('crypto');\n\nconst key = Buffer.from($env.ENCRYPTION_KEY, 'hex'); // 32 bytes\nconst iv = crypto.randomBytes(16);\n\nconst cipher = crypto.createCipheriv('aes-256-gcm', key, iv);\nlet encrypted = cipher.update($json.sensitiveData, 'utf8', 'hex');\nencrypted += cipher.final('hex');\nconst authTag = cipher.getAuthTag().toString('hex');\n\nreturn {\n encrypted: encrypted,\n iv: iv.toString('hex'),\n authTag: authTag\n};\n```\n\n## Decryption\n```javascript\n// Code node - Decrypt\nconst crypto = require('crypto');\n\nconst key = Buffer.from($env.ENCRYPTION_KEY, 'hex');\nconst iv = Buffer.from($json.iv, 'hex');\nconst authTag = Buffer.from($json.authTag, 'hex');\n\nconst decipher = crypto.createDecipheriv('aes-256-gcm', key, iv);\ndecipher.setAuthTag(authTag);\n\nlet decrypted = decipher.update($json.encrypted, 'hex', 'utf8');\ndecrypted += decipher.final('utf8');\n\nreturn { decrypted };\n```\n\n## Field-Level Encryption\n```javascript\n// Encrypt specific fields before storage\nconst fieldsToEncrypt = ['ssn', 'credit_card', 'password'];\nconst record = $json;\n\nfor (const field of fieldsToEncrypt) {\n if (record[field]) {\n record[`${field}_encrypted`] = encrypt(record[field]);\n delete record[field];\n }\n}\n\nreturn record;\n```\n\n## Key Rotation Pattern\n```javascript\n// Re-encrypt with new key\nconst oldKey = Buffer.from($env.OLD_KEY, 'hex');\nconst newKey = Buffer.from($env.NEW_KEY, 'hex');\n\nconst decrypted = decrypt($json.encrypted, oldKey);\nconst reEncrypted = encrypt(decrypted, newKey);\n\nreturn { encrypted: reEncrypted, keyVersion: 2 };\n```\n\n## Best Practices\n1. Never log encrypted data or keys\n2. Use environment variables for keys\n3. Rotate keys periodically\n4. Use authenticated encryption (GCM)\n5. 
Store IV with encrypted data", "metadata": {"topic": "encryption", "type": "security"}}
{"instruction": "How do I implement automated prompt engineering and validation in n8n?", "output": "# Automated Prompt Engineering in n8n\n\n## Prompt Template System\n```javascript\n// Code node - Build prompt from template\nconst templates = {\n summarize: \"Summarize the following text in {{length}} sentences:\\n\\n{{text}}\",\n translate: \"Translate the following from {{source_lang}} to {{target_lang}}:\\n\\n{{text}}\",\n classify: \"Classify the following text into one of these categories: {{categories}}\\n\\nText: {{text}}\",\n extract: \"Extract the following information from the text: {{fields}}\\n\\nText: {{text}}\"\n};\n\nconst template = templates[$json.templateType];\nlet prompt = template;\nfor (const [key, value] of Object.entries($json.variables)) {\n prompt = prompt.replace(`{{${key}}}`, value);\n}\nreturn { prompt };\n```\n\n## Prompt Validation\n```javascript\n// Validate prompt before sending to LLM\nconst prompt = $json.prompt;\nconst issues = [];\n\n// Check length\nif (prompt.length > 4000) issues.push('Prompt too long');\nif (prompt.length < 10) issues.push('Prompt too short');\n\n// Check for required elements\nif (!prompt.includes('{{')) issues.push('No variable placeholders found');\n\n// Check for injection attempts\nconst injection_patterns = ['ignore previous', 'disregard instructions'];\nif (injection_patterns.some(p => prompt.toLowerCase().includes(p))) {\n issues.push('Potential prompt injection detected');\n}\n\nreturn { valid: issues.length === 0, issues };\n```\n\n## Output Validation\n```javascript\n// Validate LLM response\nconst response = $json.aiResponse;\nconst expected = $json.expectedFormat;\n\nconst validation = {\n isJSON: false,\n hasRequiredFields: false,\n withinLength: false\n};\n\nif (expected.format === 'json') {\n try {\n const parsed = JSON.parse(response);\n validation.isJSON = true;\n validation.hasRequiredFields = expected.requiredFields.every(f => f in parsed);\n } catch (e) {\n validation.isJSON = false;\n 
}\n}\n\nvalidation.withinLength = response.length <= expected.maxLength;\n\nreturn {\n valid: Object.values(validation).every(v => v),\n validation,\n response: validation.isJSON ? JSON.parse(response) : response\n};\n```\n\n## A/B Testing Prompts\n```javascript\n// Select prompt variant\nconst variants = [\n { id: 'A', prompt: 'Concise version...', weight: 0.5 },\n { id: 'B', prompt: 'Detailed version...', weight: 0.5 }\n];\n\nconst rand = Math.random();\nlet cumulative = 0;\nlet selected;\nfor (const v of variants) {\n cumulative += v.weight;\n if (rand <= cumulative) { selected = v; break; }\n}\n\nreturn { promptVariant: selected.id, prompt: selected.prompt };\n```", "metadata": {"topic": "prompt_engineering", "type": "ai"}}
{"instruction": "How do I implement multi-agent orchestration with n8n?", "output": "# Multi-Agent Orchestration in n8n\n\n## Agent Architecture\n```\nOrchestrator Agent \u2192 Specialist Agents \u2192 Result Aggregation\n \u2193\n[Research Agent] [Writing Agent] [Review Agent] [Code Agent]\n```\n\n## Orchestrator Workflow\n```json\n{\n \"nodes\": [\n {\"name\": \"Task Input\", \"type\": \"n8n-nodes-base.webhook\"},\n {\"name\": \"Analyze Task\", \"type\": \"n8n-nodes-base.openAi\",\n \"parameters\": {\"prompt\": \"Analyze this task and determine which specialists are needed: {{$json.task}}\\n\\nReturn JSON: {agents: ['research', 'write', 'review']}\"}},\n {\"name\": \"Parse Agent List\", \"type\": \"n8n-nodes-base.code\"},\n {\"name\": \"Route to Agents\", \"type\": \"n8n-nodes-base.switch\"},\n {\"name\": \"Research Agent\", \"type\": \"n8n-nodes-base.executeWorkflow\", \"parameters\": {\"workflowId\": \"research_agent\"}},\n {\"name\": \"Writing Agent\", \"type\": \"n8n-nodes-base.executeWorkflow\", \"parameters\": {\"workflowId\": \"writing_agent\"}},\n {\"name\": \"Aggregate Results\", \"type\": \"n8n-nodes-base.merge\"}\n ]\n}\n```\n\n## Specialist Agent (Research)\n```javascript\n// Research agent prompt\nconst systemPrompt = `You are a research specialist. Your role:\n1. Search for relevant information\n2. Verify facts from multiple sources\n3. Summarize findings concisely\n4. 
Cite sources\n\nReturn JSON: {findings: [...], sources: [...], confidence: 0-1}`;\n```\n\n## Agent Communication Protocol\n```javascript\n// Message format between agents\nconst agentMessage = {\n from: 'research_agent',\n to: 'writing_agent',\n type: 'research_complete',\n payload: {\n findings: [...],\n context: {...},\n metadata: { timestamp, confidence }\n }\n};\n```\n\n## Feedback Loop\n```json\n{\n \"nodes\": [\n {\"name\": \"Generate Content\", \"type\": \"n8n-nodes-base.openAi\"},\n {\"name\": \"Review Agent\", \"type\": \"n8n-nodes-base.openAi\",\n \"parameters\": {\"prompt\": \"Review this content for accuracy and quality...\"}},\n {\"name\": \"Check Approval\", \"type\": \"n8n-nodes-base.if\",\n \"parameters\": {\"conditions\": {\"string\": [{\"value1\": \"={{ $json.approved }}\", \"value2\": \"true\"}]}}},\n {\"name\": \"Return to Generator\", \"type\": \"n8n-nodes-base.set\"}\n ]\n}\n```\n\n## Best Practices\n1. Define clear agent responsibilities\n2. Use structured message formats\n3. Implement timeout handling\n4. Log all agent interactions\n5. Set max iteration limits for loops", "metadata": {"topic": "multi_agent", "type": "ai"}}