puq-docker-minio-deploy

Workflow Overview

This 33-node n8n workflow implements the API backend for PUQcloud's WHMCS/WISECP "Docker MinIO" module. A Basic Auth webhook (path docker-minio) receives a command in the request body; the Service Actions and Container Actions switches route it to the matching bash script (create, suspend, unsuspend, terminate, change_package, container start/stop, disk mount/unmount, ACL, log and status queries), which is then executed over SSH on a Docker host to deploy and manage per-client MinIO containers behind nginx-proxy.
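For orientation, a call to the webhook might look like the sketch below (this is illustrative and not part of the template). The POST method, Basic Auth, the path docker-minio and the body fields server_domain, domain, command, username, password, disk, ram and cpu are taken from the workflow JSON; the n8n host name, the credentials and all field values are placeholders, and the /webhook/ prefix assumes the default n8n production webhook URL.

curl -X POST https://n8n.example.com/webhook/docker-minio \
  -u 'api-user:api-password' \
  -H 'Content-Type: application/json' \
  -d '{
    "server_domain": "d01-test.uuq.pl",
    "domain": "client1.example.com",
    "command": "create",
    "username": "minio-admin",
    "password": "strong-password",
    "disk": "10",
    "ram": "1",
    "cpu": "1"
  }'

The command value selects a branch in the Service Actions and Container Actions switches: test_connection, create, suspend, unsuspend, terminate, change_package, container_start, container_stop, container_mount_disk, container_unmount_disk, container_get_acl, container_set_acl or container_get_net.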

Workflow Source Code

Download
{
  "id": "IJYpB2CIAdLk8Umg",
  "meta": {
    "instanceId": "ffb0782f8b2cf4278577cb919e0cd26141bc9ff8774294348146d454633aa4e3",
    "templateCredsSetupCompleted": true
  },
  "name": "puq-docker-minio-deploy",
  "tags": [],
  "nodes": [
    {
      "id": "d79fe295-a0b0-4871-8382-67d9af5d0d2c",
      "name": "If",
      "type": "n8n-nodes-base.if",
      "position": [
        -2060,
        -320
      ],
      "parameters": {
        "options": {},
        "conditions": {
          "options": {
            "version": 2,
            "leftValue": "",
            "caseSensitive": true,
            "typeValidation": "strict"
          },
          "combinator": "or",
          "conditions": [
            {
              "id": "b702e607-888a-42c9-b9a7-f9d2a64dfccd",
              "operator": {
                "type": "string",
                "operation": "equals"
              },
              "leftValue": "={{ $json.server_domain }}",
              "rightValue": "={{ $('API').item.json.body.server_domain }}"
            }
          ]
        }
      },
      "typeVersion": 2.2
    },
    {
      "id": "52c088af-95ae-411f-b1fa-f50b8ea99b58",
      "name": "Parametrs",
      "type": "n8n-nodes-base.set",
      "position": [
        -2280,
        -320
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "a6328600-7ee0-4031-9bdb-fcee99b79658",
              "name": "server_domain",
              "type": "string",
              "value": "d01-test.uuq.pl"
            },
            {
              "id": "370ddc4e-0fc0-48f6-9b30-ebdfba72c62f",
              "name": "clients_dir",
              "type": "string",
              "value": "/opt/docker/clients"
            },
            {
              "id": "92202bb8-6113-4bc5-9a29-79d238456df2",
              "name": "mount_dir",
              "type": "string",
              "value": "/mnt"
            },
            {
              "id": "baa52df2-9c10-42b2-939f-f05ea85ea2be",
              "name": "screen_left",
              "type": "string",
              "value": "{{"
            },
            {
              "id": "2b19ed99-2630-412a-98b6-4be44d35d2e7",
              "name": "screen_right",
              "type": "string",
              "value": "}}"
            }
          ]
        }
      },
      "typeVersion": 3.4
    },
    {
      "id": "9814333d-a9c1-4787-aed1-116db9395b88",
      "name": "API",
      "type": "n8n-nodes-base.webhook",
      "position": [
        -2600,
        -320
      ],
      "webhookId": "73068cf8-be17-4b10-b9a3-744f7e4843b0",
      "parameters": {
        "path": "docker-minio",
        "options": {},
        "httpMethod": [
          "POST"
        ],
        "responseMode": "responseNode",
        "authentication": "basicAuth",
        "multipleMethods": true
      },
      "credentials": {
        "httpBasicAuth": {
          "id": "J4uXcnEb1SIQ2VN7",
          "name": "MinIO"
        }
      },
      "typeVersion": 2
    },
    {
      "id": "a3e0156c-8033-4829-ab57-06e3708a7a09",
      "name": "422-Invalid server domain",
      "type": "n8n-nodes-base.respondToWebhook",
      "position": [
        -2100,
        0
      ],
      "parameters": {
        "options": {
          "responseCode": 422
        },
        "respondWith": "json",
        "responseBody": "[{
  \"status\": \"error\",
  \"error\": \"Invalid server domain\"
}]"
      },
      "typeVersion": 1.1,
      "alwaysOutputData": false
    },
    {
      "id": "a5f410f8-ca52-4e85-b76f-651756c80de5",
      "name": "Code1",
      "type": "n8n-nodes-base.code",
      "position": [
        800,
        -240
      ],
      "parameters": {
        "mode": "runOnceForEachItem",
        "jsCode": "try {
  if ($json.stdout === 'success') {
    return {
      json: {
        status: 'success',
        message: '',
        data: '',
      }
    };
  }

  const parsedData = JSON.parse($json.stdout);

  return {
    json: {
      status: parsedData.status === 'error' ? 'error' : 'success',
      message: parsedData.message || (parsedData.status === 'error' ? 'An error occurred' : ''),
      data: parsedData || '',
    }
  };

} catch (error) {
  return {
    json: {
      status: 'error',
      message: $json.stdout??$json.error,
      data: '',
    }
  };
}"
      },
      "executeOnce": false,
      "retryOnFail": false,
      "typeVersion": 2,
      "alwaysOutputData": false
    },
    {
      "id": "e162574f-c3ce-4fd0-8b31-d251ea360389",
      "name": "SSH",
      "type": "n8n-nodes-base.ssh",
      "onError": "continueErrorOutput",
      "position": [
        500,
        -240
      ],
      "parameters": {
        "cwd": "=/",
        "command": "={{ $json.sh }}"
      },
      "credentials": {
        "sshPassword": {
          "id": "Cyjy61UWHwD2Xcd8",
          "name": "d01-test.uuq.pl-puq"
        }
      },
      "executeOnce": true,
      "typeVersion": 1
    },
    {
      "id": "70f53357-5cdc-428c-876c-77036d6736cc",
      "name": "Container Actions",
      "type": "n8n-nodes-base.switch",
      "position": [
        -1680,
        160
      ],
      "parameters": {
        "rules": {
          "values": [
            {
              "outputKey": "start",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "66ad264d-5393-410c-bfa3-011ab8eb234a",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_start"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "stop",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "b48957a0-22c0-4ac0-82ef-abd9e7ab0207",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_stop"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "mount_disk",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "727971bf-4218-41c1-9b07-22df4b947852",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_mount_disk"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "unmount_disk",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "0c80b1d9-e7ca-4cf3-b3ac-b40fdf4dd8f8",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_unmount_disk"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "container_get_acl",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "755e1a9f-667a-4022-9cb5-3f8153f62e95",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_get_acl"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "container_set_acl",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "8d75626f-789e-42fc-be5e-3a4e93a9bbc6",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_set_acl"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "container_get_net",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "c49d811a-735c-42f4-8b77-d0cd47b3d2b8",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_get_net"
                  }
                ]
              },
              "renameOutput": true
            }
          ]
        },
        "options": {}
      },
      "typeVersion": 3.2
    },
    {
      "id": "901a657d-873c-4b92-9949-d03e73a5313c",
      "name": "Service Actions",
      "type": "n8n-nodes-base.switch",
      "position": [
        -900,
        -1300
      ],
      "parameters": {
        "rules": {
          "values": [
            {
              "outputKey": "test_connection",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "3afdd2f1-fe93-47c2-95cd-bac9b1d94eeb",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "test_connection"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "create",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "102f10e9-ec6c-4e63-ba95-0fe6c7dc0bd1",
                    "operator": {
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "create"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "suspend",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "f62dfa34-6751-4b34-adcc-3d6ba1b21a8c",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "suspend"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "unsuspend",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "384d2026-b753-4c27-94c2-8f4fc189eb5f",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "unsuspend"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "terminate",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "0e190a97-827a-4e87-8222-093ff7048b21",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "terminate"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "change_package",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "6f7832f3-b61d-4517-ab6b-6007998136dd",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "change_package"
                  }
                ]
              },
              "renameOutput": true
            }
          ]
        },
        "options": {}
      },
      "typeVersion": 3.2
    },
    {
      "id": "1c59a844-f4ef-422f-abbf-288a55e11934",
      "name": "API answer",
      "type": "n8n-nodes-base.respondToWebhook",
      "position": [
        820,
        0
      ],
      "parameters": {
        "options": {
          "responseCode": 200
        },
        "respondWith": "allIncomingItems"
      },
      "typeVersion": 1.1,
      "alwaysOutputData": true
    },
    {
      "id": "c2019d97-1012-4089-84c3-305308f8603f",
      "name": "Inspect",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1160,
        -380
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/{{ $('API').item.json.body.domain }}\"
CONTAINER_NAME=\"{{ $('API').item.json.body.domain }}\"

INSPECT_JSON=\"{}\"
if sudo docker ps -a --filter \"name=$CONTAINER_NAME\" | grep -q \"$CONTAINER_NAME\"; then
  INSPECT_JSON=$(sudo docker inspect \"$CONTAINER_NAME\")
fi

echo \"{\\"inspect\\": $INSPECT_JSON}\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "a274a2d1-2382-48a0-a94d-6ef89cd22a57",
      "name": "Stat",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1060,
        -240
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/{{ $('API').item.json.body.domain }}\"
STATUS_FILE=\"$COMPOSE_DIR/status.json\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/{{ $('API').item.json.body.domain }}\"
CONTAINER_NAME=\"{{ $('API').item.json.body.domain }}\"

# Initialize empty container data
INSPECT_JSON=\"{}\"
STATS_JSON=\"{}\"

# Check if container is running
if sudo docker ps -a --filter \"name=$CONTAINER_NAME\" | grep -q \"$CONTAINER_NAME\"; then
  # Get Docker inspect info in JSON (as raw string)
  INSPECT_JSON=$(sudo docker inspect \"$CONTAINER_NAME\")

  # Get Docker stats info in JSON (as raw string)
  STATS_JSON=$(sudo docker stats --no-stream --format \"{{ $('Parametrs').item.json.screen_left }}json .{{ $('Parametrs').item.json.screen_right }}\" \"$CONTAINER_NAME\")
  STATS_JSON=${STATS_JSON:-'{}'}
fi

# Initialize disk info variables
MOUNT_USED=\"N/A\"
MOUNT_FREE=\"N/A\"
MOUNT_TOTAL=\"N/A\"
MOUNT_PERCENT=\"N/A\"
IMG_SIZE=\"N/A\"
IMG_PERCENT=\"N/A\"
DISK_STATS_IMG=\"N/A\"

# Check if mount directory exists and is accessible
if [ -d \"$MOUNT_DIR\" ]; then
  if mount | grep -q \"$MOUNT_DIR\"; then
    # Get disk usage for mounted directory
    DISK_STATS_MOUNT=$(df -h \"$MOUNT_DIR\" | tail -n 1)
    MOUNT_USED=$(echo \"$DISK_STATS_MOUNT\" | awk '{print $3}')
    MOUNT_FREE=$(echo \"$DISK_STATS_MOUNT\" | awk '{print $4}')
    MOUNT_TOTAL=$(echo \"$DISK_STATS_MOUNT\" | awk '{print $2}')
    MOUNT_PERCENT=$(echo \"$DISK_STATS_MOUNT\" | awk '{print $5}')
  fi
fi

# Check if image file exists
if [ -f \"$IMG_FILE\" ]; then
  # Get disk usage for image file
  IMG_SIZE=$(du -sh \"$IMG_FILE\" | awk '{print $1}')
fi

# Manually create a combined JSON object
FINAL_JSON=\"{\\"inspect\\": $INSPECT_JSON, \\"stats\\": $STATS_JSON, \\"disk\\": {\\"mounted\\": {\\"used\\": \\"$MOUNT_USED\\", \\"free\\": \\"$MOUNT_FREE\\", \\"total\\": \\"$MOUNT_TOTAL\\", \\"percent\\": \\"$MOUNT_PERCENT\\"}, \\"img_file\\": {\\"size\\": \\"$IMG_SIZE\\"}}}\"

# Output the result
echo \"$FINAL_JSON\"

exit 0"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "3e80ebbe-bb8e-4fec-ab20-ba69271a48f8",
      "name": "Start",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1180,
        140
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/{{ $('API').item.json.body.domain }}\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/{{ $('API').item.json.body.domain }}\"

# Function to log an error, write to status file, and print to console
handle_error() {
    echo \"error: $1\"
    exit 1
}

if ! df -h | grep -q \"$MOUNT_DIR\"; then
    handle_error \"The file $IMG_FILE is not mounted to $MOUNT_DIR\"
fi

if sudo docker ps --filter \"name={{ $('API').item.json.body.domain }}\" --filter \"status=running\" -q | grep -q .; then
    handle_error \"{{ $('API').item.json.body.domain }} container is running\"
fi

# Change to the compose directory
cd \"$COMPOSE_DIR\" > /dev/null 2>&1 || handle_error \"Failed to change directory to $COMPOSE_DIR\"

# Start the Docker containers
if ! sudo docker-compose up -d > /dev/null 2>error.log; then
    ERROR_MSG=$(tail -n 10 error.log)
    handle_error \"Docker-compose failed: $ERROR_MSG\"
fi

# Success
echo \"success\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "4e13ceea-a01f-438c-ba6f-27f55b88798b",
      "name": "Stop",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1060,
        240
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/{{ $('API').item.json.body.domain }}\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/{{ $('API').item.json.body.domain }}\"

# Function to log an error, write to status file, and print to console
handle_error() {
    echo \"error: $1\"
    exit 1
}

# Check if Docker container is running
if ! sudo docker ps --filter \"name={{ $('API').item.json.body.domain }}\" --filter \"status=running\" -q | grep -q .; then
    handle_error \"{{ $('API').item.json.body.domain }} container is not running\"
fi

# Stop and remove the Docker containers (also remove associated volumes)
if ! sudo docker-compose -f \"$COMPOSE_DIR/docker-compose.yml\" down > /dev/null 2>&1; then
    handle_error \"Failed to stop and remove docker-compose containers\"
fi

echo \"success\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "afa7a4e2-85a6-420b-9e33-30802e9cbb7b",
      "name": "Test Connection1",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -220,
        -1320
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

# Function to log an error, print to console
handle_error() {
    echo \"error: $1\"
    exit 1
}

# Check if Docker is installed
if ! command -v docker &> /dev/null; then
    handle_error \"Docker is not installed\"
fi

# Check if Docker service is running
if ! systemctl is-active --quiet docker; then
    handle_error \"Docker service is not running\"
fi

# Check if nginx-proxy container is running
if ! sudo docker ps --filter \"name=nginx-proxy\" --filter \"status=running\" -q > /dev/null; then
    handle_error \"nginx-proxy container is not running\"
fi

# Check if letsencrypt-nginx-proxy-companion container is running
if ! sudo docker ps --filter \"name=letsencrypt-nginx-proxy-companion\" --filter \"status=running\" -q > /dev/null; then
    handle_error \"letsencrypt-nginx-proxy-companion container is not running\"
fi

# If everything is successful
echo \"success\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "6c8261b4-f024-4b8e-a11c-1f2305e03e1d",
      "name": "Deploy",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -220,
        -1120
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

# Get values for variables from templates
DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
COMPOSE_FILE=\"$COMPOSE_DIR/docker-compose.yml\"
STATUS_FILE=\"$COMPOSE_DIR/status\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"
VHOST_DIR=\"/opt/docker/nginx-proxy/nginx/vhost.d\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/$DOMAIN\"
DOCKER_COMPOSE_TEXT='{{ $('Deploy-docker-compose').item.json[\"docker-compose\"] }}'

NGINX_MAIN_ACL_FILE=\"$NGINX_DIR/$DOMAIN\"_acl

NGINX_MAIN_TEXT='{{ $('nginx').item.json['main'] }}'
NGINX_MAIN_FILE=\"$NGINX_DIR/$DOMAIN\"
VHOST_MAIN_FILE=\"$VHOST_DIR/$DOMAIN\"

NGINX_MAIN_LOCATION_TEXT='{{ $('nginx').item.json['main_location'] }}'
NGINX_MAIN_LOCATION_FILE=\"$NGINX_DIR/$DOMAIN\"_location
VHOST_MAIN_LOCATION_FILE=\"$VHOST_DIR/$DOMAIN\"_location


NGINX_CONSOLE_ACL_FILE=\"$NGINX_DIR/console.$DOMAIN\"_acl

NGINX_CONSOLE_TEXT='{{ $('nginx').item.json['console'] }}'
NGINX_CONSOLE_FILE=\"$NGINX_DIR/console.$DOMAIN\"
VHOST_CONSOLE_FILE=\"$VHOST_DIR/console.$DOMAIN\"

NGINX_CONSOLE_LOCATION_TEXT='{{ $('nginx').item.json['console_location'] }}'
NGINX_CONSOLE_LOCATION_FILE=\"$NGINX_DIR/console.$DOMAIN\"_location
VHOST_CONSOLE_LOCATION_FILE=\"$VHOST_DIR/console.$DOMAIN\"_location


DISK_SIZE=\"{{ $('API').item.json.body.disk }}\"

# Function to handle errors: write to the status file and print the message to console
handle_error() {
    STATUS_JSON=\"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    echo \"$STATUS_JSON\" | sudo tee \"$STATUS_FILE\" > /dev/null  # Write error to the status file
    echo \"error: $1\"  # Print the error message to the console
    exit 1  # Exit the script with an error code
}

# Check if the directory already exists. If yes, exit with an error.
if [ -d \"$COMPOSE_DIR\" ]; then
    echo \"error: Directory $COMPOSE_DIR already exists\"
    exit 1
fi

# Create necessary directories with permissions
sudo mkdir -p \"$COMPOSE_DIR\" > /dev/null 2>&1 || handle_error \"Failed to create $COMPOSE_DIR\"
sudo mkdir -p \"$NGINX_DIR\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_DIR\"
sudo mkdir -p \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to create $MOUNT_DIR\"

# Set permissions on the created directories
sudo chmod -R 777 \"$COMPOSE_DIR\" > /dev/null 2>&1 || handle_error \"Failed to set permissions on $COMPOSE_DIR\"
sudo chmod -R 777 \"$NGINX_DIR\" > /dev/null 2>&1 || handle_error \"Failed to set permissions on $NGINX_DIR\"
sudo chmod -R 777 \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to set permissions on $MOUNT_DIR\"

# Create docker-compose.yml file
echo \"$DOCKER_COMPOSE_TEXT\" | sudo tee \"$COMPOSE_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $COMPOSE_FILE\"

# Create NGINX configuration files
echo \"\" | sudo tee \"$NGINX_MAIN_ACL_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_MAIN_ACL_FILE\"
echo \"\" | sudo tee \"$NGINX_CONSOLE_ACL_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_CONSOLE_ACL_FILE\"

echo \"$NGINX_MAIN_TEXT\" | sudo tee \"$NGINX_MAIN_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_MAIN_FILE\"
echo \"$NGINX_MAIN_LOCATION_TEXT\" | sudo tee \"$NGINX_MAIN_LOCATION_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_MAIN_LOCATION_FILE\"

echo \"$NGINX_CONSOLE_TEXT\" | sudo tee \"$NGINX_CONSOLE_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_CONSOLE_FILE\"
echo \"$NGINX_CONSOLE_LOCATION_TEXT\" | sudo tee \"$NGINX_CONSOLE_LOCATION_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_CONSOLE_LOCATION_FILE\"

# Change to the compose directory
cd \"$COMPOSE_DIR\" > /dev/null 2>&1 || handle_error \"Failed to change directory to $COMPOSE_DIR\"

# Create data.img file if it doesn't exist
if [ ! -f \"$IMG_FILE\" ]; then
    sudo fallocate -l \"$DISK_SIZE\"G \"$IMG_FILE\" > /dev/null 2>&1 || sudo truncate -s \"$DISK_SIZE\"G \"$IMG_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $IMG_FILE\"
    sudo mkfs.ext4 \"$IMG_FILE\" > /dev/null 2>&1 || handle_error \"Failed to format $IMG_FILE\"  # Format the image as ext4
    sync  # Synchronize the data to disk
fi

# Add an entry to /etc/fstab for mounting if not already present
if ! grep -q \"$IMG_FILE\" /etc/fstab; then
    echo \"$IMG_FILE $MOUNT_DIR ext4 loop 0 0\" | sudo tee -a /etc/fstab > /dev/null || handle_error \"Failed to add entry to /etc/fstab\"
fi

# Mount all entries in /etc/fstab
sudo mount -a || handle_error \"Failed to mount entries from /etc/fstab\"

# Set permissions on the mount directory
sudo chmod -R 777 \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to set permissions on $MOUNT_DIR\"

# Copy NGINX configuration files instead of creating symbolic links
sudo cp -f \"$NGINX_MAIN_FILE\" \"$VHOST_MAIN_FILE\" || handle_error \"Failed to copy $NGINX_MAIN_FILE to $VHOST_MAIN_FILE\"
sudo chmod 777 \"$VHOST_MAIN_FILE\" || handle_error \"Failed to set permissions on $VHOST_MAIN_FILE\"

sudo cp -f \"$NGINX_MAIN_LOCATION_FILE\" \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_MAIN_LOCATION_FILE to $VHOST_MAIN_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_MAIN_LOCATION_FILE\"

sudo cp -f \"$NGINX_CONSOLE_FILE\" \"$VHOST_CONSOLE_FILE\" || handle_error \"Failed to copy $NGINX_CONSOLE_FILE to $VHOST_CONSOLE_FILE\"
sudo chmod 777 \"$VHOST_CONSOLE_FILE\" || handle_error \"Failed to set permissions on $VHOST_CONSOLE_FILE\"

sudo cp -f \"$NGINX_CONSOLE_LOCATION_FILE\" \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_CONSOLE_LOCATION_FILE to $VHOST_CONSOLE_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_CONSOLE_LOCATION_FILE\"

# Start Docker containers using docker-compose
if ! sudo docker-compose up -d > /dev/null 2>error.log; then
    ERROR_MSG=$(tail -n 10 error.log)  # Read the last 10 lines from error.log
    handle_error \"Docker-compose failed: $ERROR_MSG\"
fi

# If everything is successful, update the status file and print success message
echo \"active\" | sudo tee \"$STATUS_FILE\" > /dev/null
echo \"success\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "d2f48f02-1a75-445e-832b-f9bf1a4d4b71",
      "name": "Suspend",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -220,
        -960
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
COMPOSE_FILE=\"$COMPOSE_DIR/docker-compose.yml\"
STATUS_FILE=\"$COMPOSE_DIR/status\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"
VHOST_DIR=\"/opt/docker/nginx-proxy/nginx/vhost.d\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/$DOMAIN\"

VHOST_MAIN_FILE=\"$VHOST_DIR/$DOMAIN\"
VHOST_MAIN_LOCATION_FILE=\"$VHOST_DIR/$DOMAIN\"_location
VHOST_CONSOLE_FILE=\"$VHOST_DIR/console.$DOMAIN\"
VHOST_CONSOLE_LOCATION_FILE=\"$VHOST_DIR/console.$DOMAIN\"_location

# Function to log an error, write to status file, and print to console
handle_error() {
    STATUS_JSON=\"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    echo \"$STATUS_JSON\" | sudo tee \"$STATUS_FILE\" > /dev/null
    echo \"error: $1\"
    exit 1
}

# Stop and remove Docker containers (also remove associated volumes)
if [ -f \"$COMPOSE_FILE\" ]; then
    if ! sudo docker-compose -f \"$COMPOSE_FILE\" down > /dev/null 2>&1; then
        handle_error \"Failed to stop and remove docker-compose containers\"
    fi
else
    echo \"Warning: docker-compose.yml not found, skipping container stop.\"
fi

# Remove mount entry from /etc/fstab if it exists
if grep -q \"$IMG_FILE\" /etc/fstab; then
    sudo sed -i \"\|$(printf '%s\n' \"$IMG_FILE\" | sed 's/[.[\*^$]/\\&/g')|d\" /etc/fstab
fi

# Unmount the image if it is mounted
if mount | grep -q \"$MOUNT_DIR\"; then
    sudo umount \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to unmount $MOUNT_DIR\"
fi

# Remove the mount directory
if [ -d \"$MOUNT_DIR\" ]; then
    sudo rm -rf \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to remove $MOUNT_DIR\"
fi

# Remove NGINX configuration files
[ -f \"$VHOST_MAIN_FILE\" ] && sudo rm -f \"$VHOST_MAIN_FILE\" || handle_error \"Warning: $VHOST_MAIN_FILE not found.\"
[ -f \"$VHOST_MAIN_LOCATION_FILE\" ] && sudo rm -f \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Warning: $VHOST_MAIN_LOCATION_FILE not found.\"
[ -f \"$VHOST_CONSOLE_FILE\" ] && sudo rm -f \"$VHOST_CONSOLE_FILE\" || handle_error \"Warning: $VHOST_CONSOLE_FILE not found.\"
[ -f \"$VHOST_CONSOLE_LOCATION_FILE\" ] && sudo rm -f \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Warning: $VHOST_CONSOLE_LOCATION_FILE not found.\"

# Update status
echo \"suspended\" | sudo tee \"$STATUS_FILE\" > /dev/null

# Success
echo \"success\"
exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "87b7f7c2-7f7e-49e5-846c-3f92d436b5b6",
      "name": "Terminated",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -220,
        -620
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
COMPOSE_FILE=\"$COMPOSE_DIR/docker-compose.yml\"
STATUS_FILE=\"$COMPOSE_DIR/status\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"
VHOST_DIR=\"/opt/docker/nginx-proxy/nginx/vhost.d\"

VHOST_MAIN_FILE=\"$VHOST_DIR/$DOMAIN\"
VHOST_MAIN_LOCATION_FILE=\"$VHOST_DIR/$DOMAIN\"_location
VHOST_CONSOLE_FILE=\"$VHOST_DIR/console.$DOMAIN\"
VHOST_CONSOLE_LOCATION_FILE=\"$VHOST_DIR/console.$DOMAIN\"_location
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/$DOMAIN\"

# Function to log an error, write to status file, and print to console
handle_error() {
    STATUS_JSON=\"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    echo \"error: $1\"
    exit 1
}

# Stop and remove the Docker containers
if [ -f \"$COMPOSE_FILE\" ]; then
    sudo docker-compose -f \"$COMPOSE_FILE\" down > /dev/null 2>&1
fi

# Remove the mount entry from /etc/fstab if it exists
if grep -q \"$IMG_FILE\" /etc/fstab; then
    sudo sed -i \"\|$(printf '%s\n' \"$IMG_FILE\" | sed 's/[.[\*^$]/\\&/g')|d\" /etc/fstab
fi

# Unmount the image if it is still mounted
if mount | grep -q \"$MOUNT_DIR\"; then
    sudo umount \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to unmount $MOUNT_DIR\"
fi

# Remove all related directories and files
for item in \"$COMPOSE_DIR\" \"$VHOST_MAIN_FILE\" \"$VHOST_MAIN_LOCATION_FILE\" \"$VHOST_CONSOLE_FILE\" \"$VHOST_CONSOLE_LOCATION_FILE\"; do
    if [ -e \"$item\" ]; then
        sudo rm -rf \"$item\" || handle_error \"Failed to remove $item\"
    fi
done

echo \"success\"
exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "610dc730-9a2f-4fbf-bbbe-ce31d1494422",
      "name": "Unsuspend",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -220,
        -800
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
COMPOSE_FILE=\"$COMPOSE_DIR/docker-compose.yml\"
STATUS_FILE=\"$COMPOSE_DIR/status\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"
VHOST_DIR=\"/opt/docker/nginx-proxy/nginx/vhost.d\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/$DOMAIN\"
DOCKER_COMPOSE_TEXT='{{ $('Deploy-docker-compose').item.json[\"docker-compose\"] }}'

NGINX_MAIN_ACL_FILE=\"$NGINX_DIR/$DOMAIN\"_acl

NGINX_MAIN_TEXT='{{ $('nginx').item.json['main'] }}'
NGINX_MAIN_FILE=\"$NGINX_DIR/$DOMAIN\"
VHOST_MAIN_FILE=\"$VHOST_DIR/$DOMAIN\"

NGINX_MAIN_LOCATION_TEXT='{{ $('nginx').item.json['main_location'] }}'
NGINX_MAIN_LOCATION_FILE=\"$NGINX_DIR/$DOMAIN\"_location
VHOST_MAIN_LOCATION_FILE=\"$VHOST_DIR/$DOMAIN\"_location

NGINX_CONSOLE_ACL_FILE=\"$NGINX_DIR/console.$DOMAIN\"_acl

NGINX_CONSOLE_TEXT='{{ $('nginx').item.json['console'] }}'
NGINX_CONSOLE_FILE=\"$NGINX_DIR/console.$DOMAIN\"
VHOST_CONSOLE_FILE=\"$VHOST_DIR/console.$DOMAIN\"

NGINX_CONSOLE_LOCATION_TEXT='{{ $('nginx').item.json['console_location'] }}'
NGINX_CONSOLE_LOCATION_FILE=\"$NGINX_DIR/console.$DOMAIN\"_location
VHOST_CONSOLE_LOCATION_FILE=\"$VHOST_DIR/console.$DOMAIN\"_location

DISK_SIZE=\"{{ $('API').item.json.body.disk }}\"

# Function to log an error, write to status file, and print to console
handle_error() {
    STATUS_JSON=\"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    echo \"$STATUS_JSON\" | sudo tee \"$STATUS_FILE\" > /dev/null
    echo \"error: $1\"
    exit 1
}

update_nginx_acl() {
    ACL_FILE=$1
    LOCATION_FILE=$2
    
    if [ -s \"$ACL_FILE\" ]; then  # Проверяем, что файл существует и не пустой
        VALID_LINES=$(grep -vE '^\s*$' \"$ACL_FILE\")  # Убираем пустые строки
        if [ -n \"$VALID_LINES\" ]; then  # Если есть непустые строки
            while IFS= read -r line; do
                echo \"allow $line;\" | sudo tee -a \"$LOCATION_FILE\" > /dev/null || handle_error \"Failed to update $LOCATION_FILE\"
            done <<< \"$VALID_LINES\"
            echo \"deny all;\" | sudo tee -a \"$LOCATION_FILE\" > /dev/null || handle_error \"Failed to update $LOCATION_FILE\"
        fi
    fi
}

# Create necessary directories with permissions
for dir in \"$COMPOSE_DIR\" \"$NGINX_DIR\" \"$MOUNT_DIR\"; do
    sudo mkdir -p \"$dir\" || handle_error \"Failed to create $dir\"
    sudo chmod -R 777 \"$dir\" || handle_error \"Failed to set permissions on $dir\"
done

# Check if the image is already mounted using fstab
if ! grep -q \"$IMG_FILE\" /etc/fstab; then
    echo \"$IMG_FILE $MOUNT_DIR ext4 loop 0 0\" | sudo tee -a /etc/fstab > /dev/null || handle_error \"Failed to add fstab entry for $IMG_FILE\"
fi

# Apply the fstab changes and mount the image
if ! mount | grep -q \"$MOUNT_DIR\"; then
    sudo mount -a || handle_error \"Failed to mount image using fstab\"
fi

# Create docker-compose.yml file
echo \"$DOCKER_COMPOSE_TEXT\" | sudo tee \"$COMPOSE_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $COMPOSE_FILE\"

# Create NGINX configuration files
echo \"$NGINX_MAIN_TEXT\" | sudo tee \"$NGINX_MAIN_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_MAIN_FILE\"
echo \"$NGINX_MAIN_LOCATION_TEXT\" | sudo tee \"$NGINX_MAIN_LOCATION_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_MAIN_FILE\"

echo \"$NGINX_CONSOLE_TEXT\" | sudo tee \"$NGINX_CONSOLE_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_CONSOLE_FILE\"
echo \"$NGINX_CONSOLE_LOCATION_TEXT\" | sudo tee \"$NGINX_CONSOLE_LOCATION_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_CONSOLE_LOCATION_FILE\"

# Copy NGINX configuration files instead of creating symbolic links
sudo cp -f \"$NGINX_MAIN_FILE\" \"$VHOST_MAIN_FILE\" || handle_error \"Failed to copy $NGINX_MAIN_FILE to $VHOST_MAIN_FILE\"
sudo chmod 777 \"$VHOST_MAIN_FILE\" || handle_error \"Failed to set permissions on $VHOST_MAIN_FILE\"

sudo cp -f \"$NGINX_MAIN_LOCATION_FILE\" \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_MAIN_LOCATION_FILE to $VHOST_MAIN_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_MAIN_LOCATION_FILE\"

sudo cp -f \"$NGINX_CONSOLE_FILE\" \"$VHOST_CONSOLE_FILE\" || handle_error \"Failed to copy $NGINX_CONSOLE_FILE to $VHOST_CONSOLE_FILE\"
sudo chmod 777 \"$VHOST_CONSOLE_FILE\" || handle_error \"Failed to set permissions on $VHOST_CONSOLE_FILE\"

sudo cp -f \"$NGINX_CONSOLE_LOCATION_FILE\" \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_CONSOLE_LOCATION_FILE to $VHOST_CONSOLE_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_CONSOLE_LOCATION_FILE\"

update_nginx_acl \"$NGINX_MAIN_ACL_FILE\" \"$VHOST_MAIN_LOCATION_FILE\"
update_nginx_acl \"$NGINX_CONSOLE_ACL_FILE\" \"$VHOST_CONSOLE_LOCATION_FILE\"

# Change to the compose directory
cd \"$COMPOSE_DIR\" || handle_error \"Failed to change directory to $COMPOSE_DIR\"

# Start Docker containers using docker-compose
> error.log
if ! sudo docker-compose up -d > error.log 2>&1; then
    ERROR_MSG=$(tail -n 10 error.log)  # Read the last 10 lines from error.log
    handle_error \"Docker-compose failed: $ERROR_MSG\"
fi

# If everything is successful, update the status file and print success message
echo \"active\" | sudo tee \"$STATUS_FILE\" > /dev/null
echo \"success\"
exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "8d6893c3-9597-43fe-bbec-ba3c55d2c220",
      "name": "Mount Disk",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1180,
        360
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/{{ $('API').item.json.body.domain }}\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/{{ $('API').item.json.body.domain }}\"

# Function to log an error, write to status file, and print to console
handle_error() {
    echo \"error: $1\"
    exit 1
}

# Create necessary directories with permissions
sudo mkdir -p \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to create $MOUNT_DIR\"
sudo chmod 777 \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to set permissions on $MOUNT_DIR\"

if df -h | grep -q \"$MOUNT_DIR\"; then
    handle_error \"The file $IMG_FILE is mounted to $MOUNT_DIR\"
fi

if ! grep -q \"$IMG_FILE\" /etc/fstab; then
    echo \"$IMG_FILE $MOUNT_DIR ext4 loop 0 0\" | sudo tee -a /etc/fstab > /dev/null || handle_error \"Failed to add entry to /etc/fstab\"
fi

sudo mount -a || handle_error \"Failed to mount entries from /etc/fstab\"

echo \"success\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "1b2182c6-7080-4b09-9699-2ba7c3292913",
      "name": "Unmount Disk",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1060,
        460
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/{{ $('API').item.json.body.domain }}\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/{{ $('API').item.json.body.domain }}\"

# Function to log an error, write to status file, and print to console
handle_error() {
    echo \"error: $1\"
    exit 1
}

if ! df -h | grep -q \"$MOUNT_DIR\"; then
    handle_error \"The file $IMG_FILE is not mounted to $MOUNT_DIR\"
fi

# Remove the mount entry from /etc/fstab if it exists
if grep -q \"$IMG_FILE\" /etc/fstab; then
    sudo sed -i \"\|$(printf '%s\n' \"$IMG_FILE\" | sed 's/[.[\*^$]/\\&/g')|d\" /etc/fstab
fi

# Unmount the image if it is mounted (using fstab)
if mount | grep -q \"$MOUNT_DIR\"; then
    sudo umount \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to unmount $MOUNT_DIR\"
fi

# Remove the mount directory (if needed)
if ! sudo rm -rf \"$MOUNT_DIR\" > /dev/null 2>&1; then
    handle_error \"Failed to remove $MOUNT_DIR\"
fi

echo \"success\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "dd0cd3d9-876e-485c-94ed-f69e6f26c62b",
      "name": "Log",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1180,
        -100
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

CONTAINER_NAME=\"{{ $('API').item.json.body.domain }}\"
LOGS_JSON=\"{}\"

# Function to return error in JSON format
handle_error() {
    echo \"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    exit 1
}

# Check if the container exists
if ! sudo docker ps -a | grep -q \"$CONTAINER_NAME\" > /dev/null 2>&1; then
    handle_error \"Container $CONTAINER_NAME not found\"
fi

# Get logs of the container
LOGS=$(sudo docker logs --tail 1000 \"$CONTAINER_NAME\" 2>&1)
if [ $? -ne 0 ]; then
    handle_error \"Failed to retrieve logs for $CONTAINER_NAME\"
fi

# Escape double quotes in logs for valid JSON
LOGS_ESCAPED=$(echo \"$LOGS\" | sed 's/\"/\\\"/g' | sed ':a;N;$!ba;s/\n/\\n/g')

# Format logs as JSON
LOGS_JSON=\"{\\"logs\\": \\"$LOGS_ESCAPED\\"}\"

echo \"$LOGS_JSON\"
exit 0"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "64e41e91-62b3-4346-874b-e952201fecb5",
      "name": "ChangePackage",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -220,
        -440
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

# Get values for variables from templates
DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
COMPOSE_FILE=\"$COMPOSE_DIR/docker-compose.yml\"
STATUS_FILE=\"$COMPOSE_DIR/status\"
IMG_FILE=\"$COMPOSE_DIR/data.img\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"
VHOST_DIR=\"/opt/docker/nginx-proxy/nginx/vhost.d\"
MOUNT_DIR=\"{{ $('Parametrs').item.json.mount_dir }}/$DOMAIN\"
DOCKER_COMPOSE_TEXT='{{ $('Deploy-docker-compose').item.json[\"docker-compose\"] }}'

NGINX_MAIN_TEXT='{{ $('nginx').item.json['main'] }}'
NGINX_MAIN_FILE=\"$NGINX_DIR/$DOMAIN\"
VHOST_MAIN_FILE=\"$VHOST_DIR/$DOMAIN\"

NGINX_MAIN_LOCATION_TEXT='{{ $('nginx').item.json['main_location'] }}'
NGINX_MAIN_LOCATION_FILE=\"$NGINX_DIR/$DOMAIN\"_location
VHOST_MAIN_LOCATION_FILE=\"$VHOST_DIR/$DOMAIN\"_location

NGINX_CONSOLE_TEXT='{{ $('nginx').item.json['console'] }}'
NGINX_CONSOLE_FILE=\"$NGINX_DIR/console.$DOMAIN\"
VHOST_CONSOLE_FILE=\"$VHOST_DIR/console.$DOMAIN\"

NGINX_CONSOLE_LOCATION_TEXT='{{ $('nginx').item.json['console_location'] }}'
NGINX_CONSOLE_LOCATION_FILE=\"$NGINX_DIR/console.$DOMAIN\"_location
VHOST_CONSOLE_LOCATION_FILE=\"$VHOST_DIR/console.$DOMAIN\"_location

DISK_SIZE=\"{{ $('API').item.json.body.disk }}\"

# Function to log an error, write to status file, and print to console
handle_error() {
    STATUS_JSON=\"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    echo \"$STATUS_JSON\" | sudo tee \"$STATUS_FILE\" > /dev/null
    echo \"error: $1\"
    exit 1
}

# Check if the compose file exists before stopping the container
if [ -f \"$COMPOSE_FILE\" ]; then
    sudo docker-compose -f \"$COMPOSE_FILE\" down > /dev/null 2>&1 || handle_error \"Failed to stop containers\"
else
    handle_error \"docker-compose.yml not found\"
fi

# Unmount the image if it is currently mounted
if mount | grep -q \"$MOUNT_DIR\"; then
    sudo umount \"$MOUNT_DIR\" > /dev/null 2>&1 || handle_error \"Failed to unmount $MOUNT_DIR\"
fi

# Create docker-compose.yml file
echo \"$DOCKER_COMPOSE_TEXT\" | sudo tee \"$COMPOSE_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $COMPOSE_FILE\"

# Create NGINX configuration files
echo \"$NGINX_MAIN_TEXT\" | sudo tee \"$NGINX_MAIN_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_MAIN_FILE\"
echo \"$NGINX_MAIN_LOCATION_TEXT\" | sudo tee \"$NGINX_MAIN_LOCATION_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_MAIN_LOCATION_FILE\"

echo \"$NGINX_CONSOLE_TEXT\" | sudo tee \"$NGINX_CONSOLE_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_CONSOLE_FILE\"
echo \"$NGINX_CONSOLE_LOCATION_TEXT\" | sudo tee \"$NGINX_CONSOLE_LOCATION_FILE\" > /dev/null 2>&1 || handle_error \"Failed to create $NGINX_CONSOLE_LOCATION_FILE\"

# Resize the disk image if it exists
if [ -f \"$IMG_FILE\" ]; then
    sudo truncate -s \"$DISK_SIZE\"G \"$IMG_FILE\" > /dev/null 2>&1 || handle_error \"Failed to resize $IMG_FILE (truncate)\"
    sudo e2fsck -fy \"$IMG_FILE\" > /dev/null 2>&1 || handle_error \"Filesystem check failed on $IMG_FILE\"
    sudo resize2fs \"$IMG_FILE\" > /dev/null 2>&1 || handle_error \"Failed to resize filesystem on $IMG_FILE\"
else
    handle_error \"Disk image $IMG_FILE does not exist\"
fi

# Mount the disk only if it is not already mounted
if ! mount | grep -q \"$MOUNT_DIR\"; then
    sudo mount -a || handle_error \"Failed to mount entries from /etc/fstab\"
fi

# Change to the compose directory
cd \"$COMPOSE_DIR\" > /dev/null 2>&1 || handle_error \"Failed to change directory to $COMPOSE_DIR\"

# Copy NGINX configuration files instead of creating symbolic links
sudo cp -f \"$NGINX_MAIN_FILE\" \"$VHOST_MAIN_FILE\" || handle_error \"Failed to copy $NGINX_MAIN_FILE to $VHOST_MAIN_FILE\"
sudo chmod 777 \"$VHOST_MAIN_FILE\" || handle_error \"Failed to set permissions on $VHOST_MAIN_FILE\"

sudo cp -f \"$NGINX_MAIN_LOCATION_FILE\" \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_MAIN_LOCATION_FILE to $VHOST_MAIN_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_MAIN_LOCATION_FILE\"

sudo cp -f \"$NGINX_CONSOLE_FILE\" \"$VHOST_CONSOLE_FILE\" || handle_error \"Failed to copy $NGINX_CONSOLE_FILE to $VHOST_CONSOLE_FILE\"
sudo chmod 777 \"$VHOST_CONSOLE_FILE\" || handle_error \"Failed to set permissions on $VHOST_CONSOLE_FILE\"

sudo cp -f \"$NGINX_CONSOLE_LOCATION_FILE\" \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_CONSOLE_LOCATION_FILE to $VHOST_CONSOLE_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_CONSOLE_LOCATION_FILE\"

# Start Docker containers using docker-compose
if ! sudo docker-compose up -d > /dev/null 2>error.log; then
    ERROR_MSG=$(tail -n 10 error.log)  # Read the last 10 lines from error.log
    handle_error \"Docker-compose failed: $ERROR_MSG\"
fi

# Update status file
echo \"active\" | sudo tee \"$STATUS_FILE\" > /dev/null

echo \"success\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "d7688118-55bb-4934-aac7-507bd3a3e956",
      "name": "Sticky Note",
      "type": "n8n-nodes-base.stickyNote",
      "position": [
        -2640,
        -1280
      ],
      "parameters": {
        "color": 6,
        "width": 639,
        "height": 909,
        "content": "## 👋 Welcome to PUQ Docker MinIO deploy!
## Template for MinIO: API Backend for WHMCS/WISECP by PUQcloud

v.1

This is an n8n template that creates an API backend for the WHMCS/WISECP module developed by PUQcloud.

## Setup Instructions

### 1. Configure API Webhook and SSH Access
- Create a Credential (Basic Auth) for the **Webhook API Block** in n8n.
- Create a Credential for **SSH access** to a server with Docker installed (**SSH Block**).

### 2. Modify Template Parameters
In the **Parameters** block of the template, update the following settings:

- `server_domain` – must match the domain of the WHMCS/WISECP Docker server.
- `clients_dir` – directory where user data related to Docker and disks will be stored.
- `mount_dir` – default mount point for the container disk (recommended not to change).

**Do not modify** the following technical parameters:

- `screen_left`
- `screen_right`

## Additional Resources
- Full documentation: [https://doc.puq.info/books/docker-minio-whmcs-module](https://doc.puq.info/books/docker-minio-whmcs-module)
- WHMCS module: [https://puqcloud.com/whmcs-module-docker-minio.php](https://puqcloud.com/whmcs-module-docker-minio.php)

"
      },
      "typeVersion": 1
    },
    {
      "id": "e8b68657-ae60-4558-8ea0-768dba92fcba",
      "name": "Deploy-docker-compose",
      "type": "n8n-nodes-base.set",
      "position": [
        -1200,
        -1360
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "docker-compose",
              "type": "string",
              "value": "=version: \"3\"

services:
  {{ $('API').item.json.body.domain }}:
    image: minio/minio
    restart: unless-stopped
    container_name: {{ $('API').item.json.body.domain }}
    command: server /data --console-address \":9001\"
    environment:
      MINIO_ROOT_USER: {{ $('API').item.json.body.username }}
      MINIO_ROOT_PASSWORD: {{ $('API').item.json.body.password }}
      MINIO_BROWSER_REDIRECT_URL: https://console.{{ $('API').item.json.body.domain }}
      LETSENCRYPT_HOST: {{ $('API').item.json.body.domain }},console.{{ $('API').item.json.body.domain }}
      VIRTUAL_HOST_MULTIPORTS: |-
          {{ $('API').item.json.body.domain }}:
            \"/\":
              port: 9000
          console.{{ $('API').item.json.body.domain }}:
            \"/\":
              port: 9001
    volumes:
      - \"{{ $('Parametrs').item.json.mount_dir }}/{{ $('API').item.json.body.domain }}/data:/data\"
    networks:
      - nginx-proxy_web
    mem_limit: \"{{ $('API').item.json.body.ram }}G\"
    cpus: \"{{ $('API').item.json.body.cpu }}\"

networks:
  nginx-proxy_web:
    external: true
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "938520b1-aae6-4fe7-ac8e-e888f0793c8a",
      "name": "Version",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1080,
        1300
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

CONTAINER_NAME=\"{{ $('API').item.json.body.domain }}\"
VERSION_JSON=\"{}\"

# Function to return error in JSON format
handle_error() {
    echo \"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    exit 1
}

# Check if the container exists
if ! sudo docker ps -a | grep -q \"$CONTAINER_NAME\" > /dev/null 2>&1; then
    handle_error \"Container $CONTAINER_NAME not found\"
fi

# Get the MinIO version from the container (first line only)
VERSION=$(sudo docker exec \"$CONTAINER_NAME\" minio -v | head -n 1)

# Extract just the version string
VERSION_CLEAN=$(echo \"$VERSION\" | awk '{print $3}')

# Format version as JSON
VERSION_JSON=\"{\\"version\\": \\"$VERSION_CLEAN\\"}\"

echo \"$VERSION_JSON\"
exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "d83a8249-9ad9-4772-bb1b-5484ebeb4b81",
      "name": "Users",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1140,
        1460
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

CONTAINER_NAME=\"{{ $('API').item.json.body.domain }}\"
MINIO_USERNAME=\"{{ $('API').item.json.body.username }}\"
MINIO_PASSWORD=\"{{ $('API').item.json.body.password }}\"

# Function to return error in JSON format
handle_error() {
    echo \"{\\"status\\": \\"error\\", \\"message\\": \\"$1\\"}\"
    exit 1
}

# Check if the container exists
if ! sudo docker ps -a | grep -q \"$CONTAINER_NAME\" > /dev/null 2>&1; then
    handle_error \"Container $CONTAINER_NAME not found\"
fi

# Set alias for MinIO client
sudo docker exec \"$CONTAINER_NAME\" mc alias set local http://localhost:9000 \"$MINIO_USERNAME\" \"$MINIO_PASSWORD\" > /dev/null 2>&1

# Get user list and format it correctly as JSON array
USERS_JSON=$(sudo docker exec \"$CONTAINER_NAME\" mc admin user list local --json | jq -s '.')

# Check if USERS_JSON is empty
if [ -z \"$USERS_JSON\" ]; then
    handle_error \"Failed to retrieve user list for $CONTAINER_NAME\"
fi

# Wrap in a JSON object
JSON=\"{\\"users\\": $USERS_JSON}\"

echo \"$JSON\"
exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "ba9b26be-31b6-47c9-85c1-719f346abc1a",
      "name": "If1",
      "type": "n8n-nodes-base.if",
      "position": [
        -1780,
        -1260
      ],
      "parameters": {
        "options": {},
        "conditions": {
          "options": {
            "version": 2,
            "leftValue": "",
            "caseSensitive": true,
            "typeValidation": "strict"
          },
          "combinator": "or",
          "conditions": [
            {
              "id": "8602bd4c-9693-4d5f-9e7d-5ee62210baca",
              "operator": {
                "name": "filter.operator.equals",
                "type": "string",
                "operation": "equals"
              },
              "leftValue": "={{ $('API').item.json.body.command }}",
              "rightValue": "create"
            },
            {
              "id": "1c630b59-0e5a-441d-8aa5-70b31338d897",
              "operator": {
                "name": "filter.operator.equals",
                "type": "string",
                "operation": "equals"
              },
              "leftValue": "={{ $('API').item.json.body.command }}",
              "rightValue": "change_package"
            },
            {
              "id": "b3eb7052-a70f-438e-befd-8c5240df32c7",
              "operator": {
                "name": "filter.operator.equals",
                "type": "string",
                "operation": "equals"
              },
              "leftValue": "={{ $('API').item.json.body.command }}",
              "rightValue": "unsuspend"
            }
          ]
        }
      },
      "typeVersion": 2.2
    },
    {
      "id": "c08cfbd4-ef9a-4430-8a03-41ae209a3c92",
      "name": "MinIO",
      "type": "n8n-nodes-base.switch",
      "position": [
        -1680,
        1380
      ],
      "parameters": {
        "rules": {
          "values": [
            {
              "outputKey": "version",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "66ad264d-5393-410c-bfa3-011ab8eb234a",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "app_version"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "users",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "b48957a0-22c0-4ac0-82ef-abd9e7ab0207",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "app_users"
                  }
                ]
              },
              "renameOutput": true
            }
          ]
        },
        "options": {}
      },
      "typeVersion": 3.2
    },
    {
      "id": "d75c83ca-c106-4b96-9db7-9f3ef1e20453",
      "name": "nginx",
      "type": "n8n-nodes-base.set",
      "position": [
        -1420,
        -1360
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "main",
              "type": "string",
              "value": "=ignore_invalid_headers off;
client_max_body_size 0;
proxy_buffering off;
proxy_request_buffering off;"
            },
            {
              "id": "6507763a-21d4-4ff0-84d2-5dc9d21b7430",
              "name": "main_location",
              "type": "string",
              "value": "=# Custom header
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;

proxy_connect_timeout 300;
# Default is HTTP/1, keepalive is only enabled in HTTP/1.1
proxy_http_version 1.1;
proxy_set_header Connection \"\";
chunked_transfer_encoding off;
"
            },
            {
              "id": "d00aa07a-0641-43ef-8fd2-5fb9ef62e313",
              "name": "console",
              "type": "string",
              "value": "=ignore_invalid_headers off;
client_max_body_size 0;
proxy_buffering off;
proxy_request_buffering off;"
            },
            {
              "id": "c00fb803-8b9f-4aca-a1b1-2e3da42fc8d1",
              "name": "console_location",
              "type": "string",
              "value": "=# Custom header
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-NginX-Proxy true;

real_ip_header X-Real-IP;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection \"upgrade\";
  
chunked_transfer_encoding off;"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "70c2cb4d-af9d-4003-8aaf-e5800580552b",
      "name": "Container Stat",
      "type": "n8n-nodes-base.switch",
      "position": [
        -1680,
        -240
      ],
      "parameters": {
        "rules": {
          "values": [
            {
              "outputKey": "inspect",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "66ad264d-5393-410c-bfa3-011ab8eb234a",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_information_inspect"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "stats",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "b48957a0-22c0-4ac0-82ef-abd9e7ab0207",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_information_stats"
                  }
                ]
              },
              "renameOutput": true
            },
            {
              "outputKey": "log",
              "conditions": {
                "options": {
                  "version": 2,
                  "leftValue": "",
                  "caseSensitive": true,
                  "typeValidation": "strict"
                },
                "combinator": "and",
                "conditions": [
                  {
                    "id": "50ede522-af22-4b7a-b1fd-34b27fd3fadd",
                    "operator": {
                      "name": "filter.operator.equals",
                      "type": "string",
                      "operation": "equals"
                    },
                    "leftValue": "={{ $('API').item.json.body.command }}",
                    "rightValue": "container_log"
                  }
                ]
              },
              "renameOutput": true
            }
          ]
        },
        "options": {}
      },
      "typeVersion": 3.2
    },
    {
      "id": "0bb2aeeb-8279-4f13-827f-a6559ef805b1",
      "name": "GET ACL",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1180,
        560
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

# Get values for variables from templates
DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"

NGINX_MAIN_ACL_FILE=\"$NGINX_DIR/$DOMAIN\"_acl
NGINX_CONSOLE_ACL_FILE=\"$NGINX_DIR/console.$DOMAIN\"_acl

# Function to log an error and exit
handle_error() {
    echo \"error: $1\"
    exit 1
}

# Read files if they exist, else assign empty array
if [[ -f \"$NGINX_CONSOLE_ACL_FILE\" ]]; then
    WEB_CONSOLE_IPS=$(cat \"$NGINX_CONSOLE_ACL_FILE\" | jq -R -s 'split(\"\n\") | map(select(length > 0))')
else
    WEB_CONSOLE_IPS=\"[]\"
fi

if [[ -f \"$NGINX_MAIN_ACL_FILE\" ]]; then
    REST_API_IPS=$(cat \"$NGINX_MAIN_ACL_FILE\" | jq -R -s 'split(\"\n\") | map(select(length > 0))')
else
    REST_API_IPS=\"[]\"
fi

# Output JSON
echo \"{ \\"web_console_ips\\": $WEB_CONSOLE_IPS, \\"rest_api_ips\\": $REST_API_IPS }\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "9603bee0-de6f-46bf-97d4-f7a2a4d27514",
      "name": "SET ACL",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1060,
        700
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

# Get values for variables from templates
DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"
VHOST_DIR=\"/opt/docker/nginx-proxy/nginx/vhost.d\"

NGINX_MAIN_ACL_FILE=\"$NGINX_DIR/$DOMAIN\"_acl
NGINX_MAIN_ACL_TEXT=\"{{ $('API').item.json.body.rest_api_ips }}\"
VHOST_MAIN_LOCATION_FILE=\"$VHOST_DIR/$DOMAIN\"_location
NGINX_MAIN_LOCATION_FILE=\"$NGINX_DIR/$DOMAIN\"_location

NGINX_CONSOLE_ACL_FILE=\"$NGINX_DIR/console.$DOMAIN\"_acl
NGINX_CONSOLE_ACL_TEXT=\"{{ $('API').item.json.body.web_console_ips }}\"
VHOST_CONSOLE_LOCATION_FILE=\"$VHOST_DIR/console.$DOMAIN\"_location
NGINX_CONSOLE_LOCATION_FILE=\"$NGINX_DIR/console.$DOMAIN\"_location

# Function to log an error and exit
handle_error() {
    echo \"error: $1\"
    exit 1
}

update_nginx_acl() {
    ACL_FILE=$1
    LOCATION_FILE=$2
    
    if [ -s \"$ACL_FILE\" ]; then
        VALID_LINES=$(grep -vE '^\s*$' \"$ACL_FILE\")
        if [ -n \"$VALID_LINES\" ]; then
            while IFS= read -r line; do
                echo \"allow $line;\" | sudo tee -a \"$LOCATION_FILE\" > /dev/null || handle_error \"Failed to update $LOCATION_FILE\"
            done <<< \"$VALID_LINES\"
            echo \"deny all;\" | sudo tee -a \"$LOCATION_FILE\" > /dev/null || handle_error \"Failed to update $LOCATION_FILE\"
        fi
    fi
}

# Create or overwrite the file with the content from variables
echo \"$NGINX_MAIN_ACL_TEXT\" | sudo tee \"$NGINX_MAIN_ACL_FILE\" > /dev/null
echo \"$NGINX_CONSOLE_ACL_TEXT\" | sudo tee \"$NGINX_CONSOLE_ACL_FILE\" > /dev/null

sudo cp -f \"$NGINX_MAIN_LOCATION_FILE\" \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_MAIN_LOCATION_FILE to $VHOST_MAIN_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_MAIN_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_MAIN_LOCATION_FILE\"

sudo cp -f \"$NGINX_CONSOLE_LOCATION_FILE\" \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to copy $NGINX_CONSOLE_LOCATION_FILE to $VHOST_CONSOLE_LOCATION_FILE\"
sudo chmod 777 \"$VHOST_CONSOLE_LOCATION_FILE\" || handle_error \"Failed to set permissions on $VHOST_CONSOLE_LOCATION_FILE\"

update_nginx_acl \"$NGINX_MAIN_ACL_FILE\" \"$VHOST_MAIN_LOCATION_FILE\"
update_nginx_acl \"$NGINX_CONSOLE_ACL_FILE\" \"$VHOST_CONSOLE_LOCATION_FILE\"

# Reload Nginx with sudo
if sudo docker exec nginx-proxy nginx -s reload; then
    echo \"success\"
else
    handle_error \"Failed to reload Nginx.\"
fi

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    },
    {
      "id": "325e6cfc-f28e-490e-84a0-d8153e1c9fc9",
      "name": "GET NET",
      "type": "n8n-nodes-base.set",
      "onError": "continueRegularOutput",
      "position": [
        -1180,
        840
      ],
      "parameters": {
        "options": {},
        "assignments": {
          "assignments": [
            {
              "id": "21f4453e-c136-4388-be90-1411ae78e8a5",
              "name": "sh",
              "type": "string",
              "value": "=#!/bin/bash

# Get values for variables from templates
DOMAIN=\"{{ $('API').item.json.body.domain }}\"
COMPOSE_DIR=\"{{ $('Parametrs').item.json.clients_dir }}/$DOMAIN\"
NGINX_DIR=\"$COMPOSE_DIR/nginx\"
NET_IN_FILE=\"$COMPOSE_DIR/net_in\"
NET_OUT_FILE=\"$COMPOSE_DIR/net_out\"

# Function to log an error and exit
handle_error() {
    echo \"error: $1\"
    exit 1
}

# Get current network statistics from container
STATS=$(sudo docker exec \"$DOMAIN\" cat /proc/net/dev | grep eth0) || handle_error \"Failed to get network stats\"
NET_IN_NEW=$(echo \"$STATS\" | awk '{print $2}')  # RX bytes (received)
NET_OUT_NEW=$(echo \"$STATS\" | awk '{print $10}') # TX bytes (transmitted)

# Ensure directory exists
mkdir -p \"$COMPOSE_DIR\"

# Read old values, create files if they don't exist
if [[ -f \"$NET_IN_FILE\" ]]; then
    NET_IN_OLD=$(sudo cat \"$NET_IN_FILE\")
else
    NET_IN_OLD=0
fi

if [[ -f \"$NET_OUT_FILE\" ]]; then
    NET_OUT_OLD=$(sudo cat \"$NET_OUT_FILE\")
else
    NET_OUT_OLD=0
fi

# Save new values
echo \"$NET_IN_NEW\" | sudo tee \"$NET_IN_FILE\" > /dev/null
echo \"$NET_OUT_NEW\" | sudo tee \"$NET_OUT_FILE\" > /dev/null

# Output JSON
echo \"{ \\"net_in_new\\": $NET_IN_NEW, \\"net_out_new\\": $NET_OUT_NEW, \\"net_in_old\\": $NET_IN_OLD, \\"net_out_old\\": $NET_OUT_OLD }\"

exit 0
"
            }
          ]
        }
      },
      "typeVersion": 3.4,
      "alwaysOutputData": true
    }
  ],
  "active": true,
  "pinData": {},
  "settings": {
    "timezone": "America/Winnipeg",
    "callerPolicy": "workflowsFromSameOwner",
    "executionOrder": "v1"
  },
  "versionId": "930dd393-6eff-43d5-8446-30ba19fce16d",
  "connections": {
    "If": {
      "main": [
        [
          {
            "node": "Container Stat",
            "type": "main",
            "index": 0
          },
          {
            "node": "Container Actions",
            "type": "main",
            "index": 0
          },
          {
            "node": "MinIO",
            "type": "main",
            "index": 0
          },
          {
            "node": "If1",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "422-Invalid server domain",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "API": {
      "main": [
        [
          {
            "node": "Parametrs",
            "type": "main",
            "index": 0
          }
        ],
        []
      ]
    },
    "If1": {
      "main": [
        [
          {
            "node": "nginx",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Service Actions",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Log": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "SSH": {
      "main": [
        [
          {
            "node": "Code1",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Code1",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Stat": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Stop": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Code1": {
      "main": [
        [
          {
            "node": "API answer",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "MinIO": {
      "main": [
        [
          {
            "node": "Version",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Users",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Start": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Users": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "nginx": {
      "main": [
        [
          {
            "node": "Deploy-docker-compose",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Deploy": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "GET ACL": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "GET NET": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Inspect": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "SET ACL": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Suspend": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ],
        []
      ]
    },
    "Version": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Parametrs": {
      "main": [
        [
          {
            "node": "If",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Unsuspend": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Mount Disk": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Terminated": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Unmount Disk": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "ChangePackage": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Container Stat": {
      "main": [
        [
          {
            "node": "Inspect",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Stat",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Log",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Service Actions": {
      "main": [
        [
          {
            "node": "Test Connection1",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Deploy",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Suspend",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Unsuspend",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Terminated",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "ChangePackage",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Test Connection1": {
      "main": [
        [
          {
            "node": "SSH",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Container Actions": {
      "main": [
        [
          {
            "node": "Start",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Stop",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Mount Disk",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Unmount Disk",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "GET ACL",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "SET ACL",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "GET NET",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Deploy-docker-compose": {
      "main": [
        [
          {
            "node": "Service Actions",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  }
}

Features

  • Webhook API with Basic Auth, designed as the backend for the PUQcloud WHMCS/WISECP MinIO module
  • Automated MinIO container deployment via docker-compose and nginx-proxy over SSH
  • Service lifecycle commands: deploy, suspend, unsuspend, terminate and change package
  • Container operations: start, stop, mount/unmount disk, IP access control (ACL) and network usage
  • Monitoring endpoints: container inspect, stats, logs, MinIO version and user list

Technical Analysis

Node Types and Roles

  • Webhook: receives API requests from the WHMCS/WISECP module (Basic Auth)
  • If / Switch: validates the server domain and routes each request by its command field
  • Set: builds the shell scripts, nginx snippets and docker-compose definition for each command
  • SSH: runs the generated scripts on the Docker host
  • Code: post-processes the SSH output before it is returned
  • RespondToWebhook: returns the JSON result (including the 422 "Invalid server domain" response)

Complexity Assessment

Setup difficulty: ★★★★☆
Maintenance difficulty: ★★☆☆☆
Extensibility: ★★★★☆

Implementation Guide

Prerequisites

  • An n8n instance where you can create credentials and activate webhooks
  • A Docker host reachable over SSH, with docker and docker-compose installed and sudo available to the SSH user
  • A running nginx-proxy stack: the generated compose file joins the external nginx-proxy_web network, relies on LETSENCRYPT_HOST for certificates, and the scripts write into /opt/docker/nginx-proxy/nginx/vhost.d (a quick host check is sketched below)
  • The PUQcloud Docker MinIO module installed in WHMCS or WISECP
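
A minimal sanity check on the Docker host, assuming the paths and names used by the embedded scripts (adjust if your nginx-proxy lives elsewhere):

# Check the tooling and nginx-proxy pieces the workflow scripts expect
sudo docker --version && sudo docker-compose --version
sudo docker network ls | grep nginx-proxy_web    # external network used by the generated compose file
sudo docker ps --filter name=nginx-proxy         # container reloaded by the SET ACL script
ls /opt/docker/nginx-proxy/nginx/vhost.d         # vhost.d directory that receives the *_location files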

Configuration Steps

  1. Import the workflow JSON file into n8n
  2. Create a Basic Auth credential and attach it to the API webhook node
  3. Create an SSH credential for the Docker host and attach it to the SSH node
  4. Adjust the Parametrs node: server_domain, clients_dir and mount_dir (leave screen_left and screen_right unchanged)
  5. Activate the workflow and point the WHMCS/WISECP module at the webhook URL
  6. Send a test request to verify connectivity (see the example below)
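
A hedged example of a test call. The field names (server_domain, command, domain, username, password, ram, cpu) are taken from the expressions inside the workflow; the real payload is assembled by the WHMCS/WISECP module, so treat this as a sketch rather than the module's wire format, and replace the host, credentials and domain with your own values:

curl -u apiuser:apipass -X POST "https://your-n8n-host/webhook/docker-minio" \
  -H "Content-Type: application/json" \
  -d '{
        "server_domain": "d01-test.uuq.pl",
        "command": "app_version",
        "domain": "customer1.example.com",
        "username": "minio-admin",
        "password": "strong-password",
        "ram": "1",
        "cpu": "1"
      }'

server_domain must match the value configured in the Parametrs node; otherwise the workflow answers with the 422 error before running any command.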

Key Parameters

Parameter      Default              Description
server_domain  d01-test.uuq.pl      Must match the domain of the WHMCS/WISECP Docker server; requests with a different value are rejected with 422
clients_dir    /opt/docker/clients  Directory on the Docker host where per-client compose files, nginx snippets and counters are stored
mount_dir      /mnt                 Default mount point for the container data disk (recommended not to change)
screen_left    {{                   Technical parameter; do not modify
screen_right   }}                   Technical parameter; do not modify
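
For orientation, the GET ACL, SET ACL and GET NET scripts address per-client files under clients_dir roughly as follows; the compose file name and the exact set of generated files are assumptions, since parts of the deploy script are defined elsewhere in the workflow:

/opt/docker/clients/<domain>/           # clients_dir + client domain
    docker-compose.yml                  # generated MinIO service definition (file name assumed)
    net_in, net_out                     # cached RX/TX byte counters used by GET NET
    nginx/
        <domain>_acl                    # allowed IPs for the S3 API, written by SET ACL
        console.<domain>_acl            # allowed IPs for the web console
        <domain>_location               # nginx location snippet copied into vhost.d
        console.<domain>_location
/mnt/<domain>/data                      # data disk mounted into the container as /data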

Best Practices

Optimization Suggestions

  • Pin or regularly update the minio/minio image; the compose file uses the untagged (latest) image
  • Size the ram and cpu values sent by the module to your hosting plans; they become mem_limit and cpus in the compose file
  • Watch free space under clients_dir and mount_dir, since every client gets its own data directory
  • Prefer restricting access with the ACL commands over exposing the S3 API and console to all IPs

Security Notes

  • Protect the webhook Basic Auth and SSH credentials; anyone holding them can manage containers on the host
  • The scripts run with sudo, so restrict the SSH account to the Docker host and keep the host patched
  • MinIO root credentials arrive in the request body and are written into the compose file; always call the webhook over HTTPS and use strong passwords
  • Use the rest_api_ips and web_console_ips ACLs to limit who can reach each client instance (an example payload is sketched below)
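
A hedged sketch of the ACL fields. The SET ACL script writes the raw field content to a file and turns every non-empty line into an nginx allow directive followed by deny all, so newline-separated addresses are a reasonable assumption (CIDR ranges work as far as nginx allow supports them):

# Hypothetical request fields consumed by the SET ACL script
"rest_api_ips":    "203.0.113.10\n203.0.113.0/24",
"web_console_ips": "198.51.100.7"

# Resulting snippet appended to the client's nginx location file
allow 203.0.113.10;
allow 203.0.113.0/24;
deny all;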

Performance Notes

  • Every command runs its own SSH execution on the Docker host; avoid polling stats, logs or network counters more often than needed
  • GET NET caches the previous RX/TX counters in net_in and net_out, so traffic for an interval is the delta between two calls (see the sketch below)
  • mem_limit and cpus cap each client container, but still monitor overall host CPU, RAM and disk as the number of clients grows
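
A minimal sketch of how a caller might turn the GET NET output into per-interval traffic. It assumes the counters only reset when the container restarts, in which case the new value can be smaller than the old one and the delta is clamped to zero:

# GET NET prints JSON such as:
# { "net_in_new": 123, "net_out_new": 456, "net_in_old": 100, "net_out_old": 400 }
RESPONSE='{ "net_in_new": 123, "net_out_new": 456, "net_in_old": 100, "net_out_old": 400 }'
RX_DELTA=$(echo "$RESPONSE" | jq '[.net_in_new  - .net_in_old,  0] | max')
TX_DELTA=$(echo "$RESPONSE" | jq '[.net_out_new - .net_out_old, 0] | max')
echo "RX bytes since last check: $RX_DELTA, TX bytes since last check: $TX_DELTA"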

Troubleshooting

Common Issues

The API answers 422 "Invalid server domain"

The server_domain sent by the module does not match the server_domain value in the Parametrs node. Make sure both sides use exactly the same domain.

"Docker-compose failed: ..." during deploy

The deploy script returns the last 10 lines of docker-compose's error.log. Check that the minio/minio image can be pulled, the container name is not already taken and the external nginx-proxy_web network exists on the host.

Debugging Tips

  • Open the execution log of each n8n node and inspect the SSH node output first
  • Start with read-only commands such as app_version, container_information_inspect and container_log before running destructive ones
  • Verify the container and nginx-proxy state directly on the Docker host (see the commands below)
  • Run the workflow step by step to locate the failing node
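
The host-side checks below mirror what the embedded scripts run; the client domain is a hypothetical placeholder:

DOMAIN="customer1.example.com"                 # hypothetical client domain
sudo docker ps -a | grep "$DOMAIN"             # existence check used by most scripts
sudo docker logs --tail 50 "$DOMAIN"           # recent MinIO output
sudo docker exec "$DOMAIN" minio -v            # version check performed by the Version script
sudo docker exec nginx-proxy nginx -t          # validate the generated vhost snippets
sudo docker exec nginx-proxy nginx -s reload   # reload performed by the SET ACL script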

Error Handling

The workflow includes the following error-handling mechanisms:

  • Requests with a mismatched server_domain are rejected with an HTTP 422 response before any command runs
  • The embedded shell scripts wrap failures in a handle_error function that prints an error message and exits with a non-zero status
  • docker-compose failures are reported together with the last 10 lines of error.log
  • Script nodes use "Always Output Data" and continue-on-error, so the API still returns a structured answer when a step fails
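
Sample failure outputs as the scripts in this workflow format them; the container name is a hypothetical placeholder:

# Version / Users scripts (JSON form)
{"status": "error", "message": "Container customer1.example.com not found"}

# ACL / network scripts (plain form)
error: Failed to reload Nginx.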