980 lines
28 KiB
Bash
980 lines
28 KiB
Bash
|
|
#!/usr/bin/env bash
# Proxmox/n8n helper library: LXC container provisioning helpers plus
# n8n REST API automation (credentials, workflow import/activation).

# -E: ERR trap inherited by functions/subshells (needed by setup_traps);
# -e/-u/-o pipefail: abort on errors, unset vars, and failed pipelines.
set -Eeuo pipefail

# Debug mode: 0 = emit JSON only, 1 = also write logs to stderr
DEBUG="${DEBUG:-0}"
|
||
|
|
|
||
|
|
# Timestamp prefix for log lines, e.g. "[2024-01-31 12:34:56]".
log_ts() {
  date '+[%F %T]'
}
|
||
|
|
|
||
|
|
# Emit an INFO line on stderr when DEBUG=1; silent otherwise.
# Always returns 0 so callers under `set -e` are not aborted.
info() {
  if [[ "$DEBUG" == "1" ]]; then
    echo "$(log_ts) INFO: $*" >&2
  fi
  return 0
}
|
||
|
|
|
||
|
|
# Emit a WARN line on stderr when DEBUG=1; silent otherwise.
# Always returns 0 so callers under `set -e` are not aborted.
warn() {
  if [[ "$DEBUG" == "1" ]]; then
    echo "$(log_ts) WARN: $*" >&2
  fi
  return 0
}
|
||
|
|
|
||
|
|
# Fatal error: log to stderr (DEBUG=1) or emit a JSON error object, then exit 1.
# FIX: the message is now escaped before being embedded, so the emitted JSON
# stays valid even when the message contains double quotes or backslashes.
die() {
  local msg="$*"
  if [[ "$DEBUG" == "1" ]]; then
    echo "$(log_ts) ERROR: ${msg}" >&2
  else
    # Escape backslashes first, then double quotes, to keep the JSON valid.
    msg="${msg//\\/\\\\}"
    msg="${msg//\"/\\\"}"
    # Emit the JSON error on fd 3 if it is open, otherwise on stdout.
    if { true >&3; } 2>/dev/null; then
      echo "{\"error\": \"${msg}\"}" >&3
    else
      echo "{\"error\": \"${msg}\"}"
    fi
  fi
  exit 1
}
|
||
|
|
|
||
|
|
# Install an ERR trap that reports the failing command either as a readable
# log line (DEBUG=1) or as a JSON error object, mirroring die().
# JSON goes to fd 3 when open, otherwise stdout. Relies on `set -E` (set at
# the top of this file) so the trap is inherited by functions and subshells.
# The trap body is a single-quoted string evaluated at trigger time; the
# German comment inside it ("emit JSON error on fd 3 if available, else
# stdout") is part of that string and left untouched.
setup_traps() {
  trap 'rc=$?; if [[ $rc -ne 0 ]]; then
    if [[ "$DEBUG" == "1" ]]; then
      echo "$(log_ts) ERROR: Failed at line ${BASH_LINENO[0]}: ${BASH_COMMAND} (exit=$rc)" >&2
    else
      # JSON-Fehler auf fd 3 ausgeben (falls verfügbar), sonst stdout
      if { true >&3; } 2>/dev/null; then
        echo "{\"error\": \"Failed at line ${BASH_LINENO[0]}: ${BASH_COMMAND} (exit=$rc)\"}" >&3
      else
        echo "{\"error\": \"Failed at line ${BASH_LINENO[0]}: ${BASH_COMMAND} (exit=$rc)\"}"
      fi
    fi
  fi; exit $rc' ERR
}
|
||
|
|
|
||
|
|
# Verify every named command exists on PATH; die() on the first missing one.
need_cmd() {
  local cmd
  for cmd in "$@"; do
    if ! command -v "$cmd" >/dev/null 2>&1; then
      die "Missing command: $cmd"
    fi
  done
}
|
||
|
|
|
||
|
|
# ----- Proxmox helpers -----
|
||
|
|
|
||
|
|
# True if the named storage is listed by `pvesm status`.
# FIX: use a fixed-string (-F) whole-line match; the previous plain `grep -qx`
# treated the storage name as a regex, so e.g. "local.zfs" would falsely
# match "local-zfs". `--` guards against names starting with a dash.
pve_storage_exists() {
  local s="$1"
  pvesm status | awk 'NR>1{print $1}' | grep -qxF -- "$s"
}
|
||
|
|
|
||
|
|
# True if a network interface/bridge with the given name exists on this host.
pve_bridge_exists() {
  local bridge_name="$1"
  ip link show "$bridge_name" >/dev/null 2>&1
}
|
||
|
|
|
||
|
|
# Return ONLY template path on stdout. Logs go to stderr.
|
||
|
|
# Ensure the Debian 12 CT template is downloaded; print "<storage>:vztmpl/<tmpl>".
# Return ONLY the template path on stdout. Logs go to stderr.
# $1 = preferred template storage; falls back to 'local' if unusable.
pve_template_ensure_debian12() {
  local storage="$1"
  local tmpl="debian-12-standard_12.12-1_amd64.tar.zst"
  # NOTE(review): cache path assumes templates live under /var/lib/vz even
  # when tstore falls back differently — confirm for non-default dir storages.
  local cache="/var/lib/vz/template/cache/${tmpl}"

  # pveam templates must be on "local" (dir storage), not on zfs
  local tstore="$storage"
  # Sanity check only; a broken pveam is logged but not fatal here.
  if ! pveam available -section system >/dev/null 2>&1; then
    warn "pveam not working? continuing"
  fi

  # heuristic: if storage isn't usable for templates, fallback to local
  # Most Proxmox setups use 'local' for templates.
  if ! pvesm status | awk 'NR>1{print $1,$2}' | grep -q "^${tstore} "; then
    warn "pveam storage '${tstore}' not found; falling back to 'local'"
    tstore="local"
  fi

  # If storage exists but isn't a dir storage for templates, pveam will fail -> fallback
  if ! pveam list "${tstore}" >/dev/null 2>&1; then
    warn "pveam storage '${tstore}' not available for templates; falling back to 'local'"
    tstore="local"
  fi

  # Download only when the cached archive is missing; pveam's own output is
  # redirected to stderr so stdout stays clean for the caller.
  if [[ ! -f "$cache" ]]; then
    info "Downloading CT template to ${tstore}: ${tmpl}"
    pveam download "${tstore}" "${tmpl}" >&2
  fi

  echo "${tstore}:vztmpl/${tmpl}"
}
|
||
|
|
|
||
|
|
# Build net0 string (with optional vlan tag)
|
||
|
|
# Compose the pct `net0` option string for eth0 on the given bridge.
# $1 = bridge name, $2 = ip config ("dhcp" or CIDR), $3 = optional VLAN tag
# (0 / omitted = untagged). A fresh random MAC is generated via gen_mac.
pve_build_net0() {
  local bridge="$1"
  local ipcfg="$2"
  local vlan="${3:-0}"

  local mac
  mac="$(gen_mac)"

  local net0="name=eth0,bridge=${bridge},hwaddr=${mac}"
  if [[ "$vlan" != "0" ]]; then
    net0+=",tag=${vlan}"
  fi
  # Both "dhcp" and a static CIDR pass through identically as ip=<cfg>.
  net0+=",ip=${ipcfg}"

  echo "$net0"
}
|
||
|
|
|
||
|
|
# Wait for IP from pct; returns first IPv4
|
||
|
|
# Poll a container (up to 40 attempts, 1s apart) for its first global IPv4
# address; print it and return 0, or return 1 on timeout.
pct_wait_for_ip() {
  local ctid="$1"
  local attempt ip
  for (( attempt = 0; attempt < 40; attempt++ )); do
    ip="$(pct exec "$ctid" -- bash -lc "ip -4 -o addr show scope global | awk '{print \$4}' | cut -d/ -f1 | head -n1" 2>/dev/null || true)"
    if [[ -n "$ip" ]]; then
      echo "$ip"
      return 0
    fi
    sleep 1
  done
  return 1
}
|
||
|
|
|
||
|
|
# Run a command line inside a container via `pct exec` with a login shell.
# Usage: pct_exec <ctid> <command...> — remaining args are joined with
# spaces ("$*") and handed to `bash -lc` inside the container.
pct_exec() {
  local ctid="$1"; shift
  pct exec "$ctid" -- bash -lc "$*"
}
|
||
|
|
|
||
|
|
# Push a text file into CT without SCP
|
||
|
|
pct_push_text() {
|
||
|
|
local ctid="$1"
|
||
|
|
local dest="$2"
|
||
|
|
local content="$3"
|
||
|
|
pct exec "$ctid" -- bash -lc "cat > '$dest' <<'EOF'
|
||
|
|
${content}
|
||
|
|
EOF"
|
||
|
|
}
|
||
|
|
|
||
|
|
# Cluster VMID existence check (best effort)
|
||
|
|
# Uses pvesh cluster resources. If API not available, returns false (and caller can choose another approach).
|
||
|
|
# Cluster VMID existence check (best effort); returns 0 if the VMID exists.
# Uses pvesh cluster resources; if the API or JSON is unavailable, treats the
# VMID as absent.
# FIXES: (1) the old `|| exit 0` terminated the WHOLE SCRIPT (with status 0)
# whenever the embedded python exited 1, i.e. exactly when the VMID existed;
# (2) `python3 - <<'PY'` fed the heredoc to python's stdin, so the piped
# pvesh JSON never reached json.load(sys.stdin). Using `python3 -c` keeps
# the pipe as stdin, and the status is captured without exiting under set -e.
pve_vmid_exists_cluster() {
  local vmid="$1"
  local rc=0
  # Python exits 1 when the vmid is found, 0 otherwise (including on
  # unparsable/missing JSON, which counts as "not found").
  pvesh get /cluster/resources --output-format json 2>/dev/null \
    | python3 -c '
import json, sys
vmid = sys.argv[1]
try:
    data = json.load(sys.stdin)
except Exception:
    sys.exit(0)
for r in data:
    if str(r.get("vmid", "")) == str(vmid):
        sys.exit(1)
sys.exit(0)
' "$vmid" || rc=$?
  [[ $rc -eq 1 ]]
}
|
||
|
|
|
||
|
|
# Your agreed CTID scheme: unix time - 1,000,000,000
|
||
|
|
# Derive a CT ID from a unix timestamp (agreed scheme: epoch seconds - 1e9).
pve_ctid_from_unixtime() {
  local epoch="$1"
  printf '%s\n' "$(( epoch - 1000000000 ))"
}
|
||
|
|
|
||
|
|
# ----- Generators / policies -----
|
||
|
|
|
||
|
|
# Avoid "tr: Broken pipe" by not piping random through tr|head.
|
||
|
|
# Print 64 random hex characters (32 bytes of entropy) on stdout.
# openssl is used directly (instead of /dev/urandom | tr | head) to avoid
# "tr: Broken pipe" noise under pipefail.
gen_hex_64() {
  # 64 hex chars = 32 bytes
  openssl rand -hex 32
}
|
||
|
|
|
||
|
|
# Random locally-administered unicast MAC address: 02:xx:xx:xx:xx:xx.
gen_mac() {
  local o2=$(( RANDOM % 256 )) o3=$(( RANDOM % 256 )) o4=$(( RANDOM % 256 ))
  local o5=$(( RANDOM % 256 )) o6=$(( RANDOM % 256 ))
  printf '02:%02x:%02x:%02x:%02x:%02x\n' "$o2" "$o3" "$o4" "$o5" "$o6"
}
|
||
|
|
|
||
|
|
# Enforce the password policy: at least 8 characters, containing at least
# one digit and one uppercase letter. Returns 0 if compliant, 1 otherwise.
password_policy_check() {
  local pw="$1"
  (( ${#pw} >= 8 )) || return 1
  [[ "$pw" == *[0-9]* ]] || return 1
  [[ "$pw" == *[A-Z]* ]] || return 1
  return 0
}
|
||
|
|
|
||
|
|
# Generate a random password satisfying password_policy_check.
# Loops until a candidate passes (no broken pipes, deterministic enough);
# "A1" is appended to guarantee the uppercase and digit requirements.
gen_password_policy() {
  local candidate
  while :; do
    # 18 random bytes -> base64-ish text, strip confusable chars, keep 16.
    candidate="$(openssl rand -base64 18 | tr -d '/+=' | cut -c1-16)"
    candidate+="A1"
    if password_policy_check "$candidate"; then
      printf '%s\n' "$candidate"
      return 0
    fi
  done
}
|
||
|
|
|
||
|
|
# Pass stdin through to stdout unchanged.
# Exists as a named seam so the final JSON result is the only thing written
# to stdout, while all logging stays on stderr.
emit_json() {
  # prints to stdout only; keep logs on stderr
  cat
}
|
||
|
|
|
||
|
|
# ----- n8n API helpers -----
|
||
|
|
# These functions interact with n8n REST API inside a container
|
||
|
|
|
||
|
|
# Login to n8n and save session cookie
|
||
|
|
# Usage: n8n_api_login <ctid> <email> <password>
|
||
|
|
# Returns: 0 on success, 1 on failure
|
||
|
|
# Side effect: Creates /tmp/n8n_cookies.txt in the container
|
||
|
|
# Login to n8n and save session cookie.
# Usage: n8n_api_login <ctid> <email> <password>
# Returns: 0 on success, 1 on failure.
# Side effect: creates /tmp/n8n_cookies.txt inside the container.
n8n_api_login() {
  local ctid="$1"
  local email="$2"
  local password="$3"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Logging in as ${email}..."

  # Escape backslashes and double quotes so the password is valid JSON.
  local escaped_password
  escaped_password=$(echo "$password" | sed 's/\\/\\\\/g; s/"/\\"/g')

  local response
  # FIX: current n8n expects "emailOrLdapLoginId" in the login payload, not
  # "email" — the working setup script in n8n_setup_rag_workflow already uses
  # that field; this keeps both login paths consistent.
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X POST '${api_url}/rest/login' \
    -H 'Content-Type: application/json' \
    -c /tmp/n8n_cookies.txt \
    -d '{\"emailOrLdapLoginId\":\"${email}\",\"password\":\"${escaped_password}\"}' 2>&1" || echo "CURL_FAILED")

  # Heuristic failure check: curl failed outright, or the body mentions an
  # error without any "data" payload.
  if [[ "$response" == *"CURL_FAILED"* ]] || [[ "$response" == *"error"* && "$response" != *"data"* ]]; then
    warn "n8n API login failed: ${response}"
    return 1
  fi

  info "n8n API: Login successful"
  return 0
}
|
||
|
|
|
||
|
|
# Create PostgreSQL credential in n8n
|
||
|
|
# Usage: n8n_api_create_postgres_credential <ctid> <name> <host> <port> <database> <user> <password>
|
||
|
|
# Returns: Credential ID on stdout, or empty on failure
|
||
|
|
# Create a PostgreSQL credential via POST /rest/credentials, reusing the
# session cookie written by n8n_api_login. Prints the new credential ID on
# stdout (empty string on failure).
n8n_api_create_postgres_credential() {
  local ctid="$1"
  local name="$2"
  local host="$3"
  local port="$4"
  local database="$5"
  local user="$6"
  local password="$7"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Creating PostgreSQL credential '${name}'..."

  # Escape special characters in password for JSON
  local escaped_password
  escaped_password=$(echo "$password" | sed 's/\\/\\\\/g; s/"/\\"/g')

  local response
  # The JSON body is assembled on the host (outer double quotes), so the
  # ${...} placeholders expand before the command runs in the container.
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X POST '${api_url}/rest/credentials' \
    -H 'Content-Type: application/json' \
    -b /tmp/n8n_cookies.txt \
    -d '{
      \"name\": \"${name}\",
      \"type\": \"postgres\",
      \"data\": {
        \"host\": \"${host}\",
        \"port\": ${port},
        \"database\": \"${database}\",
        \"user\": \"${user}\",
        \"password\": \"${escaped_password}\",
        \"ssl\": \"disable\"
      }
    }' 2>&1" || echo "")

  # Extract credential ID from response (first "id" field in the JSON).
  local cred_id
  cred_id=$(echo "$response" | grep -oP '"id"\s*:\s*"\K[^"]+' | head -1 || echo "")

  if [[ -n "$cred_id" ]]; then
    info "n8n API: PostgreSQL credential created with ID: ${cred_id}"
    echo "$cred_id"
    return 0
  else
    warn "n8n API: Failed to create PostgreSQL credential: ${response}"
    echo ""
    return 1
  fi
}
|
||
|
|
|
||
|
|
# Create Ollama credential in n8n
|
||
|
|
# Usage: n8n_api_create_ollama_credential <ctid> <name> <base_url>
|
||
|
|
# Returns: Credential ID on stdout, or empty on failure
|
||
|
|
# Create an Ollama credential via POST /rest/credentials, reusing the session
# cookie written by n8n_api_login. Prints the new credential ID on stdout
# (empty string on failure).
n8n_api_create_ollama_credential() {
  local ctid="$1"
  local name="$2"
  local base_url="$3"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Creating Ollama credential '${name}'..."

  local response
  # JSON body assembled on the host; ${...} placeholders expand here.
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X POST '${api_url}/rest/credentials' \
    -H 'Content-Type: application/json' \
    -b /tmp/n8n_cookies.txt \
    -d '{
      \"name\": \"${name}\",
      \"type\": \"ollamaApi\",
      \"data\": {
        \"baseUrl\": \"${base_url}\"
      }
    }' 2>&1" || echo "")

  # Extract credential ID from response (first "id" field in the JSON).
  local cred_id
  cred_id=$(echo "$response" | grep -oP '"id"\s*:\s*"\K[^"]+' | head -1 || echo "")

  if [[ -n "$cred_id" ]]; then
    info "n8n API: Ollama credential created with ID: ${cred_id}"
    echo "$cred_id"
    return 0
  else
    warn "n8n API: Failed to create Ollama credential: ${response}"
    echo ""
    return 1
  fi
}
|
||
|
|
|
||
|
|
# Import workflow into n8n
|
||
|
|
# Usage: n8n_api_import_workflow <ctid> <workflow_json_file_in_container>
|
||
|
|
# Returns: Workflow ID on stdout, or empty on failure
|
||
|
|
# Import a workflow via POST /rest/workflows from a JSON file that already
# exists INSIDE the container. Prints the new workflow ID on stdout (empty
# string on failure).
n8n_api_import_workflow() {
  local ctid="$1"
  local workflow_file="$2"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Importing workflow from ${workflow_file}..."

  local response
  # curl's @file syntax reads the body from the in-container path.
  # NOTE(review): ${workflow_file} is interpolated unquoted into the
  # container command — paths with spaces would break; current callers use
  # fixed /tmp paths.
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X POST '${api_url}/rest/workflows' \
    -H 'Content-Type: application/json' \
    -b /tmp/n8n_cookies.txt \
    -d @${workflow_file} 2>&1" || echo "")

  # Extract workflow ID from response (first "id" field in the JSON).
  local workflow_id
  workflow_id=$(echo "$response" | grep -oP '"id"\s*:\s*"\K[^"]+' | head -1 || echo "")

  if [[ -n "$workflow_id" ]]; then
    info "n8n API: Workflow imported with ID: ${workflow_id}"
    echo "$workflow_id"
    return 0
  else
    warn "n8n API: Failed to import workflow: ${response}"
    echo ""
    return 1
  fi
}
|
||
|
|
|
||
|
|
# Activate workflow in n8n
|
||
|
|
# Usage: n8n_api_activate_workflow <ctid> <workflow_id>
|
||
|
|
# Returns: 0 on success, 1 on failure
|
||
|
|
# Activate a workflow via PATCH /rest/workflows/<id> with {"active": true}.
# Returns 0 when the response confirms "active": true, 1 otherwise.
# NOTE(review): the setup script in n8n_setup_rag_workflow instead uses
# POST /rest/workflows/<id>/activate with a versionId — confirm which
# endpoint the deployed n8n version actually supports.
n8n_api_activate_workflow() {
  local ctid="$1"
  local workflow_id="$2"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Activating workflow ${workflow_id}..."

  local response
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X PATCH '${api_url}/rest/workflows/${workflow_id}' \
    -H 'Content-Type: application/json' \
    -b /tmp/n8n_cookies.txt \
    -d '{\"active\": true}' 2>&1" || echo "")

  # Accept both compact and pretty-printed "active": true in the reply.
  if [[ "$response" == *"\"active\":true"* ]] || [[ "$response" == *"\"active\": true"* ]]; then
    info "n8n API: Workflow ${workflow_id} activated successfully"
    return 0
  else
    warn "n8n API: Failed to activate workflow: ${response}"
    return 1
  fi
}
|
||
|
|
|
||
|
|
# Generate RAG workflow JSON with credential IDs
|
||
|
|
# Usage: n8n_generate_rag_workflow_json <postgres_cred_id> [postgres_cred_name] <ollama_cred_id> [ollama_cred_name] [ollama_model] [embedding_model]
|
||
|
|
# Returns: Workflow JSON on stdout
|
||
|
|
# Emit the RAG workflow definition as JSON on stdout, with the given
# credential IDs/names and model names substituted into the template.
# The heredoc delimiter is unquoted, so ${...} expands while the escaped
# \$json.chatInput survives as a literal n8n expression.
n8n_generate_rag_workflow_json() {
  local postgres_cred_id="$1"
  local postgres_cred_name="${2:-PostgreSQL (local)}"
  local ollama_cred_id="$3"
  local ollama_cred_name="${4:-Ollama (local)}"
  local ollama_model="${5:-llama3.2:3b}"
  local embedding_model="${6:-nomic-embed-text:v1.5}"

  cat <<WORKFLOW_JSON
{
  "name": "RAG KI-Bot (PGVector)",
  "nodes": [
    {
      "parameters": {
        "public": true,
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.chatTrigger",
      "typeVersion": 1.3,
      "position": [0, 0],
      "id": "chat-trigger-001",
      "name": "When chat message received",
      "webhookId": "rag-chat-webhook",
      "notesInFlow": true,
      "notes": "Chat URL: /webhook/rag-chat-webhook/chat"
    },
    {
      "parameters": {
        "promptType": "define",
        "text": "={{ \$json.chatInput }}\nAntworte ausschliesslich auf Deutsch",
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.agent",
      "typeVersion": 2.2,
      "position": [208, 0],
      "id": "ai-agent-001",
      "name": "AI Agent"
    },
    {
      "parameters": {
        "model": "${ollama_model}",
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.lmChatOllama",
      "typeVersion": 1,
      "position": [64, 208],
      "id": "ollama-chat-001",
      "name": "Ollama Chat Model",
      "credentials": {
        "ollamaApi": {
          "id": "${ollama_cred_id}",
          "name": "${ollama_cred_name}"
        }
      }
    },
    {
      "parameters": {},
      "type": "@n8n/n8n-nodes-langchain.memoryBufferWindow",
      "typeVersion": 1.3,
      "position": [224, 208],
      "id": "memory-001",
      "name": "Simple Memory"
    },
    {
      "parameters": {
        "mode": "retrieve-as-tool",
        "toolName": "knowledge_base",
        "toolDescription": "Verwende dieses Tool für Infos die der Benutzer fragt. Sucht in der Wissensdatenbank nach relevanten Dokumenten.",
        "tableName": "documents",
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.vectorStorePGVector",
      "typeVersion": 1,
      "position": [432, 128],
      "id": "pgvector-retrieve-001",
      "name": "PGVector Store",
      "credentials": {
        "postgres": {
          "id": "${postgres_cred_id}",
          "name": "${postgres_cred_name}"
        }
      }
    },
    {
      "parameters": {
        "model": "${embedding_model}"
      },
      "type": "@n8n/n8n-nodes-langchain.embeddingsOllama",
      "typeVersion": 1,
      "position": [384, 320],
      "id": "embeddings-retrieve-001",
      "name": "Embeddings Ollama",
      "credentials": {
        "ollamaApi": {
          "id": "${ollama_cred_id}",
          "name": "${ollama_cred_name}"
        }
      }
    },
    {
      "parameters": {
        "formTitle": "Dokument hochladen",
        "formDescription": "Laden Sie ein PDF-Dokument hoch, um es in die Wissensdatenbank aufzunehmen.",
        "formFields": {
          "values": [
            {
              "fieldLabel": "Dokument",
              "fieldType": "file",
              "acceptFileTypes": ".pdf"
            }
          ]
        },
        "options": {}
      },
      "type": "n8n-nodes-base.formTrigger",
      "typeVersion": 2.3,
      "position": [768, 0],
      "id": "form-trigger-001",
      "name": "On form submission",
      "webhookId": "rag-upload-form"
    },
    {
      "parameters": {
        "operation": "pdf",
        "binaryPropertyName": "Dokument",
        "options": {}
      },
      "type": "n8n-nodes-base.extractFromFile",
      "typeVersion": 1,
      "position": [976, 0],
      "id": "extract-file-001",
      "name": "Extract from File"
    },
    {
      "parameters": {
        "mode": "insert",
        "tableName": "documents",
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.vectorStorePGVector",
      "typeVersion": 1,
      "position": [1184, 0],
      "id": "pgvector-insert-001",
      "name": "PGVector Store Insert",
      "credentials": {
        "postgres": {
          "id": "${postgres_cred_id}",
          "name": "${postgres_cred_name}"
        }
      }
    },
    {
      "parameters": {
        "model": "${embedding_model}"
      },
      "type": "@n8n/n8n-nodes-langchain.embeddingsOllama",
      "typeVersion": 1,
      "position": [1168, 240],
      "id": "embeddings-insert-001",
      "name": "Embeddings Ollama1",
      "credentials": {
        "ollamaApi": {
          "id": "${ollama_cred_id}",
          "name": "${ollama_cred_name}"
        }
      }
    },
    {
      "parameters": {
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.documentDefaultDataLoader",
      "typeVersion": 1.1,
      "position": [1392, 240],
      "id": "data-loader-001",
      "name": "Default Data Loader"
    }
  ],
  "connections": {
    "When chat message received": {
      "main": [[{"node": "AI Agent", "type": "main", "index": 0}]]
    },
    "Ollama Chat Model": {
      "ai_languageModel": [[{"node": "AI Agent", "type": "ai_languageModel", "index": 0}]]
    },
    "Simple Memory": {
      "ai_memory": [[{"node": "AI Agent", "type": "ai_memory", "index": 0}]]
    },
    "PGVector Store": {
      "ai_tool": [[{"node": "AI Agent", "type": "ai_tool", "index": 0}]]
    },
    "Embeddings Ollama": {
      "ai_embedding": [[{"node": "PGVector Store", "type": "ai_embedding", "index": 0}]]
    },
    "On form submission": {
      "main": [[{"node": "Extract from File", "type": "main", "index": 0}]]
    },
    "Extract from File": {
      "main": [[{"node": "PGVector Store Insert", "type": "main", "index": 0}]]
    },
    "Embeddings Ollama1": {
      "ai_embedding": [[{"node": "PGVector Store Insert", "type": "ai_embedding", "index": 0}]]
    },
    "Default Data Loader": {
      "ai_document": [[{"node": "PGVector Store Insert", "type": "ai_document", "index": 0}]]
    }
  },
  "settings": {
    "executionOrder": "v1"
  }
}
WORKFLOW_JSON
}
|
||
|
|
|
||
|
|
# List all workflows in n8n
|
||
|
|
# Usage: n8n_api_list_workflows <ctid>
|
||
|
|
# Returns: JSON array of workflows on stdout
|
||
|
|
# Fetch all workflows via GET /rest/workflows using the saved session cookie.
# Prints the raw JSON response on stdout; always returns 0 (callers inspect
# the payload themselves).
n8n_api_list_workflows() {
  local ctid="$1"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Listing workflows..."

  local response
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X GET '${api_url}/rest/workflows' \
    -H 'Content-Type: application/json' \
    -b /tmp/n8n_cookies.txt 2>&1" || echo "")

  echo "$response"
  return 0
}
|
||
|
|
|
||
|
|
# Get workflow by name
|
||
|
|
# Usage: n8n_api_get_workflow_by_name <ctid> <workflow_name>
|
||
|
|
# Returns: Workflow ID on stdout, or empty if not found
|
||
|
|
# Look up a workflow ID by its exact name. Prints the ID on stdout and
# returns 0; prints nothing and returns 1 when not found.
# NOTE(review): the lookup parses JSON with a PCRE grep that matches an "id"
# near the "name" in either order — fragile against field reordering or
# names containing regex metacharacters; jq/python3 would be more robust.
n8n_api_get_workflow_by_name() {
  local ctid="$1"
  local workflow_name="$2"

  info "n8n API: Searching for workflow '${workflow_name}'..."

  local workflows
  workflows=$(n8n_api_list_workflows "$ctid")

  # Extract workflow ID by name using grep and awk
  local workflow_id
  workflow_id=$(echo "$workflows" | grep -oP "\"name\":\s*\"${workflow_name}\".*?\"id\":\s*\"\K[^\"]+|\"id\":\s*\"\K[^\"]+(?=.*?\"name\":\s*\"${workflow_name}\")" | head -1 || echo "")

  if [[ -n "$workflow_id" ]]; then
    info "n8n API: Found workflow '${workflow_name}' with ID: ${workflow_id}"
    echo "$workflow_id"
    return 0
  else
    info "n8n API: Workflow '${workflow_name}' not found"
    echo ""
    return 1
  fi
}
|
||
|
|
|
||
|
|
# Delete workflow by ID
|
||
|
|
# Usage: n8n_api_delete_workflow <ctid> <workflow_id>
|
||
|
|
# Returns: 0 on success, 1 on failure
|
||
|
|
# Delete a workflow via DELETE /rest/workflows/<id> using the saved session
# cookie. Returns 0 on (apparent) success, 1 otherwise.
n8n_api_delete_workflow() {
  local ctid="$1"
  local workflow_id="$2"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Deleting workflow ${workflow_id}..."

  local response
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X DELETE '${api_url}/rest/workflows/${workflow_id}' \
    -H 'Content-Type: application/json' \
    -b /tmp/n8n_cookies.txt 2>&1" || echo "")

  # Check if deletion was successful (empty response or success message)
  if [[ -z "$response" ]] || [[ "$response" == *"\"success\":true"* ]] || [[ "$response" == "{}" ]]; then
    info "n8n API: Workflow ${workflow_id} deleted successfully"
    return 0
  else
    warn "n8n API: Failed to delete workflow: ${response}"
    return 1
  fi
}
|
||
|
|
|
||
|
|
# Get credential by name and type
|
||
|
|
# Usage: n8n_api_get_credential_by_name <ctid> <credential_name> <credential_type>
|
||
|
|
# Returns: Credential ID on stdout, or empty if not found
|
||
|
|
# Look up a credential ID by exact name and type via GET /rest/credentials.
# Prints the ID on stdout and returns 0; prints nothing and returns 1 when
# not found.
# NOTE(review): like n8n_api_get_workflow_by_name, this parses JSON with a
# PCRE grep that assumes a name/type/id field order — fragile; jq/python3
# would be more robust.
n8n_api_get_credential_by_name() {
  local ctid="$1"
  local cred_name="$2"
  local cred_type="$3"
  local api_url="http://127.0.0.1:5678"

  info "n8n API: Searching for credential '${cred_name}' (type: ${cred_type})..."

  local response
  response=$(pct exec "$ctid" -- bash -c "curl -sS -X GET '${api_url}/rest/credentials' \
    -H 'Content-Type: application/json' \
    -b /tmp/n8n_cookies.txt 2>&1" || echo "")

  # Extract credential ID by name and type
  local cred_id
  cred_id=$(echo "$response" | grep -oP "\"name\":\s*\"${cred_name}\".*?\"type\":\s*\"${cred_type}\".*?\"id\":\s*\"\K[^\"]+|\"id\":\s*\"\K[^\"]+(?=.*?\"name\":\s*\"${cred_name}\".*?\"type\":\s*\"${cred_type}\")" | head -1 || echo "")

  if [[ -n "$cred_id" ]]; then
    info "n8n API: Found credential '${cred_name}' with ID: ${cred_id}"
    echo "$cred_id"
    return 0
  else
    info "n8n API: Credential '${cred_name}' not found"
    echo ""
    return 1
  fi
}
|
||
|
|
|
||
|
|
# Cleanup n8n API session
|
||
|
|
# Usage: n8n_api_cleanup <ctid>
|
||
|
|
# Best-effort removal of the n8n session cookie and temporary workflow file
# from a container; never fails (errors are swallowed deliberately).
n8n_api_cleanup() {
  local ctid="$1"
  pct exec "$ctid" -- bash -c "rm -f /tmp/n8n_cookies.txt /tmp/rag_workflow.json" 2>/dev/null || true
}
|
||
|
|
|
||
|
|
# Full n8n setup: Create credentials, import workflow from file, activate
|
||
|
|
# This version runs all API calls in a single shell session to preserve cookies
|
||
|
|
# Usage: n8n_setup_rag_workflow <ctid> <email> <password> <pg_host> <pg_port> <pg_db> <pg_user> <pg_pass> <ollama_url> <ollama_model> <embedding_model> <workflow_file>
|
||
|
|
# Returns: 0 on success, 1 on failure
|
||
|
|
n8n_setup_rag_workflow() {
  local ctid="$1"
  local email="$2"
  local password="$3"
  local pg_host="$4"
  local pg_port="$5"
  local pg_db="$6"
  local pg_user="$7"
  local pg_pass="$8"
  local ollama_url="$9"
  local ollama_model="${10:-ministral-3:3b}"
  local embedding_model="${11:-nomic-embed-text:latest}"
  local workflow_file="${12:-}"

  info "n8n Setup: Starting RAG workflow setup..."

  # Validate workflow file: empty -> built-in template; missing -> hard error.
  if [[ -z "$workflow_file" ]]; then
    warn "n8n Setup: No workflow file specified, using built-in template"
    workflow_file=""
  elif [[ ! -f "$workflow_file" ]]; then
    warn "n8n Setup: Workflow file not found: $workflow_file"
    return 1
  else
    info "n8n Setup: Using workflow file: $workflow_file"
  fi

  # Wait for n8n to be ready (up to ~60s). NOTE(review): on timeout the loop
  # falls through without error and the setup script below will fail instead.
  info "n8n Setup: Waiting for n8n to be ready..."
  local i
  for i in $(seq 1 30); do
    if pct exec "$ctid" -- bash -c "curl -sS -o /dev/null -w '%{http_code}' http://127.0.0.1:5678/rest/settings 2>/dev/null" | grep -q "200"; then
      info "n8n Setup: n8n is ready"
      break
    fi
    sleep 2
  done

  # Escape special characters in passwords for JSON
  local escaped_password
  escaped_password=$(echo "$password" | sed 's/\\/\\\\/g; s/"/\\"/g')
  local escaped_pg_pass
  escaped_pg_pass=$(echo "$pg_pass" | sed 's/\\/\\\\/g; s/"/\\"/g')

  # Read workflow from file or generate from template. The template is
  # generated with placeholder credential IDs; the in-container python step
  # below replaces them with the real IDs once the credentials exist.
  info "n8n Setup: Preparing workflow JSON..."
  local workflow_json
  if [[ -n "$workflow_file" && -f "$workflow_file" ]]; then
    workflow_json=$(cat "$workflow_file")
    info "n8n Setup: Loaded workflow from file: $workflow_file"
  else
    workflow_json=$(n8n_generate_rag_workflow_json "POSTGRES_CRED_ID" "PostgreSQL (local)" "OLLAMA_CRED_ID" "Ollama (local)" "$ollama_model" "$embedding_model")
    info "n8n Setup: Generated workflow from built-in template"
  fi

  # Push workflow JSON to container (will be processed by setup script)
  pct_push_text "$ctid" "/tmp/rag_workflow_template.json" "$workflow_json"

  # Create a setup script that runs all API calls in one in-container session
  # so the login cookie persists across requests. The SETUP_SCRIPT heredoc is
  # UNquoted: host-side ${...} values are baked in now, while \$ / \\ survive
  # as live shell syntax for the container.
  info "n8n Setup: Creating setup script..."
  pct_push_text "$ctid" "/tmp/n8n_setup.sh" "$(cat <<SETUP_SCRIPT
#!/bin/bash
set -e

API_URL="http://127.0.0.1:5678"
COOKIE_FILE="/tmp/n8n_cookies.txt"
EMAIL="${email}"
PASSWORD="${escaped_password}"

# Login (n8n API uses emailOrLdapLoginId instead of email)
echo "Logging in..."
LOGIN_RESP=\$(curl -sS -X POST "\${API_URL}/rest/login" \\
  -H "Content-Type: application/json" \\
  -c "\${COOKIE_FILE}" \\
  -d "{\"emailOrLdapLoginId\":\"\${EMAIL}\",\"password\":\"\${PASSWORD}\"}")

if echo "\$LOGIN_RESP" | grep -q '"code":\|"status":"error"'; then
  echo "LOGIN_FAILED: \$LOGIN_RESP"
  exit 1
fi
echo "Login successful"

# Create PostgreSQL credential
echo "Creating PostgreSQL credential..."
PG_CRED_RESP=\$(curl -sS -X POST "\${API_URL}/rest/credentials" \\
  -H "Content-Type: application/json" \\
  -b "\${COOKIE_FILE}" \\
  -d '{
    "name": "PostgreSQL (local)",
    "type": "postgres",
    "data": {
      "host": "${pg_host}",
      "port": ${pg_port},
      "database": "${pg_db}",
      "user": "${pg_user}",
      "password": "${escaped_pg_pass}",
      "ssl": "disable"
    }
  }')

PG_CRED_ID=\$(echo "\$PG_CRED_RESP" | grep -oP '"id"\s*:\s*"\K[^"]+' | head -1)
if [ -z "\$PG_CRED_ID" ]; then
  echo "POSTGRES_CRED_FAILED: \$PG_CRED_RESP"
  exit 1
fi
echo "PostgreSQL credential created: \$PG_CRED_ID"

# Create Ollama credential
echo "Creating Ollama credential..."
OLLAMA_CRED_RESP=\$(curl -sS -X POST "\${API_URL}/rest/credentials" \\
  -H "Content-Type: application/json" \\
  -b "\${COOKIE_FILE}" \\
  -d '{
    "name": "Ollama (local)",
    "type": "ollamaApi",
    "data": {
      "baseUrl": "${ollama_url}"
    }
  }')

OLLAMA_CRED_ID=\$(echo "\$OLLAMA_CRED_RESP" | grep -oP '"id"\s*:\s*"\K[^"]+' | head -1)
if [ -z "\$OLLAMA_CRED_ID" ]; then
  echo "OLLAMA_CRED_FAILED: \$OLLAMA_CRED_RESP"
  exit 1
fi
echo "Ollama credential created: \$OLLAMA_CRED_ID"

# Process workflow JSON: replace credential IDs and clean up
echo "Preparing workflow JSON..."

# Create a Python script to process the workflow JSON
cat > /tmp/process_workflow.py << 'PYTHON_SCRIPT'
import json
import sys

# Read the workflow template
with open('/tmp/rag_workflow_template.json', 'r') as f:
    workflow = json.load(f)

# Get credential IDs from environment/arguments
pg_cred_id = sys.argv[1]
ollama_cred_id = sys.argv[2]

# Remove fields that should not be in the import
fields_to_remove = ['id', 'versionId', 'meta', 'tags', 'active', 'pinData']
for field in fields_to_remove:
    workflow.pop(field, None)

# Process all nodes and replace credential IDs
for node in workflow.get('nodes', []):
    credentials = node.get('credentials', {})

    # Replace PostgreSQL credential
    if 'postgres' in credentials:
        credentials['postgres'] = {
            'id': pg_cred_id,
            'name': 'PostgreSQL (local)'
        }

    # Replace Ollama credential
    if 'ollamaApi' in credentials:
        credentials['ollamaApi'] = {
            'id': ollama_cred_id,
            'name': 'Ollama (local)'
        }

# Write the processed workflow
with open('/tmp/rag_workflow.json', 'w') as f:
    json.dump(workflow, f)

print("Workflow processed successfully")
PYTHON_SCRIPT

# Run the Python script to process the workflow
python3 /tmp/process_workflow.py "\$PG_CRED_ID" "\$OLLAMA_CRED_ID"

# Import workflow
echo "Importing workflow..."
WORKFLOW_RESP=\$(curl -sS -X POST "\${API_URL}/rest/workflows" \\
  -H "Content-Type: application/json" \\
  -b "\${COOKIE_FILE}" \\
  -d @/tmp/rag_workflow.json)

WORKFLOW_ID=\$(echo "\$WORKFLOW_RESP" | grep -oP '"id"\s*:\s*"\K[^"]+' | head -1)
VERSION_ID=\$(echo "\$WORKFLOW_RESP" | grep -oP '"versionId"\s*:\s*"\K[^"]+' | head -1)
if [ -z "\$WORKFLOW_ID" ]; then
  echo "WORKFLOW_IMPORT_FAILED: \$WORKFLOW_RESP"
  exit 1
fi
echo "Workflow imported: \$WORKFLOW_ID (version: \$VERSION_ID)"

# Activate workflow using POST /activate endpoint with versionId
echo "Activating workflow..."
ACTIVATE_RESP=\$(curl -sS -X POST "\${API_URL}/rest/workflows/\${WORKFLOW_ID}/activate" \\
  -H "Content-Type: application/json" \\
  -b "\${COOKIE_FILE}" \\
  -d "{\"versionId\":\"\${VERSION_ID}\"}")

if echo "\$ACTIVATE_RESP" | grep -q '"active":true\|"active": true'; then
  echo "Workflow activated successfully"
else
  echo "WORKFLOW_ACTIVATION_WARNING: \$ACTIVATE_RESP"
fi

# Cleanup
rm -f "\${COOKIE_FILE}" /tmp/rag_workflow_template.json /tmp/rag_workflow.json

# Output results
echo "SUCCESS"
echo "POSTGRES_CRED_ID=\$PG_CRED_ID"
echo "OLLAMA_CRED_ID=\$OLLAMA_CRED_ID"
echo "WORKFLOW_ID=\$WORKFLOW_ID"
SETUP_SCRIPT
)"

  # Make script executable and run it
  pct exec "$ctid" -- chmod +x /tmp/n8n_setup.sh

  info "n8n Setup: Running setup script in container..."
  local setup_output
  setup_output=$(pct exec "$ctid" -- /tmp/n8n_setup.sh 2>&1 || echo "SCRIPT_FAILED")

  # Log the output line by line (subshell from the pipe is fine: log only).
  info "n8n Setup: Script output:"
  echo "$setup_output" | while read -r line; do
    info "  $line"
  done

  # Check for success: the setup script prints a bare SUCCESS line followed
  # by KEY=VALUE result lines that are parsed out below.
  if echo "$setup_output" | grep -q "^SUCCESS$"; then
    # Extract IDs from output
    local pg_cred_id ollama_cred_id workflow_id
    pg_cred_id=$(echo "$setup_output" | grep "^POSTGRES_CRED_ID=" | cut -d= -f2)
    ollama_cred_id=$(echo "$setup_output" | grep "^OLLAMA_CRED_ID=" | cut -d= -f2)
    workflow_id=$(echo "$setup_output" | grep "^WORKFLOW_ID=" | cut -d= -f2)

    info "n8n Setup: RAG workflow setup completed successfully"
    info "n8n Setup: Workflow ID: ${workflow_id}"
    info "n8n Setup: PostgreSQL Credential ID: ${pg_cred_id}"
    info "n8n Setup: Ollama Credential ID: ${ollama_cred_id}"

    # Cleanup setup script
    pct exec "$ctid" -- rm -f /tmp/n8n_setup.sh 2>/dev/null || true

    return 0
  else
    warn "n8n Setup: Setup script failed"
    # Cleanup all temporary in-container artifacts on failure.
    pct exec "$ctid" -- rm -f /tmp/n8n_setup.sh /tmp/n8n_cookies.txt /tmp/rag_workflow_template.json /tmp/rag_workflow.json 2>/dev/null || true
    return 1
  fi
}
|