Compare commits
9 commits
5bbd9487c9
...
de318d3511
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
de318d3511 | ||
|
|
61ce9d59bc | ||
|
|
69d0f8347c | ||
|
|
1c30f4c2f4 | ||
| 88e49b9e9d | |||
| 37c3009a62 | |||
|
|
cf99bdc51e | ||
|
|
9ee704ea9c | ||
|
|
8943af4484 |
11 changed files with 1010 additions and 32 deletions
|
|
@ -2,7 +2,7 @@ FROM debian:bookworm-slim
|
|||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
bash curl git jq tmux python3 python3-pip openssh-client ca-certificates age shellcheck procps gosu \
|
||||
&& pip3 install --break-system-packages networkx \
|
||||
&& pip3 install --break-system-packages networkx tomlkit \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Pre-built binaries (copied from docker/agents/bin/)
|
||||
|
|
|
|||
|
|
@ -123,6 +123,11 @@ _generate_local_model_services() {
|
|||
context: .
|
||||
dockerfile: docker/agents/Dockerfile
|
||||
image: disinto/agents:\${DISINTO_IMAGE_TAG:-latest}
|
||||
# Rebuild on every up (#887): without this, \`docker compose up -d --force-recreate\`
|
||||
# reuses the cached image and silently keeps running stale docker/agents/ code
|
||||
# even after the repo is updated. \`pull_policy: build\` makes Compose rebuild
|
||||
# the image on every up; BuildKit layer cache makes unchanged rebuilds fast.
|
||||
pull_policy: build
|
||||
container_name: disinto-agents-${service_name}
|
||||
restart: unless-stopped
|
||||
security_opt:
|
||||
|
|
@ -443,6 +448,9 @@ COMPOSEEOF
|
|||
build:
|
||||
context: .
|
||||
dockerfile: docker/agents/Dockerfile
|
||||
# Rebuild on every up (#887): makes docker/agents/ source changes reach this
|
||||
# container without a manual \`docker compose build\`. Cache-fast when clean.
|
||||
pull_policy: build
|
||||
container_name: disinto-agents-llama
|
||||
restart: unless-stopped
|
||||
security_opt:
|
||||
|
|
@ -493,6 +501,9 @@ COMPOSEEOF
|
|||
build:
|
||||
context: .
|
||||
dockerfile: docker/agents/Dockerfile
|
||||
# Rebuild on every up (#887): makes docker/agents/ source changes reach this
|
||||
# container without a manual \`docker compose build\`. Cache-fast when clean.
|
||||
pull_policy: build
|
||||
container_name: disinto-agents-llama-all
|
||||
restart: unless-stopped
|
||||
profiles: ["agents-llama-all"]
|
||||
|
|
|
|||
|
|
@ -535,7 +535,10 @@ EOF
|
|||
local interval="${poll_interval:-60}"
|
||||
echo " Writing [agents.${section_name}] to ${toml_file}..."
|
||||
python3 -c '
|
||||
import sys, re, pathlib
|
||||
import sys
|
||||
import tomlkit
|
||||
import re
|
||||
import pathlib
|
||||
|
||||
toml_path = sys.argv[1]
|
||||
section_name = sys.argv[2]
|
||||
|
|
@ -548,38 +551,39 @@ poll_interval = sys.argv[7]
|
|||
p = pathlib.Path(toml_path)
|
||||
text = p.read_text()
|
||||
|
||||
# Build the new section
|
||||
new_section = f"""
|
||||
[agents.{section_name}]
|
||||
base_url = "{base_url}"
|
||||
model = "{model}"
|
||||
api_key = "sk-no-key-required"
|
||||
roles = ["{role}"]
|
||||
forge_user = "{agent_name}"
|
||||
compact_pct = 60
|
||||
poll_interval = {poll_interval}
|
||||
"""
|
||||
# Step 1: Remove any commented-out [agents.X] blocks (they cause parse issues)
|
||||
# Match # [agents.section_name] followed by lines that are not section headers
|
||||
# Use negative lookahead to stop before a real section header (# [ or [)
|
||||
commented_pattern = rf"(?:^|\n)# \[agents\.{re.escape(section_name)}\](?:\n(?!# \[|\[)[^\n]*)*"
|
||||
text = re.sub(commented_pattern, "", text, flags=re.DOTALL)
|
||||
|
||||
# Check if section already exists and replace it
|
||||
pattern = rf"\[agents\.{re.escape(section_name)}\][^\[]*"
|
||||
if re.search(pattern, text):
|
||||
text = re.sub(pattern, new_section.strip() + "\n", text)
|
||||
else:
|
||||
# Remove commented-out example [agents.llama] block if present
|
||||
text = re.sub(
|
||||
r"\n# Local-model agents \(optional\).*?(?=\n# \[mirrors\]|\n\[mirrors\]|\Z)",
|
||||
"",
|
||||
text,
|
||||
flags=re.DOTALL,
|
||||
)
|
||||
# Append before [mirrors] if it exists, otherwise at end
|
||||
mirrors_match = re.search(r"\n(# )?\[mirrors\]", text)
|
||||
if mirrors_match:
|
||||
text = text[:mirrors_match.start()] + "\n" + new_section + text[mirrors_match.start():]
|
||||
else:
|
||||
text = text.rstrip() + "\n" + new_section
|
||||
# Step 2: Parse TOML with tomlkit (preserves comments and formatting)
|
||||
try:
|
||||
doc = tomlkit.parse(text)
|
||||
except Exception as e:
|
||||
print(f"Error: Invalid TOML in {toml_path}: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
p.write_text(text)
|
||||
# Step 3: Ensure agents table exists
|
||||
if "agents" not in doc:
|
||||
doc.add("agents", tomlkit.table())
|
||||
|
||||
# Step 4: Update the specific agent section
|
||||
doc["agents"][section_name] = {
|
||||
"base_url": base_url,
|
||||
"model": model,
|
||||
"api_key": "sk-no-key-required",
|
||||
"roles": [role],
|
||||
"forge_user": agent_name,
|
||||
"compact_pct": 60,
|
||||
"poll_interval": int(poll_interval),
|
||||
}
|
||||
|
||||
# Step 5: Serialize back to TOML (preserves comments)
|
||||
output = tomlkit.dumps(doc)
|
||||
|
||||
# Step 6: Write back
|
||||
p.write_text(output)
|
||||
' "$toml_file" "$section_name" "$local_model" "$model" "$agent_name" "$role" "$interval"
|
||||
|
||||
echo " Agent config written to TOML"
|
||||
|
|
|
|||
20
tests/fixtures/.env.vault.enc
vendored
Normal file
20
tests/fixtures/.env.vault.enc
vendored
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"data": "ENC[AES256_GCM,data:SsLdIiZDVkkV1bbKeHQ8A1K/4vgXQFJF8y4J87GGwsGa13lNnPoqRaCmPAtuQr3hR5JNqARUhFp8aEusyzwi/lZLU2Reo32YjE26ObVOHf47EGmmHM/tEgh6u0fa1AmFtuqJVQzhG2eZhJmZJFgdRH36+bhdBwI1mkORmsRNtBPHHjtQJDbsgN47maDhuP4B7WvB4/TdnJ++GNMlMbyrbr0pEf2uqqOVO55cJ3I4v/Jcg8tq0clPuW1k5dNFsmFSMbbjE5N25EGrc7oEH5GVZ6I6L6p0Fzyj/MV4hKacboFHiZmBZgRQ,iv:UnXTa800G3PW4IaErkPBIZKjPHAU3LmiCvAqDdhFE/Q=,tag:kdWpHQ8fEPGFlmfVoTMskA==,type:str]",
|
||||
"sops": {
|
||||
"kms": null,
|
||||
"gcp_kms": null,
|
||||
"azure_kv": null,
|
||||
"hc_vault": null,
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1ztkm8yvdk42m2cn4dj2v9ptfknq8wpgr3ry9dpmtmlaeas6p7yyqft0ldg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBrVUlmaEdTNU1iMGg4dFA4\nNFNOSzlBc1NER1U3SHlwVFU1dm5tR1kyeldzCjZ2NXI3MjR4Zkd1RVBKNzJoQ1Jm\nQWpEZU5VMkNuYnhTTVJNc0RpTXlIZE0KLS0tIDFpQ2tlN0MzL1NuS2hKZU5JTG9B\nNWxXMzE0bGZpQkVBTnhWRXZBQlhrc1EKG76DM98cCuqIwUkbfJWHhJdYV77O9r8Q\nRJrq6jH59Gcp9W8iHg/aeShPHZFEOLg1q9azV9Wt9FjJn3SxyTmgvA==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2026-04-16T15:43:34Z",
|
||||
"mac": "ENC[AES256_GCM,data:jVRr2TxSZH2paD2doIX4JwCqo5wiPYfTowpj189w1IVlS0EY/XQoqxiWbunX/LmIDdQlTPCSe/vTp1EJA0cx6vzN2xENrwsfzCP6dwDGaRlZhH3V0CVhtfHIkMTEKWrAUx5hFtiwJPkLYUUYi5aRWRxhZQM1eBeRvuGKdlwvmHA=,iv:H57a61AfVNLrlg+4aMl9mwXI5O38O5ZoRhpxe2PTTkY=,tag:2jwH1855VNYlKseTE/XtTg==,type:str]",
|
||||
"pgp": null,
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.9.4"
|
||||
}
|
||||
}
|
||||
5
tests/fixtures/age-keys.txt
vendored
Normal file
5
tests/fixtures/age-keys.txt
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Test age key for sops
|
||||
# Generated: 2026-04-16
|
||||
# Public key: age1ztkm8yvdk42m2cn4dj2v9ptfknq8wpgr3ry9dpmtmlaeas6p7yyqft0ldg
|
||||
|
||||
AGE-SECRET-KEY-1PCQQX37MTZDGES76H9TGQN5XTG2ZZX2UUR87KR784NZ4MQ3NJ56S0Z23SF
|
||||
40
tests/fixtures/dot-env-complete
vendored
Normal file
40
tests/fixtures/dot-env-complete
vendored
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
# Test fixture .env file for vault-import.sh
|
||||
# This file contains all expected keys for the import test
|
||||
|
||||
# Generic forge creds
|
||||
FORGE_TOKEN=generic-forge-token
|
||||
FORGE_PASS=generic-forge-pass
|
||||
FORGE_ADMIN_TOKEN=generic-admin-token
|
||||
|
||||
# Bot tokens (review, dev, gardener, architect, planner, predictor, supervisor, vault)
|
||||
FORGE_REVIEW_TOKEN=review-token
|
||||
FORGE_REVIEW_PASS=review-pass
|
||||
FORGE_DEV_TOKEN=dev-token
|
||||
FORGE_DEV_PASS=dev-pass
|
||||
FORGE_GARDENER_TOKEN=gardener-token
|
||||
FORGE_GARDENER_PASS=gardener-pass
|
||||
FORGE_ARCHITECT_TOKEN=architect-token
|
||||
FORGE_ARCHITECT_PASS=architect-pass
|
||||
FORGE_PLANNER_TOKEN=planner-token
|
||||
FORGE_PLANNER_PASS=planner-pass
|
||||
FORGE_PREDICTOR_TOKEN=predictor-token
|
||||
FORGE_PREDICTOR_PASS=predictor-pass
|
||||
FORGE_SUPERVISOR_TOKEN=supervisor-token
|
||||
FORGE_SUPERVISOR_PASS=supervisor-pass
|
||||
FORGE_VAULT_TOKEN=vault-token
|
||||
FORGE_VAULT_PASS=vault-pass
|
||||
|
||||
# Llama bot
|
||||
FORGE_TOKEN_LLAMA=llama-token
|
||||
FORGE_PASS_LLAMA=llama-pass
|
||||
|
||||
# Woodpecker secrets
|
||||
WOODPECKER_AGENT_SECRET=wp-agent-secret
|
||||
WP_FORGEJO_CLIENT=wp-forgejo-client
|
||||
WP_FORGEJO_SECRET=wp-forgejo-secret
|
||||
WOODPECKER_TOKEN=wp-token
|
||||
|
||||
# Chat secrets
|
||||
FORWARD_AUTH_SECRET=forward-auth-secret
|
||||
CHAT_OAUTH_CLIENT_ID=chat-client-id
|
||||
CHAT_OAUTH_CLIENT_SECRET=chat-client-secret
|
||||
27
tests/fixtures/dot-env-incomplete
vendored
Normal file
27
tests/fixtures/dot-env-incomplete
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# Test fixture .env file with missing required keys
|
||||
# This file is intentionally missing some keys to test error handling
|
||||
|
||||
# Generic forge creds - missing FORGE_ADMIN_TOKEN
|
||||
FORGE_TOKEN=generic-forge-token
|
||||
FORGE_PASS=generic-forge-pass
|
||||
|
||||
# Bot tokens - missing several roles
|
||||
FORGE_REVIEW_TOKEN=review-token
|
||||
FORGE_REVIEW_PASS=review-pass
|
||||
FORGE_DEV_TOKEN=dev-token
|
||||
FORGE_DEV_PASS=dev-pass
|
||||
|
||||
# Llama bot - missing (only token, no pass)
|
||||
FORGE_TOKEN_LLAMA=llama-token
|
||||
# FORGE_PASS_LLAMA=llama-pass
|
||||
|
||||
# Woodpecker secrets - missing some
|
||||
WOODPECKER_AGENT_SECRET=wp-agent-secret
|
||||
# WP_FORGEJO_CLIENT=wp-forgejo-client
|
||||
# WP_FORGEJO_SECRET=wp-forgejo-secret
|
||||
# WOODPECKER_TOKEN=wp-token
|
||||
|
||||
# Chat secrets - missing some
|
||||
FORWARD_AUTH_SECRET=forward-auth-secret
|
||||
# CHAT_OAUTH_CLIENT_ID=chat-client-id
|
||||
# CHAT_OAUTH_CLIENT_SECRET=chat-client-secret
|
||||
6
tests/fixtures/dot-env.vault.plain
vendored
Normal file
6
tests/fixtures/dot-env.vault.plain
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
GITHUB_TOKEN=github-test-token-abc123
|
||||
CODEBERG_TOKEN=codeberg-test-token-def456
|
||||
CLAWHUB_TOKEN=clawhub-test-token-ghi789
|
||||
DEPLOY_KEY=deploy-key-test-jkl012
|
||||
NPM_TOKEN=npm-test-token-mno345
|
||||
DOCKER_HUB_TOKEN=dockerhub-test-token-pqr678
|
||||
|
|
@ -97,6 +97,38 @@ EOF
|
|||
[[ "$output" == *'dockerfile: docker/agents/Dockerfile'* ]]
|
||||
}
|
||||
|
||||
@test "local-model agent service emits pull_policy: build so docker compose up rebuilds on source change (#887)" {
|
||||
# Without pull_policy: build, `docker compose up -d --force-recreate` reuses
|
||||
# the cached `disinto/agents:latest` image and silently runs stale
|
||||
# docker/agents/entrypoint.sh even after the repo is updated. `pull_policy:
|
||||
# build` forces a rebuild on every up; BuildKit layer cache makes unchanged
|
||||
# rebuilds near-instant. The alternative was requiring every operator to
|
||||
# remember `--build` on every invocation, which was the bug that prompted
|
||||
# #887 (2h of debugging a fix that was merged but never reached the container).
|
||||
cat > "${FACTORY_ROOT}/projects/test.toml" <<'EOF'
|
||||
name = "test"
|
||||
repo = "test-owner/test-repo"
|
||||
forge_url = "http://localhost:3000"
|
||||
|
||||
[agents.dev-qwen2]
|
||||
base_url = "http://10.10.10.1:8081"
|
||||
model = "qwen"
|
||||
api_key = "sk-no-key-required"
|
||||
roles = ["dev"]
|
||||
forge_user = "dev-qwen2"
|
||||
EOF
|
||||
|
||||
run bash -c "
|
||||
set -euo pipefail
|
||||
source '${ROOT}/lib/generators.sh'
|
||||
_generate_local_model_services '${FACTORY_ROOT}/docker-compose.yml'
|
||||
cat '${FACTORY_ROOT}/docker-compose.yml'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" == *'pull_policy: build'* ]]
|
||||
}
|
||||
|
||||
@test "local-model agent service keys FORGE_BOT_USER to forge_user even when it differs from service name (#849)" {
|
||||
# Exercise the case the issue calls out: two agents in the same factory
|
||||
# whose service names are identical (`[agents.llama]`) but whose
|
||||
|
|
|
|||
313
tests/vault-import.bats
Normal file
313
tests/vault-import.bats
Normal file
|
|
@ -0,0 +1,313 @@
|
|||
#!/usr/bin/env bats
|
||||
# tests/vault-import.bats — Tests for tools/vault-import.sh
|
||||
#
|
||||
# Runs against a dev-mode Vault server (single binary, no LXC needed).
|
||||
# CI launches vault server -dev inline before running these tests.
|
||||
|
||||
VAULT_BIN="${VAULT_BIN:-vault}"
|
||||
IMPORT_SCRIPT="${BATS_TEST_DIRNAME}/../tools/vault-import.sh"
|
||||
FIXTURES_DIR="${BATS_TEST_DIRNAME}/fixtures"
|
||||
|
||||
setup_file() {
  # Launch a throwaway dev-mode Vault on a random high port so parallel
  # test runs on the same host don't collide.
  export VAULT_DEV_PORT
  VAULT_DEV_PORT="$(shuf -i 18200-18299 -n 1)"
  export VAULT_ADDR="http://127.0.0.1:${VAULT_DEV_PORT}"

  "$VAULT_BIN" server -dev \
    -dev-listen-address="127.0.0.1:${VAULT_DEV_PORT}" \
    -dev-root-token-id="test-root-token" \
    -dev-no-store-token \
    &>"${BATS_FILE_TMPDIR}/vault.log" &
  export VAULT_PID=$!

  export VAULT_TOKEN="test-root-token"

  # Poll the health endpoint until Vault answers (20 x 0.5s = up to 10s).
  local tries=0
  until curl -sf "${VAULT_ADDR}/v1/sys/health" >/dev/null 2>&1; do
    sleep 0.5
    tries=$((tries + 1))
    if [ "$tries" -ge 20 ]; then
      echo "Vault failed to start. Log:" >&2
      cat "${BATS_FILE_TMPDIR}/vault.log" >&2
      return 1
    fi
  done
}
|
||||
|
||||
teardown_file() {
  # Nothing to clean up if setup_file never launched Vault.
  [ -n "${VAULT_PID:-}" ] || return 0
  # Stop the dev server; tolerate it already being gone.
  kill "$VAULT_PID" 2>/dev/null || true
  # Reap the child so no zombie lingers after the suite.
  wait "$VAULT_PID" 2>/dev/null || true
}
|
||||
|
||||
setup() {
  # Every test gets the hvault helper functions and the Vault connection env.
  # shellcheck source=/dev/null
  . "${BATS_TEST_DIRNAME}/../lib/hvault.sh"
  export VAULT_ADDR VAULT_TOKEN
}
|
||||
|
||||
# --- Security checks ---
|
||||
|
||||
@test "refuses to run if VAULT_ADDR is not localhost" {
|
||||
export VAULT_ADDR="http://prod-vault.example.com:8200"
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "Security check failed"
|
||||
}
|
||||
|
||||
@test "refuses if age key file permissions are not 0400" {
|
||||
# Create a temp file with wrong permissions
|
||||
local bad_key="${BATS_TEST_TMPDIR}/bad-ages.txt"
|
||||
echo "AGE-SECRET-KEY-1TEST" > "$bad_key"
|
||||
chmod 644 "$bad_key"
|
||||
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$bad_key"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "permissions"
|
||||
}
|
||||
|
||||
# --- Dry-run mode ─────────────────────────────────────────────────────────────
|
||||
|
||||
@test "--dry-run prints plan without writing to Vault" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt" \
|
||||
--dry-run
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "DRY-RUN"
|
||||
echo "$output" | grep -q "Import plan"
|
||||
echo "$output" | grep -q "Planned operations"
|
||||
|
||||
# Verify nothing was written to Vault
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/bots/review"
|
||||
[ "$status" -ne 0 ]
|
||||
}
|
||||
|
||||
# --- Complete fixture import ─────────────────────────────────────────────────
|
||||
|
||||
@test "imports all keys from complete fixture" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check bots/review
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/bots/review"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "review-token"
|
||||
echo "$output" | grep -q "review-pass"
|
||||
|
||||
# Check bots/dev-qwen
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/bots/dev-qwen"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "llama-token"
|
||||
echo "$output" | grep -q "llama-pass"
|
||||
|
||||
# Check forge
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/shared/forge"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "generic-forge-token"
|
||||
echo "$output" | grep -q "generic-forge-pass"
|
||||
echo "$output" | grep -q "generic-admin-token"
|
||||
|
||||
# Check woodpecker
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/shared/woodpecker"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "wp-agent-secret"
|
||||
echo "$output" | grep -q "wp-forgejo-client"
|
||||
echo "$output" | grep -q "wp-forgejo-secret"
|
||||
echo "$output" | grep -q "wp-token"
|
||||
|
||||
# Check chat
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/shared/chat"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "forward-auth-secret"
|
||||
echo "$output" | grep -q "chat-client-id"
|
||||
echo "$output" | grep -q "chat-client-secret"
|
||||
|
||||
# Check runner tokens from sops
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/runner/GITHUB_TOKEN"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | jq -e '.data.data.value == "github-test-token-abc123"'
|
||||
}
|
||||
|
||||
# --- Idempotency ──────────────────────────────────────────────────────────────
|
||||
|
||||
@test "re-run with unchanged fixtures reports all unchanged" {
|
||||
# First run
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Second run - should report unchanged
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check that all keys report unchanged
|
||||
echo "$output" | grep -q "unchanged"
|
||||
# Count unchanged occurrences (should be many)
|
||||
local unchanged_count
|
||||
unchanged_count=$(echo "$output" | grep -c "unchanged" || true)
|
||||
[ "$unchanged_count" -gt 10 ]
|
||||
}
|
||||
|
||||
@test "re-run with modified value reports only that key as updated" {
|
||||
# Create a modified fixture
|
||||
local modified_env="${BATS_TEST_TMPDIR}/dot-env-modified"
|
||||
cp "$FIXTURES_DIR/dot-env-complete" "$modified_env"
|
||||
|
||||
# Modify one value
|
||||
sed -i 's/llama-token/MODIFIED-LLAMA-TOKEN/' "$modified_env"
|
||||
|
||||
# Run with modified fixture
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$modified_env" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check that dev-qwen token was updated
|
||||
echo "$output" | grep -q "dev-qwen.*updated"
|
||||
|
||||
# Verify the new value was written (path is disinto/bots/dev-qwen, key is token)
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/bots/dev-qwen"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | jq -e '.data.data.token == "MODIFIED-LLAMA-TOKEN"'
|
||||
}
|
||||
|
||||
# --- Incomplete fixture ───────────────────────────────────────────────────────
|
||||
|
||||
@test "handles incomplete fixture gracefully" {
|
||||
# The incomplete fixture is missing some keys, but that should be OK
|
||||
# - it should only import what exists
|
||||
# - it should warn about missing pairs
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-incomplete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should have imported what was available
|
||||
echo "$output" | grep -q "review"
|
||||
|
||||
# Should complete successfully even with incomplete fixture
|
||||
# The script handles missing pairs gracefully with warnings to stderr
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
# --- Security: no secrets in output ───────────────────────────────────────────
|
||||
|
||||
@test "never logs secret values in stdout" {
|
||||
# Run the import
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check that no actual secret values appear in output
|
||||
# (only key names and status messages)
|
||||
local secret_patterns=(
|
||||
"generic-forge-token"
|
||||
"generic-forge-pass"
|
||||
"generic-admin-token"
|
||||
"review-token"
|
||||
"review-pass"
|
||||
"llama-token"
|
||||
"llama-pass"
|
||||
"wp-agent-secret"
|
||||
"forward-auth-secret"
|
||||
"github-test-token"
|
||||
"codeberg-test-token"
|
||||
"clawhub-test-token"
|
||||
"deploy-key-test"
|
||||
"npm-test-token"
|
||||
"dockerhub-test-token"
|
||||
)
|
||||
|
||||
for pattern in "${secret_patterns[@]}"; do
|
||||
if echo "$output" | grep -q "$pattern"; then
|
||||
echo "FAIL: Found secret pattern '$pattern' in output" >&2
|
||||
echo "Output was:" >&2
|
||||
echo "$output" >&2
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# --- Error handling ───────────────────────────────────────────────────────────
|
||||
|
||||
@test "fails with missing --env argument" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "Missing required argument"
|
||||
}
|
||||
|
||||
@test "fails with missing --sops argument" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "Missing required argument"
|
||||
}
|
||||
|
||||
@test "fails with missing --age-key argument" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "Missing required argument"
|
||||
}
|
||||
|
||||
@test "fails with non-existent env file" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "/nonexistent/.env" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "not found"
|
||||
}
|
||||
|
||||
@test "fails with non-existent sops file" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "/nonexistent/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "not found"
|
||||
}
|
||||
|
||||
@test "fails with non-existent age key file" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "/nonexistent/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "not found"
|
||||
}
|
||||
520
tools/vault-import.sh
Executable file
520
tools/vault-import.sh
Executable file
|
|
@ -0,0 +1,520 @@
|
|||
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# vault-import.sh — Import .env and sops-decrypted secrets into Vault KV
|
||||
#
|
||||
# Reads existing .env and sops-encrypted .env.vault.enc from the old docker stack
|
||||
# and writes them to Vault KV paths matching the S2.1 policy layout.
|
||||
#
|
||||
# Usage:
|
||||
# vault-import.sh \
|
||||
# --env /path/to/.env \
|
||||
# --sops /path/to/.env.vault.enc \
|
||||
# --age-key /path/to/age/keys.txt
|
||||
#
|
||||
# Mapping:
|
||||
# From .env:
|
||||
# - FORGE_{ROLE}_TOKEN + FORGE_{ROLE}_PASS → kv/disinto/bots/<role>/{token,password}
|
||||
# (roles: review, dev, gardener, architect, planner, predictor, supervisor, vault)
|
||||
# - FORGE_TOKEN_LLAMA + FORGE_PASS_LLAMA → kv/disinto/bots/dev-qwen/{token,password}
|
||||
# - FORGE_TOKEN + FORGE_PASS → kv/disinto/shared/forge/{token,password}
|
||||
# - FORGE_ADMIN_TOKEN → kv/disinto/shared/forge/admin_token
|
||||
# - WOODPECKER_* → kv/disinto/shared/woodpecker/<lowercase_key>
|
||||
# - FORWARD_AUTH_SECRET, CHAT_OAUTH_* → kv/disinto/shared/chat/<lowercase_key>
|
||||
# From sops-decrypted .env.vault.enc:
|
||||
# - GITHUB_TOKEN, CODEBERG_TOKEN, CLAWHUB_TOKEN, DEPLOY_KEY, NPM_TOKEN, DOCKER_HUB_TOKEN
|
||||
# → kv/disinto/runner/<NAME>/value
|
||||
#
|
||||
# Security:
|
||||
# - Refuses to run if VAULT_ADDR is not localhost
|
||||
# - Writes to KV v2, not v1
|
||||
# - Validates sops age key file is mode 0400 before sourcing
|
||||
# - Never logs secret values — only key names
|
||||
#
|
||||
# Idempotency:
|
||||
# - Reports unchanged/updated/created per key via hvault_kv_get
|
||||
# - --dry-run prints the full import plan without writing
|
||||
# =============================================================================
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ── Internal helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
# _log — write an informational message to stdout.
# Arguments: $* - message text
# Outputs:   "[vault-import] <message>" followed by a newline on stdout.
_log() {
  local msg="$*"
  printf '[vault-import] %s\n' "$msg"
}
|
||||
|
||||
# _err — write an error message to stderr (stdout stays clean for data).
# Arguments: $* - message text
# Outputs:   "[vault-import] ERROR: <message>" followed by a newline on stderr.
_err() {
  local msg="$*"
  printf '[vault-import] ERROR: %s\n' "$msg" >&2
}
|
||||
|
||||
# _die — report a fatal error via _err, then terminate the script.
# Arguments: $* - message text (forwarded to _err)
# Returns:   never; exits the shell with status 1.
_die() {
  _err "$@"
  exit 1
}
|
||||
|
||||
# _check_vault_addr — safety gate: only a localhost Vault may be targeted.
# Globals:   VAULT_ADDR (read; may be unset)
# Returns:   0 when VAULT_ADDR is http(s)://localhost or 127.0.0.1 with an
#            optional port; otherwise dies.
_check_vault_addr() {
  local addr="${VAULT_ADDR:-}"
  # Guard clause: a matching address is fine, bail out early.
  [[ "$addr" =~ ^https?://(localhost|127\.0\.0\.1)(:[0-9]+)?$ ]] && return 0
  _die "Security check failed: VAULT_ADDR must be localhost for safety. Got: $addr"
}
|
||||
|
||||
# _validate_age_key_perms — refuse to use an age key file unless it is
# exactly mode 0400 (owner read-only).
# Arguments: $1 - path to the age key file
# Returns:   0 when mode is 400; dies on any other mode or stat failure.
_validate_age_key_perms() {
  local keyfile="$1"
  local perms
  # Portability fix: `stat -c` is GNU coreutils only; BSD/macOS stat uses
  # `-f` with `%Lp` for the octal permission bits. Try GNU first, then BSD.
  perms="$(stat -c '%a' "$keyfile" 2>/dev/null)" \
    || perms="$(stat -f '%Lp' "$keyfile" 2>/dev/null)" \
    || _die "Cannot stat age key file: $keyfile"
  if [ "$perms" != "400" ]; then
    _die "Age key file permissions are $perms, expected 400. Refusing to proceed for security."
  fi
}
|
||||
|
||||
# _decrypt_sops — decrypt a sops-encrypted file and emit its KEY=VALUE lines.
# Arguments: $1 - sops-encrypted file, $2 - age key file
# Outputs:   the decrypted KEY=VALUE lines on stdout (no trailing newline)
# Returns:   dies if decryption fails or no KEY=VALUE line survives the
#            filter (the whole pipeline fails under `set -o pipefail`).
_decrypt_sops() {
  local sops_file="$1"
  local age_key="$2"
  local output
  # sops may emit YAML/dotenv content; keep only plain KEY=VALUE lines.
  # Bug fix: the original piped through sed 's/^\([^=]*\)=\(.*\)$/\1=\2/',
  # an identity substitution that did nothing — removed.
  # sops stderr is suppressed deliberately so key material never leaks into
  # logs; the _die message is the operator-facing diagnostic.
  output="$(SOPS_AGE_KEY_FILE="$age_key" sops -d "$sops_file" 2>/dev/null | \
    grep -E '^[A-Z_][A-Z0-9_]*=')" || \
    _die "Failed to decrypt sops file: $sops_file. Check age key and file integrity."
  printf '%s' "$output"
}
|
||||
|
||||
# _load_env_file — load KEY=value assignments from an env file into the
# current shell. Comment lines, blank lines and anything not shaped like an
# assignment are silently skipped.
# Arguments: $1 - path to the env file
# NOTE(review): the filtered lines are still *sourced*, so a value such as
# KEY=$(cmd) executes cmd — the env file must come from a trusted operator.
_load_env_file() {
  local env_file="$1"
  local filtered
  filtered="$(mktemp)"
  # Keep only lines that look like shell variable assignments.
  grep -E '^[A-Za-z_][A-Za-z0-9_]*=' "$env_file" 2>/dev/null > "$filtered" || true
  # shellcheck source=/dev/null
  . "$filtered"
  rm -f "$filtered"
}
|
||||
|
||||
# _kv_path_exists — check whether a KV path exists.
# Arguments: $1 - KV path (as accepted by hvault_kv_get)
# Returns:   0 if hvault_kv_get can read the path, 1 otherwise.
# Fix: the original, on failure, issued a SECOND hvault_kv_get just to grep
# the error message — but then returned 1 on every failure branch anyway.
# That classification was dead code and doubled the network round-trip, so
# it is removed; behavior (0 on success, 1 on any failure) is unchanged.
_kv_path_exists() {
  local path="$1"
  if hvault_kv_get "$path" >/dev/null 2>&1; then
    return 0
  fi
  return 1
}
|
||||
|
||||
# _kv_get_value — read one key's value from a KV path.
# Arguments: $1 - KV path, $2 - key name
# Outputs/Returns: whatever hvault_kv_get emits/returns for that pair.
_kv_get_value() {
  local kv_path="$1" kv_key="$2"
  hvault_kv_get "$kv_path" "$kv_key"
}
|
||||
|
||||
# _kv_put_secret — write one or more key=value pairs to a KV v2 path.
# Arguments: $1 - KV path (appended to secret/data/); $2.. - KEY=VALUE pairs
# Globals:   VAULT_ADDR, VAULT_TOKEN (read)
# Returns:   0 on HTTP 2xx; 1 on any failure (diagnostic via _err).
# Secret values travel only in the JSON body, never in log output.
_kv_put_secret() {
  local path="$1"
  shift
  local kv_pairs=("$@")
  local payload='{"data":{}}'

  # Fold every pair into the payload.
  # BUG FIX: the original used `jq -n`, which IGNORES stdin — each iteration
  # restarted from null, so only the LAST pair ever reached Vault. Without
  # -n, jq reads the partial payload from stdin and previously-added keys
  # are preserved.
  local kv k v
  for kv in "${kv_pairs[@]}"; do
    k="${kv%%=*}"
    v="${kv#*=}"
    payload="$(printf '%s' "$payload" | jq --arg k "$k" --arg v "$v" '.data[$k] = $v')"
  done

  # KV v2 write via the HTTP API; the response body is discarded and only
  # the HTTP status code is inspected.
  local tmpfile http_code
  tmpfile="$(mktemp)"
  http_code="$(curl -s -w '%{http_code}' \
    -H "X-Vault-Token: ${VAULT_TOKEN}" \
    -H "Content-Type: application/json" \
    -X POST \
    -d "$payload" \
    -o "$tmpfile" \
    "${VAULT_ADDR}/v1/secret/data/${path}")" || {
    rm -f "$tmpfile"
    _err "Failed to write to Vault at secret/data/${path}: curl error"
    return 1
  }
  rm -f "$tmpfile"

  # Check HTTP status — 2xx is success
  case "$http_code" in
    2[0-9][0-9])
      return 0
      ;;
    404)
      _err "KV path not found: secret/data/${path}"
      return 1
      ;;
    403)
      _err "Permission denied writing to secret/data/${path}"
      return 1
      ;;
    *)
      _err "Failed to write to Vault at secret/data/${path}: HTTP $http_code"
      return 1
      ;;
  esac
}
|
||||
|
||||
# _format_status — render one per-key status line fragment.
# Arguments: $1 - status word, $2 - KV path, $3 - key name
# Outputs:   " <status>: <path>/<key> (<label>)" on stdout, no newline;
#            label echoes the status for unchanged/updated/created and is
#            "unknown" for anything else.
_format_status() {
  local status="$1" path="$2" key="$3"
  local label
  case "$status" in
    unchanged|updated|created) label="$status" ;;
    *) label="unknown" ;;
  esac
  printf ' %s: %s/%s (%s)' "$status" "$path" "$key" "$label"
}
|
||||
|
||||
# ── Mapping definitions ──────────────────────────────────────────────────────

# Bot roles: each role R maps FORGE_{R}_TOKEN / FORGE_{R}_PASS from the .env
# file to kv/disinto/bots/<role>/{token,password} (per the header mapping).
declare -a BOT_ROLES=(review dev gardener architect planner predictor supervisor vault)

# Runner token names expected in the sops-decrypted file; each maps to
# kv/disinto/runner/<NAME>/value (per the header mapping).
declare -a RUNNER_TOKENS=(GITHUB_TOKEN CODEBERG_TOKEN CLAWHUB_TOKEN DEPLOY_KEY NPM_TOKEN DOCKER_HUB_TOKEN)
||||
|
||||
# ── Main logic ────────────────────────────────────────────────────────────────

# main — parse CLI args, collect secret-import operations from the .env file
# and the sops-decrypted vault file, then idempotently write them to Vault KV.
# Exits non-zero via _die on bad usage / missing files, or with 1 on a failed
# Vault write. Relies on helpers defined earlier in this file and in
# lib/hvault.sh (_die, _log, _err, _load_env_file, _decrypt_sops,
# _check_vault_addr, _validate_age_key_perms, _kv_path_exists, _kv_get_value,
# _kv_put_secret, _format_status).
main() {
  local env_file=""
  local sops_file=""
  local age_key_file=""
  local dry_run=false

  # Parse arguments
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --env)
        env_file="$2"
        shift 2
        ;;
      --sops)
        sops_file="$2"
        shift 2
        ;;
      --age-key)
        age_key_file="$2"
        shift 2
        ;;
      --dry-run)
        dry_run=true
        shift
        ;;
      --help|-h)
        cat <<'EOF'
vault-import.sh — Import .env and sops-decrypted secrets into Vault KV

Usage:
  vault-import.sh \
    --env /path/to/.env \
    --sops /path/to/.env.vault.enc \
    --age-key /path/to/age/keys.txt \
    [--dry-run]

Options:
  --env       Path to .env file (required)
  --sops      Path to sops-encrypted .env.vault.enc file (required)
  --age-key   Path to age keys file (required)
  --dry-run   Print import plan without writing to Vault (optional)
  --help      Show this help message

Mapping:
  From .env:
    - FORGE_{ROLE}_TOKEN + FORGE_{ROLE}_PASS → kv/disinto/bots/<role>/{token,password}
    - FORGE_TOKEN_LLAMA + FORGE_PASS_LLAMA → kv/disinto/bots/dev-qwen/{token,password}
    - FORGE_TOKEN + FORGE_PASS → kv/disinto/shared/forge/{token,password}
    - FORGE_ADMIN_TOKEN → kv/disinto/shared/forge/admin_token
    - WOODPECKER_* → kv/disinto/shared/woodpecker/<lowercase_key>
    - FORWARD_AUTH_SECRET, CHAT_OAUTH_* → kv/disinto/shared/chat/<lowercase_key>

  From sops-decrypted .env.vault.enc:
    - GITHUB_TOKEN, CODEBERG_TOKEN, CLAWHUB_TOKEN, DEPLOY_KEY, NPM_TOKEN, DOCKER_HUB_TOKEN
      → kv/disinto/runner/<NAME>/value

Examples:
  vault-import.sh --env .env --sops .env.vault.enc --age-key age-keys.txt
  vault-import.sh --env .env --sops .env.vault.enc --age-key age-keys.txt --dry-run
EOF
        exit 0
        ;;
      *)
        _die "Unknown option: $1. Use --help for usage."
        ;;
    esac
  done

  # Validate required arguments
  if [ -z "$env_file" ]; then
    _die "Missing required argument: --env"
  fi
  if [ -z "$sops_file" ]; then
    _die "Missing required argument: --sops"
  fi
  if [ -z "$age_key_file" ]; then
    _die "Missing required argument: --age-key"
  fi

  # Validate files exist
  if [ ! -f "$env_file" ]; then
    _die "Environment file not found: $env_file"
  fi
  if [ ! -f "$sops_file" ]; then
    _die "Sops file not found: $sops_file"
  fi
  if [ ! -f "$age_key_file" ]; then
    _die "Age key file not found: $age_key_file"
  fi

  # Security check: age key permissions
  _validate_age_key_perms "$age_key_file"

  # Security check: VAULT_ADDR must be localhost
  _check_vault_addr

  # Source the Vault helpers
  source "$(dirname "$0")/../lib/hvault.sh"

  # Load .env file
  _log "Loading environment from: $env_file"
  _load_env_file "$env_file"

  # Decrypt sops file
  _log "Decrypting sops file: $sops_file"
  local sops_env
  sops_env="$(_decrypt_sops "$sops_file" "$age_key_file")"
  # shellcheck disable=SC2086
  eval "$sops_env"

  # Collect all import operations
  declare -a operations=()

  # --- From .env ---

  # Bots: FORGE_{ROLE}_TOKEN + FORGE_{ROLE}_PASS
  for role in "${BOT_ROLES[@]}"; do
    local token_var="FORGE_${role^^}_TOKEN"
    local pass_var="FORGE_${role^^}_PASS"
    local token_val="${!token_var:-}"
    local pass_val="${!pass_var:-}"

    if [ -n "$token_val" ] && [ -n "$pass_val" ]; then
      operations+=("bots|$role|token|$env_file|$token_var")
      operations+=("bots|$role|pass|$env_file|$pass_var")
    elif [ -n "$token_val" ] || [ -n "$pass_val" ]; then
      _err "Warning: $role bot has token but no password (or vice versa), skipping"
    fi
  done

  # Llama bot: FORGE_TOKEN_LLAMA + FORGE_PASS_LLAMA
  local llama_token="${FORGE_TOKEN_LLAMA:-}"
  local llama_pass="${FORGE_PASS_LLAMA:-}"
  if [ -n "$llama_token" ] && [ -n "$llama_pass" ]; then
    operations+=("bots|dev-qwen|token|$env_file|FORGE_TOKEN_LLAMA")
    operations+=("bots|dev-qwen|pass|$env_file|FORGE_PASS_LLAMA")
  elif [ -n "$llama_token" ] || [ -n "$llama_pass" ]; then
    _err "Warning: dev-qwen bot has token but no password (or vice versa), skipping"
  fi

  # Generic forge creds: FORGE_TOKEN + FORGE_PASS
  local forge_token="${FORGE_TOKEN:-}"
  local forge_pass="${FORGE_PASS:-}"
  if [ -n "$forge_token" ] && [ -n "$forge_pass" ]; then
    operations+=("forge|token|$env_file|FORGE_TOKEN")
    operations+=("forge|pass|$env_file|FORGE_PASS")
  fi

  # Forge admin token: FORGE_ADMIN_TOKEN
  local forge_admin_token="${FORGE_ADMIN_TOKEN:-}"
  if [ -n "$forge_admin_token" ]; then
    operations+=("forge|admin_token|$env_file|FORGE_ADMIN_TOKEN")
  fi

  # Woodpecker secrets: WOODPECKER_*
  # Only read from the .env file, not shell environment
  local woodpecker_keys=()
  while IFS='=' read -r key _; do
    if [[ "$key" =~ ^WOODPECKER_ ]] || [[ "$key" =~ ^WP_[A-Z_]+$ ]]; then
      woodpecker_keys+=("$key")
    fi
  done < <(grep -E '^[A-Z_][A-Z0-9_]*=' "$env_file" 2>/dev/null || true)
  for key in "${woodpecker_keys[@]}"; do
    # ':-' default keeps this safe under `set -u` if the variable was never
    # exported (consistent with the chat loop below).
    local val="${!key:-}"
    if [ -n "$val" ]; then
      local lowercase_key="${key,,}"
      operations+=("woodpecker|$lowercase_key|$env_file|$key")
    fi
  done

  # Chat secrets: FORWARD_AUTH_SECRET, CHAT_OAUTH_CLIENT_ID, CHAT_OAUTH_CLIENT_SECRET
  for key in FORWARD_AUTH_SECRET CHAT_OAUTH_CLIENT_ID CHAT_OAUTH_CLIENT_SECRET; do
    local val="${!key:-}"
    if [ -n "$val" ]; then
      local lowercase_key="${key,,}"
      operations+=("chat|$lowercase_key|$env_file|$key")
    fi
  done

  # --- From sops-decrypted .env.vault.enc ---

  # Runner tokens
  for token_name in "${RUNNER_TOKENS[@]}"; do
    local token_val="${!token_name:-}"
    if [ -n "$token_val" ]; then
      operations+=("runner|$token_name|$sops_file|$token_name")
    fi
  done

  # If dry-run, just print the plan
  if $dry_run; then
    _log "=== DRY-RUN: Import plan ==="
    _log "Environment file: $env_file"
    _log "Sops file: $sops_file"
    _log "Age key: $age_key_file"
    _log ""
    _log "Planned operations:"
    for op in "${operations[@]}"; do
      _log "  $op"
    done
    _log ""
    _log "Total: ${#operations[@]} operations"
    exit 0
  fi

  # --- Actual import with idempotency check ---

  _log "=== Starting Vault import ==="
  _log "Environment file: $env_file"
  _log "Sops file: $sops_file"
  _log "Age key: $age_key_file"
  _log ""

  local created=0
  local updated=0
  local unchanged=0

  for op in "${operations[@]}"; do
    # Parse operation: category|role|subkey|file|envvar (5 fields for bots)
    # or category|field|file|envvar (4 fields for forge/woodpecker/chat/runner)
    local category field subkey file envvar=""
    local field_count
    # Count fields with parameter expansion instead of forking awk per op:
    # strip every non-'|' char, then count the remaining separators.
    local seps="${op//[^|]/}"
    field_count=$(( ${#seps} + 1 ))

    if [ "$field_count" -eq 5 ]; then
      # 5 fields: category|role|subkey|file|envvar
      IFS='|' read -r category field subkey file envvar <<< "$op"
    else
      # 4 fields: category|field|file|envvar
      IFS='|' read -r category field file envvar <<< "$op"
      subkey="$field" # For 4-field ops, field is the vault key
    fi

    # Determine Vault path and key based on category
    local vault_path=""
    local vault_key="$subkey"
    local source_value=""

    if [ "$file" = "$env_file" ]; then
      # Source from environment file (envvar contains the variable name)
      source_value="${!envvar:-}"
    else
      # Source from sops-decrypted env (envvar contains the variable name)
      source_value="$(printf '%s' "$sops_env" | grep "^${envvar}=" | sed "s/^${envvar}=//" || true)"
    fi

    case "$category" in
      bots)
        vault_path="disinto/bots/${field}"
        vault_key="$subkey"
        ;;
      forge)
        vault_path="disinto/shared/forge"
        vault_key="$field"
        ;;
      woodpecker)
        vault_path="disinto/shared/woodpecker"
        vault_key="$field"
        ;;
      chat)
        vault_path="disinto/shared/chat"
        vault_key="$field"
        ;;
      runner)
        vault_path="disinto/runner/${field}"
        vault_key="value"
        ;;
      *)
        _err "Unknown category: $category"
        continue
        ;;
    esac

    # Check if path exists
    local status="created"
    if _kv_path_exists "$vault_path"; then
      # Check if key exists in path.
      # NB: the stderr redirect must live INSIDE the command substitution —
      # `var="$(cmd)" 2>/dev/null` does not silence cmd's stderr.
      local existing_value
      if existing_value="$(_kv_get_value "$vault_path" "$vault_key" 2>/dev/null)"; then
        if [ "$existing_value" = "$source_value" ]; then
          status="unchanged"
        else
          status="updated"
        fi
      else
        status="created"
      fi
    fi

    # Output status
    _format_status "$status" "$vault_path" "$vault_key"
    printf '\n'

    # Write if not unchanged
    if [ "$status" != "unchanged" ]; then
      if ! _kv_put_secret "$vault_path" "${vault_key}=${source_value}"; then
        _err "Failed to write $vault_key to $vault_path"
        exit 1
      fi
      case "$status" in
        updated) ((updated++)) || true ;;
        created) ((created++)) || true ;;
      esac
    else
      ((unchanged++)) || true
    fi
  done

  _log ""
  _log "=== Import complete ==="
  _log "Created: $created"
  _log "Updated: $updated"
  _log "Unchanged: $unchanged"
}

main "$@"
|
||||
Loading…
Add table
Add a link
Reference in a new issue