Merge pull request 'fix: [nomad-step-2] S2.2 — tools/vault-import.sh (import .env + sops into KV) (#880)' (#889) from fix/issue-880 into main
All checks were successful
ci/woodpecker/push/ci Pipeline was successful
All checks were successful
ci/woodpecker/push/ci Pipeline was successful
This commit is contained in:
commit
8b287ebf9a
7 changed files with 978 additions and 0 deletions
20
tests/fixtures/.env.vault.enc
vendored
Normal file
20
tests/fixtures/.env.vault.enc
vendored
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"data": "ENC[AES256_GCM,data:SsLdIiZDVkkV1bbKeHQ8A1K/4vgXQFJF8y4J87GGwsGa13lNnPoqRaCmPAtuQr3hR5JNqARUhFp8aEusyzwi/lZLU2Reo32YjE26ObVOHf47EGmmHM/tEgh6u0fa1AmFtuqJVQzhG2eZhJmZJFgdRH36+bhdBwI1mkORmsRNtBPHHjtQJDbsgN47maDhuP4B7WvB4/TdnJ++GNMlMbyrbr0pEf2uqqOVO55cJ3I4v/Jcg8tq0clPuW1k5dNFsmFSMbbjE5N25EGrc7oEH5GVZ6I6L6p0Fzyj/MV4hKacboFHiZmBZgRQ,iv:UnXTa800G3PW4IaErkPBIZKjPHAU3LmiCvAqDdhFE/Q=,tag:kdWpHQ8fEPGFlmfVoTMskA==,type:str]",
|
||||
"sops": {
|
||||
"kms": null,
|
||||
"gcp_kms": null,
|
||||
"azure_kv": null,
|
||||
"hc_vault": null,
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1ztkm8yvdk42m2cn4dj2v9ptfknq8wpgr3ry9dpmtmlaeas6p7yyqft0ldg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBrVUlmaEdTNU1iMGg4dFA4\nNFNOSzlBc1NER1U3SHlwVFU1dm5tR1kyeldzCjZ2NXI3MjR4Zkd1RVBKNzJoQ1Jm\nQWpEZU5VMkNuYnhTTVJNc0RpTXlIZE0KLS0tIDFpQ2tlN0MzL1NuS2hKZU5JTG9B\nNWxXMzE0bGZpQkVBTnhWRXZBQlhrc1EKG76DM98cCuqIwUkbfJWHhJdYV77O9r8Q\nRJrq6jH59Gcp9W8iHg/aeShPHZFEOLg1q9azV9Wt9FjJn3SxyTmgvA==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2026-04-16T15:43:34Z",
|
||||
"mac": "ENC[AES256_GCM,data:jVRr2TxSZH2paD2doIX4JwCqo5wiPYfTowpj189w1IVlS0EY/XQoqxiWbunX/LmIDdQlTPCSe/vTp1EJA0cx6vzN2xENrwsfzCP6dwDGaRlZhH3V0CVhtfHIkMTEKWrAUx5hFtiwJPkLYUUYi5aRWRxhZQM1eBeRvuGKdlwvmHA=,iv:H57a61AfVNLrlg+4aMl9mwXI5O38O5ZoRhpxe2PTTkY=,tag:2jwH1855VNYlKseTE/XtTg==,type:str]",
|
||||
"pgp": null,
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.9.4"
|
||||
}
|
||||
}
|
||||
5
tests/fixtures/age-keys.txt
vendored
Normal file
5
tests/fixtures/age-keys.txt
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Test age key for sops
|
||||
# Generated: 2026-04-16
|
||||
# Public key: age1ztkm8yvdk42m2cn4dj2v9ptfknq8wpgr3ry9dpmtmlaeas6p7yyqft0ldg
|
||||
|
||||
AGE-SECRET-KEY-1PCQQX37MTZDGES76H9TGQN5XTG2ZZX2UUR87KR784NZ4MQ3NJ56S0Z23SF
|
||||
40
tests/fixtures/dot-env-complete
vendored
Normal file
40
tests/fixtures/dot-env-complete
vendored
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
# Test fixture .env file for vault-import.sh
|
||||
# This file contains all expected keys for the import test
|
||||
|
||||
# Generic forge creds
|
||||
FORGE_TOKEN=generic-forge-token
|
||||
FORGE_PASS=generic-forge-pass
|
||||
FORGE_ADMIN_TOKEN=generic-admin-token
|
||||
|
||||
# Bot tokens (review, dev, gardener, architect, planner, predictor, supervisor, vault)
|
||||
FORGE_REVIEW_TOKEN=review-token
|
||||
FORGE_REVIEW_PASS=review-pass
|
||||
FORGE_DEV_TOKEN=dev-token
|
||||
FORGE_DEV_PASS=dev-pass
|
||||
FORGE_GARDENER_TOKEN=gardener-token
|
||||
FORGE_GARDENER_PASS=gardener-pass
|
||||
FORGE_ARCHITECT_TOKEN=architect-token
|
||||
FORGE_ARCHITECT_PASS=architect-pass
|
||||
FORGE_PLANNER_TOKEN=planner-token
|
||||
FORGE_PLANNER_PASS=planner-pass
|
||||
FORGE_PREDICTOR_TOKEN=predictor-token
|
||||
FORGE_PREDICTOR_PASS=predictor-pass
|
||||
FORGE_SUPERVISOR_TOKEN=supervisor-token
|
||||
FORGE_SUPERVISOR_PASS=supervisor-pass
|
||||
FORGE_VAULT_TOKEN=vault-token
|
||||
FORGE_VAULT_PASS=vault-pass
|
||||
|
||||
# Llama bot
|
||||
FORGE_TOKEN_LLAMA=llama-token
|
||||
FORGE_PASS_LLAMA=llama-pass
|
||||
|
||||
# Woodpecker secrets
|
||||
WOODPECKER_AGENT_SECRET=wp-agent-secret
|
||||
WP_FORGEJO_CLIENT=wp-forgejo-client
|
||||
WP_FORGEJO_SECRET=wp-forgejo-secret
|
||||
WOODPECKER_TOKEN=wp-token
|
||||
|
||||
# Chat secrets
|
||||
FORWARD_AUTH_SECRET=forward-auth-secret
|
||||
CHAT_OAUTH_CLIENT_ID=chat-client-id
|
||||
CHAT_OAUTH_CLIENT_SECRET=chat-client-secret
|
||||
27
tests/fixtures/dot-env-incomplete
vendored
Normal file
27
tests/fixtures/dot-env-incomplete
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# Test fixture .env file with missing required keys
|
||||
# This file is intentionally missing some keys to test error handling
|
||||
|
||||
# Generic forge creds - missing FORGE_ADMIN_TOKEN
|
||||
FORGE_TOKEN=generic-forge-token
|
||||
FORGE_PASS=generic-forge-pass
|
||||
|
||||
# Bot tokens - missing several roles
|
||||
FORGE_REVIEW_TOKEN=review-token
|
||||
FORGE_REVIEW_PASS=review-pass
|
||||
FORGE_DEV_TOKEN=dev-token
|
||||
FORGE_DEV_PASS=dev-pass
|
||||
|
||||
# Llama bot - missing (only token, no pass)
|
||||
FORGE_TOKEN_LLAMA=llama-token
|
||||
# FORGE_PASS_LLAMA=llama-pass
|
||||
|
||||
# Woodpecker secrets - missing some
|
||||
WOODPECKER_AGENT_SECRET=wp-agent-secret
|
||||
# WP_FORGEJO_CLIENT=wp-forgejo-client
|
||||
# WP_FORGEJO_SECRET=wp-forgejo-secret
|
||||
# WOODPECKER_TOKEN=wp-token
|
||||
|
||||
# Chat secrets - missing some
|
||||
FORWARD_AUTH_SECRET=forward-auth-secret
|
||||
# CHAT_OAUTH_CLIENT_ID=chat-client-id
|
||||
# CHAT_OAUTH_CLIENT_SECRET=chat-client-secret
|
||||
6
tests/fixtures/dot-env.vault.plain
vendored
Normal file
6
tests/fixtures/dot-env.vault.plain
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
GITHUB_TOKEN=github-test-token-abc123
|
||||
CODEBERG_TOKEN=codeberg-test-token-def456
|
||||
CLAWHUB_TOKEN=clawhub-test-token-ghi789
|
||||
DEPLOY_KEY=deploy-key-test-jkl012
|
||||
NPM_TOKEN=npm-test-token-mno345
|
||||
DOCKER_HUB_TOKEN=dockerhub-test-token-pqr678
|
||||
313
tests/vault-import.bats
Normal file
313
tests/vault-import.bats
Normal file
|
|
@ -0,0 +1,313 @@
|
|||
#!/usr/bin/env bats
|
||||
# tests/vault-import.bats — Tests for tools/vault-import.sh
|
||||
#
|
||||
# Runs against a dev-mode Vault server (single binary, no LXC needed).
|
||||
# CI launches vault server -dev inline before running these tests.
|
||||
|
||||
VAULT_BIN="${VAULT_BIN:-vault}"
|
||||
IMPORT_SCRIPT="${BATS_TEST_DIRNAME}/../tools/vault-import.sh"
|
||||
FIXTURES_DIR="${BATS_TEST_DIRNAME}/fixtures"
|
||||
|
||||
# setup_file — bats per-file setup: launch a throwaway dev-mode Vault server
# on a random localhost port and block until its health endpoint responds.
setup_file() {
  # Start dev-mode vault on a random port (18200-18299) to avoid clashing
  # with a real Vault instance or with parallel test runs.
  export VAULT_DEV_PORT
  VAULT_DEV_PORT="$(shuf -i 18200-18299 -n 1)"
  export VAULT_ADDR="http://127.0.0.1:${VAULT_DEV_PORT}"

  # Dev server with a fixed root token; server log goes to the bats file
  # tmpdir so a startup failure can be dumped to stderr below.
  "$VAULT_BIN" server -dev \
    -dev-listen-address="127.0.0.1:${VAULT_DEV_PORT}" \
    -dev-root-token-id="test-root-token" \
    -dev-no-store-token \
    &>"${BATS_FILE_TMPDIR}/vault.log" &
  export VAULT_PID=$!

  export VAULT_TOKEN="test-root-token"

  # Wait for vault to be ready (up to 10s = 20 polls of 0.5s each)
  local i=0
  while ! curl -sf "${VAULT_ADDR}/v1/sys/health" >/dev/null 2>&1; do
    sleep 0.5
    i=$((i + 1))
    if [ "$i" -ge 20 ]; then
      echo "Vault failed to start. Log:" >&2
      cat "${BATS_FILE_TMPDIR}/vault.log" >&2
      return 1
    fi
  done
}
|
||||
|
||||
# teardown_file — stop the dev Vault started in setup_file; tolerate a server
# that already exited (kill/wait failures are ignored).
teardown_file() {
  [ -n "${VAULT_PID:-}" ] || return 0
  kill "$VAULT_PID" 2>/dev/null || true
  wait "$VAULT_PID" 2>/dev/null || true
}
|
||||
|
||||
# setup — bats per-test setup: load the hvault helper library under test and
# re-export the Vault connection env so child processes spawned by `run`
# inherit it.
setup() {
  # Source the module under test for hvault functions
  source "${BATS_TEST_DIRNAME}/../lib/hvault.sh"
  export VAULT_ADDR VAULT_TOKEN
}
|
||||
|
||||
# --- Security checks ---
|
||||
|
||||
# Security: the import script must refuse to talk to anything that is not a
# localhost Vault, even when all other inputs are valid fixtures.
@test "refuses to run if VAULT_ADDR is not localhost" {
  export VAULT_ADDR="http://prod-vault.example.com:8200"
  run "$IMPORT_SCRIPT" \
    --env "$FIXTURES_DIR/dot-env-complete" \
    --sops "$FIXTURES_DIR/.env.vault.enc" \
    --age-key "$FIXTURES_DIR/age-keys.txt"
  [ "$status" -ne 0 ]
  # Error text emitted by the script's _check_vault_addr security gate
  echo "$output" | grep -q "Security check failed"
}
|
||||
|
||||
@test "refuses if age key file permissions are not 0400" {
|
||||
# Create a temp file with wrong permissions
|
||||
local bad_key="${BATS_TEST_TMPDIR}/bad-ages.txt"
|
||||
echo "AGE-SECRET-KEY-1TEST" > "$bad_key"
|
||||
chmod 644 "$bad_key"
|
||||
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$bad_key"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "permissions"
|
||||
}
|
||||
|
||||
# --- Dry-run mode ─────────────────────────────────────────────────────────────
|
||||
|
||||
# Dry-run must print the full plan (headers plus per-key operations) and must
# not create any KV entries — verified by reading a representative path back.
@test "--dry-run prints plan without writing to Vault" {
  run "$IMPORT_SCRIPT" \
    --env "$FIXTURES_DIR/dot-env-complete" \
    --sops "$FIXTURES_DIR/.env.vault.enc" \
    --age-key "$FIXTURES_DIR/age-keys.txt" \
    --dry-run
  [ "$status" -eq 0 ]
  echo "$output" | grep -q "DRY-RUN"
  echo "$output" | grep -q "Import plan"
  echo "$output" | grep -q "Planned operations"

  # Verify nothing was written to Vault: the read must fail (curl -f turns a
  # 404 into a non-zero exit)
  run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
    "${VAULT_ADDR}/v1/secret/data/disinto/bots/review"
  [ "$status" -ne 0 ]
}
|
||||
|
||||
# --- Complete fixture import ─────────────────────────────────────────────────
|
||||
|
||||
@test "imports all keys from complete fixture" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check bots/review
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/bots/review"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "review-token"
|
||||
echo "$output" | grep -q "review-pass"
|
||||
|
||||
# Check bots/dev-qwen
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/bots/dev-qwen"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "llama-token"
|
||||
echo "$output" | grep -q "llama-pass"
|
||||
|
||||
# Check forge
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/shared/forge"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "generic-forge-token"
|
||||
echo "$output" | grep -q "generic-forge-pass"
|
||||
echo "$output" | grep -q "generic-admin-token"
|
||||
|
||||
# Check woodpecker
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/shared/woodpecker"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "wp-agent-secret"
|
||||
echo "$output" | grep -q "wp-forgejo-client"
|
||||
echo "$output" | grep -q "wp-forgejo-secret"
|
||||
echo "$output" | grep -q "wp-token"
|
||||
|
||||
# Check chat
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/shared/chat"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | grep -q "forward-auth-secret"
|
||||
echo "$output" | grep -q "chat-client-id"
|
||||
echo "$output" | grep -q "chat-client-secret"
|
||||
|
||||
# Check runner tokens from sops
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/runner/GITHUB_TOKEN"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | jq -e '.data.data.value == "github-test-token-abc123"'
|
||||
}
|
||||
|
||||
# --- Idempotency ──────────────────────────────────────────────────────────────
|
||||
|
||||
@test "re-run with unchanged fixtures reports all unchanged" {
|
||||
# First run
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Second run - should report unchanged
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check that all keys report unchanged
|
||||
echo "$output" | grep -q "unchanged"
|
||||
# Count unchanged occurrences (should be many)
|
||||
local unchanged_count
|
||||
unchanged_count=$(echo "$output" | grep -c "unchanged" || true)
|
||||
[ "$unchanged_count" -gt 10 ]
|
||||
}
|
||||
|
||||
@test "re-run with modified value reports only that key as updated" {
|
||||
# Create a modified fixture
|
||||
local modified_env="${BATS_TEST_TMPDIR}/dot-env-modified"
|
||||
cp "$FIXTURES_DIR/dot-env-complete" "$modified_env"
|
||||
|
||||
# Modify one value
|
||||
sed -i 's/llama-token/MODIFIED-LLAMA-TOKEN/' "$modified_env"
|
||||
|
||||
# Run with modified fixture
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$modified_env" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check that dev-qwen token was updated
|
||||
echo "$output" | grep -q "dev-qwen.*updated"
|
||||
|
||||
# Verify the new value was written (path is disinto/bots/dev-qwen, key is token)
|
||||
run curl -sf -H "X-Vault-Token: ${VAULT_TOKEN}" \
|
||||
"${VAULT_ADDR}/v1/secret/data/disinto/bots/dev-qwen"
|
||||
[ "$status" -eq 0 ]
|
||||
echo "$output" | jq -e '.data.data.token == "MODIFIED-LLAMA-TOKEN"'
|
||||
}
|
||||
|
||||
# --- Incomplete fixture ───────────────────────────────────────────────────────
|
||||
|
||||
@test "handles incomplete fixture gracefully" {
|
||||
# The incomplete fixture is missing some keys, but that should be OK
|
||||
# - it should only import what exists
|
||||
# - it should warn about missing pairs
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-incomplete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should have imported what was available
|
||||
echo "$output" | grep -q "review"
|
||||
|
||||
# Should complete successfully even with incomplete fixture
|
||||
# The script handles missing pairs gracefully with warnings to stderr
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
# --- Security: no secrets in output ───────────────────────────────────────────
|
||||
|
||||
@test "never logs secret values in stdout" {
|
||||
# Run the import
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Check that no actual secret values appear in output
|
||||
# (only key names and status messages)
|
||||
local secret_patterns=(
|
||||
"generic-forge-token"
|
||||
"generic-forge-pass"
|
||||
"generic-admin-token"
|
||||
"review-token"
|
||||
"review-pass"
|
||||
"llama-token"
|
||||
"llama-pass"
|
||||
"wp-agent-secret"
|
||||
"forward-auth-secret"
|
||||
"github-test-token"
|
||||
"codeberg-test-token"
|
||||
"clawhub-test-token"
|
||||
"deploy-key-test"
|
||||
"npm-test-token"
|
||||
"dockerhub-test-token"
|
||||
)
|
||||
|
||||
for pattern in "${secret_patterns[@]}"; do
|
||||
if echo "$output" | grep -q "$pattern"; then
|
||||
echo "FAIL: Found secret pattern '$pattern' in output" >&2
|
||||
echo "Output was:" >&2
|
||||
echo "$output" >&2
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# --- Error handling ───────────────────────────────────────────────────────────
|
||||
|
||||
@test "fails with missing --env argument" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "Missing required argument"
|
||||
}
|
||||
|
||||
@test "fails with missing --sops argument" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "Missing required argument"
|
||||
}
|
||||
|
||||
@test "fails with missing --age-key argument" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "Missing required argument"
|
||||
}
|
||||
|
||||
@test "fails with non-existent env file" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "/nonexistent/.env" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "not found"
|
||||
}
|
||||
|
||||
@test "fails with non-existent sops file" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "/nonexistent/.env.vault.enc" \
|
||||
--age-key "$FIXTURES_DIR/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "not found"
|
||||
}
|
||||
|
||||
@test "fails with non-existent age key file" {
|
||||
run "$IMPORT_SCRIPT" \
|
||||
--env "$FIXTURES_DIR/dot-env-complete" \
|
||||
--sops "$FIXTURES_DIR/.env.vault.enc" \
|
||||
--age-key "/nonexistent/age-keys.txt"
|
||||
[ "$status" -ne 0 ]
|
||||
echo "$output" | grep -q "not found"
|
||||
}
|
||||
567
tools/vault-import.sh
Executable file
567
tools/vault-import.sh
Executable file
|
|
@ -0,0 +1,567 @@
|
|||
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# vault-import.sh — Import .env and sops-decrypted secrets into Vault KV
|
||||
#
|
||||
# Reads existing .env and sops-encrypted .env.vault.enc from the old docker stack
|
||||
# and writes them to Vault KV paths matching the S2.1 policy layout.
|
||||
#
|
||||
# Usage:
|
||||
# vault-import.sh \
|
||||
# --env /path/to/.env \
|
||||
# --sops /path/to/.env.vault.enc \
|
||||
# --age-key /path/to/age/keys.txt
|
||||
#
|
||||
# Mapping:
|
||||
# From .env:
|
||||
# - FORGE_{ROLE}_TOKEN + FORGE_{ROLE}_PASS → kv/disinto/bots/<role>/{token,password}
|
||||
# (roles: review, dev, gardener, architect, planner, predictor, supervisor, vault)
|
||||
# - FORGE_TOKEN_LLAMA + FORGE_PASS_LLAMA → kv/disinto/bots/dev-qwen/{token,password}
|
||||
# - FORGE_TOKEN + FORGE_PASS → kv/disinto/shared/forge/{token,password}
|
||||
# - FORGE_ADMIN_TOKEN → kv/disinto/shared/forge/admin_token
|
||||
# - WOODPECKER_* → kv/disinto/shared/woodpecker/<lowercase_key>
|
||||
# - FORWARD_AUTH_SECRET, CHAT_OAUTH_* → kv/disinto/shared/chat/<lowercase_key>
|
||||
# From sops-decrypted .env.vault.enc:
|
||||
# - GITHUB_TOKEN, CODEBERG_TOKEN, CLAWHUB_TOKEN, DEPLOY_KEY, NPM_TOKEN, DOCKER_HUB_TOKEN
|
||||
# → kv/disinto/runner/<NAME>/value
|
||||
#
|
||||
# Security:
|
||||
# - Refuses to run if VAULT_ADDR is not localhost
|
||||
# - Writes to KV v2, not v1
|
||||
# - Validates sops age key file is mode 0400 before sourcing
|
||||
# - Never logs secret values — only key names
|
||||
#
|
||||
# Idempotency:
|
||||
# - Reports unchanged/updated/created per key via hvault_kv_get
|
||||
# - --dry-run prints the full import plan without writing
|
||||
# =============================================================================
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ── Internal helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
# _log — emit a log message to stdout (never to stderr to avoid polluting diff)
|
||||
# _log — print a tagged informational message on stdout.
_log() {
  local message="$*"
  printf '[vault-import] %s\n' "$message"
}
|
||||
|
||||
# _err — emit an error message to stderr
|
||||
# _err — print a tagged ERROR message on stderr (stdout stays clean).
_err() {
  local message="$*"
  printf '[vault-import] ERROR: %s\n' "$message" >&2
}
|
||||
|
||||
# _die — log error and exit with status 1
|
||||
# _die — report a fatal error via _err and terminate the whole script with
# exit status 1. Never returns.
_die() {
  _err "$@"
  exit 1
}
|
||||
|
||||
# _check_vault_addr — ensure VAULT_ADDR is localhost (security check)
|
||||
# _check_vault_addr — security gate: refuse to operate against anything that
# is not a localhost Vault. Fail-closed: an empty or non-local VAULT_ADDR
# aborts the script via _die.
_check_vault_addr() {
  local vault_addr="${VAULT_ADDR:-}"
  local localhost_re='^https?://(localhost|127\.0\.0\.1)(:[0-9]+)?$'
  if [[ "$vault_addr" =~ $localhost_re ]]; then
    return 0
  fi
  _die "Security check failed: VAULT_ADDR must be localhost for safety. Got: $vault_addr"
}
|
||||
|
||||
# _validate_age_key_perms — ensure age key file is mode 0400
|
||||
# _validate_age_key_perms — refuse to use an age key file unless its mode is
# exactly 0400 (owner read-only), mirroring ssh private-key hygiene.
#
# Arguments:
#   $1 - path to the age key file
# Dies if the file cannot be stat'ed or its permissions are not 400.
#
# Fix: `stat -c` is GNU-only; fall back to BSD/macOS `stat -f '%Lp'` so the
# check works on both platforms instead of dying with "Cannot stat".
_validate_age_key_perms() {
  local keyfile="$1"
  local perms
  perms="$(stat -c '%a' "$keyfile" 2>/dev/null || stat -f '%Lp' "$keyfile" 2>/dev/null)" \
    || _die "Cannot stat age key file: $keyfile"
  if [ "$perms" != "400" ]; then
    _die "Age key file permissions are $perms, expected 400. Refusing to proceed for security."
  fi
}
|
||||
|
||||
# _decrypt_sops — decrypt sops-encrypted file using SOPS_AGE_KEY_FILE
|
||||
# _decrypt_sops — decrypt a sops-encrypted file using the given age key and
# emit only the env-style KEY=VALUE lines (uppercase identifiers) on stdout.
#
# Arguments:
#   $1 - sops-encrypted file
#   $2 - age key file (passed via SOPS_AGE_KEY_FILE)
# Dies if decryption fails or no matching lines are produced.
#
# Fix: the former trailing `sed 's/^\([^=]*\)=\(.*\)$/\1=\2/'` stage replaced
# every line with itself (a no-op) and is removed.
_decrypt_sops() {
  local sops_file="$1"
  local age_key="$2"
  local decrypted
  decrypted="$(SOPS_AGE_KEY_FILE="$age_key" sops -d "$sops_file" 2>/dev/null | \
    grep -E '^[A-Z_][A-Z0-9_]*=')" || \
    _die "Failed to decrypt sops file: $sops_file. Check age key and file integrity."
  printf '%s' "$decrypted"
}
|
||||
|
||||
# _load_env_file — source an environment file (safety: only KEY=value lines)
|
||||
# _load_env_file — load KEY=value pairs from an env file into shell variables.
#
# Only lines matching a valid identifier followed by '=' are considered;
# comments, blank lines and malformed lines are skipped.
#
# Security fix: the previous implementation `source`d the filtered lines,
# which word-splits unquoted values (e.g. `KEY=two words` tries to run a
# command `words`) and executes embedded $(...)/backtick substitutions —
# i.e. command injection from the env file. Values are now assigned verbatim
# with printf -v, with no evaluation.
# NOTE(review): unlike `source`, surrounding quotes in values are NOT
# stripped; the shipped fixtures use unquoted values, so their behavior is
# unchanged — confirm no caller relies on quoted .env values.
_load_env_file() {
  local env_file="$1"
  local _line _key
  while IFS= read -r _line; do
    _key="${_line%%=*}"
    # Assign the raw remainder of the line to the named variable.
    printf -v "$_key" '%s' "${_line#*=}"
  done < <(grep -E '^[A-Za-z_][A-Za-z0-9_]*=' "$env_file" 2>/dev/null || true)
}
|
||||
|
||||
# _kv_path_exists — check if a KV path exists (returns 0 if exists, 1 if not)
|
||||
# _kv_path_exists — return 0 if the KV path is readable via hvault_kv_get,
# 1 otherwise.
#
# Fix: the previous version re-ran hvault_kv_get on failure and grepped the
# error text for "not found"/404, but every failure branch returned 1 anyway —
# the redundant second fetch (and the grep) are removed; behavior is
# unchanged.
_kv_path_exists() {
  local path="$1"
  if hvault_kv_get "$path" >/dev/null 2>&1; then
    return 0
  fi
  return 1
}
|
||||
|
||||
# _kv_get_value — get a single key value from a KV path
|
||||
# _kv_get_value — fetch a single key's value from a KV path, delegating to
# the sourced hvault helper; propagates its stdout and exit status.
_kv_get_value() {
  local kv_path="$1" kv_key="$2"
  hvault_kv_get "$kv_path" "$kv_key"
}
|
||||
|
||||
# _kv_put_secret — write a secret to KV v2
|
||||
# _kv_put_secret — write one or more key=value pairs to a KV v2 path.
#
# Arguments:
#   $1   - KV path under secret/data/
#   $2.. - key=value pairs to store
# Builds the JSON payload with jq --arg so quotes/backslashes in values are
# escaped safely, POSTs it to ${VAULT_ADDR}/v1/secret/data/<path>, and maps
# the HTTP status to the return code: 0 on any 2xx, 1 otherwise. Error
# messages name only paths, never secret values.
#
# Fix: the response body was previously captured into a mktemp file that was
# never read; it is now discarded via -o /dev/null, so no temp file can be
# leaked if the script is killed mid-write.
_kv_put_secret() {
  local path="$1"
  shift
  local kv_pairs=("$@")

  # Fold every key=value pair into the {"data":{...}} payload via jq.
  local payload='{"data":{}}'
  local kv k v
  for kv in "${kv_pairs[@]}"; do
    k="${kv%%=*}"
    v="${kv#*=}"
    # jq --arg handles quoting/escaping; '.*' deep-merges into "data".
    payload="$(printf '%s' "$payload" | jq --arg k "$k" --arg v "$v" '. * {"data": {($k): $v}}')"
  done

  # -w emits only the HTTP status on stdout; the body is discarded.
  local http_code
  http_code="$(curl -s -w '%{http_code}' \
    -H "X-Vault-Token: ${VAULT_TOKEN}" \
    -H "Content-Type: application/json" \
    -X POST \
    -d "$payload" \
    -o /dev/null \
    "${VAULT_ADDR}/v1/secret/data/${path}")" || {
    _err "Failed to write to Vault at secret/data/${path}: curl error"
    return 1
  }

  # Check HTTP status — 2xx is success
  case "$http_code" in
    2[0-9][0-9])
      return 0
      ;;
    404)
      _err "KV path not found: secret/data/${path}"
      return 1
      ;;
    403)
      _err "Permission denied writing to secret/data/${path}"
      return 1
      ;;
    *)
      _err "Failed to write to Vault at secret/data/${path}: HTTP $http_code"
      return 1
      ;;
  esac
}
|
||||
|
||||
# _format_status — format the status string for a key
|
||||
# _format_status — render a one-line status string " <status>: <path>/<key>
# (<label>)" where the label repeats a known status and falls back to
# "unknown" for anything else. No trailing newline.
_format_status() {
  local state="$1"
  local kv_path="$2"
  local kv_key="$3"
  local label
  case "$state" in
    unchanged|updated|created) label="$state" ;;
    *) label="unknown" ;;
  esac
  printf ' %s: %s/%s (%s)' "$state" "$kv_path" "$kv_key" "$label"
}
|
||||
|
||||
# ── Mapping definitions ──────────────────────────────────────────────────────
|
||||
|
||||
# Bots mapping: FORGE_{ROLE}_TOKEN + FORGE_{ROLE}_PASS
# Each role maps to kv/disinto/bots/<role> with token/pass entries.
declare -a BOT_ROLES=(review dev gardener architect planner predictor supervisor vault)

# Runner tokens from sops-decrypted file
# Each name maps to kv/disinto/runner/<NAME> with a single "value" key.
declare -a RUNNER_TOKENS=(GITHUB_TOKEN CODEBERG_TOKEN CLAWHUB_TOKEN DEPLOY_KEY NPM_TOKEN DOCKER_HUB_TOKEN)
|
||||
|
||||
# ── Main logic ────────────────────────────────────────────────────────────────
|
||||
|
||||
main() {
|
||||
local env_file=""
|
||||
local sops_file=""
|
||||
local age_key_file=""
|
||||
local dry_run=false
|
||||
|
||||
# Parse arguments
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--env)
|
||||
env_file="$2"
|
||||
shift 2
|
||||
;;
|
||||
--sops)
|
||||
sops_file="$2"
|
||||
shift 2
|
||||
;;
|
||||
--age-key)
|
||||
age_key_file="$2"
|
||||
shift 2
|
||||
;;
|
||||
--dry-run)
|
||||
dry_run=true
|
||||
shift
|
||||
;;
|
||||
--help|-h)
|
||||
cat <<'EOF'
|
||||
vault-import.sh — Import .env and sops-decrypted secrets into Vault KV
|
||||
|
||||
Usage:
|
||||
vault-import.sh \
|
||||
--env /path/to/.env \
|
||||
--sops /path/to/.env.vault.enc \
|
||||
--age-key /path/to/age/keys.txt \
|
||||
[--dry-run]
|
||||
|
||||
Options:
|
||||
--env Path to .env file (required)
|
||||
--sops Path to sops-encrypted .env.vault.enc file (required)
|
||||
--age-key Path to age keys file (required)
|
||||
--dry-run Print import plan without writing to Vault (optional)
|
||||
--help Show this help message
|
||||
|
||||
Mapping:
|
||||
From .env:
|
||||
- FORGE_{ROLE}_TOKEN + FORGE_{ROLE}_PASS → kv/disinto/bots/<role>/{token,password}
|
||||
- FORGE_TOKEN_LLAMA + FORGE_PASS_LLAMA → kv/disinto/bots/dev-qwen/{token,password}
|
||||
- FORGE_TOKEN + FORGE_PASS → kv/disinto/shared/forge/{token,password}
|
||||
- FORGE_ADMIN_TOKEN → kv/disinto/shared/forge/admin_token
|
||||
- WOODPECKER_* → kv/disinto/shared/woodpecker/<lowercase_key>
|
||||
- FORWARD_AUTH_SECRET, CHAT_OAUTH_* → kv/disinto/shared/chat/<lowercase_key>
|
||||
|
||||
From sops-decrypted .env.vault.enc:
|
||||
- GITHUB_TOKEN, CODEBERG_TOKEN, CLAWHUB_TOKEN, DEPLOY_KEY, NPM_TOKEN, DOCKER_HUB_TOKEN
|
||||
→ kv/disinto/runner/<NAME>/value
|
||||
|
||||
Examples:
|
||||
vault-import.sh --env .env --sops .env.vault.enc --age-key age-keys.txt
|
||||
vault-import.sh --env .env --sops .env.vault.enc --age-key age-keys.txt --dry-run
|
||||
EOF
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
_die "Unknown option: $1. Use --help for usage."
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Validate required arguments
|
||||
if [ -z "$env_file" ]; then
|
||||
_die "Missing required argument: --env"
|
||||
fi
|
||||
if [ -z "$sops_file" ]; then
|
||||
_die "Missing required argument: --sops"
|
||||
fi
|
||||
if [ -z "$age_key_file" ]; then
|
||||
_die "Missing required argument: --age-key"
|
||||
fi
|
||||
|
||||
# Validate files exist
|
||||
if [ ! -f "$env_file" ]; then
|
||||
_die "Environment file not found: $env_file"
|
||||
fi
|
||||
if [ ! -f "$sops_file" ]; then
|
||||
_die "Sops file not found: $sops_file"
|
||||
fi
|
||||
if [ ! -f "$age_key_file" ]; then
|
||||
_die "Age key file not found: $age_key_file"
|
||||
fi
|
||||
|
||||
# Security check: age key permissions
|
||||
_validate_age_key_perms "$age_key_file"
|
||||
|
||||
# Security check: VAULT_ADDR must be localhost
|
||||
_check_vault_addr
|
||||
|
||||
# Source the Vault helpers
|
||||
source "$(dirname "$0")/../lib/hvault.sh"
|
||||
|
||||
# Load .env file
|
||||
_log "Loading environment from: $env_file"
|
||||
_load_env_file "$env_file"
|
||||
|
||||
# Decrypt sops file
|
||||
_log "Decrypting sops file: $sops_file"
|
||||
local sops_env
|
||||
sops_env="$(_decrypt_sops "$sops_file" "$age_key_file")"
|
||||
# shellcheck disable=SC2086
|
||||
eval "$sops_env"
|
||||
|
||||
# Collect all import operations
|
||||
declare -a operations=()
|
||||
|
||||
# --- From .env ---
|
||||
|
||||
# Bots: FORGE_{ROLE}_TOKEN + FORGE_{ROLE}_PASS
|
||||
for role in "${BOT_ROLES[@]}"; do
|
||||
local token_var="FORGE_${role^^}_TOKEN"
|
||||
local pass_var="FORGE_${role^^}_PASS"
|
||||
local token_val="${!token_var:-}"
|
||||
local pass_val="${!pass_var:-}"
|
||||
|
||||
if [ -n "$token_val" ] && [ -n "$pass_val" ]; then
|
||||
operations+=("bots|$role|token|$env_file|$token_var")
|
||||
operations+=("bots|$role|pass|$env_file|$pass_var")
|
||||
elif [ -n "$token_val" ] || [ -n "$pass_val" ]; then
|
||||
_err "Warning: $role bot has token but no password (or vice versa), skipping"
|
||||
fi
|
||||
done
|
||||
|
||||
# Llama bot: FORGE_TOKEN_LLAMA + FORGE_PASS_LLAMA
|
||||
local llama_token="${FORGE_TOKEN_LLAMA:-}"
|
||||
local llama_pass="${FORGE_PASS_LLAMA:-}"
|
||||
if [ -n "$llama_token" ] && [ -n "$llama_pass" ]; then
|
||||
operations+=("bots|dev-qwen|token|$env_file|FORGE_TOKEN_LLAMA")
|
||||
operations+=("bots|dev-qwen|pass|$env_file|FORGE_PASS_LLAMA")
|
||||
elif [ -n "$llama_token" ] || [ -n "$llama_pass" ]; then
|
||||
_err "Warning: dev-qwen bot has token but no password (or vice versa), skipping"
|
||||
fi
|
||||
|
||||
# Generic forge creds: FORGE_TOKEN + FORGE_PASS
|
||||
local forge_token="${FORGE_TOKEN:-}"
|
||||
local forge_pass="${FORGE_PASS:-}"
|
||||
if [ -n "$forge_token" ] && [ -n "$forge_pass" ]; then
|
||||
operations+=("forge|token|$env_file|FORGE_TOKEN")
|
||||
operations+=("forge|pass|$env_file|FORGE_PASS")
|
||||
fi
|
||||
|
||||
# Forge admin token: FORGE_ADMIN_TOKEN
|
||||
local forge_admin_token="${FORGE_ADMIN_TOKEN:-}"
|
||||
if [ -n "$forge_admin_token" ]; then
|
||||
operations+=("forge|admin_token|$env_file|FORGE_ADMIN_TOKEN")
|
||||
fi
|
||||
|
||||
# Woodpecker secrets: WOODPECKER_*
|
||||
# Only read from the .env file, not shell environment
|
||||
local woodpecker_keys=()
|
||||
while IFS='=' read -r key _; do
|
||||
if [[ "$key" =~ ^WOODPECKER_ ]] || [[ "$key" =~ ^WP_[A-Z_]+$ ]]; then
|
||||
woodpecker_keys+=("$key")
|
||||
fi
|
||||
done < <(grep -E '^[A-Z_][A-Z0-9_]*=' "$env_file" 2>/dev/null || true)
|
||||
for key in "${woodpecker_keys[@]}"; do
|
||||
local val="${!key}"
|
||||
if [ -n "$val" ]; then
|
||||
local lowercase_key="${key,,}"
|
||||
operations+=("woodpecker|$lowercase_key|$env_file|$key")
|
||||
fi
|
||||
done
|
||||
|
||||
# Chat secrets: FORWARD_AUTH_SECRET, CHAT_OAUTH_CLIENT_ID, CHAT_OAUTH_CLIENT_SECRET
|
||||
for key in FORWARD_AUTH_SECRET CHAT_OAUTH_CLIENT_ID CHAT_OAUTH_CLIENT_SECRET; do
|
||||
local val="${!key:-}"
|
||||
if [ -n "$val" ]; then
|
||||
local lowercase_key="${key,,}"
|
||||
operations+=("chat|$lowercase_key|$env_file|$key")
|
||||
fi
|
||||
done
|
||||
|
||||
# --- From sops-decrypted .env.vault.enc ---
|
||||
|
||||
# Runner tokens
|
||||
for token_name in "${RUNNER_TOKENS[@]}"; do
|
||||
local token_val="${!token_name:-}"
|
||||
if [ -n "$token_val" ]; then
|
||||
operations+=("runner|$token_name|$sops_file|$token_name")
|
||||
fi
|
||||
done
|
||||
|
||||
# If dry-run, just print the plan
|
||||
if $dry_run; then
|
||||
_log "=== DRY-RUN: Import plan ==="
|
||||
_log "Environment file: $env_file"
|
||||
_log "Sops file: $sops_file"
|
||||
_log "Age key: $age_key_file"
|
||||
_log ""
|
||||
_log "Planned operations:"
|
||||
for op in "${operations[@]}"; do
|
||||
_log " $op"
|
||||
done
|
||||
_log ""
|
||||
_log "Total: ${#operations[@]} operations"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# --- Actual import with idempotency check ---
|
||||
|
||||
_log "=== Starting Vault import ==="
|
||||
_log "Environment file: $env_file"
|
||||
_log "Sops file: $sops_file"
|
||||
_log "Age key: $age_key_file"
|
||||
_log ""
|
||||
|
||||
local created=0
|
||||
local updated=0
|
||||
local unchanged=0
|
||||
|
||||
# First pass: collect all operations with their parsed values
|
||||
# Store as: ops_data["vault_path:kv_key"] = "source_value|status"
|
||||
declare -A ops_data
|
||||
|
||||
for op in "${operations[@]}"; do
|
||||
# Parse operation: category|field|subkey|file|envvar (5 fields for bots/runner)
|
||||
# or category|field|file|envvar (4 fields for forge/woodpecker/chat)
|
||||
local category field subkey file envvar=""
|
||||
local field_count
|
||||
field_count="$(printf '%s' "$op" | awk -F'|' '{print NF}')"
|
||||
|
||||
if [ "$field_count" -eq 5 ]; then
|
||||
# 5 fields: category|role|subkey|file|envvar
|
||||
IFS='|' read -r category field subkey file envvar <<< "$op"
|
||||
else
|
||||
# 4 fields: category|field|file|envvar
|
||||
IFS='|' read -r category field file envvar <<< "$op"
|
||||
subkey="$field" # For 4-field ops, field is the vault key
|
||||
fi
|
||||
|
||||
# Determine Vault path and key based on category
|
||||
local vault_path=""
|
||||
local vault_key="$subkey"
|
||||
local source_value=""
|
||||
|
||||
if [ "$file" = "$env_file" ]; then
|
||||
# Source from environment file (envvar contains the variable name)
|
||||
source_value="${!envvar:-}"
|
||||
else
|
||||
# Source from sops-decrypted env (envvar contains the variable name)
|
||||
source_value="$(printf '%s' "$sops_env" | grep "^${envvar}=" | sed "s/^${envvar}=//" || true)"
|
||||
fi
|
||||
|
||||
case "$category" in
|
||||
bots)
|
||||
vault_path="disinto/bots/${field}"
|
||||
vault_key="$subkey"
|
||||
;;
|
||||
forge)
|
||||
vault_path="disinto/shared/forge"
|
||||
vault_key="$field"
|
||||
;;
|
||||
woodpecker)
|
||||
vault_path="disinto/shared/woodpecker"
|
||||
vault_key="$field"
|
||||
;;
|
||||
chat)
|
||||
vault_path="disinto/shared/chat"
|
||||
vault_key="$field"
|
||||
;;
|
||||
runner)
|
||||
vault_path="disinto/runner/${field}"
|
||||
vault_key="value"
|
||||
;;
|
||||
*)
|
||||
_err "Unknown category: $category"
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
|
||||
# Determine status for this key
|
||||
local status="created"
|
||||
if _kv_path_exists "$vault_path"; then
|
||||
local existing_value
|
||||
if existing_value="$(_kv_get_value "$vault_path" "$vault_key")" 2>/dev/null; then
|
||||
if [ "$existing_value" = "$source_value" ]; then
|
||||
status="unchanged"
|
||||
else
|
||||
status="updated"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Store operation data: key = "vault_path:kv_key", value = "source_value|status"
|
||||
ops_data["${vault_path}:${vault_key}"]="${source_value}|${status}"
|
||||
done
|
||||
|
||||
# Second pass: group by vault_path and write
|
||||
# IMPORTANT: Always write ALL keys for a path, not just changed ones.
|
||||
# KV v2 POST replaces the entire document, so we must include unchanged keys
|
||||
# to avoid dropping them. The idempotency guarantee comes from KV v2 versioning.
|
||||
declare -A paths_to_write
|
||||
declare -A path_has_changes
|
||||
|
||||
for key in "${!ops_data[@]}"; do
|
||||
local data="${ops_data[$key]}"
|
||||
local source_value="${data%%|*}"
|
||||
local status="${data##*|}"
|
||||
local vault_path="${key%:*}"
|
||||
local vault_key="${key#*:}"
|
||||
|
||||
# Always add to paths_to_write (all keys for this path)
|
||||
if [ -z "${paths_to_write[$vault_path]:-}" ]; then
|
||||
paths_to_write[$vault_path]="${vault_key}=${source_value}"
|
||||
else
|
||||
paths_to_write[$vault_path]="${paths_to_write[$vault_path]}|${vault_key}=${source_value}"
|
||||
fi
|
||||
|
||||
# Track if this path has any changes (for status reporting)
|
||||
if [ "$status" != "unchanged" ]; then
|
||||
path_has_changes[$vault_path]=1
|
||||
fi
|
||||
done
|
||||
|
||||
# Write each path with all its key-value pairs
|
||||
for vault_path in "${!paths_to_write[@]}"; do
|
||||
# Determine effective status for this path (updated if any key changed)
|
||||
local effective_status="unchanged"
|
||||
if [ "${path_has_changes[$vault_path]:-}" = "1" ]; then
|
||||
effective_status="updated"
|
||||
fi
|
||||
|
||||
# Read pipe-separated key-value pairs and write them
|
||||
local pairs_string="${paths_to_write[$vault_path]}"
|
||||
local pairs_array=()
|
||||
local IFS='|'
|
||||
read -r -a pairs_array <<< "$pairs_string"
|
||||
|
||||
if ! _kv_put_secret "$vault_path" "${pairs_array[@]}"; then
|
||||
_err "Failed to write to $vault_path"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Output status for each key in this path
|
||||
for kv in "${pairs_array[@]}"; do
|
||||
local kv_key="${kv%%=*}"
|
||||
_format_status "$effective_status" "$vault_path" "$kv_key"
|
||||
printf '\n'
|
||||
done
|
||||
|
||||
# Count only if path has changes
|
||||
if [ "$effective_status" = "updated" ]; then
|
||||
((updated++)) || true
|
||||
fi
|
||||
done
|
||||
|
||||
_log ""
|
||||
_log "=== Import complete ==="
|
||||
_log "Created: $created"
|
||||
_log "Updated: $updated"
|
||||
_log "Unchanged: $unchanged"
|
||||
}
|
||||
|
||||
# Script entry point: forward all command-line arguments to main verbatim
# ("$@" preserves each argument as a separate word, including spaces).
main "$@"
|
||||
Loading…
Add table
Add a link
Reference in a new issue