fix: bug: architect pitch prompt guardrail is prose-only — model bypasses "NEVER call Forgejo API" via Bash tool; fix via permission scoping + PR-driven sub-issue filing (#764)
Shift the guardrail from prose prompt constraints into Forgejo's permission layer. architect-bot loses all write access on the project repo (now read-only for context gathering). Sub-issues are produced by a new filer-bot identity that runs only after a human merges a sprint PR on the ops repo. Changes: - architect-run.sh: remove all project-repo writes (add_inprogress_label, close_vision_issue, check_and_close_completed_visions); add ## Sub-issues block to pitch format with filer:begin/end markers - formulas/run-architect.toml: add Sub-issues schema to pitch format; strip issue-creation API refs; document read-only constraint on project repo - lib/formula-session.sh: remove Create issue curl template from build_prompt_footer (architect cannot create issues) - lib/sprint-filer.sh (new): parser + idempotent filer using FORGE_FILER_TOKEN; parses filer:begin/end blocks, creates issues with decomposed-from markers, adds in-progress label, handles vision lifecycle closure - .woodpecker/ops-filer.yml (new): CI pipeline on ops repo main-branch push that invokes sprint-filer.sh after sprint PR merge - lib/env.sh, .env.example, docker-compose.yml: add FORGE_FILER_TOKEN for filer-bot identity; add filer-bot to FORGE_BOT_USERNAMES - AGENTS.md: add Filer agent entry; update in-progress label docs - .woodpecker/agent-smoke.sh: register sprint-filer.sh for smoke test Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
10c7a88416
commit
04ff8a6e85
10 changed files with 685 additions and 353 deletions
|
|
@ -45,7 +45,9 @@ FORGE_PREDICTOR_TOKEN= # [SECRET] predictor-bot API token
|
|||
FORGE_PREDICTOR_PASS= # [SECRET] predictor-bot password for git HTTP push
|
||||
FORGE_ARCHITECT_TOKEN= # [SECRET] architect-bot API token
|
||||
FORGE_ARCHITECT_PASS= # [SECRET] architect-bot password for git HTTP push
|
||||
FORGE_BOT_USERNAMES=dev-bot,review-bot,planner-bot,gardener-bot,vault-bot,supervisor-bot,predictor-bot,architect-bot
|
||||
FORGE_FILER_TOKEN= # [SECRET] filer-bot API token (issues:write on project repo only)
|
||||
FORGE_FILER_PASS= # [SECRET] filer-bot password for git HTTP push
|
||||
FORGE_BOT_USERNAMES=dev-bot,review-bot,planner-bot,gardener-bot,vault-bot,supervisor-bot,predictor-bot,architect-bot,filer-bot
|
||||
|
||||
# ── Backwards compatibility ───────────────────────────────────────────────
|
||||
# If CODEBERG_TOKEN is set but FORGE_TOKEN is not, env.sh falls back to
|
||||
|
|
|
|||
|
|
@ -213,6 +213,7 @@ check_script lib/issue-lifecycle.sh lib/secret-scan.sh
|
|||
# Still checked for function resolution against LIB_FUNS + own definitions.
|
||||
check_script lib/ci-debug.sh
|
||||
check_script lib/parse-deps.sh
|
||||
check_script lib/sprint-filer.sh
|
||||
|
||||
# Agent scripts — list cross-sourced files where function scope flows across files.
|
||||
check_script dev/dev-agent.sh
|
||||
|
|
|
|||
36
.woodpecker/ops-filer.yml
Normal file
36
.woodpecker/ops-filer.yml
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
# .woodpecker/ops-filer.yml — Sub-issue filer pipeline (#764)
#
# Triggered on push to main of the ops repo after a sprint PR merges.
# Parses sprints/*.md for ## Sub-issues blocks and files them on the
# project repo via filer-bot (FORGE_FILER_TOKEN).
#
# NOTE: This pipeline runs on the ops repo. It must be registered in the
# ops repo's Woodpecker project. The filer script (lib/sprint-filer.sh)
# lives in the code repo and is cloned into the workspace.
#
# Idempotency: safe to re-run — each sub-issue carries a decomposed-from
# marker that the filer checks before creating.

when:
  branch: main
  event: push

steps:
  - name: file-subissues
    image: alpine:3
    commands:
      # git is NOT in the alpine:3 base image — without it the clone below
      # fails. Install it alongside the tools sprint-filer.sh needs.
      - apk add --no-cache bash curl jq git
      # Clone the code repo to get the filer script.
      # NOTE(review): the token is embedded in the clone URL; on failure git
      # echoes the URL, which would leak the token into CI logs. Consider
      # `git -c http.extraHeader="Authorization: token ..."` instead — confirm
      # the Forgejo instance accepts header auth for smart-HTTP clones.
      - AUTH_URL=$(printf '%s' "${FORGE_URL}/disinto-admin/disinto.git" | sed "s|://|://token:${FORGE_FILER_TOKEN}@|")
      - git clone --depth 1 "$AUTH_URL" /tmp/code-repo
      # Run filer against all sprint files in the ops repo workspace
      - bash /tmp/code-repo/lib/sprint-filer.sh --all sprints/
    environment:
      FORGE_FILER_TOKEN:
        from_secret: forge_filer_token
      FORGE_URL:
        from_secret: forge_url
      FORGE_API:
        from_secret: forge_api
      FORGE_API_BASE:
        from_secret: forge_api_base
|
||||
|
|
@ -35,7 +35,7 @@ disinto/ (code repo)
|
|||
│ SCHEMA.md — vault item schema documentation
|
||||
│ validate.sh — vault item validator
|
||||
│ examples/ — example vault action TOMLs (promote, publish, release, webhook-call)
|
||||
├── lib/ env.sh, agent-sdk.sh, ci-helpers.sh, ci-debug.sh, load-project.sh, parse-deps.sh, guard.sh, mirrors.sh, pr-lifecycle.sh, issue-lifecycle.sh, worktree.sh, formula-session.sh, stack-lock.sh, forge-setup.sh, forge-push.sh, ops-setup.sh, ci-setup.sh, generators.sh, hire-agent.sh, release.sh, build-graph.py, branch-protection.sh, secret-scan.sh, tea-helpers.sh, vault.sh, ci-log-reader.py, git-creds.sh
|
||||
├── lib/ env.sh, agent-sdk.sh, ci-helpers.sh, ci-debug.sh, load-project.sh, parse-deps.sh, guard.sh, mirrors.sh, pr-lifecycle.sh, issue-lifecycle.sh, worktree.sh, formula-session.sh, stack-lock.sh, forge-setup.sh, forge-push.sh, ops-setup.sh, ci-setup.sh, generators.sh, hire-agent.sh, release.sh, build-graph.py, branch-protection.sh, secret-scan.sh, tea-helpers.sh, vault.sh, ci-log-reader.py, git-creds.sh, sprint-filer.sh
|
||||
│ hooks/ — Claude Code session hooks (on-compact-reinject, on-idle-stop, on-phase-change, on-pretooluse-guard, on-session-end, on-stop-failure)
|
||||
├── projects/ *.toml.example — templates; *.toml — local per-box config (gitignored)
|
||||
├── formulas/ Issue templates (TOML specs for multi-step agent tasks)
|
||||
|
|
@ -113,7 +113,8 @@ bash dev/phase-test.sh
|
|||
| Supervisor | `supervisor/` | Health monitoring | [supervisor/AGENTS.md](supervisor/AGENTS.md) |
|
||||
| Planner | `planner/` | Strategic planning | [planner/AGENTS.md](planner/AGENTS.md) |
|
||||
| Predictor | `predictor/` | Infrastructure pattern detection | [predictor/AGENTS.md](predictor/AGENTS.md) |
|
||||
| Architect | `architect/` | Strategic decomposition | [architect/AGENTS.md](architect/AGENTS.md) |
|
||||
| Architect | `architect/` | Strategic decomposition (read-only on project repo) | [architect/AGENTS.md](architect/AGENTS.md) |
|
||||
| Filer | `lib/sprint-filer.sh` | Sub-issue filing from merged sprint PRs | `.woodpecker/ops-filer.yml` |
|
||||
| Reproduce | `docker/reproduce/` | Bug reproduction using Playwright MCP | `formulas/reproduce.toml` |
|
||||
| Triage | `docker/reproduce/` | Deep root cause analysis | `formulas/triage.toml` |
|
||||
| Edge dispatcher | `docker/edge/` | Polls ops repo for vault actions, executes via Claude sessions | `docker/edge/dispatcher.sh` |
|
||||
|
|
@ -135,7 +136,7 @@ Issues flow: `backlog` → `in-progress` → PR → CI → review → merge →
|
|||
|---|---|---|
|
||||
| `backlog` | Issue is queued for implementation. Dev-poll picks the first ready one. | Planner, gardener, humans |
|
||||
| `priority` | Queue tier above plain backlog. Issues with both `priority` and `backlog` are picked before plain `backlog` issues. FIFO within each tier. | Planner, humans |
|
||||
| `in-progress` | Dev-agent is actively working on this issue. Only one issue per project is in-progress at a time. | dev-agent.sh (claims issue) |
|
||||
| `in-progress` | Dev-agent is actively working on this issue. Only one issue per project is in-progress at a time. Also set on vision issues by filer-bot when sub-issues are filed (#764). | dev-agent.sh (claims issue), filer-bot (vision issues) |
|
||||
| `blocked` | Issue is stuck — agent session failed, crashed, timed out, or CI exhausted. Diagnostic comment on the issue has details. Also used for unmet dependencies. | dev-agent.sh, dev-poll.sh (on failure) |
|
||||
| `tech-debt` | Pre-existing issue flagged by AI reviewer, not introduced by a PR. | review-pr.sh (auto-created follow-ups) |
|
||||
| `underspecified` | Dev-agent refused the issue as too large or vague. | dev-poll.sh (on preflight `too_large`), dev-agent.sh (on mid-run `too_large` refusal) |
|
||||
|
|
|
|||
|
|
@ -117,8 +117,8 @@ build_architect_prompt() {
|
|||
You are the architect agent for ${FORGE_REPO}. Work through the formula below.
|
||||
|
||||
Your role: strategic decomposition of vision issues into development sprints.
|
||||
Propose sprints via PRs on the ops repo, converse with humans through PR comments,
|
||||
and file sub-issues after design forks are resolved.
|
||||
Propose sprints via PRs on the ops repo, converse with humans through PR comments.
|
||||
You are READ-ONLY on the project repo — sub-issues are filed by filer-bot after sprint PR merge (#764).
|
||||
|
||||
## Project context
|
||||
${CONTEXT_BLOCK}
|
||||
|
|
@ -145,8 +145,8 @@ build_architect_prompt_for_mode() {
|
|||
You are the architect agent for ${FORGE_REPO}. Work through the formula below.
|
||||
|
||||
Your role: strategic decomposition of vision issues into development sprints.
|
||||
Propose sprints via PRs on the ops repo, converse with humans through PR comments,
|
||||
and file sub-issues after design forks are resolved.
|
||||
Propose sprints via PRs on the ops repo, converse with humans through PR comments.
|
||||
You are READ-ONLY on the project repo — sub-issues are filed by filer-bot after sprint PR merge (#764).
|
||||
|
||||
## CURRENT STATE: Approved PR awaiting initial design questions
|
||||
|
||||
|
|
@ -157,10 +157,10 @@ design conversation has not yet started. Your task is to:
|
|||
2. Identify the key design decisions that need human input
|
||||
3. Post initial design questions (Q1:, Q2:, etc.) as comments on the PR
|
||||
4. Add a `## Design forks` section to the PR body documenting the design decisions
|
||||
5. File sub-issues for each design fork path if applicable
|
||||
5. Update the ## Sub-issues section in the sprint spec if design decisions affect decomposition
|
||||
|
||||
This is NOT a pitch phase — the pitch is already approved. This is the START
|
||||
of the design Q&A phase.
|
||||
of the design Q&A phase. Sub-issues are filed by filer-bot after sprint PR merge (#764).
|
||||
|
||||
## Project context
|
||||
${CONTEXT_BLOCK}
|
||||
|
|
@ -179,8 +179,8 @@ _PROMPT_EOF_
|
|||
You are the architect agent for ${FORGE_REPO}. Work through the formula below.
|
||||
|
||||
Your role: strategic decomposition of vision issues into development sprints.
|
||||
Propose sprints via PRs on the ops repo, converse with humans through PR comments,
|
||||
and file sub-issues after design forks are resolved.
|
||||
Propose sprints via PRs on the ops repo, converse with humans through PR comments.
|
||||
You are READ-ONLY on the project repo — sub-issues are filed by filer-bot after sprint PR merge (#764).
|
||||
|
||||
## CURRENT STATE: Design Q&A in progress
|
||||
|
||||
|
|
@ -194,7 +194,7 @@ Your task is to:
|
|||
2. Read human answers from PR comments
|
||||
3. Parse the answers and determine next steps
|
||||
4. Post follow-up questions if needed (Q3:, Q4:, etc.)
|
||||
5. If all design forks are resolved, file sub-issues for each path
|
||||
5. If all design forks are resolved, finalize the ## Sub-issues section in the sprint spec
|
||||
6. Update the `## Design forks` section as you progress
|
||||
|
||||
## Project context
|
||||
|
|
@ -418,243 +418,10 @@ fetch_vision_issues() {
|
|||
"${FORGE_API}/issues?labels=vision&state=open&limit=100" 2>/dev/null || echo '[]'
|
||||
}
|
||||
|
||||
# ── Helper: Fetch all sub-issues for a vision issue ───────────────────────
|
||||
# Sub-issues are identified by:
|
||||
# 1. Issues whose body contains "Decomposed from #N" pattern
|
||||
# 2. Issues referenced in merged sprint PR bodies
|
||||
# Returns: newline-separated list of sub-issue numbers (empty if none)
|
||||
# Args: vision_issue_number
|
||||
# Collect sub-issue numbers for a vision issue from two sources:
#   1. project-repo issues whose body contains "Decomposed from #<vision>"
#   2. issue refs in merged "architect:" sprint PR bodies on the ops repo
# Globals: FORGE_TOKEN, FORGE_API, FORGE_API_BASE, FORGE_OPS_REPO (read)
# Outputs: newline-separated, de-duplicated sub-issue numbers (empty if none)
# Args: $1 — vision issue number
get_vision_subissues() {
  local vision_issue="$1"
  local subissues=()

  # Method 1: find issues with "Decomposed from #N" in body.
  # curl failure is tolerated (|| true) — Method 2 may still find refs.
  local issues_json
  issues_json=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
    "${FORGE_API}/issues?limit=100" 2>/dev/null) || true

  if [ -n "$issues_json" ] && [ "$issues_json" != "null" ]; then
    while IFS= read -r subissue_num; do
      [ -z "$subissue_num" ] && continue
      subissues+=("$subissue_num")
    done <<< "$(printf '%s' "$issues_json" | jq -r --arg vid "$vision_issue" \
      '[.[] | select(.number != ($vid | tonumber)) | select(.body // "" | contains("Decomposed from #" + $vid))] | .[].number' 2>/dev/null)"
  fi

  # Method 2: find issues referenced in merged sprint PR bodies.
  # Only consider PRs whose title or body references this specific vision issue.
  local prs_json
  prs_json=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
    "${FORGE_API_BASE}/repos/${FORGE_OPS_REPO}/pulls?state=closed&limit=100" 2>/dev/null) || true

  if [ -n "$prs_json" ] && [ "$prs_json" != "null" ]; then
    while IFS= read -r pr_num; do
      [ -z "$pr_num" ] && continue

      local pr_details pr_body pr_title
      pr_details=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
        "${FORGE_API_BASE}/repos/${FORGE_OPS_REPO}/pulls/${pr_num}" 2>/dev/null) || continue

      # Closed-but-unmerged sprint PRs are not decomposition — skip them.
      local is_merged
      is_merged=$(printf '%s' "$pr_details" | jq -r '.merged // false') || continue
      if [ "$is_merged" != "true" ]; then
        continue
      fi

      pr_title=$(printf '%s' "$pr_details" | jq -r '.title // ""') || continue
      pr_body=$(printf '%s' "$pr_details" | jq -r '.body // ""') || continue

      # Only process PRs that reference this specific vision issue.
      # The trailing ([^0-9]|$) keeps "#7" from matching "#76".
      if ! printf '%s\n%s' "$pr_title" "$pr_body" | grep -qE "#${vision_issue}([^0-9]|$)"; then
        continue
      fi

      # Extract issue numbers from PR body, excluding the vision issue itself
      # and anything already collected.
      while IFS= read -r ref_issue; do
        [ -z "$ref_issue" ] && continue
        # Skip the vision issue itself
        [ "$ref_issue" = "$vision_issue" ] && continue
        # Skip if already in list
        local found=false
        for existing in "${subissues[@]+"${subissues[@]}"}"; do
          [ "$existing" = "$ref_issue" ] && found=true && break
        done
        if [ "$found" = false ]; then
          subissues+=("$ref_issue")
        fi
      done <<< "$(printf '%s' "$pr_body" | grep -oE '#[0-9]+' | tr -d '#' | sort -u)"
    done <<< "$(printf '%s' "$prs_json" | jq -r '.[] | select(.title | contains("architect:")) | .number')"
  fi

  # Output unique sub-issues. FIX: guard the empty-array expansion — under
  # `set -u`, bash < 4.4 treats "${subissues[@]}" on an empty array as an
  # unbound-variable error. The for-loop above already uses this guard;
  # the unguarded expansion here was the one remaining crash path.
  printf '%s\n' "${subissues[@]+"${subissues[@]}"}" | sort -u | grep -v '^$' || true
}
|
||||
|
||||
# ── Helper: Check if all sub-issues of a vision issue are closed ───────────
|
||||
# Returns: 0 if all sub-issues are closed, 1 if any are still open
|
||||
# Args: vision_issue_number
|
||||
# Decide whether a vision issue is complete: every discovered sub-issue
# must be in state "closed". A vision with zero discovered sub-issues is
# never considered complete (nothing was ever decomposed from it).
# Returns: 0 when all sub-issues are closed, 1 otherwise.
# Args: $1 — vision issue number
all_subissues_closed() {
  local vision_issue="$1"
  local subissues
  subissues=$(get_vision_subissues "$vision_issue")

  # No sub-issues found — parent cannot be considered complete.
  [ -n "$subissues" ] || return 1

  local subissue_num sub_state
  while IFS= read -r subissue_num; do
    if [ -z "$subissue_num" ]; then
      continue
    fi

    # Fetch the issue state; on API failure the state comes back empty,
    # which fails the "closed" comparison below (conservative default).
    sub_state=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
      "${FORGE_API}/issues/${subissue_num}" 2>/dev/null | jq -r '.state // "unknown"') || true

    if [ "$sub_state" != "closed" ]; then
      log "Sub-issue #${subissue_num} is ${sub_state} — vision issue #${vision_issue} not ready to close"
      return 1
    fi
  done <<< "$subissues"

  return 0
}
|
||||
|
||||
# ── Helper: Close vision issue with summary comment ────────────────────────
|
||||
# Posts a comment listing all completed sub-issues before closing.
|
||||
# Returns: 0 on success, 1 on failure
|
||||
# Args: vision_issue_number
|
||||
# Close a vision issue, posting a summary comment listing completed
# sub-issues first. Idempotent: re-runs detect the existing completion
# comment and only retry the close PATCH if the issue is still open.
# Globals: FORGE_TOKEN, FORGE_API (read)
# Returns: 0 on success (or already closed), 1 on any API failure
# Args: $1 — vision issue number
close_vision_issue() {
  local vision_issue="$1"

  # Idempotency guard: check if a completion comment already exists.
  # On fetch failure, fall back to "[]" so the jq test below is false
  # and we proceed to build a fresh comment.
  local existing_comments
  existing_comments=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
    "${FORGE_API}/issues/${vision_issue}/comments" 2>/dev/null) || existing_comments="[]"

  # jq -e exits non-zero when the completion-comment count is 0.
  if printf '%s' "$existing_comments" | jq -e '[.[] | select(.body | contains("Vision Issue Completed"))] | length > 0' >/dev/null 2>&1; then
    # Comment exists — verify the issue is actually closed before skipping.
    # Default to "open" on fetch failure so we retry the close rather than
    # silently skipping a possibly-open issue.
    local issue_state
    issue_state=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
      "${FORGE_API}/issues/${vision_issue}" 2>/dev/null | jq -r '.state // "open"') || issue_state="open"
    if [ "$issue_state" = "closed" ]; then
      log "Vision issue #${vision_issue} already has a completion comment and is closed — skipping"
      return 0
    fi
    log "Vision issue #${vision_issue} has a completion comment but state=${issue_state} — retrying close"
  else
    # No completion comment yet — build and post one.
    local subissues
    subissues=$(get_vision_subissues "$vision_issue")

    # Build summary comment: one "- #N: title" bullet per sub-issue.
    # NOTE: summary/count are only set on this branch; the final log line
    # relies on ${count:+...} expanding empty on the retry-close path.
    local summary=""
    local count=0
    while IFS= read -r subissue_num; do
      [ -z "$subissue_num" ] && continue
      local sub_title
      sub_title=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
        "${FORGE_API}/issues/${subissue_num}" 2>/dev/null | jq -r '.title // "Untitled"') || sub_title="Untitled"
      summary+="- #${subissue_num}: ${sub_title}"$'\n'
      count=$((count + 1))
    done <<< "$subissues"

    # Unquoted heredoc delimiter: ${count}, ${summary} and $(date) expand
    # here, at comment-build time.
    local comment
    comment=$(cat <<EOF
## Vision Issue Completed

All sub-issues have been implemented and merged. This vision issue is now closed.

### Completed sub-issues (${count}):
${summary}
---
*Automated closure by architect · $(date -u '+%Y-%m-%d %H:%M UTC')*
EOF
)

    # Post comment before closing. jq -Rs wraps the raw markdown as a JSON
    # {"body": ...} payload, handling quoting/newline escaping safely.
    local tmpfile tmpjson
    tmpfile=$(mktemp /tmp/vision-close-XXXXXX.md)
    tmpjson="${tmpfile}.json"
    printf '%s' "$comment" > "$tmpfile"
    jq -Rs '{body:.}' < "$tmpfile" > "$tmpjson"

    if ! curl -sf -X POST \
      -H "Authorization: token ${FORGE_TOKEN}" \
      -H "Content-Type: application/json" \
      "${FORGE_API}/issues/${vision_issue}/comments" \
      --data-binary @"$tmpjson" >/dev/null 2>&1; then
      log "WARNING: failed to post closure comment on vision issue #${vision_issue}"
      rm -f "$tmpfile" "$tmpjson"
      return 1
    fi
    rm -f "$tmpfile" "$tmpjson"
  fi

  # Clear assignee (best-effort) and close the issue.
  curl -sf -X PATCH \
    -H "Authorization: token ${FORGE_TOKEN}" \
    -H "Content-Type: application/json" \
    "${FORGE_API}/issues/${vision_issue}" \
    -d '{"assignees":[]}' >/dev/null 2>&1 || true

  local close_response
  close_response=$(curl -sf -X PATCH \
    -H "Authorization: token ${FORGE_TOKEN}" \
    -H "Content-Type: application/json" \
    "${FORGE_API}/issues/${vision_issue}" \
    -d '{"state":"closed"}' 2>/dev/null) || {
    log "ERROR: state=closed PATCH failed for vision issue #${vision_issue}"
    return 1
  }

  # Verify the PATCH actually took effect — a 2xx response with a non-closed
  # state is still a failure.
  local result_state
  result_state=$(printf '%s' "$close_response" | jq -r '.state // "unknown"') || result_state="unknown"
  if [ "$result_state" != "closed" ]; then
    log "ERROR: vision issue #${vision_issue} state is '${result_state}' after close PATCH — expected 'closed'"
    return 1
  fi

  # ${count:+...} expands empty when count is unset (retry-close path).
  log "Closed vision issue #${vision_issue}${count:+ — all ${count} sub-issue(s) complete}"
  return 0
}
|
||||
|
||||
# ── Lifecycle check: Close vision issues with all sub-issues complete ──────
|
||||
# Runs before picking new vision issues for decomposition.
|
||||
# Checks each open vision issue and closes it if all sub-issues are closed.
|
||||
# Lifecycle sweep: for every open vision issue, close it when all of its
# sub-issues are complete. Intended to run before new visions are picked
# for decomposition.
check_and_close_completed_visions() {
  log "Checking for vision issues with all sub-issues complete..."

  local visions_json
  visions_json=$(fetch_vision_issues)

  if [ -z "$visions_json" ] || [ "$visions_json" = "null" ]; then
    log "No open vision issues found"
    return 0
  fi

  # Pull out the issue numbers; a jq failure degrades to an empty list.
  local issue_nums
  issue_nums=$(printf '%s' "$visions_json" | jq -r '.[].number' 2>/dev/null) || issue_nums=""

  local num closed_count=0
  while IFS= read -r num; do
    [ -n "$num" ] || continue

    # Close only when every sub-issue is verified closed AND the close
    # itself succeeds; && short-circuits exactly like the nested ifs did.
    if all_subissues_closed "$num" && close_vision_issue "$num"; then
      closed_count=$((closed_count + 1))
    fi
  done <<< "$issue_nums"

  if [ "$closed_count" -gt 0 ]; then
    log "Closed ${closed_count} vision issue(s) with all sub-issues complete"
  else
    log "No vision issues ready for closure"
  fi
}
|
||||
# NOTE: get_vision_subissues, all_subissues_closed, close_vision_issue,
|
||||
# check_and_close_completed_visions removed (#764) — architect-bot is read-only
|
||||
# on the project repo. Vision lifecycle (closing completed visions, adding
|
||||
# in-progress labels) is now handled by filer-bot via lib/sprint-filer.sh.
|
||||
|
||||
# ── Helper: Fetch open architect PRs from ops repo Forgejo API ───────────
|
||||
# Returns: JSON array of architect PR objects
|
||||
|
|
@ -746,7 +513,23 @@ Instructions:
|
|||
## Recommendation
|
||||
<architect's assessment: worth it / defer / alternative approach>
|
||||
|
||||
## Sub-issues
|
||||
|
||||
<!-- filer:begin -->
|
||||
- id: <kebab-case-id>
|
||||
title: \"vision(#${issue_num}): <concise sub-issue title>\"
|
||||
labels: [backlog]
|
||||
depends_on: []
|
||||
body: |
|
||||
## Goal
|
||||
<what this sub-issue accomplishes>
|
||||
## Acceptance criteria
|
||||
- [ ] <criterion>
|
||||
<!-- filer:end -->
|
||||
|
||||
IMPORTANT: Do NOT include design forks or questions. This is a go/no-go pitch.
|
||||
The ## Sub-issues block is parsed by the filer-bot pipeline after sprint PR merge.
|
||||
Each sub-issue between filer:begin/end markers becomes a Forgejo issue.
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -855,37 +638,8 @@ post_pr_footer() {
|
|||
fi
|
||||
}
|
||||
|
||||
# ── Helper: Add in-progress label to vision issue ────────────────────────
|
||||
# Args: vision_issue_number
|
||||
# Attach the 'in-progress' label to a vision issue. The label is looked up
# by name to obtain its numeric ID, since the label-add endpoint takes IDs.
# Globals: FORGE_TOKEN, FORGE_API (read)
# Returns: 0 on success, 1 when the label is missing or an API call fails
# Args: $1 — vision issue number
add_inprogress_label() {
  local issue_num="$1"

  # Resolve the numeric ID of the 'in-progress' label.
  local labels_json label_id
  labels_json=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \
    "${FORGE_API}/labels" 2>/dev/null) || return 1

  label_id=$(printf '%s' "$labels_json" | jq -r --arg label "in-progress" '.[] | select(.name == $label) | .id' 2>/dev/null) || true

  if [ -z "$label_id" ]; then
    log "WARNING: in-progress label not found"
    return 1
  fi

  # Attach the label; the API expects a JSON array of label IDs.
  if ! curl -sf -X POST \
    -H "Authorization: token ${FORGE_TOKEN}" \
    -H "Content-Type: application/json" \
    "${FORGE_API}/issues/${issue_num}/labels" \
    -d "{\"labels\": [${label_id}]}" >/dev/null 2>&1; then
    log "WARNING: failed to add in-progress label to vision issue #${issue_num}"
    return 1
  fi

  log "Added in-progress label to vision issue #${issue_num}"
  return 0
}
|
||||
# NOTE: add_inprogress_label removed (#764) — architect-bot is read-only on
|
||||
# project repo. in-progress label is now added by filer-bot via sprint-filer.sh.
|
||||
|
||||
# ── Precondition checks in bash before invoking the model ─────────────────
|
||||
|
||||
|
|
@ -935,9 +689,7 @@ if [ "${open_arch_prs:-0}" -ge 3 ]; then
|
|||
log "3 open architect PRs found but responses detected — processing"
|
||||
fi
|
||||
|
||||
# ── Lifecycle check: Close vision issues with all sub-issues complete ──────
|
||||
# Run before picking new vision issues for decomposition
|
||||
check_and_close_completed_visions
|
||||
# NOTE: Vision lifecycle check (close completed visions) moved to filer-bot (#764)
|
||||
|
||||
# ── Bash-driven state management: Select vision issues for pitching ───────
|
||||
# This logic is also documented in formulas/run-architect.toml preflight step
|
||||
|
|
@ -1073,8 +825,7 @@ for vision_issue in "${ARCHITECT_TARGET_ISSUES[@]}"; do
|
|||
# Post footer comment
|
||||
post_pr_footer "$pr_number"
|
||||
|
||||
# Add in-progress label to vision issue
|
||||
add_inprogress_label "$vision_issue"
|
||||
# NOTE: in-progress label is added by filer-bot after sprint PR merge (#764)
|
||||
|
||||
pitch_count=$((pitch_count + 1))
|
||||
log "Completed pitch for vision issue #${vision_issue} — PR #${pr_number}"
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ services:
|
|||
- FORGE_SUPERVISOR_TOKEN=${FORGE_SUPERVISOR_TOKEN:-}
|
||||
- FORGE_PREDICTOR_TOKEN=${FORGE_PREDICTOR_TOKEN:-}
|
||||
- FORGE_ARCHITECT_TOKEN=${FORGE_ARCHITECT_TOKEN:-}
|
||||
- FORGE_FILER_TOKEN=${FORGE_FILER_TOKEN:-}
|
||||
- FORGE_BOT_USERNAMES=${FORGE_BOT_USERNAMES:-}
|
||||
- WOODPECKER_TOKEN=${WOODPECKER_TOKEN:-}
|
||||
- CLAUDE_TIMEOUT=${CLAUDE_TIMEOUT:-7200}
|
||||
|
|
|
|||
|
|
@ -16,7 +16,14 @@
|
|||
# - Bash creates the ops PR with pitch content
|
||||
# - Bash posts the ACCEPT/REJECT footer comment
|
||||
# Step 3: Sprint PR creation with questions (issue #101) (one PR per pitch)
|
||||
# Step 4: Answer parsing + sub-issue filing (issue #102)
|
||||
# Step 4: Post-merge sub-issue filing via filer-bot (#764)
|
||||
#
|
||||
# Permission model (#764):
|
||||
# architect-bot: READ-ONLY on project repo (GET issues/PRs/labels for context).
|
||||
# Cannot POST/PUT/PATCH/DELETE any project-repo resource.
|
||||
# Write access ONLY on ops repo (branches, PRs, comments).
|
||||
# filer-bot: issues:write on project repo. Files sub-issues from merged sprint
|
||||
# PRs via ops-filer pipeline. Adds in-progress label to vision issues.
|
||||
#
|
||||
# Architecture:
|
||||
# - Bash script (architect-run.sh) handles ALL state management
|
||||
|
|
@ -146,15 +153,32 @@ For each issue in ARCHITECT_TARGET_ISSUES, bash performs:
|
|||
## Recommendation
|
||||
<architect's assessment: worth it / defer / alternative approach>
|
||||
|
||||
## Sub-issues
|
||||
|
||||
<!-- filer:begin -->
|
||||
- id: <kebab-case-id>
|
||||
title: "vision(#N): <concise sub-issue title>"
|
||||
labels: [backlog]
|
||||
depends_on: []
|
||||
body: |
|
||||
## Goal
|
||||
<what this sub-issue accomplishes>
|
||||
## Acceptance criteria
|
||||
- [ ] <criterion>
|
||||
<!-- filer:end -->
|
||||
|
||||
IMPORTANT: Do NOT include design forks or questions yet. The pitch is a go/no-go
|
||||
decision for the human. Questions come only after acceptance.
|
||||
The ## Sub-issues block is parsed by the filer-bot pipeline after sprint PR merge.
|
||||
Each sub-issue between filer:begin/end markers becomes a Forgejo issue on the
|
||||
project repo. The filer appends a decomposed-from marker to each body automatically.
|
||||
|
||||
4. Bash creates PR:
|
||||
- Create branch: architect/sprint-{pitch-number}
|
||||
- Write sprint spec to sprints/{sprint-slug}.md
|
||||
- Create PR with pitch content as body
|
||||
- Post footer comment: "Reply ACCEPT to proceed with design questions, or REJECT: <reason> to decline."
|
||||
- Add in-progress label to vision issue
|
||||
- NOTE: in-progress label is added by filer-bot after sprint PR merge (#764)
|
||||
|
||||
Output:
|
||||
- One PR per vision issue (up to 3 per run)
|
||||
|
|
@ -185,6 +209,9 @@ This ensures approved PRs don't sit indefinitely without design conversation.
|
|||
Architecture:
|
||||
- Bash creates PRs during stateless pitch generation (step 2)
|
||||
- Model has no role in PR creation — no Forgejo API access
|
||||
- architect-bot is READ-ONLY on the project repo (#764) — all project-repo
|
||||
writes (sub-issue filing, in-progress label) are handled by filer-bot
|
||||
via the ops-filer pipeline after sprint PR merge
|
||||
- This step describes the PR format for reference
|
||||
|
||||
PR Format (created by bash):
|
||||
|
|
@ -201,64 +228,29 @@ PR Format (created by bash):
|
|||
- Head: architect/sprint-{pitch-number}
|
||||
- Footer comment: "Reply ACCEPT to proceed with design questions, or REJECT: <reason> to decline."
|
||||
|
||||
4. Add in-progress label to vision issue:
|
||||
- Look up label ID: GET /repos/{owner}/{repo}/labels
|
||||
- Add label: POST /repos/{owner}/{repo}/issues/{issue_number}/labels
|
||||
|
||||
After creating all PRs, signal PHASE:done.
|
||||
NOTE: in-progress label on the vision issue is added by filer-bot after sprint PR merge (#764).
|
||||
|
||||
## Forgejo API Reference
|
||||
## Forgejo API Reference (ops repo only)
|
||||
|
||||
All operations use the Forgejo API with Authorization: token ${FORGE_TOKEN} header.
|
||||
All operations use the ops repo Forgejo API with `Authorization: token ${FORGE_TOKEN}` header.
|
||||
architect-bot is READ-ONLY on the project repo — cannot POST/PUT/PATCH/DELETE project-repo resources (#764).
|
||||
|
||||
### Create branch
|
||||
### Create branch (ops repo)
|
||||
```
|
||||
POST /repos/{owner}/{repo}/branches
|
||||
POST /repos/{owner}/{repo-ops}/branches
|
||||
Body: {"new_branch_name": "architect/<sprint-slug>", "old_branch_name": "main"}
|
||||
```
|
||||
|
||||
### Create/update file
|
||||
### Create/update file (ops repo)
|
||||
```
|
||||
PUT /repos/{owner}/{repo}/contents/<path>
|
||||
PUT /repos/{owner}/{repo-ops}/contents/<path>
|
||||
Body: {"message": "sprint: add <sprint-slug>.md", "content": "<base64-encoded-content>", "branch": "architect/<sprint-slug>"}
|
||||
```
|
||||
|
||||
### Create PR
|
||||
### Create PR (ops repo)
|
||||
```
|
||||
POST /repos/{owner}/{repo}/pulls
|
||||
Body: {"title": "architect: <sprint summary>", "body": "<markdown-text>", "head": "architect/<sprint-slug>", "base": "main"}
|
||||
```
|
||||
|
||||
**Important: PR body format**
|
||||
- The body field must contain plain markdown text (the raw content from the model)
|
||||
- Do NOT JSON-encode or escape the body — pass it as a JSON string value
|
||||
- Newlines and markdown formatting (headings, lists, etc.) must be preserved as-is
|
||||
|
||||
### Add label to issue
|
||||
```
|
||||
POST /repos/{owner}/{repo}/issues/{index}/labels
|
||||
Body: {"labels": [<label-id>]}
|
||||
```
|
||||
|
||||
## Forgejo API Reference
|
||||
|
||||
All operations use the Forgejo API with `Authorization: token ${FORGE_TOKEN}` header.
|
||||
|
||||
### Create branch
|
||||
```
|
||||
POST /repos/{owner}/{repo}/branches
|
||||
Body: {"new_branch_name": "architect/<sprint-slug>", "old_branch_name": "main"}
|
||||
```
|
||||
|
||||
### Create/update file
|
||||
```
|
||||
PUT /repos/{owner}/{repo}/contents/<path>
|
||||
Body: {"message": "sprint: add <sprint-slug>.md", "content": "<base64-encoded-content>", "branch": "architect/<sprint-slug>"}
|
||||
```
|
||||
|
||||
### Create PR
|
||||
```
|
||||
POST /repos/{owner}/{repo}/pulls
|
||||
POST /repos/{owner}/{repo-ops}/pulls
|
||||
Body: {"title": "architect: <sprint summary>", "body": "<markdown-text>", "head": "architect/<sprint-slug>", "base": "main"}
|
||||
```
|
||||
|
||||
|
|
@ -267,30 +259,22 @@ Body: {"title": "architect: <sprint summary>", "body": "<markdown-text>", "head"
|
|||
- Do NOT JSON-encode or escape the body — pass it as a JSON string value
|
||||
- Newlines and markdown formatting (headings, lists, etc.) must be preserved as-is
|
||||
|
||||
### Close PR
|
||||
### Close PR (ops repo)
|
||||
```
|
||||
PATCH /repos/{owner}/{repo}/pulls/{index}
|
||||
PATCH /repos/{owner}/{repo-ops}/pulls/{index}
|
||||
Body: {"state": "closed"}
|
||||
```
|
||||
|
||||
### Delete branch
|
||||
### Delete branch (ops repo)
|
||||
```
|
||||
DELETE /repos/{owner}/{repo}/git/branches/<branch-name>
|
||||
DELETE /repos/{owner}/{repo-ops}/git/branches/<branch-name>
|
||||
```
|
||||
|
||||
### Get labels (look up label IDs by name)
|
||||
### Read-only on project repo (context gathering)
|
||||
```
|
||||
GET /repos/{owner}/{repo}/labels
|
||||
```
|
||||
|
||||
### Add label to issue (for in-progress on vision issue)
|
||||
```
|
||||
POST /repos/{owner}/{repo}/issues/{index}/labels
|
||||
Body: {"labels": [<label-id>]}
|
||||
```
|
||||
|
||||
### Remove label from issue (for in-progress removal on REJECT)
|
||||
```
|
||||
DELETE /repos/{owner}/{repo}/issues/{index}/labels/{label-id}
|
||||
GET /repos/{owner}/{repo}/issues — list issues
|
||||
GET /repos/{owner}/{repo}/issues/{number} — read issue details
|
||||
GET /repos/{owner}/{repo}/labels — list labels
|
||||
GET /repos/{owner}/{repo}/pulls — list PRs
|
||||
```
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -121,9 +121,10 @@ export FORGE_VAULT_TOKEN="${FORGE_VAULT_TOKEN:-${FORGE_TOKEN}}"
|
|||
export FORGE_SUPERVISOR_TOKEN="${FORGE_SUPERVISOR_TOKEN:-${FORGE_TOKEN}}"
|
||||
export FORGE_PREDICTOR_TOKEN="${FORGE_PREDICTOR_TOKEN:-${FORGE_TOKEN}}"
|
||||
export FORGE_ARCHITECT_TOKEN="${FORGE_ARCHITECT_TOKEN:-${FORGE_TOKEN}}"
|
||||
export FORGE_FILER_TOKEN="${FORGE_FILER_TOKEN:-${FORGE_TOKEN}}"
|
||||
|
||||
# Bot usernames filter
|
||||
export FORGE_BOT_USERNAMES="${FORGE_BOT_USERNAMES:-dev-bot,review-bot,planner-bot,gardener-bot,vault-bot,supervisor-bot,predictor-bot,architect-bot}"
|
||||
export FORGE_BOT_USERNAMES="${FORGE_BOT_USERNAMES:-dev-bot,review-bot,planner-bot,gardener-bot,vault-bot,supervisor-bot,predictor-bot,architect-bot,filer-bot}"
|
||||
|
||||
# Project config
|
||||
export FORGE_REPO="${FORGE_REPO:-}"
|
||||
|
|
|
|||
|
|
@ -819,8 +819,7 @@ build_prompt_footer() {
|
|||
Base URL: ${FORGE_API}
|
||||
Auth header: -H \"Authorization: token \${FORGE_TOKEN}\"
|
||||
Read issue: curl -sf -H \"Authorization: token \${FORGE_TOKEN}\" '${FORGE_API}/issues/{number}' | jq '.body'
|
||||
Create issue: curl -sf -X POST -H \"Authorization: token \${FORGE_TOKEN}\" -H 'Content-Type: application/json' '${FORGE_API}/issues' -d '{\"title\":\"...\",\"body\":\"...\",\"labels\":[LABEL_ID]}'${extra_api}
|
||||
List labels: curl -sf -H \"Authorization: token \${FORGE_TOKEN}\" '${FORGE_API}/labels'
|
||||
List labels: curl -sf -H \"Authorization: token \${FORGE_TOKEN}\" '${FORGE_API}/labels'${extra_api}
|
||||
NEVER echo or include the actual token value in output — always reference \${FORGE_TOKEN}.
|
||||
|
||||
## Environment
|
||||
|
|
|
|||
556
lib/sprint-filer.sh
Executable file
556
lib/sprint-filer.sh
Executable file
|
|
@ -0,0 +1,556 @@
|
|||
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# sprint-filer.sh — Parse merged sprint PRs and file sub-issues via filer-bot
|
||||
#
|
||||
# Invoked by the ops-filer Woodpecker pipeline after a sprint PR merges on the
|
||||
# ops repo main branch. Parses each sprints/*.md file for a structured
|
||||
# ## Sub-issues block (filer:begin/end markers), then creates idempotent
|
||||
# Forgejo issues on the project repo using FORGE_FILER_TOKEN.
|
||||
#
|
||||
# Permission model (#764):
|
||||
# filer-bot has issues:write on the project repo.
|
||||
# architect-bot is read-only on the project repo.
|
||||
#
|
||||
# Usage:
|
||||
# sprint-filer.sh <sprint-file.md> — file sub-issues from one sprint
|
||||
# sprint-filer.sh --all <sprints-dir> — scan all sprint files in dir
|
||||
#
|
||||
# Environment:
|
||||
# FORGE_FILER_TOKEN — filer-bot API token (issues:write on project repo)
|
||||
# FORGE_API — project repo API base (e.g. http://forgejo:3000/api/v1/repos/org/repo)
|
||||
# FORGE_API_BASE — API base URL (e.g. http://forgejo:3000/api/v1)
|
||||
# =============================================================================
|
||||
# Fail fast: exit on error, on unset variables, and on failed pipeline stages.
set -euo pipefail

# Absolute path of the directory containing this script (the lib/ dir).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Source env.sh only if not already loaded (allows standalone + sourced use)
if [ -z "${FACTORY_ROOT:-}" ]; then
  FACTORY_ROOT="$(dirname "$SCRIPT_DIR")"
  # shellcheck source=env.sh
  source "$SCRIPT_DIR/env.sh"
fi

# ── Logging ──────────────────────────────────────────────────────────────
# Agent tag used in log lines; callers may override LOG_AGENT before sourcing.
LOG_AGENT="${LOG_AGENT:-filer}"
|
||||
|
||||
# Emit one UTC-timestamped, agent-tagged diagnostic line on stderr.
filer_log() {
  local stamp
  stamp="$(date -u '+%Y-%m-%dT%H:%M:%SZ')"
  printf '[%s] %s: %s\n' "$stamp" "$LOG_AGENT" "$*" >&2
}
|
||||
|
||||
# ── Validate required environment ────────────────────────────────────────
# NOTE(review): these :? assertions run at load time, so sourcing this file
# (e.g. "sourced for testing", per the guard at the bottom) still aborts if
# the variables are unset — confirm the smoke test exports both.
: "${FORGE_FILER_TOKEN:?sprint-filer.sh requires FORGE_FILER_TOKEN}"
: "${FORGE_API:?sprint-filer.sh requires FORGE_API}"
|
||||
|
||||
# ── Parse sub-issues block from a sprint markdown file ───────────────────
# Extracts the text between <!-- filer:begin --> and <!-- filer:end -->.
# Args:    sprint_file_path
# Stdout:  the raw sub-issues block (YAML-like lines)
# Returns: 0 if a well-formed, non-empty block was found; 1 otherwise
parse_subissues_block() {
  local sprint_file="$1"

  if [ ! -f "$sprint_file" ]; then
    filer_log "ERROR: sprint file not found: ${sprint_file}"
    return 1
  fi

  local inside=false seen=false collected="" raw
  while IFS= read -r raw; do
    case "$raw" in
      *'<!-- filer:begin -->'*)
        inside=true
        seen=true
        ;;
      *'<!-- filer:end -->'*)
        inside=false
        ;;
      *)
        if [ "$inside" = true ]; then
          collected+="${raw}"$'\n'
        fi
        ;;
    esac
  done < "$sprint_file"

  if [ "$seen" = false ]; then
    filer_log "No filer:begin/end block found in ${sprint_file}"
    return 1
  fi

  # A begin marker without a matching end marker is treated as malformed.
  if [ "$inside" = true ]; then
    filer_log "ERROR: malformed sub-issues block in ${sprint_file} — filer:begin without filer:end"
    return 1
  fi

  if [ -z "$collected" ]; then
    filer_log "WARNING: empty sub-issues block in ${sprint_file}"
    return 1
  fi

  printf '%s' "$collected"
}
|
||||
|
||||
# ── Extract vision issue number from sprint file ─────────────────────────
# Prefers the first "#N" reference found inside the "## Vision issues"
# section (as the format documents); falls back to the first "#N" anywhere
# in the file so older sprint files keep working. The previous
# implementation only did the file-wide scan, so an unrelated "#N" (e.g. a
# PR reference in the intro) before the section could win.
# Args:    sprint_file_path
# Stdout:  vision issue number, or empty if none found
# Returns: 0 always (callers check for empty output)
extract_vision_issue() {
  local sprint_file="$1"
  local num

  # Scan only the "## Vision issues" section: start at its heading, stop at
  # the next "## " heading, print the first #N reference found.
  num=$(awk '
    /^## Vision issues/ { in_section = 1; next }
    in_section && /^## / { exit }
    in_section && match($0, /#[0-9]+/) {
      print substr($0, RSTART + 1, RLENGTH - 1)
      exit
    }
  ' "$sprint_file") || num=""

  # Fallback: first #N anywhere. The || true matters: under `set -o
  # pipefail` a no-match grep would otherwise fail the pipeline and abort
  # the caller before its "could not extract" diagnostic prints.
  if [ -z "$num" ]; then
    num=$(grep -oE '#[0-9]+' "$sprint_file" | head -1 | tr -d '#') || true
  fi

  printf '%s' "$num"
}
|
||||
|
||||
# ── Extract sprint slug from file path ───────────────────────────────────
# Args:   sprint_file_path
# Stdout: slug (the filename without its .md extension)
extract_sprint_slug() {
  local path="$1"
  local file="${path##*/}"     # drop any leading directory components
  printf '%s\n' "${file%.md}"  # drop the trailing .md suffix, if present
}
|
||||
|
||||
# ── Parse individual sub-issue entries from the block ────────────────────
# The block is a simple YAML-like format:
#   - id: foo
#     title: "..."
#     labels: [backlog, priority]
#     depends_on: [bar]
#     body: |
#       multi-line body
#
# Args: raw_block (via stdin)
# Output: JSON array of sub-issue objects
#
# NOTE(review): flush_entry() escapes the body but NOT the title — a title
# containing an embedded double quote would produce invalid JSON. Confirm
# the sprint format forbids that, or add escaping.
# NOTE(review): a non-body field line appearing AFTER a body block does not
# clear in_body, so later 4-space-indented lines would still be appended to
# body — assumes body is always the last field of an entry; verify.
parse_subissue_entries() {
  local block
  block=$(cat)

  # Use awk to parse the YAML-like structure into JSON
  printf '%s' "$block" | awk '
    BEGIN {
      printf "["
      first = 1
      in_body = 0
      id = ""; title = ""; labels = ""; depends = ""; body = ""
    }

    # Emit the accumulated entry (if any) as one JSON object and reset the
    # per-entry state. Called at each new "- id:" record and once in END.
    function flush_entry() {
      if (id == "") return
      if (!first) printf ","
      first = 0

      # Escape JSON special characters in body
      gsub(/\\/, "\\\\", body)
      gsub(/"/, "\\\"", body)
      gsub(/\t/, "\\t", body)
      # Replace newlines with \n for JSON
      gsub(/\n/, "\\n", body)
      # Remove trailing \n
      sub(/\\n$/, "", body)

      # Clean up title (remove surrounding quotes)
      gsub(/^"/, "", title)
      gsub(/"$/, "", title)

      printf "{\"id\":\"%s\",\"title\":\"%s\",\"labels\":%s,\"depends_on\":%s,\"body\":\"%s\"}", id, title, labels, depends, body

      id = ""; title = ""; labels = "[]"; depends = "[]"; body = ""
      in_body = 0
    }

    # New entry record: flush the previous one, capture the id, and default
    # the list fields to empty JSON arrays.
    /^- id:/ {
      flush_entry()
      sub(/^- id: */, "")
      id = $0
      labels = "[]"
      depends = "[]"
      next
    }

    /^  title:/ {
      sub(/^  title: */, "")
      title = $0
      # Remove surrounding quotes
      gsub(/^"/, "", title)
      gsub(/"$/, "", title)
      next
    }

    /^  labels:/ {
      sub(/^  labels: */, "")
      # Convert [a, b] to JSON array ["a","b"]
      gsub(/\[/, "", $0)
      gsub(/\]/, "", $0)
      n = split($0, arr, /, */)
      labels = "["
      for (i = 1; i <= n; i++) {
        gsub(/^ */, "", arr[i])
        gsub(/ *$/, "", arr[i])
        if (arr[i] != "") {
          if (i > 1) labels = labels ","
          labels = labels "\"" arr[i] "\""
        }
      }
      labels = labels "]"
      next
    }

    /^  depends_on:/ {
      sub(/^  depends_on: */, "")
      gsub(/\[/, "", $0)
      gsub(/\]/, "", $0)
      n = split($0, arr, /, */)
      depends = "["
      for (i = 1; i <= n; i++) {
        gsub(/^ */, "", arr[i])
        gsub(/ *$/, "", arr[i])
        if (arr[i] != "") {
          if (i > 1) depends = depends ","
          depends = depends "\"" arr[i] "\""
        }
      }
      depends = depends "]"
      next
    }

    # "body: |" starts a literal multi-line body block.
    /^  body: *\|/ {
      in_body = 1
      body = ""
      next
    }

    # Body continuation lines: 4-space indent is stripped, newline restored.
    in_body && /^    / {
      sub(/^    /, "")
      body = body $0 "\n"
      next
    }

    # Non-indented, non-blank line ends the body block.
    in_body && !/^    / && !/^$/ {
      in_body = 0
      # This line starts a new field or entry — re-process it
      # (awk does not support re-scanning, so handle common cases)
      if ($0 ~ /^- id:/) {
        flush_entry()
        sub(/^- id: */, "")
        id = $0
        labels = "[]"
        depends = "[]"
      }
    }

    END {
      flush_entry()
      printf "]"
    }
  '
}
|
||||
|
||||
# ── Check if sub-issue already exists (idempotency) ─────────────────────
# Searches for the decomposed-from marker in existing issues. Pages through
# the full issue list: the previous single request with a fixed limit=50
# only saw the newest 50 issues, so on a busy repo older sub-issues were
# missed and duplicates got filed.
# Args: vision_issue_number sprint_slug subissue_id
# Returns: 0 if already exists, 1 if not
subissue_exists() {
  local vision_issue="$1"
  local sprint_slug="$2"
  local subissue_id="$3"

  local marker="<!-- decomposed-from: #${vision_issue}, sprint: ${sprint_slug}, id: ${subissue_id} -->"

  local page=1
  local page_size=50
  local issues_json page_len
  while :; do
    # Best-effort fetch: treat any curl failure as an empty page.
    issues_json=$(curl -sf -H "Authorization: token ${FORGE_FILER_TOKEN}" \
      "${FORGE_API}/issues?state=all&limit=${page_size}&type=issues&page=${page}" 2>/dev/null) || issues_json="[]"

    page_len=$(printf '%s' "$issues_json" | jq 'length' 2>/dev/null) || page_len=0
    [ "$page_len" -gt 0 ] || break

    if printf '%s' "$issues_json" | jq -e --arg marker "$marker" \
      '[.[] | select(.body // "" | contains($marker))] | length > 0' >/dev/null 2>&1; then
      return 0 # Already exists
    fi

    # A short page is the last page.
    if [ "$page_len" -lt "$page_size" ]; then
      break
    fi
    page=$((page + 1))
  done

  return 1 # Does not exist
}
|
||||
|
||||
# ── Resolve label names to IDs ───────────────────────────────────────────
# Args:   label_names_json (JSON array of strings)
# Stdout: JSON array of label IDs; names with no matching repo label are
#         silently dropped. Emits [] on any fetch/parse failure.
resolve_label_ids() {
  local label_names_json="$1"

  # Fetch all labels from project repo (best-effort: [] on failure)
  local all_labels
  all_labels=$(curl -sf -H "Authorization: token ${FORGE_FILER_TOKEN}" \
    "${FORGE_API}/labels" 2>/dev/null) || all_labels="[]"

  # Single jq pass instead of spawning one jq process per label name.
  # map(f) drops elements for which f yields nothing, which preserves the
  # original "unknown names are skipped" behavior.
  printf '%s' "$label_names_json" | jq --argjson all "$all_labels" \
    'map(. as $n | $all[] | select(.name == $n) | .id)' 2>/dev/null \
    || printf '[]'
}
|
||||
|
||||
# ── Add in-progress label to vision issue ────────────────────────────────
# Looks up the repo's "in-progress" label ID and attaches it to the issue.
# Best-effort: returns 1 on any failure instead of aborting the caller.
# Args: vision_issue_number
add_inprogress_label() {
  local issue_num="$1"

  local repo_labels
  repo_labels=$(curl -sf -H "Authorization: token ${FORGE_FILER_TOKEN}" \
    "${FORGE_API}/labels" 2>/dev/null) || return 1

  # Resolve the "in-progress" label ID; empty means the label is missing.
  local label_id
  label_id=$(printf '%s' "$repo_labels" | jq -r '.[] | select(.name == "in-progress") | .id' 2>/dev/null) || true

  if [ -z "$label_id" ]; then
    filer_log "WARNING: in-progress label not found"
    return 1
  fi

  if ! curl -sf -X POST \
    -H "Authorization: token ${FORGE_FILER_TOKEN}" \
    -H "Content-Type: application/json" \
    "${FORGE_API}/issues/${issue_num}/labels" \
    -d "{\"labels\": [${label_id}]}" >/dev/null 2>&1; then
    filer_log "WARNING: failed to add in-progress label to vision issue #${issue_num}"
    return 1
  fi

  filer_log "Added in-progress label to vision issue #${issue_num}"
  return 0
}
|
||||
|
||||
# ── File sub-issues from a sprint file ───────────────────────────────────
# This is the main entry point. Parses the sprint file, extracts sub-issues,
# and creates them idempotently via the Forgejo API.
# Args: sprint_file_path
# Returns: 0 on success, 1 on any error (fail-fast)
file_subissues() {
  local sprint_file="$1"

  filer_log "Processing sprint file: ${sprint_file}"

  # Extract metadata
  local vision_issue sprint_slug
  vision_issue=$(extract_vision_issue "$sprint_file")
  sprint_slug=$(extract_sprint_slug "$sprint_file")

  if [ -z "$vision_issue" ]; then
    filer_log "ERROR: could not extract vision issue number from ${sprint_file}"
    return 1
  fi

  filer_log "Vision issue: #${vision_issue}, sprint slug: ${sprint_slug}"

  # Parse the sub-issues block
  local raw_block
  raw_block=$(parse_subissues_block "$sprint_file") || return 1

  # Parse individual entries
  local entries_json
  entries_json=$(printf '%s' "$raw_block" | parse_subissue_entries)

  # Validate parsing produced valid JSON
  if ! printf '%s' "$entries_json" | jq empty 2>/dev/null; then
    filer_log "ERROR: failed to parse sub-issues block as valid JSON in ${sprint_file}"
    return 1
  fi

  local entry_count
  entry_count=$(printf '%s' "$entries_json" | jq 'length')

  if [ "$entry_count" -eq 0 ]; then
    filer_log "WARNING: no sub-issue entries found in ${sprint_file}"
    return 1
  fi

  filer_log "Found ${entry_count} sub-issue(s) to file"

  # File each sub-issue (fail-fast on first error)
  local filed_count=0
  local i=0
  while [ "$i" -lt "$entry_count" ]; do
    # jq program interpolation of $i is safe: i is a shell-controlled integer.
    local entry
    entry=$(printf '%s' "$entries_json" | jq ".[$i]")

    local subissue_id subissue_title subissue_body labels_json
    subissue_id=$(printf '%s' "$entry" | jq -r '.id')
    subissue_title=$(printf '%s' "$entry" | jq -r '.title')
    subissue_body=$(printf '%s' "$entry" | jq -r '.body')
    labels_json=$(printf '%s' "$entry" | jq -c '.labels')

    # Schema validation: id and title are mandatory ("null" is jq -r's
    # rendering of a missing key).
    if [ -z "$subissue_id" ] || [ "$subissue_id" = "null" ]; then
      filer_log "ERROR: sub-issue entry at index ${i} has no id — aborting"
      return 1
    fi

    if [ -z "$subissue_title" ] || [ "$subissue_title" = "null" ]; then
      filer_log "ERROR: sub-issue '${subissue_id}' has no title — aborting"
      return 1
    fi

    # Idempotency check
    if subissue_exists "$vision_issue" "$sprint_slug" "$subissue_id"; then
      filer_log "Sub-issue '${subissue_id}' already exists — skipping"
      i=$((i + 1))
      continue
    fi

    # Append decomposed-from marker to body (blank line before the marker
    # keeps it out of the rendered markdown paragraph).
    local marker="<!-- decomposed-from: #${vision_issue}, sprint: ${sprint_slug}, id: ${subissue_id} -->"
    local full_body="${subissue_body}

${marker}"

    # Resolve label names to IDs
    local label_ids
    label_ids=$(resolve_label_ids "$labels_json")

    # Build issue payload using jq for safe JSON construction
    local payload
    payload=$(jq -n \
      --arg title "$subissue_title" \
      --arg body "$full_body" \
      --argjson labels "$label_ids" \
      '{title: $title, body: $body, labels: $labels}')

    # Create the issue
    local response
    response=$(curl -sf -X POST \
      -H "Authorization: token ${FORGE_FILER_TOKEN}" \
      -H "Content-Type: application/json" \
      "${FORGE_API}/issues" \
      -d "$payload" 2>/dev/null) || {
      filer_log "ERROR: failed to create sub-issue '${subissue_id}' — aborting (${filed_count}/${entry_count} filed so far)"
      return 1
    }

    local new_issue_num
    new_issue_num=$(printf '%s' "$response" | jq -r '.number // empty')
    filer_log "Filed sub-issue '${subissue_id}' as #${new_issue_num}: ${subissue_title}"

    filed_count=$((filed_count + 1))
    i=$((i + 1))
  done

  # Add in-progress label to the vision issue (best-effort; failure here
  # does not fail the run).
  add_inprogress_label "$vision_issue" || true

  filer_log "Successfully filed ${filed_count}/${entry_count} sub-issue(s) for sprint ${sprint_slug}"
  return 0
}
|
||||
|
||||
# ── Vision lifecycle: close completed vision issues ──────────────────────
# Checks open vision issues and closes any whose sub-issues are all closed.
# Uses the decomposed-from marker to find sub-issues.
# NOTE(review): both fetches use a single page (limit=100 / limit=200); a
# repo with more issues than that may miss sub-issues and leave visions
# open — consider pagination if that becomes real.
check_and_close_completed_visions() {
  filer_log "Checking for vision issues with all sub-issues complete..."

  local vision_issues_json
  vision_issues_json=$(curl -sf -H "Authorization: token ${FORGE_FILER_TOKEN}" \
    "${FORGE_API}/issues?labels=vision&state=open&limit=100" 2>/dev/null) || vision_issues_json="[]"

  if [ "$vision_issues_json" = "[]" ] || [ "$vision_issues_json" = "null" ]; then
    filer_log "No open vision issues found"
    return 0
  fi

  local all_issues
  all_issues=$(curl -sf -H "Authorization: token ${FORGE_FILER_TOKEN}" \
    "${FORGE_API}/issues?state=all&limit=200&type=issues" 2>/dev/null) || all_issues="[]"

  local vision_nums
  vision_nums=$(printf '%s' "$vision_issues_json" | jq -r '.[].number' 2>/dev/null) || return 0

  local closed_count=0
  while IFS= read -r vid; do
    [ -z "$vid" ] && continue

    # Find sub-issues with the decomposed-from marker for this vision.
    # The trailing "," is part of the marker format
    # ("<!-- decomposed-from: #N, sprint: ... -->") and is required here:
    # without it this was a prefix match, so vision #1 also collected the
    # sub-issues of #10, #11, #12, ... and could close #1 prematurely.
    local sub_issues
    sub_issues=$(printf '%s' "$all_issues" | jq --arg vid "$vid" \
      '[.[] | select(.body // "" | contains("<!-- decomposed-from: #" + $vid + ","))]')

    local sub_count
    sub_count=$(printf '%s' "$sub_issues" | jq 'length')

    # No sub-issues means not ready to close
    [ "$sub_count" -eq 0 ] && continue

    # Check if all are closed
    local open_count
    open_count=$(printf '%s' "$sub_issues" | jq '[.[] | select(.state != "closed")] | length')

    if [ "$open_count" -gt 0 ]; then
      continue
    fi

    # All sub-issues closed — close the vision issue
    filer_log "All ${sub_count} sub-issues for vision #${vid} are closed — closing vision"

    local comment_body="## Vision Issue Completed

All sub-issues have been implemented and merged. This vision issue is now closed.

---
*Automated closure by filer-bot · $(date -u '+%Y-%m-%d %H:%M UTC')*"

    local comment_payload
    comment_payload=$(jq -n --arg body "$comment_body" '{body: $body}')

    # Comment + close are best-effort; a failure on one vision must not
    # abort the sweep over the rest.
    curl -sf -X POST \
      -H "Authorization: token ${FORGE_FILER_TOKEN}" \
      -H "Content-Type: application/json" \
      "${FORGE_API}/issues/${vid}/comments" \
      -d "$comment_payload" >/dev/null 2>&1 || true

    curl -sf -X PATCH \
      -H "Authorization: token ${FORGE_FILER_TOKEN}" \
      -H "Content-Type: application/json" \
      "${FORGE_API}/issues/${vid}" \
      -d '{"state":"closed"}' >/dev/null 2>&1 || true

    closed_count=$((closed_count + 1))
  done <<< "$vision_nums"

  if [ "$closed_count" -gt 0 ]; then
    filer_log "Closed ${closed_count} vision issue(s)"
  fi
}
|
||||
|
||||
# ── Main ─────────────────────────────────────────────────────────────────
# Dispatch: "--all <dir>" scans every sprint file carrying a filer block;
# a single path files that sprint; no args prints usage and fails.
main() {
  case "${1:-}" in
    --all)
      local sprints_dir="${2:?Usage: sprint-filer.sh --all <sprints-dir>}"
      local exit_code=0
      local sprint_file

      for sprint_file in "${sprints_dir}"/*.md; do
        [ -f "$sprint_file" ] || continue

        # Only process files with filer:begin markers
        grep -q '<!-- filer:begin -->' "$sprint_file" || continue

        if ! file_subissues "$sprint_file"; then
          filer_log "ERROR: failed to process ${sprint_file}"
          exit_code=1
        fi
      done

      # Run vision lifecycle check after filing
      check_and_close_completed_visions || true

      return "$exit_code"
      ;;
    "")
      echo "Usage: sprint-filer.sh <sprint-file.md>" >&2
      echo "       sprint-filer.sh --all <sprints-dir>" >&2
      return 1
      ;;
    *)
      file_subissues "$1"
      # Run vision lifecycle check after filing
      check_and_close_completed_visions || true
      ;;
  esac
}
|
||||
|
||||
# Run main only when executed directly (not when sourced for testing):
# BASH_SOURCE[0] equals $0 only under direct execution; when sourced, $0 is
# the caller's script name.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  main "$@"
fi
|
||||
Loading…
Add table
Add a link
Reference in a new issue