From 8649b278a0d608af146a264a4b0c17a51a531395 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 03:56:07 +0000 Subject: [PATCH 1/7] fix: vision(#623): disinto-chat escalation tools (CI run, issue create, PR create) (#712) --- docker/chat/server.py | 264 ++++++++++++++++++++++++++++++++++++++ docker/chat/ui/index.html | 184 +++++++++++++++++++++++++- lib/generators.sh | 6 + 3 files changed, 453 insertions(+), 1 deletion(-) diff --git a/docker/chat/server.py b/docker/chat/server.py index ad8897d..42e7cf8 100644 --- a/docker/chat/server.py +++ b/docker/chat/server.py @@ -30,6 +30,8 @@ import secrets import subprocess import sys import time +import urllib.request +import urllib.error from http.server import HTTPServer, BaseHTTPRequestHandler from urllib.parse import urlparse, parse_qs, urlencode @@ -57,6 +59,14 @@ CHAT_MAX_REQUESTS_PER_HOUR = int(os.environ.get("CHAT_MAX_REQUESTS_PER_HOUR", 60 CHAT_MAX_REQUESTS_PER_DAY = int(os.environ.get("CHAT_MAX_REQUESTS_PER_DAY", 500)) CHAT_MAX_TOKENS_PER_DAY = int(os.environ.get("CHAT_MAX_TOKENS_PER_DAY", 1000000)) +# Action endpoints configuration (#712) +WOODPECKER_TOKEN = os.environ.get("WOODPECKER_TOKEN", "") +WOODPECKER_URL = os.environ.get("WOODPECKER_URL", "http://woodpecker:8000") +FORGE_TOKEN = os.environ.get("FORGE_TOKEN", "") +FORGE_URL = os.environ.get("FORGE_URL", "http://forgejo:3000") +FORGE_OWNER = os.environ.get("FORGE_OWNER", "") +FORGE_REPO = os.environ.get("FORGE_REPO", "") + # Allowed users - disinto-admin always allowed; CSV allowlist extends it _allowed_csv = os.environ.get("DISINTO_CHAT_ALLOWED_USERS", "") ALLOWED_USERS = {"disinto-admin"} @@ -423,6 +433,181 @@ def _delete_conversation(user, conv_id): return False +# ============================================================================= +# Action Endpoints (#712) +# ============================================================================= + +def _write_action_record(user, conv_id, action_type, payload, response_data): + """Write an 
action record to the conversation history.""" + record = { + "ts": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), + "user": user, + "role": "action", + "action_type": action_type, + "payload": payload, + "response": response_data, + } + conv_path = _get_conversation_path(user, conv_id) + _ensure_user_dir(user) + with open(conv_path, "a", encoding="utf-8") as f: + f.write(json.dumps(record, ensure_ascii=False) + "\n") + + +def _trigger_woodpecker_pipeline(repo, branch): + """Trigger a Woodpecker CI pipeline for the given repo and branch. + + Woodpecker API: POST /api/v1/repos/{owner}/{repo}/pipeline + Returns dict with success status and response data. + """ + if not WOODPECKER_TOKEN: + return {"success": False, "error": "WOODPECKER_TOKEN not configured"} + + if not FORGE_OWNER or not FORGE_REPO: + return {"success": False, "error": "FORGE_OWNER and FORGE_REPO not configured"} + + try: + url = f"{WOODPECKER_URL}/api/v1/repos/{FORGE_OWNER}/{FORGE_REPO}/pipeline" + data = json.dumps({"branch": branch, "event": "push"}).encode("utf-8") + + req = urllib.request.Request( + url, + data=data, + headers={ + "Authorization": f"token {WOODPECKER_TOKEN}", + "Content-Type": "application/json", + "Accept": "application/json", + }, + method="POST", + ) + + with urllib.request.urlopen(req, timeout=30) as resp: + result = json.loads(resp.read().decode()) + return {"success": True, "data": result} + + except urllib.error.HTTPError as e: + try: + body = json.loads(e.read().decode()) + return {"success": False, "error": str(e.reason), "details": body} + except (json.JSONDecodeError, UnicodeDecodeError): + return {"success": False, "error": str(e.reason)} + except urllib.error.URLError as e: + return {"success": False, "error": f"Network error: {e.reason}"} + except json.JSONDecodeError as e: + return {"success": False, "error": f"Invalid JSON response: {e}"} + except Exception as e: + return {"success": False, "error": str(e)} + + +def _create_forgejo_issue(title, body, 
labels=None): + """Create a Forgejo issue. + + Forgejo API: POST /api/v1/repos/{owner}/{repo}/issues + Returns dict with success status and response data. + """ + if not FORGE_TOKEN: + return {"success": False, "error": "FORGE_TOKEN not configured"} + + if not FORGE_OWNER or not FORGE_REPO: + return {"success": False, "error": "FORGE_OWNER and FORGE_REPO not configured"} + + if not title: + return {"success": False, "error": "Title is required"} + + try: + url = f"{FORGE_URL}/api/v1/repos/{FORGE_OWNER}/{FORGE_REPO}/issues" + payload = { + "title": title, + "body": body or "", + } + if labels: + payload["labels"] = labels + + data = json.dumps(payload).encode("utf-8") + + req = urllib.request.Request( + url, + data=data, + headers={ + "Authorization": f"token {FORGE_TOKEN}", + "Content-Type": "application/json", + "Accept": "application/json", + }, + method="POST", + ) + + with urllib.request.urlopen(req, timeout=30) as resp: + result = json.loads(resp.read().decode()) + return {"success": True, "data": result} + + except urllib.error.HTTPError as e: + try: + body = json.loads(e.read().decode()) + return {"success": False, "error": str(e.reason), "details": body} + except (json.JSONDecodeError, UnicodeDecodeError): + return {"success": False, "error": str(e.reason)} + except urllib.error.URLError as e: + return {"success": False, "error": f"Network error: {e.reason}"} + except json.JSONDecodeError as e: + return {"success": False, "error": f"Invalid JSON response: {e}"} + except Exception as e: + return {"success": False, "error": str(e)} + + +def _create_forgejo_pull_request(head, base, title, body=None): + """Create a Forgejo pull request. + + Forgejo API: POST /api/v1/repos/{owner}/{repo}/pulls + Returns dict with success status and response data. 
+ """ + if not FORGE_TOKEN: + return {"success": False, "error": "FORGE_TOKEN not configured"} + + if not FORGE_OWNER or not FORGE_REPO: + return {"success": False, "error": "FORGE_OWNER and FORGE_REPO not configured"} + + if not head or not base or not title: + return {"success": False, "error": "head, base, and title are required"} + + try: + url = f"{FORGE_URL}/api/v1/repos/{FORGE_OWNER}/{FORGE_REPO}/pulls" + payload = { + "head": head, + "base": base, + "title": title, + "body": body or "", + } + + data = json.dumps(payload).encode("utf-8") + + req = urllib.request.Request( + url, + data=data, + headers={ + "Authorization": f"token {FORGE_TOKEN}", + "Content-Type": "application/json", + "Accept": "application/json", + }, + method="POST", + ) + + with urllib.request.urlopen(req, timeout=30) as resp: + result = json.loads(resp.read().decode()) + return {"success": True, "data": result} + + except urllib.error.HTTPError as e: + try: + body = json.loads(e.read().decode()) + return {"success": False, "error": str(e.reason), "details": body} + except (json.JSONDecodeError, UnicodeDecodeError): + return {"success": False, "error": str(e.reason)} + except urllib.error.URLError as e: + return {"success": False, "error": f"Network error: {e.reason}"} + except json.JSONDecodeError as e: + return {"success": False, "error": f"Invalid JSON response: {e}"} + except Exception as e: + return {"success": False, "error": str(e)} + + class ChatHandler(BaseHTTPRequestHandler): """HTTP request handler for disinto-chat with Forgejo OAuth.""" @@ -549,6 +734,16 @@ class ChatHandler(BaseHTTPRequestHandler): parsed = urlparse(self.path) path = parsed.path + # Action endpoints (#712) + if path in ("/chat/action/ci-run", "/chat/action/issue-create", "/chat/action/pr-create"): + user = self._require_session() + if not user: + return + if not self._check_forwarded_user(user): + return + self.handle_action(user, path) + return + # New conversation endpoint (session required) if path == 
"/chat/new": user = self._require_session() @@ -901,6 +1096,75 @@ class ChatHandler(BaseHTTPRequestHandler): self.end_headers() self.wfile.write(json.dumps({"conversation_id": conv_id}, ensure_ascii=False).encode("utf-8")) + def handle_action(self, user, path): + """Handle action requests (ci-run, issue-create, pr-create).""" + # Determine action type from path + action_type = path.replace("/chat/action/", "") + + # Read request body + content_length = int(self.headers.get("Content-Length", 0)) + if content_length == 0: + self.send_error_page(400, "No request body provided") + return + + body = self.rfile.read(content_length) + try: + request_data = json.loads(body.decode("utf-8")) + except json.JSONDecodeError: + self.send_error_page(400, "Invalid JSON in request body") + return + + # Get conversation ID from request or session + conv_id = request_data.get("conversation_id") + if not conv_id or not _validate_conversation_id(conv_id): + # Fall back to session-based conversation if available + # For now, we'll use a default or generate one + conv_id = request_data.get("conversation_id") + if not conv_id: + self.send_error_page(400, "conversation_id is required") + return + + # Route to appropriate handler + if action_type == "ci-run": + repo = request_data.get("repo") + branch = request_data.get("branch") + if not repo or not branch: + self.send_error_page(400, "repo and branch are required for ci-run") + return + payload = {"repo": repo, "branch": branch} + result = _trigger_woodpecker_pipeline(repo, branch) + elif action_type == "issue-create": + title = request_data.get("title") + body_text = request_data.get("body", "") + labels = request_data.get("labels", []) + if not title: + self.send_error_page(400, "title is required for issue-create") + return + payload = {"title": title, "body": body_text, "labels": labels} + result = _create_forgejo_issue(title, body_text, labels) + elif action_type == "pr-create": + head = request_data.get("head") + base = 
request_data.get("base") + title = request_data.get("title") + body_text = request_data.get("body", "") + if not head or not base or not title: + self.send_error_page(400, "head, base, and title are required for pr-create") + return + payload = {"head": head, "base": base, "title": title, "body": body_text} + result = _create_forgejo_pull_request(head, base, title, body_text) + else: + self.send_error_page(404, f"Unknown action type: {action_type}") + return + + # Log the action to history + _write_action_record(user, conv_id, action_type, payload, result) + + # Send response + self.send_response(200) + self.send_header("Content-Type", "application/json; charset=utf-8") + self.end_headers() + self.wfile.write(json.dumps(result, ensure_ascii=False).encode("utf-8")) + def do_DELETE(self): """Handle DELETE requests.""" parsed = urlparse(self.path) diff --git a/docker/chat/ui/index.html b/docker/chat/ui/index.html index bd920f9..a11e632 100644 --- a/docker/chat/ui/index.html +++ b/docker/chat/ui/index.html @@ -161,6 +161,56 @@ white-space: pre-wrap; word-wrap: break-word; } + /* Action button container */ + .action-buttons { + margin-top: 0.75rem; + display: flex; + flex-wrap: wrap; + gap: 0.5rem; + } + .action-btn { + background: #0f3460; + border: 1px solid #e94560; + color: #e94560; + padding: 0.5rem 1rem; + border-radius: 6px; + font-size: 0.875rem; + font-weight: 600; + cursor: pointer; + transition: all 0.2s; + display: inline-flex; + align-items: center; + gap: 0.5rem; + } + .action-btn:hover { + background: #e94560; + color: white; + } + .action-btn:disabled { + opacity: 0.5; + cursor: not-allowed; + } + .action-btn .spinner { + width: 14px; + height: 14px; + border: 2px solid currentColor; + border-top-color: transparent; + border-radius: 50%; + animation: spin 0.8s linear infinite; + } + @keyframes spin { + to { transform: rotate(360deg); } + } + .action-btn.success { + background: #1a1a2e; + border-color: #4ade80; + color: #4ade80; + } + .action-btn.error { 
+ background: #1a1a2e; + border-color: #f87171; + color: #f87171; + } .input-area { display: flex; gap: 0.5rem; @@ -404,11 +454,28 @@ function addMessage(role, content, streaming = false) { const msgDiv = document.createElement('div'); msgDiv.className = `message ${role}`; + + // Parse action markers if this is an assistant message + let contentHtml = escapeHtml(content); + let actions = []; + + if (role === 'assistant' && !streaming) { + const parsed = parseActionMarkers(content, messagesDiv.children.length); + contentHtml = parsed.html; + actions = parsed.actions; + } + msgDiv.innerHTML = `
${role}
-
${escapeHtml(content)}
+
${contentHtml}
`; messagesDiv.appendChild(msgDiv); + + // Render action buttons for assistant messages + if (actions.length > 0) { + renderActionButtons(msgDiv, actions, messagesDiv.children.length - 1); + } + messagesDiv.scrollTop = messagesDiv.scrollHeight; return msgDiv.querySelector('.content'); } @@ -430,6 +497,121 @@ return div.innerHTML.replace(/\n/g, '
'); } + // Action buttons state - track pending actions by message index + const pendingActions = new Map(); + + // Parse action markers from content and return HTML with action buttons + function parseActionMarkers(content, messageIndex) { + const actionPattern = /(.*?)<\/action>/gs; + const hasActions = actionPattern.test(content); + + if (!hasActions) { + return { html: escapeHtml(content), actions: [] }; + } + + // Reset pending actions for this message + pendingActions.set(messageIndex, []); + + let html = content; + const actions = []; + + // Replace action markers with placeholders and collect actions + html = html.replace(actionPattern, (match, type, jsonStr) => { + try { + const action = JSON.parse(jsonStr); + actions.push({ type, payload: action, id: `${messageIndex}-${actions.length}` }); + // Replace with placeholder that will be rendered as button + return `
`; + } catch (e) { + // If JSON parsing fails, keep the original marker + return match; + } + }); + + // Convert newlines to
for HTML output + html = html.replace(/\n/g, '
'); + + return { html, actions }; + } + + // Render action buttons for a message + function renderActionButtons(msgDiv, actions, messageIndex) { + if (actions.length === 0) return; + + const buttonsDiv = document.createElement('div'); + buttonsDiv.className = 'action-buttons'; + + actions.forEach(action => { + const btn = document.createElement('button'); + btn.className = 'action-btn'; + btn.dataset.actionId = action.id; + btn.dataset.messageIndex = messageIndex; + + let btnText = 'Execute'; + let icon = ''; + + switch (action.type) { + case 'ci-run': + icon = 'πŸš€'; + btnText = `Run CI for ${action.payload.branch || 'default'}`; + break; + case 'issue-create': + icon = 'πŸ“'; + btnText = `Create Issue: ${action.payload.title ? action.payload.title.substring(0, 30) + (action.payload.title.length > 30 ? '...' : '') : 'New Issue'}`; + break; + case 'pr-create': + icon = 'πŸ”€'; + btnText = `Create PR: ${action.payload.title ? action.payload.title.substring(0, 30) + (action.payload.title.length > 30 ? '...' 
: '') : 'New PR'}`; + break; + default: + btnText = `Execute ${action.type}`; + } + + btn.innerHTML = `${icon}${btnText}`; + + btn.addEventListener('click', () => executeAction(btn, action)); + buttonsDiv.appendChild(btn); + }); + + msgDiv.appendChild(buttonsDiv); + } + + // Execute an action + async function executeAction(btn, action) { + const messageIndex = btn.dataset.messageIndex; + const actionId = btn.dataset.actionId; + + // Disable button and show loading state + btn.disabled = true; + btn.innerHTML = ' Executing...'; + + try { + const response = await fetch(`/chat/action/${action.type}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + ...action.payload, + conversation_id: currentConversationId, + }), + }); + + const result = await response.json(); + + if (result.success) { + btn.className = 'action-btn success'; + btn.innerHTML = 'βœ“ Executed successfully'; + } else { + btn.className = 'action-btn error'; + btn.innerHTML = `βœ— Error: ${result.error || 'Unknown error'}`; + } + } catch (error) { + btn.className = 'action-btn error'; + btn.innerHTML = `βœ— Error: ${error.message}`; + } + } + // Send message handler async function sendMessage() { const message = textarea.value.trim(); diff --git a/lib/generators.sh b/lib/generators.sh index 72f030e..9ad0f2a 100644 --- a/lib/generators.sh +++ b/lib/generators.sh @@ -518,6 +518,12 @@ services: CHAT_MAX_REQUESTS_PER_HOUR: ${CHAT_MAX_REQUESTS_PER_HOUR:-60} CHAT_MAX_REQUESTS_PER_DAY: ${CHAT_MAX_REQUESTS_PER_DAY:-500} CHAT_MAX_TOKENS_PER_DAY: ${CHAT_MAX_TOKENS_PER_DAY:-1000000} + # Action endpoints (#712) + WOODPECKER_TOKEN: ${WOODPECKER_TOKEN:-} + WOODPECKER_URL: http://woodpecker:8000 + FORGE_TOKEN: ${FORGE_TOKEN:-} + FORGE_OWNER: ${FORGE_OWNER:-} + FORGE_REPO: ${FORGE_REPO:-} networks: - disinto-net From aeda17a601c68ca9a9b8744049567493caaa9f24 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 04:08:10 +0000 Subject: [PATCH 2/7] chore: gardener 
housekeeping 2026-04-12 --- AGENTS.md | 2 +- architect/AGENTS.md | 2 +- dev/AGENTS.md | 2 +- gardener/AGENTS.md | 2 +- gardener/pending-actions.json | 23 +---------------------- lib/AGENTS.md | 4 ++-- planner/AGENTS.md | 2 +- predictor/AGENTS.md | 2 +- review/AGENTS.md | 2 +- supervisor/AGENTS.md | 2 +- 10 files changed, 11 insertions(+), 32 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 31608dc..5320432 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,4 +1,4 @@ - + # Disinto β€” Agent Instructions ## What this repo is diff --git a/architect/AGENTS.md b/architect/AGENTS.md index 7461740..ebf38eb 100644 --- a/architect/AGENTS.md +++ b/architect/AGENTS.md @@ -1,4 +1,4 @@ - + # Architect β€” Agent Instructions ## What this agent is diff --git a/dev/AGENTS.md b/dev/AGENTS.md index e463ba9..a870d1d 100644 --- a/dev/AGENTS.md +++ b/dev/AGENTS.md @@ -1,4 +1,4 @@ - + # Dev Agent **Role**: Implement issues autonomously β€” write code, push branches, address diff --git a/gardener/AGENTS.md b/gardener/AGENTS.md index 8d82491..9f1627c 100644 --- a/gardener/AGENTS.md +++ b/gardener/AGENTS.md @@ -1,4 +1,4 @@ - + # Gardener Agent **Role**: Backlog grooming β€” detect duplicate issues, missing acceptance diff --git a/gardener/pending-actions.json b/gardener/pending-actions.json index c73fea3..fe51488 100644 --- a/gardener/pending-actions.json +++ b/gardener/pending-actions.json @@ -1,22 +1 @@ -[ - { - "action": "edit_body", - "issue": 710, - "body": "## Goal\n\nDecide and implement a conversation history persistence model for `disinto-chat`. MVP target: append-only per-user NDJSON files on a bind-mounted host volume, one file per conversation, with a simple history list endpoint and sidebar in the UI.\n\n## Why\n\n- Without history, every page refresh loses context. Claude is stateless per invocation; the chat UI is what makes it feel like a conversation.\n- A full database with search is overkill for a personal / small-team factory (#623 security posture). 
Flat files are enough and recoverable by `cat`.\n\n## Scope\n\n### Files to touch\n\n- `lib/generators.sh` chat service:\n - Add a writable bind mount `${CHAT_HISTORY_DIR:-./state/chat-history}:/var/lib/chat/history` (one per-project host path; compose already pins the project root).\n - Must coexist with #706's read-only rootfs (this is a separate mount, not part of rootfs β€” sanity-check the sandbox verify script still passes).\n- `docker/chat/server.{py,go}`:\n - On each `POST /chat`, append one NDJSON line `{ts, user, role, content}` to `/var/lib/chat/history//.ndjson`.\n - `GET /chat/history` β†’ returns the list of conversation ids and first-message previews for the logged-in user.\n - `GET /chat/history/` β†’ returns the full conversation for the logged-in user; 404 if the file belongs to another user.\n - New conversation: `POST /chat/new` β†’ generates a fresh conversation_id (random 12-char hex) and returns it.\n - UI: sidebar with conversation list, \"new chat\" button, load history into the log on click.\n- File naming: `/.ndjson` β€” user-scoped directory prevents cross-user leakage even if a bug leaks ids. `conversation_id` must match `^[0-9a-f]{12}$`, no slashes allowed.\n\n### Out of scope\n\n- Full-text search.\n- Database / SQLite.\n- History retention / rotation β€” unbounded for now.\n\n### In scope explicitly\n\n- Replaying prior turns back into the `claude --print` subprocess for follow-up turns: the backend must feed the prior NDJSON lines back into claude via whatever convention the agent code uses. 
Cross-check `docker/agents/entrypoint.sh` for how agents pass conversation state.\n\n## Acceptance\n\n- [ ] Sending 3 messages, refreshing the page, and clicking the conversation in the sidebar re-loads all 3 messages.\n- [ ] A new conversation starts with an empty context and does not see prior messages.\n- [ ] `ls state/chat-history/disinto-admin/` on the host shows one NDJSON file per conversation, each line is valid JSON.\n- [ ] A second user logging in via the #708 allowlist sees only their own conversations.\n- [ ] History endpoints are blocked for unauthenticated requests (inherits #708 / #709 auth).\n\n## Depends on\n\n- #705 (chat scaffold).\n\n## Notes\n\n- NDJSON, not JSON-array: append is O(1) and partial writes never corrupt prior lines. Mirrors the factory's CI log format at `lib/ci-log-reader.py`.\n- Per-user directory, not a single shared dir β€” path traversal via a crafted `conversation_id` is the main risk. The strict regex above is the mitigation.\n\n## Boundaries for dev-agent\n\n- Do not add SQLite, Postgres, or any database. Files.\n- Do not invent a conversation replay system. Whatever `claude --print` / the agents already do for context is the baseline β€” match it.\n- Do not store history inside the container's tmpfs β€” it has to survive container restarts.\n- Parent vision: #623.\n\n## Affected files\n- `lib/generators.sh` β€” chat service bind mount CHAT_HISTORY_DIR:/var/lib/chat/history\n- `docker/chat/server.{py,go}` β€” NDJSON append on POST /chat; GET /chat/history; GET /chat/history/; POST /chat/new" - }, - { - "action": "edit_body", - "issue": 711, - "body": "## Goal\n\nAdd per-user cost and request caps to `disinto-chat` so a compromised session (or a wedged browser tab firing requests in a loop) cannot run up an unbounded Anthropic bill or starve the agents' token budget.\n\n## Why\n\n- #623 \"Open questions\" explicitly calls this out. 
Chat is the only user-facing surface that spawns Claude on demand; no other factory surface does.\n- Cheap to enforce (counter + bash-style dict), expensive to forget.\n\n## Scope\n\n### Files to touch\n\n- `docker/chat/server.{py,go}`:\n - Per-user sliding-window request counter: `CHAT_MAX_REQUESTS_PER_HOUR` (default `60`), `CHAT_MAX_REQUESTS_PER_DAY` (default `500`).\n - Per-user token-cost counter: after each `claude --print`, parse the final `usage` event from `--output-format stream-json` if present; track cumulative tokens per day; reject if over `CHAT_MAX_TOKENS_PER_DAY` (default `1000000`).\n - Counters stored in-memory; reset on container restart (acceptable for MVP; file-based persistence is a follow-up).\n - Rejection response: 429 with `Retry-After` header and a friendly HTMX fragment explaining which cap was hit.\n- `lib/generators.sh` chat env: expose the three caps as overridable env vars with sane defaults baked in.\n\n### Out of scope\n\n- Billing dashboard.\n- Cross-container token budget coordination with the agents.\n- Cost tracking via Anthropic's billing API (not stable enough to depend on).\n\n## Acceptance\n\n- [ ] Sending 61 requests in an hour trips the hourly cap and returns 429 with `Retry-After: `.\n- [ ] A single large completion that pushes daily tokens over the cap blocks the *next* request, not the current one (atomic check-then-consume is OK to skip for MVP).\n- [ ] Resetting the container clears counters (verified manually).\n- [ ] Caps are configurable via `.env` without rebuilding the image.\n\n## Depends on\n\n- #705 (chat scaffold).\n\n## Notes\n\n- Token accounting from `claude --print`: the stream-json mode emits a final `usage` event. 
If that event is absent or its format changes, fall back to a coarse request count only β€” do not block the user on parsing failures.\n- `Retry-After` must be an integer seconds value, not an HTTP-date, for HTMX to handle it cleanly client-side.\n\n## Boundaries for dev-agent\n\n- Do not add a rate-limiting library. A dict + timestamp list is sufficient for three counters.\n- Do not persist counters to disk this chunk. In-memory is the contract.\n- Do not block requests on Anthropic's own rate limiter. That is retried by `claude` itself; this layer is about *cost*, not throttling.\n- Parent vision: #623.\n\n## Affected files\n- `docker/chat/server.{py,go}` β€” per-user sliding-window request counter and token-cost counter; 429 rejection with Retry-After header\n- `lib/generators.sh` β€” chat env: CHAT_MAX_REQUESTS_PER_HOUR, CHAT_MAX_REQUESTS_PER_DAY, CHAT_MAX_TOKENS_PER_DAY" - }, - { - "action": "edit_body", - "issue": 712, - "body": "## Goal\n\nLet `disinto-chat` perform scoped write actions against the factory β€” specifically: trigger a Woodpecker CI run, create a Forgejo issue, create a Forgejo PR β€” via explicit backend endpoints. The UI surfaces these as buttons the user clicks from a chat turn that proposes an action. 
The model never holds API tokens directly.\n\n## Why\n\n- #623 lists these escalations as the difference between \"chat that talks about the project\" and \"chat that moves the project forward\".\n- Routing through explicit backend endpoints (instead of giving the sandboxed claude process API tokens) keeps the trust model tight: the *user* authorises each action, not the model.\n\n## Scope\n\n### Files to touch\n\n- `docker/chat/server.{py,go}` β€” new authenticated endpoints (reuse #708 / #709 session check):\n - `POST /chat/action/ci-run` β€” body `{repo, branch}` β†’ calls Woodpecker API with `WOODPECKER_TOKEN` (already in `.env` from existing factory setup) to trigger a pipeline.\n - `POST /chat/action/issue-create` β€” body `{title, body, labels}` β†’ calls Forgejo API `/repos///issues` with `FORGE_TOKEN`.\n - `POST /chat/action/pr-create` β€” body `{head, base, title, body}` β†’ calls `/repos///pulls`.\n - All actions record to #710's NDJSON history as `{role: \"action\", ...}` lines.\n- `docker/chat/ui/index.html` β€” small HTMX pattern: when claude's response contains a marker like `{...}`, render a clickable button below the message; clicking POSTs to `/chat/action/` with the payload.\n- `lib/generators.sh` chat env: pass `WOODPECKER_TOKEN`, `FORGE_TOKEN`, `FORGE_URL`, `FORGE_OWNER`, `FORGE_REPO`.\n\n### Out of scope\n\n- Destructive actions (branch delete, force push, secret rotation) β€” deliberately excluded.\n- Multi-step workflows / approval chains.\n- Arbitrary code execution in the chat container (that is what the agents exist for).\n\n## Acceptance\n\n- [ ] A chat turn that emits an `{...}` block renders a button; clicking it creates an issue on Forgejo, visible via the API.\n- [ ] CI-trigger action creates a Woodpecker pipeline that can be seen in the CI UI.\n- [ ] PR-create action produces a Forgejo PR with the specified head / base.\n- [ ] All three actions are logged into the #710 history file with role `action` and the response from the API 
call.\n- [ ] Unauthenticated requests to `/chat/action/*` return 401 (inherits #708 gate).\n\n## Depends on\n\n- #708 (OAuth gate β€” actions are authorised by the logged-in user).\n- #710 (history β€” actions need to be logged alongside chat turns).\n\n## Notes\n\n- Forgejo API auth: the factory's `FORGE_TOKEN` is a long-lived admin token. For MVP, reuse it; a follow-up issue can scope it down to per-user Forgejo tokens derived from the OAuth flow.\n- Woodpecker API is at `http://woodpecker:8000/api/...`, reachable via the compose network β€” no need to go through the edge container.\n- The `` marker is deliberately simple markup the model can emit in its response text. Do not implement tool-calling protocol; do not spin up an MCP server.\n\n## Boundaries for dev-agent\n\n- Do not give the claude subprocess direct API tokens. The chat backend holds them; the model only emits action markers the user clicks.\n- Do not add destructive actions (delete, force-push). Additive only.\n- Do not invent a new markup format beyond `{JSON}`.\n- Parent vision: #623.\n\n## Affected files\n- `docker/chat/server.{py,go}` β€” new endpoints: POST /chat/action/ci-run, POST /chat/action/issue-create, POST /chat/action/pr-create\n- `docker/chat/ui/index.html` β€” HTMX pattern for action buttons triggered by markers\n- `lib/generators.sh` β€” chat env: WOODPECKER_TOKEN, FORGE_TOKEN, FORGE_URL, FORGE_OWNER, FORGE_REPO" - }, - { - "action": "edit_body", - "issue": 713, - "body": "## Goal\n\nContingency track: if the subpath routing + Forgejo OAuth combination from #704 and #708 proves unworkable (redirect loops, Forgejo `ROOT_URL` quirks, etc.), provide a documented fallback using per-service subdomains (`forge..disinto.ai`, `ci..disinto.ai`, `chat..disinto.ai`) under the same wildcard cert.\n\n## Why\n\n- #623 Scope Highlights mentions this as the fallback if subpath OAuth fails.\n- Documenting the fallback up front means we can pivot without a days-long investigation when the subpath 
approach hits a wall.\n- The wildcard cert from #621 already covers `*.disinto.ai` at no extra cost.\n\n## Scope\n\nThis issue is a **plan + small toggle**, not a full implementation. Implementation only happens if #704 or #708 get stuck.\n\n### Files to touch\n\n- `docs/edge-routing-fallback.md` (new) β€” documents the fallback topology, diffing concretely against #704 / #708:\n - Caddyfile: four separate host blocks (`.disinto.ai`, `forge....`, `ci....`, `chat....`), each a single `reverse_proxy` to the container.\n - Forgejo `ROOT_URL` becomes `https://forge..disinto.ai/` (root path, not subpath).\n - Woodpecker `WOODPECKER_HOST` becomes `https://ci..disinto.ai`.\n - OAuth redirect URIs (chat, woodpecker) become sub-subdomain paths.\n - DNS: all handled by the existing wildcard; no new records.\n- `lib/generators.sh` β€” no code change until pivot; document the env vars that would need to change (`EDGE_TUNNEL_FQDN_FORGE`, etc.) in a comment near `generate_compose`.\n- `tools/edge-control/register.sh` (from #621) β€” leave a TODO comment noting the fallback shape would need an additional subdomain parameter per project.\n\n### Out of scope (unless pivot)\n\n- Actually implementing the fallback β€” gated on #704 / #708 failing.\n\n## Acceptance\n\n- [ ] `docs/edge-routing-fallback.md` exists and is concrete enough that a follow-up PR to pivot would take under a day.\n- [ ] The doc names exactly which files / lines each pivot would touch (Caddyfile, `lib/generators.sh`, `lib/ci-setup.sh` redirect URI).\n- [ ] A pivot decision criterion is written into the doc: \"pivot if , not if \".\n\n## Depends on\n\n- None β€” can be written in parallel to #704 / #708.\n\n## Notes\n\n- Keep the doc short. This is a pressure-release valve, not a parallel architecture.\n- Whichever chunk is implementing subpaths first should update this doc if they hit a blocker so the pivot decision is informed.\n\n## Boundaries for dev-agent\n\n- This is a documentation chunk. 
Do not implement the fallback unless someone explicitly says to pivot.\n- Do not make the main chunks \"fallback-ready\" β€” that is over-engineering for a contingency.\n- Parent vision: #623.\n\n## Affected files\n- `docs/edge-routing-fallback.md` (new) β€” fallback topology doc with Caddyfile, Forgejo ROOT_URL, Woodpecker HOST, OAuth redirect URI changes\n- `lib/generators.sh` β€” comment near generate_compose documenting env vars that change on pivot\n- `tools/edge-control/register.sh` β€” TODO comment noting fallback shape needs additional subdomain parameter" - } -] +[] diff --git a/lib/AGENTS.md b/lib/AGENTS.md index 4bffcb4..b4b32b5 100644 --- a/lib/AGENTS.md +++ b/lib/AGENTS.md @@ -1,4 +1,4 @@ - + # Shared Helpers (`lib/`) All agents source `lib/env.sh` as their first action. Additional helpers are @@ -30,6 +30,6 @@ sourced as needed. | `lib/git-creds.sh` | Shared git credential helper configuration. `configure_git_creds([HOME_DIR] [RUN_AS_CMD])` β€” writes a static credential helper script and configures git globally to use password-based HTTP auth (Forgejo 11.x rejects API tokens for `git push`, #361). `repair_baked_cred_urls([--as RUN_AS_CMD] DIR ...)` β€” rewrites any git remote URLs that have credentials baked in to use clean URLs instead; uses `safe.directory` bypass for root-owned repos (#671). Requires `FORGE_PASS`, `FORGE_URL`, `FORGE_TOKEN`. | entrypoints (agents, edge) | | `lib/ops-setup.sh` | `setup_ops_repo()` β€” creates ops repo on Forgejo if it doesn't exist, configures bot collaborators, clones/initializes ops repo locally, seeds directory structure (vault, knowledge, evidence, sprints). Evidence subdirectories seeded: engagement/, red-team/, holdout/, evolution/, user-test/. Also seeds sprints/ for architect output. Exports `_ACTUAL_OPS_SLUG`. `migrate_ops_repo(ops_root, [primary_branch])` β€” idempotent migration helper that seeds missing directories and .gitkeep files on existing ops repos (pre-#407 deployments). 
| bin/disinto (init) | | `lib/ci-setup.sh` | `_install_cron_impl()` β€” installs crontab entries for bare-metal deployments (compose mode uses polling loop instead). `_create_forgejo_oauth_app()` β€” generic helper to create an OAuth2 app on Forgejo (shared by Woodpecker and chat). `_create_woodpecker_oauth_impl()` β€” creates Woodpecker OAuth2 app (thin wrapper). `_create_chat_oauth_impl()` β€” creates disinto-chat OAuth2 app, writes `CHAT_OAUTH_CLIENT_ID`/`CHAT_OAUTH_CLIENT_SECRET` to `.env` (#708). `_generate_woodpecker_token_impl()` β€” auto-generates WOODPECKER_TOKEN via OAuth2 flow. `_activate_woodpecker_repo_impl()` β€” activates repo in Woodpecker. All gated by `_load_ci_context()` which validates required env vars. | bin/disinto (init) | -| `lib/generators.sh` | Template generation for `disinto init`: `generate_compose()` β€” docker-compose.yml (uses `codeberg.org/forgejo/forgejo:11.0` tag; adds `security_opt: [apparmor:unconfined]` to all services for rootless container compatibility; Forgejo includes a healthcheck so dependent services use `condition: service_healthy` β€” fixes cold-start races, #665; adds `chat` service block with isolated `chat-config` named volume; all `depends_on` now use `condition: service_healthy/started` instead of bare service names), `generate_caddyfile()` β€” Caddyfile (routes: `/forge/*` β†’ forgejo:3000, `/woodpecker/*` β†’ woodpecker:8000, `/staging/*` β†’ staging:80, `/chat/*` β†’ chat:8080; root `/` redirects to `/forge/`), `generate_staging_index()` β€” staging index, `generate_deploy_pipelines()` β€” Woodpecker deployment pipeline configs. Requires `FACTORY_ROOT`, `PROJECT_NAME`, `PRIMARY_BRANCH`. 
| bin/disinto (init) | +| `lib/generators.sh` | Template generation for `disinto init`: `generate_compose()` β€” docker-compose.yml (uses `codeberg.org/forgejo/forgejo:11.0` tag; adds `security_opt: [apparmor:unconfined]` to all services for rootless container compatibility; Forgejo includes a healthcheck so dependent services use `condition: service_healthy` β€” fixes cold-start races, #665; adds `chat` service block with isolated `chat-config` named volume and `CHAT_HISTORY_DIR` bind-mount for per-user NDJSON history persistence (#710); injects `FORWARD_AUTH_SECRET` for Caddy↔chat defense-in-depth auth (#709); cost-cap env vars `CHAT_MAX_REQUESTS_PER_HOUR`, `CHAT_MAX_REQUESTS_PER_DAY`, `CHAT_MAX_TOKENS_PER_DAY` (#711); subdomain fallback comment for `EDGE_TUNNEL_FQDN_*` vars (#713); all `depends_on` now use `condition: service_healthy/started` instead of bare service names), `generate_caddyfile()` β€” Caddyfile (routes: `/forge/*` β†’ forgejo:3000, `/woodpecker/*` β†’ woodpecker:8000, `/staging/*` β†’ staging:80; `/chat/login` and `/chat/oauth/callback` bypass `forward_auth` so unauthenticated users can reach the OAuth flow; `/chat/*` gated by `forward_auth` on `chat:8080/chat/auth/verify` which stamps `X-Forwarded-User` (#709); root `/` redirects to `/forge/`), `generate_staging_index()` β€” staging index, `generate_deploy_pipelines()` β€” Woodpecker deployment pipeline configs. Requires `FACTORY_ROOT`, `PROJECT_NAME`, `PRIMARY_BRANCH`. | bin/disinto (init) | | `lib/hire-agent.sh` | `disinto_hire_an_agent()` β€” user creation, `.profile` repo setup, formula copying, branch protection, and state marker creation for hiring a new agent. Requires `FORGE_URL`, `FORGE_TOKEN`, `FACTORY_ROOT`, `PROJECT_NAME`. Extracted from `bin/disinto`. | bin/disinto (hire) | | `lib/release.sh` | `disinto_release()` β€” vault TOML creation, branch setup on ops repo, PR creation, and auto-merge request for a versioned release. `_assert_release_globals()` validates required env vars. 
Requires `FORGE_URL`, `FORGE_TOKEN`, `FORGE_OPS_REPO`, `FACTORY_ROOT`, `PRIMARY_BRANCH`. Extracted from `bin/disinto`. | bin/disinto (release) | diff --git a/planner/AGENTS.md b/planner/AGENTS.md index a32d67f..e2652e6 100644 --- a/planner/AGENTS.md +++ b/planner/AGENTS.md @@ -1,4 +1,4 @@ - + # Planner Agent **Role**: Strategic planning using a Prerequisite Tree (Theory of Constraints), diff --git a/predictor/AGENTS.md b/predictor/AGENTS.md index 6a762bc..99e87c6 100644 --- a/predictor/AGENTS.md +++ b/predictor/AGENTS.md @@ -1,4 +1,4 @@ - + # Predictor Agent **Role**: Abstract adversary (the "goblin"). Runs a 2-step formula diff --git a/review/AGENTS.md b/review/AGENTS.md index 68d674c..1771f06 100644 --- a/review/AGENTS.md +++ b/review/AGENTS.md @@ -1,4 +1,4 @@ - + # Review Agent **Role**: AI-powered PR review β€” post structured findings and formal diff --git a/supervisor/AGENTS.md b/supervisor/AGENTS.md index 1936621..205470e 100644 --- a/supervisor/AGENTS.md +++ b/supervisor/AGENTS.md @@ -1,4 +1,4 @@ - + # Supervisor Agent **Role**: Health monitoring and auto-remediation, executed as a formula-driven From e275c35fa88a0c6ae2ecd64fbae4cab46f657db7 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 04:41:12 +0000 Subject: [PATCH 3/7] =?UTF-8?q?fix:=20bug:=20architect=20close-vision=20li?= =?UTF-8?q?fecycle=20matches=20unrelated=20sub-issues=20=E2=80=94=20spams?= =?UTF-8?q?=20false=20completion=20comments=20(#735)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Claude Opus 4.6 (1M context) --- architect/architect-run.sh | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/architect/architect-run.sh b/architect/architect-run.sh index d51960c..9e0045d 100755 --- a/architect/architect-run.sh +++ b/architect/architect-run.sh @@ -441,6 +441,7 @@ get_vision_subissues() { fi # Method 2: Find issues referenced in merged sprint PR bodies + # Only consider PRs whose 
title or body references this specific vision issue local prs_json prs_json=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \ "${FORGE_API_BASE}/repos/${FORGE_OPS_REPO}/pulls?state=closed&limit=100" 2>/dev/null) || true @@ -449,8 +450,7 @@ get_vision_subissues() { while IFS= read -r pr_num; do [ -z "$pr_num" ] && continue - # Check if PR is merged and references the vision issue - local pr_details pr_body + local pr_details pr_body pr_title pr_details=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \ "${FORGE_API_BASE}/repos/${FORGE_OPS_REPO}/pulls/${pr_num}" 2>/dev/null) || continue @@ -461,11 +461,19 @@ get_vision_subissues() { continue fi + pr_title=$(printf '%s' "$pr_details" | jq -r '.title // ""') || continue pr_body=$(printf '%s' "$pr_details" | jq -r '.body // ""') || continue - # Extract all issue numbers from PR body + # Only process PRs that reference this specific vision issue + if ! printf '%s\n%s' "$pr_title" "$pr_body" | grep -qE "#${vision_issue}([^0-9]|$)"; then + continue + fi + + # Extract issue numbers from PR body, excluding the vision issue itself while IFS= read -r ref_issue; do [ -z "$ref_issue" ] && continue + # Skip the vision issue itself + [ "$ref_issue" = "$vision_issue" ] && continue # Skip if already in list local found=false for existing in "${subissues[@]+"${subissues[@]}"}"; do @@ -518,6 +526,17 @@ all_subissues_closed() { # Args: vision_issue_number close_vision_issue() { local vision_issue="$1" + + # Idempotency guard: check if a completion comment already exists + local existing_comments + existing_comments=$(curl -sf -H "Authorization: token ${FORGE_TOKEN}" \ + "${FORGE_API}/issues/${vision_issue}/comments" 2>/dev/null) || existing_comments="[]" + + if printf '%s' "$existing_comments" | jq -e '[.[] | select(.body | contains("Vision Issue Completed"))] | length > 0' >/dev/null 2>&1; then + log "Vision issue #${vision_issue} already has a completion comment β€” skipping" + return 0 + fi + local subissues 
subissues=$(get_vision_subissues "$vision_issue") From 0c4f00a86cb787b0d31ed0b2922bb88f55a0a7c9 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 05:19:57 +0000 Subject: [PATCH 4/7] chore: gardener housekeeping 2026-04-12 --- AGENTS.md | 2 +- architect/AGENTS.md | 11 +++++---- dev/AGENTS.md | 2 +- gardener/AGENTS.md | 2 +- gardener/pending-actions.json | 43 ++++++++++++++++++++++++++++++++++- lib/AGENTS.md | 2 +- planner/AGENTS.md | 2 +- predictor/AGENTS.md | 2 +- review/AGENTS.md | 2 +- supervisor/AGENTS.md | 2 +- 10 files changed, 56 insertions(+), 14 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 5320432..14986ab 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,4 +1,4 @@ - + # Disinto β€” Agent Instructions ## What this repo is diff --git a/architect/AGENTS.md b/architect/AGENTS.md index ebf38eb..4f04c8d 100644 --- a/architect/AGENTS.md +++ b/architect/AGENTS.md @@ -1,4 +1,4 @@ - + # Architect β€” Agent Instructions ## What this agent is @@ -67,11 +67,12 @@ REJECT review β†’ close PR + journal (model processes rejection, bash merges PR) Vision issues decompose into sprint sub-issues tracked via "Decomposed from #N" in sub-issue bodies. The architect automatically closes vision issues when all sub-issues are closed: 1. Before picking new vision issues, the architect checks each open vision issue -2. For each, it queries for sub-issues with "Decomposed from #N" in their body (regardless of state) -3. If all sub-issues are closed, it posts a summary comment listing completed sub-issues -4. The vision issue is then closed automatically +2. For each, it queries merged sprint PRs β€” **only PRs whose title or body reference the specific vision issue** (matched via `#N` pattern, filtering out unrelated PRs that happen to close unrelated issues) (#735/#736) +3. Extracts sub-issue numbers from those PRs, excluding the vision issue itself +4. 
If all sub-issues are closed, posts a summary comment listing completed sub-issues (with an idempotency guard: skips if a "Vision Issue Completed" comment already exists) +5. The vision issue is then closed automatically -This ensures vision issues transition from `open` β†’ `closed` once their work is complete, without manual intervention. +This ensures vision issues transition from `open` β†’ `closed` once their work is complete, without manual intervention. The #N-scoped matching prevents false positives where unrelated sub-issues would incorrectly trigger vision issue closure. ### Session management diff --git a/dev/AGENTS.md b/dev/AGENTS.md index a870d1d..6f9e14b 100644 --- a/dev/AGENTS.md +++ b/dev/AGENTS.md @@ -1,4 +1,4 @@ - + # Dev Agent **Role**: Implement issues autonomously β€” write code, push branches, address diff --git a/gardener/AGENTS.md b/gardener/AGENTS.md index 9f1627c..1a04fe3 100644 --- a/gardener/AGENTS.md +++ b/gardener/AGENTS.md @@ -1,4 +1,4 @@ - + # Gardener Agent **Role**: Backlog grooming β€” detect duplicate issues, missing acceptance diff --git a/gardener/pending-actions.json b/gardener/pending-actions.json index fe51488..07da9d8 100644 --- a/gardener/pending-actions.json +++ b/gardener/pending-actions.json @@ -1 +1,42 @@ -[] +[ + { + "action": "edit_body", + "issue": 737, + "body": "Flagged by AI reviewer in PR #736.\n\n## Problem\n\n`close_vision_issue()` posts a completion comment and then closes the issue in two separate API calls. The close calls (lines 587-597 of `architect/architect-run.sh`) both use `|| true`, so a failed `state=closed` PATCH is silently ignored.\n\nThe idempotency guard added in PR #736 checks for the completion comment before proceeding. 
If the comment was posted successfully in a prior run but the close PATCH failed, the guard will now return 0 on all subsequent runs without retrying the close β€” the vision issue stays open indefinitely with a misleading \"Vision Issue Completed\" comment.\n\n## Fix\n\nAfter posting the comment, verify the issue is actually closed (check `.state` in the PATCH response or re-fetch). If the close fails, return 1 so the next polling cycle retries. The idempotency guard should also verify the issue state, not just the comment presence.\n\n---\n*Auto-created from AI review*\n\n## Acceptance criteria\n\n- [ ] After posting the completion comment, `close_vision_issue()` verifies `.state == \"closed\"` in the PATCH response or re-fetches the issue; if close fails, returns 1\n- [ ] The idempotency guard checks both comment presence AND `.state == \"closed\"` before returning 0; if the comment exists but the issue is still open, attempts the close again\n- [ ] Running `close_vision_issue` twice on a stuck-open issue (comment posted but state still open) retries the close and does not post a duplicate comment\n\n## Affected files\n\n- `architect/architect-run.sh` β€” `close_vision_issue()` function (~lines 527-600)\n" + }, + { + "action": "add_label", + "issue": 737, + "label": "backlog" + }, + { + "action": "edit_body", + "issue": 712, + "body": "## Goal\n\nLet `disinto-chat` perform scoped write actions against the factory β€” specifically: trigger a Woodpecker CI run, create a Forgejo issue, create a Forgejo PR β€” via explicit backend endpoints. The UI surfaces these as buttons the user clicks from a chat turn that proposes an action. 
The model never holds API tokens directly.\n\n## Why\n\n- #623 lists these escalations as the difference between \"chat that talks about the project\" and \"chat that moves the project forward\".\n- Routing through explicit backend endpoints (instead of giving the sandboxed claude process API tokens) keeps the trust model tight: the *user* authorises each action, not the model.\n\n## Scope\n\n### Files to touch\n\n- `docker/chat/server.{py,go}` β€” new authenticated endpoints (reuse #708 / #709 session check):\n - `POST /chat/action/ci-run` β€” body `{repo, branch}` β†’ calls Woodpecker API with `WOODPECKER_TOKEN` (already in `.env` from existing factory setup) to trigger a pipeline.\n - `POST /chat/action/issue-create` β€” body `{title, body, labels}` β†’ calls Forgejo API `/repos///issues` with `FORGE_TOKEN`.\n - `POST /chat/action/pr-create` β€” body `{head, base, title, body}` β†’ calls `/repos///pulls`.\n - All actions record to #710's NDJSON history as `{role: \"action\", ...}` lines.\n- `docker/chat/ui/index.html` β€” small HTMX pattern: when claude's response contains a marker like `{...}`, render a clickable button below the message; clicking POSTs to `/chat/action/` with the payload.\n- `lib/generators.sh` chat env: pass `WOODPECKER_TOKEN`, `FORGE_TOKEN`, `FORGE_URL`, `FORGE_OWNER`, `FORGE_REPO`.\n\n### Out of scope\n\n- Destructive actions (branch delete, force push, secret rotation) β€” deliberately excluded.\n- Multi-step workflows / approval chains.\n- Arbitrary code execution in the chat container (that is what the agents exist for).\n\n## Acceptance\n\n- [ ] A chat turn that emits an `{...}` block renders a button; clicking it creates an issue on Forgejo, visible via the API.\n- [ ] CI-trigger action creates a Woodpecker pipeline that can be seen in the CI UI.\n- [ ] PR-create action produces a Forgejo PR with the specified head / base.\n- [ ] All three actions are logged into the #710 history file with role `action` and the response from the API 
call.\n- [ ] Unauthenticated requests to `/chat/action/*` return 401 (inherits #708 gate).\n\n## Depends on\n\n- #708 (OAuth gate β€” actions are authorised by the logged-in user).\n- #710 (history β€” actions need to be logged alongside chat turns).\n\n## Notes\n\n- Forgejo API auth: the factory's `FORGE_TOKEN` is a long-lived admin token. For MVP, reuse it; a follow-up issue can scope it down to per-user Forgejo tokens derived from the OAuth flow.\n- Woodpecker API is at `http://woodpecker:8000/api/...`, reachable via the compose network β€” no need to go through the edge container.\n- The `` marker is deliberately simple markup the model can emit in its response text. Do not implement tool-calling protocol; do not spin up an MCP server.\n\n## Boundaries for dev-agent\n\n- Do not give the claude subprocess direct API tokens. The chat backend holds them; the model only emits action markers the user clicks.\n- Do not add destructive actions (delete, force-push). Additive only.\n- Do not invent a new markup format beyond `{JSON}`.\n- Parent vision: #623.\n\n## Affected files\n\n- `docker/chat/server.py` (or `server.go`) β€” new `/chat/action/ci-run`, `/chat/action/issue-create`, `/chat/action/pr-create` endpoints\n- `docker/chat/ui/index.html` β€” action button rendering from `{...}` markers\n- `lib/generators.sh` β€” chat service env block: pass `WOODPECKER_TOKEN`, `FORGE_TOKEN`, `FORGE_URL`, `FORGE_OWNER`, `FORGE_REPO`\n" + }, + { + "action": "remove_label", + "issue": 712, + "label": "blocked" + }, + { + "action": "add_label", + "issue": 712, + "label": "backlog" + }, + { + "action": "edit_body", + "issue": 707, + "body": "## Goal\n\nGive `disinto-chat` its own Claude identity mount so its OAuth refresh races cannot corrupt the factory agents' shared `~/.claude` credentials. 
Default to a separate `~/.claude-chat/` on the host; support `ANTHROPIC_API_KEY` as a fallback that skips OAuth entirely.\n\n## Why\n\n- #623 root-caused this: Claude Code's internal refresh lock in `~/.claude.lock` operates outside bind-mounted directories, so two containers sharing `~/.claude` can race during token refresh and invalidate each other. The factory has already had OAuth expiry incidents traced to multiple agents sharing credentials.\n- Scoping chat to its own identity dir means chat can be logged in as a different Anthropic account, or pinned to an API key, without touching agent credentials.\n\n## Scope\n\n### Files to touch\n\n- `lib/generators.sh` chat service block (from #705):\n - Replace the throwaway named volume with `${CHAT_CLAUDE_DIR:-${HOME}/.claude-chat}:/home/chat/.claude-chat`.\n - Env: `CLAUDE_CONFIG_DIR=/home/chat/.claude-chat/config`, `CLAUDE_CREDENTIALS_DIR=/home/chat/.claude-chat/config/credentials`.\n - Conditional: if `ANTHROPIC_API_KEY` is set in `.env`, pass it through and **do not** mount `~/.claude-chat` at all (no credentials on disk in that mode).\n- `bin/disinto disinto_init()` β€” after #620's admin password prompt, add an optional prompt: `Use separate Anthropic identity for chat? (y/N)`. On yes, create `~/.claude-chat/` and invoke `claude login` in a subshell with `CLAUDE_CONFIG_DIR=~/.claude-chat/config`.\n- `lib/claude-config.sh` β€” factor out the existing `~/.claude` setup logic so a non-default `CLAUDE_CONFIG_DIR` is a first-class parameter. 
If it is already parameterised, just document it; if not, extract a helper `setup_claude_dir ` and have the existing path call it with the default dir.\n- `docker/chat/Dockerfile` β€” declare `VOLUME /home/chat/.claude-chat`, set owner to the non-root chat user introduced in #706.\n\n### Out of scope\n\n- Cross-session lock coherence for multiple concurrent chat containers (single-chat-container assumption is fine for MVP).\n- Anthropic team / workspace support β€” single identity is enough.\n\n## Acceptance\n\n- [ ] Fresh `disinto init` with \"use separate chat identity\" answered yes creates `~/.claude-chat/` and logs in successfully.\n- [ ] With `ANTHROPIC_API_KEY=sk-ant-...` set in `.env`, chat starts without any `~/.claude-chat` mount (verified via `docker inspect disinto-chat`) and successfully completes a test prompt.\n- [ ] Running the factory agents AND chat simultaneously for 24h does not produce any OAuth refresh failures on either side (manual soak test β€” document result in PR).\n- [ ] `CLAUDE_CONFIG_DIR` and `CLAUDE_CREDENTIALS_DIR` inside the chat container resolve to `/home/chat/.claude-chat/config*`, not the shared factory path.\n\n## Depends on\n\n- #705 (chat scaffold).\n- #620 (admin password prompt β€” same init flow this adds a step to).\n\n## Notes\n\n- The factory's existing shared mount is `/var/lib/disinto/claude-shared` (see `lib/generators.sh:113,327,381,426`). Chat must NOT use this path.\n- `flock(\"${HOME}/.claude/session.lock\")` logic mentioned in #623 is load-bearing, not redundant β€” do not \"simplify\" it.\n- Prefer the API-key path for anyone running the factory on shared hardware; call this out in README updates.\n\n## Boundaries for dev-agent\n\n- Do not try to make chat share `~/.claude` with the agents \"just for convenience\". The whole point of this chunk is the opposite.\n- Do not add a third claude config dir. 
One for agents, one for chat, done.\n- Do not refactor `lib/claude-config.sh` beyond extracting a parameterised helper if needed.\n- Parent vision: #623.\n\n## Affected files\n\n- `lib/generators.sh` β€” chat service block: replace throwaway named volume with `${CHAT_CLAUDE_DIR:-${HOME}/.claude-chat}` bind mount; add `CLAUDE_CONFIG_DIR`/`CLAUDE_CREDENTIALS_DIR` env vars; skip mount when `ANTHROPIC_API_KEY` is set\n- `bin/disinto` β€” `disinto_init()`: add optional prompt for separate Anthropic identity for chat\n- `lib/claude-config.sh` β€” extract parameterized `setup_claude_dir ` helper\n- `docker/chat/Dockerfile` β€” declare `VOLUME /home/chat/.claude-chat`, set owner to non-root chat user\n" + }, + { + "action": "remove_label", + "issue": 707, + "label": "blocked" + }, + { + "action": "add_label", + "issue": 707, + "label": "backlog" + } +] diff --git a/lib/AGENTS.md b/lib/AGENTS.md index b4b32b5..83ea9e2 100644 --- a/lib/AGENTS.md +++ b/lib/AGENTS.md @@ -1,4 +1,4 @@ - + # Shared Helpers (`lib/`) All agents source `lib/env.sh` as their first action. Additional helpers are diff --git a/planner/AGENTS.md b/planner/AGENTS.md index e2652e6..7d3bf32 100644 --- a/planner/AGENTS.md +++ b/planner/AGENTS.md @@ -1,4 +1,4 @@ - + # Planner Agent **Role**: Strategic planning using a Prerequisite Tree (Theory of Constraints), diff --git a/predictor/AGENTS.md b/predictor/AGENTS.md index 99e87c6..f6cf3ca 100644 --- a/predictor/AGENTS.md +++ b/predictor/AGENTS.md @@ -1,4 +1,4 @@ - + # Predictor Agent **Role**: Abstract adversary (the "goblin"). 
Runs a 2-step formula diff --git a/review/AGENTS.md b/review/AGENTS.md index 1771f06..b12508e 100644 --- a/review/AGENTS.md +++ b/review/AGENTS.md @@ -1,4 +1,4 @@ - + # Review Agent **Role**: AI-powered PR review β€” post structured findings and formal diff --git a/supervisor/AGENTS.md b/supervisor/AGENTS.md index 205470e..7c24f29 100644 --- a/supervisor/AGENTS.md +++ b/supervisor/AGENTS.md @@ -1,4 +1,4 @@ - + # Supervisor Agent **Role**: Health monitoring and auto-remediation, executed as a formula-driven From 7ca2764fa46c8416a68cc06a2a0b6164cd2fac67 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 05:53:20 +0000 Subject: [PATCH 5/7] fix: agent-smoke.sh - add lib/env.sh as extra source for ci-debug.sh --- .woodpecker/agent-smoke.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.woodpecker/agent-smoke.sh b/.woodpecker/agent-smoke.sh index 86ee756..f963414 100644 --- a/.woodpecker/agent-smoke.sh +++ b/.woodpecker/agent-smoke.sh @@ -222,7 +222,7 @@ check_script lib/issue-lifecycle.sh lib/secret-scan.sh # Standalone lib scripts (not sourced by agents; run directly or as services). # Still checked for function resolution against LIB_FUNS + own definitions. -check_script lib/ci-debug.sh +check_script lib/ci-debug.sh lib/env.sh check_script lib/parse-deps.sh # Agent scripts β€” list cross-sourced files where function scope flows across files. 
From 2605d8afbae4ffbb520933b0d5cfd335f3ca2fd2 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 03:56:07 +0000 Subject: [PATCH 6/7] fix: vision(#623): disinto-chat escalation tools (CI run, issue create, PR create) (#712) --- docker/chat/server.py | 264 ++++++++++++++++++++++++++++++++++++++ docker/chat/ui/index.html | 184 +++++++++++++++++++++++++- lib/generators.sh | 6 + 3 files changed, 453 insertions(+), 1 deletion(-) diff --git a/docker/chat/server.py b/docker/chat/server.py index ad8897d..42e7cf8 100644 --- a/docker/chat/server.py +++ b/docker/chat/server.py @@ -30,6 +30,8 @@ import secrets import subprocess import sys import time +import urllib.request +import urllib.error from http.server import HTTPServer, BaseHTTPRequestHandler from urllib.parse import urlparse, parse_qs, urlencode @@ -57,6 +59,14 @@ CHAT_MAX_REQUESTS_PER_HOUR = int(os.environ.get("CHAT_MAX_REQUESTS_PER_HOUR", 60 CHAT_MAX_REQUESTS_PER_DAY = int(os.environ.get("CHAT_MAX_REQUESTS_PER_DAY", 500)) CHAT_MAX_TOKENS_PER_DAY = int(os.environ.get("CHAT_MAX_TOKENS_PER_DAY", 1000000)) +# Action endpoints configuration (#712) +WOODPECKER_TOKEN = os.environ.get("WOODPECKER_TOKEN", "") +WOODPECKER_URL = os.environ.get("WOODPECKER_URL", "http://woodpecker:8000") +FORGE_TOKEN = os.environ.get("FORGE_TOKEN", "") +FORGE_URL = os.environ.get("FORGE_URL", "http://forgejo:3000") +FORGE_OWNER = os.environ.get("FORGE_OWNER", "") +FORGE_REPO = os.environ.get("FORGE_REPO", "") + # Allowed users - disinto-admin always allowed; CSV allowlist extends it _allowed_csv = os.environ.get("DISINTO_CHAT_ALLOWED_USERS", "") ALLOWED_USERS = {"disinto-admin"} @@ -423,6 +433,181 @@ def _delete_conversation(user, conv_id): return False +# ============================================================================= +# Action Endpoints (#712) +# ============================================================================= + +def _write_action_record(user, conv_id, action_type, payload, response_data): + """Write an 
action record to the conversation history.""" + record = { + "ts": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), + "user": user, + "role": "action", + "action_type": action_type, + "payload": payload, + "response": response_data, + } + conv_path = _get_conversation_path(user, conv_id) + _ensure_user_dir(user) + with open(conv_path, "a", encoding="utf-8") as f: + f.write(json.dumps(record, ensure_ascii=False) + "\n") + + +def _trigger_woodpecker_pipeline(repo, branch): + """Trigger a Woodpecker CI pipeline for the given repo and branch. + + Woodpecker API: POST /api/v1/repos/{owner}/{repo}/pipeline + Returns dict with success status and response data. + """ + if not WOODPECKER_TOKEN: + return {"success": False, "error": "WOODPECKER_TOKEN not configured"} + + if not FORGE_OWNER or not FORGE_REPO: + return {"success": False, "error": "FORGE_OWNER and FORGE_REPO not configured"} + + try: + url = f"{WOODPECKER_URL}/api/v1/repos/{FORGE_OWNER}/{FORGE_REPO}/pipeline" + data = json.dumps({"branch": branch, "event": "push"}).encode("utf-8") + + req = urllib.request.Request( + url, + data=data, + headers={ + "Authorization": f"token {WOODPECKER_TOKEN}", + "Content-Type": "application/json", + "Accept": "application/json", + }, + method="POST", + ) + + with urllib.request.urlopen(req, timeout=30) as resp: + result = json.loads(resp.read().decode()) + return {"success": True, "data": result} + + except urllib.error.HTTPError as e: + try: + body = json.loads(e.read().decode()) + return {"success": False, "error": str(e.reason), "details": body} + except (json.JSONDecodeError, UnicodeDecodeError): + return {"success": False, "error": str(e.reason)} + except urllib.error.URLError as e: + return {"success": False, "error": f"Network error: {e.reason}"} + except json.JSONDecodeError as e: + return {"success": False, "error": f"Invalid JSON response: {e}"} + except Exception as e: + return {"success": False, "error": str(e)} + + +def _create_forgejo_issue(title, body, 
labels=None): + """Create a Forgejo issue. + + Forgejo API: POST /api/v1/repos/{owner}/{repo}/issues + Returns dict with success status and response data. + """ + if not FORGE_TOKEN: + return {"success": False, "error": "FORGE_TOKEN not configured"} + + if not FORGE_OWNER or not FORGE_REPO: + return {"success": False, "error": "FORGE_OWNER and FORGE_REPO not configured"} + + if not title: + return {"success": False, "error": "Title is required"} + + try: + url = f"{FORGE_URL}/api/v1/repos/{FORGE_OWNER}/{FORGE_REPO}/issues" + payload = { + "title": title, + "body": body or "", + } + if labels: + payload["labels"] = labels + + data = json.dumps(payload).encode("utf-8") + + req = urllib.request.Request( + url, + data=data, + headers={ + "Authorization": f"token {FORGE_TOKEN}", + "Content-Type": "application/json", + "Accept": "application/json", + }, + method="POST", + ) + + with urllib.request.urlopen(req, timeout=30) as resp: + result = json.loads(resp.read().decode()) + return {"success": True, "data": result} + + except urllib.error.HTTPError as e: + try: + body = json.loads(e.read().decode()) + return {"success": False, "error": str(e.reason), "details": body} + except (json.JSONDecodeError, UnicodeDecodeError): + return {"success": False, "error": str(e.reason)} + except urllib.error.URLError as e: + return {"success": False, "error": f"Network error: {e.reason}"} + except json.JSONDecodeError as e: + return {"success": False, "error": f"Invalid JSON response: {e}"} + except Exception as e: + return {"success": False, "error": str(e)} + + +def _create_forgejo_pull_request(head, base, title, body=None): + """Create a Forgejo pull request. + + Forgejo API: POST /api/v1/repos/{owner}/{repo}/pulls + Returns dict with success status and response data. 
+ """ + if not FORGE_TOKEN: + return {"success": False, "error": "FORGE_TOKEN not configured"} + + if not FORGE_OWNER or not FORGE_REPO: + return {"success": False, "error": "FORGE_OWNER and FORGE_REPO not configured"} + + if not head or not base or not title: + return {"success": False, "error": "head, base, and title are required"} + + try: + url = f"{FORGE_URL}/api/v1/repos/{FORGE_OWNER}/{FORGE_REPO}/pulls" + payload = { + "head": head, + "base": base, + "title": title, + "body": body or "", + } + + data = json.dumps(payload).encode("utf-8") + + req = urllib.request.Request( + url, + data=data, + headers={ + "Authorization": f"token {FORGE_TOKEN}", + "Content-Type": "application/json", + "Accept": "application/json", + }, + method="POST", + ) + + with urllib.request.urlopen(req, timeout=30) as resp: + result = json.loads(resp.read().decode()) + return {"success": True, "data": result} + + except urllib.error.HTTPError as e: + try: + body = json.loads(e.read().decode()) + return {"success": False, "error": str(e.reason), "details": body} + except (json.JSONDecodeError, UnicodeDecodeError): + return {"success": False, "error": str(e.reason)} + except urllib.error.URLError as e: + return {"success": False, "error": f"Network error: {e.reason}"} + except json.JSONDecodeError as e: + return {"success": False, "error": f"Invalid JSON response: {e}"} + except Exception as e: + return {"success": False, "error": str(e)} + + class ChatHandler(BaseHTTPRequestHandler): """HTTP request handler for disinto-chat with Forgejo OAuth.""" @@ -549,6 +734,16 @@ class ChatHandler(BaseHTTPRequestHandler): parsed = urlparse(self.path) path = parsed.path + # Action endpoints (#712) + if path in ("/chat/action/ci-run", "/chat/action/issue-create", "/chat/action/pr-create"): + user = self._require_session() + if not user: + return + if not self._check_forwarded_user(user): + return + self.handle_action(user, path) + return + # New conversation endpoint (session required) if path == 
"/chat/new": user = self._require_session() @@ -901,6 +1096,75 @@ class ChatHandler(BaseHTTPRequestHandler): self.end_headers() self.wfile.write(json.dumps({"conversation_id": conv_id}, ensure_ascii=False).encode("utf-8")) + def handle_action(self, user, path): + """Handle action requests (ci-run, issue-create, pr-create).""" + # Determine action type from path + action_type = path.replace("/chat/action/", "") + + # Read request body + content_length = int(self.headers.get("Content-Length", 0)) + if content_length == 0: + self.send_error_page(400, "No request body provided") + return + + body = self.rfile.read(content_length) + try: + request_data = json.loads(body.decode("utf-8")) + except json.JSONDecodeError: + self.send_error_page(400, "Invalid JSON in request body") + return + + # Get conversation ID from request or session + conv_id = request_data.get("conversation_id") + if not conv_id or not _validate_conversation_id(conv_id): + # Fall back to session-based conversation if available + # For now, we'll use a default or generate one + conv_id = request_data.get("conversation_id") + if not conv_id: + self.send_error_page(400, "conversation_id is required") + return + + # Route to appropriate handler + if action_type == "ci-run": + repo = request_data.get("repo") + branch = request_data.get("branch") + if not repo or not branch: + self.send_error_page(400, "repo and branch are required for ci-run") + return + payload = {"repo": repo, "branch": branch} + result = _trigger_woodpecker_pipeline(repo, branch) + elif action_type == "issue-create": + title = request_data.get("title") + body_text = request_data.get("body", "") + labels = request_data.get("labels", []) + if not title: + self.send_error_page(400, "title is required for issue-create") + return + payload = {"title": title, "body": body_text, "labels": labels} + result = _create_forgejo_issue(title, body_text, labels) + elif action_type == "pr-create": + head = request_data.get("head") + base = 
request_data.get("base") + title = request_data.get("title") + body_text = request_data.get("body", "") + if not head or not base or not title: + self.send_error_page(400, "head, base, and title are required for pr-create") + return + payload = {"head": head, "base": base, "title": title, "body": body_text} + result = _create_forgejo_pull_request(head, base, title, body_text) + else: + self.send_error_page(404, f"Unknown action type: {action_type}") + return + + # Log the action to history + _write_action_record(user, conv_id, action_type, payload, result) + + # Send response + self.send_response(200) + self.send_header("Content-Type", "application/json; charset=utf-8") + self.end_headers() + self.wfile.write(json.dumps(result, ensure_ascii=False).encode("utf-8")) + def do_DELETE(self): """Handle DELETE requests.""" parsed = urlparse(self.path) diff --git a/docker/chat/ui/index.html b/docker/chat/ui/index.html index bd920f9..a11e632 100644 --- a/docker/chat/ui/index.html +++ b/docker/chat/ui/index.html @@ -161,6 +161,56 @@ white-space: pre-wrap; word-wrap: break-word; } + /* Action button container */ + .action-buttons { + margin-top: 0.75rem; + display: flex; + flex-wrap: wrap; + gap: 0.5rem; + } + .action-btn { + background: #0f3460; + border: 1px solid #e94560; + color: #e94560; + padding: 0.5rem 1rem; + border-radius: 6px; + font-size: 0.875rem; + font-weight: 600; + cursor: pointer; + transition: all 0.2s; + display: inline-flex; + align-items: center; + gap: 0.5rem; + } + .action-btn:hover { + background: #e94560; + color: white; + } + .action-btn:disabled { + opacity: 0.5; + cursor: not-allowed; + } + .action-btn .spinner { + width: 14px; + height: 14px; + border: 2px solid currentColor; + border-top-color: transparent; + border-radius: 50%; + animation: spin 0.8s linear infinite; + } + @keyframes spin { + to { transform: rotate(360deg); } + } + .action-btn.success { + background: #1a1a2e; + border-color: #4ade80; + color: #4ade80; + } + .action-btn.error { 
+ background: #1a1a2e; + border-color: #f87171; + color: #f87171; + } .input-area { display: flex; gap: 0.5rem; @@ -404,11 +454,28 @@ function addMessage(role, content, streaming = false) { const msgDiv = document.createElement('div'); msgDiv.className = `message ${role}`; + + // Parse action markers if this is an assistant message + let contentHtml = escapeHtml(content); + let actions = []; + + if (role === 'assistant' && !streaming) { + const parsed = parseActionMarkers(content, messagesDiv.children.length); + contentHtml = parsed.html; + actions = parsed.actions; + } + msgDiv.innerHTML = `
<div class="role">${role}</div>
-            <div class="content">${escapeHtml(content)}</div>
+            <div class="content">${contentHtml}</div>
`; messagesDiv.appendChild(msgDiv); + + // Render action buttons for assistant messages + if (actions.length > 0) { + renderActionButtons(msgDiv, actions, messagesDiv.children.length - 1); + } + messagesDiv.scrollTop = messagesDiv.scrollHeight; return msgDiv.querySelector('.content'); } @@ -430,6 +497,121 @@ return div.innerHTML.replace(/\n/g, '
'); } + // Action buttons state - track pending actions by message index + const pendingActions = new Map(); + + // Parse action markers from content and return HTML with action buttons + function parseActionMarkers(content, messageIndex) { + const actionPattern = /(.*?)<\/action>/gs; + const hasActions = actionPattern.test(content); + + if (!hasActions) { + return { html: escapeHtml(content), actions: [] }; + } + + // Reset pending actions for this message + pendingActions.set(messageIndex, []); + + let html = content; + const actions = []; + + // Replace action markers with placeholders and collect actions + html = html.replace(actionPattern, (match, type, jsonStr) => { + try { + const action = JSON.parse(jsonStr); + actions.push({ type, payload: action, id: `${messageIndex}-${actions.length}` }); + // Replace with placeholder that will be rendered as button + return `
`; + } catch (e) { + // If JSON parsing fails, keep the original marker + return match; + } + }); + + // Convert newlines to
<br> for HTML output
+        html = html.replace(/\n/g, '<br>
'); + + return { html, actions }; + } + + // Render action buttons for a message + function renderActionButtons(msgDiv, actions, messageIndex) { + if (actions.length === 0) return; + + const buttonsDiv = document.createElement('div'); + buttonsDiv.className = 'action-buttons'; + + actions.forEach(action => { + const btn = document.createElement('button'); + btn.className = 'action-btn'; + btn.dataset.actionId = action.id; + btn.dataset.messageIndex = messageIndex; + + let btnText = 'Execute'; + let icon = ''; + + switch (action.type) { + case 'ci-run': + icon = 'πŸš€'; + btnText = `Run CI for ${action.payload.branch || 'default'}`; + break; + case 'issue-create': + icon = 'πŸ“'; + btnText = `Create Issue: ${action.payload.title ? action.payload.title.substring(0, 30) + (action.payload.title.length > 30 ? '...' : '') : 'New Issue'}`; + break; + case 'pr-create': + icon = 'πŸ”€'; + btnText = `Create PR: ${action.payload.title ? action.payload.title.substring(0, 30) + (action.payload.title.length > 30 ? '...' 
: '') : 'New PR'}`; + break; + default: + btnText = `Execute ${action.type}`; + } + + btn.innerHTML = `${icon}${btnText}`; + + btn.addEventListener('click', () => executeAction(btn, action)); + buttonsDiv.appendChild(btn); + }); + + msgDiv.appendChild(buttonsDiv); + } + + // Execute an action + async function executeAction(btn, action) { + const messageIndex = btn.dataset.messageIndex; + const actionId = btn.dataset.actionId; + + // Disable button and show loading state + btn.disabled = true; + btn.innerHTML = ' Executing...'; + + try { + const response = await fetch(`/chat/action/${action.type}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + ...action.payload, + conversation_id: currentConversationId, + }), + }); + + const result = await response.json(); + + if (result.success) { + btn.className = 'action-btn success'; + btn.innerHTML = 'βœ“ Executed successfully'; + } else { + btn.className = 'action-btn error'; + btn.innerHTML = `βœ— Error: ${result.error || 'Unknown error'}`; + } + } catch (error) { + btn.className = 'action-btn error'; + btn.innerHTML = `βœ— Error: ${error.message}`; + } + } + // Send message handler async function sendMessage() { const message = textarea.value.trim(); diff --git a/lib/generators.sh b/lib/generators.sh index 72f030e..9ad0f2a 100644 --- a/lib/generators.sh +++ b/lib/generators.sh @@ -518,6 +518,12 @@ services: CHAT_MAX_REQUESTS_PER_HOUR: ${CHAT_MAX_REQUESTS_PER_HOUR:-60} CHAT_MAX_REQUESTS_PER_DAY: ${CHAT_MAX_REQUESTS_PER_DAY:-500} CHAT_MAX_TOKENS_PER_DAY: ${CHAT_MAX_TOKENS_PER_DAY:-1000000} + # Action endpoints (#712) + WOODPECKER_TOKEN: ${WOODPECKER_TOKEN:-} + WOODPECKER_URL: http://woodpecker:8000 + FORGE_TOKEN: ${FORGE_TOKEN:-} + FORGE_OWNER: ${FORGE_OWNER:-} + FORGE_REPO: ${FORGE_REPO:-} networks: - disinto-net From f398b3295277b4d36130077933ed21114ad58537 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 05:53:20 +0000 Subject: [PATCH 7/7] fix: 
agent-smoke.sh - add lib/env.sh as extra source for ci-debug.sh --- .woodpecker/agent-smoke.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.woodpecker/agent-smoke.sh b/.woodpecker/agent-smoke.sh index 86ee756..f963414 100644 --- a/.woodpecker/agent-smoke.sh +++ b/.woodpecker/agent-smoke.sh @@ -222,7 +222,7 @@ check_script lib/issue-lifecycle.sh lib/secret-scan.sh # Standalone lib scripts (not sourced by agents; run directly or as services). # Still checked for function resolution against LIB_FUNS + own definitions. -check_script lib/ci-debug.sh +check_script lib/ci-debug.sh lib/env.sh check_script lib/parse-deps.sh # Agent scripts β€” list cross-sourced files where function scope flows across files.